Fix build on sparc64-linux-gnu.
[official-gcc.git] / gcc / gimple-fold.c
blob5468b604dec58520304daf4488efa67c82eadcd2
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
69 /* Return true when DECL can be referenced from current unit.
70 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
71 We can get declarations that are not possible to reference for various
72 reasons:
74 1) When analyzing C++ virtual tables.
75 C++ virtual tables do have known constructors even
76 when they are keyed to other compilation unit.
77 Those tables can contain pointers to methods and vars
78 in other units. Those methods have both STATIC and EXTERNAL
79 set.
80 2) In WHOPR mode devirtualization might lead to reference
81 to method that was partitioned elsehwere.
82 In this case we have static VAR_DECL or FUNCTION_DECL
83 that has no corresponding callgraph/varpool node
84 declaring the body.
85 3) COMDAT functions referred by external vtables that
86 we devirtualize only during final compilation stage.
87 At this time we already decided that we will not output
88 the function body and thus we can't reference the symbol
89 directly. */
91 static bool
92 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
94 varpool_node *vnode;
95 struct cgraph_node *node;
96 symtab_node *snode;
98 if (DECL_ABSTRACT_P (decl))
99 return false;
101 /* We are concerned only about static/external vars and functions. */
102 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
103 || !VAR_OR_FUNCTION_DECL_P (decl))
104 return true;
106 /* Static objects can be referred only if they was not optimized out yet. */
107 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
109 /* Before we start optimizing unreachable code we can be sure all
110 static objects are defined. */
111 if (symtab->function_flags_ready)
112 return true;
113 snode = symtab_node::get (decl);
114 if (!snode || !snode->definition)
115 return false;
116 node = dyn_cast <cgraph_node *> (snode);
117 return !node || !node->global.inlined_to;
120 /* We will later output the initializer, so we can refer to it.
121 So we are concerned only when DECL comes from initializer of
122 external var or var that has been optimized out. */
123 if (!from_decl
124 || !VAR_P (from_decl)
125 || (!DECL_EXTERNAL (from_decl)
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->definition)
128 || (flag_ltrans
129 && (vnode = varpool_node::get (from_decl)) != NULL
130 && vnode->in_other_partition))
131 return true;
132 /* We are folding reference from external vtable. The vtable may reffer
133 to a symbol keyed to other compilation unit. The other compilation
134 unit may be in separate DSO and the symbol may be hidden. */
135 if (DECL_VISIBILITY_SPECIFIED (decl)
136 && DECL_EXTERNAL (decl)
137 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
138 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
139 return false;
140 /* When function is public, we always can introduce new reference.
141 Exception are the COMDAT functions where introducing a direct
142 reference imply need to include function body in the curren tunit. */
143 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
144 return true;
145 /* We have COMDAT. We are going to check if we still have definition
146 or if the definition is going to be output in other partition.
147 Bypass this when gimplifying; all needed functions will be produced.
149 As observed in PR20991 for already optimized out comdat virtual functions
150 it may be tempting to not necessarily give up because the copy will be
151 output elsewhere when corresponding vtable is output.
152 This is however not possible - ABI specify that COMDATs are output in
153 units where they are used and when the other unit was compiled with LTO
154 it is possible that vtable was kept public while the function itself
155 was privatized. */
156 if (!symtab->function_flags_ready)
157 return true;
159 snode = symtab_node::get (decl);
160 if (!snode
161 || ((!snode->definition || DECL_EXTERNAL (decl))
162 && (!snode->in_other_partition
163 || (!snode->forced_by_abi && !snode->force_output))))
164 return false;
165 node = dyn_cast <cgraph_node *> (snode);
166 return !node || !node->global.inlined_to;
169 /* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
173 tree
174 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
182 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
183 acceptable form for is_gimple_min_invariant.
184 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
186 tree
187 canonicalize_constructor_val (tree cval, tree from_decl)
189 tree orig_cval = cval;
190 STRIP_NOPS (cval);
191 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
192 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
194 tree ptr = TREE_OPERAND (cval, 0);
195 if (is_gimple_min_invariant (ptr))
196 cval = build1_loc (EXPR_LOCATION (cval),
197 ADDR_EXPR, TREE_TYPE (ptr),
198 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
199 ptr,
200 fold_convert (ptr_type_node,
201 TREE_OPERAND (cval, 1))));
203 if (TREE_CODE (cval) == ADDR_EXPR)
205 tree base = NULL_TREE;
206 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
208 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
209 if (base)
210 TREE_OPERAND (cval, 0) = base;
212 else
213 base = get_base_address (TREE_OPERAND (cval, 0));
214 if (!base)
215 return NULL_TREE;
217 if (VAR_OR_FUNCTION_DECL_P (base)
218 && !can_refer_decl_in_current_unit_p (base, from_decl))
219 return NULL_TREE;
220 if (TREE_TYPE (base) == error_mark_node)
221 return NULL_TREE;
222 if (VAR_P (base))
223 TREE_ADDRESSABLE (base) = 1;
224 else if (TREE_CODE (base) == FUNCTION_DECL)
226 /* Make sure we create a cgraph node for functions we'll reference.
227 They can be non-existent if the reference comes from an entry
228 of an external vtable for example. */
229 cgraph_node::get_create (base);
231 /* Fixup types in global initializers. */
232 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
233 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
235 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
236 cval = fold_convert (TREE_TYPE (orig_cval), cval);
237 return cval;
239 if (TREE_OVERFLOW_P (cval))
240 return drop_tree_overflow (cval);
241 return orig_cval;
244 /* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
247 tree
248 get_symbol_constant_value (tree sym)
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
253 if (val)
255 val = canonicalize_constructor_val (unshare_expr (val), sym);
256 if (val && is_gimple_min_invariant (val))
257 return val;
258 else
259 return NULL_TREE;
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
265 && is_gimple_reg_type (TREE_TYPE (sym)))
266 return build_zero_cst (TREE_TYPE (sym));
269 return NULL_TREE;
274 /* Subroutine of fold_stmt. We perform several simplifications of the
275 memory reference tree EXPR and make sure to re-gimplify them properly
276 after propagation of constant addresses. IS_LHS is true if the
277 reference is supposed to be an lvalue. */
279 static tree
280 maybe_fold_reference (tree expr, bool is_lhs)
282 tree result;
284 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285 || TREE_CODE (expr) == REALPART_EXPR
286 || TREE_CODE (expr) == IMAGPART_EXPR)
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_unary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0));
292 else if (TREE_CODE (expr) == BIT_FIELD_REF
293 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294 return fold_ternary_loc (EXPR_LOCATION (expr),
295 TREE_CODE (expr),
296 TREE_TYPE (expr),
297 TREE_OPERAND (expr, 0),
298 TREE_OPERAND (expr, 1),
299 TREE_OPERAND (expr, 2));
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
306 return NULL_TREE;
310 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
311 replacement rhs for the statement or NULL_TREE if no simplification
312 could be made. It is assumed that the operands have been previously
313 folded. */
315 static tree
316 fold_gimple_assign (gimple_stmt_iterator *si)
318 gimple *stmt = gsi_stmt (*si);
319 enum tree_code subcode = gimple_assign_rhs_code (stmt);
320 location_t loc = gimple_location (stmt);
322 tree result = NULL_TREE;
324 switch (get_gimple_rhs_class (subcode))
326 case GIMPLE_SINGLE_RHS:
328 tree rhs = gimple_assign_rhs1 (stmt);
330 if (TREE_CLOBBER_P (rhs))
331 return NULL_TREE;
333 if (REFERENCE_CLASS_P (rhs))
334 return maybe_fold_reference (rhs, false);
336 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
338 tree val = OBJ_TYPE_REF_EXPR (rhs);
339 if (is_gimple_min_invariant (val))
340 return val;
341 else if (flag_devirtualize && virtual_method_call_p (rhs))
343 bool final;
344 vec <cgraph_node *>targets
345 = possible_polymorphic_call_targets (rhs, stmt, &final);
346 if (final && targets.length () <= 1 && dbg_cnt (devirt))
348 if (dump_enabled_p ())
350 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
351 "resolving virtual function address "
352 "reference to function %s\n",
353 targets.length () == 1
354 ? targets[0]->name ()
355 : "NULL");
357 if (targets.length () == 1)
359 val = fold_convert (TREE_TYPE (val),
360 build_fold_addr_expr_loc
361 (loc, targets[0]->decl));
362 STRIP_USELESS_TYPE_CONVERSION (val);
364 else
365 /* We can not use __builtin_unreachable here because it
366 can not have address taken. */
367 val = build_int_cst (TREE_TYPE (val), 0);
368 return val;
373 else if (TREE_CODE (rhs) == ADDR_EXPR)
375 tree ref = TREE_OPERAND (rhs, 0);
376 tree tem = maybe_fold_reference (ref, true);
377 if (tem
378 && TREE_CODE (tem) == MEM_REF
379 && integer_zerop (TREE_OPERAND (tem, 1)))
380 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
381 else if (tem)
382 result = fold_convert (TREE_TYPE (rhs),
383 build_fold_addr_expr_loc (loc, tem));
384 else if (TREE_CODE (ref) == MEM_REF
385 && integer_zerop (TREE_OPERAND (ref, 1)))
386 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
388 if (result)
390 /* Strip away useless type conversions. Both the
391 NON_LVALUE_EXPR that may have been added by fold, and
392 "useless" type conversions that might now be apparent
393 due to propagation. */
394 STRIP_USELESS_TYPE_CONVERSION (result);
396 if (result != rhs && valid_gimple_rhs_p (result))
397 return result;
401 else if (TREE_CODE (rhs) == CONSTRUCTOR
402 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
404 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
405 unsigned i;
406 tree val;
408 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
409 if (! CONSTANT_CLASS_P (val))
410 return NULL_TREE;
412 return build_vector_from_ctor (TREE_TYPE (rhs),
413 CONSTRUCTOR_ELTS (rhs));
416 else if (DECL_P (rhs))
417 return get_symbol_constant_value (rhs);
419 break;
421 case GIMPLE_UNARY_RHS:
422 break;
424 case GIMPLE_BINARY_RHS:
425 break;
427 case GIMPLE_TERNARY_RHS:
428 result = fold_ternary_loc (loc, subcode,
429 TREE_TYPE (gimple_assign_lhs (stmt)),
430 gimple_assign_rhs1 (stmt),
431 gimple_assign_rhs2 (stmt),
432 gimple_assign_rhs3 (stmt));
434 if (result)
436 STRIP_USELESS_TYPE_CONVERSION (result);
437 if (valid_gimple_rhs_p (result))
438 return result;
440 break;
442 case GIMPLE_INVALID_RHS:
443 gcc_unreachable ();
446 return NULL_TREE;
450 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
451 adjusting the replacement stmts location and virtual operands.
452 If the statement has a lhs the last stmt in the sequence is expected
453 to assign to that lhs. */
455 static void
456 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
458 gimple *stmt = gsi_stmt (*si_p);
460 if (gimple_has_location (stmt))
461 annotate_all_with_location (stmts, gimple_location (stmt));
463 /* First iterate over the replacement statements backward, assigning
464 virtual operands to their defining statements. */
465 gimple *laststore = NULL;
466 for (gimple_stmt_iterator i = gsi_last (stmts);
467 !gsi_end_p (i); gsi_prev (&i))
469 gimple *new_stmt = gsi_stmt (i);
470 if ((gimple_assign_single_p (new_stmt)
471 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
472 || (is_gimple_call (new_stmt)
473 && (gimple_call_flags (new_stmt)
474 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
476 tree vdef;
477 if (!laststore)
478 vdef = gimple_vdef (stmt);
479 else
480 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
481 gimple_set_vdef (new_stmt, vdef);
482 if (vdef && TREE_CODE (vdef) == SSA_NAME)
483 SSA_NAME_DEF_STMT (vdef) = new_stmt;
484 laststore = new_stmt;
488 /* Second iterate over the statements forward, assigning virtual
489 operands to their uses. */
490 tree reaching_vuse = gimple_vuse (stmt);
491 for (gimple_stmt_iterator i = gsi_start (stmts);
492 !gsi_end_p (i); gsi_next (&i))
494 gimple *new_stmt = gsi_stmt (i);
495 /* If the new statement possibly has a VUSE, update it with exact SSA
496 name we know will reach this one. */
497 if (gimple_has_mem_ops (new_stmt))
498 gimple_set_vuse (new_stmt, reaching_vuse);
499 gimple_set_modified (new_stmt, true);
500 if (gimple_vdef (new_stmt))
501 reaching_vuse = gimple_vdef (new_stmt);
504 /* If the new sequence does not do a store release the virtual
505 definition of the original statement. */
506 if (reaching_vuse
507 && reaching_vuse == gimple_vuse (stmt))
509 tree vdef = gimple_vdef (stmt);
510 if (vdef
511 && TREE_CODE (vdef) == SSA_NAME)
513 unlink_stmt_vdef (stmt);
514 release_ssa_name (vdef);
518 /* Finally replace the original statement with the sequence. */
519 gsi_replace_with_seq (si_p, stmts, false);
522 /* Convert EXPR into a GIMPLE value suitable for substitution on the
523 RHS of an assignment. Insert the necessary statements before
524 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
525 is replaced. If the call is expected to produces a result, then it
526 is replaced by an assignment of the new RHS to the result variable.
527 If the result is to be ignored, then the call is replaced by a
528 GIMPLE_NOP. A proper VDEF chain is retained by making the first
529 VUSE and the last VDEF of the whole sequence be the same as the replaced
530 statement and using new SSA names for stores in between. */
532 void
533 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
535 tree lhs;
536 gimple *stmt, *new_stmt;
537 gimple_stmt_iterator i;
538 gimple_seq stmts = NULL;
540 stmt = gsi_stmt (*si_p);
542 gcc_assert (is_gimple_call (stmt));
544 push_gimplify_context (gimple_in_ssa_p (cfun));
546 lhs = gimple_call_lhs (stmt);
547 if (lhs == NULL_TREE)
549 gimplify_and_add (expr, &stmts);
550 /* We can end up with folding a memcpy of an empty class assignment
551 which gets optimized away by C++ gimplification. */
552 if (gimple_seq_empty_p (stmts))
554 pop_gimplify_context (NULL);
555 if (gimple_in_ssa_p (cfun))
557 unlink_stmt_vdef (stmt);
558 release_defs (stmt);
560 gsi_replace (si_p, gimple_build_nop (), false);
561 return;
564 else
566 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
567 new_stmt = gimple_build_assign (lhs, tmp);
568 i = gsi_last (stmts);
569 gsi_insert_after_without_update (&i, new_stmt,
570 GSI_CONTINUE_LINKING);
573 pop_gimplify_context (NULL);
575 gsi_replace_with_seq_vops (si_p, stmts);
579 /* Replace the call at *GSI with the gimple value VAL. */
581 void
582 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
584 gimple *stmt = gsi_stmt (*gsi);
585 tree lhs = gimple_call_lhs (stmt);
586 gimple *repl;
587 if (lhs)
589 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
590 val = fold_convert (TREE_TYPE (lhs), val);
591 repl = gimple_build_assign (lhs, val);
593 else
594 repl = gimple_build_nop ();
595 tree vdef = gimple_vdef (stmt);
596 if (vdef && TREE_CODE (vdef) == SSA_NAME)
598 unlink_stmt_vdef (stmt);
599 release_ssa_name (vdef);
601 gsi_replace (gsi, repl, false);
604 /* Replace the call at *GSI with the new call REPL and fold that
605 again. */
607 static void
608 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
610 gimple *stmt = gsi_stmt (*gsi);
611 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
612 gimple_set_location (repl, gimple_location (stmt));
613 if (gimple_vdef (stmt)
614 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
616 gimple_set_vdef (repl, gimple_vdef (stmt));
617 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
619 if (gimple_vuse (stmt))
620 gimple_set_vuse (repl, gimple_vuse (stmt));
621 gsi_replace (gsi, repl, false);
622 fold_stmt (gsi);
625 /* Return true if VAR is a VAR_DECL or a component thereof. */
627 static bool
628 var_decl_component_p (tree var)
630 tree inner = var;
631 while (handled_component_p (inner))
632 inner = TREE_OPERAND (inner, 0);
633 return (DECL_P (inner)
634 || (TREE_CODE (inner) == MEM_REF
635 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
638 /* If the SIZE argument representing the size of an object is in a range
639 of values of which exactly one is valid (and that is zero), return
640 true, otherwise false. */
642 static bool
643 size_must_be_zero_p (tree size)
645 if (integer_zerop (size))
646 return true;
648 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
649 return false;
651 wide_int min, max;
652 enum value_range_kind rtype = get_range_info (size, &min, &max);
653 if (rtype != VR_ANTI_RANGE)
654 return false;
656 tree type = TREE_TYPE (size);
657 int prec = TYPE_PRECISION (type);
659 wide_int wone = wi::one (prec);
661 /* Compute the value of SSIZE_MAX, the largest positive value that
662 can be stored in ssize_t, the signed counterpart of size_t. */
663 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
665 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
668 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
669 diagnose (otherwise undefined) overlapping copies without preventing
670 folding. When folded, GCC guarantees that overlapping memcpy has
671 the same semantics as memmove. Call to the library memcpy need not
672 provide the same guarantee. Return false if no simplification can
673 be made. */
675 static bool
676 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
677 tree dest, tree src, int endp)
679 gimple *stmt = gsi_stmt (*gsi);
680 tree lhs = gimple_call_lhs (stmt);
681 tree len = gimple_call_arg (stmt, 2);
682 tree destvar, srcvar;
683 location_t loc = gimple_location (stmt);
685 bool nowarn = gimple_no_warning_p (stmt);
687 /* If the LEN parameter is a constant zero or in range where
688 the only valid value is zero, return DEST. */
689 if (size_must_be_zero_p (len))
691 gimple *repl;
692 if (gimple_call_lhs (stmt))
693 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
694 else
695 repl = gimple_build_nop ();
696 tree vdef = gimple_vdef (stmt);
697 if (vdef && TREE_CODE (vdef) == SSA_NAME)
699 unlink_stmt_vdef (stmt);
700 release_ssa_name (vdef);
702 gsi_replace (gsi, repl, false);
703 return true;
706 /* If SRC and DEST are the same (and not volatile), return
707 DEST{,+LEN,+LEN-1}. */
708 if (operand_equal_p (src, dest, 0))
710 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
711 It's safe and may even be emitted by GCC itself (see bug
712 32667). */
713 unlink_stmt_vdef (stmt);
714 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
715 release_ssa_name (gimple_vdef (stmt));
716 if (!lhs)
718 gsi_replace (gsi, gimple_build_nop (), false);
719 return true;
721 goto done;
723 else
725 tree srctype, desttype;
726 unsigned int src_align, dest_align;
727 tree off0;
728 const char *tmp_str;
729 unsigned HOST_WIDE_INT tmp_len;
731 /* Build accesses at offset zero with a ref-all character type. */
732 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
733 ptr_mode, true), 0);
735 /* If we can perform the copy efficiently with first doing all loads
736 and then all stores inline it that way. Currently efficiently
737 means that we can load all the memory into a single integer
738 register which is what MOVE_MAX gives us. */
739 src_align = get_pointer_alignment (src);
740 dest_align = get_pointer_alignment (dest);
741 if (tree_fits_uhwi_p (len)
742 && compare_tree_int (len, MOVE_MAX) <= 0
743 /* ??? Don't transform copies from strings with known length this
744 confuses the tree-ssa-strlen.c. This doesn't handle
745 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
746 reason. */
747 && !c_strlen (src, 2)
748 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
749 && memchr (tmp_str, 0, tmp_len) == NULL))
751 unsigned ilen = tree_to_uhwi (len);
752 if (pow2p_hwi (ilen))
754 /* Detect invalid bounds and overlapping copies and issue
755 either -Warray-bounds or -Wrestrict. */
756 if (!nowarn
757 && check_bounds_or_overlap (as_a <gcall *>(stmt),
758 dest, src, len, len))
759 gimple_set_no_warning (stmt, true);
761 scalar_int_mode mode;
762 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
763 if (type
764 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
765 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
766 /* If the destination pointer is not aligned we must be able
767 to emit an unaligned store. */
768 && (dest_align >= GET_MODE_ALIGNMENT (mode)
769 || !targetm.slow_unaligned_access (mode, dest_align)
770 || (optab_handler (movmisalign_optab, mode)
771 != CODE_FOR_nothing)))
773 tree srctype = type;
774 tree desttype = type;
775 if (src_align < GET_MODE_ALIGNMENT (mode))
776 srctype = build_aligned_type (type, src_align);
777 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
778 tree tem = fold_const_aggregate_ref (srcmem);
779 if (tem)
780 srcmem = tem;
781 else if (src_align < GET_MODE_ALIGNMENT (mode)
782 && targetm.slow_unaligned_access (mode, src_align)
783 && (optab_handler (movmisalign_optab, mode)
784 == CODE_FOR_nothing))
785 srcmem = NULL_TREE;
786 if (srcmem)
788 gimple *new_stmt;
789 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
791 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
792 srcmem
793 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
794 new_stmt);
795 gimple_assign_set_lhs (new_stmt, srcmem);
796 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
797 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
799 if (dest_align < GET_MODE_ALIGNMENT (mode))
800 desttype = build_aligned_type (type, dest_align);
801 new_stmt
802 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
803 dest, off0),
804 srcmem);
805 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
806 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
807 if (gimple_vdef (new_stmt)
808 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
809 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
810 if (!lhs)
812 gsi_replace (gsi, new_stmt, false);
813 return true;
815 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
816 goto done;
822 if (endp == 3)
824 /* Both DEST and SRC must be pointer types.
825 ??? This is what old code did. Is the testing for pointer types
826 really mandatory?
828 If either SRC is readonly or length is 1, we can use memcpy. */
829 if (!dest_align || !src_align)
830 return false;
831 if (readonly_data_expr (src)
832 || (tree_fits_uhwi_p (len)
833 && (MIN (src_align, dest_align) / BITS_PER_UNIT
834 >= tree_to_uhwi (len))))
836 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
837 if (!fn)
838 return false;
839 gimple_call_set_fndecl (stmt, fn);
840 gimple_call_set_arg (stmt, 0, dest);
841 gimple_call_set_arg (stmt, 1, src);
842 fold_stmt (gsi);
843 return true;
846 /* If *src and *dest can't overlap, optimize into memcpy as well. */
847 if (TREE_CODE (src) == ADDR_EXPR
848 && TREE_CODE (dest) == ADDR_EXPR)
850 tree src_base, dest_base, fn;
851 poly_int64 src_offset = 0, dest_offset = 0;
852 poly_uint64 maxsize;
854 srcvar = TREE_OPERAND (src, 0);
855 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
856 if (src_base == NULL)
857 src_base = srcvar;
858 destvar = TREE_OPERAND (dest, 0);
859 dest_base = get_addr_base_and_unit_offset (destvar,
860 &dest_offset);
861 if (dest_base == NULL)
862 dest_base = destvar;
863 if (!poly_int_tree_p (len, &maxsize))
864 maxsize = -1;
865 if (SSA_VAR_P (src_base)
866 && SSA_VAR_P (dest_base))
868 if (operand_equal_p (src_base, dest_base, 0)
869 && ranges_maybe_overlap_p (src_offset, maxsize,
870 dest_offset, maxsize))
871 return false;
873 else if (TREE_CODE (src_base) == MEM_REF
874 && TREE_CODE (dest_base) == MEM_REF)
876 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
877 TREE_OPERAND (dest_base, 0), 0))
878 return false;
879 poly_offset_int full_src_offset
880 = mem_ref_offset (src_base) + src_offset;
881 poly_offset_int full_dest_offset
882 = mem_ref_offset (dest_base) + dest_offset;
883 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
884 full_dest_offset, maxsize))
885 return false;
887 else
888 return false;
890 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
891 if (!fn)
892 return false;
893 gimple_call_set_fndecl (stmt, fn);
894 gimple_call_set_arg (stmt, 0, dest);
895 gimple_call_set_arg (stmt, 1, src);
896 fold_stmt (gsi);
897 return true;
900 /* If the destination and source do not alias optimize into
901 memcpy as well. */
902 if ((is_gimple_min_invariant (dest)
903 || TREE_CODE (dest) == SSA_NAME)
904 && (is_gimple_min_invariant (src)
905 || TREE_CODE (src) == SSA_NAME))
907 ao_ref destr, srcr;
908 ao_ref_init_from_ptr_and_size (&destr, dest, len);
909 ao_ref_init_from_ptr_and_size (&srcr, src, len);
910 if (!refs_may_alias_p_1 (&destr, &srcr, false))
912 tree fn;
913 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
914 if (!fn)
915 return false;
916 gimple_call_set_fndecl (stmt, fn);
917 gimple_call_set_arg (stmt, 0, dest);
918 gimple_call_set_arg (stmt, 1, src);
919 fold_stmt (gsi);
920 return true;
924 return false;
927 if (!tree_fits_shwi_p (len))
928 return false;
929 if (!POINTER_TYPE_P (TREE_TYPE (src))
930 || !POINTER_TYPE_P (TREE_TYPE (dest)))
931 return false;
932 /* In the following try to find a type that is most natural to be
933 used for the memcpy source and destination and that allows
934 the most optimization when memcpy is turned into a plain assignment
935 using that type. In theory we could always use a char[len] type
936 but that only gains us that the destination and source possibly
937 no longer will have their address taken. */
938 srctype = TREE_TYPE (TREE_TYPE (src));
939 if (TREE_CODE (srctype) == ARRAY_TYPE
940 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
941 srctype = TREE_TYPE (srctype);
942 desttype = TREE_TYPE (TREE_TYPE (dest));
943 if (TREE_CODE (desttype) == ARRAY_TYPE
944 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
945 desttype = TREE_TYPE (desttype);
946 if (TREE_ADDRESSABLE (srctype)
947 || TREE_ADDRESSABLE (desttype))
948 return false;
950 /* Make sure we are not copying using a floating-point mode or
951 a type whose size possibly does not match its precision. */
952 if (FLOAT_MODE_P (TYPE_MODE (desttype))
953 || TREE_CODE (desttype) == BOOLEAN_TYPE
954 || TREE_CODE (desttype) == ENUMERAL_TYPE)
955 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
956 if (FLOAT_MODE_P (TYPE_MODE (srctype))
957 || TREE_CODE (srctype) == BOOLEAN_TYPE
958 || TREE_CODE (srctype) == ENUMERAL_TYPE)
959 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
960 if (!srctype)
961 srctype = desttype;
962 if (!desttype)
963 desttype = srctype;
964 if (!srctype)
965 return false;
967 src_align = get_pointer_alignment (src);
968 dest_align = get_pointer_alignment (dest);
969 if (dest_align < TYPE_ALIGN (desttype)
970 || src_align < TYPE_ALIGN (srctype))
971 return false;
973 destvar = NULL_TREE;
974 if (TREE_CODE (dest) == ADDR_EXPR
975 && var_decl_component_p (TREE_OPERAND (dest, 0))
976 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
977 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
979 srcvar = NULL_TREE;
980 if (TREE_CODE (src) == ADDR_EXPR
981 && var_decl_component_p (TREE_OPERAND (src, 0))
982 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
984 if (!destvar
985 || src_align >= TYPE_ALIGN (desttype))
986 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
987 src, off0);
988 else if (!STRICT_ALIGNMENT)
990 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
991 src_align);
992 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
996 if (srcvar == NULL_TREE && destvar == NULL_TREE)
997 return false;
999 if (srcvar == NULL_TREE)
1001 if (src_align >= TYPE_ALIGN (desttype))
1002 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1003 else
1005 if (STRICT_ALIGNMENT)
1006 return false;
1007 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1008 src_align);
1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1012 else if (destvar == NULL_TREE)
1014 if (dest_align >= TYPE_ALIGN (srctype))
1015 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1016 else
1018 if (STRICT_ALIGNMENT)
1019 return false;
1020 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1021 dest_align);
1022 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1026 /* Detect invalid bounds and overlapping copies and issue either
1027 -Warray-bounds or -Wrestrict. */
1028 if (!nowarn)
1029 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1031 gimple *new_stmt;
1032 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1034 tree tem = fold_const_aggregate_ref (srcvar);
1035 if (tem)
1036 srcvar = tem;
1037 if (! is_gimple_min_invariant (srcvar))
1039 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1040 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1041 new_stmt);
1042 gimple_assign_set_lhs (new_stmt, srcvar);
1043 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1044 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1046 new_stmt = gimple_build_assign (destvar, srcvar);
1047 goto set_vop_and_replace;
1050 /* We get an aggregate copy. Use an unsigned char[] type to
1051 perform the copying to preserve padding and to avoid any issues
1052 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1053 desttype = build_array_type_nelts (unsigned_char_type_node,
1054 tree_to_uhwi (len));
1055 srctype = desttype;
1056 if (src_align > TYPE_ALIGN (srctype))
1057 srctype = build_aligned_type (srctype, src_align);
1058 if (dest_align > TYPE_ALIGN (desttype))
1059 desttype = build_aligned_type (desttype, dest_align);
1060 new_stmt
1061 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1062 fold_build2 (MEM_REF, srctype, src, off0));
1063 set_vop_and_replace:
1064 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1065 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1066 if (gimple_vdef (new_stmt)
1067 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1068 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1069 if (!lhs)
1071 gsi_replace (gsi, new_stmt, false);
1072 return true;
1074 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1077 done:
1078 gimple_seq stmts = NULL;
1079 if (endp == 0 || endp == 3)
1080 len = NULL_TREE;
1081 else if (endp == 2)
1082 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1083 ssize_int (1));
1084 if (endp == 2 || endp == 1)
1086 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1087 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1088 TREE_TYPE (dest), dest, len);
1091 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1092 gimple *repl = gimple_build_assign (lhs, dest);
1093 gsi_replace (gsi, repl, false);
1094 return true;
1097 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1098 to built-in memcmp (a, b, len). */
1100 static bool
1101 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1103 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1105 if (!fn)
1106 return false;
1108 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1110 gimple *stmt = gsi_stmt (*gsi);
1111 tree a = gimple_call_arg (stmt, 0);
1112 tree b = gimple_call_arg (stmt, 1);
1113 tree len = gimple_call_arg (stmt, 2);
1115 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1116 replace_call_with_call_and_fold (gsi, repl);
1118 return true;
1121 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1122 to built-in memmove (dest, src, len). */
1124 static bool
1125 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1127 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1129 if (!fn)
1130 return false;
1132 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1133 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1134 len) into memmove (dest, src, len). */
1136 gimple *stmt = gsi_stmt (*gsi);
1137 tree src = gimple_call_arg (stmt, 0);
1138 tree dest = gimple_call_arg (stmt, 1);
1139 tree len = gimple_call_arg (stmt, 2);
1141 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1142 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1143 replace_call_with_call_and_fold (gsi, repl);
1145 return true;
1148 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1149 to built-in memset (dest, 0, len). */
1151 static bool
1152 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1154 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1156 if (!fn)
1157 return false;
1159 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1161 gimple *stmt = gsi_stmt (*gsi);
1162 tree dest = gimple_call_arg (stmt, 0);
1163 tree len = gimple_call_arg (stmt, 1);
1165 gimple_seq seq = NULL;
1166 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1167 gimple_seq_add_stmt_without_update (&seq, repl);
1168 gsi_replace_with_seq_vops (gsi, seq);
1169 fold_stmt (gsi);
1171 return true;
1174 /* Fold function call to builtin memset or bzero at *GSI setting the
1175 memory of size LEN to VAL. Return whether a simplification was made. */
1177 static bool
1178 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1180 gimple *stmt = gsi_stmt (*gsi);
1181 tree etype;
1182 unsigned HOST_WIDE_INT length, cval;
1184 /* If the LEN parameter is zero, return DEST. */
1185 if (integer_zerop (len))
1187 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1188 return true;
1191 if (! tree_fits_uhwi_p (len))
1192 return false;
1194 if (TREE_CODE (c) != INTEGER_CST)
1195 return false;
1197 tree dest = gimple_call_arg (stmt, 0);
1198 tree var = dest;
1199 if (TREE_CODE (var) != ADDR_EXPR)
1200 return false;
1202 var = TREE_OPERAND (var, 0);
1203 if (TREE_THIS_VOLATILE (var))
1204 return false;
1206 etype = TREE_TYPE (var);
1207 if (TREE_CODE (etype) == ARRAY_TYPE)
1208 etype = TREE_TYPE (etype);
1210 if (!INTEGRAL_TYPE_P (etype)
1211 && !POINTER_TYPE_P (etype))
1212 return NULL_TREE;
1214 if (! var_decl_component_p (var))
1215 return NULL_TREE;
1217 length = tree_to_uhwi (len);
1218 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1219 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1220 return NULL_TREE;
1222 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1223 return NULL_TREE;
1225 if (integer_zerop (c))
1226 cval = 0;
1227 else
1229 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1230 return NULL_TREE;
1232 cval = TREE_INT_CST_LOW (c);
1233 cval &= 0xff;
1234 cval |= cval << 8;
1235 cval |= cval << 16;
1236 cval |= (cval << 31) << 1;
1239 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1240 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1241 gimple_set_vuse (store, gimple_vuse (stmt));
1242 tree vdef = gimple_vdef (stmt);
1243 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1245 gimple_set_vdef (store, gimple_vdef (stmt));
1246 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1248 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1249 if (gimple_call_lhs (stmt))
1251 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1252 gsi_replace (gsi, asgn, false);
1254 else
1256 gimple_stmt_iterator gsi2 = *gsi;
1257 gsi_prev (gsi);
1258 gsi_remove (&gsi2, true);
1261 return true;
1265 /* Obtain the minimum and maximum string length or minimum and maximum
1266 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1267 If ARG is an SSA name variable, follow its use-def chains. When
1268 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1269 if we are unable to determine the length or value, return false.
1270 VISITED is a bitmap of visited variables.
1271 TYPE is 0 if string length should be obtained, 1 for maximum string
1272 length and 2 for maximum value ARG can have.
1273 When FUZZY is non-zero and the length of a string cannot be determined,
1274 the function instead considers as the maximum possible length the
1275 size of a character array it may refer to. If FUZZY is 2, it will handle
1276 PHIs and COND_EXPRs optimistically, if we can determine string length
1277 minimum and maximum, it will use the minimum from the ones where it
1278 can be determined.
1279 Set *FLEXP to true if the range of the string lengths has been
1280 obtained from the upper bound of an array at the end of a struct.
1281 Such an array may hold a string that's longer than its upper bound
1282 due to it being used as a poor-man's flexible array member.
1283 Pass NONSTR through to children.
1284 ELTSIZE is 1 for normal single byte character strings, and 2 or
1285 4 for wide characer strings. ELTSIZE is by default 1. */
1287 static bool
1288 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1289 int fuzzy, bool *flexp, unsigned eltsize, tree *nonstr)
1291 tree var, val = NULL_TREE;
1292 gimple *def_stmt;
1294 /* The minimum and maximum length. */
1295 tree *const minlen = length;
1296 tree *const maxlen = length + 1;
1298 if (TREE_CODE (arg) != SSA_NAME)
1300 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1301 if (TREE_CODE (arg) == ADDR_EXPR
1302 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1304 tree op = TREE_OPERAND (arg, 0);
1305 if (integer_zerop (TREE_OPERAND (op, 1)))
1307 tree aop0 = TREE_OPERAND (op, 0);
1308 if (TREE_CODE (aop0) == INDIRECT_REF
1309 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1310 return get_range_strlen (TREE_OPERAND (aop0, 0), length,
1311 visited, type, fuzzy, flexp,
1312 eltsize, nonstr);
1314 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
1316 /* Fail if an array is the last member of a struct object
1317 since it could be treated as a (fake) flexible array
1318 member. */
1319 tree idx = TREE_OPERAND (op, 1);
1321 arg = TREE_OPERAND (op, 0);
1322 tree optype = TREE_TYPE (arg);
1323 if (tree dom = TYPE_DOMAIN (optype))
1324 if (tree bound = TYPE_MAX_VALUE (dom))
1325 if (TREE_CODE (bound) == INTEGER_CST
1326 && TREE_CODE (idx) == INTEGER_CST
1327 && tree_int_cst_lt (bound, idx))
1328 return false;
1332 if (type == 2)
1334 val = arg;
1335 if (TREE_CODE (val) != INTEGER_CST
1336 || tree_int_cst_sgn (val) < 0)
1337 return false;
1339 else
1341 c_strlen_data data;
1342 memset (&data, 0, sizeof (c_strlen_data));
1343 val = c_strlen (arg, 1, &data, eltsize);
1345 /* If we potentially had a non-terminated string, then
1346 bubble that information up to the caller. */
1347 if (!val && data.decl)
1349 *nonstr = data.decl;
1350 *minlen = data.len;
1351 *maxlen = data.len;
1352 return type == 0 ? false : true;
1356 if (!val && fuzzy)
1358 if (TREE_CODE (arg) == ADDR_EXPR)
1359 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1360 visited, type, fuzzy, flexp,
1361 eltsize, nonstr);
1363 if (TREE_CODE (arg) == ARRAY_REF)
1365 tree type = TREE_TYPE (TREE_OPERAND (arg, 0));
1367 /* Determine the "innermost" array type. */
1368 while (TREE_CODE (type) == ARRAY_TYPE
1369 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1370 type = TREE_TYPE (type);
1372 /* Avoid arrays of pointers. */
1373 tree eltype = TREE_TYPE (type);
1374 if (TREE_CODE (type) != ARRAY_TYPE
1375 || !INTEGRAL_TYPE_P (eltype))
1376 return false;
1378 val = TYPE_SIZE_UNIT (type);
1379 if (!val || integer_zerop (val))
1380 return false;
1382 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1383 integer_one_node);
1384 /* Set the minimum size to zero since the string in
1385 the array could have zero length. */
1386 *minlen = ssize_int (0);
1388 if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
1389 && type == TREE_TYPE (TREE_OPERAND (arg, 0))
1390 && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
1391 *flexp = true;
1393 else if (TREE_CODE (arg) == COMPONENT_REF
1394 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1395 == ARRAY_TYPE))
1397 /* Use the type of the member array to determine the upper
1398 bound on the length of the array. This may be overly
1399 optimistic if the array itself isn't NUL-terminated and
1400 the caller relies on the subsequent member to contain
1401 the NUL but that would only be considered valid if
1402 the array were the last member of a struct.
1403 Set *FLEXP to true if the array whose bound is being
1404 used is at the end of a struct. */
1405 if (array_at_struct_end_p (arg))
1406 *flexp = true;
1408 arg = TREE_OPERAND (arg, 1);
1410 tree type = TREE_TYPE (arg);
1412 while (TREE_CODE (type) == ARRAY_TYPE
1413 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1414 type = TREE_TYPE (type);
1416 /* Fail when the array bound is unknown or zero. */
1417 val = TYPE_SIZE_UNIT (type);
1418 if (!val || integer_zerop (val))
1419 return false;
1420 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1421 integer_one_node);
1422 /* Set the minimum size to zero since the string in
1423 the array could have zero length. */
1424 *minlen = ssize_int (0);
1427 if (VAR_P (arg))
1429 tree type = TREE_TYPE (arg);
1430 if (POINTER_TYPE_P (type))
1431 type = TREE_TYPE (type);
1433 if (TREE_CODE (type) == ARRAY_TYPE)
1435 val = TYPE_SIZE_UNIT (type);
1436 if (!val
1437 || TREE_CODE (val) != INTEGER_CST
1438 || integer_zerop (val))
1439 return false;
1440 val = wide_int_to_tree (TREE_TYPE (val),
1441 wi::sub (wi::to_wide (val), 1));
1442 /* Set the minimum size to zero since the string in
1443 the array could have zero length. */
1444 *minlen = ssize_int (0);
1449 if (!val)
1450 return false;
1452 if (!*minlen
1453 || (type > 0
1454 && TREE_CODE (*minlen) == INTEGER_CST
1455 && TREE_CODE (val) == INTEGER_CST
1456 && tree_int_cst_lt (val, *minlen)))
1457 *minlen = val;
1459 if (*maxlen)
1461 if (type > 0)
1463 if (TREE_CODE (*maxlen) != INTEGER_CST
1464 || TREE_CODE (val) != INTEGER_CST)
1465 return false;
1467 if (tree_int_cst_lt (*maxlen, val))
1468 *maxlen = val;
1469 return true;
1471 else if (simple_cst_equal (val, *maxlen) != 1)
1472 return false;
1475 *maxlen = val;
1476 return true;
1479 /* If ARG is registered for SSA update we cannot look at its defining
1480 statement. */
1481 if (name_registered_for_update_p (arg))
1482 return false;
1484 /* If we were already here, break the infinite cycle. */
1485 if (!*visited)
1486 *visited = BITMAP_ALLOC (NULL);
1487 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1488 return true;
1490 var = arg;
1491 def_stmt = SSA_NAME_DEF_STMT (var);
1493 switch (gimple_code (def_stmt))
1495 case GIMPLE_ASSIGN:
1496 /* The RHS of the statement defining VAR must either have a
1497 constant length or come from another SSA_NAME with a constant
1498 length. */
1499 if (gimple_assign_single_p (def_stmt)
1500 || gimple_assign_unary_nop_p (def_stmt))
1502 tree rhs = gimple_assign_rhs1 (def_stmt);
1503 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp,
1504 eltsize, nonstr);
1506 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1508 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1509 gimple_assign_rhs3 (def_stmt) };
1511 for (unsigned int i = 0; i < 2; i++)
1512 if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
1513 flexp, eltsize, nonstr))
1515 if (fuzzy == 2)
1516 *maxlen = build_all_ones_cst (size_type_node);
1517 else
1518 return false;
1520 return true;
1522 return false;
1524 case GIMPLE_PHI:
1525 /* All the arguments of the PHI node must have the same constant
1526 length. */
1527 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1529 tree arg = gimple_phi_arg (def_stmt, i)->def;
1531 /* If this PHI has itself as an argument, we cannot
1532 determine the string length of this argument. However,
1533 if we can find a constant string length for the other
1534 PHI args then we can still be sure that this is a
1535 constant string length. So be optimistic and just
1536 continue with the next argument. */
1537 if (arg == gimple_phi_result (def_stmt))
1538 continue;
1540 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp,
1541 eltsize, nonstr))
1543 if (fuzzy == 2)
1544 *maxlen = build_all_ones_cst (size_type_node);
1545 else
1546 return false;
1549 return true;
1551 default:
1552 return false;
1556 /* Determine the minimum and maximum value or string length that ARG
1557 refers to and store each in the first two elements of MINMAXLEN.
1558 For expressions that point to strings of unknown lengths that are
1559 character arrays, use the upper bound of the array as the maximum
1560 length. For example, given an expression like 'x ? array : "xyz"'
1561 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1562 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1563 stored in array.
1564 Return true if the range of the string lengths has been obtained
1565 from the upper bound of an array at the end of a struct. Such
1566 an array may hold a string that's longer than its upper bound
1567 due to it being used as a poor-man's flexible array member.
1569 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1570 and false if PHIs and COND_EXPRs are to be handled optimistically,
1571 if we can determine string length minimum and maximum; it will use
1572 the minimum from the ones where it can be determined.
1573 STRICT false should be only used for warning code.
1574 When non-null, clear *NONSTR if ARG refers to a constant array
1575 that is known not be nul-terminated. Otherwise set it to
1576 the declaration of the constant non-terminated array.
1578 ELTSIZE is 1 for normal single byte character strings, and 2 or
1579 4 for wide characer strings. ELTSIZE is by default 1. */
1581 bool
1582 get_range_strlen (tree arg, tree minmaxlen[2], unsigned eltsize,
1583 bool strict, tree *nonstr /* = NULL */)
1585 bitmap visited = NULL;
1587 minmaxlen[0] = NULL_TREE;
1588 minmaxlen[1] = NULL_TREE;
1590 tree nonstrbuf;
1591 if (!nonstr)
1592 nonstr = &nonstrbuf;
1593 *nonstr = NULL_TREE;
1595 bool flexarray = false;
1596 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1597 &flexarray, eltsize, nonstr))
1599 minmaxlen[0] = NULL_TREE;
1600 minmaxlen[1] = NULL_TREE;
1603 if (visited)
1604 BITMAP_FREE (visited);
1606 return flexarray;
1609 /* Return the maximum string length for ARG, counting by TYPE
1610 (1, 2 or 4 for normal or wide chars). NONSTR indicates
1611 if the caller is prepared to handle unterminated strings.
1613 If an unterminated string is discovered and our caller handles
1614 unterminated strings, then bubble up the offending DECL and
1615 return the maximum size. Otherwise return NULL. */
1617 tree
1618 get_maxval_strlen (tree arg, int type, tree *nonstr /* = NULL */)
1620 bitmap visited = NULL;
1621 tree len[2] = { NULL_TREE, NULL_TREE };
1623 bool dummy;
1624 /* Set to non-null if ARG refers to an untermianted array. */
1625 tree mynonstr = NULL_TREE;
1626 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy, 1, &mynonstr))
1627 len[1] = NULL_TREE;
1628 if (visited)
1629 BITMAP_FREE (visited);
1631 if (nonstr)
1633 /* For callers prepared to handle unterminated arrays set
1634 *NONSTR to point to the declaration of the array and return
1635 the maximum length/size. */
1636 *nonstr = mynonstr;
1637 return len[1];
1640 /* Fail if the constant array isn't nul-terminated. */
1641 return mynonstr ? NULL_TREE : len[1];
1645 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1646 If LEN is not NULL, it represents the length of the string to be
1647 copied. Return NULL_TREE if no simplification can be made. */
1649 static bool
1650 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1651 tree dest, tree src)
1653 gimple *stmt = gsi_stmt (*gsi);
1654 location_t loc = gimple_location (stmt);
1655 tree fn;
1657 /* If SRC and DEST are the same (and not volatile), return DEST. */
1658 if (operand_equal_p (src, dest, 0))
1660 /* Issue -Wrestrict unless the pointers are null (those do
1661 not point to objects and so do not indicate an overlap;
1662 such calls could be the result of sanitization and jump
1663 threading). */
1664 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1666 tree func = gimple_call_fndecl (stmt);
1668 warning_at (loc, OPT_Wrestrict,
1669 "%qD source argument is the same as destination",
1670 func);
1673 replace_call_with_value (gsi, dest);
1674 return true;
1677 if (optimize_function_for_size_p (cfun))
1678 return false;
1680 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1681 if (!fn)
1682 return false;
1684 /* Set to non-null if ARG refers to an unterminated array. */
1685 tree nonstr = NULL;
1686 tree len = get_maxval_strlen (src, 0, &nonstr);
1688 if (nonstr)
1690 /* Avoid folding calls with unterminated arrays. */
1691 if (!gimple_no_warning_p (stmt))
1692 warn_string_no_nul (loc, "strcpy", src, nonstr);
1693 gimple_set_no_warning (stmt, true);
1694 return false;
1697 if (!len)
1698 return false;
1700 len = fold_convert_loc (loc, size_type_node, len);
1701 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1702 len = force_gimple_operand_gsi (gsi, len, true,
1703 NULL_TREE, true, GSI_SAME_STMT);
1704 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1705 replace_call_with_call_and_fold (gsi, repl);
1706 return true;
1709 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1710 If SLEN is not NULL, it represents the length of the source string.
1711 Return NULL_TREE if no simplification can be made. */
1713 static bool
1714 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1715 tree dest, tree src, tree len)
1717 gimple *stmt = gsi_stmt (*gsi);
1718 location_t loc = gimple_location (stmt);
1719 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1721 /* If the LEN parameter is zero, return DEST. */
1722 if (integer_zerop (len))
1724 /* Avoid warning if the destination refers to a an array/pointer
1725 decorate with attribute nonstring. */
1726 if (!nonstring)
1728 tree fndecl = gimple_call_fndecl (stmt);
1730 /* Warn about the lack of nul termination: the result is not
1731 a (nul-terminated) string. */
1732 tree slen = get_maxval_strlen (src, 0);
1733 if (slen && !integer_zerop (slen))
1734 warning_at (loc, OPT_Wstringop_truncation,
1735 "%G%qD destination unchanged after copying no bytes "
1736 "from a string of length %E",
1737 stmt, fndecl, slen);
1738 else
1739 warning_at (loc, OPT_Wstringop_truncation,
1740 "%G%qD destination unchanged after copying no bytes",
1741 stmt, fndecl);
1744 replace_call_with_value (gsi, dest);
1745 return true;
1748 /* We can't compare slen with len as constants below if len is not a
1749 constant. */
1750 if (TREE_CODE (len) != INTEGER_CST)
1751 return false;
1753 /* Now, we must be passed a constant src ptr parameter. */
1754 tree slen = get_maxval_strlen (src, 0);
1755 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1756 return false;
1758 /* The size of the source string including the terminating nul. */
1759 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1761 /* We do not support simplification of this case, though we do
1762 support it when expanding trees into RTL. */
1763 /* FIXME: generate a call to __builtin_memset. */
1764 if (tree_int_cst_lt (ssize, len))
1765 return false;
1767 /* Diagnose truncation that leaves the copy unterminated. */
1768 maybe_diag_stxncpy_trunc (*gsi, src, len);
1770 /* OK transform into builtin memcpy. */
1771 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1772 if (!fn)
1773 return false;
1775 len = fold_convert_loc (loc, size_type_node, len);
1776 len = force_gimple_operand_gsi (gsi, len, true,
1777 NULL_TREE, true, GSI_SAME_STMT);
1778 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1779 replace_call_with_call_and_fold (gsi, repl);
1781 return true;
1784 /* Fold function call to builtin strchr or strrchr.
1785 If both arguments are constant, evaluate and fold the result,
1786 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1787 In general strlen is significantly faster than strchr
1788 due to being a simpler operation. */
1789 static bool
1790 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1792 gimple *stmt = gsi_stmt (*gsi);
1793 tree str = gimple_call_arg (stmt, 0);
1794 tree c = gimple_call_arg (stmt, 1);
1795 location_t loc = gimple_location (stmt);
1796 const char *p;
1797 char ch;
1799 if (!gimple_call_lhs (stmt))
1800 return false;
1802 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1804 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1806 if (p1 == NULL)
1808 replace_call_with_value (gsi, integer_zero_node);
1809 return true;
1812 tree len = build_int_cst (size_type_node, p1 - p);
1813 gimple_seq stmts = NULL;
1814 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1815 POINTER_PLUS_EXPR, str, len);
1816 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1817 gsi_replace_with_seq_vops (gsi, stmts);
1818 return true;
1821 if (!integer_zerop (c))
1822 return false;
1824 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1825 if (is_strrchr && optimize_function_for_size_p (cfun))
1827 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1829 if (strchr_fn)
1831 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1832 replace_call_with_call_and_fold (gsi, repl);
1833 return true;
1836 return false;
1839 tree len;
1840 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1842 if (!strlen_fn)
1843 return false;
1845 /* Create newstr = strlen (str). */
1846 gimple_seq stmts = NULL;
1847 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1848 gimple_set_location (new_stmt, loc);
1849 len = create_tmp_reg_or_ssa_name (size_type_node);
1850 gimple_call_set_lhs (new_stmt, len);
1851 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1853 /* Create (str p+ strlen (str)). */
1854 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1855 POINTER_PLUS_EXPR, str, len);
1856 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1857 gsi_replace_with_seq_vops (gsi, stmts);
1858 /* gsi now points at the assignment to the lhs, get a
1859 stmt iterator to the strlen.
1860 ??? We can't use gsi_for_stmt as that doesn't work when the
1861 CFG isn't built yet. */
1862 gimple_stmt_iterator gsi2 = *gsi;
1863 gsi_prev (&gsi2);
1864 fold_stmt (&gsi2);
1865 return true;
1868 /* Fold function call to builtin strstr.
1869 If both arguments are constant, evaluate and fold the result,
1870 additionally fold strstr (x, "") into x and strstr (x, "c")
1871 into strchr (x, 'c'). */
1872 static bool
1873 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1875 gimple *stmt = gsi_stmt (*gsi);
1876 tree haystack = gimple_call_arg (stmt, 0);
1877 tree needle = gimple_call_arg (stmt, 1);
1878 const char *p, *q;
1880 if (!gimple_call_lhs (stmt))
1881 return false;
1883 q = c_getstr (needle);
1884 if (q == NULL)
1885 return false;
1887 if ((p = c_getstr (haystack)))
1889 const char *r = strstr (p, q);
1891 if (r == NULL)
1893 replace_call_with_value (gsi, integer_zero_node);
1894 return true;
1897 tree len = build_int_cst (size_type_node, r - p);
1898 gimple_seq stmts = NULL;
1899 gimple *new_stmt
1900 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1901 haystack, len);
1902 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1903 gsi_replace_with_seq_vops (gsi, stmts);
1904 return true;
1907 /* For strstr (x, "") return x. */
1908 if (q[0] == '\0')
1910 replace_call_with_value (gsi, haystack);
1911 return true;
1914 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1915 if (q[1] == '\0')
1917 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1918 if (strchr_fn)
1920 tree c = build_int_cst (integer_type_node, q[0]);
1921 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1922 replace_call_with_call_and_fold (gsi, repl);
1923 return true;
1927 return false;
1930 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1931 to the call.
1933 Return NULL_TREE if no simplification was possible, otherwise return the
1934 simplified form of the call as a tree.
1936 The simplified form may be a constant or other expression which
1937 computes the same value, but in a more efficient manner (including
1938 calls to other builtin functions).
1940 The call may contain arguments which need to be evaluated, but
1941 which are not useful to determine the result of the call. In
1942 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1943 COMPOUND_EXPR will be an argument which must be evaluated.
1944 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1945 COMPOUND_EXPR in the chain will contain the tree for the simplified
1946 form of the builtin function call. */
1948 static bool
1949 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1951 gimple *stmt = gsi_stmt (*gsi);
1952 location_t loc = gimple_location (stmt);
1954 const char *p = c_getstr (src);
1956 /* If the string length is zero, return the dst parameter. */
1957 if (p && *p == '\0')
1959 replace_call_with_value (gsi, dst);
1960 return true;
1963 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1964 return false;
1966 /* See if we can store by pieces into (dst + strlen(dst)). */
1967 tree newdst;
1968 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1969 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1971 if (!strlen_fn || !memcpy_fn)
1972 return false;
1974 /* If the length of the source string isn't computable don't
1975 split strcat into strlen and memcpy. */
1976 tree len = get_maxval_strlen (src, 0);
1977 if (! len)
1978 return false;
1980 /* Create strlen (dst). */
1981 gimple_seq stmts = NULL, stmts2;
1982 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1983 gimple_set_location (repl, loc);
1984 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1985 gimple_call_set_lhs (repl, newdst);
1986 gimple_seq_add_stmt_without_update (&stmts, repl);
1988 /* Create (dst p+ strlen (dst)). */
1989 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1990 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1991 gimple_seq_add_seq_without_update (&stmts, stmts2);
1993 len = fold_convert_loc (loc, size_type_node, len);
1994 len = size_binop_loc (loc, PLUS_EXPR, len,
1995 build_int_cst (size_type_node, 1));
1996 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1997 gimple_seq_add_seq_without_update (&stmts, stmts2);
1999 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2000 gimple_seq_add_stmt_without_update (&stmts, repl);
2001 if (gimple_call_lhs (stmt))
2003 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2004 gimple_seq_add_stmt_without_update (&stmts, repl);
2005 gsi_replace_with_seq_vops (gsi, stmts);
2006 /* gsi now points at the assignment to the lhs, get a
2007 stmt iterator to the memcpy call.
2008 ??? We can't use gsi_for_stmt as that doesn't work when the
2009 CFG isn't built yet. */
2010 gimple_stmt_iterator gsi2 = *gsi;
2011 gsi_prev (&gsi2);
2012 fold_stmt (&gsi2);
2014 else
2016 gsi_replace_with_seq_vops (gsi, stmts);
2017 fold_stmt (gsi);
2019 return true;
2022 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2023 are the arguments to the call. */
2025 static bool
2026 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2028 gimple *stmt = gsi_stmt (*gsi);
2029 tree dest = gimple_call_arg (stmt, 0);
2030 tree src = gimple_call_arg (stmt, 1);
2031 tree size = gimple_call_arg (stmt, 2);
2032 tree fn;
2033 const char *p;
2036 p = c_getstr (src);
2037 /* If the SRC parameter is "", return DEST. */
2038 if (p && *p == '\0')
2040 replace_call_with_value (gsi, dest);
2041 return true;
2044 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2045 return false;
2047 /* If __builtin_strcat_chk is used, assume strcat is available. */
2048 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2049 if (!fn)
2050 return false;
2052 gimple *repl = gimple_build_call (fn, 2, dest, src);
2053 replace_call_with_call_and_fold (gsi, repl);
2054 return true;
2057 /* Simplify a call to the strncat builtin. */
2059 static bool
2060 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2062 gimple *stmt = gsi_stmt (*gsi);
2063 tree dst = gimple_call_arg (stmt, 0);
2064 tree src = gimple_call_arg (stmt, 1);
2065 tree len = gimple_call_arg (stmt, 2);
2067 const char *p = c_getstr (src);
2069 /* If the requested length is zero, or the src parameter string
2070 length is zero, return the dst parameter. */
2071 if (integer_zerop (len) || (p && *p == '\0'))
2073 replace_call_with_value (gsi, dst);
2074 return true;
2077 if (TREE_CODE (len) != INTEGER_CST || !p)
2078 return false;
2080 unsigned srclen = strlen (p);
2082 int cmpsrc = compare_tree_int (len, srclen);
2084 /* Return early if the requested len is less than the string length.
2085 Warnings will be issued elsewhere later. */
2086 if (cmpsrc < 0)
2087 return false;
2089 unsigned HOST_WIDE_INT dstsize;
2091 bool nowarn = gimple_no_warning_p (stmt);
2093 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2095 int cmpdst = compare_tree_int (len, dstsize);
2097 if (cmpdst >= 0)
2099 tree fndecl = gimple_call_fndecl (stmt);
2101 /* Strncat copies (at most) LEN bytes and always appends
2102 the terminating NUL so the specified bound should never
2103 be equal to (or greater than) the size of the destination.
2104 If it is, the copy could overflow. */
2105 location_t loc = gimple_location (stmt);
2106 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2107 cmpdst == 0
2108 ? G_("%G%qD specified bound %E equals "
2109 "destination size")
2110 : G_("%G%qD specified bound %E exceeds "
2111 "destination size %wu"),
2112 stmt, fndecl, len, dstsize);
2113 if (nowarn)
2114 gimple_set_no_warning (stmt, true);
2118 if (!nowarn && cmpsrc == 0)
2120 tree fndecl = gimple_call_fndecl (stmt);
2121 location_t loc = gimple_location (stmt);
2123 /* To avoid possible overflow the specified bound should also
2124 not be equal to the length of the source, even when the size
2125 of the destination is unknown (it's not an uncommon mistake
2126 to specify as the bound to strncpy the length of the source). */
2127 if (warning_at (loc, OPT_Wstringop_overflow_,
2128 "%G%qD specified bound %E equals source length",
2129 stmt, fndecl, len))
2130 gimple_set_no_warning (stmt, true);
2133 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2135 /* If the replacement _DECL isn't initialized, don't do the
2136 transformation. */
2137 if (!fn)
2138 return false;
2140 /* Otherwise, emit a call to strcat. */
2141 gcall *repl = gimple_build_call (fn, 2, dst, src);
2142 replace_call_with_call_and_fold (gsi, repl);
2143 return true;
2146 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2147 LEN, and SIZE. */
2149 static bool
2150 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2152 gimple *stmt = gsi_stmt (*gsi);
2153 tree dest = gimple_call_arg (stmt, 0);
2154 tree src = gimple_call_arg (stmt, 1);
2155 tree len = gimple_call_arg (stmt, 2);
2156 tree size = gimple_call_arg (stmt, 3);
2157 tree fn;
2158 const char *p;
2160 p = c_getstr (src);
2161 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2162 if ((p && *p == '\0')
2163 || integer_zerop (len))
2165 replace_call_with_value (gsi, dest);
2166 return true;
2169 if (! tree_fits_uhwi_p (size))
2170 return false;
2172 if (! integer_all_onesp (size))
2174 tree src_len = c_strlen (src, 1);
2175 if (src_len
2176 && tree_fits_uhwi_p (src_len)
2177 && tree_fits_uhwi_p (len)
2178 && ! tree_int_cst_lt (len, src_len))
2180 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2181 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2182 if (!fn)
2183 return false;
2185 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2186 replace_call_with_call_and_fold (gsi, repl);
2187 return true;
2189 return false;
2192 /* If __builtin_strncat_chk is used, assume strncat is available. */
2193 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2194 if (!fn)
2195 return false;
2197 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2198 replace_call_with_call_and_fold (gsi, repl);
2199 return true;
2202 /* Build and append gimple statements to STMTS that would load a first
2203 character of a memory location identified by STR. LOC is location
2204 of the statement. */
2206 static tree
2207 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2209 tree var;
2211 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2212 tree cst_uchar_ptr_node
2213 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2214 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2216 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2217 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2218 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2220 gimple_assign_set_lhs (stmt, var);
2221 gimple_seq_add_stmt_without_update (stmts, stmt);
2223 return var;
2226 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2227 FCODE is the name of the builtin. */
2229 static bool
2230 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2232 gimple *stmt = gsi_stmt (*gsi);
2233 tree callee = gimple_call_fndecl (stmt);
2234 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2236 tree type = integer_type_node;
2237 tree str1 = gimple_call_arg (stmt, 0);
2238 tree str2 = gimple_call_arg (stmt, 1);
2239 tree lhs = gimple_call_lhs (stmt);
2240 HOST_WIDE_INT length = -1;
2242 /* Handle strncmp and strncasecmp functions. */
2243 if (gimple_call_num_args (stmt) == 3)
2245 tree len = gimple_call_arg (stmt, 2);
2246 if (tree_fits_uhwi_p (len))
2247 length = tree_to_uhwi (len);
2250 /* If the LEN parameter is zero, return zero. */
2251 if (length == 0)
2253 replace_call_with_value (gsi, integer_zero_node);
2254 return true;
2257 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2258 if (operand_equal_p (str1, str2, 0))
2260 replace_call_with_value (gsi, integer_zero_node);
2261 return true;
2264 const char *p1 = c_getstr (str1);
2265 const char *p2 = c_getstr (str2);
2267 /* For known strings, return an immediate value. */
2268 if (p1 && p2)
2270 int r = 0;
2271 bool known_result = false;
2273 switch (fcode)
2275 case BUILT_IN_STRCMP:
2276 case BUILT_IN_STRCMP_EQ:
2278 r = strcmp (p1, p2);
2279 known_result = true;
2280 break;
2282 case BUILT_IN_STRNCMP:
2283 case BUILT_IN_STRNCMP_EQ:
2285 if (length == -1)
2286 break;
2287 r = strncmp (p1, p2, length);
2288 known_result = true;
2289 break;
2291 /* Only handleable situation is where the string are equal (result 0),
2292 which is already handled by operand_equal_p case. */
2293 case BUILT_IN_STRCASECMP:
2294 break;
2295 case BUILT_IN_STRNCASECMP:
2297 if (length == -1)
2298 break;
2299 r = strncmp (p1, p2, length);
2300 if (r == 0)
2301 known_result = true;
2302 break;
2304 default:
2305 gcc_unreachable ();
2308 if (known_result)
2310 replace_call_with_value (gsi, build_cmp_result (type, r));
2311 return true;
2315 bool nonzero_length = length >= 1
2316 || fcode == BUILT_IN_STRCMP
2317 || fcode == BUILT_IN_STRCMP_EQ
2318 || fcode == BUILT_IN_STRCASECMP;
2320 location_t loc = gimple_location (stmt);
2322 /* If the second arg is "", return *(const unsigned char*)arg1. */
2323 if (p2 && *p2 == '\0' && nonzero_length)
2325 gimple_seq stmts = NULL;
2326 tree var = gimple_load_first_char (loc, str1, &stmts);
2327 if (lhs)
2329 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2330 gimple_seq_add_stmt_without_update (&stmts, stmt);
2333 gsi_replace_with_seq_vops (gsi, stmts);
2334 return true;
2337 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2338 if (p1 && *p1 == '\0' && nonzero_length)
2340 gimple_seq stmts = NULL;
2341 tree var = gimple_load_first_char (loc, str2, &stmts);
2343 if (lhs)
2345 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2346 stmt = gimple_build_assign (c, NOP_EXPR, var);
2347 gimple_seq_add_stmt_without_update (&stmts, stmt);
2349 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2350 gimple_seq_add_stmt_without_update (&stmts, stmt);
2353 gsi_replace_with_seq_vops (gsi, stmts);
2354 return true;
2357 /* If len parameter is one, return an expression corresponding to
2358 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2359 if (fcode == BUILT_IN_STRNCMP && length == 1)
2361 gimple_seq stmts = NULL;
2362 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2363 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2365 if (lhs)
2367 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2368 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2369 gimple_seq_add_stmt_without_update (&stmts, convert1);
2371 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2372 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2373 gimple_seq_add_stmt_without_update (&stmts, convert2);
2375 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2376 gimple_seq_add_stmt_without_update (&stmts, stmt);
2379 gsi_replace_with_seq_vops (gsi, stmts);
2380 return true;
2383 /* If length is larger than the length of one constant string,
2384 replace strncmp with corresponding strcmp */
2385 if (fcode == BUILT_IN_STRNCMP
2386 && length > 0
2387 && ((p2 && (size_t) length > strlen (p2))
2388 || (p1 && (size_t) length > strlen (p1))))
2390 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2391 if (!fn)
2392 return false;
2393 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2394 replace_call_with_call_and_fold (gsi, repl);
2395 return true;
2398 return false;
2401 /* Fold a call to the memchr pointed by GSI iterator. */
2403 static bool
2404 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2406 gimple *stmt = gsi_stmt (*gsi);
2407 tree lhs = gimple_call_lhs (stmt);
2408 tree arg1 = gimple_call_arg (stmt, 0);
2409 tree arg2 = gimple_call_arg (stmt, 1);
2410 tree len = gimple_call_arg (stmt, 2);
2412 /* If the LEN parameter is zero, return zero. */
2413 if (integer_zerop (len))
2415 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2416 return true;
2419 char c;
2420 if (TREE_CODE (arg2) != INTEGER_CST
2421 || !tree_fits_uhwi_p (len)
2422 || !target_char_cst_p (arg2, &c))
2423 return false;
2425 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2426 unsigned HOST_WIDE_INT string_length;
2427 const char *p1 = c_getstr (arg1, &string_length);
2429 if (p1)
2431 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2432 if (r == NULL)
2434 if (length <= string_length)
2436 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2437 return true;
2440 else
2442 unsigned HOST_WIDE_INT offset = r - p1;
2443 gimple_seq stmts = NULL;
2444 if (lhs != NULL_TREE)
2446 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2447 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2448 arg1, offset_cst);
2449 gimple_seq_add_stmt_without_update (&stmts, stmt);
2451 else
2452 gimple_seq_add_stmt_without_update (&stmts,
2453 gimple_build_nop ());
2455 gsi_replace_with_seq_vops (gsi, stmts);
2456 return true;
2460 return false;
2463 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2464 to the call. IGNORE is true if the value returned
2465 by the builtin will be ignored. UNLOCKED is true is true if this
2466 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2467 the known length of the string. Return NULL_TREE if no simplification
2468 was possible. */
2470 static bool
2471 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2472 tree arg0, tree arg1,
2473 bool unlocked)
2475 gimple *stmt = gsi_stmt (*gsi);
2477 /* If we're using an unlocked function, assume the other unlocked
2478 functions exist explicitly. */
2479 tree const fn_fputc = (unlocked
2480 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2481 : builtin_decl_implicit (BUILT_IN_FPUTC));
2482 tree const fn_fwrite = (unlocked
2483 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2484 : builtin_decl_implicit (BUILT_IN_FWRITE));
2486 /* If the return value is used, don't do the transformation. */
2487 if (gimple_call_lhs (stmt))
2488 return false;
2490 /* Get the length of the string passed to fputs. If the length
2491 can't be determined, punt. */
2492 tree len = get_maxval_strlen (arg0, 0);
2493 if (!len
2494 || TREE_CODE (len) != INTEGER_CST)
2495 return false;
2497 switch (compare_tree_int (len, 1))
2499 case -1: /* length is 0, delete the call entirely . */
2500 replace_call_with_value (gsi, integer_zero_node);
2501 return true;
2503 case 0: /* length is 1, call fputc. */
2505 const char *p = c_getstr (arg0);
2506 if (p != NULL)
2508 if (!fn_fputc)
2509 return false;
2511 gimple *repl = gimple_build_call (fn_fputc, 2,
2512 build_int_cst
2513 (integer_type_node, p[0]), arg1);
2514 replace_call_with_call_and_fold (gsi, repl);
2515 return true;
2518 /* FALLTHROUGH */
2519 case 1: /* length is greater than 1, call fwrite. */
2521 /* If optimizing for size keep fputs. */
2522 if (optimize_function_for_size_p (cfun))
2523 return false;
2524 /* New argument list transforming fputs(string, stream) to
2525 fwrite(string, 1, len, stream). */
2526 if (!fn_fwrite)
2527 return false;
2529 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2530 size_one_node, len, arg1);
2531 replace_call_with_call_and_fold (gsi, repl);
2532 return true;
2534 default:
2535 gcc_unreachable ();
2537 return false;
2540 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2541 DEST, SRC, LEN, and SIZE are the arguments to the call.
2542 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2543 code of the builtin. If MAXLEN is not NULL, it is maximum length
2544 passed as third argument. */
2546 static bool
2547 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2548 tree dest, tree src, tree len, tree size,
2549 enum built_in_function fcode)
2551 gimple *stmt = gsi_stmt (*gsi);
2552 location_t loc = gimple_location (stmt);
2553 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2554 tree fn;
2556 /* If SRC and DEST are the same (and not volatile), return DEST
2557 (resp. DEST+LEN for __mempcpy_chk). */
2558 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2560 if (fcode != BUILT_IN_MEMPCPY_CHK)
2562 replace_call_with_value (gsi, dest);
2563 return true;
2565 else
2567 gimple_seq stmts = NULL;
2568 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2569 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2570 TREE_TYPE (dest), dest, len);
2571 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2572 replace_call_with_value (gsi, temp);
2573 return true;
2577 if (! tree_fits_uhwi_p (size))
2578 return false;
2580 tree maxlen = get_maxval_strlen (len, 2);
2581 if (! integer_all_onesp (size))
2583 if (! tree_fits_uhwi_p (len))
2585 /* If LEN is not constant, try MAXLEN too.
2586 For MAXLEN only allow optimizing into non-_ocs function
2587 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2588 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2590 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2592 /* (void) __mempcpy_chk () can be optimized into
2593 (void) __memcpy_chk (). */
2594 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2595 if (!fn)
2596 return false;
2598 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2599 replace_call_with_call_and_fold (gsi, repl);
2600 return true;
2602 return false;
2605 else
2606 maxlen = len;
2608 if (tree_int_cst_lt (size, maxlen))
2609 return false;
2612 fn = NULL_TREE;
2613 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2614 mem{cpy,pcpy,move,set} is available. */
2615 switch (fcode)
2617 case BUILT_IN_MEMCPY_CHK:
2618 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2619 break;
2620 case BUILT_IN_MEMPCPY_CHK:
2621 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2622 break;
2623 case BUILT_IN_MEMMOVE_CHK:
2624 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2625 break;
2626 case BUILT_IN_MEMSET_CHK:
2627 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2628 break;
2629 default:
2630 break;
2633 if (!fn)
2634 return false;
2636 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2637 replace_call_with_call_and_fold (gsi, repl);
2638 return true;
2641 /* Fold a call to the __st[rp]cpy_chk builtin.
2642 DEST, SRC, and SIZE are the arguments to the call.
2643 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2644 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2645 strings passed as second argument. */
2647 static bool
2648 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2649 tree dest,
2650 tree src, tree size,
2651 enum built_in_function fcode)
2653 gimple *stmt = gsi_stmt (*gsi);
2654 location_t loc = gimple_location (stmt);
2655 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2656 tree len, fn;
2658 /* If SRC and DEST are the same (and not volatile), return DEST. */
2659 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2661 /* Issue -Wrestrict unless the pointers are null (those do
2662 not point to objects and so do not indicate an overlap;
2663 such calls could be the result of sanitization and jump
2664 threading). */
2665 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2667 tree func = gimple_call_fndecl (stmt);
2669 warning_at (loc, OPT_Wrestrict,
2670 "%qD source argument is the same as destination",
2671 func);
2674 replace_call_with_value (gsi, dest);
2675 return true;
2678 if (! tree_fits_uhwi_p (size))
2679 return false;
2681 tree maxlen = get_maxval_strlen (src, 1);
2682 if (! integer_all_onesp (size))
2684 len = c_strlen (src, 1);
2685 if (! len || ! tree_fits_uhwi_p (len))
2687 /* If LEN is not constant, try MAXLEN too.
2688 For MAXLEN only allow optimizing into non-_ocs function
2689 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2690 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2692 if (fcode == BUILT_IN_STPCPY_CHK)
2694 if (! ignore)
2695 return false;
2697 /* If return value of __stpcpy_chk is ignored,
2698 optimize into __strcpy_chk. */
2699 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2700 if (!fn)
2701 return false;
2703 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2704 replace_call_with_call_and_fold (gsi, repl);
2705 return true;
2708 if (! len || TREE_SIDE_EFFECTS (len))
2709 return false;
2711 /* If c_strlen returned something, but not a constant,
2712 transform __strcpy_chk into __memcpy_chk. */
2713 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2714 if (!fn)
2715 return false;
2717 gimple_seq stmts = NULL;
2718 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2719 len = gimple_convert (&stmts, loc, size_type_node, len);
2720 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2721 build_int_cst (size_type_node, 1));
2722 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2723 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2724 replace_call_with_call_and_fold (gsi, repl);
2725 return true;
2728 else
2729 maxlen = len;
2731 if (! tree_int_cst_lt (maxlen, size))
2732 return false;
2735 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2736 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2737 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2738 if (!fn)
2739 return false;
2741 gimple *repl = gimple_build_call (fn, 2, dest, src);
2742 replace_call_with_call_and_fold (gsi, repl);
2743 return true;
2746 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2747 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2748 length passed as third argument. IGNORE is true if return value can be
2749 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2751 static bool
2752 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2753 tree dest, tree src,
2754 tree len, tree size,
2755 enum built_in_function fcode)
2757 gimple *stmt = gsi_stmt (*gsi);
2758 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2759 tree fn;
2761 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2763 /* If return value of __stpncpy_chk is ignored,
2764 optimize into __strncpy_chk. */
2765 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2766 if (fn)
2768 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2769 replace_call_with_call_and_fold (gsi, repl);
2770 return true;
2774 if (! tree_fits_uhwi_p (size))
2775 return false;
2777 tree maxlen = get_maxval_strlen (len, 2);
2778 if (! integer_all_onesp (size))
2780 if (! tree_fits_uhwi_p (len))
2782 /* If LEN is not constant, try MAXLEN too.
2783 For MAXLEN only allow optimizing into non-_ocs function
2784 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2785 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2786 return false;
2788 else
2789 maxlen = len;
2791 if (tree_int_cst_lt (size, maxlen))
2792 return false;
2795 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2796 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2797 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2798 if (!fn)
2799 return false;
2801 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2802 replace_call_with_call_and_fold (gsi, repl);
2803 return true;
2806 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2807 Return NULL_TREE if no simplification can be made. */
2809 static bool
2810 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2812 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2813 location_t loc = gimple_location (stmt);
2814 tree dest = gimple_call_arg (stmt, 0);
2815 tree src = gimple_call_arg (stmt, 1);
2816 tree fn, lenp1;
2818 /* If the result is unused, replace stpcpy with strcpy. */
2819 if (gimple_call_lhs (stmt) == NULL_TREE)
2821 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2822 if (!fn)
2823 return false;
2824 gimple_call_set_fndecl (stmt, fn);
2825 fold_stmt (gsi);
2826 return true;
2829 /* Set to non-null if ARG refers to an unterminated array. */
2830 c_strlen_data data;
2831 memset (&data, 0, sizeof (c_strlen_data));
2832 tree len = c_strlen (src, 1, &data, 1);
2833 if (!len
2834 || TREE_CODE (len) != INTEGER_CST)
2836 data.decl = unterminated_array (src);
2837 if (!data.decl)
2838 return false;
2841 if (data.decl)
2843 /* Avoid folding calls with unterminated arrays. */
2844 if (!gimple_no_warning_p (stmt))
2845 warn_string_no_nul (loc, "stpcpy", src, data.decl);
2846 gimple_set_no_warning (stmt, true);
2847 return false;
2850 if (optimize_function_for_size_p (cfun)
2851 /* If length is zero it's small enough. */
2852 && !integer_zerop (len))
2853 return false;
2855 /* If the source has a known length replace stpcpy with memcpy. */
2856 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2857 if (!fn)
2858 return false;
2860 gimple_seq stmts = NULL;
2861 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2862 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2863 tem, build_int_cst (size_type_node, 1));
2864 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2865 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2866 gimple_set_vuse (repl, gimple_vuse (stmt));
2867 gimple_set_vdef (repl, gimple_vdef (stmt));
2868 if (gimple_vdef (repl)
2869 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2870 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2871 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2872 /* Replace the result with dest + len. */
2873 stmts = NULL;
2874 tem = gimple_convert (&stmts, loc, sizetype, len);
2875 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2876 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2877 POINTER_PLUS_EXPR, dest, tem);
2878 gsi_replace (gsi, ret, false);
2879 /* Finally fold the memcpy call. */
2880 gimple_stmt_iterator gsi2 = *gsi;
2881 gsi_prev (&gsi2);
2882 fold_stmt (&gsi2);
2883 return true;
2886 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2887 NULL_TREE if a normal call should be emitted rather than expanding
2888 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2889 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2890 passed as second argument. */
2892 static bool
2893 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2894 enum built_in_function fcode)
2896 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2897 tree dest, size, len, fn, fmt, flag;
2898 const char *fmt_str;
2900 /* Verify the required arguments in the original call. */
2901 if (gimple_call_num_args (stmt) < 5)
2902 return false;
2904 dest = gimple_call_arg (stmt, 0);
2905 len = gimple_call_arg (stmt, 1);
2906 flag = gimple_call_arg (stmt, 2);
2907 size = gimple_call_arg (stmt, 3);
2908 fmt = gimple_call_arg (stmt, 4);
2910 if (! tree_fits_uhwi_p (size))
2911 return false;
2913 if (! integer_all_onesp (size))
2915 tree maxlen = get_maxval_strlen (len, 2);
2916 if (! tree_fits_uhwi_p (len))
2918 /* If LEN is not constant, try MAXLEN too.
2919 For MAXLEN only allow optimizing into non-_ocs function
2920 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2921 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2922 return false;
2924 else
2925 maxlen = len;
2927 if (tree_int_cst_lt (size, maxlen))
2928 return false;
2931 if (!init_target_chars ())
2932 return false;
2934 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2935 or if format doesn't contain % chars or is "%s". */
2936 if (! integer_zerop (flag))
2938 fmt_str = c_getstr (fmt);
2939 if (fmt_str == NULL)
2940 return false;
2941 if (strchr (fmt_str, target_percent) != NULL
2942 && strcmp (fmt_str, target_percent_s))
2943 return false;
2946 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2947 available. */
2948 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2949 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2950 if (!fn)
2951 return false;
2953 /* Replace the called function and the first 5 argument by 3 retaining
2954 trailing varargs. */
2955 gimple_call_set_fndecl (stmt, fn);
2956 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2957 gimple_call_set_arg (stmt, 0, dest);
2958 gimple_call_set_arg (stmt, 1, len);
2959 gimple_call_set_arg (stmt, 2, fmt);
2960 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2961 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2962 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2963 fold_stmt (gsi);
2964 return true;
2967 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2968 Return NULL_TREE if a normal call should be emitted rather than
2969 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2970 or BUILT_IN_VSPRINTF_CHK. */
2972 static bool
2973 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2974 enum built_in_function fcode)
2976 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2977 tree dest, size, len, fn, fmt, flag;
2978 const char *fmt_str;
2979 unsigned nargs = gimple_call_num_args (stmt);
2981 /* Verify the required arguments in the original call. */
2982 if (nargs < 4)
2983 return false;
2984 dest = gimple_call_arg (stmt, 0);
2985 flag = gimple_call_arg (stmt, 1);
2986 size = gimple_call_arg (stmt, 2);
2987 fmt = gimple_call_arg (stmt, 3);
2989 if (! tree_fits_uhwi_p (size))
2990 return false;
2992 len = NULL_TREE;
2994 if (!init_target_chars ())
2995 return false;
2997 /* Check whether the format is a literal string constant. */
2998 fmt_str = c_getstr (fmt);
2999 if (fmt_str != NULL)
3001 /* If the format doesn't contain % args or %%, we know the size. */
3002 if (strchr (fmt_str, target_percent) == 0)
3004 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3005 len = build_int_cstu (size_type_node, strlen (fmt_str));
3007 /* If the format is "%s" and first ... argument is a string literal,
3008 we know the size too. */
3009 else if (fcode == BUILT_IN_SPRINTF_CHK
3010 && strcmp (fmt_str, target_percent_s) == 0)
3012 tree arg;
3014 if (nargs == 5)
3016 arg = gimple_call_arg (stmt, 4);
3017 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3019 len = c_strlen (arg, 1);
3020 if (! len || ! tree_fits_uhwi_p (len))
3021 len = NULL_TREE;
3027 if (! integer_all_onesp (size))
3029 if (! len || ! tree_int_cst_lt (len, size))
3030 return false;
3033 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3034 or if format doesn't contain % chars or is "%s". */
3035 if (! integer_zerop (flag))
3037 if (fmt_str == NULL)
3038 return false;
3039 if (strchr (fmt_str, target_percent) != NULL
3040 && strcmp (fmt_str, target_percent_s))
3041 return false;
3044 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3045 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3046 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3047 if (!fn)
3048 return false;
3050 /* Replace the called function and the first 4 argument by 2 retaining
3051 trailing varargs. */
3052 gimple_call_set_fndecl (stmt, fn);
3053 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3054 gimple_call_set_arg (stmt, 0, dest);
3055 gimple_call_set_arg (stmt, 1, fmt);
3056 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3057 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3058 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3059 fold_stmt (gsi);
3060 return true;
3063 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3064 ORIG may be null if this is a 2-argument call. We don't attempt to
3065 simplify calls with more than 3 arguments.
3067 Return true if simplification was possible, otherwise false. */
3069 bool
3070 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3072 gimple *stmt = gsi_stmt (*gsi);
3073 tree dest = gimple_call_arg (stmt, 0);
3074 tree fmt = gimple_call_arg (stmt, 1);
3075 tree orig = NULL_TREE;
3076 const char *fmt_str = NULL;
3078 /* Verify the required arguments in the original call. We deal with two
3079 types of sprintf() calls: 'sprintf (str, fmt)' and
3080 'sprintf (dest, "%s", orig)'. */
3081 if (gimple_call_num_args (stmt) > 3)
3082 return false;
3084 if (gimple_call_num_args (stmt) == 3)
3085 orig = gimple_call_arg (stmt, 2);
3087 /* Check whether the format is a literal string constant. */
3088 fmt_str = c_getstr (fmt);
3089 if (fmt_str == NULL)
3090 return false;
3092 if (!init_target_chars ())
3093 return false;
3095 /* If the format doesn't contain % args or %%, use strcpy. */
3096 if (strchr (fmt_str, target_percent) == NULL)
3098 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3100 if (!fn)
3101 return false;
3103 /* Don't optimize sprintf (buf, "abc", ptr++). */
3104 if (orig)
3105 return false;
3107 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3108 'format' is known to contain no % formats. */
3109 gimple_seq stmts = NULL;
3110 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3112 /* Propagate the NO_WARNING bit to avoid issuing the same
3113 warning more than once. */
3114 if (gimple_no_warning_p (stmt))
3115 gimple_set_no_warning (repl, true);
3117 gimple_seq_add_stmt_without_update (&stmts, repl);
3118 if (gimple_call_lhs (stmt))
3120 repl = gimple_build_assign (gimple_call_lhs (stmt),
3121 build_int_cst (integer_type_node,
3122 strlen (fmt_str)));
3123 gimple_seq_add_stmt_without_update (&stmts, repl);
3124 gsi_replace_with_seq_vops (gsi, stmts);
3125 /* gsi now points at the assignment to the lhs, get a
3126 stmt iterator to the memcpy call.
3127 ??? We can't use gsi_for_stmt as that doesn't work when the
3128 CFG isn't built yet. */
3129 gimple_stmt_iterator gsi2 = *gsi;
3130 gsi_prev (&gsi2);
3131 fold_stmt (&gsi2);
3133 else
3135 gsi_replace_with_seq_vops (gsi, stmts);
3136 fold_stmt (gsi);
3138 return true;
3141 /* If the format is "%s", use strcpy if the result isn't used. */
3142 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3144 tree fn;
3145 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3147 if (!fn)
3148 return false;
3150 /* Don't crash on sprintf (str1, "%s"). */
3151 if (!orig)
3152 return false;
3154 tree orig_len = NULL_TREE;
3155 if (gimple_call_lhs (stmt))
3157 orig_len = get_maxval_strlen (orig, 0);
3158 if (!orig_len)
3159 return false;
3162 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3163 gimple_seq stmts = NULL;
3164 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3166 /* Propagate the NO_WARNING bit to avoid issuing the same
3167 warning more than once. */
3168 if (gimple_no_warning_p (stmt))
3169 gimple_set_no_warning (repl, true);
3171 gimple_seq_add_stmt_without_update (&stmts, repl);
3172 if (gimple_call_lhs (stmt))
3174 if (!useless_type_conversion_p (integer_type_node,
3175 TREE_TYPE (orig_len)))
3176 orig_len = fold_convert (integer_type_node, orig_len);
3177 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3178 gimple_seq_add_stmt_without_update (&stmts, repl);
3179 gsi_replace_with_seq_vops (gsi, stmts);
3180 /* gsi now points at the assignment to the lhs, get a
3181 stmt iterator to the memcpy call.
3182 ??? We can't use gsi_for_stmt as that doesn't work when the
3183 CFG isn't built yet. */
3184 gimple_stmt_iterator gsi2 = *gsi;
3185 gsi_prev (&gsi2);
3186 fold_stmt (&gsi2);
3188 else
3190 gsi_replace_with_seq_vops (gsi, stmts);
3191 fold_stmt (gsi);
3193 return true;
3195 return false;
3198 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3199 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3200 attempt to simplify calls with more than 4 arguments.
3202 Return true if simplification was possible, otherwise false. */
3204 bool
3205 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3207 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3208 tree dest = gimple_call_arg (stmt, 0);
3209 tree destsize = gimple_call_arg (stmt, 1);
3210 tree fmt = gimple_call_arg (stmt, 2);
3211 tree orig = NULL_TREE;
3212 const char *fmt_str = NULL;
3214 if (gimple_call_num_args (stmt) > 4)
3215 return false;
3217 if (gimple_call_num_args (stmt) == 4)
3218 orig = gimple_call_arg (stmt, 3);
3220 if (!tree_fits_uhwi_p (destsize))
3221 return false;
3222 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3224 /* Check whether the format is a literal string constant. */
3225 fmt_str = c_getstr (fmt);
3226 if (fmt_str == NULL)
3227 return false;
3229 if (!init_target_chars ())
3230 return false;
3232 /* If the format doesn't contain % args or %%, use strcpy. */
3233 if (strchr (fmt_str, target_percent) == NULL)
3235 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3236 if (!fn)
3237 return false;
3239 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3240 if (orig)
3241 return false;
3243 /* We could expand this as
3244 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3245 or to
3246 memcpy (str, fmt_with_nul_at_cstm1, cst);
3247 but in the former case that might increase code size
3248 and in the latter case grow .rodata section too much.
3249 So punt for now. */
3250 size_t len = strlen (fmt_str);
3251 if (len >= destlen)
3252 return false;
3254 gimple_seq stmts = NULL;
3255 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3256 gimple_seq_add_stmt_without_update (&stmts, repl);
3257 if (gimple_call_lhs (stmt))
3259 repl = gimple_build_assign (gimple_call_lhs (stmt),
3260 build_int_cst (integer_type_node, len));
3261 gimple_seq_add_stmt_without_update (&stmts, repl);
3262 gsi_replace_with_seq_vops (gsi, stmts);
3263 /* gsi now points at the assignment to the lhs, get a
3264 stmt iterator to the memcpy call.
3265 ??? We can't use gsi_for_stmt as that doesn't work when the
3266 CFG isn't built yet. */
3267 gimple_stmt_iterator gsi2 = *gsi;
3268 gsi_prev (&gsi2);
3269 fold_stmt (&gsi2);
3271 else
3273 gsi_replace_with_seq_vops (gsi, stmts);
3274 fold_stmt (gsi);
3276 return true;
3279 /* If the format is "%s", use strcpy if the result isn't used. */
3280 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3282 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3283 if (!fn)
3284 return false;
3286 /* Don't crash on snprintf (str1, cst, "%s"). */
3287 if (!orig)
3288 return false;
3290 tree orig_len = get_maxval_strlen (orig, 0);
3291 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3292 return false;
3294 /* We could expand this as
3295 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3296 or to
3297 memcpy (str1, str2_with_nul_at_cstm1, cst);
3298 but in the former case that might increase code size
3299 and in the latter case grow .rodata section too much.
3300 So punt for now. */
3301 if (compare_tree_int (orig_len, destlen) >= 0)
3302 return false;
3304 /* Convert snprintf (str1, cst, "%s", str2) into
3305 strcpy (str1, str2) if strlen (str2) < cst. */
3306 gimple_seq stmts = NULL;
3307 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3308 gimple_seq_add_stmt_without_update (&stmts, repl);
3309 if (gimple_call_lhs (stmt))
3311 if (!useless_type_conversion_p (integer_type_node,
3312 TREE_TYPE (orig_len)))
3313 orig_len = fold_convert (integer_type_node, orig_len);
3314 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3315 gimple_seq_add_stmt_without_update (&stmts, repl);
3316 gsi_replace_with_seq_vops (gsi, stmts);
3317 /* gsi now points at the assignment to the lhs, get a
3318 stmt iterator to the memcpy call.
3319 ??? We can't use gsi_for_stmt as that doesn't work when the
3320 CFG isn't built yet. */
3321 gimple_stmt_iterator gsi2 = *gsi;
3322 gsi_prev (&gsi2);
3323 fold_stmt (&gsi2);
3325 else
3327 gsi_replace_with_seq_vops (gsi, stmts);
3328 fold_stmt (gsi);
3330 return true;
3332 return false;
3335 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3336 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3337 more than 3 arguments, and ARG may be null in the 2-argument case.
3339 Return NULL_TREE if no simplification was possible, otherwise return the
3340 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3341 code of the function to be simplified. */
3343 static bool
3344 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3345 tree fp, tree fmt, tree arg,
3346 enum built_in_function fcode)
3348 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3349 tree fn_fputc, fn_fputs;
3350 const char *fmt_str = NULL;
3352 /* If the return value is used, don't do the transformation. */
3353 if (gimple_call_lhs (stmt) != NULL_TREE)
3354 return false;
3356 /* Check whether the format is a literal string constant. */
3357 fmt_str = c_getstr (fmt);
3358 if (fmt_str == NULL)
3359 return false;
3361 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3363 /* If we're using an unlocked function, assume the other
3364 unlocked functions exist explicitly. */
3365 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3366 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3368 else
3370 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3371 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3374 if (!init_target_chars ())
3375 return false;
3377 /* If the format doesn't contain % args or %%, use strcpy. */
3378 if (strchr (fmt_str, target_percent) == NULL)
3380 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3381 && arg)
3382 return false;
3384 /* If the format specifier was "", fprintf does nothing. */
3385 if (fmt_str[0] == '\0')
3387 replace_call_with_value (gsi, NULL_TREE);
3388 return true;
3391 /* When "string" doesn't contain %, replace all cases of
3392 fprintf (fp, string) with fputs (string, fp). The fputs
3393 builtin will take care of special cases like length == 1. */
3394 if (fn_fputs)
3396 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3397 replace_call_with_call_and_fold (gsi, repl);
3398 return true;
3402 /* The other optimizations can be done only on the non-va_list variants. */
3403 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3404 return false;
3406 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3407 else if (strcmp (fmt_str, target_percent_s) == 0)
3409 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3410 return false;
3411 if (fn_fputs)
3413 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3414 replace_call_with_call_and_fold (gsi, repl);
3415 return true;
3419 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3420 else if (strcmp (fmt_str, target_percent_c) == 0)
3422 if (!arg
3423 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3424 return false;
3425 if (fn_fputc)
3427 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3428 replace_call_with_call_and_fold (gsi, repl);
3429 return true;
3433 return false;
3436 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3437 FMT and ARG are the arguments to the call; we don't fold cases with
3438 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3440 Return NULL_TREE if no simplification was possible, otherwise return the
3441 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3442 code of the function to be simplified. */
3444 static bool
3445 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3446 tree arg, enum built_in_function fcode)
3448 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3449 tree fn_putchar, fn_puts, newarg;
3450 const char *fmt_str = NULL;
3452 /* If the return value is used, don't do the transformation. */
3453 if (gimple_call_lhs (stmt) != NULL_TREE)
3454 return false;
3456 /* Check whether the format is a literal string constant. */
3457 fmt_str = c_getstr (fmt);
3458 if (fmt_str == NULL)
3459 return false;
3461 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3463 /* If we're using an unlocked function, assume the other
3464 unlocked functions exist explicitly. */
3465 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3466 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3468 else
3470 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3471 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3474 if (!init_target_chars ())
3475 return false;
3477 if (strcmp (fmt_str, target_percent_s) == 0
3478 || strchr (fmt_str, target_percent) == NULL)
3480 const char *str;
3482 if (strcmp (fmt_str, target_percent_s) == 0)
3484 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3485 return false;
3487 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3488 return false;
3490 str = c_getstr (arg);
3491 if (str == NULL)
3492 return false;
3494 else
3496 /* The format specifier doesn't contain any '%' characters. */
3497 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3498 && arg)
3499 return false;
3500 str = fmt_str;
3503 /* If the string was "", printf does nothing. */
3504 if (str[0] == '\0')
3506 replace_call_with_value (gsi, NULL_TREE);
3507 return true;
3510 /* If the string has length of 1, call putchar. */
3511 if (str[1] == '\0')
3513 /* Given printf("c"), (where c is any one character,)
3514 convert "c"[0] to an int and pass that to the replacement
3515 function. */
3516 newarg = build_int_cst (integer_type_node, str[0]);
3517 if (fn_putchar)
3519 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3520 replace_call_with_call_and_fold (gsi, repl);
3521 return true;
3524 else
3526 /* If the string was "string\n", call puts("string"). */
3527 size_t len = strlen (str);
3528 if ((unsigned char)str[len - 1] == target_newline
3529 && (size_t) (int) len == len
3530 && (int) len > 0)
3532 char *newstr;
3534 /* Create a NUL-terminated string that's one char shorter
3535 than the original, stripping off the trailing '\n'. */
3536 newstr = xstrdup (str);
3537 newstr[len - 1] = '\0';
3538 newarg = build_string_literal (len, newstr);
3539 free (newstr);
3540 if (fn_puts)
3542 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3543 replace_call_with_call_and_fold (gsi, repl);
3544 return true;
3547 else
3548 /* We'd like to arrange to call fputs(string,stdout) here,
3549 but we need stdout and don't have a way to get it yet. */
3550 return false;
3554 /* The other optimizations can be done only on the non-va_list variants. */
3555 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3556 return false;
3558 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3559 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3561 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3562 return false;
3563 if (fn_puts)
3565 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3566 replace_call_with_call_and_fold (gsi, repl);
3567 return true;
3571 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3572 else if (strcmp (fmt_str, target_percent_c) == 0)
3574 if (!arg || ! useless_type_conversion_p (integer_type_node,
3575 TREE_TYPE (arg)))
3576 return false;
3577 if (fn_putchar)
3579 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3580 replace_call_with_call_and_fold (gsi, repl);
3581 return true;
3585 return false;
3590 /* Fold a call to __builtin_strlen with known length LEN. */
3592 static bool
3593 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3595 gimple *stmt = gsi_stmt (*gsi);
3596 tree arg = gimple_call_arg (stmt, 0);
3598 wide_int minlen;
3599 wide_int maxlen;
3601 /* Set to non-null if ARG refers to an unterminated array. */
3602 tree nonstr;
3603 tree lenrange[2];
3604 if (!get_range_strlen (arg, lenrange, 1, true, &nonstr)
3605 && !nonstr
3606 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3607 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3609 /* The range of lengths refers to either a single constant
3610 string or to the longest and shortest constant string
3611 referenced by the argument of the strlen() call, or to
3612 the strings that can possibly be stored in the arrays
3613 the argument refers to. */
3614 minlen = wi::to_wide (lenrange[0]);
3615 maxlen = wi::to_wide (lenrange[1]);
3617 else
3619 unsigned prec = TYPE_PRECISION (sizetype);
3621 minlen = wi::shwi (0, prec);
3622 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3625 if (minlen == maxlen)
3627 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3628 true, GSI_SAME_STMT);
3629 replace_call_with_value (gsi, lenrange[0]);
3630 return true;
3633 if (tree lhs = gimple_call_lhs (stmt))
3634 if (TREE_CODE (lhs) == SSA_NAME
3635 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3636 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3638 return false;
3641 /* Fold a call to __builtin_acc_on_device. */
3643 static bool
3644 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3646 /* Defer folding until we know which compiler we're in. */
3647 if (symtab->state != EXPANSION)
3648 return false;
3650 unsigned val_host = GOMP_DEVICE_HOST;
3651 unsigned val_dev = GOMP_DEVICE_NONE;
3653 #ifdef ACCEL_COMPILER
3654 val_host = GOMP_DEVICE_NOT_HOST;
3655 val_dev = ACCEL_COMPILER_acc_device;
3656 #endif
3658 location_t loc = gimple_location (gsi_stmt (*gsi));
3660 tree host_eq = make_ssa_name (boolean_type_node);
3661 gimple *host_ass = gimple_build_assign
3662 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3663 gimple_set_location (host_ass, loc);
3664 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3666 tree dev_eq = make_ssa_name (boolean_type_node);
3667 gimple *dev_ass = gimple_build_assign
3668 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3669 gimple_set_location (dev_ass, loc);
3670 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3672 tree result = make_ssa_name (boolean_type_node);
3673 gimple *result_ass = gimple_build_assign
3674 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3675 gimple_set_location (result_ass, loc);
3676 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3678 replace_call_with_value (gsi, result);
3680 return true;
3683 /* Fold realloc (0, n) -> malloc (n). */
3685 static bool
3686 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3688 gimple *stmt = gsi_stmt (*gsi);
3689 tree arg = gimple_call_arg (stmt, 0);
3690 tree size = gimple_call_arg (stmt, 1);
3692 if (operand_equal_p (arg, null_pointer_node, 0))
3694 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3695 if (fn_malloc)
3697 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3698 replace_call_with_call_and_fold (gsi, repl);
3699 return true;
3702 return false;
3705 /* Fold the non-target builtin at *GSI and return whether any simplification
3706 was made. */
3708 static bool
3709 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3711 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3712 tree callee = gimple_call_fndecl (stmt);
3714 /* Give up for always_inline inline builtins until they are
3715 inlined. */
3716 if (avoid_folding_inline_builtin (callee))
3717 return false;
3719 unsigned n = gimple_call_num_args (stmt);
3720 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3721 switch (fcode)
3723 case BUILT_IN_BCMP:
3724 return gimple_fold_builtin_bcmp (gsi);
3725 case BUILT_IN_BCOPY:
3726 return gimple_fold_builtin_bcopy (gsi);
3727 case BUILT_IN_BZERO:
3728 return gimple_fold_builtin_bzero (gsi);
3730 case BUILT_IN_MEMSET:
3731 return gimple_fold_builtin_memset (gsi,
3732 gimple_call_arg (stmt, 1),
3733 gimple_call_arg (stmt, 2));
3734 case BUILT_IN_MEMCPY:
3735 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3736 gimple_call_arg (stmt, 1), 0);
3737 case BUILT_IN_MEMPCPY:
3738 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3739 gimple_call_arg (stmt, 1), 1);
3740 case BUILT_IN_MEMMOVE:
3741 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3742 gimple_call_arg (stmt, 1), 3);
3743 case BUILT_IN_SPRINTF_CHK:
3744 case BUILT_IN_VSPRINTF_CHK:
3745 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3746 case BUILT_IN_STRCAT_CHK:
3747 return gimple_fold_builtin_strcat_chk (gsi);
3748 case BUILT_IN_STRNCAT_CHK:
3749 return gimple_fold_builtin_strncat_chk (gsi);
3750 case BUILT_IN_STRLEN:
3751 return gimple_fold_builtin_strlen (gsi);
3752 case BUILT_IN_STRCPY:
3753 return gimple_fold_builtin_strcpy (gsi,
3754 gimple_call_arg (stmt, 0),
3755 gimple_call_arg (stmt, 1));
3756 case BUILT_IN_STRNCPY:
3757 return gimple_fold_builtin_strncpy (gsi,
3758 gimple_call_arg (stmt, 0),
3759 gimple_call_arg (stmt, 1),
3760 gimple_call_arg (stmt, 2));
3761 case BUILT_IN_STRCAT:
3762 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3763 gimple_call_arg (stmt, 1));
3764 case BUILT_IN_STRNCAT:
3765 return gimple_fold_builtin_strncat (gsi);
3766 case BUILT_IN_INDEX:
3767 case BUILT_IN_STRCHR:
3768 return gimple_fold_builtin_strchr (gsi, false);
3769 case BUILT_IN_RINDEX:
3770 case BUILT_IN_STRRCHR:
3771 return gimple_fold_builtin_strchr (gsi, true);
3772 case BUILT_IN_STRSTR:
3773 return gimple_fold_builtin_strstr (gsi);
3774 case BUILT_IN_STRCMP:
3775 case BUILT_IN_STRCMP_EQ:
3776 case BUILT_IN_STRCASECMP:
3777 case BUILT_IN_STRNCMP:
3778 case BUILT_IN_STRNCMP_EQ:
3779 case BUILT_IN_STRNCASECMP:
3780 return gimple_fold_builtin_string_compare (gsi);
3781 case BUILT_IN_MEMCHR:
3782 return gimple_fold_builtin_memchr (gsi);
3783 case BUILT_IN_FPUTS:
3784 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3785 gimple_call_arg (stmt, 1), false);
3786 case BUILT_IN_FPUTS_UNLOCKED:
3787 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3788 gimple_call_arg (stmt, 1), true);
3789 case BUILT_IN_MEMCPY_CHK:
3790 case BUILT_IN_MEMPCPY_CHK:
3791 case BUILT_IN_MEMMOVE_CHK:
3792 case BUILT_IN_MEMSET_CHK:
3793 return gimple_fold_builtin_memory_chk (gsi,
3794 gimple_call_arg (stmt, 0),
3795 gimple_call_arg (stmt, 1),
3796 gimple_call_arg (stmt, 2),
3797 gimple_call_arg (stmt, 3),
3798 fcode);
3799 case BUILT_IN_STPCPY:
3800 return gimple_fold_builtin_stpcpy (gsi);
3801 case BUILT_IN_STRCPY_CHK:
3802 case BUILT_IN_STPCPY_CHK:
3803 return gimple_fold_builtin_stxcpy_chk (gsi,
3804 gimple_call_arg (stmt, 0),
3805 gimple_call_arg (stmt, 1),
3806 gimple_call_arg (stmt, 2),
3807 fcode);
3808 case BUILT_IN_STRNCPY_CHK:
3809 case BUILT_IN_STPNCPY_CHK:
3810 return gimple_fold_builtin_stxncpy_chk (gsi,
3811 gimple_call_arg (stmt, 0),
3812 gimple_call_arg (stmt, 1),
3813 gimple_call_arg (stmt, 2),
3814 gimple_call_arg (stmt, 3),
3815 fcode);
3816 case BUILT_IN_SNPRINTF_CHK:
3817 case BUILT_IN_VSNPRINTF_CHK:
3818 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3820 case BUILT_IN_FPRINTF:
3821 case BUILT_IN_FPRINTF_UNLOCKED:
3822 case BUILT_IN_VFPRINTF:
3823 if (n == 2 || n == 3)
3824 return gimple_fold_builtin_fprintf (gsi,
3825 gimple_call_arg (stmt, 0),
3826 gimple_call_arg (stmt, 1),
3827 n == 3
3828 ? gimple_call_arg (stmt, 2)
3829 : NULL_TREE,
3830 fcode);
3831 break;
3832 case BUILT_IN_FPRINTF_CHK:
3833 case BUILT_IN_VFPRINTF_CHK:
3834 if (n == 3 || n == 4)
3835 return gimple_fold_builtin_fprintf (gsi,
3836 gimple_call_arg (stmt, 0),
3837 gimple_call_arg (stmt, 2),
3838 n == 4
3839 ? gimple_call_arg (stmt, 3)
3840 : NULL_TREE,
3841 fcode);
3842 break;
3843 case BUILT_IN_PRINTF:
3844 case BUILT_IN_PRINTF_UNLOCKED:
3845 case BUILT_IN_VPRINTF:
3846 if (n == 1 || n == 2)
3847 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3848 n == 2
3849 ? gimple_call_arg (stmt, 1)
3850 : NULL_TREE, fcode);
3851 break;
3852 case BUILT_IN_PRINTF_CHK:
3853 case BUILT_IN_VPRINTF_CHK:
3854 if (n == 2 || n == 3)
3855 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3856 n == 3
3857 ? gimple_call_arg (stmt, 2)
3858 : NULL_TREE, fcode);
3859 break;
3860 case BUILT_IN_ACC_ON_DEVICE:
3861 return gimple_fold_builtin_acc_on_device (gsi,
3862 gimple_call_arg (stmt, 0));
3863 case BUILT_IN_REALLOC:
3864 return gimple_fold_builtin_realloc (gsi);
3866 default:;
3869 /* Try the generic builtin folder. */
3870 bool ignore = (gimple_call_lhs (stmt) == NULL);
3871 tree result = fold_call_stmt (stmt, ignore);
3872 if (result)
3874 if (ignore)
3875 STRIP_NOPS (result);
3876 else
3877 result = fold_convert (gimple_call_return_type (stmt), result);
3878 if (!update_call_from_tree (gsi, result))
3879 gimplify_and_update_call_from_tree (gsi, result);
3880 return true;
3883 return false;
3886 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3887 function calls to constants, where possible. */
3889 static tree
3890 fold_internal_goacc_dim (const gimple *call)
3892 int axis = oacc_get_ifn_dim_arg (call);
3893 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3894 tree result = NULL_TREE;
3895 tree type = TREE_TYPE (gimple_call_lhs (call));
3897 switch (gimple_call_internal_fn (call))
3899 case IFN_GOACC_DIM_POS:
3900 /* If the size is 1, we know the answer. */
3901 if (size == 1)
3902 result = build_int_cst (type, 0);
3903 break;
3904 case IFN_GOACC_DIM_SIZE:
3905 /* If the size is not dynamic, we know the answer. */
3906 if (size)
3907 result = build_int_cst (type, size);
3908 break;
3909 default:
3910 break;
3913 return result;
3916 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3917 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3918 &var where var is only addressable because of such calls. */
3920 bool
3921 optimize_atomic_compare_exchange_p (gimple *stmt)
3923 if (gimple_call_num_args (stmt) != 6
3924 || !flag_inline_atomics
3925 || !optimize
3926 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3927 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3928 || !gimple_vdef (stmt)
3929 || !gimple_vuse (stmt))
3930 return false;
3932 tree fndecl = gimple_call_fndecl (stmt);
3933 switch (DECL_FUNCTION_CODE (fndecl))
3935 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3936 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3937 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3938 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3939 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3940 break;
3941 default:
3942 return false;
3945 tree expected = gimple_call_arg (stmt, 1);
3946 if (TREE_CODE (expected) != ADDR_EXPR
3947 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3948 return false;
3950 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3951 if (!is_gimple_reg_type (etype)
3952 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3953 || TREE_THIS_VOLATILE (etype)
3954 || VECTOR_TYPE_P (etype)
3955 || TREE_CODE (etype) == COMPLEX_TYPE
3956 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3957 might not preserve all the bits. See PR71716. */
3958 || SCALAR_FLOAT_TYPE_P (etype)
3959 || maybe_ne (TYPE_PRECISION (etype),
3960 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3961 return false;
3963 tree weak = gimple_call_arg (stmt, 3);
3964 if (!integer_zerop (weak) && !integer_onep (weak))
3965 return false;
3967 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3968 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3969 machine_mode mode = TYPE_MODE (itype);
3971 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3972 == CODE_FOR_nothing
3973 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3974 return false;
3976 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
3977 return false;
3979 return true;
3982 /* Fold
3983 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3984 into
3985 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3986 i = IMAGPART_EXPR <t>;
3987 r = (_Bool) i;
3988 e = REALPART_EXPR <t>; */
3990 void
3991 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3993 gimple *stmt = gsi_stmt (*gsi);
3994 tree fndecl = gimple_call_fndecl (stmt);
3995 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3996 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3997 tree ctype = build_complex_type (itype);
3998 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3999 bool throws = false;
4000 edge e = NULL;
4001 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4002 expected);
4003 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4004 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4005 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4007 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4008 build1 (VIEW_CONVERT_EXPR, itype,
4009 gimple_assign_lhs (g)));
4010 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4012 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4013 + int_size_in_bytes (itype);
4014 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4015 gimple_call_arg (stmt, 0),
4016 gimple_assign_lhs (g),
4017 gimple_call_arg (stmt, 2),
4018 build_int_cst (integer_type_node, flag),
4019 gimple_call_arg (stmt, 4),
4020 gimple_call_arg (stmt, 5));
4021 tree lhs = make_ssa_name (ctype);
4022 gimple_call_set_lhs (g, lhs);
4023 gimple_set_vdef (g, gimple_vdef (stmt));
4024 gimple_set_vuse (g, gimple_vuse (stmt));
4025 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
4026 tree oldlhs = gimple_call_lhs (stmt);
4027 if (stmt_can_throw_internal (cfun, stmt))
4029 throws = true;
4030 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4032 gimple_call_set_nothrow (as_a <gcall *> (g),
4033 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4034 gimple_call_set_lhs (stmt, NULL_TREE);
4035 gsi_replace (gsi, g, true);
4036 if (oldlhs)
4038 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4039 build1 (IMAGPART_EXPR, itype, lhs));
4040 if (throws)
4042 gsi_insert_on_edge_immediate (e, g);
4043 *gsi = gsi_for_stmt (g);
4045 else
4046 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4047 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4048 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4050 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4051 build1 (REALPART_EXPR, itype, lhs));
4052 if (throws && oldlhs == NULL_TREE)
4054 gsi_insert_on_edge_immediate (e, g);
4055 *gsi = gsi_for_stmt (g);
4057 else
4058 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4059 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4061 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4062 VIEW_CONVERT_EXPR,
4063 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4064 gimple_assign_lhs (g)));
4065 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4067 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4068 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4069 *gsi = gsiret;
4072 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4073 doesn't fit into TYPE. The test for overflow should be regardless of
4074 -fwrapv, and even for unsigned types. */
4076 bool
4077 arith_overflowed_p (enum tree_code code, const_tree type,
4078 const_tree arg0, const_tree arg1)
4080 widest2_int warg0 = widest2_int_cst (arg0);
4081 widest2_int warg1 = widest2_int_cst (arg1);
4082 widest2_int wres;
4083 switch (code)
4085 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4086 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4087 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4088 default: gcc_unreachable ();
4090 signop sign = TYPE_SIGN (type);
4091 if (sign == UNSIGNED && wi::neg_p (wres))
4092 return true;
4093 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4096 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4097 The statement may be replaced by another statement, e.g., if the call
4098 simplifies to a constant value. Return true if any changes were made.
4099 It is assumed that the operands have been previously folded. */
4101 static bool
4102 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4104 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4105 tree callee;
4106 bool changed = false;
4107 unsigned i;
4109 /* Fold *& in call arguments. */
4110 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4111 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4113 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4114 if (tmp)
4116 gimple_call_set_arg (stmt, i, tmp);
4117 changed = true;
4121 /* Check for virtual calls that became direct calls. */
4122 callee = gimple_call_fn (stmt);
4123 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4125 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4127 if (dump_file && virtual_method_call_p (callee)
4128 && !possible_polymorphic_call_target_p
4129 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4130 (OBJ_TYPE_REF_EXPR (callee)))))
4132 fprintf (dump_file,
4133 "Type inheritance inconsistent devirtualization of ");
4134 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4135 fprintf (dump_file, " to ");
4136 print_generic_expr (dump_file, callee, TDF_SLIM);
4137 fprintf (dump_file, "\n");
4140 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4141 changed = true;
4143 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4145 bool final;
4146 vec <cgraph_node *>targets
4147 = possible_polymorphic_call_targets (callee, stmt, &final);
4148 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4150 tree lhs = gimple_call_lhs (stmt);
4151 if (dump_enabled_p ())
4153 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4154 "folding virtual function call to %s\n",
4155 targets.length () == 1
4156 ? targets[0]->name ()
4157 : "__builtin_unreachable");
4159 if (targets.length () == 1)
4161 tree fndecl = targets[0]->decl;
4162 gimple_call_set_fndecl (stmt, fndecl);
4163 changed = true;
4164 /* If changing the call to __cxa_pure_virtual
4165 or similar noreturn function, adjust gimple_call_fntype
4166 too. */
4167 if (gimple_call_noreturn_p (stmt)
4168 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4169 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4170 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4171 == void_type_node))
4172 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4173 /* If the call becomes noreturn, remove the lhs. */
4174 if (lhs
4175 && gimple_call_noreturn_p (stmt)
4176 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4177 || should_remove_lhs_p (lhs)))
4179 if (TREE_CODE (lhs) == SSA_NAME)
4181 tree var = create_tmp_var (TREE_TYPE (lhs));
4182 tree def = get_or_create_ssa_default_def (cfun, var);
4183 gimple *new_stmt = gimple_build_assign (lhs, def);
4184 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4186 gimple_call_set_lhs (stmt, NULL_TREE);
4188 maybe_remove_unused_call_args (cfun, stmt);
4190 else
4192 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4193 gimple *new_stmt = gimple_build_call (fndecl, 0);
4194 gimple_set_location (new_stmt, gimple_location (stmt));
4195 /* If the call had a SSA name as lhs morph that into
4196 an uninitialized value. */
4197 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4199 tree var = create_tmp_var (TREE_TYPE (lhs));
4200 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4201 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4202 set_ssa_default_def (cfun, var, lhs);
4204 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4205 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4206 gsi_replace (gsi, new_stmt, false);
4207 return true;
4213 /* Check for indirect calls that became direct calls, and then
4214 no longer require a static chain. */
4215 if (gimple_call_chain (stmt))
4217 tree fn = gimple_call_fndecl (stmt);
4218 if (fn && !DECL_STATIC_CHAIN (fn))
4220 gimple_call_set_chain (stmt, NULL);
4221 changed = true;
4223 else
4225 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4226 if (tmp)
4228 gimple_call_set_chain (stmt, tmp);
4229 changed = true;
4234 if (inplace)
4235 return changed;
4237 /* Check for builtins that CCP can handle using information not
4238 available in the generic fold routines. */
4239 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4241 if (gimple_fold_builtin (gsi))
4242 changed = true;
4244 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4246 changed |= targetm.gimple_fold_builtin (gsi);
4248 else if (gimple_call_internal_p (stmt))
4250 enum tree_code subcode = ERROR_MARK;
4251 tree result = NULL_TREE;
4252 bool cplx_result = false;
4253 tree overflow = NULL_TREE;
4254 switch (gimple_call_internal_fn (stmt))
4256 case IFN_BUILTIN_EXPECT:
4257 result = fold_builtin_expect (gimple_location (stmt),
4258 gimple_call_arg (stmt, 0),
4259 gimple_call_arg (stmt, 1),
4260 gimple_call_arg (stmt, 2),
4261 NULL_TREE);
4262 break;
4263 case IFN_UBSAN_OBJECT_SIZE:
4265 tree offset = gimple_call_arg (stmt, 1);
4266 tree objsize = gimple_call_arg (stmt, 2);
4267 if (integer_all_onesp (objsize)
4268 || (TREE_CODE (offset) == INTEGER_CST
4269 && TREE_CODE (objsize) == INTEGER_CST
4270 && tree_int_cst_le (offset, objsize)))
4272 replace_call_with_value (gsi, NULL_TREE);
4273 return true;
4276 break;
4277 case IFN_UBSAN_PTR:
4278 if (integer_zerop (gimple_call_arg (stmt, 1)))
4280 replace_call_with_value (gsi, NULL_TREE);
4281 return true;
4283 break;
4284 case IFN_UBSAN_BOUNDS:
4286 tree index = gimple_call_arg (stmt, 1);
4287 tree bound = gimple_call_arg (stmt, 2);
4288 if (TREE_CODE (index) == INTEGER_CST
4289 && TREE_CODE (bound) == INTEGER_CST)
4291 index = fold_convert (TREE_TYPE (bound), index);
4292 if (TREE_CODE (index) == INTEGER_CST
4293 && tree_int_cst_le (index, bound))
4295 replace_call_with_value (gsi, NULL_TREE);
4296 return true;
4300 break;
4301 case IFN_GOACC_DIM_SIZE:
4302 case IFN_GOACC_DIM_POS:
4303 result = fold_internal_goacc_dim (stmt);
4304 break;
4305 case IFN_UBSAN_CHECK_ADD:
4306 subcode = PLUS_EXPR;
4307 break;
4308 case IFN_UBSAN_CHECK_SUB:
4309 subcode = MINUS_EXPR;
4310 break;
4311 case IFN_UBSAN_CHECK_MUL:
4312 subcode = MULT_EXPR;
4313 break;
4314 case IFN_ADD_OVERFLOW:
4315 subcode = PLUS_EXPR;
4316 cplx_result = true;
4317 break;
4318 case IFN_SUB_OVERFLOW:
4319 subcode = MINUS_EXPR;
4320 cplx_result = true;
4321 break;
4322 case IFN_MUL_OVERFLOW:
4323 subcode = MULT_EXPR;
4324 cplx_result = true;
4325 break;
4326 default:
4327 break;
4329 if (subcode != ERROR_MARK)
4331 tree arg0 = gimple_call_arg (stmt, 0);
4332 tree arg1 = gimple_call_arg (stmt, 1);
4333 tree type = TREE_TYPE (arg0);
4334 if (cplx_result)
4336 tree lhs = gimple_call_lhs (stmt);
4337 if (lhs == NULL_TREE)
4338 type = NULL_TREE;
4339 else
4340 type = TREE_TYPE (TREE_TYPE (lhs));
4342 if (type == NULL_TREE)
4344 /* x = y + 0; x = y - 0; x = y * 0; */
4345 else if (integer_zerop (arg1))
4346 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4347 /* x = 0 + y; x = 0 * y; */
4348 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4349 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4350 /* x = y - y; */
4351 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4352 result = integer_zero_node;
4353 /* x = y * 1; x = 1 * y; */
4354 else if (subcode == MULT_EXPR && integer_onep (arg1))
4355 result = arg0;
4356 else if (subcode == MULT_EXPR && integer_onep (arg0))
4357 result = arg1;
4358 else if (TREE_CODE (arg0) == INTEGER_CST
4359 && TREE_CODE (arg1) == INTEGER_CST)
4361 if (cplx_result)
4362 result = int_const_binop (subcode, fold_convert (type, arg0),
4363 fold_convert (type, arg1));
4364 else
4365 result = int_const_binop (subcode, arg0, arg1);
4366 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4368 if (cplx_result)
4369 overflow = build_one_cst (type);
4370 else
4371 result = NULL_TREE;
4374 if (result)
4376 if (result == integer_zero_node)
4377 result = build_zero_cst (type);
4378 else if (cplx_result && TREE_TYPE (result) != type)
4380 if (TREE_CODE (result) == INTEGER_CST)
4382 if (arith_overflowed_p (PLUS_EXPR, type, result,
4383 integer_zero_node))
4384 overflow = build_one_cst (type);
4386 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4387 && TYPE_UNSIGNED (type))
4388 || (TYPE_PRECISION (type)
4389 < (TYPE_PRECISION (TREE_TYPE (result))
4390 + (TYPE_UNSIGNED (TREE_TYPE (result))
4391 && !TYPE_UNSIGNED (type)))))
4392 result = NULL_TREE;
4393 if (result)
4394 result = fold_convert (type, result);
4399 if (result)
4401 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4402 result = drop_tree_overflow (result);
4403 if (cplx_result)
4405 if (overflow == NULL_TREE)
4406 overflow = build_zero_cst (TREE_TYPE (result));
4407 tree ctype = build_complex_type (TREE_TYPE (result));
4408 if (TREE_CODE (result) == INTEGER_CST
4409 && TREE_CODE (overflow) == INTEGER_CST)
4410 result = build_complex (ctype, result, overflow);
4411 else
4412 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4413 ctype, result, overflow);
4415 if (!update_call_from_tree (gsi, result))
4416 gimplify_and_update_call_from_tree (gsi, result);
4417 changed = true;
4421 return changed;
4425 /* Return true whether NAME has a use on STMT. */
4427 static bool
4428 has_use_on_stmt (tree name, gimple *stmt)
4430 imm_use_iterator iter;
4431 use_operand_p use_p;
4432 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4433 if (USE_STMT (use_p) == stmt)
4434 return true;
4435 return false;
4438 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4439 gimple_simplify.
4441 Replaces *GSI with the simplification result in RCODE and OPS
4442 and the associated statements in *SEQ. Does the replacement
4443 according to INPLACE and returns true if the operation succeeded. */
4445 static bool
4446 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4447 gimple_match_op *res_op,
4448 gimple_seq *seq, bool inplace)
4450 gimple *stmt = gsi_stmt (*gsi);
4451 tree *ops = res_op->ops;
4452 unsigned int num_ops = res_op->num_ops;
4454 /* Play safe and do not allow abnormals to be mentioned in
4455 newly created statements. See also maybe_push_res_to_seq.
4456 As an exception allow such uses if there was a use of the
4457 same SSA name on the old stmt. */
4458 for (unsigned int i = 0; i < num_ops; ++i)
4459 if (TREE_CODE (ops[i]) == SSA_NAME
4460 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4461 && !has_use_on_stmt (ops[i], stmt))
4462 return false;
4464 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4465 for (unsigned int i = 0; i < 2; ++i)
4466 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4467 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4468 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4469 return false;
4471 /* Don't insert new statements when INPLACE is true, even if we could
4472 reuse STMT for the final statement. */
4473 if (inplace && !gimple_seq_empty_p (*seq))
4474 return false;
4476 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4478 gcc_assert (res_op->code.is_tree_code ());
4479 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4480 /* GIMPLE_CONDs condition may not throw. */
4481 && (!flag_exceptions
4482 || !cfun->can_throw_non_call_exceptions
4483 || !operation_could_trap_p (res_op->code,
4484 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4485 false, NULL_TREE)))
4486 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4487 else if (res_op->code == SSA_NAME)
4488 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4489 build_zero_cst (TREE_TYPE (ops[0])));
4490 else if (res_op->code == INTEGER_CST)
4492 if (integer_zerop (ops[0]))
4493 gimple_cond_make_false (cond_stmt);
4494 else
4495 gimple_cond_make_true (cond_stmt);
4497 else if (!inplace)
4499 tree res = maybe_push_res_to_seq (res_op, seq);
4500 if (!res)
4501 return false;
4502 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4503 build_zero_cst (TREE_TYPE (res)));
4505 else
4506 return false;
4507 if (dump_file && (dump_flags & TDF_DETAILS))
4509 fprintf (dump_file, "gimple_simplified to ");
4510 if (!gimple_seq_empty_p (*seq))
4511 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4512 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4513 0, TDF_SLIM);
4515 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4516 return true;
4518 else if (is_gimple_assign (stmt)
4519 && res_op->code.is_tree_code ())
4521 if (!inplace
4522 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4524 maybe_build_generic_op (res_op);
4525 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4526 res_op->op_or_null (0),
4527 res_op->op_or_null (1),
4528 res_op->op_or_null (2));
4529 if (dump_file && (dump_flags & TDF_DETAILS))
4531 fprintf (dump_file, "gimple_simplified to ");
4532 if (!gimple_seq_empty_p (*seq))
4533 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4534 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4535 0, TDF_SLIM);
4537 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4538 return true;
4541 else if (res_op->code.is_fn_code ()
4542 && gimple_call_combined_fn (stmt) == res_op->code)
4544 gcc_assert (num_ops == gimple_call_num_args (stmt));
4545 for (unsigned int i = 0; i < num_ops; ++i)
4546 gimple_call_set_arg (stmt, i, ops[i]);
4547 if (dump_file && (dump_flags & TDF_DETAILS))
4549 fprintf (dump_file, "gimple_simplified to ");
4550 if (!gimple_seq_empty_p (*seq))
4551 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4552 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4554 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4555 return true;
4557 else if (!inplace)
4559 if (gimple_has_lhs (stmt))
4561 tree lhs = gimple_get_lhs (stmt);
4562 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4563 return false;
4564 if (dump_file && (dump_flags & TDF_DETAILS))
4566 fprintf (dump_file, "gimple_simplified to ");
4567 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4569 gsi_replace_with_seq_vops (gsi, *seq);
4570 return true;
4572 else
4573 gcc_unreachable ();
4576 return false;
4579 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4581 static bool
4582 maybe_canonicalize_mem_ref_addr (tree *t)
4584 bool res = false;
4586 if (TREE_CODE (*t) == ADDR_EXPR)
4587 t = &TREE_OPERAND (*t, 0);
4589 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4590 generic vector extension. The actual vector referenced is
4591 view-converted to an array type for this purpose. If the index
4592 is constant the canonical representation in the middle-end is a
4593 BIT_FIELD_REF so re-write the former to the latter here. */
4594 if (TREE_CODE (*t) == ARRAY_REF
4595 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4596 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4597 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4599 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4600 if (VECTOR_TYPE_P (vtype))
4602 tree low = array_ref_low_bound (*t);
4603 if (TREE_CODE (low) == INTEGER_CST)
4605 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4607 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4608 wi::to_widest (low));
4609 idx = wi::mul (idx, wi::to_widest
4610 (TYPE_SIZE (TREE_TYPE (*t))));
4611 widest_int ext
4612 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4613 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4615 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4616 TREE_TYPE (*t),
4617 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4618 TYPE_SIZE (TREE_TYPE (*t)),
4619 wide_int_to_tree (bitsizetype, idx));
4620 res = true;
4627 while (handled_component_p (*t))
4628 t = &TREE_OPERAND (*t, 0);
4630 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4631 of invariant addresses into a SSA name MEM_REF address. */
4632 if (TREE_CODE (*t) == MEM_REF
4633 || TREE_CODE (*t) == TARGET_MEM_REF)
4635 tree addr = TREE_OPERAND (*t, 0);
4636 if (TREE_CODE (addr) == ADDR_EXPR
4637 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4638 || handled_component_p (TREE_OPERAND (addr, 0))))
4640 tree base;
4641 poly_int64 coffset;
4642 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4643 &coffset);
4644 if (!base)
4645 gcc_unreachable ();
4647 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4648 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4649 TREE_OPERAND (*t, 1),
4650 size_int (coffset));
4651 res = true;
4653 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4654 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4657 /* Canonicalize back MEM_REFs to plain reference trees if the object
4658 accessed is a decl that has the same access semantics as the MEM_REF. */
4659 if (TREE_CODE (*t) == MEM_REF
4660 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4661 && integer_zerop (TREE_OPERAND (*t, 1))
4662 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4664 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4665 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4666 if (/* Same volatile qualification. */
4667 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4668 /* Same TBAA behavior with -fstrict-aliasing. */
4669 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4670 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4671 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4672 /* Same alignment. */
4673 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4674 /* We have to look out here to not drop a required conversion
4675 from the rhs to the lhs if *t appears on the lhs or vice-versa
4676 if it appears on the rhs. Thus require strict type
4677 compatibility. */
4678 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4680 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4681 res = true;
4685 /* Canonicalize TARGET_MEM_REF in particular with respect to
4686 the indexes becoming constant. */
4687 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4689 tree tem = maybe_fold_tmr (*t);
4690 if (tem)
4692 *t = tem;
4693 res = true;
4697 return res;
4700 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4701 distinguishes both cases. */
4703 static bool
4704 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4706 bool changed = false;
4707 gimple *stmt = gsi_stmt (*gsi);
4708 bool nowarning = gimple_no_warning_p (stmt);
4709 unsigned i;
4710 fold_defer_overflow_warnings ();
4712 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4713 after propagation.
4714 ??? This shouldn't be done in generic folding but in the
4715 propagation helpers which also know whether an address was
4716 propagated.
4717 Also canonicalize operand order. */
4718 switch (gimple_code (stmt))
4720 case GIMPLE_ASSIGN:
4721 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4723 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4724 if ((REFERENCE_CLASS_P (*rhs)
4725 || TREE_CODE (*rhs) == ADDR_EXPR)
4726 && maybe_canonicalize_mem_ref_addr (rhs))
4727 changed = true;
4728 tree *lhs = gimple_assign_lhs_ptr (stmt);
4729 if (REFERENCE_CLASS_P (*lhs)
4730 && maybe_canonicalize_mem_ref_addr (lhs))
4731 changed = true;
4733 else
4735 /* Canonicalize operand order. */
4736 enum tree_code code = gimple_assign_rhs_code (stmt);
4737 if (TREE_CODE_CLASS (code) == tcc_comparison
4738 || commutative_tree_code (code)
4739 || commutative_ternary_tree_code (code))
4741 tree rhs1 = gimple_assign_rhs1 (stmt);
4742 tree rhs2 = gimple_assign_rhs2 (stmt);
4743 if (tree_swap_operands_p (rhs1, rhs2))
4745 gimple_assign_set_rhs1 (stmt, rhs2);
4746 gimple_assign_set_rhs2 (stmt, rhs1);
4747 if (TREE_CODE_CLASS (code) == tcc_comparison)
4748 gimple_assign_set_rhs_code (stmt,
4749 swap_tree_comparison (code));
4750 changed = true;
4754 break;
4755 case GIMPLE_CALL:
4757 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4759 tree *arg = gimple_call_arg_ptr (stmt, i);
4760 if (REFERENCE_CLASS_P (*arg)
4761 && maybe_canonicalize_mem_ref_addr (arg))
4762 changed = true;
4764 tree *lhs = gimple_call_lhs_ptr (stmt);
4765 if (*lhs
4766 && REFERENCE_CLASS_P (*lhs)
4767 && maybe_canonicalize_mem_ref_addr (lhs))
4768 changed = true;
4769 break;
4771 case GIMPLE_ASM:
4773 gasm *asm_stmt = as_a <gasm *> (stmt);
4774 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4776 tree link = gimple_asm_output_op (asm_stmt, i);
4777 tree op = TREE_VALUE (link);
4778 if (REFERENCE_CLASS_P (op)
4779 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4780 changed = true;
4782 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4784 tree link = gimple_asm_input_op (asm_stmt, i);
4785 tree op = TREE_VALUE (link);
4786 if ((REFERENCE_CLASS_P (op)
4787 || TREE_CODE (op) == ADDR_EXPR)
4788 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4789 changed = true;
4792 break;
4793 case GIMPLE_DEBUG:
4794 if (gimple_debug_bind_p (stmt))
4796 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4797 if (*val
4798 && (REFERENCE_CLASS_P (*val)
4799 || TREE_CODE (*val) == ADDR_EXPR)
4800 && maybe_canonicalize_mem_ref_addr (val))
4801 changed = true;
4803 break;
4804 case GIMPLE_COND:
4806 /* Canonicalize operand order. */
4807 tree lhs = gimple_cond_lhs (stmt);
4808 tree rhs = gimple_cond_rhs (stmt);
4809 if (tree_swap_operands_p (lhs, rhs))
4811 gcond *gc = as_a <gcond *> (stmt);
4812 gimple_cond_set_lhs (gc, rhs);
4813 gimple_cond_set_rhs (gc, lhs);
4814 gimple_cond_set_code (gc,
4815 swap_tree_comparison (gimple_cond_code (gc)));
4816 changed = true;
4819 default:;
4822 /* Dispatch to pattern-based folding. */
4823 if (!inplace
4824 || is_gimple_assign (stmt)
4825 || gimple_code (stmt) == GIMPLE_COND)
4827 gimple_seq seq = NULL;
4828 gimple_match_op res_op;
4829 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
4830 valueize, valueize))
4832 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
4833 changed = true;
4834 else
4835 gimple_seq_discard (seq);
4839 stmt = gsi_stmt (*gsi);
4841 /* Fold the main computation performed by the statement. */
4842 switch (gimple_code (stmt))
4844 case GIMPLE_ASSIGN:
4846 /* Try to canonicalize for boolean-typed X the comparisons
4847 X == 0, X == 1, X != 0, and X != 1. */
4848 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4849 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4851 tree lhs = gimple_assign_lhs (stmt);
4852 tree op1 = gimple_assign_rhs1 (stmt);
4853 tree op2 = gimple_assign_rhs2 (stmt);
4854 tree type = TREE_TYPE (op1);
4856 /* Check whether the comparison operands are of the same boolean
4857 type as the result type is.
4858 Check that second operand is an integer-constant with value
4859 one or zero. */
4860 if (TREE_CODE (op2) == INTEGER_CST
4861 && (integer_zerop (op2) || integer_onep (op2))
4862 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4864 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4865 bool is_logical_not = false;
4867 /* X == 0 and X != 1 is a logical-not.of X
4868 X == 1 and X != 0 is X */
4869 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4870 || (cmp_code == NE_EXPR && integer_onep (op2)))
4871 is_logical_not = true;
4873 if (is_logical_not == false)
4874 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4875 /* Only for one-bit precision typed X the transformation
4876 !X -> ~X is valied. */
4877 else if (TYPE_PRECISION (type) == 1)
4878 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4879 /* Otherwise we use !X -> X ^ 1. */
4880 else
4881 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4882 build_int_cst (type, 1));
4883 changed = true;
4884 break;
4888 unsigned old_num_ops = gimple_num_ops (stmt);
4889 tree lhs = gimple_assign_lhs (stmt);
4890 tree new_rhs = fold_gimple_assign (gsi);
4891 if (new_rhs
4892 && !useless_type_conversion_p (TREE_TYPE (lhs),
4893 TREE_TYPE (new_rhs)))
4894 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4895 if (new_rhs
4896 && (!inplace
4897 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4899 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4900 changed = true;
4902 break;
4905 case GIMPLE_CALL:
4906 changed |= gimple_fold_call (gsi, inplace);
4907 break;
4909 case GIMPLE_ASM:
4910 /* Fold *& in asm operands. */
4912 gasm *asm_stmt = as_a <gasm *> (stmt);
4913 size_t noutputs;
4914 const char **oconstraints;
4915 const char *constraint;
4916 bool allows_mem, allows_reg;
4918 noutputs = gimple_asm_noutputs (asm_stmt);
4919 oconstraints = XALLOCAVEC (const char *, noutputs);
4921 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4923 tree link = gimple_asm_output_op (asm_stmt, i);
4924 tree op = TREE_VALUE (link);
4925 oconstraints[i]
4926 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4927 if (REFERENCE_CLASS_P (op)
4928 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4930 TREE_VALUE (link) = op;
4931 changed = true;
4934 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4936 tree link = gimple_asm_input_op (asm_stmt, i);
4937 tree op = TREE_VALUE (link);
4938 constraint
4939 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4940 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4941 oconstraints, &allows_mem, &allows_reg);
4942 if (REFERENCE_CLASS_P (op)
4943 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4944 != NULL_TREE)
4946 TREE_VALUE (link) = op;
4947 changed = true;
4951 break;
4953 case GIMPLE_DEBUG:
4954 if (gimple_debug_bind_p (stmt))
4956 tree val = gimple_debug_bind_get_value (stmt);
4957 if (val
4958 && REFERENCE_CLASS_P (val))
4960 tree tem = maybe_fold_reference (val, false);
4961 if (tem)
4963 gimple_debug_bind_set_value (stmt, tem);
4964 changed = true;
4967 else if (val
4968 && TREE_CODE (val) == ADDR_EXPR)
4970 tree ref = TREE_OPERAND (val, 0);
4971 tree tem = maybe_fold_reference (ref, false);
4972 if (tem)
4974 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4975 gimple_debug_bind_set_value (stmt, tem);
4976 changed = true;
4980 break;
4982 case GIMPLE_RETURN:
4984 greturn *ret_stmt = as_a<greturn *> (stmt);
4985 tree ret = gimple_return_retval(ret_stmt);
4987 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4989 tree val = valueize (ret);
4990 if (val && val != ret
4991 && may_propagate_copy (ret, val))
4993 gimple_return_set_retval (ret_stmt, val);
4994 changed = true;
4998 break;
5000 default:;
5003 stmt = gsi_stmt (*gsi);
5005 /* Fold *& on the lhs. */
5006 if (gimple_has_lhs (stmt))
5008 tree lhs = gimple_get_lhs (stmt);
5009 if (lhs && REFERENCE_CLASS_P (lhs))
5011 tree new_lhs = maybe_fold_reference (lhs, true);
5012 if (new_lhs)
5014 gimple_set_lhs (stmt, new_lhs);
5015 changed = true;
5020 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
5021 return changed;
5024 /* Valueziation callback that ends up not following SSA edges. */
5026 tree
5027 no_follow_ssa_edges (tree)
5029 return NULL_TREE;
5032 /* Valueization callback that ends up following single-use SSA edges only. */
5034 tree
5035 follow_single_use_edges (tree val)
5037 if (TREE_CODE (val) == SSA_NAME
5038 && !has_single_use (val))
5039 return NULL_TREE;
5040 return val;
5043 /* Valueization callback that follows all SSA edges. */
5045 tree
5046 follow_all_ssa_edges (tree val)
5048 return val;
5051 /* Fold the statement pointed to by GSI. In some cases, this function may
5052 replace the whole statement with a new one. Returns true iff folding
5053 makes any changes.
5054 The statement pointed to by GSI should be in valid gimple form but may
5055 be in unfolded state as resulting from for example constant propagation
5056 which can produce *&x = 0. */
5058 bool
5059 fold_stmt (gimple_stmt_iterator *gsi)
5061 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5064 bool
5065 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5067 return fold_stmt_1 (gsi, false, valueize);
5070 /* Perform the minimal folding on statement *GSI. Only operations like
5071 *&x created by constant propagation are handled. The statement cannot
5072 be replaced with a new one. Return true if the statement was
5073 changed, false otherwise.
5074 The statement *GSI should be in valid gimple form but may
5075 be in unfolded state as resulting from for example constant propagation
5076 which can produce *&x = 0. */
5078 bool
5079 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5081 gimple *stmt = gsi_stmt (*gsi);
5082 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5083 gcc_assert (gsi_stmt (*gsi) == stmt);
5084 return changed;
5087 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5088 if EXPR is null or we don't know how.
5089 If non-null, the result always has boolean type. */
5091 static tree
5092 canonicalize_bool (tree expr, bool invert)
5094 if (!expr)
5095 return NULL_TREE;
5096 else if (invert)
5098 if (integer_nonzerop (expr))
5099 return boolean_false_node;
5100 else if (integer_zerop (expr))
5101 return boolean_true_node;
5102 else if (TREE_CODE (expr) == SSA_NAME)
5103 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5104 build_int_cst (TREE_TYPE (expr), 0));
5105 else if (COMPARISON_CLASS_P (expr))
5106 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5107 boolean_type_node,
5108 TREE_OPERAND (expr, 0),
5109 TREE_OPERAND (expr, 1));
5110 else
5111 return NULL_TREE;
5113 else
5115 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5116 return expr;
5117 if (integer_nonzerop (expr))
5118 return boolean_true_node;
5119 else if (integer_zerop (expr))
5120 return boolean_false_node;
5121 else if (TREE_CODE (expr) == SSA_NAME)
5122 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5123 build_int_cst (TREE_TYPE (expr), 0));
5124 else if (COMPARISON_CLASS_P (expr))
5125 return fold_build2 (TREE_CODE (expr),
5126 boolean_type_node,
5127 TREE_OPERAND (expr, 0),
5128 TREE_OPERAND (expr, 1));
5129 else
5130 return NULL_TREE;
5134 /* Check to see if a boolean expression EXPR is logically equivalent to the
5135 comparison (OP1 CODE OP2). Check for various identities involving
5136 SSA_NAMEs. */
5138 static bool
5139 same_bool_comparison_p (const_tree expr, enum tree_code code,
5140 const_tree op1, const_tree op2)
5142 gimple *s;
5144 /* The obvious case. */
5145 if (TREE_CODE (expr) == code
5146 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5147 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5148 return true;
5150 /* Check for comparing (name, name != 0) and the case where expr
5151 is an SSA_NAME with a definition matching the comparison. */
5152 if (TREE_CODE (expr) == SSA_NAME
5153 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5155 if (operand_equal_p (expr, op1, 0))
5156 return ((code == NE_EXPR && integer_zerop (op2))
5157 || (code == EQ_EXPR && integer_nonzerop (op2)));
5158 s = SSA_NAME_DEF_STMT (expr);
5159 if (is_gimple_assign (s)
5160 && gimple_assign_rhs_code (s) == code
5161 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5162 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5163 return true;
5166 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5167 of name is a comparison, recurse. */
5168 if (TREE_CODE (op1) == SSA_NAME
5169 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5171 s = SSA_NAME_DEF_STMT (op1);
5172 if (is_gimple_assign (s)
5173 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5175 enum tree_code c = gimple_assign_rhs_code (s);
5176 if ((c == NE_EXPR && integer_zerop (op2))
5177 || (c == EQ_EXPR && integer_nonzerop (op2)))
5178 return same_bool_comparison_p (expr, c,
5179 gimple_assign_rhs1 (s),
5180 gimple_assign_rhs2 (s));
5181 if ((c == EQ_EXPR && integer_zerop (op2))
5182 || (c == NE_EXPR && integer_nonzerop (op2)))
5183 return same_bool_comparison_p (expr,
5184 invert_tree_comparison (c, false),
5185 gimple_assign_rhs1 (s),
5186 gimple_assign_rhs2 (s));
5189 return false;
5192 /* Check to see if two boolean expressions OP1 and OP2 are logically
5193 equivalent. */
5195 static bool
5196 same_bool_result_p (const_tree op1, const_tree op2)
5198 /* Simple cases first. */
5199 if (operand_equal_p (op1, op2, 0))
5200 return true;
5202 /* Check the cases where at least one of the operands is a comparison.
5203 These are a bit smarter than operand_equal_p in that they apply some
5204 identifies on SSA_NAMEs. */
5205 if (COMPARISON_CLASS_P (op2)
5206 && same_bool_comparison_p (op1, TREE_CODE (op2),
5207 TREE_OPERAND (op2, 0),
5208 TREE_OPERAND (op2, 1)))
5209 return true;
5210 if (COMPARISON_CLASS_P (op1)
5211 && same_bool_comparison_p (op2, TREE_CODE (op1),
5212 TREE_OPERAND (op1, 0),
5213 TREE_OPERAND (op1, 1)))
5214 return true;
5216 /* Default case. */
5217 return false;
5220 /* Forward declarations for some mutually recursive functions. */
5222 static tree
5223 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5224 enum tree_code code2, tree op2a, tree op2b);
5225 static tree
5226 and_var_with_comparison (tree var, bool invert,
5227 enum tree_code code2, tree op2a, tree op2b);
5228 static tree
5229 and_var_with_comparison_1 (gimple *stmt,
5230 enum tree_code code2, tree op2a, tree op2b);
5231 static tree
5232 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5233 enum tree_code code2, tree op2a, tree op2b);
5234 static tree
5235 or_var_with_comparison (tree var, bool invert,
5236 enum tree_code code2, tree op2a, tree op2b);
5237 static tree
5238 or_var_with_comparison_1 (gimple *stmt,
5239 enum tree_code code2, tree op2a, tree op2b);
5241 /* Helper function for and_comparisons_1: try to simplify the AND of the
5242 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5243 If INVERT is true, invert the value of the VAR before doing the AND.
5244 Return NULL_EXPR if we can't simplify this to a single expression. */
5246 static tree
5247 and_var_with_comparison (tree var, bool invert,
5248 enum tree_code code2, tree op2a, tree op2b)
5250 tree t;
5251 gimple *stmt = SSA_NAME_DEF_STMT (var);
5253 /* We can only deal with variables whose definitions are assignments. */
5254 if (!is_gimple_assign (stmt))
5255 return NULL_TREE;
5257 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5258 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5259 Then we only have to consider the simpler non-inverted cases. */
5260 if (invert)
5261 t = or_var_with_comparison_1 (stmt,
5262 invert_tree_comparison (code2, false),
5263 op2a, op2b);
5264 else
5265 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5266 return canonicalize_bool (t, invert);
5269 /* Try to simplify the AND of the ssa variable defined by the assignment
5270 STMT with the comparison specified by (OP2A CODE2 OP2B).
5271 Return NULL_EXPR if we can't simplify this to a single expression. */
5273 static tree
5274 and_var_with_comparison_1 (gimple *stmt,
5275 enum tree_code code2, tree op2a, tree op2b)
5277 tree var = gimple_assign_lhs (stmt);
5278 tree true_test_var = NULL_TREE;
5279 tree false_test_var = NULL_TREE;
5280 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5282 /* Check for identities like (var AND (var == 0)) => false. */
5283 if (TREE_CODE (op2a) == SSA_NAME
5284 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5286 if ((code2 == NE_EXPR && integer_zerop (op2b))
5287 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5289 true_test_var = op2a;
5290 if (var == true_test_var)
5291 return var;
5293 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5294 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5296 false_test_var = op2a;
5297 if (var == false_test_var)
5298 return boolean_false_node;
5302 /* If the definition is a comparison, recurse on it. */
5303 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5305 tree t = and_comparisons_1 (innercode,
5306 gimple_assign_rhs1 (stmt),
5307 gimple_assign_rhs2 (stmt),
5308 code2,
5309 op2a,
5310 op2b);
5311 if (t)
5312 return t;
5315 /* If the definition is an AND or OR expression, we may be able to
5316 simplify by reassociating. */
5317 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5318 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5320 tree inner1 = gimple_assign_rhs1 (stmt);
5321 tree inner2 = gimple_assign_rhs2 (stmt);
5322 gimple *s;
5323 tree t;
5324 tree partial = NULL_TREE;
5325 bool is_and = (innercode == BIT_AND_EXPR);
5327 /* Check for boolean identities that don't require recursive examination
5328 of inner1/inner2:
5329 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5330 inner1 AND (inner1 OR inner2) => inner1
5331 !inner1 AND (inner1 AND inner2) => false
5332 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5333 Likewise for similar cases involving inner2. */
5334 if (inner1 == true_test_var)
5335 return (is_and ? var : inner1);
5336 else if (inner2 == true_test_var)
5337 return (is_and ? var : inner2);
5338 else if (inner1 == false_test_var)
5339 return (is_and
5340 ? boolean_false_node
5341 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5342 else if (inner2 == false_test_var)
5343 return (is_and
5344 ? boolean_false_node
5345 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5347 /* Next, redistribute/reassociate the AND across the inner tests.
5348 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5349 if (TREE_CODE (inner1) == SSA_NAME
5350 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5351 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5352 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5353 gimple_assign_rhs1 (s),
5354 gimple_assign_rhs2 (s),
5355 code2, op2a, op2b)))
5357 /* Handle the AND case, where we are reassociating:
5358 (inner1 AND inner2) AND (op2a code2 op2b)
5359 => (t AND inner2)
5360 If the partial result t is a constant, we win. Otherwise
5361 continue on to try reassociating with the other inner test. */
5362 if (is_and)
5364 if (integer_onep (t))
5365 return inner2;
5366 else if (integer_zerop (t))
5367 return boolean_false_node;
5370 /* Handle the OR case, where we are redistributing:
5371 (inner1 OR inner2) AND (op2a code2 op2b)
5372 => (t OR (inner2 AND (op2a code2 op2b))) */
5373 else if (integer_onep (t))
5374 return boolean_true_node;
5376 /* Save partial result for later. */
5377 partial = t;
5380 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5381 if (TREE_CODE (inner2) == SSA_NAME
5382 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5383 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5384 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5385 gimple_assign_rhs1 (s),
5386 gimple_assign_rhs2 (s),
5387 code2, op2a, op2b)))
5389 /* Handle the AND case, where we are reassociating:
5390 (inner1 AND inner2) AND (op2a code2 op2b)
5391 => (inner1 AND t) */
5392 if (is_and)
5394 if (integer_onep (t))
5395 return inner1;
5396 else if (integer_zerop (t))
5397 return boolean_false_node;
5398 /* If both are the same, we can apply the identity
5399 (x AND x) == x. */
5400 else if (partial && same_bool_result_p (t, partial))
5401 return t;
5404 /* Handle the OR case. where we are redistributing:
5405 (inner1 OR inner2) AND (op2a code2 op2b)
5406 => (t OR (inner1 AND (op2a code2 op2b)))
5407 => (t OR partial) */
5408 else
5410 if (integer_onep (t))
5411 return boolean_true_node;
5412 else if (partial)
5414 /* We already got a simplification for the other
5415 operand to the redistributed OR expression. The
5416 interesting case is when at least one is false.
5417 Or, if both are the same, we can apply the identity
5418 (x OR x) == x. */
5419 if (integer_zerop (partial))
5420 return t;
5421 else if (integer_zerop (t))
5422 return partial;
5423 else if (same_bool_result_p (t, partial))
5424 return t;
5429 return NULL_TREE;
5432 /* Try to simplify the AND of two comparisons defined by
5433 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5434 If this can be done without constructing an intermediate value,
5435 return the resulting tree; otherwise NULL_TREE is returned.
5436 This function is deliberately asymmetric as it recurses on SSA_DEFs
5437 in the first comparison but not the second. */
5439 static tree
5440 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5441 enum tree_code code2, tree op2a, tree op2b)
5443 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5445 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5446 if (operand_equal_p (op1a, op2a, 0)
5447 && operand_equal_p (op1b, op2b, 0))
5449 /* Result will be either NULL_TREE, or a combined comparison. */
5450 tree t = combine_comparisons (UNKNOWN_LOCATION,
5451 TRUTH_ANDIF_EXPR, code1, code2,
5452 truth_type, op1a, op1b);
5453 if (t)
5454 return t;
5457 /* Likewise the swapped case of the above. */
5458 if (operand_equal_p (op1a, op2b, 0)
5459 && operand_equal_p (op1b, op2a, 0))
5461 /* Result will be either NULL_TREE, or a combined comparison. */
5462 tree t = combine_comparisons (UNKNOWN_LOCATION,
5463 TRUTH_ANDIF_EXPR, code1,
5464 swap_tree_comparison (code2),
5465 truth_type, op1a, op1b);
5466 if (t)
5467 return t;
5470 /* If both comparisons are of the same value against constants, we might
5471 be able to merge them. */
5472 if (operand_equal_p (op1a, op2a, 0)
5473 && TREE_CODE (op1b) == INTEGER_CST
5474 && TREE_CODE (op2b) == INTEGER_CST)
5476 int cmp = tree_int_cst_compare (op1b, op2b);
5478 /* If we have (op1a == op1b), we should either be able to
5479 return that or FALSE, depending on whether the constant op1b
5480 also satisfies the other comparison against op2b. */
5481 if (code1 == EQ_EXPR)
5483 bool done = true;
5484 bool val;
5485 switch (code2)
5487 case EQ_EXPR: val = (cmp == 0); break;
5488 case NE_EXPR: val = (cmp != 0); break;
5489 case LT_EXPR: val = (cmp < 0); break;
5490 case GT_EXPR: val = (cmp > 0); break;
5491 case LE_EXPR: val = (cmp <= 0); break;
5492 case GE_EXPR: val = (cmp >= 0); break;
5493 default: done = false;
5495 if (done)
5497 if (val)
5498 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5499 else
5500 return boolean_false_node;
5503 /* Likewise if the second comparison is an == comparison. */
5504 else if (code2 == EQ_EXPR)
5506 bool done = true;
5507 bool val;
5508 switch (code1)
5510 case EQ_EXPR: val = (cmp == 0); break;
5511 case NE_EXPR: val = (cmp != 0); break;
5512 case LT_EXPR: val = (cmp > 0); break;
5513 case GT_EXPR: val = (cmp < 0); break;
5514 case LE_EXPR: val = (cmp >= 0); break;
5515 case GE_EXPR: val = (cmp <= 0); break;
5516 default: done = false;
5518 if (done)
5520 if (val)
5521 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5522 else
5523 return boolean_false_node;
5527 /* Same business with inequality tests. */
5528 else if (code1 == NE_EXPR)
5530 bool val;
5531 switch (code2)
5533 case EQ_EXPR: val = (cmp != 0); break;
5534 case NE_EXPR: val = (cmp == 0); break;
5535 case LT_EXPR: val = (cmp >= 0); break;
5536 case GT_EXPR: val = (cmp <= 0); break;
5537 case LE_EXPR: val = (cmp > 0); break;
5538 case GE_EXPR: val = (cmp < 0); break;
5539 default:
5540 val = false;
5542 if (val)
5543 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5545 else if (code2 == NE_EXPR)
5547 bool val;
5548 switch (code1)
5550 case EQ_EXPR: val = (cmp == 0); break;
5551 case NE_EXPR: val = (cmp != 0); break;
5552 case LT_EXPR: val = (cmp <= 0); break;
5553 case GT_EXPR: val = (cmp >= 0); break;
5554 case LE_EXPR: val = (cmp < 0); break;
5555 case GE_EXPR: val = (cmp > 0); break;
5556 default:
5557 val = false;
5559 if (val)
5560 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5563 /* Chose the more restrictive of two < or <= comparisons. */
5564 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5565 && (code2 == LT_EXPR || code2 == LE_EXPR))
5567 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5568 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5569 else
5570 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5573 /* Likewise chose the more restrictive of two > or >= comparisons. */
5574 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5575 && (code2 == GT_EXPR || code2 == GE_EXPR))
5577 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5578 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5579 else
5580 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5583 /* Check for singleton ranges. */
5584 else if (cmp == 0
5585 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5586 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5587 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5589 /* Check for disjoint ranges. */
5590 else if (cmp <= 0
5591 && (code1 == LT_EXPR || code1 == LE_EXPR)
5592 && (code2 == GT_EXPR || code2 == GE_EXPR))
5593 return boolean_false_node;
5594 else if (cmp >= 0
5595 && (code1 == GT_EXPR || code1 == GE_EXPR)
5596 && (code2 == LT_EXPR || code2 == LE_EXPR))
5597 return boolean_false_node;
5600 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5601 NAME's definition is a truth value. See if there are any simplifications
5602 that can be done against the NAME's definition. */
5603 if (TREE_CODE (op1a) == SSA_NAME
5604 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5605 && (integer_zerop (op1b) || integer_onep (op1b)))
5607 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5608 || (code1 == NE_EXPR && integer_onep (op1b)));
5609 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5610 switch (gimple_code (stmt))
5612 case GIMPLE_ASSIGN:
5613 /* Try to simplify by copy-propagating the definition. */
5614 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5616 case GIMPLE_PHI:
5617 /* If every argument to the PHI produces the same result when
5618 ANDed with the second comparison, we win.
5619 Do not do this unless the type is bool since we need a bool
5620 result here anyway. */
5621 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5623 tree result = NULL_TREE;
5624 unsigned i;
5625 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5627 tree arg = gimple_phi_arg_def (stmt, i);
5629 /* If this PHI has itself as an argument, ignore it.
5630 If all the other args produce the same result,
5631 we're still OK. */
5632 if (arg == gimple_phi_result (stmt))
5633 continue;
5634 else if (TREE_CODE (arg) == INTEGER_CST)
5636 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5638 if (!result)
5639 result = boolean_false_node;
5640 else if (!integer_zerop (result))
5641 return NULL_TREE;
5643 else if (!result)
5644 result = fold_build2 (code2, boolean_type_node,
5645 op2a, op2b);
5646 else if (!same_bool_comparison_p (result,
5647 code2, op2a, op2b))
5648 return NULL_TREE;
5650 else if (TREE_CODE (arg) == SSA_NAME
5651 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5653 tree temp;
5654 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5655 /* In simple cases we can look through PHI nodes,
5656 but we have to be careful with loops.
5657 See PR49073. */
5658 if (! dom_info_available_p (CDI_DOMINATORS)
5659 || gimple_bb (def_stmt) == gimple_bb (stmt)
5660 || dominated_by_p (CDI_DOMINATORS,
5661 gimple_bb (def_stmt),
5662 gimple_bb (stmt)))
5663 return NULL_TREE;
5664 temp = and_var_with_comparison (arg, invert, code2,
5665 op2a, op2b);
5666 if (!temp)
5667 return NULL_TREE;
5668 else if (!result)
5669 result = temp;
5670 else if (!same_bool_result_p (result, temp))
5671 return NULL_TREE;
5673 else
5674 return NULL_TREE;
5676 return result;
5679 default:
5680 break;
5683 return NULL_TREE;
5686 /* Try to simplify the AND of two comparisons, specified by
5687 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5688 If this can be simplified to a single expression (without requiring
5689 introducing more SSA variables to hold intermediate values),
5690 return the resulting tree. Otherwise return NULL_TREE.
5691 If the result expression is non-null, it has boolean type. */
5693 tree
5694 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5695 enum tree_code code2, tree op2a, tree op2b)
5697 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5698 if (t)
5699 return t;
5700 else
5701 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5704 /* Helper function for or_comparisons_1: try to simplify the OR of the
5705 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5706 If INVERT is true, invert the value of VAR before doing the OR.
5707 Return NULL_EXPR if we can't simplify this to a single expression. */
5709 static tree
5710 or_var_with_comparison (tree var, bool invert,
5711 enum tree_code code2, tree op2a, tree op2b)
5713 tree t;
5714 gimple *stmt = SSA_NAME_DEF_STMT (var);
5716 /* We can only deal with variables whose definitions are assignments. */
5717 if (!is_gimple_assign (stmt))
5718 return NULL_TREE;
5720 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5721 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5722 Then we only have to consider the simpler non-inverted cases. */
5723 if (invert)
5724 t = and_var_with_comparison_1 (stmt,
5725 invert_tree_comparison (code2, false),
5726 op2a, op2b);
5727 else
5728 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5729 return canonicalize_bool (t, invert);
5732 /* Try to simplify the OR of the ssa variable defined by the assignment
5733 STMT with the comparison specified by (OP2A CODE2 OP2B).
5734 Return NULL_EXPR if we can't simplify this to a single expression. */
5736 static tree
5737 or_var_with_comparison_1 (gimple *stmt,
5738 enum tree_code code2, tree op2a, tree op2b)
5740 tree var = gimple_assign_lhs (stmt);
5741 tree true_test_var = NULL_TREE;
5742 tree false_test_var = NULL_TREE;
5743 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5745 /* Check for identities like (var OR (var != 0)) => true . */
5746 if (TREE_CODE (op2a) == SSA_NAME
5747 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5749 if ((code2 == NE_EXPR && integer_zerop (op2b))
5750 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5752 true_test_var = op2a;
5753 if (var == true_test_var)
5754 return var;
5756 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5757 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5759 false_test_var = op2a;
5760 if (var == false_test_var)
5761 return boolean_true_node;
5765 /* If the definition is a comparison, recurse on it. */
5766 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5768 tree t = or_comparisons_1 (innercode,
5769 gimple_assign_rhs1 (stmt),
5770 gimple_assign_rhs2 (stmt),
5771 code2,
5772 op2a,
5773 op2b);
5774 if (t)
5775 return t;
5778 /* If the definition is an AND or OR expression, we may be able to
5779 simplify by reassociating. */
5780 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5781 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5783 tree inner1 = gimple_assign_rhs1 (stmt);
5784 tree inner2 = gimple_assign_rhs2 (stmt);
5785 gimple *s;
5786 tree t;
5787 tree partial = NULL_TREE;
5788 bool is_or = (innercode == BIT_IOR_EXPR);
5790 /* Check for boolean identities that don't require recursive examination
5791 of inner1/inner2:
5792 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5793 inner1 OR (inner1 AND inner2) => inner1
5794 !inner1 OR (inner1 OR inner2) => true
5795 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5797 if (inner1 == true_test_var)
5798 return (is_or ? var : inner1);
5799 else if (inner2 == true_test_var)
5800 return (is_or ? var : inner2);
5801 else if (inner1 == false_test_var)
5802 return (is_or
5803 ? boolean_true_node
5804 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5805 else if (inner2 == false_test_var)
5806 return (is_or
5807 ? boolean_true_node
5808 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5810 /* Next, redistribute/reassociate the OR across the inner tests.
5811 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5812 if (TREE_CODE (inner1) == SSA_NAME
5813 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5814 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5815 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5816 gimple_assign_rhs1 (s),
5817 gimple_assign_rhs2 (s),
5818 code2, op2a, op2b)))
5820 /* Handle the OR case, where we are reassociating:
5821 (inner1 OR inner2) OR (op2a code2 op2b)
5822 => (t OR inner2)
5823 If the partial result t is a constant, we win. Otherwise
5824 continue on to try reassociating with the other inner test. */
5825 if (is_or)
5827 if (integer_onep (t))
5828 return boolean_true_node;
5829 else if (integer_zerop (t))
5830 return inner2;
5833 /* Handle the AND case, where we are redistributing:
5834 (inner1 AND inner2) OR (op2a code2 op2b)
5835 => (t AND (inner2 OR (op2a code op2b))) */
5836 else if (integer_zerop (t))
5837 return boolean_false_node;
5839 /* Save partial result for later. */
5840 partial = t;
5843 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5844 if (TREE_CODE (inner2) == SSA_NAME
5845 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5846 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5847 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5848 gimple_assign_rhs1 (s),
5849 gimple_assign_rhs2 (s),
5850 code2, op2a, op2b)))
5852 /* Handle the OR case, where we are reassociating:
5853 (inner1 OR inner2) OR (op2a code2 op2b)
5854 => (inner1 OR t)
5855 => (t OR partial) */
5856 if (is_or)
5858 if (integer_zerop (t))
5859 return inner1;
5860 else if (integer_onep (t))
5861 return boolean_true_node;
5862 /* If both are the same, we can apply the identity
5863 (x OR x) == x. */
5864 else if (partial && same_bool_result_p (t, partial))
5865 return t;
5868 /* Handle the AND case, where we are redistributing:
5869 (inner1 AND inner2) OR (op2a code2 op2b)
5870 => (t AND (inner1 OR (op2a code2 op2b)))
5871 => (t AND partial) */
5872 else
5874 if (integer_zerop (t))
5875 return boolean_false_node;
5876 else if (partial)
5878 /* We already got a simplification for the other
5879 operand to the redistributed AND expression. The
5880 interesting case is when at least one is true.
5881 Or, if both are the same, we can apply the identity
5882 (x AND x) == x. */
5883 if (integer_onep (partial))
5884 return t;
5885 else if (integer_onep (t))
5886 return partial;
5887 else if (same_bool_result_p (t, partial))
5888 return t;
5893 return NULL_TREE;
5896 /* Try to simplify the OR of two comparisons defined by
5897 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5898 If this can be done without constructing an intermediate value,
5899 return the resulting tree; otherwise NULL_TREE is returned.
5900 This function is deliberately asymmetric as it recurses on SSA_DEFs
5901 in the first comparison but not the second. */
5903 static tree
5904 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5905 enum tree_code code2, tree op2a, tree op2b)
5907 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5909 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5910 if (operand_equal_p (op1a, op2a, 0)
5911 && operand_equal_p (op1b, op2b, 0))
5913 /* Result will be either NULL_TREE, or a combined comparison. */
5914 tree t = combine_comparisons (UNKNOWN_LOCATION,
5915 TRUTH_ORIF_EXPR, code1, code2,
5916 truth_type, op1a, op1b);
5917 if (t)
5918 return t;
5921 /* Likewise the swapped case of the above. */
5922 if (operand_equal_p (op1a, op2b, 0)
5923 && operand_equal_p (op1b, op2a, 0))
5925 /* Result will be either NULL_TREE, or a combined comparison. */
5926 tree t = combine_comparisons (UNKNOWN_LOCATION,
5927 TRUTH_ORIF_EXPR, code1,
5928 swap_tree_comparison (code2),
5929 truth_type, op1a, op1b);
5930 if (t)
5931 return t;
5934 /* If both comparisons are of the same value against constants, we might
5935 be able to merge them. */
5936 if (operand_equal_p (op1a, op2a, 0)
5937 && TREE_CODE (op1b) == INTEGER_CST
5938 && TREE_CODE (op2b) == INTEGER_CST)
5940 int cmp = tree_int_cst_compare (op1b, op2b);
5942 /* If we have (op1a != op1b), we should either be able to
5943 return that or TRUE, depending on whether the constant op1b
5944 also satisfies the other comparison against op2b. */
5945 if (code1 == NE_EXPR)
5947 bool done = true;
5948 bool val;
5949 switch (code2)
5951 case EQ_EXPR: val = (cmp == 0); break;
5952 case NE_EXPR: val = (cmp != 0); break;
5953 case LT_EXPR: val = (cmp < 0); break;
5954 case GT_EXPR: val = (cmp > 0); break;
5955 case LE_EXPR: val = (cmp <= 0); break;
5956 case GE_EXPR: val = (cmp >= 0); break;
5957 default: done = false;
5959 if (done)
5961 if (val)
5962 return boolean_true_node;
5963 else
5964 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5967 /* Likewise if the second comparison is a != comparison. */
5968 else if (code2 == NE_EXPR)
5970 bool done = true;
5971 bool val;
5972 switch (code1)
5974 case EQ_EXPR: val = (cmp == 0); break;
5975 case NE_EXPR: val = (cmp != 0); break;
5976 case LT_EXPR: val = (cmp > 0); break;
5977 case GT_EXPR: val = (cmp < 0); break;
5978 case LE_EXPR: val = (cmp >= 0); break;
5979 case GE_EXPR: val = (cmp <= 0); break;
5980 default: done = false;
5982 if (done)
5984 if (val)
5985 return boolean_true_node;
5986 else
5987 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5991 /* See if an equality test is redundant with the other comparison. */
5992 else if (code1 == EQ_EXPR)
5994 bool val;
5995 switch (code2)
5997 case EQ_EXPR: val = (cmp == 0); break;
5998 case NE_EXPR: val = (cmp != 0); break;
5999 case LT_EXPR: val = (cmp < 0); break;
6000 case GT_EXPR: val = (cmp > 0); break;
6001 case LE_EXPR: val = (cmp <= 0); break;
6002 case GE_EXPR: val = (cmp >= 0); break;
6003 default:
6004 val = false;
6006 if (val)
6007 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6009 else if (code2 == EQ_EXPR)
6011 bool val;
6012 switch (code1)
6014 case EQ_EXPR: val = (cmp == 0); break;
6015 case NE_EXPR: val = (cmp != 0); break;
6016 case LT_EXPR: val = (cmp > 0); break;
6017 case GT_EXPR: val = (cmp < 0); break;
6018 case LE_EXPR: val = (cmp >= 0); break;
6019 case GE_EXPR: val = (cmp <= 0); break;
6020 default:
6021 val = false;
6023 if (val)
6024 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6027 /* Chose the less restrictive of two < or <= comparisons. */
6028 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
6029 && (code2 == LT_EXPR || code2 == LE_EXPR))
6031 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
6032 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6033 else
6034 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6037 /* Likewise chose the less restrictive of two > or >= comparisons. */
6038 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
6039 && (code2 == GT_EXPR || code2 == GE_EXPR))
6041 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
6042 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6043 else
6044 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6047 /* Check for singleton ranges. */
6048 else if (cmp == 0
6049 && ((code1 == LT_EXPR && code2 == GT_EXPR)
6050 || (code1 == GT_EXPR && code2 == LT_EXPR)))
6051 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
6053 /* Check for less/greater pairs that don't restrict the range at all. */
6054 else if (cmp >= 0
6055 && (code1 == LT_EXPR || code1 == LE_EXPR)
6056 && (code2 == GT_EXPR || code2 == GE_EXPR))
6057 return boolean_true_node;
6058 else if (cmp <= 0
6059 && (code1 == GT_EXPR || code1 == GE_EXPR)
6060 && (code2 == LT_EXPR || code2 == LE_EXPR))
6061 return boolean_true_node;
6064 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6065 NAME's definition is a truth value. See if there are any simplifications
6066 that can be done against the NAME's definition. */
6067 if (TREE_CODE (op1a) == SSA_NAME
6068 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6069 && (integer_zerop (op1b) || integer_onep (op1b)))
6071 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6072 || (code1 == NE_EXPR && integer_onep (op1b)));
6073 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6074 switch (gimple_code (stmt))
6076 case GIMPLE_ASSIGN:
6077 /* Try to simplify by copy-propagating the definition. */
6078 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6080 case GIMPLE_PHI:
6081 /* If every argument to the PHI produces the same result when
6082 ORed with the second comparison, we win.
6083 Do not do this unless the type is bool since we need a bool
6084 result here anyway. */
6085 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6087 tree result = NULL_TREE;
6088 unsigned i;
6089 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6091 tree arg = gimple_phi_arg_def (stmt, i);
6093 /* If this PHI has itself as an argument, ignore it.
6094 If all the other args produce the same result,
6095 we're still OK. */
6096 if (arg == gimple_phi_result (stmt))
6097 continue;
6098 else if (TREE_CODE (arg) == INTEGER_CST)
6100 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6102 if (!result)
6103 result = boolean_true_node;
6104 else if (!integer_onep (result))
6105 return NULL_TREE;
6107 else if (!result)
6108 result = fold_build2 (code2, boolean_type_node,
6109 op2a, op2b);
6110 else if (!same_bool_comparison_p (result,
6111 code2, op2a, op2b))
6112 return NULL_TREE;
6114 else if (TREE_CODE (arg) == SSA_NAME
6115 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6117 tree temp;
6118 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6119 /* In simple cases we can look through PHI nodes,
6120 but we have to be careful with loops.
6121 See PR49073. */
6122 if (! dom_info_available_p (CDI_DOMINATORS)
6123 || gimple_bb (def_stmt) == gimple_bb (stmt)
6124 || dominated_by_p (CDI_DOMINATORS,
6125 gimple_bb (def_stmt),
6126 gimple_bb (stmt)))
6127 return NULL_TREE;
6128 temp = or_var_with_comparison (arg, invert, code2,
6129 op2a, op2b);
6130 if (!temp)
6131 return NULL_TREE;
6132 else if (!result)
6133 result = temp;
6134 else if (!same_bool_result_p (result, temp))
6135 return NULL_TREE;
6137 else
6138 return NULL_TREE;
6140 return result;
6143 default:
6144 break;
6147 return NULL_TREE;
6150 /* Try to simplify the OR of two comparisons, specified by
6151 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6152 If this can be simplified to a single expression (without requiring
6153 introducing more SSA variables to hold intermediate values),
6154 return the resulting tree. Otherwise return NULL_TREE.
6155 If the result expression is non-null, it has boolean type. */
6157 tree
6158 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6159 enum tree_code code2, tree op2a, tree op2b)
6161 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6162 if (t)
6163 return t;
6164 else
6165 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6169 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6171 Either NULL_TREE, a simplified but non-constant or a constant
6172 is returned.
6174 ??? This should go into a gimple-fold-inline.h file to be eventually
6175 privatized with the single valueize function used in the various TUs
6176 to avoid the indirect function call overhead. */
6178 tree
6179 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6180 tree (*gvalueize) (tree))
6182 gimple_match_op res_op;
6183 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6184 edges if there are intermediate VARYING defs. For this reason
6185 do not follow SSA edges here even though SCCVN can technically
6186 just deal fine with that. */
6187 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6189 tree res = NULL_TREE;
6190 if (gimple_simplified_result_is_gimple_val (&res_op))
6191 res = res_op.ops[0];
6192 else if (mprts_hook)
6193 res = mprts_hook (&res_op);
6194 if (res)
6196 if (dump_file && dump_flags & TDF_DETAILS)
6198 fprintf (dump_file, "Match-and-simplified ");
6199 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6200 fprintf (dump_file, " to ");
6201 print_generic_expr (dump_file, res);
6202 fprintf (dump_file, "\n");
6204 return res;
6208 location_t loc = gimple_location (stmt);
6209 switch (gimple_code (stmt))
6211 case GIMPLE_ASSIGN:
6213 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6215 switch (get_gimple_rhs_class (subcode))
6217 case GIMPLE_SINGLE_RHS:
6219 tree rhs = gimple_assign_rhs1 (stmt);
6220 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6222 if (TREE_CODE (rhs) == SSA_NAME)
6224 /* If the RHS is an SSA_NAME, return its known constant value,
6225 if any. */
6226 return (*valueize) (rhs);
6228 /* Handle propagating invariant addresses into address
6229 operations. */
6230 else if (TREE_CODE (rhs) == ADDR_EXPR
6231 && !is_gimple_min_invariant (rhs))
6233 poly_int64 offset = 0;
6234 tree base;
6235 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6236 &offset,
6237 valueize);
6238 if (base
6239 && (CONSTANT_CLASS_P (base)
6240 || decl_address_invariant_p (base)))
6241 return build_invariant_address (TREE_TYPE (rhs),
6242 base, offset);
6244 else if (TREE_CODE (rhs) == CONSTRUCTOR
6245 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6246 && known_eq (CONSTRUCTOR_NELTS (rhs),
6247 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6249 unsigned i, nelts;
6250 tree val;
6252 nelts = CONSTRUCTOR_NELTS (rhs);
6253 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6254 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6256 val = (*valueize) (val);
6257 if (TREE_CODE (val) == INTEGER_CST
6258 || TREE_CODE (val) == REAL_CST
6259 || TREE_CODE (val) == FIXED_CST)
6260 vec.quick_push (val);
6261 else
6262 return NULL_TREE;
6265 return vec.build ();
6267 if (subcode == OBJ_TYPE_REF)
6269 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6270 /* If callee is constant, we can fold away the wrapper. */
6271 if (is_gimple_min_invariant (val))
6272 return val;
6275 if (kind == tcc_reference)
6277 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6278 || TREE_CODE (rhs) == REALPART_EXPR
6279 || TREE_CODE (rhs) == IMAGPART_EXPR)
6280 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6282 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6283 return fold_unary_loc (EXPR_LOCATION (rhs),
6284 TREE_CODE (rhs),
6285 TREE_TYPE (rhs), val);
6287 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6288 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6290 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6291 return fold_ternary_loc (EXPR_LOCATION (rhs),
6292 TREE_CODE (rhs),
6293 TREE_TYPE (rhs), val,
6294 TREE_OPERAND (rhs, 1),
6295 TREE_OPERAND (rhs, 2));
6297 else if (TREE_CODE (rhs) == MEM_REF
6298 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6300 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6301 if (TREE_CODE (val) == ADDR_EXPR
6302 && is_gimple_min_invariant (val))
6304 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6305 unshare_expr (val),
6306 TREE_OPERAND (rhs, 1));
6307 if (tem)
6308 rhs = tem;
6311 return fold_const_aggregate_ref_1 (rhs, valueize);
6313 else if (kind == tcc_declaration)
6314 return get_symbol_constant_value (rhs);
6315 return rhs;
6318 case GIMPLE_UNARY_RHS:
6319 return NULL_TREE;
6321 case GIMPLE_BINARY_RHS:
6322 /* Translate &x + CST into an invariant form suitable for
6323 further propagation. */
6324 if (subcode == POINTER_PLUS_EXPR)
6326 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6327 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6328 if (TREE_CODE (op0) == ADDR_EXPR
6329 && TREE_CODE (op1) == INTEGER_CST)
6331 tree off = fold_convert (ptr_type_node, op1);
6332 return build_fold_addr_expr_loc
6333 (loc,
6334 fold_build2 (MEM_REF,
6335 TREE_TYPE (TREE_TYPE (op0)),
6336 unshare_expr (op0), off));
6339 /* Canonicalize bool != 0 and bool == 0 appearing after
6340 valueization. While gimple_simplify handles this
6341 it can get confused by the ~X == 1 -> X == 0 transform
6342 which we cant reduce to a SSA name or a constant
6343 (and we have no way to tell gimple_simplify to not
6344 consider those transforms in the first place). */
6345 else if (subcode == EQ_EXPR
6346 || subcode == NE_EXPR)
6348 tree lhs = gimple_assign_lhs (stmt);
6349 tree op0 = gimple_assign_rhs1 (stmt);
6350 if (useless_type_conversion_p (TREE_TYPE (lhs),
6351 TREE_TYPE (op0)))
6353 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6354 op0 = (*valueize) (op0);
6355 if (TREE_CODE (op0) == INTEGER_CST)
6356 std::swap (op0, op1);
6357 if (TREE_CODE (op1) == INTEGER_CST
6358 && ((subcode == NE_EXPR && integer_zerop (op1))
6359 || (subcode == EQ_EXPR && integer_onep (op1))))
6360 return op0;
6363 return NULL_TREE;
6365 case GIMPLE_TERNARY_RHS:
6367 /* Handle ternary operators that can appear in GIMPLE form. */
6368 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6369 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6370 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6371 return fold_ternary_loc (loc, subcode,
6372 gimple_expr_type (stmt), op0, op1, op2);
6375 default:
6376 gcc_unreachable ();
6380 case GIMPLE_CALL:
6382 tree fn;
6383 gcall *call_stmt = as_a <gcall *> (stmt);
6385 if (gimple_call_internal_p (stmt))
6387 enum tree_code subcode = ERROR_MARK;
6388 switch (gimple_call_internal_fn (stmt))
6390 case IFN_UBSAN_CHECK_ADD:
6391 subcode = PLUS_EXPR;
6392 break;
6393 case IFN_UBSAN_CHECK_SUB:
6394 subcode = MINUS_EXPR;
6395 break;
6396 case IFN_UBSAN_CHECK_MUL:
6397 subcode = MULT_EXPR;
6398 break;
6399 case IFN_BUILTIN_EXPECT:
6401 tree arg0 = gimple_call_arg (stmt, 0);
6402 tree op0 = (*valueize) (arg0);
6403 if (TREE_CODE (op0) == INTEGER_CST)
6404 return op0;
6405 return NULL_TREE;
6407 default:
6408 return NULL_TREE;
6410 tree arg0 = gimple_call_arg (stmt, 0);
6411 tree arg1 = gimple_call_arg (stmt, 1);
6412 tree op0 = (*valueize) (arg0);
6413 tree op1 = (*valueize) (arg1);
6415 if (TREE_CODE (op0) != INTEGER_CST
6416 || TREE_CODE (op1) != INTEGER_CST)
6418 switch (subcode)
6420 case MULT_EXPR:
6421 /* x * 0 = 0 * x = 0 without overflow. */
6422 if (integer_zerop (op0) || integer_zerop (op1))
6423 return build_zero_cst (TREE_TYPE (arg0));
6424 break;
6425 case MINUS_EXPR:
6426 /* y - y = 0 without overflow. */
6427 if (operand_equal_p (op0, op1, 0))
6428 return build_zero_cst (TREE_TYPE (arg0));
6429 break;
6430 default:
6431 break;
6434 tree res
6435 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6436 if (res
6437 && TREE_CODE (res) == INTEGER_CST
6438 && !TREE_OVERFLOW (res))
6439 return res;
6440 return NULL_TREE;
6443 fn = (*valueize) (gimple_call_fn (stmt));
6444 if (TREE_CODE (fn) == ADDR_EXPR
6445 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6446 && gimple_builtin_call_types_compatible_p (stmt,
6447 TREE_OPERAND (fn, 0)))
6449 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6450 tree retval;
6451 unsigned i;
6452 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6453 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6454 retval = fold_builtin_call_array (loc,
6455 gimple_call_return_type (call_stmt),
6456 fn, gimple_call_num_args (stmt), args);
6457 if (retval)
6459 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6460 STRIP_NOPS (retval);
6461 retval = fold_convert (gimple_call_return_type (call_stmt),
6462 retval);
6464 return retval;
6466 return NULL_TREE;
6469 default:
6470 return NULL_TREE;
6474 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6475 Returns NULL_TREE if folding to a constant is not possible, otherwise
6476 returns a constant according to is_gimple_min_invariant. */
6478 tree
6479 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6481 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6482 if (res && is_gimple_min_invariant (res))
6483 return res;
6484 return NULL_TREE;
6488 /* The following set of functions are supposed to fold references using
6489 their constant initializers. */
6491 /* See if we can find constructor defining value of BASE.
6492 When we know the consructor with constant offset (such as
6493 base is array[40] and we do know constructor of array), then
6494 BIT_OFFSET is adjusted accordingly.
6496 As a special case, return error_mark_node when constructor
6497 is not explicitly available, but it is known to be zero
6498 such as 'static const int a;'. */
6499 static tree
6500 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6501 tree (*valueize)(tree))
6503 poly_int64 bit_offset2, size, max_size;
6504 bool reverse;
6506 if (TREE_CODE (base) == MEM_REF)
6508 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6509 if (!boff.to_shwi (bit_offset))
6510 return NULL_TREE;
6512 if (valueize
6513 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6514 base = valueize (TREE_OPERAND (base, 0));
6515 if (!base || TREE_CODE (base) != ADDR_EXPR)
6516 return NULL_TREE;
6517 base = TREE_OPERAND (base, 0);
6519 else if (valueize
6520 && TREE_CODE (base) == SSA_NAME)
6521 base = valueize (base);
6523 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6524 DECL_INITIAL. If BASE is a nested reference into another
6525 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6526 the inner reference. */
6527 switch (TREE_CODE (base))
6529 case VAR_DECL:
6530 case CONST_DECL:
6532 tree init = ctor_for_folding (base);
6534 /* Our semantic is exact opposite of ctor_for_folding;
6535 NULL means unknown, while error_mark_node is 0. */
6536 if (init == error_mark_node)
6537 return NULL_TREE;
6538 if (!init)
6539 return error_mark_node;
6540 return init;
6543 case VIEW_CONVERT_EXPR:
6544 return get_base_constructor (TREE_OPERAND (base, 0),
6545 bit_offset, valueize);
6547 case ARRAY_REF:
6548 case COMPONENT_REF:
6549 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6550 &reverse);
6551 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6552 return NULL_TREE;
6553 *bit_offset += bit_offset2;
6554 return get_base_constructor (base, bit_offset, valueize);
6556 case CONSTRUCTOR:
6557 return base;
6559 default:
6560 if (CONSTANT_CLASS_P (base))
6561 return base;
6563 return NULL_TREE;
6567 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6568 to the memory at bit OFFSET. When non-null, TYPE is the expected
6569 type of the reference; otherwise the type of the referenced element
6570 is used instead. When SIZE is zero, attempt to fold a reference to
6571 the entire element which OFFSET refers to. Increment *SUBOFF by
6572 the bit offset of the accessed element. */
6574 static tree
6575 fold_array_ctor_reference (tree type, tree ctor,
6576 unsigned HOST_WIDE_INT offset,
6577 unsigned HOST_WIDE_INT size,
6578 tree from_decl,
6579 unsigned HOST_WIDE_INT *suboff)
6581 offset_int low_bound;
6582 offset_int elt_size;
6583 offset_int access_index;
6584 tree domain_type = NULL_TREE;
6585 HOST_WIDE_INT inner_offset;
6587 /* Compute low bound and elt size. */
6588 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6589 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6590 if (domain_type && TYPE_MIN_VALUE (domain_type))
6592 /* Static constructors for variably sized objects makes no sense. */
6593 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6594 return NULL_TREE;
6595 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6597 else
6598 low_bound = 0;
6599 /* Static constructors for variably sized objects makes no sense. */
6600 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6601 return NULL_TREE;
6602 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6604 /* When TYPE is non-null, verify that it specifies a constant-sized
6605 accessed not larger than size of array element. */
6606 if (type
6607 && (!TYPE_SIZE_UNIT (type)
6608 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6609 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6610 || elt_size == 0))
6611 return NULL_TREE;
6613 /* Compute the array index we look for. */
6614 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6615 elt_size);
6616 access_index += low_bound;
6618 /* And offset within the access. */
6619 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6621 /* See if the array field is large enough to span whole access. We do not
6622 care to fold accesses spanning multiple array indexes. */
6623 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6624 return NULL_TREE;
6625 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6627 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6629 /* For the final reference to the entire accessed element
6630 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
6631 may be null) in favor of the type of the element, and set
6632 SIZE to the size of the accessed element. */
6633 inner_offset = 0;
6634 type = TREE_TYPE (val);
6635 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6638 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6639 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6640 suboff);
6643 /* Memory not explicitly mentioned in constructor is 0 (or
6644 the reference is out of range). */
6645 return type ? build_zero_cst (type) : NULL_TREE;
6648 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6649 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6650 is the expected type of the reference; otherwise the type of
6651 the referenced member is used instead. When SIZE is zero,
6652 attempt to fold a reference to the entire member which OFFSET
6653 refers to; in this case. Increment *SUBOFF by the bit offset
6654 of the accessed member. */
6656 static tree
6657 fold_nonarray_ctor_reference (tree type, tree ctor,
6658 unsigned HOST_WIDE_INT offset,
6659 unsigned HOST_WIDE_INT size,
6660 tree from_decl,
6661 unsigned HOST_WIDE_INT *suboff)
6663 unsigned HOST_WIDE_INT cnt;
6664 tree cfield, cval;
6666 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6667 cval)
6669 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6670 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6671 tree field_size = DECL_SIZE (cfield);
6673 if (!field_size)
6675 /* Determine the size of the flexible array member from
6676 the size of the initializer provided for it. */
6677 field_size = TYPE_SIZE (TREE_TYPE (cval));
6680 /* Variable sized objects in static constructors makes no sense,
6681 but field_size can be NULL for flexible array members. */
6682 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6683 && TREE_CODE (byte_offset) == INTEGER_CST
6684 && (field_size != NULL_TREE
6685 ? TREE_CODE (field_size) == INTEGER_CST
6686 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6688 /* Compute bit offset of the field. */
6689 offset_int bitoffset
6690 = (wi::to_offset (field_offset)
6691 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6692 /* Compute bit offset where the field ends. */
6693 offset_int bitoffset_end;
6694 if (field_size != NULL_TREE)
6695 bitoffset_end = bitoffset + wi::to_offset (field_size);
6696 else
6697 bitoffset_end = 0;
6699 /* Compute the bit offset of the end of the desired access.
6700 As a special case, if the size of the desired access is
6701 zero, assume the access is to the entire field (and let
6702 the caller make any necessary adjustments by storing
6703 the actual bounds of the field in FIELDBOUNDS). */
6704 offset_int access_end = offset_int (offset);
6705 if (size)
6706 access_end += size;
6707 else
6708 access_end = bitoffset_end;
6710 /* Is there any overlap between the desired access at
6711 [OFFSET, OFFSET+SIZE) and the offset of the field within
6712 the object at [BITOFFSET, BITOFFSET_END)? */
6713 if (wi::cmps (access_end, bitoffset) > 0
6714 && (field_size == NULL_TREE
6715 || wi::lts_p (offset, bitoffset_end)))
6717 *suboff += bitoffset.to_uhwi ();
6719 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6721 /* For the final reference to the entire accessed member
6722 (SIZE is zero), reset OFFSET, disegard TYPE (which may
6723 be null) in favor of the type of the member, and set
6724 SIZE to the size of the accessed member. */
6725 offset = bitoffset.to_uhwi ();
6726 type = TREE_TYPE (cval);
6727 size = (bitoffset_end - bitoffset).to_uhwi ();
6730 /* We do have overlap. Now see if the field is large enough
6731 to cover the access. Give up for accesses that extend
6732 beyond the end of the object or that span multiple fields. */
6733 if (wi::cmps (access_end, bitoffset_end) > 0)
6734 return NULL_TREE;
6735 if (offset < bitoffset)
6736 return NULL_TREE;
6738 offset_int inner_offset = offset_int (offset) - bitoffset;
6739 return fold_ctor_reference (type, cval,
6740 inner_offset.to_uhwi (), size,
6741 from_decl, suboff);
6744 /* Memory not explicitly mentioned in constructor is 0. */
6745 return type ? build_zero_cst (type) : NULL_TREE;
6748 /* CTOR is value initializing memory. Fold a reference of TYPE and
6749 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6750 is zero, attempt to fold a reference to the entire subobject
6751 which OFFSET refers to. This is used when folding accesses to
6752 string members of aggregates. When non-null, set *SUBOFF to
6753 the bit offset of the accessed subobject. */
6755 tree
6756 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6757 const poly_uint64 &poly_size, tree from_decl,
6758 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6760 tree ret;
6762 /* We found the field with exact match. */
6763 if (type
6764 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6765 && known_eq (poly_offset, 0U))
6766 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6768 /* The remaining optimizations need a constant size and offset. */
6769 unsigned HOST_WIDE_INT size, offset;
6770 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6771 return NULL_TREE;
6773 /* We are at the end of walk, see if we can view convert the
6774 result. */
6775 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6776 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6777 && !compare_tree_int (TYPE_SIZE (type), size)
6778 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6780 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6781 if (ret)
6783 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6784 if (ret)
6785 STRIP_USELESS_TYPE_CONVERSION (ret);
6787 return ret;
6789 /* For constants and byte-aligned/sized reads try to go through
6790 native_encode/interpret. */
6791 if (CONSTANT_CLASS_P (ctor)
6792 && BITS_PER_UNIT == 8
6793 && offset % BITS_PER_UNIT == 0
6794 && size % BITS_PER_UNIT == 0
6795 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6797 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6798 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6799 offset / BITS_PER_UNIT);
6800 if (len > 0)
6801 return native_interpret_expr (type, buf, len);
6803 if (TREE_CODE (ctor) == CONSTRUCTOR)
6805 unsigned HOST_WIDE_INT dummy = 0;
6806 if (!suboff)
6807 suboff = &dummy;
6809 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6810 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6811 return fold_array_ctor_reference (type, ctor, offset, size,
6812 from_decl, suboff);
6814 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6815 from_decl, suboff);
6818 return NULL_TREE;
6821 /* Return the tree representing the element referenced by T if T is an
6822 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6823 names using VALUEIZE. Return NULL_TREE otherwise. */
6825 tree
6826 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6828 tree ctor, idx, base;
6829 poly_int64 offset, size, max_size;
6830 tree tem;
6831 bool reverse;
6833 if (TREE_THIS_VOLATILE (t))
6834 return NULL_TREE;
6836 if (DECL_P (t))
6837 return get_symbol_constant_value (t);
6839 tem = fold_read_from_constant_string (t);
6840 if (tem)
6841 return tem;
6843 switch (TREE_CODE (t))
6845 case ARRAY_REF:
6846 case ARRAY_RANGE_REF:
6847 /* Constant indexes are handled well by get_base_constructor.
6848 Only special case variable offsets.
6849 FIXME: This code can't handle nested references with variable indexes
6850 (they will be handled only by iteration of ccp). Perhaps we can bring
6851 get_ref_base_and_extent here and make it use a valueize callback. */
6852 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6853 && valueize
6854 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6855 && poly_int_tree_p (idx))
6857 tree low_bound, unit_size;
6859 /* If the resulting bit-offset is constant, track it. */
6860 if ((low_bound = array_ref_low_bound (t),
6861 poly_int_tree_p (low_bound))
6862 && (unit_size = array_ref_element_size (t),
6863 tree_fits_uhwi_p (unit_size)))
6865 poly_offset_int woffset
6866 = wi::sext (wi::to_poly_offset (idx)
6867 - wi::to_poly_offset (low_bound),
6868 TYPE_PRECISION (TREE_TYPE (idx)));
6870 if (woffset.to_shwi (&offset))
6872 /* TODO: This code seems wrong, multiply then check
6873 to see if it fits. */
6874 offset *= tree_to_uhwi (unit_size);
6875 offset *= BITS_PER_UNIT;
6877 base = TREE_OPERAND (t, 0);
6878 ctor = get_base_constructor (base, &offset, valueize);
6879 /* Empty constructor. Always fold to 0. */
6880 if (ctor == error_mark_node)
6881 return build_zero_cst (TREE_TYPE (t));
6882 /* Out of bound array access. Value is undefined,
6883 but don't fold. */
6884 if (maybe_lt (offset, 0))
6885 return NULL_TREE;
6886 /* We can not determine ctor. */
6887 if (!ctor)
6888 return NULL_TREE;
6889 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6890 tree_to_uhwi (unit_size)
6891 * BITS_PER_UNIT,
6892 base);
6896 /* Fallthru. */
6898 case COMPONENT_REF:
6899 case BIT_FIELD_REF:
6900 case TARGET_MEM_REF:
6901 case MEM_REF:
6902 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6903 ctor = get_base_constructor (base, &offset, valueize);
6905 /* Empty constructor. Always fold to 0. */
6906 if (ctor == error_mark_node)
6907 return build_zero_cst (TREE_TYPE (t));
6908 /* We do not know precise address. */
6909 if (!known_size_p (max_size) || maybe_ne (max_size, size))
6910 return NULL_TREE;
6911 /* We can not determine ctor. */
6912 if (!ctor)
6913 return NULL_TREE;
6915 /* Out of bound array access. Value is undefined, but don't fold. */
6916 if (maybe_lt (offset, 0))
6917 return NULL_TREE;
6919 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6920 base);
6922 case REALPART_EXPR:
6923 case IMAGPART_EXPR:
6925 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6926 if (c && TREE_CODE (c) == COMPLEX_CST)
6927 return fold_build1_loc (EXPR_LOCATION (t),
6928 TREE_CODE (t), TREE_TYPE (t), c);
6929 break;
6932 default:
6933 break;
6936 return NULL_TREE;
6939 tree
6940 fold_const_aggregate_ref (tree t)
6942 return fold_const_aggregate_ref_1 (t, NULL);
6945 /* Lookup virtual method with index TOKEN in a virtual table V
6946 at OFFSET.
6947 Set CAN_REFER if non-NULL to false if method
6948 is not referable or if the virtual table is ill-formed (such as rewriten
6949 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6951 tree
6952 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6953 tree v,
6954 unsigned HOST_WIDE_INT offset,
6955 bool *can_refer)
6957 tree vtable = v, init, fn;
6958 unsigned HOST_WIDE_INT size;
6959 unsigned HOST_WIDE_INT elt_size, access_index;
6960 tree domain_type;
6962 if (can_refer)
6963 *can_refer = true;
6965 /* First of all double check we have virtual table. */
6966 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6968 /* Pass down that we lost track of the target. */
6969 if (can_refer)
6970 *can_refer = false;
6971 return NULL_TREE;
6974 init = ctor_for_folding (v);
6976 /* The virtual tables should always be born with constructors
6977 and we always should assume that they are avaialble for
6978 folding. At the moment we do not stream them in all cases,
6979 but it should never happen that ctor seem unreachable. */
6980 gcc_assert (init);
6981 if (init == error_mark_node)
6983 /* Pass down that we lost track of the target. */
6984 if (can_refer)
6985 *can_refer = false;
6986 return NULL_TREE;
6988 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6989 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6990 offset *= BITS_PER_UNIT;
6991 offset += token * size;
6993 /* Lookup the value in the constructor that is assumed to be array.
6994 This is equivalent to
6995 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6996 offset, size, NULL);
6997 but in a constant time. We expect that frontend produced a simple
6998 array without indexed initializers. */
7000 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7001 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7002 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7003 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7005 access_index = offset / BITS_PER_UNIT / elt_size;
7006 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7008 /* The C++ FE can now produce indexed fields, and we check if the indexes
7009 match. */
7010 if (access_index < CONSTRUCTOR_NELTS (init))
7012 fn = CONSTRUCTOR_ELT (init, access_index)->value;
7013 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7014 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
7015 STRIP_NOPS (fn);
7017 else
7018 fn = NULL;
7020 /* For type inconsistent program we may end up looking up virtual method
7021 in virtual table that does not contain TOKEN entries. We may overrun
7022 the virtual table and pick up a constant or RTTI info pointer.
7023 In any case the call is undefined. */
7024 if (!fn
7025 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7026 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7027 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7028 else
7030 fn = TREE_OPERAND (fn, 0);
7032 /* When cgraph node is missing and function is not public, we cannot
7033 devirtualize. This can happen in WHOPR when the actual method
7034 ends up in other partition, because we found devirtualization
7035 possibility too late. */
7036 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7038 if (can_refer)
7040 *can_refer = false;
7041 return fn;
7043 return NULL_TREE;
7047 /* Make sure we create a cgraph node for functions we'll reference.
7048 They can be non-existent if the reference comes from an entry
7049 of an external vtable for example. */
7050 cgraph_node::get_create (fn);
7052 return fn;
7055 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7056 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7057 KNOWN_BINFO carries the binfo describing the true type of
7058 OBJ_TYPE_REF_OBJECT(REF).
7059 Set CAN_REFER if non-NULL to false if method
7060 is not referable or if the virtual table is ill-formed (such as rewriten
7061 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7063 tree
7064 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7065 bool *can_refer)
7067 unsigned HOST_WIDE_INT offset;
7068 tree v;
7070 v = BINFO_VTABLE (known_binfo);
7071 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7072 if (!v)
7073 return NULL_TREE;
7075 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7077 if (can_refer)
7078 *can_refer = false;
7079 return NULL_TREE;
7081 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7084 /* Given a pointer value T, return a simplified version of an
7085 indirection through T, or NULL_TREE if no simplification is
7086 possible. Note that the resulting type may be different from
7087 the type pointed to in the sense that it is still compatible
7088 from the langhooks point of view. */
7090 tree
7091 gimple_fold_indirect_ref (tree t)
7093 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7094 tree sub = t;
7095 tree subtype;
7097 STRIP_NOPS (sub);
7098 subtype = TREE_TYPE (sub);
7099 if (!POINTER_TYPE_P (subtype)
7100 || TYPE_REF_CAN_ALIAS_ALL (ptype))
7101 return NULL_TREE;
7103 if (TREE_CODE (sub) == ADDR_EXPR)
7105 tree op = TREE_OPERAND (sub, 0);
7106 tree optype = TREE_TYPE (op);
7107 /* *&p => p */
7108 if (useless_type_conversion_p (type, optype))
7109 return op;
7111 /* *(foo *)&fooarray => fooarray[0] */
7112 if (TREE_CODE (optype) == ARRAY_TYPE
7113 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7114 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7116 tree type_domain = TYPE_DOMAIN (optype);
7117 tree min_val = size_zero_node;
7118 if (type_domain && TYPE_MIN_VALUE (type_domain))
7119 min_val = TYPE_MIN_VALUE (type_domain);
7120 if (TREE_CODE (min_val) == INTEGER_CST)
7121 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7123 /* *(foo *)&complexfoo => __real__ complexfoo */
7124 else if (TREE_CODE (optype) == COMPLEX_TYPE
7125 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7126 return fold_build1 (REALPART_EXPR, type, op);
7127 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7128 else if (TREE_CODE (optype) == VECTOR_TYPE
7129 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7131 tree part_width = TYPE_SIZE (type);
7132 tree index = bitsize_int (0);
7133 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7137 /* *(p + CST) -> ... */
7138 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7139 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7141 tree addr = TREE_OPERAND (sub, 0);
7142 tree off = TREE_OPERAND (sub, 1);
7143 tree addrtype;
7145 STRIP_NOPS (addr);
7146 addrtype = TREE_TYPE (addr);
7148 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7149 if (TREE_CODE (addr) == ADDR_EXPR
7150 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7151 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7152 && tree_fits_uhwi_p (off))
7154 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7155 tree part_width = TYPE_SIZE (type);
7156 unsigned HOST_WIDE_INT part_widthi
7157 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7158 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7159 tree index = bitsize_int (indexi);
7160 if (known_lt (offset / part_widthi,
7161 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7162 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7163 part_width, index);
7166 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7167 if (TREE_CODE (addr) == ADDR_EXPR
7168 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7169 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7171 tree size = TYPE_SIZE_UNIT (type);
7172 if (tree_int_cst_equal (size, off))
7173 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7176 /* *(p + CST) -> MEM_REF <p, CST>. */
7177 if (TREE_CODE (addr) != ADDR_EXPR
7178 || DECL_P (TREE_OPERAND (addr, 0)))
7179 return fold_build2 (MEM_REF, type,
7180 addr,
7181 wide_int_to_tree (ptype, wi::to_wide (off)));
7184 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7185 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7186 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7187 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7189 tree type_domain;
7190 tree min_val = size_zero_node;
7191 tree osub = sub;
7192 sub = gimple_fold_indirect_ref (sub);
7193 if (! sub)
7194 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7195 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7196 if (type_domain && TYPE_MIN_VALUE (type_domain))
7197 min_val = TYPE_MIN_VALUE (type_domain);
7198 if (TREE_CODE (min_val) == INTEGER_CST)
7199 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7202 return NULL_TREE;
7205 /* Return true if CODE is an operation that when operating on signed
7206 integer types involves undefined behavior on overflow and the
7207 operation can be expressed with unsigned arithmetic. */
7209 bool
7210 arith_code_with_undefined_signed_overflow (tree_code code)
7212 switch (code)
7214 case PLUS_EXPR:
7215 case MINUS_EXPR:
7216 case MULT_EXPR:
7217 case NEGATE_EXPR:
7218 case POINTER_PLUS_EXPR:
7219 return true;
7220 default:
7221 return false;
7225 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7226 operation that can be transformed to unsigned arithmetic by converting
7227 its operand, carrying out the operation in the corresponding unsigned
7228 type and converting the result back to the original type.
7230 Returns a sequence of statements that replace STMT and also contain
7231 a modified form of STMT itself. */
7233 gimple_seq
7234 rewrite_to_defined_overflow (gimple *stmt)
7236 if (dump_file && (dump_flags & TDF_DETAILS))
7238 fprintf (dump_file, "rewriting stmt with undefined signed "
7239 "overflow ");
7240 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7243 tree lhs = gimple_assign_lhs (stmt);
7244 tree type = unsigned_type_for (TREE_TYPE (lhs));
7245 gimple_seq stmts = NULL;
7246 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7248 tree op = gimple_op (stmt, i);
7249 op = gimple_convert (&stmts, type, op);
7250 gimple_set_op (stmt, i, op);
7252 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7253 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7254 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7255 gimple_seq_add_stmt (&stmts, stmt);
7256 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7257 gimple_seq_add_stmt (&stmts, cvt);
7259 return stmts;
7263 /* The valueization hook we use for the gimple_build API simplification.
7264 This makes us match fold_buildN behavior by only combining with
7265 statements in the sequence(s) we are currently building. */
7267 static tree
7268 gimple_build_valueize (tree op)
7270 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7271 return op;
7272 return NULL_TREE;
7275 /* Build the expression CODE OP0 of type TYPE with location LOC,
7276 simplifying it first if possible. Returns the built
7277 expression value and appends statements possibly defining it
7278 to SEQ. */
7280 tree
7281 gimple_build (gimple_seq *seq, location_t loc,
7282 enum tree_code code, tree type, tree op0)
7284 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7285 if (!res)
7287 res = create_tmp_reg_or_ssa_name (type);
7288 gimple *stmt;
7289 if (code == REALPART_EXPR
7290 || code == IMAGPART_EXPR
7291 || code == VIEW_CONVERT_EXPR)
7292 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7293 else
7294 stmt = gimple_build_assign (res, code, op0);
7295 gimple_set_location (stmt, loc);
7296 gimple_seq_add_stmt_without_update (seq, stmt);
7298 return res;
7301 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7302 simplifying it first if possible. Returns the built
7303 expression value and appends statements possibly defining it
7304 to SEQ. */
7306 tree
7307 gimple_build (gimple_seq *seq, location_t loc,
7308 enum tree_code code, tree type, tree op0, tree op1)
7310 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7311 if (!res)
7313 res = create_tmp_reg_or_ssa_name (type);
7314 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7315 gimple_set_location (stmt, loc);
7316 gimple_seq_add_stmt_without_update (seq, stmt);
7318 return res;
7321 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7322 simplifying it first if possible. Returns the built
7323 expression value and appends statements possibly defining it
7324 to SEQ. */
7326 tree
7327 gimple_build (gimple_seq *seq, location_t loc,
7328 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7330 tree res = gimple_simplify (code, type, op0, op1, op2,
7331 seq, gimple_build_valueize);
7332 if (!res)
7334 res = create_tmp_reg_or_ssa_name (type);
7335 gimple *stmt;
7336 if (code == BIT_FIELD_REF)
7337 stmt = gimple_build_assign (res, code,
7338 build3 (code, type, op0, op1, op2));
7339 else
7340 stmt = gimple_build_assign (res, code, op0, op1, op2);
7341 gimple_set_location (stmt, loc);
7342 gimple_seq_add_stmt_without_update (seq, stmt);
7344 return res;
7347 /* Build the call FN (ARG0) with a result of type TYPE
7348 (or no result if TYPE is void) with location LOC,
7349 simplifying it first if possible. Returns the built
7350 expression value (or NULL_TREE if TYPE is void) and appends
7351 statements possibly defining it to SEQ. */
7353 tree
7354 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7355 tree type, tree arg0)
7357 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7358 if (!res)
7360 gcall *stmt;
7361 if (internal_fn_p (fn))
7362 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7363 else
7365 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7366 stmt = gimple_build_call (decl, 1, arg0);
7368 if (!VOID_TYPE_P (type))
7370 res = create_tmp_reg_or_ssa_name (type);
7371 gimple_call_set_lhs (stmt, res);
7373 gimple_set_location (stmt, loc);
7374 gimple_seq_add_stmt_without_update (seq, stmt);
7376 return res;
7379 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7380 (or no result if TYPE is void) with location LOC,
7381 simplifying it first if possible. Returns the built
7382 expression value (or NULL_TREE if TYPE is void) and appends
7383 statements possibly defining it to SEQ. */
7385 tree
7386 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7387 tree type, tree arg0, tree arg1)
7389 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7390 if (!res)
7392 gcall *stmt;
7393 if (internal_fn_p (fn))
7394 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7395 else
7397 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7398 stmt = gimple_build_call (decl, 2, arg0, arg1);
7400 if (!VOID_TYPE_P (type))
7402 res = create_tmp_reg_or_ssa_name (type);
7403 gimple_call_set_lhs (stmt, res);
7405 gimple_set_location (stmt, loc);
7406 gimple_seq_add_stmt_without_update (seq, stmt);
7408 return res;
7411 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7412 (or no result if TYPE is void) with location LOC,
7413 simplifying it first if possible. Returns the built
7414 expression value (or NULL_TREE if TYPE is void) and appends
7415 statements possibly defining it to SEQ. */
7417 tree
7418 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7419 tree type, tree arg0, tree arg1, tree arg2)
7421 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7422 seq, gimple_build_valueize);
7423 if (!res)
7425 gcall *stmt;
7426 if (internal_fn_p (fn))
7427 stmt = gimple_build_call_internal (as_internal_fn (fn),
7428 3, arg0, arg1, arg2);
7429 else
7431 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7432 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7434 if (!VOID_TYPE_P (type))
7436 res = create_tmp_reg_or_ssa_name (type);
7437 gimple_call_set_lhs (stmt, res);
7439 gimple_set_location (stmt, loc);
7440 gimple_seq_add_stmt_without_update (seq, stmt);
7442 return res;
7445 /* Build the conversion (TYPE) OP with a result of type TYPE
7446 with location LOC if such conversion is neccesary in GIMPLE,
7447 simplifying it first.
7448 Returns the built expression value and appends
7449 statements possibly defining it to SEQ. */
7451 tree
7452 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7454 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7455 return op;
7456 return gimple_build (seq, loc, NOP_EXPR, type, op);
7459 /* Build the conversion (ptrofftype) OP with a result of a type
7460 compatible with ptrofftype with location LOC if such conversion
7461 is neccesary in GIMPLE, simplifying it first.
7462 Returns the built expression value and appends
7463 statements possibly defining it to SEQ. */
7465 tree
7466 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7468 if (ptrofftype_p (TREE_TYPE (op)))
7469 return op;
7470 return gimple_convert (seq, loc, sizetype, op);
7473 /* Build a vector of type TYPE in which each element has the value OP.
7474 Return a gimple value for the result, appending any new statements
7475 to SEQ. */
7477 tree
7478 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7479 tree op)
7481 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7482 && !CONSTANT_CLASS_P (op))
7483 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7485 tree res, vec = build_vector_from_val (type, op);
7486 if (is_gimple_val (vec))
7487 return vec;
7488 if (gimple_in_ssa_p (cfun))
7489 res = make_ssa_name (type);
7490 else
7491 res = create_tmp_reg (type);
7492 gimple *stmt = gimple_build_assign (res, vec);
7493 gimple_set_location (stmt, loc);
7494 gimple_seq_add_stmt_without_update (seq, stmt);
7495 return res;
7498 /* Build a vector from BUILDER, handling the case in which some elements
7499 are non-constant. Return a gimple value for the result, appending any
7500 new instructions to SEQ.
7502 BUILDER must not have a stepped encoding on entry. This is because
7503 the function is not geared up to handle the arithmetic that would
7504 be needed in the variable case, and any code building a vector that
7505 is known to be constant should use BUILDER->build () directly. */
7507 tree
7508 gimple_build_vector (gimple_seq *seq, location_t loc,
7509 tree_vector_builder *builder)
7511 gcc_assert (builder->nelts_per_pattern () <= 2);
7512 unsigned int encoded_nelts = builder->encoded_nelts ();
7513 for (unsigned int i = 0; i < encoded_nelts; ++i)
7514 if (!TREE_CONSTANT ((*builder)[i]))
7516 tree type = builder->type ();
7517 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7518 vec<constructor_elt, va_gc> *v;
7519 vec_alloc (v, nelts);
7520 for (i = 0; i < nelts; ++i)
7521 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7523 tree res;
7524 if (gimple_in_ssa_p (cfun))
7525 res = make_ssa_name (type);
7526 else
7527 res = create_tmp_reg (type);
7528 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7529 gimple_set_location (stmt, loc);
7530 gimple_seq_add_stmt_without_update (seq, stmt);
7531 return res;
7533 return builder->build ();
7536 /* Return true if the result of assignment STMT is known to be non-negative.
7537 If the return value is based on the assumption that signed overflow is
7538 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7539 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7541 static bool
7542 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7543 int depth)
7545 enum tree_code code = gimple_assign_rhs_code (stmt);
7546 switch (get_gimple_rhs_class (code))
7548 case GIMPLE_UNARY_RHS:
7549 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7550 gimple_expr_type (stmt),
7551 gimple_assign_rhs1 (stmt),
7552 strict_overflow_p, depth);
7553 case GIMPLE_BINARY_RHS:
7554 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7555 gimple_expr_type (stmt),
7556 gimple_assign_rhs1 (stmt),
7557 gimple_assign_rhs2 (stmt),
7558 strict_overflow_p, depth);
7559 case GIMPLE_TERNARY_RHS:
7560 return false;
7561 case GIMPLE_SINGLE_RHS:
7562 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7563 strict_overflow_p, depth);
7564 case GIMPLE_INVALID_RHS:
7565 break;
7567 gcc_unreachable ();
7570 /* Return true if return value of call STMT is known to be non-negative.
7571 If the return value is based on the assumption that signed overflow is
7572 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7573 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7575 static bool
7576 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7577 int depth)
7579 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7580 gimple_call_arg (stmt, 0) : NULL_TREE;
7581 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7582 gimple_call_arg (stmt, 1) : NULL_TREE;
7584 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7585 gimple_call_combined_fn (stmt),
7586 arg0,
7587 arg1,
7588 strict_overflow_p, depth);
7591 /* Return true if return value of call STMT is known to be non-negative.
7592 If the return value is based on the assumption that signed overflow is
7593 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7594 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7596 static bool
7597 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7598 int depth)
7600 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7602 tree arg = gimple_phi_arg_def (stmt, i);
7603 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7604 return false;
7606 return true;
7609 /* Return true if STMT is known to compute a non-negative value.
7610 If the return value is based on the assumption that signed overflow is
7611 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7612 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7614 bool
7615 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7616 int depth)
7618 switch (gimple_code (stmt))
7620 case GIMPLE_ASSIGN:
7621 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7622 depth);
7623 case GIMPLE_CALL:
7624 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7625 depth);
7626 case GIMPLE_PHI:
7627 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7628 depth);
7629 default:
7630 return false;
7634 /* Return true if the floating-point value computed by assignment STMT
7635 is known to have an integer value. We also allow +Inf, -Inf and NaN
7636 to be considered integer values. Return false for signaling NaN.
7638 DEPTH is the current nesting depth of the query. */
7640 static bool
7641 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7643 enum tree_code code = gimple_assign_rhs_code (stmt);
7644 switch (get_gimple_rhs_class (code))
7646 case GIMPLE_UNARY_RHS:
7647 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7648 gimple_assign_rhs1 (stmt), depth);
7649 case GIMPLE_BINARY_RHS:
7650 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7651 gimple_assign_rhs1 (stmt),
7652 gimple_assign_rhs2 (stmt), depth);
7653 case GIMPLE_TERNARY_RHS:
7654 return false;
7655 case GIMPLE_SINGLE_RHS:
7656 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7657 case GIMPLE_INVALID_RHS:
7658 break;
7660 gcc_unreachable ();
7663 /* Return true if the floating-point value computed by call STMT is known
7664 to have an integer value. We also allow +Inf, -Inf and NaN to be
7665 considered integer values. Return false for signaling NaN.
7667 DEPTH is the current nesting depth of the query. */
7669 static bool
7670 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7672 tree arg0 = (gimple_call_num_args (stmt) > 0
7673 ? gimple_call_arg (stmt, 0)
7674 : NULL_TREE);
7675 tree arg1 = (gimple_call_num_args (stmt) > 1
7676 ? gimple_call_arg (stmt, 1)
7677 : NULL_TREE);
7678 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7679 arg0, arg1, depth);
7682 /* Return true if the floating-point result of phi STMT is known to have
7683 an integer value. We also allow +Inf, -Inf and NaN to be considered
7684 integer values. Return false for signaling NaN.
7686 DEPTH is the current nesting depth of the query. */
7688 static bool
7689 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7691 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7693 tree arg = gimple_phi_arg_def (stmt, i);
7694 if (!integer_valued_real_single_p (arg, depth + 1))
7695 return false;
7697 return true;
7700 /* Return true if the floating-point value computed by STMT is known
7701 to have an integer value. We also allow +Inf, -Inf and NaN to be
7702 considered integer values. Return false for signaling NaN.
7704 DEPTH is the current nesting depth of the query. */
7706 bool
7707 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7709 switch (gimple_code (stmt))
7711 case GIMPLE_ASSIGN:
7712 return gimple_assign_integer_valued_real_p (stmt, depth);
7713 case GIMPLE_CALL:
7714 return gimple_call_integer_valued_real_p (stmt, depth);
7715 case GIMPLE_PHI:
7716 return gimple_phi_integer_valued_real_p (stmt, depth);
7717 default:
7718 return false;