Fix compilation failure with C++98 compilers
[official-gcc.git] / gcc / gimple-fold.c
blobfe6bc08bdd986e27002a050438eeea43b7dda430
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
69 /* Return true when DECL can be referenced from current unit.
70 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
71 We can get declarations that are not possible to reference for various
72 reasons:
74 1) When analyzing C++ virtual tables.
75 C++ virtual tables do have known constructors even
76 when they are keyed to other compilation unit.
77 Those tables can contain pointers to methods and vars
78 in other units. Those methods have both STATIC and EXTERNAL
79 set.
80 2) In WHOPR mode devirtualization might lead to reference
81 to method that was partitioned elsehwere.
82 In this case we have static VAR_DECL or FUNCTION_DECL
83 that has no corresponding callgraph/varpool node
84 declaring the body.
85 3) COMDAT functions referred by external vtables that
86 we devirtualize only during final compilation stage.
87 At this time we already decided that we will not output
88 the function body and thus we can't reference the symbol
89 directly. */
91 static bool
92 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
94 varpool_node *vnode;
95 struct cgraph_node *node;
96 symtab_node *snode;
98 if (DECL_ABSTRACT_P (decl))
99 return false;
101 /* We are concerned only about static/external vars and functions. */
102 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
103 || !VAR_OR_FUNCTION_DECL_P (decl))
104 return true;
106 /* Static objects can be referred only if they was not optimized out yet. */
107 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
109 /* Before we start optimizing unreachable code we can be sure all
110 static objects are defined. */
111 if (symtab->function_flags_ready)
112 return true;
113 snode = symtab_node::get (decl);
114 if (!snode || !snode->definition)
115 return false;
116 node = dyn_cast <cgraph_node *> (snode);
117 return !node || !node->global.inlined_to;
120 /* We will later output the initializer, so we can refer to it.
121 So we are concerned only when DECL comes from initializer of
122 external var or var that has been optimized out. */
123 if (!from_decl
124 || !VAR_P (from_decl)
125 || (!DECL_EXTERNAL (from_decl)
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->definition)
128 || (flag_ltrans
129 && (vnode = varpool_node::get (from_decl)) != NULL
130 && vnode->in_other_partition))
131 return true;
132 /* We are folding reference from external vtable. The vtable may reffer
133 to a symbol keyed to other compilation unit. The other compilation
134 unit may be in separate DSO and the symbol may be hidden. */
135 if (DECL_VISIBILITY_SPECIFIED (decl)
136 && DECL_EXTERNAL (decl)
137 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
138 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
139 return false;
140 /* When function is public, we always can introduce new reference.
141 Exception are the COMDAT functions where introducing a direct
142 reference imply need to include function body in the curren tunit. */
143 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
144 return true;
145 /* We have COMDAT. We are going to check if we still have definition
146 or if the definition is going to be output in other partition.
147 Bypass this when gimplifying; all needed functions will be produced.
149 As observed in PR20991 for already optimized out comdat virtual functions
150 it may be tempting to not necessarily give up because the copy will be
151 output elsewhere when corresponding vtable is output.
152 This is however not possible - ABI specify that COMDATs are output in
153 units where they are used and when the other unit was compiled with LTO
154 it is possible that vtable was kept public while the function itself
155 was privatized. */
156 if (!symtab->function_flags_ready)
157 return true;
159 snode = symtab_node::get (decl);
160 if (!snode
161 || ((!snode->definition || DECL_EXTERNAL (decl))
162 && (!snode->in_other_partition
163 || (!snode->forced_by_abi && !snode->force_output))))
164 return false;
165 node = dyn_cast <cgraph_node *> (snode);
166 return !node || !node->global.inlined_to;
169 /* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
173 tree
174 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
182 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
183 acceptable form for is_gimple_min_invariant.
184 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
186 tree
187 canonicalize_constructor_val (tree cval, tree from_decl)
189 tree orig_cval = cval;
190 STRIP_NOPS (cval);
191 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
192 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
194 tree ptr = TREE_OPERAND (cval, 0);
195 if (is_gimple_min_invariant (ptr))
196 cval = build1_loc (EXPR_LOCATION (cval),
197 ADDR_EXPR, TREE_TYPE (ptr),
198 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
199 ptr,
200 fold_convert (ptr_type_node,
201 TREE_OPERAND (cval, 1))));
203 if (TREE_CODE (cval) == ADDR_EXPR)
205 tree base = NULL_TREE;
206 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
208 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
209 if (base)
210 TREE_OPERAND (cval, 0) = base;
212 else
213 base = get_base_address (TREE_OPERAND (cval, 0));
214 if (!base)
215 return NULL_TREE;
217 if (VAR_OR_FUNCTION_DECL_P (base)
218 && !can_refer_decl_in_current_unit_p (base, from_decl))
219 return NULL_TREE;
220 if (TREE_TYPE (base) == error_mark_node)
221 return NULL_TREE;
222 if (VAR_P (base))
223 TREE_ADDRESSABLE (base) = 1;
224 else if (TREE_CODE (base) == FUNCTION_DECL)
226 /* Make sure we create a cgraph node for functions we'll reference.
227 They can be non-existent if the reference comes from an entry
228 of an external vtable for example. */
229 cgraph_node::get_create (base);
231 /* Fixup types in global initializers. */
232 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
233 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
235 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
236 cval = fold_convert (TREE_TYPE (orig_cval), cval);
237 return cval;
239 if (TREE_OVERFLOW_P (cval))
240 return drop_tree_overflow (cval);
241 return orig_cval;
244 /* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
247 tree
248 get_symbol_constant_value (tree sym)
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
253 if (val)
255 val = canonicalize_constructor_val (unshare_expr (val), sym);
256 if (val && is_gimple_min_invariant (val))
257 return val;
258 else
259 return NULL_TREE;
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
265 && is_gimple_reg_type (TREE_TYPE (sym)))
266 return build_zero_cst (TREE_TYPE (sym));
269 return NULL_TREE;
274 /* Subroutine of fold_stmt. We perform several simplifications of the
275 memory reference tree EXPR and make sure to re-gimplify them properly
276 after propagation of constant addresses. IS_LHS is true if the
277 reference is supposed to be an lvalue. */
279 static tree
280 maybe_fold_reference (tree expr, bool is_lhs)
282 tree result;
284 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285 || TREE_CODE (expr) == REALPART_EXPR
286 || TREE_CODE (expr) == IMAGPART_EXPR)
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_unary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0));
292 else if (TREE_CODE (expr) == BIT_FIELD_REF
293 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294 return fold_ternary_loc (EXPR_LOCATION (expr),
295 TREE_CODE (expr),
296 TREE_TYPE (expr),
297 TREE_OPERAND (expr, 0),
298 TREE_OPERAND (expr, 1),
299 TREE_OPERAND (expr, 2));
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
306 return NULL_TREE;
310 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
311 replacement rhs for the statement or NULL_TREE if no simplification
312 could be made. It is assumed that the operands have been previously
313 folded. */
315 static tree
316 fold_gimple_assign (gimple_stmt_iterator *si)
318 gimple *stmt = gsi_stmt (*si);
319 enum tree_code subcode = gimple_assign_rhs_code (stmt);
320 location_t loc = gimple_location (stmt);
322 tree result = NULL_TREE;
324 switch (get_gimple_rhs_class (subcode))
326 case GIMPLE_SINGLE_RHS:
328 tree rhs = gimple_assign_rhs1 (stmt);
330 if (TREE_CLOBBER_P (rhs))
331 return NULL_TREE;
333 if (REFERENCE_CLASS_P (rhs))
334 return maybe_fold_reference (rhs, false);
336 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
338 tree val = OBJ_TYPE_REF_EXPR (rhs);
339 if (is_gimple_min_invariant (val))
340 return val;
341 else if (flag_devirtualize && virtual_method_call_p (rhs))
343 bool final;
344 vec <cgraph_node *>targets
345 = possible_polymorphic_call_targets (rhs, stmt, &final);
346 if (final && targets.length () <= 1 && dbg_cnt (devirt))
348 if (dump_enabled_p ())
350 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
351 "resolving virtual function address "
352 "reference to function %s\n",
353 targets.length () == 1
354 ? targets[0]->name ()
355 : "NULL");
357 if (targets.length () == 1)
359 val = fold_convert (TREE_TYPE (val),
360 build_fold_addr_expr_loc
361 (loc, targets[0]->decl));
362 STRIP_USELESS_TYPE_CONVERSION (val);
364 else
365 /* We can not use __builtin_unreachable here because it
366 can not have address taken. */
367 val = build_int_cst (TREE_TYPE (val), 0);
368 return val;
373 else if (TREE_CODE (rhs) == ADDR_EXPR)
375 tree ref = TREE_OPERAND (rhs, 0);
376 tree tem = maybe_fold_reference (ref, true);
377 if (tem
378 && TREE_CODE (tem) == MEM_REF
379 && integer_zerop (TREE_OPERAND (tem, 1)))
380 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
381 else if (tem)
382 result = fold_convert (TREE_TYPE (rhs),
383 build_fold_addr_expr_loc (loc, tem));
384 else if (TREE_CODE (ref) == MEM_REF
385 && integer_zerop (TREE_OPERAND (ref, 1)))
386 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
388 if (result)
390 /* Strip away useless type conversions. Both the
391 NON_LVALUE_EXPR that may have been added by fold, and
392 "useless" type conversions that might now be apparent
393 due to propagation. */
394 STRIP_USELESS_TYPE_CONVERSION (result);
396 if (result != rhs && valid_gimple_rhs_p (result))
397 return result;
401 else if (TREE_CODE (rhs) == CONSTRUCTOR
402 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
404 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
405 unsigned i;
406 tree val;
408 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
409 if (! CONSTANT_CLASS_P (val))
410 return NULL_TREE;
412 return build_vector_from_ctor (TREE_TYPE (rhs),
413 CONSTRUCTOR_ELTS (rhs));
416 else if (DECL_P (rhs))
417 return get_symbol_constant_value (rhs);
419 break;
421 case GIMPLE_UNARY_RHS:
422 break;
424 case GIMPLE_BINARY_RHS:
425 break;
427 case GIMPLE_TERNARY_RHS:
428 result = fold_ternary_loc (loc, subcode,
429 TREE_TYPE (gimple_assign_lhs (stmt)),
430 gimple_assign_rhs1 (stmt),
431 gimple_assign_rhs2 (stmt),
432 gimple_assign_rhs3 (stmt));
434 if (result)
436 STRIP_USELESS_TYPE_CONVERSION (result);
437 if (valid_gimple_rhs_p (result))
438 return result;
440 break;
442 case GIMPLE_INVALID_RHS:
443 gcc_unreachable ();
446 return NULL_TREE;
450 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
451 adjusting the replacement stmts location and virtual operands.
452 If the statement has a lhs the last stmt in the sequence is expected
453 to assign to that lhs. */
455 static void
456 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
458 gimple *stmt = gsi_stmt (*si_p);
460 if (gimple_has_location (stmt))
461 annotate_all_with_location (stmts, gimple_location (stmt));
463 /* First iterate over the replacement statements backward, assigning
464 virtual operands to their defining statements. */
465 gimple *laststore = NULL;
466 for (gimple_stmt_iterator i = gsi_last (stmts);
467 !gsi_end_p (i); gsi_prev (&i))
469 gimple *new_stmt = gsi_stmt (i);
470 if ((gimple_assign_single_p (new_stmt)
471 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
472 || (is_gimple_call (new_stmt)
473 && (gimple_call_flags (new_stmt)
474 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
476 tree vdef;
477 if (!laststore)
478 vdef = gimple_vdef (stmt);
479 else
480 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
481 gimple_set_vdef (new_stmt, vdef);
482 if (vdef && TREE_CODE (vdef) == SSA_NAME)
483 SSA_NAME_DEF_STMT (vdef) = new_stmt;
484 laststore = new_stmt;
488 /* Second iterate over the statements forward, assigning virtual
489 operands to their uses. */
490 tree reaching_vuse = gimple_vuse (stmt);
491 for (gimple_stmt_iterator i = gsi_start (stmts);
492 !gsi_end_p (i); gsi_next (&i))
494 gimple *new_stmt = gsi_stmt (i);
495 /* If the new statement possibly has a VUSE, update it with exact SSA
496 name we know will reach this one. */
497 if (gimple_has_mem_ops (new_stmt))
498 gimple_set_vuse (new_stmt, reaching_vuse);
499 gimple_set_modified (new_stmt, true);
500 if (gimple_vdef (new_stmt))
501 reaching_vuse = gimple_vdef (new_stmt);
504 /* If the new sequence does not do a store release the virtual
505 definition of the original statement. */
506 if (reaching_vuse
507 && reaching_vuse == gimple_vuse (stmt))
509 tree vdef = gimple_vdef (stmt);
510 if (vdef
511 && TREE_CODE (vdef) == SSA_NAME)
513 unlink_stmt_vdef (stmt);
514 release_ssa_name (vdef);
518 /* Finally replace the original statement with the sequence. */
519 gsi_replace_with_seq (si_p, stmts, false);
522 /* Convert EXPR into a GIMPLE value suitable for substitution on the
523 RHS of an assignment. Insert the necessary statements before
524 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
525 is replaced. If the call is expected to produces a result, then it
526 is replaced by an assignment of the new RHS to the result variable.
527 If the result is to be ignored, then the call is replaced by a
528 GIMPLE_NOP. A proper VDEF chain is retained by making the first
529 VUSE and the last VDEF of the whole sequence be the same as the replaced
530 statement and using new SSA names for stores in between. */
532 void
533 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
535 tree lhs;
536 gimple *stmt, *new_stmt;
537 gimple_stmt_iterator i;
538 gimple_seq stmts = NULL;
540 stmt = gsi_stmt (*si_p);
542 gcc_assert (is_gimple_call (stmt));
544 push_gimplify_context (gimple_in_ssa_p (cfun));
546 lhs = gimple_call_lhs (stmt);
547 if (lhs == NULL_TREE)
549 gimplify_and_add (expr, &stmts);
550 /* We can end up with folding a memcpy of an empty class assignment
551 which gets optimized away by C++ gimplification. */
552 if (gimple_seq_empty_p (stmts))
554 pop_gimplify_context (NULL);
555 if (gimple_in_ssa_p (cfun))
557 unlink_stmt_vdef (stmt);
558 release_defs (stmt);
560 gsi_replace (si_p, gimple_build_nop (), false);
561 return;
564 else
566 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
567 new_stmt = gimple_build_assign (lhs, tmp);
568 i = gsi_last (stmts);
569 gsi_insert_after_without_update (&i, new_stmt,
570 GSI_CONTINUE_LINKING);
573 pop_gimplify_context (NULL);
575 gsi_replace_with_seq_vops (si_p, stmts);
579 /* Replace the call at *GSI with the gimple value VAL. */
581 void
582 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
584 gimple *stmt = gsi_stmt (*gsi);
585 tree lhs = gimple_call_lhs (stmt);
586 gimple *repl;
587 if (lhs)
589 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
590 val = fold_convert (TREE_TYPE (lhs), val);
591 repl = gimple_build_assign (lhs, val);
593 else
594 repl = gimple_build_nop ();
595 tree vdef = gimple_vdef (stmt);
596 if (vdef && TREE_CODE (vdef) == SSA_NAME)
598 unlink_stmt_vdef (stmt);
599 release_ssa_name (vdef);
601 gsi_replace (gsi, repl, false);
604 /* Replace the call at *GSI with the new call REPL and fold that
605 again. */
607 static void
608 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
610 gimple *stmt = gsi_stmt (*gsi);
611 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
612 gimple_set_location (repl, gimple_location (stmt));
613 if (gimple_vdef (stmt)
614 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
616 gimple_set_vdef (repl, gimple_vdef (stmt));
617 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
619 if (gimple_vuse (stmt))
620 gimple_set_vuse (repl, gimple_vuse (stmt));
621 gsi_replace (gsi, repl, false);
622 fold_stmt (gsi);
625 /* Return true if VAR is a VAR_DECL or a component thereof. */
627 static bool
628 var_decl_component_p (tree var)
630 tree inner = var;
631 while (handled_component_p (inner))
632 inner = TREE_OPERAND (inner, 0);
633 return (DECL_P (inner)
634 || (TREE_CODE (inner) == MEM_REF
635 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
638 /* If the SIZE argument representing the size of an object is in a range
639 of values of which exactly one is valid (and that is zero), return
640 true, otherwise false. */
642 static bool
643 size_must_be_zero_p (tree size)
645 if (integer_zerop (size))
646 return true;
648 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
649 return false;
651 wide_int min, max;
652 enum value_range_type rtype = get_range_info (size, &min, &max);
653 if (rtype != VR_ANTI_RANGE)
654 return false;
656 tree type = TREE_TYPE (size);
657 int prec = TYPE_PRECISION (type);
659 wide_int wone = wi::one (prec);
661 /* Compute the value of SSIZE_MAX, the largest positive value that
662 can be stored in ssize_t, the signed counterpart of size_t. */
663 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
665 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
668 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
669 diagnose (otherwise undefined) overlapping copies without preventing
670 folding. When folded, GCC guarantees that overlapping memcpy has
671 the same semantics as memmove. Call to the library memcpy need not
672 provide the same guarantee. Return false if no simplification can
673 be made. */
675 static bool
676 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
677 tree dest, tree src, int endp)
679 gimple *stmt = gsi_stmt (*gsi);
680 tree lhs = gimple_call_lhs (stmt);
681 tree len = gimple_call_arg (stmt, 2);
682 tree destvar, srcvar;
683 location_t loc = gimple_location (stmt);
685 bool nowarn = gimple_no_warning_p (stmt);
687 /* If the LEN parameter is a constant zero or in range where
688 the only valid value is zero, return DEST. */
689 if (size_must_be_zero_p (len))
691 gimple *repl;
692 if (gimple_call_lhs (stmt))
693 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
694 else
695 repl = gimple_build_nop ();
696 tree vdef = gimple_vdef (stmt);
697 if (vdef && TREE_CODE (vdef) == SSA_NAME)
699 unlink_stmt_vdef (stmt);
700 release_ssa_name (vdef);
702 gsi_replace (gsi, repl, false);
703 return true;
706 /* If SRC and DEST are the same (and not volatile), return
707 DEST{,+LEN,+LEN-1}. */
708 if (operand_equal_p (src, dest, 0))
710 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
711 It's safe and may even be emitted by GCC itself (see bug
712 32667). */
713 unlink_stmt_vdef (stmt);
714 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
715 release_ssa_name (gimple_vdef (stmt));
716 if (!lhs)
718 gsi_replace (gsi, gimple_build_nop (), false);
719 return true;
721 goto done;
723 else
725 tree srctype, desttype;
726 unsigned int src_align, dest_align;
727 tree off0;
728 const char *tmp_str;
729 unsigned HOST_WIDE_INT tmp_len;
731 /* Build accesses at offset zero with a ref-all character type. */
732 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
733 ptr_mode, true), 0);
735 /* If we can perform the copy efficiently with first doing all loads
736 and then all stores inline it that way. Currently efficiently
737 means that we can load all the memory into a single integer
738 register which is what MOVE_MAX gives us. */
739 src_align = get_pointer_alignment (src);
740 dest_align = get_pointer_alignment (dest);
741 if (tree_fits_uhwi_p (len)
742 && compare_tree_int (len, MOVE_MAX) <= 0
743 /* ??? Don't transform copies from strings with known length this
744 confuses the tree-ssa-strlen.c. This doesn't handle
745 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
746 reason. */
747 && !c_strlen (src, 2)
748 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
749 && memchr (tmp_str, 0, tmp_len) == NULL))
751 unsigned ilen = tree_to_uhwi (len);
752 if (pow2p_hwi (ilen))
754 /* Detect invalid bounds and overlapping copies and issue
755 either -Warray-bounds or -Wrestrict. */
756 if (!nowarn
757 && check_bounds_or_overlap (as_a <gcall *>(stmt),
758 dest, src, len, len))
759 gimple_set_no_warning (stmt, true);
761 scalar_int_mode mode;
762 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
763 if (type
764 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
765 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
766 /* If the destination pointer is not aligned we must be able
767 to emit an unaligned store. */
768 && (dest_align >= GET_MODE_ALIGNMENT (mode)
769 || !targetm.slow_unaligned_access (mode, dest_align)
770 || (optab_handler (movmisalign_optab, mode)
771 != CODE_FOR_nothing)))
773 tree srctype = type;
774 tree desttype = type;
775 if (src_align < GET_MODE_ALIGNMENT (mode))
776 srctype = build_aligned_type (type, src_align);
777 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
778 tree tem = fold_const_aggregate_ref (srcmem);
779 if (tem)
780 srcmem = tem;
781 else if (src_align < GET_MODE_ALIGNMENT (mode)
782 && targetm.slow_unaligned_access (mode, src_align)
783 && (optab_handler (movmisalign_optab, mode)
784 == CODE_FOR_nothing))
785 srcmem = NULL_TREE;
786 if (srcmem)
788 gimple *new_stmt;
789 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
791 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
792 srcmem
793 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
794 new_stmt);
795 gimple_assign_set_lhs (new_stmt, srcmem);
796 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
797 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
799 if (dest_align < GET_MODE_ALIGNMENT (mode))
800 desttype = build_aligned_type (type, dest_align);
801 new_stmt
802 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
803 dest, off0),
804 srcmem);
805 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
806 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
807 if (gimple_vdef (new_stmt)
808 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
809 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
810 if (!lhs)
812 gsi_replace (gsi, new_stmt, false);
813 return true;
815 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
816 goto done;
822 if (endp == 3)
824 /* Both DEST and SRC must be pointer types.
825 ??? This is what old code did. Is the testing for pointer types
826 really mandatory?
828 If either SRC is readonly or length is 1, we can use memcpy. */
829 if (!dest_align || !src_align)
830 return false;
831 if (readonly_data_expr (src)
832 || (tree_fits_uhwi_p (len)
833 && (MIN (src_align, dest_align) / BITS_PER_UNIT
834 >= tree_to_uhwi (len))))
836 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
837 if (!fn)
838 return false;
839 gimple_call_set_fndecl (stmt, fn);
840 gimple_call_set_arg (stmt, 0, dest);
841 gimple_call_set_arg (stmt, 1, src);
842 fold_stmt (gsi);
843 return true;
846 /* If *src and *dest can't overlap, optimize into memcpy as well. */
847 if (TREE_CODE (src) == ADDR_EXPR
848 && TREE_CODE (dest) == ADDR_EXPR)
850 tree src_base, dest_base, fn;
851 poly_int64 src_offset = 0, dest_offset = 0;
852 poly_uint64 maxsize;
854 srcvar = TREE_OPERAND (src, 0);
855 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
856 if (src_base == NULL)
857 src_base = srcvar;
858 destvar = TREE_OPERAND (dest, 0);
859 dest_base = get_addr_base_and_unit_offset (destvar,
860 &dest_offset);
861 if (dest_base == NULL)
862 dest_base = destvar;
863 if (!poly_int_tree_p (len, &maxsize))
864 maxsize = -1;
865 if (SSA_VAR_P (src_base)
866 && SSA_VAR_P (dest_base))
868 if (operand_equal_p (src_base, dest_base, 0)
869 && ranges_maybe_overlap_p (src_offset, maxsize,
870 dest_offset, maxsize))
871 return false;
873 else if (TREE_CODE (src_base) == MEM_REF
874 && TREE_CODE (dest_base) == MEM_REF)
876 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
877 TREE_OPERAND (dest_base, 0), 0))
878 return false;
879 poly_offset_int full_src_offset
880 = mem_ref_offset (src_base) + src_offset;
881 poly_offset_int full_dest_offset
882 = mem_ref_offset (dest_base) + dest_offset;
883 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
884 full_dest_offset, maxsize))
885 return false;
887 else
888 return false;
890 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
891 if (!fn)
892 return false;
893 gimple_call_set_fndecl (stmt, fn);
894 gimple_call_set_arg (stmt, 0, dest);
895 gimple_call_set_arg (stmt, 1, src);
896 fold_stmt (gsi);
897 return true;
900 /* If the destination and source do not alias optimize into
901 memcpy as well. */
902 if ((is_gimple_min_invariant (dest)
903 || TREE_CODE (dest) == SSA_NAME)
904 && (is_gimple_min_invariant (src)
905 || TREE_CODE (src) == SSA_NAME))
907 ao_ref destr, srcr;
908 ao_ref_init_from_ptr_and_size (&destr, dest, len);
909 ao_ref_init_from_ptr_and_size (&srcr, src, len);
910 if (!refs_may_alias_p_1 (&destr, &srcr, false))
912 tree fn;
913 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
914 if (!fn)
915 return false;
916 gimple_call_set_fndecl (stmt, fn);
917 gimple_call_set_arg (stmt, 0, dest);
918 gimple_call_set_arg (stmt, 1, src);
919 fold_stmt (gsi);
920 return true;
924 return false;
927 if (!tree_fits_shwi_p (len))
928 return false;
929 if (!POINTER_TYPE_P (TREE_TYPE (src))
930 || !POINTER_TYPE_P (TREE_TYPE (dest)))
931 return false;
932 /* In the following try to find a type that is most natural to be
933 used for the memcpy source and destination and that allows
934 the most optimization when memcpy is turned into a plain assignment
935 using that type. In theory we could always use a char[len] type
936 but that only gains us that the destination and source possibly
937 no longer will have their address taken. */
938 srctype = TREE_TYPE (TREE_TYPE (src));
939 if (TREE_CODE (srctype) == ARRAY_TYPE
940 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
941 srctype = TREE_TYPE (srctype);
942 desttype = TREE_TYPE (TREE_TYPE (dest));
943 if (TREE_CODE (desttype) == ARRAY_TYPE
944 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
945 desttype = TREE_TYPE (desttype);
946 if (TREE_ADDRESSABLE (srctype)
947 || TREE_ADDRESSABLE (desttype))
948 return false;
950 /* Make sure we are not copying using a floating-point mode or
951 a type whose size possibly does not match its precision. */
952 if (FLOAT_MODE_P (TYPE_MODE (desttype))
953 || TREE_CODE (desttype) == BOOLEAN_TYPE
954 || TREE_CODE (desttype) == ENUMERAL_TYPE)
955 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
956 if (FLOAT_MODE_P (TYPE_MODE (srctype))
957 || TREE_CODE (srctype) == BOOLEAN_TYPE
958 || TREE_CODE (srctype) == ENUMERAL_TYPE)
959 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
960 if (!srctype)
961 srctype = desttype;
962 if (!desttype)
963 desttype = srctype;
964 if (!srctype)
965 return false;
967 src_align = get_pointer_alignment (src);
968 dest_align = get_pointer_alignment (dest);
969 if (dest_align < TYPE_ALIGN (desttype)
970 || src_align < TYPE_ALIGN (srctype))
971 return false;
973 destvar = NULL_TREE;
974 if (TREE_CODE (dest) == ADDR_EXPR
975 && var_decl_component_p (TREE_OPERAND (dest, 0))
976 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
977 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
979 srcvar = NULL_TREE;
980 if (TREE_CODE (src) == ADDR_EXPR
981 && var_decl_component_p (TREE_OPERAND (src, 0))
982 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
984 if (!destvar
985 || src_align >= TYPE_ALIGN (desttype))
986 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
987 src, off0);
988 else if (!STRICT_ALIGNMENT)
990 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
991 src_align);
992 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
996 if (srcvar == NULL_TREE && destvar == NULL_TREE)
997 return false;
999 if (srcvar == NULL_TREE)
1001 if (src_align >= TYPE_ALIGN (desttype))
1002 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1003 else
1005 if (STRICT_ALIGNMENT)
1006 return false;
1007 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1008 src_align);
1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1012 else if (destvar == NULL_TREE)
1014 if (dest_align >= TYPE_ALIGN (srctype))
1015 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1016 else
1018 if (STRICT_ALIGNMENT)
1019 return false;
1020 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1021 dest_align);
1022 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1026 /* Detect invalid bounds and overlapping copies and issue either
1027 -Warray-bounds or -Wrestrict. */
1028 if (!nowarn)
1029 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1031 gimple *new_stmt;
1032 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1034 tree tem = fold_const_aggregate_ref (srcvar);
1035 if (tem)
1036 srcvar = tem;
1037 if (! is_gimple_min_invariant (srcvar))
1039 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1040 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1041 new_stmt);
1042 gimple_assign_set_lhs (new_stmt, srcvar);
1043 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1044 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1046 new_stmt = gimple_build_assign (destvar, srcvar);
1047 goto set_vop_and_replace;
1050 /* We get an aggregate copy. Use an unsigned char[] type to
1051 perform the copying to preserve padding and to avoid any issues
1052 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1053 desttype = build_array_type_nelts (unsigned_char_type_node,
1054 tree_to_uhwi (len));
1055 srctype = desttype;
1056 if (src_align > TYPE_ALIGN (srctype))
1057 srctype = build_aligned_type (srctype, src_align);
1058 if (dest_align > TYPE_ALIGN (desttype))
1059 desttype = build_aligned_type (desttype, dest_align);
1060 new_stmt
1061 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1062 fold_build2 (MEM_REF, srctype, src, off0));
1063 set_vop_and_replace:
1064 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1065 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1066 if (gimple_vdef (new_stmt)
1067 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1068 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1069 if (!lhs)
1071 gsi_replace (gsi, new_stmt, false);
1072 return true;
1074 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1077 done:
1078 gimple_seq stmts = NULL;
1079 if (endp == 0 || endp == 3)
1080 len = NULL_TREE;
1081 else if (endp == 2)
1082 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1083 ssize_int (1));
1084 if (endp == 2 || endp == 1)
1086 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1087 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1088 TREE_TYPE (dest), dest, len);
1091 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1092 gimple *repl = gimple_build_assign (lhs, dest);
1093 gsi_replace (gsi, repl, false);
1094 return true;
1097 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1098 to built-in memcmp (a, b, len). */
1100 static bool
1101 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1103 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1105 if (!fn)
1106 return false;
1108 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1110 gimple *stmt = gsi_stmt (*gsi);
1111 tree a = gimple_call_arg (stmt, 0);
1112 tree b = gimple_call_arg (stmt, 1);
1113 tree len = gimple_call_arg (stmt, 2);
1115 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1116 replace_call_with_call_and_fold (gsi, repl);
1118 return true;
1121 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1122 to built-in memmove (dest, src, len). */
1124 static bool
1125 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1127 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1129 if (!fn)
1130 return false;
1132 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1133 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1134 len) into memmove (dest, src, len). */
1136 gimple *stmt = gsi_stmt (*gsi);
1137 tree src = gimple_call_arg (stmt, 0);
1138 tree dest = gimple_call_arg (stmt, 1);
1139 tree len = gimple_call_arg (stmt, 2);
1141 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1142 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1143 replace_call_with_call_and_fold (gsi, repl);
1145 return true;
1148 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1149 to built-in memset (dest, 0, len). */
1151 static bool
1152 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1154 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1156 if (!fn)
1157 return false;
1159 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1161 gimple *stmt = gsi_stmt (*gsi);
1162 tree dest = gimple_call_arg (stmt, 0);
1163 tree len = gimple_call_arg (stmt, 1);
1165 gimple_seq seq = NULL;
1166 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1167 gimple_seq_add_stmt_without_update (&seq, repl);
1168 gsi_replace_with_seq_vops (gsi, seq);
1169 fold_stmt (gsi);
1171 return true;
1174 /* Fold function call to builtin memset or bzero at *GSI setting the
1175 memory of size LEN to VAL. Return whether a simplification was made. */
1177 static bool
1178 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1180 gimple *stmt = gsi_stmt (*gsi);
1181 tree etype;
1182 unsigned HOST_WIDE_INT length, cval;
1184 /* If the LEN parameter is zero, return DEST. */
1185 if (integer_zerop (len))
1187 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1188 return true;
1191 if (! tree_fits_uhwi_p (len))
1192 return false;
1194 if (TREE_CODE (c) != INTEGER_CST)
1195 return false;
1197 tree dest = gimple_call_arg (stmt, 0);
1198 tree var = dest;
1199 if (TREE_CODE (var) != ADDR_EXPR)
1200 return false;
1202 var = TREE_OPERAND (var, 0);
1203 if (TREE_THIS_VOLATILE (var))
1204 return false;
1206 etype = TREE_TYPE (var);
1207 if (TREE_CODE (etype) == ARRAY_TYPE)
1208 etype = TREE_TYPE (etype);
1210 if (!INTEGRAL_TYPE_P (etype)
1211 && !POINTER_TYPE_P (etype))
1212 return NULL_TREE;
1214 if (! var_decl_component_p (var))
1215 return NULL_TREE;
1217 length = tree_to_uhwi (len);
1218 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1219 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1220 return NULL_TREE;
1222 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1223 return NULL_TREE;
1225 if (integer_zerop (c))
1226 cval = 0;
1227 else
1229 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1230 return NULL_TREE;
1232 cval = TREE_INT_CST_LOW (c);
1233 cval &= 0xff;
1234 cval |= cval << 8;
1235 cval |= cval << 16;
1236 cval |= (cval << 31) << 1;
1239 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1240 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1241 gimple_set_vuse (store, gimple_vuse (stmt));
1242 tree vdef = gimple_vdef (stmt);
1243 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1245 gimple_set_vdef (store, gimple_vdef (stmt));
1246 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1248 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1249 if (gimple_call_lhs (stmt))
1251 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1252 gsi_replace (gsi, asgn, false);
1254 else
1256 gimple_stmt_iterator gsi2 = *gsi;
1257 gsi_prev (gsi);
1258 gsi_remove (&gsi2, true);
1261 return true;
1265 /* Obtain the minimum and maximum string length or minimum and maximum
1266 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1267 If ARG is an SSA name variable, follow its use-def chains. When
1268 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1269 if we are unable to determine the length or value, return false.
1270 VISITED is a bitmap of visited variables.
1271 TYPE is 0 if string length should be obtained, 1 for maximum string
1272 length and 2 for maximum value ARG can have.
1273 When FUZZY is non-zero and the length of a string cannot be determined,
1274 the function instead considers as the maximum possible length the
1275 size of a character array it may refer to. If FUZZY is 2, it will handle
1276 PHIs and COND_EXPRs optimistically, if we can determine string length
1277 minimum and maximum, it will use the minimum from the ones where it
1278 can be determined.
1279 Set *FLEXP to true if the range of the string lengths has been
1280 obtained from the upper bound of an array at the end of a struct.
1281 Such an array may hold a string that's longer than its upper bound
1282 due to it being used as a poor-man's flexible array member.
1283 Pass NONSTR through to children.
1284 ELTSIZE is 1 for normal single byte character strings, and 2 or
1285    4 for wide character strings.  ELTSIZE is by default 1.  */
static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  int fuzzy, bool *flexp, unsigned eltsize, tree *nonstr)
{
  tree var, val = NULL_TREE;
  gimple *def_stmt;

  /* The minimum and maximum length.  */
  tree *const minlen = length;
  tree *const maxlen = length + 1;

  /* Non-SSA names are handled directly; SSA names fall through to the
     use-def walk below.  */
  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  tree op = TREE_OPERAND (arg, 0);
	  if (integer_zerop (TREE_OPERAND (op, 1)))
	    {
	      /* &PTR->[0]: recurse on the pointer itself.  */
	      tree aop0 = TREE_OPERAND (op, 0);
	      if (TREE_CODE (aop0) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
		return get_range_strlen (TREE_OPERAND (aop0, 0), length,
					 visited, type, fuzzy, flexp,
					 eltsize, nonstr);
	    }
	  else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
	    {
	      /* Fail if an array is the last member of a struct object
		 since it could be treated as a (fake) flexible array
		 member.  */
	      tree idx = TREE_OPERAND (op, 1);

	      arg = TREE_OPERAND (op, 0);
	      tree optype = TREE_TYPE (arg);
	      if (tree dom = TYPE_DOMAIN (optype))
		if (tree bound = TYPE_MAX_VALUE (dom))
		  if (TREE_CODE (bound) == INTEGER_CST
		      && TREE_CODE (idx) == INTEGER_CST
		      && tree_int_cst_lt (bound, idx))
		    return false;
	    }
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG is a value, not a pointer; it must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	{
	  c_strlen_data data;
	  memset (&data, 0, sizeof (c_strlen_data));
	  val = c_strlen (arg, 1, &data, eltsize);

	  /* If we potentially had a non-terminated string, then
	     bubble that information up to the caller.  */
	  if (!val && data.decl)
	    {
	      *nonstr = data.decl;
	      *minlen = data.len;
	      *maxlen = data.len;
	      return type == 0 ? false : true;
	    }
	}

      /* In fuzzy mode fall back to the bound of the referenced array
	 when the actual string length is unknown.  */
      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp,
				     eltsize, nonstr);

	  if (TREE_CODE (arg) == ARRAY_REF)
	    {
	      tree type = TREE_TYPE (TREE_OPERAND (arg, 0));

	      /* Determine the "innermost" array type.  */
	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Avoid arrays of pointers.  */
	      tree eltype = TREE_TYPE (type);
	      if (TREE_CODE (type) != ARRAY_TYPE
		  || !INTEGRAL_TYPE_P (eltype))
		return false;

	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;

	      /* Maximum length is the array size less the nul.  */
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);

	      if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
		  && type == TREE_TYPE (TREE_OPERAND (arg, 0))
		  && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
		*flexp = true;
	    }
	  else if (TREE_CODE (arg) == COMPONENT_REF
		   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		       == ARRAY_TYPE))
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL but that would only be considered valid if
		 the array were the last member of a struct.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);

	      tree type = TREE_TYPE (arg);

	      /* Determine the "innermost" array type, as above.  */
	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Fail when the array bound is unknown or zero.  */
	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }

	  /* A declared array (or pointer-to-array) variable: use its
	     bound less one as the maximum length.  */
	  if (VAR_P (arg))
	    {
	      tree type = TREE_TYPE (arg);
	      if (POINTER_TYPE_P (type))
		type = TREE_TYPE (type);

	      if (TREE_CODE (type) == ARRAY_TYPE)
		{
		  val = TYPE_SIZE_UNIT (type);
		  if (!val
		      || TREE_CODE (val) != INTEGER_CST
		      || integer_zerop (val))
		    return false;
		  val = wide_int_to_tree (TREE_TYPE (val),
					  wi::sub (wi::to_wide (val), 1));
		  /* Set the minimum size to zero since the string in
		     the array could have zero length.  */
		  *minlen = ssize_int (0);
		}
	    }
	}

      if (!val)
	return false;

      /* Merge VAL into the running minimum.  */
      if (!*minlen
	  || (type > 0
	      && TREE_CODE (*minlen) == INTEGER_CST
	      && TREE_CODE (val) == INTEGER_CST
	      && tree_int_cst_lt (val, *minlen)))
	*minlen = val;

      /* Merge VAL into the running maximum: in range mode (TYPE > 0)
	 keep the larger constant; in exact mode (TYPE == 0) all
	 contributors must agree.  */
      if (*maxlen)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp,
				   eltsize, nonstr);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Both arms of the conditional contribute to the range.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
				   flexp, eltsize, nonstr))
	      {
		/* FUZZY == 2: treat an unknown arm optimistically by
		   making the maximum unbounded instead of failing.  */
		if (fuzzy == 2)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* All the arguments of the PHI node must have the same constant
	 length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp,
				 eltsize, nonstr))
	    {
	      /* As for COND_EXPR above: FUZZY == 2 tolerates unknown
		 PHI arguments by unbounding the maximum.  */
	      if (fuzzy == 2)
		*maxlen = build_all_ones_cst (size_type_node);
	      else
		return false;
	    }
	}
      return true;

    default:
      return false;
    }
}
/* Determine the minimum and maximum value or string length that ARG
   refers to and store each in the first two elements of MINMAXLEN.
   For expressions that point to strings of unknown lengths that are
   character arrays, use the upper bound of the array as the maximum
   length.  For example, given an expression like 'x ? array : "xyz"'
   and array declared as 'char array[8]', MINMAXLEN[0] will be set
   to 0 and MINMAXLEN[1] to 7, the longest string that could be
   stored in array.
   Return true if the range of the string lengths has been obtained
   from the upper bound of an array at the end of a struct.  Such
   an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.

   STRICT is true if it will handle PHIs and COND_EXPRs conservatively
   and false if PHIs and COND_EXPRs are to be handled optimistically,
   if we can determine string length minimum and maximum; it will use
   the minimum from the ones where it can be determined.
   STRICT false should be only used for warning code.
   When non-null, clear *NONSTR if ARG refers to a constant array
   that is known not be nul-terminated.  Otherwise set it to
   the declaration of the constant non-terminated array.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.  */

bool
get_range_strlen (tree arg, tree minmaxlen[2], unsigned eltsize,
		  bool strict, tree *nonstr /* = NULL */)
{
  bitmap visited = NULL;

  minmaxlen[0] = NULL_TREE;
  minmaxlen[1] = NULL_TREE;

  /* Give the recursive worker somewhere to record an unterminated
     array even when the caller doesn't care.  */
  tree nonstrbuf;
  if (!nonstr)
    nonstr = &nonstrbuf;
  *nonstr = NULL_TREE;

  /* TYPE 1 (maximum string length); FUZZY 1 (conservative) when
     STRICT, otherwise 2 (optimistic, warning-code only).  */
  bool flexarray = false;
  if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
			 &flexarray, eltsize, nonstr))
    {
      /* On failure report no usable range.  */
      minmaxlen[0] = NULL_TREE;
      minmaxlen[1] = NULL_TREE;
    }

  if (visited)
    BITMAP_FREE (visited);

  return flexarray;
}
/* Return the maximum string length for ARG, counting by TYPE
   (1, 2 or 4 for normal or wide chars).  NONSTR indicates
   if the caller is prepared to handle unterminated strings.

   If an unterminated string is discovered and our caller handles
   unterminated strings, then bubble up the offending DECL and
   return the maximum size.  Otherwise return NULL.  */

tree
get_maxval_strlen (tree arg, int type, tree *nonstr /* = NULL */)
{
  bitmap visited = NULL;
  /* len[0] is the minimum, len[1] the maximum; only the maximum is
     returned.  */
  tree len[2] = { NULL_TREE, NULL_TREE };

  bool dummy;
  /* Set to non-null if ARG refers to an unterminated array.  */
  tree mynonstr = NULL_TREE;
  if (!get_range_strlen (arg, len, &visited, type, 0, &dummy, 1, &mynonstr))
    len[1] = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
	 *NONSTR to point to the declaration of the array and return
	 the maximum length/size.  */
      *nonstr = mynonstr;
      return len[1];
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return mynonstr ? NULL_TREE : len[1];
}
1645 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1646 If LEN is not NULL, it represents the length of the string to be
1647 copied. Return NULL_TREE if no simplification can be made. */
1649 static bool
1650 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1651 tree dest, tree src)
1653 gimple *stmt = gsi_stmt (*gsi);
1654 location_t loc = gimple_location (stmt);
1655 tree fn;
1657 /* If SRC and DEST are the same (and not volatile), return DEST. */
1658 if (operand_equal_p (src, dest, 0))
1660 /* Issue -Wrestrict unless the pointers are null (those do
1661 not point to objects and so do not indicate an overlap;
1662 such calls could be the result of sanitization and jump
1663 threading). */
1664 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1666 tree func = gimple_call_fndecl (stmt);
1668 warning_at (loc, OPT_Wrestrict,
1669 "%qD source argument is the same as destination",
1670 func);
1673 replace_call_with_value (gsi, dest);
1674 return true;
1677 if (optimize_function_for_size_p (cfun))
1678 return false;
1680 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1681 if (!fn)
1682 return false;
1684 /* Set to non-null if ARG refers to an unterminated array. */
1685 tree nonstr = NULL;
1686 tree len = get_maxval_strlen (src, 0, &nonstr);
1688 if (nonstr)
1690 /* Avoid folding calls with unterminated arrays. */
1691 if (!gimple_no_warning_p (stmt))
1692 warn_string_no_nul (loc, "strcpy", src, nonstr);
1693 gimple_set_no_warning (stmt, true);
1694 return false;
1697 if (!len)
1698 return false;
1700 len = fold_convert_loc (loc, size_type_node, len);
1701 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1702 len = force_gimple_operand_gsi (gsi, len, true,
1703 NULL_TREE, true, GSI_SAME_STMT);
1704 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1705 replace_call_with_call_and_fold (gsi, repl);
1706 return true;
/* Fold a call to the strncpy built-in with arguments DEST, SRC, and
   LEN at *GSI.  When the whole source string (including the
   terminating nul) is known to fit in LEN bytes the call is turned
   into a memcpy.  Return true iff a simplification was made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, 0);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
1784 /* Fold function call to builtin strchr or strrchr.
1785 If both arguments are constant, evaluate and fold the result,
1786 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1787 In general strlen is significantly faster than strchr
1788 due to being a simpler operation. */
1789 static bool
1790 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1792 gimple *stmt = gsi_stmt (*gsi);
1793 tree str = gimple_call_arg (stmt, 0);
1794 tree c = gimple_call_arg (stmt, 1);
1795 location_t loc = gimple_location (stmt);
1796 const char *p;
1797 char ch;
1799 if (!gimple_call_lhs (stmt))
1800 return false;
1802 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1804 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1806 if (p1 == NULL)
1808 replace_call_with_value (gsi, integer_zero_node);
1809 return true;
1812 tree len = build_int_cst (size_type_node, p1 - p);
1813 gimple_seq stmts = NULL;
1814 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1815 POINTER_PLUS_EXPR, str, len);
1816 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1817 gsi_replace_with_seq_vops (gsi, stmts);
1818 return true;
1821 if (!integer_zerop (c))
1822 return false;
1824 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1825 if (is_strrchr && optimize_function_for_size_p (cfun))
1827 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1829 if (strchr_fn)
1831 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1832 replace_call_with_call_and_fold (gsi, repl);
1833 return true;
1836 return false;
1839 tree len;
1840 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1842 if (!strlen_fn)
1843 return false;
1845 /* Create newstr = strlen (str). */
1846 gimple_seq stmts = NULL;
1847 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1848 gimple_set_location (new_stmt, loc);
1849 len = create_tmp_reg_or_ssa_name (size_type_node);
1850 gimple_call_set_lhs (new_stmt, len);
1851 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1853 /* Create (str p+ strlen (str)). */
1854 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1855 POINTER_PLUS_EXPR, str, len);
1856 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1857 gsi_replace_with_seq_vops (gsi, stmts);
1858 /* gsi now points at the assignment to the lhs, get a
1859 stmt iterator to the strlen.
1860 ??? We can't use gsi_for_stmt as that doesn't work when the
1861 CFG isn't built yet. */
1862 gimple_stmt_iterator gsi2 = *gsi;
1863 gsi_prev (&gsi2);
1864 fold_stmt (&gsi2);
1865 return true;
1868 /* Fold function call to builtin strstr.
1869 If both arguments are constant, evaluate and fold the result,
1870 additionally fold strstr (x, "") into x and strstr (x, "c")
1871 into strchr (x, 'c'). */
1872 static bool
1873 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1875 gimple *stmt = gsi_stmt (*gsi);
1876 tree haystack = gimple_call_arg (stmt, 0);
1877 tree needle = gimple_call_arg (stmt, 1);
1878 const char *p, *q;
1880 if (!gimple_call_lhs (stmt))
1881 return false;
1883 q = c_getstr (needle);
1884 if (q == NULL)
1885 return false;
1887 if ((p = c_getstr (haystack)))
1889 const char *r = strstr (p, q);
1891 if (r == NULL)
1893 replace_call_with_value (gsi, integer_zero_node);
1894 return true;
1897 tree len = build_int_cst (size_type_node, r - p);
1898 gimple_seq stmts = NULL;
1899 gimple *new_stmt
1900 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1901 haystack, len);
1902 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1903 gsi_replace_with_seq_vops (gsi, stmts);
1904 return true;
1907 /* For strstr (x, "") return x. */
1908 if (q[0] == '\0')
1910 replace_call_with_value (gsi, haystack);
1911 return true;
1914 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1915 if (q[1] == '\0')
1917 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1918 if (strchr_fn)
1920 tree c = build_int_cst (integer_type_node, q[0]);
1921 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1922 replace_call_with_call_and_fold (gsi, repl);
1923 return true;
1927 return false;
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are
   the arguments to the call.

   Folds strcat (dst, "") into just DST.  Otherwise, when SRC has a
   known length and the block is optimized for speed, expands the
   call into strlen (dst) followed by a memcpy of length + 1 bytes
   into dst + strlen (dst).

   Return true iff the statement was replaced, false when no
   simplification was possible.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy expansion below grows code size.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (src) + 1 bytes so the terminating nul comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; preserve that for users of the lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2022 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2023 are the arguments to the call. */
2025 static bool
2026 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2028 gimple *stmt = gsi_stmt (*gsi);
2029 tree dest = gimple_call_arg (stmt, 0);
2030 tree src = gimple_call_arg (stmt, 1);
2031 tree size = gimple_call_arg (stmt, 2);
2032 tree fn;
2033 const char *p;
2036 p = c_getstr (src);
2037 /* If the SRC parameter is "", return DEST. */
2038 if (p && *p == '\0')
2040 replace_call_with_value (gsi, dest);
2041 return true;
2044 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2045 return false;
2047 /* If __builtin_strcat_chk is used, assume strcat is available. */
2048 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2049 if (!fn)
2050 return false;
2052 gimple *repl = gimple_build_call (fn, 2, dest, src);
2053 replace_call_with_call_and_fold (gsi, repl);
2054 return true;
/* Simplify a call to the strncat builtin at *GSI.  Folds the call to
   DST when the bound or the source string is empty, diagnoses
   suspicious bounds (equal to or exceeding the destination size, or
   equal to the source length), and otherwise, when the bound is a
   constant no smaller than the source length, replaces the call with
   plain strcat.  Return true iff a simplification was made.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The checks below need both a constant bound and a constant
     source string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2146 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2147 LEN, and SIZE. Return true if the call was folded into strncat,
__strcat_chk or a value, false otherwise. */
2149 static bool
2150 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2152 gimple *stmt = gsi_stmt (*gsi);
2153 tree dest = gimple_call_arg (stmt, 0);
2154 tree src = gimple_call_arg (stmt, 1);
2155 tree len = gimple_call_arg (stmt, 2);
2156 tree size = gimple_call_arg (stmt, 3);
2157 tree fn;
2158 const char *p;
2160 p = c_getstr (src);
2161 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2162 if ((p && *p == '\0')
2163 || integer_zerop (len))
2165 replace_call_with_value (gsi, dest);
2166 return true;
2169 if (! tree_fits_uhwi_p (size))
2170 return false;
/* A SIZE that is not all ones is a real compile-time object size,
so only simplifications that keep the check are allowed. */
2172 if (! integer_all_onesp (size))
2174 tree src_len = c_strlen (src, 1);
2175 if (src_len
2176 && tree_fits_uhwi_p (src_len)
2177 && tree_fits_uhwi_p (len)
2178 && ! tree_int_cst_lt (len, src_len))
2180 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2181 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK)
2182 if (!fn)
2183 return false;
2185 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2186 replace_call_with_call_and_fold (gsi, repl);
2187 return true;
2189 return false;
2192 /* If __builtin_strncat_chk is used, assume strncat is available. */
2193 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2194 if (!fn)
2195 return false;
2197 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2198 replace_call_with_call_and_fold (gsi, repl);
2199 return true;
2202 /* Build and append gimple statements to STMTS that would load a first
2203 character of a memory location identified by STR. LOC is location
2204 of the statement. Returns the SSA name / temporary holding the
loaded byte. The load is done through a "const unsigned char"
MEM_REF so the value matches how str[n]cmp compares bytes. */
2206 static tree
2207 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2209 tree var;
2211 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2212 tree cst_uchar_ptr_node
2213 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2214 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2216 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2217 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2218 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2220 gimple_assign_set_lhs (stmt, var);
2221 gimple_seq_add_stmt_without_update (stmts, stmt);
2223 return var;
2226 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2227 FCODE is the name of the builtin. Return true if the call was
simplified, false otherwise. */
2229 static bool
2230 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2232 gimple *stmt = gsi_stmt (*gsi);
2233 tree callee = gimple_call_fndecl (stmt);
2234 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2236 tree type = integer_type_node;
2237 tree str1 = gimple_call_arg (stmt, 0);
2238 tree str2 = gimple_call_arg (stmt, 1);
2239 tree lhs = gimple_call_lhs (stmt);
/* Constant bound of strncmp/strncasecmp, or -1 when the call has no
bound argument or the bound isn't a known constant. */
2240 HOST_WIDE_INT length = -1;
2242 /* Handle strncmp and strncasecmp functions. */
2243 if (gimple_call_num_args (stmt) == 3)
2245 tree len = gimple_call_arg (stmt, 2);
2246 if (tree_fits_uhwi_p (len))
2247 length = tree_to_uhwi (len);
2250 /* If the LEN parameter is zero, return zero. */
2251 if (length == 0)
2253 replace_call_with_value (gsi, integer_zero_node);
2254 return true;
2257 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2258 if (operand_equal_p (str1, str2, 0))
2260 replace_call_with_value (gsi, integer_zero_node);
2261 return true;
2264 const char *p1 = c_getstr (str1);
2265 const char *p2 = c_getstr (str2);
2267 /* For known strings, return an immediate value. */
2268 if (p1 && p2)
2270 int r = 0;
2271 bool known_result = false;
2273 switch (fcode)
2275 case BUILT_IN_STRCMP:
2276 case BUILT_IN_STRCMP_EQ:
2278 r = strcmp (p1, p2);
2279 known_result = true;
2280 break;
2282 case BUILT_IN_STRNCMP:
2283 case BUILT_IN_STRNCMP_EQ:
2285 if (length == -1)
2286 break;
2287 r = strncmp (p1, p2, length);
2288 known_result = true;
2289 break;
2291 /* Only handleable situation is where the strings are equal (result 0),
2292 which is already handled by operand_equal_p case. */
2293 case BUILT_IN_STRCASECMP:
2294 break;
2295 case BUILT_IN_STRNCASECMP:
2297 if (length == -1)
2298 break;
/* For the case-insensitive variant a zero result from the
case-sensitive strncmp is still conclusive; a nonzero result
is not, since differing bytes may compare equal case-folded. */
2299 r = strncmp (p1, p2, length);
2300 if (r == 0)
2301 known_result = true;
2302 break;
2304 default:
2305 gcc_unreachable ();
2308 if (known_result)
2310 replace_call_with_value (gsi, build_cmp_result (type, r));
2311 return true;
/* The bounded variants inspect at least one character only when the
bound is known to be >= 1; the unbounded variants always do. */
2315 bool nonzero_length = length >= 1
2316 || fcode == BUILT_IN_STRCMP
2317 || fcode == BUILT_IN_STRCMP_EQ
2318 || fcode == BUILT_IN_STRCASECMP;
2320 location_t loc = gimple_location (stmt);
2322 /* If the second arg is "", return *(const unsigned char*)arg1. */
2323 if (p2 && *p2 == '\0' && nonzero_length)
2325 gimple_seq stmts = NULL;
2326 tree var = gimple_load_first_char (loc, str1, &stmts);
2327 if (lhs)
2329 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2330 gimple_seq_add_stmt_without_update (&stmts, stmt);
2333 gsi_replace_with_seq_vops (gsi, stmts);
2334 return true;
2337 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2338 if (p1 && *p1 == '\0' && nonzero_length)
2340 gimple_seq stmts = NULL;
2341 tree var = gimple_load_first_char (loc, str2, &stmts);
2343 if (lhs)
2345 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2346 stmt = gimple_build_assign (c, NOP_EXPR, var);
2347 gimple_seq_add_stmt_without_update (&stmts, stmt);
2349 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2350 gimple_seq_add_stmt_without_update (&stmts, stmt);
2353 gsi_replace_with_seq_vops (gsi, stmts);
2354 return true;
2357 /* If len parameter is one, return an expression corresponding to
2358 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2359 if (fcode == BUILT_IN_STRNCMP && length == 1)
2361 gimple_seq stmts = NULL;
2362 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2363 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2365 if (lhs)
2367 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2368 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2369 gimple_seq_add_stmt_without_update (&stmts, convert1);
2371 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2372 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2373 gimple_seq_add_stmt_without_update (&stmts, convert2);
2375 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2376 gimple_seq_add_stmt_without_update (&stmts, stmt);
2379 gsi_replace_with_seq_vops (gsi, stmts);
2380 return true;
2383 /* If length is larger than the length of one constant string,
2384 replace strncmp with corresponding strcmp */
2385 if (fcode == BUILT_IN_STRNCMP
2386 && length > 0
2387 && ((p2 && (size_t) length > strlen (p2))
2388 || (p1 && (size_t) length > strlen (p1))))
2390 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2391 if (!fn)
2392 return false;
2393 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2394 replace_call_with_call_and_fold (gsi, repl);
2395 return true;
2398 return false;
2401 /* Fold a call to the memchr pointed by GSI iterator. Return true if
the call was replaced by a constant pointer or an offset expression,
false otherwise. */
2403 static bool
2404 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2406 gimple *stmt = gsi_stmt (*gsi);
2407 tree lhs = gimple_call_lhs (stmt);
2408 tree arg1 = gimple_call_arg (stmt, 0);
2409 tree arg2 = gimple_call_arg (stmt, 1);
2410 tree len = gimple_call_arg (stmt, 2);
2412 /* If the LEN parameter is zero, return zero. */
2413 if (integer_zerop (len))
2415 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2416 return true;
2419 char c;
2420 if (TREE_CODE (arg2) != INTEGER_CST
2421 || !tree_fits_uhwi_p (len)
2422 || !target_char_cst_p (arg2, &c))
2423 return false;
2425 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2426 unsigned HOST_WIDE_INT string_length;
2427 const char *p1 = c_getstr (arg1, &string_length);
2429 if (p1)
/* Only search the prefix we can see at compile time. */
2431 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2432 if (r == NULL)
/* No match in the known prefix; that proves absence only if the
runtime search would not extend past that prefix. */
2434 if (length <= string_length)
2436 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2437 return true;
2440 else
2442 unsigned HOST_WIDE_INT offset = r - p1;
2443 gimple_seq stmts = NULL;
2444 if (lhs != NULL_TREE)
2446 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2447 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2448 arg1, offset_cst);
2449 gimple_seq_add_stmt_without_update (&stmts, stmt);
2451 else
/* Result unused: the call can be dropped; use a nop so the
virtual operands are still transferred consistently. */
2452 gimple_seq_add_stmt_without_update (&stmts,
2453 gimple_build_nop ());
2455 gsi_replace_with_seq_vops (gsi, stmts);
2456 return true;
2460 return false;
2463 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2464 to the call. UNLOCKED is true if this is actually a call to
2465 fputs_unlocked. Deletes the call when the string length is 0,
2466 replaces it by fputc when the length is 1, and by fwrite when the
2467 length is a larger constant. Return true if a simplification was
2468 made, false otherwise. */
2470 static bool
2471 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2472 tree arg0, tree arg1,
2473 bool unlocked)
2475 gimple *stmt = gsi_stmt (*gsi);
2477 /* If we're using an unlocked function, assume the other unlocked
2478 functions exist explicitly. */
2479 tree const fn_fputc = (unlocked
2480 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2481 : builtin_decl_implicit (BUILT_IN_FPUTC));
2482 tree const fn_fwrite = (unlocked
2483 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2484 : builtin_decl_implicit (BUILT_IN_FWRITE));
2486 /* If the return value is used, don't do the transformation. */
2487 if (gimple_call_lhs (stmt))
2488 return false;
2490 /* Get the length of the string passed to fputs. If the length
2491 can't be determined, punt. */
2492 tree len = get_maxval_strlen (arg0, 0);
2493 if (!len
2494 || TREE_CODE (len) != INTEGER_CST)
2495 return false;
2497 switch (compare_tree_int (len, 1))
2499 case -1: /* length is 0, delete the call entirely. */
2500 replace_call_with_value (gsi, integer_zero_node);
2501 return true;
2503 case 0: /* length is 1, call fputc. */
2505 const char *p = c_getstr (arg0);
2506 if (p != NULL)
2508 if (!fn_fputc)
2509 return false;
2511 gimple *repl = gimple_build_call (fn_fputc, 2,
2512 build_int_cst
2513 (integer_type_node, p[0]), arg1);
2514 replace_call_with_call_and_fold (gsi, repl);
2515 return true;
/* String contents unknown even though the length is 1; fall
through and treat it like the longer-string case. */
2518 /* FALLTHROUGH */
2519 case 1: /* length is greater than 1, call fwrite. */
2521 /* If optimizing for size keep fputs. */
2522 if (optimize_function_for_size_p (cfun))
2523 return false;
2524 /* New argument list transforming fputs(string, stream) to
2525 fwrite(string, 1, len, stream). */
2526 if (!fn_fwrite)
2527 return false;
2529 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2530 size_one_node, len, arg1);
2531 replace_call_with_call_and_fold (gsi, repl);
2532 return true;
2534 default:
2535 gcc_unreachable ();
2537 return false;
2540 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2541 DEST, SRC, LEN, and SIZE are the arguments to the call.
2542 FCODE is the BUILT_IN_* code of the builtin. Return true if the
2543 call was folded into the unchecked mem* variant or a simpler form,
2544 false otherwise. */
2546 static bool
2547 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2548 tree dest, tree src, tree len, tree size,
2549 enum built_in_function fcode)
2551 gimple *stmt = gsi_stmt (*gsi);
2552 location_t loc = gimple_location (stmt);
2553 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2554 tree fn;
2556 /* If SRC and DEST are the same (and not volatile), return DEST
2557 (resp. DEST+LEN for __mempcpy_chk). */
2558 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2560 if (fcode != BUILT_IN_MEMPCPY_CHK)
2562 replace_call_with_value (gsi, dest);
2563 return true;
2565 else
2567 gimple_seq stmts = NULL;
2568 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2569 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2570 TREE_TYPE (dest), dest, len);
2571 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2572 replace_call_with_value (gsi, temp);
2573 return true;
2577 if (! tree_fits_uhwi_p (size))
2578 return false;
/* Upper bound for LEN, used when LEN itself is not constant. */
2580 tree maxlen = get_maxval_strlen (len, 2);
2581 if (! integer_all_onesp (size))
2583 if (! tree_fits_uhwi_p (len))
2585 /* If LEN is not constant, try MAXLEN too.
2586 For MAXLEN only allow optimizing into non-_ocs function
2587 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2588 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2590 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2592 /* (void) __mempcpy_chk () can be optimized into
2593 (void) __memcpy_chk (). */
2594 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2595 if (!fn)
2596 return false;
2598 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2599 replace_call_with_call_and_fold (gsi, repl);
2600 return true;
2602 return false;
2605 else
2606 maxlen = len;
/* Keep the check if the copy could exceed the object size. */
2608 if (tree_int_cst_lt (size, maxlen))
2609 return false;
2612 fn = NULL_TREE;
2613 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2614 mem{cpy,pcpy,move,set} is available. */
2615 switch (fcode)
2617 case BUILT_IN_MEMCPY_CHK:
2618 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2619 break;
2620 case BUILT_IN_MEMPCPY_CHK:
2621 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2622 break;
2623 case BUILT_IN_MEMMOVE_CHK:
2624 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2625 break;
2626 case BUILT_IN_MEMSET_CHK:
2627 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2628 break;
2629 default:
2630 break;
2633 if (!fn)
2634 return false;
2636 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2637 replace_call_with_call_and_fold (gsi, repl);
2638 return true;
2641 /* Fold a call to the __st[rp]cpy_chk builtin.
2642 DEST, SRC, and SIZE are the arguments to the call.
2643 FCODE is the BUILT_IN_* code of the builtin. Return true if the
2644 call was folded into st[rp]cpy, __memcpy_chk, __strcpy_chk or a
2645 value, false otherwise. */
2647 static bool
2648 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2649 tree dest,
2650 tree src, tree size,
2651 enum built_in_function fcode)
2653 gimple *stmt = gsi_stmt (*gsi);
2654 location_t loc = gimple_location (stmt);
2655 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2656 tree len, fn;
2658 /* If SRC and DEST are the same (and not volatile), return DEST. */
2659 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2661 /* Issue -Wrestrict unless the pointers are null (those do
2662 not point to objects and so do not indicate an overlap;
2663 such calls could be the result of sanitization and jump
2664 threading). */
2665 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2667 tree func = gimple_call_fndecl (stmt);
2669 warning_at (loc, OPT_Wrestrict,
2670 "%qD source argument is the same as destination",
2671 func);
2674 replace_call_with_value (gsi, dest);
2675 return true;
2678 if (! tree_fits_uhwi_p (size))
2679 return false;
/* Upper bound on the length of the SRC string. */
2681 tree maxlen = get_maxval_strlen (src, 1);
2682 if (! integer_all_onesp (size))
2684 len = c_strlen (src, 1);
2685 if (! len || ! tree_fits_uhwi_p (len))
2687 /* If LEN is not constant, try MAXLEN too.
2688 For MAXLEN only allow optimizing into non-_ocs function
2689 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2690 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2692 if (fcode == BUILT_IN_STPCPY_CHK)
2694 if (! ignore)
2695 return false;
2697 /* If return value of __stpcpy_chk is ignored,
2698 optimize into __strcpy_chk. */
2699 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2700 if (!fn)
2701 return false;
2703 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2704 replace_call_with_call_and_fold (gsi, repl);
2705 return true;
2708 if (! len || TREE_SIDE_EFFECTS (len))
2709 return false;
2711 /* If c_strlen returned something, but not a constant,
2712 transform __strcpy_chk into __memcpy_chk. */
2713 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2714 if (!fn)
2715 return false;
2717 gimple_seq stmts = NULL;
2718 len = gimple_convert (&stmts, loc, size_type_node, len);
/* Copy strlen (SRC) + 1 bytes to include the terminating NUL. */
2719 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2720 build_int_cst (size_type_node, 1));
2721 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2722 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2723 replace_call_with_call_and_fold (gsi, repl);
2724 return true;
2727 else
2728 maxlen = len;
2730 if (! tree_int_cst_lt (maxlen, size))
2731 return false;
2734 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2735 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2736 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2737 if (!fn)
2738 return false;
2740 gimple *repl = gimple_build_call (fn, 2, dest, src);
2741 replace_call_with_call_and_fold (gsi, repl);
2742 return true;
2745 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2746 are the arguments to the call. FCODE is the BUILT_IN_* code of the
2747 builtin. Return true if the call was folded into the unchecked
2748 st{r,p}ncpy variant (or into __strncpy_chk), false otherwise. */
2750 static bool
2751 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2752 tree dest, tree src,
2753 tree len, tree size,
2754 enum built_in_function fcode)
2756 gimple *stmt = gsi_stmt (*gsi);
2757 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2758 tree fn;
2760 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2762 /* If return value of __stpncpy_chk is ignored,
2763 optimize into __strncpy_chk. */
2764 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2765 if (fn)
2767 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2768 replace_call_with_call_and_fold (gsi, repl);
2769 return true;
2773 if (! tree_fits_uhwi_p (size))
2774 return false;
/* Upper bound for LEN, used when LEN itself is not constant. */
2776 tree maxlen = get_maxval_strlen (len, 2);
2777 if (! integer_all_onesp (size))
2779 if (! tree_fits_uhwi_p (len))
2781 /* If LEN is not constant, try MAXLEN too.
2782 For MAXLEN only allow optimizing into non-_ocs function
2783 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2784 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2785 return false;
2787 else
2788 maxlen = len;
/* Keep the check if the copy could exceed the object size. */
2790 if (tree_int_cst_lt (size, maxlen))
2791 return false;
2794 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2795 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2796 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2797 if (!fn)
2798 return false;
2800 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2801 replace_call_with_call_and_fold (gsi, repl);
2802 return true;
2805 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2806 Return true if the call was simplified, false otherwise. */
2808 static bool
2809 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2811 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2812 location_t loc = gimple_location (stmt);
2813 tree dest = gimple_call_arg (stmt, 0);
2814 tree src = gimple_call_arg (stmt, 1);
2815 tree fn, lenp1;
2817 /* If the result is unused, replace stpcpy with strcpy. */
2818 if (gimple_call_lhs (stmt) == NULL_TREE)
2820 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2821 if (!fn)
2822 return false;
2823 gimple_call_set_fndecl (stmt, fn);
2824 fold_stmt (gsi);
2825 return true;
2828 /* Set to non-null if ARG refers to an unterminated array. */
2829 c_strlen_data data;
/* Clear with memset rather than an "= { }" initializer: empty-brace
aggregate initialization is rejected by C++98 bootstrap compilers
(this is the fix named in the commit subject). */
2830 memset (&data, 0, sizeof (c_strlen_data));
2831 tree len = c_strlen (src, 1, &data, 1);
2832 if (!len
2833 || TREE_CODE (len) != INTEGER_CST)
2835 data.decl = unterminated_array (src);
2836 if (!data.decl)
2837 return false;
2840 if (data.decl)
2842 /* Avoid folding calls with unterminated arrays. */
2843 if (!gimple_no_warning_p (stmt))
2844 warn_string_no_nul (loc, "stpcpy", src, data.decl);
2845 gimple_set_no_warning (stmt, true);
2846 return false;
2849 if (optimize_function_for_size_p (cfun)
2850 /* If length is zero it's small enough. */
2851 && !integer_zerop (len))
2852 return false;
2854 /* If the source has a known length replace stpcpy with memcpy. */
2855 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2856 if (!fn)
2857 return false;
2859 gimple_seq stmts = NULL;
2860 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
/* Copy strlen (SRC) + 1 bytes to include the terminating NUL. */
2861 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2862 tem, build_int_cst (size_type_node, 1));
2863 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2864 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
/* Move the virtual operands from the old call onto the memcpy so it
stays linked into the virtual SSA chain. */
2865 gimple_set_vuse (repl, gimple_vuse (stmt));
2866 gimple_set_vdef (repl, gimple_vdef (stmt));
2867 if (gimple_vdef (repl)
2868 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2869 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2870 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2871 /* Replace the result with dest + len. */
2872 stmts = NULL;
2873 tem = gimple_convert (&stmts, loc, sizetype, len);
2874 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2875 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2876 POINTER_PLUS_EXPR, dest, tem);
2877 gsi_replace (gsi, ret, false);
2878 /* Finally fold the memcpy call. */
2879 gimple_stmt_iterator gsi2 = *gsi;
2880 gsi_prev (&gsi2);
2881 fold_stmt (&gsi2);
2882 return true;
2885 /* Fold a call to the __{,v}snprintf_chk builtin pointed to by GSI.
2886 FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.
2887 Replace the call with a plain {,v}snprintf call when that is
2888 provably safe. Return true if the call was folded,
2889 false otherwise. */
2891 static bool
2892 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2893 enum built_in_function fcode)
2895 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2896 tree dest, size, len, fn, fmt, flag;
2897 const char *fmt_str;
2899 /* Verify the required arguments in the original call. */
2900 if (gimple_call_num_args (stmt) < 5)
2901 return false;
2903 dest = gimple_call_arg (stmt, 0);
2904 len = gimple_call_arg (stmt, 1);
2905 flag = gimple_call_arg (stmt, 2);
2906 size = gimple_call_arg (stmt, 3);
2907 fmt = gimple_call_arg (stmt, 4);
2909 if (! tree_fits_uhwi_p (size))
2910 return false;
2912 if (! integer_all_onesp (size))
2914 tree maxlen = get_maxval_strlen (len, 2);
2915 if (! tree_fits_uhwi_p (len))
2917 /* If LEN is not constant, try MAXLEN too.
2918 For MAXLEN only allow optimizing into non-_ocs function
2919 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2920 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2921 return false;
2923 else
2924 maxlen = len;
2926 if (tree_int_cst_lt (size, maxlen))
2927 return false;
2930 if (!init_target_chars ())
2931 return false;
2933 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2934 or if format doesn't contain % chars or is "%s". */
2935 if (! integer_zerop (flag))
2937 fmt_str = c_getstr (fmt);
2938 if (fmt_str == NULL)
2939 return false;
2940 if (strchr (fmt_str, target_percent) != NULL
2941 && strcmp (fmt_str, target_percent_s))
2942 return false;
2945 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2946 available. */
2947 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2948 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2949 if (!fn)
2950 return false;
2952 /* Replace the called function and the first 5 argument by 3 retaining
2953 trailing varargs. */
2954 gimple_call_set_fndecl (stmt, fn);
2955 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2956 gimple_call_set_arg (stmt, 0, dest);
2957 gimple_call_set_arg (stmt, 1, len);
2958 gimple_call_set_arg (stmt, 2, fmt);
/* Shift the varargs down over the dropped FLAG and SIZE slots. */
2959 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2960 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2961 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2962 fold_stmt (gsi);
2963 return true;
2966 /* Fold a call to the __{,v}sprintf_chk builtin pointed to by GSI.
2967 FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
2968 Replace the call with a plain {,v}sprintf call when that is
2969 provably safe. Return true if the call was folded, false otherwise. */
2971 static bool
2972 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2973 enum built_in_function fcode)
2975 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2976 tree dest, size, len, fn, fmt, flag;
2977 const char *fmt_str;
2978 unsigned nargs = gimple_call_num_args (stmt);
2980 /* Verify the required arguments in the original call. */
2981 if (nargs < 4)
2982 return false;
2983 dest = gimple_call_arg (stmt, 0);
2984 flag = gimple_call_arg (stmt, 1);
2985 size = gimple_call_arg (stmt, 2);
2986 fmt = gimple_call_arg (stmt, 3);
2988 if (! tree_fits_uhwi_p (size))
2989 return false;
/* LEN is the compile-time output length, when determinable below. */
2991 len = NULL_TREE;
2993 if (!init_target_chars ())
2994 return false;
2996 /* Check whether the format is a literal string constant. */
2997 fmt_str = c_getstr (fmt);
2998 if (fmt_str != NULL)
3000 /* If the format doesn't contain % args or %%, we know the size. */
3001 if (strchr (fmt_str, target_percent) == 0)
3003 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3004 len = build_int_cstu (size_type_node, strlen (fmt_str));
3006 /* If the format is "%s" and first ... argument is a string literal,
3007 we know the size too. */
3008 else if (fcode == BUILT_IN_SPRINTF_CHK
3009 && strcmp (fmt_str, target_percent_s) == 0)
3011 tree arg;
3013 if (nargs == 5)
3015 arg = gimple_call_arg (stmt, 4);
3016 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3018 len = c_strlen (arg, 1);
3019 if (! len || ! tree_fits_uhwi_p (len))
3020 len = NULL_TREE;
/* With a known object size, fold only when the output provably
fits (LEN < SIZE leaves room for the terminating NUL). */
3026 if (! integer_all_onesp (size))
3028 if (! len || ! tree_int_cst_lt (len, size))
3029 return false;
3032 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3033 or if format doesn't contain % chars or is "%s". */
3034 if (! integer_zerop (flag))
3036 if (fmt_str == NULL)
3037 return false;
3038 if (strchr (fmt_str, target_percent) != NULL
3039 && strcmp (fmt_str, target_percent_s))
3040 return false;
3043 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3044 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3045 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3046 if (!fn)
3047 return false;
3049 /* Replace the called function and the first 4 argument by 2 retaining
3050 trailing varargs. */
3051 gimple_call_set_fndecl (stmt, fn);
3052 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3053 gimple_call_set_arg (stmt, 0, dest);
3054 gimple_call_set_arg (stmt, 1, fmt);
/* Shift the varargs down over the dropped FLAG and SIZE slots. */
3055 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3056 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3057 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3058 fold_stmt (gsi);
3059 return true;
3062 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3063 ORIG may be null if this is a 2-argument call. We don't attempt to
3064 simplify calls with more than 3 arguments.
3066 Return true if simplification was possible, otherwise false. */
3068 bool
3069 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3071 gimple *stmt = gsi_stmt (*gsi);
3072 tree dest = gimple_call_arg (stmt, 0);
3073 tree fmt = gimple_call_arg (stmt, 1);
3074 tree orig = NULL_TREE;
3075 const char *fmt_str = NULL;
3077 /* Verify the required arguments in the original call. We deal with two
3078 types of sprintf() calls: 'sprintf (str, fmt)' and
3079 'sprintf (dest, "%s", orig)'. */
3080 if (gimple_call_num_args (stmt) > 3)
3081 return false;
3083 if (gimple_call_num_args (stmt) == 3)
3084 orig = gimple_call_arg (stmt, 2);
3086 /* Check whether the format is a literal string constant. */
3087 fmt_str = c_getstr (fmt);
3088 if (fmt_str == NULL)
3089 return false;
3091 if (!init_target_chars ())
3092 return false;
3094 /* If the format doesn't contain % args or %%, use strcpy. */
3095 if (strchr (fmt_str, target_percent) == NULL)
3097 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3099 if (!fn)
3100 return false;
3102 /* Don't optimize sprintf (buf, "abc", ptr++). */
3103 if (orig)
3104 return false;
3106 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3107 'format' is known to contain no % formats. */
3108 gimple_seq stmts = NULL;
3109 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3111 /* Propagate the NO_WARNING bit to avoid issuing the same
3112 warning more than once. */
3113 if (gimple_no_warning_p (stmt))
3114 gimple_set_no_warning (repl, true);
3116 gimple_seq_add_stmt_without_update (&stmts, repl);
3117 if (gimple_call_lhs (stmt))
/* sprintf returns the number of characters written; with no %
directives that is simply strlen (FMT). */
3119 repl = gimple_build_assign (gimple_call_lhs (stmt),
3120 build_int_cst (integer_type_node,
3121 strlen (fmt_str)));
3122 gimple_seq_add_stmt_without_update (&stmts, repl);
3123 gsi_replace_with_seq_vops (gsi, stmts);
3124 /* gsi now points at the assignment to the lhs, get a
3125 stmt iterator to the memcpy call.
3126 ??? We can't use gsi_for_stmt as that doesn't work when the
3127 CFG isn't built yet. */
3128 gimple_stmt_iterator gsi2 = *gsi;
3129 gsi_prev (&gsi2);
3130 fold_stmt (&gsi2);
3132 else
3134 gsi_replace_with_seq_vops (gsi, stmts);
3135 fold_stmt (gsi);
3137 return true;
3140 /* If the format is "%s", use strcpy if the result isn't used. */
3141 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3143 tree fn;
3144 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3146 if (!fn)
3147 return false;
3149 /* Don't crash on sprintf (str1, "%s"). */
3150 if (!orig)
3151 return false;
/* When the result is used we also need the length of ORIG to
materialize the return value. */
3153 tree orig_len = NULL_TREE;
3154 if (gimple_call_lhs (stmt))
3156 orig_len = get_maxval_strlen (orig, 0);
3157 if (!orig_len)
3158 return false;
3161 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3162 gimple_seq stmts = NULL;
3163 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3165 /* Propagate the NO_WARNING bit to avoid issuing the same
3166 warning more than once. */
3167 if (gimple_no_warning_p (stmt))
3168 gimple_set_no_warning (repl, true);
3170 gimple_seq_add_stmt_without_update (&stmts, repl);
3171 if (gimple_call_lhs (stmt))
3173 if (!useless_type_conversion_p (integer_type_node,
3174 TREE_TYPE (orig_len)))
3175 orig_len = fold_convert (integer_type_node, orig_len);
3176 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3177 gimple_seq_add_stmt_without_update (&stmts, repl);
3178 gsi_replace_with_seq_vops (gsi, stmts);
3179 /* gsi now points at the assignment to the lhs, get a
3180 stmt iterator to the memcpy call.
3181 ??? We can't use gsi_for_stmt as that doesn't work when the
3182 CFG isn't built yet. */
3183 gimple_stmt_iterator gsi2 = *gsi;
3184 gsi_prev (&gsi2);
3185 fold_stmt (&gsi2);
3187 else
3189 gsi_replace_with_seq_vops (gsi, stmts);
3190 fold_stmt (gsi);
3192 return true;
3194 return false;
3197 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3198 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3199 attempt to simplify calls with more than 4 arguments.
3201 Return true if simplification was possible, otherwise false. */
3203 bool
3204 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3206 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3207 tree dest = gimple_call_arg (stmt, 0);
3208 tree destsize = gimple_call_arg (stmt, 1);
3209 tree fmt = gimple_call_arg (stmt, 2);
3210 tree orig = NULL_TREE;
3211 const char *fmt_str = NULL;
3213 if (gimple_call_num_args (stmt) > 4)
3214 return false;
3216 if (gimple_call_num_args (stmt) == 4)
3217 orig = gimple_call_arg (stmt, 3);
3219 if (!tree_fits_uhwi_p (destsize))
3220 return false;
3221 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3223 /* Check whether the format is a literal string constant. */
3224 fmt_str = c_getstr (fmt);
3225 if (fmt_str == NULL)
3226 return false;
3228 if (!init_target_chars ())
3229 return false;
3231 /* If the format doesn't contain % args or %%, use strcpy. */
3232 if (strchr (fmt_str, target_percent) == NULL)
3234 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3235 if (!fn)
3236 return false;
3238 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3239 if (orig)
3240 return false;
3242 /* We could expand this as
3243 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3244 or to
3245 memcpy (str, fmt_with_nul_at_cstm1, cst);
3246 but in the former case that might increase code size
3247 and in the latter case grow .rodata section too much.
3248 So punt for now. */
3249 size_t len = strlen (fmt_str);
3250 if (len >= destlen)
3251 return false;
3253 gimple_seq stmts = NULL;
3254 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3255 gimple_seq_add_stmt_without_update (&stmts, repl);
3256 if (gimple_call_lhs (stmt))
3258 repl = gimple_build_assign (gimple_call_lhs (stmt),
3259 build_int_cst (integer_type_node, len));
3260 gimple_seq_add_stmt_without_update (&stmts, repl);
3261 gsi_replace_with_seq_vops (gsi, stmts);
3262 /* gsi now points at the assignment to the lhs, get a
3263 stmt iterator to the memcpy call.
3264 ??? We can't use gsi_for_stmt as that doesn't work when the
3265 CFG isn't built yet. */
3266 gimple_stmt_iterator gsi2 = *gsi;
3267 gsi_prev (&gsi2);
3268 fold_stmt (&gsi2);
3270 else
3272 gsi_replace_with_seq_vops (gsi, stmts);
3273 fold_stmt (gsi);
3275 return true;
3278 /* If the format is "%s", use strcpy if the result isn't used. */
3279 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3281 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3282 if (!fn)
3283 return false;
3285 /* Don't crash on snprintf (str1, cst, "%s"). */
3286 if (!orig)
3287 return false;
3289 tree orig_len = get_maxval_strlen (orig, 0);
3290 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3291 return false;
3293 /* We could expand this as
3294 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3295 or to
3296 memcpy (str1, str2_with_nul_at_cstm1, cst);
3297 but in the former case that might increase code size
3298 and in the latter case grow .rodata section too much.
3299 So punt for now. */
3300 if (compare_tree_int (orig_len, destlen) >= 0)
3301 return false;
3303 /* Convert snprintf (str1, cst, "%s", str2) into
3304 strcpy (str1, str2) if strlen (str2) < cst. */
3305 gimple_seq stmts = NULL;
3306 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3307 gimple_seq_add_stmt_without_update (&stmts, repl);
3308 if (gimple_call_lhs (stmt))
3310 if (!useless_type_conversion_p (integer_type_node,
3311 TREE_TYPE (orig_len)))
3312 orig_len = fold_convert (integer_type_node, orig_len);
3313 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3314 gimple_seq_add_stmt_without_update (&stmts, repl);
3315 gsi_replace_with_seq_vops (gsi, stmts);
3316 /* gsi now points at the assignment to the lhs, get a
3317 stmt iterator to the memcpy call.
3318 ??? We can't use gsi_for_stmt as that doesn't work when the
3319 CFG isn't built yet. */
3320 gimple_stmt_iterator gsi2 = *gsi;
3321 gsi_prev (&gsi2);
3322 fold_stmt (&gsi2);
3324 else
3326 gsi_replace_with_seq_vops (gsi, stmts);
3327 fold_stmt (gsi);
3329 return true;
3331 return false;
3334 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3335 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3336 more than 3 arguments, and ARG may be null in the 2-argument case.
3338 Return NULL_TREE if no simplification was possible, otherwise return the
3339 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3340 code of the function to be simplified. */
3342 static bool
3343 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3344 tree fp, tree fmt, tree arg,
3345 enum built_in_function fcode)
3347 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3348 tree fn_fputc, fn_fputs;
3349 const char *fmt_str = NULL;
3351 /* If the return value is used, don't do the transformation. */
3352 if (gimple_call_lhs (stmt) != NULL_TREE)
3353 return false;
3355 /* Check whether the format is a literal string constant. */
3356 fmt_str = c_getstr (fmt);
3357 if (fmt_str == NULL)
3358 return false;
3360 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3362 /* If we're using an unlocked function, assume the other
3363 unlocked functions exist explicitly. */
3364 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3365 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3367 else
3369 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3370 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3373 if (!init_target_chars ())
3374 return false;
3376 /* If the format doesn't contain % args or %%, use strcpy. */
3377 if (strchr (fmt_str, target_percent) == NULL)
3379 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3380 && arg)
3381 return false;
3383 /* If the format specifier was "", fprintf does nothing. */
3384 if (fmt_str[0] == '\0')
3386 replace_call_with_value (gsi, NULL_TREE);
3387 return true;
3390 /* When "string" doesn't contain %, replace all cases of
3391 fprintf (fp, string) with fputs (string, fp). The fputs
3392 builtin will take care of special cases like length == 1. */
3393 if (fn_fputs)
3395 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3396 replace_call_with_call_and_fold (gsi, repl);
3397 return true;
3401 /* The other optimizations can be done only on the non-va_list variants. */
3402 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3403 return false;
3405 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3406 else if (strcmp (fmt_str, target_percent_s) == 0)
3408 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3409 return false;
3410 if (fn_fputs)
3412 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3413 replace_call_with_call_and_fold (gsi, repl);
3414 return true;
3418 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3419 else if (strcmp (fmt_str, target_percent_c) == 0)
3421 if (!arg
3422 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3423 return false;
3424 if (fn_fputc)
3426 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3427 replace_call_with_call_and_fold (gsi, repl);
3428 return true;
3432 return false;
3435 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3436 FMT and ARG are the arguments to the call; we don't fold cases with
3437 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3439 Return NULL_TREE if no simplification was possible, otherwise return the
3440 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3441 code of the function to be simplified. */
3443 static bool
3444 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3445 tree arg, enum built_in_function fcode)
3447 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3448 tree fn_putchar, fn_puts, newarg;
3449 const char *fmt_str = NULL;
3451 /* If the return value is used, don't do the transformation. */
3452 if (gimple_call_lhs (stmt) != NULL_TREE)
3453 return false;
3455 /* Check whether the format is a literal string constant. */
3456 fmt_str = c_getstr (fmt);
3457 if (fmt_str == NULL)
3458 return false;
3460 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3462 /* If we're using an unlocked function, assume the other
3463 unlocked functions exist explicitly. */
3464 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3465 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3467 else
3469 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3470 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3473 if (!init_target_chars ())
3474 return false;
3476 if (strcmp (fmt_str, target_percent_s) == 0
3477 || strchr (fmt_str, target_percent) == NULL)
3479 const char *str;
3481 if (strcmp (fmt_str, target_percent_s) == 0)
3483 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3484 return false;
3486 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3487 return false;
3489 str = c_getstr (arg);
3490 if (str == NULL)
3491 return false;
3493 else
3495 /* The format specifier doesn't contain any '%' characters. */
3496 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3497 && arg)
3498 return false;
3499 str = fmt_str;
3502 /* If the string was "", printf does nothing. */
3503 if (str[0] == '\0')
3505 replace_call_with_value (gsi, NULL_TREE);
3506 return true;
3509 /* If the string has length of 1, call putchar. */
3510 if (str[1] == '\0')
3512 /* Given printf("c"), (where c is any one character,)
3513 convert "c"[0] to an int and pass that to the replacement
3514 function. */
3515 newarg = build_int_cst (integer_type_node, str[0]);
3516 if (fn_putchar)
3518 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3519 replace_call_with_call_and_fold (gsi, repl);
3520 return true;
3523 else
3525 /* If the string was "string\n", call puts("string"). */
3526 size_t len = strlen (str);
3527 if ((unsigned char)str[len - 1] == target_newline
3528 && (size_t) (int) len == len
3529 && (int) len > 0)
3531 char *newstr;
3533 /* Create a NUL-terminated string that's one char shorter
3534 than the original, stripping off the trailing '\n'. */
3535 newstr = xstrdup (str);
3536 newstr[len - 1] = '\0';
3537 newarg = build_string_literal (len, newstr);
3538 free (newstr);
3539 if (fn_puts)
3541 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3542 replace_call_with_call_and_fold (gsi, repl);
3543 return true;
3546 else
3547 /* We'd like to arrange to call fputs(string,stdout) here,
3548 but we need stdout and don't have a way to get it yet. */
3549 return false;
3553 /* The other optimizations can be done only on the non-va_list variants. */
3554 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3555 return false;
3557 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3558 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3560 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3561 return false;
3562 if (fn_puts)
3564 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3565 replace_call_with_call_and_fold (gsi, repl);
3566 return true;
3570 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3571 else if (strcmp (fmt_str, target_percent_c) == 0)
3573 if (!arg || ! useless_type_conversion_p (integer_type_node,
3574 TREE_TYPE (arg)))
3575 return false;
3576 if (fn_putchar)
3578 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3579 replace_call_with_call_and_fold (gsi, repl);
3580 return true;
3584 return false;
3589 /* Fold a call to __builtin_strlen with known length LEN. */
3591 static bool
3592 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3594 gimple *stmt = gsi_stmt (*gsi);
3595 tree arg = gimple_call_arg (stmt, 0);
3597 wide_int minlen;
3598 wide_int maxlen;
3600 /* Set to non-null if ARG refers to an unterminated array. */
3601 tree nonstr;
3602 tree lenrange[2];
3603 if (!get_range_strlen (arg, lenrange, 1, true, &nonstr)
3604 && !nonstr
3605 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3606 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3608 /* The range of lengths refers to either a single constant
3609 string or to the longest and shortest constant string
3610 referenced by the argument of the strlen() call, or to
3611 the strings that can possibly be stored in the arrays
3612 the argument refers to. */
3613 minlen = wi::to_wide (lenrange[0]);
3614 maxlen = wi::to_wide (lenrange[1]);
3616 else
3618 unsigned prec = TYPE_PRECISION (sizetype);
3620 minlen = wi::shwi (0, prec);
3621 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3624 if (minlen == maxlen)
3626 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3627 true, GSI_SAME_STMT);
3628 replace_call_with_value (gsi, lenrange[0]);
3629 return true;
3632 if (tree lhs = gimple_call_lhs (stmt))
3633 if (TREE_CODE (lhs) == SSA_NAME
3634 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3635 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3637 return false;
3640 /* Fold a call to __builtin_acc_on_device. */
3642 static bool
3643 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3645 /* Defer folding until we know which compiler we're in. */
3646 if (symtab->state != EXPANSION)
3647 return false;
3649 unsigned val_host = GOMP_DEVICE_HOST;
3650 unsigned val_dev = GOMP_DEVICE_NONE;
3652 #ifdef ACCEL_COMPILER
3653 val_host = GOMP_DEVICE_NOT_HOST;
3654 val_dev = ACCEL_COMPILER_acc_device;
3655 #endif
3657 location_t loc = gimple_location (gsi_stmt (*gsi));
3659 tree host_eq = make_ssa_name (boolean_type_node);
3660 gimple *host_ass = gimple_build_assign
3661 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3662 gimple_set_location (host_ass, loc);
3663 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3665 tree dev_eq = make_ssa_name (boolean_type_node);
3666 gimple *dev_ass = gimple_build_assign
3667 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3668 gimple_set_location (dev_ass, loc);
3669 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3671 tree result = make_ssa_name (boolean_type_node);
3672 gimple *result_ass = gimple_build_assign
3673 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3674 gimple_set_location (result_ass, loc);
3675 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3677 replace_call_with_value (gsi, result);
3679 return true;
3682 /* Fold realloc (0, n) -> malloc (n). */
3684 static bool
3685 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3687 gimple *stmt = gsi_stmt (*gsi);
3688 tree arg = gimple_call_arg (stmt, 0);
3689 tree size = gimple_call_arg (stmt, 1);
3691 if (operand_equal_p (arg, null_pointer_node, 0))
3693 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3694 if (fn_malloc)
3696 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3697 replace_call_with_call_and_fold (gsi, repl);
3698 return true;
3701 return false;
3704 /* Fold the non-target builtin at *GSI and return whether any simplification
3705 was made. */
3707 static bool
3708 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3710 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3711 tree callee = gimple_call_fndecl (stmt);
3713 /* Give up for always_inline inline builtins until they are
3714 inlined. */
3715 if (avoid_folding_inline_builtin (callee))
3716 return false;
3718 unsigned n = gimple_call_num_args (stmt);
3719 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3720 switch (fcode)
3722 case BUILT_IN_BCMP:
3723 return gimple_fold_builtin_bcmp (gsi);
3724 case BUILT_IN_BCOPY:
3725 return gimple_fold_builtin_bcopy (gsi);
3726 case BUILT_IN_BZERO:
3727 return gimple_fold_builtin_bzero (gsi);
3729 case BUILT_IN_MEMSET:
3730 return gimple_fold_builtin_memset (gsi,
3731 gimple_call_arg (stmt, 1),
3732 gimple_call_arg (stmt, 2));
3733 case BUILT_IN_MEMCPY:
3734 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3735 gimple_call_arg (stmt, 1), 0);
3736 case BUILT_IN_MEMPCPY:
3737 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3738 gimple_call_arg (stmt, 1), 1);
3739 case BUILT_IN_MEMMOVE:
3740 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3741 gimple_call_arg (stmt, 1), 3);
3742 case BUILT_IN_SPRINTF_CHK:
3743 case BUILT_IN_VSPRINTF_CHK:
3744 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3745 case BUILT_IN_STRCAT_CHK:
3746 return gimple_fold_builtin_strcat_chk (gsi);
3747 case BUILT_IN_STRNCAT_CHK:
3748 return gimple_fold_builtin_strncat_chk (gsi);
3749 case BUILT_IN_STRLEN:
3750 return gimple_fold_builtin_strlen (gsi);
3751 case BUILT_IN_STRCPY:
3752 return gimple_fold_builtin_strcpy (gsi,
3753 gimple_call_arg (stmt, 0),
3754 gimple_call_arg (stmt, 1));
3755 case BUILT_IN_STRNCPY:
3756 return gimple_fold_builtin_strncpy (gsi,
3757 gimple_call_arg (stmt, 0),
3758 gimple_call_arg (stmt, 1),
3759 gimple_call_arg (stmt, 2));
3760 case BUILT_IN_STRCAT:
3761 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3762 gimple_call_arg (stmt, 1));
3763 case BUILT_IN_STRNCAT:
3764 return gimple_fold_builtin_strncat (gsi);
3765 case BUILT_IN_INDEX:
3766 case BUILT_IN_STRCHR:
3767 return gimple_fold_builtin_strchr (gsi, false);
3768 case BUILT_IN_RINDEX:
3769 case BUILT_IN_STRRCHR:
3770 return gimple_fold_builtin_strchr (gsi, true);
3771 case BUILT_IN_STRSTR:
3772 return gimple_fold_builtin_strstr (gsi);
3773 case BUILT_IN_STRCMP:
3774 case BUILT_IN_STRCMP_EQ:
3775 case BUILT_IN_STRCASECMP:
3776 case BUILT_IN_STRNCMP:
3777 case BUILT_IN_STRNCMP_EQ:
3778 case BUILT_IN_STRNCASECMP:
3779 return gimple_fold_builtin_string_compare (gsi);
3780 case BUILT_IN_MEMCHR:
3781 return gimple_fold_builtin_memchr (gsi);
3782 case BUILT_IN_FPUTS:
3783 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3784 gimple_call_arg (stmt, 1), false);
3785 case BUILT_IN_FPUTS_UNLOCKED:
3786 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3787 gimple_call_arg (stmt, 1), true);
3788 case BUILT_IN_MEMCPY_CHK:
3789 case BUILT_IN_MEMPCPY_CHK:
3790 case BUILT_IN_MEMMOVE_CHK:
3791 case BUILT_IN_MEMSET_CHK:
3792 return gimple_fold_builtin_memory_chk (gsi,
3793 gimple_call_arg (stmt, 0),
3794 gimple_call_arg (stmt, 1),
3795 gimple_call_arg (stmt, 2),
3796 gimple_call_arg (stmt, 3),
3797 fcode);
3798 case BUILT_IN_STPCPY:
3799 return gimple_fold_builtin_stpcpy (gsi);
3800 case BUILT_IN_STRCPY_CHK:
3801 case BUILT_IN_STPCPY_CHK:
3802 return gimple_fold_builtin_stxcpy_chk (gsi,
3803 gimple_call_arg (stmt, 0),
3804 gimple_call_arg (stmt, 1),
3805 gimple_call_arg (stmt, 2),
3806 fcode);
3807 case BUILT_IN_STRNCPY_CHK:
3808 case BUILT_IN_STPNCPY_CHK:
3809 return gimple_fold_builtin_stxncpy_chk (gsi,
3810 gimple_call_arg (stmt, 0),
3811 gimple_call_arg (stmt, 1),
3812 gimple_call_arg (stmt, 2),
3813 gimple_call_arg (stmt, 3),
3814 fcode);
3815 case BUILT_IN_SNPRINTF_CHK:
3816 case BUILT_IN_VSNPRINTF_CHK:
3817 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3819 case BUILT_IN_FPRINTF:
3820 case BUILT_IN_FPRINTF_UNLOCKED:
3821 case BUILT_IN_VFPRINTF:
3822 if (n == 2 || n == 3)
3823 return gimple_fold_builtin_fprintf (gsi,
3824 gimple_call_arg (stmt, 0),
3825 gimple_call_arg (stmt, 1),
3826 n == 3
3827 ? gimple_call_arg (stmt, 2)
3828 : NULL_TREE,
3829 fcode);
3830 break;
3831 case BUILT_IN_FPRINTF_CHK:
3832 case BUILT_IN_VFPRINTF_CHK:
3833 if (n == 3 || n == 4)
3834 return gimple_fold_builtin_fprintf (gsi,
3835 gimple_call_arg (stmt, 0),
3836 gimple_call_arg (stmt, 2),
3837 n == 4
3838 ? gimple_call_arg (stmt, 3)
3839 : NULL_TREE,
3840 fcode);
3841 break;
3842 case BUILT_IN_PRINTF:
3843 case BUILT_IN_PRINTF_UNLOCKED:
3844 case BUILT_IN_VPRINTF:
3845 if (n == 1 || n == 2)
3846 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3847 n == 2
3848 ? gimple_call_arg (stmt, 1)
3849 : NULL_TREE, fcode);
3850 break;
3851 case BUILT_IN_PRINTF_CHK:
3852 case BUILT_IN_VPRINTF_CHK:
3853 if (n == 2 || n == 3)
3854 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3855 n == 3
3856 ? gimple_call_arg (stmt, 2)
3857 : NULL_TREE, fcode);
3858 break;
3859 case BUILT_IN_ACC_ON_DEVICE:
3860 return gimple_fold_builtin_acc_on_device (gsi,
3861 gimple_call_arg (stmt, 0));
3862 case BUILT_IN_REALLOC:
3863 return gimple_fold_builtin_realloc (gsi);
3865 default:;
3868 /* Try the generic builtin folder. */
3869 bool ignore = (gimple_call_lhs (stmt) == NULL);
3870 tree result = fold_call_stmt (stmt, ignore);
3871 if (result)
3873 if (ignore)
3874 STRIP_NOPS (result);
3875 else
3876 result = fold_convert (gimple_call_return_type (stmt), result);
3877 if (!update_call_from_tree (gsi, result))
3878 gimplify_and_update_call_from_tree (gsi, result);
3879 return true;
3882 return false;
3885 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3886 function calls to constants, where possible. */
3888 static tree
3889 fold_internal_goacc_dim (const gimple *call)
3891 int axis = oacc_get_ifn_dim_arg (call);
3892 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3893 tree result = NULL_TREE;
3894 tree type = TREE_TYPE (gimple_call_lhs (call));
3896 switch (gimple_call_internal_fn (call))
3898 case IFN_GOACC_DIM_POS:
3899 /* If the size is 1, we know the answer. */
3900 if (size == 1)
3901 result = build_int_cst (type, 0);
3902 break;
3903 case IFN_GOACC_DIM_SIZE:
3904 /* If the size is not dynamic, we know the answer. */
3905 if (size)
3906 result = build_int_cst (type, size);
3907 break;
3908 default:
3909 break;
3912 return result;
3915 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3916 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3917 &var where var is only addressable because of such calls. */
3919 bool
3920 optimize_atomic_compare_exchange_p (gimple *stmt)
3922 if (gimple_call_num_args (stmt) != 6
3923 || !flag_inline_atomics
3924 || !optimize
3925 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3926 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3927 || !gimple_vdef (stmt)
3928 || !gimple_vuse (stmt))
3929 return false;
3931 tree fndecl = gimple_call_fndecl (stmt);
3932 switch (DECL_FUNCTION_CODE (fndecl))
3934 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3935 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3936 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3937 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3938 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3939 break;
3940 default:
3941 return false;
3944 tree expected = gimple_call_arg (stmt, 1);
3945 if (TREE_CODE (expected) != ADDR_EXPR
3946 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3947 return false;
3949 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3950 if (!is_gimple_reg_type (etype)
3951 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3952 || TREE_THIS_VOLATILE (etype)
3953 || VECTOR_TYPE_P (etype)
3954 || TREE_CODE (etype) == COMPLEX_TYPE
3955 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3956 might not preserve all the bits. See PR71716. */
3957 || SCALAR_FLOAT_TYPE_P (etype)
3958 || maybe_ne (TYPE_PRECISION (etype),
3959 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3960 return false;
3962 tree weak = gimple_call_arg (stmt, 3);
3963 if (!integer_zerop (weak) && !integer_onep (weak))
3964 return false;
3966 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3967 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3968 machine_mode mode = TYPE_MODE (itype);
3970 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3971 == CODE_FOR_nothing
3972 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3973 return false;
3975 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
3976 return false;
3978 return true;
3981 /* Fold
3982 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3983 into
3984 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3985 i = IMAGPART_EXPR <t>;
3986 r = (_Bool) i;
3987 e = REALPART_EXPR <t>; */
3989 void
3990 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3992 gimple *stmt = gsi_stmt (*gsi);
3993 tree fndecl = gimple_call_fndecl (stmt);
3994 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3995 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3996 tree ctype = build_complex_type (itype);
3997 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3998 bool throws = false;
3999 edge e = NULL;
4000 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4001 expected);
4002 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4003 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4004 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4006 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4007 build1 (VIEW_CONVERT_EXPR, itype,
4008 gimple_assign_lhs (g)));
4009 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4011 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4012 + int_size_in_bytes (itype);
4013 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4014 gimple_call_arg (stmt, 0),
4015 gimple_assign_lhs (g),
4016 gimple_call_arg (stmt, 2),
4017 build_int_cst (integer_type_node, flag),
4018 gimple_call_arg (stmt, 4),
4019 gimple_call_arg (stmt, 5));
4020 tree lhs = make_ssa_name (ctype);
4021 gimple_call_set_lhs (g, lhs);
4022 gimple_set_vdef (g, gimple_vdef (stmt));
4023 gimple_set_vuse (g, gimple_vuse (stmt));
4024 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
4025 tree oldlhs = gimple_call_lhs (stmt);
4026 if (stmt_can_throw_internal (stmt))
4028 throws = true;
4029 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4031 gimple_call_set_nothrow (as_a <gcall *> (g),
4032 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4033 gimple_call_set_lhs (stmt, NULL_TREE);
4034 gsi_replace (gsi, g, true);
4035 if (oldlhs)
4037 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4038 build1 (IMAGPART_EXPR, itype, lhs));
4039 if (throws)
4041 gsi_insert_on_edge_immediate (e, g);
4042 *gsi = gsi_for_stmt (g);
4044 else
4045 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4046 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4047 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4049 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4050 build1 (REALPART_EXPR, itype, lhs));
4051 if (throws && oldlhs == NULL_TREE)
4053 gsi_insert_on_edge_immediate (e, g);
4054 *gsi = gsi_for_stmt (g);
4056 else
4057 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4058 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4060 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4061 VIEW_CONVERT_EXPR,
4062 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4063 gimple_assign_lhs (g)));
4064 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4066 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4067 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4068 *gsi = gsiret;
4071 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4072 doesn't fit into TYPE. The test for overflow should be regardless of
4073 -fwrapv, and even for unsigned types. */
4075 bool
4076 arith_overflowed_p (enum tree_code code, const_tree type,
4077 const_tree arg0, const_tree arg1)
4079 widest2_int warg0 = widest2_int_cst (arg0);
4080 widest2_int warg1 = widest2_int_cst (arg1);
4081 widest2_int wres;
4082 switch (code)
4084 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4085 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4086 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4087 default: gcc_unreachable ();
4089 signop sign = TYPE_SIGN (type);
4090 if (sign == UNSIGNED && wi::neg_p (wres))
4091 return true;
4092 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4095 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4096 The statement may be replaced by another statement, e.g., if the call
4097 simplifies to a constant value. Return true if any changes were made.
4098 It is assumed that the operands have been previously folded. */
4100 static bool
4101 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4103 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4104 tree callee;
4105 bool changed = false;
4106 unsigned i;
4108 /* Fold *& in call arguments. */
4109 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4110 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4112 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4113 if (tmp)
4115 gimple_call_set_arg (stmt, i, tmp);
4116 changed = true;
4120 /* Check for virtual calls that became direct calls. */
4121 callee = gimple_call_fn (stmt);
4122 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4124 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4126 if (dump_file && virtual_method_call_p (callee)
4127 && !possible_polymorphic_call_target_p
4128 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4129 (OBJ_TYPE_REF_EXPR (callee)))))
4131 fprintf (dump_file,
4132 "Type inheritance inconsistent devirtualization of ");
4133 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4134 fprintf (dump_file, " to ");
4135 print_generic_expr (dump_file, callee, TDF_SLIM);
4136 fprintf (dump_file, "\n");
4139 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4140 changed = true;
4142 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4144 bool final;
4145 vec <cgraph_node *>targets
4146 = possible_polymorphic_call_targets (callee, stmt, &final);
4147 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4149 tree lhs = gimple_call_lhs (stmt);
4150 if (dump_enabled_p ())
4152 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4153 "folding virtual function call to %s\n",
4154 targets.length () == 1
4155 ? targets[0]->name ()
4156 : "__builtin_unreachable");
4158 if (targets.length () == 1)
4160 tree fndecl = targets[0]->decl;
4161 gimple_call_set_fndecl (stmt, fndecl);
4162 changed = true;
4163 /* If changing the call to __cxa_pure_virtual
4164 or similar noreturn function, adjust gimple_call_fntype
4165 too. */
4166 if (gimple_call_noreturn_p (stmt)
4167 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4168 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4169 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4170 == void_type_node))
4171 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4172 /* If the call becomes noreturn, remove the lhs. */
4173 if (lhs
4174 && gimple_call_noreturn_p (stmt)
4175 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4176 || should_remove_lhs_p (lhs)))
4178 if (TREE_CODE (lhs) == SSA_NAME)
4180 tree var = create_tmp_var (TREE_TYPE (lhs));
4181 tree def = get_or_create_ssa_default_def (cfun, var);
4182 gimple *new_stmt = gimple_build_assign (lhs, def);
4183 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4185 gimple_call_set_lhs (stmt, NULL_TREE);
4187 maybe_remove_unused_call_args (cfun, stmt);
4189 else
4191 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4192 gimple *new_stmt = gimple_build_call (fndecl, 0);
4193 gimple_set_location (new_stmt, gimple_location (stmt));
4194 /* If the call had a SSA name as lhs morph that into
4195 an uninitialized value. */
4196 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4198 tree var = create_tmp_var (TREE_TYPE (lhs));
4199 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4200 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4201 set_ssa_default_def (cfun, var, lhs);
4203 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4204 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4205 gsi_replace (gsi, new_stmt, false);
4206 return true;
4212 /* Check for indirect calls that became direct calls, and then
4213 no longer require a static chain. */
4214 if (gimple_call_chain (stmt))
4216 tree fn = gimple_call_fndecl (stmt);
4217 if (fn && !DECL_STATIC_CHAIN (fn))
4219 gimple_call_set_chain (stmt, NULL);
4220 changed = true;
4222 else
4224 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4225 if (tmp)
4227 gimple_call_set_chain (stmt, tmp);
4228 changed = true;
4233 if (inplace)
4234 return changed;
4236 /* Check for builtins that CCP can handle using information not
4237 available in the generic fold routines. */
4238 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4240 if (gimple_fold_builtin (gsi))
4241 changed = true;
4243 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4245 changed |= targetm.gimple_fold_builtin (gsi);
4247 else if (gimple_call_internal_p (stmt))
4249 enum tree_code subcode = ERROR_MARK;
4250 tree result = NULL_TREE;
4251 bool cplx_result = false;
4252 tree overflow = NULL_TREE;
4253 switch (gimple_call_internal_fn (stmt))
4255 case IFN_BUILTIN_EXPECT:
4256 result = fold_builtin_expect (gimple_location (stmt),
4257 gimple_call_arg (stmt, 0),
4258 gimple_call_arg (stmt, 1),
4259 gimple_call_arg (stmt, 2),
4260 NULL_TREE);
4261 break;
4262 case IFN_UBSAN_OBJECT_SIZE:
4264 tree offset = gimple_call_arg (stmt, 1);
4265 tree objsize = gimple_call_arg (stmt, 2);
4266 if (integer_all_onesp (objsize)
4267 || (TREE_CODE (offset) == INTEGER_CST
4268 && TREE_CODE (objsize) == INTEGER_CST
4269 && tree_int_cst_le (offset, objsize)))
4271 replace_call_with_value (gsi, NULL_TREE);
4272 return true;
4275 break;
4276 case IFN_UBSAN_PTR:
4277 if (integer_zerop (gimple_call_arg (stmt, 1)))
4279 replace_call_with_value (gsi, NULL_TREE);
4280 return true;
4282 break;
4283 case IFN_UBSAN_BOUNDS:
4285 tree index = gimple_call_arg (stmt, 1);
4286 tree bound = gimple_call_arg (stmt, 2);
4287 if (TREE_CODE (index) == INTEGER_CST
4288 && TREE_CODE (bound) == INTEGER_CST)
4290 index = fold_convert (TREE_TYPE (bound), index);
4291 if (TREE_CODE (index) == INTEGER_CST
4292 && tree_int_cst_le (index, bound))
4294 replace_call_with_value (gsi, NULL_TREE);
4295 return true;
4299 break;
4300 case IFN_GOACC_DIM_SIZE:
4301 case IFN_GOACC_DIM_POS:
4302 result = fold_internal_goacc_dim (stmt);
4303 break;
4304 case IFN_UBSAN_CHECK_ADD:
4305 subcode = PLUS_EXPR;
4306 break;
4307 case IFN_UBSAN_CHECK_SUB:
4308 subcode = MINUS_EXPR;
4309 break;
4310 case IFN_UBSAN_CHECK_MUL:
4311 subcode = MULT_EXPR;
4312 break;
4313 case IFN_ADD_OVERFLOW:
4314 subcode = PLUS_EXPR;
4315 cplx_result = true;
4316 break;
4317 case IFN_SUB_OVERFLOW:
4318 subcode = MINUS_EXPR;
4319 cplx_result = true;
4320 break;
4321 case IFN_MUL_OVERFLOW:
4322 subcode = MULT_EXPR;
4323 cplx_result = true;
4324 break;
4325 default:
4326 break;
4328 if (subcode != ERROR_MARK)
4330 tree arg0 = gimple_call_arg (stmt, 0);
4331 tree arg1 = gimple_call_arg (stmt, 1);
4332 tree type = TREE_TYPE (arg0);
4333 if (cplx_result)
4335 tree lhs = gimple_call_lhs (stmt);
4336 if (lhs == NULL_TREE)
4337 type = NULL_TREE;
4338 else
4339 type = TREE_TYPE (TREE_TYPE (lhs));
4341 if (type == NULL_TREE)
4343 /* x = y + 0; x = y - 0; x = y * 0; */
4344 else if (integer_zerop (arg1))
4345 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4346 /* x = 0 + y; x = 0 * y; */
4347 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4348 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4349 /* x = y - y; */
4350 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4351 result = integer_zero_node;
4352 /* x = y * 1; x = 1 * y; */
4353 else if (subcode == MULT_EXPR && integer_onep (arg1))
4354 result = arg0;
4355 else if (subcode == MULT_EXPR && integer_onep (arg0))
4356 result = arg1;
4357 else if (TREE_CODE (arg0) == INTEGER_CST
4358 && TREE_CODE (arg1) == INTEGER_CST)
4360 if (cplx_result)
4361 result = int_const_binop (subcode, fold_convert (type, arg0),
4362 fold_convert (type, arg1));
4363 else
4364 result = int_const_binop (subcode, arg0, arg1);
4365 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4367 if (cplx_result)
4368 overflow = build_one_cst (type);
4369 else
4370 result = NULL_TREE;
4373 if (result)
4375 if (result == integer_zero_node)
4376 result = build_zero_cst (type);
4377 else if (cplx_result && TREE_TYPE (result) != type)
4379 if (TREE_CODE (result) == INTEGER_CST)
4381 if (arith_overflowed_p (PLUS_EXPR, type, result,
4382 integer_zero_node))
4383 overflow = build_one_cst (type);
4385 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4386 && TYPE_UNSIGNED (type))
4387 || (TYPE_PRECISION (type)
4388 < (TYPE_PRECISION (TREE_TYPE (result))
4389 + (TYPE_UNSIGNED (TREE_TYPE (result))
4390 && !TYPE_UNSIGNED (type)))))
4391 result = NULL_TREE;
4392 if (result)
4393 result = fold_convert (type, result);
4398 if (result)
4400 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4401 result = drop_tree_overflow (result);
4402 if (cplx_result)
4404 if (overflow == NULL_TREE)
4405 overflow = build_zero_cst (TREE_TYPE (result));
4406 tree ctype = build_complex_type (TREE_TYPE (result));
4407 if (TREE_CODE (result) == INTEGER_CST
4408 && TREE_CODE (overflow) == INTEGER_CST)
4409 result = build_complex (ctype, result, overflow);
4410 else
4411 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4412 ctype, result, overflow);
4414 if (!update_call_from_tree (gsi, result))
4415 gimplify_and_update_call_from_tree (gsi, result);
4416 changed = true;
4420 return changed;
4424 /* Return true whether NAME has a use on STMT. */
4426 static bool
4427 has_use_on_stmt (tree name, gimple *stmt)
4429 imm_use_iterator iter;
4430 use_operand_p use_p;
4431 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4432 if (USE_STMT (use_p) == stmt)
4433 return true;
4434 return false;
4437 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4438 gimple_simplify.
4440 Replaces *GSI with the simplification result in RCODE and OPS
4441 and the associated statements in *SEQ. Does the replacement
4442 according to INPLACE and returns true if the operation succeeded. */
static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* The same abnormal-SSA check for the operands of an embedded
     comparison tree in ops[0].  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* A GIMPLE_COND can only carry a comparison, so the simplification
	 result must be massaged into that form.  */
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	/* Result reduced to a bare SSA name: test it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  /* Constant result: the condition is statically known.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Otherwise materialize the result in SEQ and compare it
	     against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* For assignments, rewrite the RHS in place when allowed: always
	 when not INPLACE, or when the new RHS needs no more operand
	 slots than the statement already has.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* Same call with possibly-simplified arguments: update them
	 in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Replace the whole statement with SEQ computing the LHS,
	     preserving virtual operands.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
4578 /* Canonicalize MEM_REFs invariant address operand after propagation. */
static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Strip an outer ADDR_EXPR and canonicalize what it wraps.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the element's bit position and make sure the
		     access stays within the vector before rewriting.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					(TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Walk down to the base of the reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
4699 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4700 distinguishes both cases. */
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize reference arguments and the LHS of the call.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory operands of asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* FALLTHRU */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */
5025 tree
5026 no_follow_ssa_edges (tree)
5028 return NULL_TREE;
5031 /* Valueization callback that ends up following single-use SSA edges only. */
5033 tree
5034 follow_single_use_edges (tree val)
5036 if (TREE_CODE (val) == SSA_NAME
5037 && !has_single_use (val))
5038 return NULL_TREE;
5039 return val;
5042 /* Valueization callback that follows all SSA edges. */
5044 tree
5045 follow_all_ssa_edges (tree val)
5047 return val;
5050 /* Fold the statement pointed to by GSI. In some cases, this function may
5051 replace the whole statement with a new one. Returns true iff folding
5052 makes any changes.
5053 The statement pointed to by GSI should be in valid gimple form but may
5054 be in unfolded state as resulting from for example constant propagation
5055 which can produce *&x = 0. */
5057 bool
5058 fold_stmt (gimple_stmt_iterator *gsi)
5060 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5063 bool
5064 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5066 return fold_stmt_1 (gsi, false, valueize);
5069 /* Perform the minimal folding on statement *GSI. Only operations like
5070 *&x created by constant propagation are handled. The statement cannot
5071 be replaced with a new one. Return true if the statement was
5072 changed, false otherwise.
5073 The statement *GSI should be in valid gimple form but may
5074 be in unfolded state as resulting from for example constant propagation
5075 which can produce *&x = 0. */
5077 bool
5078 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5080 gimple *stmt = gsi_stmt (*gsi);
5081 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5082 gcc_assert (gsi_stmt (*gsi) == stmt);
5083 return changed;
5086 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5087 if EXPR is null or we don't know how.
5088 If non-null, the result always has boolean type. */
5090 static tree
5091 canonicalize_bool (tree expr, bool invert)
5093 if (!expr)
5094 return NULL_TREE;
5095 else if (invert)
5097 if (integer_nonzerop (expr))
5098 return boolean_false_node;
5099 else if (integer_zerop (expr))
5100 return boolean_true_node;
5101 else if (TREE_CODE (expr) == SSA_NAME)
5102 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5103 build_int_cst (TREE_TYPE (expr), 0));
5104 else if (COMPARISON_CLASS_P (expr))
5105 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5106 boolean_type_node,
5107 TREE_OPERAND (expr, 0),
5108 TREE_OPERAND (expr, 1));
5109 else
5110 return NULL_TREE;
5112 else
5114 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5115 return expr;
5116 if (integer_nonzerop (expr))
5117 return boolean_true_node;
5118 else if (integer_zerop (expr))
5119 return boolean_false_node;
5120 else if (TREE_CODE (expr) == SSA_NAME)
5121 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5122 build_int_cst (TREE_TYPE (expr), 0));
5123 else if (COMPARISON_CLASS_P (expr))
5124 return fold_build2 (TREE_CODE (expr),
5125 boolean_type_node,
5126 TREE_OPERAND (expr, 0),
5127 TREE_OPERAND (expr, 1));
5128 else
5129 return NULL_TREE;
5133 /* Check to see if a boolean expression EXPR is logically equivalent to the
5134 comparison (OP1 CODE OP2). Check for various identities involving
5135 SSA_NAMEs. */
5137 static bool
5138 same_bool_comparison_p (const_tree expr, enum tree_code code,
5139 const_tree op1, const_tree op2)
5141 gimple *s;
5143 /* The obvious case. */
5144 if (TREE_CODE (expr) == code
5145 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5146 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5147 return true;
5149 /* Check for comparing (name, name != 0) and the case where expr
5150 is an SSA_NAME with a definition matching the comparison. */
5151 if (TREE_CODE (expr) == SSA_NAME
5152 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5154 if (operand_equal_p (expr, op1, 0))
5155 return ((code == NE_EXPR && integer_zerop (op2))
5156 || (code == EQ_EXPR && integer_nonzerop (op2)));
5157 s = SSA_NAME_DEF_STMT (expr);
5158 if (is_gimple_assign (s)
5159 && gimple_assign_rhs_code (s) == code
5160 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5161 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5162 return true;
5165 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5166 of name is a comparison, recurse. */
5167 if (TREE_CODE (op1) == SSA_NAME
5168 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5170 s = SSA_NAME_DEF_STMT (op1);
5171 if (is_gimple_assign (s)
5172 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5174 enum tree_code c = gimple_assign_rhs_code (s);
5175 if ((c == NE_EXPR && integer_zerop (op2))
5176 || (c == EQ_EXPR && integer_nonzerop (op2)))
5177 return same_bool_comparison_p (expr, c,
5178 gimple_assign_rhs1 (s),
5179 gimple_assign_rhs2 (s));
5180 if ((c == EQ_EXPR && integer_zerop (op2))
5181 || (c == NE_EXPR && integer_nonzerop (op2)))
5182 return same_bool_comparison_p (expr,
5183 invert_tree_comparison (c, false),
5184 gimple_assign_rhs1 (s),
5185 gimple_assign_rhs2 (s));
5188 return false;
5191 /* Check to see if two boolean expressions OP1 and OP2 are logically
5192 equivalent. */
5194 static bool
5195 same_bool_result_p (const_tree op1, const_tree op2)
5197 /* Simple cases first. */
5198 if (operand_equal_p (op1, op2, 0))
5199 return true;
5201 /* Check the cases where at least one of the operands is a comparison.
5202 These are a bit smarter than operand_equal_p in that they apply some
5203 identifies on SSA_NAMEs. */
5204 if (COMPARISON_CLASS_P (op2)
5205 && same_bool_comparison_p (op1, TREE_CODE (op2),
5206 TREE_OPERAND (op2, 0),
5207 TREE_OPERAND (op2, 1)))
5208 return true;
5209 if (COMPARISON_CLASS_P (op1)
5210 && same_bool_comparison_p (op2, TREE_CODE (op1),
5211 TREE_OPERAND (op1, 0),
5212 TREE_OPERAND (op1, 1)))
5213 return true;
5215 /* Default case. */
5216 return false;
5219 /* Forward declarations for some mutually recursive functions. */
5221 static tree
5222 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5223 enum tree_code code2, tree op2a, tree op2b);
5224 static tree
5225 and_var_with_comparison (tree var, bool invert,
5226 enum tree_code code2, tree op2a, tree op2b);
5227 static tree
5228 and_var_with_comparison_1 (gimple *stmt,
5229 enum tree_code code2, tree op2a, tree op2b);
5230 static tree
5231 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5232 enum tree_code code2, tree op2a, tree op2b);
5233 static tree
5234 or_var_with_comparison (tree var, bool invert,
5235 enum tree_code code2, tree op2a, tree op2b);
5236 static tree
5237 or_var_with_comparison_1 (gimple *stmt,
5238 enum tree_code code2, tree op2a, tree op2b);
5240 /* Helper function for and_comparisons_1: try to simplify the AND of the
5241 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5242 If INVERT is true, invert the value of the VAR before doing the AND.
5243 Return NULL_EXPR if we can't simplify this to a single expression. */
5245 static tree
5246 and_var_with_comparison (tree var, bool invert,
5247 enum tree_code code2, tree op2a, tree op2b)
5249 tree t;
5250 gimple *stmt = SSA_NAME_DEF_STMT (var);
5252 /* We can only deal with variables whose definitions are assignments. */
5253 if (!is_gimple_assign (stmt))
5254 return NULL_TREE;
5256 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5257 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5258 Then we only have to consider the simpler non-inverted cases. */
5259 if (invert)
5260 t = or_var_with_comparison_1 (stmt,
5261 invert_tree_comparison (code2, false),
5262 op2a, op2b);
5263 else
5264 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5265 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.
   This is one of a set of mutually recursive helpers: it recurses via
   and_comparisons_1 and and_var_with_comparison below.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) test OP2A for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... while (op2a == 0) and (op2a != 1) test it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)).
	 Note the assignments to S and T inside the condition.  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)).  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}
      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* The NE test is redundant; the second test alone suffices.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      /* Check for singleton ranges: (x <= c AND x >= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for falsehood.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* For non-boolean PHIs, fall through to give up.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5685 /* Try to simplify the AND of two comparisons, specified by
5686 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5687 If this can be simplified to a single expression (without requiring
5688 introducing more SSA variables to hold intermediate values),
5689 return the resulting tree. Otherwise return NULL_TREE.
5690 If the result expression is non-null, it has boolean type. */
5692 tree
5693 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5694 enum tree_code code2, tree op2a, tree op2b)
5696 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5697 if (t)
5698 return t;
5699 else
5700 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5703 /* Helper function for or_comparisons_1: try to simplify the OR of the
5704 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5705 If INVERT is true, invert the value of VAR before doing the OR.
5706 Return NULL_EXPR if we can't simplify this to a single expression. */
5708 static tree
5709 or_var_with_comparison (tree var, bool invert,
5710 enum tree_code code2, tree op2a, tree op2b)
5712 tree t;
5713 gimple *stmt = SSA_NAME_DEF_STMT (var);
5715 /* We can only deal with variables whose definitions are assignments. */
5716 if (!is_gimple_assign (stmt))
5717 return NULL_TREE;
5719 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5720 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5721 Then we only have to consider the simpler non-inverted cases. */
5722 if (invert)
5723 t = and_var_with_comparison_1 (stmt,
5724 invert_tree_comparison (code2, false),
5725 op2a, op2b);
5726 else
5727 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5728 return canonicalize_bool (t, invert);
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.
   Mirror image of and_var_with_comparison_1 above; recurses via
   or_comparisons_1 and or_var_with_comparison.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) test OP2A for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... while (op2a == 0) and (op2a != 1) test it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b)).
	 Note the assignments to S and T inside the condition.  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)).  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}
      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
      /* Check for singleton ranges: (x < c OR x > c) => x != c.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for falsehood.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* For non-boolean PHIs, fall through to give up.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6149 /* Try to simplify the OR of two comparisons, specified by
6150 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6151 If this can be simplified to a single expression (without requiring
6152 introducing more SSA variables to hold intermediate values),
6153 return the resulting tree. Otherwise return NULL_TREE.
6154 If the result expression is non-null, it has boolean type. */
6156 tree
6157 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6158 enum tree_code code2, tree op2a, tree op2b)
6160 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6161 if (t)
6162 return t;
6163 else
6164 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6168 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6170 Either NULL_TREE, a simplified but non-constant or a constant
6171 is returned.
6173 ??? This should go into a gimple-fold-inline.h file to be eventually
6174 privatized with the single valueize function used in the various TUs
6175 to avoid the indirect function call overhead. */
6177 tree
6178 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6179 tree (*gvalueize) (tree))
6181 gimple_match_op res_op;
6182 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6183 edges if there are intermediate VARYING defs. For this reason
6184 do not follow SSA edges here even though SCCVN can technically
6185 just deal fine with that. */
6186 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6188 tree res = NULL_TREE;
6189 if (gimple_simplified_result_is_gimple_val (&res_op))
6190 res = res_op.ops[0];
6191 else if (mprts_hook)
6192 res = mprts_hook (&res_op);
6193 if (res)
6195 if (dump_file && dump_flags & TDF_DETAILS)
6197 fprintf (dump_file, "Match-and-simplified ");
6198 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6199 fprintf (dump_file, " to ");
6200 print_generic_expr (dump_file, res);
6201 fprintf (dump_file, "\n");
6203 return res;
6207 location_t loc = gimple_location (stmt);
6208 switch (gimple_code (stmt))
6210 case GIMPLE_ASSIGN:
6212 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6214 switch (get_gimple_rhs_class (subcode))
6216 case GIMPLE_SINGLE_RHS:
6218 tree rhs = gimple_assign_rhs1 (stmt);
6219 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6221 if (TREE_CODE (rhs) == SSA_NAME)
6223 /* If the RHS is an SSA_NAME, return its known constant value,
6224 if any. */
6225 return (*valueize) (rhs);
6227 /* Handle propagating invariant addresses into address
6228 operations. */
6229 else if (TREE_CODE (rhs) == ADDR_EXPR
6230 && !is_gimple_min_invariant (rhs))
6232 poly_int64 offset = 0;
6233 tree base;
6234 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6235 &offset,
6236 valueize);
6237 if (base
6238 && (CONSTANT_CLASS_P (base)
6239 || decl_address_invariant_p (base)))
6240 return build_invariant_address (TREE_TYPE (rhs),
6241 base, offset);
6243 else if (TREE_CODE (rhs) == CONSTRUCTOR
6244 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6245 && known_eq (CONSTRUCTOR_NELTS (rhs),
6246 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6248 unsigned i, nelts;
6249 tree val;
6251 nelts = CONSTRUCTOR_NELTS (rhs);
6252 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6253 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6255 val = (*valueize) (val);
6256 if (TREE_CODE (val) == INTEGER_CST
6257 || TREE_CODE (val) == REAL_CST
6258 || TREE_CODE (val) == FIXED_CST)
6259 vec.quick_push (val);
6260 else
6261 return NULL_TREE;
6264 return vec.build ();
6266 if (subcode == OBJ_TYPE_REF)
6268 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6269 /* If callee is constant, we can fold away the wrapper. */
6270 if (is_gimple_min_invariant (val))
6271 return val;
6274 if (kind == tcc_reference)
6276 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6277 || TREE_CODE (rhs) == REALPART_EXPR
6278 || TREE_CODE (rhs) == IMAGPART_EXPR)
6279 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6281 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6282 return fold_unary_loc (EXPR_LOCATION (rhs),
6283 TREE_CODE (rhs),
6284 TREE_TYPE (rhs), val);
6286 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6287 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6289 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6290 return fold_ternary_loc (EXPR_LOCATION (rhs),
6291 TREE_CODE (rhs),
6292 TREE_TYPE (rhs), val,
6293 TREE_OPERAND (rhs, 1),
6294 TREE_OPERAND (rhs, 2));
6296 else if (TREE_CODE (rhs) == MEM_REF
6297 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6299 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6300 if (TREE_CODE (val) == ADDR_EXPR
6301 && is_gimple_min_invariant (val))
6303 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6304 unshare_expr (val),
6305 TREE_OPERAND (rhs, 1));
6306 if (tem)
6307 rhs = tem;
6310 return fold_const_aggregate_ref_1 (rhs, valueize);
6312 else if (kind == tcc_declaration)
6313 return get_symbol_constant_value (rhs);
6314 return rhs;
6317 case GIMPLE_UNARY_RHS:
6318 return NULL_TREE;
6320 case GIMPLE_BINARY_RHS:
6321 /* Translate &x + CST into an invariant form suitable for
6322 further propagation. */
6323 if (subcode == POINTER_PLUS_EXPR)
6325 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6326 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6327 if (TREE_CODE (op0) == ADDR_EXPR
6328 && TREE_CODE (op1) == INTEGER_CST)
6330 tree off = fold_convert (ptr_type_node, op1);
6331 return build_fold_addr_expr_loc
6332 (loc,
6333 fold_build2 (MEM_REF,
6334 TREE_TYPE (TREE_TYPE (op0)),
6335 unshare_expr (op0), off));
6338 /* Canonicalize bool != 0 and bool == 0 appearing after
6339 valueization. While gimple_simplify handles this
6340 it can get confused by the ~X == 1 -> X == 0 transform
6341 which we cant reduce to a SSA name or a constant
6342 (and we have no way to tell gimple_simplify to not
6343 consider those transforms in the first place). */
6344 else if (subcode == EQ_EXPR
6345 || subcode == NE_EXPR)
6347 tree lhs = gimple_assign_lhs (stmt);
6348 tree op0 = gimple_assign_rhs1 (stmt);
6349 if (useless_type_conversion_p (TREE_TYPE (lhs),
6350 TREE_TYPE (op0)))
6352 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6353 op0 = (*valueize) (op0);
6354 if (TREE_CODE (op0) == INTEGER_CST)
6355 std::swap (op0, op1);
6356 if (TREE_CODE (op1) == INTEGER_CST
6357 && ((subcode == NE_EXPR && integer_zerop (op1))
6358 || (subcode == EQ_EXPR && integer_onep (op1))))
6359 return op0;
6362 return NULL_TREE;
6364 case GIMPLE_TERNARY_RHS:
6366 /* Handle ternary operators that can appear in GIMPLE form. */
6367 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6368 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6369 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6370 return fold_ternary_loc (loc, subcode,
6371 gimple_expr_type (stmt), op0, op1, op2);
6374 default:
6375 gcc_unreachable ();
6379 case GIMPLE_CALL:
6381 tree fn;
6382 gcall *call_stmt = as_a <gcall *> (stmt);
6384 if (gimple_call_internal_p (stmt))
6386 enum tree_code subcode = ERROR_MARK;
6387 switch (gimple_call_internal_fn (stmt))
6389 case IFN_UBSAN_CHECK_ADD:
6390 subcode = PLUS_EXPR;
6391 break;
6392 case IFN_UBSAN_CHECK_SUB:
6393 subcode = MINUS_EXPR;
6394 break;
6395 case IFN_UBSAN_CHECK_MUL:
6396 subcode = MULT_EXPR;
6397 break;
6398 case IFN_BUILTIN_EXPECT:
6400 tree arg0 = gimple_call_arg (stmt, 0);
6401 tree op0 = (*valueize) (arg0);
6402 if (TREE_CODE (op0) == INTEGER_CST)
6403 return op0;
6404 return NULL_TREE;
6406 default:
6407 return NULL_TREE;
6409 tree arg0 = gimple_call_arg (stmt, 0);
6410 tree arg1 = gimple_call_arg (stmt, 1);
6411 tree op0 = (*valueize) (arg0);
6412 tree op1 = (*valueize) (arg1);
6414 if (TREE_CODE (op0) != INTEGER_CST
6415 || TREE_CODE (op1) != INTEGER_CST)
6417 switch (subcode)
6419 case MULT_EXPR:
6420 /* x * 0 = 0 * x = 0 without overflow. */
6421 if (integer_zerop (op0) || integer_zerop (op1))
6422 return build_zero_cst (TREE_TYPE (arg0));
6423 break;
6424 case MINUS_EXPR:
6425 /* y - y = 0 without overflow. */
6426 if (operand_equal_p (op0, op1, 0))
6427 return build_zero_cst (TREE_TYPE (arg0));
6428 break;
6429 default:
6430 break;
6433 tree res
6434 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6435 if (res
6436 && TREE_CODE (res) == INTEGER_CST
6437 && !TREE_OVERFLOW (res))
6438 return res;
6439 return NULL_TREE;
6442 fn = (*valueize) (gimple_call_fn (stmt));
6443 if (TREE_CODE (fn) == ADDR_EXPR
6444 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6445 && gimple_builtin_call_types_compatible_p (stmt,
6446 TREE_OPERAND (fn, 0)))
6448 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6449 tree retval;
6450 unsigned i;
6451 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6452 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6453 retval = fold_builtin_call_array (loc,
6454 gimple_call_return_type (call_stmt),
6455 fn, gimple_call_num_args (stmt), args);
6456 if (retval)
6458 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6459 STRIP_NOPS (retval);
6460 retval = fold_convert (gimple_call_return_type (call_stmt),
6461 retval);
6463 return retval;
6465 return NULL_TREE;
6468 default:
6469 return NULL_TREE;
6473 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6474 Returns NULL_TREE if folding to a constant is not possible, otherwise
6475 returns a constant according to is_gimple_min_invariant. */
6477 tree
6478 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6480 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6481 if (res && is_gimple_min_invariant (res))
6482 return res;
6483 return NULL_TREE;
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF offset into *BIT_OFFSET; give up if the
	 combined offset does not fit a signed HOST_WIDE_INT.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not change the underlying storage;
	 look through it.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      /* Only handle references with a known, exact extent.  */
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access not larger than size of array element.  */
  if (type
      && (!TYPE_SIZE_UNIT (type)
	  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
	  || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
	  || elt_size == 0))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.
     NOTE(review): when TYPE is null the elt_size == 0 guard above is
     skipped, so a zero-sized element would divide by zero here —
     presumably such CTORs never reach this point; verify.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_size.to_uhwi () * BITS_PER_UNIT;
	}

      *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed
   member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }
  /* Memory not explicitly mentioned in constructor is 0.  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When SIZE
   is zero, attempt to fold a reference to the entire subobject
   which OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, set *SUBOFF to
   the bit offset of the accessed subobject.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Callers that do not care about *SUBOFF get a throwaway
	 accumulator so the helpers can update it unconditionally.  */
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl, suboff);

      return fold_nonarray_ctor_reference (type, ctor, offset, size,
					   from_decl, suboff);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold through a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (woffset.to_shwi (&offset))
		{
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
6938 tree
6939 fold_const_aggregate_ref (tree t)
6941 return fold_const_aggregate_ref_1 (t, NULL);
6944 /* Lookup virtual method with index TOKEN in a virtual table V
6945 at OFFSET.
6946 Set CAN_REFER if non-NULL to false if method
6947 is not referable or if the virtual table is ill-formed (such as rewriten
6948 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6950 tree
6951 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6952 tree v,
6953 unsigned HOST_WIDE_INT offset,
6954 bool *can_refer)
6956 tree vtable = v, init, fn;
6957 unsigned HOST_WIDE_INT size;
6958 unsigned HOST_WIDE_INT elt_size, access_index;
6959 tree domain_type;
6961 if (can_refer)
6962 *can_refer = true;
6964 /* First of all double check we have virtual table. */
6965 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6967 /* Pass down that we lost track of the target. */
6968 if (can_refer)
6969 *can_refer = false;
6970 return NULL_TREE;
6973 init = ctor_for_folding (v);
6975 /* The virtual tables should always be born with constructors
6976 and we always should assume that they are avaialble for
6977 folding. At the moment we do not stream them in all cases,
6978 but it should never happen that ctor seem unreachable. */
6979 gcc_assert (init);
6980 if (init == error_mark_node)
6982 /* Pass down that we lost track of the target. */
6983 if (can_refer)
6984 *can_refer = false;
6985 return NULL_TREE;
6987 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6988 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6989 offset *= BITS_PER_UNIT;
6990 offset += token * size;
6992 /* Lookup the value in the constructor that is assumed to be array.
6993 This is equivalent to
6994 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6995 offset, size, NULL);
6996 but in a constant time. We expect that frontend produced a simple
6997 array without indexed initializers. */
6999 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7000 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7001 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7002 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7004 access_index = offset / BITS_PER_UNIT / elt_size;
7005 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7007 /* The C++ FE can now produce indexed fields, and we check if the indexes
7008 match. */
7009 if (access_index < CONSTRUCTOR_NELTS (init))
7011 fn = CONSTRUCTOR_ELT (init, access_index)->value;
7012 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7013 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
7014 STRIP_NOPS (fn);
7016 else
7017 fn = NULL;
7019 /* For type inconsistent program we may end up looking up virtual method
7020 in virtual table that does not contain TOKEN entries. We may overrun
7021 the virtual table and pick up a constant or RTTI info pointer.
7022 In any case the call is undefined. */
7023 if (!fn
7024 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7025 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7026 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7027 else
7029 fn = TREE_OPERAND (fn, 0);
7031 /* When cgraph node is missing and function is not public, we cannot
7032 devirtualize. This can happen in WHOPR when the actual method
7033 ends up in other partition, because we found devirtualization
7034 possibility too late. */
7035 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7037 if (can_refer)
7039 *can_refer = false;
7040 return fn;
7042 return NULL_TREE;
7046 /* Make sure we create a cgraph node for functions we'll reference.
7047 They can be non-existent if the reference comes from an entry
7048 of an external vtable for example. */
7049 cgraph_node::get_create (fn);
7051 return fn;
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* Resolve the vtable pointer expression to the underlying vtable
     VAR_DECL and constant byte offset into it.  */
  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the access lies within the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7204 /* Return true if CODE is an operation that when operating on signed
7205 integer types involves undefined behavior on overflow and the
7206 operation can be expressed with unsigned arithmetic. */
7208 bool
7209 arith_code_with_undefined_signed_overflow (tree_code code)
7211 switch (code)
7213 case PLUS_EXPR:
7214 case MINUS_EXPR:
7215 case MULT_EXPR:
7216 case NEGATE_EXPR:
7217 case POINTER_PLUS_EXPR:
7218 return true;
7219 default:
7220 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each RHS operand (ops 1..n-1) to the unsigned type,
     accumulating the conversion statements into STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer addition becomes plain unsigned addition once the operands
     are converted.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original LHS type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7262 /* The valueization hook we use for the gimple_build API simplification.
7263 This makes us match fold_buildN behavior by only combining with
7264 statements in the sequence(s) we are currently building. */
7266 static tree
7267 gimple_build_valueize (tree op)
7269 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7270 return op;
7271 return NULL_TREE;
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      /* These codes are references and need a wrapped operand rather
	 than a plain unary assignment.  */
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      /* BIT_FIELD_REF is a reference and needs a wrapped operand.  */
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
7346 /* Build the call FN (ARG0) with a result of type TYPE
7347 (or no result if TYPE is void) with location LOC,
7348 simplifying it first if possible. Returns the built
7349 expression value (or NULL_TREE if TYPE is void) and appends
7350 statements possibly defining it to SEQ. */
7352 tree
7353 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7354 tree type, tree arg0)
7356 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7357 if (!res)
7359 gcall *stmt;
7360 if (internal_fn_p (fn))
7361 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7362 else
7364 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7365 stmt = gimple_build_call (decl, 1, arg0);
7367 if (!VOID_TYPE_P (type))
7369 res = create_tmp_reg_or_ssa_name (type);
7370 gimple_call_set_lhs (stmt, res);
7372 gimple_set_location (stmt, loc);
7373 gimple_seq_add_stmt_without_update (seq, stmt);
7375 return res;
7378 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7379 (or no result if TYPE is void) with location LOC,
7380 simplifying it first if possible. Returns the built
7381 expression value (or NULL_TREE if TYPE is void) and appends
7382 statements possibly defining it to SEQ. */
7384 tree
7385 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7386 tree type, tree arg0, tree arg1)
7388 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7389 if (!res)
7391 gcall *stmt;
7392 if (internal_fn_p (fn))
7393 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7394 else
7396 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7397 stmt = gimple_build_call (decl, 2, arg0, arg1);
7399 if (!VOID_TYPE_P (type))
7401 res = create_tmp_reg_or_ssa_name (type);
7402 gimple_call_set_lhs (stmt, res);
7404 gimple_set_location (stmt, loc);
7405 gimple_seq_add_stmt_without_update (seq, stmt);
7407 return res;
7410 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7411 (or no result if TYPE is void) with location LOC,
7412 simplifying it first if possible. Returns the built
7413 expression value (or NULL_TREE if TYPE is void) and appends
7414 statements possibly defining it to SEQ. */
7416 tree
7417 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7418 tree type, tree arg0, tree arg1, tree arg2)
7420 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7421 seq, gimple_build_valueize);
7422 if (!res)
7424 gcall *stmt;
7425 if (internal_fn_p (fn))
7426 stmt = gimple_build_call_internal (as_internal_fn (fn),
7427 3, arg0, arg1, arg2);
7428 else
7430 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7431 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7433 if (!VOID_TYPE_P (type))
7435 res = create_tmp_reg_or_ssa_name (type);
7436 gimple_call_set_lhs (stmt, res);
7438 gimple_set_location (stmt, loc);
7439 gimple_seq_add_stmt_without_update (seq, stmt);
7441 return res;
7444 /* Build the conversion (TYPE) OP with a result of type TYPE
7445 with location LOC if such conversion is neccesary in GIMPLE,
7446 simplifying it first.
7447 Returns the built expression value and appends
7448 statements possibly defining it to SEQ. */
7450 tree
7451 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7453 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7454 return op;
7455 return gimple_build (seq, loc, NOP_EXPR, type, op);
7458 /* Build the conversion (ptrofftype) OP with a result of a type
7459 compatible with ptrofftype with location LOC if such conversion
7460 is neccesary in GIMPLE, simplifying it first.
7461 Returns the built expression value and appends
7462 statements possibly defining it to SEQ. */
7464 tree
7465 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7467 if (ptrofftype_p (TREE_TYPE (op)))
7468 return op;
7469 return gimple_convert (seq, loc, sizetype, op);
7472 /* Build a vector of type TYPE in which each element has the value OP.
7473 Return a gimple value for the result, appending any new statements
7474 to SEQ. */
7476 tree
7477 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7478 tree op)
7480 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7481 && !CONSTANT_CLASS_P (op))
7482 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7484 tree res, vec = build_vector_from_val (type, op);
7485 if (is_gimple_val (vec))
7486 return vec;
7487 if (gimple_in_ssa_p (cfun))
7488 res = make_ssa_name (type);
7489 else
7490 res = create_tmp_reg (type);
7491 gimple *stmt = gimple_build_assign (res, vec);
7492 gimple_set_location (stmt, loc);
7493 gimple_seq_add_stmt_without_update (seq, stmt);
7494 return res;
7497 /* Build a vector from BUILDER, handling the case in which some elements
7498 are non-constant. Return a gimple value for the result, appending any
7499 new instructions to SEQ.
7501 BUILDER must not have a stepped encoding on entry. This is because
7502 the function is not geared up to handle the arithmetic that would
7503 be needed in the variable case, and any code building a vector that
7504 is known to be constant should use BUILDER->build () directly. */
7506 tree
7507 gimple_build_vector (gimple_seq *seq, location_t loc,
7508 tree_vector_builder *builder)
7510 gcc_assert (builder->nelts_per_pattern () <= 2);
7511 unsigned int encoded_nelts = builder->encoded_nelts ();
7512 for (unsigned int i = 0; i < encoded_nelts; ++i)
7513 if (!TREE_CONSTANT ((*builder)[i]))
7515 tree type = builder->type ();
7516 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7517 vec<constructor_elt, va_gc> *v;
7518 vec_alloc (v, nelts);
7519 for (i = 0; i < nelts; ++i)
7520 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7522 tree res;
7523 if (gimple_in_ssa_p (cfun))
7524 res = make_ssa_name (type);
7525 else
7526 res = create_tmp_reg (type);
7527 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7528 gimple_set_location (stmt, loc);
7529 gimple_seq_add_stmt_without_update (seq, stmt);
7530 return res;
7532 return builder->build ();
7535 /* Return true if the result of assignment STMT is known to be non-negative.
7536 If the return value is based on the assumption that signed overflow is
7537 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7538 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7540 static bool
7541 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7542 int depth)
7544 enum tree_code code = gimple_assign_rhs_code (stmt);
7545 switch (get_gimple_rhs_class (code))
7547 case GIMPLE_UNARY_RHS:
7548 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7549 gimple_expr_type (stmt),
7550 gimple_assign_rhs1 (stmt),
7551 strict_overflow_p, depth);
7552 case GIMPLE_BINARY_RHS:
7553 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7554 gimple_expr_type (stmt),
7555 gimple_assign_rhs1 (stmt),
7556 gimple_assign_rhs2 (stmt),
7557 strict_overflow_p, depth);
7558 case GIMPLE_TERNARY_RHS:
7559 return false;
7560 case GIMPLE_SINGLE_RHS:
7561 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7562 strict_overflow_p, depth);
7563 case GIMPLE_INVALID_RHS:
7564 break;
7566 gcc_unreachable ();
7569 /* Return true if return value of call STMT is known to be non-negative.
7570 If the return value is based on the assumption that signed overflow is
7571 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7572 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7574 static bool
7575 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7576 int depth)
7578 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7579 gimple_call_arg (stmt, 0) : NULL_TREE;
7580 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7581 gimple_call_arg (stmt, 1) : NULL_TREE;
7583 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7584 gimple_call_combined_fn (stmt),
7585 arg0,
7586 arg1,
7587 strict_overflow_p, depth);
7590 /* Return true if return value of call STMT is known to be non-negative.
7591 If the return value is based on the assumption that signed overflow is
7592 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7593 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7595 static bool
7596 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7597 int depth)
7599 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7601 tree arg = gimple_phi_arg_def (stmt, i);
7602 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7603 return false;
7605 return true;
7608 /* Return true if STMT is known to compute a non-negative value.
7609 If the return value is based on the assumption that signed overflow is
7610 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7611 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7613 bool
7614 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7615 int depth)
7617 switch (gimple_code (stmt))
7619 case GIMPLE_ASSIGN:
7620 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7621 depth);
7622 case GIMPLE_CALL:
7623 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7624 depth);
7625 case GIMPLE_PHI:
7626 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7627 depth);
7628 default:
7629 return false;
7633 /* Return true if the floating-point value computed by assignment STMT
7634 is known to have an integer value. We also allow +Inf, -Inf and NaN
7635 to be considered integer values. Return false for signaling NaN.
7637 DEPTH is the current nesting depth of the query. */
7639 static bool
7640 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7642 enum tree_code code = gimple_assign_rhs_code (stmt);
7643 switch (get_gimple_rhs_class (code))
7645 case GIMPLE_UNARY_RHS:
7646 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7647 gimple_assign_rhs1 (stmt), depth);
7648 case GIMPLE_BINARY_RHS:
7649 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7650 gimple_assign_rhs1 (stmt),
7651 gimple_assign_rhs2 (stmt), depth);
7652 case GIMPLE_TERNARY_RHS:
7653 return false;
7654 case GIMPLE_SINGLE_RHS:
7655 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7656 case GIMPLE_INVALID_RHS:
7657 break;
7659 gcc_unreachable ();
7662 /* Return true if the floating-point value computed by call STMT is known
7663 to have an integer value. We also allow +Inf, -Inf and NaN to be
7664 considered integer values. Return false for signaling NaN.
7666 DEPTH is the current nesting depth of the query. */
7668 static bool
7669 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7671 tree arg0 = (gimple_call_num_args (stmt) > 0
7672 ? gimple_call_arg (stmt, 0)
7673 : NULL_TREE);
7674 tree arg1 = (gimple_call_num_args (stmt) > 1
7675 ? gimple_call_arg (stmt, 1)
7676 : NULL_TREE);
7677 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7678 arg0, arg1, depth);
7681 /* Return true if the floating-point result of phi STMT is known to have
7682 an integer value. We also allow +Inf, -Inf and NaN to be considered
7683 integer values. Return false for signaling NaN.
7685 DEPTH is the current nesting depth of the query. */
7687 static bool
7688 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7690 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7692 tree arg = gimple_phi_arg_def (stmt, i);
7693 if (!integer_valued_real_single_p (arg, depth + 1))
7694 return false;
7696 return true;
7699 /* Return true if the floating-point value computed by STMT is known
7700 to have an integer value. We also allow +Inf, -Inf and NaN to be
7701 considered integer values. Return false for signaling NaN.
7703 DEPTH is the current nesting depth of the query. */
7705 bool
7706 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7708 switch (gimple_code (stmt))
7710 case GIMPLE_ASSIGN:
7711 return gimple_assign_integer_valued_real_p (stmt, depth);
7712 case GIMPLE_CALL:
7713 return gimple_call_integer_valued_real_p (stmt, depth);
7714 case GIMPLE_PHI:
7715 return gimple_phi_integer_valued_real_p (stmt, depth);
7716 default:
7717 return false;