debug/dwarf: formStrp uses a 64-bit value for 64-bit DWARF
[official-gcc.git] / gcc / gimple-fold.c
blob504a85d1441da6814229dee3028793b13cba2cde
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "ipa-chkp.h"
59 #include "tree-cfg.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "asan.h"
64 #include "diagnostic-core.h"
65 #include "intl.h"
66 #include "calls.h"
67 #include "tree-vector-builder.h"
69 /* Return true when DECL can be referenced from current unit.
70 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
71 We can get declarations that are not possible to reference for various
72 reasons:
74 1) When analyzing C++ virtual tables.
75 C++ virtual tables do have known constructors even
76 when they are keyed to other compilation unit.
77 Those tables can contain pointers to methods and vars
78 in other units. Those methods have both STATIC and EXTERNAL
79 set.
80 2) In WHOPR mode devirtualization might lead to reference
81 to method that was partitioned elsehwere.
82 In this case we have static VAR_DECL or FUNCTION_DECL
83 that has no corresponding callgraph/varpool node
84 declaring the body.
85 3) COMDAT functions referred by external vtables that
86 we devirtualize only during final compilation stage.
87 At this time we already decided that we will not output
88 the function body and thus we can't reference the symbol
89 directly. */
91 static bool
92 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
94 varpool_node *vnode;
95 struct cgraph_node *node;
96 symtab_node *snode;
98 if (DECL_ABSTRACT_P (decl))
99 return false;
101 /* We are concerned only about static/external vars and functions. */
102 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
103 || !VAR_OR_FUNCTION_DECL_P (decl))
104 return true;
106 /* Static objects can be referred only if they was not optimized out yet. */
107 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
109 /* Before we start optimizing unreachable code we can be sure all
110 static objects are defined. */
111 if (symtab->function_flags_ready)
112 return true;
113 snode = symtab_node::get (decl);
114 if (!snode || !snode->definition)
115 return false;
116 node = dyn_cast <cgraph_node *> (snode);
117 return !node || !node->global.inlined_to;
120 /* We will later output the initializer, so we can refer to it.
121 So we are concerned only when DECL comes from initializer of
122 external var or var that has been optimized out. */
123 if (!from_decl
124 || !VAR_P (from_decl)
125 || (!DECL_EXTERNAL (from_decl)
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->definition)
128 || (flag_ltrans
129 && (vnode = varpool_node::get (from_decl)) != NULL
130 && vnode->in_other_partition))
131 return true;
132 /* We are folding reference from external vtable. The vtable may reffer
133 to a symbol keyed to other compilation unit. The other compilation
134 unit may be in separate DSO and the symbol may be hidden. */
135 if (DECL_VISIBILITY_SPECIFIED (decl)
136 && DECL_EXTERNAL (decl)
137 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
138 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
139 return false;
140 /* When function is public, we always can introduce new reference.
141 Exception are the COMDAT functions where introducing a direct
142 reference imply need to include function body in the curren tunit. */
143 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
144 return true;
145 /* We have COMDAT. We are going to check if we still have definition
146 or if the definition is going to be output in other partition.
147 Bypass this when gimplifying; all needed functions will be produced.
149 As observed in PR20991 for already optimized out comdat virtual functions
150 it may be tempting to not necessarily give up because the copy will be
151 output elsewhere when corresponding vtable is output.
152 This is however not possible - ABI specify that COMDATs are output in
153 units where they are used and when the other unit was compiled with LTO
154 it is possible that vtable was kept public while the function itself
155 was privatized. */
156 if (!symtab->function_flags_ready)
157 return true;
159 snode = symtab_node::get (decl);
160 if (!snode
161 || ((!snode->definition || DECL_EXTERNAL (decl))
162 && (!snode->in_other_partition
163 || (!snode->forced_by_abi && !snode->force_output))))
164 return false;
165 node = dyn_cast <cgraph_node *> (snode);
166 return !node || !node->global.inlined_to;
169 /* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
173 tree
174 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
182 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
183 acceptable form for is_gimple_min_invariant.
184 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
186 tree
187 canonicalize_constructor_val (tree cval, tree from_decl)
189 tree orig_cval = cval;
190 STRIP_NOPS (cval);
191 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
192 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
194 tree ptr = TREE_OPERAND (cval, 0);
195 if (is_gimple_min_invariant (ptr))
196 cval = build1_loc (EXPR_LOCATION (cval),
197 ADDR_EXPR, TREE_TYPE (ptr),
198 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
199 ptr,
200 fold_convert (ptr_type_node,
201 TREE_OPERAND (cval, 1))));
203 if (TREE_CODE (cval) == ADDR_EXPR)
205 tree base = NULL_TREE;
206 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
208 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
209 if (base)
210 TREE_OPERAND (cval, 0) = base;
212 else
213 base = get_base_address (TREE_OPERAND (cval, 0));
214 if (!base)
215 return NULL_TREE;
217 if (VAR_OR_FUNCTION_DECL_P (base)
218 && !can_refer_decl_in_current_unit_p (base, from_decl))
219 return NULL_TREE;
220 if (TREE_TYPE (base) == error_mark_node)
221 return NULL_TREE;
222 if (VAR_P (base))
223 TREE_ADDRESSABLE (base) = 1;
224 else if (TREE_CODE (base) == FUNCTION_DECL)
226 /* Make sure we create a cgraph node for functions we'll reference.
227 They can be non-existent if the reference comes from an entry
228 of an external vtable for example. */
229 cgraph_node::get_create (base);
231 /* Fixup types in global initializers. */
232 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
233 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
235 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
236 cval = fold_convert (TREE_TYPE (orig_cval), cval);
237 return cval;
239 if (TREE_OVERFLOW_P (cval))
240 return drop_tree_overflow (cval);
241 return orig_cval;
244 /* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
247 tree
248 get_symbol_constant_value (tree sym)
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
253 if (val)
255 val = canonicalize_constructor_val (unshare_expr (val), sym);
256 if (val && is_gimple_min_invariant (val))
257 return val;
258 else
259 return NULL_TREE;
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
265 && is_gimple_reg_type (TREE_TYPE (sym)))
266 return build_zero_cst (TREE_TYPE (sym));
269 return NULL_TREE;
274 /* Subroutine of fold_stmt. We perform several simplifications of the
275 memory reference tree EXPR and make sure to re-gimplify them properly
276 after propagation of constant addresses. IS_LHS is true if the
277 reference is supposed to be an lvalue. */
279 static tree
280 maybe_fold_reference (tree expr, bool is_lhs)
282 tree result;
284 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285 || TREE_CODE (expr) == REALPART_EXPR
286 || TREE_CODE (expr) == IMAGPART_EXPR)
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_unary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0));
292 else if (TREE_CODE (expr) == BIT_FIELD_REF
293 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294 return fold_ternary_loc (EXPR_LOCATION (expr),
295 TREE_CODE (expr),
296 TREE_TYPE (expr),
297 TREE_OPERAND (expr, 0),
298 TREE_OPERAND (expr, 1),
299 TREE_OPERAND (expr, 2));
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
306 return NULL_TREE;
310 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
311 replacement rhs for the statement or NULL_TREE if no simplification
312 could be made. It is assumed that the operands have been previously
313 folded. */
315 static tree
316 fold_gimple_assign (gimple_stmt_iterator *si)
318 gimple *stmt = gsi_stmt (*si);
319 enum tree_code subcode = gimple_assign_rhs_code (stmt);
320 location_t loc = gimple_location (stmt);
322 tree result = NULL_TREE;
324 switch (get_gimple_rhs_class (subcode))
326 case GIMPLE_SINGLE_RHS:
328 tree rhs = gimple_assign_rhs1 (stmt);
330 if (TREE_CLOBBER_P (rhs))
331 return NULL_TREE;
333 if (REFERENCE_CLASS_P (rhs))
334 return maybe_fold_reference (rhs, false);
336 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
338 tree val = OBJ_TYPE_REF_EXPR (rhs);
339 if (is_gimple_min_invariant (val))
340 return val;
341 else if (flag_devirtualize && virtual_method_call_p (rhs))
343 bool final;
344 vec <cgraph_node *>targets
345 = possible_polymorphic_call_targets (rhs, stmt, &final);
346 if (final && targets.length () <= 1 && dbg_cnt (devirt))
348 if (dump_enabled_p ())
350 location_t loc = gimple_location_safe (stmt);
351 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
352 "resolving virtual function address "
353 "reference to function %s\n",
354 targets.length () == 1
355 ? targets[0]->name ()
356 : "NULL");
358 if (targets.length () == 1)
360 val = fold_convert (TREE_TYPE (val),
361 build_fold_addr_expr_loc
362 (loc, targets[0]->decl));
363 STRIP_USELESS_TYPE_CONVERSION (val);
365 else
366 /* We can not use __builtin_unreachable here because it
367 can not have address taken. */
368 val = build_int_cst (TREE_TYPE (val), 0);
369 return val;
374 else if (TREE_CODE (rhs) == ADDR_EXPR)
376 tree ref = TREE_OPERAND (rhs, 0);
377 tree tem = maybe_fold_reference (ref, true);
378 if (tem
379 && TREE_CODE (tem) == MEM_REF
380 && integer_zerop (TREE_OPERAND (tem, 1)))
381 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
382 else if (tem)
383 result = fold_convert (TREE_TYPE (rhs),
384 build_fold_addr_expr_loc (loc, tem));
385 else if (TREE_CODE (ref) == MEM_REF
386 && integer_zerop (TREE_OPERAND (ref, 1)))
387 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
389 if (result)
391 /* Strip away useless type conversions. Both the
392 NON_LVALUE_EXPR that may have been added by fold, and
393 "useless" type conversions that might now be apparent
394 due to propagation. */
395 STRIP_USELESS_TYPE_CONVERSION (result);
397 if (result != rhs && valid_gimple_rhs_p (result))
398 return result;
402 else if (TREE_CODE (rhs) == CONSTRUCTOR
403 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
405 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
406 unsigned i;
407 tree val;
409 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
410 if (! CONSTANT_CLASS_P (val))
411 return NULL_TREE;
413 return build_vector_from_ctor (TREE_TYPE (rhs),
414 CONSTRUCTOR_ELTS (rhs));
417 else if (DECL_P (rhs))
418 return get_symbol_constant_value (rhs);
420 break;
422 case GIMPLE_UNARY_RHS:
423 break;
425 case GIMPLE_BINARY_RHS:
426 break;
428 case GIMPLE_TERNARY_RHS:
429 result = fold_ternary_loc (loc, subcode,
430 TREE_TYPE (gimple_assign_lhs (stmt)),
431 gimple_assign_rhs1 (stmt),
432 gimple_assign_rhs2 (stmt),
433 gimple_assign_rhs3 (stmt));
435 if (result)
437 STRIP_USELESS_TYPE_CONVERSION (result);
438 if (valid_gimple_rhs_p (result))
439 return result;
441 break;
443 case GIMPLE_INVALID_RHS:
444 gcc_unreachable ();
447 return NULL_TREE;
451 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
452 adjusting the replacement stmts location and virtual operands.
453 If the statement has a lhs the last stmt in the sequence is expected
454 to assign to that lhs. */
456 static void
457 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
459 gimple *stmt = gsi_stmt (*si_p);
461 if (gimple_has_location (stmt))
462 annotate_all_with_location (stmts, gimple_location (stmt));
464 /* First iterate over the replacement statements backward, assigning
465 virtual operands to their defining statements. */
466 gimple *laststore = NULL;
467 for (gimple_stmt_iterator i = gsi_last (stmts);
468 !gsi_end_p (i); gsi_prev (&i))
470 gimple *new_stmt = gsi_stmt (i);
471 if ((gimple_assign_single_p (new_stmt)
472 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
473 || (is_gimple_call (new_stmt)
474 && (gimple_call_flags (new_stmt)
475 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
477 tree vdef;
478 if (!laststore)
479 vdef = gimple_vdef (stmt);
480 else
481 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
482 gimple_set_vdef (new_stmt, vdef);
483 if (vdef && TREE_CODE (vdef) == SSA_NAME)
484 SSA_NAME_DEF_STMT (vdef) = new_stmt;
485 laststore = new_stmt;
489 /* Second iterate over the statements forward, assigning virtual
490 operands to their uses. */
491 tree reaching_vuse = gimple_vuse (stmt);
492 for (gimple_stmt_iterator i = gsi_start (stmts);
493 !gsi_end_p (i); gsi_next (&i))
495 gimple *new_stmt = gsi_stmt (i);
496 /* If the new statement possibly has a VUSE, update it with exact SSA
497 name we know will reach this one. */
498 if (gimple_has_mem_ops (new_stmt))
499 gimple_set_vuse (new_stmt, reaching_vuse);
500 gimple_set_modified (new_stmt, true);
501 if (gimple_vdef (new_stmt))
502 reaching_vuse = gimple_vdef (new_stmt);
505 /* If the new sequence does not do a store release the virtual
506 definition of the original statement. */
507 if (reaching_vuse
508 && reaching_vuse == gimple_vuse (stmt))
510 tree vdef = gimple_vdef (stmt);
511 if (vdef
512 && TREE_CODE (vdef) == SSA_NAME)
514 unlink_stmt_vdef (stmt);
515 release_ssa_name (vdef);
519 /* Finally replace the original statement with the sequence. */
520 gsi_replace_with_seq (si_p, stmts, false);
523 /* Convert EXPR into a GIMPLE value suitable for substitution on the
524 RHS of an assignment. Insert the necessary statements before
525 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
526 is replaced. If the call is expected to produces a result, then it
527 is replaced by an assignment of the new RHS to the result variable.
528 If the result is to be ignored, then the call is replaced by a
529 GIMPLE_NOP. A proper VDEF chain is retained by making the first
530 VUSE and the last VDEF of the whole sequence be the same as the replaced
531 statement and using new SSA names for stores in between. */
533 void
534 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
536 tree lhs;
537 gimple *stmt, *new_stmt;
538 gimple_stmt_iterator i;
539 gimple_seq stmts = NULL;
541 stmt = gsi_stmt (*si_p);
543 gcc_assert (is_gimple_call (stmt));
545 push_gimplify_context (gimple_in_ssa_p (cfun));
547 lhs = gimple_call_lhs (stmt);
548 if (lhs == NULL_TREE)
550 gimplify_and_add (expr, &stmts);
551 /* We can end up with folding a memcpy of an empty class assignment
552 which gets optimized away by C++ gimplification. */
553 if (gimple_seq_empty_p (stmts))
555 pop_gimplify_context (NULL);
556 if (gimple_in_ssa_p (cfun))
558 unlink_stmt_vdef (stmt);
559 release_defs (stmt);
561 gsi_replace (si_p, gimple_build_nop (), false);
562 return;
565 else
567 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
568 new_stmt = gimple_build_assign (lhs, tmp);
569 i = gsi_last (stmts);
570 gsi_insert_after_without_update (&i, new_stmt,
571 GSI_CONTINUE_LINKING);
574 pop_gimplify_context (NULL);
576 gsi_replace_with_seq_vops (si_p, stmts);
580 /* Replace the call at *GSI with the gimple value VAL. */
582 void
583 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
585 gimple *stmt = gsi_stmt (*gsi);
586 tree lhs = gimple_call_lhs (stmt);
587 gimple *repl;
588 if (lhs)
590 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
591 val = fold_convert (TREE_TYPE (lhs), val);
592 repl = gimple_build_assign (lhs, val);
594 else
595 repl = gimple_build_nop ();
596 tree vdef = gimple_vdef (stmt);
597 if (vdef && TREE_CODE (vdef) == SSA_NAME)
599 unlink_stmt_vdef (stmt);
600 release_ssa_name (vdef);
602 gsi_replace (gsi, repl, false);
605 /* Replace the call at *GSI with the new call REPL and fold that
606 again. */
608 static void
609 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
611 gimple *stmt = gsi_stmt (*gsi);
612 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
613 gimple_set_location (repl, gimple_location (stmt));
614 if (gimple_vdef (stmt)
615 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
617 gimple_set_vdef (repl, gimple_vdef (stmt));
618 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
620 if (gimple_vuse (stmt))
621 gimple_set_vuse (repl, gimple_vuse (stmt));
622 gsi_replace (gsi, repl, false);
623 fold_stmt (gsi);
626 /* Return true if VAR is a VAR_DECL or a component thereof. */
628 static bool
629 var_decl_component_p (tree var)
631 tree inner = var;
632 while (handled_component_p (inner))
633 inner = TREE_OPERAND (inner, 0);
634 return SSA_VAR_P (inner);
637 /* If the SIZE argument representing the size of an object is in a range
638 of values of which exactly one is valid (and that is zero), return
639 true, otherwise false. */
641 static bool
642 size_must_be_zero_p (tree size)
644 if (integer_zerop (size))
645 return true;
647 if (TREE_CODE (size) != SSA_NAME)
648 return false;
650 wide_int min, max;
651 enum value_range_type rtype = get_range_info (size, &min, &max);
652 if (rtype != VR_ANTI_RANGE)
653 return false;
655 tree type = TREE_TYPE (size);
656 int prec = TYPE_PRECISION (type);
658 wide_int wone = wi::one (prec);
660 /* Compute the value of SSIZE_MAX, the largest positive value that
661 can be stored in ssize_t, the signed counterpart of size_t. */
662 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
664 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
667 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
668 diagnose (otherwise undefined) overlapping copies without preventing
669 folding. When folded, GCC guarantees that overlapping memcpy has
670 the same semantics as memmove. Call to the library memcpy need not
671 provide the same guarantee. Return false if no simplification can
672 be made. */
674 static bool
675 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
676 tree dest, tree src, int endp)
678 gimple *stmt = gsi_stmt (*gsi);
679 tree lhs = gimple_call_lhs (stmt);
680 tree len = gimple_call_arg (stmt, 2);
681 tree destvar, srcvar;
682 location_t loc = gimple_location (stmt);
684 tree func = gimple_call_fndecl (stmt);
685 bool nowarn = gimple_no_warning_p (stmt);
686 bool check_overlap = (DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE
687 && DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE_CHK
688 && !nowarn);
690 /* If the LEN parameter is a constant zero or in range where
691 the only valid value is zero, return DEST. */
692 if (size_must_be_zero_p (len))
694 gimple *repl;
695 if (gimple_call_lhs (stmt))
696 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
697 else
698 repl = gimple_build_nop ();
699 tree vdef = gimple_vdef (stmt);
700 if (vdef && TREE_CODE (vdef) == SSA_NAME)
702 unlink_stmt_vdef (stmt);
703 release_ssa_name (vdef);
705 gsi_replace (gsi, repl, false);
706 return true;
709 /* If SRC and DEST are the same (and not volatile), return
710 DEST{,+LEN,+LEN-1}. */
711 if (operand_equal_p (src, dest, 0))
713 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
714 It's safe and may even be emitted by GCC itself (see bug
715 32667). However, diagnose it in explicit calls to the memcpy
716 function. */
717 if (check_overlap && *IDENTIFIER_POINTER (DECL_NAME (func)) != '_')
718 warning_at (loc, OPT_Wrestrict,
719 "%qD source argument is the same as destination",
720 func);
722 unlink_stmt_vdef (stmt);
723 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
724 release_ssa_name (gimple_vdef (stmt));
725 if (!lhs)
727 gsi_replace (gsi, gimple_build_nop (), false);
728 return true;
730 goto done;
732 else
734 tree srctype, desttype;
735 unsigned int src_align, dest_align;
736 tree off0;
738 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
739 pointers as wide integer) and also may result in huge function
740 size because of inlined bounds copy. Thus don't inline for
741 functions we want to instrument. */
742 if (flag_check_pointer_bounds
743 && chkp_instrumentable_p (cfun->decl)
744 /* Even if data may contain pointers we can inline if copy
745 less than a pointer size. */
746 && (!tree_fits_uhwi_p (len)
747 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
748 return false;
750 /* Build accesses at offset zero with a ref-all character type. */
751 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
752 ptr_mode, true), 0);
754 /* If we can perform the copy efficiently with first doing all loads
755 and then all stores inline it that way. Currently efficiently
756 means that we can load all the memory into a single integer
757 register which is what MOVE_MAX gives us. */
758 src_align = get_pointer_alignment (src);
759 dest_align = get_pointer_alignment (dest);
760 if (tree_fits_uhwi_p (len)
761 && compare_tree_int (len, MOVE_MAX) <= 0
762 /* ??? Don't transform copies from strings with known length this
763 confuses the tree-ssa-strlen.c. This doesn't handle
764 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
765 reason. */
766 && !c_strlen (src, 2))
768 unsigned ilen = tree_to_uhwi (len);
769 if (pow2p_hwi (ilen))
771 /* Detect invalid bounds and overlapping copies and issue
772 either -Warray-bounds or -Wrestrict. */
773 if (!nowarn
774 && check_bounds_or_overlap (as_a <gcall *>(stmt),
775 dest, src, len, len))
776 gimple_set_no_warning (stmt, true);
778 scalar_int_mode mode;
779 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
780 if (type
781 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
782 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
783 /* If the destination pointer is not aligned we must be able
784 to emit an unaligned store. */
785 && (dest_align >= GET_MODE_ALIGNMENT (mode)
786 || !targetm.slow_unaligned_access (mode, dest_align)
787 || (optab_handler (movmisalign_optab, mode)
788 != CODE_FOR_nothing)))
790 tree srctype = type;
791 tree desttype = type;
792 if (src_align < GET_MODE_ALIGNMENT (mode))
793 srctype = build_aligned_type (type, src_align);
794 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
795 tree tem = fold_const_aggregate_ref (srcmem);
796 if (tem)
797 srcmem = tem;
798 else if (src_align < GET_MODE_ALIGNMENT (mode)
799 && targetm.slow_unaligned_access (mode, src_align)
800 && (optab_handler (movmisalign_optab, mode)
801 == CODE_FOR_nothing))
802 srcmem = NULL_TREE;
803 if (srcmem)
805 gimple *new_stmt;
806 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
808 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
809 srcmem
810 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
811 new_stmt);
812 gimple_assign_set_lhs (new_stmt, srcmem);
813 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
816 if (dest_align < GET_MODE_ALIGNMENT (mode))
817 desttype = build_aligned_type (type, dest_align);
818 new_stmt
819 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
820 dest, off0),
821 srcmem);
822 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
823 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
824 if (gimple_vdef (new_stmt)
825 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
826 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
827 if (!lhs)
829 gsi_replace (gsi, new_stmt, false);
830 return true;
832 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
833 goto done;
839 if (endp == 3)
841 /* Both DEST and SRC must be pointer types.
842 ??? This is what old code did. Is the testing for pointer types
843 really mandatory?
845 If either SRC is readonly or length is 1, we can use memcpy. */
846 if (!dest_align || !src_align)
847 return false;
848 if (readonly_data_expr (src)
849 || (tree_fits_uhwi_p (len)
850 && (MIN (src_align, dest_align) / BITS_PER_UNIT
851 >= tree_to_uhwi (len))))
853 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
854 if (!fn)
855 return false;
856 gimple_call_set_fndecl (stmt, fn);
857 gimple_call_set_arg (stmt, 0, dest);
858 gimple_call_set_arg (stmt, 1, src);
859 fold_stmt (gsi);
860 return true;
863 /* If *src and *dest can't overlap, optimize into memcpy as well. */
864 if (TREE_CODE (src) == ADDR_EXPR
865 && TREE_CODE (dest) == ADDR_EXPR)
867 tree src_base, dest_base, fn;
868 poly_int64 src_offset = 0, dest_offset = 0;
869 poly_uint64 maxsize;
871 srcvar = TREE_OPERAND (src, 0);
872 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
873 if (src_base == NULL)
874 src_base = srcvar;
875 destvar = TREE_OPERAND (dest, 0);
876 dest_base = get_addr_base_and_unit_offset (destvar,
877 &dest_offset);
878 if (dest_base == NULL)
879 dest_base = destvar;
880 if (!poly_int_tree_p (len, &maxsize))
881 maxsize = -1;
882 if (SSA_VAR_P (src_base)
883 && SSA_VAR_P (dest_base))
885 if (operand_equal_p (src_base, dest_base, 0)
886 && ranges_maybe_overlap_p (src_offset, maxsize,
887 dest_offset, maxsize))
888 return false;
890 else if (TREE_CODE (src_base) == MEM_REF
891 && TREE_CODE (dest_base) == MEM_REF)
893 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
894 TREE_OPERAND (dest_base, 0), 0))
895 return false;
896 poly_offset_int full_src_offset
897 = mem_ref_offset (src_base) + src_offset;
898 poly_offset_int full_dest_offset
899 = mem_ref_offset (dest_base) + dest_offset;
900 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
901 full_dest_offset, maxsize))
902 return false;
904 else
905 return false;
907 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
908 if (!fn)
909 return false;
910 gimple_call_set_fndecl (stmt, fn);
911 gimple_call_set_arg (stmt, 0, dest);
912 gimple_call_set_arg (stmt, 1, src);
913 fold_stmt (gsi);
914 return true;
917 /* If the destination and source do not alias optimize into
918 memcpy as well. */
919 if ((is_gimple_min_invariant (dest)
920 || TREE_CODE (dest) == SSA_NAME)
921 && (is_gimple_min_invariant (src)
922 || TREE_CODE (src) == SSA_NAME))
924 ao_ref destr, srcr;
925 ao_ref_init_from_ptr_and_size (&destr, dest, len);
926 ao_ref_init_from_ptr_and_size (&srcr, src, len);
927 if (!refs_may_alias_p_1 (&destr, &srcr, false))
929 tree fn;
930 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
931 if (!fn)
932 return false;
933 gimple_call_set_fndecl (stmt, fn);
934 gimple_call_set_arg (stmt, 0, dest);
935 gimple_call_set_arg (stmt, 1, src);
936 fold_stmt (gsi);
937 return true;
941 return false;
944 if (!tree_fits_shwi_p (len))
945 return false;
946 if (!POINTER_TYPE_P (TREE_TYPE (src))
947 || !POINTER_TYPE_P (TREE_TYPE (dest)))
948 return false;
949 /* In the following try to find a type that is most natural to be
950 used for the memcpy source and destination and that allows
951 the most optimization when memcpy is turned into a plain assignment
952 using that type. In theory we could always use a char[len] type
953 but that only gains us that the destination and source possibly
954 no longer will have their address taken. */
955 srctype = TREE_TYPE (TREE_TYPE (src));
956 if (TREE_CODE (srctype) == ARRAY_TYPE
957 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
958 srctype = TREE_TYPE (srctype);
959 desttype = TREE_TYPE (TREE_TYPE (dest));
960 if (TREE_CODE (desttype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
962 desttype = TREE_TYPE (desttype);
963 if (TREE_ADDRESSABLE (srctype)
964 || TREE_ADDRESSABLE (desttype))
965 return false;
967 /* Make sure we are not copying using a floating-point mode or
968 a type whose size possibly does not match its precision. */
969 if (FLOAT_MODE_P (TYPE_MODE (desttype))
970 || TREE_CODE (desttype) == BOOLEAN_TYPE
971 || TREE_CODE (desttype) == ENUMERAL_TYPE)
972 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
973 if (FLOAT_MODE_P (TYPE_MODE (srctype))
974 || TREE_CODE (srctype) == BOOLEAN_TYPE
975 || TREE_CODE (srctype) == ENUMERAL_TYPE)
976 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
977 if (!srctype)
978 srctype = desttype;
979 if (!desttype)
980 desttype = srctype;
981 if (!srctype)
982 return false;
984 src_align = get_pointer_alignment (src);
985 dest_align = get_pointer_alignment (dest);
986 if (dest_align < TYPE_ALIGN (desttype)
987 || src_align < TYPE_ALIGN (srctype))
988 return false;
990 destvar = NULL_TREE;
991 if (TREE_CODE (dest) == ADDR_EXPR
992 && var_decl_component_p (TREE_OPERAND (dest, 0))
993 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
994 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
996 srcvar = NULL_TREE;
997 if (TREE_CODE (src) == ADDR_EXPR
998 && var_decl_component_p (TREE_OPERAND (src, 0))
999 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1001 if (!destvar
1002 || src_align >= TYPE_ALIGN (desttype))
1003 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1004 src, off0);
1005 else if (!STRICT_ALIGNMENT)
1007 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1008 src_align);
1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1013 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1014 return false;
1016 if (srcvar == NULL_TREE)
1018 if (src_align >= TYPE_ALIGN (desttype))
1019 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1020 else
1022 if (STRICT_ALIGNMENT)
1023 return false;
1024 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1025 src_align);
1026 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1029 else if (destvar == NULL_TREE)
1031 if (dest_align >= TYPE_ALIGN (srctype))
1032 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1033 else
1035 if (STRICT_ALIGNMENT)
1036 return false;
1037 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1038 dest_align);
1039 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1043 /* Detect invalid bounds and overlapping copies and issue either
1044 -Warray-bounds or -Wrestrict. */
1045 if (!nowarn)
1046 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1048 gimple *new_stmt;
1049 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1051 tree tem = fold_const_aggregate_ref (srcvar);
1052 if (tem)
1053 srcvar = tem;
1054 if (! is_gimple_min_invariant (srcvar))
1056 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1057 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1058 new_stmt);
1059 gimple_assign_set_lhs (new_stmt, srcvar);
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1063 new_stmt = gimple_build_assign (destvar, srcvar);
1064 goto set_vop_and_replace;
1067 /* We get an aggregate copy. Use an unsigned char[] type to
1068 perform the copying to preserve padding and to avoid any issues
1069 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1070 desttype = build_array_type_nelts (unsigned_char_type_node,
1071 tree_to_uhwi (len));
1072 srctype = desttype;
1073 if (src_align > TYPE_ALIGN (srctype))
1074 srctype = build_aligned_type (srctype, src_align);
1075 if (dest_align > TYPE_ALIGN (desttype))
1076 desttype = build_aligned_type (desttype, dest_align);
1077 new_stmt
1078 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1079 fold_build2 (MEM_REF, srctype, src, off0));
1080 set_vop_and_replace:
1081 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1082 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1083 if (gimple_vdef (new_stmt)
1084 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1085 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1086 if (!lhs)
1088 gsi_replace (gsi, new_stmt, false);
1089 return true;
1091 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1094 done:
1095 gimple_seq stmts = NULL;
1096 if (endp == 0 || endp == 3)
1097 len = NULL_TREE;
1098 else if (endp == 2)
1099 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1100 ssize_int (1));
1101 if (endp == 2 || endp == 1)
1103 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1104 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1105 TREE_TYPE (dest), dest, len);
1108 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1109 gimple *repl = gimple_build_assign (lhs, dest);
1110 gsi_replace (gsi, repl, false);
1111 return true;
1114 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1115 to built-in memcmp (a, b, len). */
1117 static bool
1118 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1120 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1122 if (!fn)
1123 return false;
1125 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1127 gimple *stmt = gsi_stmt (*gsi);
1128 tree a = gimple_call_arg (stmt, 0);
1129 tree b = gimple_call_arg (stmt, 1);
1130 tree len = gimple_call_arg (stmt, 2);
1132 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1133 replace_call_with_call_and_fold (gsi, repl);
1135 return true;
1138 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1139 to built-in memmove (dest, src, len). */
1141 static bool
1142 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1144 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1146 if (!fn)
1147 return false;
1149 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1150 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1151 len) into memmove (dest, src, len). */
1153 gimple *stmt = gsi_stmt (*gsi);
1154 tree src = gimple_call_arg (stmt, 0);
1155 tree dest = gimple_call_arg (stmt, 1);
1156 tree len = gimple_call_arg (stmt, 2);
1158 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1159 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1160 replace_call_with_call_and_fold (gsi, repl);
1162 return true;
1165 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1166 to built-in memset (dest, 0, len). */
1168 static bool
1169 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1171 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1173 if (!fn)
1174 return false;
1176 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1178 gimple *stmt = gsi_stmt (*gsi);
1179 tree dest = gimple_call_arg (stmt, 0);
1180 tree len = gimple_call_arg (stmt, 1);
1182 gimple_seq seq = NULL;
1183 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1184 gimple_seq_add_stmt_without_update (&seq, repl);
1185 gsi_replace_with_seq_vops (gsi, seq);
1186 fold_stmt (gsi);
1188 return true;
1191 /* Fold function call to builtin memset or bzero at *GSI setting the
1192 memory of size LEN to VAL. Return whether a simplification was made. */
1194 static bool
1195 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1197 gimple *stmt = gsi_stmt (*gsi);
1198 tree etype;
1199 unsigned HOST_WIDE_INT length, cval;
1201 /* If the LEN parameter is zero, return DEST. */
1202 if (integer_zerop (len))
1204 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1205 return true;
1208 if (! tree_fits_uhwi_p (len))
1209 return false;
1211 if (TREE_CODE (c) != INTEGER_CST)
1212 return false;
1214 tree dest = gimple_call_arg (stmt, 0);
1215 tree var = dest;
1216 if (TREE_CODE (var) != ADDR_EXPR)
1217 return false;
1219 var = TREE_OPERAND (var, 0);
1220 if (TREE_THIS_VOLATILE (var))
1221 return false;
1223 etype = TREE_TYPE (var);
1224 if (TREE_CODE (etype) == ARRAY_TYPE)
1225 etype = TREE_TYPE (etype);
1227 if (!INTEGRAL_TYPE_P (etype)
1228 && !POINTER_TYPE_P (etype))
1229 return NULL_TREE;
1231 if (! var_decl_component_p (var))
1232 return NULL_TREE;
1234 length = tree_to_uhwi (len);
1235 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1236 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1237 return NULL_TREE;
1239 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1240 return NULL_TREE;
1242 if (integer_zerop (c))
1243 cval = 0;
1244 else
1246 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1247 return NULL_TREE;
1249 cval = TREE_INT_CST_LOW (c);
1250 cval &= 0xff;
1251 cval |= cval << 8;
1252 cval |= cval << 16;
1253 cval |= (cval << 31) << 1;
1256 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1257 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1258 gimple_set_vuse (store, gimple_vuse (stmt));
1259 tree vdef = gimple_vdef (stmt);
1260 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1262 gimple_set_vdef (store, gimple_vdef (stmt));
1263 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1265 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1266 if (gimple_call_lhs (stmt))
1268 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1269 gsi_replace (gsi, asgn, false);
1271 else
1273 gimple_stmt_iterator gsi2 = *gsi;
1274 gsi_prev (gsi);
1275 gsi_remove (&gsi2, true);
1278 return true;
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy, bool *flexp)
  tree var, val = NULL_TREE;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  /* Non-SSA operands are analyzed directly; SSA names are handled by
     walking their defining statement further below.  */
  if (TREE_CODE (arg) != SSA_NAME)
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	  tree op = TREE_OPERAND (arg, 0);
	  if (integer_zerop (TREE_OPERAND (op, 1)))
	      tree aop0 = TREE_OPERAND (op, 0);
	      if (TREE_CODE (aop0) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
		return get_range_strlen (TREE_OPERAND (aop0, 0),
					 length, visited, type, fuzzy, flexp);
	  else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
	      /* Fail if an array is the last member of a struct object
		 since it could be treated as a (fake) flexible array
		 member.  */
	      tree idx = TREE_OPERAND (op, 1);

	      arg = TREE_OPERAND (op, 0);
	      tree optype = TREE_TYPE (arg);
	      if (tree dom = TYPE_DOMAIN (optype))
		if (tree bound = TYPE_MAX_VALUE (dom))
		  if (TREE_CODE (bound) == INTEGER_CST
		      && TREE_CODE (idx) == INTEGER_CST
		      && tree_int_cst_lt (bound, idx))
		    return false;

      if (type == 2)
	  /* TYPE == 2: ARG itself is the value; it must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
      else
	val = c_strlen (arg, 1);

      /* In fuzzy mode fall back on array bounds when the exact length
	 could not be computed.  */
      if (!val && fuzzy)
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == ARRAY_REF)
	      tree type = TREE_TYPE (TREE_OPERAND (arg, 0));

	      /* Strip outer array types down to the innermost one.  */
	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;

	      /* The longest string fitting in the array leaves one byte
		 for the terminating nul.  */
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	  else if (TREE_CODE (arg) == COMPONENT_REF
		   && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL but that would only be considered valid if
		 the array were the last member of a struct.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);

	      tree type = TREE_TYPE (arg);

	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Fail when the array bound is unknown or zero.  */
	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);

	  if (VAR_P (arg))
	      tree type = TREE_TYPE (arg);
	      if (POINTER_TYPE_P (type))
		type = TREE_TYPE (type);

	      if (TREE_CODE (type) == ARRAY_TYPE)
		  val = TYPE_SIZE_UNIT (type);
		  if (!val
		      || TREE_CODE (val) != INTEGER_CST
		      || integer_zerop (val))
		    return false;
		  val = wide_int_to_tree (TREE_TYPE (val),
					  wi::sub(wi::to_wide (val), 1));
		  /* Set the minimum size to zero since the string in
		     the array could have zero length.  */
		  *minlen = ssize_int (0);

      if (!val)
	return false;

      /* Keep the smaller of VAL and any previously recorded minimum.  */
      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      if (*maxlen)
	  if (type > 0)
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    /* TYPE == 0 requires an exact match with the recorded
	       maximum.  */
	    return false;

      *maxlen = val;
      return true;

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  /* Both arms of the conditional must yield a usable length.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
	    && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
      return false;

    case GIMPLE_PHI:
      /* All the arguments of the PHI node must have the same constant
	 length.  */
      unsigned i;

      for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
	      if (fuzzy)
		/* In fuzzy mode an unknown arm pushes the maximum to
		   "unbounded" (all-ones) instead of failing.  */
		*maxlen = build_all_ones_cst (size_type_node);
	      else
		return false;

      return true;

    default:
      return false;
1533 /* Determine the minimum and maximum value or string length that ARG
1534 refers to and store each in the first two elements of MINMAXLEN.
1535 For expressions that point to strings of unknown lengths that are
1536 character arrays, use the upper bound of the array as the maximum
1537 length. For example, given an expression like 'x ? array : "xyz"'
1538 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1539 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1540 stored in array.
1541 Return true if the range of the string lengths has been obtained
1542 from the upper bound of an array at the end of a struct. Such
1543 an array may hold a string that's longer than its upper bound
1544 due to it being used as a poor-man's flexible array member. */
1546 bool
1547 get_range_strlen (tree arg, tree minmaxlen[2])
1549 bitmap visited = NULL;
1551 minmaxlen[0] = NULL_TREE;
1552 minmaxlen[1] = NULL_TREE;
1554 bool flexarray = false;
1555 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1557 if (visited)
1558 BITMAP_FREE (visited);
1560 return flexarray;
/* Return the maximum (TYPE 1), exact (TYPE 0) string length of ARG, or
   the maximum value ARG can have (TYPE 2), as computed by the worker
   get_range_strlen above, or NULL_TREE when it cannot be determined.
   TYPE has the same meaning as the worker's TYPE parameter.  */

tree
get_maxval_strlen (tree arg, int type)
  bitmap visited = NULL;
  tree len[2] = { NULL_TREE, NULL_TREE };

  bool dummy;
  /* Non-fuzzy query: on failure discard any partially computed maximum.  */
  if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
    len[1] = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len[1];
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
      tree func = gimple_call_fndecl (stmt);

      warning_at (loc, OPT_Wrestrict,
		  "%qD source argument is the same as destination",
		  func);

      replace_call_with_value (gsi, dest);
      return true;

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* The transformation strcpy (dest, src) -> memcpy (dest, src, len + 1)
     requires the length of SRC to be a known constant.  */
  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  /* Copy LEN + 1 bytes to include the terminating nul; gimplify the
     length expression before the call so it dominates the new memcpy.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
      /* Avoid warning if the destination refers to an array/pointer
	 declared with attribute nonstring.  */
      if (!nonstring)
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, 0);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			call, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			call, fndecl);

      replace_call_with_value (gsi, dest);
      return true;

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation unless the destination is marked nonstring.  */
  if (!nonstring)
      if (tree_int_cst_lt (len, slen))
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  warning_at (loc, OPT_Wstringop_truncation,
		      (tree_int_cst_equal (size_one_node, len)
		       ? G_("%G%qD output truncated copying %E byte "
			    "from a string of length %E")
		       : G_("%G%qD output truncated copying %E bytes "
			    "from a string of length %E")),
		      call, fndecl, len, slen);
      else if (tree_int_cst_equal (len, slen))
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  warning_at (loc, OPT_Wstringop_truncation,
		      (tree_int_cst_equal (size_one_node, len)
		       ? G_("%G%qD output truncated before terminating nul "
			    "copying %E byte from a string of the same "
			    "length")
		       : G_("%G%qD output truncated before terminating nul "
			    "copying %E bytes from a string of the same "
			    "length")),
		      call, fndecl, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.
   Return true if a simplification was made, false otherwise.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Nothing to do when the result of the call is unused.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: evaluate the search at compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;

      /* Replace the call with STR + offset-of-match.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;

      return false;

  /* Otherwise lower str(r)chr (str, 0) to str + strlen (str).  */
  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').
   Return true if a simplification was made, false otherwise.  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
  gimple *stmt = gsi_stmt (*gsi);
  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);
  const char *p, *q;

  /* Nothing to do when the result of the call is unused.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* The needle must be a known constant string.  */
  q = c_getstr (needle);
  if (q == NULL)
    return false;

  /* Both strings constant: evaluate the search at compile time.  */
  if ((p = c_getstr (haystack)))
      const char *r = strstr (p, q);

      if (r == NULL)
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;

      /* Replace the call with HAYSTACK + offset-of-match.  */
      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
      replace_call_with_value (gsi, haystack);
      return true;

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;

  return false;
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.
   When SRC is known to be the empty string, replace the call by DST.
   Otherwise, when optimizing for speed and the length of SRC is a
   known constant, lower the call to strlen (dst) followed by a memcpy
   into dst + strlen (dst).
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
      replace_call_with_value (gsi, dst);
      return true;

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (src) + 1 bytes so the terminating nul comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
      /* strcat returns its first argument; materialize that.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
  else
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
  return true;
1967 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1968 are the arguments to the call. */
1970 static bool
1971 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1973 gimple *stmt = gsi_stmt (*gsi);
1974 tree dest = gimple_call_arg (stmt, 0);
1975 tree src = gimple_call_arg (stmt, 1);
1976 tree size = gimple_call_arg (stmt, 2);
1977 tree fn;
1978 const char *p;
1981 p = c_getstr (src);
1982 /* If the SRC parameter is "", return DEST. */
1983 if (p && *p == '\0')
1985 replace_call_with_value (gsi, dest);
1986 return true;
1989 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1990 return false;
1992 /* If __builtin_strcat_chk is used, assume strcat is available. */
1993 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1994 if (!fn)
1995 return false;
1997 gimple *repl = gimple_build_call (fn, 2, dest, src);
1998 replace_call_with_call_and_fold (gsi, repl);
1999 return true;
/* Simplify a call to the strncat builtin at *GSI: when safe, transform
   strncat (dst, src, len) into strcat (dst, src), diagnosing suspicious
   bounds along the way.  Return true if a simplification was made.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
      replace_call_with_value (gsi, dst);
      return true;

  /* The transformation needs both a constant bound and a constant
     source string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);

  if (!nowarn && cmpsrc == 0)
      tree fndecl = gimple_call_fndecl (stmt);

      /* To avoid certain truncation the specified bound should also
	 not be equal to (or less than) the length of the source.  */
      location_t loc = gimple_location (stmt);
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
2089 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2090 LEN, and SIZE. */
2092 static bool
2093 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2095 gimple *stmt = gsi_stmt (*gsi);
2096 tree dest = gimple_call_arg (stmt, 0);
2097 tree src = gimple_call_arg (stmt, 1);
2098 tree len = gimple_call_arg (stmt, 2);
2099 tree size = gimple_call_arg (stmt, 3);
2100 tree fn;
2101 const char *p;
2103 p = c_getstr (src);
2104 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2105 if ((p && *p == '\0')
2106 || integer_zerop (len))
2108 replace_call_with_value (gsi, dest);
2109 return true;
2112 if (! tree_fits_uhwi_p (size))
2113 return false;
2115 if (! integer_all_onesp (size))
2117 tree src_len = c_strlen (src, 1);
2118 if (src_len
2119 && tree_fits_uhwi_p (src_len)
2120 && tree_fits_uhwi_p (len)
2121 && ! tree_int_cst_lt (len, src_len))
2123 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2124 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2125 if (!fn)
2126 return false;
2128 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2129 replace_call_with_call_and_fold (gsi, repl);
2130 return true;
2132 return false;
2135 /* If __builtin_strncat_chk is used, assume strncat is available. */
2136 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2137 if (!fn)
2138 return false;
2140 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2141 replace_call_with_call_and_fold (gsi, repl);
2142 return true;
2145 /* Build and append gimple statements to STMTS that would load a first
2146 character of a memory location identified by STR. LOC is location
2147 of the statement. */
2149 static tree
2150 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2152 tree var;
2154 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2155 tree cst_uchar_ptr_node
2156 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2157 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2159 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2160 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2161 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2163 gimple_assign_set_lhs (stmt, var);
2164 gimple_seq_add_stmt_without_update (stmts, stmt);
2166 return var;
2169 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2170 FCODE is the name of the builtin. Returns true if the call was
simplified, false otherwise. */
2172 static bool
2173 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2175 gimple *stmt = gsi_stmt (*gsi);
2176 tree callee = gimple_call_fndecl (stmt);
2177 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2179 tree type = integer_type_node;
2180 tree str1 = gimple_call_arg (stmt, 0);
2181 tree str2 = gimple_call_arg (stmt, 1);
2182 tree lhs = gimple_call_lhs (stmt);
/* LENGTH is the strncmp/strncasecmp bound; -1 means the bound is
   unknown (non-constant or the builtin takes only two arguments). */
2183 HOST_WIDE_INT length = -1;
2185 /* Handle strncmp and strncasecmp functions. */
2186 if (gimple_call_num_args (stmt) == 3)
2188 tree len = gimple_call_arg (stmt, 2);
2189 if (tree_fits_uhwi_p (len))
2190 length = tree_to_uhwi (len);
2193 /* If the LEN parameter is zero, return zero. */
2194 if (length == 0)
2196 replace_call_with_value (gsi, integer_zero_node);
2197 return true;
2200 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2201 if (operand_equal_p (str1, str2, 0))
2203 replace_call_with_value (gsi, integer_zero_node);
2204 return true;
2207 const char *p1 = c_getstr (str1);
2208 const char *p2 = c_getstr (str2);
2210 /* For known strings, return an immediate value. */
2211 if (p1 && p2)
2213 int r = 0;
2214 bool known_result = false;
2216 switch (fcode)
2218 case BUILT_IN_STRCMP:
2220 r = strcmp (p1, p2);
2221 known_result = true;
2222 break;
2224 case BUILT_IN_STRNCMP:
2226 if (length == -1)
2227 break;
2228 r = strncmp (p1, p2, length);
2229 known_result = true;
2230 break;
2232 /* Only handleable situation is where the string are equal (result 0),
2233 which is already handled by operand_equal_p case. */
2234 case BUILT_IN_STRCASECMP:
2235 break;
2236 case BUILT_IN_STRNCASECMP:
2238 if (length == -1)
2239 break;
/* A byte-exact strncmp match implies a case-insensitive match too;
   any other result tells us nothing about strncasecmp. */
2240 r = strncmp (p1, p2, length);
2241 if (r == 0)
2242 known_result = true;
2243 break;
2245 default:
2246 gcc_unreachable ();
2249 if (known_result)
2251 replace_call_with_value (gsi, build_cmp_result (type, r));
2252 return true;
/* True when at least the first character is known to participate in
   the comparison (bound >= 1, or no bound at all). */
2256 bool nonzero_length = length >= 1
2257 || fcode == BUILT_IN_STRCMP
2258 || fcode == BUILT_IN_STRCASECMP;
2260 location_t loc = gimple_location (stmt);
2262 /* If the second arg is "", return *(const unsigned char*)arg1. */
2263 if (p2 && *p2 == '\0' && nonzero_length)
2265 gimple_seq stmts = NULL;
2266 tree var = gimple_load_first_char (loc, str1, &stmts);
2267 if (lhs)
2269 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2270 gimple_seq_add_stmt_without_update (&stmts, stmt);
2273 gsi_replace_with_seq_vops (gsi, stmts);
2274 return true;
2277 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2278 if (p1 && *p1 == '\0' && nonzero_length)
2280 gimple_seq stmts = NULL;
2281 tree var = gimple_load_first_char (loc, str2, &stmts);
2283 if (lhs)
2285 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2286 stmt = gimple_build_assign (c, NOP_EXPR, var);
2287 gimple_seq_add_stmt_without_update (&stmts, stmt);
2289 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2290 gimple_seq_add_stmt_without_update (&stmts, stmt);
2293 gsi_replace_with_seq_vops (gsi, stmts);
2294 return true;
2297 /* If len parameter is one, return an expression corresponding to
2298 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2299 if (fcode == BUILT_IN_STRNCMP && length == 1)
2301 gimple_seq stmts = NULL;
2302 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2303 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2305 if (lhs)
2307 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2308 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2309 gimple_seq_add_stmt_without_update (&stmts, convert1);
2311 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2312 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2313 gimple_seq_add_stmt_without_update (&stmts, convert2);
2315 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2316 gimple_seq_add_stmt_without_update (&stmts, stmt);
2319 gsi_replace_with_seq_vops (gsi, stmts);
2320 return true;
2323 /* If length is larger than the length of one constant string,
2324 replace strncmp with corresponding strcmp */
2325 if (fcode == BUILT_IN_STRNCMP
2326 && length > 0
2327 && ((p2 && (size_t) length > strlen (p2))
2328 || (p1 && (size_t) length > strlen (p1))))
2330 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2331 if (!fn)
2332 return false;
2333 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2334 replace_call_with_call_and_fold (gsi, repl);
2335 return true;
2338 return false;
2341 /* Fold a call to the memchr pointed by GSI iterator. */
2343 static bool
2344 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2346 gimple *stmt = gsi_stmt (*gsi);
2347 tree lhs = gimple_call_lhs (stmt);
2348 tree arg1 = gimple_call_arg (stmt, 0);
2349 tree arg2 = gimple_call_arg (stmt, 1);
2350 tree len = gimple_call_arg (stmt, 2);
2352 /* If the LEN parameter is zero, return zero. */
2353 if (integer_zerop (len))
2355 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2356 return true;
2359 char c;
2360 if (TREE_CODE (arg2) != INTEGER_CST
2361 || !tree_fits_uhwi_p (len)
2362 || !target_char_cst_p (arg2, &c))
2363 return false;
2365 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2366 unsigned HOST_WIDE_INT string_length;
2367 const char *p1 = c_getstr (arg1, &string_length);
2369 if (p1)
2371 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2372 if (r == NULL)
2374 if (length <= string_length)
2376 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2377 return true;
2380 else
2382 unsigned HOST_WIDE_INT offset = r - p1;
2383 gimple_seq stmts = NULL;
2384 if (lhs != NULL_TREE)
2386 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2387 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2388 arg1, offset_cst);
2389 gimple_seq_add_stmt_without_update (&stmts, stmt);
2391 else
2392 gimple_seq_add_stmt_without_update (&stmts,
2393 gimple_build_nop ());
2395 gsi_replace_with_seq_vops (gsi, stmts);
2396 return true;
2400 return false;
2403 /* Fold a call to the fputs builtin. ARG0 is the string argument and
2404 ARG1 the stream argument of the call. UNLOCKED is true if this is
2405 actually a call to fputs_unlocked. Returns true if the call was
2406 simplified (removed entirely for a zero-length string, or turned
2407 into a call to fputc or fwrite), false if the original call should
2408 be kept. */
2410 static bool
2411 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2412 tree arg0, tree arg1,
2413 bool unlocked)
2415 gimple *stmt = gsi_stmt (*gsi);
2417 /* If we're using an unlocked function, assume the other unlocked
2418 functions exist explicitly. */
2419 tree const fn_fputc = (unlocked
2420 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2421 : builtin_decl_implicit (BUILT_IN_FPUTC));
2422 tree const fn_fwrite = (unlocked
2423 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2424 : builtin_decl_implicit (BUILT_IN_FWRITE));
2426 /* If the return value is used, don't do the transformation. */
2427 if (gimple_call_lhs (stmt))
2428 return false;
2430 /* Get the length of the string passed to fputs. If the length
2431 can't be determined, punt. */
2432 tree len = get_maxval_strlen (arg0, 0);
2433 if (!len
2434 || TREE_CODE (len) != INTEGER_CST)
2435 return false;
2437 switch (compare_tree_int (len, 1))
2439 case -1: /* length is 0, delete the call entirely . */
2440 replace_call_with_value (gsi, integer_zero_node);
2441 return true;
2443 case 0: /* length is 1, call fputc. */
2445 const char *p = c_getstr (arg0);
2446 if (p != NULL)
2448 if (!fn_fputc)
2449 return false;
2451 gimple *repl = gimple_build_call (fn_fputc, 2,
2452 build_int_cst
2453 (integer_type_node, p[0]), arg1);
2454 replace_call_with_call_and_fold (gsi, repl);
2455 return true;
/* String contents unknown (only the length is) -- fall through and
   treat it like the longer-string case, i.e. use fwrite. */
2458 /* FALLTHROUGH */
2459 case 1: /* length is greater than 1, call fwrite. */
2461 /* If optimizing for size keep fputs. */
2462 if (optimize_function_for_size_p (cfun))
2463 return false;
2464 /* New argument list transforming fputs(string, stream) to
2465 fwrite(string, 1, len, stream). */
2466 if (!fn_fwrite)
2467 return false;
2469 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2470 size_one_node, len, arg1);
2471 replace_call_with_call_and_fold (gsi, repl);
2472 return true;
2474 default:
2475 gcc_unreachable ();
2477 return false;
2480 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2481 DEST, SRC, LEN, and SIZE are the arguments to the call.
2482 FCODE is the BUILT_IN_* code of the builtin. Returns true if the
2483 call was simplified (into its value, or into the corresponding
2484 unchecked mem* call), false otherwise. */
2486 static bool
2487 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2488 tree dest, tree src, tree len, tree size,
2489 enum built_in_function fcode)
2491 gimple *stmt = gsi_stmt (*gsi);
2492 location_t loc = gimple_location (stmt);
2493 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2494 tree fn;
2496 /* If SRC and DEST are the same (and not volatile), return DEST
2497 (resp. DEST+LEN for __mempcpy_chk). */
2498 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2500 if (fcode != BUILT_IN_MEMMOVE && fcode != BUILT_IN_MEMMOVE_CHK)
2502 tree func = gimple_call_fndecl (stmt);
2504 warning_at (loc, OPT_Wrestrict,
2505 "%qD source argument is the same as destination",
2506 func);
2509 if (fcode != BUILT_IN_MEMPCPY_CHK)
2511 replace_call_with_value (gsi, dest);
2512 return true;
2514 else
2516 gimple_seq stmts = NULL;
2517 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2518 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2519 TREE_TYPE (dest), dest, len);
2520 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2521 replace_call_with_value (gsi, temp);
2522 return true;
2526 if (! tree_fits_uhwi_p (size))
2527 return false;
/* MAXLEN is an upper bound on the possibly non-constant LEN. SIZE of
   all-ones encodes an unknown destination size; in that case the check
   can never fail and the unchecked variant can be used directly. */
2529 tree maxlen = get_maxval_strlen (len, 2);
2530 if (! integer_all_onesp (size))
2532 if (! tree_fits_uhwi_p (len))
2534 /* If LEN is not constant, try MAXLEN too.
2535 For MAXLEN only allow optimizing into non-_ocs function
2536 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2537 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2539 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2541 /* (void) __mempcpy_chk () can be optimized into
2542 (void) __memcpy_chk (). */
2543 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2544 if (!fn)
2545 return false;
2547 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2548 replace_call_with_call_and_fold (gsi, repl);
2549 return true;
2551 return false;
2554 else
2555 maxlen = len;
/* The copy may exceed the destination; keep the checking call. */
2557 if (tree_int_cst_lt (size, maxlen))
2558 return false;
2561 fn = NULL_TREE;
2562 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2563 mem{cpy,pcpy,move,set} is available. */
2564 switch (fcode)
2566 case BUILT_IN_MEMCPY_CHK:
2567 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2568 break;
2569 case BUILT_IN_MEMPCPY_CHK:
2570 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2571 break;
2572 case BUILT_IN_MEMMOVE_CHK:
2573 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2574 break;
2575 case BUILT_IN_MEMSET_CHK:
2576 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2577 break;
2578 default:
2579 break;
2582 if (!fn)
2583 return false;
2585 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2586 replace_call_with_call_and_fold (gsi, repl);
2587 return true;
2590 /* Fold a call to the __st[rp]cpy_chk builtin.
2591 DEST, SRC, and SIZE are the arguments to the call.
2592 FCODE is the BUILT_IN_* code of the builtin. Returns true if the
2593 call was simplified (into DEST, a _chk sibling, or the unchecked
2594 st[rp]cpy), false otherwise. */
2596 static bool
2597 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2598 tree dest,
2599 tree src, tree size,
2600 enum built_in_function fcode)
2602 gimple *stmt = gsi_stmt (*gsi);
2603 location_t loc = gimple_location (stmt);
2604 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2605 tree len, fn;
2607 /* If SRC and DEST are the same (and not volatile), return DEST. */
2608 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2610 tree func = gimple_call_fndecl (stmt);
2612 warning_at (loc, OPT_Wrestrict,
2613 "%qD source argument is the same as destination",
2614 func);
2616 replace_call_with_value (gsi, dest);
2617 return true;
2620 if (! tree_fits_uhwi_p (size))
2621 return false;
/* MAXLEN is an upper bound on the length of SRC. SIZE of all-ones
   encodes an unknown destination size, in which case the check cannot
   fail and the unchecked variant is used directly at the end. */
2623 tree maxlen = get_maxval_strlen (src, 1);
2624 if (! integer_all_onesp (size))
2626 len = c_strlen (src, 1);
2627 if (! len || ! tree_fits_uhwi_p (len))
2629 /* If LEN is not constant, try MAXLEN too.
2630 For MAXLEN only allow optimizing into non-_ocs function
2631 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2632 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2634 if (fcode == BUILT_IN_STPCPY_CHK)
2636 if (! ignore)
2637 return false;
2639 /* If return value of __stpcpy_chk is ignored,
2640 optimize into __strcpy_chk. */
2641 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK)
2642 if (!fn)
2643 return false;
2645 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2646 replace_call_with_call_and_fold (gsi, repl);
2647 return true;
2650 if (! len || TREE_SIDE_EFFECTS (len))
2651 return false;
2653 /* If c_strlen returned something, but not a constant,
2654 transform __strcpy_chk into __memcpy_chk. */
2655 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2656 if (!fn)
2657 return false;
/* Copy LEN + 1 bytes to include the terminating NUL. */
2659 gimple_seq stmts = NULL;
2660 len = gimple_convert (&stmts, loc, size_type_node, len);
2661 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2662 build_int_cst (size_type_node, 1));
2663 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2664 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2665 replace_call_with_call_and_fold (gsi, repl);
2666 return true;
2669 else
2670 maxlen = len;
/* The string (plus NUL) may not fit; keep the checking call. */
2672 if (! tree_int_cst_lt (maxlen, size))
2673 return false;
2676 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2677 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2678 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2679 if (!fn)
2680 return false;
2682 gimple *repl = gimple_build_call (fn, 2, dest, src);
2683 replace_call_with_call_and_fold (gsi, repl);
2684 return true;
2687 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2688 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2689 length passed as third argument. IGNORE is true if return value can be
2690 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2692 static bool
2693 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2694 tree dest, tree src,
2695 tree len, tree size,
2696 enum built_in_function fcode)
2698 gimple *stmt = gsi_stmt (*gsi);
2699 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2700 tree fn;
2702 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2704 /* If return value of __stpncpy_chk is ignored,
2705 optimize into __strncpy_chk. */
2706 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2707 if (fn)
2709 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2710 replace_call_with_call_and_fold (gsi, repl);
2711 return true;
2715 if (! tree_fits_uhwi_p (size))
2716 return false;
2718 tree maxlen = get_maxval_strlen (len, 2);
2719 if (! integer_all_onesp (size))
2721 if (! tree_fits_uhwi_p (len))
2723 /* If LEN is not constant, try MAXLEN too.
2724 For MAXLEN only allow optimizing into non-_ocs function
2725 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2726 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2727 return false;
2729 else
2730 maxlen = len;
2732 if (tree_int_cst_lt (size, maxlen))
2733 return false;
2736 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2737 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2738 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2739 if (!fn)
2740 return false;
2742 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2743 replace_call_with_call_and_fold (gsi, repl);
2744 return true;
2747 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2748 Returns true if the call was simplified, false otherwise. */
2750 static bool
2751 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2753 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2754 location_t loc = gimple_location (stmt);
2755 tree dest = gimple_call_arg (stmt, 0);
2756 tree src = gimple_call_arg (stmt, 1);
2757 tree fn, len, lenp1;
2759 /* If the result is unused, replace stpcpy with strcpy. */
2760 if (gimple_call_lhs (stmt) == NULL_TREE)
2762 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2763 if (!fn)
2764 return false;
2765 gimple_call_set_fndecl (stmt, fn);
2766 fold_stmt (gsi);
2767 return true;
2770 len = c_strlen (src, 1);
2771 if (!len
2772 || TREE_CODE (len) != INTEGER_CST)
2773 return false;
2775 if (optimize_function_for_size_p (cfun)
2776 /* If length is zero it's small enough. */
2777 && !integer_zerop (len))
2778 return false;
2780 /* If the source has a known length replace stpcpy with memcpy. */
2781 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2782 if (!fn)
2783 return false;
/* Copy LEN + 1 bytes so the terminating NUL is included. */
2785 gimple_seq stmts = NULL;
2786 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2787 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2788 tem, build_int_cst (size_type_node, 1));
2789 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2790 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
/* The memcpy call is inserted manually (not via the replace helpers),
   so transfer the virtual operands from the original call by hand and
   fix up the vdef's defining statement. */
2791 gimple_set_vuse (repl, gimple_vuse (stmt));
2792 gimple_set_vdef (repl, gimple_vdef (stmt));
2793 if (gimple_vdef (repl)
2794 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2795 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2796 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2797 /* Replace the result with dest + len. */
2798 stmts = NULL;
2799 tem = gimple_convert (&stmts, loc, sizetype, len);
2800 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2801 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2802 POINTER_PLUS_EXPR, dest, tem);
2803 gsi_replace (gsi, ret, false);
2804 /* Finally fold the memcpy call. */
2805 gimple_stmt_iterator gsi2 = *gsi;
2806 gsi_prev (&gsi2);
2807 fold_stmt (&gsi2);
2808 return true;
2811 /* Fold a call to __{,v}snprintf_chk pointed to by GSI into the
2812 corresponding {,v}snprintf call when the object-size check can be
2813 proven unnecessary. FCODE is either BUILT_IN_SNPRINTF_CHK or
2814 BUILT_IN_VSNPRINTF_CHK. Returns true if the call was simplified,
2815 false if the original checking call should be kept. */
2817 static bool
2818 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2819 enum built_in_function fcode)
2821 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2822 tree dest, size, len, fn, fmt, flag;
2823 const char *fmt_str;
2825 /* Verify the required arguments in the original call. */
2826 if (gimple_call_num_args (stmt) < 5)
2827 return false;
2829 dest = gimple_call_arg (stmt, 0);
2830 len = gimple_call_arg (stmt, 1);
2831 flag = gimple_call_arg (stmt, 2);
2832 size = gimple_call_arg (stmt, 3);
2833 fmt = gimple_call_arg (stmt, 4);
2835 if (! tree_fits_uhwi_p (size))
2836 return false;
/* SIZE of all-ones encodes an unknown destination size; in that case
   the check cannot fail and no length comparison is needed. */
2838 if (! integer_all_onesp (size))
2840 tree maxlen = get_maxval_strlen (len, 2);
2841 if (! tree_fits_uhwi_p (len))
2843 /* If LEN is not constant, try MAXLEN too.
2844 For MAXLEN only allow optimizing into non-_ocs function
2845 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2846 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2847 return false;
2849 else
2850 maxlen = len;
2852 if (tree_int_cst_lt (size, maxlen))
2853 return false;
2856 if (!init_target_chars ())
2857 return false;
2859 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2860 or if format doesn't contain % chars or is "%s". */
2861 if (! integer_zerop (flag))
2863 fmt_str = c_getstr (fmt);
2864 if (fmt_str == NULL)
2865 return false;
2866 if (strchr (fmt_str, target_percent) != NULL
2867 && strcmp (fmt_str, target_percent_s))
2868 return false;
2871 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2872 available. */
2873 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2874 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2875 if (!fn)
2876 return false;
2878 /* Replace the called function and the first 5 argument by 3 retaining
2879 trailing varargs. */
2880 gimple_call_set_fndecl (stmt, fn);
2881 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2882 gimple_call_set_arg (stmt, 0, dest);
2883 gimple_call_set_arg (stmt, 1, len);
2884 gimple_call_set_arg (stmt, 2, fmt);
/* Shift the variadic arguments down over the dropped FLAG and SIZE
   operands, then shrink the statement by two operands. */
2885 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2886 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2887 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2888 fold_stmt (gsi);
2889 return true;
2892 /* Fold a call to __{,v}sprintf_chk pointed to by GSI into the
2893 corresponding {,v}sprintf call when the object-size check can be
2894 proven unnecessary. FCODE is either BUILT_IN_SPRINTF_CHK
2895 or BUILT_IN_VSPRINTF_CHK. Returns true if simplified. */
2897 static bool
2898 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2899 enum built_in_function fcode)
2901 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2902 tree dest, size, len, fn, fmt, flag;
2903 const char *fmt_str;
2904 unsigned nargs = gimple_call_num_args (stmt);
2906 /* Verify the required arguments in the original call. */
2907 if (nargs < 4)
2908 return false;
2909 dest = gimple_call_arg (stmt, 0);
2910 flag = gimple_call_arg (stmt, 1);
2911 size = gimple_call_arg (stmt, 2);
2912 fmt = gimple_call_arg (stmt, 3);
2914 if (! tree_fits_uhwi_p (size))
2915 return false;
/* LEN, when determinable below, is the length of the output string. */
2917 len = NULL_TREE;
2919 if (!init_target_chars ())
2920 return false;
2922 /* Check whether the format is a literal string constant. */
2923 fmt_str = c_getstr (fmt);
2924 if (fmt_str != NULL)
2926 /* If the format doesn't contain % args or %%, we know the size. */
2927 if (strchr (fmt_str, target_percent) == 0)
2929 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2930 len = build_int_cstu (size_type_node, strlen (fmt_str));
2932 /* If the format is "%s" and first ... argument is a string literal,
2933 we know the size too. */
2934 else if (fcode == BUILT_IN_SPRINTF_CHK
2935 && strcmp (fmt_str, target_percent_s) == 0)
2937 tree arg;
2939 if (nargs == 5)
2941 arg = gimple_call_arg (stmt, 4);
2942 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2944 len = c_strlen (arg, 1);
2945 if (! len || ! tree_fits_uhwi_p (len))
2946 len = NULL_TREE;
/* With a known destination size, only fold when the output is proven
   to fit (SIZE of all-ones encodes an unknown size). */
2952 if (! integer_all_onesp (size))
2954 if (! len || ! tree_int_cst_lt (len, size))
2955 return false;
2958 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2959 or if format doesn't contain % chars or is "%s". */
2960 if (! integer_zerop (flag))
2962 if (fmt_str == NULL)
2963 return false;
2964 if (strchr (fmt_str, target_percent) != NULL
2965 && strcmp (fmt_str, target_percent_s))
2966 return false;
2969 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2970 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2971 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2972 if (!fn)
2973 return false;
2975 /* Replace the called function and the first 4 argument by 2 retaining
2976 trailing varargs. */
2977 gimple_call_set_fndecl (stmt, fn);
2978 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2979 gimple_call_set_arg (stmt, 0, dest);
2980 gimple_call_set_arg (stmt, 1, fmt);
/* Shift the variadic arguments down over the dropped FLAG and SIZE
   operands, then shrink the statement by two operands. */
2981 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2982 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2983 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2984 fold_stmt (gsi);
2985 return true;
2988 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2989 ORIG may be null if this is a 2-argument call. We don't attempt to
2990 simplify calls with more than 3 arguments.
2992 Return true if simplification was possible, otherwise false. */
2994 bool
2995 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2997 gimple *stmt = gsi_stmt (*gsi);
2998 tree dest = gimple_call_arg (stmt, 0);
2999 tree fmt = gimple_call_arg (stmt, 1);
3000 tree orig = NULL_TREE;
3001 const char *fmt_str = NULL;
3003 /* Verify the required arguments in the original call. We deal with two
3004 types of sprintf() calls: 'sprintf (str, fmt)' and
3005 'sprintf (dest, "%s", orig)'. */
3006 if (gimple_call_num_args (stmt) > 3)
3007 return false;
3009 if (gimple_call_num_args (stmt) == 3)
3010 orig = gimple_call_arg (stmt, 2);
3012 /* Check whether the format is a literal string constant. */
3013 fmt_str = c_getstr (fmt);
3014 if (fmt_str == NULL)
3015 return false;
3017 if (!init_target_chars ())
3018 return false;
3020 /* If the format doesn't contain % args or %%, use strcpy. */
3021 if (strchr (fmt_str, target_percent) == NULL)
3023 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3025 if (!fn)
3026 return false;
3028 /* Don't optimize sprintf (buf, "abc", ptr++). */
3029 if (orig)
3030 return false;
3032 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3033 'format' is known to contain no % formats. */
3034 gimple_seq stmts = NULL;
3035 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3036 gimple_seq_add_stmt_without_update (&stmts, repl);
3037 if (gimple_call_lhs (stmt))
/* sprintf returns the number of characters written; with a constant
   format that is simply strlen (fmt). */
3039 repl = gimple_build_assign (gimple_call_lhs (stmt),
3040 build_int_cst (integer_type_node,
3041 strlen (fmt_str)));
3042 gimple_seq_add_stmt_without_update (&stmts, repl);
3043 gsi_replace_with_seq_vops (gsi, stmts);
3044 /* gsi now points at the assignment to the lhs, get a
3045 stmt iterator to the memcpy call.
3046 ??? We can't use gsi_for_stmt as that doesn't work when the
3047 CFG isn't built yet. */
3048 gimple_stmt_iterator gsi2 = *gsi;
3049 gsi_prev (&gsi2);
3050 fold_stmt (&gsi2);
3052 else
3054 gsi_replace_with_seq_vops (gsi, stmts);
3055 fold_stmt (gsi);
3057 return true;
3060 /* If the format is "%s", use strcpy if the result isn't used. */
3061 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3063 tree fn;
3064 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3066 if (!fn)
3067 return false;
3069 /* Don't crash on sprintf (str1, "%s"). */
3070 if (!orig)
3071 return false;
/* The return value is the length of ORIG; it must be known when the
   result of the call is used. */
3073 tree orig_len = NULL_TREE;
3074 if (gimple_call_lhs (stmt))
3076 orig_len = get_maxval_strlen (orig, 0);
3077 if (!orig_len)
3078 return false;
3081 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3082 gimple_seq stmts = NULL;
3083 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3084 gimple_seq_add_stmt_without_update (&stmts, repl);
3085 if (gimple_call_lhs (stmt))
3087 if (!useless_type_conversion_p (integer_type_node,
3088 TREE_TYPE (orig_len)))
3089 orig_len = fold_convert (integer_type_node, orig_len);
3090 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3091 gimple_seq_add_stmt_without_update (&stmts, repl);
3092 gsi_replace_with_seq_vops (gsi, stmts);
3093 /* gsi now points at the assignment to the lhs, get a
3094 stmt iterator to the memcpy call.
3095 ??? We can't use gsi_for_stmt as that doesn't work when the
3096 CFG isn't built yet. */
3097 gimple_stmt_iterator gsi2 = *gsi;
3098 gsi_prev (&gsi2);
3099 fold_stmt (&gsi2);
3101 else
3103 gsi_replace_with_seq_vops (gsi, stmts);
3104 fold_stmt (gsi);
3106 return true;
3108 return false;
3111 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3112 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3113 attempt to simplify calls with more than 4 arguments.
3115 Return true if simplification was possible, otherwise false. */
3117 bool
3118 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3120 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3121 tree dest = gimple_call_arg (stmt, 0);
3122 tree destsize = gimple_call_arg (stmt, 1);
3123 tree fmt = gimple_call_arg (stmt, 2);
3124 tree orig = NULL_TREE;
3125 const char *fmt_str = NULL;
3127 if (gimple_call_num_args (stmt) > 4)
3128 return false;
3130 if (gimple_call_num_args (stmt) == 4)
3131 orig = gimple_call_arg (stmt, 3);
/* The destination size must be a known constant so we can prove the
   output fits (and hence that no truncation occurs). */
3133 if (!tree_fits_uhwi_p (destsize))
3134 return false;
3135 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3137 /* Check whether the format is a literal string constant. */
3138 fmt_str = c_getstr (fmt);
3139 if (fmt_str == NULL)
3140 return false;
3142 if (!init_target_chars ())
3143 return false;
3145 /* If the format doesn't contain % args or %%, use strcpy. */
3146 if (strchr (fmt_str, target_percent) == NULL)
3148 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3149 if (!fn)
3150 return false;
3152 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3153 if (orig)
3154 return false;
3156 /* We could expand this as
3157 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3158 or to
3159 memcpy (str, fmt_with_nul_at_cstm1, cst);
3160 but in the former case that might increase code size
3161 and in the latter case grow .rodata section too much.
3162 So punt for now. */
3163 size_t len = strlen (fmt_str);
3164 if (len >= destlen)
3165 return false;
3167 gimple_seq stmts = NULL;
3168 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3169 gimple_seq_add_stmt_without_update (&stmts, repl);
3170 if (gimple_call_lhs (stmt))
/* snprintf returns the number of characters that would have been
   written; since the output fits, that is strlen (fmt). */
3172 repl = gimple_build_assign (gimple_call_lhs (stmt),
3173 build_int_cst (integer_type_node, len));
3174 gimple_seq_add_stmt_without_update (&stmts, repl);
3175 gsi_replace_with_seq_vops (gsi, stmts);
3176 /* gsi now points at the assignment to the lhs, get a
3177 stmt iterator to the memcpy call.
3178 ??? We can't use gsi_for_stmt as that doesn't work when the
3179 CFG isn't built yet. */
3180 gimple_stmt_iterator gsi2 = *gsi;
3181 gsi_prev (&gsi2);
3182 fold_stmt (&gsi2);
3184 else
3186 gsi_replace_with_seq_vops (gsi, stmts);
3187 fold_stmt (gsi);
3189 return true;
3192 /* If the format is "%s", use strcpy if the result isn't used. */
3193 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3195 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3196 if (!fn)
3197 return false;
3199 /* Don't crash on snprintf (str1, cst, "%s"). */
3200 if (!orig)
3201 return false;
3203 tree orig_len = get_maxval_strlen (orig, 0);
3204 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3205 return false;
3207 /* We could expand this as
3208 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3209 or to
3210 memcpy (str1, str2_with_nul_at_cstm1, cst);
3211 but in the former case that might increase code size
3212 and in the latter case grow .rodata section too much.
3213 So punt for now. */
3214 if (compare_tree_int (orig_len, destlen) >= 0)
3215 return false;
3217 /* Convert snprintf (str1, cst, "%s", str2) into
3218 strcpy (str1, str2) if strlen (str2) < cst. */
3219 gimple_seq stmts = NULL;
3220 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3221 gimple_seq_add_stmt_without_update (&stmts, repl);
3222 if (gimple_call_lhs (stmt))
3224 if (!useless_type_conversion_p (integer_type_node,
3225 TREE_TYPE (orig_len)))
3226 orig_len = fold_convert (integer_type_node, orig_len);
3227 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3228 gimple_seq_add_stmt_without_update (&stmts, repl);
3229 gsi_replace_with_seq_vops (gsi, stmts);
3230 /* gsi now points at the assignment to the lhs, get a
3231 stmt iterator to the memcpy call.
3232 ??? We can't use gsi_for_stmt as that doesn't work when the
3233 CFG isn't built yet. */
3234 gimple_stmt_iterator gsi2 = *gsi;
3235 gsi_prev (&gsi2);
3236 fold_stmt (&gsi2);
3238 else
3240 gsi_replace_with_seq_vops (gsi, stmts);
3241 fold_stmt (gsi);
3243 return true;
3245 return false;
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if the call was simplified (replaced in the IL), false
   otherwise.  FCODE is the BUILT_IN_* code of the function to be
   simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Trailing arguments would be discarded; only the va_list variants
	 may legitimately carry one here.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if the call was simplified (replaced in the IL), false
   otherwise.  FCODE is the BUILT_IN_* code of the function to be
   simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle both printf ("%s", str-constant) and printf (str-with-no-%);
     in both cases STR below is the literal string actually printed.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Fold a call to __builtin_strlen at *GSI.  If the argument's length can
   be determined to be a single constant, replace the call with that value
   and return true.  Otherwise record the computed [min, max] length range
   on the call's SSA lhs (if any) and return false.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  wide_int minlen;
  wide_int maxlen;

  tree lenrange[2];
  /* NOTE(review): get_range_strlen's return is deliberately negated here;
     presumably false means LENRANGE holds a usable range — confirm against
     its definition before changing this condition.  */
  if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange)
      && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
      && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lenrange[0]);
      maxlen = wi::to_wide (lenrange[1]);
    }
  else
    {
      /* No usable range: fall back to [0, max-object-size - 2], the
	 widest length range any string can have.  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* A degenerate range means the length is a known constant;
	 substitute it for the call.  */
      lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
					      true, GSI_SAME_STMT);
      replace_call_with_value (gsi, lenrange[0]);
      return true;
    }

  /* Otherwise just export the range to the value-range machinery via
     the call's lhs.  */
  tree lhs = gimple_call_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    set_range_info (lhs, VR_RANGE, minlen, maxlen);

  return false;
}
3559 /* Fold a call to __builtin_acc_on_device. */
3561 static bool
3562 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3564 /* Defer folding until we know which compiler we're in. */
3565 if (symtab->state != EXPANSION)
3566 return false;
3568 unsigned val_host = GOMP_DEVICE_HOST;
3569 unsigned val_dev = GOMP_DEVICE_NONE;
3571 #ifdef ACCEL_COMPILER
3572 val_host = GOMP_DEVICE_NOT_HOST;
3573 val_dev = ACCEL_COMPILER_acc_device;
3574 #endif
3576 location_t loc = gimple_location (gsi_stmt (*gsi));
3578 tree host_eq = make_ssa_name (boolean_type_node);
3579 gimple *host_ass = gimple_build_assign
3580 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3581 gimple_set_location (host_ass, loc);
3582 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3584 tree dev_eq = make_ssa_name (boolean_type_node);
3585 gimple *dev_ass = gimple_build_assign
3586 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3587 gimple_set_location (dev_ass, loc);
3588 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3590 tree result = make_ssa_name (boolean_type_node);
3591 gimple *result_ass = gimple_build_assign
3592 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3593 gimple_set_location (result_ass, loc);
3594 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3596 replace_call_with_value (gsi, result);
3598 return true;
3601 /* Fold realloc (0, n) -> malloc (n). */
3603 static bool
3604 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3606 gimple *stmt = gsi_stmt (*gsi);
3607 tree arg = gimple_call_arg (stmt, 0);
3608 tree size = gimple_call_arg (stmt, 1);
3610 if (operand_equal_p (arg, null_pointer_node, 0))
3612 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3613 if (fn_malloc)
3615 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3616 replace_call_with_call_and_fold (gsi, repl);
3617 return true;
3620 return false;
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  /* Dispatch to the dedicated folder for each builtin we know how to
     simplify; each helper returns true iff it replaced the call.  */
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    /* The last argument below distinguishes memcpy (0), mempcpy (1)
       and memmove (3) semantics inside the shared helper.  */
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the legacy BSD spellings of strchr/strrchr; the
       boolean argument selects forward (false) or reverse (true) search.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* The printf family only gets folded for the argument counts the
       helpers understand; the _CHK variants carry extra leading
       arguments, hence the shifted argument indices below.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
3802 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3803 function calls to constants, where possible. */
3805 static tree
3806 fold_internal_goacc_dim (const gimple *call)
3808 int axis = oacc_get_ifn_dim_arg (call);
3809 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3810 tree result = NULL_TREE;
3811 tree type = TREE_TYPE (gimple_call_lhs (call));
3813 switch (gimple_call_internal_fn (call))
3815 case IFN_GOACC_DIM_POS:
3816 /* If the size is 1, we know the answer. */
3817 if (size == 1)
3818 result = build_int_cst (type, 0);
3819 break;
3820 case IFN_GOACC_DIM_SIZE:
3821 /* If the size is not dynamic, we know the answer. */
3822 if (size)
3823 result = build_int_cst (type, size);
3824 break;
3825 default:
3826 break;
3829 return result;
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Cheap structural checks first; gimple_call_builtin_p must pass before
     DECL_FUNCTION_CODE on the fndecl below is meaningful.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The second argument must be the address of a local variable.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The weak argument must be a literal 0 or 1.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* ITYPE is the type of the builtin's third parameter, i.e. the
     integral type the hardware compare-and-swap operates on.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  /* The target must support compare-and-swap in this mode.  */
  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  /* ITYPE is the integral type the CAS operates on (third parameter of
     the builtin); the internal fn returns a complex of that type.  */
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* The variable whose address was passed as the "expected" argument;
     optimize_atomic_compare_exchange_p guaranteed this shape.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the current value of EXPECTED into an SSA name so it can be
     passed by value to the internal function.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Remember where to restore the iterator when we are done.  */
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weak-ness in bit 8 and the operand size in the low byte,
     matching the w * 256 + N scheme described above.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  /* Transfer the virtual operands from the original call.  */
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (stmt))
    {
      /* If the call may throw, the follow-up statements must go on the
	 fallthru edge rather than straight after the call.  */
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave the iterator pointing at the first inserted statement so the
     caller refolds everything from there.  */
  *gsi = gsiret;
}
3988 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3989 doesn't fit into TYPE. The test for overflow should be regardless of
3990 -fwrapv, and even for unsigned types. */
3992 bool
3993 arith_overflowed_p (enum tree_code code, const_tree type,
3994 const_tree arg0, const_tree arg1)
3996 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3997 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3998 widest2_int_cst;
3999 widest2_int warg0 = widest2_int_cst (arg0);
4000 widest2_int warg1 = widest2_int_cst (arg1);
4001 widest2_int wres;
4002 switch (code)
4004 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4005 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4006 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4007 default: gcc_unreachable ();
4009 signop sign = TYPE_SIGN (type);
4010 if (sign == UNSIGNED && wi::neg_p (wres))
4011 return true;
4012 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4015 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4016 The statement may be replaced by another statement, e.g., if the call
4017 simplifies to a constant value. Return true if any changes were made.
4018 It is assumed that the operands have been previously folded. */
4020 static bool
4021 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4023 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4024 tree callee;
4025 bool changed = false;
4026 unsigned i;
4028 /* Fold *& in call arguments. */
4029 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4030 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4032 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4033 if (tmp)
4035 gimple_call_set_arg (stmt, i, tmp);
4036 changed = true;
4040 /* Check for virtual calls that became direct calls. */
4041 callee = gimple_call_fn (stmt);
4042 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4044 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4046 if (dump_file && virtual_method_call_p (callee)
4047 && !possible_polymorphic_call_target_p
4048 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4049 (OBJ_TYPE_REF_EXPR (callee)))))
4051 fprintf (dump_file,
4052 "Type inheritance inconsistent devirtualization of ");
4053 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4054 fprintf (dump_file, " to ");
4055 print_generic_expr (dump_file, callee, TDF_SLIM);
4056 fprintf (dump_file, "\n");
4059 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4060 changed = true;
4062 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4064 bool final;
4065 vec <cgraph_node *>targets
4066 = possible_polymorphic_call_targets (callee, stmt, &final);
4067 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4069 tree lhs = gimple_call_lhs (stmt);
4070 if (dump_enabled_p ())
4072 location_t loc = gimple_location_safe (stmt);
4073 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4074 "folding virtual function call to %s\n",
4075 targets.length () == 1
4076 ? targets[0]->name ()
4077 : "__builtin_unreachable");
4079 if (targets.length () == 1)
4081 tree fndecl = targets[0]->decl;
4082 gimple_call_set_fndecl (stmt, fndecl);
4083 changed = true;
4084 /* If changing the call to __cxa_pure_virtual
4085 or similar noreturn function, adjust gimple_call_fntype
4086 too. */
4087 if (gimple_call_noreturn_p (stmt)
4088 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4089 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4090 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4091 == void_type_node))
4092 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4093 /* If the call becomes noreturn, remove the lhs. */
4094 if (lhs
4095 && gimple_call_noreturn_p (stmt)
4096 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4097 || should_remove_lhs_p (lhs)))
4099 if (TREE_CODE (lhs) == SSA_NAME)
4101 tree var = create_tmp_var (TREE_TYPE (lhs));
4102 tree def = get_or_create_ssa_default_def (cfun, var);
4103 gimple *new_stmt = gimple_build_assign (lhs, def);
4104 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4106 gimple_call_set_lhs (stmt, NULL_TREE);
4108 maybe_remove_unused_call_args (cfun, stmt);
4110 else
4112 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4113 gimple *new_stmt = gimple_build_call (fndecl, 0);
4114 gimple_set_location (new_stmt, gimple_location (stmt));
4115 /* If the call had a SSA name as lhs morph that into
4116 an uninitialized value. */
4117 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4119 tree var = create_tmp_var (TREE_TYPE (lhs));
4120 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4121 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4122 set_ssa_default_def (cfun, var, lhs);
4124 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4125 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4126 gsi_replace (gsi, new_stmt, false);
4127 return true;
4133 /* Check for indirect calls that became direct calls, and then
4134 no longer require a static chain. */
4135 if (gimple_call_chain (stmt))
4137 tree fn = gimple_call_fndecl (stmt);
4138 if (fn && !DECL_STATIC_CHAIN (fn))
4140 gimple_call_set_chain (stmt, NULL);
4141 changed = true;
4143 else
4145 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4146 if (tmp)
4148 gimple_call_set_chain (stmt, tmp);
4149 changed = true;
4154 if (inplace)
4155 return changed;
4157 /* Check for builtins that CCP can handle using information not
4158 available in the generic fold routines. */
4159 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4161 if (gimple_fold_builtin (gsi))
4162 changed = true;
4164 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4166 changed |= targetm.gimple_fold_builtin (gsi);
4168 else if (gimple_call_internal_p (stmt))
4170 enum tree_code subcode = ERROR_MARK;
4171 tree result = NULL_TREE;
4172 bool cplx_result = false;
4173 tree overflow = NULL_TREE;
4174 switch (gimple_call_internal_fn (stmt))
4176 case IFN_BUILTIN_EXPECT:
4177 result = fold_builtin_expect (gimple_location (stmt),
4178 gimple_call_arg (stmt, 0),
4179 gimple_call_arg (stmt, 1),
4180 gimple_call_arg (stmt, 2));
4181 break;
4182 case IFN_UBSAN_OBJECT_SIZE:
4184 tree offset = gimple_call_arg (stmt, 1);
4185 tree objsize = gimple_call_arg (stmt, 2);
4186 if (integer_all_onesp (objsize)
4187 || (TREE_CODE (offset) == INTEGER_CST
4188 && TREE_CODE (objsize) == INTEGER_CST
4189 && tree_int_cst_le (offset, objsize)))
4191 replace_call_with_value (gsi, NULL_TREE);
4192 return true;
4195 break;
4196 case IFN_UBSAN_PTR:
4197 if (integer_zerop (gimple_call_arg (stmt, 1)))
4199 replace_call_with_value (gsi, NULL_TREE);
4200 return true;
4202 break;
4203 case IFN_UBSAN_BOUNDS:
4205 tree index = gimple_call_arg (stmt, 1);
4206 tree bound = gimple_call_arg (stmt, 2);
4207 if (TREE_CODE (index) == INTEGER_CST
4208 && TREE_CODE (bound) == INTEGER_CST)
4210 index = fold_convert (TREE_TYPE (bound), index);
4211 if (TREE_CODE (index) == INTEGER_CST
4212 && tree_int_cst_le (index, bound))
4214 replace_call_with_value (gsi, NULL_TREE);
4215 return true;
4219 break;
4220 case IFN_GOACC_DIM_SIZE:
4221 case IFN_GOACC_DIM_POS:
4222 result = fold_internal_goacc_dim (stmt);
4223 break;
4224 case IFN_UBSAN_CHECK_ADD:
4225 subcode = PLUS_EXPR;
4226 break;
4227 case IFN_UBSAN_CHECK_SUB:
4228 subcode = MINUS_EXPR;
4229 break;
4230 case IFN_UBSAN_CHECK_MUL:
4231 subcode = MULT_EXPR;
4232 break;
4233 case IFN_ADD_OVERFLOW:
4234 subcode = PLUS_EXPR;
4235 cplx_result = true;
4236 break;
4237 case IFN_SUB_OVERFLOW:
4238 subcode = MINUS_EXPR;
4239 cplx_result = true;
4240 break;
4241 case IFN_MUL_OVERFLOW:
4242 subcode = MULT_EXPR;
4243 cplx_result = true;
4244 break;
4245 default:
4246 break;
4248 if (subcode != ERROR_MARK)
4250 tree arg0 = gimple_call_arg (stmt, 0);
4251 tree arg1 = gimple_call_arg (stmt, 1);
4252 tree type = TREE_TYPE (arg0);
4253 if (cplx_result)
4255 tree lhs = gimple_call_lhs (stmt);
4256 if (lhs == NULL_TREE)
4257 type = NULL_TREE;
4258 else
4259 type = TREE_TYPE (TREE_TYPE (lhs));
4261 if (type == NULL_TREE)
4263 /* x = y + 0; x = y - 0; x = y * 0; */
4264 else if (integer_zerop (arg1))
4265 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4266 /* x = 0 + y; x = 0 * y; */
4267 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4268 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4269 /* x = y - y; */
4270 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4271 result = integer_zero_node;
4272 /* x = y * 1; x = 1 * y; */
4273 else if (subcode == MULT_EXPR && integer_onep (arg1))
4274 result = arg0;
4275 else if (subcode == MULT_EXPR && integer_onep (arg0))
4276 result = arg1;
4277 else if (TREE_CODE (arg0) == INTEGER_CST
4278 && TREE_CODE (arg1) == INTEGER_CST)
4280 if (cplx_result)
4281 result = int_const_binop (subcode, fold_convert (type, arg0),
4282 fold_convert (type, arg1));
4283 else
4284 result = int_const_binop (subcode, arg0, arg1);
4285 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4287 if (cplx_result)
4288 overflow = build_one_cst (type);
4289 else
4290 result = NULL_TREE;
4293 if (result)
4295 if (result == integer_zero_node)
4296 result = build_zero_cst (type);
4297 else if (cplx_result && TREE_TYPE (result) != type)
4299 if (TREE_CODE (result) == INTEGER_CST)
4301 if (arith_overflowed_p (PLUS_EXPR, type, result,
4302 integer_zero_node))
4303 overflow = build_one_cst (type);
4305 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4306 && TYPE_UNSIGNED (type))
4307 || (TYPE_PRECISION (type)
4308 < (TYPE_PRECISION (TREE_TYPE (result))
4309 + (TYPE_UNSIGNED (TREE_TYPE (result))
4310 && !TYPE_UNSIGNED (type)))))
4311 result = NULL_TREE;
4312 if (result)
4313 result = fold_convert (type, result);
4318 if (result)
4320 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4321 result = drop_tree_overflow (result);
4322 if (cplx_result)
4324 if (overflow == NULL_TREE)
4325 overflow = build_zero_cst (TREE_TYPE (result));
4326 tree ctype = build_complex_type (TREE_TYPE (result));
4327 if (TREE_CODE (result) == INTEGER_CST
4328 && TREE_CODE (overflow) == INTEGER_CST)
4329 result = build_complex (ctype, result, overflow);
4330 else
4331 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4332 ctype, result, overflow);
4334 if (!update_call_from_tree (gsi, result))
4335 gimplify_and_update_call_from_tree (gsi, result);
4336 changed = true;
4340 return changed;
4344 /* Return true whether NAME has a use on STMT. */
4346 static bool
4347 has_use_on_stmt (tree name, gimple *stmt)
4349 imm_use_iterator iter;
4350 use_operand_p use_p;
4351 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4352 if (USE_STMT (use_p) == stmt)
4353 return true;
4354 return false;
4357 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4358 gimple_simplify.
4360 Replaces *GSI with the simplification result in RCODE and OPS
4361 and the associated statements in *SEQ. Does the replacement
4362 according to INPLACE and returns true if the operation succeeded. */
4364 static bool
4365 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4366 code_helper rcode, tree *ops,
4367 gimple_seq *seq, bool inplace)
4369 gimple *stmt = gsi_stmt (*gsi);
4371 /* Play safe and do not allow abnormals to be mentioned in
4372 newly created statements. See also maybe_push_res_to_seq.
4373 As an exception allow such uses if there was a use of the
4374 same SSA name on the old stmt. */
4375 if ((TREE_CODE (ops[0]) == SSA_NAME
4376 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
4377 && !has_use_on_stmt (ops[0], stmt))
4378 || (ops[1]
4379 && TREE_CODE (ops[1]) == SSA_NAME
4380 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
4381 && !has_use_on_stmt (ops[1], stmt))
4382 || (ops[2]
4383 && TREE_CODE (ops[2]) == SSA_NAME
4384 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
4385 && !has_use_on_stmt (ops[2], stmt))
4386 || (COMPARISON_CLASS_P (ops[0])
4387 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4388 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4389 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4390 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4391 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4392 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
4393 return false;
4395 /* Don't insert new statements when INPLACE is true, even if we could
4396 reuse STMT for the final statement. */
4397 if (inplace && !gimple_seq_empty_p (*seq))
4398 return false;
/* A GIMPLE_COND condition may only be replaced by a comparison, an
   SSA name tested against zero, or a constant; anything else has to
   be materialized as a temporary via *SEQ (not allowed inplace).  */
4400 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4402 gcc_assert (rcode.is_tree_code ());
4403 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4404 /* GIMPLE_CONDs condition may not throw. */
4405 && (!flag_exceptions
4406 || !cfun->can_throw_non_call_exceptions
4407 || !operation_could_trap_p (rcode,
4408 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4409 false, NULL_TREE)))
4410 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
4411 else if (rcode == SSA_NAME)
4412 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4413 build_zero_cst (TREE_TYPE (ops[0])));
4414 else if (rcode == INTEGER_CST)
4416 if (integer_zerop (ops[0]))
4417 gimple_cond_make_false (cond_stmt);
4418 else
4419 gimple_cond_make_true (cond_stmt);
4421 else if (!inplace)
4423 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4424 ops, seq);
4425 if (!res)
4426 return false;
4427 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4428 build_zero_cst (TREE_TYPE (res)));
4430 else
4431 return false;
4432 if (dump_file && (dump_flags & TDF_DETAILS))
4434 fprintf (dump_file, "gimple_simplified to ");
4435 if (!gimple_seq_empty_p (*seq))
4436 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4437 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4438 0, TDF_SLIM);
4440 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4441 return true;
/* For an assignment with a tree-code result, rewrite the RHS in place,
   possibly rebuilding a GENERIC operand first.  */
4443 else if (is_gimple_assign (stmt)
4444 && rcode.is_tree_code ())
4446 if (!inplace
4447 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4449 maybe_build_generic_op (rcode,
4450 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4451 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4452 if (dump_file && (dump_flags & TDF_DETAILS))
4454 fprintf (dump_file, "gimple_simplified to ");
4455 if (!gimple_seq_empty_p (*seq))
4456 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4457 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4458 0, TDF_SLIM);
4460 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4461 return true;
/* For a call simplified to the same combined function, only the
   arguments changed; update them in place.  */
4464 else if (rcode.is_fn_code ()
4465 && gimple_call_combined_fn (stmt) == rcode)
4467 unsigned i;
4468 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4470 gcc_assert (ops[i] != NULL_TREE);
4471 gimple_call_set_arg (stmt, i, ops[i]);
4473 if (i < 3)
4474 gcc_assert (ops[i] == NULL_TREE);
4475 if (dump_file && (dump_flags & TDF_DETAILS))
4477 fprintf (dump_file, "gimple_simplified to ");
4478 if (!gimple_seq_empty_p (*seq))
4479 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4480 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4482 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4483 return true;
/* Otherwise (not inplace) push the simplified result onto *SEQ and
   replace the whole statement via gsi_replace_with_seq_vops.  */
4485 else if (!inplace)
4487 if (gimple_has_lhs (stmt))
4489 tree lhs = gimple_get_lhs (stmt);
4490 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4491 ops, seq, lhs))
4492 return false;
4493 if (dump_file && (dump_flags & TDF_DETAILS))
4495 fprintf (dump_file, "gimple_simplified to ");
4496 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4498 gsi_replace_with_seq_vops (gsi, *seq);
4499 return true;
4501 else
4502 gcc_unreachable ();
4505 return false;
4508 /* Canonicalize MEM_REFs invariant address operand after propagation. */
/* T points at the reference tree to canonicalize in place; return true
   iff *T was changed.  */
4510 static bool
4511 maybe_canonicalize_mem_ref_addr (tree *t)
4513 bool res = false;
/* Look through an outer ADDR_EXPR and canonicalize its operand.  */
4515 if (TREE_CODE (*t) == ADDR_EXPR)
4516 t = &TREE_OPERAND (*t, 0);
4518 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4519 generic vector extension. The actual vector referenced is
4520 view-converted to an array type for this purpose. If the index
4521 is constant the canonical representation in the middle-end is a
4522 BIT_FIELD_REF so re-write the former to the latter here. */
4523 if (TREE_CODE (*t) == ARRAY_REF
4524 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4525 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4526 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4528 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4529 if (VECTOR_TYPE_P (vtype))
4531 tree low = array_ref_low_bound (*t);
4532 if (TREE_CODE (low) == INTEGER_CST)
4534 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
/* Compute the bit offset of element (index - low_bound) and only
   rewrite when the access stays within the vector's size.  */
4536 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4537 wi::to_widest (low));
4538 idx = wi::mul (idx, wi::to_widest
4539 (TYPE_SIZE (TREE_TYPE (*t))));
4540 widest_int ext
4541 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4542 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4544 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4545 TREE_TYPE (*t),
4546 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4547 TYPE_SIZE (TREE_TYPE (*t)),
4548 wide_int_to_tree (bitsizetype, idx));
4549 res = true;
/* Strip handled components to get at the base reference.  */
4556 while (handled_component_p (*t))
4557 t = &TREE_OPERAND (*t, 0);
4559 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4560 of invariant addresses into a SSA name MEM_REF address. */
4561 if (TREE_CODE (*t) == MEM_REF
4562 || TREE_CODE (*t) == TARGET_MEM_REF)
4564 tree addr = TREE_OPERAND (*t, 0);
4565 if (TREE_CODE (addr) == ADDR_EXPR
4566 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4567 || handled_component_p (TREE_OPERAND (addr, 0))))
4569 tree base;
4570 poly_int64 coffset;
4571 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4572 &coffset);
4573 if (!base)
4574 gcc_unreachable ();
/* Fold the constant offset into the MEM_REF's offset operand.  */
4576 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4577 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4578 TREE_OPERAND (*t, 1),
4579 size_int (coffset));
4580 res = true;
4582 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4583 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4586 /* Canonicalize back MEM_REFs to plain reference trees if the object
4587 accessed is a decl that has the same access semantics as the MEM_REF. */
4588 if (TREE_CODE (*t) == MEM_REF
4589 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4590 && integer_zerop (TREE_OPERAND (*t, 1))
4591 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4593 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4594 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4595 if (/* Same volatile qualification. */
4596 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4597 /* Same TBAA behavior with -fstrict-aliasing. */
4598 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4599 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4600 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4601 /* Same alignment. */
4602 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4603 /* We have to look out here to not drop a required conversion
4604 from the rhs to the lhs if *t appears on the lhs or vice-versa
4605 if it appears on the rhs. Thus require strict type
4606 compatibility. */
4607 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4609 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4610 res = true;
4614 /* Canonicalize TARGET_MEM_REF in particular with respect to
4615 the indexes becoming constant. */
4616 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4618 tree tem = maybe_fold_tmr (*t);
4619 if (tem)
4621 *t = tem;
4622 res = true;
4626 return res;
4629 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4630 distinguishes both cases. */
/* GSI points at the statement to fold; VALUEIZE is the callback used by
   gimple_simplify to look up SSA operand values (it may decline by
   returning NULL_TREE).  Returns true if the statement changed.  */
4632 static bool
4633 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4635 bool changed = false;
4636 gimple *stmt = gsi_stmt (*gsi);
4637 bool nowarning = gimple_no_warning_p (stmt);
4638 unsigned i;
4639 fold_defer_overflow_warnings ();
4641 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4642 after propagation.
4643 ??? This shouldn't be done in generic folding but in the
4644 propagation helpers which also know whether an address was
4645 propagated.
4646 Also canonicalize operand order. */
4647 switch (gimple_code (stmt))
4649 case GIMPLE_ASSIGN:
4650 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4652 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4653 if ((REFERENCE_CLASS_P (*rhs)
4654 || TREE_CODE (*rhs) == ADDR_EXPR)
4655 && maybe_canonicalize_mem_ref_addr (rhs))
4656 changed = true;
4657 tree *lhs = gimple_assign_lhs_ptr (stmt);
4658 if (REFERENCE_CLASS_P (*lhs)
4659 && maybe_canonicalize_mem_ref_addr (lhs))
4660 changed = true;
4662 else
4664 /* Canonicalize operand order. */
4665 enum tree_code code = gimple_assign_rhs_code (stmt);
4666 if (TREE_CODE_CLASS (code) == tcc_comparison
4667 || commutative_tree_code (code)
4668 || commutative_ternary_tree_code (code))
4670 tree rhs1 = gimple_assign_rhs1 (stmt);
4671 tree rhs2 = gimple_assign_rhs2 (stmt);
4672 if (tree_swap_operands_p (rhs1, rhs2))
4674 gimple_assign_set_rhs1 (stmt, rhs2);
4675 gimple_assign_set_rhs2 (stmt, rhs1);
4676 if (TREE_CODE_CLASS (code) == tcc_comparison)
4677 gimple_assign_set_rhs_code (stmt,
4678 swap_tree_comparison (code));
4679 changed = true;
4683 break;
4684 case GIMPLE_CALL:
/* Canonicalize addresses in call arguments and the call LHS.  */
4686 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4688 tree *arg = gimple_call_arg_ptr (stmt, i);
4689 if (REFERENCE_CLASS_P (*arg)
4690 && maybe_canonicalize_mem_ref_addr (arg))
4691 changed = true;
4693 tree *lhs = gimple_call_lhs_ptr (stmt);
4694 if (*lhs
4695 && REFERENCE_CLASS_P (*lhs)
4696 && maybe_canonicalize_mem_ref_addr (lhs))
4697 changed = true;
4698 break;
4700 case GIMPLE_ASM:
4702 gasm *asm_stmt = as_a <gasm *> (stmt);
4703 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4705 tree link = gimple_asm_output_op (asm_stmt, i);
4706 tree op = TREE_VALUE (link);
4707 if (REFERENCE_CLASS_P (op)
4708 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4709 changed = true;
4711 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4713 tree link = gimple_asm_input_op (asm_stmt, i);
4714 tree op = TREE_VALUE (link);
4715 if ((REFERENCE_CLASS_P (op)
4716 || TREE_CODE (op) == ADDR_EXPR)
4717 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4718 changed = true;
4721 break;
4722 case GIMPLE_DEBUG:
4723 if (gimple_debug_bind_p (stmt))
4725 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4726 if (*val
4727 && (REFERENCE_CLASS_P (*val)
4728 || TREE_CODE (*val) == ADDR_EXPR)
4729 && maybe_canonicalize_mem_ref_addr (val))
4730 changed = true;
4732 break;
4733 case GIMPLE_COND:
4735 /* Canonicalize operand order. */
4736 tree lhs = gimple_cond_lhs (stmt);
4737 tree rhs = gimple_cond_rhs (stmt);
4738 if (tree_swap_operands_p (lhs, rhs))
4740 gcond *gc = as_a <gcond *> (stmt);
4741 gimple_cond_set_lhs (gc, rhs);
4742 gimple_cond_set_rhs (gc, lhs);
4743 gimple_cond_set_code (gc,
4744 swap_tree_comparison (gimple_cond_code (gc)));
4745 changed = true;
4748 default:;
4751 /* Dispatch to pattern-based folding. */
4752 if (!inplace
4753 || is_gimple_assign (stmt)
4754 || gimple_code (stmt) == GIMPLE_COND)
4756 gimple_seq seq = NULL;
4757 code_helper rcode;
4758 tree ops[3] = {};
4759 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4760 valueize, valueize))
4762 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4763 changed = true;
4764 else
4765 gimple_seq_discard (seq);
/* The simplification may have replaced the statement; refetch it.  */
4769 stmt = gsi_stmt (*gsi);
4771 /* Fold the main computation performed by the statement. */
4772 switch (gimple_code (stmt))
4774 case GIMPLE_ASSIGN:
4776 /* Try to canonicalize for boolean-typed X the comparisons
4777 X == 0, X == 1, X != 0, and X != 1. */
4778 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4779 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4781 tree lhs = gimple_assign_lhs (stmt);
4782 tree op1 = gimple_assign_rhs1 (stmt);
4783 tree op2 = gimple_assign_rhs2 (stmt);
4784 tree type = TREE_TYPE (op1);
4786 /* Check whether the comparison operands are of the same boolean
4787 type as the result type is.
4788 Check that second operand is an integer-constant with value
4789 one or zero. */
4790 if (TREE_CODE (op2) == INTEGER_CST
4791 && (integer_zerop (op2) || integer_onep (op2))
4792 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4794 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4795 bool is_logical_not = false;
4797 /* X == 0 and X != 1 is a logical-not of X
4798 X == 1 and X != 0 is X */
4799 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4800 || (cmp_code == NE_EXPR && integer_onep (op2)))
4801 is_logical_not = true;
4803 if (is_logical_not == false)
4804 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4805 /* Only for one-bit precision typed X the transformation
4806 !X -> ~X is valid. */
4807 else if (TYPE_PRECISION (type) == 1)
4808 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4809 /* Otherwise we use !X -> X ^ 1. */
4810 else
4811 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4812 build_int_cst (type, 1));
4813 changed = true;
4814 break;
/* Generic assignment folding; only install the folded RHS inplace
   when it does not need more operand slots than the old one.  */
4818 unsigned old_num_ops = gimple_num_ops (stmt);
4819 tree lhs = gimple_assign_lhs (stmt);
4820 tree new_rhs = fold_gimple_assign (gsi);
4821 if (new_rhs
4822 && !useless_type_conversion_p (TREE_TYPE (lhs),
4823 TREE_TYPE (new_rhs)))
4824 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4825 if (new_rhs
4826 && (!inplace
4827 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4829 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4830 changed = true;
4832 break;
4835 case GIMPLE_CALL:
4836 changed |= gimple_fold_call (gsi, inplace);
4837 break;
4839 case GIMPLE_ASM:
4840 /* Fold *& in asm operands. */
4842 gasm *asm_stmt = as_a <gasm *> (stmt);
4843 size_t noutputs;
4844 const char **oconstraints;
4845 const char *constraint;
4846 bool allows_mem, allows_reg;
4848 noutputs = gimple_asm_noutputs (asm_stmt);
4849 oconstraints = XALLOCAVEC (const char *, noutputs);
4851 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4853 tree link = gimple_asm_output_op (asm_stmt, i);
4854 tree op = TREE_VALUE (link);
4855 oconstraints[i]
4856 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4857 if (REFERENCE_CLASS_P (op)
4858 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4860 TREE_VALUE (link) = op;
4861 changed = true;
4864 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4866 tree link = gimple_asm_input_op (asm_stmt, i);
4867 tree op = TREE_VALUE (link);
4868 constraint
4869 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4870 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4871 oconstraints, &allows_mem, &allows_reg);
4872 if (REFERENCE_CLASS_P (op)
4873 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4874 != NULL_TREE)
4876 TREE_VALUE (link) = op;
4877 changed = true;
4881 break;
4883 case GIMPLE_DEBUG:
4884 if (gimple_debug_bind_p (stmt))
4886 tree val = gimple_debug_bind_get_value (stmt);
4887 if (val
4888 && REFERENCE_CLASS_P (val))
4890 tree tem = maybe_fold_reference (val, false);
4891 if (tem)
4893 gimple_debug_bind_set_value (stmt, tem);
4894 changed = true;
4897 else if (val
4898 && TREE_CODE (val) == ADDR_EXPR)
4900 tree ref = TREE_OPERAND (val, 0);
4901 tree tem = maybe_fold_reference (ref, false);
4902 if (tem)
4904 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4905 gimple_debug_bind_set_value (stmt, tem);
4906 changed = true;
4910 break;
4912 case GIMPLE_RETURN:
4914 greturn *ret_stmt = as_a<greturn *> (stmt);
4915 tree ret = gimple_return_retval(ret_stmt);
/* Propagate the valueized return value when copy propagation allows.  */
4917 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4919 tree val = valueize (ret);
4920 if (val && val != ret
4921 && may_propagate_copy (ret, val))
4923 gimple_return_set_retval (ret_stmt, val);
4924 changed = true;
4928 break;
4930 default:;
4933 stmt = gsi_stmt (*gsi);
4935 /* Fold *& on the lhs. */
4936 if (gimple_has_lhs (stmt))
4938 tree lhs = gimple_get_lhs (stmt);
4939 if (lhs && REFERENCE_CLASS_P (lhs))
4941 tree new_lhs = maybe_fold_reference (lhs, true);
4942 if (new_lhs)
4944 gimple_set_lhs (stmt, new_lhs);
4945 changed = true;
4950 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4951 return changed;
4954 /* Valueization callback that ends up not following SSA edges.  Used by
   fold_stmt (gsi) below to restrict folding to the statement itself.  */
4956 tree
4957 no_follow_ssa_edges (tree)
4959 return NULL_TREE;
4962 /* Valueization callback that ends up following single-use SSA edges only. */
4964 tree
4965 follow_single_use_edges (tree val)
4967 if (TREE_CODE (val) == SSA_NAME
4968 && !has_single_use (val))
4969 return NULL_TREE;
4970 return val;
4973 /* Fold the statement pointed to by GSI. In some cases, this function may
4974 replace the whole statement with a new one. Returns true iff folding
4975 makes any changes.
4976 The statement pointed to by GSI should be in valid gimple form but may
4977 be in unfolded state as resulting from for example constant propagation
4978 which can produce *&x = 0. */
4980 bool
4981 fold_stmt (gimple_stmt_iterator *gsi)
/* This overload does not follow SSA def chains during simplification.  */
4983 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
/* As fold_stmt above, but use VALUEIZE to look up SSA operand values
   during simplification.  */
4986 bool
4987 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4989 return fold_stmt_1 (gsi, false, valueize);
4992 /* Perform the minimal folding on statement *GSI. Only operations like
4993 *&x created by constant propagation are handled. The statement cannot
4994 be replaced with a new one. Return true if the statement was
4995 changed, false otherwise.
4996 The statement *GSI should be in valid gimple form but may
4997 be in unfolded state as resulting from for example constant propagation
4998 which can produce *&x = 0. */
5000 bool
5001 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5003 gimple *stmt = gsi_stmt (*gsi);
5004 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5005 gcc_assert (gsi_stmt (*gsi) == stmt);
5006 return changed;
5009 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5010 if EXPR is null or we don't know how.
5011 If non-null, the result always has boolean type. */
5013 static tree
5014 canonicalize_bool (tree expr, bool invert)
5016 if (!expr)
5017 return NULL_TREE;
5018 else if (invert)
5020 if (integer_nonzerop (expr))
5021 return boolean_false_node;
5022 else if (integer_zerop (expr))
5023 return boolean_true_node;
5024 else if (TREE_CODE (expr) == SSA_NAME)
5025 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5026 build_int_cst (TREE_TYPE (expr), 0));
5027 else if (COMPARISON_CLASS_P (expr))
5028 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5029 boolean_type_node,
5030 TREE_OPERAND (expr, 0),
5031 TREE_OPERAND (expr, 1));
5032 else
5033 return NULL_TREE;
5035 else
5037 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5038 return expr;
5039 if (integer_nonzerop (expr))
5040 return boolean_true_node;
5041 else if (integer_zerop (expr))
5042 return boolean_false_node;
5043 else if (TREE_CODE (expr) == SSA_NAME)
5044 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5045 build_int_cst (TREE_TYPE (expr), 0));
5046 else if (COMPARISON_CLASS_P (expr))
5047 return fold_build2 (TREE_CODE (expr),
5048 boolean_type_node,
5049 TREE_OPERAND (expr, 0),
5050 TREE_OPERAND (expr, 1));
5051 else
5052 return NULL_TREE;
5056 /* Check to see if a boolean expression EXPR is logically equivalent to the
5057 comparison (OP1 CODE OP2). Check for various identities involving
5058 SSA_NAMEs. */
5060 static bool
5061 same_bool_comparison_p (const_tree expr, enum tree_code code,
5062 const_tree op1, const_tree op2)
5064 gimple *s;
5066 /* The obvious case. */
5067 if (TREE_CODE (expr) == code
5068 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5069 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5070 return true;
5072 /* Check for comparing (name, name != 0) and the case where expr
5073 is an SSA_NAME with a definition matching the comparison. */
5074 if (TREE_CODE (expr) == SSA_NAME
5075 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5077 if (operand_equal_p (expr, op1, 0))
5078 return ((code == NE_EXPR && integer_zerop (op2))
5079 || (code == EQ_EXPR && integer_nonzerop (op2)));
5080 s = SSA_NAME_DEF_STMT (expr);
5081 if (is_gimple_assign (s)
5082 && gimple_assign_rhs_code (s) == code
5083 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5084 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5085 return true;
5088 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5089 of name is a comparison, recurse. */
5090 if (TREE_CODE (op1) == SSA_NAME
5091 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5093 s = SSA_NAME_DEF_STMT (op1);
5094 if (is_gimple_assign (s)
5095 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5097 enum tree_code c = gimple_assign_rhs_code (s);
5098 if ((c == NE_EXPR && integer_zerop (op2))
5099 || (c == EQ_EXPR && integer_nonzerop (op2)))
5100 return same_bool_comparison_p (expr, c,
5101 gimple_assign_rhs1 (s),
5102 gimple_assign_rhs2 (s));
5103 if ((c == EQ_EXPR && integer_zerop (op2))
5104 || (c == NE_EXPR && integer_nonzerop (op2)))
5105 return same_bool_comparison_p (expr,
5106 invert_tree_comparison (c, false),
5107 gimple_assign_rhs1 (s),
5108 gimple_assign_rhs2 (s));
5111 return false;
5114 /* Check to see if two boolean expressions OP1 and OP2 are logically
5115 equivalent. */
5117 static bool
5118 same_bool_result_p (const_tree op1, const_tree op2)
5120 /* Simple cases first. */
5121 if (operand_equal_p (op1, op2, 0))
5122 return true;
5124 /* Check the cases where at least one of the operands is a comparison.
5125 These are a bit smarter than operand_equal_p in that they apply some
5126 identifies on SSA_NAMEs. */
5127 if (COMPARISON_CLASS_P (op2)
5128 && same_bool_comparison_p (op1, TREE_CODE (op2),
5129 TREE_OPERAND (op2, 0),
5130 TREE_OPERAND (op2, 1)))
5131 return true;
5132 if (COMPARISON_CLASS_P (op1)
5133 && same_bool_comparison_p (op2, TREE_CODE (op1),
5134 TREE_OPERAND (op1, 0),
5135 TREE_OPERAND (op1, 1)))
5136 return true;
5138 /* Default case. */
5139 return false;
5142 /* Forward declarations for some mutually recursive functions. */
/* The and_* and or_* families recurse into each other (e.g.
   and_var_with_comparison applies DeMorgan via or_var_with_comparison_1),
   hence the forward declarations.  */
5144 static tree
5145 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5146 enum tree_code code2, tree op2a, tree op2b);
5147 static tree
5148 and_var_with_comparison (tree var, bool invert,
5149 enum tree_code code2, tree op2a, tree op2b);
5150 static tree
5151 and_var_with_comparison_1 (gimple *stmt,
5152 enum tree_code code2, tree op2a, tree op2b);
5153 static tree
5154 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5155 enum tree_code code2, tree op2a, tree op2b);
5156 static tree
5157 or_var_with_comparison (tree var, bool invert,
5158 enum tree_code code2, tree op2a, tree op2b);
5159 static tree
5160 or_var_with_comparison_1 (gimple *stmt,
5161 enum tree_code code2, tree op2a, tree op2b);
5163 /* Helper function for and_comparisons_1: try to simplify the AND of the
5164 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5165 If INVERT is true, invert the value of the VAR before doing the AND.
5166 Return NULL_EXPR if we can't simplify this to a single expression. */
5168 static tree
5169 and_var_with_comparison (tree var, bool invert,
5170 enum tree_code code2, tree op2a, tree op2b)
5172 tree t;
5173 gimple *stmt = SSA_NAME_DEF_STMT (var);
5175 /* We can only deal with variables whose definitions are assignments. */
5176 if (!is_gimple_assign (stmt))
5177 return NULL_TREE;
5179 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5180 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5181 Then we only have to consider the simpler non-inverted cases. */
5182 if (invert)
5183 t = or_var_with_comparison_1 (stmt,
5184 invert_tree_comparison (code2, false),
5185 op2a, op2b);
5186 else
5187 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5188 return canonicalize_bool (t, invert);
5191 /* Try to simplify the AND of the ssa variable defined by the assignment
5192 STMT with the comparison specified by (OP2A CODE2 OP2B).
5193 Return NULL_EXPR if we can't simplify this to a single expression. */
/* Callers (and_var_with_comparison above) guarantee is_gimple_assign (STMT).  */
5195 static tree
5196 and_var_with_comparison_1 (gimple *stmt,
5197 enum tree_code code2, tree op2a, tree op2b)
5199 tree var = gimple_assign_lhs (stmt);
5200 tree true_test_var = NULL_TREE;
5201 tree false_test_var = NULL_TREE;
5202 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5204 /* Check for identities like (var AND (var == 0)) => false. */
5205 if (TREE_CODE (op2a) == SSA_NAME
5206 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5208 if ((code2 == NE_EXPR && integer_zerop (op2b))
5209 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
/* (op2a code2 op2b) is equivalent to op2a being true.  */
5211 true_test_var = op2a;
5212 if (var == true_test_var)
5213 return var;
5215 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5216 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
/* (op2a code2 op2b) is equivalent to op2a being false.  */
5218 false_test_var = op2a;
5219 if (var == false_test_var)
5220 return boolean_false_node;
5224 /* If the definition is a comparison, recurse on it. */
5225 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5227 tree t = and_comparisons_1 (innercode,
5228 gimple_assign_rhs1 (stmt),
5229 gimple_assign_rhs2 (stmt),
5230 code2,
5231 op2a,
5232 op2b);
5233 if (t)
5234 return t;
5237 /* If the definition is an AND or OR expression, we may be able to
5238 simplify by reassociating. */
5239 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5240 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5242 tree inner1 = gimple_assign_rhs1 (stmt);
5243 tree inner2 = gimple_assign_rhs2 (stmt);
5244 gimple *s;
5245 tree t;
5246 tree partial = NULL_TREE;
5247 bool is_and = (innercode == BIT_AND_EXPR);
5249 /* Check for boolean identities that don't require recursive examination
5250 of inner1/inner2:
5251 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5252 inner1 AND (inner1 OR inner2) => inner1
5253 !inner1 AND (inner1 AND inner2) => false
5254 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5255 Likewise for similar cases involving inner2. */
5256 if (inner1 == true_test_var)
5257 return (is_and ? var : inner1);
5258 else if (inner2 == true_test_var)
5259 return (is_and ? var : inner2);
5260 else if (inner1 == false_test_var)
5261 return (is_and
5262 ? boolean_false_node
5263 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5264 else if (inner2 == false_test_var)
5265 return (is_and
5266 ? boolean_false_node
5267 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5269 /* Next, redistribute/reassociate the AND across the inner tests.
5270 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5271 if (TREE_CODE (inner1) == SSA_NAME
5272 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5273 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5274 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5275 gimple_assign_rhs1 (s),
5276 gimple_assign_rhs2 (s),
5277 code2, op2a, op2b)))
5279 /* Handle the AND case, where we are reassociating:
5280 (inner1 AND inner2) AND (op2a code2 op2b)
5281 => (t AND inner2)
5282 If the partial result t is a constant, we win. Otherwise
5283 continue on to try reassociating with the other inner test. */
5284 if (is_and)
5286 if (integer_onep (t))
5287 return inner2;
5288 else if (integer_zerop (t))
5289 return boolean_false_node;
5292 /* Handle the OR case, where we are redistributing:
5293 (inner1 OR inner2) AND (op2a code2 op2b)
5294 => (t OR (inner2 AND (op2a code2 op2b))) */
5295 else if (integer_onep (t))
5296 return boolean_true_node;
5298 /* Save partial result for later. */
5299 partial = t;
5302 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5303 if (TREE_CODE (inner2) == SSA_NAME
5304 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5305 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5306 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5307 gimple_assign_rhs1 (s),
5308 gimple_assign_rhs2 (s),
5309 code2, op2a, op2b)))
5311 /* Handle the AND case, where we are reassociating:
5312 (inner1 AND inner2) AND (op2a code2 op2b)
5313 => (inner1 AND t) */
5314 if (is_and)
5316 if (integer_onep (t))
5317 return inner1;
5318 else if (integer_zerop (t))
5319 return boolean_false_node;
5320 /* If both are the same, we can apply the identity
5321 (x AND x) == x. */
5322 else if (partial && same_bool_result_p (t, partial))
5323 return t;
5326 /* Handle the OR case. where we are redistributing:
5327 (inner1 OR inner2) AND (op2a code2 op2b)
5328 => (t OR (inner1 AND (op2a code2 op2b)))
5329 => (t OR partial) */
5330 else
5332 if (integer_onep (t))
5333 return boolean_true_node;
5334 else if (partial)
5336 /* We already got a simplification for the other
5337 operand to the redistributed OR expression. The
5338 interesting case is when at least one is false.
5339 Or, if both are the same, we can apply the identity
5340 (x OR x) == x. */
5341 if (integer_zerop (partial))
5342 return t;
5343 else if (integer_zerop (t))
5344 return partial;
5345 else if (same_bool_result_p (t, partial))
5346 return t;
5351 return NULL_TREE;
5354 /* Try to simplify the AND of two comparisons defined by
5355 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5356 If this can be done without constructing an intermediate value,
5357 return the resulting tree; otherwise NULL_TREE is returned.
5358 This function is deliberately asymmetric as it recurses on SSA_DEFs
5359 in the first comparison but not the second. */
5361 static tree
5362 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5363 enum tree_code code2, tree op2a, tree op2b)
5365 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5367 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5368 if (operand_equal_p (op1a, op2a, 0)
5369 && operand_equal_p (op1b, op2b, 0))
5371 /* Result will be either NULL_TREE, or a combined comparison. */
5372 tree t = combine_comparisons (UNKNOWN_LOCATION,
5373 TRUTH_ANDIF_EXPR, code1, code2,
5374 truth_type, op1a, op1b);
5375 if (t)
5376 return t;
5379 /* Likewise the swapped case of the above. */
5380 if (operand_equal_p (op1a, op2b, 0)
5381 && operand_equal_p (op1b, op2a, 0))
5383 /* Result will be either NULL_TREE, or a combined comparison. */
5384 tree t = combine_comparisons (UNKNOWN_LOCATION,
5385 TRUTH_ANDIF_EXPR, code1,
5386 swap_tree_comparison (code2),
5387 truth_type, op1a, op1b);
5388 if (t)
5389 return t;
5392 /* If both comparisons are of the same value against constants, we might
5393 be able to merge them. */
5394 if (operand_equal_p (op1a, op2a, 0)
5395 && TREE_CODE (op1b) == INTEGER_CST
5396 && TREE_CODE (op2b) == INTEGER_CST)
5398 int cmp = tree_int_cst_compare (op1b, op2b);
5400 /* If we have (op1a == op1b), we should either be able to
5401 return that or FALSE, depending on whether the constant op1b
5402 also satisfies the other comparison against op2b. */
5403 if (code1 == EQ_EXPR)
5405 bool done = true;
5406 bool val;
5407 switch (code2)
5409 case EQ_EXPR: val = (cmp == 0); break;
5410 case NE_EXPR: val = (cmp != 0); break;
5411 case LT_EXPR: val = (cmp < 0); break;
5412 case GT_EXPR: val = (cmp > 0); break;
5413 case LE_EXPR: val = (cmp <= 0); break;
5414 case GE_EXPR: val = (cmp >= 0); break;
5415 default: done = false;
5417 if (done)
5419 if (val)
5420 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5421 else
5422 return boolean_false_node;
5425 /* Likewise if the second comparison is an == comparison. */
5426 else if (code2 == EQ_EXPR)
5428 bool done = true;
5429 bool val;
5430 switch (code1)
5432 case EQ_EXPR: val = (cmp == 0); break;
5433 case NE_EXPR: val = (cmp != 0); break;
5434 case LT_EXPR: val = (cmp > 0); break;
5435 case GT_EXPR: val = (cmp < 0); break;
5436 case LE_EXPR: val = (cmp >= 0); break;
5437 case GE_EXPR: val = (cmp <= 0); break;
5438 default: done = false;
5440 if (done)
5442 if (val)
5443 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5444 else
5445 return boolean_false_node;
5449 /* Same business with inequality tests. */
5450 else if (code1 == NE_EXPR)
5452 bool val;
5453 switch (code2)
5455 case EQ_EXPR: val = (cmp != 0); break;
5456 case NE_EXPR: val = (cmp == 0); break;
5457 case LT_EXPR: val = (cmp >= 0); break;
5458 case GT_EXPR: val = (cmp <= 0); break;
5459 case LE_EXPR: val = (cmp > 0); break;
5460 case GE_EXPR: val = (cmp < 0); break;
5461 default:
5462 val = false;
5464 if (val)
5465 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5467 else if (code2 == NE_EXPR)
5469 bool val;
5470 switch (code1)
5472 case EQ_EXPR: val = (cmp == 0); break;
5473 case NE_EXPR: val = (cmp != 0); break;
5474 case LT_EXPR: val = (cmp <= 0); break;
5475 case GT_EXPR: val = (cmp >= 0); break;
5476 case LE_EXPR: val = (cmp < 0); break;
5477 case GE_EXPR: val = (cmp > 0); break;
5478 default:
5479 val = false;
5481 if (val)
5482 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5485 /* Choose the more restrictive of two < or <= comparisons. */
5486 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5487 && (code2 == LT_EXPR || code2 == LE_EXPR))
5489 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5490 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5491 else
5492 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5495 /* Likewise choose the more restrictive of two > or >= comparisons. */
5496 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5497 && (code2 == GT_EXPR || code2 == GE_EXPR))
5499 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5500 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5501 else
5502 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5505 /* Check for singleton ranges. */
5506 else if (cmp == 0
5507 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5508 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5509 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5511 /* Check for disjoint ranges. */
5512 else if (cmp <= 0
5513 && (code1 == LT_EXPR || code1 == LE_EXPR)
5514 && (code2 == GT_EXPR || code2 == GE_EXPR))
5515 return boolean_false_node;
5516 else if (cmp >= 0
5517 && (code1 == GT_EXPR || code1 == GE_EXPR)
5518 && (code2 == LT_EXPR || code2 == LE_EXPR))
5519 return boolean_false_node;
5522 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5523 NAME's definition is a truth value. See if there are any simplifications
5524 that can be done against the NAME's definition. */
5525 if (TREE_CODE (op1a) == SSA_NAME
5526 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5527 && (integer_zerop (op1b) || integer_onep (op1b)))
5529 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5530 || (code1 == NE_EXPR && integer_onep (op1b)));
5531 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5532 switch (gimple_code (stmt))
5534 case GIMPLE_ASSIGN:
5535 /* Try to simplify by copy-propagating the definition. */
5536 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5538 case GIMPLE_PHI:
5539 /* If every argument to the PHI produces the same result when
5540 ANDed with the second comparison, we win.
5541 Do not do this unless the type is bool since we need a bool
5542 result here anyway. */
5543 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5545 tree result = NULL_TREE;
5546 unsigned i;
5547 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5549 tree arg = gimple_phi_arg_def (stmt, i);
5551 /* If this PHI has itself as an argument, ignore it.
5552 If all the other args produce the same result,
5553 we're still OK. */
5554 if (arg == gimple_phi_result (stmt))
5555 continue;
5556 else if (TREE_CODE (arg) == INTEGER_CST)
5558 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5560 if (!result)
5561 result = boolean_false_node;
5562 else if (!integer_zerop (result))
5563 return NULL_TREE;
5565 else if (!result)
5566 result = fold_build2 (code2, boolean_type_node,
5567 op2a, op2b);
5568 else if (!same_bool_comparison_p (result,
5569 code2, op2a, op2b))
5570 return NULL_TREE;
5572 else if (TREE_CODE (arg) == SSA_NAME
5573 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5575 tree temp;
5576 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5577 /* In simple cases we can look through PHI nodes,
5578 but we have to be careful with loops.
5579 See PR49073. */
5580 if (! dom_info_available_p (CDI_DOMINATORS)
5581 || gimple_bb (def_stmt) == gimple_bb (stmt)
5582 || dominated_by_p (CDI_DOMINATORS,
5583 gimple_bb (def_stmt),
5584 gimple_bb (stmt)))
5585 return NULL_TREE;
5586 temp = and_var_with_comparison (arg, invert, code2,
5587 op2a, op2b);
5588 if (!temp)
5589 return NULL_TREE;
5590 else if (!result)
5591 result = temp;
5592 else if (!same_bool_result_p (result, temp))
5593 return NULL_TREE;
5595 else
5596 return NULL_TREE;
5598 return result;
5601 default:
5602 break;
5605 return NULL_TREE;
5608 /* Try to simplify the AND of two comparisons, specified by
5609 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5610 If this can be simplified to a single expression (without requiring
5611 introducing more SSA variables to hold intermediate values),
5612 return the resulting tree. Otherwise return NULL_TREE.
5613 If the result expression is non-null, it has boolean type. */
5615 tree
5616 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5617 enum tree_code code2, tree op2a, tree op2b)
5619 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5620 if (t)
5621 return t;
5622 else
5623 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5626 /* Helper function for or_comparisons_1: try to simplify the OR of the
5627 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5628 If INVERT is true, invert the value of VAR before doing the OR.
5629 Return NULL_EXPR if we can't simplify this to a single expression. */
5631 static tree
5632 or_var_with_comparison (tree var, bool invert,
5633 enum tree_code code2, tree op2a, tree op2b)
5635 tree t;
5636 gimple *stmt = SSA_NAME_DEF_STMT (var);
5638 /* We can only deal with variables whose definitions are assignments. */
5639 if (!is_gimple_assign (stmt))
5640 return NULL_TREE;
5642 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5643 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5644 Then we only have to consider the simpler non-inverted cases. */
5645 if (invert)
5646 t = and_var_with_comparison_1 (stmt,
5647 invert_tree_comparison (code2, false),
5648 op2a, op2b);
5649 else
5650 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5651 return canonicalize_bool (t, invert);
5654 /* Try to simplify the OR of the ssa variable defined by the assignment
5655 STMT with the comparison specified by (OP2A CODE2 OP2B).
5656 Return NULL_TREE if we can't simplify this to a single expression. */
5658 static tree
5659 or_var_with_comparison_1 (gimple *stmt,
5660 enum tree_code code2, tree op2a, tree op2b)
5662 tree var = gimple_assign_lhs (stmt);
5663 tree true_test_var = NULL_TREE;
5664 tree false_test_var = NULL_TREE;
5665 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5667 /* Check for identities like (var OR (var != 0)) => true . */
5668 if (TREE_CODE (op2a) == SSA_NAME
5669 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5671 if ((code2 == NE_EXPR && integer_zerop (op2b))
5672 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5674 true_test_var = op2a;
5675 if (var == true_test_var)
5676 return var;
5678 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5679 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5681 false_test_var = op2a;
5682 if (var == false_test_var)
5683 return boolean_true_node;
5687 /* If the definition is a comparison, recurse on it. */
5688 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5690 tree t = or_comparisons_1 (innercode,
5691 gimple_assign_rhs1 (stmt),
5692 gimple_assign_rhs2 (stmt),
5693 code2,
5694 op2a,
5695 op2b);
5696 if (t)
5697 return t;
5700 /* If the definition is an AND or OR expression, we may be able to
5701 simplify by reassociating. */
5702 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5703 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5705 tree inner1 = gimple_assign_rhs1 (stmt);
5706 tree inner2 = gimple_assign_rhs2 (stmt);
5707 gimple *s;
5708 tree t;
5709 tree partial = NULL_TREE;
5710 bool is_or = (innercode == BIT_IOR_EXPR);
5712 /* Check for boolean identities that don't require recursive examination
5713 of inner1/inner2:
5714 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5715 inner1 OR (inner1 AND inner2) => inner1
5716 !inner1 OR (inner1 OR inner2) => true
5717 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 */
5719 if (inner1 == true_test_var)
5720 return (is_or ? var : inner1);
5721 else if (inner2 == true_test_var)
5722 return (is_or ? var : inner2);
5723 else if (inner1 == false_test_var)
5724 return (is_or
5725 ? boolean_true_node
5726 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5727 else if (inner2 == false_test_var)
5728 return (is_or
5729 ? boolean_true_node
5730 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5732 /* Next, redistribute/reassociate the OR across the inner tests.
5733 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5734 if (TREE_CODE (inner1) == SSA_NAME
5735 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5736 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5737 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5738 gimple_assign_rhs1 (s),
5739 gimple_assign_rhs2 (s),
5740 code2, op2a, op2b)))
5742 /* Handle the OR case, where we are reassociating:
5743 (inner1 OR inner2) OR (op2a code2 op2b)
5744 => (t OR inner2)
5745 If the partial result t is a constant, we win. Otherwise
5746 continue on to try reassociating with the other inner test. */
5747 if (is_or)
5749 if (integer_onep (t))
5750 return boolean_true_node;
5751 else if (integer_zerop (t))
5752 return inner2;
5755 /* Handle the AND case, where we are redistributing:
5756 (inner1 AND inner2) OR (op2a code2 op2b)
5757 => (t AND (inner2 OR (op2a code op2b))) */
5758 else if (integer_zerop (t))
5759 return boolean_false_node;
5761 /* Save partial result for later. */
5762 partial = t;
5765 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5766 if (TREE_CODE (inner2) == SSA_NAME
5767 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5768 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5769 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5770 gimple_assign_rhs1 (s),
5771 gimple_assign_rhs2 (s),
5772 code2, op2a, op2b)))
5774 /* Handle the OR case, where we are reassociating:
5775 (inner1 OR inner2) OR (op2a code2 op2b)
5776 => (inner1 OR t)
5777 => (t OR partial) */
5778 if (is_or)
5780 if (integer_zerop (t))
5781 return inner1;
5782 else if (integer_onep (t))
5783 return boolean_true_node;
5784 /* If both are the same, we can apply the identity
5785 (x OR x) == x. */
5786 else if (partial && same_bool_result_p (t, partial))
5787 return t;
5790 /* Handle the AND case, where we are redistributing:
5791 (inner1 AND inner2) OR (op2a code2 op2b)
5792 => (t AND (inner1 OR (op2a code2 op2b)))
5793 => (t AND partial) */
5794 else
5796 if (integer_zerop (t))
5797 return boolean_false_node;
5798 else if (partial)
5800 /* We already got a simplification for the other
5801 operand to the redistributed AND expression. The
5802 interesting case is when at least one is true.
5803 Or, if both are the same, we can apply the identity
5804 (x AND x) == x. */
5805 if (integer_onep (partial))
5806 return t;
5807 else if (integer_onep (t))
5808 return partial;
5809 else if (same_bool_result_p (t, partial))
5810 return t;
5815 return NULL_TREE;
5818 /* Try to simplify the OR of two comparisons defined by
5819 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5820 If this can be done without constructing an intermediate value,
5821 return the resulting tree; otherwise NULL_TREE is returned.
5822 This function is deliberately asymmetric as it recurses on SSA_DEFs
5823 in the first comparison but not the second. */
5825 static tree
5826 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5827 enum tree_code code2, tree op2a, tree op2b)
5829 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5831 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5832 if (operand_equal_p (op1a, op2a, 0)
5833 && operand_equal_p (op1b, op2b, 0))
5835 /* Result will be either NULL_TREE, or a combined comparison. */
5836 tree t = combine_comparisons (UNKNOWN_LOCATION,
5837 TRUTH_ORIF_EXPR, code1, code2,
5838 truth_type, op1a, op1b);
5839 if (t)
5840 return t;
5843 /* Likewise the swapped case of the above. */
5844 if (operand_equal_p (op1a, op2b, 0)
5845 && operand_equal_p (op1b, op2a, 0))
5847 /* Result will be either NULL_TREE, or a combined comparison. */
5848 tree t = combine_comparisons (UNKNOWN_LOCATION,
5849 TRUTH_ORIF_EXPR, code1,
5850 swap_tree_comparison (code2),
5851 truth_type, op1a, op1b);
5852 if (t)
5853 return t;
5856 /* If both comparisons are of the same value against constants, we might
5857 be able to merge them. */
5858 if (operand_equal_p (op1a, op2a, 0)
5859 && TREE_CODE (op1b) == INTEGER_CST
5860 && TREE_CODE (op2b) == INTEGER_CST)
5862 int cmp = tree_int_cst_compare (op1b, op2b);
5864 /* If we have (op1a != op1b), we should either be able to
5865 return that or TRUE, depending on whether the constant op1b
5866 also satisfies the other comparison against op2b. */
5867 if (code1 == NE_EXPR)
5869 bool done = true;
5870 bool val;
5871 switch (code2)
5873 case EQ_EXPR: val = (cmp == 0); break;
5874 case NE_EXPR: val = (cmp != 0); break;
5875 case LT_EXPR: val = (cmp < 0); break;
5876 case GT_EXPR: val = (cmp > 0); break;
5877 case LE_EXPR: val = (cmp <= 0); break;
5878 case GE_EXPR: val = (cmp >= 0); break;
5879 default: done = false;
5881 if (done)
5883 if (val)
5884 return boolean_true_node;
5885 else
5886 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5889 /* Likewise if the second comparison is a != comparison. */
5890 else if (code2 == NE_EXPR)
5892 bool done = true;
5893 bool val;
5894 switch (code1)
5896 case EQ_EXPR: val = (cmp == 0); break;
5897 case NE_EXPR: val = (cmp != 0); break;
5898 case LT_EXPR: val = (cmp > 0); break;
5899 case GT_EXPR: val = (cmp < 0); break;
5900 case LE_EXPR: val = (cmp >= 0); break;
5901 case GE_EXPR: val = (cmp <= 0); break;
5902 default: done = false;
5904 if (done)
5906 if (val)
5907 return boolean_true_node;
5908 else
5909 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5913 /* See if an equality test is redundant with the other comparison. */
5914 else if (code1 == EQ_EXPR)
5916 bool val;
5917 switch (code2)
5919 case EQ_EXPR: val = (cmp == 0); break;
5920 case NE_EXPR: val = (cmp != 0); break;
5921 case LT_EXPR: val = (cmp < 0); break;
5922 case GT_EXPR: val = (cmp > 0); break;
5923 case LE_EXPR: val = (cmp <= 0); break;
5924 case GE_EXPR: val = (cmp >= 0); break;
5925 default:
5926 val = false;
5928 if (val)
5929 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5931 else if (code2 == EQ_EXPR)
5933 bool val;
5934 switch (code1)
5936 case EQ_EXPR: val = (cmp == 0); break;
5937 case NE_EXPR: val = (cmp != 0); break;
5938 case LT_EXPR: val = (cmp > 0); break;
5939 case GT_EXPR: val = (cmp < 0); break;
5940 case LE_EXPR: val = (cmp >= 0); break;
5941 case GE_EXPR: val = (cmp <= 0); break;
5942 default:
5943 val = false;
5945 if (val)
5946 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5949 /* Choose the less restrictive of two < or <= comparisons. */
5950 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5951 && (code2 == LT_EXPR || code2 == LE_EXPR))
5953 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5954 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5955 else
5956 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5959 /* Likewise choose the less restrictive of two > or >= comparisons. */
5960 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5961 && (code2 == GT_EXPR || code2 == GE_EXPR))
5963 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5964 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5965 else
5966 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5969 /* Check for singleton ranges. */
5970 else if (cmp == 0
5971 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5972 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5973 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5975 /* Check for less/greater pairs that don't restrict the range at all. */
5976 else if (cmp >= 0
5977 && (code1 == LT_EXPR || code1 == LE_EXPR)
5978 && (code2 == GT_EXPR || code2 == GE_EXPR))
5979 return boolean_true_node;
5980 else if (cmp <= 0
5981 && (code1 == GT_EXPR || code1 == GE_EXPR)
5982 && (code2 == LT_EXPR || code2 == LE_EXPR))
5983 return boolean_true_node;
5986 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5987 NAME's definition is a truth value. See if there are any simplifications
5988 that can be done against the NAME's definition. */
5989 if (TREE_CODE (op1a) == SSA_NAME
5990 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5991 && (integer_zerop (op1b) || integer_onep (op1b)))
5993 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5994 || (code1 == NE_EXPR && integer_onep (op1b)));
5995 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5996 switch (gimple_code (stmt))
5998 case GIMPLE_ASSIGN:
5999 /* Try to simplify by copy-propagating the definition. */
6000 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6002 case GIMPLE_PHI:
6003 /* If every argument to the PHI produces the same result when
6004 ORed with the second comparison, we win.
6005 Do not do this unless the type is bool since we need a bool
6006 result here anyway. */
6007 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6009 tree result = NULL_TREE;
6010 unsigned i;
6011 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6013 tree arg = gimple_phi_arg_def (stmt, i);
6015 /* If this PHI has itself as an argument, ignore it.
6016 If all the other args produce the same result,
6017 we're still OK. */
6018 if (arg == gimple_phi_result (stmt))
6019 continue;
6020 else if (TREE_CODE (arg) == INTEGER_CST)
6022 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6024 if (!result)
6025 result = boolean_true_node;
6026 else if (!integer_onep (result))
6027 return NULL_TREE;
6029 else if (!result)
6030 result = fold_build2 (code2, boolean_type_node,
6031 op2a, op2b);
6032 else if (!same_bool_comparison_p (result,
6033 code2, op2a, op2b))
6034 return NULL_TREE;
6036 else if (TREE_CODE (arg) == SSA_NAME
6037 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6039 tree temp;
6040 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6041 /* In simple cases we can look through PHI nodes,
6042 but we have to be careful with loops.
6043 See PR49073. */
6044 if (! dom_info_available_p (CDI_DOMINATORS)
6045 || gimple_bb (def_stmt) == gimple_bb (stmt)
6046 || dominated_by_p (CDI_DOMINATORS,
6047 gimple_bb (def_stmt),
6048 gimple_bb (stmt)))
6049 return NULL_TREE;
6050 temp = or_var_with_comparison (arg, invert, code2,
6051 op2a, op2b);
6052 if (!temp)
6053 return NULL_TREE;
6054 else if (!result)
6055 result = temp;
6056 else if (!same_bool_result_p (result, temp))
6057 return NULL_TREE;
6059 else
6060 return NULL_TREE;
6062 return result;
6065 default:
6066 break;
6069 return NULL_TREE;
6072 /* Try to simplify the OR of two comparisons, specified by
6073 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6074 If this can be simplified to a single expression (without requiring
6075 introducing more SSA variables to hold intermediate values),
6076 return the resulting tree. Otherwise return NULL_TREE.
6077 If the result expression is non-null, it has boolean type. */
6079 tree
6080 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6081 enum tree_code code2, tree op2a, tree op2b)
6083 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6084 if (t)
6085 return t;
6086 else
6087 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6091 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6093 Either NULL_TREE, a simplified but non-constant or a constant
6094 is returned.
6096 ??? This should go into a gimple-fold-inline.h file to be eventually
6097 privatized with the single valueize function used in the various TUs
6098 to avoid the indirect function call overhead. */
6100 tree
6101 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6102 tree (*gvalueize) (tree))
/* NOTE(review): GVALUEIZE is handed to gimple_simplify separately from
   VALUEIZE below; presumably it controls following of SSA use-def edges
   while VALUEIZE handles leaf operands — confirm against gimple-match.  */
6104 code_helper rcode;
6105 tree ops[3] = {};
6106 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6107 edges if there are intermediate VARYING defs. For this reason
6108 do not follow SSA edges here even though SCCVN can technically
6109 just deal fine with that. */
6110 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
6112 tree res = NULL_TREE;
6113 if (gimple_simplified_result_is_gimple_val (rcode, ops))
6114 res = ops[0];
6115 else if (mprts_hook)
6116 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
6117 if (res)
6119 if (dump_file && dump_flags & TDF_DETAILS)
6121 fprintf (dump_file, "Match-and-simplified ");
6122 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6123 fprintf (dump_file, " to ");
6124 print_generic_expr (dump_file, res);
6125 fprintf (dump_file, "\n");
6127 return res;
6131 location_t loc = gimple_location (stmt);
6132 switch (gimple_code (stmt))
6134 case GIMPLE_ASSIGN:
6136 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6138 switch (get_gimple_rhs_class (subcode))
6140 case GIMPLE_SINGLE_RHS:
6142 tree rhs = gimple_assign_rhs1 (stmt);
6143 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6145 if (TREE_CODE (rhs) == SSA_NAME)
6147 /* If the RHS is an SSA_NAME, return its known constant value,
6148 if any. */
6149 return (*valueize) (rhs);
6151 /* Handle propagating invariant addresses into address
6152 operations. */
6153 else if (TREE_CODE (rhs) == ADDR_EXPR
6154 && !is_gimple_min_invariant (rhs))
6156 poly_int64 offset = 0;
6157 tree base;
6158 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6159 &offset,
6160 valueize);
6161 if (base
6162 && (CONSTANT_CLASS_P (base)
6163 || decl_address_invariant_p (base)))
6164 return build_invariant_address (TREE_TYPE (rhs),
6165 base, offset);
6167 else if (TREE_CODE (rhs) == CONSTRUCTOR
6168 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6169 && known_eq (CONSTRUCTOR_NELTS (rhs),
6170 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6172 unsigned i, nelts;
6173 tree val;
6175 nelts = CONSTRUCTOR_NELTS (rhs);
6176 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6177 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6179 val = (*valueize) (val);
6180 if (TREE_CODE (val) == INTEGER_CST
6181 || TREE_CODE (val) == REAL_CST
6182 || TREE_CODE (val) == FIXED_CST)
6183 vec.quick_push (val);
6184 else
6185 return NULL_TREE;
6188 return vec.build ();
6190 if (subcode == OBJ_TYPE_REF)
6192 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6193 /* If callee is constant, we can fold away the wrapper. */
6194 if (is_gimple_min_invariant (val))
6195 return val;
6198 if (kind == tcc_reference)
6200 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6201 || TREE_CODE (rhs) == REALPART_EXPR
6202 || TREE_CODE (rhs) == IMAGPART_EXPR)
6203 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6205 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6206 return fold_unary_loc (EXPR_LOCATION (rhs),
6207 TREE_CODE (rhs),
6208 TREE_TYPE (rhs), val);
6210 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6211 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6213 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6214 return fold_ternary_loc (EXPR_LOCATION (rhs),
6215 TREE_CODE (rhs),
6216 TREE_TYPE (rhs), val,
6217 TREE_OPERAND (rhs, 1),
6218 TREE_OPERAND (rhs, 2));
6220 else if (TREE_CODE (rhs) == MEM_REF
6221 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6223 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6224 if (TREE_CODE (val) == ADDR_EXPR
6225 && is_gimple_min_invariant (val))
6227 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6228 unshare_expr (val),
6229 TREE_OPERAND (rhs, 1));
6230 if (tem)
6231 rhs = tem;
6234 return fold_const_aggregate_ref_1 (rhs, valueize);
6236 else if (kind == tcc_declaration)
6237 return get_symbol_constant_value (rhs);
6238 return rhs;
6241 case GIMPLE_UNARY_RHS:
6242 return NULL_TREE;
6244 case GIMPLE_BINARY_RHS:
6245 /* Translate &x + CST into an invariant form suitable for
6246 further propagation. */
6247 if (subcode == POINTER_PLUS_EXPR)
6249 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6250 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6251 if (TREE_CODE (op0) == ADDR_EXPR
6252 && TREE_CODE (op1) == INTEGER_CST)
6254 tree off = fold_convert (ptr_type_node, op1);
6255 return build_fold_addr_expr_loc
6256 (loc,
6257 fold_build2 (MEM_REF,
6258 TREE_TYPE (TREE_TYPE (op0)),
6259 unshare_expr (op0), off));
6262 /* Canonicalize bool != 0 and bool == 0 appearing after
6263 valueization. While gimple_simplify handles this
6264 it can get confused by the ~X == 1 -> X == 0 transform
6265 which we can't reduce to a SSA name or a constant
6266 (and we have no way to tell gimple_simplify to not
6267 consider those transforms in the first place). */
6268 else if (subcode == EQ_EXPR
6269 || subcode == NE_EXPR)
6271 tree lhs = gimple_assign_lhs (stmt);
6272 tree op0 = gimple_assign_rhs1 (stmt);
6273 if (useless_type_conversion_p (TREE_TYPE (lhs),
6274 TREE_TYPE (op0)))
6276 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6277 op0 = (*valueize) (op0);
6278 if (TREE_CODE (op0) == INTEGER_CST)
6279 std::swap (op0, op1);
6280 if (TREE_CODE (op1) == INTEGER_CST
6281 && ((subcode == NE_EXPR && integer_zerop (op1))
6282 || (subcode == EQ_EXPR && integer_onep (op1))))
6283 return op0;
6286 return NULL_TREE;
6288 case GIMPLE_TERNARY_RHS:
6290 /* Handle ternary operators that can appear in GIMPLE form. */
6291 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6292 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6293 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6294 return fold_ternary_loc (loc, subcode,
6295 gimple_expr_type (stmt), op0, op1, op2);
6298 default:
6299 gcc_unreachable ();
6303 case GIMPLE_CALL:
6305 tree fn;
6306 gcall *call_stmt = as_a <gcall *> (stmt);
6308 if (gimple_call_internal_p (stmt))
6310 enum tree_code subcode = ERROR_MARK;
6311 switch (gimple_call_internal_fn (stmt))
6313 case IFN_UBSAN_CHECK_ADD:
6314 subcode = PLUS_EXPR;
6315 break;
6316 case IFN_UBSAN_CHECK_SUB:
6317 subcode = MINUS_EXPR;
6318 break;
6319 case IFN_UBSAN_CHECK_MUL:
6320 subcode = MULT_EXPR;
6321 break;
6322 case IFN_BUILTIN_EXPECT:
6324 tree arg0 = gimple_call_arg (stmt, 0);
6325 tree op0 = (*valueize) (arg0);
6326 if (TREE_CODE (op0) == INTEGER_CST)
6327 return op0;
6328 return NULL_TREE;
6330 default:
6331 return NULL_TREE;
6333 tree arg0 = gimple_call_arg (stmt, 0);
6334 tree arg1 = gimple_call_arg (stmt, 1);
6335 tree op0 = (*valueize) (arg0);
6336 tree op1 = (*valueize) (arg1);
6338 if (TREE_CODE (op0) != INTEGER_CST
6339 || TREE_CODE (op1) != INTEGER_CST)
6341 switch (subcode)
6343 case MULT_EXPR:
6344 /* x * 0 = 0 * x = 0 without overflow. */
6345 if (integer_zerop (op0) || integer_zerop (op1))
6346 return build_zero_cst (TREE_TYPE (arg0));
6347 break;
6348 case MINUS_EXPR:
6349 /* y - y = 0 without overflow. */
6350 if (operand_equal_p (op0, op1, 0))
6351 return build_zero_cst (TREE_TYPE (arg0));
6352 break;
6353 default:
6354 break;
6357 tree res
6358 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6359 if (res
6360 && TREE_CODE (res) == INTEGER_CST
6361 && !TREE_OVERFLOW (res))
6362 return res;
6363 return NULL_TREE;
6366 fn = (*valueize) (gimple_call_fn (stmt));
6367 if (TREE_CODE (fn) == ADDR_EXPR
6368 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6369 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6370 && gimple_builtin_call_types_compatible_p (stmt,
6371 TREE_OPERAND (fn, 0)))
6373 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6374 tree retval;
6375 unsigned i;
6376 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6377 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6378 retval = fold_builtin_call_array (loc,
6379 gimple_call_return_type (call_stmt),
6380 fn, gimple_call_num_args (stmt), args);
6381 if (retval)
6383 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6384 STRIP_NOPS (retval);
6385 retval = fold_convert (gimple_call_return_type (call_stmt),
6386 retval);
6388 return retval;
6390 return NULL_TREE;
6393 default:
6394 return NULL_TREE;
6398 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6399 Returns NULL_TREE if folding to a constant is not possible, otherwise
6400 returns a constant according to is_gimple_min_invariant. */
6402 tree
6403 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6405 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6406 if (res && is_gimple_min_invariant (res))
6407 return res;
6408 return NULL_TREE;
6412 /* The following set of functions are supposed to fold references using
6413 their constant initializers. */
6415 /* See if we can find constructor defining value of BASE.
6416 When we know the constructor with constant offset (such as
6417 base is array[40] and we do know constructor of array), then
6418 BIT_OFFSET is adjusted accordingly.
6420 As a special case, return error_mark_node when constructor
6421 is not explicitly available, but it is known to be zero
6422 such as 'static const int a;'. */
6423 static tree
6424 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6425 tree (*valueize)(tree))
6427 poly_int64 bit_offset2, size, max_size;
6428 bool reverse;
/* Strip a MEM_REF wrapper, folding its constant byte offset (scaled to
   bits) into *BIT_OFFSET; give up when the offset does not fit a signed
   HOST_WIDE_INT.  */
6430 if (TREE_CODE (base) == MEM_REF)
6432 if (!integer_zerop (TREE_OPERAND (base, 1)))
6434 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
6435 return NULL_TREE;
6436 *bit_offset += (mem_ref_offset (base).force_shwi ()
6437 * BITS_PER_UNIT);
/* Valueize the address operand and look through &DECL.  */
6440 if (valueize
6441 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6442 base = valueize (TREE_OPERAND (base, 0));
6443 if (!base || TREE_CODE (base) != ADDR_EXPR)
6444 return NULL_TREE;
6445 base = TREE_OPERAND (base, 0);
6447 else if (valueize
6448 && TREE_CODE (base) == SSA_NAME)
6449 base = valueize (base);
6451 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6452 DECL_INITIAL. If BASE is a nested reference into another
6453 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6454 the inner reference. */
6455 switch (TREE_CODE (base))
6457 case VAR_DECL:
6458 case CONST_DECL:
6460 tree init = ctor_for_folding (base);
6462 /* Our semantic is exact opposite of ctor_for_folding;
6463 NULL means unknown, while error_mark_node is 0. */
6464 if (init == error_mark_node)
6465 return NULL_TREE;
6466 if (!init)
6467 return error_mark_node;
6468 return init;
6471 case VIEW_CONVERT_EXPR:
6472 return get_base_constructor (TREE_OPERAND (base, 0),
6473 bit_offset, valueize);
6475 case ARRAY_REF:
6476 case COMPONENT_REF:
/* Peel the outermost handled component and accumulate its constant bit
   offset; only exact accesses (size == max_size, both known) can be
   tracked precisely, so anything else fails.  */
6477 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6478 &reverse);
6479 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6480 return NULL_TREE;
6481 *bit_offset += bit_offset2;
6482 return get_base_constructor (base, bit_offset, valueize);
6484 case CONSTRUCTOR:
6485 return base;
6487 default:
/* Any other constant (e.g. a STRING_CST initializer) can serve as the
   "constructor" directly.  */
6488 if (CONSTANT_CLASS_P (base))
6489 return base;
6491 return NULL_TREE;
6495 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6496 SIZE to the memory at bit OFFSET. */
6498 static tree
6499 fold_array_ctor_reference (tree type, tree ctor,
6500 unsigned HOST_WIDE_INT offset,
6501 unsigned HOST_WIDE_INT size,
6502 tree from_decl)
6504 offset_int low_bound;
6505 offset_int elt_size;
6506 offset_int access_index;
6507 tree domain_type = NULL_TREE;
6508 HOST_WIDE_INT inner_offset;
6510 /* Compute low bound and elt size. */
6511 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6512 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6513 if (domain_type && TYPE_MIN_VALUE (domain_type))
6515 /* Static constructors for variably sized objects makes no sense. */
6516 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6517 return NULL_TREE;
6518 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6520 else
/* VECTOR_TYPE ctors have no domain; treat the low bound as zero.  */
6521 low_bound = 0;
6522 /* Static constructors for variably sized objects makes no sense. */
6523 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6524 return NULL_TREE;
6525 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6527 /* We can handle only constantly sized accesses that are known to not
6528 be larger than size of array element. */
6529 if (!TYPE_SIZE_UNIT (type)
6530 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6531 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6532 || elt_size == 0)
6533 return NULL_TREE;
6535 /* Compute the array index we look for. */
6536 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6537 elt_size);
6538 access_index += low_bound;
6540 /* And offset within the access. */
6541 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6543 /* See if the array field is large enough to span whole access. We do not
6544 care to fold accesses spanning multiple array indexes. */
6545 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6546 return NULL_TREE;
/* Recurse into the element initializer for the remaining offset/size.  */
6547 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6548 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6550 /* When memory is not explicitly mentioned in constructor,
6551 it is 0 (or out of range). */
6552 return build_zero_cst (type);
6555 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6556 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6558 static tree
6559 fold_nonarray_ctor_reference (tree type, tree ctor,
6560 unsigned HOST_WIDE_INT offset,
6561 unsigned HOST_WIDE_INT size,
6562 tree from_decl)
6564 unsigned HOST_WIDE_INT cnt;
6565 tree cfield, cval;
/* Walk every (field, value) pair of the constructor looking for the one
   field whose bit range covers the requested access.  */
6567 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6568 cval)
6570 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6571 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6572 tree field_size = DECL_SIZE (cfield);
6573 offset_int bitoffset;
6574 offset_int bitoffset_end, access_end;
6576 /* Variable sized objects in static constructors makes no sense,
6577 but field_size can be NULL for flexible array members. */
6578 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6579 && TREE_CODE (byte_offset) == INTEGER_CST
6580 && (field_size != NULL_TREE
6581 ? TREE_CODE (field_size) == INTEGER_CST
6582 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6584 /* Compute bit offset of the field. */
6585 bitoffset = (wi::to_offset (field_offset)
6586 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT))
6587 /* Compute bit offset where the field ends. */
6588 if (field_size != NULL_TREE)
6589 bitoffset_end = bitoffset + wi::to_offset (field_size);
6590 else
/* Flexible array member: no known end; the zero sentinel is never read
   because the overlap test below skips the end-check in that case.  */
6591 bitoffset_end = 0;
6593 access_end = offset_int (offset) + size;
6595 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6596 [BITOFFSET, BITOFFSET_END)? */
6597 if (wi::cmps (access_end, bitoffset) > 0
6598 && (field_size == NULL_TREE
6599 || wi::lts_p (offset, bitoffset_end)))
6601 offset_int inner_offset = offset_int (offset) - bitoffset;
6602 /* We do have overlap. Now see if field is large enough to
6603 cover the access. Give up for accesses spanning multiple
6604 fields. */
6605 if (wi::cmps (access_end, bitoffset_end) > 0)
6606 return NULL_TREE;
6607 if (offset < bitoffset)
6608 return NULL_TREE;
6609 return fold_ctor_reference (type, cval,
6610 inner_offset.to_uhwi (), size,
6611 from_decl);
6614 /* When memory is not explicitly mentioned in constructor, it is 0. */
6615 return build_zero_cst (type);
6618 /* CTOR is value initializing memory, fold reference of type TYPE and
6619 size POLY_SIZE to the memory at bit POLY_OFFSET. */
6621 tree
6622 fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
6623 poly_uint64 poly_size, tree from_decl)
6625 tree ret;
6627 /* We found the field with exact match. */
6628 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6629 && known_eq (poly_offset, 0U))
6630 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6632 /* The remaining optimizations need a constant size and offset. */
6633 unsigned HOST_WIDE_INT size, offset;
6634 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6635 return NULL_TREE;
6637 /* We are at the end of walk, see if we can view convert the
6638 result. */
6639 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6640 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6641 && !compare_tree_int (TYPE_SIZE (type), size)
6642 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6644 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6645 if (ret)
6647 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6648 if (ret)
6649 STRIP_USELESS_TYPE_CONVERSION (ret);
6651 return ret;
6653 /* For constants and byte-aligned/sized reads try to go through
6654 native_encode/interpret. */
6655 if (CONSTANT_CLASS_P (ctor)
6656 && BITS_PER_UNIT == 8
6657 && offset % BITS_PER_UNIT == 0
6658 && size % BITS_PER_UNIT == 0
6659 && size <= MAX_BITSIZE_MODE_ANY_MODE)
/* Encode the constant to its target byte representation and reinterpret
   the addressed bytes as a value of TYPE.  */
6661 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6662 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6663 offset / BITS_PER_UNIT);
6664 if (len > 0)
6665 return native_interpret_expr (type, buf, len);
/* Dispatch CONSTRUCTOR nodes to the array or record/vector walkers.  */
6667 if (TREE_CODE (ctor) == CONSTRUCTOR)
6670 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6671 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6672 return fold_array_ctor_reference (type, ctor, offset, size,
6673 from_decl);
6674 else
6675 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6676 from_decl);
6679 return NULL_TREE;
6682 /* Return the tree representing the element referenced by T if T is an
6683 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
6684 names using VALUEIZE. Return NULL_TREE otherwise. */
6686 tree
6687 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6689 tree ctor, idx, base;
6690 poly_int64 offset, size, max_size;
6691 tree tem;
6692 bool reverse;
/* Never fold volatile accesses away.  */
6694 if (TREE_THIS_VOLATILE (t))
6695 return NULL_TREE;
6697 if (DECL_P (t))
6698 return get_symbol_constant_value (t);
/* Reads from string literals fold directly.  */
6700 tem = fold_read_from_constant_string (t);
6701 if (tem)
6702 return tem;
6704 switch (TREE_CODE (t))
6706 case ARRAY_REF:
6707 case ARRAY_RANGE_REF:
6708 /* Constant indexes are handled well by get_base_constructor.
6709 Only special case variable offsets.
6710 FIXME: This code can't handle nested references with variable indexes
6711 (they will be handled only by iteration of ccp). Perhaps we can bring
6712 get_ref_base_and_extent here and make it use a valueize callback. */
6713 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6714 && valueize
6715 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6716 && poly_int_tree_p (idx))
6718 tree low_bound, unit_size;
6720 /* If the resulting bit-offset is constant, track it. */
6721 if ((low_bound = array_ref_low_bound (t),
6722 poly_int_tree_p (low_bound))
6723 && (unit_size = array_ref_element_size (t),
6724 tree_fits_uhwi_p (unit_size)))
/* Bias the index by the array's low bound, sign-extending in the
   index type's precision.  */
6726 poly_offset_int woffset
6727 = wi::sext (wi::to_poly_offset (idx)
6728 - wi::to_poly_offset (low_bound),
6729 TYPE_PRECISION (TREE_TYPE (idx)));
6731 if (woffset.to_shwi (&offset))
6733 /* TODO: This code seems wrong, multiply then check
6734 to see if it fits. */
6735 offset *= tree_to_uhwi (unit_size);
6736 offset *= BITS_PER_UNIT;
6738 base = TREE_OPERAND (t, 0);
6739 ctor = get_base_constructor (base, &offset, valueize);
6740 /* Empty constructor. Always fold to 0. */
6741 if (ctor == error_mark_node)
6742 return build_zero_cst (TREE_TYPE (t));
6743 /* Out of bound array access. Value is undefined,
6744 but don't fold. */
6745 if (maybe_lt (offset, 0))
6746 return NULL_TREE;
6747 /* We can not determine ctor. */
6748 if (!ctor)
6749 return NULL_TREE;
6750 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6751 tree_to_uhwi (unit_size)
6752 * BITS_PER_UNIT,
6753 base);
6757 /* Fallthru. */
6759 case COMPONENT_REF:
6760 case BIT_FIELD_REF:
6761 case TARGET_MEM_REF:
6762 case MEM_REF:
/* Generic path: compute base plus constant extent and look up the
   defining constructor of the base.  */
6763 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6764 ctor = get_base_constructor (base, &offset, valueize);
6766 /* Empty constructor. Always fold to 0. */
6767 if (ctor == error_mark_node)
6768 return build_zero_cst (TREE_TYPE (t));
6769 /* We do not know precise address. */
6770 if (!known_size_p (max_size) || maybe_ne (max_size, size))
6771 return NULL_TREE;
6772 /* We can not determine ctor. */
6773 if (!ctor)
6774 return NULL_TREE;
6776 /* Out of bound array access. Value is undefined, but don't fold. */
6777 if (maybe_lt (offset, 0))
6778 return NULL_TREE;
6780 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6781 base);
6783 case REALPART_EXPR:
6784 case IMAGPART_EXPR:
/* Fold the complex operand first, then pick the requested part.  */
6786 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6787 if (c && TREE_CODE (c) == COMPLEX_CST)
6788 return fold_build1_loc (EXPR_LOCATION (t),
6789 TREE_CODE (t), TREE_TYPE (t), c);
6790 break;
6793 default:
6794 break;
6797 return NULL_TREE;
6800 tree
6801 fold_const_aggregate_ref (tree t)
6803 return fold_const_aggregate_ref_1 (t, NULL);
6806 /* Lookup virtual method with index TOKEN in a virtual table V
6807 at OFFSET.
6808 Set CAN_REFER if non-NULL to false if method
6809 is not referable or if the virtual table is ill-formed (such as rewritten
6810 by non-C++ produced symbol). Otherwise just return NULL in that case. */
6812 tree
6813 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6814 tree v,
6815 unsigned HOST_WIDE_INT offset,
6816 bool *can_refer)
6818 tree vtable = v, init, fn;
6819 unsigned HOST_WIDE_INT size;
6820 unsigned HOST_WIDE_INT elt_size, access_index;
6821 tree domain_type;
6823 if (can_refer)
6824 *can_refer = true;
6826 /* First of all double check we have virtual table. */
6827 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6829 /* Pass down that we lost track of the target. */
6830 if (can_refer)
6831 *can_refer = false;
6832 return NULL_TREE;
6835 init = ctor_for_folding (v);
6837 /* The virtual tables should always be born with constructors
6838 and we always should assume that they are available for
6839 folding. At the moment we do not stream them in all cases,
6840 but it should never happen that ctor seem unreachable. */
6841 gcc_assert (init);
6842 if (init == error_mark_node)
6844 /* Pass down that we lost track of the target. */
6845 if (can_refer)
6846 *can_refer = false;
6847 return NULL_TREE;
/* Convert the byte OFFSET plus slot index TOKEN into a single bit
   offset into the vtable array.  */
6849 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6850 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6851 offset *= BITS_PER_UNIT;
6852 offset += token * size;
6854 /* Lookup the value in the constructor that is assumed to be array.
6855 This is equivalent to
6856 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6857 offset, size, NULL);
6858 but in a constant time. We expect that frontend produced a simple
6859 array without indexed initializers. */
6861 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6862 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6863 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6864 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6866 access_index = offset / BITS_PER_UNIT / elt_size;
6867 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6869 /* This code makes an assumption that there are no
6870 indexed fields produced by C++ FE, so we can directly index the array. */
6871 if (access_index < CONSTRUCTOR_NELTS (init))
6873 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6874 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6875 STRIP_NOPS (fn);
6877 else
6878 fn = NULL;
6880 /* For type inconsistent program we may end up looking up virtual method
6881 in virtual table that does not contain TOKEN entries. We may overrun
6882 the virtual table and pick up a constant or RTTI info pointer.
6883 In any case the call is undefined. */
6884 if (!fn
6885 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6886 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6887 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6888 else
6890 fn = TREE_OPERAND (fn, 0);
6892 /* When cgraph node is missing and function is not public, we cannot
6893 devirtualize. This can happen in WHOPR when the actual method
6894 ends up in other partition, because we found devirtualization
6895 possibility too late. */
6896 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6898 if (can_refer)
6900 *can_refer = false;
6901 return fn;
6903 return NULL_TREE;
6907 /* Make sure we create a cgraph node for functions we'll reference.
6908 They can be non-existent if the reference comes from an entry
6909 of an external vtable for example. */
6910 cgraph_node::get_create (fn);
6912 return fn;
6915 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6916 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6917 KNOWN_BINFO carries the binfo describing the true type of
6918 OBJ_TYPE_REF_OBJECT(REF).
6919 Set CAN_REFER if non-NULL to false if method
6920 is not referable or if the virtual table is ill-formed (such as rewriten
6921 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6923 tree
6924 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6925 bool *can_refer)
6927 unsigned HOST_WIDE_INT offset;
6928 tree v;
6930 v = BINFO_VTABLE (known_binfo);
6931 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6932 if (!v)
6933 return NULL_TREE;
6935 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6937 if (can_refer)
6938 *can_refer = false;
6939 return NULL_TREE;
6941 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6944 /* Given a pointer value T, return a simplified version of an
6945 indirection through T, or NULL_TREE if no simplification is
6946 possible. Note that the resulting type may be different from
6947 the type pointed to in the sense that it is still compatible
6948 from the langhooks point of view. */
6950 tree
6951 gimple_fold_indirect_ref (tree t)
6953 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6954 tree sub = t;
6955 tree subtype;
6957 STRIP_NOPS (sub);
6958 subtype = TREE_TYPE (sub);
/* Only plain pointers are handled; ref-all pointers may alias anything
   so folding the indirection could change aliasing semantics.  */
6959 if (!POINTER_TYPE_P (subtype)
6960 || TYPE_REF_CAN_ALIAS_ALL (ptype))
6961 return NULL_TREE;
6963 if (TREE_CODE (sub) == ADDR_EXPR)
6965 tree op = TREE_OPERAND (sub, 0);
6966 tree optype = TREE_TYPE (op);
6967 /* *&p => p */
6968 if (useless_type_conversion_p (type, optype))
6969 return op;
6971 /* *(foo *)&fooarray => fooarray[0] */
6972 if (TREE_CODE (optype) == ARRAY_TYPE
6973 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6974 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6976 tree type_domain = TYPE_DOMAIN (optype);
6977 tree min_val = size_zero_node;
6978 if (type_domain && TYPE_MIN_VALUE (type_domain))
6979 min_val = TYPE_MIN_VALUE (type_domain);
6980 if (TREE_CODE (min_val) == INTEGER_CST)
6981 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6983 /* *(foo *)&complexfoo => __real__ complexfoo */
6984 else if (TREE_CODE (optype) == COMPLEX_TYPE
6985 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6986 return fold_build1 (REALPART_EXPR, type, op);
6987 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6988 else if (TREE_CODE (optype) == VECTOR_TYPE
6989 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6991 tree part_width = TYPE_SIZE (type);
6992 tree index = bitsize_int (0);
6993 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6997 /* *(p + CST) -> ... */
6998 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6999 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7001 tree addr = TREE_OPERAND (sub, 0);
7002 tree off = TREE_OPERAND (sub, 1);
7003 tree addrtype;
7005 STRIP_NOPS (addr);
7006 addrtype = TREE_TYPE (addr);
7008 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7009 if (TREE_CODE (addr) == ADDR_EXPR
7010 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7011 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7012 && tree_fits_uhwi_p (off))
7014 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7015 tree part_width = TYPE_SIZE (type);
7016 unsigned HOST_WIDE_INT part_widthi
7017 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7018 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7019 tree index = bitsize_int (indexi);
/* Only fold when the accessed element lies inside the vector.  */
7020 if (known_lt (offset / part_widthi,
7021 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7022 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7023 part_width, index);
7026 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7027 if (TREE_CODE (addr) == ADDR_EXPR
7028 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7029 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7031 tree size = TYPE_SIZE_UNIT (type);
7032 if (tree_int_cst_equal (size, off))
7033 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7036 /* *(p + CST) -> MEM_REF <p, CST>. */
7037 if (TREE_CODE (addr) != ADDR_EXPR
7038 || DECL_P (TREE_OPERAND (addr, 0)))
7039 return fold_build2 (MEM_REF, type,
7040 addr,
7041 wide_int_to_tree (ptype, wi::to_wide (off)));
7044 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7045 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7046 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7047 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7049 tree type_domain;
7050 tree min_val = size_zero_node;
7051 tree osub = sub;
/* Try to fold the inner indirection first; fall back to an explicit
   INDIRECT_REF if it does not simplify.  */
7052 sub = gimple_fold_indirect_ref (sub);
7053 if (! sub)
7054 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7055 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7056 if (type_domain && TYPE_MIN_VALUE (type_domain))
7057 min_val = TYPE_MIN_VALUE (type_domain);
7058 if (TREE_CODE (min_val) == INTEGER_CST)
7059 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7062 return NULL_TREE;
7065 /* Return true if CODE is an operation that when operating on signed
7066 integer types involves undefined behavior on overflow and the
7067 operation can be expressed with unsigned arithmetic. */
7069 bool
7070 arith_code_with_undefined_signed_overflow (tree_code code)
7072 switch (code)
7074 case PLUS_EXPR:
7075 case MINUS_EXPR:
7076 case MULT_EXPR:
7077 case NEGATE_EXPR:
7078 case POINTER_PLUS_EXPR:
7079 return true;
7080 default:
7081 return false;
7085 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7086 operation that can be transformed to unsigned arithmetic by converting
7087 its operand, carrying out the operation in the corresponding unsigned
7088 type and converting the result back to the original type.
7090 Returns a sequence of statements that replace STMT and also contain
7091 a modified form of STMT itself. */
7093 gimple_seq
7094 rewrite_to_defined_overflow (gimple *stmt)
7096 if (dump_file && (dump_flags & TDF_DETAILS))
7098 fprintf (dump_file, "rewriting stmt with undefined signed "
7099 "overflow ");
7100 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
/* Convert every operand of STMT to the unsigned counterpart of the LHS
   type; the conversions are emitted into STMTS ahead of STMT.  */
7103 tree lhs = gimple_assign_lhs (stmt);
7104 tree type = unsigned_type_for (TREE_TYPE (lhs));
7105 gimple_seq stmts = NULL;
7106 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7108 tree op = gimple_op (stmt, i);
7109 op = gimple_convert (&stmts, type, op);
7110 gimple_set_op (stmt, i, op);
/* Give STMT a fresh unsigned LHS; POINTER_PLUS_EXPR becomes a plain
   PLUS_EXPR since the operands are now integers.  */
7112 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7113 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7114 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7115 gimple_seq_add_stmt (&stmts, stmt);
/* Finally convert the unsigned result back to the original LHS.  */
7116 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7117 gimple_seq_add_stmt (&stmts, cvt);
7119 return stmts;
7123 /* The valueization hook we use for the gimple_build API simplification.
7124 This makes us match fold_buildN behavior by only combining with
7125 statements in the sequence(s) we are currently building. */
7127 static tree
7128 gimple_build_valueize (tree op)
7130 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7131 return op;
7132 return NULL_TREE;
7135 /* Build the expression CODE OP0 of type TYPE with location LOC,
7136 simplifying it first if possible. Returns the built
7137 expression value and appends statements possibly defining it
7138 to SEQ. */
7140 tree
7141 gimple_build (gimple_seq *seq, location_t loc,
7142 enum tree_code code, tree type, tree op0)
7144 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7145 if (!res)
7147 res = create_tmp_reg_or_ssa_name (type);
7148 gimple *stmt;
7149 if (code == REALPART_EXPR
7150 || code == IMAGPART_EXPR
7151 || code == VIEW_CONVERT_EXPR)
7152 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7153 else
7154 stmt = gimple_build_assign (res, code, op0);
7155 gimple_set_location (stmt, loc);
7156 gimple_seq_add_stmt_without_update (seq, stmt);
7158 return res;
7161 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7162 simplifying it first if possible. Returns the built
7163 expression value and appends statements possibly defining it
7164 to SEQ. */
7166 tree
7167 gimple_build (gimple_seq *seq, location_t loc,
7168 enum tree_code code, tree type, tree op0, tree op1)
7170 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7171 if (!res)
7173 res = create_tmp_reg_or_ssa_name (type);
7174 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7175 gimple_set_location (stmt, loc);
7176 gimple_seq_add_stmt_without_update (seq, stmt);
7178 return res;
7181 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7182 simplifying it first if possible. Returns the built
7183 expression value and appends statements possibly defining it
7184 to SEQ. */
7186 tree
7187 gimple_build (gimple_seq *seq, location_t loc,
7188 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7190 tree res = gimple_simplify (code, type, op0, op1, op2,
7191 seq, gimple_build_valueize);
7192 if (!res)
7194 res = create_tmp_reg_or_ssa_name (type);
7195 gimple *stmt;
7196 if (code == BIT_FIELD_REF)
7197 stmt = gimple_build_assign (res, code,
7198 build3 (code, type, op0, op1, op2));
7199 else
7200 stmt = gimple_build_assign (res, code, op0, op1, op2);
7201 gimple_set_location (stmt, loc);
7202 gimple_seq_add_stmt_without_update (seq, stmt);
7204 return res;
7207 /* Build the call FN (ARG0) with a result of type TYPE
7208 (or no result if TYPE is void) with location LOC,
7209 simplifying it first if possible. Returns the built
7210 expression value (or NULL_TREE if TYPE is void) and appends
7211 statements possibly defining it to SEQ. */
7213 tree
7214 gimple_build (gimple_seq *seq, location_t loc,
7215 enum built_in_function fn, tree type, tree arg0)
7217 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7218 if (!res)
7220 tree decl = builtin_decl_implicit (fn);
7221 gimple *stmt = gimple_build_call (decl, 1, arg0);
7222 if (!VOID_TYPE_P (type))
7224 res = create_tmp_reg_or_ssa_name (type);
7225 gimple_call_set_lhs (stmt, res);
7227 gimple_set_location (stmt, loc);
7228 gimple_seq_add_stmt_without_update (seq, stmt);
7230 return res;
7233 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7234 (or no result if TYPE is void) with location LOC,
7235 simplifying it first if possible. Returns the built
7236 expression value (or NULL_TREE if TYPE is void) and appends
7237 statements possibly defining it to SEQ. */
7239 tree
7240 gimple_build (gimple_seq *seq, location_t loc,
7241 enum built_in_function fn, tree type, tree arg0, tree arg1)
7243 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7244 if (!res)
7246 tree decl = builtin_decl_implicit (fn);
7247 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
7248 if (!VOID_TYPE_P (type))
7250 res = create_tmp_reg_or_ssa_name (type);
7251 gimple_call_set_lhs (stmt, res);
7253 gimple_set_location (stmt, loc);
7254 gimple_seq_add_stmt_without_update (seq, stmt);
7256 return res;
7259 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7260 (or no result if TYPE is void) with location LOC,
7261 simplifying it first if possible. Returns the built
7262 expression value (or NULL_TREE if TYPE is void) and appends
7263 statements possibly defining it to SEQ. */
7265 tree
7266 gimple_build (gimple_seq *seq, location_t loc,
7267 enum built_in_function fn, tree type,
7268 tree arg0, tree arg1, tree arg2)
7270 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7271 seq, gimple_build_valueize);
7272 if (!res)
7274 tree decl = builtin_decl_implicit (fn);
7275 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7276 if (!VOID_TYPE_P (type))
7278 res = create_tmp_reg_or_ssa_name (type);
7279 gimple_call_set_lhs (stmt, res);
7281 gimple_set_location (stmt, loc);
7282 gimple_seq_add_stmt_without_update (seq, stmt);
7284 return res;
7287 /* Build the conversion (TYPE) OP with a result of type TYPE
7288 with location LOC if such conversion is neccesary in GIMPLE,
7289 simplifying it first.
7290 Returns the built expression value and appends
7291 statements possibly defining it to SEQ. */
7293 tree
7294 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7296 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7297 return op;
7298 return gimple_build (seq, loc, NOP_EXPR, type, op);
7301 /* Build the conversion (ptrofftype) OP with a result of a type
7302 compatible with ptrofftype with location LOC if such conversion
7303 is neccesary in GIMPLE, simplifying it first.
7304 Returns the built expression value and appends
7305 statements possibly defining it to SEQ. */
7307 tree
7308 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7310 if (ptrofftype_p (TREE_TYPE (op)))
7311 return op;
7312 return gimple_convert (seq, loc, sizetype, op);
7315 /* Build a vector of type TYPE in which each element has the value OP.
7316 Return a gimple value for the result, appending any new statements
7317 to SEQ. */
7319 tree
7320 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7321 tree op)
7323 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7324 && !CONSTANT_CLASS_P (op))
7325 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7327 tree res, vec = build_vector_from_val (type, op);
7328 if (is_gimple_val (vec))
7329 return vec;
7330 if (gimple_in_ssa_p (cfun))
7331 res = make_ssa_name (type);
7332 else
7333 res = create_tmp_reg (type);
7334 gimple *stmt = gimple_build_assign (res, vec);
7335 gimple_set_location (stmt, loc);
7336 gimple_seq_add_stmt_without_update (seq, stmt);
7337 return res;
7340 /* Build a vector from BUILDER, handling the case in which some elements
7341 are non-constant. Return a gimple value for the result, appending any
7342 new instructions to SEQ.
7344 BUILDER must not have a stepped encoding on entry. This is because
7345 the function is not geared up to handle the arithmetic that would
7346 be needed in the variable case, and any code building a vector that
7347 is known to be constant should use BUILDER->build () directly. */
7349 tree
7350 gimple_build_vector (gimple_seq *seq, location_t loc,
7351 tree_vector_builder *builder)
7353 gcc_assert (builder->nelts_per_pattern () <= 2);
7354 unsigned int encoded_nelts = builder->encoded_nelts ();
7355 for (unsigned int i = 0; i < encoded_nelts; ++i)
7356 if (!TREE_CONSTANT ((*builder)[i]))
7358 tree type = builder->type ();
7359 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7360 vec<constructor_elt, va_gc> *v;
7361 vec_alloc (v, nelts);
7362 for (i = 0; i < nelts; ++i)
7363 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7365 tree res;
7366 if (gimple_in_ssa_p (cfun))
7367 res = make_ssa_name (type);
7368 else
7369 res = create_tmp_reg (type);
7370 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7371 gimple_set_location (stmt, loc);
7372 gimple_seq_add_stmt_without_update (seq, stmt);
7373 return res;
7375 return builder->build ();
7378 /* Return true if the result of assignment STMT is known to be non-negative.
7379 If the return value is based on the assumption that signed overflow is
7380 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7381 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7383 static bool
7384 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7385 int depth)
7387 enum tree_code code = gimple_assign_rhs_code (stmt);
7388 switch (get_gimple_rhs_class (code))
7390 case GIMPLE_UNARY_RHS:
7391 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7392 gimple_expr_type (stmt),
7393 gimple_assign_rhs1 (stmt),
7394 strict_overflow_p, depth);
7395 case GIMPLE_BINARY_RHS:
7396 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7397 gimple_expr_type (stmt),
7398 gimple_assign_rhs1 (stmt),
7399 gimple_assign_rhs2 (stmt),
7400 strict_overflow_p, depth);
7401 case GIMPLE_TERNARY_RHS:
7402 return false;
7403 case GIMPLE_SINGLE_RHS:
7404 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7405 strict_overflow_p, depth);
7406 case GIMPLE_INVALID_RHS:
7407 break;
7409 gcc_unreachable ();
7412 /* Return true if return value of call STMT is known to be non-negative.
7413 If the return value is based on the assumption that signed overflow is
7414 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7415 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7417 static bool
7418 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7419 int depth)
7421 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7422 gimple_call_arg (stmt, 0) : NULL_TREE;
7423 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7424 gimple_call_arg (stmt, 1) : NULL_TREE;
7426 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7427 gimple_call_combined_fn (stmt),
7428 arg0,
7429 arg1,
7430 strict_overflow_p, depth);
7433 /* Return true if return value of call STMT is known to be non-negative.
7434 If the return value is based on the assumption that signed overflow is
7435 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7436 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7438 static bool
7439 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7440 int depth)
7442 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7444 tree arg = gimple_phi_arg_def (stmt, i);
7445 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7446 return false;
7448 return true;
7451 /* Return true if STMT is known to compute a non-negative value.
7452 If the return value is based on the assumption that signed overflow is
7453 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7454 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7456 bool
7457 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7458 int depth)
7460 switch (gimple_code (stmt))
7462 case GIMPLE_ASSIGN:
7463 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7464 depth);
7465 case GIMPLE_CALL:
7466 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7467 depth);
7468 case GIMPLE_PHI:
7469 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7470 depth);
7471 default:
7472 return false;
7476 /* Return true if the floating-point value computed by assignment STMT
7477 is known to have an integer value. We also allow +Inf, -Inf and NaN
7478 to be considered integer values. Return false for signaling NaN.
7480 DEPTH is the current nesting depth of the query. */
7482 static bool
7483 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7485 enum tree_code code = gimple_assign_rhs_code (stmt);
7486 switch (get_gimple_rhs_class (code))
7488 case GIMPLE_UNARY_RHS:
7489 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7490 gimple_assign_rhs1 (stmt), depth);
7491 case GIMPLE_BINARY_RHS:
7492 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7493 gimple_assign_rhs1 (stmt),
7494 gimple_assign_rhs2 (stmt), depth);
7495 case GIMPLE_TERNARY_RHS:
7496 return false;
7497 case GIMPLE_SINGLE_RHS:
7498 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7499 case GIMPLE_INVALID_RHS:
7500 break;
7502 gcc_unreachable ();
7505 /* Return true if the floating-point value computed by call STMT is known
7506 to have an integer value. We also allow +Inf, -Inf and NaN to be
7507 considered integer values. Return false for signaling NaN.
7509 DEPTH is the current nesting depth of the query. */
7511 static bool
7512 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7514 tree arg0 = (gimple_call_num_args (stmt) > 0
7515 ? gimple_call_arg (stmt, 0)
7516 : NULL_TREE);
7517 tree arg1 = (gimple_call_num_args (stmt) > 1
7518 ? gimple_call_arg (stmt, 1)
7519 : NULL_TREE);
7520 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7521 arg0, arg1, depth);
7524 /* Return true if the floating-point result of phi STMT is known to have
7525 an integer value. We also allow +Inf, -Inf and NaN to be considered
7526 integer values. Return false for signaling NaN.
7528 DEPTH is the current nesting depth of the query. */
7530 static bool
7531 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7533 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7535 tree arg = gimple_phi_arg_def (stmt, i);
7536 if (!integer_valued_real_single_p (arg, depth + 1))
7537 return false;
7539 return true;
7542 /* Return true if the floating-point value computed by STMT is known
7543 to have an integer value. We also allow +Inf, -Inf and NaN to be
7544 considered integer values. Return false for signaling NaN.
7546 DEPTH is the current nesting depth of the query. */
7548 bool
7549 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7551 switch (gimple_code (stmt))
7553 case GIMPLE_ASSIGN:
7554 return gimple_assign_integer_valued_real_p (stmt, depth);
7555 case GIMPLE_CALL:
7556 return gimple_call_integer_valued_real_p (stmt, depth);
7557 case GIMPLE_PHI:
7558 return gimple_phi_integer_valued_real_p (stmt, depth);
7559 default:
7560 return false;