* gcc-interface/trans.c (process_freeze_entity): Be prepared for a
[official-gcc.git] / gcc / gimple-fold.c
blob24f7e76228b9000f6250e39365843982ba15b4c8
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-object-size.h"
44 #include "tree-ssa.h"
45 #include "tree-ssa-propagate.h"
46 #include "ipa-utils.h"
47 #include "tree-ssa-address.h"
48 #include "langhooks.h"
49 #include "gimplify-me.h"
50 #include "dbgcnt.h"
51 #include "builtins.h"
52 #include "tree-eh.h"
53 #include "gimple-match.h"
54 #include "gomp-constants.h"
55 #include "optabs-query.h"
56 #include "omp-general.h"
57 #include "ipa-chkp.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
68 /* Return true when DECL can be referenced from current unit.
69 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
70 We can get declarations that are not possible to reference for various
71 reasons:
73 1) When analyzing C++ virtual tables.
74 C++ virtual tables do have known constructors even
75 when they are keyed to other compilation unit.
76 Those tables can contain pointers to methods and vars
77 in other units. Those methods have both STATIC and EXTERNAL
78 set.
79 2) In WHOPR mode devirtualization might lead to reference
80       to method that was partitioned elsewhere.
81 In this case we have static VAR_DECL or FUNCTION_DECL
82 that has no corresponding callgraph/varpool node
83 declaring the body.
84 3) COMDAT functions referred by external vtables that
85 we devirtualize only during final compilation stage.
86 At this time we already decided that we will not output
87 the function body and thus we can't reference the symbol
88 directly. */
90 static bool
91 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
93 varpool_node *vnode;
94 struct cgraph_node *node;
95 symtab_node *snode;
97 if (DECL_ABSTRACT_P (decl))
98 return false;
100 /* We are concerned only about static/external vars and functions. */
101 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
102 || !VAR_OR_FUNCTION_DECL_P (decl))
103 return true;
105 /* Static objects can be referred only if they was not optimized out yet. */
106 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
108 /* Before we start optimizing unreachable code we can be sure all
109 static objects are defined. */
110 if (symtab->function_flags_ready)
111 return true;
112 snode = symtab_node::get (decl);
113 if (!snode || !snode->definition)
114 return false;
115 node = dyn_cast <cgraph_node *> (snode);
116 return !node || !node->global.inlined_to;
119 /* We will later output the initializer, so we can refer to it.
120 So we are concerned only when DECL comes from initializer of
121 external var or var that has been optimized out. */
122 if (!from_decl
123 || !VAR_P (from_decl)
124 || (!DECL_EXTERNAL (from_decl)
125 && (vnode = varpool_node::get (from_decl)) != NULL
126 && vnode->definition)
127 || (flag_ltrans
128 && (vnode = varpool_node::get (from_decl)) != NULL
129 && vnode->in_other_partition))
130 return true;
131 /* We are folding reference from external vtable. The vtable may reffer
132 to a symbol keyed to other compilation unit. The other compilation
133 unit may be in separate DSO and the symbol may be hidden. */
134 if (DECL_VISIBILITY_SPECIFIED (decl)
135 && DECL_EXTERNAL (decl)
136 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
137 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
138 return false;
139 /* When function is public, we always can introduce new reference.
140 Exception are the COMDAT functions where introducing a direct
141 reference imply need to include function body in the curren tunit. */
142 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
143 return true;
144 /* We have COMDAT. We are going to check if we still have definition
145 or if the definition is going to be output in other partition.
146 Bypass this when gimplifying; all needed functions will be produced.
148 As observed in PR20991 for already optimized out comdat virtual functions
149 it may be tempting to not necessarily give up because the copy will be
150 output elsewhere when corresponding vtable is output.
151 This is however not possible - ABI specify that COMDATs are output in
152 units where they are used and when the other unit was compiled with LTO
153 it is possible that vtable was kept public while the function itself
154 was privatized. */
155 if (!symtab->function_flags_ready)
156 return true;
158 snode = symtab_node::get (decl);
159 if (!snode
160 || ((!snode->definition || DECL_EXTERNAL (decl))
161 && (!snode->in_other_partition
162 || (!snode->forced_by_abi && !snode->force_output))))
163 return false;
164 node = dyn_cast <cgraph_node *> (snode);
165 return !node || !node->global.inlined_to;
168 /* Create a temporary for TYPE for a statement STMT. If the current function
169 is in SSA form, a SSA name is created. Otherwise a temporary register
170 is made. */
172 tree
173 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
175 if (gimple_in_ssa_p (cfun))
176 return make_ssa_name (type, stmt);
177 else
178 return create_tmp_reg (type);
181 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
182 acceptable form for is_gimple_min_invariant.
183 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
185 tree
186 canonicalize_constructor_val (tree cval, tree from_decl)
188 tree orig_cval = cval;
189 STRIP_NOPS (cval);
190 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
191 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
193 tree ptr = TREE_OPERAND (cval, 0);
194 if (is_gimple_min_invariant (ptr))
195 cval = build1_loc (EXPR_LOCATION (cval),
196 ADDR_EXPR, TREE_TYPE (ptr),
197 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
198 ptr,
199 fold_convert (ptr_type_node,
200 TREE_OPERAND (cval, 1))));
202 if (TREE_CODE (cval) == ADDR_EXPR)
204 tree base = NULL_TREE;
205 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
207 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
208 if (base)
209 TREE_OPERAND (cval, 0) = base;
211 else
212 base = get_base_address (TREE_OPERAND (cval, 0));
213 if (!base)
214 return NULL_TREE;
216 if (VAR_OR_FUNCTION_DECL_P (base)
217 && !can_refer_decl_in_current_unit_p (base, from_decl))
218 return NULL_TREE;
219 if (TREE_TYPE (base) == error_mark_node)
220 return NULL_TREE;
221 if (VAR_P (base))
222 TREE_ADDRESSABLE (base) = 1;
223 else if (TREE_CODE (base) == FUNCTION_DECL)
225 /* Make sure we create a cgraph node for functions we'll reference.
226 They can be non-existent if the reference comes from an entry
227 of an external vtable for example. */
228 cgraph_node::get_create (base);
230 /* Fixup types in global initializers. */
231 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
232 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
234 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
235 cval = fold_convert (TREE_TYPE (orig_cval), cval);
236 return cval;
238 if (TREE_OVERFLOW_P (cval))
239 return drop_tree_overflow (cval);
240 return orig_cval;
243 /* If SYM is a constant variable with known value, return the value.
244 NULL_TREE is returned otherwise. */
246 tree
247 get_symbol_constant_value (tree sym)
249 tree val = ctor_for_folding (sym);
250 if (val != error_mark_node)
252 if (val)
254 val = canonicalize_constructor_val (unshare_expr (val), sym);
255 if (val && is_gimple_min_invariant (val))
256 return val;
257 else
258 return NULL_TREE;
260 /* Variables declared 'const' without an initializer
261 have zero as the initializer if they may not be
262 overridden at link or run time. */
263 if (!val
264 && is_gimple_reg_type (TREE_TYPE (sym)))
265 return build_zero_cst (TREE_TYPE (sym));
268 return NULL_TREE;
273 /* Subroutine of fold_stmt. We perform several simplifications of the
274 memory reference tree EXPR and make sure to re-gimplify them properly
275 after propagation of constant addresses. IS_LHS is true if the
276 reference is supposed to be an lvalue. */
278 static tree
279 maybe_fold_reference (tree expr, bool is_lhs)
281 tree result;
283 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
284 || TREE_CODE (expr) == REALPART_EXPR
285 || TREE_CODE (expr) == IMAGPART_EXPR)
286 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
287 return fold_unary_loc (EXPR_LOCATION (expr),
288 TREE_CODE (expr),
289 TREE_TYPE (expr),
290 TREE_OPERAND (expr, 0));
291 else if (TREE_CODE (expr) == BIT_FIELD_REF
292 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
293 return fold_ternary_loc (EXPR_LOCATION (expr),
294 TREE_CODE (expr),
295 TREE_TYPE (expr),
296 TREE_OPERAND (expr, 0),
297 TREE_OPERAND (expr, 1),
298 TREE_OPERAND (expr, 2));
300 if (!is_lhs
301 && (result = fold_const_aggregate_ref (expr))
302 && is_gimple_min_invariant (result))
303 return result;
305 return NULL_TREE;
309 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
310 replacement rhs for the statement or NULL_TREE if no simplification
311 could be made. It is assumed that the operands have been previously
312 folded. */
314 static tree
315 fold_gimple_assign (gimple_stmt_iterator *si)
317 gimple *stmt = gsi_stmt (*si);
318 enum tree_code subcode = gimple_assign_rhs_code (stmt);
319 location_t loc = gimple_location (stmt);
321 tree result = NULL_TREE;
323 switch (get_gimple_rhs_class (subcode))
325 case GIMPLE_SINGLE_RHS:
327 tree rhs = gimple_assign_rhs1 (stmt);
329 if (TREE_CLOBBER_P (rhs))
330 return NULL_TREE;
332 if (REFERENCE_CLASS_P (rhs))
333 return maybe_fold_reference (rhs, false);
335 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
337 tree val = OBJ_TYPE_REF_EXPR (rhs);
338 if (is_gimple_min_invariant (val))
339 return val;
340 else if (flag_devirtualize && virtual_method_call_p (rhs))
342 bool final;
343 vec <cgraph_node *>targets
344 = possible_polymorphic_call_targets (rhs, stmt, &final);
345 if (final && targets.length () <= 1 && dbg_cnt (devirt))
347 if (dump_enabled_p ())
349 location_t loc = gimple_location_safe (stmt);
350 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
351 "resolving virtual function address "
352 "reference to function %s\n",
353 targets.length () == 1
354 ? targets[0]->name ()
355 : "NULL");
357 if (targets.length () == 1)
359 val = fold_convert (TREE_TYPE (val),
360 build_fold_addr_expr_loc
361 (loc, targets[0]->decl));
362 STRIP_USELESS_TYPE_CONVERSION (val);
364 else
365 /* We can not use __builtin_unreachable here because it
366 can not have address taken. */
367 val = build_int_cst (TREE_TYPE (val), 0);
368 return val;
373 else if (TREE_CODE (rhs) == ADDR_EXPR)
375 tree ref = TREE_OPERAND (rhs, 0);
376 tree tem = maybe_fold_reference (ref, true);
377 if (tem
378 && TREE_CODE (tem) == MEM_REF
379 && integer_zerop (TREE_OPERAND (tem, 1)))
380 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
381 else if (tem)
382 result = fold_convert (TREE_TYPE (rhs),
383 build_fold_addr_expr_loc (loc, tem));
384 else if (TREE_CODE (ref) == MEM_REF
385 && integer_zerop (TREE_OPERAND (ref, 1)))
386 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
388 if (result)
390 /* Strip away useless type conversions. Both the
391 NON_LVALUE_EXPR that may have been added by fold, and
392 "useless" type conversions that might now be apparent
393 due to propagation. */
394 STRIP_USELESS_TYPE_CONVERSION (result);
396 if (result != rhs && valid_gimple_rhs_p (result))
397 return result;
401 else if (TREE_CODE (rhs) == CONSTRUCTOR
402 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
404 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
405 unsigned i;
406 tree val;
408 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
409 if (! CONSTANT_CLASS_P (val))
410 return NULL_TREE;
412 return build_vector_from_ctor (TREE_TYPE (rhs),
413 CONSTRUCTOR_ELTS (rhs));
416 else if (DECL_P (rhs))
417 return get_symbol_constant_value (rhs);
419 break;
421 case GIMPLE_UNARY_RHS:
422 break;
424 case GIMPLE_BINARY_RHS:
425 break;
427 case GIMPLE_TERNARY_RHS:
428 result = fold_ternary_loc (loc, subcode,
429 TREE_TYPE (gimple_assign_lhs (stmt)),
430 gimple_assign_rhs1 (stmt),
431 gimple_assign_rhs2 (stmt),
432 gimple_assign_rhs3 (stmt));
434 if (result)
436 STRIP_USELESS_TYPE_CONVERSION (result);
437 if (valid_gimple_rhs_p (result))
438 return result;
440 break;
442 case GIMPLE_INVALID_RHS:
443 gcc_unreachable ();
446 return NULL_TREE;
450 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
451 adjusting the replacement stmts location and virtual operands.
452 If the statement has a lhs the last stmt in the sequence is expected
453 to assign to that lhs. */
455 static void
456 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
458 gimple *stmt = gsi_stmt (*si_p);
460 if (gimple_has_location (stmt))
461 annotate_all_with_location (stmts, gimple_location (stmt));
463 /* First iterate over the replacement statements backward, assigning
464 virtual operands to their defining statements. */
465 gimple *laststore = NULL;
466 for (gimple_stmt_iterator i = gsi_last (stmts);
467 !gsi_end_p (i); gsi_prev (&i))
469 gimple *new_stmt = gsi_stmt (i);
470 if ((gimple_assign_single_p (new_stmt)
471 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
472 || (is_gimple_call (new_stmt)
473 && (gimple_call_flags (new_stmt)
474 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
476 tree vdef;
477 if (!laststore)
478 vdef = gimple_vdef (stmt);
479 else
480 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
481 gimple_set_vdef (new_stmt, vdef);
482 if (vdef && TREE_CODE (vdef) == SSA_NAME)
483 SSA_NAME_DEF_STMT (vdef) = new_stmt;
484 laststore = new_stmt;
488 /* Second iterate over the statements forward, assigning virtual
489 operands to their uses. */
490 tree reaching_vuse = gimple_vuse (stmt);
491 for (gimple_stmt_iterator i = gsi_start (stmts);
492 !gsi_end_p (i); gsi_next (&i))
494 gimple *new_stmt = gsi_stmt (i);
495 /* If the new statement possibly has a VUSE, update it with exact SSA
496 name we know will reach this one. */
497 if (gimple_has_mem_ops (new_stmt))
498 gimple_set_vuse (new_stmt, reaching_vuse);
499 gimple_set_modified (new_stmt, true);
500 if (gimple_vdef (new_stmt))
501 reaching_vuse = gimple_vdef (new_stmt);
504 /* If the new sequence does not do a store release the virtual
505 definition of the original statement. */
506 if (reaching_vuse
507 && reaching_vuse == gimple_vuse (stmt))
509 tree vdef = gimple_vdef (stmt);
510 if (vdef
511 && TREE_CODE (vdef) == SSA_NAME)
513 unlink_stmt_vdef (stmt);
514 release_ssa_name (vdef);
518 /* Finally replace the original statement with the sequence. */
519 gsi_replace_with_seq (si_p, stmts, false);
522 /* Convert EXPR into a GIMPLE value suitable for substitution on the
523 RHS of an assignment. Insert the necessary statements before
524 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
525 is replaced. If the call is expected to produces a result, then it
526 is replaced by an assignment of the new RHS to the result variable.
527 If the result is to be ignored, then the call is replaced by a
528 GIMPLE_NOP. A proper VDEF chain is retained by making the first
529 VUSE and the last VDEF of the whole sequence be the same as the replaced
530 statement and using new SSA names for stores in between. */
532 void
533 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
535 tree lhs;
536 gimple *stmt, *new_stmt;
537 gimple_stmt_iterator i;
538 gimple_seq stmts = NULL;
540 stmt = gsi_stmt (*si_p);
542 gcc_assert (is_gimple_call (stmt));
544 push_gimplify_context (gimple_in_ssa_p (cfun));
546 lhs = gimple_call_lhs (stmt);
547 if (lhs == NULL_TREE)
549 gimplify_and_add (expr, &stmts);
550 /* We can end up with folding a memcpy of an empty class assignment
551 which gets optimized away by C++ gimplification. */
552 if (gimple_seq_empty_p (stmts))
554 pop_gimplify_context (NULL);
555 if (gimple_in_ssa_p (cfun))
557 unlink_stmt_vdef (stmt);
558 release_defs (stmt);
560 gsi_replace (si_p, gimple_build_nop (), false);
561 return;
564 else
566 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
567 new_stmt = gimple_build_assign (lhs, tmp);
568 i = gsi_last (stmts);
569 gsi_insert_after_without_update (&i, new_stmt,
570 GSI_CONTINUE_LINKING);
573 pop_gimplify_context (NULL);
575 gsi_replace_with_seq_vops (si_p, stmts);
579 /* Replace the call at *GSI with the gimple value VAL. */
581 void
582 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
584 gimple *stmt = gsi_stmt (*gsi);
585 tree lhs = gimple_call_lhs (stmt);
586 gimple *repl;
587 if (lhs)
589 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
590 val = fold_convert (TREE_TYPE (lhs), val);
591 repl = gimple_build_assign (lhs, val);
593 else
594 repl = gimple_build_nop ();
595 tree vdef = gimple_vdef (stmt);
596 if (vdef && TREE_CODE (vdef) == SSA_NAME)
598 unlink_stmt_vdef (stmt);
599 release_ssa_name (vdef);
601 gsi_replace (gsi, repl, false);
604 /* Replace the call at *GSI with the new call REPL and fold that
605 again. */
607 static void
608 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
610 gimple *stmt = gsi_stmt (*gsi);
611 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
612 gimple_set_location (repl, gimple_location (stmt));
613 if (gimple_vdef (stmt)
614 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
616 gimple_set_vdef (repl, gimple_vdef (stmt));
617 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
619 if (gimple_vuse (stmt))
620 gimple_set_vuse (repl, gimple_vuse (stmt));
621 gsi_replace (gsi, repl, false);
622 fold_stmt (gsi);
625 /* Return true if VAR is a VAR_DECL or a component thereof. */
627 static bool
628 var_decl_component_p (tree var)
630 tree inner = var;
631 while (handled_component_p (inner))
632 inner = TREE_OPERAND (inner, 0);
633 return SSA_VAR_P (inner);
636 /* If the SIZE argument representing the size of an object is in a range
637 of values of which exactly one is valid (and that is zero), return
638 true, otherwise false. */
640 static bool
641 size_must_be_zero_p (tree size)
643 if (integer_zerop (size))
644 return true;
646 if (TREE_CODE (size) != SSA_NAME)
647 return false;
649 wide_int min, max;
650 enum value_range_type rtype = get_range_info (size, &min, &max);
651 if (rtype != VR_ANTI_RANGE)
652 return false;
654 tree type = TREE_TYPE (size);
655 int prec = TYPE_PRECISION (type);
657 wide_int wone = wi::one (prec);
659 /* Compute the value of SSIZE_MAX, the largest positive value that
660 can be stored in ssize_t, the signed counterpart of size_t. */
661 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
663 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
666 /* Fold function call to builtin mem{{,p}cpy,move}. Return
667 false if no simplification can be made.
668 If ENDP is 0, return DEST (like memcpy).
669 If ENDP is 1, return DEST+LEN (like mempcpy).
670 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
671 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
672 (memmove). */
674 static bool
675 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
676 tree dest, tree src, int endp)
678 gimple *stmt = gsi_stmt (*gsi);
679 tree lhs = gimple_call_lhs (stmt);
680 tree len = gimple_call_arg (stmt, 2);
681 tree destvar, srcvar;
682 location_t loc = gimple_location (stmt);
684 /* If the LEN parameter is a constant zero or in range where
685 the only valid value is zero, return DEST. */
686 if (size_must_be_zero_p (len))
688 gimple *repl;
689 if (gimple_call_lhs (stmt))
690 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
691 else
692 repl = gimple_build_nop ();
693 tree vdef = gimple_vdef (stmt);
694 if (vdef && TREE_CODE (vdef) == SSA_NAME)
696 unlink_stmt_vdef (stmt);
697 release_ssa_name (vdef);
699 gsi_replace (gsi, repl, false);
700 return true;
703 /* If SRC and DEST are the same (and not volatile), return
704 DEST{,+LEN,+LEN-1}. */
705 if (operand_equal_p (src, dest, 0))
707 unlink_stmt_vdef (stmt);
708 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
709 release_ssa_name (gimple_vdef (stmt));
710 if (!lhs)
712 gsi_replace (gsi, gimple_build_nop (), false);
713 return true;
715 goto done;
717 else
719 tree srctype, desttype;
720 unsigned int src_align, dest_align;
721 tree off0;
723 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
724 pointers as wide integer) and also may result in huge function
725 size because of inlined bounds copy. Thus don't inline for
726 functions we want to instrument. */
727 if (flag_check_pointer_bounds
728 && chkp_instrumentable_p (cfun->decl)
729 /* Even if data may contain pointers we can inline if copy
730 less than a pointer size. */
731 && (!tree_fits_uhwi_p (len)
732 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
733 return false;
735 /* Build accesses at offset zero with a ref-all character type. */
736 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
737 ptr_mode, true), 0);
739 /* If we can perform the copy efficiently with first doing all loads
740 and then all stores inline it that way. Currently efficiently
741 means that we can load all the memory into a single integer
742 register which is what MOVE_MAX gives us. */
743 src_align = get_pointer_alignment (src);
744 dest_align = get_pointer_alignment (dest);
745 if (tree_fits_uhwi_p (len)
746 && compare_tree_int (len, MOVE_MAX) <= 0
747 /* ??? Don't transform copies from strings with known length this
748 confuses the tree-ssa-strlen.c. This doesn't handle
749 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
750 reason. */
751 && !c_strlen (src, 2))
753 unsigned ilen = tree_to_uhwi (len);
754 if (pow2p_hwi (ilen))
756 scalar_int_mode mode;
757 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
758 if (type
759 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
760 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
761 /* If the destination pointer is not aligned we must be able
762 to emit an unaligned store. */
763 && (dest_align >= GET_MODE_ALIGNMENT (mode)
764 || !targetm.slow_unaligned_access (mode, dest_align)
765 || (optab_handler (movmisalign_optab, mode)
766 != CODE_FOR_nothing)))
768 tree srctype = type;
769 tree desttype = type;
770 if (src_align < GET_MODE_ALIGNMENT (mode))
771 srctype = build_aligned_type (type, src_align);
772 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
773 tree tem = fold_const_aggregate_ref (srcmem);
774 if (tem)
775 srcmem = tem;
776 else if (src_align < GET_MODE_ALIGNMENT (mode)
777 && targetm.slow_unaligned_access (mode, src_align)
778 && (optab_handler (movmisalign_optab, mode)
779 == CODE_FOR_nothing))
780 srcmem = NULL_TREE;
781 if (srcmem)
783 gimple *new_stmt;
784 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
786 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
787 srcmem
788 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
789 new_stmt);
790 gimple_assign_set_lhs (new_stmt, srcmem);
791 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
792 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
794 if (dest_align < GET_MODE_ALIGNMENT (mode))
795 desttype = build_aligned_type (type, dest_align);
796 new_stmt
797 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
798 dest, off0),
799 srcmem);
800 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
801 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
802 if (gimple_vdef (new_stmt)
803 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
804 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
805 if (!lhs)
807 gsi_replace (gsi, new_stmt, false);
808 return true;
810 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
811 goto done;
817 if (endp == 3)
819 /* Both DEST and SRC must be pointer types.
820 ??? This is what old code did. Is the testing for pointer types
821 really mandatory?
823 If either SRC is readonly or length is 1, we can use memcpy. */
824 if (!dest_align || !src_align)
825 return false;
826 if (readonly_data_expr (src)
827 || (tree_fits_uhwi_p (len)
828 && (MIN (src_align, dest_align) / BITS_PER_UNIT
829 >= tree_to_uhwi (len))))
831 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
832 if (!fn)
833 return false;
834 gimple_call_set_fndecl (stmt, fn);
835 gimple_call_set_arg (stmt, 0, dest);
836 gimple_call_set_arg (stmt, 1, src);
837 fold_stmt (gsi);
838 return true;
841 /* If *src and *dest can't overlap, optimize into memcpy as well. */
842 if (TREE_CODE (src) == ADDR_EXPR
843 && TREE_CODE (dest) == ADDR_EXPR)
845 tree src_base, dest_base, fn;
846 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
847 HOST_WIDE_INT maxsize;
849 srcvar = TREE_OPERAND (src, 0);
850 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
851 if (src_base == NULL)
852 src_base = srcvar;
853 destvar = TREE_OPERAND (dest, 0);
854 dest_base = get_addr_base_and_unit_offset (destvar,
855 &dest_offset);
856 if (dest_base == NULL)
857 dest_base = destvar;
858 if (tree_fits_uhwi_p (len))
859 maxsize = tree_to_uhwi (len);
860 else
861 maxsize = -1;
862 if (SSA_VAR_P (src_base)
863 && SSA_VAR_P (dest_base))
865 if (operand_equal_p (src_base, dest_base, 0)
866 && ranges_overlap_p (src_offset, maxsize,
867 dest_offset, maxsize))
868 return false;
870 else if (TREE_CODE (src_base) == MEM_REF
871 && TREE_CODE (dest_base) == MEM_REF)
873 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
874 TREE_OPERAND (dest_base, 0), 0))
875 return false;
876 offset_int off = mem_ref_offset (src_base) + src_offset;
877 if (!wi::fits_shwi_p (off))
878 return false;
879 src_offset = off.to_shwi ();
881 off = mem_ref_offset (dest_base) + dest_offset;
882 if (!wi::fits_shwi_p (off))
883 return false;
884 dest_offset = off.to_shwi ();
885 if (ranges_overlap_p (src_offset, maxsize,
886 dest_offset, maxsize))
887 return false;
889 else
890 return false;
892 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
893 if (!fn)
894 return false;
895 gimple_call_set_fndecl (stmt, fn);
896 gimple_call_set_arg (stmt, 0, dest);
897 gimple_call_set_arg (stmt, 1, src);
898 fold_stmt (gsi);
899 return true;
902 /* If the destination and source do not alias optimize into
903 memcpy as well. */
904 if ((is_gimple_min_invariant (dest)
905 || TREE_CODE (dest) == SSA_NAME)
906 && (is_gimple_min_invariant (src)
907 || TREE_CODE (src) == SSA_NAME))
909 ao_ref destr, srcr;
910 ao_ref_init_from_ptr_and_size (&destr, dest, len);
911 ao_ref_init_from_ptr_and_size (&srcr, src, len);
912 if (!refs_may_alias_p_1 (&destr, &srcr, false))
914 tree fn;
915 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
916 if (!fn)
917 return false;
918 gimple_call_set_fndecl (stmt, fn);
919 gimple_call_set_arg (stmt, 0, dest);
920 gimple_call_set_arg (stmt, 1, src);
921 fold_stmt (gsi);
922 return true;
926 return false;
929 if (!tree_fits_shwi_p (len))
930 return false;
931 if (!POINTER_TYPE_P (TREE_TYPE (src))
932 || !POINTER_TYPE_P (TREE_TYPE (dest)))
933 return false;
934 /* In the following try to find a type that is most natural to be
935 used for the memcpy source and destination and that allows
936 the most optimization when memcpy is turned into a plain assignment
937 using that type. In theory we could always use a char[len] type
938 but that only gains us that the destination and source possibly
939 no longer will have their address taken. */
940 srctype = TREE_TYPE (TREE_TYPE (src));
941 if (TREE_CODE (srctype) == ARRAY_TYPE
942 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
943 srctype = TREE_TYPE (srctype);
944 desttype = TREE_TYPE (TREE_TYPE (dest));
945 if (TREE_CODE (desttype) == ARRAY_TYPE
946 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
947 desttype = TREE_TYPE (desttype);
948 if (TREE_ADDRESSABLE (srctype)
949 || TREE_ADDRESSABLE (desttype))
950 return false;
952 /* Make sure we are not copying using a floating-point mode or
953 a type whose size possibly does not match its precision. */
954 if (FLOAT_MODE_P (TYPE_MODE (desttype))
955 || TREE_CODE (desttype) == BOOLEAN_TYPE
956 || TREE_CODE (desttype) == ENUMERAL_TYPE)
957 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
958 if (FLOAT_MODE_P (TYPE_MODE (srctype))
959 || TREE_CODE (srctype) == BOOLEAN_TYPE
960 || TREE_CODE (srctype) == ENUMERAL_TYPE)
961 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
962 if (!srctype)
963 srctype = desttype;
964 if (!desttype)
965 desttype = srctype;
966 if (!srctype)
967 return false;
969 src_align = get_pointer_alignment (src);
970 dest_align = get_pointer_alignment (dest);
971 if (dest_align < TYPE_ALIGN (desttype)
972 || src_align < TYPE_ALIGN (srctype))
973 return false;
975 destvar = NULL_TREE;
976 if (TREE_CODE (dest) == ADDR_EXPR
977 && var_decl_component_p (TREE_OPERAND (dest, 0))
978 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
979 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
981 srcvar = NULL_TREE;
982 if (TREE_CODE (src) == ADDR_EXPR
983 && var_decl_component_p (TREE_OPERAND (src, 0))
984 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
986 if (!destvar
987 || src_align >= TYPE_ALIGN (desttype))
988 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
989 src, off0);
990 else if (!STRICT_ALIGNMENT)
992 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
993 src_align);
994 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
998 if (srcvar == NULL_TREE && destvar == NULL_TREE)
999 return false;
1001 if (srcvar == NULL_TREE)
1003 if (src_align >= TYPE_ALIGN (desttype))
1004 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1005 else
1007 if (STRICT_ALIGNMENT)
1008 return false;
1009 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1010 src_align);
1011 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1014 else if (destvar == NULL_TREE)
1016 if (dest_align >= TYPE_ALIGN (srctype))
1017 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1018 else
1020 if (STRICT_ALIGNMENT)
1021 return false;
1022 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1023 dest_align);
1024 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1028 gimple *new_stmt;
1029 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1031 tree tem = fold_const_aggregate_ref (srcvar);
1032 if (tem)
1033 srcvar = tem;
1034 if (! is_gimple_min_invariant (srcvar))
1036 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1037 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1038 new_stmt);
1039 gimple_assign_set_lhs (new_stmt, srcvar);
1040 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1041 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1043 new_stmt = gimple_build_assign (destvar, srcvar);
1044 goto set_vop_and_replace;
1047 /* We get an aggregate copy. Use an unsigned char[] type to
1048 perform the copying to preserve padding and to avoid any issues
1049 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1050 desttype = build_array_type_nelts (unsigned_char_type_node,
1051 tree_to_uhwi (len));
1052 srctype = desttype;
1053 if (src_align > TYPE_ALIGN (srctype))
1054 srctype = build_aligned_type (srctype, src_align);
1055 if (dest_align > TYPE_ALIGN (desttype))
1056 desttype = build_aligned_type (desttype, dest_align);
1057 new_stmt
1058 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1059 fold_build2 (MEM_REF, srctype, src, off0));
1060 set_vop_and_replace:
1061 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1062 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1063 if (gimple_vdef (new_stmt)
1064 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1065 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1066 if (!lhs)
1068 gsi_replace (gsi, new_stmt, false);
1069 return true;
1071 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1074 done:
1075 gimple_seq stmts = NULL;
1076 if (endp == 0 || endp == 3)
1077 len = NULL_TREE;
1078 else if (endp == 2)
1079 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1080 ssize_int (1));
1081 if (endp == 2 || endp == 1)
1083 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1084 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1085 TREE_TYPE (dest), dest, len);
1088 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1089 gimple *repl = gimple_build_assign (lhs, dest);
1090 gsi_replace (gsi, repl, false);
1091 return true;
1094 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1095 to built-in memcmp (a, b, len). */
1097 static bool
1098 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1100 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1102 if (!fn)
1103 return false;
1105 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1107 gimple *stmt = gsi_stmt (*gsi);
1108 tree a = gimple_call_arg (stmt, 0);
1109 tree b = gimple_call_arg (stmt, 1);
1110 tree len = gimple_call_arg (stmt, 2);
1112 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1113 replace_call_with_call_and_fold (gsi, repl);
1115 return true;
1118 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1119 to built-in memmove (dest, src, len). */
1121 static bool
1122 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1124 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1126 if (!fn)
1127 return false;
1129 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1130 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1131 len) into memmove (dest, src, len). */
1133 gimple *stmt = gsi_stmt (*gsi);
1134 tree src = gimple_call_arg (stmt, 0);
1135 tree dest = gimple_call_arg (stmt, 1);
1136 tree len = gimple_call_arg (stmt, 2);
1138 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1139 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1140 replace_call_with_call_and_fold (gsi, repl);
1142 return true;
1145 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1146 to built-in memset (dest, 0, len). */
1148 static bool
1149 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1151 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1153 if (!fn)
1154 return false;
1156 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1158 gimple *stmt = gsi_stmt (*gsi);
1159 tree dest = gimple_call_arg (stmt, 0);
1160 tree len = gimple_call_arg (stmt, 1);
1162 gimple_seq seq = NULL;
1163 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1164 gimple_seq_add_stmt_without_update (&seq, repl);
1165 gsi_replace_with_seq_vops (gsi, seq);
1166 fold_stmt (gsi);
1168 return true;
1171 /* Fold function call to builtin memset or bzero at *GSI setting the
1172 memory of size LEN to VAL. Return whether a simplification was made. */
1174 static bool
1175 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1177 gimple *stmt = gsi_stmt (*gsi);
1178 tree etype;
1179 unsigned HOST_WIDE_INT length, cval;
1181 /* If the LEN parameter is zero, return DEST. */
1182 if (integer_zerop (len))
1184 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1185 return true;
1188 if (! tree_fits_uhwi_p (len))
1189 return false;
1191 if (TREE_CODE (c) != INTEGER_CST)
1192 return false;
1194 tree dest = gimple_call_arg (stmt, 0);
1195 tree var = dest;
1196 if (TREE_CODE (var) != ADDR_EXPR)
1197 return false;
1199 var = TREE_OPERAND (var, 0);
1200 if (TREE_THIS_VOLATILE (var))
1201 return false;
1203 etype = TREE_TYPE (var);
1204 if (TREE_CODE (etype) == ARRAY_TYPE)
1205 etype = TREE_TYPE (etype);
1207 if (!INTEGRAL_TYPE_P (etype)
1208 && !POINTER_TYPE_P (etype))
1209 return NULL_TREE;
1211 if (! var_decl_component_p (var))
1212 return NULL_TREE;
1214 length = tree_to_uhwi (len);
1215 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1216 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1217 return NULL_TREE;
1219 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1220 return NULL_TREE;
1222 if (integer_zerop (c))
1223 cval = 0;
1224 else
1226 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1227 return NULL_TREE;
1229 cval = TREE_INT_CST_LOW (c);
1230 cval &= 0xff;
1231 cval |= cval << 8;
1232 cval |= cval << 16;
1233 cval |= (cval << 31) << 1;
1236 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1237 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1238 gimple_set_vuse (store, gimple_vuse (stmt));
1239 tree vdef = gimple_vdef (stmt);
1240 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1242 gimple_set_vdef (store, gimple_vdef (stmt));
1243 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1245 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1246 if (gimple_call_lhs (stmt))
1248 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1249 gsi_replace (gsi, asgn, false);
1251 else
1253 gimple_stmt_iterator gsi2 = *gsi;
1254 gsi_prev (gsi);
1255 gsi_remove (&gsi2, true);
1258 return true;
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return False.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy, bool *flexp)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0),
				     length, visited, type, fuzzy, flexp);
	}

      /* Determine VAL: for TYPE 2 the (constant, non-negative) value
	 of ARG itself, otherwise the constant string length of ARG.  */
      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      /* In fuzzy mode fall back to the bound implied by the type of a
	 character array ARG may refer to.  */
      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);
	      val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
	      if (!val || integer_zerop (val))
		return false;
	      /* An N-byte array can hold at most an N-1 character
		 string plus the terminating nul.  */
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }
	}

      if (!val)
	return false;

      /* Update the minimum when VAL is smaller than what we have.  */
      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      if (*maxlen)
	{
	  /* For TYPE > 0 widen the maximum; for TYPE == 0 any second,
	     different length is a failure.  */
	  if (type > 0)
	    {
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A COND_EXPR value constrains both of its arms.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
	    && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
	      {
		/* In fuzzy mode record "unbounded" instead of failing.  */
		if (fuzzy)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  }
      }
      return true;

    default:
      return false;
    }
}
1446 /* Determine the minimum and maximum value or string length that ARG
1447 refers to and store each in the first two elements of MINMAXLEN.
1448 For expressions that point to strings of unknown lengths that are
1449 character arrays, use the upper bound of the array as the maximum
1450 length. For example, given an expression like 'x ? array : "xyz"'
1451 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1452 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1453 stored in array.
1454 Return true if the range of the string lengths has been obtained
1455 from the upper bound of an array at the end of a struct. Such
1456 an array may hold a string that's longer than its upper bound
1457 due to it being used as a poor-man's flexible array member. */
1459 bool
1460 get_range_strlen (tree arg, tree minmaxlen[2])
1462 bitmap visited = NULL;
1464 minmaxlen[0] = NULL_TREE;
1465 minmaxlen[1] = NULL_TREE;
1467 bool flexarray = false;
1468 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1470 if (visited)
1471 BITMAP_FREE (visited);
1473 return flexarray;
1476 tree
1477 get_maxval_strlen (tree arg, int type)
1479 bitmap visited = NULL;
1480 tree len[2] = { NULL_TREE, NULL_TREE };
1482 bool dummy;
1483 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1484 len[1] = NULL_TREE;
1485 if (visited)
1486 BITMAP_FREE (visited);
1488 return len[1];
1492 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1493 If LEN is not NULL, it represents the length of the string to be
1494 copied. Return NULL_TREE if no simplification can be made. */
1496 static bool
1497 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1498 tree dest, tree src)
1500 location_t loc = gimple_location (gsi_stmt (*gsi));
1501 tree fn;
1503 /* If SRC and DEST are the same (and not volatile), return DEST. */
1504 if (operand_equal_p (src, dest, 0))
1506 replace_call_with_value (gsi, dest);
1507 return true;
1510 if (optimize_function_for_size_p (cfun))
1511 return false;
1513 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1514 if (!fn)
1515 return false;
1517 tree len = get_maxval_strlen (src, 0);
1518 if (!len)
1519 return false;
1521 len = fold_convert_loc (loc, size_type_node, len);
1522 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1523 len = force_gimple_operand_gsi (gsi, len, true,
1524 NULL_TREE, true, GSI_SAME_STMT);
1525 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1526 replace_call_with_call_and_fold (gsi, repl);
1527 return true;
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  Also emits
   -Wstringop-truncation diagnostics when the copy is known to leave
   the destination unterminated.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when the destination is declared with attribute "nonstring",
     i.e. is not expected to hold a nul-terminated string.  */
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array or pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, 0);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			call, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			call, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  if (!nonstring)
    {
      if (tree_int_cst_lt (len, slen))
	{
	  /* LEN < strlen (SRC): the copy drops part of the source.  */
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  warning_at (loc, OPT_Wstringop_truncation,
		      (tree_int_cst_equal (size_one_node, len)
		       ? G_("%G%qD output truncated copying %E byte "
			    "from a string of length %E")
		       : G_("%G%qD output truncated copying %E bytes "
			    "from a string of length %E")),
		      call, fndecl, len, slen);
	}
      else if (tree_int_cst_equal (len, slen))
	{
	  /* LEN == strlen (SRC): the nul terminator is dropped.  */
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  warning_at (loc, OPT_Wstringop_truncation,
		      (tree_int_cst_equal (size_one_node, len)
		       ? G_("%G%qD output truncated before terminating nul "
			    "copying %E byte from a string of the same "
			    "length")
		       : G_("%G%qD output truncated before terminating nul "
			    "copying %E bytes from a string of the same "
			    "length")),
		      call, fndecl, len);
	}
    }

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Fold function call to builtin strchr or strrchr (IS_STRRCHR selects
   which).  If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  Return true if a simplification
   was made.  */

static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* All the folds below produce a value for the call's result.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: do the search at compile time and replace
     the call with NULL or with STR plus the match offset.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining folds only apply when searching for the nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  Return true if a simplification was made.  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);
  const char *p, *q;

  /* All the folds below produce a value for the call's result.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Every fold below requires a constant needle.  */
  q = c_getstr (needle);
  if (q == NULL)
    return false;

  if ((p = c_getstr (haystack)))
    {
      /* Both strings are known: do the search at compile time.  */
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Replace the call with HAYSTACK plus the match offset.  */
      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible.  Otherwise replace
   the call -- with DST directly when SRC is the empty string, or, when
   optimizing for speed and the length of SRC is known, with an
   equivalent strlen (dst) + memcpy sequence -- and return true.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen/memcpy expansion below is a speed optimization.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST, so assign it to the call's result.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1873 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1874 are the arguments to the call. */
1876 static bool
1877 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1879 gimple *stmt = gsi_stmt (*gsi);
1880 tree dest = gimple_call_arg (stmt, 0);
1881 tree src = gimple_call_arg (stmt, 1);
1882 tree size = gimple_call_arg (stmt, 2);
1883 tree fn;
1884 const char *p;
1887 p = c_getstr (src);
1888 /* If the SRC parameter is "", return DEST. */
1889 if (p && *p == '\0')
1891 replace_call_with_value (gsi, dest);
1892 return true;
1895 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1896 return false;
1898 /* If __builtin_strcat_chk is used, assume strcat is available. */
1899 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1900 if (!fn)
1901 return false;
1903 gimple *repl = gimple_build_call (fn, 2, dest, src);
1904 replace_call_with_call_and_fold (gsi, repl);
1905 return true;
/* Simplify a call to the strncat builtin.  Return true if the call was
   replaced -- with DST when nothing would be appended, or with a call
   to strcat when the bound is at least the source length.  Also emits
   -Wstringop-overflow warnings for suspicious bounds.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The folds below need both a constant bound and a constant source.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  /* Suppress further diagnostics when one was already given.  */
  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);

      /* To avoid certain truncation the specified bound should also
	 not be equal to (or less than) the length of the source.  */
      location_t loc = gimple_location (stmt);
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1995 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1996 LEN, and SIZE. */
1998 static bool
1999 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2001 gimple *stmt = gsi_stmt (*gsi);
2002 tree dest = gimple_call_arg (stmt, 0);
2003 tree src = gimple_call_arg (stmt, 1);
2004 tree len = gimple_call_arg (stmt, 2);
2005 tree size = gimple_call_arg (stmt, 3);
2006 tree fn;
2007 const char *p;
2009 p = c_getstr (src);
2010 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2011 if ((p && *p == '\0')
2012 || integer_zerop (len))
2014 replace_call_with_value (gsi, dest);
2015 return true;
2018 if (! tree_fits_uhwi_p (size))
2019 return false;
2021 if (! integer_all_onesp (size))
2023 tree src_len = c_strlen (src, 1);
2024 if (src_len
2025 && tree_fits_uhwi_p (src_len)
2026 && tree_fits_uhwi_p (len)
2027 && ! tree_int_cst_lt (len, src_len))
2029 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2030 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2031 if (!fn)
2032 return false;
2034 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2035 replace_call_with_call_and_fold (gsi, repl);
2036 return true;
2038 return false;
2041 /* If __builtin_strncat_chk is used, assume strncat is available. */
2042 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2043 if (!fn)
2044 return false;
2046 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2047 replace_call_with_call_and_fold (gsi, repl);
2048 return true;
2051 /* Build and append gimple statements to STMTS that would load a first
2052 character of a memory location identified by STR. LOC is location
2053 of the statement. */
2055 static tree
2056 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2058 tree var;
2060 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2061 tree cst_uchar_ptr_node
2062 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2063 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2065 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2066 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2067 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2069 gimple_assign_set_lhs (stmt, var);
2070 gimple_seq_add_stmt_without_update (stmts, stmt);
2072 return var;
2075 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2076 FCODE is the name of the builtin. */
2078 static bool
2079 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2081 gimple *stmt = gsi_stmt (*gsi);
2082 tree callee = gimple_call_fndecl (stmt);
2083 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2085 tree type = integer_type_node;
2086 tree str1 = gimple_call_arg (stmt, 0);
2087 tree str2 = gimple_call_arg (stmt, 1);
2088 tree lhs = gimple_call_lhs (stmt);
2089 HOST_WIDE_INT length = -1;
2091 /* Handle strncmp and strncasecmp functions. */
2092 if (gimple_call_num_args (stmt) == 3)
2094 tree len = gimple_call_arg (stmt, 2);
2095 if (tree_fits_uhwi_p (len))
2096 length = tree_to_uhwi (len);
2099 /* If the LEN parameter is zero, return zero. */
2100 if (length == 0)
2102 replace_call_with_value (gsi, integer_zero_node);
2103 return true;
2106 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2107 if (operand_equal_p (str1, str2, 0))
2109 replace_call_with_value (gsi, integer_zero_node);
2110 return true;
2113 const char *p1 = c_getstr (str1);
2114 const char *p2 = c_getstr (str2);
2116 /* For known strings, return an immediate value. */
2117 if (p1 && p2)
2119 int r = 0;
2120 bool known_result = false;
2122 switch (fcode)
2124 case BUILT_IN_STRCMP:
2126 r = strcmp (p1, p2);
2127 known_result = true;
2128 break;
2130 case BUILT_IN_STRNCMP:
2132 if (length == -1)
2133 break;
2134 r = strncmp (p1, p2, length);
2135 known_result = true;
2136 break;
2138 /* Only handleable situation is where the string are equal (result 0),
2139 which is already handled by operand_equal_p case. */
2140 case BUILT_IN_STRCASECMP:
2141 break;
2142 case BUILT_IN_STRNCASECMP:
2144 if (length == -1)
2145 break;
2146 r = strncmp (p1, p2, length);
2147 if (r == 0)
2148 known_result = true;
2149 break;
2151 default:
2152 gcc_unreachable ();
2155 if (known_result)
2157 replace_call_with_value (gsi, build_cmp_result (type, r));
2158 return true;
2162 bool nonzero_length = length >= 1
2163 || fcode == BUILT_IN_STRCMP
2164 || fcode == BUILT_IN_STRCASECMP;
2166 location_t loc = gimple_location (stmt);
2168 /* If the second arg is "", return *(const unsigned char*)arg1. */
2169 if (p2 && *p2 == '\0' && nonzero_length)
2171 gimple_seq stmts = NULL;
2172 tree var = gimple_load_first_char (loc, str1, &stmts);
2173 if (lhs)
2175 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2176 gimple_seq_add_stmt_without_update (&stmts, stmt);
2179 gsi_replace_with_seq_vops (gsi, stmts);
2180 return true;
2183 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2184 if (p1 && *p1 == '\0' && nonzero_length)
2186 gimple_seq stmts = NULL;
2187 tree var = gimple_load_first_char (loc, str2, &stmts);
2189 if (lhs)
2191 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2192 stmt = gimple_build_assign (c, NOP_EXPR, var);
2193 gimple_seq_add_stmt_without_update (&stmts, stmt);
2195 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2196 gimple_seq_add_stmt_without_update (&stmts, stmt);
2199 gsi_replace_with_seq_vops (gsi, stmts);
2200 return true;
2203 /* If len parameter is one, return an expression corresponding to
2204 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2205 if (fcode == BUILT_IN_STRNCMP && length == 1)
2207 gimple_seq stmts = NULL;
2208 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2209 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2211 if (lhs)
2213 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2214 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2215 gimple_seq_add_stmt_without_update (&stmts, convert1);
2217 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2218 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2219 gimple_seq_add_stmt_without_update (&stmts, convert2);
2221 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2222 gimple_seq_add_stmt_without_update (&stmts, stmt);
2225 gsi_replace_with_seq_vops (gsi, stmts);
2226 return true;
2229 /* If length is larger than the length of one constant string,
2230 replace strncmp with corresponding strcmp */
2231 if (fcode == BUILT_IN_STRNCMP
2232 && length > 0
2233 && ((p2 && (size_t) length > strlen (p2))
2234 || (p1 && (size_t) length > strlen (p1))))
2236 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2237 if (!fn)
2238 return false;
2239 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2240 replace_call_with_call_and_fold (gsi, repl);
2241 return true;
2244 return false;
2247 /* Fold a call to the memchr pointed by GSI iterator. */
2249 static bool
2250 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2252 gimple *stmt = gsi_stmt (*gsi);
2253 tree lhs = gimple_call_lhs (stmt);
2254 tree arg1 = gimple_call_arg (stmt, 0);
2255 tree arg2 = gimple_call_arg (stmt, 1);
2256 tree len = gimple_call_arg (stmt, 2);
2258 /* If the LEN parameter is zero, return zero. */
2259 if (integer_zerop (len))
2261 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2262 return true;
2265 char c;
2266 if (TREE_CODE (arg2) != INTEGER_CST
2267 || !tree_fits_uhwi_p (len)
2268 || !target_char_cst_p (arg2, &c))
2269 return false;
2271 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2272 unsigned HOST_WIDE_INT string_length;
2273 const char *p1 = c_getstr (arg1, &string_length);
2275 if (p1)
2277 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2278 if (r == NULL)
2280 if (length <= string_length)
2282 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2283 return true;
2286 else
2288 unsigned HOST_WIDE_INT offset = r - p1;
2289 gimple_seq stmts = NULL;
2290 if (lhs != NULL_TREE)
2292 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2293 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2294 arg1, offset_cst);
2295 gimple_seq_add_stmt_without_update (&stmts, stmt);
2297 else
2298 gimple_seq_add_stmt_without_update (&stmts,
2299 gimple_build_nop ());
2301 gsi_replace_with_seq_vops (gsi, stmts);
2302 return true;
2306 return false;
2309 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2310 to the call. IGNORE is true if the value returned
2311 by the builtin will be ignored. UNLOCKED is true is true if this
2312 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2313 the known length of the string. Return NULL_TREE if no simplification
2314 was possible. */
2316 static bool
2317 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2318 tree arg0, tree arg1,
2319 bool unlocked)
2321 gimple *stmt = gsi_stmt (*gsi);
2323 /* If we're using an unlocked function, assume the other unlocked
2324 functions exist explicitly. */
2325 tree const fn_fputc = (unlocked
2326 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2327 : builtin_decl_implicit (BUILT_IN_FPUTC));
2328 tree const fn_fwrite = (unlocked
2329 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2330 : builtin_decl_implicit (BUILT_IN_FWRITE));
2332 /* If the return value is used, don't do the transformation. */
2333 if (gimple_call_lhs (stmt))
2334 return false;
2336 /* Get the length of the string passed to fputs. If the length
2337 can't be determined, punt. */
2338 tree len = get_maxval_strlen (arg0, 0);
2339 if (!len
2340 || TREE_CODE (len) != INTEGER_CST)
2341 return false;
2343 switch (compare_tree_int (len, 1))
2345 case -1: /* length is 0, delete the call entirely . */
2346 replace_call_with_value (gsi, integer_zero_node);
2347 return true;
2349 case 0: /* length is 1, call fputc. */
2351 const char *p = c_getstr (arg0);
2352 if (p != NULL)
2354 if (!fn_fputc)
2355 return false;
2357 gimple *repl = gimple_build_call (fn_fputc, 2,
2358 build_int_cst
2359 (integer_type_node, p[0]), arg1);
2360 replace_call_with_call_and_fold (gsi, repl);
2361 return true;
2364 /* FALLTHROUGH */
2365 case 1: /* length is greater than 1, call fwrite. */
2367 /* If optimizing for size keep fputs. */
2368 if (optimize_function_for_size_p (cfun))
2369 return false;
2370 /* New argument list transforming fputs(string, stream) to
2371 fwrite(string, 1, len, stream). */
2372 if (!fn_fwrite)
2373 return false;
2375 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2376 size_one_node, len, arg1);
2377 replace_call_with_call_and_fold (gsi, repl);
2378 return true;
2380 default:
2381 gcc_unreachable ();
2383 return false;
2386 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2387 DEST, SRC, LEN, and SIZE are the arguments to the call.
2388 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2389 code of the builtin. If MAXLEN is not NULL, it is maximum length
2390 passed as third argument. */
2392 static bool
2393 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2394 tree dest, tree src, tree len, tree size,
2395 enum built_in_function fcode)
2397 gimple *stmt = gsi_stmt (*gsi);
2398 location_t loc = gimple_location (stmt);
2399 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2400 tree fn;
2402 /* If SRC and DEST are the same (and not volatile), return DEST
2403 (resp. DEST+LEN for __mempcpy_chk). */
2404 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2406 if (fcode != BUILT_IN_MEMPCPY_CHK)
2408 replace_call_with_value (gsi, dest);
2409 return true;
2411 else
2413 gimple_seq stmts = NULL;
2414 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2415 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2416 TREE_TYPE (dest), dest, len);
2417 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2418 replace_call_with_value (gsi, temp);
2419 return true;
2423 if (! tree_fits_uhwi_p (size))
2424 return false;
2426 tree maxlen = get_maxval_strlen (len, 2);
2427 if (! integer_all_onesp (size))
2429 if (! tree_fits_uhwi_p (len))
2431 /* If LEN is not constant, try MAXLEN too.
2432 For MAXLEN only allow optimizing into non-_ocs function
2433 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2434 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2436 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2438 /* (void) __mempcpy_chk () can be optimized into
2439 (void) __memcpy_chk (). */
2440 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2441 if (!fn)
2442 return false;
2444 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2445 replace_call_with_call_and_fold (gsi, repl);
2446 return true;
2448 return false;
2451 else
2452 maxlen = len;
2454 if (tree_int_cst_lt (size, maxlen))
2455 return false;
2458 fn = NULL_TREE;
2459 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2460 mem{cpy,pcpy,move,set} is available. */
2461 switch (fcode)
2463 case BUILT_IN_MEMCPY_CHK:
2464 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2465 break;
2466 case BUILT_IN_MEMPCPY_CHK:
2467 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2468 break;
2469 case BUILT_IN_MEMMOVE_CHK:
2470 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2471 break;
2472 case BUILT_IN_MEMSET_CHK:
2473 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2474 break;
2475 default:
2476 break;
2479 if (!fn)
2480 return false;
2482 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2483 replace_call_with_call_and_fold (gsi, repl);
2484 return true;
2487 /* Fold a call to the __st[rp]cpy_chk builtin.
2488 DEST, SRC, and SIZE are the arguments to the call.
2489 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2490 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2491 strings passed as second argument. */
2493 static bool
2494 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2495 tree dest,
2496 tree src, tree size,
2497 enum built_in_function fcode)
2499 gimple *stmt = gsi_stmt (*gsi);
2500 location_t loc = gimple_location (stmt);
2501 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2502 tree len, fn;
2504 /* If SRC and DEST are the same (and not volatile), return DEST. */
2505 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2507 replace_call_with_value (gsi, dest);
2508 return true;
2511 if (! tree_fits_uhwi_p (size))
2512 return false;
2514 tree maxlen = get_maxval_strlen (src, 1);
2515 if (! integer_all_onesp (size))
2517 len = c_strlen (src, 1);
2518 if (! len || ! tree_fits_uhwi_p (len))
2520 /* If LEN is not constant, try MAXLEN too.
2521 For MAXLEN only allow optimizing into non-_ocs function
2522 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2523 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2525 if (fcode == BUILT_IN_STPCPY_CHK)
2527 if (! ignore)
2528 return false;
2530 /* If return value of __stpcpy_chk is ignored,
2531 optimize into __strcpy_chk. */
2532 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2533 if (!fn)
2534 return false;
2536 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2537 replace_call_with_call_and_fold (gsi, repl);
2538 return true;
2541 if (! len || TREE_SIDE_EFFECTS (len))
2542 return false;
2544 /* If c_strlen returned something, but not a constant,
2545 transform __strcpy_chk into __memcpy_chk. */
2546 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2547 if (!fn)
2548 return false;
2550 gimple_seq stmts = NULL;
2551 len = gimple_convert (&stmts, loc, size_type_node, len);
2552 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2553 build_int_cst (size_type_node, 1));
2554 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2555 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2556 replace_call_with_call_and_fold (gsi, repl);
2557 return true;
2560 else
2561 maxlen = len;
2563 if (! tree_int_cst_lt (maxlen, size))
2564 return false;
2567 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2568 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2569 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2570 if (!fn)
2571 return false;
2573 gimple *repl = gimple_build_call (fn, 2, dest, src);
2574 replace_call_with_call_and_fold (gsi, repl);
2575 return true;
2578 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2579 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2580 length passed as third argument. IGNORE is true if return value can be
2581 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2583 static bool
2584 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2585 tree dest, tree src,
2586 tree len, tree size,
2587 enum built_in_function fcode)
2589 gimple *stmt = gsi_stmt (*gsi);
2590 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2591 tree fn;
2593 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2595 /* If return value of __stpncpy_chk is ignored,
2596 optimize into __strncpy_chk. */
2597 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2598 if (fn)
2600 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2601 replace_call_with_call_and_fold (gsi, repl);
2602 return true;
2606 if (! tree_fits_uhwi_p (size))
2607 return false;
2609 tree maxlen = get_maxval_strlen (len, 2);
2610 if (! integer_all_onesp (size))
2612 if (! tree_fits_uhwi_p (len))
2614 /* If LEN is not constant, try MAXLEN too.
2615 For MAXLEN only allow optimizing into non-_ocs function
2616 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2617 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2618 return false;
2620 else
2621 maxlen = len;
2623 if (tree_int_cst_lt (size, maxlen))
2624 return false;
2627 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2628 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2629 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2630 if (!fn)
2631 return false;
2633 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2634 replace_call_with_call_and_fold (gsi, repl);
2635 return true;
2638 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2639 Return NULL_TREE if no simplification can be made. */
2641 static bool
2642 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2644 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2645 location_t loc = gimple_location (stmt);
2646 tree dest = gimple_call_arg (stmt, 0);
2647 tree src = gimple_call_arg (stmt, 1);
2648 tree fn, len, lenp1;
2650 /* If the result is unused, replace stpcpy with strcpy. */
2651 if (gimple_call_lhs (stmt) == NULL_TREE)
2653 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2654 if (!fn)
2655 return false;
2656 gimple_call_set_fndecl (stmt, fn);
2657 fold_stmt (gsi);
2658 return true;
2661 len = c_strlen (src, 1);
2662 if (!len
2663 || TREE_CODE (len) != INTEGER_CST)
2664 return false;
2666 if (optimize_function_for_size_p (cfun)
2667 /* If length is zero it's small enough. */
2668 && !integer_zerop (len))
2669 return false;
2671 /* If the source has a known length replace stpcpy with memcpy. */
2672 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2673 if (!fn)
2674 return false;
2676 gimple_seq stmts = NULL;
2677 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2678 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2679 tem, build_int_cst (size_type_node, 1));
2680 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2681 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2682 gimple_set_vuse (repl, gimple_vuse (stmt));
2683 gimple_set_vdef (repl, gimple_vdef (stmt));
2684 if (gimple_vdef (repl)
2685 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2686 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2687 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2688 /* Replace the result with dest + len. */
2689 stmts = NULL;
2690 tem = gimple_convert (&stmts, loc, sizetype, len);
2691 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2692 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2693 POINTER_PLUS_EXPR, dest, tem);
2694 gsi_replace (gsi, ret, false);
2695 /* Finally fold the memcpy call. */
2696 gimple_stmt_iterator gsi2 = *gsi;
2697 gsi_prev (&gsi2);
2698 fold_stmt (&gsi2);
2699 return true;
2702 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2703 NULL_TREE if a normal call should be emitted rather than expanding
2704 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2705 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2706 passed as second argument. */
2708 static bool
2709 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2710 enum built_in_function fcode)
2712 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2713 tree dest, size, len, fn, fmt, flag;
2714 const char *fmt_str;
2716 /* Verify the required arguments in the original call. */
2717 if (gimple_call_num_args (stmt) < 5)
2718 return false;
2720 dest = gimple_call_arg (stmt, 0);
2721 len = gimple_call_arg (stmt, 1);
2722 flag = gimple_call_arg (stmt, 2);
2723 size = gimple_call_arg (stmt, 3);
2724 fmt = gimple_call_arg (stmt, 4);
2726 if (! tree_fits_uhwi_p (size))
2727 return false;
2729 if (! integer_all_onesp (size))
2731 tree maxlen = get_maxval_strlen (len, 2);
2732 if (! tree_fits_uhwi_p (len))
2734 /* If LEN is not constant, try MAXLEN too.
2735 For MAXLEN only allow optimizing into non-_ocs function
2736 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2737 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2738 return false;
2740 else
2741 maxlen = len;
2743 if (tree_int_cst_lt (size, maxlen))
2744 return false;
2747 if (!init_target_chars ())
2748 return false;
2750 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2751 or if format doesn't contain % chars or is "%s". */
2752 if (! integer_zerop (flag))
2754 fmt_str = c_getstr (fmt);
2755 if (fmt_str == NULL)
2756 return false;
2757 if (strchr (fmt_str, target_percent) != NULL
2758 && strcmp (fmt_str, target_percent_s))
2759 return false;
2762 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2763 available. */
2764 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2765 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2766 if (!fn)
2767 return false;
2769 /* Replace the called function and the first 5 argument by 3 retaining
2770 trailing varargs. */
2771 gimple_call_set_fndecl (stmt, fn);
2772 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2773 gimple_call_set_arg (stmt, 0, dest);
2774 gimple_call_set_arg (stmt, 1, len);
2775 gimple_call_set_arg (stmt, 2, fmt);
2776 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2777 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2778 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2779 fold_stmt (gsi);
2780 return true;
2783 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2784 Return NULL_TREE if a normal call should be emitted rather than
2785 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2786 or BUILT_IN_VSPRINTF_CHK. */
2788 static bool
2789 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2790 enum built_in_function fcode)
2792 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2793 tree dest, size, len, fn, fmt, flag;
2794 const char *fmt_str;
2795 unsigned nargs = gimple_call_num_args (stmt);
2797 /* Verify the required arguments in the original call. */
2798 if (nargs < 4)
2799 return false;
2800 dest = gimple_call_arg (stmt, 0);
2801 flag = gimple_call_arg (stmt, 1);
2802 size = gimple_call_arg (stmt, 2);
2803 fmt = gimple_call_arg (stmt, 3);
2805 if (! tree_fits_uhwi_p (size))
2806 return false;
2808 len = NULL_TREE;
2810 if (!init_target_chars ())
2811 return false;
2813 /* Check whether the format is a literal string constant. */
2814 fmt_str = c_getstr (fmt);
2815 if (fmt_str != NULL)
2817 /* If the format doesn't contain % args or %%, we know the size. */
2818 if (strchr (fmt_str, target_percent) == 0)
2820 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2821 len = build_int_cstu (size_type_node, strlen (fmt_str));
2823 /* If the format is "%s" and first ... argument is a string literal,
2824 we know the size too. */
2825 else if (fcode == BUILT_IN_SPRINTF_CHK
2826 && strcmp (fmt_str, target_percent_s) == 0)
2828 tree arg;
2830 if (nargs == 5)
2832 arg = gimple_call_arg (stmt, 4);
2833 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2835 len = c_strlen (arg, 1);
2836 if (! len || ! tree_fits_uhwi_p (len))
2837 len = NULL_TREE;
2843 if (! integer_all_onesp (size))
2845 if (! len || ! tree_int_cst_lt (len, size))
2846 return false;
2849 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2850 or if format doesn't contain % chars or is "%s". */
2851 if (! integer_zerop (flag))
2853 if (fmt_str == NULL)
2854 return false;
2855 if (strchr (fmt_str, target_percent) != NULL
2856 && strcmp (fmt_str, target_percent_s))
2857 return false;
2860 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2861 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2862 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2863 if (!fn)
2864 return false;
2866 /* Replace the called function and the first 4 argument by 2 retaining
2867 trailing varargs. */
2868 gimple_call_set_fndecl (stmt, fn);
2869 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2870 gimple_call_set_arg (stmt, 0, dest);
2871 gimple_call_set_arg (stmt, 1, fmt);
2872 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2873 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2874 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2875 fold_stmt (gsi);
2876 return true;
2879 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2880 ORIG may be null if this is a 2-argument call. We don't attempt to
2881 simplify calls with more than 3 arguments.
2883 Return true if simplification was possible, otherwise false. */
2885 bool
2886 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2888 gimple *stmt = gsi_stmt (*gsi);
2889 tree dest = gimple_call_arg (stmt, 0);
2890 tree fmt = gimple_call_arg (stmt, 1);
2891 tree orig = NULL_TREE;
2892 const char *fmt_str = NULL;
2894 /* Verify the required arguments in the original call. We deal with two
2895 types of sprintf() calls: 'sprintf (str, fmt)' and
2896 'sprintf (dest, "%s", orig)'. */
2897 if (gimple_call_num_args (stmt) > 3)
2898 return false;
2900 if (gimple_call_num_args (stmt) == 3)
2901 orig = gimple_call_arg (stmt, 2);
2903 /* Check whether the format is a literal string constant. */
2904 fmt_str = c_getstr (fmt);
2905 if (fmt_str == NULL)
2906 return false;
2908 if (!init_target_chars ())
2909 return false;
2911 /* If the format doesn't contain % args or %%, use strcpy. */
2912 if (strchr (fmt_str, target_percent) == NULL)
2914 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2916 if (!fn)
2917 return false;
2919 /* Don't optimize sprintf (buf, "abc", ptr++). */
2920 if (orig)
2921 return false;
2923 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2924 'format' is known to contain no % formats. */
2925 gimple_seq stmts = NULL;
2926 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2927 gimple_seq_add_stmt_without_update (&stmts, repl);
2928 if (gimple_call_lhs (stmt))
2930 repl = gimple_build_assign (gimple_call_lhs (stmt),
2931 build_int_cst (integer_type_node,
2932 strlen (fmt_str)));
2933 gimple_seq_add_stmt_without_update (&stmts, repl);
2934 gsi_replace_with_seq_vops (gsi, stmts);
2935 /* gsi now points at the assignment to the lhs, get a
2936 stmt iterator to the memcpy call.
2937 ??? We can't use gsi_for_stmt as that doesn't work when the
2938 CFG isn't built yet. */
2939 gimple_stmt_iterator gsi2 = *gsi;
2940 gsi_prev (&gsi2);
2941 fold_stmt (&gsi2);
2943 else
2945 gsi_replace_with_seq_vops (gsi, stmts);
2946 fold_stmt (gsi);
2948 return true;
2951 /* If the format is "%s", use strcpy if the result isn't used. */
2952 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2954 tree fn;
2955 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2957 if (!fn)
2958 return false;
2960 /* Don't crash on sprintf (str1, "%s"). */
2961 if (!orig)
2962 return false;
2964 tree orig_len = NULL_TREE;
2965 if (gimple_call_lhs (stmt))
2967 orig_len = get_maxval_strlen (orig, 0);
2968 if (!orig_len)
2969 return false;
2972 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2973 gimple_seq stmts = NULL;
2974 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2975 gimple_seq_add_stmt_without_update (&stmts, repl);
2976 if (gimple_call_lhs (stmt))
2978 if (!useless_type_conversion_p (integer_type_node,
2979 TREE_TYPE (orig_len)))
2980 orig_len = fold_convert (integer_type_node, orig_len);
2981 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2982 gimple_seq_add_stmt_without_update (&stmts, repl);
2983 gsi_replace_with_seq_vops (gsi, stmts);
2984 /* gsi now points at the assignment to the lhs, get a
2985 stmt iterator to the memcpy call.
2986 ??? We can't use gsi_for_stmt as that doesn't work when the
2987 CFG isn't built yet. */
2988 gimple_stmt_iterator gsi2 = *gsi;
2989 gsi_prev (&gsi2);
2990 fold_stmt (&gsi2);
2992 else
2994 gsi_replace_with_seq_vops (gsi, stmts);
2995 fold_stmt (gsi);
2997 return true;
2999 return false;
3002 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3003 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3004 attempt to simplify calls with more than 4 arguments.
3006 Return true if simplification was possible, otherwise false. */
3008 bool
3009 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3011 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3012 tree dest = gimple_call_arg (stmt, 0);
3013 tree destsize = gimple_call_arg (stmt, 1);
3014 tree fmt = gimple_call_arg (stmt, 2);
3015 tree orig = NULL_TREE;
3016 const char *fmt_str = NULL;
3018 if (gimple_call_num_args (stmt) > 4)
3019 return false;
3021 if (gimple_call_num_args (stmt) == 4)
3022 orig = gimple_call_arg (stmt, 3);
3024 if (!tree_fits_uhwi_p (destsize))
3025 return false;
3026 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3028 /* Check whether the format is a literal string constant. */
3029 fmt_str = c_getstr (fmt);
3030 if (fmt_str == NULL)
3031 return false;
3033 if (!init_target_chars ())
3034 return false;
3036 /* If the format doesn't contain % args or %%, use strcpy. */
3037 if (strchr (fmt_str, target_percent) == NULL)
3039 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3040 if (!fn)
3041 return false;
3043 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3044 if (orig)
3045 return false;
3047 /* We could expand this as
3048 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3049 or to
3050 memcpy (str, fmt_with_nul_at_cstm1, cst);
3051 but in the former case that might increase code size
3052 and in the latter case grow .rodata section too much.
3053 So punt for now. */
3054 size_t len = strlen (fmt_str);
3055 if (len >= destlen)
3056 return false;
3058 gimple_seq stmts = NULL;
3059 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3060 gimple_seq_add_stmt_without_update (&stmts, repl);
3061 if (gimple_call_lhs (stmt))
3063 repl = gimple_build_assign (gimple_call_lhs (stmt),
3064 build_int_cst (integer_type_node, len));
3065 gimple_seq_add_stmt_without_update (&stmts, repl);
3066 gsi_replace_with_seq_vops (gsi, stmts);
3067 /* gsi now points at the assignment to the lhs, get a
3068 stmt iterator to the memcpy call.
3069 ??? We can't use gsi_for_stmt as that doesn't work when the
3070 CFG isn't built yet. */
3071 gimple_stmt_iterator gsi2 = *gsi;
3072 gsi_prev (&gsi2);
3073 fold_stmt (&gsi2);
3075 else
3077 gsi_replace_with_seq_vops (gsi, stmts);
3078 fold_stmt (gsi);
3080 return true;
3083 /* If the format is "%s", use strcpy if the result isn't used. */
3084 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3086 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3087 if (!fn)
3088 return false;
3090 /* Don't crash on snprintf (str1, cst, "%s"). */
3091 if (!orig)
3092 return false;
3094 tree orig_len = get_maxval_strlen (orig, 0);
3095 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3096 return false;
3098 /* We could expand this as
3099 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3100 or to
3101 memcpy (str1, str2_with_nul_at_cstm1, cst);
3102 but in the former case that might increase code size
3103 and in the latter case grow .rodata section too much.
3104 So punt for now. */
3105 if (compare_tree_int (orig_len, destlen) >= 0)
3106 return false;
3108 /* Convert snprintf (str1, cst, "%s", str2) into
3109 strcpy (str1, str2) if strlen (str2) < cst. */
3110 gimple_seq stmts = NULL;
3111 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3112 gimple_seq_add_stmt_without_update (&stmts, repl);
3113 if (gimple_call_lhs (stmt))
3115 if (!useless_type_conversion_p (integer_type_node,
3116 TREE_TYPE (orig_len)))
3117 orig_len = fold_convert (integer_type_node, orig_len);
3118 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3119 gimple_seq_add_stmt_without_update (&stmts, repl);
3120 gsi_replace_with_seq_vops (gsi, stmts);
3121 /* gsi now points at the assignment to the lhs, get a
3122 stmt iterator to the memcpy call.
3123 ??? We can't use gsi_for_stmt as that doesn't work when the
3124 CFG isn't built yet. */
3125 gimple_stmt_iterator gsi2 = *gsi;
3126 gsi_prev (&gsi2);
3127 fold_stmt (&gsi2);
3129 else
3131 gsi_replace_with_seq_vops (gsi, stmts);
3132 fold_stmt (gsi);
3134 return true;
3136 return false;
3139 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3140 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3141 more than 3 arguments, and ARG may be null in the 2-argument case.
3143 FCODE is the BUILT_IN_* code of the function to be simplified.
3144 Return true if the call was replaced in the statement stream (by
3145 fputs/fputc or removed entirely), false if no simplification was possible. */
3147 static bool
3148 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3149 tree fp, tree fmt, tree arg,
3150 enum built_in_function fcode)
3152 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3153 tree fn_fputc, fn_fputs;
3154 const char *fmt_str = NULL;
3156 /* If the return value is used, don't do the transformation. */
3157 if (gimple_call_lhs (stmt) != NULL_TREE)
3158 return false;
3160 /* Check whether the format is a literal string constant. */
3161 fmt_str = c_getstr (fmt);
3162 if (fmt_str == NULL)
3163 return false;
3165 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3167 /* If we're using an unlocked function, assume the other
3168 unlocked functions exist explicitly. */
3169 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3170 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3172 else
3174 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3175 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3178 if (!init_target_chars ())
3179 return false;
3181 /* If the format doesn't contain % args or %%, use strcpy. */
3182 if (strchr (fmt_str, target_percent) == NULL)
/* For the non-va_list variants a %-free format with a trailing ARG
   would consume an argument the format doesn't reference; punt.  */
3184 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3185 && arg)
3186 return false;
3188 /* If the format specifier was "", fprintf does nothing. */
3189 if (fmt_str[0] == '\0')
3191 replace_call_with_value (gsi, NULL_TREE);
3192 return true;
3195 /* When "string" doesn't contain %, replace all cases of
3196 fprintf (fp, string) with fputs (string, fp). The fputs
3197 builtin will take care of special cases like length == 1. */
3198 if (fn_fputs)
3200 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3201 replace_call_with_call_and_fold (gsi, repl);
3202 return true;
3206 /* The other optimizations can be done only on the non-va_list variants. */
3207 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3208 return false;
3210 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3211 else if (strcmp (fmt_str, target_percent_s) == 0)
3213 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3214 return false;
3215 if (fn_fputs)
3217 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3218 replace_call_with_call_and_fold (gsi, repl);
3219 return true;
3223 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3224 else if (strcmp (fmt_str, target_percent_c) == 0)
3226 if (!arg
3227 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3228 return false;
3229 if (fn_fputc)
3231 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3232 replace_call_with_call_and_fold (gsi, repl);
3233 return true;
3237 return false;
3240 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3241 FMT and ARG are the arguments to the call; we don't fold cases with
3242 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3244 FCODE is the BUILT_IN_* code of the function to be simplified.
3245 Return true if the call was replaced in the statement stream (by
3246 puts/putchar or removed entirely), false if no simplification was possible. */
3248 static bool
3249 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3250 tree arg, enum built_in_function fcode)
3252 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3253 tree fn_putchar, fn_puts, newarg;
3254 const char *fmt_str = NULL;
3256 /* If the return value is used, don't do the transformation. */
3257 if (gimple_call_lhs (stmt) != NULL_TREE)
3258 return false;
3260 /* Check whether the format is a literal string constant. */
3261 fmt_str = c_getstr (fmt);
3262 if (fmt_str == NULL)
3263 return false;
3265 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3267 /* If we're using an unlocked function, assume the other
3268 unlocked functions exist explicitly. */
3269 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3270 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3272 else
3274 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3275 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3278 if (!init_target_chars ())
3279 return false;
/* Two cases where the entire output is a compile-time constant string:
   printf ("%s", "literal") and a format with no % at all.  */
3281 if (strcmp (fmt_str, target_percent_s) == 0
3282 || strchr (fmt_str, target_percent) == NULL)
3284 const char *str;
3286 if (strcmp (fmt_str, target_percent_s) == 0)
3288 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3289 return false;
3291 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3292 return false;
3294 str = c_getstr (arg);
3295 if (str == NULL)
3296 return false;
3298 else
3300 /* The format specifier doesn't contain any '%' characters. */
3301 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3302 && arg)
3303 return false;
3304 str = fmt_str;
3307 /* If the string was "", printf does nothing. */
3308 if (str[0] == '\0')
3310 replace_call_with_value (gsi, NULL_TREE);
3311 return true;
3314 /* If the string has length of 1, call putchar. */
3315 if (str[1] == '\0')
3317 /* Given printf("c"), (where c is any one character,)
3318 convert "c"[0] to an int and pass that to the replacement
3319 function. */
3320 newarg = build_int_cst (integer_type_node, str[0]);
3321 if (fn_putchar)
3323 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3324 replace_call_with_call_and_fold (gsi, repl);
3325 return true;
3328 else
3330 /* If the string was "string\n", call puts("string"). */
3331 size_t len = strlen (str);
3332 if ((unsigned char)str[len - 1] == target_newline
3333 && (size_t) (int) len == len
3334 && (int) len > 0)
3336 char *newstr;
3337 tree offset_node, string_cst;
3339 /* Create a NUL-terminated string that's one char shorter
3340 than the original, stripping off the trailing '\n'. */
3341 newarg = build_string_literal (len, str);
3342 string_cst = string_constant (newarg, &offset_node);
3343 gcc_checking_assert (string_cst
3344 && (TREE_STRING_LENGTH (string_cst)
3345 == (int) len)
3346 && integer_zerop (offset_node)
3347 && (unsigned char)
3348 TREE_STRING_POINTER (string_cst)[len - 1]
3349 == target_newline);
3350 /* build_string_literal creates a new STRING_CST,
3351 modify it in place to avoid double copying. */
3352 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3353 newstr[len - 1] = '\0';
3354 if (fn_puts)
3356 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3357 replace_call_with_call_and_fold (gsi, repl);
3358 return true;
3361 else
3362 /* We'd like to arrange to call fputs(string,stdout) here,
3363 but we need stdout and don't have a way to get it yet. */
3364 return false;
3368 /* The other optimizations can be done only on the non-va_list variants. */
3369 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3370 return false;
3372 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3373 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3375 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3376 return false;
3377 if (fn_puts)
3379 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3380 replace_call_with_call_and_fold (gsi, repl);
3381 return true;
3385 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3386 else if (strcmp (fmt_str, target_percent_c) == 0)
3388 if (!arg || ! useless_type_conversion_p (integer_type_node,
3389 TREE_TYPE (arg)))
3390 return false;
3391 if (fn_putchar)
3393 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3394 replace_call_with_call_and_fold (gsi, repl);
3395 return true;
3399 return false;
3404 /* Fold a call to __builtin_strlen at *GSI: if get_maxval_strlen can
determine a constant length for the argument, replace the call with
that value.  Return true if the call was replaced. */
3406 static bool
3407 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3409 gimple *stmt = gsi_stmt (*gsi);
3410 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
3411 if (!len)
3412 return false;
/* Gimplify LEN so the replacement is a valid gimple operand.  */
3413 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
3414 replace_call_with_value (gsi, len);
3415 return true;
3418 /* Fold a call to __builtin_acc_on_device (ARG0).  Replaces the call with
the boolean expression ARG0 == <host device> || ARG0 == <this device>,
built as explicit gimple statements, so the answer becomes a
compile-time constant on each target.  Return true if folded. */
3420 static bool
3421 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3423 /* Defer folding until we know which compiler we're in. */
3424 if (symtab->state != EXPANSION)
3425 return false;
/* Host compilation: on_device is true only for GOMP_DEVICE_HOST.
   Accel compilation (below): true for NOT_HOST and the specific device.  */
3427 unsigned val_host = GOMP_DEVICE_HOST;
3428 unsigned val_dev = GOMP_DEVICE_NONE;
3430 #ifdef ACCEL_COMPILER
3431 val_host = GOMP_DEVICE_NOT_HOST;
3432 val_dev = ACCEL_COMPILER_acc_device;
3433 #endif
3435 location_t loc = gimple_location (gsi_stmt (*gsi));
3437 tree host_eq = make_ssa_name (boolean_type_node);
3438 gimple *host_ass = gimple_build_assign
3439 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3440 gimple_set_location (host_ass, loc);
3441 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3443 tree dev_eq = make_ssa_name (boolean_type_node);
3444 gimple *dev_ass = gimple_build_assign
3445 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3446 gimple_set_location (dev_ass, loc);
3447 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3449 tree result = make_ssa_name (boolean_type_node);
3450 gimple *result_ass = gimple_build_assign
3451 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3452 gimple_set_location (result_ass, loc);
3453 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3455 replace_call_with_value (gsi, result);
3457 return true;
3460 /* Fold realloc (0, n) -> malloc (n).  The C standard guarantees that
realloc with a null pointer behaves exactly like malloc.  Return true
if the call was replaced. */
3462 static bool
3463 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3465 gimple *stmt = gsi_stmt (*gsi);
3466 tree arg = gimple_call_arg (stmt, 0);
3467 tree size = gimple_call_arg (stmt, 1);
3469 if (operand_equal_p (arg, null_pointer_node, 0))
3471 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3472 if (fn_malloc)
3474 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3475 replace_call_with_call_and_fold (gsi, repl);
3476 return true;
3479 return false;
3482 /* Fold the non-target builtin at *GSI and return whether any simplification
3483 was made. */
3485 static bool
3486 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3488 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3489 tree callee = gimple_call_fndecl (stmt);
3491 /* Give up for always_inline inline builtins until they are
3492 inlined. */
3493 if (avoid_folding_inline_builtin (callee))
3494 return false;
3496 unsigned n = gimple_call_num_args (stmt);
3497 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
/* Dispatch to the dedicated folder for each builtin; cases that don't
   return (unexpected argument count on the printf family) fall through
   to the generic folder at the bottom.  */
3498 switch (fcode)
3500 case BUILT_IN_BCMP:
3501 return gimple_fold_builtin_bcmp (gsi);
3502 case BUILT_IN_BCOPY:
3503 return gimple_fold_builtin_bcopy (gsi);
3504 case BUILT_IN_BZERO:
3505 return gimple_fold_builtin_bzero (gsi);
3507 case BUILT_IN_MEMSET:
3508 return gimple_fold_builtin_memset (gsi,
3509 gimple_call_arg (stmt, 1),
3510 gimple_call_arg (stmt, 2));
/* The last argument of gimple_fold_builtin_memory_op distinguishes the
   variants: 0 = memcpy, 1 = mempcpy, 3 = memmove (may overlap).  */
3511 case BUILT_IN_MEMCPY:
3512 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3513 gimple_call_arg (stmt, 1), 0);
3514 case BUILT_IN_MEMPCPY:
3515 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3516 gimple_call_arg (stmt, 1), 1);
3517 case BUILT_IN_MEMMOVE:
3518 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3519 gimple_call_arg (stmt, 1), 3);
3520 case BUILT_IN_SPRINTF_CHK:
3521 case BUILT_IN_VSPRINTF_CHK:
3522 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3523 case BUILT_IN_STRCAT_CHK:
3524 return gimple_fold_builtin_strcat_chk (gsi);
3525 case BUILT_IN_STRNCAT_CHK:
3526 return gimple_fold_builtin_strncat_chk (gsi);
3527 case BUILT_IN_STRLEN:
3528 return gimple_fold_builtin_strlen (gsi);
3529 case BUILT_IN_STRCPY:
3530 return gimple_fold_builtin_strcpy (gsi,
3531 gimple_call_arg (stmt, 0),
3532 gimple_call_arg (stmt, 1));
3533 case BUILT_IN_STRNCPY:
3534 return gimple_fold_builtin_strncpy (gsi,
3535 gimple_call_arg (stmt, 0),
3536 gimple_call_arg (stmt, 1),
3537 gimple_call_arg (stmt, 2));
3538 case BUILT_IN_STRCAT:
3539 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3540 gimple_call_arg (stmt, 1));
3541 case BUILT_IN_STRNCAT:
3542 return gimple_fold_builtin_strncat (gsi);
3543 case BUILT_IN_INDEX:
3544 case BUILT_IN_STRCHR:
3545 return gimple_fold_builtin_strchr (gsi, false);
3546 case BUILT_IN_RINDEX:
3547 case BUILT_IN_STRRCHR:
3548 return gimple_fold_builtin_strchr (gsi, true);
3549 case BUILT_IN_STRSTR:
3550 return gimple_fold_builtin_strstr (gsi);
3551 case BUILT_IN_STRCMP:
3552 case BUILT_IN_STRCASECMP:
3553 case BUILT_IN_STRNCMP:
3554 case BUILT_IN_STRNCASECMP:
3555 return gimple_fold_builtin_string_compare (gsi);
3556 case BUILT_IN_MEMCHR:
3557 return gimple_fold_builtin_memchr (gsi);
3558 case BUILT_IN_FPUTS:
3559 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3560 gimple_call_arg (stmt, 1), false);
3561 case BUILT_IN_FPUTS_UNLOCKED:
3562 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3563 gimple_call_arg (stmt, 1), true);
3564 case BUILT_IN_MEMCPY_CHK:
3565 case BUILT_IN_MEMPCPY_CHK:
3566 case BUILT_IN_MEMMOVE_CHK:
3567 case BUILT_IN_MEMSET_CHK:
3568 return gimple_fold_builtin_memory_chk (gsi,
3569 gimple_call_arg (stmt, 0),
3570 gimple_call_arg (stmt, 1),
3571 gimple_call_arg (stmt, 2),
3572 gimple_call_arg (stmt, 3),
3573 fcode);
3574 case BUILT_IN_STPCPY:
3575 return gimple_fold_builtin_stpcpy (gsi);
3576 case BUILT_IN_STRCPY_CHK:
3577 case BUILT_IN_STPCPY_CHK:
3578 return gimple_fold_builtin_stxcpy_chk (gsi,
3579 gimple_call_arg (stmt, 0),
3580 gimple_call_arg (stmt, 1),
3581 gimple_call_arg (stmt, 2),
3582 fcode);
3583 case BUILT_IN_STRNCPY_CHK:
3584 case BUILT_IN_STPNCPY_CHK:
3585 return gimple_fold_builtin_stxncpy_chk (gsi,
3586 gimple_call_arg (stmt, 0),
3587 gimple_call_arg (stmt, 1),
3588 gimple_call_arg (stmt, 2),
3589 gimple_call_arg (stmt, 3),
3590 fcode);
3591 case BUILT_IN_SNPRINTF_CHK:
3592 case BUILT_IN_VSNPRINTF_CHK:
3593 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
/* The printf family folders only handle fixed small argument counts;
   the _CHK variants carry two extra leading arguments (flag, size),
   hence the shifted argument indices below.  */
3595 case BUILT_IN_FPRINTF:
3596 case BUILT_IN_FPRINTF_UNLOCKED:
3597 case BUILT_IN_VFPRINTF:
3598 if (n == 2 || n == 3)
3599 return gimple_fold_builtin_fprintf (gsi,
3600 gimple_call_arg (stmt, 0),
3601 gimple_call_arg (stmt, 1),
3602 n == 3
3603 ? gimple_call_arg (stmt, 2)
3604 : NULL_TREE,
3605 fcode);
3606 break;
3607 case BUILT_IN_FPRINTF_CHK:
3608 case BUILT_IN_VFPRINTF_CHK:
3609 if (n == 3 || n == 4)
3610 return gimple_fold_builtin_fprintf (gsi,
3611 gimple_call_arg (stmt, 0),
3612 gimple_call_arg (stmt, 2),
3613 n == 4
3614 ? gimple_call_arg (stmt, 3)
3615 : NULL_TREE,
3616 fcode);
3617 break;
3618 case BUILT_IN_PRINTF:
3619 case BUILT_IN_PRINTF_UNLOCKED:
3620 case BUILT_IN_VPRINTF:
3621 if (n == 1 || n == 2)
3622 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3623 n == 2
3624 ? gimple_call_arg (stmt, 1)
3625 : NULL_TREE, fcode);
3626 break;
3627 case BUILT_IN_PRINTF_CHK:
3628 case BUILT_IN_VPRINTF_CHK:
3629 if (n == 2 || n == 3)
3630 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3631 n == 3
3632 ? gimple_call_arg (stmt, 2)
3633 : NULL_TREE, fcode);
3634 break;
3635 case BUILT_IN_ACC_ON_DEVICE:
3636 return gimple_fold_builtin_acc_on_device (gsi,
3637 gimple_call_arg (stmt, 0));
3638 case BUILT_IN_REALLOC:
3639 return gimple_fold_builtin_realloc (gsi);
3641 default:;
3644 /* Try the generic builtin folder. */
3645 bool ignore = (gimple_call_lhs (stmt) == NULL);
3646 tree result = fold_call_stmt (stmt, ignore);
3647 if (result)
3649 if (ignore)
3650 STRIP_NOPS (result);
3651 else
3652 result = fold_convert (gimple_call_return_type (stmt), result);
3653 if (!update_call_from_tree (gsi, result))
3654 gimplify_and_update_call_from_tree (gsi, result);
3655 return true;
3658 return false;
3661 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3662 function calls to constants, where possible.  Return the constant
tree, or NULL_TREE if the dimension is not known at compile time. */
3664 static tree
3665 fold_internal_goacc_dim (const gimple *call)
3667 int axis = oacc_get_ifn_dim_arg (call);
3668 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3669 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3670 tree result = NULL_TREE;
3672 /* If the size is 1, or we only want the size and it is not dynamic,
3673 we know the answer. */
3674 if (size == 1 || (!is_pos && size))
/* size - is_pos yields SIZE for DIM_SIZE and 0 for DIM_POS (a
   dimension of size 1 always has position 0).  */
3676 tree type = TREE_TYPE (gimple_call_lhs (call));
3677 result = build_int_cst (type, size - is_pos);
3680 return result;
3683 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3684 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3685 &var where var is only addressable because of such calls. */
3687 bool
3688 optimize_atomic_compare_exchange_p (gimple *stmt)
/* Quick rejects: exact signature (6 args), optimization enabled, not
   sanitized, a recognized builtin, and full virtual operands present.  */
3690 if (gimple_call_num_args (stmt) != 6
3691 || !flag_inline_atomics
3692 || !optimize
3693 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3694 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3695 || !gimple_vdef (stmt)
3696 || !gimple_vuse (stmt))
3697 return false;
3699 tree fndecl = gimple_call_fndecl (stmt);
3700 switch (DECL_FUNCTION_CODE (fndecl))
3702 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3703 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3704 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3705 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3706 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3707 break;
3708 default:
3709 return false;
/* The "expected" argument must be the address of a local variable so
   the transform can rewrite it into SSA form.  */
3712 tree expected = gimple_call_arg (stmt, 1);
3713 if (TREE_CODE (expected) != ADDR_EXPR
3714 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3715 return false;
3717 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3718 if (!is_gimple_reg_type (etype)
3719 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3720 || TREE_THIS_VOLATILE (etype)
3721 || VECTOR_TYPE_P (etype)
3722 || TREE_CODE (etype) == COMPLEX_TYPE
3723 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3724 might not preserve all the bits. See PR71716. */
3725 || SCALAR_FLOAT_TYPE_P (etype)
3726 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3727 return false;
/* The "weak" argument must be a literal 0 or 1 so it can be encoded
   into the internal call's flag operand.  */
3729 tree weak = gimple_call_arg (stmt, 3);
3730 if (!integer_zerop (weak) && !integer_onep (weak))
3731 return false;
/* ITYPE is the type of the builtin's third parameter, i.e. the
   integral type the hardware CAS operates on.  */
3733 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3734 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3735 machine_mode mode = TYPE_MODE (itype);
3737 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3738 == CODE_FOR_nothing
3739 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3740 return false;
3742 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3743 return false;
3745 return true;
3748 /* Fold
3749 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3750 into
3751 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3752 i = IMAGPART_EXPR <t>;
3753 r = (_Bool) i;
3754 e = REALPART_EXPR <t>;
The real part of T carries the loaded value, the imaginary part the
success flag; the weak bit and access size are packed into one flag
operand (w * 256 + N).  Only called when
optimize_atomic_compare_exchange_p returned true. */
3756 void
3757 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3759 gimple *stmt = gsi_stmt (*gsi);
3760 tree fndecl = gimple_call_fndecl (stmt);
3761 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3762 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3763 tree ctype = build_complex_type (itype);
3764 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3765 bool throws = false;
3766 edge e = NULL;
/* Load the current value of EXPECTED into an SSA name; this becomes
   the "expected" operand of the internal call.  */
3767 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3768 expected);
3769 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3770 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3771 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3773 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3774 build1 (VIEW_CONVERT_EXPR, itype,
3775 gimple_assign_lhs (g)));
3776 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3778 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3779 + int_size_in_bytes (itype);
3780 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3781 gimple_call_arg (stmt, 0),
3782 gimple_assign_lhs (g),
3783 gimple_call_arg (stmt, 2),
3784 build_int_cst (integer_type_node, flag),
3785 gimple_call_arg (stmt, 4),
3786 gimple_call_arg (stmt, 5));
3787 tree lhs = make_ssa_name (ctype);
3788 gimple_call_set_lhs (g, lhs);
/* Transfer the virtual operands so memory SSA stays consistent.  */
3789 gimple_set_vdef (g, gimple_vdef (stmt));
3790 gimple_set_vuse (g, gimple_vuse (stmt));
3791 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3792 tree oldlhs = gimple_call_lhs (stmt);
/* If the original call could throw, the follow-up statements must be
   placed on the fallthru edge rather than after the call.  */
3793 if (stmt_can_throw_internal (stmt))
3795 throws = true;
3796 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3798 gimple_call_set_nothrow (as_a <gcall *> (g),
3799 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3800 gimple_call_set_lhs (stmt, NULL_TREE);
3801 gsi_replace (gsi, g, true);
3802 if (oldlhs)
3804 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3805 build1 (IMAGPART_EXPR, itype, lhs));
3806 if (throws)
3808 gsi_insert_on_edge_immediate (e, g);
3809 *gsi = gsi_for_stmt (g);
3811 else
3812 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3813 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3814 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3816 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3817 build1 (REALPART_EXPR, itype, lhs));
3818 if (throws && oldlhs == NULL_TREE)
3820 gsi_insert_on_edge_immediate (e, g);
3821 *gsi = gsi_for_stmt (g);
3823 else
3824 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3825 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3827 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3828 VIEW_CONVERT_EXPR,
3829 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3830 gimple_assign_lhs (g)));
3831 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3833 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3834 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* Leave *GSI pointing at the first inserted statement.  */
3835 *gsi = gsiret;
3838 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3839 doesn't fit into TYPE. The test for overflow should be regardless of
3840 -fwrapv, and even for unsigned types. */
3842 bool
3843 arith_overflowed_p (enum tree_code code, const_tree type,
3844 const_tree arg0, const_tree arg1)
/* Compute in twice the maximal supported precision so the operation
   itself can never overflow the working type.  */
3846 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3847 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3848 widest2_int_cst;
3849 widest2_int warg0 = widest2_int_cst (arg0);
3850 widest2_int warg1 = widest2_int_cst (arg1);
3851 widest2_int wres;
3852 switch (code)
3854 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3855 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3856 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3857 default: gcc_unreachable ();
3859 signop sign = TYPE_SIGN (type);
/* A negative exact result can never be represented in an unsigned type.  */
3860 if (sign == UNSIGNED && wi::neg_p (wres))
3861 return true;
3862 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3865 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3866 The statement may be replaced by another statement, e.g., if the call
3867 simplifies to a constant value. Return true if any changes were made.
3868 It is assumed that the operands have been previously folded.
INPLACE means only changes that keep the statement in place are
allowed; no new statements may be inserted or the call replaced. */
3870 static bool
3871 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3873 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3874 tree callee;
3875 bool changed = false;
3876 unsigned i;
3878 /* Fold *& in call arguments. */
3879 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3880 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3882 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3883 if (tmp)
3885 gimple_call_set_arg (stmt, i, tmp);
3886 changed = true;
3890 /* Check for virtual calls that became direct calls. */
3891 callee = gimple_call_fn (stmt);
3892 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3894 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
/* The devirtualization below only emits a diagnostic dump when the
   resolved target is not among the possible polymorphic targets.  */
3896 if (dump_file && virtual_method_call_p (callee)
3897 && !possible_polymorphic_call_target_p
3898 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3899 (OBJ_TYPE_REF_EXPR (callee)))))
3901 fprintf (dump_file,
3902 "Type inheritance inconsistent devirtualization of ");
3903 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3904 fprintf (dump_file, " to ");
3905 print_generic_expr (dump_file, callee, TDF_SLIM);
3906 fprintf (dump_file, "\n");
3909 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3910 changed = true;
3912 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3914 bool final;
3915 vec <cgraph_node *>targets
3916 = possible_polymorphic_call_targets (callee, stmt, &final);
/* With a complete (final) target list of at most one entry we can
   either devirtualize to that target or, with zero targets, prove
   the call unreachable.  */
3917 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3919 tree lhs = gimple_call_lhs (stmt);
3920 if (dump_enabled_p ())
3922 location_t loc = gimple_location_safe (stmt);
3923 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3924 "folding virtual function call to %s\n",
3925 targets.length () == 1
3926 ? targets[0]->name ()
3927 : "__builtin_unreachable");
3929 if (targets.length () == 1)
3931 tree fndecl = targets[0]->decl;
3932 gimple_call_set_fndecl (stmt, fndecl);
3933 changed = true;
3934 /* If changing the call to __cxa_pure_virtual
3935 or similar noreturn function, adjust gimple_call_fntype
3936 too. */
3937 if (gimple_call_noreturn_p (stmt)
3938 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3939 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3940 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3941 == void_type_node))
3942 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3943 /* If the call becomes noreturn, remove the lhs. */
3944 if (lhs
3945 && gimple_call_noreturn_p (stmt)
3946 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3947 || should_remove_lhs_p (lhs)))
3949 if (TREE_CODE (lhs) == SSA_NAME)
/* Keep the SSA name's users valid by feeding them an
   undefined (default-def) value.  */
3951 tree var = create_tmp_var (TREE_TYPE (lhs));
3952 tree def = get_or_create_ssa_default_def (cfun, var);
3953 gimple *new_stmt = gimple_build_assign (lhs, def);
3954 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3956 gimple_call_set_lhs (stmt, NULL_TREE);
3958 maybe_remove_unused_call_args (cfun, stmt);
3960 else
3962 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3963 gimple *new_stmt = gimple_build_call (fndecl, 0);
3964 gimple_set_location (new_stmt, gimple_location (stmt));
3965 /* If the call had a SSA name as lhs morph that into
3966 an uninitialized value. */
3967 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3969 tree var = create_tmp_var (TREE_TYPE (lhs));
3970 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
3971 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
3972 set_ssa_default_def (cfun, var, lhs);
3974 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3975 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3976 gsi_replace (gsi, new_stmt, false);
3977 return true;
3983 /* Check for indirect calls that became direct calls, and then
3984 no longer require a static chain. */
3985 if (gimple_call_chain (stmt))
3987 tree fn = gimple_call_fndecl (stmt);
3988 if (fn && !DECL_STATIC_CHAIN (fn))
3990 gimple_call_set_chain (stmt, NULL);
3991 changed = true;
3993 else
3995 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3996 if (tmp)
3998 gimple_call_set_chain (stmt, tmp);
3999 changed = true;
/* Everything below may replace the statement or insert new ones,
   which INPLACE forbids.  */
4004 if (inplace)
4005 return changed;
4007 /* Check for builtins that CCP can handle using information not
4008 available in the generic fold routines. */
4009 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4011 if (gimple_fold_builtin (gsi))
4012 changed = true;
4014 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4016 changed |= targetm.gimple_fold_builtin (gsi);
4018 else if (gimple_call_internal_p (stmt))
4020 enum tree_code subcode = ERROR_MARK;
4021 tree result = NULL_TREE;
4022 bool cplx_result = false;
4023 tree overflow = NULL_TREE;
4024 switch (gimple_call_internal_fn (stmt))
4026 case IFN_BUILTIN_EXPECT:
4027 result = fold_builtin_expect (gimple_location (stmt),
4028 gimple_call_arg (stmt, 0),
4029 gimple_call_arg (stmt, 1),
4030 gimple_call_arg (stmt, 2));
4031 break;
/* The IFN_UBSAN_* cases delete the check when it is provably
   redundant (replace_call_with_value with NULL_TREE removes it).  */
4032 case IFN_UBSAN_OBJECT_SIZE:
4034 tree offset = gimple_call_arg (stmt, 1);
4035 tree objsize = gimple_call_arg (stmt, 2);
4036 if (integer_all_onesp (objsize)
4037 || (TREE_CODE (offset) == INTEGER_CST
4038 && TREE_CODE (objsize) == INTEGER_CST
4039 && tree_int_cst_le (offset, objsize)))
4041 replace_call_with_value (gsi, NULL_TREE);
4042 return true;
4045 break;
4046 case IFN_UBSAN_PTR:
4047 if (integer_zerop (gimple_call_arg (stmt, 1)))
4049 replace_call_with_value (gsi, NULL_TREE);
4050 return true;
4052 break;
4053 case IFN_UBSAN_BOUNDS:
4055 tree index = gimple_call_arg (stmt, 1);
4056 tree bound = gimple_call_arg (stmt, 2);
4057 if (TREE_CODE (index) == INTEGER_CST
4058 && TREE_CODE (bound) == INTEGER_CST)
4060 index = fold_convert (TREE_TYPE (bound), index);
4061 if (TREE_CODE (index) == INTEGER_CST
4062 && tree_int_cst_le (index, bound))
4064 replace_call_with_value (gsi, NULL_TREE);
4065 return true;
4069 break;
4070 case IFN_GOACC_DIM_SIZE:
4071 case IFN_GOACC_DIM_POS:
4072 result = fold_internal_goacc_dim (stmt);
4073 break;
4074 case IFN_UBSAN_CHECK_ADD:
4075 subcode = PLUS_EXPR;
4076 break;
4077 case IFN_UBSAN_CHECK_SUB:
4078 subcode = MINUS_EXPR;
4079 break;
4080 case IFN_UBSAN_CHECK_MUL:
4081 subcode = MULT_EXPR;
4082 break;
4083 case IFN_ADD_OVERFLOW:
4084 subcode = PLUS_EXPR;
4085 cplx_result = true;
4086 break;
4087 case IFN_SUB_OVERFLOW:
4088 subcode = MINUS_EXPR;
4089 cplx_result = true;
4090 break;
4091 case IFN_MUL_OVERFLOW:
4092 subcode = MULT_EXPR;
4093 cplx_result = true;
4094 break;
4095 default:
4096 break;
/* SUBCODE != ERROR_MARK means an arithmetic-with-overflow ifn was
   matched above; CPLX_RESULT tells whether the lhs is a complex
   value whose imaginary part carries the overflow flag.  */
4098 if (subcode != ERROR_MARK)
4100 tree arg0 = gimple_call_arg (stmt, 0);
4101 tree arg1 = gimple_call_arg (stmt, 1);
4102 tree type = TREE_TYPE (arg0);
4103 if (cplx_result)
4105 tree lhs = gimple_call_lhs (stmt);
4106 if (lhs == NULL_TREE)
4107 type = NULL_TREE;
4108 else
4109 type = TREE_TYPE (TREE_TYPE (lhs));
4111 if (type == NULL_TREE)
4113 /* x = y + 0; x = y - 0; x = y * 0; */
4114 else if (integer_zerop (arg1))
4115 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4116 /* x = 0 + y; x = 0 * y; */
4117 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4118 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4119 /* x = y - y; */
4120 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4121 result = integer_zero_node;
4122 /* x = y * 1; x = 1 * y; */
4123 else if (subcode == MULT_EXPR && integer_onep (arg1))
4124 result = arg0;
4125 else if (subcode == MULT_EXPR && integer_onep (arg0))
4126 result = arg1;
4127 else if (TREE_CODE (arg0) == INTEGER_CST
4128 && TREE_CODE (arg1) == INTEGER_CST)
4130 if (cplx_result)
4131 result = int_const_binop (subcode, fold_convert (type, arg0),
4132 fold_convert (type, arg1));
4133 else
4134 result = int_const_binop (subcode, arg0, arg1);
4135 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4137 if (cplx_result)
4138 overflow = build_one_cst (type);
4139 else
4140 result = NULL_TREE;
4143 if (result)
4145 if (result == integer_zero_node)
4146 result = build_zero_cst (type);
4147 else if (cplx_result && TREE_TYPE (result) != type)
4149 if (TREE_CODE (result) == INTEGER_CST)
4151 if (arith_overflowed_p (PLUS_EXPR, type, result,
4152 integer_zero_node))
4153 overflow = build_one_cst (type);
/* A non-constant result of a different type is only safe when the
   conversion to TYPE cannot change its value.  */
4155 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4156 && TYPE_UNSIGNED (type))
4157 || (TYPE_PRECISION (type)
4158 < (TYPE_PRECISION (TREE_TYPE (result))
4159 + (TYPE_UNSIGNED (TREE_TYPE (result))
4160 && !TYPE_UNSIGNED (type)))))
4161 result = NULL_TREE;
4162 if (result)
4163 result = fold_convert (type, result);
4168 if (result)
4170 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4171 result = drop_tree_overflow (result);
4172 if (cplx_result)
4174 if (overflow == NULL_TREE)
4175 overflow = build_zero_cst (TREE_TYPE (result));
4176 tree ctype = build_complex_type (TREE_TYPE (result));
4177 if (TREE_CODE (result) == INTEGER_CST
4178 && TREE_CODE (overflow) == INTEGER_CST)
4179 result = build_complex (ctype, result, overflow);
4180 else
4181 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4182 ctype, result, overflow);
4184 if (!update_call_from_tree (gsi, result))
4185 gimplify_and_update_call_from_tree (gsi, result);
4186 changed = true;
4190 return changed;
4194 /* Return true if SSA name NAME has a use on statement STMT. */
4196 static bool
4197 has_use_on_stmt (tree name, gimple *stmt)
4199 imm_use_iterator iter;
4200 use_operand_p use_p;
4201 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4202 if (USE_STMT (use_p) == stmt)
4203 return true;
4204 return false;
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      /* For a simplified comparison operand, check both of its
	 operands the same way.  */
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* A bare SSA name result becomes NAME != 0.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* A constant condition makes the branch unconditional.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Otherwise materialize the result into a fresh SSA name in
	     *SEQ and compare that against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* In place we can only rewrite the RHS if the new form does not
	 need more operand slots than the statement already has.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      /* Same combined function: only the arguments were simplified,
	 so update them in place on the existing call.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Build the replacement into *SEQ, directed at the original
	     LHS, then splice it in keeping virtual operands.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the tree to canonicalize; it is rewritten in place.
   Returns true if *T changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through a toplevel ADDR_EXPR and work on its operand.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the indexed element and
		     only rewrite when the access lies fully within the
		     vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components to reach the base object.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}

      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  When INPLACE, the statement may be modified
   but never replaced or grown.  VALUEIZE maps SSA names to known values
   (or returns NULL_TREE to decline).  Returns true iff anything changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires flipping the
		     comparison code to preserve semantics.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize reference arguments and the LHS of calls.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory operands of asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Falls through to the empty default; no break needed.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      /* When folding in place pass NULL for the sequence so
	 gimple_simplify never creates statements we cannot insert.  */
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; refetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valied.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In place, only install the new RHS when it needs no more
	   operand slots than the statement already has.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Inputs folded as memory only when the constraint allows
	       memory but not a register.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the referenced object under the ADDR_EXPR and
		 rebuild the address with the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Propagate a known value into the return value.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);

  return changed;
}
/* Valueization callback that ends up not following SSA edges: it always
   answers "no known value", so simplification looks only at the
   statement itself.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
4812 /* Valueization callback that ends up following single-use SSA edges only. */
4814 tree
4815 follow_single_use_edges (tree val)
4817 if (TREE_CODE (val) == SSA_NAME
4818 && !has_single_use (val))
4819 return NULL_TREE;
4820 return val;
4823 /* Fold the statement pointed to by GSI. In some cases, this function may
4824 replace the whole statement with a new one. Returns true iff folding
4825 makes any changes.
4826 The statement pointed to by GSI should be in valid gimple form but may
4827 be in unfolded state as resulting from for example constant propagation
4828 which can produce *&x = 0. */
4830 bool
4831 fold_stmt (gimple_stmt_iterator *gsi)
4833 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4836 bool
4837 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4839 return fold_stmt_1 (gsi, false, valueize);
4842 /* Perform the minimal folding on statement *GSI. Only operations like
4843 *&x created by constant propagation are handled. The statement cannot
4844 be replaced with a new one. Return true if the statement was
4845 changed, false otherwise.
4846 The statement *GSI should be in valid gimple form but may
4847 be in unfolded state as resulting from for example constant propagation
4848 which can produce *&x = 0. */
4850 bool
4851 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4853 gimple *stmt = gsi_stmt (*gsi);
4854 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4855 gcc_assert (gsi_stmt (*gsi) == stmt);
4856 return changed;
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.
   NOTE: the order of checks matters for which tree is returned, e.g.
   a boolean-typed EXPR is returned as-is (non-invert case) before any
   constant check runs.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      /* Constants invert to the opposite boolean node.  */
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      /* !NAME becomes NAME == 0.  */
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      /* Comparisons invert via the inverse comparison code; the
	 "honor NaNs" argument is false here.  */
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      /* Already boolean-typed: nothing to do.  */
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      /* NAME becomes NAME != 0.  */
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      /* Rebuild a comparison with boolean result type.  */
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* For boolean NAME: NAME is equivalent to NAME != 0 and to
	 NAME == nonzero.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == nonzero): same truth value as the
	     defining comparison itself.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != nonzero): inverted truth value, so
	     compare against the inverse comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
4964 /* Check to see if two boolean expressions OP1 and OP2 are logically
4965 equivalent. */
4967 static bool
4968 same_bool_result_p (const_tree op1, const_tree op2)
4970 /* Simple cases first. */
4971 if (operand_equal_p (op1, op2, 0))
4972 return true;
4974 /* Check the cases where at least one of the operands is a comparison.
4975 These are a bit smarter than operand_equal_p in that they apply some
4976 identifies on SSA_NAMEs. */
4977 if (COMPARISON_CLASS_P (op2)
4978 && same_bool_comparison_p (op1, TREE_CODE (op2),
4979 TREE_OPERAND (op2, 0),
4980 TREE_OPERAND (op2, 1)))
4981 return true;
4982 if (COMPARISON_CLASS_P (op1)
4983 && same_bool_comparison_p (op2, TREE_CODE (op1),
4984 TREE_OPERAND (op1, 0),
4985 TREE_OPERAND (op1, 1)))
4986 return true;
4988 /* Default case. */
4989 return false;
/* Forward declarations for some mutually recursive functions.
   and_comparisons_1 / or_comparisons_1 combine two comparisons; they
   recurse through the *_var_with_comparison helpers when an operand is
   an SSA name defined by a boolean expression.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
5013 /* Helper function for and_comparisons_1: try to simplify the AND of the
5014 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5015 If INVERT is true, invert the value of the VAR before doing the AND.
5016 Return NULL_EXPR if we can't simplify this to a single expression. */
5018 static tree
5019 and_var_with_comparison (tree var, bool invert,
5020 enum tree_code code2, tree op2a, tree op2b)
5022 tree t;
5023 gimple *stmt = SSA_NAME_DEF_STMT (var);
5025 /* We can only deal with variables whose definitions are assignments. */
5026 if (!is_gimple_assign (stmt))
5027 return NULL_TREE;
5029 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5030 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5031 Then we only have to consider the simpler non-inverted cases. */
5032 if (invert)
5033 t = or_var_with_comparison_1 (stmt,
5034 invert_tree_comparison (code2, false),
5035 op2a, op2b);
5036 else
5037 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5038 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) or (op2a == nonzero) tests op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... while (op2a == 0) or (op2a != nonzero) tests for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5204 /* Try to simplify the AND of two comparisons defined by
5205 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5206 If this can be done without constructing an intermediate value,
5207 return the resulting tree; otherwise NULL_TREE is returned.
5208 This function is deliberately asymmetric as it recurses on SSA_DEFs
5209 in the first comparison but not the second. */
5211 static tree
5212 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5213 enum tree_code code2, tree op2a, tree op2b)
5215 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5217 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5218 if (operand_equal_p (op1a, op2a, 0)
5219 && operand_equal_p (op1b, op2b, 0))
5221 /* Result will be either NULL_TREE, or a combined comparison. */
5222 tree t = combine_comparisons (UNKNOWN_LOCATION,
5223 TRUTH_ANDIF_EXPR, code1, code2,
5224 truth_type, op1a, op1b);
5225 if (t)
5226 return t;
5229 /* Likewise the swapped case of the above. */
5230 if (operand_equal_p (op1a, op2b, 0)
5231 && operand_equal_p (op1b, op2a, 0))
5233 /* Result will be either NULL_TREE, or a combined comparison. */
5234 tree t = combine_comparisons (UNKNOWN_LOCATION,
5235 TRUTH_ANDIF_EXPR, code1,
5236 swap_tree_comparison (code2),
5237 truth_type, op1a, op1b);
5238 if (t)
5239 return t;
5242 /* If both comparisons are of the same value against constants, we might
5243 be able to merge them. */
5244 if (operand_equal_p (op1a, op2a, 0)
5245 && TREE_CODE (op1b) == INTEGER_CST
5246 && TREE_CODE (op2b) == INTEGER_CST)
5248 int cmp = tree_int_cst_compare (op1b, op2b);
5250 /* If we have (op1a == op1b), we should either be able to
5251 return that or FALSE, depending on whether the constant op1b
5252 also satisfies the other comparison against op2b. */
5253 if (code1 == EQ_EXPR)
5255 bool done = true;
5256 bool val;
5257 switch (code2)
5259 case EQ_EXPR: val = (cmp == 0); break;
5260 case NE_EXPR: val = (cmp != 0); break;
5261 case LT_EXPR: val = (cmp < 0); break;
5262 case GT_EXPR: val = (cmp > 0); break;
5263 case LE_EXPR: val = (cmp <= 0); break;
5264 case GE_EXPR: val = (cmp >= 0); break;
5265 default: done = false;
5267 if (done)
5269 if (val)
5270 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5271 else
5272 return boolean_false_node;
5275 /* Likewise if the second comparison is an == comparison. */
5276 else if (code2 == EQ_EXPR)
5278 bool done = true;
5279 bool val;
5280 switch (code1)
5282 case EQ_EXPR: val = (cmp == 0); break;
5283 case NE_EXPR: val = (cmp != 0); break;
5284 case LT_EXPR: val = (cmp > 0); break;
5285 case GT_EXPR: val = (cmp < 0); break;
5286 case LE_EXPR: val = (cmp >= 0); break;
5287 case GE_EXPR: val = (cmp <= 0); break;
5288 default: done = false;
5290 if (done)
5292 if (val)
5293 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5294 else
5295 return boolean_false_node;
5299 /* Same business with inequality tests. */
5300 else if (code1 == NE_EXPR)
5302 bool val;
5303 switch (code2)
5305 case EQ_EXPR: val = (cmp != 0); break;
5306 case NE_EXPR: val = (cmp == 0); break;
5307 case LT_EXPR: val = (cmp >= 0); break;
5308 case GT_EXPR: val = (cmp <= 0); break;
5309 case LE_EXPR: val = (cmp > 0); break;
5310 case GE_EXPR: val = (cmp < 0); break;
5311 default:
5312 val = false;
5314 if (val)
5315 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5317 else if (code2 == NE_EXPR)
5319 bool val;
5320 switch (code1)
5322 case EQ_EXPR: val = (cmp == 0); break;
5323 case NE_EXPR: val = (cmp != 0); break;
5324 case LT_EXPR: val = (cmp <= 0); break;
5325 case GT_EXPR: val = (cmp >= 0); break;
5326 case LE_EXPR: val = (cmp < 0); break;
5327 case GE_EXPR: val = (cmp > 0); break;
5328 default:
5329 val = false;
5331 if (val)
5332 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5335 /* Chose the more restrictive of two < or <= comparisons. */
5336 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5337 && (code2 == LT_EXPR || code2 == LE_EXPR))
5339 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5340 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5341 else
5342 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5345 /* Likewise chose the more restrictive of two > or >= comparisons. */
5346 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5347 && (code2 == GT_EXPR || code2 == GE_EXPR))
5349 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5350 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5351 else
5352 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5355 /* Check for singleton ranges. */
5356 else if (cmp == 0
5357 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5358 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5359 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5361 /* Check for disjoint ranges. */
5362 else if (cmp <= 0
5363 && (code1 == LT_EXPR || code1 == LE_EXPR)
5364 && (code2 == GT_EXPR || code2 == GE_EXPR))
5365 return boolean_false_node;
5366 else if (cmp >= 0
5367 && (code1 == GT_EXPR || code1 == GE_EXPR)
5368 && (code2 == LT_EXPR || code2 == LE_EXPR))
5369 return boolean_false_node;
5372 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5373 NAME's definition is a truth value. See if there are any simplifications
5374 that can be done against the NAME's definition. */
5375 if (TREE_CODE (op1a) == SSA_NAME
5376 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5377 && (integer_zerop (op1b) || integer_onep (op1b)))
5379 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5380 || (code1 == NE_EXPR && integer_onep (op1b)));
5381 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5382 switch (gimple_code (stmt))
5384 case GIMPLE_ASSIGN:
5385 /* Try to simplify by copy-propagating the definition. */
5386 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5388 case GIMPLE_PHI:
5389 /* If every argument to the PHI produces the same result when
5390 ANDed with the second comparison, we win.
5391 Do not do this unless the type is bool since we need a bool
5392 result here anyway. */
5393 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5395 tree result = NULL_TREE;
5396 unsigned i;
5397 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5399 tree arg = gimple_phi_arg_def (stmt, i);
5401 /* If this PHI has itself as an argument, ignore it.
5402 If all the other args produce the same result,
5403 we're still OK. */
5404 if (arg == gimple_phi_result (stmt))
5405 continue;
5406 else if (TREE_CODE (arg) == INTEGER_CST)
5408 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5410 if (!result)
5411 result = boolean_false_node;
5412 else if (!integer_zerop (result))
5413 return NULL_TREE;
5415 else if (!result)
5416 result = fold_build2 (code2, boolean_type_node,
5417 op2a, op2b);
5418 else if (!same_bool_comparison_p (result,
5419 code2, op2a, op2b))
5420 return NULL_TREE;
5422 else if (TREE_CODE (arg) == SSA_NAME
5423 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5425 tree temp;
5426 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5427 /* In simple cases we can look through PHI nodes,
5428 but we have to be careful with loops.
5429 See PR49073. */
5430 if (! dom_info_available_p (CDI_DOMINATORS)
5431 || gimple_bb (def_stmt) == gimple_bb (stmt)
5432 || dominated_by_p (CDI_DOMINATORS,
5433 gimple_bb (def_stmt),
5434 gimple_bb (stmt)))
5435 return NULL_TREE;
5436 temp = and_var_with_comparison (arg, invert, code2,
5437 op2a, op2b);
5438 if (!temp)
5439 return NULL_TREE;
5440 else if (!result)
5441 result = temp;
5442 else if (!same_bool_result_p (result, temp))
5443 return NULL_TREE;
5445 else
5446 return NULL_TREE;
5448 return result;
5451 default:
5452 break;
5455 return NULL_TREE;
5458 /* Try to simplify the AND of two comparisons, specified by
5459 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5460 If this can be simplified to a single expression (without requiring
5461 introducing more SSA variables to hold intermediate values),
5462 return the resulting tree. Otherwise return NULL_TREE.
5463 If the result expression is non-null, it has boolean type. */
5465 tree
5466 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5467 enum tree_code code2, tree op2a, tree op2b)
5469 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5470 if (t)
5471 return t;
5472 else
5473 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5476 /* Helper function for or_comparisons_1: try to simplify the OR of the
5477 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5478 If INVERT is true, invert the value of VAR before doing the OR.
5479 Return NULL_EXPR if we can't simplify this to a single expression. */
5481 static tree
5482 or_var_with_comparison (tree var, bool invert,
5483 enum tree_code code2, tree op2a, tree op2b)
5485 tree t;
5486 gimple *stmt = SSA_NAME_DEF_STMT (var);
5488 /* We can only deal with variables whose definitions are assignments. */
5489 if (!is_gimple_assign (stmt))
5490 return NULL_TREE;
5492 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5493 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5494 Then we only have to consider the simpler non-inverted cases. */
5495 if (invert)
5496 t = and_var_with_comparison_1 (stmt,
5497 invert_tree_comparison (code2, false),
5498 op2a, op2b);
5499 else
5500 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5501 return canonicalize_bool (t, invert);
5504 /* Try to simplify the OR of the ssa variable defined by the assignment
5505 STMT with the comparison specified by (OP2A CODE2 OP2B).
5506 Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): this extract lost brace-only lines; gaps in the embedded
   original line numbers mark where the missing "{"/"}" lines were —
   restore from upstream gimple-fold.c before compiling.  */
5508 static tree
5509 or_var_with_comparison_1 (gimple *stmt,
5510 enum tree_code code2, tree op2a, tree op2b)
5512 tree var = gimple_assign_lhs (stmt);
5513 tree true_test_var = NULL_TREE;
5514 tree false_test_var = NULL_TREE;
/* The RHS operation of VAR's defining assignment selects which rewrite
   below applies (comparison vs. BIT_AND/BIT_IOR reassociation).  */
5515 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5517 /* Check for identities like (var OR (var != 0)) => true . */
5518 if (TREE_CODE (op2a) == SSA_NAME
5519 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5521 if ((code2 == NE_EXPR && integer_zerop (op2b))
5522 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5524 true_test_var = op2a;
5525 if (var == true_test_var)
5526 return var;
5528 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5529 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5531 false_test_var = op2a;
5532 if (var == false_test_var)
5533 return boolean_true_node;
5537 /* If the definition is a comparison, recurse on it. */
5538 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5540 tree t = or_comparisons_1 (innercode,
5541 gimple_assign_rhs1 (stmt),
5542 gimple_assign_rhs2 (stmt),
5543 code2,
5544 op2a,
5545 op2b);
5546 if (t)
5547 return t;
5550 /* If the definition is an AND or OR expression, we may be able to
5551 simplify by reassociating. */
5552 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5553 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5555 tree inner1 = gimple_assign_rhs1 (stmt);
5556 tree inner2 = gimple_assign_rhs2 (stmt);
5557 gimple *s;
5558 tree t;
/* PARTIAL caches the simplified first half (inner1 OR op2) while we
   attempt to simplify the second half as well.  */
5559 tree partial = NULL_TREE;
5560 bool is_or = (innercode == BIT_IOR_EXPR);
5562 /* Check for boolean identities that don't require recursive examination
5563 of inner1/inner2:
5564 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5565 inner1 OR (inner1 AND inner2) => inner1
5566 !inner1 OR (inner1 OR inner2) => true
5567 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5569 if (inner1 == true_test_var)
5570 return (is_or ? var : inner1);
5571 else if (inner2 == true_test_var)
5572 return (is_or ? var : inner2);
5573 else if (inner1 == false_test_var)
5574 return (is_or
5575 ? boolean_true_node
5576 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5577 else if (inner2 == false_test_var)
5578 return (is_or
5579 ? boolean_true_node
5580 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5582 /* Next, redistribute/reassociate the OR across the inner tests.
5583 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5584 if (TREE_CODE (inner1) == SSA_NAME
5585 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5586 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5587 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5588 gimple_assign_rhs1 (s),
5589 gimple_assign_rhs2 (s),
5590 code2, op2a, op2b)))
5592 /* Handle the OR case, where we are reassociating:
5593 (inner1 OR inner2) OR (op2a code2 op2b)
5594 => (t OR inner2)
5595 If the partial result t is a constant, we win. Otherwise
5596 continue on to try reassociating with the other inner test. */
5597 if (is_or)
5599 if (integer_onep (t))
5600 return boolean_true_node;
5601 else if (integer_zerop (t))
5602 return inner2;
5605 /* Handle the AND case, where we are redistributing:
5606 (inner1 AND inner2) OR (op2a code2 op2b)
5607 => (t AND (inner2 OR (op2a code op2b))) */
5608 else if (integer_zerop (t))
5609 return boolean_false_node;
5611 /* Save partial result for later. */
5612 partial = t;
5615 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5616 if (TREE_CODE (inner2) == SSA_NAME
5617 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5618 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5619 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5620 gimple_assign_rhs1 (s),
5621 gimple_assign_rhs2 (s),
5622 code2, op2a, op2b)))
5624 /* Handle the OR case, where we are reassociating:
5625 (inner1 OR inner2) OR (op2a code2 op2b)
5626 => (inner1 OR t)
5627 => (t OR partial) */
5628 if (is_or)
5630 if (integer_zerop (t))
5631 return inner1;
5632 else if (integer_onep (t))
5633 return boolean_true_node;
5634 /* If both are the same, we can apply the identity
5635 (x OR x) == x. */
5636 else if (partial && same_bool_result_p (t, partial))
5637 return t;
5640 /* Handle the AND case, where we are redistributing:
5641 (inner1 AND inner2) OR (op2a code2 op2b)
5642 => (t AND (inner1 OR (op2a code2 op2b)))
5643 => (t AND partial) */
5644 else
5646 if (integer_zerop (t))
5647 return boolean_false_node;
5648 else if (partial)
5650 /* We already got a simplification for the other
5651 operand to the redistributed AND expression. The
5652 interesting case is when at least one is true.
5653 Or, if both are the same, we can apply the identity
5654 (x AND x) == x. */
5655 if (integer_onep (partial))
5656 return t;
5657 else if (integer_onep (t))
5658 return partial;
5659 else if (same_bool_result_p (t, partial))
5660 return t;
/* No simplification found.  */
5665 return NULL_TREE;
5668 /* Try to simplify the OR of two comparisons defined by
5669 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5670 If this can be done without constructing an intermediate value,
5671 return the resulting tree; otherwise NULL_TREE is returned.
5672 This function is deliberately asymmetric as it recurses on SSA_DEFs
5673 in the first comparison but not the second. */
/* NOTE(review): this extract lost brace-only lines; gaps in the embedded
   original line numbers mark where the missing "{"/"}" lines were —
   restore from upstream gimple-fold.c before compiling.  */
5675 static tree
5676 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5677 enum tree_code code2, tree op2a, tree op2b)
5679 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5681 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5682 if (operand_equal_p (op1a, op2a, 0)
5683 && operand_equal_p (op1b, op2b, 0))
5685 /* Result will be either NULL_TREE, or a combined comparison. */
5686 tree t = combine_comparisons (UNKNOWN_LOCATION,
5687 TRUTH_ORIF_EXPR, code1, code2,
5688 truth_type, op1a, op1b);
5689 if (t)
5690 return t;
5693 /* Likewise the swapped case of the above. */
5694 if (operand_equal_p (op1a, op2b, 0)
5695 && operand_equal_p (op1b, op2a, 0))
5697 /* Result will be either NULL_TREE, or a combined comparison. */
5698 tree t = combine_comparisons (UNKNOWN_LOCATION,
5699 TRUTH_ORIF_EXPR, code1,
5700 swap_tree_comparison (code2),
5701 truth_type, op1a, op1b);
5702 if (t)
5703 return t;
5706 /* If both comparisons are of the same value against constants, we might
5707 be able to merge them. */
5708 if (operand_equal_p (op1a, op2a, 0)
5709 && TREE_CODE (op1b) == INTEGER_CST
5710 && TREE_CODE (op2b) == INTEGER_CST)
/* Relative order of the two constant operands; all range reasoning
   below is phrased in terms of this single comparison.  */
5712 int cmp = tree_int_cst_compare (op1b, op2b);
5714 /* If we have (op1a != op1b), we should either be able to
5715 return that or TRUE, depending on whether the constant op1b
5716 also satisfies the other comparison against op2b. */
5717 if (code1 == NE_EXPR)
5719 bool done = true;
5720 bool val;
5721 switch (code2)
5723 case EQ_EXPR: val = (cmp == 0); break;
5724 case NE_EXPR: val = (cmp != 0); break;
5725 case LT_EXPR: val = (cmp < 0); break;
5726 case GT_EXPR: val = (cmp > 0); break;
5727 case LE_EXPR: val = (cmp <= 0); break;
5728 case GE_EXPR: val = (cmp >= 0); break;
5729 default: done = false;
5731 if (done)
5733 if (val)
5734 return boolean_true_node;
5735 else
5736 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5739 /* Likewise if the second comparison is a != comparison. */
5740 else if (code2 == NE_EXPR)
5742 bool done = true;
5743 bool val;
5744 switch (code1)
5746 case EQ_EXPR: val = (cmp == 0); break;
5747 case NE_EXPR: val = (cmp != 0); break;
5748 case LT_EXPR: val = (cmp > 0); break;
5749 case GT_EXPR: val = (cmp < 0); break;
5750 case LE_EXPR: val = (cmp >= 0); break;
5751 case GE_EXPR: val = (cmp <= 0); break;
5752 default: done = false;
5754 if (done)
5756 if (val)
5757 return boolean_true_node;
5758 else
5759 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5763 /* See if an equality test is redundant with the other comparison. */
5764 else if (code1 == EQ_EXPR)
5766 bool val;
5767 switch (code2)
5769 case EQ_EXPR: val = (cmp == 0); break;
5770 case NE_EXPR: val = (cmp != 0); break;
5771 case LT_EXPR: val = (cmp < 0); break;
5772 case GT_EXPR: val = (cmp > 0); break;
5773 case LE_EXPR: val = (cmp <= 0); break;
5774 case GE_EXPR: val = (cmp >= 0); break;
5775 default:
5776 val = false;
5778 if (val)
5779 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5781 else if (code2 == EQ_EXPR)
5783 bool val;
5784 switch (code1)
5786 case EQ_EXPR: val = (cmp == 0); break;
5787 case NE_EXPR: val = (cmp != 0); break;
5788 case LT_EXPR: val = (cmp > 0); break;
5789 case GT_EXPR: val = (cmp < 0); break;
5790 case LE_EXPR: val = (cmp >= 0); break;
5791 case GE_EXPR: val = (cmp <= 0); break;
5792 default:
5793 val = false;
5795 if (val)
5796 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5799 /* Chose the less restrictive of two < or <= comparisons. */
5800 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5801 && (code2 == LT_EXPR || code2 == LE_EXPR))
5803 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5804 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5805 else
5806 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5809 /* Likewise chose the less restrictive of two > or >= comparisons. */
5810 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5811 && (code2 == GT_EXPR || code2 == GE_EXPR))
5813 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5814 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5815 else
5816 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5819 /* Check for singleton ranges. */
5820 else if (cmp == 0
5821 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5822 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5823 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b)
5825 /* Check for less/greater pairs that don't restrict the range at all. */
5826 else if (cmp >= 0
5827 && (code1 == LT_EXPR || code1 == LE_EXPR)
5828 && (code2 == GT_EXPR || code2 == GE_EXPR))
5829 return boolean_true_node;
5830 else if (cmp <= 0
5831 && (code1 == GT_EXPR || code1 == GE_EXPR)
5832 && (code2 == LT_EXPR || code2 == LE_EXPR))
5833 return boolean_true_node;
5836 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5837 NAME's definition is a truth value. See if there are any simplifications
5838 that can be done against the NAME's definition. */
5839 if (TREE_CODE (op1a) == SSA_NAME
5840 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5841 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT means the first comparison tests NAME's negation.  */
5843 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5844 || (code1 == NE_EXPR && integer_onep (op1b)));
5845 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5846 switch (gimple_code (stmt))
5848 case GIMPLE_ASSIGN:
5849 /* Try to simplify by copy-propagating the definition. */
5850 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5852 case GIMPLE_PHI:
5853 /* If every argument to the PHI produces the same result when
5854 ORed with the second comparison, we win.
5855 Do not do this unless the type is bool since we need a bool
5856 result here anyway. */
5857 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5859 tree result = NULL_TREE;
5860 unsigned i;
5861 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5863 tree arg = gimple_phi_arg_def (stmt, i);
5865 /* If this PHI has itself as an argument, ignore it.
5866 If all the other args produce the same result,
5867 we're still OK. */
5868 if (arg == gimple_phi_result (stmt))
5869 continue;
5870 else if (TREE_CODE (arg) == INTEGER_CST)
5872 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5874 if (!result)
5875 result = boolean_true_node;
5876 else if (!integer_onep (result))
5877 return NULL_TREE;
5879 else if (!result)
5880 result = fold_build2 (code2, boolean_type_node,
5881 op2a, op2b);
5882 else if (!same_bool_comparison_p (result,
5883 code2, op2a, op2b))
5884 return NULL_TREE;
5886 else if (TREE_CODE (arg) == SSA_NAME
5887 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5889 tree temp;
5890 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5891 /* In simple cases we can look through PHI nodes,
5892 but we have to be careful with loops.
5893 See PR49073. */
5894 if (! dom_info_available_p (CDI_DOMINATORS)
5895 || gimple_bb (def_stmt) == gimple_bb (stmt)
5896 || dominated_by_p (CDI_DOMINATORS,
5897 gimple_bb (def_stmt),
5898 gimple_bb (stmt)))
5899 return NULL_TREE;
5900 temp = or_var_with_comparison (arg, invert, code2,
5901 op2a, op2b);
5902 if (!temp)
5903 return NULL_TREE;
5904 else if (!result)
5905 result = temp;
5906 else if (!same_bool_result_p (result, temp))
5907 return NULL_TREE;
5909 else
5910 return NULL_TREE;
5912 return result;
5915 default:
5916 break;
/* No simplification found.  */
5919 return NULL_TREE;
5922 /* Try to simplify the OR of two comparisons, specified by
5923 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5924 If this can be simplified to a single expression (without requiring
5925 introducing more SSA variables to hold intermediate values),
5926 return the resulting tree. Otherwise return NULL_TREE.
5927 If the result expression is non-null, it has boolean type. */
5929 tree
5930 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5931 enum tree_code code2, tree op2a, tree op2b)
5933 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5934 if (t)
5935 return t;
5936 else
5937 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5941 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5943 Either NULL_TREE, a simplified but non-constant or a constant
5944 is returned.
5946 ??? This should go into a gimple-fold-inline.h file to be eventually
5947 privatized with the single valueize function used in the various TUs
5948 to avoid the indirect function call overhead. */
/* NOTE(review): this extract lost brace-only lines; gaps in the embedded
   original line numbers mark where the missing "{"/"}" lines were —
   restore from upstream gimple-fold.c before compiling.  */
5950 tree
5951 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5952 tree (*gvalueize) (tree))
5954 code_helper rcode;
5955 tree ops[3] = {};
5956 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5957 edges if there are intermediate VARYING defs. For this reason
5958 do not follow SSA edges here even though SCCVN can technically
5959 just deal fine with that. */
/* First give the generic match-and-simplify machinery a chance.  */
5960 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5962 tree res = NULL_TREE;
5963 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5964 res = ops[0];
5965 else if (mprts_hook)
5966 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5967 if (res)
5969 if (dump_file && dump_flags & TDF_DETAILS)
5971 fprintf (dump_file, "Match-and-simplified ");
5972 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5973 fprintf (dump_file, " to ");
5974 print_generic_expr (dump_file, res);
5975 fprintf (dump_file, "\n");
5977 return res;
/* Fall back to statement-kind-specific folding below.  */
5981 location_t loc = gimple_location (stmt);
5982 switch (gimple_code (stmt))
5984 case GIMPLE_ASSIGN:
5986 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5988 switch (get_gimple_rhs_class (subcode))
5990 case GIMPLE_SINGLE_RHS:
5992 tree rhs = gimple_assign_rhs1 (stmt);
5993 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5995 if (TREE_CODE (rhs) == SSA_NAME)
5997 /* If the RHS is an SSA_NAME, return its known constant value,
5998 if any. */
5999 return (*valueize) (rhs);
6001 /* Handle propagating invariant addresses into address
6002 operations. */
6003 else if (TREE_CODE (rhs) == ADDR_EXPR
6004 && !is_gimple_min_invariant (rhs))
6006 HOST_WIDE_INT offset = 0;
6007 tree base;
6008 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6009 &offset,
6010 valueize);
6011 if (base
6012 && (CONSTANT_CLASS_P (base)
6013 || decl_address_invariant_p (base)))
6014 return build_invariant_address (TREE_TYPE (rhs),
6015 base, offset);
6017 else if (TREE_CODE (rhs) == CONSTRUCTOR
6018 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6019 && (CONSTRUCTOR_NELTS (rhs)
6020 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6022 unsigned i, nelts;
6023 tree val;
6025 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs));
6026 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6027 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6029 val = (*valueize) (val);
6030 if (TREE_CODE (val) == INTEGER_CST
6031 || TREE_CODE (val) == REAL_CST
6032 || TREE_CODE (val) == FIXED_CST)
6033 vec.quick_push (val);
6034 else
6035 return NULL_TREE;
6038 return vec.build ();
6040 if (subcode == OBJ_TYPE_REF)
6042 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6043 /* If callee is constant, we can fold away the wrapper. */
6044 if (is_gimple_min_invariant (val))
6045 return val;
6048 if (kind == tcc_reference)
6050 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6051 || TREE_CODE (rhs) == REALPART_EXPR
6052 || TREE_CODE (rhs) == IMAGPART_EXPR)
6053 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6055 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6056 return fold_unary_loc (EXPR_LOCATION (rhs),
6057 TREE_CODE (rhs),
6058 TREE_TYPE (rhs), val);
6060 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6061 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6063 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6064 return fold_ternary_loc (EXPR_LOCATION (rhs),
6065 TREE_CODE (rhs),
6066 TREE_TYPE (rhs), val,
6067 TREE_OPERAND (rhs, 1),
6068 TREE_OPERAND (rhs, 2));
6070 else if (TREE_CODE (rhs) == MEM_REF
6071 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6073 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6074 if (TREE_CODE (val) == ADDR_EXPR
6075 && is_gimple_min_invariant (val))
6077 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6078 unshare_expr (val),
6079 TREE_OPERAND (rhs, 1));
6080 if (tem)
6081 rhs = tem;
6084 return fold_const_aggregate_ref_1 (rhs, valueize);
6086 else if (kind == tcc_declaration)
6087 return get_symbol_constant_value (rhs);
6088 return rhs;
6091 case GIMPLE_UNARY_RHS:
6092 return NULL_TREE;
6094 case GIMPLE_BINARY_RHS:
6095 /* Translate &x + CST into an invariant form suitable for
6096 further propagation. */
6097 if (subcode == POINTER_PLUS_EXPR)
6099 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6100 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6101 if (TREE_CODE (op0) == ADDR_EXPR
6102 && TREE_CODE (op1) == INTEGER_CST)
6104 tree off = fold_convert (ptr_type_node, op1);
6105 return build_fold_addr_expr_loc
6106 (loc,
6107 fold_build2 (MEM_REF,
6108 TREE_TYPE (TREE_TYPE (op0)),
6109 unshare_expr (op0), off));
6112 /* Canonicalize bool != 0 and bool == 0 appearing after
6113 valueization. While gimple_simplify handles this
6114 it can get confused by the ~X == 1 -> X == 0 transform
6115 which we cant reduce to a SSA name or a constant
6116 (and we have no way to tell gimple_simplify to not
6117 consider those transforms in the first place). */
6118 else if (subcode == EQ_EXPR
6119 || subcode == NE_EXPR)
6121 tree lhs = gimple_assign_lhs (stmt);
6122 tree op0 = gimple_assign_rhs1 (stmt);
6123 if (useless_type_conversion_p (TREE_TYPE (lhs),
6124 TREE_TYPE (op0)))
6126 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6127 op0 = (*valueize) (op0);
/* Put a constant operand second so the tests below only have
   to look at op1.  */
6128 if (TREE_CODE (op0) == INTEGER_CST)
6129 std::swap (op0, op1);
6130 if (TREE_CODE (op1) == INTEGER_CST
6131 && ((subcode == NE_EXPR && integer_zerop (op1))
6132 || (subcode == EQ_EXPR && integer_onep (op1))))
6133 return op0;
6136 return NULL_TREE;
6138 case GIMPLE_TERNARY_RHS:
6140 /* Handle ternary operators that can appear in GIMPLE form. */
6141 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6142 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6143 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6144 return fold_ternary_loc (loc, subcode,
6145 gimple_expr_type (stmt), op0, op1, op2);
6148 default:
6149 gcc_unreachable ();
6153 case GIMPLE_CALL:
6155 tree fn;
6156 gcall *call_stmt = as_a <gcall *> (stmt);
6158 if (gimple_call_internal_p (stmt))
6160 enum tree_code subcode = ERROR_MARK;
6161 switch (gimple_call_internal_fn (stmt))
6163 case IFN_UBSAN_CHECK_ADD:
6164 subcode = PLUS_EXPR;
6165 break;
6166 case IFN_UBSAN_CHECK_SUB:
6167 subcode = MINUS_EXPR;
6168 break;
6169 case IFN_UBSAN_CHECK_MUL:
6170 subcode = MULT_EXPR;
6171 break;
6172 case IFN_BUILTIN_EXPECT:
6174 tree arg0 = gimple_call_arg (stmt, 0);
6175 tree op0 = (*valueize) (arg0);
6176 if (TREE_CODE (op0) == INTEGER_CST)
6177 return op0;
6178 return NULL_TREE;
6180 default:
6181 return NULL_TREE;
6183 tree arg0 = gimple_call_arg (stmt, 0);
6184 tree arg1 = gimple_call_arg (stmt, 1);
6185 tree op0 = (*valueize) (arg0);
6186 tree op1 = (*valueize) (arg1);
6188 if (TREE_CODE (op0) != INTEGER_CST
6189 || TREE_CODE (op1) != INTEGER_CST)
6191 switch (subcode)
6193 case MULT_EXPR:
6194 /* x * 0 = 0 * x = 0 without overflow. */
6195 if (integer_zerop (op0) || integer_zerop (op1))
6196 return build_zero_cst (TREE_TYPE (arg0));
6197 break;
6198 case MINUS_EXPR:
6199 /* y - y = 0 without overflow. */
6200 if (operand_equal_p (op0, op1, 0))
6201 return build_zero_cst (TREE_TYPE (arg0));
6202 break;
6203 default:
6204 break;
/* Both operands constant: fold, but only accept a result that did
   not overflow (these are UBSAN_CHECK_* calls).  */
6207 tree res
6208 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6209 if (res
6210 && TREE_CODE (res) == INTEGER_CST
6211 && !TREE_OVERFLOW (res))
6212 return res;
6213 return NULL_TREE;
/* Not an internal call: try folding a builtin with valueized
   constant arguments.  */
6216 fn = (*valueize) (gimple_call_fn (stmt));
6217 if (TREE_CODE (fn) == ADDR_EXPR
6218 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6219 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6220 && gimple_builtin_call_types_compatible_p (stmt,
6221 TREE_OPERAND (fn, 0)))
6223 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6224 tree retval;
6225 unsigned i;
6226 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6227 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6228 retval = fold_builtin_call_array (loc,
6229 gimple_call_return_type (call_stmt),
6230 fn, gimple_call_num_args (stmt), args);
6231 if (retval)
6233 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6234 STRIP_NOPS (retval);
6235 retval = fold_convert (gimple_call_return_type (call_stmt),
6236 retval);
6238 return retval;
6240 return NULL_TREE;
6243 default:
6244 return NULL_TREE;
6248 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6249 Returns NULL_TREE if folding to a constant is not possible, otherwise
6250 returns a constant according to is_gimple_min_invariant. */
6252 tree
6253 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6255 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6256 if (res && is_gimple_min_invariant (res))
6257 return res;
6258 return NULL_TREE;
6262 /* The following set of functions are supposed to fold references using
6263 their constant initializers. */
6265 /* See if we can find constructor defining value of BASE.
6266 When we know the consructor with constant offset (such as
6267 base is array[40] and we do know constructor of array), then
6268 BIT_OFFSET is adjusted accordingly.
6270 As a special case, return error_mark_node when constructor
6271 is not explicitly available, but it is known to be zero
6272 such as 'static const int a;'. */
/* NOTE(review): this extract lost brace-only lines; gaps in the embedded
   original line numbers mark where the missing "{"/"}" lines were —
   restore from upstream gimple-fold.c before compiling.  */
6273 static tree
6274 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
6275 tree (*valueize)(tree))
6277 HOST_WIDE_INT bit_offset2, size, max_size;
6278 bool reverse;
/* Strip a MEM_REF wrapper, accumulating its constant byte offset
   (converted to bits) into *BIT_OFFSET.  */
6280 if (TREE_CODE (base) == MEM_REF)
6282 if (!integer_zerop (TREE_OPERAND (base, 1)))
6284 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
6285 return NULL_TREE;
6286 *bit_offset += (mem_ref_offset (base).to_short_addr ()
6287 * BITS_PER_UNIT)
6290 if (valueize
6291 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6292 base = valueize (TREE_OPERAND (base, 0));
6293 if (!base || TREE_CODE (base) != ADDR_EXPR)
6294 return NULL_TREE;
6295 base = TREE_OPERAND (base, 0);
6297 else if (valueize
6298 && TREE_CODE (base) == SSA_NAME)
6299 base = valueize (base);
6301 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6302 DECL_INITIAL. If BASE is a nested reference into another
6303 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6304 the inner reference. */
6305 switch (TREE_CODE (base))
6307 case VAR_DECL:
6308 case CONST_DECL:
6310 tree init = ctor_for_folding (base);
6312 /* Our semantic is exact opposite of ctor_for_folding;
6313 NULL means unknown, while error_mark_node is 0. */
6314 if (init == error_mark_node)
6315 return NULL_TREE;
6316 if (!init)
6317 return error_mark_node;
6318 return init;
6321 case VIEW_CONVERT_EXPR:
6322 return get_base_constructor (TREE_OPERAND (base, 0),
6323 bit_offset, valueize);
6325 case ARRAY_REF:
6326 case COMPONENT_REF:
/* Only fold references whose extent is exactly known.  */
6327 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6328 &reverse);
6329 if (max_size == -1 || size != max_size)
6330 return NULL_TREE;
6331 *bit_offset += bit_offset2;
6332 return get_base_constructor (base, bit_offset, valueize);
6334 case CONSTRUCTOR:
6335 return base;
6337 default:
6338 if (CONSTANT_CLASS_P (base))
6339 return base;
6341 return NULL_TREE;
6345 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6346 SIZE to the memory at bit OFFSET. */
/* NOTE(review): this extract lost brace-only lines; gaps in the embedded
   original line numbers mark where the missing "{"/"}" lines were —
   restore from upstream gimple-fold.c before compiling.  */
6348 static tree
6349 fold_array_ctor_reference (tree type, tree ctor,
6350 unsigned HOST_WIDE_INT offset,
6351 unsigned HOST_WIDE_INT size,
6352 tree from_decl)
6354 offset_int low_bound;
6355 offset_int elt_size;
6356 offset_int access_index;
6357 tree domain_type = NULL_TREE;
6358 HOST_WIDE_INT inner_offset;
6360 /* Compute low bound and elt size. */
6361 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6362 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6363 if (domain_type && TYPE_MIN_VALUE (domain_type))
6365 /* Static constructors for variably sized objects makes no sense. */
6366 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6367 return NULL_TREE;
6368 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6370 else
6371 low_bound = 0;
6372 /* Static constructors for variably sized objects makes no sense. */
6373 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6374 return NULL_TREE;
6375 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6377 /* We can handle only constantly sized accesses that are known to not
6378 be larger than size of array element. */
6379 if (!TYPE_SIZE_UNIT (type)
6380 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6381 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6382 || elt_size == 0)
6383 return NULL_TREE;
6385 /* Compute the array index we look for. */
6386 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6387 elt_size);
6388 access_index += low_bound;
6390 /* And offset within the access. */
6391 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6393 /* See if the array field is large enough to span whole access. We do not
6394 care to fold accesses spanning multiple array indexes. */
6395 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6396 return NULL_TREE;
/* Found an explicit initializer element: recurse into it.  */
6397 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6398 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6400 /* When memory is not explicitely mentioned in constructor,
6401 it is 0 (or out of range). */
6402 return build_zero_cst (type);
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL is the decl the constructor originates from.
   Return the folded value or NULL_TREE on failure.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  FROM_DECL is the decl the constructor
   originates from.  Return the folded value or NULL_TREE on failure.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Dispatch on the shape of the constructor; arrays and vectors
	 use index-based lookup, other aggregates use field walks.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold through a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a constant reference T into aggregate memory without SSA
   valueization; convenience wrapper around fold_const_aggregate_ref_1.
   Return the folded value or NULL_TREE.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */
  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold if the access stays within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
6910 /* Return true if CODE is an operation that when operating on signed
6911 integer types involves undefined behavior on overflow and the
6912 operation can be expressed with unsigned arithmetic. */
6914 bool
6915 arith_code_with_undefined_signed_overflow (tree_code code)
6917 switch (code)
6919 case PLUS_EXPR:
6920 case MINUS_EXPR:
6921 case MULT_EXPR:
6922 case NEGATE_EXPR:
6923 case POINTER_PLUS_EXPR:
6924 return true;
6925 default:
6926 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each RHS operand to the unsigned type; the conversion
     statements accumulate in STMTS ahead of STMT itself.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer arithmetic becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original LHS type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
6968 /* The valueization hook we use for the gimple_build API simplification.
6969 This makes us match fold_buildN behavior by only combining with
6970 statements in the sequence(s) we are currently building. */
6972 static tree
6973 gimple_build_valueize (tree op)
6975 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6976 return op;
6977 return NULL_TREE;
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      /* These codes are represented as a unary tree wrapped around the
	 operand rather than as a plain rhs code on the assignment.  */
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      /* BIT_FIELD_REF is represented as a wrapped ternary tree rather
	 than as a plain three-operand rhs on the assignment.  */
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
7132 /* Build the conversion (TYPE) OP with a result of type TYPE
7133 with location LOC if such conversion is neccesary in GIMPLE,
7134 simplifying it first.
7135 Returns the built expression value and appends
7136 statements possibly defining it to SEQ. */
7138 tree
7139 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7141 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7142 return op;
7143 return gimple_build (seq, loc, NOP_EXPR, type, op);
7146 /* Build the conversion (ptrofftype) OP with a result of a type
7147 compatible with ptrofftype with location LOC if such conversion
7148 is neccesary in GIMPLE, simplifying it first.
7149 Returns the built expression value and appends
7150 statements possibly defining it to SEQ. */
7152 tree
7153 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7155 if (ptrofftype_p (TREE_TYPE (op)))
7156 return op;
7157 return gimple_convert (seq, loc, sizetype, op);
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
			      tree op)
{
  tree res, vec = build_vector_from_val (type, op);
  /* A constant vector needs no defining statement.  */
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	/* At least one element is non-constant: emit a CONSTRUCTOR
	   assignment for the full vector instead.  */
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	/* Note: I is deliberately reused here to walk all elements.  */
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  return builder->build ();
}
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  /* Dispatch to the fold-const helper matching the rhs arity.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
7253 /* Return true if return value of call STMT is known to be non-negative.
7254 If the return value is based on the assumption that signed overflow is
7255 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7256 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7258 static bool
7259 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7260 int depth)
7262 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7263 gimple_call_arg (stmt, 0) : NULL_TREE;
7264 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7265 gimple_call_arg (stmt, 1) : NULL_TREE;
7267 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7268 gimple_call_combined_fn (stmt),
7269 arg0,
7270 arg1,
7271 strict_overflow_p, depth);
7274 /* Return true if return value of call STMT is known to be non-negative.
7275 If the return value is based on the assumption that signed overflow is
7276 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7277 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7279 static bool
7280 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7281 int depth)
7283 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7285 tree arg = gimple_phi_arg_def (stmt, i);
7286 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7287 return false;
7289 return true;
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  /* Dispatch on the statement kind; anything other than an assignment,
     call or PHI is conservatively treated as possibly negative.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  /* Dispatch to the fold-const helper matching the rhs arity.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
7346 /* Return true if the floating-point value computed by call STMT is known
7347 to have an integer value. We also allow +Inf, -Inf and NaN to be
7348 considered integer values. Return false for signaling NaN.
7350 DEPTH is the current nesting depth of the query. */
7352 static bool
7353 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7355 tree arg0 = (gimple_call_num_args (stmt) > 0
7356 ? gimple_call_arg (stmt, 0)
7357 : NULL_TREE);
7358 tree arg1 = (gimple_call_num_args (stmt) > 1
7359 ? gimple_call_arg (stmt, 1)
7360 : NULL_TREE);
7361 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7362 arg0, arg1, depth);
7365 /* Return true if the floating-point result of phi STMT is known to have
7366 an integer value. We also allow +Inf, -Inf and NaN to be considered
7367 integer values. Return false for signaling NaN.
7369 DEPTH is the current nesting depth of the query. */
7371 static bool
7372 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7374 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7376 tree arg = gimple_phi_arg_def (stmt, i);
7377 if (!integer_valued_real_single_p (arg, depth + 1))
7378 return false;
7380 return true;
7383 /* Return true if the floating-point value computed by STMT is known
7384 to have an integer value. We also allow +Inf, -Inf and NaN to be
7385 considered integer values. Return false for signaling NaN.
7387 DEPTH is the current nesting depth of the query. */
7389 bool
7390 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7392 switch (gimple_code (stmt))
7394 case GIMPLE_ASSIGN:
7395 return gimple_assign_integer_valued_real_p (stmt, depth);
7396 case GIMPLE_CALL:
7397 return gimple_call_integer_valued_real_p (stmt, depth);
7398 case GIMPLE_PHI:
7399 return gimple_phi_integer_valued_real_p (stmt, depth);
7400 default:
7401 return false;