Daily bump.
[official-gcc.git] / gcc / gimple-fold.c
blobef2ea8f765495439d236d2dce397f1cd1a1c1c74
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-low.h"
56 #include "ipa-chkp.h"
59 /* Return true when DECL can be referenced from current unit.
60 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
61 We can get declarations that are not possible to reference for various
62 reasons:
64 1) When analyzing C++ virtual tables.
65 C++ virtual tables do have known constructors even
66 when they are keyed to other compilation unit.
67 Those tables can contain pointers to methods and vars
68 in other units. Those methods have both STATIC and EXTERNAL
69 set.
70 2) In WHOPR mode devirtualization might lead to reference
71 to method that was partitioned elsehwere.
72 In this case we have static VAR_DECL or FUNCTION_DECL
73 that has no corresponding callgraph/varpool node
74 declaring the body.
75 3) COMDAT functions referred by external vtables that
76 we devirtualize only during final compilation stage.
77 At this time we already decided that we will not output
78 the function body and thus we can't reference the symbol
79 directly. */
81 static bool
82 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
84 varpool_node *vnode;
85 struct cgraph_node *node;
86 symtab_node *snode;
88 if (DECL_ABSTRACT_P (decl))
89 return false;
91 /* We are concerned only about static/external vars and functions. */
92 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
93 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
94 return true;
96 /* Static objects can be referred only if they was not optimized out yet. */
97 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
99 /* Before we start optimizing unreachable code we can be sure all
100 static objects are defined. */
101 if (symtab->function_flags_ready)
102 return true;
103 snode = symtab_node::get (decl);
104 if (!snode || !snode->definition)
105 return false;
106 node = dyn_cast <cgraph_node *> (snode);
107 return !node || !node->global.inlined_to;
110 /* We will later output the initializer, so we can refer to it.
111 So we are concerned only when DECL comes from initializer of
112 external var or var that has been optimized out. */
113 if (!from_decl
114 || TREE_CODE (from_decl) != VAR_DECL
115 || (!DECL_EXTERNAL (from_decl)
116 && (vnode = varpool_node::get (from_decl)) != NULL
117 && vnode->definition)
118 || (flag_ltrans
119 && (vnode = varpool_node::get (from_decl)) != NULL
120 && vnode->in_other_partition))
121 return true;
122 /* We are folding reference from external vtable. The vtable may reffer
123 to a symbol keyed to other compilation unit. The other compilation
124 unit may be in separate DSO and the symbol may be hidden. */
125 if (DECL_VISIBILITY_SPECIFIED (decl)
126 && DECL_EXTERNAL (decl)
127 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
128 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
129 return false;
130 /* When function is public, we always can introduce new reference.
131 Exception are the COMDAT functions where introducing a direct
132 reference imply need to include function body in the curren tunit. */
133 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
134 return true;
135 /* We have COMDAT. We are going to check if we still have definition
136 or if the definition is going to be output in other partition.
137 Bypass this when gimplifying; all needed functions will be produced.
139 As observed in PR20991 for already optimized out comdat virtual functions
140 it may be tempting to not necessarily give up because the copy will be
141 output elsewhere when corresponding vtable is output.
142 This is however not possible - ABI specify that COMDATs are output in
143 units where they are used and when the other unit was compiled with LTO
144 it is possible that vtable was kept public while the function itself
145 was privatized. */
146 if (!symtab->function_flags_ready)
147 return true;
149 snode = symtab_node::get (decl);
150 if (!snode
151 || ((!snode->definition || DECL_EXTERNAL (decl))
152 && (!snode->in_other_partition
153 || (!snode->forced_by_abi && !snode->force_output))))
154 return false;
155 node = dyn_cast <cgraph_node *> (snode);
156 return !node || !node->global.inlined_to;
159 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
160 acceptable form for is_gimple_min_invariant.
161 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
163 tree
164 canonicalize_constructor_val (tree cval, tree from_decl)
166 tree orig_cval = cval;
167 STRIP_NOPS (cval);
168 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
169 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
171 tree ptr = TREE_OPERAND (cval, 0);
172 if (is_gimple_min_invariant (ptr))
173 cval = build1_loc (EXPR_LOCATION (cval),
174 ADDR_EXPR, TREE_TYPE (ptr),
175 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
176 ptr,
177 fold_convert (ptr_type_node,
178 TREE_OPERAND (cval, 1))));
180 if (TREE_CODE (cval) == ADDR_EXPR)
182 tree base = NULL_TREE;
183 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
185 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
186 if (base)
187 TREE_OPERAND (cval, 0) = base;
189 else
190 base = get_base_address (TREE_OPERAND (cval, 0));
191 if (!base)
192 return NULL_TREE;
194 if ((TREE_CODE (base) == VAR_DECL
195 || TREE_CODE (base) == FUNCTION_DECL)
196 && !can_refer_decl_in_current_unit_p (base, from_decl))
197 return NULL_TREE;
198 if (TREE_TYPE (base) == error_mark_node)
199 return NULL_TREE;
200 if (TREE_CODE (base) == VAR_DECL)
201 TREE_ADDRESSABLE (base) = 1;
202 else if (TREE_CODE (base) == FUNCTION_DECL)
204 /* Make sure we create a cgraph node for functions we'll reference.
205 They can be non-existent if the reference comes from an entry
206 of an external vtable for example. */
207 cgraph_node::get_create (base);
209 /* Fixup types in global initializers. */
210 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
211 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
213 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
214 cval = fold_convert (TREE_TYPE (orig_cval), cval);
215 return cval;
217 if (TREE_OVERFLOW_P (cval))
218 return drop_tree_overflow (cval);
219 return orig_cval;
222 /* If SYM is a constant variable with known value, return the value.
223 NULL_TREE is returned otherwise. */
225 tree
226 get_symbol_constant_value (tree sym)
228 tree val = ctor_for_folding (sym);
229 if (val != error_mark_node)
231 if (val)
233 val = canonicalize_constructor_val (unshare_expr (val), sym);
234 if (val && is_gimple_min_invariant (val))
235 return val;
236 else
237 return NULL_TREE;
239 /* Variables declared 'const' without an initializer
240 have zero as the initializer if they may not be
241 overridden at link or run time. */
242 if (!val
243 && is_gimple_reg_type (TREE_TYPE (sym)))
244 return build_zero_cst (TREE_TYPE (sym));
247 return NULL_TREE;
252 /* Subroutine of fold_stmt. We perform several simplifications of the
253 memory reference tree EXPR and make sure to re-gimplify them properly
254 after propagation of constant addresses. IS_LHS is true if the
255 reference is supposed to be an lvalue. */
257 static tree
258 maybe_fold_reference (tree expr, bool is_lhs)
260 tree result;
262 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
263 || TREE_CODE (expr) == REALPART_EXPR
264 || TREE_CODE (expr) == IMAGPART_EXPR)
265 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
266 return fold_unary_loc (EXPR_LOCATION (expr),
267 TREE_CODE (expr),
268 TREE_TYPE (expr),
269 TREE_OPERAND (expr, 0));
270 else if (TREE_CODE (expr) == BIT_FIELD_REF
271 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
272 return fold_ternary_loc (EXPR_LOCATION (expr),
273 TREE_CODE (expr),
274 TREE_TYPE (expr),
275 TREE_OPERAND (expr, 0),
276 TREE_OPERAND (expr, 1),
277 TREE_OPERAND (expr, 2));
279 if (!is_lhs
280 && (result = fold_const_aggregate_ref (expr))
281 && is_gimple_min_invariant (result))
282 return result;
284 return NULL_TREE;
288 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
289 replacement rhs for the statement or NULL_TREE if no simplification
290 could be made. It is assumed that the operands have been previously
291 folded. */
293 static tree
294 fold_gimple_assign (gimple_stmt_iterator *si)
296 gimple *stmt = gsi_stmt (*si);
297 enum tree_code subcode = gimple_assign_rhs_code (stmt);
298 location_t loc = gimple_location (stmt);
300 tree result = NULL_TREE;
302 switch (get_gimple_rhs_class (subcode))
304 case GIMPLE_SINGLE_RHS:
306 tree rhs = gimple_assign_rhs1 (stmt);
308 if (TREE_CLOBBER_P (rhs))
309 return NULL_TREE;
311 if (REFERENCE_CLASS_P (rhs))
312 return maybe_fold_reference (rhs, false);
314 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
316 tree val = OBJ_TYPE_REF_EXPR (rhs);
317 if (is_gimple_min_invariant (val))
318 return val;
319 else if (flag_devirtualize && virtual_method_call_p (rhs))
321 bool final;
322 vec <cgraph_node *>targets
323 = possible_polymorphic_call_targets (rhs, stmt, &final);
324 if (final && targets.length () <= 1 && dbg_cnt (devirt))
326 if (dump_enabled_p ())
328 location_t loc = gimple_location_safe (stmt);
329 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
330 "resolving virtual function address "
331 "reference to function %s\n",
332 targets.length () == 1
333 ? targets[0]->name ()
334 : "NULL");
336 if (targets.length () == 1)
338 val = fold_convert (TREE_TYPE (val),
339 build_fold_addr_expr_loc
340 (loc, targets[0]->decl));
341 STRIP_USELESS_TYPE_CONVERSION (val);
343 else
344 /* We can not use __builtin_unreachable here because it
345 can not have address taken. */
346 val = build_int_cst (TREE_TYPE (val), 0);
347 return val;
352 else if (TREE_CODE (rhs) == ADDR_EXPR)
354 tree ref = TREE_OPERAND (rhs, 0);
355 tree tem = maybe_fold_reference (ref, true);
356 if (tem
357 && TREE_CODE (tem) == MEM_REF
358 && integer_zerop (TREE_OPERAND (tem, 1)))
359 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
360 else if (tem)
361 result = fold_convert (TREE_TYPE (rhs),
362 build_fold_addr_expr_loc (loc, tem));
363 else if (TREE_CODE (ref) == MEM_REF
364 && integer_zerop (TREE_OPERAND (ref, 1)))
365 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
367 if (result)
369 /* Strip away useless type conversions. Both the
370 NON_LVALUE_EXPR that may have been added by fold, and
371 "useless" type conversions that might now be apparent
372 due to propagation. */
373 STRIP_USELESS_TYPE_CONVERSION (result);
375 if (result != rhs && valid_gimple_rhs_p (result))
376 return result;
380 else if (TREE_CODE (rhs) == CONSTRUCTOR
381 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
383 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
384 unsigned i;
385 tree val;
387 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
388 if (! CONSTANT_CLASS_P (val))
389 return NULL_TREE;
391 return build_vector_from_ctor (TREE_TYPE (rhs),
392 CONSTRUCTOR_ELTS (rhs));
395 else if (DECL_P (rhs))
396 return get_symbol_constant_value (rhs);
398 break;
400 case GIMPLE_UNARY_RHS:
401 break;
403 case GIMPLE_BINARY_RHS:
404 break;
406 case GIMPLE_TERNARY_RHS:
407 result = fold_ternary_loc (loc, subcode,
408 TREE_TYPE (gimple_assign_lhs (stmt)),
409 gimple_assign_rhs1 (stmt),
410 gimple_assign_rhs2 (stmt),
411 gimple_assign_rhs3 (stmt));
413 if (result)
415 STRIP_USELESS_TYPE_CONVERSION (result);
416 if (valid_gimple_rhs_p (result))
417 return result;
419 break;
421 case GIMPLE_INVALID_RHS:
422 gcc_unreachable ();
425 return NULL_TREE;
429 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
430 adjusting the replacement stmts location and virtual operands.
431 If the statement has a lhs the last stmt in the sequence is expected
432 to assign to that lhs. */
434 static void
435 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
437 gimple *stmt = gsi_stmt (*si_p);
439 if (gimple_has_location (stmt))
440 annotate_all_with_location (stmts, gimple_location (stmt));
442 /* First iterate over the replacement statements backward, assigning
443 virtual operands to their defining statements. */
444 gimple *laststore = NULL;
445 for (gimple_stmt_iterator i = gsi_last (stmts);
446 !gsi_end_p (i); gsi_prev (&i))
448 gimple *new_stmt = gsi_stmt (i);
449 if ((gimple_assign_single_p (new_stmt)
450 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
451 || (is_gimple_call (new_stmt)
452 && (gimple_call_flags (new_stmt)
453 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
455 tree vdef;
456 if (!laststore)
457 vdef = gimple_vdef (stmt);
458 else
459 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
460 gimple_set_vdef (new_stmt, vdef);
461 if (vdef && TREE_CODE (vdef) == SSA_NAME)
462 SSA_NAME_DEF_STMT (vdef) = new_stmt;
463 laststore = new_stmt;
467 /* Second iterate over the statements forward, assigning virtual
468 operands to their uses. */
469 tree reaching_vuse = gimple_vuse (stmt);
470 for (gimple_stmt_iterator i = gsi_start (stmts);
471 !gsi_end_p (i); gsi_next (&i))
473 gimple *new_stmt = gsi_stmt (i);
474 /* If the new statement possibly has a VUSE, update it with exact SSA
475 name we know will reach this one. */
476 if (gimple_has_mem_ops (new_stmt))
477 gimple_set_vuse (new_stmt, reaching_vuse);
478 gimple_set_modified (new_stmt, true);
479 if (gimple_vdef (new_stmt))
480 reaching_vuse = gimple_vdef (new_stmt);
483 /* If the new sequence does not do a store release the virtual
484 definition of the original statement. */
485 if (reaching_vuse
486 && reaching_vuse == gimple_vuse (stmt))
488 tree vdef = gimple_vdef (stmt);
489 if (vdef
490 && TREE_CODE (vdef) == SSA_NAME)
492 unlink_stmt_vdef (stmt);
493 release_ssa_name (vdef);
497 /* Finally replace the original statement with the sequence. */
498 gsi_replace_with_seq (si_p, stmts, false);
501 /* Convert EXPR into a GIMPLE value suitable for substitution on the
502 RHS of an assignment. Insert the necessary statements before
503 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
504 is replaced. If the call is expected to produces a result, then it
505 is replaced by an assignment of the new RHS to the result variable.
506 If the result is to be ignored, then the call is replaced by a
507 GIMPLE_NOP. A proper VDEF chain is retained by making the first
508 VUSE and the last VDEF of the whole sequence be the same as the replaced
509 statement and using new SSA names for stores in between. */
511 void
512 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
514 tree lhs;
515 gimple *stmt, *new_stmt;
516 gimple_stmt_iterator i;
517 gimple_seq stmts = NULL;
519 stmt = gsi_stmt (*si_p);
521 gcc_assert (is_gimple_call (stmt));
523 push_gimplify_context (gimple_in_ssa_p (cfun));
525 lhs = gimple_call_lhs (stmt);
526 if (lhs == NULL_TREE)
528 gimplify_and_add (expr, &stmts);
529 /* We can end up with folding a memcpy of an empty class assignment
530 which gets optimized away by C++ gimplification. */
531 if (gimple_seq_empty_p (stmts))
533 pop_gimplify_context (NULL);
534 if (gimple_in_ssa_p (cfun))
536 unlink_stmt_vdef (stmt);
537 release_defs (stmt);
539 gsi_replace (si_p, gimple_build_nop (), false);
540 return;
543 else
545 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
546 new_stmt = gimple_build_assign (lhs, tmp);
547 i = gsi_last (stmts);
548 gsi_insert_after_without_update (&i, new_stmt,
549 GSI_CONTINUE_LINKING);
552 pop_gimplify_context (NULL);
554 gsi_replace_with_seq_vops (si_p, stmts);
558 /* Replace the call at *GSI with the gimple value VAL. */
560 static void
561 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
563 gimple *stmt = gsi_stmt (*gsi);
564 tree lhs = gimple_call_lhs (stmt);
565 gimple *repl;
566 if (lhs)
568 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
569 val = fold_convert (TREE_TYPE (lhs), val);
570 repl = gimple_build_assign (lhs, val);
572 else
573 repl = gimple_build_nop ();
574 tree vdef = gimple_vdef (stmt);
575 if (vdef && TREE_CODE (vdef) == SSA_NAME)
577 unlink_stmt_vdef (stmt);
578 release_ssa_name (vdef);
580 gsi_replace (gsi, repl, false);
583 /* Replace the call at *GSI with the new call REPL and fold that
584 again. */
586 static void
587 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
589 gimple *stmt = gsi_stmt (*gsi);
590 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
591 gimple_set_location (repl, gimple_location (stmt));
592 if (gimple_vdef (stmt)
593 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
595 gimple_set_vdef (repl, gimple_vdef (stmt));
596 gimple_set_vuse (repl, gimple_vuse (stmt));
597 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
599 gsi_replace (gsi, repl, false);
600 fold_stmt (gsi);
603 /* Return true if VAR is a VAR_DECL or a component thereof. */
605 static bool
606 var_decl_component_p (tree var)
608 tree inner = var;
609 while (handled_component_p (inner))
610 inner = TREE_OPERAND (inner, 0);
611 return SSA_VAR_P (inner);
614 /* Fold function call to builtin mem{{,p}cpy,move}. Return
615 false if no simplification can be made.
616 If ENDP is 0, return DEST (like memcpy).
617 If ENDP is 1, return DEST+LEN (like mempcpy).
618 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
619 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
620 (memmove). */
622 static bool
623 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
624 tree dest, tree src, int endp)
626 gimple *stmt = gsi_stmt (*gsi);
627 tree lhs = gimple_call_lhs (stmt);
628 tree len = gimple_call_arg (stmt, 2);
629 tree destvar, srcvar;
630 location_t loc = gimple_location (stmt);
632 /* If the LEN parameter is zero, return DEST. */
633 if (integer_zerop (len))
635 gimple *repl;
636 if (gimple_call_lhs (stmt))
637 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
638 else
639 repl = gimple_build_nop ();
640 tree vdef = gimple_vdef (stmt);
641 if (vdef && TREE_CODE (vdef) == SSA_NAME)
643 unlink_stmt_vdef (stmt);
644 release_ssa_name (vdef);
646 gsi_replace (gsi, repl, false);
647 return true;
650 /* If SRC and DEST are the same (and not volatile), return
651 DEST{,+LEN,+LEN-1}. */
652 if (operand_equal_p (src, dest, 0))
654 unlink_stmt_vdef (stmt);
655 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
656 release_ssa_name (gimple_vdef (stmt));
657 if (!lhs)
659 gsi_replace (gsi, gimple_build_nop (), false);
660 return true;
662 goto done;
664 else
666 tree srctype, desttype;
667 unsigned int src_align, dest_align;
668 tree off0;
670 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
671 pointers as wide integer) and also may result in huge function
672 size because of inlined bounds copy. Thus don't inline for
673 functions we want to instrument. */
674 if (flag_check_pointer_bounds
675 && chkp_instrumentable_p (cfun->decl)
676 /* Even if data may contain pointers we can inline if copy
677 less than a pointer size. */
678 && (!tree_fits_uhwi_p (len)
679 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
680 return false;
682 /* Build accesses at offset zero with a ref-all character type. */
683 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
684 ptr_mode, true), 0);
686 /* If we can perform the copy efficiently with first doing all loads
687 and then all stores inline it that way. Currently efficiently
688 means that we can load all the memory into a single integer
689 register which is what MOVE_MAX gives us. */
690 src_align = get_pointer_alignment (src);
691 dest_align = get_pointer_alignment (dest);
692 if (tree_fits_uhwi_p (len)
693 && compare_tree_int (len, MOVE_MAX) <= 0
694 /* ??? Don't transform copies from strings with known length this
695 confuses the tree-ssa-strlen.c. This doesn't handle
696 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
697 reason. */
698 && !c_strlen (src, 2))
700 unsigned ilen = tree_to_uhwi (len);
701 if (exact_log2 (ilen) != -1)
703 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
704 if (type
705 && TYPE_MODE (type) != BLKmode
706 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
707 == ilen * 8)
708 /* If the destination pointer is not aligned we must be able
709 to emit an unaligned store. */
710 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
711 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
712 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
713 != CODE_FOR_nothing)))
715 tree srctype = type;
716 tree desttype = type;
717 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
718 srctype = build_aligned_type (type, src_align);
719 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
720 tree tem = fold_const_aggregate_ref (srcmem);
721 if (tem)
722 srcmem = tem;
723 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
724 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
725 src_align)
726 && (optab_handler (movmisalign_optab,
727 TYPE_MODE (type))
728 == CODE_FOR_nothing))
729 srcmem = NULL_TREE;
730 if (srcmem)
732 gimple *new_stmt;
733 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
735 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
736 if (gimple_in_ssa_p (cfun))
737 srcmem = make_ssa_name (TREE_TYPE (srcmem),
738 new_stmt);
739 else
740 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
741 gimple_assign_set_lhs (new_stmt, srcmem);
742 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
743 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
745 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
746 desttype = build_aligned_type (type, dest_align);
747 new_stmt
748 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
749 dest, off0),
750 srcmem);
751 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
752 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
753 if (gimple_vdef (new_stmt)
754 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
755 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
756 if (!lhs)
758 gsi_replace (gsi, new_stmt, false);
759 return true;
761 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
762 goto done;
768 if (endp == 3)
770 /* Both DEST and SRC must be pointer types.
771 ??? This is what old code did. Is the testing for pointer types
772 really mandatory?
774 If either SRC is readonly or length is 1, we can use memcpy. */
775 if (!dest_align || !src_align)
776 return false;
777 if (readonly_data_expr (src)
778 || (tree_fits_uhwi_p (len)
779 && (MIN (src_align, dest_align) / BITS_PER_UNIT
780 >= tree_to_uhwi (len))))
782 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
783 if (!fn)
784 return false;
785 gimple_call_set_fndecl (stmt, fn);
786 gimple_call_set_arg (stmt, 0, dest);
787 gimple_call_set_arg (stmt, 1, src);
788 fold_stmt (gsi);
789 return true;
792 /* If *src and *dest can't overlap, optimize into memcpy as well. */
793 if (TREE_CODE (src) == ADDR_EXPR
794 && TREE_CODE (dest) == ADDR_EXPR)
796 tree src_base, dest_base, fn;
797 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
798 HOST_WIDE_INT maxsize;
800 srcvar = TREE_OPERAND (src, 0);
801 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
802 if (src_base == NULL)
803 src_base = srcvar;
804 destvar = TREE_OPERAND (dest, 0);
805 dest_base = get_addr_base_and_unit_offset (destvar,
806 &dest_offset);
807 if (dest_base == NULL)
808 dest_base = destvar;
809 if (tree_fits_uhwi_p (len))
810 maxsize = tree_to_uhwi (len);
811 else
812 maxsize = -1;
813 if (SSA_VAR_P (src_base)
814 && SSA_VAR_P (dest_base))
816 if (operand_equal_p (src_base, dest_base, 0)
817 && ranges_overlap_p (src_offset, maxsize,
818 dest_offset, maxsize))
819 return false;
821 else if (TREE_CODE (src_base) == MEM_REF
822 && TREE_CODE (dest_base) == MEM_REF)
824 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
825 TREE_OPERAND (dest_base, 0), 0))
826 return false;
827 offset_int off = mem_ref_offset (src_base) + src_offset;
828 if (!wi::fits_shwi_p (off))
829 return false;
830 src_offset = off.to_shwi ();
832 off = mem_ref_offset (dest_base) + dest_offset;
833 if (!wi::fits_shwi_p (off))
834 return false;
835 dest_offset = off.to_shwi ();
836 if (ranges_overlap_p (src_offset, maxsize,
837 dest_offset, maxsize))
838 return false;
840 else
841 return false;
843 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
844 if (!fn)
845 return false;
846 gimple_call_set_fndecl (stmt, fn);
847 gimple_call_set_arg (stmt, 0, dest);
848 gimple_call_set_arg (stmt, 1, src);
849 fold_stmt (gsi);
850 return true;
853 /* If the destination and source do not alias optimize into
854 memcpy as well. */
855 if ((is_gimple_min_invariant (dest)
856 || TREE_CODE (dest) == SSA_NAME)
857 && (is_gimple_min_invariant (src)
858 || TREE_CODE (src) == SSA_NAME))
860 ao_ref destr, srcr;
861 ao_ref_init_from_ptr_and_size (&destr, dest, len);
862 ao_ref_init_from_ptr_and_size (&srcr, src, len);
863 if (!refs_may_alias_p_1 (&destr, &srcr, false))
865 tree fn;
866 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
867 if (!fn)
868 return false;
869 gimple_call_set_fndecl (stmt, fn);
870 gimple_call_set_arg (stmt, 0, dest);
871 gimple_call_set_arg (stmt, 1, src);
872 fold_stmt (gsi);
873 return true;
877 return false;
880 if (!tree_fits_shwi_p (len))
881 return false;
882 /* FIXME:
883 This logic lose for arguments like (type *)malloc (sizeof (type)),
884 since we strip the casts of up to VOID return value from malloc.
885 Perhaps we ought to inherit type from non-VOID argument here? */
886 STRIP_NOPS (src);
887 STRIP_NOPS (dest);
888 if (!POINTER_TYPE_P (TREE_TYPE (src))
889 || !POINTER_TYPE_P (TREE_TYPE (dest)))
890 return false;
891 /* In the following try to find a type that is most natural to be
892 used for the memcpy source and destination and that allows
893 the most optimization when memcpy is turned into a plain assignment
894 using that type. In theory we could always use a char[len] type
895 but that only gains us that the destination and source possibly
896 no longer will have their address taken. */
897 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
898 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
900 tree tem = TREE_OPERAND (src, 0);
901 STRIP_NOPS (tem);
902 if (tem != TREE_OPERAND (src, 0))
903 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
905 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
907 tree tem = TREE_OPERAND (dest, 0);
908 STRIP_NOPS (tem);
909 if (tem != TREE_OPERAND (dest, 0))
910 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
912 srctype = TREE_TYPE (TREE_TYPE (src));
913 if (TREE_CODE (srctype) == ARRAY_TYPE
914 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
916 srctype = TREE_TYPE (srctype);
917 STRIP_NOPS (src);
918 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
920 desttype = TREE_TYPE (TREE_TYPE (dest));
921 if (TREE_CODE (desttype) == ARRAY_TYPE
922 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
924 desttype = TREE_TYPE (desttype);
925 STRIP_NOPS (dest);
926 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
928 if (TREE_ADDRESSABLE (srctype)
929 || TREE_ADDRESSABLE (desttype))
930 return false;
932 /* Make sure we are not copying using a floating-point mode or
933 a type whose size possibly does not match its precision. */
934 if (FLOAT_MODE_P (TYPE_MODE (desttype))
935 || TREE_CODE (desttype) == BOOLEAN_TYPE
936 || TREE_CODE (desttype) == ENUMERAL_TYPE)
937 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
938 if (FLOAT_MODE_P (TYPE_MODE (srctype))
939 || TREE_CODE (srctype) == BOOLEAN_TYPE
940 || TREE_CODE (srctype) == ENUMERAL_TYPE)
941 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
942 if (!srctype)
943 srctype = desttype;
944 if (!desttype)
945 desttype = srctype;
946 if (!srctype)
947 return false;
949 src_align = get_pointer_alignment (src);
950 dest_align = get_pointer_alignment (dest);
951 if (dest_align < TYPE_ALIGN (desttype)
952 || src_align < TYPE_ALIGN (srctype))
953 return false;
955 destvar = dest;
956 STRIP_NOPS (destvar);
957 if (TREE_CODE (destvar) == ADDR_EXPR
958 && var_decl_component_p (TREE_OPERAND (destvar, 0))
959 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
960 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
961 else
962 destvar = NULL_TREE;
964 srcvar = src;
965 STRIP_NOPS (srcvar);
966 if (TREE_CODE (srcvar) == ADDR_EXPR
967 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
968 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
970 if (!destvar
971 || src_align >= TYPE_ALIGN (desttype))
972 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
973 srcvar, off0);
974 else if (!STRICT_ALIGNMENT)
976 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
977 src_align);
978 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
980 else
981 srcvar = NULL_TREE;
983 else
984 srcvar = NULL_TREE;
986 if (srcvar == NULL_TREE && destvar == NULL_TREE)
987 return false;
989 if (srcvar == NULL_TREE)
991 STRIP_NOPS (src);
992 if (src_align >= TYPE_ALIGN (desttype))
993 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
994 else
996 if (STRICT_ALIGNMENT)
997 return false;
998 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
999 src_align);
1000 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1003 else if (destvar == NULL_TREE)
1005 STRIP_NOPS (dest);
1006 if (dest_align >= TYPE_ALIGN (srctype))
1007 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1008 else
1010 if (STRICT_ALIGNMENT)
1011 return false;
1012 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1013 dest_align);
1014 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1018 gimple *new_stmt;
1019 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1021 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1022 if (gimple_in_ssa_p (cfun))
1023 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1024 else
1025 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1026 gimple_assign_set_lhs (new_stmt, srcvar);
1027 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1028 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1030 new_stmt = gimple_build_assign (destvar, srcvar);
1031 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1032 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1033 if (gimple_vdef (new_stmt)
1034 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1035 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1036 if (!lhs)
1038 gsi_replace (gsi, new_stmt, false);
1039 return true;
1041 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1044 done:
1045 gimple_seq stmts = NULL;
1046 if (endp == 0 || endp == 3)
1047 len = NULL_TREE;
1048 else if (endp == 2)
1049 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1050 ssize_int (1));
1051 if (endp == 2 || endp == 1)
1053 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1054 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1055 TREE_TYPE (dest), dest, len);
1058 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1059 gimple *repl = gimple_build_assign (lhs, dest);
1060 gsi_replace (gsi, repl, false);
1061 return true;
1064 /* Fold function call to builtin memset or bzero at *GSI setting the
1065 memory of size LEN to VAL. Return whether a simplification was made. */
1067 static bool
1068 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1070 gimple *stmt = gsi_stmt (*gsi);
1071 tree etype;
1072 unsigned HOST_WIDE_INT length, cval;
1074 /* If the LEN parameter is zero, return DEST. */
1075 if (integer_zerop (len))
1077 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1078 return true;
1081 if (! tree_fits_uhwi_p (len))
1082 return false;
1084 if (TREE_CODE (c) != INTEGER_CST)
1085 return false;
1087 tree dest = gimple_call_arg (stmt, 0);
1088 tree var = dest;
1089 if (TREE_CODE (var) != ADDR_EXPR)
1090 return false;
1092 var = TREE_OPERAND (var, 0);
1093 if (TREE_THIS_VOLATILE (var))
1094 return false;
1096 etype = TREE_TYPE (var);
1097 if (TREE_CODE (etype) == ARRAY_TYPE)
1098 etype = TREE_TYPE (etype);
1100 if (!INTEGRAL_TYPE_P (etype)
1101 && !POINTER_TYPE_P (etype))
1102 return NULL_TREE;
1104 if (! var_decl_component_p (var))
1105 return NULL_TREE;
1107 length = tree_to_uhwi (len);
1108 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1109 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1110 return NULL_TREE;
1112 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1113 return NULL_TREE;
1115 if (integer_zerop (c))
1116 cval = 0;
1117 else
1119 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1120 return NULL_TREE;
1122 cval = TREE_INT_CST_LOW (c);
1123 cval &= 0xff;
1124 cval |= cval << 8;
1125 cval |= cval << 16;
1126 cval |= (cval << 31) << 1;
1129 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1130 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1131 gimple_set_vuse (store, gimple_vuse (stmt));
1132 tree vdef = gimple_vdef (stmt);
1133 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1135 gimple_set_vdef (store, gimple_vdef (stmt));
1136 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1138 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1139 if (gimple_call_lhs (stmt))
1141 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1142 gsi_replace (gsi, asgn, false);
1144 else
1146 gimple_stmt_iterator gsi2 = *gsi;
1147 gsi_prev (gsi);
1148 gsi_remove (&gsi2, true);
1151 return true;
1155 /* Return the string length, maximum string length or maximum value of
1156 ARG in LENGTH.
1157 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1158 is not NULL and, for TYPE == 0, its value is not equal to the length
1159 we determine or if we are unable to determine the length or value,
1160 return false. VISITED is a bitmap of visited variables.
1161 TYPE is 0 if string length should be returned, 1 for maximum string
1162 length and 2 for maximum value ARG can have. */
1164 static bool
1165 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1167 tree var, val;
1168 gimple *def_stmt;
1170 if (TREE_CODE (arg) != SSA_NAME)
1172 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1173 if (TREE_CODE (arg) == ADDR_EXPR
1174 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1175 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1177 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1178 if (TREE_CODE (aop0) == INDIRECT_REF
1179 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1180 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1181 length, visited, type);
1184 if (type == 2)
1186 val = arg;
1187 if (TREE_CODE (val) != INTEGER_CST
1188 || tree_int_cst_sgn (val) < 0)
1189 return false;
1191 else
1192 val = c_strlen (arg, 1);
1193 if (!val)
1194 return false;
1196 if (*length)
1198 if (type > 0)
1200 if (TREE_CODE (*length) != INTEGER_CST
1201 || TREE_CODE (val) != INTEGER_CST)
1202 return false;
1204 if (tree_int_cst_lt (*length, val))
1205 *length = val;
1206 return true;
1208 else if (simple_cst_equal (val, *length) != 1)
1209 return false;
1212 *length = val;
1213 return true;
1216 /* If ARG is registered for SSA update we cannot look at its defining
1217 statement. */
1218 if (name_registered_for_update_p (arg))
1219 return false;
1221 /* If we were already here, break the infinite cycle. */
1222 if (!*visited)
1223 *visited = BITMAP_ALLOC (NULL);
1224 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1225 return true;
1227 var = arg;
1228 def_stmt = SSA_NAME_DEF_STMT (var);
1230 switch (gimple_code (def_stmt))
1232 case GIMPLE_ASSIGN:
1233 /* The RHS of the statement defining VAR must either have a
1234 constant length or come from another SSA_NAME with a constant
1235 length. */
1236 if (gimple_assign_single_p (def_stmt)
1237 || gimple_assign_unary_nop_p (def_stmt))
1239 tree rhs = gimple_assign_rhs1 (def_stmt);
1240 return get_maxval_strlen (rhs, length, visited, type);
1242 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1244 tree op2 = gimple_assign_rhs2 (def_stmt);
1245 tree op3 = gimple_assign_rhs3 (def_stmt);
1246 return get_maxval_strlen (op2, length, visited, type)
1247 && get_maxval_strlen (op3, length, visited, type);
1249 return false;
1251 case GIMPLE_PHI:
1253 /* All the arguments of the PHI node must have the same constant
1254 length. */
1255 unsigned i;
1257 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1259 tree arg = gimple_phi_arg (def_stmt, i)->def;
1261 /* If this PHI has itself as an argument, we cannot
1262 determine the string length of this argument. However,
1263 if we can find a constant string length for the other
1264 PHI args then we can still be sure that this is a
1265 constant string length. So be optimistic and just
1266 continue with the next argument. */
1267 if (arg == gimple_phi_result (def_stmt))
1268 continue;
1270 if (!get_maxval_strlen (arg, length, visited, type))
1271 return false;
1274 return true;
1276 default:
1277 return false;
1281 tree
1282 get_maxval_strlen (tree arg, int type)
1284 bitmap visited = NULL;
1285 tree len = NULL_TREE;
1286 if (!get_maxval_strlen (arg, &len, &visited, type))
1287 len = NULL_TREE;
1288 if (visited)
1289 BITMAP_FREE (visited);
1291 return len;
1295 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1296 If LEN is not NULL, it represents the length of the string to be
1297 copied. Return NULL_TREE if no simplification can be made. */
1299 static bool
1300 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1301 tree dest, tree src)
1303 location_t loc = gimple_location (gsi_stmt (*gsi));
1304 tree fn;
1306 /* If SRC and DEST are the same (and not volatile), return DEST. */
1307 if (operand_equal_p (src, dest, 0))
1309 replace_call_with_value (gsi, dest);
1310 return true;
1313 if (optimize_function_for_size_p (cfun))
1314 return false;
1316 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1317 if (!fn)
1318 return false;
1320 tree len = get_maxval_strlen (src, 0);
1321 if (!len)
1322 return false;
1324 len = fold_convert_loc (loc, size_type_node, len);
1325 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1326 len = force_gimple_operand_gsi (gsi, len, true,
1327 NULL_TREE, true, GSI_SAME_STMT);
1328 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1329 replace_call_with_call_and_fold (gsi, repl);
1330 return true;
1333 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1334 If SLEN is not NULL, it represents the length of the source string.
1335 Return NULL_TREE if no simplification can be made. */
1337 static bool
1338 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1339 tree dest, tree src, tree len)
1341 location_t loc = gimple_location (gsi_stmt (*gsi));
1342 tree fn;
1344 /* If the LEN parameter is zero, return DEST. */
1345 if (integer_zerop (len))
1347 replace_call_with_value (gsi, dest);
1348 return true;
1351 /* We can't compare slen with len as constants below if len is not a
1352 constant. */
1353 if (TREE_CODE (len) != INTEGER_CST)
1354 return false;
1356 /* Now, we must be passed a constant src ptr parameter. */
1357 tree slen = get_maxval_strlen (src, 0);
1358 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1359 return false;
1361 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1363 /* We do not support simplification of this case, though we do
1364 support it when expanding trees into RTL. */
1365 /* FIXME: generate a call to __builtin_memset. */
1366 if (tree_int_cst_lt (slen, len))
1367 return false;
1369 /* OK transform into builtin memcpy. */
1370 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1371 if (!fn)
1372 return false;
1374 len = fold_convert_loc (loc, size_type_node, len);
1375 len = force_gimple_operand_gsi (gsi, len, true,
1376 NULL_TREE, true, GSI_SAME_STMT);
1377 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1378 replace_call_with_call_and_fold (gsi, repl);
1379 return true;
1382 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1383 to the call.
1385 Return NULL_TREE if no simplification was possible, otherwise return the
1386 simplified form of the call as a tree.
1388 The simplified form may be a constant or other expression which
1389 computes the same value, but in a more efficient manner (including
1390 calls to other builtin functions).
1392 The call may contain arguments which need to be evaluated, but
1393 which are not useful to determine the result of the call. In
1394 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1395 COMPOUND_EXPR will be an argument which must be evaluated.
1396 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1397 COMPOUND_EXPR in the chain will contain the tree for the simplified
1398 form of the builtin function call. */
1400 static bool
1401 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1403 gimple *stmt = gsi_stmt (*gsi);
1404 location_t loc = gimple_location (stmt);
1406 const char *p = c_getstr (src);
1408 /* If the string length is zero, return the dst parameter. */
1409 if (p && *p == '\0')
1411 replace_call_with_value (gsi, dst);
1412 return true;
1415 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1416 return false;
1418 /* See if we can store by pieces into (dst + strlen(dst)). */
1419 tree newdst;
1420 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1421 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1423 if (!strlen_fn || !memcpy_fn)
1424 return false;
1426 /* If the length of the source string isn't computable don't
1427 split strcat into strlen and memcpy. */
1428 tree len = get_maxval_strlen (src, 0);
1429 if (! len)
1430 return false;
1432 /* Create strlen (dst). */
1433 gimple_seq stmts = NULL, stmts2;
1434 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1435 gimple_set_location (repl, loc);
1436 if (gimple_in_ssa_p (cfun))
1437 newdst = make_ssa_name (size_type_node);
1438 else
1439 newdst = create_tmp_reg (size_type_node);
1440 gimple_call_set_lhs (repl, newdst);
1441 gimple_seq_add_stmt_without_update (&stmts, repl);
1443 /* Create (dst p+ strlen (dst)). */
1444 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1445 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1446 gimple_seq_add_seq_without_update (&stmts, stmts2);
1448 len = fold_convert_loc (loc, size_type_node, len);
1449 len = size_binop_loc (loc, PLUS_EXPR, len,
1450 build_int_cst (size_type_node, 1));
1451 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1452 gimple_seq_add_seq_without_update (&stmts, stmts2);
1454 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1455 gimple_seq_add_stmt_without_update (&stmts, repl);
1456 if (gimple_call_lhs (stmt))
1458 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1459 gimple_seq_add_stmt_without_update (&stmts, repl);
1460 gsi_replace_with_seq_vops (gsi, stmts);
1461 /* gsi now points at the assignment to the lhs, get a
1462 stmt iterator to the memcpy call.
1463 ??? We can't use gsi_for_stmt as that doesn't work when the
1464 CFG isn't built yet. */
1465 gimple_stmt_iterator gsi2 = *gsi;
1466 gsi_prev (&gsi2);
1467 fold_stmt (&gsi2);
1469 else
1471 gsi_replace_with_seq_vops (gsi, stmts);
1472 fold_stmt (gsi);
1474 return true;
1477 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1478 are the arguments to the call. */
1480 static bool
1481 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1483 gimple *stmt = gsi_stmt (*gsi);
1484 tree dest = gimple_call_arg (stmt, 0);
1485 tree src = gimple_call_arg (stmt, 1);
1486 tree size = gimple_call_arg (stmt, 2);
1487 tree fn;
1488 const char *p;
1491 p = c_getstr (src);
1492 /* If the SRC parameter is "", return DEST. */
1493 if (p && *p == '\0')
1495 replace_call_with_value (gsi, dest);
1496 return true;
1499 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1500 return false;
1502 /* If __builtin_strcat_chk is used, assume strcat is available. */
1503 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1504 if (!fn)
1505 return false;
1507 gimple *repl = gimple_build_call (fn, 2, dest, src);
1508 replace_call_with_call_and_fold (gsi, repl);
1509 return true;
1512 /* Simplify a call to the strncat builtin. */
1514 static bool
1515 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1517 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1518 tree dst = gimple_call_arg (stmt, 0);
1519 tree src = gimple_call_arg (stmt, 1);
1520 tree len = gimple_call_arg (stmt, 2);
1522 const char *p = c_getstr (src);
1524 /* If the requested length is zero, or the src parameter string
1525 length is zero, return the dst parameter. */
1526 if (integer_zerop (len) || (p && *p == '\0'))
1528 replace_call_with_value (gsi, dst);
1529 return true;
1532 /* If the requested len is greater than or equal to the string
1533 length, call strcat. */
1534 if (TREE_CODE (len) == INTEGER_CST && p
1535 && compare_tree_int (len, strlen (p)) >= 0)
1537 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1539 /* If the replacement _DECL isn't initialized, don't do the
1540 transformation. */
1541 if (!fn)
1542 return false;
1544 gcall *repl = gimple_build_call (fn, 2, dst, src);
1545 replace_call_with_call_and_fold (gsi, repl);
1546 return true;
1549 return false;
1552 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1553 LEN, and SIZE. */
1555 static bool
1556 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1558 gimple *stmt = gsi_stmt (*gsi);
1559 tree dest = gimple_call_arg (stmt, 0);
1560 tree src = gimple_call_arg (stmt, 1);
1561 tree len = gimple_call_arg (stmt, 2);
1562 tree size = gimple_call_arg (stmt, 3);
1563 tree fn;
1564 const char *p;
1566 p = c_getstr (src);
1567 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1568 if ((p && *p == '\0')
1569 || integer_zerop (len))
1571 replace_call_with_value (gsi, dest);
1572 return true;
1575 if (! tree_fits_uhwi_p (size))
1576 return false;
1578 if (! integer_all_onesp (size))
1580 tree src_len = c_strlen (src, 1);
1581 if (src_len
1582 && tree_fits_uhwi_p (src_len)
1583 && tree_fits_uhwi_p (len)
1584 && ! tree_int_cst_lt (len, src_len))
1586 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1587 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1588 if (!fn)
1589 return false;
1591 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1592 replace_call_with_call_and_fold (gsi, repl);
1593 return true;
1595 return false;
1598 /* If __builtin_strncat_chk is used, assume strncat is available. */
1599 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1600 if (!fn)
1601 return false;
1603 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1604 replace_call_with_call_and_fold (gsi, repl);
1605 return true;
1608 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1609 to the call. IGNORE is true if the value returned
1610 by the builtin will be ignored. UNLOCKED is true is true if this
1611 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1612 the known length of the string. Return NULL_TREE if no simplification
1613 was possible. */
1615 static bool
1616 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1617 tree arg0, tree arg1,
1618 bool unlocked)
1620 gimple *stmt = gsi_stmt (*gsi);
1622 /* If we're using an unlocked function, assume the other unlocked
1623 functions exist explicitly. */
1624 tree const fn_fputc = (unlocked
1625 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1626 : builtin_decl_implicit (BUILT_IN_FPUTC));
1627 tree const fn_fwrite = (unlocked
1628 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1629 : builtin_decl_implicit (BUILT_IN_FWRITE));
1631 /* If the return value is used, don't do the transformation. */
1632 if (gimple_call_lhs (stmt))
1633 return false;
1635 /* Get the length of the string passed to fputs. If the length
1636 can't be determined, punt. */
1637 tree len = get_maxval_strlen (arg0, 0);
1638 if (!len
1639 || TREE_CODE (len) != INTEGER_CST)
1640 return false;
1642 switch (compare_tree_int (len, 1))
1644 case -1: /* length is 0, delete the call entirely . */
1645 replace_call_with_value (gsi, integer_zero_node);
1646 return true;
1648 case 0: /* length is 1, call fputc. */
1650 const char *p = c_getstr (arg0);
1651 if (p != NULL)
1653 if (!fn_fputc)
1654 return false;
1656 gimple *repl = gimple_build_call (fn_fputc, 2,
1657 build_int_cst
1658 (integer_type_node, p[0]), arg1);
1659 replace_call_with_call_and_fold (gsi, repl);
1660 return true;
1663 /* FALLTHROUGH */
1664 case 1: /* length is greater than 1, call fwrite. */
1666 /* If optimizing for size keep fputs. */
1667 if (optimize_function_for_size_p (cfun))
1668 return false;
1669 /* New argument list transforming fputs(string, stream) to
1670 fwrite(string, 1, len, stream). */
1671 if (!fn_fwrite)
1672 return false;
1674 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1675 size_one_node, len, arg1);
1676 replace_call_with_call_and_fold (gsi, repl);
1677 return true;
1679 default:
1680 gcc_unreachable ();
1682 return false;
1685 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1686 DEST, SRC, LEN, and SIZE are the arguments to the call.
1687 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1688 code of the builtin. If MAXLEN is not NULL, it is maximum length
1689 passed as third argument. */
1691 static bool
1692 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1693 tree dest, tree src, tree len, tree size,
1694 enum built_in_function fcode)
1696 gimple *stmt = gsi_stmt (*gsi);
1697 location_t loc = gimple_location (stmt);
1698 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1699 tree fn;
1701 /* If SRC and DEST are the same (and not volatile), return DEST
1702 (resp. DEST+LEN for __mempcpy_chk). */
1703 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1705 if (fcode != BUILT_IN_MEMPCPY_CHK)
1707 replace_call_with_value (gsi, dest);
1708 return true;
1710 else
1712 gimple_seq stmts = NULL;
1713 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1714 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1715 TREE_TYPE (dest), dest, len);
1716 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1717 replace_call_with_value (gsi, temp);
1718 return true;
1722 if (! tree_fits_uhwi_p (size))
1723 return false;
1725 tree maxlen = get_maxval_strlen (len, 2);
1726 if (! integer_all_onesp (size))
1728 if (! tree_fits_uhwi_p (len))
1730 /* If LEN is not constant, try MAXLEN too.
1731 For MAXLEN only allow optimizing into non-_ocs function
1732 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1733 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1735 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1737 /* (void) __mempcpy_chk () can be optimized into
1738 (void) __memcpy_chk (). */
1739 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1740 if (!fn)
1741 return false;
1743 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1744 replace_call_with_call_and_fold (gsi, repl);
1745 return true;
1747 return false;
1750 else
1751 maxlen = len;
1753 if (tree_int_cst_lt (size, maxlen))
1754 return false;
1757 fn = NULL_TREE;
1758 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1759 mem{cpy,pcpy,move,set} is available. */
1760 switch (fcode)
1762 case BUILT_IN_MEMCPY_CHK:
1763 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1764 break;
1765 case BUILT_IN_MEMPCPY_CHK:
1766 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1767 break;
1768 case BUILT_IN_MEMMOVE_CHK:
1769 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1770 break;
1771 case BUILT_IN_MEMSET_CHK:
1772 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1773 break;
1774 default:
1775 break;
1778 if (!fn)
1779 return false;
1781 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1782 replace_call_with_call_and_fold (gsi, repl);
1783 return true;
1786 /* Fold a call to the __st[rp]cpy_chk builtin.
1787 DEST, SRC, and SIZE are the arguments to the call.
1788 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1789 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1790 strings passed as second argument. */
1792 static bool
1793 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1794 tree dest,
1795 tree src, tree size,
1796 enum built_in_function fcode)
1798 gimple *stmt = gsi_stmt (*gsi);
1799 location_t loc = gimple_location (stmt);
1800 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1801 tree len, fn;
1803 /* If SRC and DEST are the same (and not volatile), return DEST. */
1804 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1806 replace_call_with_value (gsi, dest);
1807 return true;
1810 if (! tree_fits_uhwi_p (size))
1811 return false;
1813 tree maxlen = get_maxval_strlen (src, 1);
1814 if (! integer_all_onesp (size))
1816 len = c_strlen (src, 1);
1817 if (! len || ! tree_fits_uhwi_p (len))
1819 /* If LEN is not constant, try MAXLEN too.
1820 For MAXLEN only allow optimizing into non-_ocs function
1821 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1822 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1824 if (fcode == BUILT_IN_STPCPY_CHK)
1826 if (! ignore)
1827 return false;
1829 /* If return value of __stpcpy_chk is ignored,
1830 optimize into __strcpy_chk. */
1831 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1832 if (!fn)
1833 return false;
1835 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1836 replace_call_with_call_and_fold (gsi, repl);
1837 return true;
1840 if (! len || TREE_SIDE_EFFECTS (len))
1841 return false;
1843 /* If c_strlen returned something, but not a constant,
1844 transform __strcpy_chk into __memcpy_chk. */
1845 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1846 if (!fn)
1847 return false;
1849 gimple_seq stmts = NULL;
1850 len = gimple_convert (&stmts, loc, size_type_node, len);
1851 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1852 build_int_cst (size_type_node, 1));
1853 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1854 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1855 replace_call_with_call_and_fold (gsi, repl);
1856 return true;
1859 else
1860 maxlen = len;
1862 if (! tree_int_cst_lt (maxlen, size))
1863 return false;
1866 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1867 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1868 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1869 if (!fn)
1870 return false;
1872 gimple *repl = gimple_build_call (fn, 2, dest, src);
1873 replace_call_with_call_and_fold (gsi, repl);
1874 return true;
1877 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1878 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1879 length passed as third argument. IGNORE is true if return value can be
1880 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1882 static bool
1883 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1884 tree dest, tree src,
1885 tree len, tree size,
1886 enum built_in_function fcode)
1888 gimple *stmt = gsi_stmt (*gsi);
1889 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1890 tree fn;
1892 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1894 /* If return value of __stpncpy_chk is ignored,
1895 optimize into __strncpy_chk. */
1896 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1897 if (fn)
1899 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1900 replace_call_with_call_and_fold (gsi, repl);
1901 return true;
1905 if (! tree_fits_uhwi_p (size))
1906 return false;
1908 tree maxlen = get_maxval_strlen (len, 2);
1909 if (! integer_all_onesp (size))
1911 if (! tree_fits_uhwi_p (len))
1913 /* If LEN is not constant, try MAXLEN too.
1914 For MAXLEN only allow optimizing into non-_ocs function
1915 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1916 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1917 return false;
1919 else
1920 maxlen = len;
1922 if (tree_int_cst_lt (size, maxlen))
1923 return false;
1926 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1927 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1928 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1929 if (!fn)
1930 return false;
1932 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1933 replace_call_with_call_and_fold (gsi, repl);
1934 return true;
1937 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1938 Return NULL_TREE if no simplification can be made. */
1940 static bool
1941 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1943 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1944 location_t loc = gimple_location (stmt);
1945 tree dest = gimple_call_arg (stmt, 0);
1946 tree src = gimple_call_arg (stmt, 1);
1947 tree fn, len, lenp1;
1949 /* If the result is unused, replace stpcpy with strcpy. */
1950 if (gimple_call_lhs (stmt) == NULL_TREE)
1952 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1953 if (!fn)
1954 return false;
1955 gimple_call_set_fndecl (stmt, fn);
1956 fold_stmt (gsi);
1957 return true;
1960 len = c_strlen (src, 1);
1961 if (!len
1962 || TREE_CODE (len) != INTEGER_CST)
1963 return false;
1965 if (optimize_function_for_size_p (cfun)
1966 /* If length is zero it's small enough. */
1967 && !integer_zerop (len))
1968 return false;
1970 /* If the source has a known length replace stpcpy with memcpy. */
1971 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1972 if (!fn)
1973 return false;
1975 gimple_seq stmts = NULL;
1976 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1977 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1978 tem, build_int_cst (size_type_node, 1));
1979 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1980 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1981 gimple_set_vuse (repl, gimple_vuse (stmt));
1982 gimple_set_vdef (repl, gimple_vdef (stmt));
1983 if (gimple_vdef (repl)
1984 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1985 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1986 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1987 /* Replace the result with dest + len. */
1988 stmts = NULL;
1989 tem = gimple_convert (&stmts, loc, sizetype, len);
1990 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1991 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1992 POINTER_PLUS_EXPR, dest, tem);
1993 gsi_replace (gsi, ret, false);
1994 /* Finally fold the memcpy call. */
1995 gimple_stmt_iterator gsi2 = *gsi;
1996 gsi_prev (&gsi2);
1997 fold_stmt (&gsi2);
1998 return true;
2001 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2002 NULL_TREE if a normal call should be emitted rather than expanding
2003 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2004 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2005 passed as second argument. */
2007 static bool
2008 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2009 enum built_in_function fcode)
2011 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2012 tree dest, size, len, fn, fmt, flag;
2013 const char *fmt_str;
2015 /* Verify the required arguments in the original call. */
2016 if (gimple_call_num_args (stmt) < 5)
2017 return false;
2019 dest = gimple_call_arg (stmt, 0);
2020 len = gimple_call_arg (stmt, 1);
2021 flag = gimple_call_arg (stmt, 2);
2022 size = gimple_call_arg (stmt, 3);
2023 fmt = gimple_call_arg (stmt, 4);
2025 if (! tree_fits_uhwi_p (size))
2026 return false;
2028 if (! integer_all_onesp (size))
2030 tree maxlen = get_maxval_strlen (len, 2);
2031 if (! tree_fits_uhwi_p (len))
2033 /* If LEN is not constant, try MAXLEN too.
2034 For MAXLEN only allow optimizing into non-_ocs function
2035 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2036 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2037 return false;
2039 else
2040 maxlen = len;
2042 if (tree_int_cst_lt (size, maxlen))
2043 return false;
2046 if (!init_target_chars ())
2047 return false;
2049 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2050 or if format doesn't contain % chars or is "%s". */
2051 if (! integer_zerop (flag))
2053 fmt_str = c_getstr (fmt);
2054 if (fmt_str == NULL)
2055 return false;
2056 if (strchr (fmt_str, target_percent) != NULL
2057 && strcmp (fmt_str, target_percent_s))
2058 return false;
2061 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2062 available. */
2063 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2064 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2065 if (!fn)
2066 return false;
2068 /* Replace the called function and the first 5 argument by 3 retaining
2069 trailing varargs. */
2070 gimple_call_set_fndecl (stmt, fn);
2071 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2072 gimple_call_set_arg (stmt, 0, dest);
2073 gimple_call_set_arg (stmt, 1, len);
2074 gimple_call_set_arg (stmt, 2, fmt);
2075 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2076 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2077 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2078 fold_stmt (gsi);
2079 return true;
2082 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2083 Return NULL_TREE if a normal call should be emitted rather than
2084 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2085 or BUILT_IN_VSPRINTF_CHK. */
2087 static bool
2088 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2089 enum built_in_function fcode)
2091 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2092 tree dest, size, len, fn, fmt, flag;
2093 const char *fmt_str;
2094 unsigned nargs = gimple_call_num_args (stmt);
2096 /* Verify the required arguments in the original call. */
2097 if (nargs < 4)
2098 return false;
2099 dest = gimple_call_arg (stmt, 0);
2100 flag = gimple_call_arg (stmt, 1);
2101 size = gimple_call_arg (stmt, 2);
2102 fmt = gimple_call_arg (stmt, 3);
2104 if (! tree_fits_uhwi_p (size))
2105 return false;
2107 len = NULL_TREE;
2109 if (!init_target_chars ())
2110 return false;
2112 /* Check whether the format is a literal string constant. */
2113 fmt_str = c_getstr (fmt);
2114 if (fmt_str != NULL)
2116 /* If the format doesn't contain % args or %%, we know the size. */
2117 if (strchr (fmt_str, target_percent) == 0)
2119 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2120 len = build_int_cstu (size_type_node, strlen (fmt_str));
2122 /* If the format is "%s" and first ... argument is a string literal,
2123 we know the size too. */
2124 else if (fcode == BUILT_IN_SPRINTF_CHK
2125 && strcmp (fmt_str, target_percent_s) == 0)
2127 tree arg;
2129 if (nargs == 5)
2131 arg = gimple_call_arg (stmt, 4);
2132 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2134 len = c_strlen (arg, 1);
2135 if (! len || ! tree_fits_uhwi_p (len))
2136 len = NULL_TREE;
2142 if (! integer_all_onesp (size))
2144 if (! len || ! tree_int_cst_lt (len, size))
2145 return false;
2148 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2149 or if format doesn't contain % chars or is "%s". */
2150 if (! integer_zerop (flag))
2152 if (fmt_str == NULL)
2153 return false;
2154 if (strchr (fmt_str, target_percent) != NULL
2155 && strcmp (fmt_str, target_percent_s))
2156 return false;
2159 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2160 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2161 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2162 if (!fn)
2163 return false;
2165 /* Replace the called function and the first 4 argument by 2 retaining
2166 trailing varargs. */
2167 gimple_call_set_fndecl (stmt, fn);
2168 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2169 gimple_call_set_arg (stmt, 0, dest);
2170 gimple_call_set_arg (stmt, 1, fmt);
2171 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2172 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2173 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2174 fold_stmt (gsi);
2175 return true;
2178 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2179 ORIG may be null if this is a 2-argument call. We don't attempt to
2180 simplify calls with more than 3 arguments.
2182 Return NULL_TREE if no simplification was possible, otherwise return the
2183 simplified form of the call as a tree. If IGNORED is true, it means that
2184 the caller does not use the returned value of the function. */
2186 static bool
2187 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2189 gimple *stmt = gsi_stmt (*gsi);
2190 tree dest = gimple_call_arg (stmt, 0);
2191 tree fmt = gimple_call_arg (stmt, 1);
2192 tree orig = NULL_TREE;
2193 const char *fmt_str = NULL;
2195 /* Verify the required arguments in the original call. We deal with two
2196 types of sprintf() calls: 'sprintf (str, fmt)' and
2197 'sprintf (dest, "%s", orig)'. */
2198 if (gimple_call_num_args (stmt) > 3)
2199 return false;
2201 if (gimple_call_num_args (stmt) == 3)
2202 orig = gimple_call_arg (stmt, 2);
2204 /* Check whether the format is a literal string constant. */
2205 fmt_str = c_getstr (fmt);
2206 if (fmt_str == NULL)
2207 return false;
2209 if (!init_target_chars ())
2210 return false;
2212 /* If the format doesn't contain % args or %%, use strcpy. */
2213 if (strchr (fmt_str, target_percent) == NULL)
2215 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2217 if (!fn)
2218 return false;
2220 /* Don't optimize sprintf (buf, "abc", ptr++). */
2221 if (orig)
2222 return false;
2224 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2225 'format' is known to contain no % formats. */
2226 gimple_seq stmts = NULL;
2227 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2228 gimple_seq_add_stmt_without_update (&stmts, repl);
2229 if (gimple_call_lhs (stmt))
2231 repl = gimple_build_assign (gimple_call_lhs (stmt),
2232 build_int_cst (integer_type_node,
2233 strlen (fmt_str)));
2234 gimple_seq_add_stmt_without_update (&stmts, repl);
2235 gsi_replace_with_seq_vops (gsi, stmts);
2236 /* gsi now points at the assignment to the lhs, get a
2237 stmt iterator to the memcpy call.
2238 ??? We can't use gsi_for_stmt as that doesn't work when the
2239 CFG isn't built yet. */
2240 gimple_stmt_iterator gsi2 = *gsi;
2241 gsi_prev (&gsi2);
2242 fold_stmt (&gsi2);
2244 else
2246 gsi_replace_with_seq_vops (gsi, stmts);
2247 fold_stmt (gsi);
2249 return true;
2252 /* If the format is "%s", use strcpy if the result isn't used. */
2253 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2255 tree fn;
2256 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2258 if (!fn)
2259 return false;
2261 /* Don't crash on sprintf (str1, "%s"). */
2262 if (!orig)
2263 return false;
2265 tree orig_len = NULL_TREE;
2266 if (gimple_call_lhs (stmt))
2268 orig_len = get_maxval_strlen (orig, 0);
2269 if (!orig_len)
2270 return false;
2273 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2274 gimple_seq stmts = NULL;
2275 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2276 gimple_seq_add_stmt_without_update (&stmts, repl);
2277 if (gimple_call_lhs (stmt))
2279 if (!useless_type_conversion_p (integer_type_node,
2280 TREE_TYPE (orig_len)))
2281 orig_len = fold_convert (integer_type_node, orig_len);
2282 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2283 gimple_seq_add_stmt_without_update (&stmts, repl);
2284 gsi_replace_with_seq_vops (gsi, stmts);
2285 /* gsi now points at the assignment to the lhs, get a
2286 stmt iterator to the memcpy call.
2287 ??? We can't use gsi_for_stmt as that doesn't work when the
2288 CFG isn't built yet. */
2289 gimple_stmt_iterator gsi2 = *gsi;
2290 gsi_prev (&gsi2);
2291 fold_stmt (&gsi2);
2293 else
2295 gsi_replace_with_seq_vops (gsi, stmts);
2296 fold_stmt (gsi);
2298 return true;
2300 return false;
2303 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2304 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2305 attempt to simplify calls with more than 4 arguments.
2307 Return NULL_TREE if no simplification was possible, otherwise return the
2308 simplified form of the call as a tree. If IGNORED is true, it means that
2309 the caller does not use the returned value of the function. */
2311 static bool
2312 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2314 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2315 tree dest = gimple_call_arg (stmt, 0);
2316 tree destsize = gimple_call_arg (stmt, 1);
2317 tree fmt = gimple_call_arg (stmt, 2);
2318 tree orig = NULL_TREE;
2319 const char *fmt_str = NULL;
2321 if (gimple_call_num_args (stmt) > 4)
2322 return false;
2324 if (gimple_call_num_args (stmt) == 4)
2325 orig = gimple_call_arg (stmt, 3);
2327 if (!tree_fits_uhwi_p (destsize))
2328 return false;
2329 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2331 /* Check whether the format is a literal string constant. */
2332 fmt_str = c_getstr (fmt);
2333 if (fmt_str == NULL)
2334 return false;
2336 if (!init_target_chars ())
2337 return false;
2339 /* If the format doesn't contain % args or %%, use strcpy. */
2340 if (strchr (fmt_str, target_percent) == NULL)
2342 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2343 if (!fn)
2344 return false;
2346 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2347 if (orig)
2348 return false;
2350 /* We could expand this as
2351 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2352 or to
2353 memcpy (str, fmt_with_nul_at_cstm1, cst);
2354 but in the former case that might increase code size
2355 and in the latter case grow .rodata section too much.
2356 So punt for now. */
2357 size_t len = strlen (fmt_str);
2358 if (len >= destlen)
2359 return false;
2361 gimple_seq stmts = NULL;
2362 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2363 gimple_seq_add_stmt_without_update (&stmts, repl);
2364 if (gimple_call_lhs (stmt))
2366 repl = gimple_build_assign (gimple_call_lhs (stmt),
2367 build_int_cst (integer_type_node, len));
2368 gimple_seq_add_stmt_without_update (&stmts, repl);
2369 gsi_replace_with_seq_vops (gsi, stmts);
2370 /* gsi now points at the assignment to the lhs, get a
2371 stmt iterator to the memcpy call.
2372 ??? We can't use gsi_for_stmt as that doesn't work when the
2373 CFG isn't built yet. */
2374 gimple_stmt_iterator gsi2 = *gsi;
2375 gsi_prev (&gsi2);
2376 fold_stmt (&gsi2);
2378 else
2380 gsi_replace_with_seq_vops (gsi, stmts);
2381 fold_stmt (gsi);
2383 return true;
2386 /* If the format is "%s", use strcpy if the result isn't used. */
2387 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2389 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2390 if (!fn)
2391 return false;
2393 /* Don't crash on snprintf (str1, cst, "%s"). */
2394 if (!orig)
2395 return false;
2397 tree orig_len = get_maxval_strlen (orig, 0);
2398 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2399 return false;
2401 /* We could expand this as
2402 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2403 or to
2404 memcpy (str1, str2_with_nul_at_cstm1, cst);
2405 but in the former case that might increase code size
2406 and in the latter case grow .rodata section too much.
2407 So punt for now. */
2408 if (compare_tree_int (orig_len, destlen) >= 0)
2409 return false;
2411 /* Convert snprintf (str1, cst, "%s", str2) into
2412 strcpy (str1, str2) if strlen (str2) < cst. */
2413 gimple_seq stmts = NULL;
2414 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2415 gimple_seq_add_stmt_without_update (&stmts, repl);
2416 if (gimple_call_lhs (stmt))
2418 if (!useless_type_conversion_p (integer_type_node,
2419 TREE_TYPE (orig_len)))
2420 orig_len = fold_convert (integer_type_node, orig_len);
2421 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2422 gimple_seq_add_stmt_without_update (&stmts, repl);
2423 gsi_replace_with_seq_vops (gsi, stmts);
2424 /* gsi now points at the assignment to the lhs, get a
2425 stmt iterator to the memcpy call.
2426 ??? We can't use gsi_for_stmt as that doesn't work when the
2427 CFG isn't built yet. */
2428 gimple_stmt_iterator gsi2 = *gsi;
2429 gsi_prev (&gsi2);
2430 fold_stmt (&gsi2);
2432 else
2434 gsi_replace_with_seq_vops (gsi, stmts);
2435 fold_stmt (gsi);
2437 return true;
2439 return false;
2442 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2443 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2444 more than 3 arguments, and ARG may be null in the 2-argument case.
2446 Return NULL_TREE if no simplification was possible, otherwise return the
2447 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2448 code of the function to be simplified. */
2450 static bool
2451 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2452 tree fp, tree fmt, tree arg,
2453 enum built_in_function fcode)
2455 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2456 tree fn_fputc, fn_fputs;
2457 const char *fmt_str = NULL;
2459 /* If the return value is used, don't do the transformation. */
2460 if (gimple_call_lhs (stmt) != NULL_TREE)
2461 return false;
2463 /* Check whether the format is a literal string constant. */
2464 fmt_str = c_getstr (fmt);
2465 if (fmt_str == NULL)
2466 return false;
2468 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2470 /* If we're using an unlocked function, assume the other
2471 unlocked functions exist explicitly. */
2472 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2473 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2475 else
2477 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2478 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2481 if (!init_target_chars ())
2482 return false;
2484 /* If the format doesn't contain % args or %%, use strcpy. */
2485 if (strchr (fmt_str, target_percent) == NULL)
2487 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2488 && arg)
2489 return false;
2491 /* If the format specifier was "", fprintf does nothing. */
2492 if (fmt_str[0] == '\0')
2494 replace_call_with_value (gsi, NULL_TREE);
2495 return true;
2498 /* When "string" doesn't contain %, replace all cases of
2499 fprintf (fp, string) with fputs (string, fp). The fputs
2500 builtin will take care of special cases like length == 1. */
2501 if (fn_fputs)
2503 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2504 replace_call_with_call_and_fold (gsi, repl);
2505 return true;
2509 /* The other optimizations can be done only on the non-va_list variants. */
2510 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2511 return false;
2513 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2514 else if (strcmp (fmt_str, target_percent_s) == 0)
2516 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2517 return false;
2518 if (fn_fputs)
2520 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2521 replace_call_with_call_and_fold (gsi, repl);
2522 return true;
2526 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2527 else if (strcmp (fmt_str, target_percent_c) == 0)
2529 if (!arg
2530 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2531 return false;
2532 if (fn_fputc)
2534 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2535 replace_call_with_call_and_fold (gsi, repl);
2536 return true;
2540 return false;
2543 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2544 FMT and ARG are the arguments to the call; we don't fold cases with
2545 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2547 Return NULL_TREE if no simplification was possible, otherwise return the
2548 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2549 code of the function to be simplified. */
2551 static bool
2552 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2553 tree arg, enum built_in_function fcode)
2555 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2556 tree fn_putchar, fn_puts, newarg;
2557 const char *fmt_str = NULL;
2559 /* If the return value is used, don't do the transformation. */
2560 if (gimple_call_lhs (stmt) != NULL_TREE)
2561 return false;
2563 /* Check whether the format is a literal string constant. */
2564 fmt_str = c_getstr (fmt);
2565 if (fmt_str == NULL)
2566 return false;
2568 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2570 /* If we're using an unlocked function, assume the other
2571 unlocked functions exist explicitly. */
2572 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2573 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2575 else
2577 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2578 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2581 if (!init_target_chars ())
2582 return false;
2584 if (strcmp (fmt_str, target_percent_s) == 0
2585 || strchr (fmt_str, target_percent) == NULL)
2587 const char *str;
2589 if (strcmp (fmt_str, target_percent_s) == 0)
2591 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2592 return false;
2594 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2595 return false;
2597 str = c_getstr (arg);
2598 if (str == NULL)
2599 return false;
2601 else
2603 /* The format specifier doesn't contain any '%' characters. */
2604 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2605 && arg)
2606 return false;
2607 str = fmt_str;
2610 /* If the string was "", printf does nothing. */
2611 if (str[0] == '\0')
2613 replace_call_with_value (gsi, NULL_TREE);
2614 return true;
2617 /* If the string has length of 1, call putchar. */
2618 if (str[1] == '\0')
2620 /* Given printf("c"), (where c is any one character,)
2621 convert "c"[0] to an int and pass that to the replacement
2622 function. */
2623 newarg = build_int_cst (integer_type_node, str[0]);
2624 if (fn_putchar)
2626 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2627 replace_call_with_call_and_fold (gsi, repl);
2628 return true;
2631 else
2633 /* If the string was "string\n", call puts("string"). */
2634 size_t len = strlen (str);
2635 if ((unsigned char)str[len - 1] == target_newline
2636 && (size_t) (int) len == len
2637 && (int) len > 0)
2639 char *newstr;
2640 tree offset_node, string_cst;
2642 /* Create a NUL-terminated string that's one char shorter
2643 than the original, stripping off the trailing '\n'. */
2644 newarg = build_string_literal (len, str);
2645 string_cst = string_constant (newarg, &offset_node);
2646 gcc_checking_assert (string_cst
2647 && (TREE_STRING_LENGTH (string_cst)
2648 == (int) len)
2649 && integer_zerop (offset_node)
2650 && (unsigned char)
2651 TREE_STRING_POINTER (string_cst)[len - 1]
2652 == target_newline);
2653 /* build_string_literal creates a new STRING_CST,
2654 modify it in place to avoid double copying. */
2655 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2656 newstr[len - 1] = '\0';
2657 if (fn_puts)
2659 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2660 replace_call_with_call_and_fold (gsi, repl);
2661 return true;
2664 else
2665 /* We'd like to arrange to call fputs(string,stdout) here,
2666 but we need stdout and don't have a way to get it yet. */
2667 return false;
2671 /* The other optimizations can be done only on the non-va_list variants. */
2672 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2673 return false;
2675 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2676 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2678 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2679 return false;
2680 if (fn_puts)
2682 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2683 replace_call_with_call_and_fold (gsi, repl);
2684 return true;
2688 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2689 else if (strcmp (fmt_str, target_percent_c) == 0)
2691 if (!arg || ! useless_type_conversion_p (integer_type_node,
2692 TREE_TYPE (arg)))
2693 return false;
2694 if (fn_putchar)
2696 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2697 replace_call_with_call_and_fold (gsi, repl);
2698 return true;
2702 return false;
2707 /* Fold a call to __builtin_strlen with known length LEN. */
2709 static bool
2710 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2712 gimple *stmt = gsi_stmt (*gsi);
2713 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2714 if (!len)
2715 return false;
2716 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2717 replace_call_with_value (gsi, len);
2718 return true;
2721 /* Fold a call to __builtin_acc_on_device. */
2723 static bool
2724 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2726 /* Defer folding until we know which compiler we're in. */
2727 if (symtab->state != EXPANSION)
2728 return false;
2730 unsigned val_host = GOMP_DEVICE_HOST;
2731 unsigned val_dev = GOMP_DEVICE_NONE;
2733 #ifdef ACCEL_COMPILER
2734 val_host = GOMP_DEVICE_NOT_HOST;
2735 val_dev = ACCEL_COMPILER_acc_device;
2736 #endif
2738 location_t loc = gimple_location (gsi_stmt (*gsi));
2740 tree host_eq = make_ssa_name (boolean_type_node);
2741 gimple *host_ass = gimple_build_assign
2742 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2743 gimple_set_location (host_ass, loc);
2744 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2746 tree dev_eq = make_ssa_name (boolean_type_node);
2747 gimple *dev_ass = gimple_build_assign
2748 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2749 gimple_set_location (dev_ass, loc);
2750 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2752 tree result = make_ssa_name (boolean_type_node);
2753 gimple *result_ass = gimple_build_assign
2754 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2755 gimple_set_location (result_ass, loc);
2756 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2758 replace_call_with_value (gsi, result);
2760 return true;
2763 /* Fold the non-target builtin at *GSI and return whether any simplification
2764 was made. */
2766 static bool
2767 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2769 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2770 tree callee = gimple_call_fndecl (stmt);
2772 /* Give up for always_inline inline builtins until they are
2773 inlined. */
2774 if (avoid_folding_inline_builtin (callee))
2775 return false;
2777 unsigned n = gimple_call_num_args (stmt);
2778 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2779 switch (fcode)
2781 case BUILT_IN_BZERO:
2782 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2783 gimple_call_arg (stmt, 1));
2784 case BUILT_IN_MEMSET:
2785 return gimple_fold_builtin_memset (gsi,
2786 gimple_call_arg (stmt, 1),
2787 gimple_call_arg (stmt, 2));
2788 case BUILT_IN_BCOPY:
2789 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2790 gimple_call_arg (stmt, 0), 3);
2791 case BUILT_IN_MEMCPY:
2792 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2793 gimple_call_arg (stmt, 1), 0);
2794 case BUILT_IN_MEMPCPY:
2795 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2796 gimple_call_arg (stmt, 1), 1);
2797 case BUILT_IN_MEMMOVE:
2798 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2799 gimple_call_arg (stmt, 1), 3);
2800 case BUILT_IN_SPRINTF_CHK:
2801 case BUILT_IN_VSPRINTF_CHK:
2802 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2803 case BUILT_IN_STRCAT_CHK:
2804 return gimple_fold_builtin_strcat_chk (gsi);
2805 case BUILT_IN_STRNCAT_CHK:
2806 return gimple_fold_builtin_strncat_chk (gsi);
2807 case BUILT_IN_STRLEN:
2808 return gimple_fold_builtin_strlen (gsi);
2809 case BUILT_IN_STRCPY:
2810 return gimple_fold_builtin_strcpy (gsi,
2811 gimple_call_arg (stmt, 0),
2812 gimple_call_arg (stmt, 1));
2813 case BUILT_IN_STRNCPY:
2814 return gimple_fold_builtin_strncpy (gsi,
2815 gimple_call_arg (stmt, 0),
2816 gimple_call_arg (stmt, 1),
2817 gimple_call_arg (stmt, 2));
2818 case BUILT_IN_STRCAT:
2819 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2820 gimple_call_arg (stmt, 1));
2821 case BUILT_IN_STRNCAT:
2822 return gimple_fold_builtin_strncat (gsi);
2823 case BUILT_IN_FPUTS:
2824 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2825 gimple_call_arg (stmt, 1), false);
2826 case BUILT_IN_FPUTS_UNLOCKED:
2827 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2828 gimple_call_arg (stmt, 1), true);
2829 case BUILT_IN_MEMCPY_CHK:
2830 case BUILT_IN_MEMPCPY_CHK:
2831 case BUILT_IN_MEMMOVE_CHK:
2832 case BUILT_IN_MEMSET_CHK:
2833 return gimple_fold_builtin_memory_chk (gsi,
2834 gimple_call_arg (stmt, 0),
2835 gimple_call_arg (stmt, 1),
2836 gimple_call_arg (stmt, 2),
2837 gimple_call_arg (stmt, 3),
2838 fcode);
2839 case BUILT_IN_STPCPY:
2840 return gimple_fold_builtin_stpcpy (gsi);
2841 case BUILT_IN_STRCPY_CHK:
2842 case BUILT_IN_STPCPY_CHK:
2843 return gimple_fold_builtin_stxcpy_chk (gsi,
2844 gimple_call_arg (stmt, 0),
2845 gimple_call_arg (stmt, 1),
2846 gimple_call_arg (stmt, 2),
2847 fcode);
2848 case BUILT_IN_STRNCPY_CHK:
2849 case BUILT_IN_STPNCPY_CHK:
2850 return gimple_fold_builtin_stxncpy_chk (gsi,
2851 gimple_call_arg (stmt, 0),
2852 gimple_call_arg (stmt, 1),
2853 gimple_call_arg (stmt, 2),
2854 gimple_call_arg (stmt, 3),
2855 fcode);
2856 case BUILT_IN_SNPRINTF_CHK:
2857 case BUILT_IN_VSNPRINTF_CHK:
2858 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2859 case BUILT_IN_SNPRINTF:
2860 return gimple_fold_builtin_snprintf (gsi);
2861 case BUILT_IN_SPRINTF:
2862 return gimple_fold_builtin_sprintf (gsi);
2863 case BUILT_IN_FPRINTF:
2864 case BUILT_IN_FPRINTF_UNLOCKED:
2865 case BUILT_IN_VFPRINTF:
2866 if (n == 2 || n == 3)
2867 return gimple_fold_builtin_fprintf (gsi,
2868 gimple_call_arg (stmt, 0),
2869 gimple_call_arg (stmt, 1),
2870 n == 3
2871 ? gimple_call_arg (stmt, 2)
2872 : NULL_TREE,
2873 fcode);
2874 break;
2875 case BUILT_IN_FPRINTF_CHK:
2876 case BUILT_IN_VFPRINTF_CHK:
2877 if (n == 3 || n == 4)
2878 return gimple_fold_builtin_fprintf (gsi,
2879 gimple_call_arg (stmt, 0),
2880 gimple_call_arg (stmt, 2),
2881 n == 4
2882 ? gimple_call_arg (stmt, 3)
2883 : NULL_TREE,
2884 fcode);
2885 break;
2886 case BUILT_IN_PRINTF:
2887 case BUILT_IN_PRINTF_UNLOCKED:
2888 case BUILT_IN_VPRINTF:
2889 if (n == 1 || n == 2)
2890 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2891 n == 2
2892 ? gimple_call_arg (stmt, 1)
2893 : NULL_TREE, fcode);
2894 break;
2895 case BUILT_IN_PRINTF_CHK:
2896 case BUILT_IN_VPRINTF_CHK:
2897 if (n == 2 || n == 3)
2898 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2899 n == 3
2900 ? gimple_call_arg (stmt, 2)
2901 : NULL_TREE, fcode);
2902 break;
2903 case BUILT_IN_ACC_ON_DEVICE:
2904 return gimple_fold_builtin_acc_on_device (gsi,
2905 gimple_call_arg (stmt, 0));
2906 default:;
2909 /* Try the generic builtin folder. */
2910 bool ignore = (gimple_call_lhs (stmt) == NULL);
2911 tree result = fold_call_stmt (stmt, ignore);
2912 if (result)
2914 if (ignore)
2915 STRIP_NOPS (result);
2916 else
2917 result = fold_convert (gimple_call_return_type (stmt), result);
2918 if (!update_call_from_tree (gsi, result))
2919 gimplify_and_update_call_from_tree (gsi, result);
2920 return true;
2923 return false;
2926 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2927 function calls to constants, where possible. */
2929 static tree
2930 fold_internal_goacc_dim (const gimple *call)
2932 int axis = get_oacc_ifn_dim_arg (call);
2933 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2934 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2935 tree result = NULL_TREE;
2937 /* If the size is 1, or we only want the size and it is not dynamic,
2938 we know the answer. */
2939 if (size == 1 || (!is_pos && size))
2941 tree type = TREE_TYPE (gimple_call_lhs (call));
2942 result = build_int_cst (type, size - is_pos);
2945 return result;
2948 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2949 doesn't fit into TYPE. The test for overflow should be regardless of
2950 -fwrapv, and even for unsigned types. */
2952 bool
2953 arith_overflowed_p (enum tree_code code, const_tree type,
2954 const_tree arg0, const_tree arg1)
2956 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2957 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2958 widest2_int_cst;
2959 widest2_int warg0 = widest2_int_cst (arg0);
2960 widest2_int warg1 = widest2_int_cst (arg1);
2961 widest2_int wres;
2962 switch (code)
2964 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2965 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2966 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2967 default: gcc_unreachable ();
2969 signop sign = TYPE_SIGN (type);
2970 if (sign == UNSIGNED && wi::neg_p (wres))
2971 return true;
2972 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2975 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2976 The statement may be replaced by another statement, e.g., if the call
2977 simplifies to a constant value. Return true if any changes were made.
2978 It is assumed that the operands have been previously folded. */
2980 static bool
2981 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2983 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2984 tree callee;
2985 bool changed = false;
2986 unsigned i;
2988 /* Fold *& in call arguments. */
2989 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2990 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2992 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2993 if (tmp)
2995 gimple_call_set_arg (stmt, i, tmp);
2996 changed = true;
3000 /* Check for virtual calls that became direct calls. */
3001 callee = gimple_call_fn (stmt);
3002 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3004 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3006 if (dump_file && virtual_method_call_p (callee)
3007 && !possible_polymorphic_call_target_p
3008 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3009 (OBJ_TYPE_REF_EXPR (callee)))))
3011 fprintf (dump_file,
3012 "Type inheritance inconsistent devirtualization of ");
3013 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3014 fprintf (dump_file, " to ");
3015 print_generic_expr (dump_file, callee, TDF_SLIM);
3016 fprintf (dump_file, "\n");
3019 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3020 changed = true;
3022 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3024 bool final;
3025 vec <cgraph_node *>targets
3026 = possible_polymorphic_call_targets (callee, stmt, &final);
3027 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3029 tree lhs = gimple_call_lhs (stmt);
3030 if (dump_enabled_p ())
3032 location_t loc = gimple_location_safe (stmt);
3033 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3034 "folding virtual function call to %s\n",
3035 targets.length () == 1
3036 ? targets[0]->name ()
3037 : "__builtin_unreachable");
3039 if (targets.length () == 1)
3041 tree fndecl = targets[0]->decl;
3042 gimple_call_set_fndecl (stmt, fndecl);
3043 changed = true;
3044 /* If changing the call to __cxa_pure_virtual
3045 or similar noreturn function, adjust gimple_call_fntype
3046 too. */
3047 if ((gimple_call_flags (stmt) & ECF_NORETURN)
3048 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3049 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3050 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3051 == void_type_node))
3052 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3053 /* If the call becomes noreturn, remove the lhs. */
3054 if (lhs
3055 && (gimple_call_flags (stmt) & ECF_NORETURN)
3056 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3057 || ((TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))
3058 == INTEGER_CST)
3059 && !TREE_ADDRESSABLE (TREE_TYPE (lhs)))))
3061 if (TREE_CODE (lhs) == SSA_NAME)
3063 tree var = create_tmp_var (TREE_TYPE (lhs));
3064 tree def = get_or_create_ssa_default_def (cfun, var);
3065 gimple *new_stmt = gimple_build_assign (lhs, def);
3066 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3068 gimple_call_set_lhs (stmt, NULL_TREE);
3070 maybe_remove_unused_call_args (cfun, stmt);
3072 else
3074 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3075 gimple *new_stmt = gimple_build_call (fndecl, 0);
3076 gimple_set_location (new_stmt, gimple_location (stmt));
3077 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3079 tree var = create_tmp_var (TREE_TYPE (lhs));
3080 tree def = get_or_create_ssa_default_def (cfun, var);
3082 /* To satisfy condition for
3083 cgraph_update_edges_for_call_stmt_node,
3084 we need to preserve GIMPLE_CALL statement
3085 at position of GSI iterator. */
3086 update_call_from_tree (gsi, def);
3087 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3089 else
3091 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3092 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3093 gsi_replace (gsi, new_stmt, false);
3095 return true;
3101 /* Check for indirect calls that became direct calls, and then
3102 no longer require a static chain. */
3103 if (gimple_call_chain (stmt))
3105 tree fn = gimple_call_fndecl (stmt);
3106 if (fn && !DECL_STATIC_CHAIN (fn))
3108 gimple_call_set_chain (stmt, NULL);
3109 changed = true;
3111 else
3113 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3114 if (tmp)
3116 gimple_call_set_chain (stmt, tmp);
3117 changed = true;
3122 if (inplace)
3123 return changed;
3125 /* Check for builtins that CCP can handle using information not
3126 available in the generic fold routines. */
3127 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3129 if (gimple_fold_builtin (gsi))
3130 changed = true;
3132 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3134 changed |= targetm.gimple_fold_builtin (gsi);
3136 else if (gimple_call_internal_p (stmt))
3138 enum tree_code subcode = ERROR_MARK;
3139 tree result = NULL_TREE;
3140 bool cplx_result = false;
3141 tree overflow = NULL_TREE;
3142 switch (gimple_call_internal_fn (stmt))
3144 case IFN_BUILTIN_EXPECT:
3145 result = fold_builtin_expect (gimple_location (stmt),
3146 gimple_call_arg (stmt, 0),
3147 gimple_call_arg (stmt, 1),
3148 gimple_call_arg (stmt, 2));
3149 break;
3150 case IFN_UBSAN_OBJECT_SIZE:
3151 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3152 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3153 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3154 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3155 gimple_call_arg (stmt, 2))))
3157 gsi_replace (gsi, gimple_build_nop (), false);
3158 unlink_stmt_vdef (stmt);
3159 release_defs (stmt);
3160 return true;
3162 break;
3163 case IFN_GOACC_DIM_SIZE:
3164 case IFN_GOACC_DIM_POS:
3165 result = fold_internal_goacc_dim (stmt);
3166 break;
3167 case IFN_UBSAN_CHECK_ADD:
3168 subcode = PLUS_EXPR;
3169 break;
3170 case IFN_UBSAN_CHECK_SUB:
3171 subcode = MINUS_EXPR;
3172 break;
3173 case IFN_UBSAN_CHECK_MUL:
3174 subcode = MULT_EXPR;
3175 break;
3176 case IFN_ADD_OVERFLOW:
3177 subcode = PLUS_EXPR;
3178 cplx_result = true;
3179 break;
3180 case IFN_SUB_OVERFLOW:
3181 subcode = MINUS_EXPR;
3182 cplx_result = true;
3183 break;
3184 case IFN_MUL_OVERFLOW:
3185 subcode = MULT_EXPR;
3186 cplx_result = true;
3187 break;
3188 default:
3189 break;
3191 if (subcode != ERROR_MARK)
3193 tree arg0 = gimple_call_arg (stmt, 0);
3194 tree arg1 = gimple_call_arg (stmt, 1);
3195 tree type = TREE_TYPE (arg0);
3196 if (cplx_result)
3198 tree lhs = gimple_call_lhs (stmt);
3199 if (lhs == NULL_TREE)
3200 type = NULL_TREE;
3201 else
3202 type = TREE_TYPE (TREE_TYPE (lhs));
3204 if (type == NULL_TREE)
3206 /* x = y + 0; x = y - 0; x = y * 0; */
3207 else if (integer_zerop (arg1))
3208 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3209 /* x = 0 + y; x = 0 * y; */
3210 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3211 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3212 /* x = y - y; */
3213 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3214 result = integer_zero_node;
3215 /* x = y * 1; x = 1 * y; */
3216 else if (subcode == MULT_EXPR && integer_onep (arg1))
3217 result = arg0;
3218 else if (subcode == MULT_EXPR && integer_onep (arg0))
3219 result = arg1;
3220 else if (TREE_CODE (arg0) == INTEGER_CST
3221 && TREE_CODE (arg1) == INTEGER_CST)
3223 if (cplx_result)
3224 result = int_const_binop (subcode, fold_convert (type, arg0),
3225 fold_convert (type, arg1));
3226 else
3227 result = int_const_binop (subcode, arg0, arg1);
3228 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3230 if (cplx_result)
3231 overflow = build_one_cst (type);
3232 else
3233 result = NULL_TREE;
3236 if (result)
3238 if (result == integer_zero_node)
3239 result = build_zero_cst (type);
3240 else if (cplx_result && TREE_TYPE (result) != type)
3242 if (TREE_CODE (result) == INTEGER_CST)
3244 if (arith_overflowed_p (PLUS_EXPR, type, result,
3245 integer_zero_node))
3246 overflow = build_one_cst (type);
3248 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3249 && TYPE_UNSIGNED (type))
3250 || (TYPE_PRECISION (type)
3251 < (TYPE_PRECISION (TREE_TYPE (result))
3252 + (TYPE_UNSIGNED (TREE_TYPE (result))
3253 && !TYPE_UNSIGNED (type)))))
3254 result = NULL_TREE;
3255 if (result)
3256 result = fold_convert (type, result);
3261 if (result)
3263 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3264 result = drop_tree_overflow (result);
3265 if (cplx_result)
3267 if (overflow == NULL_TREE)
3268 overflow = build_zero_cst (TREE_TYPE (result));
3269 tree ctype = build_complex_type (TREE_TYPE (result));
3270 if (TREE_CODE (result) == INTEGER_CST
3271 && TREE_CODE (overflow) == INTEGER_CST)
3272 result = build_complex (ctype, result, overflow);
3273 else
3274 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3275 ctype, result, overflow);
3277 if (!update_call_from_tree (gsi, result))
3278 gimplify_and_update_call_from_tree (gsi, result);
3279 changed = true;
3283 return changed;
3287 /* Return true whether NAME has a use on STMT. */
3289 static bool
3290 has_use_on_stmt (tree name, gimple *stmt)
3292 imm_use_iterator iter;
3293 use_operand_p use_p;
3294 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3295 if (USE_STMT (use_p) == stmt)
3296 return true;
3297 return false;
3300 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3301 gimple_simplify.
3303 Replaces *GSI with the simplification result in RCODE and OPS
3304 and the associated statements in *SEQ. Does the replacement
3305 according to INPLACE and returns true if the operation succeeded. */
3307 static bool
3308 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3309 code_helper rcode, tree *ops,
3310 gimple_seq *seq, bool inplace)
3312 gimple *stmt = gsi_stmt (*gsi);
3314 /* Play safe and do not allow abnormals to be mentioned in
3315 newly created statements. See also maybe_push_res_to_seq.
3316 As an exception allow such uses if there was a use of the
3317 same SSA name on the old stmt. */
3318 if ((TREE_CODE (ops[0]) == SSA_NAME
3319 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3320 && !has_use_on_stmt (ops[0], stmt))
3321 || (ops[1]
3322 && TREE_CODE (ops[1]) == SSA_NAME
3323 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3324 && !has_use_on_stmt (ops[1], stmt))
3325 || (ops[2]
3326 && TREE_CODE (ops[2]) == SSA_NAME
3327 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3328 && !has_use_on_stmt (ops[2], stmt))
3329 || (COMPARISON_CLASS_P (ops[0])
3330 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3331 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3332 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3333 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3334 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3335 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3336 return false;
3338 /* Don't insert new statements when INPLACE is true, even if we could
3339 reuse STMT for the final statement. */
3340 if (inplace && !gimple_seq_empty_p (*seq))
3341 return false;
3343 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3345 gcc_assert (rcode.is_tree_code ());
3346 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3347 /* GIMPLE_CONDs condition may not throw. */
3348 && (!flag_exceptions
3349 || !cfun->can_throw_non_call_exceptions
3350 || !operation_could_trap_p (rcode,
3351 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3352 false, NULL_TREE)))
3353 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3354 else if (rcode == SSA_NAME)
3355 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3356 build_zero_cst (TREE_TYPE (ops[0])));
3357 else if (rcode == INTEGER_CST)
3359 if (integer_zerop (ops[0]))
3360 gimple_cond_make_false (cond_stmt);
3361 else
3362 gimple_cond_make_true (cond_stmt);
3364 else if (!inplace)
3366 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3367 ops, seq);
3368 if (!res)
3369 return false;
3370 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3371 build_zero_cst (TREE_TYPE (res)));
3373 else
3374 return false;
3375 if (dump_file && (dump_flags & TDF_DETAILS))
3377 fprintf (dump_file, "gimple_simplified to ");
3378 if (!gimple_seq_empty_p (*seq))
3379 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3380 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3381 0, TDF_SLIM);
3383 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3384 return true;
3386 else if (is_gimple_assign (stmt)
3387 && rcode.is_tree_code ())
3389 if (!inplace
3390 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3392 maybe_build_generic_op (rcode,
3393 TREE_TYPE (gimple_assign_lhs (stmt)),
3394 &ops[0], ops[1], ops[2]);
3395 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3396 if (dump_file && (dump_flags & TDF_DETAILS))
3398 fprintf (dump_file, "gimple_simplified to ");
3399 if (!gimple_seq_empty_p (*seq))
3400 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3401 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3402 0, TDF_SLIM);
3404 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3405 return true;
3408 else if (rcode.is_fn_code ()
3409 && gimple_call_combined_fn (stmt) == rcode)
3411 unsigned i;
3412 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3414 gcc_assert (ops[i] != NULL_TREE);
3415 gimple_call_set_arg (stmt, i, ops[i]);
3417 if (i < 3)
3418 gcc_assert (ops[i] == NULL_TREE);
3419 if (dump_file && (dump_flags & TDF_DETAILS))
3421 fprintf (dump_file, "gimple_simplified to ");
3422 if (!gimple_seq_empty_p (*seq))
3423 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3424 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3426 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3427 return true;
3429 else if (!inplace)
3431 if (gimple_has_lhs (stmt))
3433 tree lhs = gimple_get_lhs (stmt);
3434 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3435 ops, seq, lhs))
3436 return false;
3437 if (dump_file && (dump_flags & TDF_DETAILS))
3439 fprintf (dump_file, "gimple_simplified to ");
3440 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3442 gsi_replace_with_seq_vops (gsi, *seq);
3443 return true;
3445 else
3446 gcc_unreachable ();
3449 return false;
3452 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3454 static bool
3455 maybe_canonicalize_mem_ref_addr (tree *t)
3457 bool res = false;
3459 if (TREE_CODE (*t) == ADDR_EXPR)
3460 t = &TREE_OPERAND (*t, 0);
3462 while (handled_component_p (*t))
3463 t = &TREE_OPERAND (*t, 0);
3465 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3466 of invariant addresses into a SSA name MEM_REF address. */
3467 if (TREE_CODE (*t) == MEM_REF
3468 || TREE_CODE (*t) == TARGET_MEM_REF)
3470 tree addr = TREE_OPERAND (*t, 0);
3471 if (TREE_CODE (addr) == ADDR_EXPR
3472 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3473 || handled_component_p (TREE_OPERAND (addr, 0))))
3475 tree base;
3476 HOST_WIDE_INT coffset;
3477 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3478 &coffset);
3479 if (!base)
3480 gcc_unreachable ();
3482 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3483 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3484 TREE_OPERAND (*t, 1),
3485 size_int (coffset));
3486 res = true;
3488 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3489 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3492 /* Canonicalize back MEM_REFs to plain reference trees if the object
3493 accessed is a decl that has the same access semantics as the MEM_REF. */
3494 if (TREE_CODE (*t) == MEM_REF
3495 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3496 && integer_zerop (TREE_OPERAND (*t, 1))
3497 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3499 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3500 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3501 if (/* Same volatile qualification. */
3502 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3503 /* Same TBAA behavior with -fstrict-aliasing. */
3504 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3505 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3506 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3507 /* Same alignment. */
3508 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3509 /* We have to look out here to not drop a required conversion
3510 from the rhs to the lhs if *t appears on the lhs or vice-versa
3511 if it appears on the rhs. Thus require strict type
3512 compatibility. */
3513 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3515 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3516 res = true;
3520 /* Canonicalize TARGET_MEM_REF in particular with respect to
3521 the indexes becoming constant. */
3522 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3524 tree tem = maybe_fold_tmr (*t);
3525 if (tem)
3527 *t = tem;
3528 res = true;
3532 return res;
3535 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3536 distinguishes both cases. */
3538 static bool
3539 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3541 bool changed = false;
3542 gimple *stmt = gsi_stmt (*gsi);
3543 unsigned i;
3545 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3546 after propagation.
3547 ??? This shouldn't be done in generic folding but in the
3548 propagation helpers which also know whether an address was
3549 propagated.
3550 Also canonicalize operand order. */
3551 switch (gimple_code (stmt))
3553 case GIMPLE_ASSIGN:
3554 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3556 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3557 if ((REFERENCE_CLASS_P (*rhs)
3558 || TREE_CODE (*rhs) == ADDR_EXPR)
3559 && maybe_canonicalize_mem_ref_addr (rhs))
3560 changed = true;
3561 tree *lhs = gimple_assign_lhs_ptr (stmt);
3562 if (REFERENCE_CLASS_P (*lhs)
3563 && maybe_canonicalize_mem_ref_addr (lhs))
3564 changed = true;
3566 else
3568 /* Canonicalize operand order. */
3569 enum tree_code code = gimple_assign_rhs_code (stmt);
3570 if (TREE_CODE_CLASS (code) == tcc_comparison
3571 || commutative_tree_code (code)
3572 || commutative_ternary_tree_code (code))
3574 tree rhs1 = gimple_assign_rhs1 (stmt);
3575 tree rhs2 = gimple_assign_rhs2 (stmt);
3576 if (tree_swap_operands_p (rhs1, rhs2, false))
3578 gimple_assign_set_rhs1 (stmt, rhs2);
3579 gimple_assign_set_rhs2 (stmt, rhs1);
3580 if (TREE_CODE_CLASS (code) == tcc_comparison)
3581 gimple_assign_set_rhs_code (stmt,
3582 swap_tree_comparison (code));
3583 changed = true;
3587 break;
3588 case GIMPLE_CALL:
3590 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3592 tree *arg = gimple_call_arg_ptr (stmt, i);
3593 if (REFERENCE_CLASS_P (*arg)
3594 && maybe_canonicalize_mem_ref_addr (arg))
3595 changed = true;
3597 tree *lhs = gimple_call_lhs_ptr (stmt);
3598 if (*lhs
3599 && REFERENCE_CLASS_P (*lhs)
3600 && maybe_canonicalize_mem_ref_addr (lhs))
3601 changed = true;
3602 break;
3604 case GIMPLE_ASM:
3606 gasm *asm_stmt = as_a <gasm *> (stmt);
3607 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3609 tree link = gimple_asm_output_op (asm_stmt, i);
3610 tree op = TREE_VALUE (link);
3611 if (REFERENCE_CLASS_P (op)
3612 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3613 changed = true;
3615 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3617 tree link = gimple_asm_input_op (asm_stmt, i);
3618 tree op = TREE_VALUE (link);
3619 if ((REFERENCE_CLASS_P (op)
3620 || TREE_CODE (op) == ADDR_EXPR)
3621 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3622 changed = true;
3625 break;
3626 case GIMPLE_DEBUG:
3627 if (gimple_debug_bind_p (stmt))
3629 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3630 if (*val
3631 && (REFERENCE_CLASS_P (*val)
3632 || TREE_CODE (*val) == ADDR_EXPR)
3633 && maybe_canonicalize_mem_ref_addr (val))
3634 changed = true;
3636 break;
3637 case GIMPLE_COND:
3639 /* Canonicalize operand order. */
3640 tree lhs = gimple_cond_lhs (stmt);
3641 tree rhs = gimple_cond_rhs (stmt);
3642 if (tree_swap_operands_p (lhs, rhs, false))
3644 gcond *gc = as_a <gcond *> (stmt);
3645 gimple_cond_set_lhs (gc, rhs);
3646 gimple_cond_set_rhs (gc, lhs);
3647 gimple_cond_set_code (gc,
3648 swap_tree_comparison (gimple_cond_code (gc)));
3649 changed = true;
3652 default:;
3655 /* Dispatch to pattern-based folding. */
3656 if (!inplace
3657 || is_gimple_assign (stmt)
3658 || gimple_code (stmt) == GIMPLE_COND)
3660 gimple_seq seq = NULL;
3661 code_helper rcode;
3662 tree ops[3] = {};
3663 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3664 valueize, valueize))
3666 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3667 changed = true;
3668 else
3669 gimple_seq_discard (seq);
3673 stmt = gsi_stmt (*gsi);
3675 /* Fold the main computation performed by the statement. */
3676 switch (gimple_code (stmt))
3678 case GIMPLE_ASSIGN:
3680 /* Try to canonicalize for boolean-typed X the comparisons
3681 X == 0, X == 1, X != 0, and X != 1. */
3682 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3683 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3685 tree lhs = gimple_assign_lhs (stmt);
3686 tree op1 = gimple_assign_rhs1 (stmt);
3687 tree op2 = gimple_assign_rhs2 (stmt);
3688 tree type = TREE_TYPE (op1);
3690 /* Check whether the comparison operands are of the same boolean
3691 type as the result type is.
3692 Check that second operand is an integer-constant with value
3693 one or zero. */
3694 if (TREE_CODE (op2) == INTEGER_CST
3695 && (integer_zerop (op2) || integer_onep (op2))
3696 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3698 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3699 bool is_logical_not = false;
3701 /* X == 0 and X != 1 is a logical-not.of X
3702 X == 1 and X != 0 is X */
3703 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3704 || (cmp_code == NE_EXPR && integer_onep (op2)))
3705 is_logical_not = true;
3707 if (is_logical_not == false)
3708 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3709 /* Only for one-bit precision typed X the transformation
3710 !X -> ~X is valied. */
3711 else if (TYPE_PRECISION (type) == 1)
3712 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3713 /* Otherwise we use !X -> X ^ 1. */
3714 else
3715 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3716 build_int_cst (type, 1));
3717 changed = true;
3718 break;
3722 unsigned old_num_ops = gimple_num_ops (stmt);
3723 tree lhs = gimple_assign_lhs (stmt);
3724 tree new_rhs = fold_gimple_assign (gsi);
3725 if (new_rhs
3726 && !useless_type_conversion_p (TREE_TYPE (lhs),
3727 TREE_TYPE (new_rhs)))
3728 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3729 if (new_rhs
3730 && (!inplace
3731 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3733 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3734 changed = true;
3736 break;
3739 case GIMPLE_CALL:
3740 changed |= gimple_fold_call (gsi, inplace);
3741 break;
3743 case GIMPLE_ASM:
3744 /* Fold *& in asm operands. */
3746 gasm *asm_stmt = as_a <gasm *> (stmt);
3747 size_t noutputs;
3748 const char **oconstraints;
3749 const char *constraint;
3750 bool allows_mem, allows_reg;
3752 noutputs = gimple_asm_noutputs (asm_stmt);
3753 oconstraints = XALLOCAVEC (const char *, noutputs);
3755 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3757 tree link = gimple_asm_output_op (asm_stmt, i);
3758 tree op = TREE_VALUE (link);
3759 oconstraints[i]
3760 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3761 if (REFERENCE_CLASS_P (op)
3762 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3764 TREE_VALUE (link) = op;
3765 changed = true;
3768 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3770 tree link = gimple_asm_input_op (asm_stmt, i);
3771 tree op = TREE_VALUE (link);
3772 constraint
3773 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3774 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3775 oconstraints, &allows_mem, &allows_reg);
3776 if (REFERENCE_CLASS_P (op)
3777 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3778 != NULL_TREE)
3780 TREE_VALUE (link) = op;
3781 changed = true;
3785 break;
3787 case GIMPLE_DEBUG:
3788 if (gimple_debug_bind_p (stmt))
3790 tree val = gimple_debug_bind_get_value (stmt);
3791 if (val
3792 && REFERENCE_CLASS_P (val))
3794 tree tem = maybe_fold_reference (val, false);
3795 if (tem)
3797 gimple_debug_bind_set_value (stmt, tem);
3798 changed = true;
3801 else if (val
3802 && TREE_CODE (val) == ADDR_EXPR)
3804 tree ref = TREE_OPERAND (val, 0);
3805 tree tem = maybe_fold_reference (ref, false);
3806 if (tem)
3808 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3809 gimple_debug_bind_set_value (stmt, tem);
3810 changed = true;
3814 break;
3816 default:;
3819 stmt = gsi_stmt (*gsi);
3821 /* Fold *& on the lhs. */
3822 if (gimple_has_lhs (stmt))
3824 tree lhs = gimple_get_lhs (stmt);
3825 if (lhs && REFERENCE_CLASS_P (lhs))
3827 tree new_lhs = maybe_fold_reference (lhs, true);
3828 if (new_lhs)
3830 gimple_set_lhs (stmt, new_lhs);
3831 changed = true;
3836 return changed;
3839 /* Valueziation callback that ends up not following SSA edges. */
3841 tree
3842 no_follow_ssa_edges (tree)
3844 return NULL_TREE;
3847 /* Valueization callback that ends up following single-use SSA edges only. */
3849 tree
3850 follow_single_use_edges (tree val)
3852 if (TREE_CODE (val) == SSA_NAME
3853 && !has_single_use (val))
3854 return NULL_TREE;
3855 return val;
3858 /* Fold the statement pointed to by GSI. In some cases, this function may
3859 replace the whole statement with a new one. Returns true iff folding
3860 makes any changes.
3861 The statement pointed to by GSI should be in valid gimple form but may
3862 be in unfolded state as resulting from for example constant propagation
3863 which can produce *&x = 0. */
3865 bool
3866 fold_stmt (gimple_stmt_iterator *gsi)
3868 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3871 bool
3872 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3874 return fold_stmt_1 (gsi, false, valueize);
3877 /* Perform the minimal folding on statement *GSI. Only operations like
3878 *&x created by constant propagation are handled. The statement cannot
3879 be replaced with a new one. Return true if the statement was
3880 changed, false otherwise.
3881 The statement *GSI should be in valid gimple form but may
3882 be in unfolded state as resulting from for example constant propagation
3883 which can produce *&x = 0. */
3885 bool
3886 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3888 gimple *stmt = gsi_stmt (*gsi);
3889 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3890 gcc_assert (gsi_stmt (*gsi) == stmt);
3891 return changed;
3894 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3895 if EXPR is null or we don't know how.
3896 If non-null, the result always has boolean type. */
3898 static tree
3899 canonicalize_bool (tree expr, bool invert)
3901 if (!expr)
3902 return NULL_TREE;
3903 else if (invert)
3905 if (integer_nonzerop (expr))
3906 return boolean_false_node;
3907 else if (integer_zerop (expr))
3908 return boolean_true_node;
3909 else if (TREE_CODE (expr) == SSA_NAME)
3910 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3911 build_int_cst (TREE_TYPE (expr), 0));
3912 else if (COMPARISON_CLASS_P (expr))
3913 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3914 boolean_type_node,
3915 TREE_OPERAND (expr, 0),
3916 TREE_OPERAND (expr, 1));
3917 else
3918 return NULL_TREE;
3920 else
3922 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3923 return expr;
3924 if (integer_nonzerop (expr))
3925 return boolean_true_node;
3926 else if (integer_zerop (expr))
3927 return boolean_false_node;
3928 else if (TREE_CODE (expr) == SSA_NAME)
3929 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3930 build_int_cst (TREE_TYPE (expr), 0));
3931 else if (COMPARISON_CLASS_P (expr))
3932 return fold_build2 (TREE_CODE (expr),
3933 boolean_type_node,
3934 TREE_OPERAND (expr, 0),
3935 TREE_OPERAND (expr, 1));
3936 else
3937 return NULL_TREE;
3941 /* Check to see if a boolean expression EXPR is logically equivalent to the
3942 comparison (OP1 CODE OP2). Check for various identities involving
3943 SSA_NAMEs. */
3945 static bool
3946 same_bool_comparison_p (const_tree expr, enum tree_code code,
3947 const_tree op1, const_tree op2)
3949 gimple *s;
3951 /* The obvious case. */
3952 if (TREE_CODE (expr) == code
3953 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3954 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3955 return true;
3957 /* Check for comparing (name, name != 0) and the case where expr
3958 is an SSA_NAME with a definition matching the comparison. */
3959 if (TREE_CODE (expr) == SSA_NAME
3960 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3962 if (operand_equal_p (expr, op1, 0))
3963 return ((code == NE_EXPR && integer_zerop (op2))
3964 || (code == EQ_EXPR && integer_nonzerop (op2)));
3965 s = SSA_NAME_DEF_STMT (expr);
3966 if (is_gimple_assign (s)
3967 && gimple_assign_rhs_code (s) == code
3968 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3969 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3970 return true;
3973 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3974 of name is a comparison, recurse. */
3975 if (TREE_CODE (op1) == SSA_NAME
3976 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3978 s = SSA_NAME_DEF_STMT (op1);
3979 if (is_gimple_assign (s)
3980 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3982 enum tree_code c = gimple_assign_rhs_code (s);
3983 if ((c == NE_EXPR && integer_zerop (op2))
3984 || (c == EQ_EXPR && integer_nonzerop (op2)))
3985 return same_bool_comparison_p (expr, c,
3986 gimple_assign_rhs1 (s),
3987 gimple_assign_rhs2 (s));
3988 if ((c == EQ_EXPR && integer_zerop (op2))
3989 || (c == NE_EXPR && integer_nonzerop (op2)))
3990 return same_bool_comparison_p (expr,
3991 invert_tree_comparison (c, false),
3992 gimple_assign_rhs1 (s),
3993 gimple_assign_rhs2 (s));
3996 return false;
3999 /* Check to see if two boolean expressions OP1 and OP2 are logically
4000 equivalent. */
4002 static bool
4003 same_bool_result_p (const_tree op1, const_tree op2)
4005 /* Simple cases first. */
4006 if (operand_equal_p (op1, op2, 0))
4007 return true;
4009 /* Check the cases where at least one of the operands is a comparison.
4010 These are a bit smarter than operand_equal_p in that they apply some
4011 identifies on SSA_NAMEs. */
4012 if (COMPARISON_CLASS_P (op2)
4013 && same_bool_comparison_p (op1, TREE_CODE (op2),
4014 TREE_OPERAND (op2, 0),
4015 TREE_OPERAND (op2, 1)))
4016 return true;
4017 if (COMPARISON_CLASS_P (op1)
4018 && same_bool_comparison_p (op2, TREE_CODE (op1),
4019 TREE_OPERAND (op1, 0),
4020 TREE_OPERAND (op1, 1)))
4021 return true;
4023 /* Default case. */
4024 return false;
4027 /* Forward declarations for some mutually recursive functions. */
4029 static tree
4030 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4031 enum tree_code code2, tree op2a, tree op2b);
4032 static tree
4033 and_var_with_comparison (tree var, bool invert,
4034 enum tree_code code2, tree op2a, tree op2b);
4035 static tree
4036 and_var_with_comparison_1 (gimple *stmt,
4037 enum tree_code code2, tree op2a, tree op2b);
4038 static tree
4039 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4040 enum tree_code code2, tree op2a, tree op2b);
4041 static tree
4042 or_var_with_comparison (tree var, bool invert,
4043 enum tree_code code2, tree op2a, tree op2b);
4044 static tree
4045 or_var_with_comparison_1 (gimple *stmt,
4046 enum tree_code code2, tree op2a, tree op2b);
4048 /* Helper function for and_comparisons_1: try to simplify the AND of the
4049 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4050 If INVERT is true, invert the value of the VAR before doing the AND.
4051 Return NULL_EXPR if we can't simplify this to a single expression. */
4053 static tree
4054 and_var_with_comparison (tree var, bool invert,
4055 enum tree_code code2, tree op2a, tree op2b)
4057 tree t;
4058 gimple *stmt = SSA_NAME_DEF_STMT (var);
4060 /* We can only deal with variables whose definitions are assignments. */
4061 if (!is_gimple_assign (stmt))
4062 return NULL_TREE;
4064 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4065 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4066 Then we only have to consider the simpler non-inverted cases. */
4067 if (invert)
4068 t = or_var_with_comparison_1 (stmt,
4069 invert_tree_comparison (code2, false),
4070 op2a, op2b);
4071 else
4072 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4073 return canonicalize_bool (t, invert);
4076 /* Try to simplify the AND of the ssa variable defined by the assignment
4077 STMT with the comparison specified by (OP2A CODE2 OP2B).
4078 Return NULL_EXPR if we can't simplify this to a single expression. */
4080 static tree
4081 and_var_with_comparison_1 (gimple *stmt,
4082 enum tree_code code2, tree op2a, tree op2b)
4084 tree var = gimple_assign_lhs (stmt);
4085 tree true_test_var = NULL_TREE;
4086 tree false_test_var = NULL_TREE;
4087 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4089 /* Check for identities like (var AND (var == 0)) => false. */
4090 if (TREE_CODE (op2a) == SSA_NAME
4091 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4093 if ((code2 == NE_EXPR && integer_zerop (op2b))
4094 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4096 true_test_var = op2a;
4097 if (var == true_test_var)
4098 return var;
4100 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4101 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4103 false_test_var = op2a;
4104 if (var == false_test_var)
4105 return boolean_false_node;
4109 /* If the definition is a comparison, recurse on it. */
4110 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4112 tree t = and_comparisons_1 (innercode,
4113 gimple_assign_rhs1 (stmt),
4114 gimple_assign_rhs2 (stmt),
4115 code2,
4116 op2a,
4117 op2b);
4118 if (t)
4119 return t;
4122 /* If the definition is an AND or OR expression, we may be able to
4123 simplify by reassociating. */
4124 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4125 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4127 tree inner1 = gimple_assign_rhs1 (stmt);
4128 tree inner2 = gimple_assign_rhs2 (stmt);
4129 gimple *s;
4130 tree t;
4131 tree partial = NULL_TREE;
4132 bool is_and = (innercode == BIT_AND_EXPR);
4134 /* Check for boolean identities that don't require recursive examination
4135 of inner1/inner2:
4136 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4137 inner1 AND (inner1 OR inner2) => inner1
4138 !inner1 AND (inner1 AND inner2) => false
4139 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4140 Likewise for similar cases involving inner2. */
4141 if (inner1 == true_test_var)
4142 return (is_and ? var : inner1);
4143 else if (inner2 == true_test_var)
4144 return (is_and ? var : inner2);
4145 else if (inner1 == false_test_var)
4146 return (is_and
4147 ? boolean_false_node
4148 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4149 else if (inner2 == false_test_var)
4150 return (is_and
4151 ? boolean_false_node
4152 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4154 /* Next, redistribute/reassociate the AND across the inner tests.
4155 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4156 if (TREE_CODE (inner1) == SSA_NAME
4157 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4158 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4159 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4160 gimple_assign_rhs1 (s),
4161 gimple_assign_rhs2 (s),
4162 code2, op2a, op2b)))
4164 /* Handle the AND case, where we are reassociating:
4165 (inner1 AND inner2) AND (op2a code2 op2b)
4166 => (t AND inner2)
4167 If the partial result t is a constant, we win. Otherwise
4168 continue on to try reassociating with the other inner test. */
4169 if (is_and)
4171 if (integer_onep (t))
4172 return inner2;
4173 else if (integer_zerop (t))
4174 return boolean_false_node;
4177 /* Handle the OR case, where we are redistributing:
4178 (inner1 OR inner2) AND (op2a code2 op2b)
4179 => (t OR (inner2 AND (op2a code2 op2b))) */
4180 else if (integer_onep (t))
4181 return boolean_true_node;
4183 /* Save partial result for later. */
4184 partial = t;
4187 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4188 if (TREE_CODE (inner2) == SSA_NAME
4189 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4190 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4191 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4192 gimple_assign_rhs1 (s),
4193 gimple_assign_rhs2 (s),
4194 code2, op2a, op2b)))
4196 /* Handle the AND case, where we are reassociating:
4197 (inner1 AND inner2) AND (op2a code2 op2b)
4198 => (inner1 AND t) */
4199 if (is_and)
4201 if (integer_onep (t))
4202 return inner1;
4203 else if (integer_zerop (t))
4204 return boolean_false_node;
4205 /* If both are the same, we can apply the identity
4206 (x AND x) == x. */
4207 else if (partial && same_bool_result_p (t, partial))
4208 return t;
4211 /* Handle the OR case. where we are redistributing:
4212 (inner1 OR inner2) AND (op2a code2 op2b)
4213 => (t OR (inner1 AND (op2a code2 op2b)))
4214 => (t OR partial) */
4215 else
4217 if (integer_onep (t))
4218 return boolean_true_node;
4219 else if (partial)
4221 /* We already got a simplification for the other
4222 operand to the redistributed OR expression. The
4223 interesting case is when at least one is false.
4224 Or, if both are the same, we can apply the identity
4225 (x OR x) == x. */
4226 if (integer_zerop (partial))
4227 return t;
4228 else if (integer_zerop (t))
4229 return partial;
4230 else if (same_bool_result_p (t, partial))
4231 return t;
4236 return NULL_TREE;
4239 /* Try to simplify the AND of two comparisons defined by
4240 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4241 If this can be done without constructing an intermediate value,
4242 return the resulting tree; otherwise NULL_TREE is returned.
4243 This function is deliberately asymmetric as it recurses on SSA_DEFs
4244 in the first comparison but not the second. */
4246 static tree
4247 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4248 enum tree_code code2, tree op2a, tree op2b)
4250 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4252 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4253 if (operand_equal_p (op1a, op2a, 0)
4254 && operand_equal_p (op1b, op2b, 0))
4256 /* Result will be either NULL_TREE, or a combined comparison. */
4257 tree t = combine_comparisons (UNKNOWN_LOCATION,
4258 TRUTH_ANDIF_EXPR, code1, code2,
4259 truth_type, op1a, op1b);
4260 if (t)
4261 return t;
4264 /* Likewise the swapped case of the above. */
4265 if (operand_equal_p (op1a, op2b, 0)
4266 && operand_equal_p (op1b, op2a, 0))
4268 /* Result will be either NULL_TREE, or a combined comparison. */
4269 tree t = combine_comparisons (UNKNOWN_LOCATION,
4270 TRUTH_ANDIF_EXPR, code1,
4271 swap_tree_comparison (code2),
4272 truth_type, op1a, op1b);
4273 if (t)
4274 return t;
4277 /* If both comparisons are of the same value against constants, we might
4278 be able to merge them. */
4279 if (operand_equal_p (op1a, op2a, 0)
4280 && TREE_CODE (op1b) == INTEGER_CST
4281 && TREE_CODE (op2b) == INTEGER_CST)
4283 int cmp = tree_int_cst_compare (op1b, op2b);
4285 /* If we have (op1a == op1b), we should either be able to
4286 return that or FALSE, depending on whether the constant op1b
4287 also satisfies the other comparison against op2b. */
4288 if (code1 == EQ_EXPR)
4290 bool done = true;
4291 bool val;
4292 switch (code2)
4294 case EQ_EXPR: val = (cmp == 0); break;
4295 case NE_EXPR: val = (cmp != 0); break;
4296 case LT_EXPR: val = (cmp < 0); break;
4297 case GT_EXPR: val = (cmp > 0); break;
4298 case LE_EXPR: val = (cmp <= 0); break;
4299 case GE_EXPR: val = (cmp >= 0); break;
4300 default: done = false;
4302 if (done)
4304 if (val)
4305 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4306 else
4307 return boolean_false_node;
4310 /* Likewise if the second comparison is an == comparison. */
4311 else if (code2 == EQ_EXPR)
4313 bool done = true;
4314 bool val;
4315 switch (code1)
4317 case EQ_EXPR: val = (cmp == 0); break;
4318 case NE_EXPR: val = (cmp != 0); break;
4319 case LT_EXPR: val = (cmp > 0); break;
4320 case GT_EXPR: val = (cmp < 0); break;
4321 case LE_EXPR: val = (cmp >= 0); break;
4322 case GE_EXPR: val = (cmp <= 0); break;
4323 default: done = false;
4325 if (done)
4327 if (val)
4328 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4329 else
4330 return boolean_false_node;
4334 /* Same business with inequality tests. */
4335 else if (code1 == NE_EXPR)
4337 bool val;
4338 switch (code2)
4340 case EQ_EXPR: val = (cmp != 0); break;
4341 case NE_EXPR: val = (cmp == 0); break;
4342 case LT_EXPR: val = (cmp >= 0); break;
4343 case GT_EXPR: val = (cmp <= 0); break;
4344 case LE_EXPR: val = (cmp > 0); break;
4345 case GE_EXPR: val = (cmp < 0); break;
4346 default:
4347 val = false;
4349 if (val)
4350 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4352 else if (code2 == NE_EXPR)
4354 bool val;
4355 switch (code1)
4357 case EQ_EXPR: val = (cmp == 0); break;
4358 case NE_EXPR: val = (cmp != 0); break;
4359 case LT_EXPR: val = (cmp <= 0); break;
4360 case GT_EXPR: val = (cmp >= 0); break;
4361 case LE_EXPR: val = (cmp < 0); break;
4362 case GE_EXPR: val = (cmp > 0); break;
4363 default:
4364 val = false;
4366 if (val)
4367 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4370 /* Chose the more restrictive of two < or <= comparisons. */
4371 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4372 && (code2 == LT_EXPR || code2 == LE_EXPR))
4374 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4375 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4376 else
4377 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4380 /* Likewise chose the more restrictive of two > or >= comparisons. */
4381 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4382 && (code2 == GT_EXPR || code2 == GE_EXPR))
4384 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4385 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4386 else
4387 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4390 /* Check for singleton ranges. */
4391 else if (cmp == 0
4392 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4393 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4394 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4396 /* Check for disjoint ranges. */
4397 else if (cmp <= 0
4398 && (code1 == LT_EXPR || code1 == LE_EXPR)
4399 && (code2 == GT_EXPR || code2 == GE_EXPR))
4400 return boolean_false_node;
4401 else if (cmp >= 0
4402 && (code1 == GT_EXPR || code1 == GE_EXPR)
4403 && (code2 == LT_EXPR || code2 == LE_EXPR))
4404 return boolean_false_node;
4407 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4408 NAME's definition is a truth value. See if there are any simplifications
4409 that can be done against the NAME's definition. */
4410 if (TREE_CODE (op1a) == SSA_NAME
4411 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4412 && (integer_zerop (op1b) || integer_onep (op1b)))
4414 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4415 || (code1 == NE_EXPR && integer_onep (op1b)));
4416 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4417 switch (gimple_code (stmt))
4419 case GIMPLE_ASSIGN:
4420 /* Try to simplify by copy-propagating the definition. */
4421 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4423 case GIMPLE_PHI:
4424 /* If every argument to the PHI produces the same result when
4425 ANDed with the second comparison, we win.
4426 Do not do this unless the type is bool since we need a bool
4427 result here anyway. */
4428 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4430 tree result = NULL_TREE;
4431 unsigned i;
4432 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4434 tree arg = gimple_phi_arg_def (stmt, i);
4436 /* If this PHI has itself as an argument, ignore it.
4437 If all the other args produce the same result,
4438 we're still OK. */
4439 if (arg == gimple_phi_result (stmt))
4440 continue;
4441 else if (TREE_CODE (arg) == INTEGER_CST)
4443 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4445 if (!result)
4446 result = boolean_false_node;
4447 else if (!integer_zerop (result))
4448 return NULL_TREE;
4450 else if (!result)
4451 result = fold_build2 (code2, boolean_type_node,
4452 op2a, op2b);
4453 else if (!same_bool_comparison_p (result,
4454 code2, op2a, op2b))
4455 return NULL_TREE;
4457 else if (TREE_CODE (arg) == SSA_NAME
4458 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4460 tree temp;
4461 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4462 /* In simple cases we can look through PHI nodes,
4463 but we have to be careful with loops.
4464 See PR49073. */
4465 if (! dom_info_available_p (CDI_DOMINATORS)
4466 || gimple_bb (def_stmt) == gimple_bb (stmt)
4467 || dominated_by_p (CDI_DOMINATORS,
4468 gimple_bb (def_stmt),
4469 gimple_bb (stmt)))
4470 return NULL_TREE;
4471 temp = and_var_with_comparison (arg, invert, code2,
4472 op2a, op2b);
4473 if (!temp)
4474 return NULL_TREE;
4475 else if (!result)
4476 result = temp;
4477 else if (!same_bool_result_p (result, temp))
4478 return NULL_TREE;
4480 else
4481 return NULL_TREE;
4483 return result;
4486 default:
4487 break;
4490 return NULL_TREE;
4493 /* Try to simplify the AND of two comparisons, specified by
4494 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4495 If this can be simplified to a single expression (without requiring
4496 introducing more SSA variables to hold intermediate values),
4497 return the resulting tree. Otherwise return NULL_TREE.
4498 If the result expression is non-null, it has boolean type. */
4500 tree
4501 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4502 enum tree_code code2, tree op2a, tree op2b)
4504 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4505 if (t)
4506 return t;
4507 else
4508 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4511 /* Helper function for or_comparisons_1: try to simplify the OR of the
4512 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4513 If INVERT is true, invert the value of VAR before doing the OR.
4514 Return NULL_EXPR if we can't simplify this to a single expression. */
4516 static tree
4517 or_var_with_comparison (tree var, bool invert,
4518 enum tree_code code2, tree op2a, tree op2b)
4520 tree t;
4521 gimple *stmt = SSA_NAME_DEF_STMT (var);
4523 /* We can only deal with variables whose definitions are assignments. */
4524 if (!is_gimple_assign (stmt))
4525 return NULL_TREE;
4527 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4528 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4529 Then we only have to consider the simpler non-inverted cases. */
4530 if (invert)
4531 t = and_var_with_comparison_1 (stmt,
4532 invert_tree_comparison (code2, false),
4533 op2a, op2b);
4534 else
4535 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4536 return canonicalize_bool (t, invert);
4539 /* Try to simplify the OR of the ssa variable defined by the assignment
4540 STMT with the comparison specified by (OP2A CODE2 OP2B).
4541 Return NULL_EXPR if we can't simplify this to a single expression. */
4543 static tree
4544 or_var_with_comparison_1 (gimple *stmt,
4545 enum tree_code code2, tree op2a, tree op2b)
4547 tree var = gimple_assign_lhs (stmt);
4548 tree true_test_var = NULL_TREE;
4549 tree false_test_var = NULL_TREE;
4550 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4552 /* Check for identities like (var OR (var != 0)) => true . */
4553 if (TREE_CODE (op2a) == SSA_NAME
4554 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4556 if ((code2 == NE_EXPR && integer_zerop (op2b))
4557 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4559 true_test_var = op2a;
4560 if (var == true_test_var)
4561 return var;
4563 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4564 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4566 false_test_var = op2a;
4567 if (var == false_test_var)
4568 return boolean_true_node;
4572 /* If the definition is a comparison, recurse on it. */
4573 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4575 tree t = or_comparisons_1 (innercode,
4576 gimple_assign_rhs1 (stmt),
4577 gimple_assign_rhs2 (stmt),
4578 code2,
4579 op2a,
4580 op2b);
4581 if (t)
4582 return t;
4585 /* If the definition is an AND or OR expression, we may be able to
4586 simplify by reassociating. */
4587 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4588 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4590 tree inner1 = gimple_assign_rhs1 (stmt);
4591 tree inner2 = gimple_assign_rhs2 (stmt);
4592 gimple *s;
4593 tree t;
4594 tree partial = NULL_TREE;
4595 bool is_or = (innercode == BIT_IOR_EXPR);
4597 /* Check for boolean identities that don't require recursive examination
4598 of inner1/inner2:
4599 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4600 inner1 OR (inner1 AND inner2) => inner1
4601 !inner1 OR (inner1 OR inner2) => true
4602 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4604 if (inner1 == true_test_var)
4605 return (is_or ? var : inner1);
4606 else if (inner2 == true_test_var)
4607 return (is_or ? var : inner2);
4608 else if (inner1 == false_test_var)
4609 return (is_or
4610 ? boolean_true_node
4611 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4612 else if (inner2 == false_test_var)
4613 return (is_or
4614 ? boolean_true_node
4615 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4617 /* Next, redistribute/reassociate the OR across the inner tests.
4618 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4619 if (TREE_CODE (inner1) == SSA_NAME
4620 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4621 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4622 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4623 gimple_assign_rhs1 (s),
4624 gimple_assign_rhs2 (s),
4625 code2, op2a, op2b)))
4627 /* Handle the OR case, where we are reassociating:
4628 (inner1 OR inner2) OR (op2a code2 op2b)
4629 => (t OR inner2)
4630 If the partial result t is a constant, we win. Otherwise
4631 continue on to try reassociating with the other inner test. */
4632 if (is_or)
4634 if (integer_onep (t))
4635 return boolean_true_node;
4636 else if (integer_zerop (t))
4637 return inner2;
4640 /* Handle the AND case, where we are redistributing:
4641 (inner1 AND inner2) OR (op2a code2 op2b)
4642 => (t AND (inner2 OR (op2a code op2b))) */
4643 else if (integer_zerop (t))
4644 return boolean_false_node;
4646 /* Save partial result for later. */
4647 partial = t;
4650 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4651 if (TREE_CODE (inner2) == SSA_NAME
4652 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4653 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4654 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4655 gimple_assign_rhs1 (s),
4656 gimple_assign_rhs2 (s),
4657 code2, op2a, op2b)))
4659 /* Handle the OR case, where we are reassociating:
4660 (inner1 OR inner2) OR (op2a code2 op2b)
4661 => (inner1 OR t)
4662 => (t OR partial) */
4663 if (is_or)
4665 if (integer_zerop (t))
4666 return inner1;
4667 else if (integer_onep (t))
4668 return boolean_true_node;
4669 /* If both are the same, we can apply the identity
4670 (x OR x) == x. */
4671 else if (partial && same_bool_result_p (t, partial))
4672 return t;
4675 /* Handle the AND case, where we are redistributing:
4676 (inner1 AND inner2) OR (op2a code2 op2b)
4677 => (t AND (inner1 OR (op2a code2 op2b)))
4678 => (t AND partial) */
4679 else
4681 if (integer_zerop (t))
4682 return boolean_false_node;
4683 else if (partial)
4685 /* We already got a simplification for the other
4686 operand to the redistributed AND expression. The
4687 interesting case is when at least one is true.
4688 Or, if both are the same, we can apply the identity
4689 (x AND x) == x. */
4690 if (integer_onep (partial))
4691 return t;
4692 else if (integer_onep (t))
4693 return partial;
4694 else if (same_bool_result_p (t, partial))
4695 return t;
4700 return NULL_TREE;
4703 /* Try to simplify the OR of two comparisons defined by
4704 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4705 If this can be done without constructing an intermediate value,
4706 return the resulting tree; otherwise NULL_TREE is returned.
4707 This function is deliberately asymmetric as it recurses on SSA_DEFs
4708 in the first comparison but not the second. */
4710 static tree
4711 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4712 enum tree_code code2, tree op2a, tree op2b)
4714 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4716 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4717 if (operand_equal_p (op1a, op2a, 0)
4718 && operand_equal_p (op1b, op2b, 0))
4720 /* Result will be either NULL_TREE, or a combined comparison. */
4721 tree t = combine_comparisons (UNKNOWN_LOCATION,
4722 TRUTH_ORIF_EXPR, code1, code2,
4723 truth_type, op1a, op1b);
4724 if (t)
4725 return t;
4728 /* Likewise the swapped case of the above. */
4729 if (operand_equal_p (op1a, op2b, 0)
4730 && operand_equal_p (op1b, op2a, 0))
4732 /* Result will be either NULL_TREE, or a combined comparison. */
4733 tree t = combine_comparisons (UNKNOWN_LOCATION,
4734 TRUTH_ORIF_EXPR, code1,
4735 swap_tree_comparison (code2),
4736 truth_type, op1a, op1b);
4737 if (t)
4738 return t;
4741 /* If both comparisons are of the same value against constants, we might
4742 be able to merge them. */
4743 if (operand_equal_p (op1a, op2a, 0)
4744 && TREE_CODE (op1b) == INTEGER_CST
4745 && TREE_CODE (op2b) == INTEGER_CST)
4747 int cmp = tree_int_cst_compare (op1b, op2b);
4749 /* If we have (op1a != op1b), we should either be able to
4750 return that or TRUE, depending on whether the constant op1b
4751 also satisfies the other comparison against op2b. */
4752 if (code1 == NE_EXPR)
4754 bool done = true;
4755 bool val;
4756 switch (code2)
4758 case EQ_EXPR: val = (cmp == 0); break;
4759 case NE_EXPR: val = (cmp != 0); break;
4760 case LT_EXPR: val = (cmp < 0); break;
4761 case GT_EXPR: val = (cmp > 0); break;
4762 case LE_EXPR: val = (cmp <= 0); break;
4763 case GE_EXPR: val = (cmp >= 0); break;
4764 default: done = false;
4766 if (done)
4768 if (val)
4769 return boolean_true_node;
4770 else
4771 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4774 /* Likewise if the second comparison is a != comparison. */
4775 else if (code2 == NE_EXPR)
4777 bool done = true;
4778 bool val;
4779 switch (code1)
4781 case EQ_EXPR: val = (cmp == 0); break;
4782 case NE_EXPR: val = (cmp != 0); break;
4783 case LT_EXPR: val = (cmp > 0); break;
4784 case GT_EXPR: val = (cmp < 0); break;
4785 case LE_EXPR: val = (cmp >= 0); break;
4786 case GE_EXPR: val = (cmp <= 0); break;
4787 default: done = false;
4789 if (done)
4791 if (val)
4792 return boolean_true_node;
4793 else
4794 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4798 /* See if an equality test is redundant with the other comparison. */
4799 else if (code1 == EQ_EXPR)
4801 bool val;
4802 switch (code2)
4804 case EQ_EXPR: val = (cmp == 0); break;
4805 case NE_EXPR: val = (cmp != 0); break;
4806 case LT_EXPR: val = (cmp < 0); break;
4807 case GT_EXPR: val = (cmp > 0); break;
4808 case LE_EXPR: val = (cmp <= 0); break;
4809 case GE_EXPR: val = (cmp >= 0); break;
4810 default:
4811 val = false;
4813 if (val)
4814 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4816 else if (code2 == EQ_EXPR)
4818 bool val;
4819 switch (code1)
4821 case EQ_EXPR: val = (cmp == 0); break;
4822 case NE_EXPR: val = (cmp != 0); break;
4823 case LT_EXPR: val = (cmp > 0); break;
4824 case GT_EXPR: val = (cmp < 0); break;
4825 case LE_EXPR: val = (cmp >= 0); break;
4826 case GE_EXPR: val = (cmp <= 0); break;
4827 default:
4828 val = false;
4830 if (val)
4831 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4834 /* Chose the less restrictive of two < or <= comparisons. */
4835 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4836 && (code2 == LT_EXPR || code2 == LE_EXPR))
4838 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4839 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4840 else
4841 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4844 /* Likewise chose the less restrictive of two > or >= comparisons. */
4845 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4846 && (code2 == GT_EXPR || code2 == GE_EXPR))
4848 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4849 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4850 else
4851 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4854 /* Check for singleton ranges. */
4855 else if (cmp == 0
4856 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4857 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4858 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4860 /* Check for less/greater pairs that don't restrict the range at all. */
4861 else if (cmp >= 0
4862 && (code1 == LT_EXPR || code1 == LE_EXPR)
4863 && (code2 == GT_EXPR || code2 == GE_EXPR))
4864 return boolean_true_node;
4865 else if (cmp <= 0
4866 && (code1 == GT_EXPR || code1 == GE_EXPR)
4867 && (code2 == LT_EXPR || code2 == LE_EXPR))
4868 return boolean_true_node;
4871 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4872 NAME's definition is a truth value. See if there are any simplifications
4873 that can be done against the NAME's definition. */
4874 if (TREE_CODE (op1a) == SSA_NAME
4875 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4876 && (integer_zerop (op1b) || integer_onep (op1b)))
4878 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4879 || (code1 == NE_EXPR && integer_onep (op1b)));
4880 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4881 switch (gimple_code (stmt))
4883 case GIMPLE_ASSIGN:
4884 /* Try to simplify by copy-propagating the definition. */
4885 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4887 case GIMPLE_PHI:
4888 /* If every argument to the PHI produces the same result when
4889 ORed with the second comparison, we win.
4890 Do not do this unless the type is bool since we need a bool
4891 result here anyway. */
4892 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4894 tree result = NULL_TREE;
4895 unsigned i;
4896 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4898 tree arg = gimple_phi_arg_def (stmt, i);
4900 /* If this PHI has itself as an argument, ignore it.
4901 If all the other args produce the same result,
4902 we're still OK. */
4903 if (arg == gimple_phi_result (stmt))
4904 continue;
4905 else if (TREE_CODE (arg) == INTEGER_CST)
4907 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4909 if (!result)
4910 result = boolean_true_node;
4911 else if (!integer_onep (result))
4912 return NULL_TREE;
4914 else if (!result)
4915 result = fold_build2 (code2, boolean_type_node,
4916 op2a, op2b);
4917 else if (!same_bool_comparison_p (result,
4918 code2, op2a, op2b))
4919 return NULL_TREE;
4921 else if (TREE_CODE (arg) == SSA_NAME
4922 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4924 tree temp;
4925 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4926 /* In simple cases we can look through PHI nodes,
4927 but we have to be careful with loops.
4928 See PR49073. */
4929 if (! dom_info_available_p (CDI_DOMINATORS)
4930 || gimple_bb (def_stmt) == gimple_bb (stmt)
4931 || dominated_by_p (CDI_DOMINATORS,
4932 gimple_bb (def_stmt),
4933 gimple_bb (stmt)))
4934 return NULL_TREE;
4935 temp = or_var_with_comparison (arg, invert, code2,
4936 op2a, op2b);
4937 if (!temp)
4938 return NULL_TREE;
4939 else if (!result)
4940 result = temp;
4941 else if (!same_bool_result_p (result, temp))
4942 return NULL_TREE;
4944 else
4945 return NULL_TREE;
4947 return result;
4950 default:
4951 break;
4954 return NULL_TREE;
4957 /* Try to simplify the OR of two comparisons, specified by
4958 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4959 If this can be simplified to a single expression (without requiring
4960 introducing more SSA variables to hold intermediate values),
4961 return the resulting tree. Otherwise return NULL_TREE.
4962 If the result expression is non-null, it has boolean type. */
4964 tree
4965 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4966 enum tree_code code2, tree op2a, tree op2b)
4968 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4969 if (t)
4970 return t;
4971 else
4972 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4976 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4978 Either NULL_TREE, a simplified but non-constant or a constant
4979 is returned.
4981 ??? This should go into a gimple-fold-inline.h file to be eventually
4982 privatized with the single valueize function used in the various TUs
4983 to avoid the indirect function call overhead. */
4985 tree
4986 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
4987 tree (*gvalueize) (tree))
4989 code_helper rcode;
4990 tree ops[3] = {};
4991 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4992 edges if there are intermediate VARYING defs. For this reason
4993 do not follow SSA edges here even though SCCVN can technically
4994 just deal fine with that. */
4995 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
4997 tree res = NULL_TREE;
4998 if (gimple_simplified_result_is_gimple_val (rcode, ops))
4999 res = ops[0];
5000 else if (mprts_hook)
5001 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5002 if (res)
5004 if (dump_file && dump_flags & TDF_DETAILS)
5006 fprintf (dump_file, "Match-and-simplified ");
5007 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5008 fprintf (dump_file, " to ");
5009 print_generic_expr (dump_file, res, 0);
5010 fprintf (dump_file, "\n");
5012 return res;
5016 location_t loc = gimple_location (stmt);
5017 switch (gimple_code (stmt))
5019 case GIMPLE_ASSIGN:
5021 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5023 switch (get_gimple_rhs_class (subcode))
5025 case GIMPLE_SINGLE_RHS:
5027 tree rhs = gimple_assign_rhs1 (stmt);
5028 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5030 if (TREE_CODE (rhs) == SSA_NAME)
5032 /* If the RHS is an SSA_NAME, return its known constant value,
5033 if any. */
5034 return (*valueize) (rhs);
5036 /* Handle propagating invariant addresses into address
5037 operations. */
5038 else if (TREE_CODE (rhs) == ADDR_EXPR
5039 && !is_gimple_min_invariant (rhs))
5041 HOST_WIDE_INT offset = 0;
5042 tree base;
5043 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5044 &offset,
5045 valueize);
5046 if (base
5047 && (CONSTANT_CLASS_P (base)
5048 || decl_address_invariant_p (base)))
5049 return build_invariant_address (TREE_TYPE (rhs),
5050 base, offset);
5052 else if (TREE_CODE (rhs) == CONSTRUCTOR
5053 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5054 && (CONSTRUCTOR_NELTS (rhs)
5055 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5057 unsigned i;
5058 tree val, *vec;
5060 vec = XALLOCAVEC (tree,
5061 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5062 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5064 val = (*valueize) (val);
5065 if (TREE_CODE (val) == INTEGER_CST
5066 || TREE_CODE (val) == REAL_CST
5067 || TREE_CODE (val) == FIXED_CST)
5068 vec[i] = val;
5069 else
5070 return NULL_TREE;
5073 return build_vector (TREE_TYPE (rhs), vec);
5075 if (subcode == OBJ_TYPE_REF)
5077 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5078 /* If callee is constant, we can fold away the wrapper. */
5079 if (is_gimple_min_invariant (val))
5080 return val;
5083 if (kind == tcc_reference)
5085 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5086 || TREE_CODE (rhs) == REALPART_EXPR
5087 || TREE_CODE (rhs) == IMAGPART_EXPR)
5088 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5090 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5091 return fold_unary_loc (EXPR_LOCATION (rhs),
5092 TREE_CODE (rhs),
5093 TREE_TYPE (rhs), val);
5095 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5096 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5098 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5099 return fold_ternary_loc (EXPR_LOCATION (rhs),
5100 TREE_CODE (rhs),
5101 TREE_TYPE (rhs), val,
5102 TREE_OPERAND (rhs, 1),
5103 TREE_OPERAND (rhs, 2));
5105 else if (TREE_CODE (rhs) == MEM_REF
5106 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5108 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5109 if (TREE_CODE (val) == ADDR_EXPR
5110 && is_gimple_min_invariant (val))
5112 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5113 unshare_expr (val),
5114 TREE_OPERAND (rhs, 1));
5115 if (tem)
5116 rhs = tem;
5119 return fold_const_aggregate_ref_1 (rhs, valueize);
5121 else if (kind == tcc_declaration)
5122 return get_symbol_constant_value (rhs);
5123 return rhs;
5126 case GIMPLE_UNARY_RHS:
5127 return NULL_TREE;
5129 case GIMPLE_BINARY_RHS:
5130 /* Translate &x + CST into an invariant form suitable for
5131 further propagation. */
5132 if (subcode == POINTER_PLUS_EXPR)
5134 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5135 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5136 if (TREE_CODE (op0) == ADDR_EXPR
5137 && TREE_CODE (op1) == INTEGER_CST)
5139 tree off = fold_convert (ptr_type_node, op1);
5140 return build_fold_addr_expr_loc
5141 (loc,
5142 fold_build2 (MEM_REF,
5143 TREE_TYPE (TREE_TYPE (op0)),
5144 unshare_expr (op0), off));
5147 /* Canonicalize bool != 0 and bool == 0 appearing after
5148 valueization. While gimple_simplify handles this
5149 it can get confused by the ~X == 1 -> X == 0 transform
5150 which we cant reduce to a SSA name or a constant
5151 (and we have no way to tell gimple_simplify to not
5152 consider those transforms in the first place). */
5153 else if (subcode == EQ_EXPR
5154 || subcode == NE_EXPR)
5156 tree lhs = gimple_assign_lhs (stmt);
5157 tree op0 = gimple_assign_rhs1 (stmt);
5158 if (useless_type_conversion_p (TREE_TYPE (lhs),
5159 TREE_TYPE (op0)))
5161 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5162 op0 = (*valueize) (op0);
5163 if (TREE_CODE (op0) == INTEGER_CST)
5164 std::swap (op0, op1);
5165 if (TREE_CODE (op1) == INTEGER_CST
5166 && ((subcode == NE_EXPR && integer_zerop (op1))
5167 || (subcode == EQ_EXPR && integer_onep (op1))))
5168 return op0;
5171 return NULL_TREE;
5173 case GIMPLE_TERNARY_RHS:
5175 /* Handle ternary operators that can appear in GIMPLE form. */
5176 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5177 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5178 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5179 return fold_ternary_loc (loc, subcode,
5180 gimple_expr_type (stmt), op0, op1, op2);
5183 default:
5184 gcc_unreachable ();
5188 case GIMPLE_CALL:
5190 tree fn;
5191 gcall *call_stmt = as_a <gcall *> (stmt);
5193 if (gimple_call_internal_p (stmt))
5195 enum tree_code subcode = ERROR_MARK;
5196 switch (gimple_call_internal_fn (stmt))
5198 case IFN_UBSAN_CHECK_ADD:
5199 subcode = PLUS_EXPR;
5200 break;
5201 case IFN_UBSAN_CHECK_SUB:
5202 subcode = MINUS_EXPR;
5203 break;
5204 case IFN_UBSAN_CHECK_MUL:
5205 subcode = MULT_EXPR;
5206 break;
5207 default:
5208 return NULL_TREE;
5210 tree arg0 = gimple_call_arg (stmt, 0);
5211 tree arg1 = gimple_call_arg (stmt, 1);
5212 tree op0 = (*valueize) (arg0);
5213 tree op1 = (*valueize) (arg1);
5215 if (TREE_CODE (op0) != INTEGER_CST
5216 || TREE_CODE (op1) != INTEGER_CST)
5218 switch (subcode)
5220 case MULT_EXPR:
5221 /* x * 0 = 0 * x = 0 without overflow. */
5222 if (integer_zerop (op0) || integer_zerop (op1))
5223 return build_zero_cst (TREE_TYPE (arg0));
5224 break;
5225 case MINUS_EXPR:
5226 /* y - y = 0 without overflow. */
5227 if (operand_equal_p (op0, op1, 0))
5228 return build_zero_cst (TREE_TYPE (arg0));
5229 break;
5230 default:
5231 break;
5234 tree res
5235 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5236 if (res
5237 && TREE_CODE (res) == INTEGER_CST
5238 && !TREE_OVERFLOW (res))
5239 return res;
5240 return NULL_TREE;
5243 fn = (*valueize) (gimple_call_fn (stmt));
5244 if (TREE_CODE (fn) == ADDR_EXPR
5245 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5246 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5247 && gimple_builtin_call_types_compatible_p (stmt,
5248 TREE_OPERAND (fn, 0)))
5250 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5251 tree retval;
5252 unsigned i;
5253 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5254 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5255 retval = fold_builtin_call_array (loc,
5256 gimple_call_return_type (call_stmt),
5257 fn, gimple_call_num_args (stmt), args);
5258 if (retval)
5260 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5261 STRIP_NOPS (retval);
5262 retval = fold_convert (gimple_call_return_type (call_stmt),
5263 retval);
5265 return retval;
5267 return NULL_TREE;
5270 default:
5271 return NULL_TREE;
5275 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5276 Returns NULL_TREE if folding to a constant is not possible, otherwise
5277 returns a constant according to is_gimple_min_invariant. */
5279 tree
5280 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5282 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5283 if (res && is_gimple_min_invariant (res))
5284 return res;
5285 return NULL_TREE;
5289 /* The following set of functions are supposed to fold references using
5290 their constant initializers. */
5292 /* See if we can find constructor defining value of BASE.
5293 When we know the consructor with constant offset (such as
5294 base is array[40] and we do know constructor of array), then
5295 BIT_OFFSET is adjusted accordingly.
5297 As a special case, return error_mark_node when constructor
5298 is not explicitly available, but it is known to be zero
5299 such as 'static const int a;'. */
5300 static tree
5301 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5302 tree (*valueize)(tree))
5304 HOST_WIDE_INT bit_offset2, size, max_size;
5305 bool reverse;
5307 if (TREE_CODE (base) == MEM_REF)
5309 if (!integer_zerop (TREE_OPERAND (base, 1)))
5311 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5312 return NULL_TREE;
5313 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5314 * BITS_PER_UNIT);
5317 if (valueize
5318 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5319 base = valueize (TREE_OPERAND (base, 0));
5320 if (!base || TREE_CODE (base) != ADDR_EXPR)
5321 return NULL_TREE;
5322 base = TREE_OPERAND (base, 0);
5325 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5326 DECL_INITIAL. If BASE is a nested reference into another
5327 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5328 the inner reference. */
5329 switch (TREE_CODE (base))
5331 case VAR_DECL:
5332 case CONST_DECL:
5334 tree init = ctor_for_folding (base);
5336 /* Our semantic is exact opposite of ctor_for_folding;
5337 NULL means unknown, while error_mark_node is 0. */
5338 if (init == error_mark_node)
5339 return NULL_TREE;
5340 if (!init)
5341 return error_mark_node;
5342 return init;
5345 case ARRAY_REF:
5346 case COMPONENT_REF:
5347 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5348 &reverse);
5349 if (max_size == -1 || size != max_size)
5350 return NULL_TREE;
5351 *bit_offset += bit_offset2;
5352 return get_base_constructor (base, bit_offset, valueize);
5354 case STRING_CST:
5355 case CONSTRUCTOR:
5356 return base;
5358 default:
5359 return NULL_TREE;
5363 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5364 SIZE to the memory at bit OFFSET. */
5366 static tree
5367 fold_array_ctor_reference (tree type, tree ctor,
5368 unsigned HOST_WIDE_INT offset,
5369 unsigned HOST_WIDE_INT size,
5370 tree from_decl)
5372 offset_int low_bound;
5373 offset_int elt_size;
5374 offset_int access_index;
5375 tree domain_type = NULL_TREE;
5376 HOST_WIDE_INT inner_offset;
5378 /* Compute low bound and elt size. */
5379 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5380 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5381 if (domain_type && TYPE_MIN_VALUE (domain_type))
5383 /* Static constructors for variably sized objects makes no sense. */
5384 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
5385 return NULL_TREE;
5386 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5388 else
5389 low_bound = 0;
5390 /* Static constructors for variably sized objects makes no sense. */
5391 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
5392 return NULL_TREE;
5393 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5395 /* We can handle only constantly sized accesses that are known to not
5396 be larger than size of array element. */
5397 if (!TYPE_SIZE_UNIT (type)
5398 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5399 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5400 || elt_size == 0)
5401 return NULL_TREE;
5403 /* Compute the array index we look for. */
5404 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5405 elt_size);
5406 access_index += low_bound;
5408 /* And offset within the access. */
5409 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5411 /* See if the array field is large enough to span whole access. We do not
5412 care to fold accesses spanning multiple array indexes. */
5413 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5414 return NULL_TREE;
5415 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5416 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
5418 /* When memory is not explicitely mentioned in constructor,
5419 it is 0 (or out of range). */
5420 return build_zero_cst (type);
5423 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5424 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5426 static tree
5427 fold_nonarray_ctor_reference (tree type, tree ctor,
5428 unsigned HOST_WIDE_INT offset,
5429 unsigned HOST_WIDE_INT size,
5430 tree from_decl)
5432 unsigned HOST_WIDE_INT cnt;
5433 tree cfield, cval;
5435 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5436 cval)
5438 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5439 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5440 tree field_size = DECL_SIZE (cfield);
5441 offset_int bitoffset;
5442 offset_int bitoffset_end, access_end;
5444 /* Variable sized objects in static constructors makes no sense,
5445 but field_size can be NULL for flexible array members. */
5446 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5447 && TREE_CODE (byte_offset) == INTEGER_CST
5448 && (field_size != NULL_TREE
5449 ? TREE_CODE (field_size) == INTEGER_CST
5450 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5452 /* Compute bit offset of the field. */
5453 bitoffset = (wi::to_offset (field_offset)
5454 + wi::lshift (wi::to_offset (byte_offset),
5455 LOG2_BITS_PER_UNIT));
5456 /* Compute bit offset where the field ends. */
5457 if (field_size != NULL_TREE)
5458 bitoffset_end = bitoffset + wi::to_offset (field_size);
5459 else
5460 bitoffset_end = 0;
5462 access_end = offset_int (offset) + size;
5464 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5465 [BITOFFSET, BITOFFSET_END)? */
5466 if (wi::cmps (access_end, bitoffset) > 0
5467 && (field_size == NULL_TREE
5468 || wi::lts_p (offset, bitoffset_end)))
5470 offset_int inner_offset = offset_int (offset) - bitoffset;
5471 /* We do have overlap. Now see if field is large enough to
5472 cover the access. Give up for accesses spanning multiple
5473 fields. */
5474 if (wi::cmps (access_end, bitoffset_end) > 0)
5475 return NULL_TREE;
5476 if (wi::lts_p (offset, bitoffset))
5477 return NULL_TREE;
5478 return fold_ctor_reference (type, cval,
5479 inner_offset.to_uhwi (), size,
5480 from_decl);
5483 /* When memory is not explicitely mentioned in constructor, it is 0. */
5484 return build_zero_cst (type);
5487 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5488 to the memory at bit OFFSET. */
5490 tree
5491 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5492 unsigned HOST_WIDE_INT size, tree from_decl)
5494 tree ret;
5496 /* We found the field with exact match. */
5497 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5498 && !offset)
5499 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5501 /* We are at the end of walk, see if we can view convert the
5502 result. */
5503 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5504 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5505 && !compare_tree_int (TYPE_SIZE (type), size)
5506 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5508 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5509 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5510 if (ret)
5511 STRIP_USELESS_TYPE_CONVERSION (ret);
5512 return ret;
5514 /* For constants and byte-aligned/sized reads try to go through
5515 native_encode/interpret. */
5516 if (CONSTANT_CLASS_P (ctor)
5517 && BITS_PER_UNIT == 8
5518 && offset % BITS_PER_UNIT == 0
5519 && size % BITS_PER_UNIT == 0
5520 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5522 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5523 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5524 offset / BITS_PER_UNIT);
5525 if (len > 0)
5526 return native_interpret_expr (type, buf, len);
5528 if (TREE_CODE (ctor) == CONSTRUCTOR)
5531 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5532 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5533 return fold_array_ctor_reference (type, ctor, offset, size,
5534 from_decl);
5535 else
5536 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5537 from_decl);
5540 return NULL_TREE;
5543 /* Return the tree representing the element referenced by T if T is an
5544 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5545 names using VALUEIZE. Return NULL_TREE otherwise. */
5547 tree
5548 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5550 tree ctor, idx, base;
5551 HOST_WIDE_INT offset, size, max_size;
5552 tree tem;
5553 bool reverse;
5555 if (TREE_THIS_VOLATILE (t))
5556 return NULL_TREE;
5558 if (DECL_P (t))
5559 return get_symbol_constant_value (t);
5561 tem = fold_read_from_constant_string (t);
5562 if (tem)
5563 return tem;
5565 switch (TREE_CODE (t))
5567 case ARRAY_REF:
5568 case ARRAY_RANGE_REF:
5569 /* Constant indexes are handled well by get_base_constructor.
5570 Only special case variable offsets.
5571 FIXME: This code can't handle nested references with variable indexes
5572 (they will be handled only by iteration of ccp). Perhaps we can bring
5573 get_ref_base_and_extent here and make it use a valueize callback. */
5574 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5575 && valueize
5576 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5577 && TREE_CODE (idx) == INTEGER_CST)
5579 tree low_bound, unit_size;
5581 /* If the resulting bit-offset is constant, track it. */
5582 if ((low_bound = array_ref_low_bound (t),
5583 TREE_CODE (low_bound) == INTEGER_CST)
5584 && (unit_size = array_ref_element_size (t),
5585 tree_fits_uhwi_p (unit_size)))
5587 offset_int woffset
5588 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5589 TYPE_PRECISION (TREE_TYPE (idx)));
5591 if (wi::fits_shwi_p (woffset))
5593 offset = woffset.to_shwi ();
5594 /* TODO: This code seems wrong, multiply then check
5595 to see if it fits. */
5596 offset *= tree_to_uhwi (unit_size);
5597 offset *= BITS_PER_UNIT;
5599 base = TREE_OPERAND (t, 0);
5600 ctor = get_base_constructor (base, &offset, valueize);
5601 /* Empty constructor. Always fold to 0. */
5602 if (ctor == error_mark_node)
5603 return build_zero_cst (TREE_TYPE (t));
5604 /* Out of bound array access. Value is undefined,
5605 but don't fold. */
5606 if (offset < 0)
5607 return NULL_TREE;
5608 /* We can not determine ctor. */
5609 if (!ctor)
5610 return NULL_TREE;
5611 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5612 tree_to_uhwi (unit_size)
5613 * BITS_PER_UNIT,
5614 base);
5618 /* Fallthru. */
5620 case COMPONENT_REF:
5621 case BIT_FIELD_REF:
5622 case TARGET_MEM_REF:
5623 case MEM_REF:
5624 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
5625 ctor = get_base_constructor (base, &offset, valueize);
5627 /* Empty constructor. Always fold to 0. */
5628 if (ctor == error_mark_node)
5629 return build_zero_cst (TREE_TYPE (t));
5630 /* We do not know precise address. */
5631 if (max_size == -1 || max_size != size)
5632 return NULL_TREE;
5633 /* We can not determine ctor. */
5634 if (!ctor)
5635 return NULL_TREE;
5637 /* Out of bound array access. Value is undefined, but don't fold. */
5638 if (offset < 0)
5639 return NULL_TREE;
5641 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5642 base);
5644 case REALPART_EXPR:
5645 case IMAGPART_EXPR:
5647 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5648 if (c && TREE_CODE (c) == COMPLEX_CST)
5649 return fold_build1_loc (EXPR_LOCATION (t),
5650 TREE_CODE (t), TREE_TYPE (t), c);
5651 break;
5654 default:
5655 break;
5658 return NULL_TREE;
5661 tree
5662 fold_const_aggregate_ref (tree t)
5664 return fold_const_aggregate_ref_1 (t, NULL);
5667 /* Lookup virtual method with index TOKEN in a virtual table V
5668 at OFFSET.
5669 Set CAN_REFER if non-NULL to false if method
5670 is not referable or if the virtual table is ill-formed (such as rewriten
5671 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5673 tree
5674 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5675 tree v,
5676 unsigned HOST_WIDE_INT offset,
5677 bool *can_refer)
5679 tree vtable = v, init, fn;
5680 unsigned HOST_WIDE_INT size;
5681 unsigned HOST_WIDE_INT elt_size, access_index;
5682 tree domain_type;
5684 if (can_refer)
5685 *can_refer = true;
5687 /* First of all double check we have virtual table. */
5688 if (TREE_CODE (v) != VAR_DECL
5689 || !DECL_VIRTUAL_P (v))
5691 /* Pass down that we lost track of the target. */
5692 if (can_refer)
5693 *can_refer = false;
5694 return NULL_TREE;
5697 init = ctor_for_folding (v);
5699 /* The virtual tables should always be born with constructors
5700 and we always should assume that they are avaialble for
5701 folding. At the moment we do not stream them in all cases,
5702 but it should never happen that ctor seem unreachable. */
5703 gcc_assert (init);
5704 if (init == error_mark_node)
5706 gcc_assert (in_lto_p);
5707 /* Pass down that we lost track of the target. */
5708 if (can_refer)
5709 *can_refer = false;
5710 return NULL_TREE;
5712 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5713 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5714 offset *= BITS_PER_UNIT;
5715 offset += token * size;
5717 /* Lookup the value in the constructor that is assumed to be array.
5718 This is equivalent to
5719 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5720 offset, size, NULL);
5721 but in a constant time. We expect that frontend produced a simple
5722 array without indexed initializers. */
5724 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5725 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5726 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5727 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5729 access_index = offset / BITS_PER_UNIT / elt_size;
5730 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5732 /* This code makes an assumption that there are no
5733 indexed fileds produced by C++ FE, so we can directly index the array. */
5734 if (access_index < CONSTRUCTOR_NELTS (init))
5736 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5737 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5738 STRIP_NOPS (fn);
5740 else
5741 fn = NULL;
5743 /* For type inconsistent program we may end up looking up virtual method
5744 in virtual table that does not contain TOKEN entries. We may overrun
5745 the virtual table and pick up a constant or RTTI info pointer.
5746 In any case the call is undefined. */
5747 if (!fn
5748 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5749 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5750 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5751 else
5753 fn = TREE_OPERAND (fn, 0);
5755 /* When cgraph node is missing and function is not public, we cannot
5756 devirtualize. This can happen in WHOPR when the actual method
5757 ends up in other partition, because we found devirtualization
5758 possibility too late. */
5759 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5761 if (can_refer)
5763 *can_refer = false;
5764 return fn;
5766 return NULL_TREE;
5770 /* Make sure we create a cgraph node for functions we'll reference.
5771 They can be non-existent if the reference comes from an entry
5772 of an external vtable for example. */
5773 cgraph_node::get_create (fn);
5775 return fn;
5778 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5779 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5780 KNOWN_BINFO carries the binfo describing the true type of
5781 OBJ_TYPE_REF_OBJECT(REF).
5782 Set CAN_REFER if non-NULL to false if method
5783 is not referable or if the virtual table is ill-formed (such as rewriten
5784 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5786 tree
5787 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5788 bool *can_refer)
5790 unsigned HOST_WIDE_INT offset;
5791 tree v;
5793 v = BINFO_VTABLE (known_binfo);
5794 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5795 if (!v)
5796 return NULL_TREE;
5798 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5800 if (can_refer)
5801 *can_refer = false;
5802 return NULL_TREE;
5804 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5807 /* Given a pointer value OP0, return a simplified version of an
5808 indirection through OP0, or NULL_TREE if no simplification is
5809 possible. Note that the resulting type may be different from
5810 the type pointed to in the sense that it is still compatible
5811 from the langhooks point of view. */
5813 tree
5814 gimple_fold_indirect_ref (tree t)
5816 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5817 tree sub = t;
5818 tree subtype;
5820 STRIP_NOPS (sub);
5821 subtype = TREE_TYPE (sub);
5822 if (!POINTER_TYPE_P (subtype))
5823 return NULL_TREE;
5825 if (TREE_CODE (sub) == ADDR_EXPR)
5827 tree op = TREE_OPERAND (sub, 0);
5828 tree optype = TREE_TYPE (op);
5829 /* *&p => p */
5830 if (useless_type_conversion_p (type, optype))
5831 return op;
5833 /* *(foo *)&fooarray => fooarray[0] */
5834 if (TREE_CODE (optype) == ARRAY_TYPE
5835 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5836 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5838 tree type_domain = TYPE_DOMAIN (optype);
5839 tree min_val = size_zero_node;
5840 if (type_domain && TYPE_MIN_VALUE (type_domain))
5841 min_val = TYPE_MIN_VALUE (type_domain);
5842 if (TREE_CODE (min_val) == INTEGER_CST)
5843 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5845 /* *(foo *)&complexfoo => __real__ complexfoo */
5846 else if (TREE_CODE (optype) == COMPLEX_TYPE
5847 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5848 return fold_build1 (REALPART_EXPR, type, op);
5849 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5850 else if (TREE_CODE (optype) == VECTOR_TYPE
5851 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5853 tree part_width = TYPE_SIZE (type);
5854 tree index = bitsize_int (0);
5855 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5859 /* *(p + CST) -> ... */
5860 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5861 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5863 tree addr = TREE_OPERAND (sub, 0);
5864 tree off = TREE_OPERAND (sub, 1);
5865 tree addrtype;
5867 STRIP_NOPS (addr);
5868 addrtype = TREE_TYPE (addr);
5870 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5871 if (TREE_CODE (addr) == ADDR_EXPR
5872 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5873 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5874 && tree_fits_uhwi_p (off))
5876 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5877 tree part_width = TYPE_SIZE (type);
5878 unsigned HOST_WIDE_INT part_widthi
5879 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5880 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5881 tree index = bitsize_int (indexi);
5882 if (offset / part_widthi
5883 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5884 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5885 part_width, index);
5888 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5889 if (TREE_CODE (addr) == ADDR_EXPR
5890 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5891 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5893 tree size = TYPE_SIZE_UNIT (type);
5894 if (tree_int_cst_equal (size, off))
5895 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5898 /* *(p + CST) -> MEM_REF <p, CST>. */
5899 if (TREE_CODE (addr) != ADDR_EXPR
5900 || DECL_P (TREE_OPERAND (addr, 0)))
5901 return fold_build2 (MEM_REF, type,
5902 addr,
5903 wide_int_to_tree (ptype, off));
5906 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5907 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5908 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5909 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5911 tree type_domain;
5912 tree min_val = size_zero_node;
5913 tree osub = sub;
5914 sub = gimple_fold_indirect_ref (sub);
5915 if (! sub)
5916 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5917 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5918 if (type_domain && TYPE_MIN_VALUE (type_domain))
5919 min_val = TYPE_MIN_VALUE (type_domain);
5920 if (TREE_CODE (min_val) == INTEGER_CST)
5921 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5924 return NULL_TREE;
5927 /* Return true if CODE is an operation that when operating on signed
5928 integer types involves undefined behavior on overflow and the
5929 operation can be expressed with unsigned arithmetic. */
5931 bool
5932 arith_code_with_undefined_signed_overflow (tree_code code)
5934 switch (code)
5936 case PLUS_EXPR:
5937 case MINUS_EXPR:
5938 case MULT_EXPR:
5939 case NEGATE_EXPR:
5940 case POINTER_PLUS_EXPR:
5941 return true;
5942 default:
5943 return false;
5947 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5948 operation that can be transformed to unsigned arithmetic by converting
5949 its operand, carrying out the operation in the corresponding unsigned
5950 type and converting the result back to the original type.
5952 Returns a sequence of statements that replace STMT and also contain
5953 a modified form of STMT itself. */
5955 gimple_seq
5956 rewrite_to_defined_overflow (gimple *stmt)
5958 if (dump_file && (dump_flags & TDF_DETAILS))
5960 fprintf (dump_file, "rewriting stmt with undefined signed "
5961 "overflow ");
5962 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5965 tree lhs = gimple_assign_lhs (stmt);
5966 tree type = unsigned_type_for (TREE_TYPE (lhs));
5967 gimple_seq stmts = NULL;
5968 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5970 tree op = gimple_op (stmt, i);
5971 op = gimple_convert (&stmts, type, op);
5972 gimple_set_op (stmt, i, op);
5974 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5975 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5976 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5977 gimple_seq_add_stmt (&stmts, stmt);
5978 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
5979 gimple_seq_add_stmt (&stmts, cvt);
5981 return stmts;
5985 /* The valueization hook we use for the gimple_build API simplification.
5986 This makes us match fold_buildN behavior by only combining with
5987 statements in the sequence(s) we are currently building. */
5989 static tree
5990 gimple_build_valueize (tree op)
5992 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5993 return op;
5994 return NULL_TREE;
5997 /* Build the expression CODE OP0 of type TYPE with location LOC,
5998 simplifying it first if possible. Returns the built
5999 expression value and appends statements possibly defining it
6000 to SEQ. */
6002 tree
6003 gimple_build (gimple_seq *seq, location_t loc,
6004 enum tree_code code, tree type, tree op0)
6006 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6007 if (!res)
6009 if (gimple_in_ssa_p (cfun))
6010 res = make_ssa_name (type);
6011 else
6012 res = create_tmp_reg (type);
6013 gimple *stmt;
6014 if (code == REALPART_EXPR
6015 || code == IMAGPART_EXPR
6016 || code == VIEW_CONVERT_EXPR)
6017 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6018 else
6019 stmt = gimple_build_assign (res, code, op0);
6020 gimple_set_location (stmt, loc);
6021 gimple_seq_add_stmt_without_update (seq, stmt);
6023 return res;
6026 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6027 simplifying it first if possible. Returns the built
6028 expression value and appends statements possibly defining it
6029 to SEQ. */
6031 tree
6032 gimple_build (gimple_seq *seq, location_t loc,
6033 enum tree_code code, tree type, tree op0, tree op1)
6035 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6036 if (!res)
6038 if (gimple_in_ssa_p (cfun))
6039 res = make_ssa_name (type);
6040 else
6041 res = create_tmp_reg (type);
6042 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6043 gimple_set_location (stmt, loc);
6044 gimple_seq_add_stmt_without_update (seq, stmt);
6046 return res;
6049 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6050 simplifying it first if possible. Returns the built
6051 expression value and appends statements possibly defining it
6052 to SEQ. */
6054 tree
6055 gimple_build (gimple_seq *seq, location_t loc,
6056 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6058 tree res = gimple_simplify (code, type, op0, op1, op2,
6059 seq, gimple_build_valueize);
6060 if (!res)
6062 if (gimple_in_ssa_p (cfun))
6063 res = make_ssa_name (type);
6064 else
6065 res = create_tmp_reg (type);
6066 gimple *stmt;
6067 if (code == BIT_FIELD_REF)
6068 stmt = gimple_build_assign (res, code,
6069 build3 (code, type, op0, op1, op2));
6070 else
6071 stmt = gimple_build_assign (res, code, op0, op1, op2);
6072 gimple_set_location (stmt, loc);
6073 gimple_seq_add_stmt_without_update (seq, stmt);
6075 return res;
6078 /* Build the call FN (ARG0) with a result of type TYPE
6079 (or no result if TYPE is void) with location LOC,
6080 simplifying it first if possible. Returns the built
6081 expression value (or NULL_TREE if TYPE is void) and appends
6082 statements possibly defining it to SEQ. */
6084 tree
6085 gimple_build (gimple_seq *seq, location_t loc,
6086 enum built_in_function fn, tree type, tree arg0)
6088 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6089 if (!res)
6091 tree decl = builtin_decl_implicit (fn);
6092 gimple *stmt = gimple_build_call (decl, 1, arg0);
6093 if (!VOID_TYPE_P (type))
6095 if (gimple_in_ssa_p (cfun))
6096 res = make_ssa_name (type);
6097 else
6098 res = create_tmp_reg (type);
6099 gimple_call_set_lhs (stmt, res);
6101 gimple_set_location (stmt, loc);
6102 gimple_seq_add_stmt_without_update (seq, stmt);
6104 return res;
6107 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6108 (or no result if TYPE is void) with location LOC,
6109 simplifying it first if possible. Returns the built
6110 expression value (or NULL_TREE if TYPE is void) and appends
6111 statements possibly defining it to SEQ. */
6113 tree
6114 gimple_build (gimple_seq *seq, location_t loc,
6115 enum built_in_function fn, tree type, tree arg0, tree arg1)
6117 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6118 if (!res)
6120 tree decl = builtin_decl_implicit (fn);
6121 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6122 if (!VOID_TYPE_P (type))
6124 if (gimple_in_ssa_p (cfun))
6125 res = make_ssa_name (type);
6126 else
6127 res = create_tmp_reg (type);
6128 gimple_call_set_lhs (stmt, res);
6130 gimple_set_location (stmt, loc);
6131 gimple_seq_add_stmt_without_update (seq, stmt);
6133 return res;
6136 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6137 (or no result if TYPE is void) with location LOC,
6138 simplifying it first if possible. Returns the built
6139 expression value (or NULL_TREE if TYPE is void) and appends
6140 statements possibly defining it to SEQ. */
6142 tree
6143 gimple_build (gimple_seq *seq, location_t loc,
6144 enum built_in_function fn, tree type,
6145 tree arg0, tree arg1, tree arg2)
6147 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6148 seq, gimple_build_valueize);
6149 if (!res)
6151 tree decl = builtin_decl_implicit (fn);
6152 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6153 if (!VOID_TYPE_P (type))
6155 if (gimple_in_ssa_p (cfun))
6156 res = make_ssa_name (type);
6157 else
6158 res = create_tmp_reg (type);
6159 gimple_call_set_lhs (stmt, res);
6161 gimple_set_location (stmt, loc);
6162 gimple_seq_add_stmt_without_update (seq, stmt);
6164 return res;
6167 /* Build the conversion (TYPE) OP with a result of type TYPE
6168 with location LOC if such conversion is neccesary in GIMPLE,
6169 simplifying it first.
6170 Returns the built expression value and appends
6171 statements possibly defining it to SEQ. */
6173 tree
6174 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6176 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6177 return op;
6178 return gimple_build (seq, loc, NOP_EXPR, type, op);
6181 /* Build the conversion (ptrofftype) OP with a result of a type
6182 compatible with ptrofftype with location LOC if such conversion
6183 is neccesary in GIMPLE, simplifying it first.
6184 Returns the built expression value and appends
6185 statements possibly defining it to SEQ. */
6187 tree
6188 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6190 if (ptrofftype_p (TREE_TYPE (op)))
6191 return op;
6192 return gimple_convert (seq, loc, sizetype, op);
6195 /* Return true if the result of assignment STMT is known to be non-negative.
6196 If the return value is based on the assumption that signed overflow is
6197 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6198 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6200 static bool
6201 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6202 int depth)
6204 enum tree_code code = gimple_assign_rhs_code (stmt);
6205 switch (get_gimple_rhs_class (code))
6207 case GIMPLE_UNARY_RHS:
6208 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6209 gimple_expr_type (stmt),
6210 gimple_assign_rhs1 (stmt),
6211 strict_overflow_p, depth);
6212 case GIMPLE_BINARY_RHS:
6213 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6214 gimple_expr_type (stmt),
6215 gimple_assign_rhs1 (stmt),
6216 gimple_assign_rhs2 (stmt),
6217 strict_overflow_p, depth);
6218 case GIMPLE_TERNARY_RHS:
6219 return false;
6220 case GIMPLE_SINGLE_RHS:
6221 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6222 strict_overflow_p, depth);
6223 case GIMPLE_INVALID_RHS:
6224 break;
6226 gcc_unreachable ();
6229 /* Return true if return value of call STMT is known to be non-negative.
6230 If the return value is based on the assumption that signed overflow is
6231 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6232 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6234 static bool
6235 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6236 int depth)
6238 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6239 gimple_call_arg (stmt, 0) : NULL_TREE;
6240 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6241 gimple_call_arg (stmt, 1) : NULL_TREE;
6243 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6244 gimple_call_combined_fn (stmt),
6245 arg0,
6246 arg1,
6247 strict_overflow_p, depth);
6250 /* Return true if return value of call STMT is known to be non-negative.
6251 If the return value is based on the assumption that signed overflow is
6252 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6253 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6255 static bool
6256 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6257 int depth)
6259 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6261 tree arg = gimple_phi_arg_def (stmt, i);
6262 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6263 return false;
6265 return true;
6268 /* Return true if STMT is known to compute a non-negative value.
6269 If the return value is based on the assumption that signed overflow is
6270 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6271 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6273 bool
6274 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6275 int depth)
6277 switch (gimple_code (stmt))
6279 case GIMPLE_ASSIGN:
6280 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6281 depth);
6282 case GIMPLE_CALL:
6283 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6284 depth);
6285 case GIMPLE_PHI:
6286 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6287 depth);
6288 default:
6289 return false;
6293 /* Return true if the floating-point value computed by assignment STMT
6294 is known to have an integer value. We also allow +Inf, -Inf and NaN
6295 to be considered integer values. Return false for signaling NaN.
6297 DEPTH is the current nesting depth of the query. */
6299 static bool
6300 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6302 enum tree_code code = gimple_assign_rhs_code (stmt);
6303 switch (get_gimple_rhs_class (code))
6305 case GIMPLE_UNARY_RHS:
6306 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6307 gimple_assign_rhs1 (stmt), depth);
6308 case GIMPLE_BINARY_RHS:
6309 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6310 gimple_assign_rhs1 (stmt),
6311 gimple_assign_rhs2 (stmt), depth);
6312 case GIMPLE_TERNARY_RHS:
6313 return false;
6314 case GIMPLE_SINGLE_RHS:
6315 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6316 case GIMPLE_INVALID_RHS:
6317 break;
6319 gcc_unreachable ();
6322 /* Return true if the floating-point value computed by call STMT is known
6323 to have an integer value. We also allow +Inf, -Inf and NaN to be
6324 considered integer values. Return false for signaling NaN.
6326 DEPTH is the current nesting depth of the query. */
6328 static bool
6329 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6331 tree arg0 = (gimple_call_num_args (stmt) > 0
6332 ? gimple_call_arg (stmt, 0)
6333 : NULL_TREE);
6334 tree arg1 = (gimple_call_num_args (stmt) > 1
6335 ? gimple_call_arg (stmt, 1)
6336 : NULL_TREE);
6337 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6338 arg0, arg1, depth);
6341 /* Return true if the floating-point result of phi STMT is known to have
6342 an integer value. We also allow +Inf, -Inf and NaN to be considered
6343 integer values. Return false for signaling NaN.
6345 DEPTH is the current nesting depth of the query. */
6347 static bool
6348 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6350 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6352 tree arg = gimple_phi_arg_def (stmt, i);
6353 if (!integer_valued_real_single_p (arg, depth + 1))
6354 return false;
6356 return true;
6359 /* Return true if the floating-point value computed by STMT is known
6360 to have an integer value. We also allow +Inf, -Inf and NaN to be
6361 considered integer values. Return false for signaling NaN.
6363 DEPTH is the current nesting depth of the query. */
6365 bool
6366 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6368 switch (gimple_code (stmt))
6370 case GIMPLE_ASSIGN:
6371 return gimple_assign_integer_valued_real_p (stmt, depth);
6372 case GIMPLE_CALL:
6373 return gimple_call_integer_valued_real_p (stmt, depth);
6374 case GIMPLE_PHI:
6375 return gimple_phi_integer_valued_real_p (stmt, depth);
6376 default:
6377 return false;