Remove assert in get_def_bb_for_const
[official-gcc.git] / gcc / gimple-fold.c
blob600aa72f217185f17230d4bf774f2b66e7e5aa85
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-low.h"
56 #include "ipa-chkp.h"
57 #include "tree-cfg.h"
60 /* Return true when DECL can be referenced from current unit.
61 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
62 We can get declarations that are not possible to reference for various
63 reasons:
65 1) When analyzing C++ virtual tables.
66 C++ virtual tables do have known constructors even
67 when they are keyed to other compilation unit.
68 Those tables can contain pointers to methods and vars
69 in other units. Those methods have both STATIC and EXTERNAL
70 set.
71 2) In WHOPR mode devirtualization might lead to reference
72 to method that was partitioned elsehwere.
73 In this case we have static VAR_DECL or FUNCTION_DECL
74 that has no corresponding callgraph/varpool node
75 declaring the body.
76 3) COMDAT functions referred by external vtables that
77 we devirtualize only during final compilation stage.
78 At this time we already decided that we will not output
79 the function body and thus we can't reference the symbol
80 directly. */
82 static bool
83 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
85 varpool_node *vnode;
86 struct cgraph_node *node;
87 symtab_node *snode;
89 if (DECL_ABSTRACT_P (decl))
90 return false;
92 /* We are concerned only about static/external vars and functions. */
93 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
94 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
95 return true;
97 /* Static objects can be referred only if they was not optimized out yet. */
98 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
100 /* Before we start optimizing unreachable code we can be sure all
101 static objects are defined. */
102 if (symtab->function_flags_ready)
103 return true;
104 snode = symtab_node::get (decl);
105 if (!snode || !snode->definition)
106 return false;
107 node = dyn_cast <cgraph_node *> (snode);
108 return !node || !node->global.inlined_to;
111 /* We will later output the initializer, so we can refer to it.
112 So we are concerned only when DECL comes from initializer of
113 external var or var that has been optimized out. */
114 if (!from_decl
115 || TREE_CODE (from_decl) != VAR_DECL
116 || (!DECL_EXTERNAL (from_decl)
117 && (vnode = varpool_node::get (from_decl)) != NULL
118 && vnode->definition)
119 || (flag_ltrans
120 && (vnode = varpool_node::get (from_decl)) != NULL
121 && vnode->in_other_partition))
122 return true;
123 /* We are folding reference from external vtable. The vtable may reffer
124 to a symbol keyed to other compilation unit. The other compilation
125 unit may be in separate DSO and the symbol may be hidden. */
126 if (DECL_VISIBILITY_SPECIFIED (decl)
127 && DECL_EXTERNAL (decl)
128 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
129 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
130 return false;
131 /* When function is public, we always can introduce new reference.
132 Exception are the COMDAT functions where introducing a direct
133 reference imply need to include function body in the curren tunit. */
134 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
135 return true;
136 /* We have COMDAT. We are going to check if we still have definition
137 or if the definition is going to be output in other partition.
138 Bypass this when gimplifying; all needed functions will be produced.
140 As observed in PR20991 for already optimized out comdat virtual functions
141 it may be tempting to not necessarily give up because the copy will be
142 output elsewhere when corresponding vtable is output.
143 This is however not possible - ABI specify that COMDATs are output in
144 units where they are used and when the other unit was compiled with LTO
145 it is possible that vtable was kept public while the function itself
146 was privatized. */
147 if (!symtab->function_flags_ready)
148 return true;
150 snode = symtab_node::get (decl);
151 if (!snode
152 || ((!snode->definition || DECL_EXTERNAL (decl))
153 && (!snode->in_other_partition
154 || (!snode->forced_by_abi && !snode->force_output))))
155 return false;
156 node = dyn_cast <cgraph_node *> (snode);
157 return !node || !node->global.inlined_to;
160 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
161 acceptable form for is_gimple_min_invariant.
162 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
164 tree
165 canonicalize_constructor_val (tree cval, tree from_decl)
167 tree orig_cval = cval;
168 STRIP_NOPS (cval);
169 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
170 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
172 tree ptr = TREE_OPERAND (cval, 0);
173 if (is_gimple_min_invariant (ptr))
174 cval = build1_loc (EXPR_LOCATION (cval),
175 ADDR_EXPR, TREE_TYPE (ptr),
176 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
177 ptr,
178 fold_convert (ptr_type_node,
179 TREE_OPERAND (cval, 1))));
181 if (TREE_CODE (cval) == ADDR_EXPR)
183 tree base = NULL_TREE;
184 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
186 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
187 if (base)
188 TREE_OPERAND (cval, 0) = base;
190 else
191 base = get_base_address (TREE_OPERAND (cval, 0));
192 if (!base)
193 return NULL_TREE;
195 if ((TREE_CODE (base) == VAR_DECL
196 || TREE_CODE (base) == FUNCTION_DECL)
197 && !can_refer_decl_in_current_unit_p (base, from_decl))
198 return NULL_TREE;
199 if (TREE_TYPE (base) == error_mark_node)
200 return NULL_TREE;
201 if (TREE_CODE (base) == VAR_DECL)
202 TREE_ADDRESSABLE (base) = 1;
203 else if (TREE_CODE (base) == FUNCTION_DECL)
205 /* Make sure we create a cgraph node for functions we'll reference.
206 They can be non-existent if the reference comes from an entry
207 of an external vtable for example. */
208 cgraph_node::get_create (base);
210 /* Fixup types in global initializers. */
211 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
212 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
214 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
215 cval = fold_convert (TREE_TYPE (orig_cval), cval);
216 return cval;
218 if (TREE_OVERFLOW_P (cval))
219 return drop_tree_overflow (cval);
220 return orig_cval;
223 /* If SYM is a constant variable with known value, return the value.
224 NULL_TREE is returned otherwise. */
226 tree
227 get_symbol_constant_value (tree sym)
229 tree val = ctor_for_folding (sym);
230 if (val != error_mark_node)
232 if (val)
234 val = canonicalize_constructor_val (unshare_expr (val), sym);
235 if (val && is_gimple_min_invariant (val))
236 return val;
237 else
238 return NULL_TREE;
240 /* Variables declared 'const' without an initializer
241 have zero as the initializer if they may not be
242 overridden at link or run time. */
243 if (!val
244 && is_gimple_reg_type (TREE_TYPE (sym)))
245 return build_zero_cst (TREE_TYPE (sym));
248 return NULL_TREE;
253 /* Subroutine of fold_stmt. We perform several simplifications of the
254 memory reference tree EXPR and make sure to re-gimplify them properly
255 after propagation of constant addresses. IS_LHS is true if the
256 reference is supposed to be an lvalue. */
258 static tree
259 maybe_fold_reference (tree expr, bool is_lhs)
261 tree result;
263 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
264 || TREE_CODE (expr) == REALPART_EXPR
265 || TREE_CODE (expr) == IMAGPART_EXPR)
266 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
267 return fold_unary_loc (EXPR_LOCATION (expr),
268 TREE_CODE (expr),
269 TREE_TYPE (expr),
270 TREE_OPERAND (expr, 0));
271 else if (TREE_CODE (expr) == BIT_FIELD_REF
272 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
273 return fold_ternary_loc (EXPR_LOCATION (expr),
274 TREE_CODE (expr),
275 TREE_TYPE (expr),
276 TREE_OPERAND (expr, 0),
277 TREE_OPERAND (expr, 1),
278 TREE_OPERAND (expr, 2));
280 if (!is_lhs
281 && (result = fold_const_aggregate_ref (expr))
282 && is_gimple_min_invariant (result))
283 return result;
285 return NULL_TREE;
289 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
290 replacement rhs for the statement or NULL_TREE if no simplification
291 could be made. It is assumed that the operands have been previously
292 folded. */
294 static tree
295 fold_gimple_assign (gimple_stmt_iterator *si)
297 gimple *stmt = gsi_stmt (*si);
298 enum tree_code subcode = gimple_assign_rhs_code (stmt);
299 location_t loc = gimple_location (stmt);
301 tree result = NULL_TREE;
303 switch (get_gimple_rhs_class (subcode))
305 case GIMPLE_SINGLE_RHS:
307 tree rhs = gimple_assign_rhs1 (stmt);
309 if (TREE_CLOBBER_P (rhs))
310 return NULL_TREE;
312 if (REFERENCE_CLASS_P (rhs))
313 return maybe_fold_reference (rhs, false);
315 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
317 tree val = OBJ_TYPE_REF_EXPR (rhs);
318 if (is_gimple_min_invariant (val))
319 return val;
320 else if (flag_devirtualize && virtual_method_call_p (rhs))
322 bool final;
323 vec <cgraph_node *>targets
324 = possible_polymorphic_call_targets (rhs, stmt, &final);
325 if (final && targets.length () <= 1 && dbg_cnt (devirt))
327 if (dump_enabled_p ())
329 location_t loc = gimple_location_safe (stmt);
330 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
331 "resolving virtual function address "
332 "reference to function %s\n",
333 targets.length () == 1
334 ? targets[0]->name ()
335 : "NULL");
337 if (targets.length () == 1)
339 val = fold_convert (TREE_TYPE (val),
340 build_fold_addr_expr_loc
341 (loc, targets[0]->decl));
342 STRIP_USELESS_TYPE_CONVERSION (val);
344 else
345 /* We can not use __builtin_unreachable here because it
346 can not have address taken. */
347 val = build_int_cst (TREE_TYPE (val), 0);
348 return val;
353 else if (TREE_CODE (rhs) == ADDR_EXPR)
355 tree ref = TREE_OPERAND (rhs, 0);
356 tree tem = maybe_fold_reference (ref, true);
357 if (tem
358 && TREE_CODE (tem) == MEM_REF
359 && integer_zerop (TREE_OPERAND (tem, 1)))
360 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
361 else if (tem)
362 result = fold_convert (TREE_TYPE (rhs),
363 build_fold_addr_expr_loc (loc, tem));
364 else if (TREE_CODE (ref) == MEM_REF
365 && integer_zerop (TREE_OPERAND (ref, 1)))
366 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
368 if (result)
370 /* Strip away useless type conversions. Both the
371 NON_LVALUE_EXPR that may have been added by fold, and
372 "useless" type conversions that might now be apparent
373 due to propagation. */
374 STRIP_USELESS_TYPE_CONVERSION (result);
376 if (result != rhs && valid_gimple_rhs_p (result))
377 return result;
381 else if (TREE_CODE (rhs) == CONSTRUCTOR
382 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
384 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
385 unsigned i;
386 tree val;
388 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
389 if (! CONSTANT_CLASS_P (val))
390 return NULL_TREE;
392 return build_vector_from_ctor (TREE_TYPE (rhs),
393 CONSTRUCTOR_ELTS (rhs));
396 else if (DECL_P (rhs))
397 return get_symbol_constant_value (rhs);
399 break;
401 case GIMPLE_UNARY_RHS:
402 break;
404 case GIMPLE_BINARY_RHS:
405 break;
407 case GIMPLE_TERNARY_RHS:
408 result = fold_ternary_loc (loc, subcode,
409 TREE_TYPE (gimple_assign_lhs (stmt)),
410 gimple_assign_rhs1 (stmt),
411 gimple_assign_rhs2 (stmt),
412 gimple_assign_rhs3 (stmt));
414 if (result)
416 STRIP_USELESS_TYPE_CONVERSION (result);
417 if (valid_gimple_rhs_p (result))
418 return result;
420 break;
422 case GIMPLE_INVALID_RHS:
423 gcc_unreachable ();
426 return NULL_TREE;
430 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
431 adjusting the replacement stmts location and virtual operands.
432 If the statement has a lhs the last stmt in the sequence is expected
433 to assign to that lhs. */
435 static void
436 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
438 gimple *stmt = gsi_stmt (*si_p);
440 if (gimple_has_location (stmt))
441 annotate_all_with_location (stmts, gimple_location (stmt));
443 /* First iterate over the replacement statements backward, assigning
444 virtual operands to their defining statements. */
445 gimple *laststore = NULL;
446 for (gimple_stmt_iterator i = gsi_last (stmts);
447 !gsi_end_p (i); gsi_prev (&i))
449 gimple *new_stmt = gsi_stmt (i);
450 if ((gimple_assign_single_p (new_stmt)
451 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
452 || (is_gimple_call (new_stmt)
453 && (gimple_call_flags (new_stmt)
454 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
456 tree vdef;
457 if (!laststore)
458 vdef = gimple_vdef (stmt);
459 else
460 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
461 gimple_set_vdef (new_stmt, vdef);
462 if (vdef && TREE_CODE (vdef) == SSA_NAME)
463 SSA_NAME_DEF_STMT (vdef) = new_stmt;
464 laststore = new_stmt;
468 /* Second iterate over the statements forward, assigning virtual
469 operands to their uses. */
470 tree reaching_vuse = gimple_vuse (stmt);
471 for (gimple_stmt_iterator i = gsi_start (stmts);
472 !gsi_end_p (i); gsi_next (&i))
474 gimple *new_stmt = gsi_stmt (i);
475 /* If the new statement possibly has a VUSE, update it with exact SSA
476 name we know will reach this one. */
477 if (gimple_has_mem_ops (new_stmt))
478 gimple_set_vuse (new_stmt, reaching_vuse);
479 gimple_set_modified (new_stmt, true);
480 if (gimple_vdef (new_stmt))
481 reaching_vuse = gimple_vdef (new_stmt);
484 /* If the new sequence does not do a store release the virtual
485 definition of the original statement. */
486 if (reaching_vuse
487 && reaching_vuse == gimple_vuse (stmt))
489 tree vdef = gimple_vdef (stmt);
490 if (vdef
491 && TREE_CODE (vdef) == SSA_NAME)
493 unlink_stmt_vdef (stmt);
494 release_ssa_name (vdef);
498 /* Finally replace the original statement with the sequence. */
499 gsi_replace_with_seq (si_p, stmts, false);
502 /* Convert EXPR into a GIMPLE value suitable for substitution on the
503 RHS of an assignment. Insert the necessary statements before
504 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
505 is replaced. If the call is expected to produces a result, then it
506 is replaced by an assignment of the new RHS to the result variable.
507 If the result is to be ignored, then the call is replaced by a
508 GIMPLE_NOP. A proper VDEF chain is retained by making the first
509 VUSE and the last VDEF of the whole sequence be the same as the replaced
510 statement and using new SSA names for stores in between. */
512 void
513 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
515 tree lhs;
516 gimple *stmt, *new_stmt;
517 gimple_stmt_iterator i;
518 gimple_seq stmts = NULL;
520 stmt = gsi_stmt (*si_p);
522 gcc_assert (is_gimple_call (stmt));
524 push_gimplify_context (gimple_in_ssa_p (cfun));
526 lhs = gimple_call_lhs (stmt);
527 if (lhs == NULL_TREE)
529 gimplify_and_add (expr, &stmts);
530 /* We can end up with folding a memcpy of an empty class assignment
531 which gets optimized away by C++ gimplification. */
532 if (gimple_seq_empty_p (stmts))
534 pop_gimplify_context (NULL);
535 if (gimple_in_ssa_p (cfun))
537 unlink_stmt_vdef (stmt);
538 release_defs (stmt);
540 gsi_replace (si_p, gimple_build_nop (), false);
541 return;
544 else
546 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
547 new_stmt = gimple_build_assign (lhs, tmp);
548 i = gsi_last (stmts);
549 gsi_insert_after_without_update (&i, new_stmt,
550 GSI_CONTINUE_LINKING);
553 pop_gimplify_context (NULL);
555 gsi_replace_with_seq_vops (si_p, stmts);
559 /* Replace the call at *GSI with the gimple value VAL. */
561 static void
562 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
564 gimple *stmt = gsi_stmt (*gsi);
565 tree lhs = gimple_call_lhs (stmt);
566 gimple *repl;
567 if (lhs)
569 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
570 val = fold_convert (TREE_TYPE (lhs), val);
571 repl = gimple_build_assign (lhs, val);
573 else
574 repl = gimple_build_nop ();
575 tree vdef = gimple_vdef (stmt);
576 if (vdef && TREE_CODE (vdef) == SSA_NAME)
578 unlink_stmt_vdef (stmt);
579 release_ssa_name (vdef);
581 gsi_replace (gsi, repl, false);
584 /* Replace the call at *GSI with the new call REPL and fold that
585 again. */
587 static void
588 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
590 gimple *stmt = gsi_stmt (*gsi);
591 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
592 gimple_set_location (repl, gimple_location (stmt));
593 if (gimple_vdef (stmt)
594 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
596 gimple_set_vdef (repl, gimple_vdef (stmt));
597 gimple_set_vuse (repl, gimple_vuse (stmt));
598 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
600 gsi_replace (gsi, repl, false);
601 fold_stmt (gsi);
604 /* Return true if VAR is a VAR_DECL or a component thereof. */
606 static bool
607 var_decl_component_p (tree var)
609 tree inner = var;
610 while (handled_component_p (inner))
611 inner = TREE_OPERAND (inner, 0);
612 return SSA_VAR_P (inner);
615 /* Fold function call to builtin mem{{,p}cpy,move}. Return
616 false if no simplification can be made.
617 If ENDP is 0, return DEST (like memcpy).
618 If ENDP is 1, return DEST+LEN (like mempcpy).
619 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
620 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
621 (memmove). */
623 static bool
624 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
625 tree dest, tree src, int endp)
627 gimple *stmt = gsi_stmt (*gsi);
628 tree lhs = gimple_call_lhs (stmt);
629 tree len = gimple_call_arg (stmt, 2);
630 tree destvar, srcvar;
631 location_t loc = gimple_location (stmt);
633 /* If the LEN parameter is zero, return DEST. */
634 if (integer_zerop (len))
636 gimple *repl;
637 if (gimple_call_lhs (stmt))
638 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
639 else
640 repl = gimple_build_nop ();
641 tree vdef = gimple_vdef (stmt);
642 if (vdef && TREE_CODE (vdef) == SSA_NAME)
644 unlink_stmt_vdef (stmt);
645 release_ssa_name (vdef);
647 gsi_replace (gsi, repl, false);
648 return true;
651 /* If SRC and DEST are the same (and not volatile), return
652 DEST{,+LEN,+LEN-1}. */
653 if (operand_equal_p (src, dest, 0))
655 unlink_stmt_vdef (stmt);
656 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
657 release_ssa_name (gimple_vdef (stmt));
658 if (!lhs)
660 gsi_replace (gsi, gimple_build_nop (), false);
661 return true;
663 goto done;
665 else
667 tree srctype, desttype;
668 unsigned int src_align, dest_align;
669 tree off0;
671 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
672 pointers as wide integer) and also may result in huge function
673 size because of inlined bounds copy. Thus don't inline for
674 functions we want to instrument. */
675 if (flag_check_pointer_bounds
676 && chkp_instrumentable_p (cfun->decl)
677 /* Even if data may contain pointers we can inline if copy
678 less than a pointer size. */
679 && (!tree_fits_uhwi_p (len)
680 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
681 return false;
683 /* Build accesses at offset zero with a ref-all character type. */
684 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
685 ptr_mode, true), 0);
687 /* If we can perform the copy efficiently with first doing all loads
688 and then all stores inline it that way. Currently efficiently
689 means that we can load all the memory into a single integer
690 register which is what MOVE_MAX gives us. */
691 src_align = get_pointer_alignment (src);
692 dest_align = get_pointer_alignment (dest);
693 if (tree_fits_uhwi_p (len)
694 && compare_tree_int (len, MOVE_MAX) <= 0
695 /* ??? Don't transform copies from strings with known length this
696 confuses the tree-ssa-strlen.c. This doesn't handle
697 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
698 reason. */
699 && !c_strlen (src, 2))
701 unsigned ilen = tree_to_uhwi (len);
702 if (exact_log2 (ilen) != -1)
704 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
705 if (type
706 && TYPE_MODE (type) != BLKmode
707 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
708 == ilen * 8)
709 /* If the destination pointer is not aligned we must be able
710 to emit an unaligned store. */
711 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
712 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
713 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
714 != CODE_FOR_nothing)))
716 tree srctype = type;
717 tree desttype = type;
718 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
719 srctype = build_aligned_type (type, src_align);
720 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
721 tree tem = fold_const_aggregate_ref (srcmem);
722 if (tem)
723 srcmem = tem;
724 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
725 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
726 src_align)
727 && (optab_handler (movmisalign_optab,
728 TYPE_MODE (type))
729 == CODE_FOR_nothing))
730 srcmem = NULL_TREE;
731 if (srcmem)
733 gimple *new_stmt;
734 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
736 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
737 if (gimple_in_ssa_p (cfun))
738 srcmem = make_ssa_name (TREE_TYPE (srcmem),
739 new_stmt);
740 else
741 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
742 gimple_assign_set_lhs (new_stmt, srcmem);
743 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
744 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
746 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
747 desttype = build_aligned_type (type, dest_align);
748 new_stmt
749 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
750 dest, off0),
751 srcmem);
752 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
753 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
754 if (gimple_vdef (new_stmt)
755 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
756 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
757 if (!lhs)
759 gsi_replace (gsi, new_stmt, false);
760 return true;
762 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
763 goto done;
769 if (endp == 3)
771 /* Both DEST and SRC must be pointer types.
772 ??? This is what old code did. Is the testing for pointer types
773 really mandatory?
775 If either SRC is readonly or length is 1, we can use memcpy. */
776 if (!dest_align || !src_align)
777 return false;
778 if (readonly_data_expr (src)
779 || (tree_fits_uhwi_p (len)
780 && (MIN (src_align, dest_align) / BITS_PER_UNIT
781 >= tree_to_uhwi (len))))
783 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
784 if (!fn)
785 return false;
786 gimple_call_set_fndecl (stmt, fn);
787 gimple_call_set_arg (stmt, 0, dest);
788 gimple_call_set_arg (stmt, 1, src);
789 fold_stmt (gsi);
790 return true;
793 /* If *src and *dest can't overlap, optimize into memcpy as well. */
794 if (TREE_CODE (src) == ADDR_EXPR
795 && TREE_CODE (dest) == ADDR_EXPR)
797 tree src_base, dest_base, fn;
798 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
799 HOST_WIDE_INT size = -1;
800 HOST_WIDE_INT maxsize = -1;
801 bool reverse;
803 srcvar = TREE_OPERAND (src, 0);
804 src_base = get_ref_base_and_extent (srcvar, &src_offset,
805 &size, &maxsize, &reverse);
806 destvar = TREE_OPERAND (dest, 0);
807 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
808 &size, &maxsize, &reverse);
809 if (tree_fits_uhwi_p (len))
810 maxsize = tree_to_uhwi (len);
811 else
812 maxsize = -1;
813 src_offset /= BITS_PER_UNIT;
814 dest_offset /= BITS_PER_UNIT;
815 if (SSA_VAR_P (src_base)
816 && SSA_VAR_P (dest_base))
818 if (operand_equal_p (src_base, dest_base, 0)
819 && ranges_overlap_p (src_offset, maxsize,
820 dest_offset, maxsize))
821 return false;
823 else if (TREE_CODE (src_base) == MEM_REF
824 && TREE_CODE (dest_base) == MEM_REF)
826 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
827 TREE_OPERAND (dest_base, 0), 0))
828 return false;
829 offset_int off = mem_ref_offset (src_base) + src_offset;
830 if (!wi::fits_shwi_p (off))
831 return false;
832 src_offset = off.to_shwi ();
834 off = mem_ref_offset (dest_base) + dest_offset;
835 if (!wi::fits_shwi_p (off))
836 return false;
837 dest_offset = off.to_shwi ();
838 if (ranges_overlap_p (src_offset, maxsize,
839 dest_offset, maxsize))
840 return false;
842 else
843 return false;
845 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
846 if (!fn)
847 return false;
848 gimple_call_set_fndecl (stmt, fn);
849 gimple_call_set_arg (stmt, 0, dest);
850 gimple_call_set_arg (stmt, 1, src);
851 fold_stmt (gsi);
852 return true;
855 /* If the destination and source do not alias optimize into
856 memcpy as well. */
857 if ((is_gimple_min_invariant (dest)
858 || TREE_CODE (dest) == SSA_NAME)
859 && (is_gimple_min_invariant (src)
860 || TREE_CODE (src) == SSA_NAME))
862 ao_ref destr, srcr;
863 ao_ref_init_from_ptr_and_size (&destr, dest, len);
864 ao_ref_init_from_ptr_and_size (&srcr, src, len);
865 if (!refs_may_alias_p_1 (&destr, &srcr, false))
867 tree fn;
868 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
869 if (!fn)
870 return false;
871 gimple_call_set_fndecl (stmt, fn);
872 gimple_call_set_arg (stmt, 0, dest);
873 gimple_call_set_arg (stmt, 1, src);
874 fold_stmt (gsi);
875 return true;
879 return false;
882 if (!tree_fits_shwi_p (len))
883 return false;
884 /* FIXME:
885 This logic lose for arguments like (type *)malloc (sizeof (type)),
886 since we strip the casts of up to VOID return value from malloc.
887 Perhaps we ought to inherit type from non-VOID argument here? */
888 STRIP_NOPS (src);
889 STRIP_NOPS (dest);
890 if (!POINTER_TYPE_P (TREE_TYPE (src))
891 || !POINTER_TYPE_P (TREE_TYPE (dest)))
892 return false;
893 /* In the following try to find a type that is most natural to be
894 used for the memcpy source and destination and that allows
895 the most optimization when memcpy is turned into a plain assignment
896 using that type. In theory we could always use a char[len] type
897 but that only gains us that the destination and source possibly
898 no longer will have their address taken. */
899 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
900 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
902 tree tem = TREE_OPERAND (src, 0);
903 STRIP_NOPS (tem);
904 if (tem != TREE_OPERAND (src, 0))
905 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
907 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
909 tree tem = TREE_OPERAND (dest, 0);
910 STRIP_NOPS (tem);
911 if (tem != TREE_OPERAND (dest, 0))
912 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
914 srctype = TREE_TYPE (TREE_TYPE (src));
915 if (TREE_CODE (srctype) == ARRAY_TYPE
916 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
918 srctype = TREE_TYPE (srctype);
919 STRIP_NOPS (src);
920 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
922 desttype = TREE_TYPE (TREE_TYPE (dest));
923 if (TREE_CODE (desttype) == ARRAY_TYPE
924 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
926 desttype = TREE_TYPE (desttype);
927 STRIP_NOPS (dest);
928 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
930 if (TREE_ADDRESSABLE (srctype)
931 || TREE_ADDRESSABLE (desttype))
932 return false;
934 /* Make sure we are not copying using a floating-point mode or
935 a type whose size possibly does not match its precision. */
936 if (FLOAT_MODE_P (TYPE_MODE (desttype))
937 || TREE_CODE (desttype) == BOOLEAN_TYPE
938 || TREE_CODE (desttype) == ENUMERAL_TYPE)
939 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
940 if (FLOAT_MODE_P (TYPE_MODE (srctype))
941 || TREE_CODE (srctype) == BOOLEAN_TYPE
942 || TREE_CODE (srctype) == ENUMERAL_TYPE)
943 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
944 if (!srctype)
945 srctype = desttype;
946 if (!desttype)
947 desttype = srctype;
948 if (!srctype)
949 return false;
951 src_align = get_pointer_alignment (src);
952 dest_align = get_pointer_alignment (dest);
953 if (dest_align < TYPE_ALIGN (desttype)
954 || src_align < TYPE_ALIGN (srctype))
955 return false;
957 destvar = dest;
958 STRIP_NOPS (destvar);
959 if (TREE_CODE (destvar) == ADDR_EXPR
960 && var_decl_component_p (TREE_OPERAND (destvar, 0))
961 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
962 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
963 else
964 destvar = NULL_TREE;
966 srcvar = src;
967 STRIP_NOPS (srcvar);
968 if (TREE_CODE (srcvar) == ADDR_EXPR
969 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
970 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
972 if (!destvar
973 || src_align >= TYPE_ALIGN (desttype))
974 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
975 srcvar, off0);
976 else if (!STRICT_ALIGNMENT)
978 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
979 src_align);
980 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
982 else
983 srcvar = NULL_TREE;
985 else
986 srcvar = NULL_TREE;
988 if (srcvar == NULL_TREE && destvar == NULL_TREE)
989 return false;
991 if (srcvar == NULL_TREE)
993 STRIP_NOPS (src);
994 if (src_align >= TYPE_ALIGN (desttype))
995 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
996 else
998 if (STRICT_ALIGNMENT)
999 return false;
1000 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1001 src_align);
1002 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1005 else if (destvar == NULL_TREE)
1007 STRIP_NOPS (dest);
1008 if (dest_align >= TYPE_ALIGN (srctype))
1009 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1010 else
1012 if (STRICT_ALIGNMENT)
1013 return false;
1014 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1015 dest_align);
1016 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1020 gimple *new_stmt;
1021 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1023 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1024 if (gimple_in_ssa_p (cfun))
1025 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1026 else
1027 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1028 gimple_assign_set_lhs (new_stmt, srcvar);
1029 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1030 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1032 new_stmt = gimple_build_assign (destvar, srcvar);
1033 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1034 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1035 if (gimple_vdef (new_stmt)
1036 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1037 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1038 if (!lhs)
1040 gsi_replace (gsi, new_stmt, false);
1041 return true;
1043 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1046 done:
1047 gimple_seq stmts = NULL;
1048 if (endp == 0 || endp == 3)
1049 len = NULL_TREE;
1050 else if (endp == 2)
1051 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1052 ssize_int (1));
1053 if (endp == 2 || endp == 1)
1055 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1056 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1057 TREE_TYPE (dest), dest, len);
1060 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1061 gimple *repl = gimple_build_assign (lhs, dest);
1062 gsi_replace (gsi, repl, false);
1063 return true;
1066 /* Fold function call to builtin memset or bzero at *GSI setting the
1067 memory of size LEN to VAL. Return whether a simplification was made. */
1069 static bool
1070 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1072 gimple *stmt = gsi_stmt (*gsi);
1073 tree etype;
1074 unsigned HOST_WIDE_INT length, cval;
1076 /* If the LEN parameter is zero, return DEST. */
1077 if (integer_zerop (len))
1079 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1080 return true;
1083 if (! tree_fits_uhwi_p (len))
1084 return false;
1086 if (TREE_CODE (c) != INTEGER_CST)
1087 return false;
1089 tree dest = gimple_call_arg (stmt, 0);
1090 tree var = dest;
1091 if (TREE_CODE (var) != ADDR_EXPR)
1092 return false;
1094 var = TREE_OPERAND (var, 0);
1095 if (TREE_THIS_VOLATILE (var))
1096 return false;
1098 etype = TREE_TYPE (var);
1099 if (TREE_CODE (etype) == ARRAY_TYPE)
1100 etype = TREE_TYPE (etype);
1102 if (!INTEGRAL_TYPE_P (etype)
1103 && !POINTER_TYPE_P (etype))
1104 return NULL_TREE;
1106 if (! var_decl_component_p (var))
1107 return NULL_TREE;
1109 length = tree_to_uhwi (len);
1110 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1111 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1112 return NULL_TREE;
1114 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1115 return NULL_TREE;
1117 if (integer_zerop (c))
1118 cval = 0;
1119 else
1121 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1122 return NULL_TREE;
1124 cval = TREE_INT_CST_LOW (c);
1125 cval &= 0xff;
1126 cval |= cval << 8;
1127 cval |= cval << 16;
1128 cval |= (cval << 31) << 1;
1131 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1132 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1133 gimple_set_vuse (store, gimple_vuse (stmt));
1134 tree vdef = gimple_vdef (stmt);
1135 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1137 gimple_set_vdef (store, gimple_vdef (stmt));
1138 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1140 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1141 if (gimple_call_lhs (stmt))
1143 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1144 gsi_replace (gsi, asgn, false);
1146 else
1148 gimple_stmt_iterator gsi2 = *gsi;
1149 gsi_prev (gsi);
1150 gsi_remove (&gsi2, true);
1153 return true;
1157 /* Return the string length, maximum string length or maximum value of
1158 ARG in LENGTH.
1159 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1160 is not NULL and, for TYPE == 0, its value is not equal to the length
1161 we determine or if we are unable to determine the length or value,
1162 return false. VISITED is a bitmap of visited variables.
1163 TYPE is 0 if string length should be returned, 1 for maximum string
1164 length and 2 for maximum value ARG can have. */
1166 static bool
1167 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1169 tree var, val;
1170 gimple *def_stmt;
1172 if (TREE_CODE (arg) != SSA_NAME)
1174 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1175 if (TREE_CODE (arg) == ADDR_EXPR
1176 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1177 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1179 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1180 if (TREE_CODE (aop0) == INDIRECT_REF
1181 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1182 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1183 length, visited, type);
1186 if (type == 2)
1188 val = arg;
1189 if (TREE_CODE (val) != INTEGER_CST
1190 || tree_int_cst_sgn (val) < 0)
1191 return false;
1193 else
1194 val = c_strlen (arg, 1);
1195 if (!val)
1196 return false;
1198 if (*length)
1200 if (type > 0)
1202 if (TREE_CODE (*length) != INTEGER_CST
1203 || TREE_CODE (val) != INTEGER_CST)
1204 return false;
1206 if (tree_int_cst_lt (*length, val))
1207 *length = val;
1208 return true;
1210 else if (simple_cst_equal (val, *length) != 1)
1211 return false;
1214 *length = val;
1215 return true;
1218 /* If ARG is registered for SSA update we cannot look at its defining
1219 statement. */
1220 if (name_registered_for_update_p (arg))
1221 return false;
1223 /* If we were already here, break the infinite cycle. */
1224 if (!*visited)
1225 *visited = BITMAP_ALLOC (NULL);
1226 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1227 return true;
1229 var = arg;
1230 def_stmt = SSA_NAME_DEF_STMT (var);
1232 switch (gimple_code (def_stmt))
1234 case GIMPLE_ASSIGN:
1235 /* The RHS of the statement defining VAR must either have a
1236 constant length or come from another SSA_NAME with a constant
1237 length. */
1238 if (gimple_assign_single_p (def_stmt)
1239 || gimple_assign_unary_nop_p (def_stmt))
1241 tree rhs = gimple_assign_rhs1 (def_stmt);
1242 return get_maxval_strlen (rhs, length, visited, type);
1244 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1246 tree op2 = gimple_assign_rhs2 (def_stmt);
1247 tree op3 = gimple_assign_rhs3 (def_stmt);
1248 return get_maxval_strlen (op2, length, visited, type)
1249 && get_maxval_strlen (op3, length, visited, type);
1251 return false;
1253 case GIMPLE_PHI:
1255 /* All the arguments of the PHI node must have the same constant
1256 length. */
1257 unsigned i;
1259 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1261 tree arg = gimple_phi_arg (def_stmt, i)->def;
1263 /* If this PHI has itself as an argument, we cannot
1264 determine the string length of this argument. However,
1265 if we can find a constant string length for the other
1266 PHI args then we can still be sure that this is a
1267 constant string length. So be optimistic and just
1268 continue with the next argument. */
1269 if (arg == gimple_phi_result (def_stmt))
1270 continue;
1272 if (!get_maxval_strlen (arg, length, visited, type))
1273 return false;
1276 return true;
1278 default:
1279 return false;
1283 tree
1284 get_maxval_strlen (tree arg, int type)
1286 bitmap visited = NULL;
1287 tree len = NULL_TREE;
1288 if (!get_maxval_strlen (arg, &len, &visited, type))
1289 len = NULL_TREE;
1290 if (visited)
1291 BITMAP_FREE (visited);
1293 return len;
1297 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1298 If LEN is not NULL, it represents the length of the string to be
1299 copied. Return NULL_TREE if no simplification can be made. */
1301 static bool
1302 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1303 tree dest, tree src)
1305 location_t loc = gimple_location (gsi_stmt (*gsi));
1306 tree fn;
1308 /* If SRC and DEST are the same (and not volatile), return DEST. */
1309 if (operand_equal_p (src, dest, 0))
1311 replace_call_with_value (gsi, dest);
1312 return true;
1315 if (optimize_function_for_size_p (cfun))
1316 return false;
1318 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1319 if (!fn)
1320 return false;
1322 tree len = get_maxval_strlen (src, 0);
1323 if (!len)
1324 return false;
1326 len = fold_convert_loc (loc, size_type_node, len);
1327 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1328 len = force_gimple_operand_gsi (gsi, len, true,
1329 NULL_TREE, true, GSI_SAME_STMT);
1330 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1331 replace_call_with_call_and_fold (gsi, repl);
1332 return true;
1335 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1336 If SLEN is not NULL, it represents the length of the source string.
1337 Return NULL_TREE if no simplification can be made. */
1339 static bool
1340 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1341 tree dest, tree src, tree len)
1343 location_t loc = gimple_location (gsi_stmt (*gsi));
1344 tree fn;
1346 /* If the LEN parameter is zero, return DEST. */
1347 if (integer_zerop (len))
1349 replace_call_with_value (gsi, dest);
1350 return true;
1353 /* We can't compare slen with len as constants below if len is not a
1354 constant. */
1355 if (TREE_CODE (len) != INTEGER_CST)
1356 return false;
1358 /* Now, we must be passed a constant src ptr parameter. */
1359 tree slen = get_maxval_strlen (src, 0);
1360 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1361 return false;
1363 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1365 /* We do not support simplification of this case, though we do
1366 support it when expanding trees into RTL. */
1367 /* FIXME: generate a call to __builtin_memset. */
1368 if (tree_int_cst_lt (slen, len))
1369 return false;
1371 /* OK transform into builtin memcpy. */
1372 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1373 if (!fn)
1374 return false;
1376 len = fold_convert_loc (loc, size_type_node, len);
1377 len = force_gimple_operand_gsi (gsi, len, true,
1378 NULL_TREE, true, GSI_SAME_STMT);
1379 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1380 replace_call_with_call_and_fold (gsi, repl);
1381 return true;
1384 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1385 to the call.
1387 Return NULL_TREE if no simplification was possible, otherwise return the
1388 simplified form of the call as a tree.
1390 The simplified form may be a constant or other expression which
1391 computes the same value, but in a more efficient manner (including
1392 calls to other builtin functions).
1394 The call may contain arguments which need to be evaluated, but
1395 which are not useful to determine the result of the call. In
1396 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1397 COMPOUND_EXPR will be an argument which must be evaluated.
1398 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1399 COMPOUND_EXPR in the chain will contain the tree for the simplified
1400 form of the builtin function call. */
1402 static bool
1403 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1405 gimple *stmt = gsi_stmt (*gsi);
1406 location_t loc = gimple_location (stmt);
1408 const char *p = c_getstr (src);
1410 /* If the string length is zero, return the dst parameter. */
1411 if (p && *p == '\0')
1413 replace_call_with_value (gsi, dst);
1414 return true;
1417 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1418 return false;
1420 /* See if we can store by pieces into (dst + strlen(dst)). */
1421 tree newdst;
1422 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1423 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1425 if (!strlen_fn || !memcpy_fn)
1426 return false;
1428 /* If the length of the source string isn't computable don't
1429 split strcat into strlen and memcpy. */
1430 tree len = get_maxval_strlen (src, 0);
1431 if (! len)
1432 return false;
1434 /* Create strlen (dst). */
1435 gimple_seq stmts = NULL, stmts2;
1436 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1437 gimple_set_location (repl, loc);
1438 if (gimple_in_ssa_p (cfun))
1439 newdst = make_ssa_name (size_type_node);
1440 else
1441 newdst = create_tmp_reg (size_type_node);
1442 gimple_call_set_lhs (repl, newdst);
1443 gimple_seq_add_stmt_without_update (&stmts, repl);
1445 /* Create (dst p+ strlen (dst)). */
1446 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1447 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1448 gimple_seq_add_seq_without_update (&stmts, stmts2);
1450 len = fold_convert_loc (loc, size_type_node, len);
1451 len = size_binop_loc (loc, PLUS_EXPR, len,
1452 build_int_cst (size_type_node, 1));
1453 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1454 gimple_seq_add_seq_without_update (&stmts, stmts2);
1456 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1457 gimple_seq_add_stmt_without_update (&stmts, repl);
1458 if (gimple_call_lhs (stmt))
1460 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1461 gimple_seq_add_stmt_without_update (&stmts, repl);
1462 gsi_replace_with_seq_vops (gsi, stmts);
1463 /* gsi now points at the assignment to the lhs, get a
1464 stmt iterator to the memcpy call.
1465 ??? We can't use gsi_for_stmt as that doesn't work when the
1466 CFG isn't built yet. */
1467 gimple_stmt_iterator gsi2 = *gsi;
1468 gsi_prev (&gsi2);
1469 fold_stmt (&gsi2);
1471 else
1473 gsi_replace_with_seq_vops (gsi, stmts);
1474 fold_stmt (gsi);
1476 return true;
1479 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1480 are the arguments to the call. */
1482 static bool
1483 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1485 gimple *stmt = gsi_stmt (*gsi);
1486 tree dest = gimple_call_arg (stmt, 0);
1487 tree src = gimple_call_arg (stmt, 1);
1488 tree size = gimple_call_arg (stmt, 2);
1489 tree fn;
1490 const char *p;
1493 p = c_getstr (src);
1494 /* If the SRC parameter is "", return DEST. */
1495 if (p && *p == '\0')
1497 replace_call_with_value (gsi, dest);
1498 return true;
1501 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1502 return false;
1504 /* If __builtin_strcat_chk is used, assume strcat is available. */
1505 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1506 if (!fn)
1507 return false;
1509 gimple *repl = gimple_build_call (fn, 2, dest, src);
1510 replace_call_with_call_and_fold (gsi, repl);
1511 return true;
1514 /* Simplify a call to the strncat builtin. */
1516 static bool
1517 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1519 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1520 tree dst = gimple_call_arg (stmt, 0);
1521 tree src = gimple_call_arg (stmt, 1);
1522 tree len = gimple_call_arg (stmt, 2);
1524 const char *p = c_getstr (src);
1526 /* If the requested length is zero, or the src parameter string
1527 length is zero, return the dst parameter. */
1528 if (integer_zerop (len) || (p && *p == '\0'))
1530 replace_call_with_value (gsi, dst);
1531 return true;
1534 /* If the requested len is greater than or equal to the string
1535 length, call strcat. */
1536 if (TREE_CODE (len) == INTEGER_CST && p
1537 && compare_tree_int (len, strlen (p)) >= 0)
1539 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1541 /* If the replacement _DECL isn't initialized, don't do the
1542 transformation. */
1543 if (!fn)
1544 return false;
1546 gcall *repl = gimple_build_call (fn, 2, dst, src);
1547 replace_call_with_call_and_fold (gsi, repl);
1548 return true;
1551 return false;
1554 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1555 LEN, and SIZE. */
1557 static bool
1558 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1560 gimple *stmt = gsi_stmt (*gsi);
1561 tree dest = gimple_call_arg (stmt, 0);
1562 tree src = gimple_call_arg (stmt, 1);
1563 tree len = gimple_call_arg (stmt, 2);
1564 tree size = gimple_call_arg (stmt, 3);
1565 tree fn;
1566 const char *p;
1568 p = c_getstr (src);
1569 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1570 if ((p && *p == '\0')
1571 || integer_zerop (len))
1573 replace_call_with_value (gsi, dest);
1574 return true;
1577 if (! tree_fits_uhwi_p (size))
1578 return false;
1580 if (! integer_all_onesp (size))
1582 tree src_len = c_strlen (src, 1);
1583 if (src_len
1584 && tree_fits_uhwi_p (src_len)
1585 && tree_fits_uhwi_p (len)
1586 && ! tree_int_cst_lt (len, src_len))
1588 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1589 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1590 if (!fn)
1591 return false;
1593 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1594 replace_call_with_call_and_fold (gsi, repl);
1595 return true;
1597 return false;
1600 /* If __builtin_strncat_chk is used, assume strncat is available. */
1601 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1602 if (!fn)
1603 return false;
1605 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1606 replace_call_with_call_and_fold (gsi, repl);
1607 return true;
1610 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1611 to the call. IGNORE is true if the value returned
1612 by the builtin will be ignored. UNLOCKED is true is true if this
1613 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1614 the known length of the string. Return NULL_TREE if no simplification
1615 was possible. */
1617 static bool
1618 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1619 tree arg0, tree arg1,
1620 bool unlocked)
1622 gimple *stmt = gsi_stmt (*gsi);
1624 /* If we're using an unlocked function, assume the other unlocked
1625 functions exist explicitly. */
1626 tree const fn_fputc = (unlocked
1627 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1628 : builtin_decl_implicit (BUILT_IN_FPUTC));
1629 tree const fn_fwrite = (unlocked
1630 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1631 : builtin_decl_implicit (BUILT_IN_FWRITE));
1633 /* If the return value is used, don't do the transformation. */
1634 if (gimple_call_lhs (stmt))
1635 return false;
1637 /* Get the length of the string passed to fputs. If the length
1638 can't be determined, punt. */
1639 tree len = get_maxval_strlen (arg0, 0);
1640 if (!len
1641 || TREE_CODE (len) != INTEGER_CST)
1642 return false;
1644 switch (compare_tree_int (len, 1))
1646 case -1: /* length is 0, delete the call entirely . */
1647 replace_call_with_value (gsi, integer_zero_node);
1648 return true;
1650 case 0: /* length is 1, call fputc. */
1652 const char *p = c_getstr (arg0);
1653 if (p != NULL)
1655 if (!fn_fputc)
1656 return false;
1658 gimple *repl = gimple_build_call (fn_fputc, 2,
1659 build_int_cst
1660 (integer_type_node, p[0]), arg1);
1661 replace_call_with_call_and_fold (gsi, repl);
1662 return true;
1665 /* FALLTHROUGH */
1666 case 1: /* length is greater than 1, call fwrite. */
1668 /* If optimizing for size keep fputs. */
1669 if (optimize_function_for_size_p (cfun))
1670 return false;
1671 /* New argument list transforming fputs(string, stream) to
1672 fwrite(string, 1, len, stream). */
1673 if (!fn_fwrite)
1674 return false;
1676 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1677 size_one_node, len, arg1);
1678 replace_call_with_call_and_fold (gsi, repl);
1679 return true;
1681 default:
1682 gcc_unreachable ();
1684 return false;
1687 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1688 DEST, SRC, LEN, and SIZE are the arguments to the call.
1689 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1690 code of the builtin. If MAXLEN is not NULL, it is maximum length
1691 passed as third argument. */
1693 static bool
1694 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1695 tree dest, tree src, tree len, tree size,
1696 enum built_in_function fcode)
1698 gimple *stmt = gsi_stmt (*gsi);
1699 location_t loc = gimple_location (stmt);
1700 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1701 tree fn;
1703 /* If SRC and DEST are the same (and not volatile), return DEST
1704 (resp. DEST+LEN for __mempcpy_chk). */
1705 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1707 if (fcode != BUILT_IN_MEMPCPY_CHK)
1709 replace_call_with_value (gsi, dest);
1710 return true;
1712 else
1714 gimple_seq stmts = NULL;
1715 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1716 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1717 TREE_TYPE (dest), dest, len);
1718 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1719 replace_call_with_value (gsi, temp);
1720 return true;
1724 if (! tree_fits_uhwi_p (size))
1725 return false;
1727 tree maxlen = get_maxval_strlen (len, 2);
1728 if (! integer_all_onesp (size))
1730 if (! tree_fits_uhwi_p (len))
1732 /* If LEN is not constant, try MAXLEN too.
1733 For MAXLEN only allow optimizing into non-_ocs function
1734 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1735 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1737 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1739 /* (void) __mempcpy_chk () can be optimized into
1740 (void) __memcpy_chk (). */
1741 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1742 if (!fn)
1743 return false;
1745 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1746 replace_call_with_call_and_fold (gsi, repl);
1747 return true;
1749 return false;
1752 else
1753 maxlen = len;
1755 if (tree_int_cst_lt (size, maxlen))
1756 return false;
1759 fn = NULL_TREE;
1760 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1761 mem{cpy,pcpy,move,set} is available. */
1762 switch (fcode)
1764 case BUILT_IN_MEMCPY_CHK:
1765 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1766 break;
1767 case BUILT_IN_MEMPCPY_CHK:
1768 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1769 break;
1770 case BUILT_IN_MEMMOVE_CHK:
1771 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1772 break;
1773 case BUILT_IN_MEMSET_CHK:
1774 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1775 break;
1776 default:
1777 break;
1780 if (!fn)
1781 return false;
1783 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1784 replace_call_with_call_and_fold (gsi, repl);
1785 return true;
1788 /* Fold a call to the __st[rp]cpy_chk builtin.
1789 DEST, SRC, and SIZE are the arguments to the call.
1790 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1791 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1792 strings passed as second argument. */
1794 static bool
1795 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1796 tree dest,
1797 tree src, tree size,
1798 enum built_in_function fcode)
1800 gimple *stmt = gsi_stmt (*gsi);
1801 location_t loc = gimple_location (stmt);
1802 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1803 tree len, fn;
1805 /* If SRC and DEST are the same (and not volatile), return DEST. */
1806 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1808 replace_call_with_value (gsi, dest);
1809 return true;
1812 if (! tree_fits_uhwi_p (size))
1813 return false;
1815 tree maxlen = get_maxval_strlen (src, 1);
1816 if (! integer_all_onesp (size))
1818 len = c_strlen (src, 1);
1819 if (! len || ! tree_fits_uhwi_p (len))
1821 /* If LEN is not constant, try MAXLEN too.
1822 For MAXLEN only allow optimizing into non-_ocs function
1823 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1824 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1826 if (fcode == BUILT_IN_STPCPY_CHK)
1828 if (! ignore)
1829 return false;
1831 /* If return value of __stpcpy_chk is ignored,
1832 optimize into __strcpy_chk. */
1833 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1834 if (!fn)
1835 return false;
1837 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1838 replace_call_with_call_and_fold (gsi, repl);
1839 return true;
1842 if (! len || TREE_SIDE_EFFECTS (len))
1843 return false;
1845 /* If c_strlen returned something, but not a constant,
1846 transform __strcpy_chk into __memcpy_chk. */
1847 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1848 if (!fn)
1849 return false;
1851 gimple_seq stmts = NULL;
1852 len = gimple_convert (&stmts, loc, size_type_node, len);
1853 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1854 build_int_cst (size_type_node, 1));
1855 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1856 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1857 replace_call_with_call_and_fold (gsi, repl);
1858 return true;
1861 else
1862 maxlen = len;
1864 if (! tree_int_cst_lt (maxlen, size))
1865 return false;
1868 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1869 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1870 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1871 if (!fn)
1872 return false;
1874 gimple *repl = gimple_build_call (fn, 2, dest, src);
1875 replace_call_with_call_and_fold (gsi, repl);
1876 return true;
1879 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1880 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1881 length passed as third argument. IGNORE is true if return value can be
1882 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1884 static bool
1885 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1886 tree dest, tree src,
1887 tree len, tree size,
1888 enum built_in_function fcode)
1890 gimple *stmt = gsi_stmt (*gsi);
1891 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1892 tree fn;
1894 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1896 /* If return value of __stpncpy_chk is ignored,
1897 optimize into __strncpy_chk. */
1898 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1899 if (fn)
1901 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1902 replace_call_with_call_and_fold (gsi, repl);
1903 return true;
1907 if (! tree_fits_uhwi_p (size))
1908 return false;
1910 tree maxlen = get_maxval_strlen (len, 2);
1911 if (! integer_all_onesp (size))
1913 if (! tree_fits_uhwi_p (len))
1915 /* If LEN is not constant, try MAXLEN too.
1916 For MAXLEN only allow optimizing into non-_ocs function
1917 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1918 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1919 return false;
1921 else
1922 maxlen = len;
1924 if (tree_int_cst_lt (size, maxlen))
1925 return false;
1928 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1929 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1930 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1931 if (!fn)
1932 return false;
1934 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1935 replace_call_with_call_and_fold (gsi, repl);
1936 return true;
1939 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1940 Return NULL_TREE if no simplification can be made. */
1942 static bool
1943 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1945 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1946 location_t loc = gimple_location (stmt);
1947 tree dest = gimple_call_arg (stmt, 0);
1948 tree src = gimple_call_arg (stmt, 1);
1949 tree fn, len, lenp1;
1951 /* If the result is unused, replace stpcpy with strcpy. */
1952 if (gimple_call_lhs (stmt) == NULL_TREE)
1954 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1955 if (!fn)
1956 return false;
1957 gimple_call_set_fndecl (stmt, fn);
1958 fold_stmt (gsi);
1959 return true;
1962 len = c_strlen (src, 1);
1963 if (!len
1964 || TREE_CODE (len) != INTEGER_CST)
1965 return false;
1967 if (optimize_function_for_size_p (cfun)
1968 /* If length is zero it's small enough. */
1969 && !integer_zerop (len))
1970 return false;
1972 /* If the source has a known length replace stpcpy with memcpy. */
1973 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1974 if (!fn)
1975 return false;
1977 gimple_seq stmts = NULL;
1978 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1979 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1980 tem, build_int_cst (size_type_node, 1));
1981 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1982 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1983 gimple_set_vuse (repl, gimple_vuse (stmt));
1984 gimple_set_vdef (repl, gimple_vdef (stmt));
1985 if (gimple_vdef (repl)
1986 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1987 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1988 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1989 /* Replace the result with dest + len. */
1990 stmts = NULL;
1991 tem = gimple_convert (&stmts, loc, sizetype, len);
1992 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1993 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1994 POINTER_PLUS_EXPR, dest, tem);
1995 gsi_replace (gsi, ret, false);
1996 /* Finally fold the memcpy call. */
1997 gimple_stmt_iterator gsi2 = *gsi;
1998 gsi_prev (&gsi2);
1999 fold_stmt (&gsi2);
2000 return true;
2003 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2004 NULL_TREE if a normal call should be emitted rather than expanding
2005 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2006 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2007 passed as second argument. */
2009 static bool
2010 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2011 enum built_in_function fcode)
2013 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2014 tree dest, size, len, fn, fmt, flag;
2015 const char *fmt_str;
2017 /* Verify the required arguments in the original call. */
2018 if (gimple_call_num_args (stmt) < 5)
2019 return false;
2021 dest = gimple_call_arg (stmt, 0);
2022 len = gimple_call_arg (stmt, 1);
2023 flag = gimple_call_arg (stmt, 2);
2024 size = gimple_call_arg (stmt, 3);
2025 fmt = gimple_call_arg (stmt, 4);
2027 if (! tree_fits_uhwi_p (size))
2028 return false;
2030 if (! integer_all_onesp (size))
2032 tree maxlen = get_maxval_strlen (len, 2);
2033 if (! tree_fits_uhwi_p (len))
2035 /* If LEN is not constant, try MAXLEN too.
2036 For MAXLEN only allow optimizing into non-_ocs function
2037 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2038 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2039 return false;
2041 else
2042 maxlen = len;
2044 if (tree_int_cst_lt (size, maxlen))
2045 return false;
2048 if (!init_target_chars ())
2049 return false;
2051 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2052 or if format doesn't contain % chars or is "%s". */
2053 if (! integer_zerop (flag))
2055 fmt_str = c_getstr (fmt);
2056 if (fmt_str == NULL)
2057 return false;
2058 if (strchr (fmt_str, target_percent) != NULL
2059 && strcmp (fmt_str, target_percent_s))
2060 return false;
2063 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2064 available. */
2065 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2066 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2067 if (!fn)
2068 return false;
2070 /* Replace the called function and the first 5 argument by 3 retaining
2071 trailing varargs. */
2072 gimple_call_set_fndecl (stmt, fn);
2073 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2074 gimple_call_set_arg (stmt, 0, dest);
2075 gimple_call_set_arg (stmt, 1, len);
2076 gimple_call_set_arg (stmt, 2, fmt);
2077 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2078 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2079 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2080 fold_stmt (gsi);
2081 return true;
2084 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2085 Return NULL_TREE if a normal call should be emitted rather than
2086 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2087 or BUILT_IN_VSPRINTF_CHK. */
2089 static bool
2090 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2091 enum built_in_function fcode)
2093 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2094 tree dest, size, len, fn, fmt, flag;
2095 const char *fmt_str;
2096 unsigned nargs = gimple_call_num_args (stmt);
2098 /* Verify the required arguments in the original call. */
2099 if (nargs < 4)
2100 return false;
2101 dest = gimple_call_arg (stmt, 0);
2102 flag = gimple_call_arg (stmt, 1);
2103 size = gimple_call_arg (stmt, 2);
2104 fmt = gimple_call_arg (stmt, 3);
2106 if (! tree_fits_uhwi_p (size))
2107 return false;
2109 len = NULL_TREE;
2111 if (!init_target_chars ())
2112 return false;
2114 /* Check whether the format is a literal string constant. */
2115 fmt_str = c_getstr (fmt);
2116 if (fmt_str != NULL)
2118 /* If the format doesn't contain % args or %%, we know the size. */
2119 if (strchr (fmt_str, target_percent) == 0)
2121 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2122 len = build_int_cstu (size_type_node, strlen (fmt_str));
2124 /* If the format is "%s" and first ... argument is a string literal,
2125 we know the size too. */
2126 else if (fcode == BUILT_IN_SPRINTF_CHK
2127 && strcmp (fmt_str, target_percent_s) == 0)
2129 tree arg;
2131 if (nargs == 5)
2133 arg = gimple_call_arg (stmt, 4);
2134 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2136 len = c_strlen (arg, 1);
2137 if (! len || ! tree_fits_uhwi_p (len))
2138 len = NULL_TREE;
2144 if (! integer_all_onesp (size))
2146 if (! len || ! tree_int_cst_lt (len, size))
2147 return false;
2150 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2151 or if format doesn't contain % chars or is "%s". */
2152 if (! integer_zerop (flag))
2154 if (fmt_str == NULL)
2155 return false;
2156 if (strchr (fmt_str, target_percent) != NULL
2157 && strcmp (fmt_str, target_percent_s))
2158 return false;
2161 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2162 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2163 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2164 if (!fn)
2165 return false;
2167 /* Replace the called function and the first 4 argument by 2 retaining
2168 trailing varargs. */
2169 gimple_call_set_fndecl (stmt, fn);
2170 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2171 gimple_call_set_arg (stmt, 0, dest);
2172 gimple_call_set_arg (stmt, 1, fmt);
2173 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2174 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2175 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2176 fold_stmt (gsi);
2177 return true;
2180 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2181 ORIG may be null if this is a 2-argument call. We don't attempt to
2182 simplify calls with more than 3 arguments.
2184 Return NULL_TREE if no simplification was possible, otherwise return the
2185 simplified form of the call as a tree. If IGNORED is true, it means that
2186 the caller does not use the returned value of the function. */
2188 static bool
2189 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2191 gimple *stmt = gsi_stmt (*gsi);
2192 tree dest = gimple_call_arg (stmt, 0);
2193 tree fmt = gimple_call_arg (stmt, 1);
2194 tree orig = NULL_TREE;
2195 const char *fmt_str = NULL;
2197 /* Verify the required arguments in the original call. We deal with two
2198 types of sprintf() calls: 'sprintf (str, fmt)' and
2199 'sprintf (dest, "%s", orig)'. */
2200 if (gimple_call_num_args (stmt) > 3)
2201 return false;
2203 if (gimple_call_num_args (stmt) == 3)
2204 orig = gimple_call_arg (stmt, 2);
2206 /* Check whether the format is a literal string constant. */
2207 fmt_str = c_getstr (fmt);
2208 if (fmt_str == NULL)
2209 return false;
2211 if (!init_target_chars ())
2212 return false;
2214 /* If the format doesn't contain % args or %%, use strcpy. */
2215 if (strchr (fmt_str, target_percent) == NULL)
2217 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2219 if (!fn)
2220 return false;
2222 /* Don't optimize sprintf (buf, "abc", ptr++). */
2223 if (orig)
2224 return false;
2226 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2227 'format' is known to contain no % formats. */
2228 gimple_seq stmts = NULL;
2229 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2230 gimple_seq_add_stmt_without_update (&stmts, repl);
2231 if (gimple_call_lhs (stmt))
2233 repl = gimple_build_assign (gimple_call_lhs (stmt),
2234 build_int_cst (integer_type_node,
2235 strlen (fmt_str)));
2236 gimple_seq_add_stmt_without_update (&stmts, repl);
2237 gsi_replace_with_seq_vops (gsi, stmts);
2238 /* gsi now points at the assignment to the lhs, get a
2239 stmt iterator to the memcpy call.
2240 ??? We can't use gsi_for_stmt as that doesn't work when the
2241 CFG isn't built yet. */
2242 gimple_stmt_iterator gsi2 = *gsi;
2243 gsi_prev (&gsi2);
2244 fold_stmt (&gsi2);
2246 else
2248 gsi_replace_with_seq_vops (gsi, stmts);
2249 fold_stmt (gsi);
2251 return true;
2254 /* If the format is "%s", use strcpy if the result isn't used. */
2255 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2257 tree fn;
2258 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2260 if (!fn)
2261 return false;
2263 /* Don't crash on sprintf (str1, "%s"). */
2264 if (!orig)
2265 return false;
2267 tree orig_len = NULL_TREE;
2268 if (gimple_call_lhs (stmt))
2270 orig_len = get_maxval_strlen (orig, 0);
2271 if (!orig_len)
2272 return false;
2275 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2276 gimple_seq stmts = NULL;
2277 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2278 gimple_seq_add_stmt_without_update (&stmts, repl);
2279 if (gimple_call_lhs (stmt))
2281 if (!useless_type_conversion_p (integer_type_node,
2282 TREE_TYPE (orig_len)))
2283 orig_len = fold_convert (integer_type_node, orig_len);
2284 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2285 gimple_seq_add_stmt_without_update (&stmts, repl);
2286 gsi_replace_with_seq_vops (gsi, stmts);
2287 /* gsi now points at the assignment to the lhs, get a
2288 stmt iterator to the memcpy call.
2289 ??? We can't use gsi_for_stmt as that doesn't work when the
2290 CFG isn't built yet. */
2291 gimple_stmt_iterator gsi2 = *gsi;
2292 gsi_prev (&gsi2);
2293 fold_stmt (&gsi2);
2295 else
2297 gsi_replace_with_seq_vops (gsi, stmts);
2298 fold_stmt (gsi);
2300 return true;
2302 return false;
2305 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2306 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2307 attempt to simplify calls with more than 4 arguments.
2309 Return NULL_TREE if no simplification was possible, otherwise return the
2310 simplified form of the call as a tree. If IGNORED is true, it means that
2311 the caller does not use the returned value of the function. */
2313 static bool
2314 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2316 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2317 tree dest = gimple_call_arg (stmt, 0);
2318 tree destsize = gimple_call_arg (stmt, 1);
2319 tree fmt = gimple_call_arg (stmt, 2);
2320 tree orig = NULL_TREE;
2321 const char *fmt_str = NULL;
2323 if (gimple_call_num_args (stmt) > 4)
2324 return false;
2326 if (gimple_call_num_args (stmt) == 4)
2327 orig = gimple_call_arg (stmt, 3);
2329 if (!tree_fits_uhwi_p (destsize))
2330 return false;
2331 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2333 /* Check whether the format is a literal string constant. */
2334 fmt_str = c_getstr (fmt);
2335 if (fmt_str == NULL)
2336 return false;
2338 if (!init_target_chars ())
2339 return false;
2341 /* If the format doesn't contain % args or %%, use strcpy. */
2342 if (strchr (fmt_str, target_percent) == NULL)
2344 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2345 if (!fn)
2346 return false;
2348 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2349 if (orig)
2350 return false;
2352 /* We could expand this as
2353 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2354 or to
2355 memcpy (str, fmt_with_nul_at_cstm1, cst);
2356 but in the former case that might increase code size
2357 and in the latter case grow .rodata section too much.
2358 So punt for now. */
2359 size_t len = strlen (fmt_str);
2360 if (len >= destlen)
2361 return false;
2363 gimple_seq stmts = NULL;
2364 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2365 gimple_seq_add_stmt_without_update (&stmts, repl);
2366 if (gimple_call_lhs (stmt))
2368 repl = gimple_build_assign (gimple_call_lhs (stmt),
2369 build_int_cst (integer_type_node, len));
2370 gimple_seq_add_stmt_without_update (&stmts, repl);
2371 gsi_replace_with_seq_vops (gsi, stmts);
2372 /* gsi now points at the assignment to the lhs, get a
2373 stmt iterator to the memcpy call.
2374 ??? We can't use gsi_for_stmt as that doesn't work when the
2375 CFG isn't built yet. */
2376 gimple_stmt_iterator gsi2 = *gsi;
2377 gsi_prev (&gsi2);
2378 fold_stmt (&gsi2);
2380 else
2382 gsi_replace_with_seq_vops (gsi, stmts);
2383 fold_stmt (gsi);
2385 return true;
2388 /* If the format is "%s", use strcpy if the result isn't used. */
2389 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2391 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2392 if (!fn)
2393 return false;
2395 /* Don't crash on snprintf (str1, cst, "%s"). */
2396 if (!orig)
2397 return false;
2399 tree orig_len = get_maxval_strlen (orig, 0);
2400 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2401 return false;
2403 /* We could expand this as
2404 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2405 or to
2406 memcpy (str1, str2_with_nul_at_cstm1, cst);
2407 but in the former case that might increase code size
2408 and in the latter case grow .rodata section too much.
2409 So punt for now. */
2410 if (compare_tree_int (orig_len, destlen) >= 0)
2411 return false;
2413 /* Convert snprintf (str1, cst, "%s", str2) into
2414 strcpy (str1, str2) if strlen (str2) < cst. */
2415 gimple_seq stmts = NULL;
2416 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2417 gimple_seq_add_stmt_without_update (&stmts, repl);
2418 if (gimple_call_lhs (stmt))
2420 if (!useless_type_conversion_p (integer_type_node,
2421 TREE_TYPE (orig_len)))
2422 orig_len = fold_convert (integer_type_node, orig_len);
2423 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2424 gimple_seq_add_stmt_without_update (&stmts, repl);
2425 gsi_replace_with_seq_vops (gsi, stmts);
2426 /* gsi now points at the assignment to the lhs, get a
2427 stmt iterator to the memcpy call.
2428 ??? We can't use gsi_for_stmt as that doesn't work when the
2429 CFG isn't built yet. */
2430 gimple_stmt_iterator gsi2 = *gsi;
2431 gsi_prev (&gsi2);
2432 fold_stmt (&gsi2);
2434 else
2436 gsi_replace_with_seq_vops (gsi, stmts);
2437 fold_stmt (gsi);
2439 return true;
2441 return false;
2444 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2445 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2446 more than 3 arguments, and ARG may be null in the 2-argument case.
2448 Return NULL_TREE if no simplification was possible, otherwise return the
2449 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2450 code of the function to be simplified. */
2452 static bool
2453 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2454 tree fp, tree fmt, tree arg,
2455 enum built_in_function fcode)
2457 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2458 tree fn_fputc, fn_fputs;
2459 const char *fmt_str = NULL;
2461 /* If the return value is used, don't do the transformation. */
2462 if (gimple_call_lhs (stmt) != NULL_TREE)
2463 return false;
2465 /* Check whether the format is a literal string constant. */
2466 fmt_str = c_getstr (fmt);
2467 if (fmt_str == NULL)
2468 return false;
2470 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2472 /* If we're using an unlocked function, assume the other
2473 unlocked functions exist explicitly. */
2474 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2475 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2477 else
2479 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2480 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2483 if (!init_target_chars ())
2484 return false;
2486 /* If the format doesn't contain % args or %%, use strcpy. */
2487 if (strchr (fmt_str, target_percent) == NULL)
2489 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2490 && arg)
2491 return false;
2493 /* If the format specifier was "", fprintf does nothing. */
2494 if (fmt_str[0] == '\0')
2496 replace_call_with_value (gsi, NULL_TREE);
2497 return true;
2500 /* When "string" doesn't contain %, replace all cases of
2501 fprintf (fp, string) with fputs (string, fp). The fputs
2502 builtin will take care of special cases like length == 1. */
2503 if (fn_fputs)
2505 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2506 replace_call_with_call_and_fold (gsi, repl);
2507 return true;
2511 /* The other optimizations can be done only on the non-va_list variants. */
2512 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2513 return false;
2515 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2516 else if (strcmp (fmt_str, target_percent_s) == 0)
2518 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2519 return false;
2520 if (fn_fputs)
2522 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2523 replace_call_with_call_and_fold (gsi, repl);
2524 return true;
2528 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2529 else if (strcmp (fmt_str, target_percent_c) == 0)
2531 if (!arg
2532 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2533 return false;
2534 if (fn_fputc)
2536 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2537 replace_call_with_call_and_fold (gsi, repl);
2538 return true;
2542 return false;
2545 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2546 FMT and ARG are the arguments to the call; we don't fold cases with
2547 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2549 Return NULL_TREE if no simplification was possible, otherwise return the
2550 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2551 code of the function to be simplified. */
2553 static bool
2554 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2555 tree arg, enum built_in_function fcode)
2557 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2558 tree fn_putchar, fn_puts, newarg;
2559 const char *fmt_str = NULL;
2561 /* If the return value is used, don't do the transformation. */
2562 if (gimple_call_lhs (stmt) != NULL_TREE)
2563 return false;
2565 /* Check whether the format is a literal string constant. */
2566 fmt_str = c_getstr (fmt);
2567 if (fmt_str == NULL)
2568 return false;
2570 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2572 /* If we're using an unlocked function, assume the other
2573 unlocked functions exist explicitly. */
2574 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2575 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2577 else
2579 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2580 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2583 if (!init_target_chars ())
2584 return false;
2586 if (strcmp (fmt_str, target_percent_s) == 0
2587 || strchr (fmt_str, target_percent) == NULL)
2589 const char *str;
2591 if (strcmp (fmt_str, target_percent_s) == 0)
2593 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2594 return false;
2596 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2597 return false;
2599 str = c_getstr (arg);
2600 if (str == NULL)
2601 return false;
2603 else
2605 /* The format specifier doesn't contain any '%' characters. */
2606 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2607 && arg)
2608 return false;
2609 str = fmt_str;
2612 /* If the string was "", printf does nothing. */
2613 if (str[0] == '\0')
2615 replace_call_with_value (gsi, NULL_TREE);
2616 return true;
2619 /* If the string has length of 1, call putchar. */
2620 if (str[1] == '\0')
2622 /* Given printf("c"), (where c is any one character,)
2623 convert "c"[0] to an int and pass that to the replacement
2624 function. */
2625 newarg = build_int_cst (integer_type_node, str[0]);
2626 if (fn_putchar)
2628 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2629 replace_call_with_call_and_fold (gsi, repl);
2630 return true;
2633 else
2635 /* If the string was "string\n", call puts("string"). */
2636 size_t len = strlen (str);
2637 if ((unsigned char)str[len - 1] == target_newline
2638 && (size_t) (int) len == len
2639 && (int) len > 0)
2641 char *newstr;
2642 tree offset_node, string_cst;
2644 /* Create a NUL-terminated string that's one char shorter
2645 than the original, stripping off the trailing '\n'. */
2646 newarg = build_string_literal (len, str);
2647 string_cst = string_constant (newarg, &offset_node);
2648 gcc_checking_assert (string_cst
2649 && (TREE_STRING_LENGTH (string_cst)
2650 == (int) len)
2651 && integer_zerop (offset_node)
2652 && (unsigned char)
2653 TREE_STRING_POINTER (string_cst)[len - 1]
2654 == target_newline);
2655 /* build_string_literal creates a new STRING_CST,
2656 modify it in place to avoid double copying. */
2657 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2658 newstr[len - 1] = '\0';
2659 if (fn_puts)
2661 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2662 replace_call_with_call_and_fold (gsi, repl);
2663 return true;
2666 else
2667 /* We'd like to arrange to call fputs(string,stdout) here,
2668 but we need stdout and don't have a way to get it yet. */
2669 return false;
2673 /* The other optimizations can be done only on the non-va_list variants. */
2674 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2675 return false;
2677 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2678 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2680 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2681 return false;
2682 if (fn_puts)
2684 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2685 replace_call_with_call_and_fold (gsi, repl);
2686 return true;
2690 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2691 else if (strcmp (fmt_str, target_percent_c) == 0)
2693 if (!arg || ! useless_type_conversion_p (integer_type_node,
2694 TREE_TYPE (arg)))
2695 return false;
2696 if (fn_putchar)
2698 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2699 replace_call_with_call_and_fold (gsi, repl);
2700 return true;
2704 return false;
2709 /* Fold a call to __builtin_strlen with known length LEN. */
2711 static bool
2712 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2714 gimple *stmt = gsi_stmt (*gsi);
2715 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2716 if (!len)
2717 return false;
2718 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2719 replace_call_with_value (gsi, len);
2720 return true;
2723 /* Fold a call to __builtin_acc_on_device. */
2725 static bool
2726 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2728 /* Defer folding until we know which compiler we're in. */
2729 if (symtab->state != EXPANSION)
2730 return false;
2732 unsigned val_host = GOMP_DEVICE_HOST;
2733 unsigned val_dev = GOMP_DEVICE_NONE;
2735 #ifdef ACCEL_COMPILER
2736 val_host = GOMP_DEVICE_NOT_HOST;
2737 val_dev = ACCEL_COMPILER_acc_device;
2738 #endif
2740 location_t loc = gimple_location (gsi_stmt (*gsi));
2742 tree host_eq = make_ssa_name (boolean_type_node);
2743 gimple *host_ass = gimple_build_assign
2744 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2745 gimple_set_location (host_ass, loc);
2746 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2748 tree dev_eq = make_ssa_name (boolean_type_node);
2749 gimple *dev_ass = gimple_build_assign
2750 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2751 gimple_set_location (dev_ass, loc);
2752 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2754 tree result = make_ssa_name (boolean_type_node);
2755 gimple *result_ass = gimple_build_assign
2756 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2757 gimple_set_location (result_ass, loc);
2758 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2760 replace_call_with_value (gsi, result);
2762 return true;
2765 /* Fold the non-target builtin at *GSI and return whether any simplification
2766 was made. */
2768 static bool
2769 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2771 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2772 tree callee = gimple_call_fndecl (stmt);
2774 /* Give up for always_inline inline builtins until they are
2775 inlined. */
2776 if (avoid_folding_inline_builtin (callee))
2777 return false;
2779 unsigned n = gimple_call_num_args (stmt);
2780 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2781 switch (fcode)
2783 case BUILT_IN_BZERO:
2784 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2785 gimple_call_arg (stmt, 1));
2786 case BUILT_IN_MEMSET:
2787 return gimple_fold_builtin_memset (gsi,
2788 gimple_call_arg (stmt, 1),
2789 gimple_call_arg (stmt, 2));
2790 case BUILT_IN_BCOPY:
2791 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2792 gimple_call_arg (stmt, 0), 3);
2793 case BUILT_IN_MEMCPY:
2794 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2795 gimple_call_arg (stmt, 1), 0);
2796 case BUILT_IN_MEMPCPY:
2797 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2798 gimple_call_arg (stmt, 1), 1);
2799 case BUILT_IN_MEMMOVE:
2800 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2801 gimple_call_arg (stmt, 1), 3);
2802 case BUILT_IN_SPRINTF_CHK:
2803 case BUILT_IN_VSPRINTF_CHK:
2804 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2805 case BUILT_IN_STRCAT_CHK:
2806 return gimple_fold_builtin_strcat_chk (gsi);
2807 case BUILT_IN_STRNCAT_CHK:
2808 return gimple_fold_builtin_strncat_chk (gsi);
2809 case BUILT_IN_STRLEN:
2810 return gimple_fold_builtin_strlen (gsi);
2811 case BUILT_IN_STRCPY:
2812 return gimple_fold_builtin_strcpy (gsi,
2813 gimple_call_arg (stmt, 0),
2814 gimple_call_arg (stmt, 1));
2815 case BUILT_IN_STRNCPY:
2816 return gimple_fold_builtin_strncpy (gsi,
2817 gimple_call_arg (stmt, 0),
2818 gimple_call_arg (stmt, 1),
2819 gimple_call_arg (stmt, 2));
2820 case BUILT_IN_STRCAT:
2821 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2822 gimple_call_arg (stmt, 1));
2823 case BUILT_IN_STRNCAT:
2824 return gimple_fold_builtin_strncat (gsi);
2825 case BUILT_IN_FPUTS:
2826 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2827 gimple_call_arg (stmt, 1), false);
2828 case BUILT_IN_FPUTS_UNLOCKED:
2829 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2830 gimple_call_arg (stmt, 1), true);
2831 case BUILT_IN_MEMCPY_CHK:
2832 case BUILT_IN_MEMPCPY_CHK:
2833 case BUILT_IN_MEMMOVE_CHK:
2834 case BUILT_IN_MEMSET_CHK:
2835 return gimple_fold_builtin_memory_chk (gsi,
2836 gimple_call_arg (stmt, 0),
2837 gimple_call_arg (stmt, 1),
2838 gimple_call_arg (stmt, 2),
2839 gimple_call_arg (stmt, 3),
2840 fcode);
2841 case BUILT_IN_STPCPY:
2842 return gimple_fold_builtin_stpcpy (gsi);
2843 case BUILT_IN_STRCPY_CHK:
2844 case BUILT_IN_STPCPY_CHK:
2845 return gimple_fold_builtin_stxcpy_chk (gsi,
2846 gimple_call_arg (stmt, 0),
2847 gimple_call_arg (stmt, 1),
2848 gimple_call_arg (stmt, 2),
2849 fcode);
2850 case BUILT_IN_STRNCPY_CHK:
2851 case BUILT_IN_STPNCPY_CHK:
2852 return gimple_fold_builtin_stxncpy_chk (gsi,
2853 gimple_call_arg (stmt, 0),
2854 gimple_call_arg (stmt, 1),
2855 gimple_call_arg (stmt, 2),
2856 gimple_call_arg (stmt, 3),
2857 fcode);
2858 case BUILT_IN_SNPRINTF_CHK:
2859 case BUILT_IN_VSNPRINTF_CHK:
2860 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2861 case BUILT_IN_SNPRINTF:
2862 return gimple_fold_builtin_snprintf (gsi);
2863 case BUILT_IN_SPRINTF:
2864 return gimple_fold_builtin_sprintf (gsi);
2865 case BUILT_IN_FPRINTF:
2866 case BUILT_IN_FPRINTF_UNLOCKED:
2867 case BUILT_IN_VFPRINTF:
2868 if (n == 2 || n == 3)
2869 return gimple_fold_builtin_fprintf (gsi,
2870 gimple_call_arg (stmt, 0),
2871 gimple_call_arg (stmt, 1),
2872 n == 3
2873 ? gimple_call_arg (stmt, 2)
2874 : NULL_TREE,
2875 fcode);
2876 break;
2877 case BUILT_IN_FPRINTF_CHK:
2878 case BUILT_IN_VFPRINTF_CHK:
2879 if (n == 3 || n == 4)
2880 return gimple_fold_builtin_fprintf (gsi,
2881 gimple_call_arg (stmt, 0),
2882 gimple_call_arg (stmt, 2),
2883 n == 4
2884 ? gimple_call_arg (stmt, 3)
2885 : NULL_TREE,
2886 fcode);
2887 break;
2888 case BUILT_IN_PRINTF:
2889 case BUILT_IN_PRINTF_UNLOCKED:
2890 case BUILT_IN_VPRINTF:
2891 if (n == 1 || n == 2)
2892 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2893 n == 2
2894 ? gimple_call_arg (stmt, 1)
2895 : NULL_TREE, fcode);
2896 break;
2897 case BUILT_IN_PRINTF_CHK:
2898 case BUILT_IN_VPRINTF_CHK:
2899 if (n == 2 || n == 3)
2900 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2901 n == 3
2902 ? gimple_call_arg (stmt, 2)
2903 : NULL_TREE, fcode);
2904 break;
2905 case BUILT_IN_ACC_ON_DEVICE:
2906 return gimple_fold_builtin_acc_on_device (gsi,
2907 gimple_call_arg (stmt, 0));
2908 default:;
2911 /* Try the generic builtin folder. */
2912 bool ignore = (gimple_call_lhs (stmt) == NULL);
2913 tree result = fold_call_stmt (stmt, ignore);
2914 if (result)
2916 if (ignore)
2917 STRIP_NOPS (result);
2918 else
2919 result = fold_convert (gimple_call_return_type (stmt), result);
2920 if (!update_call_from_tree (gsi, result))
2921 gimplify_and_update_call_from_tree (gsi, result);
2922 return true;
2925 return false;
2928 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2929 function calls to constants, where possible. */
2931 static tree
2932 fold_internal_goacc_dim (const gimple *call)
2934 int axis = get_oacc_ifn_dim_arg (call);
2935 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2936 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2937 tree result = NULL_TREE;
2939 /* If the size is 1, or we only want the size and it is not dynamic,
2940 we know the answer. */
2941 if (size == 1 || (!is_pos && size))
2943 tree type = TREE_TYPE (gimple_call_lhs (call));
2944 result = build_int_cst (type, size - is_pos);
2947 return result;
2950 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2951 doesn't fit into TYPE. The test for overflow should be regardless of
2952 -fwrapv, and even for unsigned types. */
2954 bool
2955 arith_overflowed_p (enum tree_code code, const_tree type,
2956 const_tree arg0, const_tree arg1)
2958 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2959 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2960 widest2_int_cst;
2961 widest2_int warg0 = widest2_int_cst (arg0);
2962 widest2_int warg1 = widest2_int_cst (arg1);
2963 widest2_int wres;
2964 switch (code)
2966 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2967 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2968 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2969 default: gcc_unreachable ();
2971 signop sign = TYPE_SIGN (type);
2972 if (sign == UNSIGNED && wi::neg_p (wres))
2973 return true;
2974 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2977 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2978 The statement may be replaced by another statement, e.g., if the call
2979 simplifies to a constant value. Return true if any changes were made.
2980 It is assumed that the operands have been previously folded. */
2982 static bool
2983 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2985 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2986 tree callee;
2987 bool changed = false;
2988 unsigned i;
2990 /* Fold *& in call arguments. */
2991 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2992 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2994 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2995 if (tmp)
2997 gimple_call_set_arg (stmt, i, tmp);
2998 changed = true;
3002 /* Check for virtual calls that became direct calls. */
3003 callee = gimple_call_fn (stmt);
3004 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3006 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3008 if (dump_file && virtual_method_call_p (callee)
3009 && !possible_polymorphic_call_target_p
3010 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3011 (OBJ_TYPE_REF_EXPR (callee)))))
3013 fprintf (dump_file,
3014 "Type inheritance inconsistent devirtualization of ");
3015 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3016 fprintf (dump_file, " to ");
3017 print_generic_expr (dump_file, callee, TDF_SLIM);
3018 fprintf (dump_file, "\n");
3021 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3022 changed = true;
3024 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3026 bool final;
3027 vec <cgraph_node *>targets
3028 = possible_polymorphic_call_targets (callee, stmt, &final);
3029 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3031 tree lhs = gimple_call_lhs (stmt);
3032 if (dump_enabled_p ())
3034 location_t loc = gimple_location_safe (stmt);
3035 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3036 "folding virtual function call to %s\n",
3037 targets.length () == 1
3038 ? targets[0]->name ()
3039 : "__builtin_unreachable");
3041 if (targets.length () == 1)
3043 tree fndecl = targets[0]->decl;
3044 gimple_call_set_fndecl (stmt, fndecl);
3045 changed = true;
3046 /* If changing the call to __cxa_pure_virtual
3047 or similar noreturn function, adjust gimple_call_fntype
3048 too. */
3049 if ((gimple_call_flags (stmt) & ECF_NORETURN)
3050 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3051 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3052 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3053 == void_type_node))
3054 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3055 /* If the call becomes noreturn, remove the lhs. */
3056 if (lhs
3057 && gimple_call_noreturn_p (stmt)
3058 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3059 || should_remove_lhs_p (lhs)))
3061 if (TREE_CODE (lhs) == SSA_NAME)
3063 tree var = create_tmp_var (TREE_TYPE (lhs));
3064 tree def = get_or_create_ssa_default_def (cfun, var);
3065 gimple *new_stmt = gimple_build_assign (lhs, def);
3066 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3068 gimple_call_set_lhs (stmt, NULL_TREE);
3070 maybe_remove_unused_call_args (cfun, stmt);
3072 else
3074 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3075 gimple *new_stmt = gimple_build_call (fndecl, 0);
3076 gimple_set_location (new_stmt, gimple_location (stmt));
3077 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3079 tree var = create_tmp_var (TREE_TYPE (lhs));
3080 tree def = get_or_create_ssa_default_def (cfun, var);
3082 /* To satisfy condition for
3083 cgraph_update_edges_for_call_stmt_node,
3084 we need to preserve GIMPLE_CALL statement
3085 at position of GSI iterator. */
3086 update_call_from_tree (gsi, def);
3087 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3089 else
3091 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3092 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3093 gsi_replace (gsi, new_stmt, false);
3095 return true;
3101 /* Check for indirect calls that became direct calls, and then
3102 no longer require a static chain. */
3103 if (gimple_call_chain (stmt))
3105 tree fn = gimple_call_fndecl (stmt);
3106 if (fn && !DECL_STATIC_CHAIN (fn))
3108 gimple_call_set_chain (stmt, NULL);
3109 changed = true;
3111 else
3113 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3114 if (tmp)
3116 gimple_call_set_chain (stmt, tmp);
3117 changed = true;
3122 if (inplace)
3123 return changed;
3125 /* Check for builtins that CCP can handle using information not
3126 available in the generic fold routines. */
3127 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3129 if (gimple_fold_builtin (gsi))
3130 changed = true;
3132 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3134 changed |= targetm.gimple_fold_builtin (gsi);
3136 else if (gimple_call_internal_p (stmt))
3138 enum tree_code subcode = ERROR_MARK;
3139 tree result = NULL_TREE;
3140 bool cplx_result = false;
3141 tree overflow = NULL_TREE;
3142 switch (gimple_call_internal_fn (stmt))
3144 case IFN_BUILTIN_EXPECT:
3145 result = fold_builtin_expect (gimple_location (stmt),
3146 gimple_call_arg (stmt, 0),
3147 gimple_call_arg (stmt, 1),
3148 gimple_call_arg (stmt, 2));
3149 break;
3150 case IFN_UBSAN_OBJECT_SIZE:
3151 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3152 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3153 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3154 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3155 gimple_call_arg (stmt, 2))))
3157 gsi_replace (gsi, gimple_build_nop (), false);
3158 unlink_stmt_vdef (stmt);
3159 release_defs (stmt);
3160 return true;
3162 break;
3163 case IFN_GOACC_DIM_SIZE:
3164 case IFN_GOACC_DIM_POS:
3165 result = fold_internal_goacc_dim (stmt);
3166 break;
3167 case IFN_UBSAN_CHECK_ADD:
3168 subcode = PLUS_EXPR;
3169 break;
3170 case IFN_UBSAN_CHECK_SUB:
3171 subcode = MINUS_EXPR;
3172 break;
3173 case IFN_UBSAN_CHECK_MUL:
3174 subcode = MULT_EXPR;
3175 break;
3176 case IFN_ADD_OVERFLOW:
3177 subcode = PLUS_EXPR;
3178 cplx_result = true;
3179 break;
3180 case IFN_SUB_OVERFLOW:
3181 subcode = MINUS_EXPR;
3182 cplx_result = true;
3183 break;
3184 case IFN_MUL_OVERFLOW:
3185 subcode = MULT_EXPR;
3186 cplx_result = true;
3187 break;
3188 default:
3189 break;
3191 if (subcode != ERROR_MARK)
3193 tree arg0 = gimple_call_arg (stmt, 0);
3194 tree arg1 = gimple_call_arg (stmt, 1);
3195 tree type = TREE_TYPE (arg0);
3196 if (cplx_result)
3198 tree lhs = gimple_call_lhs (stmt);
3199 if (lhs == NULL_TREE)
3200 type = NULL_TREE;
3201 else
3202 type = TREE_TYPE (TREE_TYPE (lhs));
3204 if (type == NULL_TREE)
3206 /* x = y + 0; x = y - 0; x = y * 0; */
3207 else if (integer_zerop (arg1))
3208 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3209 /* x = 0 + y; x = 0 * y; */
3210 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3211 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3212 /* x = y - y; */
3213 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3214 result = integer_zero_node;
3215 /* x = y * 1; x = 1 * y; */
3216 else if (subcode == MULT_EXPR && integer_onep (arg1))
3217 result = arg0;
3218 else if (subcode == MULT_EXPR && integer_onep (arg0))
3219 result = arg1;
3220 else if (TREE_CODE (arg0) == INTEGER_CST
3221 && TREE_CODE (arg1) == INTEGER_CST)
3223 if (cplx_result)
3224 result = int_const_binop (subcode, fold_convert (type, arg0),
3225 fold_convert (type, arg1));
3226 else
3227 result = int_const_binop (subcode, arg0, arg1);
3228 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3230 if (cplx_result)
3231 overflow = build_one_cst (type);
3232 else
3233 result = NULL_TREE;
3236 if (result)
3238 if (result == integer_zero_node)
3239 result = build_zero_cst (type);
3240 else if (cplx_result && TREE_TYPE (result) != type)
3242 if (TREE_CODE (result) == INTEGER_CST)
3244 if (arith_overflowed_p (PLUS_EXPR, type, result,
3245 integer_zero_node))
3246 overflow = build_one_cst (type);
3248 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3249 && TYPE_UNSIGNED (type))
3250 || (TYPE_PRECISION (type)
3251 < (TYPE_PRECISION (TREE_TYPE (result))
3252 + (TYPE_UNSIGNED (TREE_TYPE (result))
3253 && !TYPE_UNSIGNED (type)))))
3254 result = NULL_TREE;
3255 if (result)
3256 result = fold_convert (type, result);
3261 if (result)
3263 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3264 result = drop_tree_overflow (result);
3265 if (cplx_result)
3267 if (overflow == NULL_TREE)
3268 overflow = build_zero_cst (TREE_TYPE (result));
3269 tree ctype = build_complex_type (TREE_TYPE (result));
3270 if (TREE_CODE (result) == INTEGER_CST
3271 && TREE_CODE (overflow) == INTEGER_CST)
3272 result = build_complex (ctype, result, overflow);
3273 else
3274 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3275 ctype, result, overflow);
3277 if (!update_call_from_tree (gsi, result))
3278 gimplify_and_update_call_from_tree (gsi, result);
3279 changed = true;
3283 return changed;
3287 /* Return true whether NAME has a use on STMT. */
3289 static bool
3290 has_use_on_stmt (tree name, gimple *stmt)
3292 imm_use_iterator iter;
3293 use_operand_p use_p;
3294 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3295 if (USE_STMT (use_p) == stmt)
3296 return true;
3297 return false;
3300 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3301 gimple_simplify.
3303 Replaces *GSI with the simplification result in RCODE and OPS
3304 and the associated statements in *SEQ. Does the replacement
3305 according to INPLACE and returns true if the operation succeeded. */
3307 static bool
3308 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3309 code_helper rcode, tree *ops,
3310 gimple_seq *seq, bool inplace)
3312 gimple *stmt = gsi_stmt (*gsi);
3314 /* Play safe and do not allow abnormals to be mentioned in
3315 newly created statements. See also maybe_push_res_to_seq.
3316 As an exception allow such uses if there was a use of the
3317 same SSA name on the old stmt. */
3318 if ((TREE_CODE (ops[0]) == SSA_NAME
3319 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3320 && !has_use_on_stmt (ops[0], stmt))
3321 || (ops[1]
3322 && TREE_CODE (ops[1]) == SSA_NAME
3323 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3324 && !has_use_on_stmt (ops[1], stmt))
3325 || (ops[2]
3326 && TREE_CODE (ops[2]) == SSA_NAME
3327 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3328 && !has_use_on_stmt (ops[2], stmt))
3329 || (COMPARISON_CLASS_P (ops[0])
3330 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3331 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3332 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3333 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3334 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3335 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3336 return false;
3338 /* Don't insert new statements when INPLACE is true, even if we could
3339 reuse STMT for the final statement. */
3340 if (inplace && !gimple_seq_empty_p (*seq))
3341 return false;
3343 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3345 gcc_assert (rcode.is_tree_code ());
3346 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3347 /* GIMPLE_CONDs condition may not throw. */
3348 && (!flag_exceptions
3349 || !cfun->can_throw_non_call_exceptions
3350 || !operation_could_trap_p (rcode,
3351 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3352 false, NULL_TREE)))
3353 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3354 else if (rcode == SSA_NAME)
3355 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3356 build_zero_cst (TREE_TYPE (ops[0])));
3357 else if (rcode == INTEGER_CST)
3359 if (integer_zerop (ops[0]))
3360 gimple_cond_make_false (cond_stmt);
3361 else
3362 gimple_cond_make_true (cond_stmt);
3364 else if (!inplace)
3366 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3367 ops, seq);
3368 if (!res)
3369 return false;
3370 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3371 build_zero_cst (TREE_TYPE (res)));
3373 else
3374 return false;
3375 if (dump_file && (dump_flags & TDF_DETAILS))
3377 fprintf (dump_file, "gimple_simplified to ");
3378 if (!gimple_seq_empty_p (*seq))
3379 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3380 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3381 0, TDF_SLIM);
3383 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3384 return true;
3386 else if (is_gimple_assign (stmt)
3387 && rcode.is_tree_code ())
3389 if (!inplace
3390 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3392 maybe_build_generic_op (rcode,
3393 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
3394 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3395 if (dump_file && (dump_flags & TDF_DETAILS))
3397 fprintf (dump_file, "gimple_simplified to ");
3398 if (!gimple_seq_empty_p (*seq))
3399 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3400 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3401 0, TDF_SLIM);
3403 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3404 return true;
3407 else if (rcode.is_fn_code ()
3408 && gimple_call_combined_fn (stmt) == rcode)
3410 unsigned i;
3411 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3413 gcc_assert (ops[i] != NULL_TREE);
3414 gimple_call_set_arg (stmt, i, ops[i]);
3416 if (i < 3)
3417 gcc_assert (ops[i] == NULL_TREE);
3418 if (dump_file && (dump_flags & TDF_DETAILS))
3420 fprintf (dump_file, "gimple_simplified to ");
3421 if (!gimple_seq_empty_p (*seq))
3422 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3423 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3425 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3426 return true;
3428 else if (!inplace)
3430 if (gimple_has_lhs (stmt))
3432 tree lhs = gimple_get_lhs (stmt);
3433 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3434 ops, seq, lhs))
3435 return false;
3436 if (dump_file && (dump_flags & TDF_DETAILS))
3438 fprintf (dump_file, "gimple_simplified to ");
3439 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3441 gsi_replace_with_seq_vops (gsi, *seq);
3442 return true;
3444 else
3445 gcc_unreachable ();
3448 return false;
3451 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3453 static bool
3454 maybe_canonicalize_mem_ref_addr (tree *t)
3456 bool res = false;
3458 if (TREE_CODE (*t) == ADDR_EXPR)
3459 t = &TREE_OPERAND (*t, 0);
3461 /* The C and C++ frontends use an ARRAY_REF for indexing with their
3462 generic vector extension. The actual vector referenced is
3463 view-converted to an array type for this purpose. If the index
3464 is constant the canonical representation in the middle-end is a
3465 BIT_FIELD_REF so re-write the former to the latter here. */
3466 if (TREE_CODE (*t) == ARRAY_REF
3467 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
3468 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
3469 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
3471 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
3472 if (VECTOR_TYPE_P (vtype))
3474 tree low = array_ref_low_bound (*t);
3475 if (TREE_CODE (low) == INTEGER_CST)
3477 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
3479 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
3480 wi::to_widest (low));
3481 idx = wi::mul (idx, wi::to_widest
3482 (TYPE_SIZE (TREE_TYPE (*t))));
3483 widest_int ext
3484 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
3485 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
3487 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
3488 TREE_TYPE (*t),
3489 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
3490 TYPE_SIZE (TREE_TYPE (*t)),
3491 wide_int_to_tree (sizetype, idx));
3492 res = true;
3499 while (handled_component_p (*t))
3500 t = &TREE_OPERAND (*t, 0);
3502 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3503 of invariant addresses into a SSA name MEM_REF address. */
3504 if (TREE_CODE (*t) == MEM_REF
3505 || TREE_CODE (*t) == TARGET_MEM_REF)
3507 tree addr = TREE_OPERAND (*t, 0);
3508 if (TREE_CODE (addr) == ADDR_EXPR
3509 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3510 || handled_component_p (TREE_OPERAND (addr, 0))))
3512 tree base;
3513 HOST_WIDE_INT coffset;
3514 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3515 &coffset);
3516 if (!base)
3517 gcc_unreachable ();
3519 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3520 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3521 TREE_OPERAND (*t, 1),
3522 size_int (coffset));
3523 res = true;
3525 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3526 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3529 /* Canonicalize back MEM_REFs to plain reference trees if the object
3530 accessed is a decl that has the same access semantics as the MEM_REF. */
3531 if (TREE_CODE (*t) == MEM_REF
3532 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3533 && integer_zerop (TREE_OPERAND (*t, 1))
3534 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3536 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3537 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3538 if (/* Same volatile qualification. */
3539 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3540 /* Same TBAA behavior with -fstrict-aliasing. */
3541 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3542 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3543 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3544 /* Same alignment. */
3545 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3546 /* We have to look out here to not drop a required conversion
3547 from the rhs to the lhs if *t appears on the lhs or vice-versa
3548 if it appears on the rhs. Thus require strict type
3549 compatibility. */
3550 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3552 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3553 res = true;
3557 /* Canonicalize TARGET_MEM_REF in particular with respect to
3558 the indexes becoming constant. */
3559 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3561 tree tem = maybe_fold_tmr (*t);
3562 if (tem)
3564 *t = tem;
3565 res = true;
3569 return res;
3572 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3573 distinguishes both cases. */
3575 static bool
3576 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3578 bool changed = false;
3579 gimple *stmt = gsi_stmt (*gsi);
3580 bool nowarning = gimple_no_warning_p (stmt);
3581 unsigned i;
3582 fold_defer_overflow_warnings ();
3584 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3585 after propagation.
3586 ??? This shouldn't be done in generic folding but in the
3587 propagation helpers which also know whether an address was
3588 propagated.
3589 Also canonicalize operand order. */
3590 switch (gimple_code (stmt))
3592 case GIMPLE_ASSIGN:
3593 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3595 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3596 if ((REFERENCE_CLASS_P (*rhs)
3597 || TREE_CODE (*rhs) == ADDR_EXPR)
3598 && maybe_canonicalize_mem_ref_addr (rhs))
3599 changed = true;
3600 tree *lhs = gimple_assign_lhs_ptr (stmt);
3601 if (REFERENCE_CLASS_P (*lhs)
3602 && maybe_canonicalize_mem_ref_addr (lhs))
3603 changed = true;
3605 else
3607 /* Canonicalize operand order. */
3608 enum tree_code code = gimple_assign_rhs_code (stmt);
3609 if (TREE_CODE_CLASS (code) == tcc_comparison
3610 || commutative_tree_code (code)
3611 || commutative_ternary_tree_code (code))
3613 tree rhs1 = gimple_assign_rhs1 (stmt);
3614 tree rhs2 = gimple_assign_rhs2 (stmt);
3615 if (tree_swap_operands_p (rhs1, rhs2, false))
3617 gimple_assign_set_rhs1 (stmt, rhs2);
3618 gimple_assign_set_rhs2 (stmt, rhs1);
3619 if (TREE_CODE_CLASS (code) == tcc_comparison)
3620 gimple_assign_set_rhs_code (stmt,
3621 swap_tree_comparison (code));
3622 changed = true;
3626 break;
3627 case GIMPLE_CALL:
3629 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3631 tree *arg = gimple_call_arg_ptr (stmt, i);
3632 if (REFERENCE_CLASS_P (*arg)
3633 && maybe_canonicalize_mem_ref_addr (arg))
3634 changed = true;
3636 tree *lhs = gimple_call_lhs_ptr (stmt);
3637 if (*lhs
3638 && REFERENCE_CLASS_P (*lhs)
3639 && maybe_canonicalize_mem_ref_addr (lhs))
3640 changed = true;
3641 break;
3643 case GIMPLE_ASM:
3645 gasm *asm_stmt = as_a <gasm *> (stmt);
3646 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3648 tree link = gimple_asm_output_op (asm_stmt, i);
3649 tree op = TREE_VALUE (link);
3650 if (REFERENCE_CLASS_P (op)
3651 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3652 changed = true;
3654 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3656 tree link = gimple_asm_input_op (asm_stmt, i);
3657 tree op = TREE_VALUE (link);
3658 if ((REFERENCE_CLASS_P (op)
3659 || TREE_CODE (op) == ADDR_EXPR)
3660 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3661 changed = true;
3664 break;
3665 case GIMPLE_DEBUG:
3666 if (gimple_debug_bind_p (stmt))
3668 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3669 if (*val
3670 && (REFERENCE_CLASS_P (*val)
3671 || TREE_CODE (*val) == ADDR_EXPR)
3672 && maybe_canonicalize_mem_ref_addr (val))
3673 changed = true;
3675 break;
3676 case GIMPLE_COND:
3678 /* Canonicalize operand order. */
3679 tree lhs = gimple_cond_lhs (stmt);
3680 tree rhs = gimple_cond_rhs (stmt);
3681 if (tree_swap_operands_p (lhs, rhs, false))
3683 gcond *gc = as_a <gcond *> (stmt);
3684 gimple_cond_set_lhs (gc, rhs);
3685 gimple_cond_set_rhs (gc, lhs);
3686 gimple_cond_set_code (gc,
3687 swap_tree_comparison (gimple_cond_code (gc)));
3688 changed = true;
3691 default:;
3694 /* Dispatch to pattern-based folding. */
3695 if (!inplace
3696 || is_gimple_assign (stmt)
3697 || gimple_code (stmt) == GIMPLE_COND)
3699 gimple_seq seq = NULL;
3700 code_helper rcode;
3701 tree ops[3] = {};
3702 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3703 valueize, valueize))
3705 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3706 changed = true;
3707 else
3708 gimple_seq_discard (seq);
3712 stmt = gsi_stmt (*gsi);
3714 /* Fold the main computation performed by the statement. */
3715 switch (gimple_code (stmt))
3717 case GIMPLE_ASSIGN:
3719 /* Try to canonicalize for boolean-typed X the comparisons
3720 X == 0, X == 1, X != 0, and X != 1. */
3721 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3722 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3724 tree lhs = gimple_assign_lhs (stmt);
3725 tree op1 = gimple_assign_rhs1 (stmt);
3726 tree op2 = gimple_assign_rhs2 (stmt);
3727 tree type = TREE_TYPE (op1);
3729 /* Check whether the comparison operands are of the same boolean
3730 type as the result type is.
3731 Check that second operand is an integer-constant with value
3732 one or zero. */
3733 if (TREE_CODE (op2) == INTEGER_CST
3734 && (integer_zerop (op2) || integer_onep (op2))
3735 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3737 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3738 bool is_logical_not = false;
3740 /* X == 0 and X != 1 is a logical-not.of X
3741 X == 1 and X != 0 is X */
3742 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3743 || (cmp_code == NE_EXPR && integer_onep (op2)))
3744 is_logical_not = true;
3746 if (is_logical_not == false)
3747 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3748 /* Only for one-bit precision typed X the transformation
3749 !X -> ~X is valied. */
3750 else if (TYPE_PRECISION (type) == 1)
3751 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3752 /* Otherwise we use !X -> X ^ 1. */
3753 else
3754 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3755 build_int_cst (type, 1));
3756 changed = true;
3757 break;
3761 unsigned old_num_ops = gimple_num_ops (stmt);
3762 tree lhs = gimple_assign_lhs (stmt);
3763 tree new_rhs = fold_gimple_assign (gsi);
3764 if (new_rhs
3765 && !useless_type_conversion_p (TREE_TYPE (lhs),
3766 TREE_TYPE (new_rhs)))
3767 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3768 if (new_rhs
3769 && (!inplace
3770 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3772 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3773 changed = true;
3775 break;
3778 case GIMPLE_CALL:
3779 changed |= gimple_fold_call (gsi, inplace);
3780 break;
3782 case GIMPLE_ASM:
3783 /* Fold *& in asm operands. */
3785 gasm *asm_stmt = as_a <gasm *> (stmt);
3786 size_t noutputs;
3787 const char **oconstraints;
3788 const char *constraint;
3789 bool allows_mem, allows_reg;
3791 noutputs = gimple_asm_noutputs (asm_stmt);
3792 oconstraints = XALLOCAVEC (const char *, noutputs);
3794 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3796 tree link = gimple_asm_output_op (asm_stmt, i);
3797 tree op = TREE_VALUE (link);
3798 oconstraints[i]
3799 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3800 if (REFERENCE_CLASS_P (op)
3801 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3803 TREE_VALUE (link) = op;
3804 changed = true;
3807 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3809 tree link = gimple_asm_input_op (asm_stmt, i);
3810 tree op = TREE_VALUE (link);
3811 constraint
3812 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3813 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3814 oconstraints, &allows_mem, &allows_reg);
3815 if (REFERENCE_CLASS_P (op)
3816 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3817 != NULL_TREE)
3819 TREE_VALUE (link) = op;
3820 changed = true;
3824 break;
3826 case GIMPLE_DEBUG:
3827 if (gimple_debug_bind_p (stmt))
3829 tree val = gimple_debug_bind_get_value (stmt);
3830 if (val
3831 && REFERENCE_CLASS_P (val))
3833 tree tem = maybe_fold_reference (val, false);
3834 if (tem)
3836 gimple_debug_bind_set_value (stmt, tem);
3837 changed = true;
3840 else if (val
3841 && TREE_CODE (val) == ADDR_EXPR)
3843 tree ref = TREE_OPERAND (val, 0);
3844 tree tem = maybe_fold_reference (ref, false);
3845 if (tem)
3847 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3848 gimple_debug_bind_set_value (stmt, tem);
3849 changed = true;
3853 break;
3855 default:;
3858 stmt = gsi_stmt (*gsi);
3860 /* Fold *& on the lhs. */
3861 if (gimple_has_lhs (stmt))
3863 tree lhs = gimple_get_lhs (stmt);
3864 if (lhs && REFERENCE_CLASS_P (lhs))
3866 tree new_lhs = maybe_fold_reference (lhs, true);
3867 if (new_lhs)
3869 gimple_set_lhs (stmt, new_lhs);
3870 changed = true;
3875 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
3876 return changed;
3879 /* Valueziation callback that ends up not following SSA edges. */
3881 tree
3882 no_follow_ssa_edges (tree)
3884 return NULL_TREE;
3887 /* Valueization callback that ends up following single-use SSA edges only. */
3889 tree
3890 follow_single_use_edges (tree val)
3892 if (TREE_CODE (val) == SSA_NAME
3893 && !has_single_use (val))
3894 return NULL_TREE;
3895 return val;
3898 /* Fold the statement pointed to by GSI. In some cases, this function may
3899 replace the whole statement with a new one. Returns true iff folding
3900 makes any changes.
3901 The statement pointed to by GSI should be in valid gimple form but may
3902 be in unfolded state as resulting from for example constant propagation
3903 which can produce *&x = 0. */
3905 bool
3906 fold_stmt (gimple_stmt_iterator *gsi)
3908 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3911 bool
3912 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3914 return fold_stmt_1 (gsi, false, valueize);
3917 /* Perform the minimal folding on statement *GSI. Only operations like
3918 *&x created by constant propagation are handled. The statement cannot
3919 be replaced with a new one. Return true if the statement was
3920 changed, false otherwise.
3921 The statement *GSI should be in valid gimple form but may
3922 be in unfolded state as resulting from for example constant propagation
3923 which can produce *&x = 0. */
3925 bool
3926 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3928 gimple *stmt = gsi_stmt (*gsi);
3929 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3930 gcc_assert (gsi_stmt (*gsi) == stmt);
3931 return changed;
3934 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3935 if EXPR is null or we don't know how.
3936 If non-null, the result always has boolean type. */
3938 static tree
3939 canonicalize_bool (tree expr, bool invert)
3941 if (!expr)
3942 return NULL_TREE;
3943 else if (invert)
3945 if (integer_nonzerop (expr))
3946 return boolean_false_node;
3947 else if (integer_zerop (expr))
3948 return boolean_true_node;
3949 else if (TREE_CODE (expr) == SSA_NAME)
3950 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3951 build_int_cst (TREE_TYPE (expr), 0));
3952 else if (COMPARISON_CLASS_P (expr))
3953 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3954 boolean_type_node,
3955 TREE_OPERAND (expr, 0),
3956 TREE_OPERAND (expr, 1));
3957 else
3958 return NULL_TREE;
3960 else
3962 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3963 return expr;
3964 if (integer_nonzerop (expr))
3965 return boolean_true_node;
3966 else if (integer_zerop (expr))
3967 return boolean_false_node;
3968 else if (TREE_CODE (expr) == SSA_NAME)
3969 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3970 build_int_cst (TREE_TYPE (expr), 0));
3971 else if (COMPARISON_CLASS_P (expr))
3972 return fold_build2 (TREE_CODE (expr),
3973 boolean_type_node,
3974 TREE_OPERAND (expr, 0),
3975 TREE_OPERAND (expr, 1));
3976 else
3977 return NULL_TREE;
3981 /* Check to see if a boolean expression EXPR is logically equivalent to the
3982 comparison (OP1 CODE OP2). Check for various identities involving
3983 SSA_NAMEs. */
3985 static bool
3986 same_bool_comparison_p (const_tree expr, enum tree_code code,
3987 const_tree op1, const_tree op2)
3989 gimple *s;
3991 /* The obvious case. */
3992 if (TREE_CODE (expr) == code
3993 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3994 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3995 return true;
3997 /* Check for comparing (name, name != 0) and the case where expr
3998 is an SSA_NAME with a definition matching the comparison. */
3999 if (TREE_CODE (expr) == SSA_NAME
4000 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4002 if (operand_equal_p (expr, op1, 0))
4003 return ((code == NE_EXPR && integer_zerop (op2))
4004 || (code == EQ_EXPR && integer_nonzerop (op2)));
4005 s = SSA_NAME_DEF_STMT (expr);
4006 if (is_gimple_assign (s)
4007 && gimple_assign_rhs_code (s) == code
4008 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4009 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4010 return true;
4013 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4014 of name is a comparison, recurse. */
4015 if (TREE_CODE (op1) == SSA_NAME
4016 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4018 s = SSA_NAME_DEF_STMT (op1);
4019 if (is_gimple_assign (s)
4020 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4022 enum tree_code c = gimple_assign_rhs_code (s);
4023 if ((c == NE_EXPR && integer_zerop (op2))
4024 || (c == EQ_EXPR && integer_nonzerop (op2)))
4025 return same_bool_comparison_p (expr, c,
4026 gimple_assign_rhs1 (s),
4027 gimple_assign_rhs2 (s));
4028 if ((c == EQ_EXPR && integer_zerop (op2))
4029 || (c == NE_EXPR && integer_nonzerop (op2)))
4030 return same_bool_comparison_p (expr,
4031 invert_tree_comparison (c, false),
4032 gimple_assign_rhs1 (s),
4033 gimple_assign_rhs2 (s));
4036 return false;
4039 /* Check to see if two boolean expressions OP1 and OP2 are logically
4040 equivalent. */
4042 static bool
4043 same_bool_result_p (const_tree op1, const_tree op2)
4045 /* Simple cases first. */
4046 if (operand_equal_p (op1, op2, 0))
4047 return true;
4049 /* Check the cases where at least one of the operands is a comparison.
4050 These are a bit smarter than operand_equal_p in that they apply some
4051 identifies on SSA_NAMEs. */
4052 if (COMPARISON_CLASS_P (op2)
4053 && same_bool_comparison_p (op1, TREE_CODE (op2),
4054 TREE_OPERAND (op2, 0),
4055 TREE_OPERAND (op2, 1)))
4056 return true;
4057 if (COMPARISON_CLASS_P (op1)
4058 && same_bool_comparison_p (op2, TREE_CODE (op1),
4059 TREE_OPERAND (op1, 0),
4060 TREE_OPERAND (op1, 1)))
4061 return true;
4063 /* Default case. */
4064 return false;
4067 /* Forward declarations for some mutually recursive functions. */
4069 static tree
4070 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4071 enum tree_code code2, tree op2a, tree op2b);
4072 static tree
4073 and_var_with_comparison (tree var, bool invert,
4074 enum tree_code code2, tree op2a, tree op2b);
4075 static tree
4076 and_var_with_comparison_1 (gimple *stmt,
4077 enum tree_code code2, tree op2a, tree op2b);
4078 static tree
4079 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4080 enum tree_code code2, tree op2a, tree op2b);
4081 static tree
4082 or_var_with_comparison (tree var, bool invert,
4083 enum tree_code code2, tree op2a, tree op2b);
4084 static tree
4085 or_var_with_comparison_1 (gimple *stmt,
4086 enum tree_code code2, tree op2a, tree op2b);
4088 /* Helper function for and_comparisons_1: try to simplify the AND of the
4089 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4090 If INVERT is true, invert the value of the VAR before doing the AND.
4091 Return NULL_EXPR if we can't simplify this to a single expression. */
4093 static tree
4094 and_var_with_comparison (tree var, bool invert,
4095 enum tree_code code2, tree op2a, tree op2b)
4097 tree t;
4098 gimple *stmt = SSA_NAME_DEF_STMT (var);
4100 /* We can only deal with variables whose definitions are assignments. */
4101 if (!is_gimple_assign (stmt))
4102 return NULL_TREE;
4104 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4105 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4106 Then we only have to consider the simpler non-inverted cases. */
4107 if (invert)
4108 t = or_var_with_comparison_1 (stmt,
4109 invert_tree_comparison (code2, false),
4110 op2a, op2b);
4111 else
4112 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4113 return canonicalize_bool (t, invert);
4116 /* Try to simplify the AND of the ssa variable defined by the assignment
4117 STMT with the comparison specified by (OP2A CODE2 OP2B).
4118 Return NULL_EXPR if we can't simplify this to a single expression. */
4120 static tree
4121 and_var_with_comparison_1 (gimple *stmt,
4122 enum tree_code code2, tree op2a, tree op2b)
4124 tree var = gimple_assign_lhs (stmt);
4125 tree true_test_var = NULL_TREE;
4126 tree false_test_var = NULL_TREE;
4127 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4129 /* Check for identities like (var AND (var == 0)) => false. */
4130 if (TREE_CODE (op2a) == SSA_NAME
4131 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4133 if ((code2 == NE_EXPR && integer_zerop (op2b))
4134 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4136 true_test_var = op2a;
4137 if (var == true_test_var)
4138 return var;
4140 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4141 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4143 false_test_var = op2a;
4144 if (var == false_test_var)
4145 return boolean_false_node;
4149 /* If the definition is a comparison, recurse on it. */
4150 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4152 tree t = and_comparisons_1 (innercode,
4153 gimple_assign_rhs1 (stmt),
4154 gimple_assign_rhs2 (stmt),
4155 code2,
4156 op2a,
4157 op2b);
4158 if (t)
4159 return t;
4162 /* If the definition is an AND or OR expression, we may be able to
4163 simplify by reassociating. */
4164 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4165 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4167 tree inner1 = gimple_assign_rhs1 (stmt);
4168 tree inner2 = gimple_assign_rhs2 (stmt);
4169 gimple *s;
4170 tree t;
4171 tree partial = NULL_TREE;
4172 bool is_and = (innercode == BIT_AND_EXPR);
4174 /* Check for boolean identities that don't require recursive examination
4175 of inner1/inner2:
4176 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4177 inner1 AND (inner1 OR inner2) => inner1
4178 !inner1 AND (inner1 AND inner2) => false
4179 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4180 Likewise for similar cases involving inner2. */
4181 if (inner1 == true_test_var)
4182 return (is_and ? var : inner1);
4183 else if (inner2 == true_test_var)
4184 return (is_and ? var : inner2);
4185 else if (inner1 == false_test_var)
4186 return (is_and
4187 ? boolean_false_node
4188 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4189 else if (inner2 == false_test_var)
4190 return (is_and
4191 ? boolean_false_node
4192 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4194 /* Next, redistribute/reassociate the AND across the inner tests.
4195 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4196 if (TREE_CODE (inner1) == SSA_NAME
4197 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4198 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4199 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4200 gimple_assign_rhs1 (s),
4201 gimple_assign_rhs2 (s),
4202 code2, op2a, op2b)))
4204 /* Handle the AND case, where we are reassociating:
4205 (inner1 AND inner2) AND (op2a code2 op2b)
4206 => (t AND inner2)
4207 If the partial result t is a constant, we win. Otherwise
4208 continue on to try reassociating with the other inner test. */
4209 if (is_and)
4211 if (integer_onep (t))
4212 return inner2;
4213 else if (integer_zerop (t))
4214 return boolean_false_node;
4217 /* Handle the OR case, where we are redistributing:
4218 (inner1 OR inner2) AND (op2a code2 op2b)
4219 => (t OR (inner2 AND (op2a code2 op2b))) */
4220 else if (integer_onep (t))
4221 return boolean_true_node;
4223 /* Save partial result for later. */
4224 partial = t;
4227 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4228 if (TREE_CODE (inner2) == SSA_NAME
4229 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4230 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4231 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4232 gimple_assign_rhs1 (s),
4233 gimple_assign_rhs2 (s),
4234 code2, op2a, op2b)))
4236 /* Handle the AND case, where we are reassociating:
4237 (inner1 AND inner2) AND (op2a code2 op2b)
4238 => (inner1 AND t) */
4239 if (is_and)
4241 if (integer_onep (t))
4242 return inner1;
4243 else if (integer_zerop (t))
4244 return boolean_false_node;
4245 /* If both are the same, we can apply the identity
4246 (x AND x) == x. */
4247 else if (partial && same_bool_result_p (t, partial))
4248 return t;
4251 /* Handle the OR case. where we are redistributing:
4252 (inner1 OR inner2) AND (op2a code2 op2b)
4253 => (t OR (inner1 AND (op2a code2 op2b)))
4254 => (t OR partial) */
4255 else
4257 if (integer_onep (t))
4258 return boolean_true_node;
4259 else if (partial)
4261 /* We already got a simplification for the other
4262 operand to the redistributed OR expression. The
4263 interesting case is when at least one is false.
4264 Or, if both are the same, we can apply the identity
4265 (x OR x) == x. */
4266 if (integer_zerop (partial))
4267 return t;
4268 else if (integer_zerop (t))
4269 return partial;
4270 else if (same_bool_result_p (t, partial))
4271 return t;
4276 return NULL_TREE;
4279 /* Try to simplify the AND of two comparisons defined by
4280 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4281 If this can be done without constructing an intermediate value,
4282 return the resulting tree; otherwise NULL_TREE is returned.
4283 This function is deliberately asymmetric as it recurses on SSA_DEFs
4284 in the first comparison but not the second. */
4286 static tree
4287 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4288 enum tree_code code2, tree op2a, tree op2b)
4290 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4292 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4293 if (operand_equal_p (op1a, op2a, 0)
4294 && operand_equal_p (op1b, op2b, 0))
4296 /* Result will be either NULL_TREE, or a combined comparison. */
4297 tree t = combine_comparisons (UNKNOWN_LOCATION,
4298 TRUTH_ANDIF_EXPR, code1, code2,
4299 truth_type, op1a, op1b);
4300 if (t)
4301 return t;
4304 /* Likewise the swapped case of the above. */
4305 if (operand_equal_p (op1a, op2b, 0)
4306 && operand_equal_p (op1b, op2a, 0))
4308 /* Result will be either NULL_TREE, or a combined comparison. */
4309 tree t = combine_comparisons (UNKNOWN_LOCATION,
4310 TRUTH_ANDIF_EXPR, code1,
4311 swap_tree_comparison (code2),
4312 truth_type, op1a, op1b);
4313 if (t)
4314 return t;
4317 /* If both comparisons are of the same value against constants, we might
4318 be able to merge them. */
4319 if (operand_equal_p (op1a, op2a, 0)
4320 && TREE_CODE (op1b) == INTEGER_CST
4321 && TREE_CODE (op2b) == INTEGER_CST)
4323 int cmp = tree_int_cst_compare (op1b, op2b);
4325 /* If we have (op1a == op1b), we should either be able to
4326 return that or FALSE, depending on whether the constant op1b
4327 also satisfies the other comparison against op2b. */
4328 if (code1 == EQ_EXPR)
4330 bool done = true;
4331 bool val;
4332 switch (code2)
4334 case EQ_EXPR: val = (cmp == 0); break;
4335 case NE_EXPR: val = (cmp != 0); break;
4336 case LT_EXPR: val = (cmp < 0); break;
4337 case GT_EXPR: val = (cmp > 0); break;
4338 case LE_EXPR: val = (cmp <= 0); break;
4339 case GE_EXPR: val = (cmp >= 0); break;
4340 default: done = false;
4342 if (done)
4344 if (val)
4345 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4346 else
4347 return boolean_false_node;
4350 /* Likewise if the second comparison is an == comparison. */
4351 else if (code2 == EQ_EXPR)
4353 bool done = true;
4354 bool val;
4355 switch (code1)
4357 case EQ_EXPR: val = (cmp == 0); break;
4358 case NE_EXPR: val = (cmp != 0); break;
4359 case LT_EXPR: val = (cmp > 0); break;
4360 case GT_EXPR: val = (cmp < 0); break;
4361 case LE_EXPR: val = (cmp >= 0); break;
4362 case GE_EXPR: val = (cmp <= 0); break;
4363 default: done = false;
4365 if (done)
4367 if (val)
4368 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4369 else
4370 return boolean_false_node;
4374 /* Same business with inequality tests. */
4375 else if (code1 == NE_EXPR)
4377 bool val;
4378 switch (code2)
4380 case EQ_EXPR: val = (cmp != 0); break;
4381 case NE_EXPR: val = (cmp == 0); break;
4382 case LT_EXPR: val = (cmp >= 0); break;
4383 case GT_EXPR: val = (cmp <= 0); break;
4384 case LE_EXPR: val = (cmp > 0); break;
4385 case GE_EXPR: val = (cmp < 0); break;
4386 default:
4387 val = false;
4389 if (val)
4390 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4392 else if (code2 == NE_EXPR)
4394 bool val;
4395 switch (code1)
4397 case EQ_EXPR: val = (cmp == 0); break;
4398 case NE_EXPR: val = (cmp != 0); break;
4399 case LT_EXPR: val = (cmp <= 0); break;
4400 case GT_EXPR: val = (cmp >= 0); break;
4401 case LE_EXPR: val = (cmp < 0); break;
4402 case GE_EXPR: val = (cmp > 0); break;
4403 default:
4404 val = false;
4406 if (val)
4407 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4410 /* Chose the more restrictive of two < or <= comparisons. */
4411 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4412 && (code2 == LT_EXPR || code2 == LE_EXPR))
4414 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4415 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4416 else
4417 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4420 /* Likewise chose the more restrictive of two > or >= comparisons. */
4421 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4422 && (code2 == GT_EXPR || code2 == GE_EXPR))
4424 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4425 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4426 else
4427 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4430 /* Check for singleton ranges. */
4431 else if (cmp == 0
4432 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4433 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4434 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4436 /* Check for disjoint ranges. */
4437 else if (cmp <= 0
4438 && (code1 == LT_EXPR || code1 == LE_EXPR)
4439 && (code2 == GT_EXPR || code2 == GE_EXPR))
4440 return boolean_false_node;
4441 else if (cmp >= 0
4442 && (code1 == GT_EXPR || code1 == GE_EXPR)
4443 && (code2 == LT_EXPR || code2 == LE_EXPR))
4444 return boolean_false_node;
4447 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4448 NAME's definition is a truth value. See if there are any simplifications
4449 that can be done against the NAME's definition. */
4450 if (TREE_CODE (op1a) == SSA_NAME
4451 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4452 && (integer_zerop (op1b) || integer_onep (op1b)))
4454 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4455 || (code1 == NE_EXPR && integer_onep (op1b)));
4456 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4457 switch (gimple_code (stmt))
4459 case GIMPLE_ASSIGN:
4460 /* Try to simplify by copy-propagating the definition. */
4461 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4463 case GIMPLE_PHI:
4464 /* If every argument to the PHI produces the same result when
4465 ANDed with the second comparison, we win.
4466 Do not do this unless the type is bool since we need a bool
4467 result here anyway. */
4468 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4470 tree result = NULL_TREE;
4471 unsigned i;
4472 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4474 tree arg = gimple_phi_arg_def (stmt, i);
4476 /* If this PHI has itself as an argument, ignore it.
4477 If all the other args produce the same result,
4478 we're still OK. */
4479 if (arg == gimple_phi_result (stmt))
4480 continue;
4481 else if (TREE_CODE (arg) == INTEGER_CST)
4483 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4485 if (!result)
4486 result = boolean_false_node;
4487 else if (!integer_zerop (result))
4488 return NULL_TREE;
4490 else if (!result)
4491 result = fold_build2 (code2, boolean_type_node,
4492 op2a, op2b);
4493 else if (!same_bool_comparison_p (result,
4494 code2, op2a, op2b))
4495 return NULL_TREE;
4497 else if (TREE_CODE (arg) == SSA_NAME
4498 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4500 tree temp;
4501 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4502 /* In simple cases we can look through PHI nodes,
4503 but we have to be careful with loops.
4504 See PR49073. */
4505 if (! dom_info_available_p (CDI_DOMINATORS)
4506 || gimple_bb (def_stmt) == gimple_bb (stmt)
4507 || dominated_by_p (CDI_DOMINATORS,
4508 gimple_bb (def_stmt),
4509 gimple_bb (stmt)))
4510 return NULL_TREE;
4511 temp = and_var_with_comparison (arg, invert, code2,
4512 op2a, op2b);
4513 if (!temp)
4514 return NULL_TREE;
4515 else if (!result)
4516 result = temp;
4517 else if (!same_bool_result_p (result, temp))
4518 return NULL_TREE;
4520 else
4521 return NULL_TREE;
4523 return result;
4526 default:
4527 break;
4530 return NULL_TREE;
4533 /* Try to simplify the AND of two comparisons, specified by
4534 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4535 If this can be simplified to a single expression (without requiring
4536 introducing more SSA variables to hold intermediate values),
4537 return the resulting tree. Otherwise return NULL_TREE.
4538 If the result expression is non-null, it has boolean type. */
4540 tree
4541 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4542 enum tree_code code2, tree op2a, tree op2b)
4544 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4545 if (t)
4546 return t;
4547 else
4548 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4551 /* Helper function for or_comparisons_1: try to simplify the OR of the
4552 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4553 If INVERT is true, invert the value of VAR before doing the OR.
4554 Return NULL_EXPR if we can't simplify this to a single expression. */
4556 static tree
4557 or_var_with_comparison (tree var, bool invert,
4558 enum tree_code code2, tree op2a, tree op2b)
4560 tree t;
4561 gimple *stmt = SSA_NAME_DEF_STMT (var);
4563 /* We can only deal with variables whose definitions are assignments. */
4564 if (!is_gimple_assign (stmt))
4565 return NULL_TREE;
4567 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4568 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4569 Then we only have to consider the simpler non-inverted cases. */
4570 if (invert)
4571 t = and_var_with_comparison_1 (stmt,
4572 invert_tree_comparison (code2, false),
4573 op2a, op2b);
4574 else
4575 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4576 return canonicalize_bool (t, invert);
4579 /* Try to simplify the OR of the ssa variable defined by the assignment
4580 STMT with the comparison specified by (OP2A CODE2 OP2B).
4581 Return NULL_EXPR if we can't simplify this to a single expression. */
4583 static tree
4584 or_var_with_comparison_1 (gimple *stmt,
4585 enum tree_code code2, tree op2a, tree op2b)
4587 tree var = gimple_assign_lhs (stmt);
4588 tree true_test_var = NULL_TREE;
4589 tree false_test_var = NULL_TREE;
4590 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4592 /* Check for identities like (var OR (var != 0)) => true . */
4593 if (TREE_CODE (op2a) == SSA_NAME
4594 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4596 if ((code2 == NE_EXPR && integer_zerop (op2b))
4597 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4599 true_test_var = op2a;
4600 if (var == true_test_var)
4601 return var;
4603 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4604 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4606 false_test_var = op2a;
4607 if (var == false_test_var)
4608 return boolean_true_node;
4612 /* If the definition is a comparison, recurse on it. */
4613 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4615 tree t = or_comparisons_1 (innercode,
4616 gimple_assign_rhs1 (stmt),
4617 gimple_assign_rhs2 (stmt),
4618 code2,
4619 op2a,
4620 op2b);
4621 if (t)
4622 return t;
4625 /* If the definition is an AND or OR expression, we may be able to
4626 simplify by reassociating. */
4627 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4628 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4630 tree inner1 = gimple_assign_rhs1 (stmt);
4631 tree inner2 = gimple_assign_rhs2 (stmt);
4632 gimple *s;
4633 tree t;
4634 tree partial = NULL_TREE;
4635 bool is_or = (innercode == BIT_IOR_EXPR);
4637 /* Check for boolean identities that don't require recursive examination
4638 of inner1/inner2:
4639 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4640 inner1 OR (inner1 AND inner2) => inner1
4641 !inner1 OR (inner1 OR inner2) => true
4642 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4644 if (inner1 == true_test_var)
4645 return (is_or ? var : inner1);
4646 else if (inner2 == true_test_var)
4647 return (is_or ? var : inner2);
4648 else if (inner1 == false_test_var)
4649 return (is_or
4650 ? boolean_true_node
4651 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4652 else if (inner2 == false_test_var)
4653 return (is_or
4654 ? boolean_true_node
4655 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4657 /* Next, redistribute/reassociate the OR across the inner tests.
4658 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4659 if (TREE_CODE (inner1) == SSA_NAME
4660 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4661 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4662 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4663 gimple_assign_rhs1 (s),
4664 gimple_assign_rhs2 (s),
4665 code2, op2a, op2b)))
4667 /* Handle the OR case, where we are reassociating:
4668 (inner1 OR inner2) OR (op2a code2 op2b)
4669 => (t OR inner2)
4670 If the partial result t is a constant, we win. Otherwise
4671 continue on to try reassociating with the other inner test. */
4672 if (is_or)
4674 if (integer_onep (t))
4675 return boolean_true_node;
4676 else if (integer_zerop (t))
4677 return inner2;
4680 /* Handle the AND case, where we are redistributing:
4681 (inner1 AND inner2) OR (op2a code2 op2b)
4682 => (t AND (inner2 OR (op2a code op2b))) */
4683 else if (integer_zerop (t))
4684 return boolean_false_node;
4686 /* Save partial result for later. */
4687 partial = t;
4690 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4691 if (TREE_CODE (inner2) == SSA_NAME
4692 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4693 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4694 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4695 gimple_assign_rhs1 (s),
4696 gimple_assign_rhs2 (s),
4697 code2, op2a, op2b)))
4699 /* Handle the OR case, where we are reassociating:
4700 (inner1 OR inner2) OR (op2a code2 op2b)
4701 => (inner1 OR t)
4702 => (t OR partial) */
4703 if (is_or)
4705 if (integer_zerop (t))
4706 return inner1;
4707 else if (integer_onep (t))
4708 return boolean_true_node;
4709 /* If both are the same, we can apply the identity
4710 (x OR x) == x. */
4711 else if (partial && same_bool_result_p (t, partial))
4712 return t;
4715 /* Handle the AND case, where we are redistributing:
4716 (inner1 AND inner2) OR (op2a code2 op2b)
4717 => (t AND (inner1 OR (op2a code2 op2b)))
4718 => (t AND partial) */
4719 else
4721 if (integer_zerop (t))
4722 return boolean_false_node;
4723 else if (partial)
4725 /* We already got a simplification for the other
4726 operand to the redistributed AND expression. The
4727 interesting case is when at least one is true.
4728 Or, if both are the same, we can apply the identity
4729 (x AND x) == x. */
4730 if (integer_onep (partial))
4731 return t;
4732 else if (integer_onep (t))
4733 return partial;
4734 else if (same_bool_result_p (t, partial))
4735 return t;
4740 return NULL_TREE;
4743 /* Try to simplify the OR of two comparisons defined by
4744 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4745 If this can be done without constructing an intermediate value,
4746 return the resulting tree; otherwise NULL_TREE is returned.
4747 This function is deliberately asymmetric as it recurses on SSA_DEFs
4748 in the first comparison but not the second. */
4750 static tree
4751 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4752 enum tree_code code2, tree op2a, tree op2b)
4754 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4756 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4757 if (operand_equal_p (op1a, op2a, 0)
4758 && operand_equal_p (op1b, op2b, 0))
4760 /* Result will be either NULL_TREE, or a combined comparison. */
4761 tree t = combine_comparisons (UNKNOWN_LOCATION,
4762 TRUTH_ORIF_EXPR, code1, code2,
4763 truth_type, op1a, op1b);
4764 if (t)
4765 return t;
4768 /* Likewise the swapped case of the above. */
4769 if (operand_equal_p (op1a, op2b, 0)
4770 && operand_equal_p (op1b, op2a, 0))
4772 /* Result will be either NULL_TREE, or a combined comparison. */
4773 tree t = combine_comparisons (UNKNOWN_LOCATION,
4774 TRUTH_ORIF_EXPR, code1,
4775 swap_tree_comparison (code2),
4776 truth_type, op1a, op1b);
4777 if (t)
4778 return t;
4781 /* If both comparisons are of the same value against constants, we might
4782 be able to merge them. */
4783 if (operand_equal_p (op1a, op2a, 0)
4784 && TREE_CODE (op1b) == INTEGER_CST
4785 && TREE_CODE (op2b) == INTEGER_CST)
4787 int cmp = tree_int_cst_compare (op1b, op2b);
4789 /* If we have (op1a != op1b), we should either be able to
4790 return that or TRUE, depending on whether the constant op1b
4791 also satisfies the other comparison against op2b. */
4792 if (code1 == NE_EXPR)
4794 bool done = true;
4795 bool val;
4796 switch (code2)
4798 case EQ_EXPR: val = (cmp == 0); break;
4799 case NE_EXPR: val = (cmp != 0); break;
4800 case LT_EXPR: val = (cmp < 0); break;
4801 case GT_EXPR: val = (cmp > 0); break;
4802 case LE_EXPR: val = (cmp <= 0); break;
4803 case GE_EXPR: val = (cmp >= 0); break;
4804 default: done = false;
4806 if (done)
4808 if (val)
4809 return boolean_true_node;
4810 else
4811 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4814 /* Likewise if the second comparison is a != comparison. */
4815 else if (code2 == NE_EXPR)
4817 bool done = true;
4818 bool val;
4819 switch (code1)
4821 case EQ_EXPR: val = (cmp == 0); break;
4822 case NE_EXPR: val = (cmp != 0); break;
4823 case LT_EXPR: val = (cmp > 0); break;
4824 case GT_EXPR: val = (cmp < 0); break;
4825 case LE_EXPR: val = (cmp >= 0); break;
4826 case GE_EXPR: val = (cmp <= 0); break;
4827 default: done = false;
4829 if (done)
4831 if (val)
4832 return boolean_true_node;
4833 else
4834 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4838 /* See if an equality test is redundant with the other comparison. */
4839 else if (code1 == EQ_EXPR)
4841 bool val;
4842 switch (code2)
4844 case EQ_EXPR: val = (cmp == 0); break;
4845 case NE_EXPR: val = (cmp != 0); break;
4846 case LT_EXPR: val = (cmp < 0); break;
4847 case GT_EXPR: val = (cmp > 0); break;
4848 case LE_EXPR: val = (cmp <= 0); break;
4849 case GE_EXPR: val = (cmp >= 0); break;
4850 default:
4851 val = false;
4853 if (val)
4854 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4856 else if (code2 == EQ_EXPR)
4858 bool val;
4859 switch (code1)
4861 case EQ_EXPR: val = (cmp == 0); break;
4862 case NE_EXPR: val = (cmp != 0); break;
4863 case LT_EXPR: val = (cmp > 0); break;
4864 case GT_EXPR: val = (cmp < 0); break;
4865 case LE_EXPR: val = (cmp >= 0); break;
4866 case GE_EXPR: val = (cmp <= 0); break;
4867 default:
4868 val = false;
4870 if (val)
4871 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4874 /* Chose the less restrictive of two < or <= comparisons. */
4875 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4876 && (code2 == LT_EXPR || code2 == LE_EXPR))
4878 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4879 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4880 else
4881 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4884 /* Likewise chose the less restrictive of two > or >= comparisons. */
4885 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4886 && (code2 == GT_EXPR || code2 == GE_EXPR))
4888 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4889 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4890 else
4891 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4894 /* Check for singleton ranges. */
4895 else if (cmp == 0
4896 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4897 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4898 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4900 /* Check for less/greater pairs that don't restrict the range at all. */
4901 else if (cmp >= 0
4902 && (code1 == LT_EXPR || code1 == LE_EXPR)
4903 && (code2 == GT_EXPR || code2 == GE_EXPR))
4904 return boolean_true_node;
4905 else if (cmp <= 0
4906 && (code1 == GT_EXPR || code1 == GE_EXPR)
4907 && (code2 == LT_EXPR || code2 == LE_EXPR))
4908 return boolean_true_node;
4911 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4912 NAME's definition is a truth value. See if there are any simplifications
4913 that can be done against the NAME's definition. */
4914 if (TREE_CODE (op1a) == SSA_NAME
4915 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4916 && (integer_zerop (op1b) || integer_onep (op1b)))
4918 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4919 || (code1 == NE_EXPR && integer_onep (op1b)));
4920 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4921 switch (gimple_code (stmt))
4923 case GIMPLE_ASSIGN:
4924 /* Try to simplify by copy-propagating the definition. */
4925 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4927 case GIMPLE_PHI:
4928 /* If every argument to the PHI produces the same result when
4929 ORed with the second comparison, we win.
4930 Do not do this unless the type is bool since we need a bool
4931 result here anyway. */
4932 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4934 tree result = NULL_TREE;
4935 unsigned i;
4936 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4938 tree arg = gimple_phi_arg_def (stmt, i);
4940 /* If this PHI has itself as an argument, ignore it.
4941 If all the other args produce the same result,
4942 we're still OK. */
4943 if (arg == gimple_phi_result (stmt))
4944 continue;
4945 else if (TREE_CODE (arg) == INTEGER_CST)
4947 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4949 if (!result)
4950 result = boolean_true_node;
4951 else if (!integer_onep (result))
4952 return NULL_TREE;
4954 else if (!result)
4955 result = fold_build2 (code2, boolean_type_node,
4956 op2a, op2b);
4957 else if (!same_bool_comparison_p (result,
4958 code2, op2a, op2b))
4959 return NULL_TREE;
4961 else if (TREE_CODE (arg) == SSA_NAME
4962 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4964 tree temp;
4965 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4966 /* In simple cases we can look through PHI nodes,
4967 but we have to be careful with loops.
4968 See PR49073. */
4969 if (! dom_info_available_p (CDI_DOMINATORS)
4970 || gimple_bb (def_stmt) == gimple_bb (stmt)
4971 || dominated_by_p (CDI_DOMINATORS,
4972 gimple_bb (def_stmt),
4973 gimple_bb (stmt)))
4974 return NULL_TREE;
4975 temp = or_var_with_comparison (arg, invert, code2,
4976 op2a, op2b);
4977 if (!temp)
4978 return NULL_TREE;
4979 else if (!result)
4980 result = temp;
4981 else if (!same_bool_result_p (result, temp))
4982 return NULL_TREE;
4984 else
4985 return NULL_TREE;
4987 return result;
4990 default:
4991 break;
4994 return NULL_TREE;
4997 /* Try to simplify the OR of two comparisons, specified by
4998 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4999 If this can be simplified to a single expression (without requiring
5000 introducing more SSA variables to hold intermediate values),
5001 return the resulting tree. Otherwise return NULL_TREE.
5002 If the result expression is non-null, it has boolean type. */
5004 tree
5005 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5006 enum tree_code code2, tree op2a, tree op2b)
5008 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5009 if (t)
5010 return t;
5011 else
5012 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5016 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5018 Either NULL_TREE, a simplified but non-constant or a constant
5019 is returned.
5021 ??? This should go into a gimple-fold-inline.h file to be eventually
5022 privatized with the single valueize function used in the various TUs
5023 to avoid the indirect function call overhead. */
5025 tree
5026 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5027 tree (*gvalueize) (tree))
5029 code_helper rcode;
5030 tree ops[3] = {};
5031 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5032 edges if there are intermediate VARYING defs. For this reason
5033 do not follow SSA edges here even though SCCVN can technically
5034 just deal fine with that. */
5035 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5037 tree res = NULL_TREE;
5038 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5039 res = ops[0];
5040 else if (mprts_hook)
5041 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5042 if (res)
5044 if (dump_file && dump_flags & TDF_DETAILS)
5046 fprintf (dump_file, "Match-and-simplified ");
5047 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5048 fprintf (dump_file, " to ");
5049 print_generic_expr (dump_file, res, 0);
5050 fprintf (dump_file, "\n");
5052 return res;
5056 location_t loc = gimple_location (stmt);
5057 switch (gimple_code (stmt))
5059 case GIMPLE_ASSIGN:
5061 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5063 switch (get_gimple_rhs_class (subcode))
5065 case GIMPLE_SINGLE_RHS:
5067 tree rhs = gimple_assign_rhs1 (stmt);
5068 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5070 if (TREE_CODE (rhs) == SSA_NAME)
5072 /* If the RHS is an SSA_NAME, return its known constant value,
5073 if any. */
5074 return (*valueize) (rhs);
5076 /* Handle propagating invariant addresses into address
5077 operations. */
5078 else if (TREE_CODE (rhs) == ADDR_EXPR
5079 && !is_gimple_min_invariant (rhs))
5081 HOST_WIDE_INT offset = 0;
5082 tree base;
5083 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5084 &offset,
5085 valueize);
5086 if (base
5087 && (CONSTANT_CLASS_P (base)
5088 || decl_address_invariant_p (base)))
5089 return build_invariant_address (TREE_TYPE (rhs),
5090 base, offset);
5092 else if (TREE_CODE (rhs) == CONSTRUCTOR
5093 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5094 && (CONSTRUCTOR_NELTS (rhs)
5095 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5097 unsigned i;
5098 tree val, *vec;
5100 vec = XALLOCAVEC (tree,
5101 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5102 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5104 val = (*valueize) (val);
5105 if (TREE_CODE (val) == INTEGER_CST
5106 || TREE_CODE (val) == REAL_CST
5107 || TREE_CODE (val) == FIXED_CST)
5108 vec[i] = val;
5109 else
5110 return NULL_TREE;
5113 return build_vector (TREE_TYPE (rhs), vec);
5115 if (subcode == OBJ_TYPE_REF)
5117 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5118 /* If callee is constant, we can fold away the wrapper. */
5119 if (is_gimple_min_invariant (val))
5120 return val;
5123 if (kind == tcc_reference)
5125 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5126 || TREE_CODE (rhs) == REALPART_EXPR
5127 || TREE_CODE (rhs) == IMAGPART_EXPR)
5128 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5130 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5131 return fold_unary_loc (EXPR_LOCATION (rhs),
5132 TREE_CODE (rhs),
5133 TREE_TYPE (rhs), val);
5135 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5136 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5138 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5139 return fold_ternary_loc (EXPR_LOCATION (rhs),
5140 TREE_CODE (rhs),
5141 TREE_TYPE (rhs), val,
5142 TREE_OPERAND (rhs, 1),
5143 TREE_OPERAND (rhs, 2));
5145 else if (TREE_CODE (rhs) == MEM_REF
5146 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5148 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5149 if (TREE_CODE (val) == ADDR_EXPR
5150 && is_gimple_min_invariant (val))
5152 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5153 unshare_expr (val),
5154 TREE_OPERAND (rhs, 1));
5155 if (tem)
5156 rhs = tem;
5159 return fold_const_aggregate_ref_1 (rhs, valueize);
5161 else if (kind == tcc_declaration)
5162 return get_symbol_constant_value (rhs);
5163 return rhs;
5166 case GIMPLE_UNARY_RHS:
5167 return NULL_TREE;
5169 case GIMPLE_BINARY_RHS:
5170 /* Translate &x + CST into an invariant form suitable for
5171 further propagation. */
5172 if (subcode == POINTER_PLUS_EXPR)
5174 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5175 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5176 if (TREE_CODE (op0) == ADDR_EXPR
5177 && TREE_CODE (op1) == INTEGER_CST)
5179 tree off = fold_convert (ptr_type_node, op1);
5180 return build_fold_addr_expr_loc
5181 (loc,
5182 fold_build2 (MEM_REF,
5183 TREE_TYPE (TREE_TYPE (op0)),
5184 unshare_expr (op0), off));
5187 /* Canonicalize bool != 0 and bool == 0 appearing after
5188 valueization. While gimple_simplify handles this
5189 it can get confused by the ~X == 1 -> X == 0 transform
5190 which we cant reduce to a SSA name or a constant
5191 (and we have no way to tell gimple_simplify to not
5192 consider those transforms in the first place). */
5193 else if (subcode == EQ_EXPR
5194 || subcode == NE_EXPR)
5196 tree lhs = gimple_assign_lhs (stmt);
5197 tree op0 = gimple_assign_rhs1 (stmt);
5198 if (useless_type_conversion_p (TREE_TYPE (lhs),
5199 TREE_TYPE (op0)))
5201 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5202 op0 = (*valueize) (op0);
5203 if (TREE_CODE (op0) == INTEGER_CST)
5204 std::swap (op0, op1);
5205 if (TREE_CODE (op1) == INTEGER_CST
5206 && ((subcode == NE_EXPR && integer_zerop (op1))
5207 || (subcode == EQ_EXPR && integer_onep (op1))))
5208 return op0;
5211 return NULL_TREE;
5213 case GIMPLE_TERNARY_RHS:
5215 /* Handle ternary operators that can appear in GIMPLE form. */
5216 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5217 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5218 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5219 return fold_ternary_loc (loc, subcode,
5220 gimple_expr_type (stmt), op0, op1, op2);
5223 default:
5224 gcc_unreachable ();
5228 case GIMPLE_CALL:
5230 tree fn;
5231 gcall *call_stmt = as_a <gcall *> (stmt);
5233 if (gimple_call_internal_p (stmt))
5235 enum tree_code subcode = ERROR_MARK;
5236 switch (gimple_call_internal_fn (stmt))
5238 case IFN_UBSAN_CHECK_ADD:
5239 subcode = PLUS_EXPR;
5240 break;
5241 case IFN_UBSAN_CHECK_SUB:
5242 subcode = MINUS_EXPR;
5243 break;
5244 case IFN_UBSAN_CHECK_MUL:
5245 subcode = MULT_EXPR;
5246 break;
5247 default:
5248 return NULL_TREE;
5250 tree arg0 = gimple_call_arg (stmt, 0);
5251 tree arg1 = gimple_call_arg (stmt, 1);
5252 tree op0 = (*valueize) (arg0);
5253 tree op1 = (*valueize) (arg1);
5255 if (TREE_CODE (op0) != INTEGER_CST
5256 || TREE_CODE (op1) != INTEGER_CST)
5258 switch (subcode)
5260 case MULT_EXPR:
5261 /* x * 0 = 0 * x = 0 without overflow. */
5262 if (integer_zerop (op0) || integer_zerop (op1))
5263 return build_zero_cst (TREE_TYPE (arg0));
5264 break;
5265 case MINUS_EXPR:
5266 /* y - y = 0 without overflow. */
5267 if (operand_equal_p (op0, op1, 0))
5268 return build_zero_cst (TREE_TYPE (arg0));
5269 break;
5270 default:
5271 break;
5274 tree res
5275 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5276 if (res
5277 && TREE_CODE (res) == INTEGER_CST
5278 && !TREE_OVERFLOW (res))
5279 return res;
5280 return NULL_TREE;
5283 fn = (*valueize) (gimple_call_fn (stmt));
5284 if (TREE_CODE (fn) == ADDR_EXPR
5285 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5286 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5287 && gimple_builtin_call_types_compatible_p (stmt,
5288 TREE_OPERAND (fn, 0)))
5290 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5291 tree retval;
5292 unsigned i;
5293 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5294 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5295 retval = fold_builtin_call_array (loc,
5296 gimple_call_return_type (call_stmt),
5297 fn, gimple_call_num_args (stmt), args);
5298 if (retval)
5300 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5301 STRIP_NOPS (retval);
5302 retval = fold_convert (gimple_call_return_type (call_stmt),
5303 retval);
5305 return retval;
5307 return NULL_TREE;
5310 default:
5311 return NULL_TREE;
5315 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5316 Returns NULL_TREE if folding to a constant is not possible, otherwise
5317 returns a constant according to is_gimple_min_invariant. */
5319 tree
5320 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5322 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5323 if (res && is_gimple_min_invariant (res))
5324 return res;
5325 return NULL_TREE;
5329 /* The following set of functions are supposed to fold references using
5330 their constant initializers. */
5332 /* See if we can find constructor defining value of BASE.
5333 When we know the consructor with constant offset (such as
5334 base is array[40] and we do know constructor of array), then
5335 BIT_OFFSET is adjusted accordingly.
5337 As a special case, return error_mark_node when constructor
5338 is not explicitly available, but it is known to be zero
5339 such as 'static const int a;'. */
5340 static tree
5341 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5342 tree (*valueize)(tree))
5344 HOST_WIDE_INT bit_offset2, size, max_size;
5345 bool reverse;
5347 if (TREE_CODE (base) == MEM_REF)
5349 if (!integer_zerop (TREE_OPERAND (base, 1)))
5351 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5352 return NULL_TREE;
5353 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5354 * BITS_PER_UNIT);
5357 if (valueize
5358 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5359 base = valueize (TREE_OPERAND (base, 0));
5360 if (!base || TREE_CODE (base) != ADDR_EXPR)
5361 return NULL_TREE;
5362 base = TREE_OPERAND (base, 0);
5365 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5366 DECL_INITIAL. If BASE is a nested reference into another
5367 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5368 the inner reference. */
5369 switch (TREE_CODE (base))
5371 case VAR_DECL:
5372 case CONST_DECL:
5374 tree init = ctor_for_folding (base);
5376 /* Our semantic is exact opposite of ctor_for_folding;
5377 NULL means unknown, while error_mark_node is 0. */
5378 if (init == error_mark_node)
5379 return NULL_TREE;
5380 if (!init)
5381 return error_mark_node;
5382 return init;
5385 case ARRAY_REF:
5386 case COMPONENT_REF:
5387 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5388 &reverse);
5389 if (max_size == -1 || size != max_size)
5390 return NULL_TREE;
5391 *bit_offset += bit_offset2;
5392 return get_base_constructor (base, bit_offset, valueize);
5394 case STRING_CST:
5395 case CONSTRUCTOR:
5396 return base;
5398 default:
5399 return NULL_TREE;
5403 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5404 SIZE to the memory at bit OFFSET. */
5406 static tree
5407 fold_array_ctor_reference (tree type, tree ctor,
5408 unsigned HOST_WIDE_INT offset,
5409 unsigned HOST_WIDE_INT size,
5410 tree from_decl)
5412 offset_int low_bound;
5413 offset_int elt_size;
5414 offset_int access_index;
5415 tree domain_type = NULL_TREE;
5416 HOST_WIDE_INT inner_offset;
5418 /* Compute low bound and elt size. */
5419 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5420 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5421 if (domain_type && TYPE_MIN_VALUE (domain_type))
5423 /* Static constructors for variably sized objects makes no sense. */
5424 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5425 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5427 else
5428 low_bound = 0;
5429 /* Static constructors for variably sized objects makes no sense. */
5430 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5431 == INTEGER_CST);
5432 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5434 /* We can handle only constantly sized accesses that are known to not
5435 be larger than size of array element. */
5436 if (!TYPE_SIZE_UNIT (type)
5437 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5438 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
5439 || elt_size == 0)
5440 return NULL_TREE;
5442 /* Compute the array index we look for. */
5443 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5444 elt_size);
5445 access_index += low_bound;
5447 /* And offset within the access. */
5448 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5450 /* See if the array field is large enough to span whole access. We do not
5451 care to fold accesses spanning multiple array indexes. */
5452 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5453 return NULL_TREE;
5454 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5455 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
5457 /* When memory is not explicitely mentioned in constructor,
5458 it is 0 (or out of range). */
5459 return build_zero_cst (type);
5462 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5463 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5465 static tree
5466 fold_nonarray_ctor_reference (tree type, tree ctor,
5467 unsigned HOST_WIDE_INT offset,
5468 unsigned HOST_WIDE_INT size,
5469 tree from_decl)
5471 unsigned HOST_WIDE_INT cnt;
5472 tree cfield, cval;
5474 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5475 cval)
5477 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5478 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5479 tree field_size = DECL_SIZE (cfield);
5480 offset_int bitoffset;
5481 offset_int bitoffset_end, access_end;
5483 /* Variable sized objects in static constructors makes no sense,
5484 but field_size can be NULL for flexible array members. */
5485 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5486 && TREE_CODE (byte_offset) == INTEGER_CST
5487 && (field_size != NULL_TREE
5488 ? TREE_CODE (field_size) == INTEGER_CST
5489 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5491 /* Compute bit offset of the field. */
5492 bitoffset = (wi::to_offset (field_offset)
5493 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
5494 /* Compute bit offset where the field ends. */
5495 if (field_size != NULL_TREE)
5496 bitoffset_end = bitoffset + wi::to_offset (field_size);
5497 else
5498 bitoffset_end = 0;
5500 access_end = offset_int (offset) + size;
5502 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5503 [BITOFFSET, BITOFFSET_END)? */
5504 if (wi::cmps (access_end, bitoffset) > 0
5505 && (field_size == NULL_TREE
5506 || wi::lts_p (offset, bitoffset_end)))
5508 offset_int inner_offset = offset_int (offset) - bitoffset;
5509 /* We do have overlap. Now see if field is large enough to
5510 cover the access. Give up for accesses spanning multiple
5511 fields. */
5512 if (wi::cmps (access_end, bitoffset_end) > 0)
5513 return NULL_TREE;
5514 if (offset < bitoffset)
5515 return NULL_TREE;
5516 return fold_ctor_reference (type, cval,
5517 inner_offset.to_uhwi (), size,
5518 from_decl);
5521 /* When memory is not explicitely mentioned in constructor, it is 0. */
5522 return build_zero_cst (type);
5525 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5526 to the memory at bit OFFSET. */
5528 tree
5529 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5530 unsigned HOST_WIDE_INT size, tree from_decl)
5532 tree ret;
5534 /* We found the field with exact match. */
5535 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5536 && !offset)
5537 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5539 /* We are at the end of walk, see if we can view convert the
5540 result. */
5541 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5542 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5543 && !compare_tree_int (TYPE_SIZE (type), size)
5544 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5546 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5547 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5548 if (ret)
5549 STRIP_USELESS_TYPE_CONVERSION (ret);
5550 return ret;
5552 /* For constants and byte-aligned/sized reads try to go through
5553 native_encode/interpret. */
5554 if (CONSTANT_CLASS_P (ctor)
5555 && BITS_PER_UNIT == 8
5556 && offset % BITS_PER_UNIT == 0
5557 && size % BITS_PER_UNIT == 0
5558 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5560 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5561 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5562 offset / BITS_PER_UNIT);
5563 if (len > 0)
5564 return native_interpret_expr (type, buf, len);
5566 if (TREE_CODE (ctor) == CONSTRUCTOR)
5569 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5570 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5571 return fold_array_ctor_reference (type, ctor, offset, size,
5572 from_decl);
5573 else
5574 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5575 from_decl);
5578 return NULL_TREE;
5581 /* Return the tree representing the element referenced by T if T is an
5582 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5583 names using VALUEIZE. Return NULL_TREE otherwise. */
5585 tree
5586 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5588 tree ctor, idx, base;
5589 HOST_WIDE_INT offset, size, max_size;
5590 tree tem;
5591 bool reverse;
5593 if (TREE_THIS_VOLATILE (t))
5594 return NULL_TREE;
5596 if (DECL_P (t))
5597 return get_symbol_constant_value (t);
5599 tem = fold_read_from_constant_string (t);
5600 if (tem)
5601 return tem;
5603 switch (TREE_CODE (t))
5605 case ARRAY_REF:
5606 case ARRAY_RANGE_REF:
5607 /* Constant indexes are handled well by get_base_constructor.
5608 Only special case variable offsets.
5609 FIXME: This code can't handle nested references with variable indexes
5610 (they will be handled only by iteration of ccp). Perhaps we can bring
5611 get_ref_base_and_extent here and make it use a valueize callback. */
5612 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5613 && valueize
5614 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5615 && TREE_CODE (idx) == INTEGER_CST)
5617 tree low_bound, unit_size;
5619 /* If the resulting bit-offset is constant, track it. */
5620 if ((low_bound = array_ref_low_bound (t),
5621 TREE_CODE (low_bound) == INTEGER_CST)
5622 && (unit_size = array_ref_element_size (t),
5623 tree_fits_uhwi_p (unit_size)))
5625 offset_int woffset
5626 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5627 TYPE_PRECISION (TREE_TYPE (idx)));
5629 if (wi::fits_shwi_p (woffset))
5631 offset = woffset.to_shwi ();
5632 /* TODO: This code seems wrong, multiply then check
5633 to see if it fits. */
5634 offset *= tree_to_uhwi (unit_size);
5635 offset *= BITS_PER_UNIT;
5637 base = TREE_OPERAND (t, 0);
5638 ctor = get_base_constructor (base, &offset, valueize);
5639 /* Empty constructor. Always fold to 0. */
5640 if (ctor == error_mark_node)
5641 return build_zero_cst (TREE_TYPE (t));
5642 /* Out of bound array access. Value is undefined,
5643 but don't fold. */
5644 if (offset < 0)
5645 return NULL_TREE;
5646 /* We can not determine ctor. */
5647 if (!ctor)
5648 return NULL_TREE;
5649 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5650 tree_to_uhwi (unit_size)
5651 * BITS_PER_UNIT,
5652 base);
5656 /* Fallthru. */
5658 case COMPONENT_REF:
5659 case BIT_FIELD_REF:
5660 case TARGET_MEM_REF:
5661 case MEM_REF:
5662 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
5663 ctor = get_base_constructor (base, &offset, valueize);
5665 /* Empty constructor. Always fold to 0. */
5666 if (ctor == error_mark_node)
5667 return build_zero_cst (TREE_TYPE (t));
5668 /* We do not know precise address. */
5669 if (max_size == -1 || max_size != size)
5670 return NULL_TREE;
5671 /* We can not determine ctor. */
5672 if (!ctor)
5673 return NULL_TREE;
5675 /* Out of bound array access. Value is undefined, but don't fold. */
5676 if (offset < 0)
5677 return NULL_TREE;
5679 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5680 base);
5682 case REALPART_EXPR:
5683 case IMAGPART_EXPR:
5685 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5686 if (c && TREE_CODE (c) == COMPLEX_CST)
5687 return fold_build1_loc (EXPR_LOCATION (t),
5688 TREE_CODE (t), TREE_TYPE (t), c);
5689 break;
5692 default:
5693 break;
5696 return NULL_TREE;
5699 tree
5700 fold_const_aggregate_ref (tree t)
5702 return fold_const_aggregate_ref_1 (t, NULL);
5705 /* Lookup virtual method with index TOKEN in a virtual table V
5706 at OFFSET.
5707 Set CAN_REFER if non-NULL to false if method
5708 is not referable or if the virtual table is ill-formed (such as rewriten
5709 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5711 tree
5712 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5713 tree v,
5714 unsigned HOST_WIDE_INT offset,
5715 bool *can_refer)
5717 tree vtable = v, init, fn;
5718 unsigned HOST_WIDE_INT size;
5719 unsigned HOST_WIDE_INT elt_size, access_index;
5720 tree domain_type;
5722 if (can_refer)
5723 *can_refer = true;
5725 /* First of all double check we have virtual table. */
5726 if (TREE_CODE (v) != VAR_DECL
5727 || !DECL_VIRTUAL_P (v))
5729 /* Pass down that we lost track of the target. */
5730 if (can_refer)
5731 *can_refer = false;
5732 return NULL_TREE;
5735 init = ctor_for_folding (v);
5737 /* The virtual tables should always be born with constructors
5738 and we always should assume that they are avaialble for
5739 folding. At the moment we do not stream them in all cases,
5740 but it should never happen that ctor seem unreachable. */
5741 gcc_assert (init);
5742 if (init == error_mark_node)
5744 gcc_assert (in_lto_p);
5745 /* Pass down that we lost track of the target. */
5746 if (can_refer)
5747 *can_refer = false;
5748 return NULL_TREE;
5750 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5751 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5752 offset *= BITS_PER_UNIT;
5753 offset += token * size;
5755 /* Lookup the value in the constructor that is assumed to be array.
5756 This is equivalent to
5757 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5758 offset, size, NULL);
5759 but in a constant time. We expect that frontend produced a simple
5760 array without indexed initializers. */
5762 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5763 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5764 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5765 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5767 access_index = offset / BITS_PER_UNIT / elt_size;
5768 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5770 /* This code makes an assumption that there are no
5771 indexed fileds produced by C++ FE, so we can directly index the array. */
5772 if (access_index < CONSTRUCTOR_NELTS (init))
5774 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5775 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5776 STRIP_NOPS (fn);
5778 else
5779 fn = NULL;
5781 /* For type inconsistent program we may end up looking up virtual method
5782 in virtual table that does not contain TOKEN entries. We may overrun
5783 the virtual table and pick up a constant or RTTI info pointer.
5784 In any case the call is undefined. */
5785 if (!fn
5786 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5787 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5788 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5789 else
5791 fn = TREE_OPERAND (fn, 0);
5793 /* When cgraph node is missing and function is not public, we cannot
5794 devirtualize. This can happen in WHOPR when the actual method
5795 ends up in other partition, because we found devirtualization
5796 possibility too late. */
5797 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5799 if (can_refer)
5801 *can_refer = false;
5802 return fn;
5804 return NULL_TREE;
5808 /* Make sure we create a cgraph node for functions we'll reference.
5809 They can be non-existent if the reference comes from an entry
5810 of an external vtable for example. */
5811 cgraph_node::get_create (fn);
5813 return fn;
5816 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5817 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5818 KNOWN_BINFO carries the binfo describing the true type of
5819 OBJ_TYPE_REF_OBJECT(REF).
5820 Set CAN_REFER if non-NULL to false if method
5821 is not referable or if the virtual table is ill-formed (such as rewriten
5822 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5824 tree
5825 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5826 bool *can_refer)
5828 unsigned HOST_WIDE_INT offset;
5829 tree v;
5831 v = BINFO_VTABLE (known_binfo);
5832 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5833 if (!v)
5834 return NULL_TREE;
5836 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5838 if (can_refer)
5839 *can_refer = false;
5840 return NULL_TREE;
5842 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5845 /* Given a pointer value OP0, return a simplified version of an
5846 indirection through OP0, or NULL_TREE if no simplification is
5847 possible. Note that the resulting type may be different from
5848 the type pointed to in the sense that it is still compatible
5849 from the langhooks point of view. */
5851 tree
5852 gimple_fold_indirect_ref (tree t)
5854 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5855 tree sub = t;
5856 tree subtype;
5858 STRIP_NOPS (sub);
5859 subtype = TREE_TYPE (sub);
5860 if (!POINTER_TYPE_P (subtype))
5861 return NULL_TREE;
5863 if (TREE_CODE (sub) == ADDR_EXPR)
5865 tree op = TREE_OPERAND (sub, 0);
5866 tree optype = TREE_TYPE (op);
5867 /* *&p => p */
5868 if (useless_type_conversion_p (type, optype))
5869 return op;
5871 /* *(foo *)&fooarray => fooarray[0] */
5872 if (TREE_CODE (optype) == ARRAY_TYPE
5873 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5874 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5876 tree type_domain = TYPE_DOMAIN (optype);
5877 tree min_val = size_zero_node;
5878 if (type_domain && TYPE_MIN_VALUE (type_domain))
5879 min_val = TYPE_MIN_VALUE (type_domain);
5880 if (TREE_CODE (min_val) == INTEGER_CST)
5881 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5883 /* *(foo *)&complexfoo => __real__ complexfoo */
5884 else if (TREE_CODE (optype) == COMPLEX_TYPE
5885 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5886 return fold_build1 (REALPART_EXPR, type, op);
5887 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5888 else if (TREE_CODE (optype) == VECTOR_TYPE
5889 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5891 tree part_width = TYPE_SIZE (type);
5892 tree index = bitsize_int (0);
5893 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5897 /* *(p + CST) -> ... */
5898 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5899 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5901 tree addr = TREE_OPERAND (sub, 0);
5902 tree off = TREE_OPERAND (sub, 1);
5903 tree addrtype;
5905 STRIP_NOPS (addr);
5906 addrtype = TREE_TYPE (addr);
5908 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5909 if (TREE_CODE (addr) == ADDR_EXPR
5910 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5911 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5912 && tree_fits_uhwi_p (off))
5914 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5915 tree part_width = TYPE_SIZE (type);
5916 unsigned HOST_WIDE_INT part_widthi
5917 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5918 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5919 tree index = bitsize_int (indexi);
5920 if (offset / part_widthi
5921 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5922 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5923 part_width, index);
5926 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5927 if (TREE_CODE (addr) == ADDR_EXPR
5928 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5929 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5931 tree size = TYPE_SIZE_UNIT (type);
5932 if (tree_int_cst_equal (size, off))
5933 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5936 /* *(p + CST) -> MEM_REF <p, CST>. */
5937 if (TREE_CODE (addr) != ADDR_EXPR
5938 || DECL_P (TREE_OPERAND (addr, 0)))
5939 return fold_build2 (MEM_REF, type,
5940 addr,
5941 wide_int_to_tree (ptype, off));
5944 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5945 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5946 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5947 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5949 tree type_domain;
5950 tree min_val = size_zero_node;
5951 tree osub = sub;
5952 sub = gimple_fold_indirect_ref (sub);
5953 if (! sub)
5954 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5955 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5956 if (type_domain && TYPE_MIN_VALUE (type_domain))
5957 min_val = TYPE_MIN_VALUE (type_domain);
5958 if (TREE_CODE (min_val) == INTEGER_CST)
5959 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5962 return NULL_TREE;
5965 /* Return true if CODE is an operation that when operating on signed
5966 integer types involves undefined behavior on overflow and the
5967 operation can be expressed with unsigned arithmetic. */
5969 bool
5970 arith_code_with_undefined_signed_overflow (tree_code code)
5972 switch (code)
5974 case PLUS_EXPR:
5975 case MINUS_EXPR:
5976 case MULT_EXPR:
5977 case NEGATE_EXPR:
5978 case POINTER_PLUS_EXPR:
5979 return true;
5980 default:
5981 return false;
5985 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5986 operation that can be transformed to unsigned arithmetic by converting
5987 its operand, carrying out the operation in the corresponding unsigned
5988 type and converting the result back to the original type.
5990 Returns a sequence of statements that replace STMT and also contain
5991 a modified form of STMT itself. */
5993 gimple_seq
5994 rewrite_to_defined_overflow (gimple *stmt)
5996 if (dump_file && (dump_flags & TDF_DETAILS))
5998 fprintf (dump_file, "rewriting stmt with undefined signed "
5999 "overflow ");
6000 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6003 tree lhs = gimple_assign_lhs (stmt);
6004 tree type = unsigned_type_for (TREE_TYPE (lhs));
6005 gimple_seq stmts = NULL;
6006 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6008 tree op = gimple_op (stmt, i);
6009 op = gimple_convert (&stmts, type, op);
6010 gimple_set_op (stmt, i, op);
6012 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6013 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6014 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6015 gimple_seq_add_stmt (&stmts, stmt);
6016 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6017 gimple_seq_add_stmt (&stmts, cvt);
6019 return stmts;
6023 /* The valueization hook we use for the gimple_build API simplification.
6024 This makes us match fold_buildN behavior by only combining with
6025 statements in the sequence(s) we are currently building. */
6027 static tree
6028 gimple_build_valueize (tree op)
6030 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6031 return op;
6032 return NULL_TREE;
6035 /* Build the expression CODE OP0 of type TYPE with location LOC,
6036 simplifying it first if possible. Returns the built
6037 expression value and appends statements possibly defining it
6038 to SEQ. */
6040 tree
6041 gimple_build (gimple_seq *seq, location_t loc,
6042 enum tree_code code, tree type, tree op0)
6044 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6045 if (!res)
6047 if (gimple_in_ssa_p (cfun))
6048 res = make_ssa_name (type);
6049 else
6050 res = create_tmp_reg (type);
6051 gimple *stmt;
6052 if (code == REALPART_EXPR
6053 || code == IMAGPART_EXPR
6054 || code == VIEW_CONVERT_EXPR)
6055 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6056 else
6057 stmt = gimple_build_assign (res, code, op0);
6058 gimple_set_location (stmt, loc);
6059 gimple_seq_add_stmt_without_update (seq, stmt);
6061 return res;
6064 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6065 simplifying it first if possible. Returns the built
6066 expression value and appends statements possibly defining it
6067 to SEQ. */
6069 tree
6070 gimple_build (gimple_seq *seq, location_t loc,
6071 enum tree_code code, tree type, tree op0, tree op1)
6073 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6074 if (!res)
6076 if (gimple_in_ssa_p (cfun))
6077 res = make_ssa_name (type);
6078 else
6079 res = create_tmp_reg (type);
6080 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6081 gimple_set_location (stmt, loc);
6082 gimple_seq_add_stmt_without_update (seq, stmt);
6084 return res;
6087 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6088 simplifying it first if possible. Returns the built
6089 expression value and appends statements possibly defining it
6090 to SEQ. */
6092 tree
6093 gimple_build (gimple_seq *seq, location_t loc,
6094 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6096 tree res = gimple_simplify (code, type, op0, op1, op2,
6097 seq, gimple_build_valueize);
6098 if (!res)
6100 if (gimple_in_ssa_p (cfun))
6101 res = make_ssa_name (type);
6102 else
6103 res = create_tmp_reg (type);
6104 gimple *stmt;
6105 if (code == BIT_FIELD_REF)
6106 stmt = gimple_build_assign (res, code,
6107 build3 (code, type, op0, op1, op2));
6108 else
6109 stmt = gimple_build_assign (res, code, op0, op1, op2);
6110 gimple_set_location (stmt, loc);
6111 gimple_seq_add_stmt_without_update (seq, stmt);
6113 return res;
6116 /* Build the call FN (ARG0) with a result of type TYPE
6117 (or no result if TYPE is void) with location LOC,
6118 simplifying it first if possible. Returns the built
6119 expression value (or NULL_TREE if TYPE is void) and appends
6120 statements possibly defining it to SEQ. */
6122 tree
6123 gimple_build (gimple_seq *seq, location_t loc,
6124 enum built_in_function fn, tree type, tree arg0)
6126 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6127 if (!res)
6129 tree decl = builtin_decl_implicit (fn);
6130 gimple *stmt = gimple_build_call (decl, 1, arg0);
6131 if (!VOID_TYPE_P (type))
6133 if (gimple_in_ssa_p (cfun))
6134 res = make_ssa_name (type);
6135 else
6136 res = create_tmp_reg (type);
6137 gimple_call_set_lhs (stmt, res);
6139 gimple_set_location (stmt, loc);
6140 gimple_seq_add_stmt_without_update (seq, stmt);
6142 return res;
6145 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6146 (or no result if TYPE is void) with location LOC,
6147 simplifying it first if possible. Returns the built
6148 expression value (or NULL_TREE if TYPE is void) and appends
6149 statements possibly defining it to SEQ. */
6151 tree
6152 gimple_build (gimple_seq *seq, location_t loc,
6153 enum built_in_function fn, tree type, tree arg0, tree arg1)
6155 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6156 if (!res)
6158 tree decl = builtin_decl_implicit (fn);
6159 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6160 if (!VOID_TYPE_P (type))
6162 if (gimple_in_ssa_p (cfun))
6163 res = make_ssa_name (type);
6164 else
6165 res = create_tmp_reg (type);
6166 gimple_call_set_lhs (stmt, res);
6168 gimple_set_location (stmt, loc);
6169 gimple_seq_add_stmt_without_update (seq, stmt);
6171 return res;
6174 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6175 (or no result if TYPE is void) with location LOC,
6176 simplifying it first if possible. Returns the built
6177 expression value (or NULL_TREE if TYPE is void) and appends
6178 statements possibly defining it to SEQ. */
6180 tree
6181 gimple_build (gimple_seq *seq, location_t loc,
6182 enum built_in_function fn, tree type,
6183 tree arg0, tree arg1, tree arg2)
6185 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6186 seq, gimple_build_valueize);
6187 if (!res)
6189 tree decl = builtin_decl_implicit (fn);
6190 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6191 if (!VOID_TYPE_P (type))
6193 if (gimple_in_ssa_p (cfun))
6194 res = make_ssa_name (type);
6195 else
6196 res = create_tmp_reg (type);
6197 gimple_call_set_lhs (stmt, res);
6199 gimple_set_location (stmt, loc);
6200 gimple_seq_add_stmt_without_update (seq, stmt);
6202 return res;
6205 /* Build the conversion (TYPE) OP with a result of type TYPE
6206 with location LOC if such conversion is neccesary in GIMPLE,
6207 simplifying it first.
6208 Returns the built expression value and appends
6209 statements possibly defining it to SEQ. */
6211 tree
6212 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6214 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6215 return op;
6216 return gimple_build (seq, loc, NOP_EXPR, type, op);
6219 /* Build the conversion (ptrofftype) OP with a result of a type
6220 compatible with ptrofftype with location LOC if such conversion
6221 is neccesary in GIMPLE, simplifying it first.
6222 Returns the built expression value and appends
6223 statements possibly defining it to SEQ. */
6225 tree
6226 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6228 if (ptrofftype_p (TREE_TYPE (op)))
6229 return op;
6230 return gimple_convert (seq, loc, sizetype, op);
6233 /* Return true if the result of assignment STMT is known to be non-negative.
6234 If the return value is based on the assumption that signed overflow is
6235 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6236 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6238 static bool
6239 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6240 int depth)
6242 enum tree_code code = gimple_assign_rhs_code (stmt);
6243 switch (get_gimple_rhs_class (code))
6245 case GIMPLE_UNARY_RHS:
6246 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6247 gimple_expr_type (stmt),
6248 gimple_assign_rhs1 (stmt),
6249 strict_overflow_p, depth);
6250 case GIMPLE_BINARY_RHS:
6251 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6252 gimple_expr_type (stmt),
6253 gimple_assign_rhs1 (stmt),
6254 gimple_assign_rhs2 (stmt),
6255 strict_overflow_p, depth);
6256 case GIMPLE_TERNARY_RHS:
6257 return false;
6258 case GIMPLE_SINGLE_RHS:
6259 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6260 strict_overflow_p, depth);
6261 case GIMPLE_INVALID_RHS:
6262 break;
6264 gcc_unreachable ();
6267 /* Return true if return value of call STMT is known to be non-negative.
6268 If the return value is based on the assumption that signed overflow is
6269 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6270 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6272 static bool
6273 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6274 int depth)
6276 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6277 gimple_call_arg (stmt, 0) : NULL_TREE;
6278 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6279 gimple_call_arg (stmt, 1) : NULL_TREE;
6281 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6282 gimple_call_combined_fn (stmt),
6283 arg0,
6284 arg1,
6285 strict_overflow_p, depth);
6288 /* Return true if return value of call STMT is known to be non-negative.
6289 If the return value is based on the assumption that signed overflow is
6290 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6291 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6293 static bool
6294 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6295 int depth)
6297 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6299 tree arg = gimple_phi_arg_def (stmt, i);
6300 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6301 return false;
6303 return true;
6306 /* Return true if STMT is known to compute a non-negative value.
6307 If the return value is based on the assumption that signed overflow is
6308 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6309 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6311 bool
6312 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6313 int depth)
6315 switch (gimple_code (stmt))
6317 case GIMPLE_ASSIGN:
6318 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6319 depth);
6320 case GIMPLE_CALL:
6321 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6322 depth);
6323 case GIMPLE_PHI:
6324 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6325 depth);
6326 default:
6327 return false;
6331 /* Return true if the floating-point value computed by assignment STMT
6332 is known to have an integer value. We also allow +Inf, -Inf and NaN
6333 to be considered integer values. Return false for signaling NaN.
6335 DEPTH is the current nesting depth of the query. */
6337 static bool
6338 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6340 enum tree_code code = gimple_assign_rhs_code (stmt);
6341 switch (get_gimple_rhs_class (code))
6343 case GIMPLE_UNARY_RHS:
6344 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6345 gimple_assign_rhs1 (stmt), depth);
6346 case GIMPLE_BINARY_RHS:
6347 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6348 gimple_assign_rhs1 (stmt),
6349 gimple_assign_rhs2 (stmt), depth);
6350 case GIMPLE_TERNARY_RHS:
6351 return false;
6352 case GIMPLE_SINGLE_RHS:
6353 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6354 case GIMPLE_INVALID_RHS:
6355 break;
6357 gcc_unreachable ();
6360 /* Return true if the floating-point value computed by call STMT is known
6361 to have an integer value. We also allow +Inf, -Inf and NaN to be
6362 considered integer values. Return false for signaling NaN.
6364 DEPTH is the current nesting depth of the query. */
6366 static bool
6367 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6369 tree arg0 = (gimple_call_num_args (stmt) > 0
6370 ? gimple_call_arg (stmt, 0)
6371 : NULL_TREE);
6372 tree arg1 = (gimple_call_num_args (stmt) > 1
6373 ? gimple_call_arg (stmt, 1)
6374 : NULL_TREE);
6375 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6376 arg0, arg1, depth);
6379 /* Return true if the floating-point result of phi STMT is known to have
6380 an integer value. We also allow +Inf, -Inf and NaN to be considered
6381 integer values. Return false for signaling NaN.
6383 DEPTH is the current nesting depth of the query. */
6385 static bool
6386 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6388 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6390 tree arg = gimple_phi_arg_def (stmt, i);
6391 if (!integer_valued_real_single_p (arg, depth + 1))
6392 return false;
6394 return true;
6397 /* Return true if the floating-point value computed by STMT is known
6398 to have an integer value. We also allow +Inf, -Inf and NaN to be
6399 considered integer values. Return false for signaling NaN.
6401 DEPTH is the current nesting depth of the query. */
6403 bool
6404 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6406 switch (gimple_code (stmt))
6408 case GIMPLE_ASSIGN:
6409 return gimple_assign_integer_valued_real_p (stmt, depth);
6410 case GIMPLE_CALL:
6411 return gimple_call_integer_valued_real_p (stmt, depth);
6412 case GIMPLE_PHI:
6413 return gimple_phi_integer_valued_real_p (stmt, depth);
6414 default:
6415 return false;