gcc/gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-low.h"
56 #include "ipa-chkp.h"
57 #include "tree-cfg.h"
60 /* Return true if T is a constant and the value cast to a target char
61 can be represented by a host char.
62 Store the cast char constant in *P if so. */
64 static bool
65 target_char_cst_p (tree t, char *p)
67 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
68 return false;
70 *p = (char)tree_to_uhwi (t);
71 return true;
74 /* Return true when DECL can be referenced from the current unit.
75 FROM_DECL (if non-null) specifies the variable from whose constructor DECL was taken.
76 We can get declarations that cannot be referenced for various
77 reasons:
79 1) When analyzing C++ virtual tables.
80 C++ virtual tables do have known constructors even
81 when they are keyed to another compilation unit.
82 Those tables can contain pointers to methods and vars
83 in other units. Those methods have both STATIC and EXTERNAL
84 set.
85 2) In WHOPR mode devirtualization might lead to a reference
86 to a method that was partitioned elsewhere.
87 In this case we have static VAR_DECL or FUNCTION_DECL
88 that has no corresponding callgraph/varpool node
89 declaring the body.
90 3) COMDAT functions referred to by external vtables that
91 we devirtualize only during final compilation stage.
92 At this time we already decided that we will not output
93 the function body and thus we can't reference the symbol
94 directly. */
96 static bool
97 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
99 varpool_node *vnode;
100 struct cgraph_node *node;
101 symtab_node *snode;
103 if (DECL_ABSTRACT_P (decl))
104 return false;
106 /* We are concerned only about static/external vars and functions. */
107 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
108 || !VAR_OR_FUNCTION_DECL_P (decl))
109 return true;
111 /* Static objects can be referred to only if they were not optimized out yet. */
112 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
114 /* Before we start optimizing unreachable code we can be sure all
115 static objects are defined. */
116 if (symtab->function_flags_ready)
117 return true;
118 snode = symtab_node::get (decl);
119 if (!snode || !snode->definition)
120 return false;
121 node = dyn_cast <cgraph_node *> (snode);
122 return !node || !node->global.inlined_to;
125 /* We will later output the initializer, so we can refer to it.
126 So we are concerned only when DECL comes from initializer of
127 external var or var that has been optimized out. */
128 if (!from_decl
129 || !VAR_P (from_decl)
130 || (!DECL_EXTERNAL (from_decl)
131 && (vnode = varpool_node::get (from_decl)) != NULL
132 && vnode->definition)
133 || (flag_ltrans
134 && (vnode = varpool_node::get (from_decl)) != NULL
135 && vnode->in_other_partition))
136 return true;
137 /* We are folding a reference from an external vtable. The vtable may refer
138 to a symbol keyed to another compilation unit. The other compilation
139 unit may be in a separate DSO and the symbol may be hidden. */
140 if (DECL_VISIBILITY_SPECIFIED (decl)
141 && DECL_EXTERNAL (decl)
142 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
143 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
144 return false;
145 /* When a function is public, we can always introduce a new reference.
146 The exception is COMDAT functions, where introducing a direct
147 reference implies the need to include the function body in the current unit. */
148 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
149 return true;
150 /* We have a COMDAT. We are going to check if we still have a definition
151 or if the definition is going to be output in another partition.
152 Bypass this when gimplifying; all needed functions will be produced.
154 As observed in PR20991, for already optimized-out comdat virtual functions
155 it may be tempting not to give up, because the copy will be
156 output elsewhere when the corresponding vtable is output.
157 This is however not possible - the ABI specifies that COMDATs are output in
158 units where they are used, and when the other unit was compiled with LTO
159 it is possible that the vtable was kept public while the function itself
160 was privatized. */
161 if (!symtab->function_flags_ready)
162 return true;
164 snode = symtab_node::get (decl);
165 if (!snode
166 || ((!snode->definition || DECL_EXTERNAL (decl))
167 && (!snode->in_other_partition
168 || (!snode->forced_by_abi && !snode->force_output))))
169 return false;
170 node = dyn_cast <cgraph_node *> (snode);
171 return !node || !node->global.inlined_to;
174 /* CVAL is a value taken from the DECL_INITIAL of a variable. Try to transform
175 it into an acceptable form for is_gimple_min_invariant.
176 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
178 tree
179 canonicalize_constructor_val (tree cval, tree from_decl)
181 tree orig_cval = cval;
182 STRIP_NOPS (cval);
183 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
184 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
186 tree ptr = TREE_OPERAND (cval, 0);
187 if (is_gimple_min_invariant (ptr))
188 cval = build1_loc (EXPR_LOCATION (cval),
189 ADDR_EXPR, TREE_TYPE (ptr),
190 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
191 ptr,
192 fold_convert (ptr_type_node,
193 TREE_OPERAND (cval, 1))));
195 if (TREE_CODE (cval) == ADDR_EXPR)
197 tree base = NULL_TREE;
198 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
200 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
201 if (base)
202 TREE_OPERAND (cval, 0) = base;
204 else
205 base = get_base_address (TREE_OPERAND (cval, 0));
206 if (!base)
207 return NULL_TREE;
209 if (VAR_OR_FUNCTION_DECL_P (base)
210 && !can_refer_decl_in_current_unit_p (base, from_decl))
211 return NULL_TREE;
212 if (TREE_TYPE (base) == error_mark_node)
213 return NULL_TREE;
214 if (VAR_P (base))
215 TREE_ADDRESSABLE (base) = 1;
216 else if (TREE_CODE (base) == FUNCTION_DECL)
218 /* Make sure we create a cgraph node for functions we'll reference.
219 They can be non-existent if the reference comes from an entry
220 of an external vtable for example. */
221 cgraph_node::get_create (base);
223 /* Fixup types in global initializers. */
224 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
225 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
227 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
228 cval = fold_convert (TREE_TYPE (orig_cval), cval);
229 return cval;
231 if (TREE_OVERFLOW_P (cval))
232 return drop_tree_overflow (cval);
233 return orig_cval;
236 /* If SYM is a constant variable with known value, return the value.
237 NULL_TREE is returned otherwise. */
239 tree
240 get_symbol_constant_value (tree sym)
242 tree val = ctor_for_folding (sym);
243 if (val != error_mark_node)
245 if (val)
247 val = canonicalize_constructor_val (unshare_expr (val), sym);
248 if (val && is_gimple_min_invariant (val))
249 return val;
250 else
251 return NULL_TREE;
253 /* Variables declared 'const' without an initializer
254 have zero as the initializer if they may not be
255 overridden at link or run time. */
256 if (!val
257 && is_gimple_reg_type (TREE_TYPE (sym)))
258 return build_zero_cst (TREE_TYPE (sym));
261 return NULL_TREE;
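/* Editorial sketch (not part of GCC): the source-level situation the
   fold above enables.  Because 'answer' below is const with a known
   initializer and cannot be overridden at link or run time,
   get_symbol_constant_value lets reads of it fold to the literal
   value.  The variable and function names are hypothetical.  */
#if 0
static const int answer = 42;

int
use_answer (void)
{
  return answer + 1;	/* Folds to the constant 43.  */
}
#endif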
266 /* Subroutine of fold_stmt. We perform several simplifications of the
267 memory reference tree EXPR and make sure to re-gimplify them properly
268 after propagation of constant addresses. IS_LHS is true if the
269 reference is supposed to be an lvalue. */
271 static tree
272 maybe_fold_reference (tree expr, bool is_lhs)
274 tree result;
276 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
277 || TREE_CODE (expr) == REALPART_EXPR
278 || TREE_CODE (expr) == IMAGPART_EXPR)
279 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
280 return fold_unary_loc (EXPR_LOCATION (expr),
281 TREE_CODE (expr),
282 TREE_TYPE (expr),
283 TREE_OPERAND (expr, 0));
284 else if (TREE_CODE (expr) == BIT_FIELD_REF
285 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
286 return fold_ternary_loc (EXPR_LOCATION (expr),
287 TREE_CODE (expr),
288 TREE_TYPE (expr),
289 TREE_OPERAND (expr, 0),
290 TREE_OPERAND (expr, 1),
291 TREE_OPERAND (expr, 2));
293 if (!is_lhs
294 && (result = fold_const_aggregate_ref (expr))
295 && is_gimple_min_invariant (result))
296 return result;
298 return NULL_TREE;
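/* Editorial sketch (not part of GCC): a source-level example of the
   REALPART_EXPR/IMAGPART_EXPR folds handled above when the operand
   is a constant.  The function name is hypothetical.  */
#if 0
#include <complex.h>

double
real_part_demo (void)
{
  _Complex double z = 2.0 + 1.0 * I;	/* Constant operand.  */
  return __real__ z;			/* Folds to 2.0.  */
}
#endif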
302 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
303 replacement rhs for the statement or NULL_TREE if no simplification
304 could be made. It is assumed that the operands have been previously
305 folded. */
307 static tree
308 fold_gimple_assign (gimple_stmt_iterator *si)
310 gimple *stmt = gsi_stmt (*si);
311 enum tree_code subcode = gimple_assign_rhs_code (stmt);
312 location_t loc = gimple_location (stmt);
314 tree result = NULL_TREE;
316 switch (get_gimple_rhs_class (subcode))
318 case GIMPLE_SINGLE_RHS:
320 tree rhs = gimple_assign_rhs1 (stmt);
322 if (TREE_CLOBBER_P (rhs))
323 return NULL_TREE;
325 if (REFERENCE_CLASS_P (rhs))
326 return maybe_fold_reference (rhs, false);
328 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
330 tree val = OBJ_TYPE_REF_EXPR (rhs);
331 if (is_gimple_min_invariant (val))
332 return val;
333 else if (flag_devirtualize && virtual_method_call_p (rhs))
335 bool final;
336 vec <cgraph_node *>targets
337 = possible_polymorphic_call_targets (rhs, stmt, &final);
338 if (final && targets.length () <= 1 && dbg_cnt (devirt))
340 if (dump_enabled_p ())
342 location_t loc = gimple_location_safe (stmt);
343 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
344 "resolving virtual function address "
345 "reference to function %s\n",
346 targets.length () == 1
347 ? targets[0]->name ()
348 : "NULL");
350 if (targets.length () == 1)
352 val = fold_convert (TREE_TYPE (val),
353 build_fold_addr_expr_loc
354 (loc, targets[0]->decl));
355 STRIP_USELESS_TYPE_CONVERSION (val);
357 else
358 /* We cannot use __builtin_unreachable here because it
359 cannot have its address taken. */
360 val = build_int_cst (TREE_TYPE (val), 0);
361 return val;
366 else if (TREE_CODE (rhs) == ADDR_EXPR)
368 tree ref = TREE_OPERAND (rhs, 0);
369 tree tem = maybe_fold_reference (ref, true);
370 if (tem
371 && TREE_CODE (tem) == MEM_REF
372 && integer_zerop (TREE_OPERAND (tem, 1)))
373 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
374 else if (tem)
375 result = fold_convert (TREE_TYPE (rhs),
376 build_fold_addr_expr_loc (loc, tem));
377 else if (TREE_CODE (ref) == MEM_REF
378 && integer_zerop (TREE_OPERAND (ref, 1)))
379 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
381 if (result)
383 /* Strip away useless type conversions. Both the
384 NON_LVALUE_EXPR that may have been added by fold, and
385 "useless" type conversions that might now be apparent
386 due to propagation. */
387 STRIP_USELESS_TYPE_CONVERSION (result);
389 if (result != rhs && valid_gimple_rhs_p (result))
390 return result;
394 else if (TREE_CODE (rhs) == CONSTRUCTOR
395 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
397 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
398 unsigned i;
399 tree val;
401 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
402 if (! CONSTANT_CLASS_P (val))
403 return NULL_TREE;
405 return build_vector_from_ctor (TREE_TYPE (rhs),
406 CONSTRUCTOR_ELTS (rhs));
409 else if (DECL_P (rhs))
410 return get_symbol_constant_value (rhs);
412 break;
414 case GIMPLE_UNARY_RHS:
415 break;
417 case GIMPLE_BINARY_RHS:
418 break;
420 case GIMPLE_TERNARY_RHS:
421 result = fold_ternary_loc (loc, subcode,
422 TREE_TYPE (gimple_assign_lhs (stmt)),
423 gimple_assign_rhs1 (stmt),
424 gimple_assign_rhs2 (stmt),
425 gimple_assign_rhs3 (stmt));
427 if (result)
429 STRIP_USELESS_TYPE_CONVERSION (result);
430 if (valid_gimple_rhs_p (result))
431 return result;
433 break;
435 case GIMPLE_INVALID_RHS:
436 gcc_unreachable ();
439 return NULL_TREE;
443 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
444 adjusting the replacement stmts' location and virtual operands.
445 If the statement has an lhs, the last stmt in the sequence is expected
446 to assign to that lhs. */
448 static void
449 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
451 gimple *stmt = gsi_stmt (*si_p);
453 if (gimple_has_location (stmt))
454 annotate_all_with_location (stmts, gimple_location (stmt));
456 /* First iterate over the replacement statements backward, assigning
457 virtual operands to their defining statements. */
458 gimple *laststore = NULL;
459 for (gimple_stmt_iterator i = gsi_last (stmts);
460 !gsi_end_p (i); gsi_prev (&i))
462 gimple *new_stmt = gsi_stmt (i);
463 if ((gimple_assign_single_p (new_stmt)
464 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
465 || (is_gimple_call (new_stmt)
466 && (gimple_call_flags (new_stmt)
467 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
469 tree vdef;
470 if (!laststore)
471 vdef = gimple_vdef (stmt);
472 else
473 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
474 gimple_set_vdef (new_stmt, vdef);
475 if (vdef && TREE_CODE (vdef) == SSA_NAME)
476 SSA_NAME_DEF_STMT (vdef) = new_stmt;
477 laststore = new_stmt;
481 /* Second iterate over the statements forward, assigning virtual
482 operands to their uses. */
483 tree reaching_vuse = gimple_vuse (stmt);
484 for (gimple_stmt_iterator i = gsi_start (stmts);
485 !gsi_end_p (i); gsi_next (&i))
487 gimple *new_stmt = gsi_stmt (i);
488 /* If the new statement possibly has a VUSE, update it with the exact SSA
489 name we know will reach this one. */
490 if (gimple_has_mem_ops (new_stmt))
491 gimple_set_vuse (new_stmt, reaching_vuse);
492 gimple_set_modified (new_stmt, true);
493 if (gimple_vdef (new_stmt))
494 reaching_vuse = gimple_vdef (new_stmt);
497 /* If the new sequence does not do a store, release the virtual
498 definition of the original statement. */
499 if (reaching_vuse
500 && reaching_vuse == gimple_vuse (stmt))
502 tree vdef = gimple_vdef (stmt);
503 if (vdef
504 && TREE_CODE (vdef) == SSA_NAME)
506 unlink_stmt_vdef (stmt);
507 release_ssa_name (vdef);
511 /* Finally replace the original statement with the sequence. */
512 gsi_replace_with_seq (si_p, stmts, false);
515 /* Convert EXPR into a GIMPLE value suitable for substitution on the
516 RHS of an assignment. Insert the necessary statements before
517 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
518 is replaced. If the call is expected to produce a result, then it
519 is replaced by an assignment of the new RHS to the result variable.
520 If the result is to be ignored, then the call is replaced by a
521 GIMPLE_NOP. A proper VDEF chain is retained by making the first
522 VUSE and the last VDEF of the whole sequence be the same as the replaced
523 statement and using new SSA names for stores in between. */
525 void
526 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
528 tree lhs;
529 gimple *stmt, *new_stmt;
530 gimple_stmt_iterator i;
531 gimple_seq stmts = NULL;
533 stmt = gsi_stmt (*si_p);
535 gcc_assert (is_gimple_call (stmt));
537 push_gimplify_context (gimple_in_ssa_p (cfun));
539 lhs = gimple_call_lhs (stmt);
540 if (lhs == NULL_TREE)
542 gimplify_and_add (expr, &stmts);
543 /* We can end up folding a memcpy of an empty class assignment,
544 which gets optimized away by C++ gimplification. */
545 if (gimple_seq_empty_p (stmts))
547 pop_gimplify_context (NULL);
548 if (gimple_in_ssa_p (cfun))
550 unlink_stmt_vdef (stmt);
551 release_defs (stmt);
553 gsi_replace (si_p, gimple_build_nop (), false);
554 return;
557 else
559 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
560 new_stmt = gimple_build_assign (lhs, tmp);
561 i = gsi_last (stmts);
562 gsi_insert_after_without_update (&i, new_stmt,
563 GSI_CONTINUE_LINKING);
566 pop_gimplify_context (NULL);
568 gsi_replace_with_seq_vops (si_p, stmts);
572 /* Replace the call at *GSI with the gimple value VAL. */
574 static void
575 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
577 gimple *stmt = gsi_stmt (*gsi);
578 tree lhs = gimple_call_lhs (stmt);
579 gimple *repl;
580 if (lhs)
582 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
583 val = fold_convert (TREE_TYPE (lhs), val);
584 repl = gimple_build_assign (lhs, val);
586 else
587 repl = gimple_build_nop ();
588 tree vdef = gimple_vdef (stmt);
589 if (vdef && TREE_CODE (vdef) == SSA_NAME)
591 unlink_stmt_vdef (stmt);
592 release_ssa_name (vdef);
594 gsi_replace (gsi, repl, false);
597 /* Replace the call at *GSI with the new call REPL and fold that
598 again. */
600 static void
601 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
603 gimple *stmt = gsi_stmt (*gsi);
604 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
605 gimple_set_location (repl, gimple_location (stmt));
606 if (gimple_vdef (stmt)
607 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
609 gimple_set_vdef (repl, gimple_vdef (stmt));
610 gimple_set_vuse (repl, gimple_vuse (stmt));
611 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
613 gsi_replace (gsi, repl, false);
614 fold_stmt (gsi);
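/* Editorial sketch (not part of GCC proper): how a folder built on the
   two helpers above would typically replace strlen of a string literal
   with its constant value.  The folder name is hypothetical; the
   helpers and c_getstr are the real ones from this file and builtins.c.  */
#if 0
static bool
example_fold_strlen_literal (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  const char *p = c_getstr (gimple_call_arg (stmt, 0));
  if (!p)
    return false;
  /* replace_call_with_value releases the call's VDEF and substitutes
     the constant for the lhs, as for any folded call.  */
  replace_call_with_value (gsi, build_int_cst (size_type_node,
					       strlen (p)));
  return true;
}
#endif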
617 /* Return true if VAR is a VAR_DECL or a component thereof. */
619 static bool
620 var_decl_component_p (tree var)
622 tree inner = var;
623 while (handled_component_p (inner))
624 inner = TREE_OPERAND (inner, 0);
625 return SSA_VAR_P (inner);
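/* Editorial sketch (not part of GCC): accesses for which
   var_decl_component_p holds.  The innermost base of the reference
   must be a declared variable, possibly wrapped in component
   references.  */
#if 0
struct point { int x, y; };
static struct point p;
static int arr[4];

/* var_decl_component_p is true for p, p.y and arr[2] (the innermost
   base is a VAR_DECL), but false for a dereference *q, whose
   innermost tree is a memory reference rather than a declaration.  */
#endif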
628 /* Fold function call to builtin mem{{,p}cpy,move}. Return
629 false if no simplification can be made.
630 If ENDP is 0, return DEST (like memcpy).
631 If ENDP is 1, return DEST+LEN (like mempcpy).
632 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
633 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
634 (memmove). */
636 static bool
637 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
638 tree dest, tree src, int endp)
640 gimple *stmt = gsi_stmt (*gsi);
641 tree lhs = gimple_call_lhs (stmt);
642 tree len = gimple_call_arg (stmt, 2);
643 tree destvar, srcvar;
644 location_t loc = gimple_location (stmt);
646 /* If the LEN parameter is zero, return DEST. */
647 if (integer_zerop (len))
649 gimple *repl;
650 if (gimple_call_lhs (stmt))
651 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
652 else
653 repl = gimple_build_nop ();
654 tree vdef = gimple_vdef (stmt);
655 if (vdef && TREE_CODE (vdef) == SSA_NAME)
657 unlink_stmt_vdef (stmt);
658 release_ssa_name (vdef);
660 gsi_replace (gsi, repl, false);
661 return true;
664 /* If SRC and DEST are the same (and not volatile), return
665 DEST{,+LEN,+LEN-1}. */
666 if (operand_equal_p (src, dest, 0))
668 unlink_stmt_vdef (stmt);
669 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
670 release_ssa_name (gimple_vdef (stmt));
671 if (!lhs)
673 gsi_replace (gsi, gimple_build_nop (), false);
674 return true;
676 goto done;
678 else
680 tree srctype, desttype;
681 unsigned int src_align, dest_align;
682 tree off0;
684 /* Inlining of memcpy/memmove may cause bounds to be lost (if we copy
685 pointers as a wide integer) and may also result in huge function
686 size because of the inlined bounds copy. Thus don't inline for
687 functions we want to instrument. */
688 if (flag_check_pointer_bounds
689 && chkp_instrumentable_p (cfun->decl)
690 /* Even if the data may contain pointers, we can inline if we copy
691 less than a pointer size. */
692 && (!tree_fits_uhwi_p (len)
693 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
694 return false;
696 /* Build accesses at offset zero with a ref-all character type. */
697 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
698 ptr_mode, true), 0);
700 /* If we can perform the copy efficiently by first doing all loads
701 and then all stores, inline it that way. Currently "efficiently"
702 means that we can load all the memory into a single integer
703 register, which is what MOVE_MAX gives us. */
704 src_align = get_pointer_alignment (src);
705 dest_align = get_pointer_alignment (dest);
706 if (tree_fits_uhwi_p (len)
707 && compare_tree_int (len, MOVE_MAX) <= 0
708 /* ??? Don't transform copies from strings with known length; this
709 confuses tree-ssa-strlen.c. This doesn't handle
710 the case in gcc.dg/strlenopt-8.c, which is XFAILed for that
711 reason. */
712 && !c_strlen (src, 2))
714 unsigned ilen = tree_to_uhwi (len);
715 if (pow2p_hwi (ilen))
717 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
718 if (type
719 && TYPE_MODE (type) != BLKmode
720 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
721 == ilen * 8)
722 /* If the destination pointer is not aligned we must be able
723 to emit an unaligned store. */
724 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
725 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
726 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
727 != CODE_FOR_nothing)))
729 tree srctype = type;
730 tree desttype = type;
731 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
732 srctype = build_aligned_type (type, src_align);
733 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
734 tree tem = fold_const_aggregate_ref (srcmem);
735 if (tem)
736 srcmem = tem;
737 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
738 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
739 src_align)
740 && (optab_handler (movmisalign_optab,
741 TYPE_MODE (type))
742 == CODE_FOR_nothing))
743 srcmem = NULL_TREE;
744 if (srcmem)
746 gimple *new_stmt;
747 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
749 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
750 if (gimple_in_ssa_p (cfun))
751 srcmem = make_ssa_name (TREE_TYPE (srcmem),
752 new_stmt);
753 else
754 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
755 gimple_assign_set_lhs (new_stmt, srcmem);
756 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
757 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
759 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
760 desttype = build_aligned_type (type, dest_align);
761 new_stmt
762 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
763 dest, off0),
764 srcmem);
765 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
766 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
767 if (gimple_vdef (new_stmt)
768 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
769 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
770 if (!lhs)
772 gsi_replace (gsi, new_stmt, false);
773 return true;
775 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
776 goto done;
782 if (endp == 3)
784 /* Both DEST and SRC must be pointer types.
785 ??? This is what old code did. Is the testing for pointer types
786 really mandatory?
788 If either SRC is readonly or length is 1, we can use memcpy. */
789 if (!dest_align || !src_align)
790 return false;
791 if (readonly_data_expr (src)
792 || (tree_fits_uhwi_p (len)
793 && (MIN (src_align, dest_align) / BITS_PER_UNIT
794 >= tree_to_uhwi (len))))
796 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
797 if (!fn)
798 return false;
799 gimple_call_set_fndecl (stmt, fn);
800 gimple_call_set_arg (stmt, 0, dest);
801 gimple_call_set_arg (stmt, 1, src);
802 fold_stmt (gsi);
803 return true;
806 /* If *src and *dest can't overlap, optimize into memcpy as well. */
807 if (TREE_CODE (src) == ADDR_EXPR
808 && TREE_CODE (dest) == ADDR_EXPR)
810 tree src_base, dest_base, fn;
811 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
812 HOST_WIDE_INT maxsize;
814 srcvar = TREE_OPERAND (src, 0);
815 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
816 if (src_base == NULL)
817 src_base = srcvar;
818 destvar = TREE_OPERAND (dest, 0);
819 dest_base = get_addr_base_and_unit_offset (destvar,
820 &dest_offset);
821 if (dest_base == NULL)
822 dest_base = destvar;
823 if (tree_fits_uhwi_p (len))
824 maxsize = tree_to_uhwi (len);
825 else
826 maxsize = -1;
827 if (SSA_VAR_P (src_base)
828 && SSA_VAR_P (dest_base))
830 if (operand_equal_p (src_base, dest_base, 0)
831 && ranges_overlap_p (src_offset, maxsize,
832 dest_offset, maxsize))
833 return false;
835 else if (TREE_CODE (src_base) == MEM_REF
836 && TREE_CODE (dest_base) == MEM_REF)
838 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
839 TREE_OPERAND (dest_base, 0), 0))
840 return false;
841 offset_int off = mem_ref_offset (src_base) + src_offset;
842 if (!wi::fits_shwi_p (off))
843 return false;
844 src_offset = off.to_shwi ();
846 off = mem_ref_offset (dest_base) + dest_offset;
847 if (!wi::fits_shwi_p (off))
848 return false;
849 dest_offset = off.to_shwi ();
850 if (ranges_overlap_p (src_offset, maxsize,
851 dest_offset, maxsize))
852 return false;
854 else
855 return false;
857 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
858 if (!fn)
859 return false;
860 gimple_call_set_fndecl (stmt, fn);
861 gimple_call_set_arg (stmt, 0, dest);
862 gimple_call_set_arg (stmt, 1, src);
863 fold_stmt (gsi);
864 return true;
867 /* If the destination and source do not alias, optimize into
868 memcpy as well. */
869 if ((is_gimple_min_invariant (dest)
870 || TREE_CODE (dest) == SSA_NAME)
871 && (is_gimple_min_invariant (src)
872 || TREE_CODE (src) == SSA_NAME))
874 ao_ref destr, srcr;
875 ao_ref_init_from_ptr_and_size (&destr, dest, len);
876 ao_ref_init_from_ptr_and_size (&srcr, src, len);
877 if (!refs_may_alias_p_1 (&destr, &srcr, false))
879 tree fn;
880 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
881 if (!fn)
882 return false;
883 gimple_call_set_fndecl (stmt, fn);
884 gimple_call_set_arg (stmt, 0, dest);
885 gimple_call_set_arg (stmt, 1, src);
886 fold_stmt (gsi);
887 return true;
891 return false;
894 if (!tree_fits_shwi_p (len))
895 return false;
896 /* FIXME:
897 This logic loses for arguments like (type *)malloc (sizeof (type)),
898 since we strip the casts down to the VOID return value of malloc.
899 Perhaps we ought to inherit the type from a non-VOID argument here? */
900 STRIP_NOPS (src);
901 STRIP_NOPS (dest);
902 if (!POINTER_TYPE_P (TREE_TYPE (src))
903 || !POINTER_TYPE_P (TREE_TYPE (dest)))
904 return false;
905 /* In the following try to find a type that is most natural to be
906 used for the memcpy source and destination and that allows
907 the most optimization when memcpy is turned into a plain assignment
908 using that type. In theory we could always use a char[len] type
909 but that only gains us that the destination and source will possibly
910 no longer have their address taken. */
911 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
912 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
914 tree tem = TREE_OPERAND (src, 0);
915 STRIP_NOPS (tem);
916 if (tem != TREE_OPERAND (src, 0))
917 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
919 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
921 tree tem = TREE_OPERAND (dest, 0);
922 STRIP_NOPS (tem);
923 if (tem != TREE_OPERAND (dest, 0))
924 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
926 srctype = TREE_TYPE (TREE_TYPE (src));
927 if (TREE_CODE (srctype) == ARRAY_TYPE
928 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
930 srctype = TREE_TYPE (srctype);
931 STRIP_NOPS (src);
932 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
934 desttype = TREE_TYPE (TREE_TYPE (dest));
935 if (TREE_CODE (desttype) == ARRAY_TYPE
936 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
938 desttype = TREE_TYPE (desttype);
939 STRIP_NOPS (dest);
940 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
942 if (TREE_ADDRESSABLE (srctype)
943 || TREE_ADDRESSABLE (desttype))
944 return false;
946 /* Make sure we are not copying using a floating-point mode or
947 a type whose size possibly does not match its precision. */
948 if (FLOAT_MODE_P (TYPE_MODE (desttype))
949 || TREE_CODE (desttype) == BOOLEAN_TYPE
950 || TREE_CODE (desttype) == ENUMERAL_TYPE)
951 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
952 if (FLOAT_MODE_P (TYPE_MODE (srctype))
953 || TREE_CODE (srctype) == BOOLEAN_TYPE
954 || TREE_CODE (srctype) == ENUMERAL_TYPE)
955 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
956 if (!srctype)
957 srctype = desttype;
958 if (!desttype)
959 desttype = srctype;
960 if (!srctype)
961 return false;
963 src_align = get_pointer_alignment (src);
964 dest_align = get_pointer_alignment (dest);
965 if (dest_align < TYPE_ALIGN (desttype)
966 || src_align < TYPE_ALIGN (srctype))
967 return false;
969 destvar = dest;
970 STRIP_NOPS (destvar);
971 if (TREE_CODE (destvar) == ADDR_EXPR
972 && var_decl_component_p (TREE_OPERAND (destvar, 0))
973 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
974 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
975 else
976 destvar = NULL_TREE;
978 srcvar = src;
979 STRIP_NOPS (srcvar);
980 if (TREE_CODE (srcvar) == ADDR_EXPR
981 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
982 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
984 if (!destvar
985 || src_align >= TYPE_ALIGN (desttype))
986 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
987 srcvar, off0);
988 else if (!STRICT_ALIGNMENT)
990 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
991 src_align);
992 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
994 else
995 srcvar = NULL_TREE;
997 else
998 srcvar = NULL_TREE;
1000 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1001 return false;
1003 if (srcvar == NULL_TREE)
1005 STRIP_NOPS (src);
1006 if (src_align >= TYPE_ALIGN (desttype))
1007 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1008 else
1010 if (STRICT_ALIGNMENT)
1011 return false;
1012 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1013 src_align);
1014 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1017 else if (destvar == NULL_TREE)
1019 STRIP_NOPS (dest);
1020 if (dest_align >= TYPE_ALIGN (srctype))
1021 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1022 else
1024 if (STRICT_ALIGNMENT)
1025 return false;
1026 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1027 dest_align);
1028 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1032 gimple *new_stmt;
1033 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1035 tree tem = fold_const_aggregate_ref (srcvar);
1036 if (tem)
1037 srcvar = tem;
1038 if (! is_gimple_min_invariant (srcvar))
1040 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1041 if (gimple_in_ssa_p (cfun))
1042 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1043 else
1044 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1045 gimple_assign_set_lhs (new_stmt, srcvar);
1046 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1047 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1050 new_stmt = gimple_build_assign (destvar, srcvar);
1051 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1052 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1053 if (gimple_vdef (new_stmt)
1054 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1055 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1056 if (!lhs)
1058 gsi_replace (gsi, new_stmt, false);
1059 return true;
1061 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1064 done:
1065 gimple_seq stmts = NULL;
1066 if (endp == 0 || endp == 3)
1067 len = NULL_TREE;
1068 else if (endp == 2)
1069 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1070 ssize_int (1));
1071 if (endp == 2 || endp == 1)
1073 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1074 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1075 TREE_TYPE (dest), dest, len);
1078 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1079 gimple *repl = gimple_build_assign (lhs, dest);
1080 gsi_replace (gsi, repl, false);
1081 return true;
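/* Editorial sketch (not part of GCC): the ENDP return-value convention
   used above, restated in terms of the library calls being folded.
   The function is hypothetical and assumes DEST and SRC are valid for
   LEN bytes.  */
#if 0
#include <string.h>

char *
endp_demo (char *dest, const char *src, size_t len)
{
  memcpy (dest, src, len);	/* ENDP == 0: result is dest.  */
  return dest + len;		/* ENDP == 1 (mempcpy): dest + len;
				   ENDP == 2 (stpcpy): dest + len - 1,
				   where len counts the trailing NUL;
				   ENDP == 3 (memmove): dest again,
				   but overlap is allowed.  */
}
#endif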
1084 /* Fold a function call to builtin memset or bzero at *GSI, setting the
1085 memory of size LEN to the value C. Return whether a simplification was made. */
1087 static bool
1088 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1090 gimple *stmt = gsi_stmt (*gsi);
1091 tree etype;
1092 unsigned HOST_WIDE_INT length, cval;
1094 /* If the LEN parameter is zero, return DEST. */
1095 if (integer_zerop (len))
1097 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1098 return true;
1101 if (! tree_fits_uhwi_p (len))
1102 return false;
1104 if (TREE_CODE (c) != INTEGER_CST)
1105 return false;
1107 tree dest = gimple_call_arg (stmt, 0);
1108 tree var = dest;
1109 if (TREE_CODE (var) != ADDR_EXPR)
1110 return false;
1112 var = TREE_OPERAND (var, 0);
1113 if (TREE_THIS_VOLATILE (var))
1114 return false;
1116 etype = TREE_TYPE (var);
1117 if (TREE_CODE (etype) == ARRAY_TYPE)
1118 etype = TREE_TYPE (etype);
1120 if (!INTEGRAL_TYPE_P (etype)
1121 && !POINTER_TYPE_P (etype))
1122 return false;
1124 if (! var_decl_component_p (var))
1125 return false;
1127 length = tree_to_uhwi (len);
1128 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1129 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1130 return false;
1132 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1133 return false;
1135 if (integer_zerop (c))
1136 cval = 0;
1137 else
1139 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1140 return false;
1142 cval = TREE_INT_CST_LOW (c);
1143 cval &= 0xff;
1144 cval |= cval << 8;
1145 cval |= cval << 16;
1146 cval |= (cval << 31) << 1;
1149 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1150 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1151 gimple_set_vuse (store, gimple_vuse (stmt));
1152 tree vdef = gimple_vdef (stmt);
1153 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1155 gimple_set_vdef (store, gimple_vdef (stmt));
1156 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1158 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1159 if (gimple_call_lhs (stmt))
1161 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1162 gsi_replace (gsi, asgn, false);
1164 else
1166 gimple_stmt_iterator gsi2 = *gsi;
1167 gsi_prev (gsi);
1168 gsi_remove (&gsi2, true);
1171 return true;
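/* Editorial sketch (not part of GCC): the byte replication performed
   above, extracted into standalone form.  Assuming 8-bit bytes, the
   single memset value is smeared across a 64-bit word; the split
   final shift mirrors the code above and avoids an undefined shift by
   the full width on hosts where HOST_WIDE_INT is only 32 bits.  */
#if 0
#include <stdint.h>

static uint64_t
replicate_byte (uint64_t cval)
{
  cval &= 0xff;
  cval |= cval << 8;
  cval |= cval << 16;
  cval |= (cval << 31) << 1;
  return cval;	/* replicate_byte (0xAB) == 0xABABABABABABABAB.  */
}
#endif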
1175 /* Obtain the minimum and maximum string length or minimum and maximum
1176 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1177 If ARG is an SSA name variable, follow its use-def chains. When
1178 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1179 if we are unable to determine the length or value, return false.
1180 VISITED is a bitmap of visited variables.
1181 TYPE is 0 if string length should be obtained, 1 for maximum string
1182 length and 2 for maximum value ARG can have.
1183 When FUZZY is set and the length of a string cannot be determined,
1184 the function instead considers as the maximum possible length the
1185 size of a character array it may refer to. */
1187 static bool
1188 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1189 bool fuzzy)
1191 tree var, val;
1192 gimple *def_stmt;
1194 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1195 but MINLEN may be cleared during the execution of the function. */
1196 tree *minlen = length;
1197 tree *const maxlen = length + 1;
1199 if (TREE_CODE (arg) != SSA_NAME)
1201 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1202 if (TREE_CODE (arg) == ADDR_EXPR
1203 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1204 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1206 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1207 if (TREE_CODE (aop0) == INDIRECT_REF
1208 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1209 return get_range_strlen (TREE_OPERAND (aop0, 0),
1210 length, visited, type, fuzzy);
1213 if (type == 2)
1215 val = arg;
1216 if (TREE_CODE (val) != INTEGER_CST
1217 || tree_int_cst_sgn (val) < 0)
1218 return false;
1220 else
1221 val = c_strlen (arg, 1);
1223 if (!val && fuzzy)
1225 if (TREE_CODE (arg) == ADDR_EXPR)
1226 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1227 visited, type, fuzzy);
1229 if (TREE_CODE (arg) == COMPONENT_REF
1230 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
1232 /* Use the type of the member array to determine the upper
1233 bound on the length of the array. This may be overly
1234 optimistic if the array itself isn't NUL-terminated and
1235 the caller relies on the subsequent member to contain
1236 the NUL. */
1237 arg = TREE_OPERAND (arg, 1);
1238 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1239 if (!val || integer_zerop (val))
1240 return false;
1241 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1242 integer_one_node);
1243 /* Avoid using the array size as the minimum. */
1244 minlen = NULL;
1248 if (!val)
1249 return false;
1251 if (minlen
1252 && (!*minlen
1253 || (type > 0
1254 && TREE_CODE (*minlen) == INTEGER_CST
1255 && TREE_CODE (val) == INTEGER_CST
1256 && tree_int_cst_lt (val, *minlen))))
1257 *minlen = val;
1259 if (*maxlen)
1261 if (type > 0)
1263 if (TREE_CODE (*maxlen) != INTEGER_CST
1264 || TREE_CODE (val) != INTEGER_CST)
1265 return false;
1267 if (tree_int_cst_lt (*maxlen, val))
1268 *maxlen = val;
1269 return true;
1271 else if (simple_cst_equal (val, *maxlen) != 1)
1272 return false;
1275 *maxlen = val;
1276 return true;
1279 /* If ARG is registered for SSA update we cannot look at its defining
1280 statement. */
1281 if (name_registered_for_update_p (arg))
1282 return false;
1284 /* If we were already here, break the infinite cycle. */
1285 if (!*visited)
1286 *visited = BITMAP_ALLOC (NULL);
1287 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1288 return true;
1290 var = arg;
1291 def_stmt = SSA_NAME_DEF_STMT (var);
1293 switch (gimple_code (def_stmt))
1295 case GIMPLE_ASSIGN:
1296 /* The RHS of the statement defining VAR must either have a
1297 constant length or come from another SSA_NAME with a constant
1298 length. */
1299 if (gimple_assign_single_p (def_stmt)
1300 || gimple_assign_unary_nop_p (def_stmt))
1302 tree rhs = gimple_assign_rhs1 (def_stmt);
1303 return get_range_strlen (rhs, length, visited, type, fuzzy);
1305 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1307 tree op2 = gimple_assign_rhs2 (def_stmt);
1308 tree op3 = gimple_assign_rhs3 (def_stmt);
1309 return get_range_strlen (op2, length, visited, type, fuzzy)
1310 && get_range_strlen (op3, length, visited, type, fuzzy);
1312 return false;
1314 case GIMPLE_PHI:
1316 /* All the arguments of the PHI node must have the same constant
1317 length. */
1318 unsigned i;
1320 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1322 tree arg = gimple_phi_arg (def_stmt, i)->def;
1324 /* If this PHI has itself as an argument, we cannot
1325 determine the string length of this argument. However,
1326 if we can find a constant string length for the other
1327 PHI args then we can still be sure that this is a
1328 constant string length. So be optimistic and just
1329 continue with the next argument. */
1330 if (arg == gimple_phi_result (def_stmt))
1331 continue;
1333 if (!get_range_strlen (arg, length, visited, type, fuzzy))
1335 if (fuzzy)
1336 *maxlen = build_all_ones_cst (size_type_node);
1337 else
1338 return false;
1342 return true;
1344 default:
1345 return false;
1349 /* Determine the minimum and maximum value or string length that ARG
1350 refers to and store each in the first two elements of MINMAXLEN.
1351 For expressions that point to strings of unknown lengths that are
1352 character arrays, use the upper bound of the array as the maximum
1353 length. For example, given an expression like 'x ? array : "xyz"'
1354 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1355 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1356 stored in array.
1359 void get_range_strlen (tree arg, tree minmaxlen[2])
1361 bitmap visited = NULL;
1363 minmaxlen[0] = NULL_TREE;
1364 minmaxlen[1] = NULL_TREE;
1366 get_range_strlen (arg, minmaxlen, &visited, 1, true);
1368 if (visited)
1369 BITMAP_FREE (visited);
1372 tree
1373 get_maxval_strlen (tree arg, int type)
1375 bitmap visited = NULL;
1376 tree len[2] = { NULL_TREE, NULL_TREE };
1377 if (!get_range_strlen (arg, len, &visited, type, false))
1378 len[1] = NULL_TREE;
1379 if (visited)
1380 BITMAP_FREE (visited);
1382 return len[1];
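/* Editorial sketch (not part of GCC): the ranges the fuzzy variant
   above computes for a conditional string expression, as described in
   the comment before the public get_range_strlen.  */
#if 0
static char array[8];

const char *
pick (int x)
{
  /* For this expression MINMAXLEN[0] is 3 (from "xyz") and
     MINMAXLEN[1] is 7, the longest string 'array' can hold.  */
  return x ? array : "xyz";
}
#endif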
1386 /* Fold a function call to builtin strcpy with arguments DEST and SRC.
1388 Return false if no simplification can be made. */
1390 static bool
1391 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1392 tree dest, tree src)
1394 location_t loc = gimple_location (gsi_stmt (*gsi));
1395 tree fn;
1397 /* If SRC and DEST are the same (and not volatile), return DEST. */
1398 if (operand_equal_p (src, dest, 0))
1400 replace_call_with_value (gsi, dest);
1401 return true;
1404 if (optimize_function_for_size_p (cfun))
1405 return false;
1407 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1408 if (!fn)
1409 return false;
1411 tree len = get_maxval_strlen (src, 0);
1412 if (!len)
1413 return false;
1415 len = fold_convert_loc (loc, size_type_node, len);
1416 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1417 len = force_gimple_operand_gsi (gsi, len, true,
1418 NULL_TREE, true, GSI_SAME_STMT);
1419 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1420 replace_call_with_call_and_fold (gsi, repl);
1421 return true;
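/* Editorial sketch (not part of GCC): the source-level effect of the
   strcpy fold above when the source length is known; the + 1 covers
   the terminating NUL.  The function name is hypothetical.  */
#if 0
#include <string.h>

void
copy_hello (char *dest)
{
  /* strcpy (dest, "hello") becomes:  */
  memcpy (dest, "hello", 5 + 1);
}
#endif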
1424 /* Fold a function call to builtin strncpy with arguments DEST, SRC,
1425 and LEN. Return false if no simplification can be made. */
1428 static bool
1429 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1430 tree dest, tree src, tree len)
1432 location_t loc = gimple_location (gsi_stmt (*gsi));
1433 tree fn;
1435 /* If the LEN parameter is zero, return DEST. */
1436 if (integer_zerop (len))
1438 replace_call_with_value (gsi, dest);
1439 return true;
1442 /* We can't compare slen with len as constants below if len is not a
1443 constant. */
1444 if (TREE_CODE (len) != INTEGER_CST)
1445 return false;
1447 /* Now, we must be passed a constant src ptr parameter. */
1448 tree slen = get_maxval_strlen (src, 0);
1449 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1450 return false;
1452 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1454 /* We do not support simplification of this case, though we do
1455 support it when expanding trees into RTL. */
1456 /* FIXME: generate a call to __builtin_memset. */
1457 if (tree_int_cst_lt (slen, len))
1458 return false;
1460 /* OK, transform into builtin memcpy. */
1461 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1462 if (!fn)
1463 return false;
1465 len = fold_convert_loc (loc, size_type_node, len);
1466 len = force_gimple_operand_gsi (gsi, len, true,
1467 NULL_TREE, true, GSI_SAME_STMT);
1468 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1469 replace_call_with_call_and_fold (gsi, repl);
1470 return true;
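/* Editorial sketch (not part of GCC): the strncpy fold above fires
   only when strlen (SRC) + 1 >= LEN, so no zero padding beyond the
   copied bytes is needed and a plain memcpy is equivalent.  The
   function name is hypothetical.  */
#if 0
#include <string.h>

void
strncpy_demo (char *dest)
{
  /* strncpy (dest, "hello", 4) becomes:  */
  memcpy (dest, "hello", 4);
}
#endif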
1473 /* Fold function call to builtin strchr or strrchr.
1474 If both arguments are constant, evaluate and fold the result,
1475 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1476 In general strlen is significantly faster than strchr
1477 due to being a simpler operation. */
1478 static bool
1479 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1481 gimple *stmt = gsi_stmt (*gsi);
1482 tree str = gimple_call_arg (stmt, 0);
1483 tree c = gimple_call_arg (stmt, 1);
1484 location_t loc = gimple_location (stmt);
1485 const char *p;
1486 char ch;
1488 if (!gimple_call_lhs (stmt))
1489 return false;
1491 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1493 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1495 if (p1 == NULL)
1497 replace_call_with_value (gsi, integer_zero_node);
1498 return true;
1501 tree len = build_int_cst (size_type_node, p1 - p);
1502 gimple_seq stmts = NULL;
1503 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1504 POINTER_PLUS_EXPR, str, len);
1505 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1506 gsi_replace_with_seq_vops (gsi, stmts);
1507 return true;
1510 if (!integer_zerop (c))
1511 return false;
1513 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1514 if (optimize_function_for_size_p (cfun))
1516 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1518 if (is_strrchr && strchr_fn)
1520 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1521 replace_call_with_call_and_fold (gsi, repl);
1522 return true;
1525 return false;
1528 tree len;
1529 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1531 if (!strlen_fn)
1532 return false;
1534 /* Create newstr = strlen (str). */
1535 gimple_seq stmts = NULL;
1536 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1537 gimple_set_location (new_stmt, loc);
1538 if (gimple_in_ssa_p (cfun))
1539 len = make_ssa_name (size_type_node);
1540 else
1541 len = create_tmp_reg (size_type_node);
1542 gimple_call_set_lhs (new_stmt, len);
1543 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1545 /* Create (str p+ strlen (str)). */
1546 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1547 POINTER_PLUS_EXPR, str, len);
1548 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1549 gsi_replace_with_seq_vops (gsi, stmts);
1550 /* gsi now points at the assignment to the lhs, get a
1551 stmt iterator to the strlen.
1552 ??? We can't use gsi_for_stmt as that doesn't work when the
1553 CFG isn't built yet. */
1554 gimple_stmt_iterator gsi2 = *gsi;
1555 gsi_prev (&gsi2);
1556 fold_stmt (&gsi2);
1557 return true;
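/* Editorial sketch (not part of GCC): the strchr (s, 0) rewrite
   performed above at the source level; searching for the terminating
   NUL is the same as skipping to the end of the string.  The function
   name is hypothetical.  */
#if 0
#include <string.h>

char *
end_of (char *s)
{
  /* strchr (s, 0) becomes:  */
  return s + strlen (s);
}
#endif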
1560 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1561 to the call.
1563 Return false if no simplification was possible, otherwise true. On
1564 success the call is replaced in place by a more efficient sequence
1566 computing the same value, possibly including calls to other builtin
1567 functions such as strlen and memcpy. */
1578 static bool
1579 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1581 gimple *stmt = gsi_stmt (*gsi);
1582 location_t loc = gimple_location (stmt);
1584 const char *p = c_getstr (src);
1586 /* If the string length is zero, return the dst parameter. */
1587 if (p && *p == '\0')
1589 replace_call_with_value (gsi, dst);
1590 return true;
1593 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1594 return false;
1596 /* See if we can store by pieces into (dst + strlen(dst)). */
1597 tree newdst;
1598 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1599 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1601 if (!strlen_fn || !memcpy_fn)
1602 return false;
1604 /* If the length of the source string isn't computable don't
1605 split strcat into strlen and memcpy. */
1606 tree len = get_maxval_strlen (src, 0);
1607 if (! len)
1608 return false;
1610 /* Create strlen (dst). */
1611 gimple_seq stmts = NULL, stmts2;
1612 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1613 gimple_set_location (repl, loc);
1614 if (gimple_in_ssa_p (cfun))
1615 newdst = make_ssa_name (size_type_node);
1616 else
1617 newdst = create_tmp_reg (size_type_node);
1618 gimple_call_set_lhs (repl, newdst);
1619 gimple_seq_add_stmt_without_update (&stmts, repl);
1621 /* Create (dst p+ strlen (dst)). */
1622 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1623 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1624 gimple_seq_add_seq_without_update (&stmts, stmts2);
1626 len = fold_convert_loc (loc, size_type_node, len);
1627 len = size_binop_loc (loc, PLUS_EXPR, len,
1628 build_int_cst (size_type_node, 1));
1629 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1630 gimple_seq_add_seq_without_update (&stmts, stmts2);
1632 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1633 gimple_seq_add_stmt_without_update (&stmts, repl);
1634 if (gimple_call_lhs (stmt))
1636 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1637 gimple_seq_add_stmt_without_update (&stmts, repl);
1638 gsi_replace_with_seq_vops (gsi, stmts);
1639 /* gsi now points at the assignment to the lhs, get a
1640 stmt iterator to the memcpy call.
1641 ??? We can't use gsi_for_stmt as that doesn't work when the
1642 CFG isn't built yet. */
1643 gimple_stmt_iterator gsi2 = *gsi;
1644 gsi_prev (&gsi2);
1645 fold_stmt (&gsi2);
1647 else
1649 gsi_replace_with_seq_vops (gsi, stmts);
1650 fold_stmt (gsi);
1652 return true;
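/* Editorial sketch (not part of GCC): the strlen + memcpy split the
   strcat fold above produces when the source length is known.  The
   function name is hypothetical.  */
#if 0
#include <string.h>

char *
append_world (char *dst)
{
  /* strcat (dst, "world") becomes:  */
  size_t d = strlen (dst);
  memcpy (dst + d, "world", 5 + 1);
  return dst;
}
#endif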
1655 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
1656 are the arguments to the call. */
1658 static bool
1659 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1661 gimple *stmt = gsi_stmt (*gsi);
1662 tree dest = gimple_call_arg (stmt, 0);
1663 tree src = gimple_call_arg (stmt, 1);
1664 tree size = gimple_call_arg (stmt, 2);
1665 tree fn;
1666 const char *p;
1669 p = c_getstr (src);
1670 /* If the SRC parameter is "", return DEST. */
1671 if (p && *p == '\0')
1673 replace_call_with_value (gsi, dest);
1674 return true;
1677 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1678 return false;
1680 /* If __builtin_strcat_chk is used, assume strcat is available. */
1681 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1682 if (!fn)
1683 return false;
1685 gimple *repl = gimple_build_call (fn, 2, dest, src);
1686 replace_call_with_call_and_fold (gsi, repl);
1687 return true;
1690 /* Simplify a call to the strncat builtin. */
1692 static bool
1693 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1695 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1696 tree dst = gimple_call_arg (stmt, 0);
1697 tree src = gimple_call_arg (stmt, 1);
1698 tree len = gimple_call_arg (stmt, 2);
1700 const char *p = c_getstr (src);
1702 /* If the requested length is zero, or the src parameter string
1703 length is zero, return the dst parameter. */
1704 if (integer_zerop (len) || (p && *p == '\0'))
1706 replace_call_with_value (gsi, dst);
1707 return true;
1710 /* If the requested len is greater than or equal to the string
1711 length, call strcat. */
1712 if (TREE_CODE (len) == INTEGER_CST && p
1713 && compare_tree_int (len, strlen (p)) >= 0)
1715 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1717 /* If the replacement _DECL isn't initialized, don't do the
1718 transformation. */
1719 if (!fn)
1720 return false;
1722 gcall *repl = gimple_build_call (fn, 2, dst, src);
1723 replace_call_with_call_and_fold (gsi, repl);
1724 return true;
1727 return false;
1730 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1731 LEN, and SIZE. */
1733 static bool
1734 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1736 gimple *stmt = gsi_stmt (*gsi);
1737 tree dest = gimple_call_arg (stmt, 0);
1738 tree src = gimple_call_arg (stmt, 1);
1739 tree len = gimple_call_arg (stmt, 2);
1740 tree size = gimple_call_arg (stmt, 3);
1741 tree fn;
1742 const char *p;
1744 p = c_getstr (src);
1745 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1746 if ((p && *p == '\0')
1747 || integer_zerop (len))
1749 replace_call_with_value (gsi, dest);
1750 return true;
1753 if (! tree_fits_uhwi_p (size))
1754 return false;
1756 if (! integer_all_onesp (size))
1758 tree src_len = c_strlen (src, 1);
1759 if (src_len
1760 && tree_fits_uhwi_p (src_len)
1761 && tree_fits_uhwi_p (len)
1762 && ! tree_int_cst_lt (len, src_len))
1764 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1765 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1766 if (!fn)
1767 return false;
1769 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1770 replace_call_with_call_and_fold (gsi, repl);
1771 return true;
1773 return false;
1776 /* If __builtin_strncat_chk is used, assume strncat is available. */
1777 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1778 if (!fn)
1779 return false;
1781 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1782 replace_call_with_call_and_fold (gsi, repl);
1783 return true;
1786 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1787 to the call. UNLOCKED is true if this is actually a call to
1788 fputs_unlocked. Return false if no simplification
1791 was possible. */
1793 static bool
1794 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1795 tree arg0, tree arg1,
1796 bool unlocked)
1798 gimple *stmt = gsi_stmt (*gsi);
1800 /* If we're using an unlocked function, assume the other unlocked
1801 functions exist explicitly. */
1802 tree const fn_fputc = (unlocked
1803 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1804 : builtin_decl_implicit (BUILT_IN_FPUTC));
1805 tree const fn_fwrite = (unlocked
1806 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1807 : builtin_decl_implicit (BUILT_IN_FWRITE));
1809 /* If the return value is used, don't do the transformation. */
1810 if (gimple_call_lhs (stmt))
1811 return false;
1813 /* Get the length of the string passed to fputs. If the length
1814 can't be determined, punt. */
1815 tree len = get_maxval_strlen (arg0, 0);
1816 if (!len
1817 || TREE_CODE (len) != INTEGER_CST)
1818 return false;
1820 switch (compare_tree_int (len, 1))
1822 case -1: /* length is 0, delete the call entirely. */
1823 replace_call_with_value (gsi, integer_zero_node);
1824 return true;
1826 case 0: /* length is 1, call fputc. */
1828 const char *p = c_getstr (arg0);
1829 if (p != NULL)
1831 if (!fn_fputc)
1832 return false;
1834 gimple *repl = gimple_build_call (fn_fputc, 2,
1835 build_int_cst
1836 (integer_type_node, p[0]), arg1);
1837 replace_call_with_call_and_fold (gsi, repl);
1838 return true;
1841 /* FALLTHROUGH */
1842 case 1: /* length is greater than 1, call fwrite. */
1844 /* If optimizing for size keep fputs. */
1845 if (optimize_function_for_size_p (cfun))
1846 return false;
1847 /* New argument list transforming fputs(string, stream) to
1848 fwrite(string, 1, len, stream). */
1849 if (!fn_fwrite)
1850 return false;
1852 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1853 size_one_node, len, arg1);
1854 replace_call_with_call_and_fold (gsi, repl);
1855 return true;
1857 default:
1858 gcc_unreachable ();
1860 return false;
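/* Editorial sketch (not part of GCC): the three outcomes of the fputs
   fold above, keyed to the known string length.  The function name is
   hypothetical.  */
#if 0
#include <stdio.h>

void
fputs_demo (FILE *f)
{
  /* fputs ("", f) is deleted outright (length 0, result unused).  */
  fputc ('x', f);		/* fputs ("x", f): length 1.  */
  fwrite ("xy", 1, 2, f);	/* fputs ("xy", f): length > 1.  */
}
#endif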
1863 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1864 DEST, SRC, LEN, and SIZE are the arguments to the call.
1865 FCODE is the BUILT_IN_* code of the builtin. */
1869 static bool
1870 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1871 tree dest, tree src, tree len, tree size,
1872 enum built_in_function fcode)
1874 gimple *stmt = gsi_stmt (*gsi);
1875 location_t loc = gimple_location (stmt);
1876 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1877 tree fn;
1879 /* If SRC and DEST are the same (and not volatile), return DEST
1880 (resp. DEST+LEN for __mempcpy_chk). */
1881 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1883 if (fcode != BUILT_IN_MEMPCPY_CHK)
1885 replace_call_with_value (gsi, dest);
1886 return true;
1888 else
1890 gimple_seq stmts = NULL;
1891 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1892 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1893 TREE_TYPE (dest), dest, len);
1894 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1895 replace_call_with_value (gsi, temp);
1896 return true;
1900 if (! tree_fits_uhwi_p (size))
1901 return false;
1903 tree maxlen = get_maxval_strlen (len, 2);
1904 if (! integer_all_onesp (size))
1906 if (! tree_fits_uhwi_p (len))
1908 /* If LEN is not constant, try MAXLEN too.
1909 For MAXLEN only allow optimizing into the non-_chk function
1910 if SIZE is >= MAXLEN, never convert to __chk_fail (). */
1911 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1913 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1915 /* (void) __mempcpy_chk () can be optimized into
1916 (void) __memcpy_chk (). */
1917 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1918 if (!fn)
1919 return false;
1921 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1922 replace_call_with_call_and_fold (gsi, repl);
1923 return true;
1925 return false;
1928 else
1929 maxlen = len;
1931 if (tree_int_cst_lt (size, maxlen))
1932 return false;
1935 fn = NULL_TREE;
1936 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1937 mem{cpy,pcpy,move,set} is available. */
1938 switch (fcode)
1940 case BUILT_IN_MEMCPY_CHK:
1941 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1942 break;
1943 case BUILT_IN_MEMPCPY_CHK:
1944 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1945 break;
1946 case BUILT_IN_MEMMOVE_CHK:
1947 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1948 break;
1949 case BUILT_IN_MEMSET_CHK:
1950 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1951 break;
1952 default:
1953 break;
1956 if (!fn)
1957 return false;
1959 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1960 replace_call_with_call_and_fold (gsi, repl);
1961 return true;
1964 /* Fold a call to the __st[rp]cpy_chk builtin.
1965 DEST, SRC, and SIZE are the arguments to the call; whether the
1966 return value is ignored is determined from the call itself.
1967 FCODE is the BUILT_IN_* code of the builtin. Return false if no
1968 simplification was made. */
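/* For instance (an illustrative sketch):
     __strcpy_chk (d, "abc", 8)     -> strcpy (d, "abc")  [3 < 8]
     __strcpy_chk (d, s, -1)        -> strcpy (d, s)      [unchecked size]
     (void) __stpcpy_chk (d, s, sz) -> (void) __strcpy_chk (d, s, sz)
                                       [result unused, length unknown] */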
1970 static bool
1971 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1972 tree dest,
1973 tree src, tree size,
1974 enum built_in_function fcode)
1976 gimple *stmt = gsi_stmt (*gsi);
1977 location_t loc = gimple_location (stmt);
1978 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1979 tree len, fn;
1981 /* If SRC and DEST are the same (and not volatile), return DEST. */
1982 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1984 replace_call_with_value (gsi, dest);
1985 return true;
1988 if (! tree_fits_uhwi_p (size))
1989 return false;
1991 tree maxlen = get_maxval_strlen (src, 1);
1992 if (! integer_all_onesp (size))
1994 len = c_strlen (src, 1);
1995 if (! len || ! tree_fits_uhwi_p (len))
1997 /* If LEN is not constant, try MAXLEN too.
1998 For MAXLEN only allow optimizing into the non-_chk function
1999 if SIZE is >= MAXLEN, never convert to __chk_fail (). */
2000 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2002 if (fcode == BUILT_IN_STPCPY_CHK)
2004 if (! ignore)
2005 return false;
2007 /* If return value of __stpcpy_chk is ignored,
2008 optimize into __strcpy_chk. */
2009 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2010 if (!fn)
2011 return false;
2013 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2014 replace_call_with_call_and_fold (gsi, repl);
2015 return true;
2018 if (! len || TREE_SIDE_EFFECTS (len))
2019 return false;
2021 /* If c_strlen returned something, but not a constant,
2022 transform __strcpy_chk into __memcpy_chk. */
2023 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2024 if (!fn)
2025 return false;
2027 gimple_seq stmts = NULL;
2028 len = gimple_convert (&stmts, loc, size_type_node, len);
2029 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2030 build_int_cst (size_type_node, 1));
2031 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2032 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2033 replace_call_with_call_and_fold (gsi, repl);
2034 return true;
2037 else
2038 maxlen = len;
2040 if (! tree_int_cst_lt (maxlen, size))
2041 return false;
2044 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2045 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2046 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2047 if (!fn)
2048 return false;
2050 gimple *repl = gimple_build_call (fn, 2, dest, src);
2051 replace_call_with_call_and_fold (gsi, repl);
2052 return true;
2055 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2056 are the arguments to the call; whether the return value is ignored
2057 is determined from the call itself. FCODE is the BUILT_IN_* code of
2058 the builtin. Return false if no simplification was made. */
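/* For instance (an illustrative sketch):
     __strncpy_chk (d, s, 5, 8)         -> strncpy (d, s, 5)  [8 >= 5]
     (void) __stpncpy_chk (d, s, n, sz) -> __strncpy_chk (d, s, n, sz) */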
2060 static bool
2061 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2062 tree dest, tree src,
2063 tree len, tree size,
2064 enum built_in_function fcode)
2066 gimple *stmt = gsi_stmt (*gsi);
2067 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2068 tree fn;
2070 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2072 /* If return value of __stpncpy_chk is ignored,
2073 optimize into __strncpy_chk. */
2074 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2075 if (fn)
2077 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2078 replace_call_with_call_and_fold (gsi, repl);
2079 return true;
2083 if (! tree_fits_uhwi_p (size))
2084 return false;
2086 tree maxlen = get_maxval_strlen (len, 2);
2087 if (! integer_all_onesp (size))
2089 if (! tree_fits_uhwi_p (len))
2091 /* If LEN is not constant, try MAXLEN too.
2092 For MAXLEN only allow optimizing into the non-_chk function
2093 if SIZE is >= MAXLEN, never convert to __chk_fail (). */
2094 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2095 return false;
2097 else
2098 maxlen = len;
2100 if (tree_int_cst_lt (size, maxlen))
2101 return false;
2104 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2105 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2106 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2107 if (!fn)
2108 return false;
2110 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2111 replace_call_with_call_and_fold (gsi, repl);
2112 return true;
2115 /* Fold a call to the stpcpy builtin; DEST and SRC are taken from the
2116 call. Return false if no simplification can be made. */
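/* Sketch of the folds below, assuming strlen (s) == 3 is known
   (names are illustrative):
     (void) stpcpy (d, s);  -> strcpy (d, s);
     p = stpcpy (d, s);     -> memcpy (d, s, 4);  p = d + 3; */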
2118 static bool
2119 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2121 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2122 location_t loc = gimple_location (stmt);
2123 tree dest = gimple_call_arg (stmt, 0);
2124 tree src = gimple_call_arg (stmt, 1);
2125 tree fn, len, lenp1;
2127 /* If the result is unused, replace stpcpy with strcpy. */
2128 if (gimple_call_lhs (stmt) == NULL_TREE)
2130 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2131 if (!fn)
2132 return false;
2133 gimple_call_set_fndecl (stmt, fn);
2134 fold_stmt (gsi);
2135 return true;
2138 len = c_strlen (src, 1);
2139 if (!len
2140 || TREE_CODE (len) != INTEGER_CST)
2141 return false;
2143 if (optimize_function_for_size_p (cfun)
2144 /* If length is zero it's small enough. */
2145 && !integer_zerop (len))
2146 return false;
2148 /* If the source has a known length, replace stpcpy with memcpy. */
2149 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2150 if (!fn)
2151 return false;
2153 gimple_seq stmts = NULL;
2154 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2155 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2156 tem, build_int_cst (size_type_node, 1));
2157 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2158 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2159 gimple_set_vuse (repl, gimple_vuse (stmt));
2160 gimple_set_vdef (repl, gimple_vdef (stmt));
2161 if (gimple_vdef (repl)
2162 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2163 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2164 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2165 /* Replace the result with dest + len. */
2166 stmts = NULL;
2167 tem = gimple_convert (&stmts, loc, sizetype, len);
2168 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2169 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2170 POINTER_PLUS_EXPR, dest, tem);
2171 gsi_replace (gsi, ret, false);
2172 /* Finally fold the memcpy call. */
2173 gimple_stmt_iterator gsi2 = *gsi;
2174 gsi_prev (&gsi2);
2175 fold_stmt (&gsi2);
2176 return true;
2179 /* Fold a call to __{,v}snprintf_chk at *GSI. Return false if a
2180 normal call should be emitted rather than folding the call.
2181 FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK. */
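/* For example (illustrative):
     __snprintf_chk (d, 8, 0, 16, "%s", s)
       -> snprintf (d, 8, "%s", s)   [16 >= 8 and flag is 0] */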
2185 static bool
2186 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2187 enum built_in_function fcode)
2189 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2190 tree dest, size, len, fn, fmt, flag;
2191 const char *fmt_str;
2193 /* Verify the required arguments in the original call. */
2194 if (gimple_call_num_args (stmt) < 5)
2195 return false;
2197 dest = gimple_call_arg (stmt, 0);
2198 len = gimple_call_arg (stmt, 1);
2199 flag = gimple_call_arg (stmt, 2);
2200 size = gimple_call_arg (stmt, 3);
2201 fmt = gimple_call_arg (stmt, 4);
2203 if (! tree_fits_uhwi_p (size))
2204 return false;
2206 if (! integer_all_onesp (size))
2208 tree maxlen = get_maxval_strlen (len, 2);
2209 if (! tree_fits_uhwi_p (len))
2211 /* If LEN is not constant, try MAXLEN too.
2212 For MAXLEN only allow optimizing into the non-_chk function
2213 if SIZE is >= MAXLEN, never convert to __chk_fail (). */
2214 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2215 return false;
2217 else
2218 maxlen = len;
2220 if (tree_int_cst_lt (size, maxlen))
2221 return false;
2224 if (!init_target_chars ())
2225 return false;
2227 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2228 or if format doesn't contain % chars or is "%s". */
2229 if (! integer_zerop (flag))
2231 fmt_str = c_getstr (fmt);
2232 if (fmt_str == NULL)
2233 return false;
2234 if (strchr (fmt_str, target_percent) != NULL
2235 && strcmp (fmt_str, target_percent_s))
2236 return false;
2239 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2240 available. */
2241 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2242 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2243 if (!fn)
2244 return false;
2246 /* Replace the called function and the first 5 arguments by 3,
2247 retaining any trailing varargs. */
2248 gimple_call_set_fndecl (stmt, fn);
2249 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2250 gimple_call_set_arg (stmt, 0, dest);
2251 gimple_call_set_arg (stmt, 1, len);
2252 gimple_call_set_arg (stmt, 2, fmt);
2253 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2254 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2255 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2256 fold_stmt (gsi);
2257 return true;
2260 /* Fold a call to __{,v}sprintf_chk at *GSI. Return false if a normal
2261 call should be emitted rather than folding the call. FCODE is
2262 either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
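/* For example (illustrative):
     __sprintf_chk (d, 0, 32, "abc")
       -> sprintf (d, "abc")   [strlen ("abc") == 3 < 32, flag is 0] */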
2265 static bool
2266 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2267 enum built_in_function fcode)
2269 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2270 tree dest, size, len, fn, fmt, flag;
2271 const char *fmt_str;
2272 unsigned nargs = gimple_call_num_args (stmt);
2274 /* Verify the required arguments in the original call. */
2275 if (nargs < 4)
2276 return false;
2277 dest = gimple_call_arg (stmt, 0);
2278 flag = gimple_call_arg (stmt, 1);
2279 size = gimple_call_arg (stmt, 2);
2280 fmt = gimple_call_arg (stmt, 3);
2282 if (! tree_fits_uhwi_p (size))
2283 return false;
2285 len = NULL_TREE;
2287 if (!init_target_chars ())
2288 return false;
2290 /* Check whether the format is a literal string constant. */
2291 fmt_str = c_getstr (fmt);
2292 if (fmt_str != NULL)
2294 /* If the format doesn't contain % args or %%, we know the size. */
2295 if (strchr (fmt_str, target_percent) == 0)
2297 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2298 len = build_int_cstu (size_type_node, strlen (fmt_str));
2300 /* If the format is "%s" and first ... argument is a string literal,
2301 we know the size too. */
2302 else if (fcode == BUILT_IN_SPRINTF_CHK
2303 && strcmp (fmt_str, target_percent_s) == 0)
2305 tree arg;
2307 if (nargs == 5)
2309 arg = gimple_call_arg (stmt, 4);
2310 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2312 len = c_strlen (arg, 1);
2313 if (! len || ! tree_fits_uhwi_p (len))
2314 len = NULL_TREE;
2320 if (! integer_all_onesp (size))
2322 if (! len || ! tree_int_cst_lt (len, size))
2323 return false;
2326 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2327 or if format doesn't contain % chars or is "%s". */
2328 if (! integer_zerop (flag))
2330 if (fmt_str == NULL)
2331 return false;
2332 if (strchr (fmt_str, target_percent) != NULL
2333 && strcmp (fmt_str, target_percent_s))
2334 return false;
2337 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2338 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2339 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2340 if (!fn)
2341 return false;
2343 /* Replace the called function and the first 4 arguments by 2,
2344 retaining any trailing varargs. */
2345 gimple_call_set_fndecl (stmt, fn);
2346 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2347 gimple_call_set_arg (stmt, 0, dest);
2348 gimple_call_set_arg (stmt, 1, fmt);
2349 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2350 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2351 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2352 fold_stmt (gsi);
2353 return true;
2356 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2357 ORIG may be null if this is a 2-argument call. We don't attempt to
2358 simplify calls with more than 3 arguments.
2360 Return true if the call was simplified in place, false if no
2361 simplification was possible. */
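/* Sketch of the two folds handled below (names are illustrative):
     sprintf (d, "hello");  -> strcpy (d, "hello");  [result, if used, is 5]
     sprintf (d, "%s", s);  -> strcpy (d, s); */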
2364 static bool
2365 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2367 gimple *stmt = gsi_stmt (*gsi);
2368 tree dest = gimple_call_arg (stmt, 0);
2369 tree fmt = gimple_call_arg (stmt, 1);
2370 tree orig = NULL_TREE;
2371 const char *fmt_str = NULL;
2373 /* Verify the required arguments in the original call. We deal with two
2374 types of sprintf() calls: 'sprintf (str, fmt)' and
2375 'sprintf (dest, "%s", orig)'. */
2376 if (gimple_call_num_args (stmt) > 3)
2377 return false;
2379 if (gimple_call_num_args (stmt) == 3)
2380 orig = gimple_call_arg (stmt, 2);
2382 /* Check whether the format is a literal string constant. */
2383 fmt_str = c_getstr (fmt);
2384 if (fmt_str == NULL)
2385 return false;
2387 if (!init_target_chars ())
2388 return false;
2390 /* If the format doesn't contain % args or %%, use strcpy. */
2391 if (strchr (fmt_str, target_percent) == NULL)
2393 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2395 if (!fn)
2396 return false;
2398 /* Don't optimize sprintf (buf, "abc", ptr++). */
2399 if (orig)
2400 return false;
2402 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2403 'format' is known to contain no % formats. */
2404 gimple_seq stmts = NULL;
2405 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2406 gimple_seq_add_stmt_without_update (&stmts, repl);
2407 if (gimple_call_lhs (stmt))
2409 repl = gimple_build_assign (gimple_call_lhs (stmt),
2410 build_int_cst (integer_type_node,
2411 strlen (fmt_str)));
2412 gimple_seq_add_stmt_without_update (&stmts, repl);
2413 gsi_replace_with_seq_vops (gsi, stmts);
2414 /* gsi now points at the assignment to the lhs, get a
2415 stmt iterator to the strcpy call.
2416 ??? We can't use gsi_for_stmt as that doesn't work when the
2417 CFG isn't built yet. */
2418 gimple_stmt_iterator gsi2 = *gsi;
2419 gsi_prev (&gsi2);
2420 fold_stmt (&gsi2);
2422 else
2424 gsi_replace_with_seq_vops (gsi, stmts);
2425 fold_stmt (gsi);
2427 return true;
2430 /* If the format is "%s", use strcpy; the source length must be known if the result is used. */
2431 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2433 tree fn;
2434 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2436 if (!fn)
2437 return false;
2439 /* Don't crash on sprintf (str1, "%s"). */
2440 if (!orig)
2441 return false;
2443 tree orig_len = NULL_TREE;
2444 if (gimple_call_lhs (stmt))
2446 orig_len = get_maxval_strlen (orig, 0);
2447 if (!orig_len)
2448 return false;
2451 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2452 gimple_seq stmts = NULL;
2453 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2454 gimple_seq_add_stmt_without_update (&stmts, repl);
2455 if (gimple_call_lhs (stmt))
2457 if (!useless_type_conversion_p (integer_type_node,
2458 TREE_TYPE (orig_len)))
2459 orig_len = fold_convert (integer_type_node, orig_len);
2460 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2461 gimple_seq_add_stmt_without_update (&stmts, repl);
2462 gsi_replace_with_seq_vops (gsi, stmts);
2463 /* gsi now points at the assignment to the lhs, get a
2464 stmt iterator to the strcpy call.
2465 ??? We can't use gsi_for_stmt as that doesn't work when the
2466 CFG isn't built yet. */
2467 gimple_stmt_iterator gsi2 = *gsi;
2468 gsi_prev (&gsi2);
2469 fold_stmt (&gsi2);
2471 else
2473 gsi_replace_with_seq_vops (gsi, stmts);
2474 fold_stmt (gsi);
2476 return true;
2478 return false;
2481 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2482 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2483 attempt to simplify calls with more than 4 arguments.
2485 Return true if the call was simplified in place, false if no
2486 simplification was possible. */
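/* Sketch (illustrative): with a constant DESTSIZE of 8,
     snprintf (d, 8, "abc");    -> strcpy (d, "abc");  [3 < 8]
     snprintf (d, 8, "%s", s);  -> strcpy (d, s);      [known strlen (s) < 8] */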
2489 static bool
2490 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2492 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2493 tree dest = gimple_call_arg (stmt, 0);
2494 tree destsize = gimple_call_arg (stmt, 1);
2495 tree fmt = gimple_call_arg (stmt, 2);
2496 tree orig = NULL_TREE;
2497 const char *fmt_str = NULL;
2499 if (gimple_call_num_args (stmt) > 4)
2500 return false;
2502 if (gimple_call_num_args (stmt) == 4)
2503 orig = gimple_call_arg (stmt, 3);
2505 if (!tree_fits_uhwi_p (destsize))
2506 return false;
2507 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2509 /* Check whether the format is a literal string constant. */
2510 fmt_str = c_getstr (fmt);
2511 if (fmt_str == NULL)
2512 return false;
2514 if (!init_target_chars ())
2515 return false;
2517 /* If the format doesn't contain % args or %%, use strcpy. */
2518 if (strchr (fmt_str, target_percent) == NULL)
2520 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2521 if (!fn)
2522 return false;
2524 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2525 if (orig)
2526 return false;
2528 /* We could expand this as
2529 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2530 or to
2531 memcpy (str, fmt_with_nul_at_cstm1, cst);
2532 but in the former case that might increase code size
2533 and in the latter case grow .rodata section too much.
2534 So punt for now. */
2535 size_t len = strlen (fmt_str);
2536 if (len >= destlen)
2537 return false;
2539 gimple_seq stmts = NULL;
2540 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2541 gimple_seq_add_stmt_without_update (&stmts, repl);
2542 if (gimple_call_lhs (stmt))
2544 repl = gimple_build_assign (gimple_call_lhs (stmt),
2545 build_int_cst (integer_type_node, len));
2546 gimple_seq_add_stmt_without_update (&stmts, repl);
2547 gsi_replace_with_seq_vops (gsi, stmts);
2548 /* gsi now points at the assignment to the lhs, get a
2549 stmt iterator to the strcpy call.
2550 ??? We can't use gsi_for_stmt as that doesn't work when the
2551 CFG isn't built yet. */
2552 gimple_stmt_iterator gsi2 = *gsi;
2553 gsi_prev (&gsi2);
2554 fold_stmt (&gsi2);
2556 else
2558 gsi_replace_with_seq_vops (gsi, stmts);
2559 fold_stmt (gsi);
2561 return true;
2564 /* If the format is "%s", use strcpy if the source length is known and smaller than DESTSIZE. */
2565 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2567 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2568 if (!fn)
2569 return false;
2571 /* Don't crash on snprintf (str1, cst, "%s"). */
2572 if (!orig)
2573 return false;
2575 tree orig_len = get_maxval_strlen (orig, 0);
2576 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2577 return false;
2579 /* We could expand this as
2580 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2581 or to
2582 memcpy (str1, str2_with_nul_at_cstm1, cst);
2583 but in the former case that might increase code size
2584 and in the latter case grow .rodata section too much.
2585 So punt for now. */
2586 if (compare_tree_int (orig_len, destlen) >= 0)
2587 return false;
2589 /* Convert snprintf (str1, cst, "%s", str2) into
2590 strcpy (str1, str2) if strlen (str2) < cst. */
2591 gimple_seq stmts = NULL;
2592 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2593 gimple_seq_add_stmt_without_update (&stmts, repl);
2594 if (gimple_call_lhs (stmt))
2596 if (!useless_type_conversion_p (integer_type_node,
2597 TREE_TYPE (orig_len)))
2598 orig_len = fold_convert (integer_type_node, orig_len);
2599 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2600 gimple_seq_add_stmt_without_update (&stmts, repl);
2601 gsi_replace_with_seq_vops (gsi, stmts);
2602 /* gsi now points at the assignment to the lhs, get a
2603 stmt iterator to the strcpy call.
2604 ??? We can't use gsi_for_stmt as that doesn't work when the
2605 CFG isn't built yet. */
2606 gimple_stmt_iterator gsi2 = *gsi;
2607 gsi_prev (&gsi2);
2608 fold_stmt (&gsi2);
2610 else
2612 gsi_replace_with_seq_vops (gsi, stmts);
2613 fold_stmt (gsi);
2615 return true;
2617 return false;
2620 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
2621 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2622 more than 3 arguments, and ARG may be null in the 2-argument case.
2624 Return true if the call was simplified in place, false if no
2625 simplification was possible. FCODE is the BUILT_IN_*
2626 code of the function to be simplified. */
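/* Sketch of the folds below (names are illustrative):
     fprintf (fp, "");       -> call removed
     fprintf (fp, "abc");    -> fputs ("abc", fp);
     fprintf (fp, "%s", s);  -> fputs (s, fp);
     fprintf (fp, "%c", c);  -> fputc (c, fp);
   all assuming the fprintf return value is unused. */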
2628 static bool
2629 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2630 tree fp, tree fmt, tree arg,
2631 enum built_in_function fcode)
2633 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2634 tree fn_fputc, fn_fputs;
2635 const char *fmt_str = NULL;
2637 /* If the return value is used, don't do the transformation. */
2638 if (gimple_call_lhs (stmt) != NULL_TREE)
2639 return false;
2641 /* Check whether the format is a literal string constant. */
2642 fmt_str = c_getstr (fmt);
2643 if (fmt_str == NULL)
2644 return false;
2646 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2648 /* If we're using an unlocked function, assume the other
2649 unlocked functions exist explicitly. */
2650 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2651 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2653 else
2655 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2656 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2659 if (!init_target_chars ())
2660 return false;
2662 /* If the format doesn't contain % args or %%, use fputs. */
2663 if (strchr (fmt_str, target_percent) == NULL)
2665 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2666 && arg)
2667 return false;
2669 /* If the format specifier was "", fprintf does nothing. */
2670 if (fmt_str[0] == '\0')
2672 replace_call_with_value (gsi, NULL_TREE);
2673 return true;
2676 /* When "string" doesn't contain %, replace all cases of
2677 fprintf (fp, string) with fputs (string, fp). The fputs
2678 builtin will take care of special cases like length == 1. */
2679 if (fn_fputs)
2681 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2682 replace_call_with_call_and_fold (gsi, repl);
2683 return true;
2687 /* The other optimizations can be done only on the non-va_list variants. */
2688 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2689 return false;
2691 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2692 else if (strcmp (fmt_str, target_percent_s) == 0)
2694 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2695 return false;
2696 if (fn_fputs)
2698 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2699 replace_call_with_call_and_fold (gsi, repl);
2700 return true;
2704 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2705 else if (strcmp (fmt_str, target_percent_c) == 0)
2707 if (!arg
2708 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2709 return false;
2710 if (fn_fputc)
2712 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2713 replace_call_with_call_and_fold (gsi, repl);
2714 return true;
2718 return false;
2721 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2722 FMT and ARG are the arguments to the call; we don't fold cases with
2723 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2725 Return true if the call was simplified in place, false if no
2726 simplification was possible. FCODE is the BUILT_IN_*
2727 code of the function to be simplified. */
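/* Sketch of the folds below (names are illustrative):
     printf ("");         -> call removed
     printf ("x");        -> putchar ('x');
     printf ("abc\n");    -> puts ("abc");
     printf ("%s\n", s);  -> puts (s);
     printf ("%c", c);    -> putchar (c);
   all assuming the printf return value is unused. */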
2729 static bool
2730 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2731 tree arg, enum built_in_function fcode)
2733 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2734 tree fn_putchar, fn_puts, newarg;
2735 const char *fmt_str = NULL;
2737 /* If the return value is used, don't do the transformation. */
2738 if (gimple_call_lhs (stmt) != NULL_TREE)
2739 return false;
2741 /* Check whether the format is a literal string constant. */
2742 fmt_str = c_getstr (fmt);
2743 if (fmt_str == NULL)
2744 return false;
2746 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2748 /* If we're using an unlocked function, assume the other
2749 unlocked functions exist explicitly. */
2750 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2751 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2753 else
2755 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2756 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2759 if (!init_target_chars ())
2760 return false;
2762 if (strcmp (fmt_str, target_percent_s) == 0
2763 || strchr (fmt_str, target_percent) == NULL)
2765 const char *str;
2767 if (strcmp (fmt_str, target_percent_s) == 0)
2769 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2770 return false;
2772 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2773 return false;
2775 str = c_getstr (arg);
2776 if (str == NULL)
2777 return false;
2779 else
2781 /* The format specifier doesn't contain any '%' characters. */
2782 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2783 && arg)
2784 return false;
2785 str = fmt_str;
2788 /* If the string was "", printf does nothing. */
2789 if (str[0] == '\0')
2791 replace_call_with_value (gsi, NULL_TREE);
2792 return true;
2795 /* If the string has length of 1, call putchar. */
2796 if (str[1] == '\0')
2798 /* Given printf ("c"), where c is any single character,
2799 convert "c"[0] to an int and pass that to the replacement
2800 function. */
2801 newarg = build_int_cst (integer_type_node, str[0]);
2802 if (fn_putchar)
2804 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2805 replace_call_with_call_and_fold (gsi, repl);
2806 return true;
2809 else
2811 /* If the string was "string\n", call puts("string"). */
2812 size_t len = strlen (str);
2813 if ((unsigned char)str[len - 1] == target_newline
2814 && (size_t) (int) len == len
2815 && (int) len > 0)
2817 char *newstr;
2818 tree offset_node, string_cst;
2820 /* Create a NUL-terminated string that's one char shorter
2821 than the original, stripping off the trailing '\n'. */
2822 newarg = build_string_literal (len, str);
2823 string_cst = string_constant (newarg, &offset_node);
2824 gcc_checking_assert (string_cst
2825 && (TREE_STRING_LENGTH (string_cst)
2826 == (int) len)
2827 && integer_zerop (offset_node)
2828 && (unsigned char)
2829 TREE_STRING_POINTER (string_cst)[len - 1]
2830 == target_newline);
2831 /* build_string_literal creates a new STRING_CST,
2832 modify it in place to avoid double copying. */
2833 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2834 newstr[len - 1] = '\0';
2835 if (fn_puts)
2837 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2838 replace_call_with_call_and_fold (gsi, repl);
2839 return true;
2842 else
2843 /* We'd like to arrange to call fputs(string,stdout) here,
2844 but we need stdout and don't have a way to get it yet. */
2845 return false;
2849 /* The other optimizations can be done only on the non-va_list variants. */
2850 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2851 return false;
2853 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2854 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2856 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2857 return false;
2858 if (fn_puts)
2860 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2861 replace_call_with_call_and_fold (gsi, repl);
2862 return true;
2866 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2867 else if (strcmp (fmt_str, target_percent_c) == 0)
2869 if (!arg || ! useless_type_conversion_p (integer_type_node,
2870 TREE_TYPE (arg)))
2871 return false;
2872 if (fn_putchar)
2874 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2875 replace_call_with_call_and_fold (gsi, repl);
2876 return true;
2880 return false;
2885 /* Fold a call to __builtin_strlen if the length of its argument can be computed. */
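/* E.g. n = strlen ("abc"); folds to n = 3; whenever get_maxval_strlen
   can compute the length (illustrative). */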
2887 static bool
2888 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2890 gimple *stmt = gsi_stmt (*gsi);
2891 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2892 if (!len)
2893 return false;
2894 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2895 replace_call_with_value (gsi, len);
2896 return true;
2899 /* Fold a call to __builtin_acc_on_device. */
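/* Roughly, acc_on_device (dev) is folded to
     (dev == val_host) | (dev == val_dev)
   where the two constants depend on whether this is the host or the
   accelerator compiler (see below). */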
2901 static bool
2902 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2904 /* Defer folding until we know whether we're in the host or the accelerator compiler. */
2905 if (symtab->state != EXPANSION)
2906 return false;
2908 unsigned val_host = GOMP_DEVICE_HOST;
2909 unsigned val_dev = GOMP_DEVICE_NONE;
2911 #ifdef ACCEL_COMPILER
2912 val_host = GOMP_DEVICE_NOT_HOST;
2913 val_dev = ACCEL_COMPILER_acc_device;
2914 #endif
2916 location_t loc = gimple_location (gsi_stmt (*gsi));
2918 tree host_eq = make_ssa_name (boolean_type_node);
2919 gimple *host_ass = gimple_build_assign
2920 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2921 gimple_set_location (host_ass, loc);
2922 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2924 tree dev_eq = make_ssa_name (boolean_type_node);
2925 gimple *dev_ass = gimple_build_assign
2926 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2927 gimple_set_location (dev_ass, loc);
2928 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2930 tree result = make_ssa_name (boolean_type_node);
2931 gimple *result_ass = gimple_build_assign
2932 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2933 gimple_set_location (result_ass, loc);
2934 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2936 replace_call_with_value (gsi, result);
2938 return true;
2941 /* Fold the non-target builtin at *GSI and return whether any simplification
2942 was made. */
2944 static bool
2945 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2947 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2948 tree callee = gimple_call_fndecl (stmt);
2950 /* Give up for always_inline inline builtins until they are
2951 inlined. */
2952 if (avoid_folding_inline_builtin (callee))
2953 return false;
2955 unsigned n = gimple_call_num_args (stmt);
2956 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2957 switch (fcode)
2959 case BUILT_IN_BZERO:
2960 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2961 gimple_call_arg (stmt, 1));
2962 case BUILT_IN_MEMSET:
2963 return gimple_fold_builtin_memset (gsi,
2964 gimple_call_arg (stmt, 1),
2965 gimple_call_arg (stmt, 2));
2966 case BUILT_IN_BCOPY:
2967 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2968 gimple_call_arg (stmt, 0), 3);
2969 case BUILT_IN_MEMCPY:
2970 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2971 gimple_call_arg (stmt, 1), 0);
2972 case BUILT_IN_MEMPCPY:
2973 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2974 gimple_call_arg (stmt, 1), 1);
2975 case BUILT_IN_MEMMOVE:
2976 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2977 gimple_call_arg (stmt, 1), 3);
2978 case BUILT_IN_SPRINTF_CHK:
2979 case BUILT_IN_VSPRINTF_CHK:
2980 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2981 case BUILT_IN_STRCAT_CHK:
2982 return gimple_fold_builtin_strcat_chk (gsi);
2983 case BUILT_IN_STRNCAT_CHK:
2984 return gimple_fold_builtin_strncat_chk (gsi);
2985 case BUILT_IN_STRLEN:
2986 return gimple_fold_builtin_strlen (gsi);
2987 case BUILT_IN_STRCPY:
2988 return gimple_fold_builtin_strcpy (gsi,
2989 gimple_call_arg (stmt, 0),
2990 gimple_call_arg (stmt, 1));
2991 case BUILT_IN_STRNCPY:
2992 return gimple_fold_builtin_strncpy (gsi,
2993 gimple_call_arg (stmt, 0),
2994 gimple_call_arg (stmt, 1),
2995 gimple_call_arg (stmt, 2));
2996 case BUILT_IN_STRCAT:
2997 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2998 gimple_call_arg (stmt, 1));
2999 case BUILT_IN_STRNCAT:
3000 return gimple_fold_builtin_strncat (gsi);
3001 case BUILT_IN_INDEX:
3002 case BUILT_IN_STRCHR:
3003 return gimple_fold_builtin_strchr (gsi, false);
3004 case BUILT_IN_RINDEX:
3005 case BUILT_IN_STRRCHR:
3006 return gimple_fold_builtin_strchr (gsi, true);
3007 case BUILT_IN_FPUTS:
3008 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3009 gimple_call_arg (stmt, 1), false);
3010 case BUILT_IN_FPUTS_UNLOCKED:
3011 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3012 gimple_call_arg (stmt, 1), true);
3013 case BUILT_IN_MEMCPY_CHK:
3014 case BUILT_IN_MEMPCPY_CHK:
3015 case BUILT_IN_MEMMOVE_CHK:
3016 case BUILT_IN_MEMSET_CHK:
3017 return gimple_fold_builtin_memory_chk (gsi,
3018 gimple_call_arg (stmt, 0),
3019 gimple_call_arg (stmt, 1),
3020 gimple_call_arg (stmt, 2),
3021 gimple_call_arg (stmt, 3),
3022 fcode);
3023 case BUILT_IN_STPCPY:
3024 return gimple_fold_builtin_stpcpy (gsi);
3025 case BUILT_IN_STRCPY_CHK:
3026 case BUILT_IN_STPCPY_CHK:
3027 return gimple_fold_builtin_stxcpy_chk (gsi,
3028 gimple_call_arg (stmt, 0),
3029 gimple_call_arg (stmt, 1),
3030 gimple_call_arg (stmt, 2),
3031 fcode);
3032 case BUILT_IN_STRNCPY_CHK:
3033 case BUILT_IN_STPNCPY_CHK:
3034 return gimple_fold_builtin_stxncpy_chk (gsi,
3035 gimple_call_arg (stmt, 0),
3036 gimple_call_arg (stmt, 1),
3037 gimple_call_arg (stmt, 2),
3038 gimple_call_arg (stmt, 3),
3039 fcode);
3040 case BUILT_IN_SNPRINTF_CHK:
3041 case BUILT_IN_VSNPRINTF_CHK:
3042 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3043 case BUILT_IN_SNPRINTF:
3044 return gimple_fold_builtin_snprintf (gsi);
3045 case BUILT_IN_SPRINTF:
3046 return gimple_fold_builtin_sprintf (gsi);
3047 case BUILT_IN_FPRINTF:
3048 case BUILT_IN_FPRINTF_UNLOCKED:
3049 case BUILT_IN_VFPRINTF:
3050 if (n == 2 || n == 3)
3051 return gimple_fold_builtin_fprintf (gsi,
3052 gimple_call_arg (stmt, 0),
3053 gimple_call_arg (stmt, 1),
3054 n == 3
3055 ? gimple_call_arg (stmt, 2)
3056 : NULL_TREE,
3057 fcode);
3058 break;
3059 case BUILT_IN_FPRINTF_CHK:
3060 case BUILT_IN_VFPRINTF_CHK:
3061 if (n == 3 || n == 4)
3062 return gimple_fold_builtin_fprintf (gsi,
3063 gimple_call_arg (stmt, 0),
3064 gimple_call_arg (stmt, 2),
3065 n == 4
3066 ? gimple_call_arg (stmt, 3)
3067 : NULL_TREE,
3068 fcode);
3069 break;
3070 case BUILT_IN_PRINTF:
3071 case BUILT_IN_PRINTF_UNLOCKED:
3072 case BUILT_IN_VPRINTF:
3073 if (n == 1 || n == 2)
3074 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3075 n == 2
3076 ? gimple_call_arg (stmt, 1)
3077 : NULL_TREE, fcode);
3078 break;
3079 case BUILT_IN_PRINTF_CHK:
3080 case BUILT_IN_VPRINTF_CHK:
3081 if (n == 2 || n == 3)
3082 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3083 n == 3
3084 ? gimple_call_arg (stmt, 2)
3085 : NULL_TREE, fcode);
3086 break;
3087 case BUILT_IN_ACC_ON_DEVICE:
3088 return gimple_fold_builtin_acc_on_device (gsi,
3089 gimple_call_arg (stmt, 0));
3090 default:;
3093 /* Try the generic builtin folder. */
3094 bool ignore = (gimple_call_lhs (stmt) == NULL);
3095 tree result = fold_call_stmt (stmt, ignore);
3096 if (result)
3098 if (ignore)
3099 STRIP_NOPS (result);
3100 else
3101 result = fold_convert (gimple_call_return_type (stmt), result);
3102 if (!update_call_from_tree (gsi, result))
3103 gimplify_and_update_call_from_tree (gsi, result);
3104 return true;
3107 return false;
3110 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3111 function calls to constants, where possible. */
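/* E.g. if an axis has a compile-time size of 32, IFN_GOACC_DIM_SIZE
   for it folds to 32; if an axis has size 1, IFN_GOACC_DIM_POS on it
   folds to 0 (illustrative). */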
3113 static tree
3114 fold_internal_goacc_dim (const gimple *call)
3116 int axis = get_oacc_ifn_dim_arg (call);
3117 int size = get_oacc_fn_dim_size (current_function_decl, axis);
3118 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3119 tree result = NULL_TREE;
3121 /* If the size is 1, or we only want the size and it is not dynamic,
3122 we know the answer. */
3123 if (size == 1 || (!is_pos && size))
3125 tree type = TREE_TYPE (gimple_call_lhs (call));
3126 result = build_int_cst (type, size - is_pos);
3129 return result;
3132 /* Return true if STMT is an __atomic_compare_exchange_N call that is
3133 suitable for conversion into ATOMIC_COMPARE_EXCHANGE when its second
3134 argument is &var and var is only addressable because of such calls. */
3136 bool
3137 optimize_atomic_compare_exchange_p (gimple *stmt)
3139 if (gimple_call_num_args (stmt) != 6
3140 || !flag_inline_atomics
3141 || !optimize
3142 || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
3143 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3144 || !gimple_vdef (stmt)
3145 || !gimple_vuse (stmt))
3146 return false;
3148 tree fndecl = gimple_call_fndecl (stmt);
3149 switch (DECL_FUNCTION_CODE (fndecl))
3151 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3152 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3153 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3154 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3155 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3156 break;
3157 default:
3158 return false;
3161 tree expected = gimple_call_arg (stmt, 1);
3162 if (TREE_CODE (expected) != ADDR_EXPR
3163 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3164 return false;
3166 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3167 if (!is_gimple_reg_type (etype)
3168 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3169 || TREE_THIS_VOLATILE (etype)
3170 || VECTOR_TYPE_P (etype)
3171 || TREE_CODE (etype) == COMPLEX_TYPE
3172 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3173 might not preserve all the bits. See PR71716. */
3174 || SCALAR_FLOAT_TYPE_P (etype)
3175 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3176 return false;
3178 tree weak = gimple_call_arg (stmt, 3);
3179 if (!integer_zerop (weak) && !integer_onep (weak))
3180 return false;
3182 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3183 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3184 machine_mode mode = TYPE_MODE (itype);
3186 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3187 == CODE_FOR_nothing
3188 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3189 return false;
3191 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3192 return false;
3194 return true;
3197 /* Fold
3198 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3199 into
3200 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3201 i = IMAGPART_EXPR <t>;
3202 r = (_Bool) i;
3203 e = REALPART_EXPR <t>; */
3205 void
3206 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3208 gimple *stmt = gsi_stmt (*gsi);
3209 tree fndecl = gimple_call_fndecl (stmt);
3210 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3211 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3212 tree ctype = build_complex_type (itype);
3213 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3214 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3215 expected);
3216 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3217 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3218 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3220 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3221 build1 (VIEW_CONVERT_EXPR, itype,
3222 gimple_assign_lhs (g)));
3223 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3225 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3226 + int_size_in_bytes (itype);
3227 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3228 gimple_call_arg (stmt, 0),
3229 gimple_assign_lhs (g),
3230 gimple_call_arg (stmt, 2),
3231 build_int_cst (integer_type_node, flag),
3232 gimple_call_arg (stmt, 4),
3233 gimple_call_arg (stmt, 5));
3234 tree lhs = make_ssa_name (ctype);
3235 gimple_call_set_lhs (g, lhs);
3236 gimple_set_vdef (g, gimple_vdef (stmt));
3237 gimple_set_vuse (g, gimple_vuse (stmt));
3238 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3239 if (gimple_call_lhs (stmt))
3241 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3242 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3243 build1 (IMAGPART_EXPR, itype, lhs));
3244 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3245 g = gimple_build_assign (gimple_call_lhs (stmt), NOP_EXPR,
3246 gimple_assign_lhs (g));
3248 gsi_replace (gsi, g, true);
3249 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3250 build1 (REALPART_EXPR, itype, lhs));
3251 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3252 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3254 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3255 VIEW_CONVERT_EXPR,
3256 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3257 gimple_assign_lhs (g)));
3258 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3260 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3261 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3262 *gsi = gsiret;
3265 /* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
3266 doesn't fit into TYPE. The overflow test is performed regardless of
3267 -fwrapv, and even for unsigned types. */
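/* E.g. for a TYPE of unsigned char, 200 + 100 computes to 300, which
   needs 9 bits, so arith_overflowed_p (PLUS_EXPR, type, 200, 100)
   returns true (illustrative). */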
3269 bool
3270 arith_overflowed_p (enum tree_code code, const_tree type,
3271 const_tree arg0, const_tree arg1)
3273 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3274 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3275 widest2_int_cst;
3276 widest2_int warg0 = widest2_int_cst (arg0);
3277 widest2_int warg1 = widest2_int_cst (arg1);
3278 widest2_int wres;
3279 switch (code)
3281 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3282 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3283 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3284 default: gcc_unreachable ();
3286 signop sign = TYPE_SIGN (type);
3287 if (sign == UNSIGNED && wi::neg_p (wres))
3288 return true;
3289 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3292 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3293 The statement may be replaced by another statement, e.g., if the call
3294 simplifies to a constant value. Return true if any changes were made.
3295 It is assumed that the operands have been previously folded. */
3297 static bool
3298 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3300 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3301 tree callee;
3302 bool changed = false;
3303 unsigned i;
3305 /* Fold *& in call arguments. */
3306 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3307 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3309 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3310 if (tmp)
3312 gimple_call_set_arg (stmt, i, tmp);
3313 changed = true;
3317 /* Check for virtual calls that became direct calls. */
3318 callee = gimple_call_fn (stmt);
3319 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3321 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3323 if (dump_file && virtual_method_call_p (callee)
3324 && !possible_polymorphic_call_target_p
3325 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3326 (OBJ_TYPE_REF_EXPR (callee)))))
3328 fprintf (dump_file,
3329 "Type inheritance inconsistent devirtualization of ");
3330 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3331 fprintf (dump_file, " to ");
3332 print_generic_expr (dump_file, callee, TDF_SLIM);
3333 fprintf (dump_file, "\n");
3336 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3337 changed = true;
3339 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3341 bool final;
3342 vec <cgraph_node *>targets
3343 = possible_polymorphic_call_targets (callee, stmt, &final);
3344 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3346 tree lhs = gimple_call_lhs (stmt);
3347 if (dump_enabled_p ())
3349 location_t loc = gimple_location_safe (stmt);
3350 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3351 "folding virtual function call to %s\n",
3352 targets.length () == 1
3353 ? targets[0]->name ()
3354 : "__builtin_unreachable");
3356 if (targets.length () == 1)
3358 tree fndecl = targets[0]->decl;
3359 gimple_call_set_fndecl (stmt, fndecl);
3360 changed = true;
3361 /* If changing the call to __cxa_pure_virtual
3362 or similar noreturn function, adjust gimple_call_fntype
3363 too. */
3364 if (gimple_call_noreturn_p (stmt)
3365 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3366 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3367 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3368 == void_type_node))
3369 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3370 /* If the call becomes noreturn, remove the lhs. */
3371 if (lhs
3372 && gimple_call_noreturn_p (stmt)
3373 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3374 || should_remove_lhs_p (lhs)))
3376 if (TREE_CODE (lhs) == SSA_NAME)
3378 tree var = create_tmp_var (TREE_TYPE (lhs));
3379 tree def = get_or_create_ssa_default_def (cfun, var);
3380 gimple *new_stmt = gimple_build_assign (lhs, def);
3381 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3383 gimple_call_set_lhs (stmt, NULL_TREE);
3385 maybe_remove_unused_call_args (cfun, stmt);
3387 else
3389 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3390 gimple *new_stmt = gimple_build_call (fndecl, 0);
3391 gimple_set_location (new_stmt, gimple_location (stmt));
3392 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3394 tree var = create_tmp_var (TREE_TYPE (lhs));
3395 tree def = get_or_create_ssa_default_def (cfun, var);
3397 /* To satisfy the condition for
3398 cgraph_update_edges_for_call_stmt_node,
3399 we need to preserve the GIMPLE_CALL statement
3400 at the position of the GSI iterator. */
3401 update_call_from_tree (gsi, def);
3402 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3404 else
3406 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3407 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3408 gsi_replace (gsi, new_stmt, false);
3410 return true;
3416 /* Check for indirect calls that became direct calls, and then
3417 no longer require a static chain. */
3418 if (gimple_call_chain (stmt))
3420 tree fn = gimple_call_fndecl (stmt);
3421 if (fn && !DECL_STATIC_CHAIN (fn))
3423 gimple_call_set_chain (stmt, NULL);
3424 changed = true;
3426 else
3428 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3429 if (tmp)
3431 gimple_call_set_chain (stmt, tmp);
3432 changed = true;
3437 if (inplace)
3438 return changed;
3440 /* Check for builtins that CCP can handle using information not
3441 available in the generic fold routines. */
3442 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3444 if (gimple_fold_builtin (gsi))
3445 changed = true;
3447 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3449 changed |= targetm.gimple_fold_builtin (gsi);
3451 else if (gimple_call_internal_p (stmt))
3453 enum tree_code subcode = ERROR_MARK;
3454 tree result = NULL_TREE;
3455 bool cplx_result = false;
3456 tree overflow = NULL_TREE;
3457 switch (gimple_call_internal_fn (stmt))
3459 case IFN_BUILTIN_EXPECT:
3460 result = fold_builtin_expect (gimple_location (stmt),
3461 gimple_call_arg (stmt, 0),
3462 gimple_call_arg (stmt, 1),
3463 gimple_call_arg (stmt, 2));
3464 break;
3465 case IFN_UBSAN_OBJECT_SIZE:
3466 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3467 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3468 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3469 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3470 gimple_call_arg (stmt, 2))))
3472 gsi_replace (gsi, gimple_build_nop (), false);
3473 unlink_stmt_vdef (stmt);
3474 release_defs (stmt);
3475 return true;
3477 break;
3478 case IFN_GOACC_DIM_SIZE:
3479 case IFN_GOACC_DIM_POS:
3480 result = fold_internal_goacc_dim (stmt);
3481 break;
3482 case IFN_UBSAN_CHECK_ADD:
3483 subcode = PLUS_EXPR;
3484 break;
3485 case IFN_UBSAN_CHECK_SUB:
3486 subcode = MINUS_EXPR;
3487 break;
3488 case IFN_UBSAN_CHECK_MUL:
3489 subcode = MULT_EXPR;
3490 break;
3491 case IFN_ADD_OVERFLOW:
3492 subcode = PLUS_EXPR;
3493 cplx_result = true;
3494 break;
3495 case IFN_SUB_OVERFLOW:
3496 subcode = MINUS_EXPR;
3497 cplx_result = true;
3498 break;
3499 case IFN_MUL_OVERFLOW:
3500 subcode = MULT_EXPR;
3501 cplx_result = true;
3502 break;
3503 default:
3504 break;
3506 if (subcode != ERROR_MARK)
3508 tree arg0 = gimple_call_arg (stmt, 0);
3509 tree arg1 = gimple_call_arg (stmt, 1);
3510 tree type = TREE_TYPE (arg0);
3511 if (cplx_result)
3513 tree lhs = gimple_call_lhs (stmt);
3514 if (lhs == NULL_TREE)
3515 type = NULL_TREE;
3516 else
3517 type = TREE_TYPE (TREE_TYPE (lhs));
3519 if (type == NULL_TREE)
3521 /* x = y + 0; x = y - 0; x = y * 0; */
3522 else if (integer_zerop (arg1))
3523 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3524 /* x = 0 + y; x = 0 * y; */
3525 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3526 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3527 /* x = y - y; */
3528 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3529 result = integer_zero_node;
3530 /* x = y * 1; x = 1 * y; */
3531 else if (subcode == MULT_EXPR && integer_onep (arg1))
3532 result = arg0;
3533 else if (subcode == MULT_EXPR && integer_onep (arg0))
3534 result = arg1;
3535 else if (TREE_CODE (arg0) == INTEGER_CST
3536 && TREE_CODE (arg1) == INTEGER_CST)
3538 if (cplx_result)
3539 result = int_const_binop (subcode, fold_convert (type, arg0),
3540 fold_convert (type, arg1));
3541 else
3542 result = int_const_binop (subcode, arg0, arg1);
3543 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3545 if (cplx_result)
3546 overflow = build_one_cst (type);
3547 else
3548 result = NULL_TREE;
3551 if (result)
3553 if (result == integer_zero_node)
3554 result = build_zero_cst (type);
3555 else if (cplx_result && TREE_TYPE (result) != type)
3557 if (TREE_CODE (result) == INTEGER_CST)
3559 if (arith_overflowed_p (PLUS_EXPR, type, result,
3560 integer_zero_node))
3561 overflow = build_one_cst (type);
3563 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3564 && TYPE_UNSIGNED (type))
3565 || (TYPE_PRECISION (type)
3566 < (TYPE_PRECISION (TREE_TYPE (result))
3567 + (TYPE_UNSIGNED (TREE_TYPE (result))
3568 && !TYPE_UNSIGNED (type)))))
3569 result = NULL_TREE;
3570 if (result)
3571 result = fold_convert (type, result);
3576 if (result)
3578 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3579 result = drop_tree_overflow (result);
3580 if (cplx_result)
3582 if (overflow == NULL_TREE)
3583 overflow = build_zero_cst (TREE_TYPE (result));
3584 tree ctype = build_complex_type (TREE_TYPE (result));
3585 if (TREE_CODE (result) == INTEGER_CST
3586 && TREE_CODE (overflow) == INTEGER_CST)
3587 result = build_complex (ctype, result, overflow);
3588 else
3589 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3590 ctype, result, overflow);
3592 if (!update_call_from_tree (gsi, result))
3593 gimplify_and_update_call_from_tree (gsi, result);
3594 changed = true;
3598 return changed;
3602 /* Return true if NAME has a use on STMT. */
3604 static bool
3605 has_use_on_stmt (tree name, gimple *stmt)
3607 imm_use_iterator iter;
3608 use_operand_p use_p;
3609 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3610 if (USE_STMT (use_p) == stmt)
3611 return true;
3612 return false;
3615 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
3616 gimple_simplify.
3618 Replaces *GSI with the simplification result in RCODE and OPS
3619 and the associated statements in *SEQ. Does the replacement
3620 according to INPLACE and returns true if the operation succeeded. */
3622 static bool
3623 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3624 code_helper rcode, tree *ops,
3625 gimple_seq *seq, bool inplace)
3627 gimple *stmt = gsi_stmt (*gsi);
3629 /* Play safe and do not allow abnormals to be mentioned in
3630 newly created statements. See also maybe_push_res_to_seq.
3631 As an exception allow such uses if there was a use of the
3632 same SSA name on the old stmt. */
3633 if ((TREE_CODE (ops[0]) == SSA_NAME
3634 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3635 && !has_use_on_stmt (ops[0], stmt))
3636 || (ops[1]
3637 && TREE_CODE (ops[1]) == SSA_NAME
3638 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3639 && !has_use_on_stmt (ops[1], stmt))
3640 || (ops[2]
3641 && TREE_CODE (ops[2]) == SSA_NAME
3642 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3643 && !has_use_on_stmt (ops[2], stmt))
3644 || (COMPARISON_CLASS_P (ops[0])
3645 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3646 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3647 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3648 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3649 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3650 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3651 return false;
3653 /* Don't insert new statements when INPLACE is true, even if we could
3654 reuse STMT for the final statement. */
3655 if (inplace && !gimple_seq_empty_p (*seq))
3656 return false;
3658 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3660 gcc_assert (rcode.is_tree_code ());
3661 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3662 /* GIMPLE_CONDs condition may not throw. */
3663 && (!flag_exceptions
3664 || !cfun->can_throw_non_call_exceptions
3665 || !operation_could_trap_p (rcode,
3666 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3667 false, NULL_TREE)))
3668 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3669 else if (rcode == SSA_NAME)
3670 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3671 build_zero_cst (TREE_TYPE (ops[0])));
3672 else if (rcode == INTEGER_CST)
3674 if (integer_zerop (ops[0]))
3675 gimple_cond_make_false (cond_stmt);
3676 else
3677 gimple_cond_make_true (cond_stmt);
3679 else if (!inplace)
3681 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3682 ops, seq);
3683 if (!res)
3684 return false;
3685 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3686 build_zero_cst (TREE_TYPE (res)));
3688 else
3689 return false;
3690 if (dump_file && (dump_flags & TDF_DETAILS))
3692 fprintf (dump_file, "gimple_simplified to ");
3693 if (!gimple_seq_empty_p (*seq))
3694 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3695 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3696 0, TDF_SLIM);
3698 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3699 return true;
3701 else if (is_gimple_assign (stmt)
3702 && rcode.is_tree_code ())
3704 if (!inplace
3705 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3707 maybe_build_generic_op (rcode,
3708 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
3709 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3710 if (dump_file && (dump_flags & TDF_DETAILS))
3712 fprintf (dump_file, "gimple_simplified to ");
3713 if (!gimple_seq_empty_p (*seq))
3714 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3715 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3716 0, TDF_SLIM);
3718 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3719 return true;
3722 else if (rcode.is_fn_code ()
3723 && gimple_call_combined_fn (stmt) == rcode)
3725 unsigned i;
3726 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3728 gcc_assert (ops[i] != NULL_TREE);
3729 gimple_call_set_arg (stmt, i, ops[i]);
3731 if (i < 3)
3732 gcc_assert (ops[i] == NULL_TREE);
3733 if (dump_file && (dump_flags & TDF_DETAILS))
3735 fprintf (dump_file, "gimple_simplified to ");
3736 if (!gimple_seq_empty_p (*seq))
3737 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3738 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3740 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3741 return true;
3743 else if (!inplace)
3745 if (gimple_has_lhs (stmt))
3747 tree lhs = gimple_get_lhs (stmt);
3748 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3749 ops, seq, lhs))
3750 return false;
3751 if (dump_file && (dump_flags & TDF_DETAILS))
3753 fprintf (dump_file, "gimple_simplified to ");
3754 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3756 gsi_replace_with_seq_vops (gsi, *seq);
3757 return true;
3759 else
3760 gcc_unreachable ();
3763 return false;
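/* Illustrative sketch, added for exposition (hypothetical IL, not part
   of the original sources): if gimple_simplify reduces the condition of

     if (x_1 == x_1)

   for an integral x_1 to the INTEGER_CST 1, then RCODE is INTEGER_CST,
   ops[0] is integer one, and the INTEGER_CST arm above rewrites the
   statement via gimple_cond_make_true without touching *SEQ.  */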
3766 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3768 static bool
3769 maybe_canonicalize_mem_ref_addr (tree *t)
3771 bool res = false;
3773 if (TREE_CODE (*t) == ADDR_EXPR)
3774 t = &TREE_OPERAND (*t, 0);
3776 /* The C and C++ frontends use an ARRAY_REF for indexing with their
3777 generic vector extension. The actual vector referenced is
3778 view-converted to an array type for this purpose. If the index
3779 is constant, the canonical representation in the middle-end is a
3780 BIT_FIELD_REF, so rewrite the former to the latter here. */
3781 if (TREE_CODE (*t) == ARRAY_REF
3782 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
3783 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
3784 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
3786 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
3787 if (VECTOR_TYPE_P (vtype))
3789 tree low = array_ref_low_bound (*t);
3790 if (TREE_CODE (low) == INTEGER_CST)
3792 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
3794 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
3795 wi::to_widest (low));
3796 idx = wi::mul (idx, wi::to_widest
3797 (TYPE_SIZE (TREE_TYPE (*t))));
3798 widest_int ext
3799 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
3800 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
3802 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
3803 TREE_TYPE (*t),
3804 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
3805 TYPE_SIZE (TREE_TYPE (*t)),
3806 wide_int_to_tree (sizetype, idx));
3807 res = true;
3814 while (handled_component_p (*t))
3815 t = &TREE_OPERAND (*t, 0);
3817 /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
3818 of invariant addresses into an SSA name MEM_REF address. */
3819 if (TREE_CODE (*t) == MEM_REF
3820 || TREE_CODE (*t) == TARGET_MEM_REF)
3822 tree addr = TREE_OPERAND (*t, 0);
3823 if (TREE_CODE (addr) == ADDR_EXPR
3824 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3825 || handled_component_p (TREE_OPERAND (addr, 0))))
3827 tree base;
3828 HOST_WIDE_INT coffset;
3829 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3830 &coffset);
3831 if (!base)
3832 gcc_unreachable ();
3834 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3835 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3836 TREE_OPERAND (*t, 1),
3837 size_int (coffset));
3838 res = true;
3840 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3841 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3844 /* Canonicalize back MEM_REFs to plain reference trees if the object
3845 accessed is a decl that has the same access semantics as the MEM_REF. */
3846 if (TREE_CODE (*t) == MEM_REF
3847 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3848 && integer_zerop (TREE_OPERAND (*t, 1))
3849 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3851 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3852 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3853 if (/* Same volatile qualification. */
3854 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3855 /* Same TBAA behavior with -fstrict-aliasing. */
3856 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3857 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3858 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3859 /* Same alignment. */
3860 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3861 /* We have to be careful here not to drop a required conversion
3862 from the rhs to the lhs if *t appears on the lhs, or vice-versa
3863 if it appears on the rhs. Thus require strict type
3864 compatibility. */
3865 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3867 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3868 res = true;
3872 /* Canonicalize TARGET_MEM_REF in particular with respect to
3873 the indexes becoming constant. */
3874 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3876 tree tem = maybe_fold_tmr (*t);
3877 if (tem)
3879 *t = tem;
3880 res = true;
3884 return res;
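/* Illustrative sketch, added for exposition (hypothetical trees): a
   propagated invariant address can leave behind

     MEM[&a.b, 0]

   which the code above rewrites via get_addr_base_and_unit_offset to a
   MEM_REF based directly on &a with the component offset folded into
   the constant operand; if that offset is zero and the access matches
   the decl's type, volatility and alignment, the MEM_REF is
   canonicalized further back to the plain decl reference "a".  */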
3887 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3888 distinguishes both cases. */
3890 static bool
3891 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3893 bool changed = false;
3894 gimple *stmt = gsi_stmt (*gsi);
3895 bool nowarning = gimple_no_warning_p (stmt);
3896 unsigned i;
3897 fold_defer_overflow_warnings ();
3899 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3900 after propagation.
3901 ??? This shouldn't be done in generic folding but in the
3902 propagation helpers which also know whether an address was
3903 propagated.
3904 Also canonicalize operand order. */
3905 switch (gimple_code (stmt))
3907 case GIMPLE_ASSIGN:
3908 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3910 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3911 if ((REFERENCE_CLASS_P (*rhs)
3912 || TREE_CODE (*rhs) == ADDR_EXPR)
3913 && maybe_canonicalize_mem_ref_addr (rhs))
3914 changed = true;
3915 tree *lhs = gimple_assign_lhs_ptr (stmt);
3916 if (REFERENCE_CLASS_P (*lhs)
3917 && maybe_canonicalize_mem_ref_addr (lhs))
3918 changed = true;
3920 else
3922 /* Canonicalize operand order. */
3923 enum tree_code code = gimple_assign_rhs_code (stmt);
3924 if (TREE_CODE_CLASS (code) == tcc_comparison
3925 || commutative_tree_code (code)
3926 || commutative_ternary_tree_code (code))
3928 tree rhs1 = gimple_assign_rhs1 (stmt);
3929 tree rhs2 = gimple_assign_rhs2 (stmt);
3930 if (tree_swap_operands_p (rhs1, rhs2, false))
3932 gimple_assign_set_rhs1 (stmt, rhs2);
3933 gimple_assign_set_rhs2 (stmt, rhs1);
3934 if (TREE_CODE_CLASS (code) == tcc_comparison)
3935 gimple_assign_set_rhs_code (stmt,
3936 swap_tree_comparison (code));
3937 changed = true;
3941 break;
3942 case GIMPLE_CALL:
3944 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3946 tree *arg = gimple_call_arg_ptr (stmt, i);
3947 if (REFERENCE_CLASS_P (*arg)
3948 && maybe_canonicalize_mem_ref_addr (arg))
3949 changed = true;
3951 tree *lhs = gimple_call_lhs_ptr (stmt);
3952 if (*lhs
3953 && REFERENCE_CLASS_P (*lhs)
3954 && maybe_canonicalize_mem_ref_addr (lhs))
3955 changed = true;
3956 break;
3958 case GIMPLE_ASM:
3960 gasm *asm_stmt = as_a <gasm *> (stmt);
3961 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3963 tree link = gimple_asm_output_op (asm_stmt, i);
3964 tree op = TREE_VALUE (link);
3965 if (REFERENCE_CLASS_P (op)
3966 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3967 changed = true;
3969 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3971 tree link = gimple_asm_input_op (asm_stmt, i);
3972 tree op = TREE_VALUE (link);
3973 if ((REFERENCE_CLASS_P (op)
3974 || TREE_CODE (op) == ADDR_EXPR)
3975 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3976 changed = true;
3979 break;
3980 case GIMPLE_DEBUG:
3981 if (gimple_debug_bind_p (stmt))
3983 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3984 if (*val
3985 && (REFERENCE_CLASS_P (*val)
3986 || TREE_CODE (*val) == ADDR_EXPR)
3987 && maybe_canonicalize_mem_ref_addr (val))
3988 changed = true;
3990 break;
3991 case GIMPLE_COND:
3993 /* Canonicalize operand order. */
3994 tree lhs = gimple_cond_lhs (stmt);
3995 tree rhs = gimple_cond_rhs (stmt);
3996 if (tree_swap_operands_p (lhs, rhs, false))
3998 gcond *gc = as_a <gcond *> (stmt);
3999 gimple_cond_set_lhs (gc, rhs);
4000 gimple_cond_set_rhs (gc, lhs);
4001 gimple_cond_set_code (gc,
4002 swap_tree_comparison (gimple_cond_code (gc)));
4003 changed = true;
4006 default:;
4009 /* Dispatch to pattern-based folding. */
4010 if (!inplace
4011 || is_gimple_assign (stmt)
4012 || gimple_code (stmt) == GIMPLE_COND)
4014 gimple_seq seq = NULL;
4015 code_helper rcode;
4016 tree ops[3] = {};
4017 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4018 valueize, valueize))
4020 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4021 changed = true;
4022 else
4023 gimple_seq_discard (seq);
4027 stmt = gsi_stmt (*gsi);
4029 /* Fold the main computation performed by the statement. */
4030 switch (gimple_code (stmt))
4032 case GIMPLE_ASSIGN:
4034 /* Try to canonicalize for boolean-typed X the comparisons
4035 X == 0, X == 1, X != 0, and X != 1. */
4036 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4037 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4039 tree lhs = gimple_assign_lhs (stmt);
4040 tree op1 = gimple_assign_rhs1 (stmt);
4041 tree op2 = gimple_assign_rhs2 (stmt);
4042 tree type = TREE_TYPE (op1);
4044 /* Check whether the comparison operands are of the same boolean
4045 type as the result type.
4046 Check that the second operand is an integer constant with value
4047 one or zero. */
4048 if (TREE_CODE (op2) == INTEGER_CST
4049 && (integer_zerop (op2) || integer_onep (op2))
4050 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4052 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4053 bool is_logical_not = false;
4055 /* X == 0 and X != 1 is a logical-not of X;
4056 X == 1 and X != 0 is X. */
4057 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4058 || (cmp_code == NE_EXPR && integer_onep (op2)))
4059 is_logical_not = true;
4061 if (is_logical_not == false)
4062 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4063 /* Only for one-bit precision typed X is the transformation
4064 !X -> ~X valid. */
4065 else if (TYPE_PRECISION (type) == 1)
4066 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4067 /* Otherwise we use !X -> X ^ 1. */
4068 else
4069 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4070 build_int_cst (type, 1));
4071 changed = true;
4072 break;
4076 unsigned old_num_ops = gimple_num_ops (stmt);
4077 tree lhs = gimple_assign_lhs (stmt);
4078 tree new_rhs = fold_gimple_assign (gsi);
4079 if (new_rhs
4080 && !useless_type_conversion_p (TREE_TYPE (lhs),
4081 TREE_TYPE (new_rhs)))
4082 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4083 if (new_rhs
4084 && (!inplace
4085 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4087 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4088 changed = true;
4090 break;
4093 case GIMPLE_CALL:
4094 changed |= gimple_fold_call (gsi, inplace);
4095 break;
4097 case GIMPLE_ASM:
4098 /* Fold *& in asm operands. */
4100 gasm *asm_stmt = as_a <gasm *> (stmt);
4101 size_t noutputs;
4102 const char **oconstraints;
4103 const char *constraint;
4104 bool allows_mem, allows_reg;
4106 noutputs = gimple_asm_noutputs (asm_stmt);
4107 oconstraints = XALLOCAVEC (const char *, noutputs);
4109 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4111 tree link = gimple_asm_output_op (asm_stmt, i);
4112 tree op = TREE_VALUE (link);
4113 oconstraints[i]
4114 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4115 if (REFERENCE_CLASS_P (op)
4116 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4118 TREE_VALUE (link) = op;
4119 changed = true;
4122 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4124 tree link = gimple_asm_input_op (asm_stmt, i);
4125 tree op = TREE_VALUE (link);
4126 constraint
4127 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4128 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4129 oconstraints, &allows_mem, &allows_reg);
4130 if (REFERENCE_CLASS_P (op)
4131 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4132 != NULL_TREE)
4134 TREE_VALUE (link) = op;
4135 changed = true;
4139 break;
4141 case GIMPLE_DEBUG:
4142 if (gimple_debug_bind_p (stmt))
4144 tree val = gimple_debug_bind_get_value (stmt);
4145 if (val
4146 && REFERENCE_CLASS_P (val))
4148 tree tem = maybe_fold_reference (val, false);
4149 if (tem)
4151 gimple_debug_bind_set_value (stmt, tem);
4152 changed = true;
4155 else if (val
4156 && TREE_CODE (val) == ADDR_EXPR)
4158 tree ref = TREE_OPERAND (val, 0);
4159 tree tem = maybe_fold_reference (ref, false);
4160 if (tem)
4162 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4163 gimple_debug_bind_set_value (stmt, tem);
4164 changed = true;
4168 break;
4170 default:;
4173 stmt = gsi_stmt (*gsi);
4175 /* Fold *& on the lhs. */
4176 if (gimple_has_lhs (stmt))
4178 tree lhs = gimple_get_lhs (stmt);
4179 if (lhs && REFERENCE_CLASS_P (lhs))
4181 tree new_lhs = maybe_fold_reference (lhs, true);
4182 if (new_lhs)
4184 gimple_set_lhs (stmt, new_lhs);
4185 changed = true;
4190 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4191 return changed;
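/* Illustrative sketch, added for exposition (hypothetical IL): the
   canonical unfolded input this worker is documented to handle is

     *&x = 0;

   as left behind by constant propagation; maybe_fold_reference on the
   lhs strips the *& pair, giving "x = 0".  Likewise, for a
   boolean-typed b_1 of one-bit precision, "t_2 = b_1 == 0" is
   rewritten above to "t_2 = ~b_1".  */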
4194 /* Valueization callback that ends up not following SSA edges. */
4196 tree
4197 no_follow_ssa_edges (tree)
4199 return NULL_TREE;
4202 /* Valueization callback that ends up following single-use SSA edges only. */
4204 tree
4205 follow_single_use_edges (tree val)
4207 if (TREE_CODE (val) == SSA_NAME
4208 && !has_single_use (val))
4209 return NULL_TREE;
4210 return val;
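/* Usage sketch, added for exposition (assuming a pass holding a valid
   iterator GSI): calling

     changed |= fold_stmt (&gsi, follow_single_use_edges);

   lets gimple_simplify look through defining statements of single-use
   SSA names only, while the no_follow_ssa_edges callback restricts it
   to the operands as written.  */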
4213 /* Fold the statement pointed to by GSI. In some cases, this function may
4214 replace the whole statement with a new one. Returns true iff folding
4215 makes any changes.
4216 The statement pointed to by GSI should be in valid gimple form but may
4217 be in an unfolded state resulting from, for example, constant
4218 propagation, which can produce *&x = 0. */
4220 bool
4221 fold_stmt (gimple_stmt_iterator *gsi)
4223 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4226 bool
4227 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4229 return fold_stmt_1 (gsi, false, valueize);
4232 /* Perform the minimal folding on statement *GSI. Only operations like
4233 *&x created by constant propagation are handled. The statement cannot
4234 be replaced with a new one. Return true if the statement was
4235 changed, false otherwise.
4236 The statement *GSI should be in valid gimple form but may
4237 be in an unfolded state resulting from, for example, constant
4238 propagation, which can produce *&x = 0. */
4240 bool
4241 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4243 gimple *stmt = gsi_stmt (*gsi);
4244 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4245 gcc_assert (gsi_stmt (*gsi) == stmt);
4246 return changed;
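/* Usage sketch, added for exposition: a pass iterating over a
   statement it may not replace would call

     if (fold_stmt_inplace (&gsi))
       update_stmt (gsi_stmt (gsi));

   relying on the guarantee asserted above that the iterator still
   points at the original statement.  */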
4249 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4250 if EXPR is null or we don't know how.
4251 If non-null, the result always has boolean type. */
4253 static tree
4254 canonicalize_bool (tree expr, bool invert)
4256 if (!expr)
4257 return NULL_TREE;
4258 else if (invert)
4260 if (integer_nonzerop (expr))
4261 return boolean_false_node;
4262 else if (integer_zerop (expr))
4263 return boolean_true_node;
4264 else if (TREE_CODE (expr) == SSA_NAME)
4265 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4266 build_int_cst (TREE_TYPE (expr), 0));
4267 else if (COMPARISON_CLASS_P (expr))
4268 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4269 boolean_type_node,
4270 TREE_OPERAND (expr, 0),
4271 TREE_OPERAND (expr, 1));
4272 else
4273 return NULL_TREE;
4275 else
4277 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4278 return expr;
4279 if (integer_nonzerop (expr))
4280 return boolean_true_node;
4281 else if (integer_zerop (expr))
4282 return boolean_false_node;
4283 else if (TREE_CODE (expr) == SSA_NAME)
4284 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4285 build_int_cst (TREE_TYPE (expr), 0));
4286 else if (COMPARISON_CLASS_P (expr))
4287 return fold_build2 (TREE_CODE (expr),
4288 boolean_type_node,
4289 TREE_OPERAND (expr, 0),
4290 TREE_OPERAND (expr, 1));
4291 else
4292 return NULL_TREE;
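/* Illustrative sketch, added for exposition: for an SSA name x_1,
   canonicalize_bool (x_1, false) builds the boolean comparison
   "x_1 != 0" and canonicalize_bool (x_1, true) builds "x_1 == 0";
   an expression that already has boolean type is returned unchanged
   in the non-inverted case, and a comparison such as "a < b" is
   rebuilt with the inverted comparison code when INVERT is true.  */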
4296 /* Check to see if a boolean expression EXPR is logically equivalent to the
4297 comparison (OP1 CODE OP2). Check for various identities involving
4298 SSA_NAMEs. */
4300 static bool
4301 same_bool_comparison_p (const_tree expr, enum tree_code code,
4302 const_tree op1, const_tree op2)
4304 gimple *s;
4306 /* The obvious case. */
4307 if (TREE_CODE (expr) == code
4308 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4309 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4310 return true;
4312 /* Check for comparing (name, name != 0) and the case where expr
4313 is an SSA_NAME with a definition matching the comparison. */
4314 if (TREE_CODE (expr) == SSA_NAME
4315 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4317 if (operand_equal_p (expr, op1, 0))
4318 return ((code == NE_EXPR && integer_zerop (op2))
4319 || (code == EQ_EXPR && integer_nonzerop (op2)));
4320 s = SSA_NAME_DEF_STMT (expr);
4321 if (is_gimple_assign (s)
4322 && gimple_assign_rhs_code (s) == code
4323 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4324 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4325 return true;
4328 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4329 of name is a comparison, recurse. */
4330 if (TREE_CODE (op1) == SSA_NAME
4331 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4333 s = SSA_NAME_DEF_STMT (op1);
4334 if (is_gimple_assign (s)
4335 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4337 enum tree_code c = gimple_assign_rhs_code (s);
4338 if ((c == NE_EXPR && integer_zerop (op2))
4339 || (c == EQ_EXPR && integer_nonzerop (op2)))
4340 return same_bool_comparison_p (expr, c,
4341 gimple_assign_rhs1 (s),
4342 gimple_assign_rhs2 (s));
4343 if ((c == EQ_EXPR && integer_zerop (op2))
4344 || (c == NE_EXPR && integer_nonzerop (op2)))
4345 return same_bool_comparison_p (expr,
4346 invert_tree_comparison (c, false),
4347 gimple_assign_rhs1 (s),
4348 gimple_assign_rhs2 (s));
4351 return false;
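/* Illustrative sketch, added for exposition (hypothetical IL): with

     t_1 = a_2 < b_3;

   both same_bool_comparison_p (t_1, NE_EXPR, t_1, 0) and
   same_bool_comparison_p (t_1, LT_EXPR, a_2, b_3) hold: the former by
   the (name, name != 0) identity, the latter because t_1's defining
   statement matches the comparison being tested.  */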
4354 /* Check to see if two boolean expressions OP1 and OP2 are logically
4355 equivalent. */
4357 static bool
4358 same_bool_result_p (const_tree op1, const_tree op2)
4360 /* Simple cases first. */
4361 if (operand_equal_p (op1, op2, 0))
4362 return true;
4364 /* Check the cases where at least one of the operands is a comparison.
4365 These are a bit smarter than operand_equal_p in that they apply some
4366 identities on SSA_NAMEs. */
4367 if (COMPARISON_CLASS_P (op2)
4368 && same_bool_comparison_p (op1, TREE_CODE (op2),
4369 TREE_OPERAND (op2, 0),
4370 TREE_OPERAND (op2, 1)))
4371 return true;
4372 if (COMPARISON_CLASS_P (op1)
4373 && same_bool_comparison_p (op2, TREE_CODE (op1),
4374 TREE_OPERAND (op1, 0),
4375 TREE_OPERAND (op1, 1)))
4376 return true;
4378 /* Default case. */
4379 return false;
4382 /* Forward declarations for some mutually recursive functions. */
4384 static tree
4385 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4386 enum tree_code code2, tree op2a, tree op2b);
4387 static tree
4388 and_var_with_comparison (tree var, bool invert,
4389 enum tree_code code2, tree op2a, tree op2b);
4390 static tree
4391 and_var_with_comparison_1 (gimple *stmt,
4392 enum tree_code code2, tree op2a, tree op2b);
4393 static tree
4394 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4395 enum tree_code code2, tree op2a, tree op2b);
4396 static tree
4397 or_var_with_comparison (tree var, bool invert,
4398 enum tree_code code2, tree op2a, tree op2b);
4399 static tree
4400 or_var_with_comparison_1 (gimple *stmt,
4401 enum tree_code code2, tree op2a, tree op2b);
4403 /* Helper function for and_comparisons_1: try to simplify the AND of the
4404 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4405 If INVERT is true, invert the value of VAR before doing the AND.
4406 Return NULL_TREE if we can't simplify this to a single expression. */
4408 static tree
4409 and_var_with_comparison (tree var, bool invert,
4410 enum tree_code code2, tree op2a, tree op2b)
4412 tree t;
4413 gimple *stmt = SSA_NAME_DEF_STMT (var);
4415 /* We can only deal with variables whose definitions are assignments. */
4416 if (!is_gimple_assign (stmt))
4417 return NULL_TREE;
4419 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4420 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4421 Then we only have to consider the simpler non-inverted cases. */
4422 if (invert)
4423 t = or_var_with_comparison_1 (stmt,
4424 invert_tree_comparison (code2, false),
4425 op2a, op2b);
4426 else
4427 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4428 return canonicalize_bool (t, invert);
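/* Illustrative sketch, added for exposition: with INVERT set, the
   DeMorgan rewrite above answers a query like

     !x_1 AND (a_2 < b_3)

   by computing x_1 OR (a_2 >= b_3) through or_var_with_comparison_1
   and then letting canonicalize_bool invert the single expression
   that comes back.  */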
4431 /* Try to simplify the AND of the ssa variable defined by the assignment
4432 STMT with the comparison specified by (OP2A CODE2 OP2B).
4433 Return NULL_TREE if we can't simplify this to a single expression. */
4435 static tree
4436 and_var_with_comparison_1 (gimple *stmt,
4437 enum tree_code code2, tree op2a, tree op2b)
4439 tree var = gimple_assign_lhs (stmt);
4440 tree true_test_var = NULL_TREE;
4441 tree false_test_var = NULL_TREE;
4442 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4444 /* Check for identities like (var AND (var == 0)) => false. */
4445 if (TREE_CODE (op2a) == SSA_NAME
4446 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4448 if ((code2 == NE_EXPR && integer_zerop (op2b))
4449 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4451 true_test_var = op2a;
4452 if (var == true_test_var)
4453 return var;
4455 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4456 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4458 false_test_var = op2a;
4459 if (var == false_test_var)
4460 return boolean_false_node;
4464 /* If the definition is a comparison, recurse on it. */
4465 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4467 tree t = and_comparisons_1 (innercode,
4468 gimple_assign_rhs1 (stmt),
4469 gimple_assign_rhs2 (stmt),
4470 code2,
4471 op2a,
4472 op2b);
4473 if (t)
4474 return t;
4477 /* If the definition is an AND or OR expression, we may be able to
4478 simplify by reassociating. */
4479 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4480 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4482 tree inner1 = gimple_assign_rhs1 (stmt);
4483 tree inner2 = gimple_assign_rhs2 (stmt);
4484 gimple *s;
4485 tree t;
4486 tree partial = NULL_TREE;
4487 bool is_and = (innercode == BIT_AND_EXPR);
4489 /* Check for boolean identities that don't require recursive examination
4490 of inner1/inner2:
4491 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4492 inner1 AND (inner1 OR inner2) => inner1
4493 !inner1 AND (inner1 AND inner2) => false
4494 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4495 Likewise for similar cases involving inner2. */
4496 if (inner1 == true_test_var)
4497 return (is_and ? var : inner1);
4498 else if (inner2 == true_test_var)
4499 return (is_and ? var : inner2);
4500 else if (inner1 == false_test_var)
4501 return (is_and
4502 ? boolean_false_node
4503 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4504 else if (inner2 == false_test_var)
4505 return (is_and
4506 ? boolean_false_node
4507 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4509 /* Next, redistribute/reassociate the AND across the inner tests.
4510 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4511 if (TREE_CODE (inner1) == SSA_NAME
4512 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4513 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4514 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4515 gimple_assign_rhs1 (s),
4516 gimple_assign_rhs2 (s),
4517 code2, op2a, op2b)))
4519 /* Handle the AND case, where we are reassociating:
4520 (inner1 AND inner2) AND (op2a code2 op2b)
4521 => (t AND inner2)
4522 If the partial result t is a constant, we win. Otherwise
4523 continue on to try reassociating with the other inner test. */
4524 if (is_and)
4526 if (integer_onep (t))
4527 return inner2;
4528 else if (integer_zerop (t))
4529 return boolean_false_node;
4532 /* Handle the OR case, where we are redistributing:
4533 (inner1 OR inner2) AND (op2a code2 op2b)
4534 => (t OR (inner2 AND (op2a code2 op2b))) */
4535 else if (integer_onep (t))
4536 return boolean_true_node;
4538 /* Save partial result for later. */
4539 partial = t;
4542 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4543 if (TREE_CODE (inner2) == SSA_NAME
4544 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4545 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4546 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4547 gimple_assign_rhs1 (s),
4548 gimple_assign_rhs2 (s),
4549 code2, op2a, op2b)))
4551 /* Handle the AND case, where we are reassociating:
4552 (inner1 AND inner2) AND (op2a code2 op2b)
4553 => (inner1 AND t) */
4554 if (is_and)
4556 if (integer_onep (t))
4557 return inner1;
4558 else if (integer_zerop (t))
4559 return boolean_false_node;
4560 /* If both are the same, we can apply the identity
4561 (x AND x) == x. */
4562 else if (partial && same_bool_result_p (t, partial))
4563 return t;
4566 /* Handle the OR case, where we are redistributing:
4567 (inner1 OR inner2) AND (op2a code2 op2b)
4568 => (t OR (inner1 AND (op2a code2 op2b)))
4569 => (t OR partial) */
4570 else
4572 if (integer_onep (t))
4573 return boolean_true_node;
4574 else if (partial)
4576 /* We already got a simplification for the other
4577 operand to the redistributed OR expression. The
4578 interesting case is when at least one is false.
4579 Or, if both are the same, we can apply the identity
4580 (x OR x) == x. */
4581 if (integer_zerop (partial))
4582 return t;
4583 else if (integer_zerop (t))
4584 return partial;
4585 else if (same_bool_result_p (t, partial))
4586 return t;
4591 return NULL_TREE;
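/* Illustrative sketch, added for exposition (hypothetical IL): with

     x_1 = a_2 < b_3;
     y_4 = a_2 < c_5;
     v_6 = x_1 & y_4;

   the reassociation above can answer "v_6 AND (a_2 >= b_3)" as false:
   the first partial result, (a_2 < b_3) AND (a_2 >= b_3), folds to
   boolean_false_node, which the AND case returns directly.  */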
4594 /* Try to simplify the AND of two comparisons defined by
4595 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4596 If this can be done without constructing an intermediate value,
4597 return the resulting tree; otherwise NULL_TREE is returned.
4598 This function is deliberately asymmetric as it recurses on SSA_DEFs
4599 in the first comparison but not the second. */
4601 static tree
4602 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4603 enum tree_code code2, tree op2a, tree op2b)
4605 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4607 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4608 if (operand_equal_p (op1a, op2a, 0)
4609 && operand_equal_p (op1b, op2b, 0))
4611 /* Result will be either NULL_TREE, or a combined comparison. */
4612 tree t = combine_comparisons (UNKNOWN_LOCATION,
4613 TRUTH_ANDIF_EXPR, code1, code2,
4614 truth_type, op1a, op1b);
4615 if (t)
4616 return t;
4619 /* Likewise the swapped case of the above. */
4620 if (operand_equal_p (op1a, op2b, 0)
4621 && operand_equal_p (op1b, op2a, 0))
4623 /* Result will be either NULL_TREE, or a combined comparison. */
4624 tree t = combine_comparisons (UNKNOWN_LOCATION,
4625 TRUTH_ANDIF_EXPR, code1,
4626 swap_tree_comparison (code2),
4627 truth_type, op1a, op1b);
4628 if (t)
4629 return t;
4632 /* If both comparisons are of the same value against constants, we might
4633 be able to merge them. */
4634 if (operand_equal_p (op1a, op2a, 0)
4635 && TREE_CODE (op1b) == INTEGER_CST
4636 && TREE_CODE (op2b) == INTEGER_CST)
4638 int cmp = tree_int_cst_compare (op1b, op2b);
4640 /* If we have (op1a == op1b), we should either be able to
4641 return that or FALSE, depending on whether the constant op1b
4642 also satisfies the other comparison against op2b. */
4643 if (code1 == EQ_EXPR)
4645 bool done = true;
4646 bool val;
4647 switch (code2)
4649 case EQ_EXPR: val = (cmp == 0); break;
4650 case NE_EXPR: val = (cmp != 0); break;
4651 case LT_EXPR: val = (cmp < 0); break;
4652 case GT_EXPR: val = (cmp > 0); break;
4653 case LE_EXPR: val = (cmp <= 0); break;
4654 case GE_EXPR: val = (cmp >= 0); break;
4655 default: done = false;
4657 if (done)
4659 if (val)
4660 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4661 else
4662 return boolean_false_node;
4665 /* Likewise if the second comparison is an == comparison. */
4666 else if (code2 == EQ_EXPR)
4668 bool done = true;
4669 bool val;
4670 switch (code1)
4672 case EQ_EXPR: val = (cmp == 0); break;
4673 case NE_EXPR: val = (cmp != 0); break;
4674 case LT_EXPR: val = (cmp > 0); break;
4675 case GT_EXPR: val = (cmp < 0); break;
4676 case LE_EXPR: val = (cmp >= 0); break;
4677 case GE_EXPR: val = (cmp <= 0); break;
4678 default: done = false;
4680 if (done)
4682 if (val)
4683 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4684 else
4685 return boolean_false_node;
4689 /* Same business with inequality tests. */
4690 else if (code1 == NE_EXPR)
4692 bool val;
4693 switch (code2)
4695 case EQ_EXPR: val = (cmp != 0); break;
4696 case NE_EXPR: val = (cmp == 0); break;
4697 case LT_EXPR: val = (cmp >= 0); break;
4698 case GT_EXPR: val = (cmp <= 0); break;
4699 case LE_EXPR: val = (cmp > 0); break;
4700 case GE_EXPR: val = (cmp < 0); break;
4701 default:
4702 val = false;
4704 if (val)
4705 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4707 else if (code2 == NE_EXPR)
4709 bool val;
4710 switch (code1)
4712 case EQ_EXPR: val = (cmp == 0); break;
4713 case NE_EXPR: val = (cmp != 0); break;
4714 case LT_EXPR: val = (cmp <= 0); break;
4715 case GT_EXPR: val = (cmp >= 0); break;
4716 case LE_EXPR: val = (cmp < 0); break;
4717 case GE_EXPR: val = (cmp > 0); break;
4718 default:
4719 val = false;
4721 if (val)
4722 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4725 /* Choose the more restrictive of two < or <= comparisons. */
4726 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4727 && (code2 == LT_EXPR || code2 == LE_EXPR))
4729 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4730 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4731 else
4732 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4735 /* Likewise choose the more restrictive of two > or >= comparisons. */
4736 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4737 && (code2 == GT_EXPR || code2 == GE_EXPR))
4739 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4740 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4741 else
4742 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4745 /* Check for singleton ranges. */
4746 else if (cmp == 0
4747 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4748 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4749 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4751 /* Check for disjoint ranges. */
4752 else if (cmp <= 0
4753 && (code1 == LT_EXPR || code1 == LE_EXPR)
4754 && (code2 == GT_EXPR || code2 == GE_EXPR))
4755 return boolean_false_node;
4756 else if (cmp >= 0
4757 && (code1 == GT_EXPR || code1 == GE_EXPR)
4758 && (code2 == LT_EXPR || code2 == LE_EXPR))
4759 return boolean_false_node;
4762 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4763 NAME's definition is a truth value. See if there are any simplifications
4764 that can be done against the NAME's definition. */
4765 if (TREE_CODE (op1a) == SSA_NAME
4766 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4767 && (integer_zerop (op1b) || integer_onep (op1b)))
4769 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4770 || (code1 == NE_EXPR && integer_onep (op1b)));
4771 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4772 switch (gimple_code (stmt))
4774 case GIMPLE_ASSIGN:
4775 /* Try to simplify by copy-propagating the definition. */
4776 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4778 case GIMPLE_PHI:
4779 /* If every argument to the PHI produces the same result when
4780 ANDed with the second comparison, we win.
4781 Do not do this unless the type is bool since we need a bool
4782 result here anyway. */
4783 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4785 tree result = NULL_TREE;
4786 unsigned i;
4787 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4789 tree arg = gimple_phi_arg_def (stmt, i);
4791 /* If this PHI has itself as an argument, ignore it.
4792 If all the other args produce the same result,
4793 we're still OK. */
4794 if (arg == gimple_phi_result (stmt))
4795 continue;
4796 else if (TREE_CODE (arg) == INTEGER_CST)
4798 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4800 if (!result)
4801 result = boolean_false_node;
4802 else if (!integer_zerop (result))
4803 return NULL_TREE;
4805 else if (!result)
4806 result = fold_build2 (code2, boolean_type_node,
4807 op2a, op2b);
4808 else if (!same_bool_comparison_p (result,
4809 code2, op2a, op2b))
4810 return NULL_TREE;
4812 else if (TREE_CODE (arg) == SSA_NAME
4813 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4815 tree temp;
4816 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4817 /* In simple cases we can look through PHI nodes,
4818 but we have to be careful with loops.
4819 See PR49073. */
4820 if (! dom_info_available_p (CDI_DOMINATORS)
4821 || gimple_bb (def_stmt) == gimple_bb (stmt)
4822 || dominated_by_p (CDI_DOMINATORS,
4823 gimple_bb (def_stmt),
4824 gimple_bb (stmt)))
4825 return NULL_TREE;
4826 temp = and_var_with_comparison (arg, invert, code2,
4827 op2a, op2b);
4828 if (!temp)
4829 return NULL_TREE;
4830 else if (!result)
4831 result = temp;
4832 else if (!same_bool_result_p (result, temp))
4833 return NULL_TREE;
4835 else
4836 return NULL_TREE;
4838 return result;
4841 default:
4842 break;
4845 return NULL_TREE;
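/* Illustrative sketch, added for exposition: the constant-merging
   cases above give, for a common operand x:

     (x > 5) AND (x > 3)   => x > 5   (more restrictive bound)
     (x <= 4) AND (x >= 4) => x == 4  (singleton range)
     (x < 3) AND (x > 5)   => false   (disjoint ranges)  */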
4848 /* Try to simplify the AND of two comparisons, specified by
4849 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4850 If this can be simplified to a single expression (without requiring
4851 introducing more SSA variables to hold intermediate values),
4852 return the resulting tree. Otherwise return NULL_TREE.
4853 If the result expression is non-null, it has boolean type. */
4855 tree
4856 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4857 enum tree_code code2, tree op2a, tree op2b)
4859 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4860 if (t)
4861 return t;
4862 else
4863 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
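/* Note, added for exposition: because and_comparisons_1 only recurses
   on SSA definitions feeding its first comparison, the wrapper above
   tries both argument orders, so callers combining two conditions
   need not care which one carries the foldable definition.  */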
4866 /* Helper function for or_comparisons_1: try to simplify the OR of the
4867 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4868 If INVERT is true, invert the value of VAR before doing the OR.
4869 Return NULL_TREE if we can't simplify this to a single expression. */
4871 static tree
4872 or_var_with_comparison (tree var, bool invert,
4873 enum tree_code code2, tree op2a, tree op2b)
4875 tree t;
4876 gimple *stmt = SSA_NAME_DEF_STMT (var);
4878 /* We can only deal with variables whose definitions are assignments. */
4879 if (!is_gimple_assign (stmt))
4880 return NULL_TREE;
4882 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4883 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4884 Then we only have to consider the simpler non-inverted cases. */
4885 if (invert)
4886 t = and_var_with_comparison_1 (stmt,
4887 invert_tree_comparison (code2, false),
4888 op2a, op2b);
4889 else
4890 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4891 return canonicalize_bool (t, invert);
4894 /* Try to simplify the OR of the ssa variable defined by the assignment
4895 STMT with the comparison specified by (OP2A CODE2 OP2B).
4896 Return NULL_TREE if we can't simplify this to a single expression. */
4898 static tree
4899 or_var_with_comparison_1 (gimple *stmt,
4900 enum tree_code code2, tree op2a, tree op2b)
4902 tree var = gimple_assign_lhs (stmt);
4903 tree true_test_var = NULL_TREE;
4904 tree false_test_var = NULL_TREE;
4905 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4907 /* Check for identities like (var OR (var != 0)) => true. */
4908 if (TREE_CODE (op2a) == SSA_NAME
4909 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4911 if ((code2 == NE_EXPR && integer_zerop (op2b))
4912 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4914 true_test_var = op2a;
4915 if (var == true_test_var)
4916 return var;
4918 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4919 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4921 false_test_var = op2a;
4922 if (var == false_test_var)
4923 return boolean_true_node;
4927 /* If the definition is a comparison, recurse on it. */
4928 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4930 tree t = or_comparisons_1 (innercode,
4931 gimple_assign_rhs1 (stmt),
4932 gimple_assign_rhs2 (stmt),
4933 code2,
4934 op2a,
4935 op2b);
4936 if (t)
4937 return t;
4940 /* If the definition is an AND or OR expression, we may be able to
4941 simplify by reassociating. */
4942 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4943 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4945 tree inner1 = gimple_assign_rhs1 (stmt);
4946 tree inner2 = gimple_assign_rhs2 (stmt);
4947 gimple *s;
4948 tree t;
4949 tree partial = NULL_TREE;
4950 bool is_or = (innercode == BIT_IOR_EXPR);
4952 /* Check for boolean identities that don't require recursive examination
4953 of inner1/inner2:
4954 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4955 inner1 OR (inner1 AND inner2) => inner1
4956 !inner1 OR (inner1 OR inner2) => true
4957 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 */
4959 if (inner1 == true_test_var)
4960 return (is_or ? var : inner1);
4961 else if (inner2 == true_test_var)
4962 return (is_or ? var : inner2);
4963 else if (inner1 == false_test_var)
4964 return (is_or
4965 ? boolean_true_node
4966 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4967 else if (inner2 == false_test_var)
4968 return (is_or
4969 ? boolean_true_node
4970 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4972 /* Next, redistribute/reassociate the OR across the inner tests.
4973 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4974 if (TREE_CODE (inner1) == SSA_NAME
4975 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4976 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4977 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4978 gimple_assign_rhs1 (s),
4979 gimple_assign_rhs2 (s),
4980 code2, op2a, op2b)))
4982 /* Handle the OR case, where we are reassociating:
4983 (inner1 OR inner2) OR (op2a code2 op2b)
4984 => (t OR inner2)
4985 If the partial result t is a constant, we win. Otherwise
4986 continue on to try reassociating with the other inner test. */
4987 if (is_or)
4989 if (integer_onep (t))
4990 return boolean_true_node;
4991 else if (integer_zerop (t))
4992 return inner2;
4995 /* Handle the AND case, where we are redistributing:
4996 (inner1 AND inner2) OR (op2a code2 op2b)
4997 => (t AND (inner2 OR (op2a code op2b))) */
4998 else if (integer_zerop (t))
4999 return boolean_false_node;
5001 /* Save partial result for later. */
5002 partial = t;
5005 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5006 if (TREE_CODE (inner2) == SSA_NAME
5007 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5008 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5009 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5010 gimple_assign_rhs1 (s),
5011 gimple_assign_rhs2 (s),
5012 code2, op2a, op2b)))
5014 /* Handle the OR case, where we are reassociating:
5015 (inner1 OR inner2) OR (op2a code2 op2b)
5016 => (inner1 OR t)
5017 => (t OR partial) */
5018 if (is_or)
5020 if (integer_zerop (t))
5021 return inner1;
5022 else if (integer_onep (t))
5023 return boolean_true_node;
5024 /* If both are the same, we can apply the identity
5025 (x OR x) == x. */
5026 else if (partial && same_bool_result_p (t, partial))
5027 return t;
5030 /* Handle the AND case, where we are redistributing:
5031 (inner1 AND inner2) OR (op2a code2 op2b)
5032 => (t AND (inner1 OR (op2a code2 op2b)))
5033 => (t AND partial) */
5034 else
5036 if (integer_zerop (t))
5037 return boolean_false_node;
5038 else if (partial)
5040 /* We already got a simplification for the other
5041 operand to the redistributed AND expression. The
5042 interesting case is when at least one is true.
5043 Or, if both are the same, we can apply the identity
5044 (x AND x) == x. */
5045 if (integer_onep (partial))
5046 return t;
5047 else if (integer_onep (t))
5048 return partial;
5049 else if (same_bool_result_p (t, partial))
5050 return t;
5055 return NULL_TREE;
5058 /* Try to simplify the OR of two comparisons defined by
5059 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5060 If this can be done without constructing an intermediate value,
5061 return the resulting tree; otherwise NULL_TREE is returned.
5062 This function is deliberately asymmetric as it recurses on SSA_DEFs
5063 in the first comparison but not the second. */
5065 static tree
5066 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5067 enum tree_code code2, tree op2a, tree op2b)
5069 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5071 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5072 if (operand_equal_p (op1a, op2a, 0)
5073 && operand_equal_p (op1b, op2b, 0))
5075 /* Result will be either NULL_TREE, or a combined comparison. */
5076 tree t = combine_comparisons (UNKNOWN_LOCATION,
5077 TRUTH_ORIF_EXPR, code1, code2,
5078 truth_type, op1a, op1b);
5079 if (t)
5080 return t;
5083 /* Likewise the swapped case of the above. */
5084 if (operand_equal_p (op1a, op2b, 0)
5085 && operand_equal_p (op1b, op2a, 0))
5087 /* Result will be either NULL_TREE, or a combined comparison. */
5088 tree t = combine_comparisons (UNKNOWN_LOCATION,
5089 TRUTH_ORIF_EXPR, code1,
5090 swap_tree_comparison (code2),
5091 truth_type, op1a, op1b);
5092 if (t)
5093 return t;
5096 /* If both comparisons are of the same value against constants, we might
5097 be able to merge them. */
5098 if (operand_equal_p (op1a, op2a, 0)
5099 && TREE_CODE (op1b) == INTEGER_CST
5100 && TREE_CODE (op2b) == INTEGER_CST)
5102 int cmp = tree_int_cst_compare (op1b, op2b);
5104 /* If we have (op1a != op1b), we should either be able to
5105 return that or TRUE, depending on whether the constant op1b
5106 also satisfies the other comparison against op2b. */
5107 if (code1 == NE_EXPR)
5109 bool done = true;
5110 bool val;
5111 switch (code2)
5113 case EQ_EXPR: val = (cmp == 0); break;
5114 case NE_EXPR: val = (cmp != 0); break;
5115 case LT_EXPR: val = (cmp < 0); break;
5116 case GT_EXPR: val = (cmp > 0); break;
5117 case LE_EXPR: val = (cmp <= 0); break;
5118 case GE_EXPR: val = (cmp >= 0); break;
5119 default: done = false;
5121 if (done)
5123 if (val)
5124 return boolean_true_node;
5125 else
5126 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5129 /* Likewise if the second comparison is a != comparison. */
5130 else if (code2 == NE_EXPR)
5132 bool done = true;
5133 bool val;
5134 switch (code1)
5136 case EQ_EXPR: val = (cmp == 0); break;
5137 case NE_EXPR: val = (cmp != 0); break;
5138 case LT_EXPR: val = (cmp > 0); break;
5139 case GT_EXPR: val = (cmp < 0); break;
5140 case LE_EXPR: val = (cmp >= 0); break;
5141 case GE_EXPR: val = (cmp <= 0); break;
5142 default: done = false;
5144 if (done)
5146 if (val)
5147 return boolean_true_node;
5148 else
5149 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5153 /* See if an equality test is redundant with the other comparison. */
5154 else if (code1 == EQ_EXPR)
5156 bool val;
5157 switch (code2)
5159 case EQ_EXPR: val = (cmp == 0); break;
5160 case NE_EXPR: val = (cmp != 0); break;
5161 case LT_EXPR: val = (cmp < 0); break;
5162 case GT_EXPR: val = (cmp > 0); break;
5163 case LE_EXPR: val = (cmp <= 0); break;
5164 case GE_EXPR: val = (cmp >= 0); break;
5165 default:
5166 val = false;
5168 if (val)
5169 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5171 else if (code2 == EQ_EXPR)
5173 bool val;
5174 switch (code1)
5176 case EQ_EXPR: val = (cmp == 0); break;
5177 case NE_EXPR: val = (cmp != 0); break;
5178 case LT_EXPR: val = (cmp > 0); break;
5179 case GT_EXPR: val = (cmp < 0); break;
5180 case LE_EXPR: val = (cmp >= 0); break;
5181 case GE_EXPR: val = (cmp <= 0); break;
5182 default:
5183 val = false;
5185 if (val)
5186 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5189 /* Choose the less restrictive of two < or <= comparisons. */
5190 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5191 && (code2 == LT_EXPR || code2 == LE_EXPR))
5193 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5194 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5195 else
5196 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5199 /* Likewise choose the less restrictive of two > or >= comparisons. */
5200 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5201 && (code2 == GT_EXPR || code2 == GE_EXPR))
5203 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5204 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5205 else
5206 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5209 /* Check for singleton ranges. */
5210 else if (cmp == 0
5211 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5212 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5213 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5215 /* Check for less/greater pairs that don't restrict the range at all. */
5216 else if (cmp >= 0
5217 && (code1 == LT_EXPR || code1 == LE_EXPR)
5218 && (code2 == GT_EXPR || code2 == GE_EXPR))
5219 return boolean_true_node;
5220 else if (cmp <= 0
5221 && (code1 == GT_EXPR || code1 == GE_EXPR)
5222 && (code2 == LT_EXPR || code2 == LE_EXPR))
5223 return boolean_true_node;
5226 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5227 NAME's definition is a truth value. See if there are any simplifications
5228 that can be done against the NAME's definition. */
5229 if (TREE_CODE (op1a) == SSA_NAME
5230 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5231 && (integer_zerop (op1b) || integer_onep (op1b)))
5233 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5234 || (code1 == NE_EXPR && integer_onep (op1b)));
5235 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5236 switch (gimple_code (stmt))
5238 case GIMPLE_ASSIGN:
5239 /* Try to simplify by copy-propagating the definition. */
5240 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5242 case GIMPLE_PHI:
5243 /* If every argument to the PHI produces the same result when
5244 ORed with the second comparison, we win.
5245 Do not do this unless the type is bool since we need a bool
5246 result here anyway. */
5247 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5249 tree result = NULL_TREE;
5250 unsigned i;
5251 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5253 tree arg = gimple_phi_arg_def (stmt, i);
5255 /* If this PHI has itself as an argument, ignore it.
5256 If all the other args produce the same result,
5257 we're still OK. */
5258 if (arg == gimple_phi_result (stmt))
5259 continue;
5260 else if (TREE_CODE (arg) == INTEGER_CST)
5262 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5264 if (!result)
5265 result = boolean_true_node;
5266 else if (!integer_onep (result))
5267 return NULL_TREE;
5269 else if (!result)
5270 result = fold_build2 (code2, boolean_type_node,
5271 op2a, op2b);
5272 else if (!same_bool_comparison_p (result,
5273 code2, op2a, op2b))
5274 return NULL_TREE;
5276 else if (TREE_CODE (arg) == SSA_NAME
5277 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5279 tree temp;
5280 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5281 /* In simple cases we can look through PHI nodes,
5282 but we have to be careful with loops.
5283 See PR49073. */
5284 if (! dom_info_available_p (CDI_DOMINATORS)
5285 || gimple_bb (def_stmt) == gimple_bb (stmt)
5286 || dominated_by_p (CDI_DOMINATORS,
5287 gimple_bb (def_stmt),
5288 gimple_bb (stmt)))
5289 return NULL_TREE;
5290 temp = or_var_with_comparison (arg, invert, code2,
5291 op2a, op2b);
5292 if (!temp)
5293 return NULL_TREE;
5294 else if (!result)
5295 result = temp;
5296 else if (!same_bool_result_p (result, temp))
5297 return NULL_TREE;
5299 else
5300 return NULL_TREE;
5302 return result;
5305 default:
5306 break;
5309 return NULL_TREE;
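/* Illustrative sketch, added for exposition: the OR counterparts of
   the constant-merging cases give, for a common operand x:

     (x < 3) OR (x < 5)   => x < 5   (less restrictive bound)
     (x < 4) OR (x > 4)   => x != 4  (singleton gap)
     (x <= 5) OR (x >= 3) => true    (no restriction at all)  */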
5312 /* Try to simplify the OR of two comparisons, specified by
5313 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5314 If this can be simplified to a single expression (without requiring
5315 introducing more SSA variables to hold intermediate values),
5316 return the resulting tree. Otherwise return NULL_TREE.
5317 If the result expression is non-null, it has boolean type. */
5319 tree
5320 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5321 enum tree_code code2, tree op2a, tree op2b)
5323 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5324 if (t)
5325 return t;
5326 else
5327 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5331 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5333 Either NULL_TREE, a simplified but non-constant value, or a
5334 constant is returned.
5336 ??? This should go into a gimple-fold-inline.h file to be eventually
5337 privatized with the single valueize function used in the various TUs
5338 to avoid the indirect function call overhead. */
5340 tree
5341 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5342 tree (*gvalueize) (tree))
5344 code_helper rcode;
5345 tree ops[3] = {};
5346 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5347 edges if there are intermediate VARYING defs. For this reason
5348 do not follow SSA edges here even though SCCVN can technically
5349 deal just fine with that. */
5350 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5352 tree res = NULL_TREE;
5353 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5354 res = ops[0];
5355 else if (mprts_hook)
5356 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5357 if (res)
5359 if (dump_file && dump_flags & TDF_DETAILS)
5361 fprintf (dump_file, "Match-and-simplified ");
5362 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5363 fprintf (dump_file, " to ");
5364 print_generic_expr (dump_file, res, 0);
5365 fprintf (dump_file, "\n");
5367 return res;
5371 location_t loc = gimple_location (stmt);
5372 switch (gimple_code (stmt))
5374 case GIMPLE_ASSIGN:
5376 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5378 switch (get_gimple_rhs_class (subcode))
5380 case GIMPLE_SINGLE_RHS:
5382 tree rhs = gimple_assign_rhs1 (stmt);
5383 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5385 if (TREE_CODE (rhs) == SSA_NAME)
5387 /* If the RHS is an SSA_NAME, return its known constant value,
5388 if any. */
5389 return (*valueize) (rhs);
5391 /* Handle propagating invariant addresses into address
5392 operations. */
5393 else if (TREE_CODE (rhs) == ADDR_EXPR
5394 && !is_gimple_min_invariant (rhs))
5396 HOST_WIDE_INT offset = 0;
5397 tree base;
5398 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5399 &offset,
5400 valueize);
5401 if (base
5402 && (CONSTANT_CLASS_P (base)
5403 || decl_address_invariant_p (base)))
5404 return build_invariant_address (TREE_TYPE (rhs),
5405 base, offset);
5407 else if (TREE_CODE (rhs) == CONSTRUCTOR
5408 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5409 && (CONSTRUCTOR_NELTS (rhs)
5410 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5412 unsigned i;
5413 tree val, *vec;
5415 vec = XALLOCAVEC (tree,
5416 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5417 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5419 val = (*valueize) (val);
5420 if (TREE_CODE (val) == INTEGER_CST
5421 || TREE_CODE (val) == REAL_CST
5422 || TREE_CODE (val) == FIXED_CST)
5423 vec[i] = val;
5424 else
5425 return NULL_TREE;
5428 return build_vector (TREE_TYPE (rhs), vec);
5430 if (subcode == OBJ_TYPE_REF)
5432 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5433 /* If callee is constant, we can fold away the wrapper. */
5434 if (is_gimple_min_invariant (val))
5435 return val;
5438 if (kind == tcc_reference)
5440 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5441 || TREE_CODE (rhs) == REALPART_EXPR
5442 || TREE_CODE (rhs) == IMAGPART_EXPR)
5443 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5445 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5446 return fold_unary_loc (EXPR_LOCATION (rhs),
5447 TREE_CODE (rhs),
5448 TREE_TYPE (rhs), val);
5450 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5451 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5453 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5454 return fold_ternary_loc (EXPR_LOCATION (rhs),
5455 TREE_CODE (rhs),
5456 TREE_TYPE (rhs), val,
5457 TREE_OPERAND (rhs, 1),
5458 TREE_OPERAND (rhs, 2));
5460 else if (TREE_CODE (rhs) == MEM_REF
5461 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5463 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5464 if (TREE_CODE (val) == ADDR_EXPR
5465 && is_gimple_min_invariant (val))
5467 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5468 unshare_expr (val),
5469 TREE_OPERAND (rhs, 1));
5470 if (tem)
5471 rhs = tem;
5474 return fold_const_aggregate_ref_1 (rhs, valueize);
5476 else if (kind == tcc_declaration)
5477 return get_symbol_constant_value (rhs);
5478 return rhs;
5481 case GIMPLE_UNARY_RHS:
5482 return NULL_TREE;
5484 case GIMPLE_BINARY_RHS:
5485 /* Translate &x + CST into an invariant form suitable for
5486 further propagation. */
5487 if (subcode == POINTER_PLUS_EXPR)
5489 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5490 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5491 if (TREE_CODE (op0) == ADDR_EXPR
5492 && TREE_CODE (op1) == INTEGER_CST)
5494 tree off = fold_convert (ptr_type_node, op1);
5495 return build_fold_addr_expr_loc
5496 (loc,
5497 fold_build2 (MEM_REF,
5498 TREE_TYPE (TREE_TYPE (op0)),
5499 unshare_expr (op0), off));
5502 /* Canonicalize bool != 0 and bool == 0 appearing after
5503 valueization. While gimple_simplify handles this,
5504 it can get confused by the ~X == 1 -> X == 0 transform,
5505 which we can't reduce to an SSA name or a constant
5506 (and we have no way to tell gimple_simplify not to
5507 consider those transforms in the first place). */
5508 else if (subcode == EQ_EXPR
5509 || subcode == NE_EXPR)
5511 tree lhs = gimple_assign_lhs (stmt);
5512 tree op0 = gimple_assign_rhs1 (stmt);
5513 if (useless_type_conversion_p (TREE_TYPE (lhs),
5514 TREE_TYPE (op0)))
5516 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5517 op0 = (*valueize) (op0);
5518 if (TREE_CODE (op0) == INTEGER_CST)
5519 std::swap (op0, op1);
5520 if (TREE_CODE (op1) == INTEGER_CST
5521 && ((subcode == NE_EXPR && integer_zerop (op1))
5522 || (subcode == EQ_EXPR && integer_onep (op1))))
5523 return op0;
5526 return NULL_TREE;
5528 case GIMPLE_TERNARY_RHS:
5530 /* Handle ternary operators that can appear in GIMPLE form. */
5531 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5532 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5533 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5534 return fold_ternary_loc (loc, subcode,
5535 gimple_expr_type (stmt), op0, op1, op2);
5538 default:
5539 gcc_unreachable ();
5543 case GIMPLE_CALL:
5545 tree fn;
5546 gcall *call_stmt = as_a <gcall *> (stmt);
5548 if (gimple_call_internal_p (stmt))
5550 enum tree_code subcode = ERROR_MARK;
5551 switch (gimple_call_internal_fn (stmt))
5553 case IFN_UBSAN_CHECK_ADD:
5554 subcode = PLUS_EXPR;
5555 break;
5556 case IFN_UBSAN_CHECK_SUB:
5557 subcode = MINUS_EXPR;
5558 break;
5559 case IFN_UBSAN_CHECK_MUL:
5560 subcode = MULT_EXPR;
5561 break;
5562 case IFN_BUILTIN_EXPECT:
5564 tree arg0 = gimple_call_arg (stmt, 0);
5565 tree op0 = (*valueize) (arg0);
5566 if (TREE_CODE (op0) == INTEGER_CST)
5567 return op0;
5568 return NULL_TREE;
5570 default:
5571 return NULL_TREE;
5573 tree arg0 = gimple_call_arg (stmt, 0);
5574 tree arg1 = gimple_call_arg (stmt, 1);
5575 tree op0 = (*valueize) (arg0);
5576 tree op1 = (*valueize) (arg1);
5578 if (TREE_CODE (op0) != INTEGER_CST
5579 || TREE_CODE (op1) != INTEGER_CST)
5581 switch (subcode)
5583 case MULT_EXPR:
5584 /* x * 0 = 0 * x = 0 without overflow. */
5585 if (integer_zerop (op0) || integer_zerop (op1))
5586 return build_zero_cst (TREE_TYPE (arg0));
5587 break;
5588 case MINUS_EXPR:
5589 /* y - y = 0 without overflow. */
5590 if (operand_equal_p (op0, op1, 0))
5591 return build_zero_cst (TREE_TYPE (arg0));
5592 break;
5593 default:
5594 break;
5597 tree res
5598 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5599 if (res
5600 && TREE_CODE (res) == INTEGER_CST
5601 && !TREE_OVERFLOW (res))
5602 return res;
5603 return NULL_TREE;
5606 fn = (*valueize) (gimple_call_fn (stmt));
5607 if (TREE_CODE (fn) == ADDR_EXPR
5608 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5609 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5610 && gimple_builtin_call_types_compatible_p (stmt,
5611 TREE_OPERAND (fn, 0)))
5613 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5614 tree retval;
5615 unsigned i;
5616 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5617 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5618 retval = fold_builtin_call_array (loc,
5619 gimple_call_return_type (call_stmt),
5620 fn, gimple_call_num_args (stmt), args);
5621 if (retval)
5623 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5624 STRIP_NOPS (retval);
5625 retval = fold_convert (gimple_call_return_type (call_stmt),
5626 retval);
5628 return retval;
5630 return NULL_TREE;
5633 default:
5634 return NULL_TREE;
5638 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5639 Returns NULL_TREE if folding to a constant is not possible, otherwise
5640 returns a constant according to is_gimple_min_invariant. */
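/* For example (a sketch): if VALUEIZE maps _1 to 2 and _2 to 3, the
   statement _3 = _1 + _2 folds to the INTEGER_CST 5; a statement that
   does not fold to an invariant yields NULL_TREE.  */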
5642 tree
5643 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5645 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5646 if (res && is_gimple_min_invariant (res))
5647 return res;
5648 return NULL_TREE;
5652 /* The following set of functions is supposed to fold references using
5653 their constant initializers. */
5655 /* See if we can find the constructor defining the value of BASE.
5656 When we reach the constructor through a constant offset (such as
5657 when BASE is array[40] and we do know the constructor of array), then
5658 BIT_OFFSET is adjusted accordingly.
5660 As a special case, return error_mark_node when constructor
5661 is not explicitly available, but it is known to be zero
5662 such as 'static const int a;'. */
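/* For example (a sketch, assuming 8-bit units): for BASE equal to
   MEM_REF[&arr, 8] where arr has a known constant constructor, this
   returns that constructor (via ctor_for_folding) and increases
   *BIT_OFFSET by 64.  */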
5663 static tree
5664 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5665 tree (*valueize)(tree))
5667 HOST_WIDE_INT bit_offset2, size, max_size;
5668 bool reverse;
5670 if (TREE_CODE (base) == MEM_REF)
5672 if (!integer_zerop (TREE_OPERAND (base, 1)))
5674 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5675 return NULL_TREE;
5676 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5677 * BITS_PER_UNIT);
5680 if (valueize
5681 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5682 base = valueize (TREE_OPERAND (base, 0));
5683 if (!base || TREE_CODE (base) != ADDR_EXPR)
5684 return NULL_TREE;
5685 base = TREE_OPERAND (base, 0);
5687 else if (valueize
5688 && TREE_CODE (base) == SSA_NAME)
5689 base = valueize (base);
5691 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5692 DECL_INITIAL. If BASE is a nested reference into another
5693 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5694 the inner reference. */
5695 switch (TREE_CODE (base))
5697 case VAR_DECL:
5698 case CONST_DECL:
5700 tree init = ctor_for_folding (base);
5702 /* Our semantics are the exact opposite of ctor_for_folding:
5703 NULL means unknown, while error_mark_node means 0. */
5704 if (init == error_mark_node)
5705 return NULL_TREE;
5706 if (!init)
5707 return error_mark_node;
5708 return init;
5711 case VIEW_CONVERT_EXPR:
5712 return get_base_constructor (TREE_OPERAND (base, 0),
5713 bit_offset, valueize);
5715 case ARRAY_REF:
5716 case COMPONENT_REF:
5717 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5718 &reverse);
5719 if (max_size == -1 || size != max_size)
5720 return NULL_TREE;
5721 *bit_offset += bit_offset2;
5722 return get_base_constructor (base, bit_offset, valueize);
5724 case CONSTRUCTOR:
5725 return base;
5727 default:
5728 if (CONSTANT_CLASS_P (base))
5729 return base;
5731 return NULL_TREE;
5735 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5736 SIZE to the memory at bit OFFSET. */
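/* For example (a sketch, assuming 32-bit int and 8-bit units): given
   the CTOR of static const int a[4] = { 1, 2, 3, 4 }, a read with
   OFFSET 64 and SIZE 32 lands on access index 2 and folds to the
   INTEGER_CST 3.  */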
5738 static tree
5739 fold_array_ctor_reference (tree type, tree ctor,
5740 unsigned HOST_WIDE_INT offset,
5741 unsigned HOST_WIDE_INT size,
5742 tree from_decl)
5744 offset_int low_bound;
5745 offset_int elt_size;
5746 offset_int access_index;
5747 tree domain_type = NULL_TREE;
5748 HOST_WIDE_INT inner_offset;
5750 /* Compute low bound and elt size. */
5751 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5752 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5753 if (domain_type && TYPE_MIN_VALUE (domain_type))
5755 /* Static constructors for variably sized objects make no sense. */
5756 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
5757 return NULL_TREE;
5758 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5760 else
5761 low_bound = 0;
5762 /* Static constructors for variably sized objects make no sense. */
5763 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
5764 return NULL_TREE;
5765 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5767 /* We can handle only constant-sized accesses that are known not to
5768 be larger than the size of an array element. */
5769 if (!TYPE_SIZE_UNIT (type)
5770 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5771 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
5772 || elt_size == 0)
5773 return NULL_TREE;
5775 /* Compute the array index we look for. */
5776 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5777 elt_size);
5778 access_index += low_bound;
5780 /* And offset within the access. */
5781 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5783 /* See if the array field is large enough to span the whole access. We do not
5784 care to fold accesses spanning multiple array indexes. */
5785 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5786 return NULL_TREE;
5787 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5788 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
5790 /* When memory is not explicitly mentioned in the constructor,
5791 it is 0 (or out of range). */
5792 return build_zero_cst (type);
5795 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5796 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
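/* For example (a sketch, assuming 32-bit int): given the CTOR of
   static const struct { int a, b; } s = { 1, 2 }, a read with
   OFFSET 32 and SIZE 32 overlaps field b exactly and folds to the
   INTEGER_CST 2.  */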
5798 static tree
5799 fold_nonarray_ctor_reference (tree type, tree ctor,
5800 unsigned HOST_WIDE_INT offset,
5801 unsigned HOST_WIDE_INT size,
5802 tree from_decl)
5804 unsigned HOST_WIDE_INT cnt;
5805 tree cfield, cval;
5807 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5808 cval)
5810 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5811 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5812 tree field_size = DECL_SIZE (cfield);
5813 offset_int bitoffset;
5814 offset_int bitoffset_end, access_end;
5816 /* Variable-sized objects in static constructors make no sense,
5817 but field_size can be NULL for flexible array members. */
5818 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5819 && TREE_CODE (byte_offset) == INTEGER_CST
5820 && (field_size != NULL_TREE
5821 ? TREE_CODE (field_size) == INTEGER_CST
5822 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5824 /* Compute bit offset of the field. */
5825 bitoffset = (wi::to_offset (field_offset)
5826 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
5827 /* Compute bit offset where the field ends. */
5828 if (field_size != NULL_TREE)
5829 bitoffset_end = bitoffset + wi::to_offset (field_size);
5830 else
5831 bitoffset_end = 0;
5833 access_end = offset_int (offset) + size;
5835 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5836 [BITOFFSET, BITOFFSET_END)? */
5837 if (wi::cmps (access_end, bitoffset) > 0
5838 && (field_size == NULL_TREE
5839 || wi::lts_p (offset, bitoffset_end)))
5841 offset_int inner_offset = offset_int (offset) - bitoffset;
5842 /* We do have overlap. Now see if the field is large enough to
5843 cover the access. Give up for accesses spanning multiple
5844 fields. */
5845 if (wi::cmps (access_end, bitoffset_end) > 0)
5846 return NULL_TREE;
5847 if (offset < bitoffset)
5848 return NULL_TREE;
5849 return fold_ctor_reference (type, cval,
5850 inner_offset.to_uhwi (), size,
5851 from_decl);
5854 /* When memory is not explicitly mentioned in the constructor, it is 0. */
5855 return build_zero_cst (type);
5858 /* CTOR is a value initializing memory. Fold a reference of type TYPE and
5859 size SIZE to the memory at bit OFFSET. */
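/* For example (a sketch): an offset-zero read matching the type of
   CTOR returns the canonicalized CTOR itself, while a same-size read
   in a different type may go through native_encode_expr and
   native_interpret_expr, e.g. reading a float from a 32-bit integer
   constant.  */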
5861 tree
5862 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5863 unsigned HOST_WIDE_INT size, tree from_decl)
5865 tree ret;
5867 /* We found the field with an exact match. */
5868 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5869 && !offset)
5870 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5872 /* We are at the end of the walk; see if we can view-convert the
5873 result. */
5874 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5875 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5876 && !compare_tree_int (TYPE_SIZE (type), size)
5877 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5879 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5880 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5881 if (ret)
5882 STRIP_USELESS_TYPE_CONVERSION (ret);
5883 return ret;
5885 /* For constants and byte-aligned/sized reads try to go through
5886 native_encode/interpret. */
5887 if (CONSTANT_CLASS_P (ctor)
5888 && BITS_PER_UNIT == 8
5889 && offset % BITS_PER_UNIT == 0
5890 && size % BITS_PER_UNIT == 0
5891 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5893 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5894 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5895 offset / BITS_PER_UNIT);
5896 if (len > 0)
5897 return native_interpret_expr (type, buf, len);
5899 if (TREE_CODE (ctor) == CONSTRUCTOR)
5902 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5903 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5904 return fold_array_ctor_reference (type, ctor, offset, size,
5905 from_decl);
5906 else
5907 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5908 from_decl);
5911 return NULL_TREE;
5914 /* Return the tree representing the element referenced by T if T is an
5915 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
5916 names using VALUEIZE. Return NULL_TREE otherwise. */
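/* For example (a sketch): with static const int a[2] = { 1, 2 } and
   VALUEIZE mapping i_1 to 1, the reference a[i_1] folds to the
   INTEGER_CST 2.  */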
5918 tree
5919 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5921 tree ctor, idx, base;
5922 HOST_WIDE_INT offset, size, max_size;
5923 tree tem;
5924 bool reverse;
5926 if (TREE_THIS_VOLATILE (t))
5927 return NULL_TREE;
5929 if (DECL_P (t))
5930 return get_symbol_constant_value (t);
5932 tem = fold_read_from_constant_string (t);
5933 if (tem)
5934 return tem;
5936 switch (TREE_CODE (t))
5938 case ARRAY_REF:
5939 case ARRAY_RANGE_REF:
5940 /* Constant indexes are handled well by get_base_constructor.
5941 We only special-case variable offsets here.
5942 FIXME: This code can't handle nested references with variable indexes
5943 (they will be handled only by iteration of ccp). Perhaps we can bring
5944 get_ref_base_and_extent here and make it use a valueize callback. */
5945 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5946 && valueize
5947 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5948 && TREE_CODE (idx) == INTEGER_CST)
5950 tree low_bound, unit_size;
5952 /* If the resulting bit-offset is constant, track it. */
5953 if ((low_bound = array_ref_low_bound (t),
5954 TREE_CODE (low_bound) == INTEGER_CST)
5955 && (unit_size = array_ref_element_size (t),
5956 tree_fits_uhwi_p (unit_size)))
5958 offset_int woffset
5959 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5960 TYPE_PRECISION (TREE_TYPE (idx)));
5962 if (wi::fits_shwi_p (woffset))
5964 offset = woffset.to_shwi ();
5965 /* TODO: This code seems wrong: it should multiply first and then
5966 check whether the result fits. */
5967 offset *= tree_to_uhwi (unit_size);
5968 offset *= BITS_PER_UNIT;
5970 base = TREE_OPERAND (t, 0);
5971 ctor = get_base_constructor (base, &offset, valueize);
5972 /* Empty constructor. Always fold to 0. */
5973 if (ctor == error_mark_node)
5974 return build_zero_cst (TREE_TYPE (t));
5975 /* Out-of-bounds array access. The value is undefined,
5976 but don't fold. */
5977 if (offset < 0)
5978 return NULL_TREE;
5979 /* We cannot determine the ctor. */
5980 if (!ctor)
5981 return NULL_TREE;
5982 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5983 tree_to_uhwi (unit_size)
5984 * BITS_PER_UNIT,
5985 base);
5989 /* Fallthru. */
5991 case COMPONENT_REF:
5992 case BIT_FIELD_REF:
5993 case TARGET_MEM_REF:
5994 case MEM_REF:
5995 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
5996 ctor = get_base_constructor (base, &offset, valueize);
5998 /* Empty constructor. Always fold to 0. */
5999 if (ctor == error_mark_node)
6000 return build_zero_cst (TREE_TYPE (t));
6001 /* We do not know the precise address. */
6002 if (max_size == -1 || max_size != size)
6003 return NULL_TREE;
6004 /* We cannot determine the ctor. */
6005 if (!ctor)
6006 return NULL_TREE;
6008 /* Out-of-bounds array access. The value is undefined, but don't fold. */
6009 if (offset < 0)
6010 return NULL_TREE;
6012 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6013 base);
6015 case REALPART_EXPR:
6016 case IMAGPART_EXPR:
6018 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6019 if (c && TREE_CODE (c) == COMPLEX_CST)
6020 return fold_build1_loc (EXPR_LOCATION (t),
6021 TREE_CODE (t), TREE_TYPE (t), c);
6022 break;
6025 default:
6026 break;
6029 return NULL_TREE;
6032 tree
6033 fold_const_aggregate_ref (tree t)
6035 return fold_const_aggregate_ref_1 (t, NULL);
6038 /* Look up the virtual method with index TOKEN in the virtual table V
6039 at OFFSET.
6040 If CAN_REFER is non-NULL, set it to false when the method
6041 is not referable or the virtual table is ill-formed (such as when rewritten
6042 by a non-C++ producer); otherwise just return NULL in that case. */
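/* For example (a sketch; the exact layout is ABI-dependent): for a
   vtable constructor { offset-to-top, &RTTI, &S::f, &S::g } with
   OFFSET pointing past the two header slots, TOKEN 1 selects &S::g
   and the FUNCTION_DECL for S::g is returned.  */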
6044 tree
6045 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6046 tree v,
6047 unsigned HOST_WIDE_INT offset,
6048 bool *can_refer)
6050 tree vtable = v, init, fn;
6051 unsigned HOST_WIDE_INT size;
6052 unsigned HOST_WIDE_INT elt_size, access_index;
6053 tree domain_type;
6055 if (can_refer)
6056 *can_refer = true;
6058 /* First of all, double-check that we have a virtual table. */
6059 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6061 /* Pass down that we lost track of the target. */
6062 if (can_refer)
6063 *can_refer = false;
6064 return NULL_TREE;
6067 init = ctor_for_folding (v);
6069 /* The virtual tables should always be born with constructors
6070 and we should always assume that they are available for
6071 folding. At the moment we do not stream them in all cases,
6072 but it should never happen that the ctor seems unreachable. */
6073 gcc_assert (init);
6074 if (init == error_mark_node)
6076 gcc_assert (in_lto_p);
6077 /* Pass down that we lost track of the target. */
6078 if (can_refer)
6079 *can_refer = false;
6080 return NULL_TREE;
6082 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6083 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6084 offset *= BITS_PER_UNIT;
6085 offset += token * size;
6087 /* Look up the value in the constructor, which is assumed to be an array.
6088 This is equivalent to
6089 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6090 offset, size, NULL);
6091 but in constant time. We expect that the frontend produced a simple
6092 array without indexed initializers. */
6094 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6095 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6096 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6097 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6099 access_index = offset / BITS_PER_UNIT / elt_size;
6100 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6102 /* This code assumes that there are no
6103 indexed fields produced by the C++ FE, so we can directly index the array. */
6104 if (access_index < CONSTRUCTOR_NELTS (init))
6106 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6107 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6108 STRIP_NOPS (fn);
6110 else
6111 fn = NULL;
6113 /* For a type-inconsistent program we may end up looking up a virtual method
6114 in a virtual table that does not contain TOKEN entries. We may overrun
6115 the virtual table and pick up a constant or an RTTI info pointer;
6116 in any case the call is undefined. */
6117 if (!fn
6118 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6119 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6120 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6121 else
6123 fn = TREE_OPERAND (fn, 0);
6125 /* When the cgraph node is missing and the function is not public, we cannot
6126 devirtualize. This can happen in WHOPR when the actual method
6127 ends up in another partition, because we found the devirtualization
6128 possibility too late. */
6129 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6131 if (can_refer)
6133 *can_refer = false;
6134 return fn;
6136 return NULL_TREE;
6140 /* Make sure we create a cgraph node for functions we'll reference.
6141 They can be non-existent if the reference comes from an entry
6142 of an external vtable for example. */
6143 cgraph_node::get_create (fn);
6145 return fn;
6148 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6149 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6150 KNOWN_BINFO carries the binfo describing the true type of
6151 OBJ_TYPE_REF_OBJECT(REF).
6152 If CAN_REFER is non-NULL, set it to false when the method
6153 is not referable or the virtual table is ill-formed (such as when rewritten
6154 by a non-C++ producer); otherwise just return NULL in that case. */
6156 tree
6157 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6158 bool *can_refer)
6160 unsigned HOST_WIDE_INT offset;
6161 tree v;
6163 v = BINFO_VTABLE (known_binfo);
6164 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
6165 if (!v)
6166 return NULL_TREE;
6168 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6170 if (can_refer)
6171 *can_refer = false;
6172 return NULL_TREE;
6174 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6177 /* Given a pointer value T, return a simplified version of an
6178 indirection through T, or NULL_TREE if no simplification is
6179 possible. Note that the resulting type may be different from
6180 the type pointed to in the sense that it is still compatible
6181 from the langhooks point of view. */
6183 tree
6184 gimple_fold_indirect_ref (tree t)
6186 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6187 tree sub = t;
6188 tree subtype;
6190 STRIP_NOPS (sub);
6191 subtype = TREE_TYPE (sub);
6192 if (!POINTER_TYPE_P (subtype))
6193 return NULL_TREE;
6195 if (TREE_CODE (sub) == ADDR_EXPR)
6197 tree op = TREE_OPERAND (sub, 0);
6198 tree optype = TREE_TYPE (op);
6199 /* *&p => p */
6200 if (useless_type_conversion_p (type, optype))
6201 return op;
6203 /* *(foo *)&fooarray => fooarray[0] */
6204 if (TREE_CODE (optype) == ARRAY_TYPE
6205 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6206 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6208 tree type_domain = TYPE_DOMAIN (optype);
6209 tree min_val = size_zero_node;
6210 if (type_domain && TYPE_MIN_VALUE (type_domain))
6211 min_val = TYPE_MIN_VALUE (type_domain);
6212 if (TREE_CODE (min_val) == INTEGER_CST)
6213 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6215 /* *(foo *)&complexfoo => __real__ complexfoo */
6216 else if (TREE_CODE (optype) == COMPLEX_TYPE
6217 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6218 return fold_build1 (REALPART_EXPR, type, op);
6219 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6220 else if (TREE_CODE (optype) == VECTOR_TYPE
6221 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6223 tree part_width = TYPE_SIZE (type);
6224 tree index = bitsize_int (0);
6225 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6229 /* *(p + CST) -> ... */
6230 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6231 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6233 tree addr = TREE_OPERAND (sub, 0);
6234 tree off = TREE_OPERAND (sub, 1);
6235 tree addrtype;
6237 STRIP_NOPS (addr);
6238 addrtype = TREE_TYPE (addr);
6240 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6241 if (TREE_CODE (addr) == ADDR_EXPR
6242 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6243 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6244 && tree_fits_uhwi_p (off))
6246 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6247 tree part_width = TYPE_SIZE (type);
6248 unsigned HOST_WIDE_INT part_widthi
6249 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6250 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6251 tree index = bitsize_int (indexi);
6252 if (offset / part_widthi
6253 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6254 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6255 part_width, index);
6258 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6259 if (TREE_CODE (addr) == ADDR_EXPR
6260 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6261 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6263 tree size = TYPE_SIZE_UNIT (type);
6264 if (tree_int_cst_equal (size, off))
6265 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6268 /* *(p + CST) -> MEM_REF <p, CST>. */
6269 if (TREE_CODE (addr) != ADDR_EXPR
6270 || DECL_P (TREE_OPERAND (addr, 0)))
6271 return fold_build2 (MEM_REF, type,
6272 addr,
6273 wide_int_to_tree (ptype, off));
6276 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6277 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6278 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6279 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6281 tree type_domain;
6282 tree min_val = size_zero_node;
6283 tree osub = sub;
6284 sub = gimple_fold_indirect_ref (sub);
6285 if (! sub)
6286 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6287 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6288 if (type_domain && TYPE_MIN_VALUE (type_domain))
6289 min_val = TYPE_MIN_VALUE (type_domain);
6290 if (TREE_CODE (min_val) == INTEGER_CST)
6291 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6294 return NULL_TREE;
6297 /* Return true if CODE is an operation that, when operating on signed
6298 integer types, involves undefined behavior on overflow and the
6299 operation can be expressed with unsigned arithmetic. */
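/* For example: overflow of the signed addition a + b is undefined,
   but the same result bits can be computed with defined wrapping
   semantics as (int) ((unsigned int) a + (unsigned int) b), which is
   the form rewrite_to_defined_overflow below produces.  */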
6301 bool
6302 arith_code_with_undefined_signed_overflow (tree_code code)
6304 switch (code)
6306 case PLUS_EXPR:
6307 case MINUS_EXPR:
6308 case MULT_EXPR:
6309 case NEGATE_EXPR:
6310 case POINTER_PLUS_EXPR:
6311 return true;
6312 default:
6313 return false;
6317 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6318 operation that can be transformed to unsigned arithmetic by converting
6319 its operands, carrying out the operation in the corresponding unsigned
6320 type and converting the result back to the original type.
6322 Returns a sequence of statements that replace STMT and also contain
6323 a modified form of STMT itself. */
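/* For example (a sketch): with signed int operands the assignment

     a_2 = b_1 + 7;

   is replaced by the sequence

     ub_3 = (unsigned int) b_1;
     ua_4 = ub_3 + 7;
     a_2 = (int) ua_4;

   where the unsigned addition wraps with defined semantics.  */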
6325 gimple_seq
6326 rewrite_to_defined_overflow (gimple *stmt)
6328 if (dump_file && (dump_flags & TDF_DETAILS))
6330 fprintf (dump_file, "rewriting stmt with undefined signed "
6331 "overflow ");
6332 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6335 tree lhs = gimple_assign_lhs (stmt);
6336 tree type = unsigned_type_for (TREE_TYPE (lhs));
6337 gimple_seq stmts = NULL;
6338 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6340 tree op = gimple_op (stmt, i);
6341 op = gimple_convert (&stmts, type, op);
6342 gimple_set_op (stmt, i, op);
6344 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6345 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6346 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6347 gimple_seq_add_stmt (&stmts, stmt);
6348 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6349 gimple_seq_add_stmt (&stmts, cvt);
6351 return stmts;
6355 /* The valueization hook we use for the gimple_build API simplification.
6356 This makes us match fold_buildN behavior by only combining with
6357 statements in the sequence(s) we are currently building. */
6359 static tree
6360 gimple_build_valueize (tree op)
6362 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6363 return op;
6364 return NULL_TREE;
6367 /* Build the expression CODE OP0 of type TYPE with location LOC,
6368 simplifying it first if possible. Returns the built
6369 expression value and appends statements possibly defining it
6370 to SEQ. */
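/* For example (a hypothetical caller): building a negation with

     tree res = gimple_build (&seq, loc, NEGATE_EXPR, type, op);

   either simplifies immediately (e.g. to a constant when OP is
   constant) or appends res = -op to SEQ.  */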
6372 tree
6373 gimple_build (gimple_seq *seq, location_t loc,
6374 enum tree_code code, tree type, tree op0)
6376 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6377 if (!res)
6379 if (gimple_in_ssa_p (cfun))
6380 res = make_ssa_name (type);
6381 else
6382 res = create_tmp_reg (type);
6383 gimple *stmt;
6384 if (code == REALPART_EXPR
6385 || code == IMAGPART_EXPR
6386 || code == VIEW_CONVERT_EXPR)
6387 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6388 else
6389 stmt = gimple_build_assign (res, code, op0);
6390 gimple_set_location (stmt, loc);
6391 gimple_seq_add_stmt_without_update (seq, stmt);
6393 return res;
6396 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6397 simplifying it first if possible. Returns the built
6398 expression value and appends statements possibly defining it
6399 to SEQ. */
6401 tree
6402 gimple_build (gimple_seq *seq, location_t loc,
6403 enum tree_code code, tree type, tree op0, tree op1)
6405 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6406 if (!res)
6408 if (gimple_in_ssa_p (cfun))
6409 res = make_ssa_name (type);
6410 else
6411 res = create_tmp_reg (type);
6412 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6413 gimple_set_location (stmt, loc);
6414 gimple_seq_add_stmt_without_update (seq, stmt);
6416 return res;
6419 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6420 simplifying it first if possible. Returns the built
6421 expression value and appends statements possibly defining it
6422 to SEQ. */
6424 tree
6425 gimple_build (gimple_seq *seq, location_t loc,
6426 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6428 tree res = gimple_simplify (code, type, op0, op1, op2,
6429 seq, gimple_build_valueize);
6430 if (!res)
6432 if (gimple_in_ssa_p (cfun))
6433 res = make_ssa_name (type);
6434 else
6435 res = create_tmp_reg (type);
6436 gimple *stmt;
6437 if (code == BIT_FIELD_REF)
6438 stmt = gimple_build_assign (res, code,
6439 build3 (code, type, op0, op1, op2));
6440 else
6441 stmt = gimple_build_assign (res, code, op0, op1, op2);
6442 gimple_set_location (stmt, loc);
6443 gimple_seq_add_stmt_without_update (seq, stmt);
6445 return res;
6448 /* Build the call FN (ARG0) with a result of type TYPE
6449 (or no result if TYPE is void) with location LOC,
6450 simplifying it first if possible. Returns the built
6451 expression value (or NULL_TREE if TYPE is void) and appends
6452 statements possibly defining it to SEQ. */
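/* For example (a hypothetical caller):

     tree r = gimple_build (&seq, loc, BUILT_IN_SQRT,
                            double_type_node, x);

   may simplify to a REAL_CST when X is a suitable constant and
   otherwise appends a call to the implicit sqrt decl to SEQ.  */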
6454 tree
6455 gimple_build (gimple_seq *seq, location_t loc,
6456 enum built_in_function fn, tree type, tree arg0)
6458 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6459 if (!res)
6461 tree decl = builtin_decl_implicit (fn);
6462 gimple *stmt = gimple_build_call (decl, 1, arg0);
6463 if (!VOID_TYPE_P (type))
6465 if (gimple_in_ssa_p (cfun))
6466 res = make_ssa_name (type);
6467 else
6468 res = create_tmp_reg (type);
6469 gimple_call_set_lhs (stmt, res);
6471 gimple_set_location (stmt, loc);
6472 gimple_seq_add_stmt_without_update (seq, stmt);
6474 return res;
6477 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6478 (or no result if TYPE is void) with location LOC,
6479 simplifying it first if possible. Returns the built
6480 expression value (or NULL_TREE if TYPE is void) and appends
6481 statements possibly defining it to SEQ. */
6483 tree
6484 gimple_build (gimple_seq *seq, location_t loc,
6485 enum built_in_function fn, tree type, tree arg0, tree arg1)
6487 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6488 if (!res)
6490 tree decl = builtin_decl_implicit (fn);
6491 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6492 if (!VOID_TYPE_P (type))
6494 if (gimple_in_ssa_p (cfun))
6495 res = make_ssa_name (type);
6496 else
6497 res = create_tmp_reg (type);
6498 gimple_call_set_lhs (stmt, res);
6500 gimple_set_location (stmt, loc);
6501 gimple_seq_add_stmt_without_update (seq, stmt);
6503 return res;
6506 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6507 (or no result if TYPE is void) with location LOC,
6508 simplifying it first if possible. Returns the built
6509 expression value (or NULL_TREE if TYPE is void) and appends
6510 statements possibly defining it to SEQ. */
6512 tree
6513 gimple_build (gimple_seq *seq, location_t loc,
6514 enum built_in_function fn, tree type,
6515 tree arg0, tree arg1, tree arg2)
6517 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6518 seq, gimple_build_valueize);
6519 if (!res)
6521 tree decl = builtin_decl_implicit (fn);
6522 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6523 if (!VOID_TYPE_P (type))
6525 if (gimple_in_ssa_p (cfun))
6526 res = make_ssa_name (type);
6527 else
6528 res = create_tmp_reg (type);
6529 gimple_call_set_lhs (stmt, res);
6531 gimple_set_location (stmt, loc);
6532 gimple_seq_add_stmt_without_update (seq, stmt);
6534 return res;
6537 /* Build the conversion (TYPE) OP with a result of type TYPE
6538 with location LOC if such a conversion is necessary in GIMPLE,
6539 simplifying it first.
6540 Returns the built expression value and appends
6541 statements possibly defining it to SEQ. */
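/* For example (a hypothetical caller):

     tree off = gimple_convert (&seq, loc, sizetype, idx);

   returns IDX unchanged when its type already converts uselessly to
   sizetype, and otherwise builds a NOP_EXPR assignment in SEQ.  */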
6543 tree
6544 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6546 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6547 return op;
6548 return gimple_build (seq, loc, NOP_EXPR, type, op);
6551 /* Build the conversion (ptrofftype) OP with a result of a type
6552 compatible with ptrofftype with location LOC if such a conversion
6553 is necessary in GIMPLE, simplifying it first.
6554 Returns the built expression value and appends
6555 statements possibly defining it to SEQ. */
6557 tree
6558 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6560 if (ptrofftype_p (TREE_TYPE (op)))
6561 return op;
6562 return gimple_convert (seq, loc, sizetype, op);
6565 /* Return true if the result of assignment STMT is known to be non-negative.
6566 If the return value is based on the assumption that signed overflow is
6567 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6568 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6570 static bool
6571 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6572 int depth)
6574 enum tree_code code = gimple_assign_rhs_code (stmt);
6575 switch (get_gimple_rhs_class (code))
6577 case GIMPLE_UNARY_RHS:
6578 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6579 gimple_expr_type (stmt),
6580 gimple_assign_rhs1 (stmt),
6581 strict_overflow_p, depth);
6582 case GIMPLE_BINARY_RHS:
6583 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6584 gimple_expr_type (stmt),
6585 gimple_assign_rhs1 (stmt),
6586 gimple_assign_rhs2 (stmt),
6587 strict_overflow_p, depth);
6588 case GIMPLE_TERNARY_RHS:
6589 return false;
6590 case GIMPLE_SINGLE_RHS:
6591 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6592 strict_overflow_p, depth);
6593 case GIMPLE_INVALID_RHS:
6594 break;
6596 gcc_unreachable ();
6599 /* Return true if the return value of call STMT is known to be non-negative.
6600 If the return value is based on the assumption that signed overflow is
6601 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6602 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6604 static bool
6605 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6606 int depth)
6608 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6609 gimple_call_arg (stmt, 0) : NULL_TREE;
6610 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6611 gimple_call_arg (stmt, 1) : NULL_TREE;
6613 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6614 gimple_call_combined_fn (stmt),
6615 arg0,
6616 arg1,
6617 strict_overflow_p, depth);
6620 /* Return true if the result of PHI node STMT is known to be non-negative.
6621 If the return value is based on the assumption that signed overflow is
6622 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6623 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6625 static bool
6626 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6627 int depth)
6629 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6631 tree arg = gimple_phi_arg_def (stmt, i);
6632 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6633 return false;
6635 return true;
6638 /* Return true if STMT is known to compute a non-negative value.
6639 If the return value is based on the assumption that signed overflow is
6640 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6641 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6643 bool
6644 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6645 int depth)
6647 switch (gimple_code (stmt))
6649 case GIMPLE_ASSIGN:
6650 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6651 depth);
6652 case GIMPLE_CALL:
6653 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6654 depth);
6655 case GIMPLE_PHI:
6656 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6657 depth);
6658 default:
6659 return false;
6663 /* Return true if the floating-point value computed by assignment STMT
6664 is known to have an integer value. We also allow +Inf, -Inf and NaN
6665 to be considered integer values. Return false for signaling NaN.
6667 DEPTH is the current nesting depth of the query. */
6669 static bool
6670 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6672 enum tree_code code = gimple_assign_rhs_code (stmt);
6673 switch (get_gimple_rhs_class (code))
6675 case GIMPLE_UNARY_RHS:
6676 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6677 gimple_assign_rhs1 (stmt), depth);
6678 case GIMPLE_BINARY_RHS:
6679 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6680 gimple_assign_rhs1 (stmt),
6681 gimple_assign_rhs2 (stmt), depth);
6682 case GIMPLE_TERNARY_RHS:
6683 return false;
6684 case GIMPLE_SINGLE_RHS:
6685 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6686 case GIMPLE_INVALID_RHS:
6687 break;
6689 gcc_unreachable ();
6692 /* Return true if the floating-point value computed by call STMT is known
6693 to have an integer value. We also allow +Inf, -Inf and NaN to be
6694 considered integer values. Return false for signaling NaN.
6696 DEPTH is the current nesting depth of the query. */
6698 static bool
6699 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6701 tree arg0 = (gimple_call_num_args (stmt) > 0
6702 ? gimple_call_arg (stmt, 0)
6703 : NULL_TREE);
6704 tree arg1 = (gimple_call_num_args (stmt) > 1
6705 ? gimple_call_arg (stmt, 1)
6706 : NULL_TREE);
6707 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6708 arg0, arg1, depth);
6711 /* Return true if the floating-point result of phi STMT is known to have
6712 an integer value. We also allow +Inf, -Inf and NaN to be considered
6713 integer values. Return false for signaling NaN.
6715 DEPTH is the current nesting depth of the query. */
6717 static bool
6718 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6720 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6722 tree arg = gimple_phi_arg_def (stmt, i);
6723 if (!integer_valued_real_single_p (arg, depth + 1))
6724 return false;
6726 return true;
6729 /* Return true if the floating-point value computed by STMT is known
6730 to have an integer value. We also allow +Inf, -Inf and NaN to be
6731 considered integer values. Return false for signaling NaN.
6733 DEPTH is the current nesting depth of the query. */
6735 bool
6736 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6738 switch (gimple_code (stmt))
6740 case GIMPLE_ASSIGN:
6741 return gimple_assign_integer_valued_real_p (stmt, depth);
6742 case GIMPLE_CALL:
6743 return gimple_call_integer_valued_real_p (stmt, depth);
6744 case GIMPLE_PHI:
6745 return gimple_phi_integer_valued_real_p (stmt, depth);
6746 default:
6747 return false;