gcov: emit hotness colors to easily find hot code.
[official-gcc.git] / gcc / gimple-fold.c
blob362ab59e9c00ec338fa3594db0186442df8b82f2
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract decls (e.g. origins of inlined instances) never have a body
     to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function that has been inlined everywhere may no longer have an
	 independently referable body.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
169 /* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
173 tree
174 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the value cannot be
   legitimately referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM_REF[ptr + cst] so the result is a single
     ADDR_EXPR that is_gimple_min_invariant can accept.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its underlying decl (mutates
	     CVAL in place).  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the referenced decl is not available in this unit
	 (e.g. privatized COMDAT from an external vtable).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CVAL is a constant with overflow, return a clean copy without the
     overflow flag.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
244 /* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
247 tree
248 get_symbol_constant_value (tree sym)
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
253 if (val)
255 val = canonicalize_constructor_val (unshare_expr (val), sym);
256 if (val && is_gimple_min_invariant (val))
257 return val;
258 else
259 return NULL_TREE;
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
265 && is_gimple_reg_type (TREE_TYPE (sym)))
266 return build_zero_cst (TREE_TYPE (sym));
269 return NULL_TREE;
274 /* Subroutine of fold_stmt. We perform several simplifications of the
275 memory reference tree EXPR and make sure to re-gimplify them properly
276 after propagation of constant addresses. IS_LHS is true if the
277 reference is supposed to be an lvalue. */
279 static tree
280 maybe_fold_reference (tree expr, bool is_lhs)
282 tree result;
284 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285 || TREE_CODE (expr) == REALPART_EXPR
286 || TREE_CODE (expr) == IMAGPART_EXPR)
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_unary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0));
292 else if (TREE_CODE (expr) == BIT_FIELD_REF
293 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294 return fold_ternary_loc (EXPR_LOCATION (expr),
295 TREE_CODE (expr),
296 TREE_TYPE (expr),
297 TREE_OPERAND (expr, 0),
298 TREE_OPERAND (expr, 1),
299 TREE_OPERAND (expr, 2));
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
306 return NULL_TREE;
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* With a complete target list of at most one entry we can
		   replace the OBJ_TYPE_REF by the (single) target address,
		   subject to the devirtualization debug counter.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] simplifies to p (modulo type conversion).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it is a store (non-register LHS)
	 or a call that may clobber memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first seen while walking
	     backward) reuses the replaced statement's VDEF; earlier
	     stores get fresh virtual SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace (si_p, stmts, false);
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  /* Nothing to insert: drop the call and its virtual operands
	     entirely.  */
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR to a value and append an assignment of it to the
	 call's LHS as the final statement of the sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
579 /* Replace the call at *GSI with the gimple value VAL. */
581 void
582 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
584 gimple *stmt = gsi_stmt (*gsi);
585 tree lhs = gimple_call_lhs (stmt);
586 gimple *repl;
587 if (lhs)
589 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
590 val = fold_convert (TREE_TYPE (lhs), val);
591 repl = gimple_build_assign (lhs, val);
593 else
594 repl = gimple_build_nop ();
595 tree vdef = gimple_vdef (stmt);
596 if (vdef && TREE_CODE (vdef) == SSA_NAME)
598 unlink_stmt_vdef (stmt);
599 release_ssa_name (vdef);
601 gsi_replace (gsi, repl, false);
604 /* Replace the call at *GSI with the new call REPL and fold that
605 again. */
607 static void
608 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
610 gimple *stmt = gsi_stmt (*gsi);
611 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
612 gimple_set_location (repl, gimple_location (stmt));
613 if (gimple_vdef (stmt)
614 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
616 gimple_set_vdef (repl, gimple_vdef (stmt));
617 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
619 if (gimple_vuse (stmt))
620 gimple_set_vuse (repl, gimple_vuse (stmt));
621 gsi_replace (gsi, repl, false);
622 fold_stmt (gsi);
625 /* Return true if VAR is a VAR_DECL or a component thereof. */
627 static bool
628 var_decl_component_p (tree var)
630 tree inner = var;
631 while (handled_component_p (inner))
632 inner = TREE_OPERAND (inner, 0);
633 return (DECL_P (inner)
634 || (TREE_CODE (inner) == MEM_REF
635 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
638 /* If the SIZE argument representing the size of an object is in a range
639 of values of which exactly one is valid (and that is zero), return
640 true, otherwise false. */
642 static bool
643 size_must_be_zero_p (tree size)
645 if (integer_zerop (size))
646 return true;
648 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
649 return false;
651 wide_int min, max;
652 enum value_range_type rtype = get_range_info (size, &min, &max);
653 if (rtype != VR_ANTI_RANGE)
654 return false;
656 tree type = TREE_TYPE (size);
657 int prec = TYPE_PRECISION (type);
659 wide_int wone = wi::one (prec);
661 /* Compute the value of SSIZE_MAX, the largest positive value that
662 can be stored in ssize_t, the signed counterpart of size_t. */
663 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
665 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  Call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.

   ENDP selects the result produced when the call has a LHS (see the
   "done" tail below): 0 and 3 yield DEST, 1 yields DEST + LEN, and
   2 yields DEST + LEN - 1.  ENDP == 3 additionally requests the
   overlap checking needed for memmove.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  bool nowarn = gimple_no_warning_p (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;
      const char *tmp_str;
      unsigned HOST_WIDE_INT tmp_len;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ??? Don't transform copies from strings with known length this
	     confuses the tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2)
	  && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect invalid bounds and overlapping copies and issue
		 either -Warray-bounds or -Wrestrict.  */
	      if (!nowarn
		  && check_bounds_or_overlap (as_a <gcall *>(stmt),
					      dest, src, len, len))
		gimple_set_no_warning (stmt, true);

	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  /* Give up when the source load would need an unaligned
		     access the target cannot do.  */
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      /* First load the source into a temporary, then store
			 it, so the transform is valid for overlapping
			 operands.  */
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  /* Distinct decls cannot overlap; the same decl may,
		     depending on the offsets.  */
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);

      srcvar = NULL_TREE;
      if (TREE_CODE (src) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (src, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  src, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      /* When only one side produced a MEM_REF, build the other side's
	 access with the first side's type, lowering alignment if the
	 target allows unaligned accesses.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Detect invalid bounds and overlapping copies and issue either
	 -Warray-bounds or -Wrestrict.  */
      if (!nowarn)
	check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  /* Register-type copy: load the source into a register first so
	     the store does not read from the destination.  */
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  Use an unsigned char[] type to
	 perform the copying to preserve padding and to avoid any issues
	 with TREE_ADDRESSABLE types or float modes behavior on copying.  */
      desttype = build_array_type_nelts (unsigned_char_type_node,
					 tree_to_uhwi (len));
      srctype = desttype;
      if (src_align > TYPE_ALIGN (srctype))
	srctype = build_aligned_type (srctype, src_align);
      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      new_stmt
	= gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
			       fold_build2 (MEM_REF, srctype, src, off0));
set_vop_and_replace:
      /* Transfer the call's virtual operands to the replacement
	 assignment.  */
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  /* Compute the value assigned to the LHS as selected by ENDP:
     DEST (0/3), DEST + LEN (1) or DEST + LEN - 1 (2).  */
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
			ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
1097 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1098 to built-in memcmp (a, b, len). */
1100 static bool
1101 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1103 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1105 if (!fn)
1106 return false;
1108 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1110 gimple *stmt = gsi_stmt (*gsi);
1111 tree a = gimple_call_arg (stmt, 0);
1112 tree b = gimple_call_arg (stmt, 1);
1113 tree len = gimple_call_arg (stmt, 2);
1115 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1116 replace_call_with_call_and_fold (gsi, repl);
1118 return true;
1121 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1122 to built-in memmove (dest, src, len). */
1124 static bool
1125 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1127 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1129 if (!fn)
1130 return false;
1132 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1133 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1134 len) into memmove (dest, src, len). */
1136 gimple *stmt = gsi_stmt (*gsi);
1137 tree src = gimple_call_arg (stmt, 0);
1138 tree dest = gimple_call_arg (stmt, 1);
1139 tree len = gimple_call_arg (stmt, 2);
1141 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1142 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1143 replace_call_with_call_and_fold (gsi, repl);
1145 return true;
1148 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1149 to built-in memset (dest, 0, len). */
1151 static bool
1152 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1154 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1156 if (!fn)
1157 return false;
1159 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1161 gimple *stmt = gsi_stmt (*gsi);
1162 tree dest = gimple_call_arg (stmt, 0);
1163 tree len = gimple_call_arg (stmt, 1);
1165 gimple_seq seq = NULL;
1166 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1167 gimple_seq_add_stmt_without_update (&seq, repl);
1168 gsi_replace_with_seq_vops (gsi, seq);
1169 fold_stmt (gsi);
1171 return true;
1174 /* Fold function call to builtin memset or bzero at *GSI setting the
1175 memory of size LEN to VAL. Return whether a simplification was made. */
1177 static bool
1178 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1180 gimple *stmt = gsi_stmt (*gsi);
1181 tree etype;
1182 unsigned HOST_WIDE_INT length, cval;
1184 /* If the LEN parameter is zero, return DEST. */
1185 if (integer_zerop (len))
1187 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1188 return true;
1191 if (! tree_fits_uhwi_p (len))
1192 return false;
1194 if (TREE_CODE (c) != INTEGER_CST)
1195 return false;
1197 tree dest = gimple_call_arg (stmt, 0);
1198 tree var = dest;
1199 if (TREE_CODE (var) != ADDR_EXPR)
1200 return false;
1202 var = TREE_OPERAND (var, 0);
1203 if (TREE_THIS_VOLATILE (var))
1204 return false;
1206 etype = TREE_TYPE (var);
1207 if (TREE_CODE (etype) == ARRAY_TYPE)
1208 etype = TREE_TYPE (etype);
1210 if (!INTEGRAL_TYPE_P (etype)
1211 && !POINTER_TYPE_P (etype))
1212 return NULL_TREE;
1214 if (! var_decl_component_p (var))
1215 return NULL_TREE;
1217 length = tree_to_uhwi (len);
1218 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1219 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1220 return NULL_TREE;
1222 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1223 return NULL_TREE;
1225 if (integer_zerop (c))
1226 cval = 0;
1227 else
1229 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1230 return NULL_TREE;
1232 cval = TREE_INT_CST_LOW (c);
1233 cval &= 0xff;
1234 cval |= cval << 8;
1235 cval |= cval << 16;
1236 cval |= (cval << 31) << 1;
1239 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1240 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1241 gimple_set_vuse (store, gimple_vuse (stmt));
1242 tree vdef = gimple_vdef (stmt);
1243 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1245 gimple_set_vdef (store, gimple_vdef (stmt));
1246 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1248 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1249 if (gimple_call_lhs (stmt))
1251 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1252 gsi_replace (gsi, asgn, false);
1254 else
1256 gimple_stmt_iterator gsi2 = *gsi;
1257 gsi_prev (gsi);
1258 gsi_remove (&gsi2, true);
1261 return true;
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables (lazily allocated here).
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is non-zero and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.  If FUZZY is 2, it will handle
   PHIs and COND_EXPRs optimistically, if we can determine string length
   minimum and maximum, it will use the minimum from the ones where it
   can be determined.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.
   Pass NONSTR through to children.
   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  int fuzzy, bool *flexp, unsigned eltsize, tree *nonstr)
{
  tree var, val = NULL_TREE;
  gimple *def_stmt;

  /* The minimum and maximum length.  */
  tree *const minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  tree op = TREE_OPERAND (arg, 0);
	  if (integer_zerop (TREE_OPERAND (op, 1)))
	    {
	      /* &p[0]: recurse on the SSA pointer P itself.  */
	      tree aop0 = TREE_OPERAND (op, 0);
	      if (TREE_CODE (aop0) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
		return get_range_strlen (TREE_OPERAND (aop0, 0), length,
					 visited, type, fuzzy, flexp,
					 eltsize, nonstr);
	    }
	  else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
	    {
	      /* Fail if an array is the last member of a struct object
		 since it could be treated as a (fake) flexible array
		 member.  */
	      tree idx = TREE_OPERAND (op, 1);

	      arg = TREE_OPERAND (op, 0);
	      tree optype = TREE_TYPE (arg);
	      if (tree dom = TYPE_DOMAIN (optype))
		if (tree bound = TYPE_MAX_VALUE (dom))
		  if (TREE_CODE (bound) == INTEGER_CST
		      && TREE_CODE (idx) == INTEGER_CST
		      && tree_int_cst_lt (bound, idx))
		    return false;
	    }
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; it must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1, nonstr, eltsize);

      if (!val && fuzzy)
	{
	  /* The exact length is unknown; fall back to bounding it by
	     the size of a character array ARG may refer to.  */
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp,
				     eltsize, nonstr);

	  if (TREE_CODE (arg) == ARRAY_REF)
	    {
	      tree type = TREE_TYPE (TREE_OPERAND (arg, 0));

	      /* Determine the "innermost" array type.  */
	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Avoid arrays of pointers.  */
	      tree eltype = TREE_TYPE (type);
	      if (TREE_CODE (type) != ARRAY_TYPE
		  || !INTEGRAL_TYPE_P (eltype))
		return false;

	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;

	      /* Maximum length is the array size minus one for the
		 terminating nul.  */
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);

	      if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
		  && type == TREE_TYPE (TREE_OPERAND (arg, 0))
		  && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
		*flexp = true;
	    }
	  else if (TREE_CODE (arg) == COMPONENT_REF
		   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		       == ARRAY_TYPE))
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL but that would only be considered valid if
		 the array were the last member of a struct.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);

	      tree type = TREE_TYPE (arg);

	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Fail when the array bound is unknown or zero.  */
	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }

	  if (VAR_P (arg))
	    {
	      /* For a plain array variable (or pointer-to-array), use
		 its size as the upper bound on the string length.  */
	      tree type = TREE_TYPE (arg);
	      if (POINTER_TYPE_P (type))
		type = TREE_TYPE (type);

	      if (TREE_CODE (type) == ARRAY_TYPE)
		{
		  val = TYPE_SIZE_UNIT (type);
		  if (!val
		      || TREE_CODE (val) != INTEGER_CST
		      || integer_zerop (val))
		    return false;
		  val = wide_int_to_tree (TREE_TYPE (val),
					  wi::sub (wi::to_wide (val), 1));
		  /* Set the minimum size to zero since the string in
		     the array could have zero length.  */
		  *minlen = ssize_int (0);
		}
	    }
	}

      if (!val)
	return false;

      /* Lower *MINLEN to VAL when VAL is a smaller constant (only
	 meaningful for TYPE > 0, i.e. maximum queries).  */
      if (!*minlen
	  || (type > 0
	      && TREE_CODE (*minlen) == INTEGER_CST
	      && TREE_CODE (val) == INTEGER_CST
	      && tree_int_cst_lt (val, *minlen)))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      /* Merge VAL into the running maximum.  */
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  /* TYPE == 0: every path must agree on the exact length.  */
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp,
				   eltsize, nonstr);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Merge the ranges of both arms of the COND_EXPR.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
				   flexp, eltsize, nonstr))
	      {
		/* FUZZY == 2: be optimistic and record "unbounded"
		   rather than failing outright.  */
		if (fuzzy == 2)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* All the arguments of the PHI node must have the same constant
	 length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp,
				 eltsize, nonstr))
	    {
	      if (fuzzy == 2)
		*maxlen = build_all_ones_cst (size_type_node);
	      else
		return false;
	    }
	}
      return true;

    default:
      return false;
    }
}
1542 /* Determine the minimum and maximum value or string length that ARG
1543 refers to and store each in the first two elements of MINMAXLEN.
1544 For expressions that point to strings of unknown lengths that are
1545 character arrays, use the upper bound of the array as the maximum
1546 length. For example, given an expression like 'x ? array : "xyz"'
1547 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1548 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1549 stored in array.
1550 Return true if the range of the string lengths has been obtained
1551 from the upper bound of an array at the end of a struct. Such
1552 an array may hold a string that's longer than its upper bound
1553 due to it being used as a poor-man's flexible array member.
1555 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1556 and false if PHIs and COND_EXPRs are to be handled optimistically,
1557 if we can determine string length minimum and maximum; it will use
1558 the minimum from the ones where it can be determined.
1559 STRICT false should be only used for warning code.
1560 When non-null, clear *NONSTR if ARG refers to a constant array
1561 that is known not be nul-terminated. Otherwise set it to
1562 the declaration of the constant non-terminated array.
1564 ELTSIZE is 1 for normal single byte character strings, and 2 or
1565 4 for wide characer strings. ELTSIZE is by default 1. */
1567 bool
1568 get_range_strlen (tree arg, tree minmaxlen[2], unsigned eltsize,
1569 bool strict, tree *nonstr /* = NULL */)
1571 bitmap visited = NULL;
1573 minmaxlen[0] = NULL_TREE;
1574 minmaxlen[1] = NULL_TREE;
1576 tree nonstrbuf;
1577 if (!nonstr)
1578 nonstr = &nonstrbuf;
1579 *nonstr = NULL_TREE;
1581 bool flexarray = false;
1582 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1583 &flexarray, eltsize, nonstr))
1585 minmaxlen[0] = NULL_TREE;
1586 minmaxlen[1] = NULL_TREE;
1589 if (visited)
1590 BITMAP_FREE (visited);
1592 return flexarray;
1595 /* Return the maximum string length for ARG, counting by TYPE
1596 (1, 2 or 4 for normal or wide chars). NONSTR indicates
1597 if the caller is prepared to handle unterminated strings.
1599 If an unterminated string is discovered and our caller handles
1600 unterminated strings, then bubble up the offending DECL and
1601 return the maximum size. Otherwise return NULL. */
1603 tree
1604 get_maxval_strlen (tree arg, int type, tree *nonstr /* = NULL */)
1606 bitmap visited = NULL;
1607 tree len[2] = { NULL_TREE, NULL_TREE };
1609 bool dummy;
1610 /* Set to non-null if ARG refers to an untermianted array. */
1611 tree mynonstr = NULL_TREE;
1612 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy, 1, &mynonstr))
1613 len[1] = NULL_TREE;
1614 if (visited)
1615 BITMAP_FREE (visited);
1617 if (nonstr)
1619 /* For callers prepared to handle unterminated arrays set
1620 *NONSTR to point to the declaration of the array and return
1621 the maximum length/size. */
1622 *nonstr = mynonstr;
1623 return len[1];
1626 /* Fail if the constant array isn't nul-terminated. */
1627 return mynonstr ? NULL_TREE : len[1];
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Folds to DEST when SRC == DEST, or to a memcpy call when the length
   of SRC is a known constant.  Return true if a simplification was
   made, false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The strlen+memcpy expansion below only saves time, not size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if SRC refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, 0, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "strcpy", src, nonstr);
      /* Suppress repeat warnings on this statement.  */
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (!len)
    return false;

  /* Copy LEN + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Folds to DEST when LEN is zero, or to a memcpy call when LEN and the
   length of SRC are known constants and the copy cannot overrun SRC.
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, 0);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.
   Return true if a simplification was made, false otherwise.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Without a use of the result there is nothing to simplify to.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: evaluate at compile time using the
     host strchr/strrchr.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Fold to STR + offset-of-match.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The strlen-based rewrite below is only valid when searching
     for the terminating nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);

  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1854 /* Fold function call to builtin strstr.
1855 If both arguments are constant, evaluate and fold the result,
1856 additionally fold strstr (x, "") into x and strstr (x, "c")
1857 into strchr (x, 'c'). */
1858 static bool
1859 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1861 gimple *stmt = gsi_stmt (*gsi);
1862 tree haystack = gimple_call_arg (stmt, 0);
1863 tree needle = gimple_call_arg (stmt, 1);
1864 const char *p, *q;
1866 if (!gimple_call_lhs (stmt))
1867 return false;
1869 q = c_getstr (needle);
1870 if (q == NULL)
1871 return false;
1873 if ((p = c_getstr (haystack)))
1875 const char *r = strstr (p, q);
1877 if (r == NULL)
1879 replace_call_with_value (gsi, integer_zero_node);
1880 return true;
1883 tree len = build_int_cst (size_type_node, r - p);
1884 gimple_seq stmts = NULL;
1885 gimple *new_stmt
1886 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1887 haystack, len);
1888 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1889 gsi_replace_with_seq_vops (gsi, stmts);
1890 return true;
1893 /* For strstr (x, "") return x. */
1894 if (q[0] == '\0')
1896 replace_call_with_value (gsi, haystack);
1897 return true;
1900 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1901 if (q[1] == '\0')
1903 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1904 if (strchr_fn)
1906 tree c = build_int_cst (integer_type_node, q[0]);
1907 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1908 replace_call_with_call_and_fold (gsi, repl);
1909 return true;
1913 return false;
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   Folds to DST when SRC is the empty string; otherwise, when optimizing
   for speed and the length of SRC is a known constant, expands the call
   into strlen (dst) followed by memcpy (dst + strlen (dst), src, len + 1).

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy expansion trades size for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument; forward DST to the lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2008 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2009 are the arguments to the call. */
2011 static bool
2012 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2014 gimple *stmt = gsi_stmt (*gsi);
2015 tree dest = gimple_call_arg (stmt, 0);
2016 tree src = gimple_call_arg (stmt, 1);
2017 tree size = gimple_call_arg (stmt, 2);
2018 tree fn;
2019 const char *p;
2022 p = c_getstr (src);
2023 /* If the SRC parameter is "", return DEST. */
2024 if (p && *p == '\0')
2026 replace_call_with_value (gsi, dest);
2027 return true;
2030 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2031 return false;
2033 /* If __builtin_strcat_chk is used, assume strcat is available. */
2034 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2035 if (!fn)
2036 return false;
2038 gimple *repl = gimple_build_call (fn, 2, dest, src);
2039 replace_call_with_call_and_fold (gsi, repl);
2040 return true;
/* Simplify a call to the strncat builtin at *GSI.  Folds to DST when
   nothing would be appended, or to a plain strcat call when the bound
   is a constant no smaller than the constant source length.  Also
   diagnoses bounds that equal or exceed the destination size or equal
   the source length (-Wstringop-overflow).  Return true if a
   simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Both the bound and the source must be known constants below.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  /* Suppress any follow-on warnings for this statement.  */
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2132 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2133 LEN, and SIZE. */
2135 static bool
2136 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2138 gimple *stmt = gsi_stmt (*gsi);
2139 tree dest = gimple_call_arg (stmt, 0);
2140 tree src = gimple_call_arg (stmt, 1);
2141 tree len = gimple_call_arg (stmt, 2);
2142 tree size = gimple_call_arg (stmt, 3);
2143 tree fn;
2144 const char *p;
2146 p = c_getstr (src);
2147 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2148 if ((p && *p == '\0')
2149 || integer_zerop (len))
2151 replace_call_with_value (gsi, dest);
2152 return true;
2155 if (! tree_fits_uhwi_p (size))
2156 return false;
2158 if (! integer_all_onesp (size))
2160 tree src_len = c_strlen (src, 1);
2161 if (src_len
2162 && tree_fits_uhwi_p (src_len)
2163 && tree_fits_uhwi_p (len)
2164 && ! tree_int_cst_lt (len, src_len))
2166 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2167 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2168 if (!fn)
2169 return false;
2171 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2172 replace_call_with_call_and_fold (gsi, repl);
2173 return true;
2175 return false;
2178 /* If __builtin_strncat_chk is used, assume strncat is available. */
2179 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2180 if (!fn)
2181 return false;
2183 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2184 replace_call_with_call_and_fold (gsi, repl);
2185 return true;
2188 /* Build and append gimple statements to STMTS that would load a first
2189 character of a memory location identified by STR. LOC is location
2190 of the statement. */
2192 static tree
2193 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2195 tree var;
2197 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2198 tree cst_uchar_ptr_node
2199 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2200 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2202 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2203 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2204 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2206 gimple_assign_set_lhs (stmt, var);
2207 gimple_seq_add_stmt_without_update (stmts, stmt);
2209 return var;
2212 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2213 FCODE is the name of the builtin. */
2215 static bool
2216 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2218 gimple *stmt = gsi_stmt (*gsi);
2219 tree callee = gimple_call_fndecl (stmt);
2220 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2222 tree type = integer_type_node;
2223 tree str1 = gimple_call_arg (stmt, 0);
2224 tree str2 = gimple_call_arg (stmt, 1);
2225 tree lhs = gimple_call_lhs (stmt);
2226 HOST_WIDE_INT length = -1;
2228 /* Handle strncmp and strncasecmp functions. */
2229 if (gimple_call_num_args (stmt) == 3)
2231 tree len = gimple_call_arg (stmt, 2);
2232 if (tree_fits_uhwi_p (len))
2233 length = tree_to_uhwi (len);
2236 /* If the LEN parameter is zero, return zero. */
2237 if (length == 0)
2239 replace_call_with_value (gsi, integer_zero_node);
2240 return true;
2243 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2244 if (operand_equal_p (str1, str2, 0))
2246 replace_call_with_value (gsi, integer_zero_node);
2247 return true;
2250 const char *p1 = c_getstr (str1);
2251 const char *p2 = c_getstr (str2);
2253 /* For known strings, return an immediate value. */
2254 if (p1 && p2)
2256 int r = 0;
2257 bool known_result = false;
2259 switch (fcode)
2261 case BUILT_IN_STRCMP:
2262 case BUILT_IN_STRCMP_EQ:
2264 r = strcmp (p1, p2);
2265 known_result = true;
2266 break;
2268 case BUILT_IN_STRNCMP:
2269 case BUILT_IN_STRNCMP_EQ:
2271 if (length == -1)
2272 break;
2273 r = strncmp (p1, p2, length);
2274 known_result = true;
2275 break;
2277 /* Only handleable situation is where the string are equal (result 0),
2278 which is already handled by operand_equal_p case. */
2279 case BUILT_IN_STRCASECMP:
2280 break;
2281 case BUILT_IN_STRNCASECMP:
2283 if (length == -1)
2284 break;
2285 r = strncmp (p1, p2, length);
2286 if (r == 0)
2287 known_result = true;
2288 break;
2290 default:
2291 gcc_unreachable ();
2294 if (known_result)
2296 replace_call_with_value (gsi, build_cmp_result (type, r));
2297 return true;
2301 bool nonzero_length = length >= 1
2302 || fcode == BUILT_IN_STRCMP
2303 || fcode == BUILT_IN_STRCMP_EQ
2304 || fcode == BUILT_IN_STRCASECMP;
2306 location_t loc = gimple_location (stmt);
2308 /* If the second arg is "", return *(const unsigned char*)arg1. */
2309 if (p2 && *p2 == '\0' && nonzero_length)
2311 gimple_seq stmts = NULL;
2312 tree var = gimple_load_first_char (loc, str1, &stmts);
2313 if (lhs)
2315 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2316 gimple_seq_add_stmt_without_update (&stmts, stmt);
2319 gsi_replace_with_seq_vops (gsi, stmts);
2320 return true;
2323 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2324 if (p1 && *p1 == '\0' && nonzero_length)
2326 gimple_seq stmts = NULL;
2327 tree var = gimple_load_first_char (loc, str2, &stmts);
2329 if (lhs)
2331 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2332 stmt = gimple_build_assign (c, NOP_EXPR, var);
2333 gimple_seq_add_stmt_without_update (&stmts, stmt);
2335 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2336 gimple_seq_add_stmt_without_update (&stmts, stmt);
2339 gsi_replace_with_seq_vops (gsi, stmts);
2340 return true;
2343 /* If len parameter is one, return an expression corresponding to
2344 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2345 if (fcode == BUILT_IN_STRNCMP && length == 1)
2347 gimple_seq stmts = NULL;
2348 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2349 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2351 if (lhs)
2353 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2354 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2355 gimple_seq_add_stmt_without_update (&stmts, convert1);
2357 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2358 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2359 gimple_seq_add_stmt_without_update (&stmts, convert2);
2361 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2362 gimple_seq_add_stmt_without_update (&stmts, stmt);
2365 gsi_replace_with_seq_vops (gsi, stmts);
2366 return true;
2369 /* If length is larger than the length of one constant string,
2370 replace strncmp with corresponding strcmp */
2371 if (fcode == BUILT_IN_STRNCMP
2372 && length > 0
2373 && ((p2 && (size_t) length > strlen (p2))
2374 || (p1 && (size_t) length > strlen (p1))))
2376 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2377 if (!fn)
2378 return false;
2379 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2380 replace_call_with_call_and_fold (gsi, repl);
2381 return true;
2384 return false;
2387 /* Fold a call to the memchr pointed by GSI iterator. */
2389 static bool
2390 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2392 gimple *stmt = gsi_stmt (*gsi);
2393 tree lhs = gimple_call_lhs (stmt);
2394 tree arg1 = gimple_call_arg (stmt, 0);
2395 tree arg2 = gimple_call_arg (stmt, 1);
2396 tree len = gimple_call_arg (stmt, 2);
2398 /* If the LEN parameter is zero, return zero. */
2399 if (integer_zerop (len))
2401 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2402 return true;
2405 char c;
2406 if (TREE_CODE (arg2) != INTEGER_CST
2407 || !tree_fits_uhwi_p (len)
2408 || !target_char_cst_p (arg2, &c))
2409 return false;
2411 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2412 unsigned HOST_WIDE_INT string_length;
2413 const char *p1 = c_getstr (arg1, &string_length);
2415 if (p1)
2417 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2418 if (r == NULL)
2420 if (length <= string_length)
2422 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2423 return true;
2426 else
2428 unsigned HOST_WIDE_INT offset = r - p1;
2429 gimple_seq stmts = NULL;
2430 if (lhs != NULL_TREE)
2432 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2433 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2434 arg1, offset_cst);
2435 gimple_seq_add_stmt_without_update (&stmts, stmt);
2437 else
2438 gimple_seq_add_stmt_without_update (&stmts,
2439 gimple_build_nop ());
2441 gsi_replace_with_seq_vops (gsi, stmts);
2442 return true;
2446 return false;
2449 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2450 to the call. IGNORE is true if the value returned
2451 by the builtin will be ignored. UNLOCKED is true is true if this
2452 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2453 the known length of the string. Return NULL_TREE if no simplification
2454 was possible. */
2456 static bool
2457 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2458 tree arg0, tree arg1,
2459 bool unlocked)
2461 gimple *stmt = gsi_stmt (*gsi);
2463 /* If we're using an unlocked function, assume the other unlocked
2464 functions exist explicitly. */
2465 tree const fn_fputc = (unlocked
2466 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2467 : builtin_decl_implicit (BUILT_IN_FPUTC));
2468 tree const fn_fwrite = (unlocked
2469 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2470 : builtin_decl_implicit (BUILT_IN_FWRITE));
2472 /* If the return value is used, don't do the transformation. */
2473 if (gimple_call_lhs (stmt))
2474 return false;
2476 /* Get the length of the string passed to fputs. If the length
2477 can't be determined, punt. */
2478 tree len = get_maxval_strlen (arg0, 0);
2479 if (!len
2480 || TREE_CODE (len) != INTEGER_CST)
2481 return false;
2483 switch (compare_tree_int (len, 1))
2485 case -1: /* length is 0, delete the call entirely . */
2486 replace_call_with_value (gsi, integer_zero_node);
2487 return true;
2489 case 0: /* length is 1, call fputc. */
2491 const char *p = c_getstr (arg0);
2492 if (p != NULL)
2494 if (!fn_fputc)
2495 return false;
2497 gimple *repl = gimple_build_call (fn_fputc, 2,
2498 build_int_cst
2499 (integer_type_node, p[0]), arg1);
2500 replace_call_with_call_and_fold (gsi, repl);
2501 return true;
2504 /* FALLTHROUGH */
2505 case 1: /* length is greater than 1, call fwrite. */
2507 /* If optimizing for size keep fputs. */
2508 if (optimize_function_for_size_p (cfun))
2509 return false;
2510 /* New argument list transforming fputs(string, stream) to
2511 fwrite(string, 1, len, stream). */
2512 if (!fn_fwrite)
2513 return false;
2515 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2516 size_one_node, len, arg1);
2517 replace_call_with_call_and_fold (gsi, repl);
2518 return true;
2520 default:
2521 gcc_unreachable ();
2523 return false;
2526 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2527 DEST, SRC, LEN, and SIZE are the arguments to the call.
2528 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2529 code of the builtin. If MAXLEN is not NULL, it is maximum length
2530 passed as third argument. */
2532 static bool
2533 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2534 tree dest, tree src, tree len, tree size,
2535 enum built_in_function fcode)
2537 gimple *stmt = gsi_stmt (*gsi);
2538 location_t loc = gimple_location (stmt);
2539 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2540 tree fn;
2542 /* If SRC and DEST are the same (and not volatile), return DEST
2543 (resp. DEST+LEN for __mempcpy_chk). */
2544 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2546 if (fcode != BUILT_IN_MEMPCPY_CHK)
2548 replace_call_with_value (gsi, dest);
2549 return true;
2551 else
2553 gimple_seq stmts = NULL;
2554 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2555 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2556 TREE_TYPE (dest), dest, len);
2557 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2558 replace_call_with_value (gsi, temp);
2559 return true;
2563 if (! tree_fits_uhwi_p (size))
2564 return false;
2566 tree maxlen = get_maxval_strlen (len, 2);
2567 if (! integer_all_onesp (size))
2569 if (! tree_fits_uhwi_p (len))
2571 /* If LEN is not constant, try MAXLEN too.
2572 For MAXLEN only allow optimizing into non-_ocs function
2573 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2574 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2576 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2578 /* (void) __mempcpy_chk () can be optimized into
2579 (void) __memcpy_chk (). */
2580 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2581 if (!fn)
2582 return false;
2584 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2585 replace_call_with_call_and_fold (gsi, repl);
2586 return true;
2588 return false;
2591 else
2592 maxlen = len;
2594 if (tree_int_cst_lt (size, maxlen))
2595 return false;
2598 fn = NULL_TREE;
2599 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2600 mem{cpy,pcpy,move,set} is available. */
2601 switch (fcode)
2603 case BUILT_IN_MEMCPY_CHK:
2604 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2605 break;
2606 case BUILT_IN_MEMPCPY_CHK:
2607 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2608 break;
2609 case BUILT_IN_MEMMOVE_CHK:
2610 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2611 break;
2612 case BUILT_IN_MEMSET_CHK:
2613 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2614 break;
2615 default:
2616 break;
2619 if (!fn)
2620 return false;
2622 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2623 replace_call_with_call_and_fold (gsi, repl);
2624 return true;
2627 /* Fold a call to the __st[rp]cpy_chk builtin.
2628 DEST, SRC, and SIZE are the arguments to the call.
2629 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2630 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2631 strings passed as second argument. */
2633 static bool
2634 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2635 tree dest,
2636 tree src, tree size,
2637 enum built_in_function fcode)
2639 gimple *stmt = gsi_stmt (*gsi);
2640 location_t loc = gimple_location (stmt);
2641 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2642 tree len, fn;
2644 /* If SRC and DEST are the same (and not volatile), return DEST. */
2645 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2647 /* Issue -Wrestrict unless the pointers are null (those do
2648 not point to objects and so do not indicate an overlap;
2649 such calls could be the result of sanitization and jump
2650 threading). */
2651 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2653 tree func = gimple_call_fndecl (stmt);
2655 warning_at (loc, OPT_Wrestrict,
2656 "%qD source argument is the same as destination",
2657 func);
2660 replace_call_with_value (gsi, dest);
2661 return true;
2664 if (! tree_fits_uhwi_p (size))
2665 return false;
2667 tree maxlen = get_maxval_strlen (src, 1);
2668 if (! integer_all_onesp (size))
2670 len = c_strlen (src, 1);
2671 if (! len || ! tree_fits_uhwi_p (len))
2673 /* If LEN is not constant, try MAXLEN too.
2674 For MAXLEN only allow optimizing into non-_ocs function
2675 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2676 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2678 if (fcode == BUILT_IN_STPCPY_CHK)
2680 if (! ignore)
2681 return false;
2683 /* If return value of __stpcpy_chk is ignored,
2684 optimize into __strcpy_chk. */
2685 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2686 if (!fn)
2687 return false;
2689 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2690 replace_call_with_call_and_fold (gsi, repl);
2691 return true;
2694 if (! len || TREE_SIDE_EFFECTS (len))
2695 return false;
2697 /* If c_strlen returned something, but not a constant,
2698 transform __strcpy_chk into __memcpy_chk. */
2699 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2700 if (!fn)
2701 return false;
2703 gimple_seq stmts = NULL;
2704 len = gimple_convert (&stmts, loc, size_type_node, len);
2705 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2706 build_int_cst (size_type_node, 1));
2707 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2708 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2709 replace_call_with_call_and_fold (gsi, repl);
2710 return true;
2713 else
2714 maxlen = len;
2716 if (! tree_int_cst_lt (maxlen, size))
2717 return false;
2720 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2721 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2722 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2723 if (!fn)
2724 return false;
2726 gimple *repl = gimple_build_call (fn, 2, dest, src);
2727 replace_call_with_call_and_fold (gsi, repl);
2728 return true;
2731 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2732 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2733 length passed as third argument. IGNORE is true if return value can be
2734 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2736 static bool
2737 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2738 tree dest, tree src,
2739 tree len, tree size,
2740 enum built_in_function fcode)
2742 gimple *stmt = gsi_stmt (*gsi);
2743 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2744 tree fn;
2746 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2748 /* If return value of __stpncpy_chk is ignored,
2749 optimize into __strncpy_chk. */
2750 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2751 if (fn)
2753 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2754 replace_call_with_call_and_fold (gsi, repl);
2755 return true;
2759 if (! tree_fits_uhwi_p (size))
2760 return false;
2762 tree maxlen = get_maxval_strlen (len, 2);
2763 if (! integer_all_onesp (size))
2765 if (! tree_fits_uhwi_p (len))
2767 /* If LEN is not constant, try MAXLEN too.
2768 For MAXLEN only allow optimizing into non-_ocs function
2769 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2770 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2771 return false;
2773 else
2774 maxlen = len;
2776 if (tree_int_cst_lt (size, maxlen))
2777 return false;
2780 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2781 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2782 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2783 if (!fn)
2784 return false;
2786 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2787 replace_call_with_call_and_fold (gsi, repl);
2788 return true;
2791 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2792 Return NULL_TREE if no simplification can be made. */
2794 static bool
2795 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2797 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2798 location_t loc = gimple_location (stmt);
2799 tree dest = gimple_call_arg (stmt, 0);
2800 tree src = gimple_call_arg (stmt, 1);
2801 tree fn, lenp1;
2803 /* If the result is unused, replace stpcpy with strcpy. */
2804 if (gimple_call_lhs (stmt) == NULL_TREE)
2806 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2807 if (!fn)
2808 return false;
2809 gimple_call_set_fndecl (stmt, fn);
2810 fold_stmt (gsi);
2811 return true;
2814 /* Set to non-null if ARG refers to an unterminated array. */
2815 tree nonstr = NULL;
2816 tree len = c_strlen (src, 1, &nonstr, 1);
2817 if (!len
2818 || TREE_CODE (len) != INTEGER_CST)
2820 nonstr = unterminated_array (src);
2821 if (!nonstr)
2822 return false;
2825 if (nonstr)
2827 /* Avoid folding calls with unterminated arrays. */
2828 if (!gimple_no_warning_p (stmt))
2829 warn_string_no_nul (loc, "stpcpy", src, nonstr);
2830 gimple_set_no_warning (stmt, true);
2831 return false;
2834 if (optimize_function_for_size_p (cfun)
2835 /* If length is zero it's small enough. */
2836 && !integer_zerop (len))
2837 return false;
2839 /* If the source has a known length replace stpcpy with memcpy. */
2840 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2841 if (!fn)
2842 return false;
2844 gimple_seq stmts = NULL;
2845 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2846 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2847 tem, build_int_cst (size_type_node, 1));
2848 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2849 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2850 gimple_set_vuse (repl, gimple_vuse (stmt));
2851 gimple_set_vdef (repl, gimple_vdef (stmt));
2852 if (gimple_vdef (repl)
2853 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2854 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2855 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2856 /* Replace the result with dest + len. */
2857 stmts = NULL;
2858 tem = gimple_convert (&stmts, loc, sizetype, len);
2859 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2860 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2861 POINTER_PLUS_EXPR, dest, tem);
2862 gsi_replace (gsi, ret, false);
2863 /* Finally fold the memcpy call. */
2864 gimple_stmt_iterator gsi2 = *gsi;
2865 gsi_prev (&gsi2);
2866 fold_stmt (&gsi2);
2867 return true;
2870 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2871 NULL_TREE if a normal call should be emitted rather than expanding
2872 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2873 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2874 passed as second argument. */
2876 static bool
2877 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2878 enum built_in_function fcode)
2880 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2881 tree dest, size, len, fn, fmt, flag;
2882 const char *fmt_str;
2884 /* Verify the required arguments in the original call. */
2885 if (gimple_call_num_args (stmt) < 5)
2886 return false;
2888 dest = gimple_call_arg (stmt, 0);
2889 len = gimple_call_arg (stmt, 1);
2890 flag = gimple_call_arg (stmt, 2);
2891 size = gimple_call_arg (stmt, 3);
2892 fmt = gimple_call_arg (stmt, 4);
2894 if (! tree_fits_uhwi_p (size))
2895 return false;
2897 if (! integer_all_onesp (size))
2899 tree maxlen = get_maxval_strlen (len, 2);
2900 if (! tree_fits_uhwi_p (len))
2902 /* If LEN is not constant, try MAXLEN too.
2903 For MAXLEN only allow optimizing into non-_ocs function
2904 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2905 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2906 return false;
2908 else
2909 maxlen = len;
2911 if (tree_int_cst_lt (size, maxlen))
2912 return false;
2915 if (!init_target_chars ())
2916 return false;
2918 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2919 or if format doesn't contain % chars or is "%s". */
2920 if (! integer_zerop (flag))
2922 fmt_str = c_getstr (fmt);
2923 if (fmt_str == NULL)
2924 return false;
2925 if (strchr (fmt_str, target_percent) != NULL
2926 && strcmp (fmt_str, target_percent_s))
2927 return false;
2930 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2931 available. */
2932 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2933 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2934 if (!fn)
2935 return false;
2937 /* Replace the called function and the first 5 argument by 3 retaining
2938 trailing varargs. */
2939 gimple_call_set_fndecl (stmt, fn);
2940 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2941 gimple_call_set_arg (stmt, 0, dest);
2942 gimple_call_set_arg (stmt, 1, len);
2943 gimple_call_set_arg (stmt, 2, fmt);
2944 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2945 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2946 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2947 fold_stmt (gsi);
2948 return true;
2951 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2952 Return NULL_TREE if a normal call should be emitted rather than
2953 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2954 or BUILT_IN_VSPRINTF_CHK. */
2956 static bool
2957 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2958 enum built_in_function fcode)
2960 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2961 tree dest, size, len, fn, fmt, flag;
2962 const char *fmt_str;
2963 unsigned nargs = gimple_call_num_args (stmt);
2965 /* Verify the required arguments in the original call. */
2966 if (nargs < 4)
2967 return false;
2968 dest = gimple_call_arg (stmt, 0);
2969 flag = gimple_call_arg (stmt, 1);
2970 size = gimple_call_arg (stmt, 2);
2971 fmt = gimple_call_arg (stmt, 3);
2973 if (! tree_fits_uhwi_p (size))
2974 return false;
2976 len = NULL_TREE;
2978 if (!init_target_chars ())
2979 return false;
2981 /* Check whether the format is a literal string constant. */
2982 fmt_str = c_getstr (fmt);
2983 if (fmt_str != NULL)
2985 /* If the format doesn't contain % args or %%, we know the size. */
2986 if (strchr (fmt_str, target_percent) == 0)
2988 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2989 len = build_int_cstu (size_type_node, strlen (fmt_str));
2991 /* If the format is "%s" and first ... argument is a string literal,
2992 we know the size too. */
2993 else if (fcode == BUILT_IN_SPRINTF_CHK
2994 && strcmp (fmt_str, target_percent_s) == 0)
2996 tree arg;
2998 if (nargs == 5)
3000 arg = gimple_call_arg (stmt, 4);
3001 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3003 len = c_strlen (arg, 1);
3004 if (! len || ! tree_fits_uhwi_p (len))
3005 len = NULL_TREE;
3011 if (! integer_all_onesp (size))
3013 if (! len || ! tree_int_cst_lt (len, size))
3014 return false;
3017 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3018 or if format doesn't contain % chars or is "%s". */
3019 if (! integer_zerop (flag))
3021 if (fmt_str == NULL)
3022 return false;
3023 if (strchr (fmt_str, target_percent) != NULL
3024 && strcmp (fmt_str, target_percent_s))
3025 return false;
3028 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3029 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3030 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3031 if (!fn)
3032 return false;
3034 /* Replace the called function and the first 4 argument by 2 retaining
3035 trailing varargs. */
3036 gimple_call_set_fndecl (stmt, fn);
3037 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3038 gimple_call_set_arg (stmt, 0, dest);
3039 gimple_call_set_arg (stmt, 1, fmt);
3040 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3041 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3042 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3043 fold_stmt (gsi);
3044 return true;
3047 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3048 ORIG may be null if this is a 2-argument call. We don't attempt to
3049 simplify calls with more than 3 arguments.
3051 Return true if simplification was possible, otherwise false. */
3053 bool
3054 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3056 gimple *stmt = gsi_stmt (*gsi);
3057 tree dest = gimple_call_arg (stmt, 0);
3058 tree fmt = gimple_call_arg (stmt, 1);
3059 tree orig = NULL_TREE;
3060 const char *fmt_str = NULL;
3062 /* Verify the required arguments in the original call. We deal with two
3063 types of sprintf() calls: 'sprintf (str, fmt)' and
3064 'sprintf (dest, "%s", orig)'. */
3065 if (gimple_call_num_args (stmt) > 3)
3066 return false;
3068 if (gimple_call_num_args (stmt) == 3)
3069 orig = gimple_call_arg (stmt, 2);
3071 /* Check whether the format is a literal string constant. */
3072 fmt_str = c_getstr (fmt);
3073 if (fmt_str == NULL)
3074 return false;
3076 if (!init_target_chars ())
3077 return false;
3079 /* If the format doesn't contain % args or %%, use strcpy. */
3080 if (strchr (fmt_str, target_percent) == NULL)
3082 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3084 if (!fn)
3085 return false;
3087 /* Don't optimize sprintf (buf, "abc", ptr++). */
3088 if (orig)
3089 return false;
3091 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3092 'format' is known to contain no % formats. */
3093 gimple_seq stmts = NULL;
3094 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3096 /* Propagate the NO_WARNING bit to avoid issuing the same
3097 warning more than once. */
3098 if (gimple_no_warning_p (stmt))
3099 gimple_set_no_warning (repl, true);
3101 gimple_seq_add_stmt_without_update (&stmts, repl);
3102 if (gimple_call_lhs (stmt))
3104 repl = gimple_build_assign (gimple_call_lhs (stmt),
3105 build_int_cst (integer_type_node,
3106 strlen (fmt_str)));
3107 gimple_seq_add_stmt_without_update (&stmts, repl);
3108 gsi_replace_with_seq_vops (gsi, stmts);
3109 /* gsi now points at the assignment to the lhs, get a
3110 stmt iterator to the memcpy call.
3111 ??? We can't use gsi_for_stmt as that doesn't work when the
3112 CFG isn't built yet. */
3113 gimple_stmt_iterator gsi2 = *gsi;
3114 gsi_prev (&gsi2);
3115 fold_stmt (&gsi2);
3117 else
3119 gsi_replace_with_seq_vops (gsi, stmts);
3120 fold_stmt (gsi);
3122 return true;
3125 /* If the format is "%s", use strcpy if the result isn't used. */
3126 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3128 tree fn;
3129 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3131 if (!fn)
3132 return false;
3134 /* Don't crash on sprintf (str1, "%s"). */
3135 if (!orig)
3136 return false;
3138 tree orig_len = NULL_TREE;
3139 if (gimple_call_lhs (stmt))
3141 orig_len = get_maxval_strlen (orig, 0);
3142 if (!orig_len)
3143 return false;
3146 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3147 gimple_seq stmts = NULL;
3148 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3150 /* Propagate the NO_WARNING bit to avoid issuing the same
3151 warning more than once. */
3152 if (gimple_no_warning_p (stmt))
3153 gimple_set_no_warning (repl, true);
3155 gimple_seq_add_stmt_without_update (&stmts, repl);
3156 if (gimple_call_lhs (stmt))
3158 if (!useless_type_conversion_p (integer_type_node,
3159 TREE_TYPE (orig_len)))
3160 orig_len = fold_convert (integer_type_node, orig_len);
3161 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3162 gimple_seq_add_stmt_without_update (&stmts, repl);
3163 gsi_replace_with_seq_vops (gsi, stmts);
3164 /* gsi now points at the assignment to the lhs, get a
3165 stmt iterator to the memcpy call.
3166 ??? We can't use gsi_for_stmt as that doesn't work when the
3167 CFG isn't built yet. */
3168 gimple_stmt_iterator gsi2 = *gsi;
3169 gsi_prev (&gsi2);
3170 fold_stmt (&gsi2);
3172 else
3174 gsi_replace_with_seq_vops (gsi, stmts);
3175 fold_stmt (gsi);
3177 return true;
3179 return false;
3182 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3183 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3184 attempt to simplify calls with more than 4 arguments.
3186 Return true if simplification was possible, otherwise false. */
3188 bool
3189 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3191 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3192 tree dest = gimple_call_arg (stmt, 0);
3193 tree destsize = gimple_call_arg (stmt, 1);
3194 tree fmt = gimple_call_arg (stmt, 2);
3195 tree orig = NULL_TREE;
3196 const char *fmt_str = NULL;
3198 if (gimple_call_num_args (stmt) > 4)
3199 return false;
3201 if (gimple_call_num_args (stmt) == 4)
3202 orig = gimple_call_arg (stmt, 3);
3204 if (!tree_fits_uhwi_p (destsize))
3205 return false;
3206 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3208 /* Check whether the format is a literal string constant. */
3209 fmt_str = c_getstr (fmt);
3210 if (fmt_str == NULL)
3211 return false;
3213 if (!init_target_chars ())
3214 return false;
3216 /* If the format doesn't contain % args or %%, use strcpy. */
3217 if (strchr (fmt_str, target_percent) == NULL)
3219 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3220 if (!fn)
3221 return false;
3223 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3224 if (orig)
3225 return false;
3227 /* We could expand this as
3228 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3229 or to
3230 memcpy (str, fmt_with_nul_at_cstm1, cst);
3231 but in the former case that might increase code size
3232 and in the latter case grow .rodata section too much.
3233 So punt for now. */
3234 size_t len = strlen (fmt_str);
3235 if (len >= destlen)
3236 return false;
3238 gimple_seq stmts = NULL;
3239 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3240 gimple_seq_add_stmt_without_update (&stmts, repl);
3241 if (gimple_call_lhs (stmt))
3243 repl = gimple_build_assign (gimple_call_lhs (stmt),
3244 build_int_cst (integer_type_node, len));
3245 gimple_seq_add_stmt_without_update (&stmts, repl);
3246 gsi_replace_with_seq_vops (gsi, stmts);
3247 /* gsi now points at the assignment to the lhs, get a
3248 stmt iterator to the memcpy call.
3249 ??? We can't use gsi_for_stmt as that doesn't work when the
3250 CFG isn't built yet. */
3251 gimple_stmt_iterator gsi2 = *gsi;
3252 gsi_prev (&gsi2);
3253 fold_stmt (&gsi2);
3255 else
3257 gsi_replace_with_seq_vops (gsi, stmts);
3258 fold_stmt (gsi);
3260 return true;
3263 /* If the format is "%s", use strcpy if the result isn't used. */
3264 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3266 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3267 if (!fn)
3268 return false;
3270 /* Don't crash on snprintf (str1, cst, "%s"). */
3271 if (!orig)
3272 return false;
3274 tree orig_len = get_maxval_strlen (orig, 0);
3275 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3276 return false;
3278 /* We could expand this as
3279 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3280 or to
3281 memcpy (str1, str2_with_nul_at_cstm1, cst);
3282 but in the former case that might increase code size
3283 and in the latter case grow .rodata section too much.
3284 So punt for now. */
3285 if (compare_tree_int (orig_len, destlen) >= 0)
3286 return false;
3288 /* Convert snprintf (str1, cst, "%s", str2) into
3289 strcpy (str1, str2) if strlen (str2) < cst. */
3290 gimple_seq stmts = NULL;
3291 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3292 gimple_seq_add_stmt_without_update (&stmts, repl);
3293 if (gimple_call_lhs (stmt))
3295 if (!useless_type_conversion_p (integer_type_node,
3296 TREE_TYPE (orig_len)))
3297 orig_len = fold_convert (integer_type_node, orig_len);
3298 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3299 gimple_seq_add_stmt_without_update (&stmts, repl);
3300 gsi_replace_with_seq_vops (gsi, stmts);
3301 /* gsi now points at the assignment to the lhs, get a
3302 stmt iterator to the memcpy call.
3303 ??? We can't use gsi_for_stmt as that doesn't work when the
3304 CFG isn't built yet. */
3305 gimple_stmt_iterator gsi2 = *gsi;
3306 gsi_prev (&gsi2);
3307 fold_stmt (&gsi2);
3309 else
3311 gsi_replace_with_seq_vops (gsi, stmts);
3312 fold_stmt (gsi);
3314 return true;
3316 return false;
3319 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3320 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3321 more than 3 arguments, and ARG may be null in the 2-argument case.
3323 Return NULL_TREE if no simplification was possible, otherwise return the
3324 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3325 code of the function to be simplified. */
3327 static bool
3328 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3329 tree fp, tree fmt, tree arg,
3330 enum built_in_function fcode)
3332 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3333 tree fn_fputc, fn_fputs;
3334 const char *fmt_str = NULL;
3336 /* If the return value is used, don't do the transformation. */
3337 if (gimple_call_lhs (stmt) != NULL_TREE)
3338 return false;
3340 /* Check whether the format is a literal string constant. */
3341 fmt_str = c_getstr (fmt);
3342 if (fmt_str == NULL)
3343 return false;
3345 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3347 /* If we're using an unlocked function, assume the other
3348 unlocked functions exist explicitly. */
3349 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3350 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3352 else
3354 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3355 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3358 if (!init_target_chars ())
3359 return false;
3361 /* If the format doesn't contain % args or %%, use strcpy. */
3362 if (strchr (fmt_str, target_percent) == NULL)
3364 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3365 && arg)
3366 return false;
3368 /* If the format specifier was "", fprintf does nothing. */
3369 if (fmt_str[0] == '\0')
3371 replace_call_with_value (gsi, NULL_TREE);
3372 return true;
3375 /* When "string" doesn't contain %, replace all cases of
3376 fprintf (fp, string) with fputs (string, fp). The fputs
3377 builtin will take care of special cases like length == 1. */
3378 if (fn_fputs)
3380 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3381 replace_call_with_call_and_fold (gsi, repl);
3382 return true;
3386 /* The other optimizations can be done only on the non-va_list variants. */
3387 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3388 return false;
3390 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3391 else if (strcmp (fmt_str, target_percent_s) == 0)
3393 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3394 return false;
3395 if (fn_fputs)
3397 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3398 replace_call_with_call_and_fold (gsi, repl);
3399 return true;
3403 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3404 else if (strcmp (fmt_str, target_percent_c) == 0)
3406 if (!arg
3407 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3408 return false;
3409 if (fn_fputc)
3411 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3412 replace_call_with_call_and_fold (gsi, repl);
3413 return true;
3417 return false;
3420 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3421 FMT and ARG are the arguments to the call; we don't fold cases with
3422 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3424 Return NULL_TREE if no simplification was possible, otherwise return the
3425 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3426 code of the function to be simplified. */
3428 static bool
3429 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3430 tree arg, enum built_in_function fcode)
3432 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3433 tree fn_putchar, fn_puts, newarg;
3434 const char *fmt_str = NULL;
3436 /* If the return value is used, don't do the transformation. */
3437 if (gimple_call_lhs (stmt) != NULL_TREE)
3438 return false;
3440 /* Check whether the format is a literal string constant. */
3441 fmt_str = c_getstr (fmt);
3442 if (fmt_str == NULL)
3443 return false;
3445 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3447 /* If we're using an unlocked function, assume the other
3448 unlocked functions exist explicitly. */
3449 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3450 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3452 else
3454 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3455 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3458 if (!init_target_chars ())
3459 return false;
3461 if (strcmp (fmt_str, target_percent_s) == 0
3462 || strchr (fmt_str, target_percent) == NULL)
3464 const char *str;
3466 if (strcmp (fmt_str, target_percent_s) == 0)
3468 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3469 return false;
3471 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3472 return false;
3474 str = c_getstr (arg);
3475 if (str == NULL)
3476 return false;
3478 else
3480 /* The format specifier doesn't contain any '%' characters. */
3481 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3482 && arg)
3483 return false;
3484 str = fmt_str;
3487 /* If the string was "", printf does nothing. */
3488 if (str[0] == '\0')
3490 replace_call_with_value (gsi, NULL_TREE);
3491 return true;
3494 /* If the string has length of 1, call putchar. */
3495 if (str[1] == '\0')
3497 /* Given printf("c"), (where c is any one character,)
3498 convert "c"[0] to an int and pass that to the replacement
3499 function. */
3500 newarg = build_int_cst (integer_type_node, str[0]);
3501 if (fn_putchar)
3503 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3504 replace_call_with_call_and_fold (gsi, repl);
3505 return true;
3508 else
3510 /* If the string was "string\n", call puts("string"). */
3511 size_t len = strlen (str);
3512 if ((unsigned char)str[len - 1] == target_newline
3513 && (size_t) (int) len == len
3514 && (int) len > 0)
3516 char *newstr;
3518 /* Create a NUL-terminated string that's one char shorter
3519 than the original, stripping off the trailing '\n'. */
3520 newstr = xstrdup (str);
3521 newstr[len - 1] = '\0';
3522 newarg = build_string_literal (len, newstr);
3523 free (newstr);
3524 if (fn_puts)
3526 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3527 replace_call_with_call_and_fold (gsi, repl);
3528 return true;
3531 else
3532 /* We'd like to arrange to call fputs(string,stdout) here,
3533 but we need stdout and don't have a way to get it yet. */
3534 return false;
3538 /* The other optimizations can be done only on the non-va_list variants. */
3539 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3540 return false;
3542 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3543 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3545 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3546 return false;
3547 if (fn_puts)
3549 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3550 replace_call_with_call_and_fold (gsi, repl);
3551 return true;
3555 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3556 else if (strcmp (fmt_str, target_percent_c) == 0)
3558 if (!arg || ! useless_type_conversion_p (integer_type_node,
3559 TREE_TYPE (arg)))
3560 return false;
3561 if (fn_putchar)
3563 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3564 replace_call_with_call_and_fold (gsi, repl);
3565 return true;
3569 return false;
3574 /* Fold a call to __builtin_strlen with known length LEN. */
3576 static bool
3577 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3579 gimple *stmt = gsi_stmt (*gsi);
3580 tree arg = gimple_call_arg (stmt, 0);
3582 wide_int minlen;
3583 wide_int maxlen;
3585 /* Set to non-null if ARG refers to an unterminated array. */
3586 tree nonstr;
3587 tree lenrange[2];
3588 if (!get_range_strlen (arg, lenrange, 1, true, &nonstr)
3589 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3590 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3592 /* The range of lengths refers to either a single constant
3593 string or to the longest and shortest constant string
3594 referenced by the argument of the strlen() call, or to
3595 the strings that can possibly be stored in the arrays
3596 the argument refers to. */
3597 minlen = wi::to_wide (lenrange[0]);
3598 maxlen = wi::to_wide (lenrange[1]);
3600 else
3602 unsigned prec = TYPE_PRECISION (sizetype);
3604 minlen = wi::shwi (0, prec);
3605 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3608 if (minlen == maxlen)
3610 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3611 true, GSI_SAME_STMT);
3612 replace_call_with_value (gsi, lenrange[0]);
3613 return true;
3616 if (tree lhs = gimple_call_lhs (stmt))
3617 if (TREE_CODE (lhs) == SSA_NAME
3618 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3619 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3621 return false;
3624 /* Fold a call to __builtin_acc_on_device. */
3626 static bool
3627 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3629 /* Defer folding until we know which compiler we're in. */
3630 if (symtab->state != EXPANSION)
3631 return false;
3633 unsigned val_host = GOMP_DEVICE_HOST;
3634 unsigned val_dev = GOMP_DEVICE_NONE;
3636 #ifdef ACCEL_COMPILER
3637 val_host = GOMP_DEVICE_NOT_HOST;
3638 val_dev = ACCEL_COMPILER_acc_device;
3639 #endif
3641 location_t loc = gimple_location (gsi_stmt (*gsi));
3643 tree host_eq = make_ssa_name (boolean_type_node);
3644 gimple *host_ass = gimple_build_assign
3645 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3646 gimple_set_location (host_ass, loc);
3647 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3649 tree dev_eq = make_ssa_name (boolean_type_node);
3650 gimple *dev_ass = gimple_build_assign
3651 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3652 gimple_set_location (dev_ass, loc);
3653 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3655 tree result = make_ssa_name (boolean_type_node);
3656 gimple *result_ass = gimple_build_assign
3657 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3658 gimple_set_location (result_ass, loc);
3659 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3661 replace_call_with_value (gsi, result);
3663 return true;
3666 /* Fold realloc (0, n) -> malloc (n). */
3668 static bool
3669 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3671 gimple *stmt = gsi_stmt (*gsi);
3672 tree arg = gimple_call_arg (stmt, 0);
3673 tree size = gimple_call_arg (stmt, 1);
3675 if (operand_equal_p (arg, null_pointer_node, 0))
3677 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3678 if (fn_malloc)
3680 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3681 replace_call_with_call_and_fold (gsi, repl);
3682 return true;
3685 return false;
3688 /* Fold the non-target builtin at *GSI and return whether any simplification
3689 was made. */
3691 static bool
3692 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3694 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3695 tree callee = gimple_call_fndecl (stmt);
3697 /* Give up for always_inline inline builtins until they are
3698 inlined. */
3699 if (avoid_folding_inline_builtin (callee))
3700 return false;
3702 unsigned n = gimple_call_num_args (stmt);
3703 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3704 switch (fcode)
3706 case BUILT_IN_BCMP:
3707 return gimple_fold_builtin_bcmp (gsi);
3708 case BUILT_IN_BCOPY:
3709 return gimple_fold_builtin_bcopy (gsi);
3710 case BUILT_IN_BZERO:
3711 return gimple_fold_builtin_bzero (gsi);
3713 case BUILT_IN_MEMSET:
3714 return gimple_fold_builtin_memset (gsi,
3715 gimple_call_arg (stmt, 1),
3716 gimple_call_arg (stmt, 2));
3717 case BUILT_IN_MEMCPY:
3718 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3719 gimple_call_arg (stmt, 1), 0);
3720 case BUILT_IN_MEMPCPY:
3721 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3722 gimple_call_arg (stmt, 1), 1);
3723 case BUILT_IN_MEMMOVE:
3724 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3725 gimple_call_arg (stmt, 1), 3);
3726 case BUILT_IN_SPRINTF_CHK:
3727 case BUILT_IN_VSPRINTF_CHK:
3728 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3729 case BUILT_IN_STRCAT_CHK:
3730 return gimple_fold_builtin_strcat_chk (gsi);
3731 case BUILT_IN_STRNCAT_CHK:
3732 return gimple_fold_builtin_strncat_chk (gsi);
3733 case BUILT_IN_STRLEN:
3734 return gimple_fold_builtin_strlen (gsi);
3735 case BUILT_IN_STRCPY:
3736 return gimple_fold_builtin_strcpy (gsi,
3737 gimple_call_arg (stmt, 0),
3738 gimple_call_arg (stmt, 1));
3739 case BUILT_IN_STRNCPY:
3740 return gimple_fold_builtin_strncpy (gsi,
3741 gimple_call_arg (stmt, 0),
3742 gimple_call_arg (stmt, 1),
3743 gimple_call_arg (stmt, 2));
3744 case BUILT_IN_STRCAT:
3745 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3746 gimple_call_arg (stmt, 1));
3747 case BUILT_IN_STRNCAT:
3748 return gimple_fold_builtin_strncat (gsi);
3749 case BUILT_IN_INDEX:
3750 case BUILT_IN_STRCHR:
3751 return gimple_fold_builtin_strchr (gsi, false);
3752 case BUILT_IN_RINDEX:
3753 case BUILT_IN_STRRCHR:
3754 return gimple_fold_builtin_strchr (gsi, true);
3755 case BUILT_IN_STRSTR:
3756 return gimple_fold_builtin_strstr (gsi);
3757 case BUILT_IN_STRCMP:
3758 case BUILT_IN_STRCMP_EQ:
3759 case BUILT_IN_STRCASECMP:
3760 case BUILT_IN_STRNCMP:
3761 case BUILT_IN_STRNCMP_EQ:
3762 case BUILT_IN_STRNCASECMP:
3763 return gimple_fold_builtin_string_compare (gsi);
3764 case BUILT_IN_MEMCHR:
3765 return gimple_fold_builtin_memchr (gsi);
3766 case BUILT_IN_FPUTS:
3767 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3768 gimple_call_arg (stmt, 1), false);
3769 case BUILT_IN_FPUTS_UNLOCKED:
3770 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3771 gimple_call_arg (stmt, 1), true);
3772 case BUILT_IN_MEMCPY_CHK:
3773 case BUILT_IN_MEMPCPY_CHK:
3774 case BUILT_IN_MEMMOVE_CHK:
3775 case BUILT_IN_MEMSET_CHK:
3776 return gimple_fold_builtin_memory_chk (gsi,
3777 gimple_call_arg (stmt, 0),
3778 gimple_call_arg (stmt, 1),
3779 gimple_call_arg (stmt, 2),
3780 gimple_call_arg (stmt, 3),
3781 fcode);
3782 case BUILT_IN_STPCPY:
3783 return gimple_fold_builtin_stpcpy (gsi);
3784 case BUILT_IN_STRCPY_CHK:
3785 case BUILT_IN_STPCPY_CHK:
3786 return gimple_fold_builtin_stxcpy_chk (gsi,
3787 gimple_call_arg (stmt, 0),
3788 gimple_call_arg (stmt, 1),
3789 gimple_call_arg (stmt, 2),
3790 fcode);
3791 case BUILT_IN_STRNCPY_CHK:
3792 case BUILT_IN_STPNCPY_CHK:
3793 return gimple_fold_builtin_stxncpy_chk (gsi,
3794 gimple_call_arg (stmt, 0),
3795 gimple_call_arg (stmt, 1),
3796 gimple_call_arg (stmt, 2),
3797 gimple_call_arg (stmt, 3),
3798 fcode);
3799 case BUILT_IN_SNPRINTF_CHK:
3800 case BUILT_IN_VSNPRINTF_CHK:
3801 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3803 case BUILT_IN_FPRINTF:
3804 case BUILT_IN_FPRINTF_UNLOCKED:
3805 case BUILT_IN_VFPRINTF:
3806 if (n == 2 || n == 3)
3807 return gimple_fold_builtin_fprintf (gsi,
3808 gimple_call_arg (stmt, 0),
3809 gimple_call_arg (stmt, 1),
3810 n == 3
3811 ? gimple_call_arg (stmt, 2)
3812 : NULL_TREE,
3813 fcode);
3814 break;
3815 case BUILT_IN_FPRINTF_CHK:
3816 case BUILT_IN_VFPRINTF_CHK:
3817 if (n == 3 || n == 4)
3818 return gimple_fold_builtin_fprintf (gsi,
3819 gimple_call_arg (stmt, 0),
3820 gimple_call_arg (stmt, 2),
3821 n == 4
3822 ? gimple_call_arg (stmt, 3)
3823 : NULL_TREE,
3824 fcode);
3825 break;
3826 case BUILT_IN_PRINTF:
3827 case BUILT_IN_PRINTF_UNLOCKED:
3828 case BUILT_IN_VPRINTF:
3829 if (n == 1 || n == 2)
3830 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3831 n == 2
3832 ? gimple_call_arg (stmt, 1)
3833 : NULL_TREE, fcode);
3834 break;
3835 case BUILT_IN_PRINTF_CHK:
3836 case BUILT_IN_VPRINTF_CHK:
3837 if (n == 2 || n == 3)
3838 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3839 n == 3
3840 ? gimple_call_arg (stmt, 2)
3841 : NULL_TREE, fcode);
3842 break;
3843 case BUILT_IN_ACC_ON_DEVICE:
3844 return gimple_fold_builtin_acc_on_device (gsi,
3845 gimple_call_arg (stmt, 0));
3846 case BUILT_IN_REALLOC:
3847 return gimple_fold_builtin_realloc (gsi);
3849 default:;
3852 /* Try the generic builtin folder. */
3853 bool ignore = (gimple_call_lhs (stmt) == NULL);
3854 tree result = fold_call_stmt (stmt, ignore);
3855 if (result)
3857 if (ignore)
3858 STRIP_NOPS (result);
3859 else
3860 result = fold_convert (gimple_call_return_type (stmt), result);
3861 if (!update_call_from_tree (gsi, result))
3862 gimplify_and_update_call_from_tree (gsi, result);
3863 return true;
3866 return false;
3869 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3870 function calls to constants, where possible. */
3872 static tree
3873 fold_internal_goacc_dim (const gimple *call)
3875 int axis = oacc_get_ifn_dim_arg (call);
3876 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3877 tree result = NULL_TREE;
3878 tree type = TREE_TYPE (gimple_call_lhs (call));
3880 switch (gimple_call_internal_fn (call))
3882 case IFN_GOACC_DIM_POS:
3883 /* If the size is 1, we know the answer. */
3884 if (size == 1)
3885 result = build_int_cst (type, 0);
3886 break;
3887 case IFN_GOACC_DIM_SIZE:
3888 /* If the size is not dynamic, we know the answer. */
3889 if (size)
3890 result = build_int_cst (type, size);
3891 break;
3892 default:
3893 break;
3896 return result;
3899 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3900 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3901 &var where var is only addressable because of such calls. */
3903 bool
3904 optimize_atomic_compare_exchange_p (gimple *stmt)
3906 if (gimple_call_num_args (stmt) != 6
3907 || !flag_inline_atomics
3908 || !optimize
3909 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3910 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3911 || !gimple_vdef (stmt)
3912 || !gimple_vuse (stmt))
3913 return false;
3915 tree fndecl = gimple_call_fndecl (stmt);
3916 switch (DECL_FUNCTION_CODE (fndecl))
3918 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3919 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3920 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3921 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3922 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3923 break;
3924 default:
3925 return false;
3928 tree expected = gimple_call_arg (stmt, 1);
3929 if (TREE_CODE (expected) != ADDR_EXPR
3930 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3931 return false;
3933 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3934 if (!is_gimple_reg_type (etype)
3935 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3936 || TREE_THIS_VOLATILE (etype)
3937 || VECTOR_TYPE_P (etype)
3938 || TREE_CODE (etype) == COMPLEX_TYPE
3939 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3940 might not preserve all the bits. See PR71716. */
3941 || SCALAR_FLOAT_TYPE_P (etype)
3942 || maybe_ne (TYPE_PRECISION (etype),
3943 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3944 return false;
3946 tree weak = gimple_call_arg (stmt, 3);
3947 if (!integer_zerop (weak) && !integer_onep (weak))
3948 return false;
3950 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3951 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3952 machine_mode mode = TYPE_MODE (itype);
3954 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3955 == CODE_FOR_nothing
3956 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3957 return false;
3959 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
3960 return false;
3962 return true;
3965 /* Fold
3966 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3967 into
3968 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3969 i = IMAGPART_EXPR <t>;
3970 r = (_Bool) i;
3971 e = REALPART_EXPR <t>; */
3973 void
3974 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3976 gimple *stmt = gsi_stmt (*gsi);
3977 tree fndecl = gimple_call_fndecl (stmt);
3978 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3979 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3980 tree ctype = build_complex_type (itype);
3981 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3982 bool throws = false;
3983 edge e = NULL;
3984 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3985 expected);
3986 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3987 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3988 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3990 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3991 build1 (VIEW_CONVERT_EXPR, itype,
3992 gimple_assign_lhs (g)));
3993 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3995 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3996 + int_size_in_bytes (itype);
3997 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3998 gimple_call_arg (stmt, 0),
3999 gimple_assign_lhs (g),
4000 gimple_call_arg (stmt, 2),
4001 build_int_cst (integer_type_node, flag),
4002 gimple_call_arg (stmt, 4),
4003 gimple_call_arg (stmt, 5));
4004 tree lhs = make_ssa_name (ctype);
4005 gimple_call_set_lhs (g, lhs);
4006 gimple_set_vdef (g, gimple_vdef (stmt));
4007 gimple_set_vuse (g, gimple_vuse (stmt));
4008 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
4009 tree oldlhs = gimple_call_lhs (stmt);
4010 if (stmt_can_throw_internal (stmt))
4012 throws = true;
4013 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4015 gimple_call_set_nothrow (as_a <gcall *> (g),
4016 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4017 gimple_call_set_lhs (stmt, NULL_TREE);
4018 gsi_replace (gsi, g, true);
4019 if (oldlhs)
4021 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4022 build1 (IMAGPART_EXPR, itype, lhs));
4023 if (throws)
4025 gsi_insert_on_edge_immediate (e, g);
4026 *gsi = gsi_for_stmt (g);
4028 else
4029 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4030 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4031 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4033 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4034 build1 (REALPART_EXPR, itype, lhs));
4035 if (throws && oldlhs == NULL_TREE)
4037 gsi_insert_on_edge_immediate (e, g);
4038 *gsi = gsi_for_stmt (g);
4040 else
4041 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4042 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4044 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4045 VIEW_CONVERT_EXPR,
4046 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4047 gimple_assign_lhs (g)));
4048 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4050 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4051 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4052 *gsi = gsiret;
4055 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4056 doesn't fit into TYPE. The test for overflow should be regardless of
4057 -fwrapv, and even for unsigned types. */
4059 bool
4060 arith_overflowed_p (enum tree_code code, const_tree type,
4061 const_tree arg0, const_tree arg1)
4063 widest2_int warg0 = widest2_int_cst (arg0);
4064 widest2_int warg1 = widest2_int_cst (arg1);
4065 widest2_int wres;
4066 switch (code)
4068 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4069 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4070 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4071 default: gcc_unreachable ();
4073 signop sign = TYPE_SIGN (type);
4074 if (sign == UNSIGNED && wi::neg_p (wres))
4075 return true;
4076 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4079 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4080 The statement may be replaced by another statement, e.g., if the call
4081 simplifies to a constant value. Return true if any changes were made.
4082 It is assumed that the operands have been previously folded. */
4084 static bool
4085 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4087 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4088 tree callee;
4089 bool changed = false;
4090 unsigned i;
4092 /* Fold *& in call arguments. */
4093 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4094 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4096 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4097 if (tmp)
4099 gimple_call_set_arg (stmt, i, tmp);
4100 changed = true;
4104 /* Check for virtual calls that became direct calls. */
4105 callee = gimple_call_fn (stmt);
4106 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4108 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4110 if (dump_file && virtual_method_call_p (callee)
4111 && !possible_polymorphic_call_target_p
4112 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4113 (OBJ_TYPE_REF_EXPR (callee)))))
4115 fprintf (dump_file,
4116 "Type inheritance inconsistent devirtualization of ");
4117 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4118 fprintf (dump_file, " to ");
4119 print_generic_expr (dump_file, callee, TDF_SLIM);
4120 fprintf (dump_file, "\n");
4123 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4124 changed = true;
4126 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4128 bool final;
4129 vec <cgraph_node *>targets
4130 = possible_polymorphic_call_targets (callee, stmt, &final);
4131 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4133 tree lhs = gimple_call_lhs (stmt);
4134 if (dump_enabled_p ())
4136 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4137 "folding virtual function call to %s\n",
4138 targets.length () == 1
4139 ? targets[0]->name ()
4140 : "__builtin_unreachable");
4142 if (targets.length () == 1)
4144 tree fndecl = targets[0]->decl;
4145 gimple_call_set_fndecl (stmt, fndecl);
4146 changed = true;
4147 /* If changing the call to __cxa_pure_virtual
4148 or similar noreturn function, adjust gimple_call_fntype
4149 too. */
4150 if (gimple_call_noreturn_p (stmt)
4151 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4152 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4153 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4154 == void_type_node))
4155 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4156 /* If the call becomes noreturn, remove the lhs. */
4157 if (lhs
4158 && gimple_call_noreturn_p (stmt)
4159 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4160 || should_remove_lhs_p (lhs)))
4162 if (TREE_CODE (lhs) == SSA_NAME)
4164 tree var = create_tmp_var (TREE_TYPE (lhs));
4165 tree def = get_or_create_ssa_default_def (cfun, var);
4166 gimple *new_stmt = gimple_build_assign (lhs, def);
4167 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4169 gimple_call_set_lhs (stmt, NULL_TREE);
4171 maybe_remove_unused_call_args (cfun, stmt);
4173 else
4175 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4176 gimple *new_stmt = gimple_build_call (fndecl, 0);
4177 gimple_set_location (new_stmt, gimple_location (stmt));
4178 /* If the call had a SSA name as lhs morph that into
4179 an uninitialized value. */
4180 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4182 tree var = create_tmp_var (TREE_TYPE (lhs));
4183 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4184 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4185 set_ssa_default_def (cfun, var, lhs);
4187 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4188 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4189 gsi_replace (gsi, new_stmt, false);
4190 return true;
4196 /* Check for indirect calls that became direct calls, and then
4197 no longer require a static chain. */
4198 if (gimple_call_chain (stmt))
4200 tree fn = gimple_call_fndecl (stmt);
4201 if (fn && !DECL_STATIC_CHAIN (fn))
4203 gimple_call_set_chain (stmt, NULL);
4204 changed = true;
4206 else
4208 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4209 if (tmp)
4211 gimple_call_set_chain (stmt, tmp);
4212 changed = true;
4217 if (inplace)
4218 return changed;
4220 /* Check for builtins that CCP can handle using information not
4221 available in the generic fold routines. */
4222 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4224 if (gimple_fold_builtin (gsi))
4225 changed = true;
4227 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4229 changed |= targetm.gimple_fold_builtin (gsi);
4231 else if (gimple_call_internal_p (stmt))
4233 enum tree_code subcode = ERROR_MARK;
4234 tree result = NULL_TREE;
4235 bool cplx_result = false;
4236 tree overflow = NULL_TREE;
4237 switch (gimple_call_internal_fn (stmt))
4239 case IFN_BUILTIN_EXPECT:
4240 result = fold_builtin_expect (gimple_location (stmt),
4241 gimple_call_arg (stmt, 0),
4242 gimple_call_arg (stmt, 1),
4243 gimple_call_arg (stmt, 2),
4244 NULL_TREE);
4245 break;
4246 case IFN_UBSAN_OBJECT_SIZE:
4248 tree offset = gimple_call_arg (stmt, 1);
4249 tree objsize = gimple_call_arg (stmt, 2);
4250 if (integer_all_onesp (objsize)
4251 || (TREE_CODE (offset) == INTEGER_CST
4252 && TREE_CODE (objsize) == INTEGER_CST
4253 && tree_int_cst_le (offset, objsize)))
4255 replace_call_with_value (gsi, NULL_TREE);
4256 return true;
4259 break;
4260 case IFN_UBSAN_PTR:
4261 if (integer_zerop (gimple_call_arg (stmt, 1)))
4263 replace_call_with_value (gsi, NULL_TREE);
4264 return true;
4266 break;
4267 case IFN_UBSAN_BOUNDS:
4269 tree index = gimple_call_arg (stmt, 1);
4270 tree bound = gimple_call_arg (stmt, 2);
4271 if (TREE_CODE (index) == INTEGER_CST
4272 && TREE_CODE (bound) == INTEGER_CST)
4274 index = fold_convert (TREE_TYPE (bound), index);
4275 if (TREE_CODE (index) == INTEGER_CST
4276 && tree_int_cst_le (index, bound))
4278 replace_call_with_value (gsi, NULL_TREE);
4279 return true;
4283 break;
4284 case IFN_GOACC_DIM_SIZE:
4285 case IFN_GOACC_DIM_POS:
4286 result = fold_internal_goacc_dim (stmt);
4287 break;
4288 case IFN_UBSAN_CHECK_ADD:
4289 subcode = PLUS_EXPR;
4290 break;
4291 case IFN_UBSAN_CHECK_SUB:
4292 subcode = MINUS_EXPR;
4293 break;
4294 case IFN_UBSAN_CHECK_MUL:
4295 subcode = MULT_EXPR;
4296 break;
4297 case IFN_ADD_OVERFLOW:
4298 subcode = PLUS_EXPR;
4299 cplx_result = true;
4300 break;
4301 case IFN_SUB_OVERFLOW:
4302 subcode = MINUS_EXPR;
4303 cplx_result = true;
4304 break;
4305 case IFN_MUL_OVERFLOW:
4306 subcode = MULT_EXPR;
4307 cplx_result = true;
4308 break;
4309 default:
4310 break;
4312 if (subcode != ERROR_MARK)
4314 tree arg0 = gimple_call_arg (stmt, 0);
4315 tree arg1 = gimple_call_arg (stmt, 1);
4316 tree type = TREE_TYPE (arg0);
4317 if (cplx_result)
4319 tree lhs = gimple_call_lhs (stmt);
4320 if (lhs == NULL_TREE)
4321 type = NULL_TREE;
4322 else
4323 type = TREE_TYPE (TREE_TYPE (lhs));
4325 if (type == NULL_TREE)
4327 /* x = y + 0; x = y - 0; x = y * 0; */
4328 else if (integer_zerop (arg1))
4329 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4330 /* x = 0 + y; x = 0 * y; */
4331 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4332 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4333 /* x = y - y; */
4334 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4335 result = integer_zero_node;
4336 /* x = y * 1; x = 1 * y; */
4337 else if (subcode == MULT_EXPR && integer_onep (arg1))
4338 result = arg0;
4339 else if (subcode == MULT_EXPR && integer_onep (arg0))
4340 result = arg1;
4341 else if (TREE_CODE (arg0) == INTEGER_CST
4342 && TREE_CODE (arg1) == INTEGER_CST)
4344 if (cplx_result)
4345 result = int_const_binop (subcode, fold_convert (type, arg0),
4346 fold_convert (type, arg1));
4347 else
4348 result = int_const_binop (subcode, arg0, arg1);
4349 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4351 if (cplx_result)
4352 overflow = build_one_cst (type);
4353 else
4354 result = NULL_TREE;
4357 if (result)
4359 if (result == integer_zero_node)
4360 result = build_zero_cst (type);
4361 else if (cplx_result && TREE_TYPE (result) != type)
4363 if (TREE_CODE (result) == INTEGER_CST)
4365 if (arith_overflowed_p (PLUS_EXPR, type, result,
4366 integer_zero_node))
4367 overflow = build_one_cst (type);
4369 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4370 && TYPE_UNSIGNED (type))
4371 || (TYPE_PRECISION (type)
4372 < (TYPE_PRECISION (TREE_TYPE (result))
4373 + (TYPE_UNSIGNED (TREE_TYPE (result))
4374 && !TYPE_UNSIGNED (type)))))
4375 result = NULL_TREE;
4376 if (result)
4377 result = fold_convert (type, result);
4382 if (result)
4384 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4385 result = drop_tree_overflow (result);
4386 if (cplx_result)
4388 if (overflow == NULL_TREE)
4389 overflow = build_zero_cst (TREE_TYPE (result));
4390 tree ctype = build_complex_type (TREE_TYPE (result));
4391 if (TREE_CODE (result) == INTEGER_CST
4392 && TREE_CODE (overflow) == INTEGER_CST)
4393 result = build_complex (ctype, result, overflow);
4394 else
4395 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4396 ctype, result, overflow);
4398 if (!update_call_from_tree (gsi, result))
4399 gimplify_and_update_call_from_tree (gsi, result);
4400 changed = true;
4404 return changed;
4408 /* Return true whether NAME has a use on STMT. */
4410 static bool
4411 has_use_on_stmt (tree name, gimple *stmt)
4413 imm_use_iterator iter;
4414 use_operand_p use_p;
4415 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4416 if (USE_STMT (use_p) == stmt)
4417 return true;
4418 return false;
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RES_OP and the
   associated statements in *SEQ.  Does the replacement according to
   INPLACE (when INPLACE no new statements may be emitted) and returns
   true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* The first operand may itself be a comparison tree (e.g. for a
     COND_EXPR result); check its two operands for abnormals as well.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      /* A comparison result can be installed directly as the GIMPLE_COND
	 condition, unless it could throw and conditions may not.  */
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      /* A bare SSA name result becomes OPS[0] != 0.  */
      else if (res_op->code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      /* A constant result folds the condition to always-true/false.  */
      else if (res_op->code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      /* Otherwise materialize the result into a new SSA name and
	 compare that against zero — only allowed when not INPLACE.  */
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* For INPLACE only replace the RHS when the new one does not need
	 more operand slots than the existing statement provides.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* Same call, simplified arguments: update the args in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Push the result computation to *SEQ with the original LHS as
	     result and replace the whole statement by the sequence.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.
   *T is the reference (or address of it) to canonicalize; it is updated
   in place.  Returns true if anything was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* For an ADDR_EXPR canonicalize the address operand instead.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     only rewrite when the access stays in bounds.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components to get at the base reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand and take the address of the base instead.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  When INPLACE the statement may only be
   modified in place, never replaced or grown.  VALUEIZE is used to
   look through SSA edges during pattern matching.  Returns true if
   the statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping
		     the comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* When INPLACE only install the new RHS if it does not need
	   more operand slots than the existing statement has.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    /* An input may only be folded to a memory reference when the
	       constraint allows memory but not a register.  */
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold &ref by folding ref and re-taking its address.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
5007 /* Valueziation callback that ends up not following SSA edges. */
5009 tree
5010 no_follow_ssa_edges (tree)
5012 return NULL_TREE;
5015 /* Valueization callback that ends up following single-use SSA edges only. */
5017 tree
5018 follow_single_use_edges (tree val)
5020 if (TREE_CODE (val) == SSA_NAME
5021 && !has_single_use (val))
5022 return NULL_TREE;
5023 return val;
5026 /* Valueization callback that follows all SSA edges. */
5028 tree
5029 follow_all_ssa_edges (tree val)
5031 return val;
5034 /* Fold the statement pointed to by GSI. In some cases, this function may
5035 replace the whole statement with a new one. Returns true iff folding
5036 makes any changes.
5037 The statement pointed to by GSI should be in valid gimple form but may
5038 be in unfolded state as resulting from for example constant propagation
5039 which can produce *&x = 0. */
5041 bool
5042 fold_stmt (gimple_stmt_iterator *gsi)
5044 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5047 bool
5048 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5050 return fold_stmt_1 (gsi, false, valueize);
5053 /* Perform the minimal folding on statement *GSI. Only operations like
5054 *&x created by constant propagation are handled. The statement cannot
5055 be replaced with a new one. Return true if the statement was
5056 changed, false otherwise.
5057 The statement *GSI should be in valid gimple form but may
5058 be in unfolded state as resulting from for example constant propagation
5059 which can produce *&x = 0. */
5061 bool
5062 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5064 gimple *stmt = gsi_stmt (*gsi);
5065 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5066 gcc_assert (gsi_stmt (*gsi) == stmt);
5067 return changed;
5070 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5071 if EXPR is null or we don't know how.
5072 If non-null, the result always has boolean type. */
5074 static tree
5075 canonicalize_bool (tree expr, bool invert)
5077 if (!expr)
5078 return NULL_TREE;
5079 else if (invert)
5081 if (integer_nonzerop (expr))
5082 return boolean_false_node;
5083 else if (integer_zerop (expr))
5084 return boolean_true_node;
5085 else if (TREE_CODE (expr) == SSA_NAME)
5086 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5087 build_int_cst (TREE_TYPE (expr), 0));
5088 else if (COMPARISON_CLASS_P (expr))
5089 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5090 boolean_type_node,
5091 TREE_OPERAND (expr, 0),
5092 TREE_OPERAND (expr, 1));
5093 else
5094 return NULL_TREE;
5096 else
5098 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5099 return expr;
5100 if (integer_nonzerop (expr))
5101 return boolean_true_node;
5102 else if (integer_zerop (expr))
5103 return boolean_false_node;
5104 else if (TREE_CODE (expr) == SSA_NAME)
5105 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5106 build_int_cst (TREE_TYPE (expr), 0));
5107 else if (COMPARISON_CLASS_P (expr))
5108 return fold_build2 (TREE_CODE (expr),
5109 boolean_type_node,
5110 TREE_OPERAND (expr, 0),
5111 TREE_OPERAND (expr, 1));
5112 else
5113 return NULL_TREE;
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* Boolean name compared against itself: name != 0 and name == 1
	 are both just name.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise see whether EXPR's defining statement is exactly the
	 comparison (op1 code op2).  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1): recurse with the comparison
	     defining name as-is.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1): recurse with the inverted
	     comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5175 /* Check to see if two boolean expressions OP1 and OP2 are logically
5176 equivalent. */
5178 static bool
5179 same_bool_result_p (const_tree op1, const_tree op2)
5181 /* Simple cases first. */
5182 if (operand_equal_p (op1, op2, 0))
5183 return true;
5185 /* Check the cases where at least one of the operands is a comparison.
5186 These are a bit smarter than operand_equal_p in that they apply some
5187 identifies on SSA_NAMEs. */
5188 if (COMPARISON_CLASS_P (op2)
5189 && same_bool_comparison_p (op1, TREE_CODE (op2),
5190 TREE_OPERAND (op2, 0),
5191 TREE_OPERAND (op2, 1)))
5192 return true;
5193 if (COMPARISON_CLASS_P (op1)
5194 && same_bool_comparison_p (op2, TREE_CODE (op1),
5195 TREE_OPERAND (op1, 0),
5196 TREE_OPERAND (op1, 1)))
5197 return true;
5199 /* Default case. */
5200 return false;
/* Forward declarations for some mutually recursive functions that
   simplify ANDs/ORs of comparisons and boolean SSA variables.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
5224 /* Helper function for and_comparisons_1: try to simplify the AND of the
5225 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5226 If INVERT is true, invert the value of the VAR before doing the AND.
5227 Return NULL_EXPR if we can't simplify this to a single expression. */
5229 static tree
5230 and_var_with_comparison (tree var, bool invert,
5231 enum tree_code code2, tree op2a, tree op2b)
5233 tree t;
5234 gimple *stmt = SSA_NAME_DEF_STMT (var);
5236 /* We can only deal with variables whose definitions are assignments. */
5237 if (!is_gimple_assign (stmt))
5238 return NULL_TREE;
5240 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5241 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5242 Then we only have to consider the simpler non-inverted cases. */
5243 if (invert)
5244 t = or_var_with_comparison_1 (stmt,
5245 invert_tree_comparison (code2, false),
5246 op2a, op2b);
5247 else
5248 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5249 return canonicalize_bool (t, invert);
5252 /* Try to simplify the AND of the ssa variable defined by the assignment
5253 STMT with the comparison specified by (OP2A CODE2 OP2B).
5254 Return NULL_EXPR if we can't simplify this to a single expression. */
5256 static tree
5257 and_var_with_comparison_1 (gimple *stmt,
5258 enum tree_code code2, tree op2a, tree op2b)
5260 tree var = gimple_assign_lhs (stmt);
5261 tree true_test_var = NULL_TREE;
5262 tree false_test_var = NULL_TREE;
5263 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5265 /* Check for identities like (var AND (var == 0)) => false. */
5266 if (TREE_CODE (op2a) == SSA_NAME
5267 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5269 if ((code2 == NE_EXPR && integer_zerop (op2b))
5270 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5272 true_test_var = op2a;
5273 if (var == true_test_var)
5274 return var;
5276 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5277 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5279 false_test_var = op2a;
5280 if (var == false_test_var)
5281 return boolean_false_node;
5285 /* If the definition is a comparison, recurse on it. */
5286 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5288 tree t = and_comparisons_1 (innercode,
5289 gimple_assign_rhs1 (stmt),
5290 gimple_assign_rhs2 (stmt),
5291 code2,
5292 op2a,
5293 op2b);
5294 if (t)
5295 return t;
5298 /* If the definition is an AND or OR expression, we may be able to
5299 simplify by reassociating. */
5300 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5301 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5303 tree inner1 = gimple_assign_rhs1 (stmt);
5304 tree inner2 = gimple_assign_rhs2 (stmt);
5305 gimple *s;
5306 tree t;
5307 tree partial = NULL_TREE;
5308 bool is_and = (innercode == BIT_AND_EXPR);
5310 /* Check for boolean identities that don't require recursive examination
5311 of inner1/inner2:
5312 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5313 inner1 AND (inner1 OR inner2) => inner1
5314 !inner1 AND (inner1 AND inner2) => false
5315 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5316 Likewise for similar cases involving inner2. */
5317 if (inner1 == true_test_var)
5318 return (is_and ? var : inner1);
5319 else if (inner2 == true_test_var)
5320 return (is_and ? var : inner2);
5321 else if (inner1 == false_test_var)
5322 return (is_and
5323 ? boolean_false_node
5324 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5325 else if (inner2 == false_test_var)
5326 return (is_and
5327 ? boolean_false_node
5328 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5330 /* Next, redistribute/reassociate the AND across the inner tests.
5331 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5332 if (TREE_CODE (inner1) == SSA_NAME
5333 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5334 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5335 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5336 gimple_assign_rhs1 (s),
5337 gimple_assign_rhs2 (s),
5338 code2, op2a, op2b)))
5340 /* Handle the AND case, where we are reassociating:
5341 (inner1 AND inner2) AND (op2a code2 op2b)
5342 => (t AND inner2)
5343 If the partial result t is a constant, we win. Otherwise
5344 continue on to try reassociating with the other inner test. */
5345 if (is_and)
5347 if (integer_onep (t))
5348 return inner2;
5349 else if (integer_zerop (t))
5350 return boolean_false_node;
5353 /* Handle the OR case, where we are redistributing:
5354 (inner1 OR inner2) AND (op2a code2 op2b)
5355 => (t OR (inner2 AND (op2a code2 op2b))) */
5356 else if (integer_onep (t))
5357 return boolean_true_node;
5359 /* Save partial result for later. */
5360 partial = t;
5363 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5364 if (TREE_CODE (inner2) == SSA_NAME
5365 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5366 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5367 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5368 gimple_assign_rhs1 (s),
5369 gimple_assign_rhs2 (s),
5370 code2, op2a, op2b)))
5372 /* Handle the AND case, where we are reassociating:
5373 (inner1 AND inner2) AND (op2a code2 op2b)
5374 => (inner1 AND t) */
5375 if (is_and)
5377 if (integer_onep (t))
5378 return inner1;
5379 else if (integer_zerop (t))
5380 return boolean_false_node;
5381 /* If both are the same, we can apply the identity
5382 (x AND x) == x. */
5383 else if (partial && same_bool_result_p (t, partial))
5384 return t;
5387 /* Handle the OR case. where we are redistributing:
5388 (inner1 OR inner2) AND (op2a code2 op2b)
5389 => (t OR (inner1 AND (op2a code2 op2b)))
5390 => (t OR partial) */
5391 else
5393 if (integer_onep (t))
5394 return boolean_true_node;
5395 else if (partial)
5397 /* We already got a simplification for the other
5398 operand to the redistributed OR expression. The
5399 interesting case is when at least one is false.
5400 Or, if both are the same, we can apply the identity
5401 (x OR x) == x. */
5402 if (integer_zerop (partial))
5403 return t;
5404 else if (integer_zerop (t))
5405 return partial;
5406 else if (same_bool_result_p (t, partial))
5407 return t;
5412 return NULL_TREE;
5415 /* Try to simplify the AND of two comparisons defined by
5416 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5417 If this can be done without constructing an intermediate value,
5418 return the resulting tree; otherwise NULL_TREE is returned.
5419 This function is deliberately asymmetric as it recurses on SSA_DEFs
5420 in the first comparison but not the second. */
5422 static tree
5423 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5424 enum tree_code code2, tree op2a, tree op2b)
5426 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5428 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5429 if (operand_equal_p (op1a, op2a, 0)
5430 && operand_equal_p (op1b, op2b, 0))
5432 /* Result will be either NULL_TREE, or a combined comparison. */
5433 tree t = combine_comparisons (UNKNOWN_LOCATION,
5434 TRUTH_ANDIF_EXPR, code1, code2,
5435 truth_type, op1a, op1b);
5436 if (t)
5437 return t;
5440 /* Likewise the swapped case of the above. */
5441 if (operand_equal_p (op1a, op2b, 0)
5442 && operand_equal_p (op1b, op2a, 0))
5444 /* Result will be either NULL_TREE, or a combined comparison. */
5445 tree t = combine_comparisons (UNKNOWN_LOCATION,
5446 TRUTH_ANDIF_EXPR, code1,
5447 swap_tree_comparison (code2),
5448 truth_type, op1a, op1b);
5449 if (t)
5450 return t;
5453 /* If both comparisons are of the same value against constants, we might
5454 be able to merge them. */
5455 if (operand_equal_p (op1a, op2a, 0)
5456 && TREE_CODE (op1b) == INTEGER_CST
5457 && TREE_CODE (op2b) == INTEGER_CST)
5459 int cmp = tree_int_cst_compare (op1b, op2b);
5461 /* If we have (op1a == op1b), we should either be able to
5462 return that or FALSE, depending on whether the constant op1b
5463 also satisfies the other comparison against op2b. */
5464 if (code1 == EQ_EXPR)
5466 bool done = true;
5467 bool val;
5468 switch (code2)
5470 case EQ_EXPR: val = (cmp == 0); break;
5471 case NE_EXPR: val = (cmp != 0); break;
5472 case LT_EXPR: val = (cmp < 0); break;
5473 case GT_EXPR: val = (cmp > 0); break;
5474 case LE_EXPR: val = (cmp <= 0); break;
5475 case GE_EXPR: val = (cmp >= 0); break;
5476 default: done = false;
5478 if (done)
5480 if (val)
5481 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5482 else
5483 return boolean_false_node;
5486 /* Likewise if the second comparison is an == comparison. */
5487 else if (code2 == EQ_EXPR)
5489 bool done = true;
5490 bool val;
5491 switch (code1)
5493 case EQ_EXPR: val = (cmp == 0); break;
5494 case NE_EXPR: val = (cmp != 0); break;
5495 case LT_EXPR: val = (cmp > 0); break;
5496 case GT_EXPR: val = (cmp < 0); break;
5497 case LE_EXPR: val = (cmp >= 0); break;
5498 case GE_EXPR: val = (cmp <= 0); break;
5499 default: done = false;
5501 if (done)
5503 if (val)
5504 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5505 else
5506 return boolean_false_node;
5510 /* Same business with inequality tests. */
5511 else if (code1 == NE_EXPR)
5513 bool val;
5514 switch (code2)
5516 case EQ_EXPR: val = (cmp != 0); break;
5517 case NE_EXPR: val = (cmp == 0); break;
5518 case LT_EXPR: val = (cmp >= 0); break;
5519 case GT_EXPR: val = (cmp <= 0); break;
5520 case LE_EXPR: val = (cmp > 0); break;
5521 case GE_EXPR: val = (cmp < 0); break;
5522 default:
5523 val = false;
5525 if (val)
5526 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5528 else if (code2 == NE_EXPR)
5530 bool val;
5531 switch (code1)
5533 case EQ_EXPR: val = (cmp == 0); break;
5534 case NE_EXPR: val = (cmp != 0); break;
5535 case LT_EXPR: val = (cmp <= 0); break;
5536 case GT_EXPR: val = (cmp >= 0); break;
5537 case LE_EXPR: val = (cmp < 0); break;
5538 case GE_EXPR: val = (cmp > 0); break;
5539 default:
5540 val = false;
5542 if (val)
5543 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5546 /* Chose the more restrictive of two < or <= comparisons. */
5547 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5548 && (code2 == LT_EXPR || code2 == LE_EXPR))
5550 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5551 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5552 else
5553 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5556 /* Likewise chose the more restrictive of two > or >= comparisons. */
5557 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5558 && (code2 == GT_EXPR || code2 == GE_EXPR))
5560 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5561 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5562 else
5563 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5566 /* Check for singleton ranges. */
5567 else if (cmp == 0
5568 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5569 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5570 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5572 /* Check for disjoint ranges. */
5573 else if (cmp <= 0
5574 && (code1 == LT_EXPR || code1 == LE_EXPR)
5575 && (code2 == GT_EXPR || code2 == GE_EXPR))
5576 return boolean_false_node;
5577 else if (cmp >= 0
5578 && (code1 == GT_EXPR || code1 == GE_EXPR)
5579 && (code2 == LT_EXPR || code2 == LE_EXPR))
5580 return boolean_false_node;
5583 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5584 NAME's definition is a truth value. See if there are any simplifications
5585 that can be done against the NAME's definition. */
5586 if (TREE_CODE (op1a) == SSA_NAME
5587 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5588 && (integer_zerop (op1b) || integer_onep (op1b)))
5590 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5591 || (code1 == NE_EXPR && integer_onep (op1b)));
5592 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5593 switch (gimple_code (stmt))
5595 case GIMPLE_ASSIGN:
5596 /* Try to simplify by copy-propagating the definition. */
5597 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5599 case GIMPLE_PHI:
5600 /* If every argument to the PHI produces the same result when
5601 ANDed with the second comparison, we win.
5602 Do not do this unless the type is bool since we need a bool
5603 result here anyway. */
5604 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5606 tree result = NULL_TREE;
5607 unsigned i;
5608 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5610 tree arg = gimple_phi_arg_def (stmt, i);
5612 /* If this PHI has itself as an argument, ignore it.
5613 If all the other args produce the same result,
5614 we're still OK. */
5615 if (arg == gimple_phi_result (stmt))
5616 continue;
5617 else if (TREE_CODE (arg) == INTEGER_CST)
5619 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5621 if (!result)
5622 result = boolean_false_node;
5623 else if (!integer_zerop (result))
5624 return NULL_TREE;
5626 else if (!result)
5627 result = fold_build2 (code2, boolean_type_node,
5628 op2a, op2b);
5629 else if (!same_bool_comparison_p (result,
5630 code2, op2a, op2b))
5631 return NULL_TREE;
5633 else if (TREE_CODE (arg) == SSA_NAME
5634 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5636 tree temp;
5637 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5638 /* In simple cases we can look through PHI nodes,
5639 but we have to be careful with loops.
5640 See PR49073. */
5641 if (! dom_info_available_p (CDI_DOMINATORS)
5642 || gimple_bb (def_stmt) == gimple_bb (stmt)
5643 || dominated_by_p (CDI_DOMINATORS,
5644 gimple_bb (def_stmt),
5645 gimple_bb (stmt)))
5646 return NULL_TREE;
5647 temp = and_var_with_comparison (arg, invert, code2,
5648 op2a, op2b);
5649 if (!temp)
5650 return NULL_TREE;
5651 else if (!result)
5652 result = temp;
5653 else if (!same_bool_result_p (result, temp))
5654 return NULL_TREE;
5656 else
5657 return NULL_TREE;
5659 return result;
5662 default:
5663 break;
5666 return NULL_TREE;
5669 /* Try to simplify the AND of two comparisons, specified by
5670 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5671 If this can be simplified to a single expression (without requiring
5672 introducing more SSA variables to hold intermediate values),
5673 return the resulting tree. Otherwise return NULL_TREE.
5674 If the result expression is non-null, it has boolean type. */
5676 tree
5677 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5678 enum tree_code code2, tree op2a, tree op2b)
5680 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5681 if (t)
5682 return t;
5683 else
5684 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5687 /* Helper function for or_comparisons_1: try to simplify the OR of the
5688 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5689 If INVERT is true, invert the value of VAR before doing the OR.
5690 Return NULL_EXPR if we can't simplify this to a single expression. */
5692 static tree
5693 or_var_with_comparison (tree var, bool invert,
5694 enum tree_code code2, tree op2a, tree op2b)
5696 tree t;
5697 gimple *stmt = SSA_NAME_DEF_STMT (var);
5699 /* We can only deal with variables whose definitions are assignments. */
5700 if (!is_gimple_assign (stmt))
5701 return NULL_TREE;
5703 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5704 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5705 Then we only have to consider the simpler non-inverted cases. */
5706 if (invert)
5707 t = and_var_with_comparison_1 (stmt,
5708 invert_tree_comparison (code2, false),
5709 op2a, op2b);
5710 else
5711 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5712 return canonicalize_bool (t, invert);
5715 /* Try to simplify the OR of the ssa variable defined by the assignment
5716 STMT with the comparison specified by (OP2A CODE2 OP2B).
5717 Return NULL_EXPR if we can't simplify this to a single expression. */
5719 static tree
5720 or_var_with_comparison_1 (gimple *stmt,
5721 enum tree_code code2, tree op2a, tree op2b)
5723 tree var = gimple_assign_lhs (stmt);
5724 tree true_test_var = NULL_TREE;
5725 tree false_test_var = NULL_TREE;
5726 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5728 /* Check for identities like (var OR (var != 0)) => true . */
5729 if (TREE_CODE (op2a) == SSA_NAME
5730 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5732 if ((code2 == NE_EXPR && integer_zerop (op2b))
5733 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5735 true_test_var = op2a;
5736 if (var == true_test_var)
5737 return var;
5739 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5740 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5742 false_test_var = op2a;
5743 if (var == false_test_var)
5744 return boolean_true_node;
5748 /* If the definition is a comparison, recurse on it. */
5749 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5751 tree t = or_comparisons_1 (innercode,
5752 gimple_assign_rhs1 (stmt),
5753 gimple_assign_rhs2 (stmt),
5754 code2,
5755 op2a,
5756 op2b);
5757 if (t)
5758 return t;
5761 /* If the definition is an AND or OR expression, we may be able to
5762 simplify by reassociating. */
5763 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5764 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5766 tree inner1 = gimple_assign_rhs1 (stmt);
5767 tree inner2 = gimple_assign_rhs2 (stmt);
5768 gimple *s;
5769 tree t;
5770 tree partial = NULL_TREE;
5771 bool is_or = (innercode == BIT_IOR_EXPR);
5773 /* Check for boolean identities that don't require recursive examination
5774 of inner1/inner2:
5775 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5776 inner1 OR (inner1 AND inner2) => inner1
5777 !inner1 OR (inner1 OR inner2) => true
5778 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5780 if (inner1 == true_test_var)
5781 return (is_or ? var : inner1);
5782 else if (inner2 == true_test_var)
5783 return (is_or ? var : inner2);
5784 else if (inner1 == false_test_var)
5785 return (is_or
5786 ? boolean_true_node
5787 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5788 else if (inner2 == false_test_var)
5789 return (is_or
5790 ? boolean_true_node
5791 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5793 /* Next, redistribute/reassociate the OR across the inner tests.
5794 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5795 if (TREE_CODE (inner1) == SSA_NAME
5796 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5797 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5798 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5799 gimple_assign_rhs1 (s),
5800 gimple_assign_rhs2 (s),
5801 code2, op2a, op2b)))
5803 /* Handle the OR case, where we are reassociating:
5804 (inner1 OR inner2) OR (op2a code2 op2b)
5805 => (t OR inner2)
5806 If the partial result t is a constant, we win. Otherwise
5807 continue on to try reassociating with the other inner test. */
5808 if (is_or)
5810 if (integer_onep (t))
5811 return boolean_true_node;
5812 else if (integer_zerop (t))
5813 return inner2;
5816 /* Handle the AND case, where we are redistributing:
5817 (inner1 AND inner2) OR (op2a code2 op2b)
5818 => (t AND (inner2 OR (op2a code op2b))) */
5819 else if (integer_zerop (t))
5820 return boolean_false_node;
5822 /* Save partial result for later. */
5823 partial = t;
5826 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5827 if (TREE_CODE (inner2) == SSA_NAME
5828 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5829 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5830 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5831 gimple_assign_rhs1 (s),
5832 gimple_assign_rhs2 (s),
5833 code2, op2a, op2b)))
5835 /* Handle the OR case, where we are reassociating:
5836 (inner1 OR inner2) OR (op2a code2 op2b)
5837 => (inner1 OR t)
5838 => (t OR partial) */
5839 if (is_or)
5841 if (integer_zerop (t))
5842 return inner1;
5843 else if (integer_onep (t))
5844 return boolean_true_node;
5845 /* If both are the same, we can apply the identity
5846 (x OR x) == x. */
5847 else if (partial && same_bool_result_p (t, partial))
5848 return t;
5851 /* Handle the AND case, where we are redistributing:
5852 (inner1 AND inner2) OR (op2a code2 op2b)
5853 => (t AND (inner1 OR (op2a code2 op2b)))
5854 => (t AND partial) */
5855 else
5857 if (integer_zerop (t))
5858 return boolean_false_node;
5859 else if (partial)
5861 /* We already got a simplification for the other
5862 operand to the redistributed AND expression. The
5863 interesting case is when at least one is true.
5864 Or, if both are the same, we can apply the identity
5865 (x AND x) == x. */
5866 if (integer_onep (partial))
5867 return t;
5868 else if (integer_onep (t))
5869 return partial;
5870 else if (same_bool_result_p (t, partial))
5871 return t;
5876 return NULL_TREE;
5879 /* Try to simplify the OR of two comparisons defined by
5880 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5881 If this can be done without constructing an intermediate value,
5882 return the resulting tree; otherwise NULL_TREE is returned.
5883 This function is deliberately asymmetric as it recurses on SSA_DEFs
5884 in the first comparison but not the second. */
5886 static tree
5887 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5888 enum tree_code code2, tree op2a, tree op2b)
5890 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5892 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5893 if (operand_equal_p (op1a, op2a, 0)
5894 && operand_equal_p (op1b, op2b, 0))
5896 /* Result will be either NULL_TREE, or a combined comparison. */
5897 tree t = combine_comparisons (UNKNOWN_LOCATION,
5898 TRUTH_ORIF_EXPR, code1, code2,
5899 truth_type, op1a, op1b);
5900 if (t)
5901 return t;
5904 /* Likewise the swapped case of the above. */
5905 if (operand_equal_p (op1a, op2b, 0)
5906 && operand_equal_p (op1b, op2a, 0))
5908 /* Result will be either NULL_TREE, or a combined comparison. */
5909 tree t = combine_comparisons (UNKNOWN_LOCATION,
5910 TRUTH_ORIF_EXPR, code1,
5911 swap_tree_comparison (code2),
5912 truth_type, op1a, op1b);
5913 if (t)
5914 return t;
5917 /* If both comparisons are of the same value against constants, we might
5918 be able to merge them. */
5919 if (operand_equal_p (op1a, op2a, 0)
5920 && TREE_CODE (op1b) == INTEGER_CST
5921 && TREE_CODE (op2b) == INTEGER_CST)
5923 int cmp = tree_int_cst_compare (op1b, op2b);
5925 /* If we have (op1a != op1b), we should either be able to
5926 return that or TRUE, depending on whether the constant op1b
5927 also satisfies the other comparison against op2b. */
5928 if (code1 == NE_EXPR)
5930 bool done = true;
5931 bool val;
5932 switch (code2)
5934 case EQ_EXPR: val = (cmp == 0); break;
5935 case NE_EXPR: val = (cmp != 0); break;
5936 case LT_EXPR: val = (cmp < 0); break;
5937 case GT_EXPR: val = (cmp > 0); break;
5938 case LE_EXPR: val = (cmp <= 0); break;
5939 case GE_EXPR: val = (cmp >= 0); break;
5940 default: done = false;
5942 if (done)
5944 if (val)
5945 return boolean_true_node;
5946 else
5947 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5950 /* Likewise if the second comparison is a != comparison. */
5951 else if (code2 == NE_EXPR)
5953 bool done = true;
5954 bool val;
5955 switch (code1)
5957 case EQ_EXPR: val = (cmp == 0); break;
5958 case NE_EXPR: val = (cmp != 0); break;
5959 case LT_EXPR: val = (cmp > 0); break;
5960 case GT_EXPR: val = (cmp < 0); break;
5961 case LE_EXPR: val = (cmp >= 0); break;
5962 case GE_EXPR: val = (cmp <= 0); break;
5963 default: done = false;
5965 if (done)
5967 if (val)
5968 return boolean_true_node;
5969 else
5970 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5974 /* See if an equality test is redundant with the other comparison. */
5975 else if (code1 == EQ_EXPR)
5977 bool val;
5978 switch (code2)
5980 case EQ_EXPR: val = (cmp == 0); break;
5981 case NE_EXPR: val = (cmp != 0); break;
5982 case LT_EXPR: val = (cmp < 0); break;
5983 case GT_EXPR: val = (cmp > 0); break;
5984 case LE_EXPR: val = (cmp <= 0); break;
5985 case GE_EXPR: val = (cmp >= 0); break;
5986 default:
5987 val = false;
5989 if (val)
5990 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5992 else if (code2 == EQ_EXPR)
5994 bool val;
5995 switch (code1)
5997 case EQ_EXPR: val = (cmp == 0); break;
5998 case NE_EXPR: val = (cmp != 0); break;
5999 case LT_EXPR: val = (cmp > 0); break;
6000 case GT_EXPR: val = (cmp < 0); break;
6001 case LE_EXPR: val = (cmp >= 0); break;
6002 case GE_EXPR: val = (cmp <= 0); break;
6003 default:
6004 val = false;
6006 if (val)
6007 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6010 /* Chose the less restrictive of two < or <= comparisons. */
6011 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
6012 && (code2 == LT_EXPR || code2 == LE_EXPR))
6014 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
6015 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6016 else
6017 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6020 /* Likewise chose the less restrictive of two > or >= comparisons. */
6021 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
6022 && (code2 == GT_EXPR || code2 == GE_EXPR))
6024 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
6025 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6026 else
6027 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6030 /* Check for singleton ranges. */
6031 else if (cmp == 0
6032 && ((code1 == LT_EXPR && code2 == GT_EXPR)
6033 || (code1 == GT_EXPR && code2 == LT_EXPR)))
6034 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
6036 /* Check for less/greater pairs that don't restrict the range at all. */
6037 else if (cmp >= 0
6038 && (code1 == LT_EXPR || code1 == LE_EXPR)
6039 && (code2 == GT_EXPR || code2 == GE_EXPR))
6040 return boolean_true_node;
6041 else if (cmp <= 0
6042 && (code1 == GT_EXPR || code1 == GE_EXPR)
6043 && (code2 == LT_EXPR || code2 == LE_EXPR))
6044 return boolean_true_node;
6047 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6048 NAME's definition is a truth value. See if there are any simplifications
6049 that can be done against the NAME's definition. */
6050 if (TREE_CODE (op1a) == SSA_NAME
6051 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6052 && (integer_zerop (op1b) || integer_onep (op1b)))
6054 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6055 || (code1 == NE_EXPR && integer_onep (op1b)));
6056 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6057 switch (gimple_code (stmt))
6059 case GIMPLE_ASSIGN:
6060 /* Try to simplify by copy-propagating the definition. */
6061 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6063 case GIMPLE_PHI:
6064 /* If every argument to the PHI produces the same result when
6065 ORed with the second comparison, we win.
6066 Do not do this unless the type is bool since we need a bool
6067 result here anyway. */
6068 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6070 tree result = NULL_TREE;
6071 unsigned i;
6072 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6074 tree arg = gimple_phi_arg_def (stmt, i);
6076 /* If this PHI has itself as an argument, ignore it.
6077 If all the other args produce the same result,
6078 we're still OK. */
6079 if (arg == gimple_phi_result (stmt))
6080 continue;
6081 else if (TREE_CODE (arg) == INTEGER_CST)
6083 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6085 if (!result)
6086 result = boolean_true_node;
6087 else if (!integer_onep (result))
6088 return NULL_TREE;
6090 else if (!result)
6091 result = fold_build2 (code2, boolean_type_node,
6092 op2a, op2b);
6093 else if (!same_bool_comparison_p (result,
6094 code2, op2a, op2b))
6095 return NULL_TREE;
6097 else if (TREE_CODE (arg) == SSA_NAME
6098 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6100 tree temp;
6101 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6102 /* In simple cases we can look through PHI nodes,
6103 but we have to be careful with loops.
6104 See PR49073. */
6105 if (! dom_info_available_p (CDI_DOMINATORS)
6106 || gimple_bb (def_stmt) == gimple_bb (stmt)
6107 || dominated_by_p (CDI_DOMINATORS,
6108 gimple_bb (def_stmt),
6109 gimple_bb (stmt)))
6110 return NULL_TREE;
6111 temp = or_var_with_comparison (arg, invert, code2,
6112 op2a, op2b);
6113 if (!temp)
6114 return NULL_TREE;
6115 else if (!result)
6116 result = temp;
6117 else if (!same_bool_result_p (result, temp))
6118 return NULL_TREE;
6120 else
6121 return NULL_TREE;
6123 return result;
6126 default:
6127 break;
6130 return NULL_TREE;
6133 /* Try to simplify the OR of two comparisons, specified by
6134 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6135 If this can be simplified to a single expression (without requiring
6136 introducing more SSA variables to hold intermediate values),
6137 return the resulting tree. Otherwise return NULL_TREE.
6138 If the result expression is non-null, it has boolean type. */
6140 tree
6141 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6142 enum tree_code code2, tree op2a, tree op2b)
6144 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6145 if (t)
6146 return t;
6147 else
6148 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6152 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6154 Either NULL_TREE, a simplified but non-constant or a constant
6155 is returned.
6157 ??? This should go into a gimple-fold-inline.h file to be eventually
6158 privatized with the single valueize function used in the various TUs
6159 to avoid the indirect function call overhead. */
6161 tree
6162 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6163 tree (*gvalueize) (tree))
6165 gimple_match_op res_op;
6166 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6167 edges if there are intermediate VARYING defs. For this reason
6168 do not follow SSA edges here even though SCCVN can technically
6169 just deal fine with that. */
6170 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6172 tree res = NULL_TREE;
6173 if (gimple_simplified_result_is_gimple_val (&res_op))
6174 res = res_op.ops[0];
6175 else if (mprts_hook)
6176 res = mprts_hook (&res_op);
6177 if (res)
6179 if (dump_file && dump_flags & TDF_DETAILS)
6181 fprintf (dump_file, "Match-and-simplified ");
6182 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6183 fprintf (dump_file, " to ");
6184 print_generic_expr (dump_file, res);
6185 fprintf (dump_file, "\n");
6187 return res;
6191 location_t loc = gimple_location (stmt);
6192 switch (gimple_code (stmt))
6194 case GIMPLE_ASSIGN:
6196 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6198 switch (get_gimple_rhs_class (subcode))
6200 case GIMPLE_SINGLE_RHS:
6202 tree rhs = gimple_assign_rhs1 (stmt);
6203 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6205 if (TREE_CODE (rhs) == SSA_NAME)
6207 /* If the RHS is an SSA_NAME, return its known constant value,
6208 if any. */
6209 return (*valueize) (rhs);
6211 /* Handle propagating invariant addresses into address
6212 operations. */
6213 else if (TREE_CODE (rhs) == ADDR_EXPR
6214 && !is_gimple_min_invariant (rhs))
6216 poly_int64 offset = 0;
6217 tree base;
6218 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6219 &offset,
6220 valueize);
6221 if (base
6222 && (CONSTANT_CLASS_P (base)
6223 || decl_address_invariant_p (base)))
6224 return build_invariant_address (TREE_TYPE (rhs),
6225 base, offset);
6227 else if (TREE_CODE (rhs) == CONSTRUCTOR
6228 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6229 && known_eq (CONSTRUCTOR_NELTS (rhs),
6230 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6232 unsigned i, nelts;
6233 tree val;
6235 nelts = CONSTRUCTOR_NELTS (rhs);
6236 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6237 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6239 val = (*valueize) (val);
6240 if (TREE_CODE (val) == INTEGER_CST
6241 || TREE_CODE (val) == REAL_CST
6242 || TREE_CODE (val) == FIXED_CST)
6243 vec.quick_push (val);
6244 else
6245 return NULL_TREE;
6248 return vec.build ();
6250 if (subcode == OBJ_TYPE_REF)
6252 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6253 /* If callee is constant, we can fold away the wrapper. */
6254 if (is_gimple_min_invariant (val))
6255 return val;
6258 if (kind == tcc_reference)
6260 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6261 || TREE_CODE (rhs) == REALPART_EXPR
6262 || TREE_CODE (rhs) == IMAGPART_EXPR)
6263 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6265 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6266 return fold_unary_loc (EXPR_LOCATION (rhs),
6267 TREE_CODE (rhs),
6268 TREE_TYPE (rhs), val);
6270 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6271 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6273 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6274 return fold_ternary_loc (EXPR_LOCATION (rhs),
6275 TREE_CODE (rhs),
6276 TREE_TYPE (rhs), val,
6277 TREE_OPERAND (rhs, 1),
6278 TREE_OPERAND (rhs, 2));
6280 else if (TREE_CODE (rhs) == MEM_REF
6281 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6283 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6284 if (TREE_CODE (val) == ADDR_EXPR
6285 && is_gimple_min_invariant (val))
6287 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6288 unshare_expr (val),
6289 TREE_OPERAND (rhs, 1));
6290 if (tem)
6291 rhs = tem;
6294 return fold_const_aggregate_ref_1 (rhs, valueize);
6296 else if (kind == tcc_declaration)
6297 return get_symbol_constant_value (rhs);
6298 return rhs;
6301 case GIMPLE_UNARY_RHS:
6302 return NULL_TREE;
6304 case GIMPLE_BINARY_RHS:
6305 /* Translate &x + CST into an invariant form suitable for
6306 further propagation. */
6307 if (subcode == POINTER_PLUS_EXPR)
6309 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6310 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6311 if (TREE_CODE (op0) == ADDR_EXPR
6312 && TREE_CODE (op1) == INTEGER_CST)
6314 tree off = fold_convert (ptr_type_node, op1);
6315 return build_fold_addr_expr_loc
6316 (loc,
6317 fold_build2 (MEM_REF,
6318 TREE_TYPE (TREE_TYPE (op0)),
6319 unshare_expr (op0), off));
6322 /* Canonicalize bool != 0 and bool == 0 appearing after
6323 valueization. While gimple_simplify handles this
6324 it can get confused by the ~X == 1 -> X == 0 transform
6325 which we cant reduce to a SSA name or a constant
6326 (and we have no way to tell gimple_simplify to not
6327 consider those transforms in the first place). */
6328 else if (subcode == EQ_EXPR
6329 || subcode == NE_EXPR)
6331 tree lhs = gimple_assign_lhs (stmt);
6332 tree op0 = gimple_assign_rhs1 (stmt);
6333 if (useless_type_conversion_p (TREE_TYPE (lhs),
6334 TREE_TYPE (op0)))
6336 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6337 op0 = (*valueize) (op0);
6338 if (TREE_CODE (op0) == INTEGER_CST)
6339 std::swap (op0, op1);
6340 if (TREE_CODE (op1) == INTEGER_CST
6341 && ((subcode == NE_EXPR && integer_zerop (op1))
6342 || (subcode == EQ_EXPR && integer_onep (op1))))
6343 return op0;
6346 return NULL_TREE;
6348 case GIMPLE_TERNARY_RHS:
6350 /* Handle ternary operators that can appear in GIMPLE form. */
6351 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6352 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6353 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6354 return fold_ternary_loc (loc, subcode,
6355 gimple_expr_type (stmt), op0, op1, op2);
6358 default:
6359 gcc_unreachable ();
6363 case GIMPLE_CALL:
6365 tree fn;
6366 gcall *call_stmt = as_a <gcall *> (stmt);
6368 if (gimple_call_internal_p (stmt))
6370 enum tree_code subcode = ERROR_MARK;
6371 switch (gimple_call_internal_fn (stmt))
6373 case IFN_UBSAN_CHECK_ADD:
6374 subcode = PLUS_EXPR;
6375 break;
6376 case IFN_UBSAN_CHECK_SUB:
6377 subcode = MINUS_EXPR;
6378 break;
6379 case IFN_UBSAN_CHECK_MUL:
6380 subcode = MULT_EXPR;
6381 break;
6382 case IFN_BUILTIN_EXPECT:
6384 tree arg0 = gimple_call_arg (stmt, 0);
6385 tree op0 = (*valueize) (arg0);
6386 if (TREE_CODE (op0) == INTEGER_CST)
6387 return op0;
6388 return NULL_TREE;
6390 default:
6391 return NULL_TREE;
6393 tree arg0 = gimple_call_arg (stmt, 0);
6394 tree arg1 = gimple_call_arg (stmt, 1);
6395 tree op0 = (*valueize) (arg0);
6396 tree op1 = (*valueize) (arg1);
6398 if (TREE_CODE (op0) != INTEGER_CST
6399 || TREE_CODE (op1) != INTEGER_CST)
6401 switch (subcode)
6403 case MULT_EXPR:
6404 /* x * 0 = 0 * x = 0 without overflow. */
6405 if (integer_zerop (op0) || integer_zerop (op1))
6406 return build_zero_cst (TREE_TYPE (arg0));
6407 break;
6408 case MINUS_EXPR:
6409 /* y - y = 0 without overflow. */
6410 if (operand_equal_p (op0, op1, 0))
6411 return build_zero_cst (TREE_TYPE (arg0));
6412 break;
6413 default:
6414 break;
6417 tree res
6418 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6419 if (res
6420 && TREE_CODE (res) == INTEGER_CST
6421 && !TREE_OVERFLOW (res))
6422 return res;
6423 return NULL_TREE;
6426 fn = (*valueize) (gimple_call_fn (stmt));
6427 if (TREE_CODE (fn) == ADDR_EXPR
6428 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6429 && gimple_builtin_call_types_compatible_p (stmt,
6430 TREE_OPERAND (fn, 0)))
6432 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6433 tree retval;
6434 unsigned i;
6435 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6436 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6437 retval = fold_builtin_call_array (loc,
6438 gimple_call_return_type (call_stmt),
6439 fn, gimple_call_num_args (stmt), args);
6440 if (retval)
6442 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6443 STRIP_NOPS (retval);
6444 retval = fold_convert (gimple_call_return_type (call_stmt),
6445 retval);
6447 return retval;
6449 return NULL_TREE;
6452 default:
6453 return NULL_TREE;
6457 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6458 Returns NULL_TREE if folding to a constant is not possible, otherwise
6459 returns a constant according to is_gimple_min_invariant. */
6461 tree
6462 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6464 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6465 if (res && is_gimple_min_invariant (res))
6466 return res;
6467 return NULL_TREE;
6471 /* The following set of functions are supposed to fold references using
6472 their constant initializers. */
6474 /* See if we can find constructor defining value of BASE.
6475 When we know the consructor with constant offset (such as
6476 base is array[40] and we do know constructor of array), then
6477 BIT_OFFSET is adjusted accordingly.
6479 As a special case, return error_mark_node when constructor
6480 is not explicitly available, but it is known to be zero
6481 such as 'static const int a;'. */
6482 static tree
6483 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6484 tree (*valueize)(tree))
6486 poly_int64 bit_offset2, size, max_size;
6487 bool reverse;
6489 if (TREE_CODE (base) == MEM_REF)
6491 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6492 if (!boff.to_shwi (bit_offset))
6493 return NULL_TREE;
6495 if (valueize
6496 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6497 base = valueize (TREE_OPERAND (base, 0));
6498 if (!base || TREE_CODE (base) != ADDR_EXPR)
6499 return NULL_TREE;
6500 base = TREE_OPERAND (base, 0);
6502 else if (valueize
6503 && TREE_CODE (base) == SSA_NAME)
6504 base = valueize (base);
6506 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6507 DECL_INITIAL. If BASE is a nested reference into another
6508 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6509 the inner reference. */
6510 switch (TREE_CODE (base))
6512 case VAR_DECL:
6513 case CONST_DECL:
6515 tree init = ctor_for_folding (base);
6517 /* Our semantic is exact opposite of ctor_for_folding;
6518 NULL means unknown, while error_mark_node is 0. */
6519 if (init == error_mark_node)
6520 return NULL_TREE;
6521 if (!init)
6522 return error_mark_node;
6523 return init;
6526 case VIEW_CONVERT_EXPR:
6527 return get_base_constructor (TREE_OPERAND (base, 0),
6528 bit_offset, valueize);
6530 case ARRAY_REF:
6531 case COMPONENT_REF:
6532 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6533 &reverse);
6534 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6535 return NULL_TREE;
6536 *bit_offset += bit_offset2;
6537 return get_base_constructor (base, bit_offset, valueize);
6539 case CONSTRUCTOR:
6540 return base;
6542 default:
6543 if (CONSTANT_CLASS_P (base))
6544 return base;
6546 return NULL_TREE;
6550 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6551 to the memory at bit OFFSET. When non-null, TYPE is the expected
6552 type of the reference; otherwise the type of the referenced element
6553 is used instead. When SIZE is zero, attempt to fold a reference to
6554 the entire element which OFFSET refers to. Increment *SUBOFF by
6555 the bit offset of the accessed element. */
6557 static tree
6558 fold_array_ctor_reference (tree type, tree ctor,
6559 unsigned HOST_WIDE_INT offset,
6560 unsigned HOST_WIDE_INT size,
6561 tree from_decl,
6562 unsigned HOST_WIDE_INT *suboff)
6564 offset_int low_bound;
6565 offset_int elt_size;
6566 offset_int access_index;
6567 tree domain_type = NULL_TREE;
6568 HOST_WIDE_INT inner_offset;
6570 /* Compute low bound and elt size. */
6571 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6572 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6573 if (domain_type && TYPE_MIN_VALUE (domain_type))
6575 /* Static constructors for variably sized objects makes no sense. */
6576 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6577 return NULL_TREE;
6578 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6580 else
6581 low_bound = 0;
6582 /* Static constructors for variably sized objects makes no sense. */
6583 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6584 return NULL_TREE;
6585 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6587 /* When TYPE is non-null, verify that it specifies a constant-sized
6588 accessed not larger than size of array element. */
6589 if (type
6590 && (!TYPE_SIZE_UNIT (type)
6591 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6592 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6593 || elt_size == 0))
6594 return NULL_TREE;
6596 /* Compute the array index we look for. */
6597 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6598 elt_size);
6599 access_index += low_bound;
6601 /* And offset within the access. */
6602 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6604 /* See if the array field is large enough to span whole access. We do not
6605 care to fold accesses spanning multiple array indexes. */
6606 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6607 return NULL_TREE;
6608 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6610 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6612 /* For the final reference to the entire accessed element
6613 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
6614 may be null) in favor of the type of the element, and set
6615 SIZE to the size of the accessed element. */
6616 inner_offset = 0;
6617 type = TREE_TYPE (val);
6618 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6621 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6622 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6623 suboff);
6626 /* Memory not explicitly mentioned in constructor is 0 (or
6627 the reference is out of range). */
6628 return type ? build_zero_cst (type) : NULL_TREE;
6631 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6632 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6633 is the expected type of the reference; otherwise the type of
6634 the referenced member is used instead. When SIZE is zero,
6635 attempt to fold a reference to the entire member which OFFSET
6636 refers to; in this case. Increment *SUBOFF by the bit offset
6637 of the accessed member. */
6639 static tree
6640 fold_nonarray_ctor_reference (tree type, tree ctor,
6641 unsigned HOST_WIDE_INT offset,
6642 unsigned HOST_WIDE_INT size,
6643 tree from_decl,
6644 unsigned HOST_WIDE_INT *suboff)
6646 unsigned HOST_WIDE_INT cnt;
6647 tree cfield, cval;
6649 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6650 cval)
6652 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6653 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6654 tree field_size = DECL_SIZE (cfield);
6656 if (!field_size)
6658 /* Determine the size of the flexible array member from
6659 the size of the initializer provided for it. */
6660 field_size = TYPE_SIZE (TREE_TYPE (cval));
6663 /* Variable sized objects in static constructors makes no sense,
6664 but field_size can be NULL for flexible array members. */
6665 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6666 && TREE_CODE (byte_offset) == INTEGER_CST
6667 && (field_size != NULL_TREE
6668 ? TREE_CODE (field_size) == INTEGER_CST
6669 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6671 /* Compute bit offset of the field. */
6672 offset_int bitoffset
6673 = (wi::to_offset (field_offset)
6674 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6675 /* Compute bit offset where the field ends. */
6676 offset_int bitoffset_end;
6677 if (field_size != NULL_TREE)
6678 bitoffset_end = bitoffset + wi::to_offset (field_size);
6679 else
6680 bitoffset_end = 0;
6682 /* Compute the bit offset of the end of the desired access.
6683 As a special case, if the size of the desired access is
6684 zero, assume the access is to the entire field (and let
6685 the caller make any necessary adjustments by storing
6686 the actual bounds of the field in FIELDBOUNDS). */
6687 offset_int access_end = offset_int (offset);
6688 if (size)
6689 access_end += size;
6690 else
6691 access_end = bitoffset_end;
6693 /* Is there any overlap between the desired access at
6694 [OFFSET, OFFSET+SIZE) and the offset of the field within
6695 the object at [BITOFFSET, BITOFFSET_END)? */
6696 if (wi::cmps (access_end, bitoffset) > 0
6697 && (field_size == NULL_TREE
6698 || wi::lts_p (offset, bitoffset_end)))
6700 *suboff += bitoffset.to_uhwi ();
6702 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6704 /* For the final reference to the entire accessed member
6705 (SIZE is zero), reset OFFSET, disegard TYPE (which may
6706 be null) in favor of the type of the member, and set
6707 SIZE to the size of the accessed member. */
6708 offset = bitoffset.to_uhwi ();
6709 type = TREE_TYPE (cval);
6710 size = (bitoffset_end - bitoffset).to_uhwi ();
6713 /* We do have overlap. Now see if the field is large enough
6714 to cover the access. Give up for accesses that extend
6715 beyond the end of the object or that span multiple fields. */
6716 if (wi::cmps (access_end, bitoffset_end) > 0)
6717 return NULL_TREE;
6718 if (offset < bitoffset)
6719 return NULL_TREE;
6721 offset_int inner_offset = offset_int (offset) - bitoffset;
6722 return fold_ctor_reference (type, cval,
6723 inner_offset.to_uhwi (), size,
6724 from_decl, suboff);
6727 /* Memory not explicitly mentioned in constructor is 0. */
6728 return type ? build_zero_cst (type) : NULL_TREE;
6731 /* CTOR is value initializing memory. Fold a reference of TYPE and
6732 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6733 is zero, attempt to fold a reference to the entire subobject
6734 which OFFSET refers to. This is used when folding accesses to
6735 string members of aggregates. When non-null, set *SUBOFF to
6736 the bit offset of the accessed subobject. */
6738 tree
6739 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6740 const poly_uint64 &poly_size, tree from_decl,
6741 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6743 tree ret;
6745 /* We found the field with exact match. */
6746 if (type
6747 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6748 && known_eq (poly_offset, 0U))
6749 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6751 /* The remaining optimizations need a constant size and offset. */
6752 unsigned HOST_WIDE_INT size, offset;
6753 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6754 return NULL_TREE;
6756 /* We are at the end of walk, see if we can view convert the
6757 result. */
6758 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6759 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6760 && !compare_tree_int (TYPE_SIZE (type), size)
6761 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6763 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6764 if (ret)
6766 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6767 if (ret)
6768 STRIP_USELESS_TYPE_CONVERSION (ret);
6770 return ret;
6772 /* For constants and byte-aligned/sized reads try to go through
6773 native_encode/interpret. */
6774 if (CONSTANT_CLASS_P (ctor)
6775 && BITS_PER_UNIT == 8
6776 && offset % BITS_PER_UNIT == 0
6777 && size % BITS_PER_UNIT == 0
6778 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6780 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6781 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6782 offset / BITS_PER_UNIT);
6783 if (len > 0)
6784 return native_interpret_expr (type, buf, len);
6786 if (TREE_CODE (ctor) == CONSTRUCTOR)
6788 unsigned HOST_WIDE_INT dummy = 0;
6789 if (!suboff)
6790 suboff = &dummy;
6792 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6793 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6794 return fold_array_ctor_reference (type, ctor, offset, size,
6795 from_decl, suboff);
6797 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6798 from_decl, suboff);
6801 return NULL_TREE;
6804 /* Return the tree representing the element referenced by T if T is an
6805 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6806 names using VALUEIZE. Return NULL_TREE otherwise. */
6808 tree
6809 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6811 tree ctor, idx, base;
6812 poly_int64 offset, size, max_size;
6813 tree tem;
6814 bool reverse;
6816 if (TREE_THIS_VOLATILE (t))
6817 return NULL_TREE;
6819 if (DECL_P (t))
6820 return get_symbol_constant_value (t);
6822 tem = fold_read_from_constant_string (t);
6823 if (tem)
6824 return tem;
6826 switch (TREE_CODE (t))
6828 case ARRAY_REF:
6829 case ARRAY_RANGE_REF:
6830 /* Constant indexes are handled well by get_base_constructor.
6831 Only special case variable offsets.
6832 FIXME: This code can't handle nested references with variable indexes
6833 (they will be handled only by iteration of ccp). Perhaps we can bring
6834 get_ref_base_and_extent here and make it use a valueize callback. */
6835 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6836 && valueize
6837 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6838 && poly_int_tree_p (idx))
6840 tree low_bound, unit_size;
6842 /* If the resulting bit-offset is constant, track it. */
6843 if ((low_bound = array_ref_low_bound (t),
6844 poly_int_tree_p (low_bound))
6845 && (unit_size = array_ref_element_size (t),
6846 tree_fits_uhwi_p (unit_size)))
6848 poly_offset_int woffset
6849 = wi::sext (wi::to_poly_offset (idx)
6850 - wi::to_poly_offset (low_bound),
6851 TYPE_PRECISION (TREE_TYPE (idx)));
6853 if (woffset.to_shwi (&offset))
6855 /* TODO: This code seems wrong, multiply then check
6856 to see if it fits. */
6857 offset *= tree_to_uhwi (unit_size);
6858 offset *= BITS_PER_UNIT;
6860 base = TREE_OPERAND (t, 0);
6861 ctor = get_base_constructor (base, &offset, valueize);
6862 /* Empty constructor. Always fold to 0. */
6863 if (ctor == error_mark_node)
6864 return build_zero_cst (TREE_TYPE (t));
6865 /* Out of bound array access. Value is undefined,
6866 but don't fold. */
6867 if (maybe_lt (offset, 0))
6868 return NULL_TREE;
6869 /* We can not determine ctor. */
6870 if (!ctor)
6871 return NULL_TREE;
6872 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6873 tree_to_uhwi (unit_size)
6874 * BITS_PER_UNIT,
6875 base);
6879 /* Fallthru. */
6881 case COMPONENT_REF:
6882 case BIT_FIELD_REF:
6883 case TARGET_MEM_REF:
6884 case MEM_REF:
6885 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6886 ctor = get_base_constructor (base, &offset, valueize);
6888 /* Empty constructor. Always fold to 0. */
6889 if (ctor == error_mark_node)
6890 return build_zero_cst (TREE_TYPE (t));
6891 /* We do not know precise address. */
6892 if (!known_size_p (max_size) || maybe_ne (max_size, size))
6893 return NULL_TREE;
6894 /* We can not determine ctor. */
6895 if (!ctor)
6896 return NULL_TREE;
6898 /* Out of bound array access. Value is undefined, but don't fold. */
6899 if (maybe_lt (offset, 0))
6900 return NULL_TREE;
6902 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6903 base);
6905 case REALPART_EXPR:
6906 case IMAGPART_EXPR:
6908 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6909 if (c && TREE_CODE (c) == COMPLEX_CST)
6910 return fold_build1_loc (EXPR_LOCATION (t),
6911 TREE_CODE (t), TREE_TYPE (t), c);
6912 break;
6915 default:
6916 break;
6919 return NULL_TREE;
6922 tree
6923 fold_const_aggregate_ref (tree t)
6925 return fold_const_aggregate_ref_1 (t, NULL);
6928 /* Lookup virtual method with index TOKEN in a virtual table V
6929 at OFFSET.
6930 Set CAN_REFER if non-NULL to false if method
6931 is not referable or if the virtual table is ill-formed (such as rewriten
6932 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6934 tree
6935 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6936 tree v,
6937 unsigned HOST_WIDE_INT offset,
6938 bool *can_refer)
6940 tree vtable = v, init, fn;
6941 unsigned HOST_WIDE_INT size;
6942 unsigned HOST_WIDE_INT elt_size, access_index;
6943 tree domain_type;
6945 if (can_refer)
6946 *can_refer = true;
6948 /* First of all double check we have virtual table. */
6949 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6951 /* Pass down that we lost track of the target. */
6952 if (can_refer)
6953 *can_refer = false;
6954 return NULL_TREE;
6957 init = ctor_for_folding (v);
6959 /* The virtual tables should always be born with constructors
6960 and we always should assume that they are avaialble for
6961 folding. At the moment we do not stream them in all cases,
6962 but it should never happen that ctor seem unreachable. */
6963 gcc_assert (init);
6964 if (init == error_mark_node)
6966 /* Pass down that we lost track of the target. */
6967 if (can_refer)
6968 *can_refer = false;
6969 return NULL_TREE;
6971 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6972 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6973 offset *= BITS_PER_UNIT;
6974 offset += token * size;
6976 /* Lookup the value in the constructor that is assumed to be array.
6977 This is equivalent to
6978 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6979 offset, size, NULL);
6980 but in a constant time. We expect that frontend produced a simple
6981 array without indexed initializers. */
6983 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6984 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6985 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6986 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6988 access_index = offset / BITS_PER_UNIT / elt_size;
6989 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6991 /* This code makes an assumption that there are no
6992 indexed fileds produced by C++ FE, so we can directly index the array. */
6993 if (access_index < CONSTRUCTOR_NELTS (init))
6995 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6996 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6997 STRIP_NOPS (fn);
6999 else
7000 fn = NULL;
7002 /* For type inconsistent program we may end up looking up virtual method
7003 in virtual table that does not contain TOKEN entries. We may overrun
7004 the virtual table and pick up a constant or RTTI info pointer.
7005 In any case the call is undefined. */
7006 if (!fn
7007 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7008 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7009 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7010 else
7012 fn = TREE_OPERAND (fn, 0);
7014 /* When cgraph node is missing and function is not public, we cannot
7015 devirtualize. This can happen in WHOPR when the actual method
7016 ends up in other partition, because we found devirtualization
7017 possibility too late. */
7018 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7020 if (can_refer)
7022 *can_refer = false;
7023 return fn;
7025 return NULL_TREE;
7029 /* Make sure we create a cgraph node for functions we'll reference.
7030 They can be non-existent if the reference comes from an entry
7031 of an external vtable for example. */
7032 cgraph_node::get_create (fn);
7034 return fn;
7037 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7038 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7039 KNOWN_BINFO carries the binfo describing the true type of
7040 OBJ_TYPE_REF_OBJECT(REF).
7041 Set CAN_REFER if non-NULL to false if method
7042 is not referable or if the virtual table is ill-formed (such as rewriten
7043 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7045 tree
7046 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7047 bool *can_refer)
7049 unsigned HOST_WIDE_INT offset;
7050 tree v;
7052 v = BINFO_VTABLE (known_binfo);
7053 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7054 if (!v)
7055 return NULL_TREE;
7057 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7059 if (can_refer)
7060 *can_refer = false;
7061 return NULL_TREE;
7063 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7066 /* Given a pointer value T, return a simplified version of an
7067 indirection through T, or NULL_TREE if no simplification is
7068 possible. Note that the resulting type may be different from
7069 the type pointed to in the sense that it is still compatible
7070 from the langhooks point of view. */
7072 tree
7073 gimple_fold_indirect_ref (tree t)
7075 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7076 tree sub = t;
7077 tree subtype;
7079 STRIP_NOPS (sub);
7080 subtype = TREE_TYPE (sub);
7081 if (!POINTER_TYPE_P (subtype)
7082 || TYPE_REF_CAN_ALIAS_ALL (ptype))
7083 return NULL_TREE;
7085 if (TREE_CODE (sub) == ADDR_EXPR)
7087 tree op = TREE_OPERAND (sub, 0);
7088 tree optype = TREE_TYPE (op);
7089 /* *&p => p */
7090 if (useless_type_conversion_p (type, optype))
7091 return op;
7093 /* *(foo *)&fooarray => fooarray[0] */
7094 if (TREE_CODE (optype) == ARRAY_TYPE
7095 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7096 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7098 tree type_domain = TYPE_DOMAIN (optype);
7099 tree min_val = size_zero_node;
7100 if (type_domain && TYPE_MIN_VALUE (type_domain))
7101 min_val = TYPE_MIN_VALUE (type_domain);
7102 if (TREE_CODE (min_val) == INTEGER_CST)
7103 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7105 /* *(foo *)&complexfoo => __real__ complexfoo */
7106 else if (TREE_CODE (optype) == COMPLEX_TYPE
7107 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7108 return fold_build1 (REALPART_EXPR, type, op);
7109 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7110 else if (TREE_CODE (optype) == VECTOR_TYPE
7111 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7113 tree part_width = TYPE_SIZE (type);
7114 tree index = bitsize_int (0);
7115 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7119 /* *(p + CST) -> ... */
7120 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7121 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7123 tree addr = TREE_OPERAND (sub, 0);
7124 tree off = TREE_OPERAND (sub, 1);
7125 tree addrtype;
7127 STRIP_NOPS (addr);
7128 addrtype = TREE_TYPE (addr);
7130 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7131 if (TREE_CODE (addr) == ADDR_EXPR
7132 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7133 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7134 && tree_fits_uhwi_p (off))
7136 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7137 tree part_width = TYPE_SIZE (type);
7138 unsigned HOST_WIDE_INT part_widthi
7139 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7140 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7141 tree index = bitsize_int (indexi);
7142 if (known_lt (offset / part_widthi,
7143 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7144 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7145 part_width, index);
7148 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7149 if (TREE_CODE (addr) == ADDR_EXPR
7150 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7151 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7153 tree size = TYPE_SIZE_UNIT (type);
7154 if (tree_int_cst_equal (size, off))
7155 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7158 /* *(p + CST) -> MEM_REF <p, CST>. */
7159 if (TREE_CODE (addr) != ADDR_EXPR
7160 || DECL_P (TREE_OPERAND (addr, 0)))
7161 return fold_build2 (MEM_REF, type,
7162 addr,
7163 wide_int_to_tree (ptype, wi::to_wide (off)));
7166 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7167 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7168 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7169 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7171 tree type_domain;
7172 tree min_val = size_zero_node;
7173 tree osub = sub;
7174 sub = gimple_fold_indirect_ref (sub);
7175 if (! sub)
7176 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7177 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7178 if (type_domain && TYPE_MIN_VALUE (type_domain))
7179 min_val = TYPE_MIN_VALUE (type_domain);
7180 if (TREE_CODE (min_val) == INTEGER_CST)
7181 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7184 return NULL_TREE;
7187 /* Return true if CODE is an operation that when operating on signed
7188 integer types involves undefined behavior on overflow and the
7189 operation can be expressed with unsigned arithmetic. */
7191 bool
7192 arith_code_with_undefined_signed_overflow (tree_code code)
7194 switch (code)
7196 case PLUS_EXPR:
7197 case MINUS_EXPR:
7198 case MULT_EXPR:
7199 case NEGATE_EXPR:
7200 case POINTER_PLUS_EXPR:
7201 return true;
7202 default:
7203 return false;
7207 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7208 operation that can be transformed to unsigned arithmetic by converting
7209 its operand, carrying out the operation in the corresponding unsigned
7210 type and converting the result back to the original type.
7212 Returns a sequence of statements that replace STMT and also contain
7213 a modified form of STMT itself. */
7215 gimple_seq
7216 rewrite_to_defined_overflow (gimple *stmt)
7218 if (dump_file && (dump_flags & TDF_DETAILS))
7220 fprintf (dump_file, "rewriting stmt with undefined signed "
7221 "overflow ");
7222 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7225 tree lhs = gimple_assign_lhs (stmt);
7226 tree type = unsigned_type_for (TREE_TYPE (lhs));
7227 gimple_seq stmts = NULL;
7228 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7230 tree op = gimple_op (stmt, i);
7231 op = gimple_convert (&stmts, type, op);
7232 gimple_set_op (stmt, i, op);
7234 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7235 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7236 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7237 gimple_seq_add_stmt (&stmts, stmt);
7238 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7239 gimple_seq_add_stmt (&stmts, cvt);
7241 return stmts;
7245 /* The valueization hook we use for the gimple_build API simplification.
7246 This makes us match fold_buildN behavior by only combining with
7247 statements in the sequence(s) we are currently building. */
7249 static tree
7250 gimple_build_valueize (tree op)
7252 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7253 return op;
7254 return NULL_TREE;
7257 /* Build the expression CODE OP0 of type TYPE with location LOC,
7258 simplifying it first if possible. Returns the built
7259 expression value and appends statements possibly defining it
7260 to SEQ. */
7262 tree
7263 gimple_build (gimple_seq *seq, location_t loc,
7264 enum tree_code code, tree type, tree op0)
7266 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7267 if (!res)
7269 res = create_tmp_reg_or_ssa_name (type);
7270 gimple *stmt;
7271 if (code == REALPART_EXPR
7272 || code == IMAGPART_EXPR
7273 || code == VIEW_CONVERT_EXPR)
7274 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7275 else
7276 stmt = gimple_build_assign (res, code, op0);
7277 gimple_set_location (stmt, loc);
7278 gimple_seq_add_stmt_without_update (seq, stmt);
7280 return res;
7283 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7284 simplifying it first if possible. Returns the built
7285 expression value and appends statements possibly defining it
7286 to SEQ. */
7288 tree
7289 gimple_build (gimple_seq *seq, location_t loc,
7290 enum tree_code code, tree type, tree op0, tree op1)
7292 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7293 if (!res)
7295 res = create_tmp_reg_or_ssa_name (type);
7296 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7297 gimple_set_location (stmt, loc);
7298 gimple_seq_add_stmt_without_update (seq, stmt);
7300 return res;
7303 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7304 simplifying it first if possible. Returns the built
7305 expression value and appends statements possibly defining it
7306 to SEQ. */
7308 tree
7309 gimple_build (gimple_seq *seq, location_t loc,
7310 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7312 tree res = gimple_simplify (code, type, op0, op1, op2,
7313 seq, gimple_build_valueize);
7314 if (!res)
7316 res = create_tmp_reg_or_ssa_name (type);
7317 gimple *stmt;
7318 if (code == BIT_FIELD_REF)
7319 stmt = gimple_build_assign (res, code,
7320 build3 (code, type, op0, op1, op2));
7321 else
7322 stmt = gimple_build_assign (res, code, op0, op1, op2);
7323 gimple_set_location (stmt, loc);
7324 gimple_seq_add_stmt_without_update (seq, stmt);
7326 return res;
7329 /* Build the call FN (ARG0) with a result of type TYPE
7330 (or no result if TYPE is void) with location LOC,
7331 simplifying it first if possible. Returns the built
7332 expression value (or NULL_TREE if TYPE is void) and appends
7333 statements possibly defining it to SEQ. */
7335 tree
7336 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7337 tree type, tree arg0)
7339 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7340 if (!res)
7342 gcall *stmt;
7343 if (internal_fn_p (fn))
7344 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7345 else
7347 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7348 stmt = gimple_build_call (decl, 1, arg0);
7350 if (!VOID_TYPE_P (type))
7352 res = create_tmp_reg_or_ssa_name (type);
7353 gimple_call_set_lhs (stmt, res);
7355 gimple_set_location (stmt, loc);
7356 gimple_seq_add_stmt_without_update (seq, stmt);
7358 return res;
7361 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7362 (or no result if TYPE is void) with location LOC,
7363 simplifying it first if possible. Returns the built
7364 expression value (or NULL_TREE if TYPE is void) and appends
7365 statements possibly defining it to SEQ. */
7367 tree
7368 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7369 tree type, tree arg0, tree arg1)
7371 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7372 if (!res)
7374 gcall *stmt;
7375 if (internal_fn_p (fn))
7376 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7377 else
7379 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7380 stmt = gimple_build_call (decl, 2, arg0, arg1);
7382 if (!VOID_TYPE_P (type))
7384 res = create_tmp_reg_or_ssa_name (type);
7385 gimple_call_set_lhs (stmt, res);
7387 gimple_set_location (stmt, loc);
7388 gimple_seq_add_stmt_without_update (seq, stmt);
7390 return res;
7393 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7394 (or no result if TYPE is void) with location LOC,
7395 simplifying it first if possible. Returns the built
7396 expression value (or NULL_TREE if TYPE is void) and appends
7397 statements possibly defining it to SEQ. */
7399 tree
7400 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7401 tree type, tree arg0, tree arg1, tree arg2)
7403 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7404 seq, gimple_build_valueize);
7405 if (!res)
7407 gcall *stmt;
7408 if (internal_fn_p (fn))
7409 stmt = gimple_build_call_internal (as_internal_fn (fn),
7410 3, arg0, arg1, arg2);
7411 else
7413 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7414 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7416 if (!VOID_TYPE_P (type))
7418 res = create_tmp_reg_or_ssa_name (type);
7419 gimple_call_set_lhs (stmt, res);
7421 gimple_set_location (stmt, loc);
7422 gimple_seq_add_stmt_without_update (seq, stmt);
7424 return res;
7427 /* Build the conversion (TYPE) OP with a result of type TYPE
7428 with location LOC if such conversion is neccesary in GIMPLE,
7429 simplifying it first.
7430 Returns the built expression value and appends
7431 statements possibly defining it to SEQ. */
7433 tree
7434 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7436 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7437 return op;
7438 return gimple_build (seq, loc, NOP_EXPR, type, op);
7441 /* Build the conversion (ptrofftype) OP with a result of a type
7442 compatible with ptrofftype with location LOC if such conversion
7443 is neccesary in GIMPLE, simplifying it first.
7444 Returns the built expression value and appends
7445 statements possibly defining it to SEQ. */
7447 tree
7448 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7450 if (ptrofftype_p (TREE_TYPE (op)))
7451 return op;
7452 return gimple_convert (seq, loc, sizetype, op);
7455 /* Build a vector of type TYPE in which each element has the value OP.
7456 Return a gimple value for the result, appending any new statements
7457 to SEQ. */
7459 tree
7460 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7461 tree op)
7463 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7464 && !CONSTANT_CLASS_P (op))
7465 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7467 tree res, vec = build_vector_from_val (type, op);
7468 if (is_gimple_val (vec))
7469 return vec;
7470 if (gimple_in_ssa_p (cfun))
7471 res = make_ssa_name (type);
7472 else
7473 res = create_tmp_reg (type);
7474 gimple *stmt = gimple_build_assign (res, vec);
7475 gimple_set_location (stmt, loc);
7476 gimple_seq_add_stmt_without_update (seq, stmt);
7477 return res;
7480 /* Build a vector from BUILDER, handling the case in which some elements
7481 are non-constant. Return a gimple value for the result, appending any
7482 new instructions to SEQ.
7484 BUILDER must not have a stepped encoding on entry. This is because
7485 the function is not geared up to handle the arithmetic that would
7486 be needed in the variable case, and any code building a vector that
7487 is known to be constant should use BUILDER->build () directly. */
7489 tree
7490 gimple_build_vector (gimple_seq *seq, location_t loc,
7491 tree_vector_builder *builder)
7493 gcc_assert (builder->nelts_per_pattern () <= 2);
7494 unsigned int encoded_nelts = builder->encoded_nelts ();
7495 for (unsigned int i = 0; i < encoded_nelts; ++i)
7496 if (!TREE_CONSTANT ((*builder)[i]))
7498 tree type = builder->type ();
7499 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7500 vec<constructor_elt, va_gc> *v;
7501 vec_alloc (v, nelts);
7502 for (i = 0; i < nelts; ++i)
7503 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7505 tree res;
7506 if (gimple_in_ssa_p (cfun))
7507 res = make_ssa_name (type);
7508 else
7509 res = create_tmp_reg (type);
7510 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7511 gimple_set_location (stmt, loc);
7512 gimple_seq_add_stmt_without_update (seq, stmt);
7513 return res;
7515 return builder->build ();
7518 /* Return true if the result of assignment STMT is known to be non-negative.
7519 If the return value is based on the assumption that signed overflow is
7520 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7521 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7523 static bool
7524 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7525 int depth)
7527 enum tree_code code = gimple_assign_rhs_code (stmt);
7528 switch (get_gimple_rhs_class (code))
7530 case GIMPLE_UNARY_RHS:
7531 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7532 gimple_expr_type (stmt),
7533 gimple_assign_rhs1 (stmt),
7534 strict_overflow_p, depth);
7535 case GIMPLE_BINARY_RHS:
7536 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7537 gimple_expr_type (stmt),
7538 gimple_assign_rhs1 (stmt),
7539 gimple_assign_rhs2 (stmt),
7540 strict_overflow_p, depth);
7541 case GIMPLE_TERNARY_RHS:
7542 return false;
7543 case GIMPLE_SINGLE_RHS:
7544 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7545 strict_overflow_p, depth);
7546 case GIMPLE_INVALID_RHS:
7547 break;
7549 gcc_unreachable ();
7552 /* Return true if return value of call STMT is known to be non-negative.
7553 If the return value is based on the assumption that signed overflow is
7554 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7555 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7557 static bool
7558 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7559 int depth)
7561 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7562 gimple_call_arg (stmt, 0) : NULL_TREE;
7563 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7564 gimple_call_arg (stmt, 1) : NULL_TREE;
7566 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7567 gimple_call_combined_fn (stmt),
7568 arg0,
7569 arg1,
7570 strict_overflow_p, depth);
7573 /* Return true if return value of call STMT is known to be non-negative.
7574 If the return value is based on the assumption that signed overflow is
7575 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7576 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7578 static bool
7579 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7580 int depth)
7582 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7584 tree arg = gimple_phi_arg_def (stmt, i);
7585 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7586 return false;
7588 return true;
7591 /* Return true if STMT is known to compute a non-negative value.
7592 If the return value is based on the assumption that signed overflow is
7593 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7594 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7596 bool
7597 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7598 int depth)
7600 switch (gimple_code (stmt))
7602 case GIMPLE_ASSIGN:
7603 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7604 depth);
7605 case GIMPLE_CALL:
7606 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7607 depth);
7608 case GIMPLE_PHI:
7609 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7610 depth);
7611 default:
7612 return false;
7616 /* Return true if the floating-point value computed by assignment STMT
7617 is known to have an integer value. We also allow +Inf, -Inf and NaN
7618 to be considered integer values. Return false for signaling NaN.
7620 DEPTH is the current nesting depth of the query. */
7622 static bool
7623 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7625 enum tree_code code = gimple_assign_rhs_code (stmt);
7626 switch (get_gimple_rhs_class (code))
7628 case GIMPLE_UNARY_RHS:
7629 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7630 gimple_assign_rhs1 (stmt), depth);
7631 case GIMPLE_BINARY_RHS:
7632 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7633 gimple_assign_rhs1 (stmt),
7634 gimple_assign_rhs2 (stmt), depth);
7635 case GIMPLE_TERNARY_RHS:
7636 return false;
7637 case GIMPLE_SINGLE_RHS:
7638 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7639 case GIMPLE_INVALID_RHS:
7640 break;
7642 gcc_unreachable ();
7645 /* Return true if the floating-point value computed by call STMT is known
7646 to have an integer value. We also allow +Inf, -Inf and NaN to be
7647 considered integer values. Return false for signaling NaN.
7649 DEPTH is the current nesting depth of the query. */
7651 static bool
7652 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7654 tree arg0 = (gimple_call_num_args (stmt) > 0
7655 ? gimple_call_arg (stmt, 0)
7656 : NULL_TREE);
7657 tree arg1 = (gimple_call_num_args (stmt) > 1
7658 ? gimple_call_arg (stmt, 1)
7659 : NULL_TREE);
7660 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7661 arg0, arg1, depth);
7664 /* Return true if the floating-point result of phi STMT is known to have
7665 an integer value. We also allow +Inf, -Inf and NaN to be considered
7666 integer values. Return false for signaling NaN.
7668 DEPTH is the current nesting depth of the query. */
7670 static bool
7671 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7673 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7675 tree arg = gimple_phi_arg_def (stmt, i);
7676 if (!integer_valued_real_single_p (arg, depth + 1))
7677 return false;
7679 return true;
7682 /* Return true if the floating-point value computed by STMT is known
7683 to have an integer value. We also allow +Inf, -Inf and NaN to be
7684 considered integer values. Return false for signaling NaN.
7686 DEPTH is the current nesting depth of the query. */
7688 bool
7689 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7691 switch (gimple_code (stmt))
7693 case GIMPLE_ASSIGN:
7694 return gimple_assign_integer_valued_real_p (stmt, depth);
7695 case GIMPLE_CALL:
7696 return gimple_call_integer_valued_real_p (stmt, depth);
7697 case GIMPLE_PHI:
7698 return gimple_phi_integer_valued_real_p (stmt, depth);
7699 default:
7700 return false;