gcc/gimple-match-head.c
1 /* Preamble and helpers for the autogenerated gimple-match.c file.
2 Copyright (C) 2014-2018 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "fold-const.h"
31 #include "fold-const-call.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "calls.h"
35 #include "tree-dfa.h"
36 #include "builtins.h"
37 #include "gimple-match.h"
38 #include "tree-pass.h"
39 #include "internal-fn.h"
40 #include "case-cfn-macros.h"
41 #include "gimplify.h"
42 #include "optabs-tree.h"
43 #include "tree-eh.h"
46 /* Forward declarations of the private auto-generated matchers.
47 They expect valueized operands in canonical order and do not
48 perform simplification of all-constant operands. */
49 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
50 code_helper, tree, tree);
51 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
52 code_helper, tree, tree, tree);
53 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
54 code_helper, tree, tree, tree, tree);
55 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
56 code_helper, tree, tree, tree, tree, tree);
57 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
58 code_helper, tree, tree, tree, tree, tree, tree);
60 const unsigned int gimple_match_op::MAX_NUM_OPS;
62 /* Return whether T is a constant that we'll dispatch to fold to
63 evaluate fully constant expressions. */
65 static inline bool
66 constant_for_folding (tree t)
68 return (CONSTANT_CLASS_P (t)
69 /* The following is only interesting to string builtins. */
70 || (TREE_CODE (t) == ADDR_EXPR
71 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
74 /* Try to convert conditional operation ORIG_OP into an IFN_COND_*
75 operation. Return true on success, storing the new operation in NEW_OP. */
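/* For instance (illustrative sketch, mirroring the IFN_COND_ADD example used
   later in this file): a conditional (plus A B) with condition COND and else
   value ELSE becomes IFN_COND_ADD (COND, A, B, ELSE), provided such a
   conditional internal function exists; otherwise the conversion fails.  */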
77 static bool
78 convert_conditional_op (gimple_match_op *orig_op,
79 gimple_match_op *new_op)
81 internal_fn ifn;
82 if (orig_op->code.is_tree_code ())
83 ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
84 else
86 combined_fn cfn = orig_op->code;
87 if (!internal_fn_p (cfn))
88 return false;
89 ifn = get_conditional_internal_fn (as_internal_fn (cfn));
91 if (ifn == IFN_LAST)
92 return false;
93 unsigned int num_ops = orig_op->num_ops;
94 new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
95 new_op->ops[0] = orig_op->cond.cond;
96 for (unsigned int i = 0; i < num_ops; ++i)
97 new_op->ops[i + 1] = orig_op->ops[i];
98 tree else_value = orig_op->cond.else_value;
99 if (!else_value)
100 else_value = targetm.preferred_else_value (ifn, orig_op->type,
101 num_ops, orig_op->ops);
102 new_op->ops[num_ops + 1] = else_value;
103 return true;
106 /* RES_OP is the result of a simplification. If it is conditional,
107 try to replace it with the equivalent UNCOND form, such as an
108 IFN_COND_* call or a VEC_COND_EXPR. Also try to resimplify the
109 result of the replacement if appropriate, adding any new statements to
110 SEQ and using VALUEIZE as the valueization function. Return true if
111 this resimplification occurred and resulted in at least one change. */
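/* Sketch of one case handled below: if RES_OP is a conditional operation on
   a vector type whose simplified "then" value is already a gimple value V,
   with condition COND and else value ELSE, it is rewritten as
   VEC_COND_EXPR <COND, V, ELSE> and then resimplified.  */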
113 static bool
114 maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
115 tree (*valueize) (tree))
117 if (!res_op->cond.cond)
118 return false;
120 if (!res_op->cond.else_value
121 && res_op->code.is_tree_code ())
123 /* The "else" value doesn't matter. If the "then" value is a
124 gimple value, just use it unconditionally. This isn't a
125 simplification in itself, since there was no operation to
126 build in the first place. */
127 if (gimple_simplified_result_is_gimple_val (res_op))
129 res_op->cond.cond = NULL_TREE;
130 return false;
133 /* Likewise if the operation would not trap. */
134 bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
135 && TYPE_OVERFLOW_TRAPS (res_op->type));
136 if (!operation_could_trap_p ((tree_code) res_op->code,
137 FLOAT_TYPE_P (res_op->type),
138 honor_trapv, res_op->op_or_null (1)))
140 res_op->cond.cond = NULL_TREE;
141 return false;
145 /* If the "then" value is a gimple value and the "else" value matters,
146 create a VEC_COND_EXPR between them, then see if it can be further
147 simplified. */
148 gimple_match_op new_op;
149 if (res_op->cond.else_value
150 && VECTOR_TYPE_P (res_op->type)
151 && gimple_simplified_result_is_gimple_val (res_op))
153 new_op.set_op (VEC_COND_EXPR, res_op->type,
154 res_op->cond.cond, res_op->ops[0],
155 res_op->cond.else_value);
156 *res_op = new_op;
157 return gimple_resimplify3 (seq, res_op, valueize);
160 /* Otherwise try rewriting the operation as an IFN_COND_* call.
161 Again, this isn't a simplification in itself, since it's what
162 RES_OP already described. */
163 if (convert_conditional_op (res_op, &new_op))
164 *res_op = new_op;
166 return false;
169 /* Helper that matches and simplifies the toplevel result from
170 a gimple_simplify run (where we don't want to build
171 a stmt in case it's used in in-place folding). Replaces
172 RES_OP with a simplified and/or canonicalized result and
173 returns whether any change was made. */
175 bool
176 gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
177 tree (*valueize)(tree))
179 if (constant_for_folding (res_op->ops[0]))
181 tree tem = NULL_TREE;
182 if (res_op->code.is_tree_code ())
183 tem = const_unop (res_op->code, res_op->type, res_op->ops[0]);
184 else
185 tem = fold_const_call (combined_fn (res_op->code), res_op->type,
186 res_op->ops[0]);
187 if (tem != NULL_TREE
188 && CONSTANT_CLASS_P (tem))
190 if (TREE_OVERFLOW_P (tem))
191 tem = drop_tree_overflow (tem);
192 res_op->set_value (tem);
193 maybe_resimplify_conditional_op (seq, res_op, valueize);
194 return true;
198 /* Limit recursion, there are cases like PR80887 and others, for
199 example when value-numbering presents us with unfolded expressions
200 that we are really not prepared to handle without eventual
201 oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
202 itself as available expression. */
203 static unsigned depth;
204 if (depth > 10)
206 if (dump_file && (dump_flags & TDF_FOLDING))
207 fprintf (dump_file, "Aborting expression simplification due to "
208 "deep recursion\n");
209 return false;
212 ++depth;
213 gimple_match_op res_op2 (*res_op);
214 if (gimple_simplify (&res_op2, seq, valueize,
215 res_op->code, res_op->type, res_op->ops[0]))
217 --depth;
218 *res_op = res_op2;
219 return true;
221 --depth;
223 if (maybe_resimplify_conditional_op (seq, res_op, valueize))
224 return true;
226 return false;
229 /* Helper that matches and simplifies the toplevel result from
230 a gimple_simplify run (where we don't want to build
231 a stmt in case it's used in in-place folding). Replaces
232 RES_OP with a simplified and/or canonicalized result and
233 returns whether any change was made. */
235 bool
236 gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
237 tree (*valueize)(tree))
239 if (constant_for_folding (res_op->ops[0])
240 && constant_for_folding (res_op->ops[1]))
242 tree tem = NULL_TREE;
243 if (res_op->code.is_tree_code ())
244 tem = const_binop (res_op->code, res_op->type,
245 res_op->ops[0], res_op->ops[1]);
246 else
247 tem = fold_const_call (combined_fn (res_op->code), res_op->type,
248 res_op->ops[0], res_op->ops[1]);
249 if (tem != NULL_TREE
250 && CONSTANT_CLASS_P (tem))
252 if (TREE_OVERFLOW_P (tem))
253 tem = drop_tree_overflow (tem);
254 res_op->set_value (tem);
255 maybe_resimplify_conditional_op (seq, res_op, valueize);
256 return true;
260 /* Canonicalize operand order. */
261 bool canonicalized = false;
262 if (res_op->code.is_tree_code ()
263 && (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
264 || commutative_tree_code (res_op->code))
265 && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
267 std::swap (res_op->ops[0], res_op->ops[1]);
268 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison)
269 res_op->code = swap_tree_comparison (res_op->code);
270 canonicalized = true;
273 /* Limit recursion, see gimple_resimplify1. */
274 static unsigned depth;
275 if (depth > 10)
277 if (dump_file && (dump_flags & TDF_FOLDING))
278 fprintf (dump_file, "Aborting expression simplification due to "
279 "deep recursion\n");
280 return false;
283 ++depth;
284 gimple_match_op res_op2 (*res_op);
285 if (gimple_simplify (&res_op2, seq, valueize,
286 res_op->code, res_op->type,
287 res_op->ops[0], res_op->ops[1]))
289 --depth;
290 *res_op = res_op2;
291 return true;
293 --depth;
295 if (maybe_resimplify_conditional_op (seq, res_op, valueize))
296 return true;
298 return canonicalized;
301 /* Helper that matches and simplifies the toplevel result from
302 a gimple_simplify run (where we don't want to build
303 a stmt in case it's used in in-place folding). Replaces
304 RES_OP with a simplified and/or canonicalized result and
305 returns whether any change was made. */
307 bool
308 gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
309 tree (*valueize)(tree))
311 if (constant_for_folding (res_op->ops[0])
312 && constant_for_folding (res_op->ops[1])
313 && constant_for_folding (res_op->ops[2]))
315 tree tem = NULL_TREE;
316 if (res_op->code.is_tree_code ())
317 tem = fold_ternary/*_to_constant*/ (res_op->code, res_op->type,
318 res_op->ops[0], res_op->ops[1],
319 res_op->ops[2]);
320 else
321 tem = fold_const_call (combined_fn (res_op->code), res_op->type,
322 res_op->ops[0], res_op->ops[1], res_op->ops[2]);
323 if (tem != NULL_TREE
324 && CONSTANT_CLASS_P (tem))
326 if (TREE_OVERFLOW_P (tem))
327 tem = drop_tree_overflow (tem);
328 res_op->set_value (tem);
329 maybe_resimplify_conditional_op (seq, res_op, valueize);
330 return true;
334 /* Canonicalize operand order. */
335 bool canonicalized = false;
336 if (res_op->code.is_tree_code ()
337 && commutative_ternary_tree_code (res_op->code)
338 && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
340 std::swap (res_op->ops[0], res_op->ops[1]);
341 canonicalized = true;
344 /* Limit recursion, see gimple_resimplify1. */
345 static unsigned depth;
346 if (depth > 10)
348 if (dump_file && (dump_flags & TDF_FOLDING))
349 fprintf (dump_file, "Aborting expression simplification due to "
350 "deep recursion\n");
351 return false;
354 ++depth;
355 gimple_match_op res_op2 (*res_op);
356 if (gimple_simplify (&res_op2, seq, valueize,
357 res_op->code, res_op->type,
358 res_op->ops[0], res_op->ops[1], res_op->ops[2]))
360 --depth;
361 *res_op = res_op2;
362 return true;
364 --depth;
366 if (maybe_resimplify_conditional_op (seq, res_op, valueize))
367 return true;
369 return canonicalized;
372 /* Helper that matches and simplifies the toplevel result from
373 a gimple_simplify run (where we don't want to build
374 a stmt in case it's used in in-place folding). Replaces
375 RES_OP with a simplified and/or canonicalized result and
376 returns whether any change was made. */
378 bool
379 gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
380 tree (*valueize)(tree))
382 /* No constant folding is defined for four-operand functions. */
384 /* Limit recursion, see gimple_resimplify1. */
385 static unsigned depth;
386 if (depth > 10)
388 if (dump_file && (dump_flags & TDF_FOLDING))
389 fprintf (dump_file, "Aborting expression simplification due to "
390 "deep recursion\n");
391 return false;
394 ++depth;
395 gimple_match_op res_op2 (*res_op);
396 if (gimple_simplify (&res_op2, seq, valueize,
397 res_op->code, res_op->type,
398 res_op->ops[0], res_op->ops[1], res_op->ops[2],
399 res_op->ops[3]))
401 --depth;
402 *res_op = res_op2;
403 return true;
405 --depth;
407 if (maybe_resimplify_conditional_op (seq, res_op, valueize))
408 return true;
410 return false;
413 /* Helper that matches and simplifies the toplevel result from
414 a gimple_simplify run (where we don't want to build
415 a stmt in case it's used in in-place folding). Replaces
416 RES_OP with a simplified and/or canonicalized result and
417 returns whether any change was made. */
419 bool
420 gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
421 tree (*valueize)(tree))
423 /* No constant folding is defined for five-operand functions. */
425 gimple_match_op res_op2 (*res_op);
426 if (gimple_simplify (&res_op2, seq, valueize,
427 res_op->code, res_op->type,
428 res_op->ops[0], res_op->ops[1], res_op->ops[2],
429 res_op->ops[3], res_op->ops[4]))
431 *res_op = res_op2;
432 return true;
435 if (maybe_resimplify_conditional_op (seq, res_op, valueize))
436 return true;
438 return false;
441 /* If in GIMPLE the operation described by RES_OP should be single-rhs,
442 build a GENERIC tree for that expression and update RES_OP accordingly. */
444 void
445 maybe_build_generic_op (gimple_match_op *res_op)
447 tree_code code = (tree_code) res_op->code;
448 tree val;
449 switch (code)
451 case REALPART_EXPR:
452 case IMAGPART_EXPR:
453 case VIEW_CONVERT_EXPR:
454 val = build1 (code, res_op->type, res_op->ops[0]);
455 res_op->set_value (val);
456 break;
457 case BIT_FIELD_REF:
458 val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
459 res_op->ops[2]);
460 REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
461 res_op->set_value (val);
462 break;
463 default:;
467 tree (*mprts_hook) (gimple_match_op *);
469 /* Try to build RES_OP, which is known to be a call to FN. Return null
470 if the target doesn't support the function. */
472 static gcall *
473 build_call_internal (internal_fn fn, gimple_match_op *res_op)
475 if (direct_internal_fn_p (fn))
477 tree_pair types = direct_internal_fn_types (fn, res_op->type,
478 res_op->ops);
479 if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
480 return NULL;
482 return gimple_build_call_internal (fn, res_op->num_ops,
483 res_op->op_or_null (0),
484 res_op->op_or_null (1),
485 res_op->op_or_null (2),
486 res_op->op_or_null (3),
487 res_op->op_or_null (4));
490 /* Push the exploded expression described by RES_OP as a statement to
491 SEQ if necessary and return a gimple value denoting the value of the
492      expression.  If RES is not NULL then the result will always be RES
493      and even gimple values are pushed to SEQ. */
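/* For example (sketch of the tree-code path below): for a RES_OP describing
   (plus a_1 b_2) with a NULL RES, a fresh SSA name (or temporary register)
   tmp is created, the assignment tmp = a_1 + b_2 is appended to SEQ, and
   tmp is returned.  */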
495 tree
496 maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
498 tree *ops = res_op->ops;
499 unsigned num_ops = res_op->num_ops;
501 /* The caller should have converted conditional operations into an UNCOND
502 form and resimplified as appropriate. The conditional form only
503 survives this far if that conversion failed. */
504 if (res_op->cond.cond)
505 return NULL_TREE;
507 if (res_op->code.is_tree_code ())
509 if (!res
510 && gimple_simplified_result_is_gimple_val (res_op))
511 return ops[0];
512 if (mprts_hook)
514 tree tem = mprts_hook (res_op);
515 if (tem)
516 return tem;
520 if (!seq)
521 return NULL_TREE;
523 /* Play safe and do not allow abnormals to be mentioned in
524 newly created statements. */
525 for (unsigned int i = 0; i < num_ops; ++i)
526 if (TREE_CODE (ops[i]) == SSA_NAME
527 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
528 return NULL_TREE;
530 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
531 for (unsigned int i = 0; i < 2; ++i)
532 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
533 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
534 return NULL_TREE;
536 if (res_op->code.is_tree_code ())
538 if (!res)
540 if (gimple_in_ssa_p (cfun))
541 res = make_ssa_name (res_op->type);
542 else
543 res = create_tmp_reg (res_op->type);
545 maybe_build_generic_op (res_op);
546 gimple *new_stmt = gimple_build_assign (res, res_op->code,
547 res_op->op_or_null (0),
548 res_op->op_or_null (1),
549 res_op->op_or_null (2));
550 gimple_seq_add_stmt_without_update (seq, new_stmt);
551 return res;
553 else
555 gcc_assert (num_ops != 0);
556 combined_fn fn = res_op->code;
557 gcall *new_stmt = NULL;
558 if (internal_fn_p (fn))
560 /* Generate the given function if we can. */
561 internal_fn ifn = as_internal_fn (fn);
562 new_stmt = build_call_internal (ifn, res_op);
563 if (!new_stmt)
564 return NULL_TREE;
566 else
568 /* Find the function we want to call. */
569 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
570 if (!decl)
571 return NULL;
573 /* We can't and should not emit calls to non-const functions. */
574 if (!(flags_from_decl_or_type (decl) & ECF_CONST))
575 return NULL;
577 new_stmt = gimple_build_call (decl, num_ops,
578 res_op->op_or_null (0),
579 res_op->op_or_null (1),
580 res_op->op_or_null (2),
581 res_op->op_or_null (3),
582 res_op->op_or_null (4));
584 if (!res)
586 if (gimple_in_ssa_p (cfun))
587 res = make_ssa_name (res_op->type);
588 else
589 res = create_tmp_reg (res_op->type);
591 gimple_call_set_lhs (new_stmt, res);
592 gimple_seq_add_stmt_without_update (seq, new_stmt);
593 return res;
598 /* Public API overloads follow for operation being tree_code or
599 built_in_function and for one to three operands or arguments.
600      They return the simplified value, with any required statements pushed
601      to SEQ, or NULL_TREE if nothing could be simplified.
602      If SEQ is NULL, the simplification fails if it would need to create
603      new stmts.  If VALUEIZE is non-NULL then all
604 SSA names will be valueized using that hook prior to
605 applying simplifications. */
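/* A minimal usage sketch (hypothetical caller; TYPE and X stand for a
   suitable type and SSA name):

     gimple_seq seq = NULL;
     tree res = gimple_simplify (PLUS_EXPR, TYPE, X, build_zero_cst (TYPE),
				 &seq, NULL);

   RES is either NULL_TREE or the simplified value, with any statements
   needed to compute it appended to SEQ.  */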
607 /* Unary ops. */
609 tree
610 gimple_simplify (enum tree_code code, tree type,
611 tree op0,
612 gimple_seq *seq, tree (*valueize)(tree))
614 if (constant_for_folding (op0))
616 tree res = const_unop (code, type, op0);
617 if (res != NULL_TREE
618 && CONSTANT_CLASS_P (res))
619 return res;
622 gimple_match_op res_op;
623 if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
624 return NULL_TREE;
625 return maybe_push_res_to_seq (&res_op, seq);
628 /* Binary ops. */
630 tree
631 gimple_simplify (enum tree_code code, tree type,
632 tree op0, tree op1,
633 gimple_seq *seq, tree (*valueize)(tree))
635 if (constant_for_folding (op0) && constant_for_folding (op1))
637 tree res = const_binop (code, type, op0, op1);
638 if (res != NULL_TREE
639 && CONSTANT_CLASS_P (res))
640 return res;
643 /* Canonicalize operand order both for matching and fallback stmt
644 generation. */
645 if ((commutative_tree_code (code)
646 || TREE_CODE_CLASS (code) == tcc_comparison)
647 && tree_swap_operands_p (op0, op1))
649 std::swap (op0, op1);
650 if (TREE_CODE_CLASS (code) == tcc_comparison)
651 code = swap_tree_comparison (code);
654 gimple_match_op res_op;
655 if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
656 return NULL_TREE;
657 return maybe_push_res_to_seq (&res_op, seq);
660 /* Ternary ops. */
662 tree
663 gimple_simplify (enum tree_code code, tree type,
664 tree op0, tree op1, tree op2,
665 gimple_seq *seq, tree (*valueize)(tree))
667 if (constant_for_folding (op0) && constant_for_folding (op1)
668 && constant_for_folding (op2))
670 tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
671 if (res != NULL_TREE
672 && CONSTANT_CLASS_P (res))
673 return res;
676 /* Canonicalize operand order both for matching and fallback stmt
677 generation. */
678 if (commutative_ternary_tree_code (code)
679 && tree_swap_operands_p (op0, op1))
680 std::swap (op0, op1);
682 gimple_match_op res_op;
683 if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
684 return NULL_TREE;
685 return maybe_push_res_to_seq (&res_op, seq);
688 /* Builtin or internal function with one argument. */
690 tree
691 gimple_simplify (combined_fn fn, tree type,
692 tree arg0,
693 gimple_seq *seq, tree (*valueize)(tree))
695 if (constant_for_folding (arg0))
697 tree res = fold_const_call (fn, type, arg0);
698 if (res && CONSTANT_CLASS_P (res))
699 return res;
702 gimple_match_op res_op;
703 if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
704 return NULL_TREE;
705 return maybe_push_res_to_seq (&res_op, seq);
708 /* Builtin or internal function with two arguments. */
710 tree
711 gimple_simplify (combined_fn fn, tree type,
712 tree arg0, tree arg1,
713 gimple_seq *seq, tree (*valueize)(tree))
715 if (constant_for_folding (arg0)
716 && constant_for_folding (arg1))
718 tree res = fold_const_call (fn, type, arg0, arg1);
719 if (res && CONSTANT_CLASS_P (res))
720 return res;
723 gimple_match_op res_op;
724 if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
725 return NULL_TREE;
726 return maybe_push_res_to_seq (&res_op, seq);
729 /* Builtin or internal function with three arguments. */
731 tree
732 gimple_simplify (combined_fn fn, tree type,
733 tree arg0, tree arg1, tree arg2,
734 gimple_seq *seq, tree (*valueize)(tree))
736 if (constant_for_folding (arg0)
737 && constant_for_folding (arg1)
738 && constant_for_folding (arg2))
740 tree res = fold_const_call (fn, type, arg0, arg1, arg2);
741 if (res && CONSTANT_CLASS_P (res))
742 return res;
745 gimple_match_op res_op;
746 if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
747 return NULL_TREE;
748 return maybe_push_res_to_seq (&res_op, seq);
751 /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
752 VALUEIZED to true if valueization changed OP. */
754 static inline tree
755 do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
757 if (valueize && TREE_CODE (op) == SSA_NAME)
759 tree tem = valueize (op);
760 if (tem && tem != op)
762 op = tem;
763 valueized = true;
766 return op;
769 /* If RES_OP is a call to a conditional internal function, try simplifying
770 the associated unconditional operation and using the result to build
771 a new conditional operation. For example, if RES_OP is:
773 IFN_COND_ADD (COND, A, B, ELSE)
775 try simplifying (plus A B) and using the result to build a replacement
776 for the whole IFN_COND_ADD.
778 Return true if this approach led to a simplification, otherwise leave
779 RES_OP unchanged (and so suitable for other simplifications). When
780 returning true, add any new statements to SEQ and use VALUEIZE as the
781 valueization function.
783 RES_OP is known to be a call to IFN. */
785 static bool
786 try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
787 gimple_seq *seq, tree (*valueize) (tree))
789 code_helper op;
790 tree_code code = conditional_internal_fn_code (ifn);
791 if (code != ERROR_MARK)
792 op = code;
793 else
795 ifn = get_unconditional_internal_fn (ifn);
796 if (ifn == IFN_LAST)
797 return false;
798 op = as_combined_fn (ifn);
801 unsigned int num_ops = res_op->num_ops;
802 gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
803 res_op->ops[num_ops - 1]),
804 op, res_op->type, num_ops - 2);
805 for (unsigned int i = 1; i < num_ops - 1; ++i)
806 cond_op.ops[i - 1] = res_op->ops[i];
807 switch (num_ops - 2)
809 case 2:
810 if (!gimple_resimplify2 (seq, &cond_op, valueize))
811 return false;
812 break;
813 case 3:
814 if (!gimple_resimplify3 (seq, &cond_op, valueize))
815 return false;
816 break;
817 default:
818 gcc_unreachable ();
820 *res_op = cond_op;
821 maybe_resimplify_conditional_op (seq, res_op, valueize);
822 return true;
825 /* The main STMT based simplification entry. It is used by the fold_stmt
826 and the fold_stmt_to_constant APIs. */
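/* Sketch of the flow: for a GIMPLE_ASSIGN such as lhs_2 = x_1 + 0, RES_OP is
   seeded with (PLUS_EXPR, TREE_TYPE (lhs_2), x_1, 0) from the statement
   operands (valueized via TOP_VALUEIZE) and handed to gimple_resimplify2,
   which may reduce it to just x_1.  */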
828 bool
829 gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
830 tree (*valueize)(tree), tree (*top_valueize)(tree))
832 switch (gimple_code (stmt))
834 case GIMPLE_ASSIGN:
836 enum tree_code code = gimple_assign_rhs_code (stmt);
837 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
838 switch (gimple_assign_rhs_class (stmt))
840 case GIMPLE_SINGLE_RHS:
841 if (code == REALPART_EXPR
842 || code == IMAGPART_EXPR
843 || code == VIEW_CONVERT_EXPR)
845 tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
846 bool valueized = false;
847 op0 = do_valueize (op0, top_valueize, valueized);
848 res_op->set_op (code, type, op0);
849 return (gimple_resimplify1 (seq, res_op, valueize)
850 || valueized);
852 else if (code == BIT_FIELD_REF)
854 tree rhs1 = gimple_assign_rhs1 (stmt);
855 tree op0 = TREE_OPERAND (rhs1, 0);
856 bool valueized = false;
857 op0 = do_valueize (op0, top_valueize, valueized);
858 res_op->set_op (code, type, op0,
859 TREE_OPERAND (rhs1, 1),
860 TREE_OPERAND (rhs1, 2),
861 REF_REVERSE_STORAGE_ORDER (rhs1));
862 if (res_op->reverse)
863 return valueized;
864 return (gimple_resimplify3 (seq, res_op, valueize)
865 || valueized);
867 else if (code == SSA_NAME
868 && top_valueize)
870 tree op0 = gimple_assign_rhs1 (stmt);
871 tree valueized = top_valueize (op0);
872 if (!valueized || op0 == valueized)
873 return false;
874 res_op->set_op (TREE_CODE (op0), type, valueized);
875 return true;
877 break;
878 case GIMPLE_UNARY_RHS:
880 tree rhs1 = gimple_assign_rhs1 (stmt);
881 bool valueized = false;
882 rhs1 = do_valueize (rhs1, top_valueize, valueized);
883 res_op->set_op (code, type, rhs1);
884 return (gimple_resimplify1 (seq, res_op, valueize)
885 || valueized);
887 case GIMPLE_BINARY_RHS:
889 tree rhs1 = gimple_assign_rhs1 (stmt);
890 tree rhs2 = gimple_assign_rhs2 (stmt);
891 bool valueized = false;
892 rhs1 = do_valueize (rhs1, top_valueize, valueized);
893 rhs2 = do_valueize (rhs2, top_valueize, valueized);
894 res_op->set_op (code, type, rhs1, rhs2);
895 return (gimple_resimplify2 (seq, res_op, valueize)
896 || valueized);
898 case GIMPLE_TERNARY_RHS:
900 bool valueized = false;
901 tree rhs1 = gimple_assign_rhs1 (stmt);
902 /* If this is a [VEC_]COND_EXPR first try to simplify an
903 embedded GENERIC condition. */
904 if (code == COND_EXPR
905 || code == VEC_COND_EXPR)
907 if (COMPARISON_CLASS_P (rhs1))
909 tree lhs = TREE_OPERAND (rhs1, 0);
910 tree rhs = TREE_OPERAND (rhs1, 1);
911 lhs = do_valueize (lhs, top_valueize, valueized);
912 rhs = do_valueize (rhs, top_valueize, valueized);
913 gimple_match_op res_op2 (res_op->cond, TREE_CODE (rhs1),
914 TREE_TYPE (rhs1), lhs, rhs);
915 if ((gimple_resimplify2 (seq, &res_op2, valueize)
916 || valueized)
917 && res_op2.code.is_tree_code ())
919 valueized = true;
920 if (TREE_CODE_CLASS ((enum tree_code) res_op2.code)
921 == tcc_comparison)
922 rhs1 = build2 (res_op2.code, TREE_TYPE (rhs1),
923 res_op2.ops[0], res_op2.ops[1]);
924 else if (res_op2.code == SSA_NAME
925 || res_op2.code == INTEGER_CST
926 || res_op2.code == VECTOR_CST)
927 rhs1 = res_op2.ops[0];
928 else
929 valueized = false;
933 tree rhs2 = gimple_assign_rhs2 (stmt);
934 tree rhs3 = gimple_assign_rhs3 (stmt);
935 rhs1 = do_valueize (rhs1, top_valueize, valueized);
936 rhs2 = do_valueize (rhs2, top_valueize, valueized);
937 rhs3 = do_valueize (rhs3, top_valueize, valueized);
938 res_op->set_op (code, type, rhs1, rhs2, rhs3);
939 return (gimple_resimplify3 (seq, res_op, valueize)
940 || valueized);
942 default:
943 gcc_unreachable ();
945 break;
948 case GIMPLE_CALL:
949 /* ??? This way we can't simplify calls with side-effects. */
950 if (gimple_call_lhs (stmt) != NULL_TREE
951 && gimple_call_num_args (stmt) >= 1
952 && gimple_call_num_args (stmt) <= 5)
954 bool valueized = false;
955 combined_fn cfn;
956 if (gimple_call_internal_p (stmt))
957 cfn = as_combined_fn (gimple_call_internal_fn (stmt));
958 else
960 tree fn = gimple_call_fn (stmt);
961 if (!fn)
962 return false;
964 fn = do_valueize (fn, top_valueize, valueized);
965 if (TREE_CODE (fn) != ADDR_EXPR
966 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
967 return false;
969 tree decl = TREE_OPERAND (fn, 0);
970 if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
971 || !gimple_builtin_call_types_compatible_p (stmt, decl))
972 return false;
974 cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
977 unsigned int num_args = gimple_call_num_args (stmt);
978 res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
979 for (unsigned i = 0; i < num_args; ++i)
981 tree arg = gimple_call_arg (stmt, i);
982 res_op->ops[i] = do_valueize (arg, top_valueize, valueized);
984 if (internal_fn_p (cfn)
985 && try_conditional_simplification (as_internal_fn (cfn),
986 res_op, seq, valueize))
987 return true;
988 switch (num_args)
990 case 1:
991 return (gimple_resimplify1 (seq, res_op, valueize)
992 || valueized);
993 case 2:
994 return (gimple_resimplify2 (seq, res_op, valueize)
995 || valueized);
996 case 3:
997 return (gimple_resimplify3 (seq, res_op, valueize)
998 || valueized);
999 case 4:
1000 return (gimple_resimplify4 (seq, res_op, valueize)
1001 || valueized);
1002 case 5:
1003 return (gimple_resimplify5 (seq, res_op, valueize)
1004 || valueized);
1005 default:
1006 gcc_unreachable ();
1009 break;
1011 case GIMPLE_COND:
1013 tree lhs = gimple_cond_lhs (stmt);
1014 tree rhs = gimple_cond_rhs (stmt);
1015 bool valueized = false;
1016 lhs = do_valueize (lhs, top_valueize, valueized);
1017 rhs = do_valueize (rhs, top_valueize, valueized);
1018 res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
1019 return (gimple_resimplify2 (seq, res_op, valueize)
1020 || valueized);
1023 default:
1024 break;
1027 return false;
1031 /* Helper for the autogenerated code, valueize OP. */
1033 inline tree
1034 do_valueize (tree (*valueize)(tree), tree op)
1036 if (valueize && TREE_CODE (op) == SSA_NAME)
1038 tree tem = valueize (op);
1039 if (tem)
1040 return tem;
1042 return op;
1045 /* Helper for the autogenerated code, get at the definition of NAME when
1046 VALUEIZE allows that. */
1048 inline gimple *
1049 get_def (tree (*valueize)(tree), tree name)
1051 if (valueize && ! valueize (name))
1052 return NULL;
1053 return SSA_NAME_DEF_STMT (name);
1056 /* Routine to determine if the types T1 and T2 are effectively
1057 the same for GIMPLE. If T1 or T2 is not a type, the test
1058 applies to their TREE_TYPE. */
1060 static inline bool
1061 types_match (tree t1, tree t2)
1063 if (!TYPE_P (t1))
1064 t1 = TREE_TYPE (t1);
1065 if (!TYPE_P (t2))
1066 t2 = TREE_TYPE (t2);
1068 return types_compatible_p (t1, t2);
1071 /* Return whether T is known to have at most a single use.  For GIMPLE, we
1072    also allow any non-SSA_NAME (i.e. constants) and zero uses to cope with
1073    uses that aren't linked up yet. */
1075 static inline bool
1076 single_use (tree t)
1078 return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
1081 /* Return true if math operations should be canonicalized,
1082 e.g. sqrt(sqrt(x)) -> pow(x, 0.25). */
1084 static inline bool
1085 canonicalize_math_p ()
1087 return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
1090 /* Return true if math operations that are beneficial only after
1091 vectorization should be canonicalized. */
1093 static inline bool
1094 canonicalize_math_after_vectorization_p ()
1096 return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
1099 /* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
1100 As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
1101 is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
1102 where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
1103 will likely be exact, while exp (log (arg0) * arg1) might be not.
1104 Also don't do it if arg1 is phi_res above and cst2 is an exact integer. */
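/* Illustrative instance of the workaround: with arg0 = 2.0,
   arg1 = phi_res + 1.0 and phi_res = PHI <5.0, ...>, cst2 + cst1 = 6.0 is an
   exact integer, so pow (2.0, 6.0) is likely exact and we return false to
   keep the pow form.  */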
1106 static bool
1107 optimize_pow_to_exp (tree arg0, tree arg1)
1109 gcc_assert (TREE_CODE (arg0) == REAL_CST);
1110 if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
1111 return true;
1113 if (TREE_CODE (arg1) != SSA_NAME)
1114 return true;
1116 gimple *def = SSA_NAME_DEF_STMT (arg1);
1117 gphi *phi = dyn_cast <gphi *> (def);
1118 tree cst1 = NULL_TREE;
1119 enum tree_code code = ERROR_MARK;
1120 if (!phi)
1122 if (!is_gimple_assign (def))
1123 return true;
1124 code = gimple_assign_rhs_code (def);
1125 switch (code)
1127 case PLUS_EXPR:
1128 case MINUS_EXPR:
1129 break;
1130 default:
1131 return true;
1133 if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
1134 || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
1135 return true;
1137 cst1 = gimple_assign_rhs2 (def);
1139 phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
1140 if (!phi)
1141 return true;
1144 tree cst2 = NULL_TREE;
1145 int n = gimple_phi_num_args (phi);
1146 for (int i = 0; i < n; i++)
1148 tree arg = PHI_ARG_DEF (phi, i);
1149 if (TREE_CODE (arg) != REAL_CST)
1150 continue;
1151 else if (cst2 == NULL_TREE)
1152 cst2 = arg;
1153 else if (!operand_equal_p (cst2, arg, 0))
1154 return true;
1157 if (cst1 && cst2)
1158 cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
1159 if (cst2
1160 && TREE_CODE (cst2) == REAL_CST
1161 && real_isinteger (TREE_REAL_CST_PTR (cst2),
1162 TYPE_MODE (TREE_TYPE (cst2))))
1163 return false;
1164 return true;