[official-gcc.git] / gcc / gimple-match-head.c
blob: 2996bade301e09e433d9c415348a5b3fba882482

/* Preamble and helpers for the autogenerated gimple-match.c file.
   Copyright (C) 2014-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "cgraph.h"
#include "vec-perm-indices.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "calls.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "gimple-match.h"
#include "tree-pass.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimplify.h"
#include "optabs-tree.h"
#include "tree-eh.h"
#include "dbgcnt.h"

/* Forward declarations of the private auto-generated matchers.
   They expect valueized operands in canonical order and do not
   perform simplification of all-constant operands.  */
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree, tree);
static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree, tree, tree);
static bool gimple_resimplify1 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));
static bool gimple_resimplify2 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));
static bool gimple_resimplify3 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));
static bool gimple_resimplify4 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));
static bool gimple_resimplify5 (gimple_seq *, gimple_match_op *,
                                tree (*)(tree));

const unsigned int gimple_match_op::MAX_NUM_OPS;

/* Return whether T is a constant that we'll dispatch to fold to
   evaluate fully constant expressions.  */

static inline bool
constant_for_folding (tree t)
{
  return (CONSTANT_CLASS_P (t)
          /* The following is only interesting to string builtins.  */
          || (TREE_CODE (t) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
}
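
/* For example, INTEGER_CSTs, REAL_CSTs and VECTOR_CSTs are handed to the
   constant folders, and so is the address of a string literal (an
   ADDR_EXPR of a STRING_CST), which string builtins like strlen can
   evaluate.  Plain SSA names and addresses of ordinary decls do not
   count as constants here.  */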

/* Try to convert conditional operation ORIG_OP into an IFN_COND_*
   operation.  Return true on success, storing the new operation in NEW_OP.  */

static bool
convert_conditional_op (gimple_match_op *orig_op,
                        gimple_match_op *new_op)
{
  internal_fn ifn;
  if (orig_op->code.is_tree_code ())
    ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
  else
    {
      combined_fn cfn = orig_op->code;
      if (!internal_fn_p (cfn))
        return false;
      ifn = get_conditional_internal_fn (as_internal_fn (cfn));
    }
  if (ifn == IFN_LAST)
    return false;
  unsigned int num_ops = orig_op->num_ops;
  new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
  new_op->ops[0] = orig_op->cond.cond;
  for (unsigned int i = 0; i < num_ops; ++i)
    new_op->ops[i + 1] = orig_op->ops[i];
  tree else_value = orig_op->cond.else_value;
  if (!else_value)
    else_value = targetm.preferred_else_value (ifn, orig_op->type,
                                               num_ops, orig_op->ops);
  new_op->ops[num_ops + 1] = else_value;
  return true;
}
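
/* As an illustration of the conversion above: a conditional (plus A B)
   with condition COND and else value ELSE becomes

     IFN_COND_ADD (COND, A, B, ELSE)

   i.e. the condition is prepended to the original operands and the else
   value appended; when no else value was recorded, the target's
   preferred_else_value hook supplies one.  */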

/* RES_OP is the result of a simplification.  If it is conditional,
   try to replace it with the equivalent UNCOND form, such as an
   IFN_COND_* call or a VEC_COND_EXPR.  Also try to resimplify the
   result of the replacement if appropriate, adding any new statements to
   SEQ and using VALUEIZE as the valueization function.  Return true if
   this resimplification occurred and resulted in at least one change.  */

static bool
maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
                                 tree (*valueize) (tree))
{
  if (!res_op->cond.cond)
    return false;

  if (!res_op->cond.else_value
      && res_op->code.is_tree_code ())
    {
      /* The "else" value doesn't matter.  If the "then" value is a
         gimple value, just use it unconditionally.  This isn't a
         simplification in itself, since there was no operation to
         build in the first place.  */
      if (gimple_simplified_result_is_gimple_val (res_op))
        {
          res_op->cond.cond = NULL_TREE;
          return false;
        }

      /* Likewise if the operation would not trap.  */
      bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
                          && TYPE_OVERFLOW_TRAPS (res_op->type));
      if (!operation_could_trap_p ((tree_code) res_op->code,
                                   FLOAT_TYPE_P (res_op->type),
                                   honor_trapv, res_op->op_or_null (1)))
        {
          res_op->cond.cond = NULL_TREE;
          return false;
        }
    }

  /* If the "then" value is a gimple value and the "else" value matters,
     create a VEC_COND_EXPR between them, then see if it can be further
     simplified.  */
  gimple_match_op new_op;
  if (res_op->cond.else_value
      && VECTOR_TYPE_P (res_op->type)
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      new_op.set_op (VEC_COND_EXPR, res_op->type,
                     res_op->cond.cond, res_op->ops[0],
                     res_op->cond.else_value);
      *res_op = new_op;
      return gimple_resimplify3 (seq, res_op, valueize);
    }

  /* Otherwise try rewriting the operation as an IFN_COND_* call.
     Again, this isn't a simplification in itself, since it's what
     RES_OP already described.  */
  if (convert_conditional_op (res_op, &new_op))
    *res_op = new_op;

  return false;
}
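
/* For instance, if the simplified "then" value is already a gimple value
   _5, the condition is _1 and the else value E matters for a vector
   type, the result is rewritten as

     VEC_COND_EXPR <_1, _5, E>

   and handed to gimple_resimplify3; otherwise the conditional form is
   re-expressed as an IFN_COND_* call via convert_conditional_op.  */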

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
        {
          tree_code code = res_op->code;
          if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
              && TREE_CODE_LENGTH (code) == 1)
            tem = const_unop (res_op->code, res_op->type, res_op->ops[0]);
        }
      else
        tem = fold_const_call (combined_fn (res_op->code), res_op->type,
                               res_op->ops[0]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_op->set_value (tem);
          maybe_resimplify_conditional_op (seq, res_op, valueize);
          return true;
        }
    }

  /* Limit recursion, there are cases like PR80887 and others, for
     example when value-numbering presents us with unfolded expressions
     that we are really not prepared to handle without eventual
     oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
     itself as available expression.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type, res_op->ops[0]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
        {
          tree_code code = res_op->code;
          if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
              && TREE_CODE_LENGTH (code) == 2)
            tem = const_binop (res_op->code, res_op->type,
                               res_op->ops[0], res_op->ops[1]);
        }
      else
        tem = fold_const_call (combined_fn (res_op->code), res_op->type,
                               res_op->ops[0], res_op->ops[1]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_op->set_value (tem);
          maybe_resimplify_conditional_op (seq, res_op, valueize);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_op->code.is_tree_code ()
      && (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
          || commutative_tree_code (res_op->code))
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison)
        res_op->code = swap_tree_comparison (res_op->code);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1])
      && constant_for_folding (res_op->ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
        {
          tree_code code = res_op->code;
          if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
              && TREE_CODE_LENGTH (code) == 3)
            tem = fold_ternary/*_to_constant*/ (res_op->code, res_op->type,
                                                res_op->ops[0], res_op->ops[1],
                                                res_op->ops[2]);
        }
      else
        tem = fold_const_call (combined_fn (res_op->code), res_op->type,
                               res_op->ops[0], res_op->ops[1], res_op->ops[2]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          if (TREE_OVERFLOW_P (tem))
            tem = drop_tree_overflow (tem);
          res_op->set_value (tem);
          maybe_resimplify_conditional_op (seq, res_op, valueize);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_op->code.is_tree_code ()
      && commutative_ternary_tree_code (res_op->code)
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  /* No constant folding is defined for four-operand functions.  */

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
        fprintf (dump_file, "Aborting expression simplification due to "
                 "deep recursion\n");
      return false;
    }

  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2],
                       res_op->ops[3]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
                    tree (*valueize)(tree))
{
  /* No constant folding is defined for five-operand functions.  */

  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
                       res_op->code, res_op->type,
                       res_op->ops[0], res_op->ops[1], res_op->ops[2],
                       res_op->ops[3], res_op->ops[4]))
    {
      *res_op = res_op2;
      return true;
    }

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}

/* Match and simplify the toplevel valueized operation THIS.
   Replaces THIS with a simplified and/or canonicalized result and
   returns whether any change was made.  */

bool
gimple_match_op::resimplify (gimple_seq *seq, tree (*valueize)(tree))
{
  switch (num_ops)
    {
    case 1:
      return gimple_resimplify1 (seq, this, valueize);
    case 2:
      return gimple_resimplify2 (seq, this, valueize);
    case 3:
      return gimple_resimplify3 (seq, this, valueize);
    case 4:
      return gimple_resimplify4 (seq, this, valueize);
    case 5:
      return gimple_resimplify5 (seq, this, valueize);
    default:
      gcc_unreachable ();
    }
}

/* If in GIMPLE the operation described by RES_OP should be single-rhs,
   build a GENERIC tree for that expression and update RES_OP accordingly.  */

void
maybe_build_generic_op (gimple_match_op *res_op)
{
  tree_code code = (tree_code) res_op->code;
  tree val;
  switch (code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      val = build1 (code, res_op->type, res_op->ops[0]);
      res_op->set_value (val);
      break;
    case BIT_FIELD_REF:
      val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
                    res_op->ops[2]);
      REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
      res_op->set_value (val);
      break;
    default:;
    }
}
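
/* E.g. a REALPART_EXPR result becomes the single-rhs GENERIC tree
   REALPART_EXPR <op0>, and a BIT_FIELD_REF result becomes
   BIT_FIELD_REF <op0, size, position> with REF_REVERSE_STORAGE_ORDER
   copied from the match operand, so the assignment built in
   maybe_push_res_to_seq is a valid GIMPLE_SINGLE_RHS.  */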

tree (*mprts_hook) (gimple_match_op *);

/* Try to build RES_OP, which is known to be a call to FN.  Return null
   if the target doesn't support the function.  */

static gcall *
build_call_internal (internal_fn fn, gimple_match_op *res_op)
{
  if (direct_internal_fn_p (fn))
    {
      tree_pair types = direct_internal_fn_types (fn, res_op->type,
                                                  res_op->ops);
      if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
        return NULL;
    }
  return gimple_build_call_internal (fn, res_op->num_ops,
                                     res_op->op_or_null (0),
                                     res_op->op_or_null (1),
                                     res_op->op_or_null (2),
                                     res_op->op_or_null (3),
                                     res_op->op_or_null (4));
}

/* Push the exploded expression described by RES_OP as a statement to
   SEQ if necessary and return a gimple value denoting the value of the
   expression.  If RES is not NULL then the result will be always RES
   and even gimple values are pushed to SEQ.  */

tree
maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
{
  tree *ops = res_op->ops;
  unsigned num_ops = res_op->num_ops;

  /* The caller should have converted conditional operations into an UNCOND
     form and resimplified as appropriate.  The conditional form only
     survives this far if that conversion failed.  */
  if (res_op->cond.cond)
    return NULL_TREE;

  if (res_op->code.is_tree_code ())
    {
      if (!res
          && gimple_simplified_result_is_gimple_val (res_op))
        return ops[0];
      if (mprts_hook)
        {
          tree tem = mprts_hook (res_op);
          if (tem)
            return tem;
        }
    }

  if (!seq)
    return NULL_TREE;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
        && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
      return NULL_TREE;

  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
        return NULL_TREE;

  if (res_op->code.is_tree_code ())
    {
      if (!res)
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (res_op->type);
          else
            res = create_tmp_reg (res_op->type);
        }
      maybe_build_generic_op (res_op);
      gimple *new_stmt = gimple_build_assign (res, res_op->code,
                                              res_op->op_or_null (0),
                                              res_op->op_or_null (1),
                                              res_op->op_or_null (2));
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
  else
    {
      gcc_assert (num_ops != 0);
      combined_fn fn = res_op->code;
      gcall *new_stmt = NULL;
      if (internal_fn_p (fn))
        {
          /* Generate the given function if we can.  */
          internal_fn ifn = as_internal_fn (fn);
          new_stmt = build_call_internal (ifn, res_op);
          if (!new_stmt)
            return NULL_TREE;
        }
      else
        {
          /* Find the function we want to call.  */
          tree decl = builtin_decl_implicit (as_builtin_fn (fn));
          if (!decl)
            return NULL;

          /* We can't and should not emit calls to non-const functions.  */
          if (!(flags_from_decl_or_type (decl) & ECF_CONST))
            return NULL;

          new_stmt = gimple_build_call (decl, num_ops,
                                        res_op->op_or_null (0),
                                        res_op->op_or_null (1),
                                        res_op->op_or_null (2),
                                        res_op->op_or_null (3),
                                        res_op->op_or_null (4));
        }
      if (!res)
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (res_op->type);
          else
            res = create_tmp_reg (res_op->type);
        }
      gimple_call_set_lhs (new_stmt, res);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
}
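
/* A rough sketch of the two branches above: a tree-code result such as
   (plus _1 _2) ends up as the assignment

     res = _1 + _2;

   while a combined_fn result such as CFN_BUILT_IN_SQRT or an internal
   function becomes a call statement with RES as its lhs; either way the
   new statement is appended to SEQ without updating it.  */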

/* Public API overloads follow for operation being tree_code or
   built_in_function and for one to three operands or arguments.
   They return NULL_TREE if nothing could be simplified or
   the resulting simplified value with parts pushed to SEQ.
   If SEQ is NULL then if the simplification needs to create
   new stmts it will fail.  If VALUEIZE is non-NULL then all
   SSA names will be valueized using that hook prior to
   applying simplifications.  */

/* Unary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0))
    {
      tree res = const_unop (code, type, op0);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}
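
/* A typical use of these overloads from a pass, with illustrative names
   for the value, the valueization hook and the iterator:

     gimple_seq seq = NULL;
     tree res = gimple_simplify (NEGATE_EXPR, TREE_TYPE (val), val,
                                 &seq, follow);
     if (res)
       gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

   Passing a NULL seq instead restricts the simplification to results
   that need no new statements.  */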

/* Binary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1))
    {
      tree res = const_binop (code, type, op0, op1);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if ((commutative_tree_code (code)
       || TREE_CODE_CLASS (code) == tcc_comparison)
      && tree_swap_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
        code = swap_tree_comparison (code);
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Ternary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1, tree op2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1)
      && constant_for_folding (op2))
    {
      tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1))
    std::swap (op0, op1);

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with one argument.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0))
    {
      tree res = fold_const_call (fn, type, arg0);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with two arguments.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0, tree arg1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1))
    {
      tree res = fold_const_call (fn, type, arg0, arg1);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Builtin or internal function with three arguments.  */

tree
gimple_simplify (combined_fn fn, tree type,
                 tree arg0, tree arg1, tree arg2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1)
      && constant_for_folding (arg2))
    {
      tree res = fold_const_call (fn, type, arg0, arg1, arg2);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}

/* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
   VALUEIZED to true if valueization changed OP.  */

static inline tree
do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem && tem != op)
        {
          op = tem;
          valueized = true;
        }
    }
  return op;
}
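
/* For example, with a hook that maps _3 to the constant 1,
   do_valueize (_3, valueize, valueized) returns 1 and sets VALUEIZED;
   for a non-SSA operand, a NULL hook or a hook returning the name
   itself, OP is returned unchanged and VALUEIZED is left alone.  */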

/* If RES_OP is a call to a conditional internal function, try simplifying
   the associated unconditional operation and using the result to build
   a new conditional operation.  For example, if RES_OP is:

     IFN_COND_ADD (COND, A, B, ELSE)

   try simplifying (plus A B) and using the result to build a replacement
   for the whole IFN_COND_ADD.

   Return true if this approach led to a simplification, otherwise leave
   RES_OP unchanged (and so suitable for other simplifications).  When
   returning true, add any new statements to SEQ and use VALUEIZE as the
   valueization function.

   RES_OP is known to be a call to IFN.  */

static bool
try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
                                gimple_seq *seq, tree (*valueize) (tree))
{
  code_helper op;
  tree_code code = conditional_internal_fn_code (ifn);
  if (code != ERROR_MARK)
    op = code;
  else
    {
      ifn = get_unconditional_internal_fn (ifn);
      if (ifn == IFN_LAST)
        return false;
      op = as_combined_fn (ifn);
    }

  unsigned int num_ops = res_op->num_ops;
  gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
                                              res_op->ops[num_ops - 1]),
                           op, res_op->type, num_ops - 2);

  memcpy (cond_op.ops, res_op->ops + 1, (num_ops - 1) * sizeof *cond_op.ops);
  switch (num_ops - 2)
    {
    case 2:
      if (!gimple_resimplify2 (seq, &cond_op, valueize))
        return false;
      break;
    case 3:
      if (!gimple_resimplify3 (seq, &cond_op, valueize))
        return false;
      break;
    default:
      gcc_unreachable ();
    }
  *res_op = cond_op;
  maybe_resimplify_conditional_op (seq, res_op, valueize);
  return true;
}

/* The main STMT based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  */

bool
gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
                 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree type = TREE_TYPE (gimple_assign_lhs (stmt));
        switch (gimple_assign_rhs_class (stmt))
          {
          case GIMPLE_SINGLE_RHS:
            if (code == REALPART_EXPR
                || code == IMAGPART_EXPR
                || code == VIEW_CONVERT_EXPR)
              {
                tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                res_op->set_op (code, type, op0);
                return (gimple_resimplify1 (seq, res_op, valueize)
                        || valueized);
              }
            else if (code == BIT_FIELD_REF)
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                tree op0 = TREE_OPERAND (rhs1, 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                res_op->set_op (code, type, op0,
                                TREE_OPERAND (rhs1, 1),
                                TREE_OPERAND (rhs1, 2),
                                REF_REVERSE_STORAGE_ORDER (rhs1));
                if (res_op->reverse)
                  return valueized;
                return (gimple_resimplify3 (seq, res_op, valueize)
                        || valueized);
              }
            else if (code == SSA_NAME
                     && top_valueize)
              {
                tree op0 = gimple_assign_rhs1 (stmt);
                tree valueized = top_valueize (op0);
                if (!valueized || op0 == valueized)
                  return false;
                res_op->set_op (TREE_CODE (op0), type, valueized);
                return true;
              }
            break;
          case GIMPLE_UNARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              res_op->set_op (code, type, rhs1);
              return (gimple_resimplify1 (seq, res_op, valueize)
                      || valueized);
            }
          case GIMPLE_BINARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              tree rhs2 = gimple_assign_rhs2 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              res_op->set_op (code, type, rhs1, rhs2);
              return (gimple_resimplify2 (seq, res_op, valueize)
                      || valueized);
            }
          case GIMPLE_TERNARY_RHS:
            {
              bool valueized = false;
              tree rhs1 = gimple_assign_rhs1 (stmt);
              /* If this is a [VEC_]COND_EXPR first try to simplify an
                 embedded GENERIC condition.  */
              if (code == COND_EXPR
                  || code == VEC_COND_EXPR)
                {
                  if (COMPARISON_CLASS_P (rhs1))
                    {
                      tree lhs = TREE_OPERAND (rhs1, 0);
                      tree rhs = TREE_OPERAND (rhs1, 1);
                      lhs = do_valueize (lhs, top_valueize, valueized);
                      rhs = do_valueize (rhs, top_valueize, valueized);
                      gimple_match_op res_op2 (res_op->cond, TREE_CODE (rhs1),
                                               TREE_TYPE (rhs1), lhs, rhs);
                      if ((gimple_resimplify2 (seq, &res_op2, valueize)
                           || valueized)
                          && res_op2.code.is_tree_code ())
                        {
                          valueized = true;
                          if (TREE_CODE_CLASS ((enum tree_code) res_op2.code)
                                == tcc_comparison)
                            rhs1 = build2 (res_op2.code, TREE_TYPE (rhs1),
                                           res_op2.ops[0], res_op2.ops[1]);
                          else if (res_op2.code == SSA_NAME
                                   || res_op2.code == INTEGER_CST
                                   || res_op2.code == VECTOR_CST)
                            rhs1 = res_op2.ops[0];
                          else
                            valueized = false;
                        }
                    }
                }
              tree rhs2 = gimple_assign_rhs2 (stmt);
              tree rhs3 = gimple_assign_rhs3 (stmt);
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              rhs3 = do_valueize (rhs3, top_valueize, valueized);
              res_op->set_op (code, type, rhs1, rhs2, rhs3);
              return (gimple_resimplify3 (seq, res_op, valueize)
                      || valueized);
            }
          default:
            gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_CALL:
      /* ???  This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
          && gimple_call_num_args (stmt) >= 1
          && gimple_call_num_args (stmt) <= 5)
        {
          bool valueized = false;
          combined_fn cfn;
          if (gimple_call_internal_p (stmt))
            cfn = as_combined_fn (gimple_call_internal_fn (stmt));
          else
            {
              tree fn = gimple_call_fn (stmt);
              if (!fn)
                return false;

              fn = do_valueize (fn, top_valueize, valueized);
              if (TREE_CODE (fn) != ADDR_EXPR
                  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
                return false;

              tree decl = TREE_OPERAND (fn, 0);
              if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
                  || !gimple_builtin_call_types_compatible_p (stmt, decl))
                return false;

              cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
            }

          unsigned int num_args = gimple_call_num_args (stmt);
          res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
          for (unsigned i = 0; i < num_args; ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              res_op->ops[i] = do_valueize (arg, top_valueize, valueized);
            }
          if (internal_fn_p (cfn)
              && try_conditional_simplification (as_internal_fn (cfn),
                                                 res_op, seq, valueize))
            return true;
          switch (num_args)
            {
            case 1:
              return (gimple_resimplify1 (seq, res_op, valueize)
                      || valueized);
            case 2:
              return (gimple_resimplify2 (seq, res_op, valueize)
                      || valueized);
            case 3:
              return (gimple_resimplify3 (seq, res_op, valueize)
                      || valueized);
            case 4:
              return (gimple_resimplify4 (seq, res_op, valueize)
                      || valueized);
            case 5:
              return (gimple_resimplify5 (seq, res_op, valueize)
                      || valueized);
            default:
              gcc_unreachable ();
            }
        }
      break;

    case GIMPLE_COND:
      {
        tree lhs = gimple_cond_lhs (stmt);
        tree rhs = gimple_cond_rhs (stmt);
        bool valueized = false;
        lhs = do_valueize (lhs, top_valueize, valueized);
        rhs = do_valueize (rhs, top_valueize, valueized);
        res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
        return (gimple_resimplify2 (seq, res_op, valueize)
                || valueized);
      }

    default:
      break;
    }

  return false;
}
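
/* For example, for the assignment _4 = _3 * 8 with a valueization hook
   that maps _3 to 4, the GIMPLE_ASSIGN case above builds the match
   operation (mult 4 8), gimple_resimplify2 folds it to the constant 32,
   and a caller such as fold_stmt can then rewrite the statement.  */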

/* Helper for the autogenerated code, valueize OP.  */

inline tree
do_valueize (tree (*valueize)(tree), tree op)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem)
        return tem;
    }
  return op;
}

/* Helper for the autogenerated code, get at the definition of NAME when
   VALUEIZE allows that.  */

inline gimple *
get_def (tree (*valueize)(tree), tree name)
{
  if (valueize && ! valueize (name))
    return NULL;
  return SSA_NAME_DEF_STMT (name);
}

/* Routine to determine if the types T1 and T2 are effectively
   the same for GIMPLE.  If T1 or T2 is not a type, the test
   applies to their TREE_TYPE.  */

static inline bool
types_match (tree t1, tree t2)
{
  if (!TYPE_P (t1))
    t1 = TREE_TYPE (t1);
  if (!TYPE_P (t2))
    t2 = TREE_TYPE (t2);

  return types_compatible_p (t1, t2);
}

/* Return whether T has a single use.  For GIMPLE, we also allow any
   non-SSA_NAME (i.e. constants) and zero uses to cope with uses
   that aren't linked up yet.  */

static inline bool
single_use (tree t)
{
  return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
}

/* Return true if math operations should be canonicalized,
   e.g. sqrt(sqrt(x)) -> pow(x, 0.25).  */

static inline bool
canonicalize_math_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
}

/* Return true if math operations that are beneficial only after
   vectorization should be canonicalized.  */

static inline bool
canonicalize_math_after_vectorization_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
}

/* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
   As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
   is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
   where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
   will likely be exact, while exp (log (arg0) * arg1) might be not.
   Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */

static bool
optimize_pow_to_exp (tree arg0, tree arg1)
{
  gcc_assert (TREE_CODE (arg0) == REAL_CST);
  if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
    return true;

  if (TREE_CODE (arg1) != SSA_NAME)
    return true;

  gimple *def = SSA_NAME_DEF_STMT (arg1);
  gphi *phi = dyn_cast <gphi *> (def);
  tree cst1 = NULL_TREE;
  enum tree_code code = ERROR_MARK;
  if (!phi)
    {
      if (!is_gimple_assign (def))
        return true;
      code = gimple_assign_rhs_code (def);
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          break;
        default:
          return true;
        }
      if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
          || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
        return true;

      cst1 = gimple_assign_rhs2 (def);

      phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
      if (!phi)
        return true;
    }

  tree cst2 = NULL_TREE;
  int n = gimple_phi_num_args (phi);
  for (int i = 0; i < n; i++)
    {
      tree arg = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (arg) != REAL_CST)
        continue;
      else if (cst2 == NULL_TREE)
        cst2 = arg;
      else if (!operand_equal_p (cst2, arg, 0))
        return true;
    }

  if (cst1 && cst2)
    cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
  if (cst2
      && TREE_CODE (cst2) == REAL_CST
      && real_isinteger (TREE_REAL_CST_PTR (cst2),
                         TYPE_MODE (TREE_TYPE (cst2))))
    return false;
  return true;
}
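
/* The guarded situation looks roughly like this (illustrative GIMPLE):

     # phi_res = PHI <5.0 (bb1), x_7 (bb2)>
     arg1 = phi_res + 1.0;
     res = pow (2.0, arg1);

   Here cst2 (5.0) plus cst1 (1.0) is the exact integer 6.0, so on the
   path from bb1 pow (2.0, arg1) is exact while exp (log (2.0) * arg1)
   need not be, and the function returns false to keep the pow form.  */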

/* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
   is another division can be optimized.  Don't optimize if INNER_DIV
   is used in a TRUNC_MOD_EXPR with DIVISOR as second operand.  */

static bool
optimize_successive_divisions_p (tree divisor, tree inner_div)
{
  if (!gimple_in_ssa_p (cfun))
    return false;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, inner_div)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (!is_gimple_assign (use_stmt)
          || gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR
          || !operand_equal_p (gimple_assign_rhs2 (use_stmt), divisor, 0))
        continue;
      return false;
    }
  return true;
}
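
/* The check above keeps sequences like

     tem = a / b;
     res = tem / c;
     rem = tem % c;

   in their original form: res and rem can then be computed from tem with
   a single combined division/modulo by c, which rewriting res as
   a / (b * c) would defeat, so false is returned for them.  */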