/* Preamble and helpers for the autogenerated gimple-match.c file.
   Copyright (C) 2014-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "insn-config.h"
#include "gimple-match.h"

/* Forward declarations of the private auto-generated matchers.
   They expect valueized operands in canonical order and do not
   perform simplification of all-constant operands.  */

static bool gimple_simplify (code_helper *, tree *,
                             gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree);
static bool gimple_simplify (code_helper *, tree *,
                             gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree);
static bool gimple_simplify (code_helper *, tree *,
                             gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree);

/* Return whether T is a constant that we'll dispatch to fold to
   evaluate fully constant expressions.  */
static bool
constant_for_folding (tree t)
{
  return (CONSTANT_CLASS_P (t)
          /* The following is only interesting to string builtins.  */
          || (TREE_CODE (t) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
}

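/* For illustration (an observation about callers, not part of the original
   sources): an INTEGER_CST or REAL_CST satisfies CONSTANT_CLASS_P directly,
   while the address of a string literal such as &"abc" is an ADDR_EXPR
   wrapping a STRING_CST and is accepted only because string builtins like
   strlen can still fold it to a constant.  */
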
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */
bool
gimple_resimplify1 (gimple_seq *seq,
                    code_helper *res_code, tree type, tree *res_ops,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
        tem = const_unop (*res_code, type, res_ops[0]);
      else
        {
          tree decl = builtin_decl_implicit (*res_code);
          if (decl)
            {
              tem = fold_builtin_n (UNKNOWN_LOCATION, decl, res_ops, 1, false);
              if (tem)
                {
                  /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
                  STRIP_NOPS (tem);
                  tem = fold_convert (type, tem);
                }
            }
        }
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          res_ops[0] = tem;
          res_ops[1] = NULL_TREE;
          res_ops[2] = NULL_TREE;
          *res_code = TREE_CODE (res_ops[0]);
          return true;
        }
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
                       *res_code, type, res_ops[0]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return false;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */
bool
gimple_resimplify2 (gimple_seq *seq,
                    code_helper *res_code, tree type, tree *res_ops,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
        tem = const_binop (*res_code, type, res_ops[0], res_ops[1]);
      else
        {
          tree decl = builtin_decl_implicit (*res_code);
          if (decl)
            {
              tem = fold_builtin_n (UNKNOWN_LOCATION, decl, res_ops, 2, false);
              if (tem)
                {
                  /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
                  STRIP_NOPS (tem);
                  tem = fold_convert (type, tem);
                }
            }
        }
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          res_ops[0] = tem;
          res_ops[1] = NULL_TREE;
          res_ops[2] = NULL_TREE;
          *res_code = TREE_CODE (res_ops[0]);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_code->is_tree_code ()
      && (TREE_CODE_CLASS ((enum tree_code) *res_code) == tcc_comparison
          || commutative_tree_code (*res_code))
      && tree_swap_operands_p (res_ops[0], res_ops[1], false))
    {
      std::swap (res_ops[0], res_ops[1]);
      if (TREE_CODE_CLASS ((enum tree_code) *res_code) == tcc_comparison)
        *res_code = swap_tree_comparison (*res_code);
      canonicalized = true;
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
                       *res_code, type, res_ops[0], res_ops[1]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return canonicalized;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */
bool
gimple_resimplify3 (gimple_seq *seq,
                    code_helper *res_code, tree type, tree *res_ops,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1])
      && constant_for_folding (res_ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
        tem = fold_ternary/*_to_constant */ (*res_code, type, res_ops[0],
                                             res_ops[1], res_ops[2]);
      else
        {
          tree decl = builtin_decl_implicit (*res_code);
          if (decl)
            {
              tem = fold_builtin_n (UNKNOWN_LOCATION, decl, res_ops, 3, false);
              if (tem)
                {
                  /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
                  STRIP_NOPS (tem);
                  tem = fold_convert (type, tem);
                }
            }
        }
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          res_ops[0] = tem;
          res_ops[1] = NULL_TREE;
          res_ops[2] = NULL_TREE;
          *res_code = TREE_CODE (res_ops[0]);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_code->is_tree_code ()
      && commutative_ternary_tree_code (*res_code)
      && tree_swap_operands_p (res_ops[0], res_ops[1], false))
    {
      std::swap (res_ops[0], res_ops[1]);
      canonicalized = true;
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
                       *res_code, type,
                       res_ops[0], res_ops[1], res_ops[2]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return canonicalized;
}

/* If expressions with CODE go as single-rhs in GIMPLE, build the
   corresponding GENERIC tree for that expression into *OP0.  */
void
maybe_build_generic_op (enum tree_code code, tree type,
                        tree *op0, tree op1, tree op2)
{
  switch (code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      *op0 = build1 (code, type, *op0);
      break;
    case BIT_FIELD_REF:
      *op0 = build3 (code, type, *op0, op1, op2);
      break;
    default:;
    }
}

/* Push the exploded expression described by RCODE, TYPE and OPS
   as a statement to SEQ if necessary and return a gimple value
   denoting the value of the expression.  If RES is not NULL
   then the result will be always RES and even gimple values are
   pushed to SEQ.  */
tree
maybe_push_res_to_seq (code_helper rcode, tree type, tree *ops,
                       gimple_seq *seq, tree res)
{
  if (rcode.is_tree_code ())
    {
      if (!res
          && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
              || ((tree_code) rcode) == ADDR_EXPR)
          && is_gimple_val (ops[0]))
        return ops[0];
      if (!seq)
        return NULL_TREE;
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      if ((TREE_CODE (ops[0]) == SSA_NAME
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
          || (ops[1]
              && TREE_CODE (ops[1]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
          || (ops[2]
              && TREE_CODE (ops[2]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
        return NULL_TREE;
      if (!res)
        res = make_ssa_name (type);
      maybe_build_generic_op (rcode, type, &ops[0], ops[1], ops[2]);
      gimple new_stmt = gimple_build_assign (res, rcode,
                                             ops[0], ops[1], ops[2]);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
  else
    {
      if (!seq)
        return NULL_TREE;
      tree decl = builtin_decl_implicit (rcode);
      if (!decl)
        return NULL_TREE;
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      unsigned nargs;
      for (nargs = 0; nargs < 3; ++nargs)
        {
          if (!ops[nargs])
            break;
          if (TREE_CODE (ops[nargs]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[nargs]))
            return NULL_TREE;
        }
      gcc_assert (nargs != 0);
      if (!res)
        res = make_ssa_name (type);
      gimple new_stmt = gimple_build_call (decl, nargs, ops[0], ops[1], ops[2]);
      gimple_call_set_lhs (new_stmt, res);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
}

/* Public API overloads follow for operation being tree_code or
   built_in_function and for one to three operands or arguments.
   They return NULL_TREE if nothing could be simplified or
   the resulting simplified value with parts pushed to SEQ.
   If SEQ is NULL then if the simplification needs to create
   new stmts it will fail.  If VALUEIZE is non-NULL then all
   SSA names will be valueized using that hook prior to
   applying simplifications.  */
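
/* A hypothetical usage sketch (an illustration, not part of the original
   sources): given an SSA name X of type TYPE, a caller can ask for a
   simplified form of X + 0 while refusing to emit new statements by
   passing a NULL sequence and no valueization hook:

     tree val = gimple_simplify (PLUS_EXPR, type, x, build_zero_cst (type),
                                 NULL, NULL);

   If a match.pd rule applies, VAL is X itself; with a non-NULL gimple_seq
   any statements the simplification needs would be appended there via
   maybe_push_res_to_seq.  */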
tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0))
    {
      tree res = const_unop (code, type, op0);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        code, type, op0))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1))
    {
      tree res = const_binop (code, type, op0, op1);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if ((commutative_tree_code (code)
       || TREE_CODE_CLASS (code) == tcc_comparison)
      && tree_swap_operands_p (op0, op1, false))
    {
      std::swap (op0, op1);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
        code = swap_tree_comparison (code);
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        code, type, op0, op1))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

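/* For illustration (not from the original sources): because of the
   canonicalization above, a request such as

     gimple_simplify (LT_EXPR, boolean_type_node, five, x, NULL, NULL);

   where FIVE is an INTEGER_CST and X an SSA name is matched as X > 5 via
   std::swap and swap_tree_comparison, so match.pd patterns only need to
   handle the canonical operand order with constants second.  */
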
tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1, tree op2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1)
      && constant_for_folding (op2))
    {
      tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1, false))
    std::swap (op0, op1);

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        code, type, op0, op1, op2))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin function with one argument.  */
tree
gimple_simplify (enum built_in_function fn, tree type,
                 tree arg0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0))
    {
      tree decl = builtin_decl_implicit (fn);
      if (decl)
        {
          tree res = fold_builtin_n (UNKNOWN_LOCATION, decl, &arg0, 1, false);
          if (res)
            {
              /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (res);
              res = fold_convert (type, res);
              if (CONSTANT_CLASS_P (res))
                return res;
            }
        }
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        fn, type, arg0))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin function with two arguments.  */
tree
gimple_simplify (enum built_in_function fn, tree type,
                 tree arg0, tree arg1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1))
    {
      tree decl = builtin_decl_implicit (fn);
      if (decl)
        {
          tree args[2];
          args[0] = arg0;
          args[1] = arg1;
          tree res = fold_builtin_n (UNKNOWN_LOCATION, decl, args, 2, false);
          if (res)
            {
              /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (res);
              res = fold_convert (type, res);
              if (CONSTANT_CLASS_P (res))
                return res;
            }
        }
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        fn, type, arg0, arg1))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin function with three arguments.  */
tree
gimple_simplify (enum built_in_function fn, tree type,
                 tree arg0, tree arg1, tree arg2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1)
      && constant_for_folding (arg2))
    {
      tree decl = builtin_decl_implicit (fn);
      if (decl)
        {
          tree args[3];
          args[0] = arg0;
          args[1] = arg1;
          args[2] = arg2;
          tree res = fold_builtin_n (UNKNOWN_LOCATION, decl, args, 3, false);
          if (res)
            {
              /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (res);
              res = fold_convert (type, res);
              if (CONSTANT_CLASS_P (res))
                return res;
            }
        }
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        fn, type, arg0, arg1, arg2))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
   VALUEIZED to true if valueization changed OP.  */
static inline tree
do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem && tem != op)
        {
          op = tem;
          valueized = true;
        }
    }
  return op;
}

/* The main STMT based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  */
bool
gimple_simplify (gimple stmt,
                 code_helper *rcode, tree *ops,
                 gimple_seq *seq,
                 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree type = TREE_TYPE (gimple_assign_lhs (stmt));
        switch (gimple_assign_rhs_class (stmt))
          {
          case GIMPLE_SINGLE_RHS:
            if (code == REALPART_EXPR
                || code == IMAGPART_EXPR
                || code == VIEW_CONVERT_EXPR)
              {
                tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                *rcode = code;
                ops[0] = op0;
                return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
                        || valueized);
              }
            else if (code == BIT_FIELD_REF)
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                tree op0 = TREE_OPERAND (rhs1, 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                *rcode = code;
                ops[0] = op0;
                ops[1] = TREE_OPERAND (rhs1, 1);
                ops[2] = TREE_OPERAND (rhs1, 2);
                return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
                        || valueized);
              }
            else if (code == SSA_NAME
                     && top_valueize)
              {
                tree op0 = gimple_assign_rhs1 (stmt);
                tree valueized = top_valueize (op0);
                if (!valueized || op0 == valueized)
                  return false;
                ops[0] = valueized;
                *rcode = TREE_CODE (op0);
                return true;
              }
            break;
          case GIMPLE_UNARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              *rcode = code;
              ops[0] = rhs1;
              return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
                      || valueized);
            }
          case GIMPLE_BINARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              tree rhs2 = gimple_assign_rhs2 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              *rcode = code;
              ops[0] = rhs1;
              ops[1] = rhs2;
              return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
                      || valueized);
            }
          case GIMPLE_TERNARY_RHS:
            {
              bool valueized = false;
              tree rhs1 = gimple_assign_rhs1 (stmt);
              /* If this is a [VEC_]COND_EXPR first try to simplify an
                 embedded GENERIC condition.  */
              if (code == COND_EXPR
                  || code == VEC_COND_EXPR)
                {
                  if (COMPARISON_CLASS_P (rhs1))
                    {
                      tree lhs = TREE_OPERAND (rhs1, 0);
                      tree rhs = TREE_OPERAND (rhs1, 1);
                      lhs = do_valueize (lhs, top_valueize, valueized);
                      rhs = do_valueize (rhs, top_valueize, valueized);
                      code_helper rcode2 = TREE_CODE (rhs1);
                      tree ops2[3] = {};
                      ops2[0] = lhs;
                      ops2[1] = rhs;
                      if ((gimple_resimplify2 (seq, &rcode2, TREE_TYPE (rhs1),
                                               ops2, valueize)
                           || valueized)
                          && rcode2.is_tree_code ())
                        {
                          valueized = true;
                          if (TREE_CODE_CLASS ((enum tree_code)rcode2)
                              == tcc_comparison)
                            rhs1 = build2 (rcode2, TREE_TYPE (rhs1),
                                           ops2[0], ops2[1]);
                          else if (rcode2 == SSA_NAME
                                   || rcode2 == INTEGER_CST)
                            rhs1 = ops2[0];
                          else
                            valueized = false;
                        }
                    }
                }
              tree rhs2 = gimple_assign_rhs2 (stmt);
              tree rhs3 = gimple_assign_rhs3 (stmt);
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              rhs3 = do_valueize (rhs3, top_valueize, valueized);
              *rcode = code;
              ops[0] = rhs1;
              ops[1] = rhs2;
              ops[2] = rhs3;
              return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
                      || valueized);
            }
          default:
            gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_CALL:
      /* ??? This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
          && gimple_call_num_args (stmt) >= 1
          && gimple_call_num_args (stmt) <= 3)
        {
          tree fn = gimple_call_fn (stmt);
          /* ??? Internal function support missing.  */
          if (!fn)
            return false;
          bool valueized = false;
          fn = do_valueize (fn, top_valueize, valueized);
          if (TREE_CODE (fn) != ADDR_EXPR
              || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
            return false;

          tree decl = TREE_OPERAND (fn, 0);
          if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
              || !builtin_decl_implicit (DECL_FUNCTION_CODE (decl))
              || !gimple_builtin_call_types_compatible_p (stmt, decl))
            return false;

          tree type = TREE_TYPE (gimple_call_lhs (stmt));
          *rcode = DECL_FUNCTION_CODE (decl);
          for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              ops[i] = do_valueize (arg, top_valueize, valueized);
            }
          switch (gimple_call_num_args (stmt))
            {
            case 1:
              return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
                      || valueized);
            case 2:
              return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
                      || valueized);
            case 3:
              return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
                      || valueized);
            default:
              gcc_unreachable ();
            }
        }
      break;

    case GIMPLE_COND:
      {
        tree lhs = gimple_cond_lhs (stmt);
        tree rhs = gimple_cond_rhs (stmt);
        bool valueized = false;
        lhs = do_valueize (lhs, top_valueize, valueized);
        rhs = do_valueize (rhs, top_valueize, valueized);
        *rcode = gimple_cond_code (stmt);
        ops[0] = lhs;
        ops[1] = rhs;
        return (gimple_resimplify2 (seq, rcode,
                                    boolean_type_node, ops, valueize)
                || valueized);
      }

    default:
      break;
    }

  return false;
}

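/* A hypothetical usage sketch (an illustration, not part of the original
   sources) mirroring what fold_stmt-style callers do: run the matcher on a
   statement and, if anything changed, materialize the simplified value.

     code_helper rcode;
     tree ops[3] = {};
     gimple_seq tail = NULL;
     if (gimple_simplify (stmt, &rcode, ops, &tail, NULL, NULL))
       {
         tree lhs_type = TREE_TYPE (gimple_get_lhs (stmt));
         tree val = maybe_push_res_to_seq (rcode, lhs_type, ops, &tail);
         ...  replace uses of the old lhs with VAL and insert TAIL before
              the statement  ...
       }
 */
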
/* Helper for the autogenerated code, valueize OP.  */
static inline tree
do_valueize (tree (*valueize)(tree), tree op)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    return valueize (op);
  return op;
}

/* Routine to determine if the types T1 and T2 are effectively
   the same for GIMPLE.  If T1 or T2 is not a type, the test
   applies to their TREE_TYPE.  */
static inline bool
types_match (tree t1, tree t2)
{
  if (!TYPE_P (t1))
    t1 = TREE_TYPE (t1);
  if (!TYPE_P (t2))
    t2 = TREE_TYPE (t2);

  return types_compatible_p (t1, t2);
}

/* Return whether T has a single use.  For GIMPLE, we also allow any
   non-SSA_NAME (i.e. constants) and zero uses to cope with uses
   that aren't linked up yet.  */
static inline bool
single_use (tree t)
{
  return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
}

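/* For illustration (an assumption about the generated users of this
   predicate, not from the original sources): match.pd patterns written with
   the :s modifier are emitted with guards of the form

     if (single_use (captured_op))
       ... apply the transform ...

   so a transform that would duplicate a computation only fires when its
   intermediate value is not needed elsewhere; constants and SSA names with
   no linked-up uses always pass the check.  */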