/* Preamble and helpers for the autogenerated gimple-match.c file.
   Copyright (C) 2014-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "insn-config.h"
#include "gimple-match.h"

/* Forward declarations of the private auto-generated matchers.
   They expect valueized operands in canonical order and do not
   perform simplification of all-constant operands.  */

static bool gimple_simplify (code_helper *, tree *,
                             gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree);
static bool gimple_simplify (code_helper *, tree *,
                             gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree);
static bool gimple_simplify (code_helper *, tree *,
                             gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree);

/* Return whether T is a constant that we'll dispatch to fold to
   evaluate fully constant expressions.  */

static bool
constant_for_folding (tree t)
{
  return (CONSTANT_CLASS_P (t)
          /* The following is only interesting to string builtins.  */
          || (TREE_CODE (t) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
}
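
/* For example, an INTEGER_CST or REAL_CST qualifies, and so does an
   ADDR_EXPR wrapping a STRING_CST (a string literal argument), which is
   what lets string builtins such as strlen be folded here when all of
   their arguments are constant.  */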

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */

bool
gimple_resimplify1 (gimple_seq *seq,
                    code_helper *res_code, tree type, tree *res_ops,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
        tem = const_unop (*res_code, type, res_ops[0]);
      else
        {
          tree decl = builtin_decl_implicit (*res_code);
          if (decl)
            {
              tem = fold_builtin_n (UNKNOWN_LOCATION, decl, res_ops, 1, false);
              if (tem)
                {
                  /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
                  STRIP_NOPS (tem);
                  tem = fold_convert (type, tem);
                }
            }
        }
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          res_ops[0] = tem;
          res_ops[1] = NULL_TREE;
          res_ops[2] = NULL_TREE;
          *res_code = TREE_CODE (res_ops[0]);
          return true;
        }
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
                       *res_code, type, res_ops[0]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return false;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */

bool
gimple_resimplify2 (gimple_seq *seq,
                    code_helper *res_code, tree type, tree *res_ops,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
        tem = const_binop (*res_code, type, res_ops[0], res_ops[1]);
      else
        {
          tree decl = builtin_decl_implicit (*res_code);
          if (decl)
            {
              tem = fold_builtin_n (UNKNOWN_LOCATION, decl, res_ops, 2, false);
              if (tem)
                {
                  /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
                  STRIP_NOPS (tem);
                  tem = fold_convert (type, tem);
                }
            }
        }
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          res_ops[0] = tem;
          res_ops[1] = NULL_TREE;
          res_ops[2] = NULL_TREE;
          *res_code = TREE_CODE (res_ops[0]);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_code->is_tree_code ()
      && (TREE_CODE_CLASS ((enum tree_code) *res_code) == tcc_comparison
          || commutative_tree_code (*res_code))
      && tree_swap_operands_p (res_ops[0], res_ops[1], false))
    {
      std::swap (res_ops[0], res_ops[1]);
      if (TREE_CODE_CLASS ((enum tree_code) *res_code) == tcc_comparison)
        *res_code = swap_tree_comparison (*res_code);
      canonicalized = true;
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
                       *res_code, type, res_ops[0], res_ops[1]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return canonicalized;
}
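
/* As an illustration of the canonicalization above: for a comparison like
   1 < _5, tree_swap_operands_p prefers the constant as the second operand,
   so the operands are swapped and the code becomes _5 > 1; for plain
   commutative codes only the operands are swapped.  */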

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */

bool
gimple_resimplify3 (gimple_seq *seq,
                    code_helper *res_code, tree type, tree *res_ops,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1])
      && constant_for_folding (res_ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
        tem = fold_ternary /*_to_constant*/ (*res_code, type, res_ops[0],
                                             res_ops[1], res_ops[2]);
      else
        {
          tree decl = builtin_decl_implicit (*res_code);
          if (decl)
            {
              tem = fold_builtin_n (UNKNOWN_LOCATION, decl, res_ops, 3, false);
              if (tem)
                {
                  /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
                  STRIP_NOPS (tem);
                  tem = fold_convert (type, tem);
                }
            }
        }
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          res_ops[0] = tem;
          res_ops[1] = NULL_TREE;
          res_ops[2] = NULL_TREE;
          *res_code = TREE_CODE (res_ops[0]);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_code->is_tree_code ()
      && commutative_ternary_tree_code (*res_code)
      && tree_swap_operands_p (res_ops[0], res_ops[1], false))
    {
      std::swap (res_ops[0], res_ops[1]);
      canonicalized = true;
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
                       *res_code, type,
                       res_ops[0], res_ops[1], res_ops[2]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return canonicalized;
}

/* If expressions with CODE go as single-rhs in GIMPLE, build a
   GENERIC tree for that expression into *OP0.  */

void
maybe_build_generic_op (enum tree_code code, tree type,
                        tree *op0, tree op1, tree op2)
{
  switch (code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      *op0 = build1 (code, type, *op0);
      break;
    case BIT_FIELD_REF:
      *op0 = build3 (code, type, *op0, op1, op2);
      break;
    default:;
    }
}

/* Push the exploded expression described by RCODE, TYPE and OPS
   as a statement to SEQ if necessary and return a gimple value
   denoting the value of the expression.  If RES is not NULL
   then the result will be always RES and even gimple values are
   pushed to SEQ.  */

tree
maybe_push_res_to_seq (code_helper rcode, tree type, tree *ops,
                       gimple_seq *seq, tree res)
{
  if (rcode.is_tree_code ())
    {
      if (!res
          && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
              || ((tree_code) rcode) == ADDR_EXPR)
          && is_gimple_val (ops[0]))
        return ops[0];
      if (!seq)
        return NULL_TREE;
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      if ((TREE_CODE (ops[0]) == SSA_NAME
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
          || (ops[1]
              && TREE_CODE (ops[1]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
          || (ops[2]
              && TREE_CODE (ops[2]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
        return NULL_TREE;
      if (!res)
        res = make_ssa_name (type);
      maybe_build_generic_op (rcode, type, &ops[0], ops[1], ops[2]);
      gimple new_stmt = gimple_build_assign (res, rcode,
                                             ops[0], ops[1], ops[2]);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
  else
    {
      if (!seq)
        return NULL_TREE;
      tree decl = builtin_decl_implicit (rcode);
      if (!decl)
        return NULL_TREE;
      /* We can't and should not emit calls to non-const functions.  */
      if (!(flags_from_decl_or_type (decl) & ECF_CONST))
        return NULL_TREE;
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      unsigned nargs;
      for (nargs = 0; nargs < 3; ++nargs)
        {
          if (!ops[nargs])
            break;
          if (TREE_CODE (ops[nargs]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[nargs]))
            return NULL_TREE;
        }
      gcc_assert (nargs != 0);
      if (!res)
        res = make_ssa_name (type);
      gimple new_stmt = gimple_build_call (decl, nargs, ops[0], ops[1], ops[2]);
      gimple_call_set_lhs (new_stmt, res);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
}
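
/* Sketch of a typical consumer (identifiers here are illustrative, not
   part of this file): after the STMT based entry point below has matched
   something, the exploded result can be materialized as a gimple value via

     code_helper rcode;
     tree ops[3] = {};
     gimple_seq seq = NULL;
     if (gimple_simplify (stmt, &rcode, ops, &seq, valueize, valueize))
       {
         tree val = maybe_push_res_to_seq (rcode, type, ops, &seq);
         ...
       }

   where VAL, if not NULL_TREE, denotes the simplified value and SEQ holds
   any statements that had to be created for it; a NULL_TREE return means
   the result could not be expressed as a gimple value, for example because
   an abnormal SSA name was involved or the needed builtin declaration is
   unavailable.  */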
366 /* Public API overloads follow for operation being tree_code or
367 built_in_function and for one to three operands or arguments.
368 They return NULL_TREE if nothing could be simplified or
369 the resulting simplified value with parts pushed to SEQ.
370 If SEQ is NULL then if the simplification needs to create
371 new stmts it will fail. If VALUEIZE is non-NULL then all
372 SSA names will be valueized using that hook prior to
373 applying simplifications. */
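
/* For instance, a pass that wants to simplify a binary operation on the
   fly (the names below are illustrative) can do

     gimple_seq stmts = NULL;
     tree res = gimple_simplify (PLUS_EXPR, TREE_TYPE (name), name,
                                 build_zero_cst (TREE_TYPE (name)),
                                 &stmts, NULL);

   and expects NAME itself back for this x + 0 case; if intermediate
   statements were required they end up in STMTS and can be inserted with
   gsi_insert_seq_before.  Passing NULL for SEQ instead makes such
   simplifications fail rather than create new statements.  */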

/* Unary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0))
    {
      tree res = const_unop (code, type, op0);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        code, type, op0))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Binary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1))
    {
      tree res = const_binop (code, type, op0, op1);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if ((commutative_tree_code (code)
       || TREE_CODE_CLASS (code) == tcc_comparison)
      && tree_swap_operands_p (op0, op1, false))
    {
      std::swap (op0, op1);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
        code = swap_tree_comparison (code);
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        code, type, op0, op1))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Ternary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1, tree op2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1)
      && constant_for_folding (op2))
    {
      tree res = fold_ternary /*_to_constant */ (code, type, op0, op1, op2);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1, false))
    std::swap (op0, op1);

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        code, type, op0, op1, op2))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin function with one argument.  */

tree
gimple_simplify (enum built_in_function fn, tree type,
                 tree arg0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0))
    {
      tree decl = builtin_decl_implicit (fn);
      if (decl)
        {
          tree res = fold_builtin_n (UNKNOWN_LOCATION, decl, &arg0, 1, false);
          if (res)
            {
              /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (res);
              res = fold_convert (type, res);
              if (CONSTANT_CLASS_P (res))
                return res;
            }
        }
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        fn, type, arg0))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin function with two arguments.  */

tree
gimple_simplify (enum built_in_function fn, tree type,
                 tree arg0, tree arg1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1))
    {
      tree decl = builtin_decl_implicit (fn);
      if (decl)
        {
          tree args[2];
          args[0] = arg0;
          args[1] = arg1;
          tree res = fold_builtin_n (UNKNOWN_LOCATION, decl, args, 2, false);
          if (res)
            {
              /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (res);
              res = fold_convert (type, res);
              if (CONSTANT_CLASS_P (res))
                return res;
            }
        }
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        fn, type, arg0, arg1))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin function with three arguments.  */

tree
gimple_simplify (enum built_in_function fn, tree type,
                 tree arg0, tree arg1, tree arg2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1)
      && constant_for_folding (arg2))
    {
      tree decl = builtin_decl_implicit (fn);
      if (decl)
        {
          tree args[3];
          args[0] = arg0;
          args[1] = arg1;
          args[2] = arg2;
          tree res = fold_builtin_n (UNKNOWN_LOCATION, decl, args, 3, false);
          if (res)
            {
              /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (res);
              res = fold_convert (type, res);
              if (CONSTANT_CLASS_P (res))
                return res;
            }
        }
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        fn, type, arg0, arg1, arg2))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}
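
/* As an example of the built-in overloads (illustrative only), folding a
   popcount call on a constant argument without emitting any statements:

     tree res = gimple_simplify (BUILT_IN_POPCOUNT, integer_type_node,
                                 build_int_cst (integer_type_node, 255),
                                 NULL, no_follow_ssa_edges);

   which is expected to yield the INTEGER_CST 8 through the constant
   folding path via fold_builtin_n above.  */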

/* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
   VALUEIZED to true if valueization changed OP.  */

static tree
do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem && tem != op)
        {
          op = tem;
          valueized = true;
        }
    }
  return op;
}

/* The main STMT based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  */

bool
gimple_simplify (gimple stmt,
                 code_helper *rcode, tree *ops,
                 gimple_seq *seq,
                 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree type = TREE_TYPE (gimple_assign_lhs (stmt));
        switch (gimple_assign_rhs_class (stmt))
          {
          case GIMPLE_SINGLE_RHS:
            if (code == REALPART_EXPR
                || code == IMAGPART_EXPR
                || code == VIEW_CONVERT_EXPR)
              {
                tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                *rcode = code;
                ops[0] = op0;
                return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
                        || valueized);
              }
            else if (code == BIT_FIELD_REF)
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                tree op0 = TREE_OPERAND (rhs1, 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                *rcode = code;
                ops[0] = op0;
                ops[1] = TREE_OPERAND (rhs1, 1);
                ops[2] = TREE_OPERAND (rhs1, 2);
                return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
                        || valueized);
              }
            else if (code == SSA_NAME
                     && top_valueize)
              {
                tree op0 = gimple_assign_rhs1 (stmt);
                tree valueized = top_valueize (op0);
                if (!valueized || op0 == valueized)
                  return false;
                ops[0] = valueized;
                *rcode = TREE_CODE (op0);
                return true;
              }
            break;
          case GIMPLE_UNARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              *rcode = code;
              ops[0] = rhs1;
              return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
                      || valueized);
            }
          case GIMPLE_BINARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              tree rhs2 = gimple_assign_rhs2 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              *rcode = code;
              ops[0] = rhs1;
              ops[1] = rhs2;
              return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
                      || valueized);
            }
          case GIMPLE_TERNARY_RHS:
            {
              bool valueized = false;
              tree rhs1 = gimple_assign_rhs1 (stmt);
              /* If this is a [VEC_]COND_EXPR first try to simplify an
                 embedded GENERIC condition.  */
              if (code == COND_EXPR
                  || code == VEC_COND_EXPR)
                {
                  if (COMPARISON_CLASS_P (rhs1))
                    {
                      tree lhs = TREE_OPERAND (rhs1, 0);
                      tree rhs = TREE_OPERAND (rhs1, 1);
                      lhs = do_valueize (lhs, top_valueize, valueized);
                      rhs = do_valueize (rhs, top_valueize, valueized);
                      code_helper rcode2 = TREE_CODE (rhs1);
                      tree ops2[3] = {};
                      ops2[0] = lhs;
                      ops2[1] = rhs;
                      if ((gimple_resimplify2 (seq, &rcode2, TREE_TYPE (rhs1),
                                               ops2, valueize)
                           || valueized)
                          && rcode2.is_tree_code ())
                        {
                          valueized = true;
                          if (TREE_CODE_CLASS ((enum tree_code)rcode2)
                              == tcc_comparison)
                            rhs1 = build2 (rcode2, TREE_TYPE (rhs1),
                                           ops2[0], ops2[1]);
                          else if (rcode2 == SSA_NAME
                                   || rcode2 == INTEGER_CST)
                            rhs1 = ops2[0];
                          else
                            valueized = false;
                        }
                    }
                }
              tree rhs2 = gimple_assign_rhs2 (stmt);
              tree rhs3 = gimple_assign_rhs3 (stmt);
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              rhs3 = do_valueize (rhs3, top_valueize, valueized);
              *rcode = code;
              ops[0] = rhs1;
              ops[1] = rhs2;
              ops[2] = rhs3;
              return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
                      || valueized);
            }
          default:
            gcc_unreachable ();
          }
        break;
      }
    case GIMPLE_CALL:
      /* ??? This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
          && gimple_call_num_args (stmt) >= 1
          && gimple_call_num_args (stmt) <= 3)
        {
          tree fn = gimple_call_fn (stmt);
          /* ??? Internal function support missing.  */
          if (!fn)
            return false;
          bool valueized = false;
          fn = do_valueize (fn, top_valueize, valueized);
          if (TREE_CODE (fn) != ADDR_EXPR
              || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
            return false;

          tree decl = TREE_OPERAND (fn, 0);
          if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
              || !builtin_decl_implicit (DECL_FUNCTION_CODE (decl))
              || !gimple_builtin_call_types_compatible_p (stmt, decl))
            return false;

          tree type = TREE_TYPE (gimple_call_lhs (stmt));
          *rcode = DECL_FUNCTION_CODE (decl);
          for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              ops[i] = do_valueize (arg, top_valueize, valueized);
            }
          switch (gimple_call_num_args (stmt))
            {
            case 1:
              return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
                      || valueized);
            case 2:
              return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
                      || valueized);
            case 3:
              return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
                      || valueized);
            default:
              gcc_unreachable ();
            }
        }
      break;
    case GIMPLE_COND:
      {
        tree lhs = gimple_cond_lhs (stmt);
        tree rhs = gimple_cond_rhs (stmt);
        bool valueized = false;
        lhs = do_valueize (lhs, top_valueize, valueized);
        rhs = do_valueize (rhs, top_valueize, valueized);
        *rcode = gimple_cond_code (stmt);
        ops[0] = lhs;
        ops[1] = rhs;
        return (gimple_resimplify2 (seq, rcode,
                                    boolean_type_node, ops, valueize)
                || valueized);
      }

    default:
      break;
    }

  return false;
}
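
/* The fold_stmt_to_constant style of use (a sketch, not lifted from that
   code) passes a NULL SEQ so that no statements may be created; a fully
   constant result is then visible as *RCODE holding a constant tree code
   with the value in OPS[0]:

     code_helper rcode;
     tree ops[3] = {};
     if (gimple_simplify (stmt, &rcode, ops, NULL, valueize, valueize)
         && rcode.is_tree_code ()
         && TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_constant)
       return ops[0];

   Here VALUEIZE stands for whatever SSA value hook the caller uses.  */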

/* Helper for the autogenerated code, valueize OP.  */

inline tree
do_valueize (tree (*valueize)(tree), tree op)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    return valueize (op);
  return op;
}

/* Routine to determine if the types T1 and T2 are effectively
   the same for GIMPLE.  If T1 or T2 is not a type, the test
   applies to their TREE_TYPE.  */

static inline bool
types_match (tree t1, tree t2)
{
  if (!TYPE_P (t1))
    t1 = TREE_TYPE (t1);
  if (!TYPE_P (t2))
    t2 = TREE_TYPE (t2);

  return types_compatible_p (t1, t2);
}
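
/* For example, when T1 is an SSA_NAME of type int and T2 is
   integer_type_node, the comparison is between int and int and thus
   succeeds; an int vs. unsigned int pair does not match because
   types_compatible_p requires matching signedness for integral types.  */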

/* Return whether T has a single use.  For GIMPLE, we also allow any
   non-SSA_NAME (i.e. constants) and zero uses to cope with uses
   that aren't linked up yet.  */

static inline bool
single_use (tree t)
{
  return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
}
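
/* As an illustration, the generated matchers use this to honor the :s
   (single-use) modifier from match.pd, e.g. only rewriting an intermediate
   result when it has no other uses so that the transform does not end up
   duplicating work.  */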