1 /* Preamble and helpers for the autogenerated gimple-match.c file.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
33 #include "internal-fn.h"
34 #include "gimple-fold.h"
35 #include "gimple-iterator.h"
36 #include "insn-config.h"
48 #include "gimple-match.h"
51 /* Forward declarations of the private auto-generated matchers.
52 They expect valueized operands in canonical order and do not
53 perform simplification of all-constant operands. */
54 static bool gimple_simplify (code_helper
*, tree
*,
55 gimple_seq
*, tree (*)(tree
),
56 code_helper
, tree
, tree
);
57 static bool gimple_simplify (code_helper
*, tree
*,
58 gimple_seq
*, tree (*)(tree
),
59 code_helper
, tree
, tree
, tree
);
60 static bool gimple_simplify (code_helper
*, tree
*,
61 gimple_seq
*, tree (*)(tree
),
62 code_helper
, tree
, tree
, tree
, tree
);
65 /* Return whether T is a constant that we'll dispatch to fold to
66 evaluate fully constant expressions. */
69 constant_for_folding (tree t
)
71 return (CONSTANT_CLASS_P (t
)
72 /* The following is only interesting to string builtins. */
73 || (TREE_CODE (t
) == ADDR_EXPR
74 && TREE_CODE (TREE_OPERAND (t
, 0)) == STRING_CST
));
78 /* Helper that matches and simplifies the toplevel result from
79 a gimple_simplify run (where we don't want to build
80 a stmt in case it's used in in-place folding). Replaces
81 *RES_CODE and *RES_OPS with a simplified and/or canonicalized
82 result and returns whether any change was made. */
85 gimple_resimplify1 (gimple_seq
*seq
,
86 code_helper
*res_code
, tree type
, tree
*res_ops
,
87 tree (*valueize
)(tree
))
89 if (constant_for_folding (res_ops
[0]))
92 if (res_code
->is_tree_code ())
93 tem
= const_unop (*res_code
, type
, res_ops
[0]);
96 tree decl
= builtin_decl_implicit (*res_code
);
99 tem
= fold_builtin_n (UNKNOWN_LOCATION
, decl
, res_ops
, 1, false);
102 /* fold_builtin_n wraps the result inside a NOP_EXPR. */
104 tem
= fold_convert (type
, tem
);
109 && CONSTANT_CLASS_P (tem
))
112 res_ops
[1] = NULL_TREE
;
113 res_ops
[2] = NULL_TREE
;
114 *res_code
= TREE_CODE (res_ops
[0]);
119 code_helper res_code2
;
120 tree res_ops2
[3] = {};
121 if (gimple_simplify (&res_code2
, res_ops2
, seq
, valueize
,
122 *res_code
, type
, res_ops
[0]))
124 *res_code
= res_code2
;
125 res_ops
[0] = res_ops2
[0];
126 res_ops
[1] = res_ops2
[1];
127 res_ops
[2] = res_ops2
[2];
134 /* Helper that matches and simplifies the toplevel result from
135 a gimple_simplify run (where we don't want to build
136 a stmt in case it's used in in-place folding). Replaces
137 *RES_CODE and *RES_OPS with a simplified and/or canonicalized
138 result and returns whether any change was made. */
141 gimple_resimplify2 (gimple_seq
*seq
,
142 code_helper
*res_code
, tree type
, tree
*res_ops
,
143 tree (*valueize
)(tree
))
145 if (constant_for_folding (res_ops
[0]) && constant_for_folding (res_ops
[1]))
147 tree tem
= NULL_TREE
;
148 if (res_code
->is_tree_code ())
149 tem
= const_binop (*res_code
, type
, res_ops
[0], res_ops
[1]);
152 tree decl
= builtin_decl_implicit (*res_code
);
155 tem
= fold_builtin_n (UNKNOWN_LOCATION
, decl
, res_ops
, 2, false);
158 /* fold_builtin_n wraps the result inside a NOP_EXPR. */
160 tem
= fold_convert (type
, tem
);
165 && CONSTANT_CLASS_P (tem
))
168 res_ops
[1] = NULL_TREE
;
169 res_ops
[2] = NULL_TREE
;
170 *res_code
= TREE_CODE (res_ops
[0]);
175 /* Canonicalize operand order. */
176 bool canonicalized
= false;
177 if (res_code
->is_tree_code ()
178 && (TREE_CODE_CLASS ((enum tree_code
) *res_code
) == tcc_comparison
179 || commutative_tree_code (*res_code
))
180 && tree_swap_operands_p (res_ops
[0], res_ops
[1], false))
182 std::swap (res_ops
[0], res_ops
[1]);
183 if (TREE_CODE_CLASS ((enum tree_code
) *res_code
) == tcc_comparison
)
184 *res_code
= swap_tree_comparison (*res_code
);
185 canonicalized
= true;
188 code_helper res_code2
;
189 tree res_ops2
[3] = {};
190 if (gimple_simplify (&res_code2
, res_ops2
, seq
, valueize
,
191 *res_code
, type
, res_ops
[0], res_ops
[1]))
193 *res_code
= res_code2
;
194 res_ops
[0] = res_ops2
[0];
195 res_ops
[1] = res_ops2
[1];
196 res_ops
[2] = res_ops2
[2];
200 return canonicalized
;
203 /* Helper that matches and simplifies the toplevel result from
204 a gimple_simplify run (where we don't want to build
205 a stmt in case it's used in in-place folding). Replaces
206 *RES_CODE and *RES_OPS with a simplified and/or canonicalized
207 result and returns whether any change was made. */
210 gimple_resimplify3 (gimple_seq
*seq
,
211 code_helper
*res_code
, tree type
, tree
*res_ops
,
212 tree (*valueize
)(tree
))
214 if (constant_for_folding (res_ops
[0]) && constant_for_folding (res_ops
[1])
215 && constant_for_folding (res_ops
[2]))
217 tree tem
= NULL_TREE
;
218 if (res_code
->is_tree_code ())
219 tem
= fold_ternary
/*_to_constant*/ (*res_code
, type
, res_ops
[0],
220 res_ops
[1], res_ops
[2]);
223 tree decl
= builtin_decl_implicit (*res_code
);
226 tem
= fold_builtin_n (UNKNOWN_LOCATION
, decl
, res_ops
, 3, false);
229 /* fold_builtin_n wraps the result inside a NOP_EXPR. */
231 tem
= fold_convert (type
, tem
);
236 && CONSTANT_CLASS_P (tem
))
239 res_ops
[1] = NULL_TREE
;
240 res_ops
[2] = NULL_TREE
;
241 *res_code
= TREE_CODE (res_ops
[0]);
246 /* Canonicalize operand order. */
247 bool canonicalized
= false;
248 if (res_code
->is_tree_code ()
249 && commutative_ternary_tree_code (*res_code
)
250 && tree_swap_operands_p (res_ops
[0], res_ops
[1], false))
252 std::swap (res_ops
[0], res_ops
[1]);
253 canonicalized
= true;
256 code_helper res_code2
;
257 tree res_ops2
[3] = {};
258 if (gimple_simplify (&res_code2
, res_ops2
, seq
, valueize
,
260 res_ops
[0], res_ops
[1], res_ops
[2]))
262 *res_code
= res_code2
;
263 res_ops
[0] = res_ops2
[0];
264 res_ops
[1] = res_ops2
[1];
265 res_ops
[2] = res_ops2
[2];
269 return canonicalized
;
273 /* If in GIMPLE expressions with CODE go as single-rhs build
274 a GENERIC tree for that expression into *OP0. */
277 maybe_build_generic_op (enum tree_code code
, tree type
,
278 tree
*op0
, tree op1
, tree op2
)
284 case VIEW_CONVERT_EXPR
:
285 *op0
= build1 (code
, type
, *op0
);
288 *op0
= build3 (code
, type
, *op0
, op1
, op2
);
294 /* Push the exploded expression described by RCODE, TYPE and OPS
295 as a statement to SEQ if necessary and return a gimple value
296 denoting the value of the expression. If RES is not NULL
297 then the result will be always RES and even gimple values are
301 maybe_push_res_to_seq (code_helper rcode
, tree type
, tree
*ops
,
302 gimple_seq
*seq
, tree res
)
304 if (rcode
.is_tree_code ())
307 && (TREE_CODE_LENGTH ((tree_code
) rcode
) == 0
308 || ((tree_code
) rcode
) == ADDR_EXPR
)
309 && is_gimple_val (ops
[0]))
313 /* Play safe and do not allow abnormals to be mentioned in
314 newly created statements. */
315 if ((TREE_CODE (ops
[0]) == SSA_NAME
316 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[0]))
318 && TREE_CODE (ops
[1]) == SSA_NAME
319 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[1]))
321 && TREE_CODE (ops
[2]) == SSA_NAME
322 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[2])))
325 res
= make_ssa_name (type
);
326 maybe_build_generic_op (rcode
, type
, &ops
[0], ops
[1], ops
[2]);
327 gimple new_stmt
= gimple_build_assign (res
, rcode
,
328 ops
[0], ops
[1], ops
[2]);
329 gimple_seq_add_stmt_without_update (seq
, new_stmt
);
336 tree decl
= builtin_decl_implicit (rcode
);
339 unsigned nargs
= type_num_arguments (TREE_TYPE (decl
));
340 gcc_assert (nargs
<= 3);
341 /* Play safe and do not allow abnormals to be mentioned in
342 newly created statements. */
343 if ((TREE_CODE (ops
[0]) == SSA_NAME
344 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[0]))
346 && TREE_CODE (ops
[1]) == SSA_NAME
347 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[1]))
349 && TREE_CODE (ops
[2]) == SSA_NAME
350 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[2])))
353 res
= make_ssa_name (type
);
354 gimple new_stmt
= gimple_build_call (decl
, nargs
, ops
[0], ops
[1], ops
[2]);
355 gimple_call_set_lhs (new_stmt
, res
);
356 gimple_seq_add_stmt_without_update (seq
, new_stmt
);
362 /* Public API overloads follow for operation being tree_code or
363 built_in_function and for one to three operands or arguments.
364 They return NULL_TREE if nothing could be simplified or
365 the resulting simplified value with parts pushed to SEQ.
366 If SEQ is NULL then if the simplification needs to create
367 new stmts it will fail. If VALUEIZE is non-NULL then all
368 SSA names will be valueized using that hook prior to
369 applying simplifications. */
374 gimple_simplify (enum tree_code code
, tree type
,
376 gimple_seq
*seq
, tree (*valueize
)(tree
))
378 if (constant_for_folding (op0
))
380 tree res
= const_unop (code
, type
, op0
);
382 && CONSTANT_CLASS_P (res
))
388 if (!gimple_simplify (&rcode
, ops
, seq
, valueize
,
391 return maybe_push_res_to_seq (rcode
, type
, ops
, seq
);
397 gimple_simplify (enum tree_code code
, tree type
,
399 gimple_seq
*seq
, tree (*valueize
)(tree
))
401 if (constant_for_folding (op0
) && constant_for_folding (op1
))
403 tree res
= const_binop (code
, type
, op0
, op1
);
405 && CONSTANT_CLASS_P (res
))
409 /* Canonicalize operand order both for matching and fallback stmt
411 if ((commutative_tree_code (code
)
412 || TREE_CODE_CLASS (code
) == tcc_comparison
)
413 && tree_swap_operands_p (op0
, op1
, false))
415 std::swap (op0
, op1
);
416 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
417 code
= swap_tree_comparison (code
);
422 if (!gimple_simplify (&rcode
, ops
, seq
, valueize
,
423 code
, type
, op0
, op1
))
425 return maybe_push_res_to_seq (rcode
, type
, ops
, seq
);
431 gimple_simplify (enum tree_code code
, tree type
,
432 tree op0
, tree op1
, tree op2
,
433 gimple_seq
*seq
, tree (*valueize
)(tree
))
435 if (constant_for_folding (op0
) && constant_for_folding (op1
)
436 && constant_for_folding (op2
))
438 tree res
= fold_ternary
/*_to_constant */ (code
, type
, op0
, op1
, op2
);
440 && CONSTANT_CLASS_P (res
))
444 /* Canonicalize operand order both for matching and fallback stmt
446 if (commutative_ternary_tree_code (code
)
447 && tree_swap_operands_p (op0
, op1
, false))
448 std::swap (op0
, op1
);
452 if (!gimple_simplify (&rcode
, ops
, seq
, valueize
,
453 code
, type
, op0
, op1
, op2
))
455 return maybe_push_res_to_seq (rcode
, type
, ops
, seq
);
458 /* Builtin function with one argument. */
461 gimple_simplify (enum built_in_function fn
, tree type
,
463 gimple_seq
*seq
, tree (*valueize
)(tree
))
465 if (constant_for_folding (arg0
))
467 tree decl
= builtin_decl_implicit (fn
);
470 tree res
= fold_builtin_n (UNKNOWN_LOCATION
, decl
, &arg0
, 1, false);
473 /* fold_builtin_n wraps the result inside a NOP_EXPR. */
475 res
= fold_convert (type
, res
);
476 if (CONSTANT_CLASS_P (res
))
484 if (!gimple_simplify (&rcode
, ops
, seq
, valueize
,
487 return maybe_push_res_to_seq (rcode
, type
, ops
, seq
);
490 /* Builtin function with two arguments. */
493 gimple_simplify (enum built_in_function fn
, tree type
,
494 tree arg0
, tree arg1
,
495 gimple_seq
*seq
, tree (*valueize
)(tree
))
497 if (constant_for_folding (arg0
)
498 && constant_for_folding (arg1
))
500 tree decl
= builtin_decl_implicit (fn
);
506 tree res
= fold_builtin_n (UNKNOWN_LOCATION
, decl
, args
, 2, false);
509 /* fold_builtin_n wraps the result inside a NOP_EXPR. */
511 res
= fold_convert (type
, res
);
512 if (CONSTANT_CLASS_P (res
))
520 if (!gimple_simplify (&rcode
, ops
, seq
, valueize
,
521 fn
, type
, arg0
, arg1
))
523 return maybe_push_res_to_seq (rcode
, type
, ops
, seq
);
526 /* Builtin function with three arguments. */
529 gimple_simplify (enum built_in_function fn
, tree type
,
530 tree arg0
, tree arg1
, tree arg2
,
531 gimple_seq
*seq
, tree (*valueize
)(tree
))
533 if (constant_for_folding (arg0
)
534 && constant_for_folding (arg1
)
535 && constant_for_folding (arg2
))
537 tree decl
= builtin_decl_implicit (fn
);
544 tree res
= fold_builtin_n (UNKNOWN_LOCATION
, decl
, args
, 3, false);
547 /* fold_builtin_n wraps the result inside a NOP_EXPR. */
549 res
= fold_convert (type
, res
);
550 if (CONSTANT_CLASS_P (res
))
558 if (!gimple_simplify (&rcode
, ops
, seq
, valueize
,
559 fn
, type
, arg0
, arg1
, arg2
))
561 return maybe_push_res_to_seq (rcode
, type
, ops
, seq
);
565 /* The main STMT based simplification entry. It is used by the fold_stmt
566 and the fold_stmt_to_constant APIs. */
569 gimple_simplify (gimple stmt
,
570 code_helper
*rcode
, tree
*ops
,
572 tree (*valueize
)(tree
), tree (*top_valueize
)(tree
))
574 switch (gimple_code (stmt
))
578 enum tree_code code
= gimple_assign_rhs_code (stmt
);
579 tree type
= TREE_TYPE (gimple_assign_lhs (stmt
));
580 switch (gimple_assign_rhs_class (stmt
))
582 case GIMPLE_SINGLE_RHS
:
583 if (code
== REALPART_EXPR
584 || code
== IMAGPART_EXPR
585 || code
== VIEW_CONVERT_EXPR
)
587 tree op0
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
588 if (top_valueize
&& TREE_CODE (op0
) == SSA_NAME
)
590 tree tem
= top_valueize (op0
);
596 return gimple_resimplify1 (seq
, rcode
, type
, ops
, valueize
);
598 else if (code
== BIT_FIELD_REF
)
600 tree rhs1
= gimple_assign_rhs1 (stmt
);
601 tree op0
= TREE_OPERAND (rhs1
, 0);
602 if (top_valueize
&& TREE_CODE (op0
) == SSA_NAME
)
604 tree tem
= top_valueize (op0
);
610 ops
[1] = TREE_OPERAND (rhs1
, 1);
611 ops
[2] = TREE_OPERAND (rhs1
, 2);
612 return gimple_resimplify3 (seq
, rcode
, type
, ops
, valueize
);
614 else if (code
== SSA_NAME
617 tree op0
= gimple_assign_rhs1 (stmt
);
618 tree valueized
= top_valueize (op0
);
619 if (!valueized
|| op0
== valueized
)
622 *rcode
= TREE_CODE (op0
);
626 case GIMPLE_UNARY_RHS
:
628 tree rhs1
= gimple_assign_rhs1 (stmt
);
629 if (top_valueize
&& TREE_CODE (rhs1
) == SSA_NAME
)
631 tree tem
= top_valueize (rhs1
);
637 return gimple_resimplify1 (seq
, rcode
, type
, ops
, valueize
);
639 case GIMPLE_BINARY_RHS
:
641 tree rhs1
= gimple_assign_rhs1 (stmt
);
642 if (top_valueize
&& TREE_CODE (rhs1
) == SSA_NAME
)
644 tree tem
= top_valueize (rhs1
);
648 tree rhs2
= gimple_assign_rhs2 (stmt
);
649 if (top_valueize
&& TREE_CODE (rhs2
) == SSA_NAME
)
651 tree tem
= top_valueize (rhs2
);
658 return gimple_resimplify2 (seq
, rcode
, type
, ops
, valueize
);
660 case GIMPLE_TERNARY_RHS
:
662 tree rhs1
= gimple_assign_rhs1 (stmt
);
663 if (top_valueize
&& TREE_CODE (rhs1
) == SSA_NAME
)
665 tree tem
= top_valueize (rhs1
);
669 tree rhs2
= gimple_assign_rhs2 (stmt
);
670 if (top_valueize
&& TREE_CODE (rhs2
) == SSA_NAME
)
672 tree tem
= top_valueize (rhs2
);
676 tree rhs3
= gimple_assign_rhs3 (stmt
);
677 if (top_valueize
&& TREE_CODE (rhs3
) == SSA_NAME
)
679 tree tem
= top_valueize (rhs3
);
687 return gimple_resimplify3 (seq
, rcode
, type
, ops
, valueize
);
696 /* ??? This way we can't simplify calls with side-effects. */
697 if (gimple_call_lhs (stmt
) != NULL_TREE
)
699 tree fn
= gimple_call_fn (stmt
);
700 /* ??? Internal function support missing. */
703 if (top_valueize
&& TREE_CODE (fn
) == SSA_NAME
)
705 tree tem
= top_valueize (fn
);
710 || TREE_CODE (fn
) != ADDR_EXPR
711 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
712 || DECL_BUILT_IN_CLASS (TREE_OPERAND (fn
, 0)) != BUILT_IN_NORMAL
713 || !builtin_decl_implicit (DECL_FUNCTION_CODE (TREE_OPERAND (fn
, 0)))
714 || !gimple_builtin_call_types_compatible_p (stmt
,
715 TREE_OPERAND (fn
, 0)))
718 tree decl
= TREE_OPERAND (fn
, 0);
719 tree type
= TREE_TYPE (gimple_call_lhs (stmt
));
720 switch (gimple_call_num_args (stmt
))
724 tree arg1
= gimple_call_arg (stmt
, 0);
725 if (top_valueize
&& TREE_CODE (arg1
) == SSA_NAME
)
727 tree tem
= top_valueize (arg1
);
731 *rcode
= DECL_FUNCTION_CODE (decl
);
733 return gimple_resimplify1 (seq
, rcode
, type
, ops
, valueize
);
737 tree arg1
= gimple_call_arg (stmt
, 0);
738 if (top_valueize
&& TREE_CODE (arg1
) == SSA_NAME
)
740 tree tem
= top_valueize (arg1
);
744 tree arg2
= gimple_call_arg (stmt
, 1);
745 if (top_valueize
&& TREE_CODE (arg2
) == SSA_NAME
)
747 tree tem
= top_valueize (arg2
);
751 *rcode
= DECL_FUNCTION_CODE (decl
);
754 return gimple_resimplify2 (seq
, rcode
, type
, ops
, valueize
);
758 tree arg1
= gimple_call_arg (stmt
, 0);
759 if (top_valueize
&& TREE_CODE (arg1
) == SSA_NAME
)
761 tree tem
= top_valueize (arg1
);
765 tree arg2
= gimple_call_arg (stmt
, 1);
766 if (top_valueize
&& TREE_CODE (arg2
) == SSA_NAME
)
768 tree tem
= top_valueize (arg2
);
772 tree arg3
= gimple_call_arg (stmt
, 2);
773 if (top_valueize
&& TREE_CODE (arg3
) == SSA_NAME
)
775 tree tem
= top_valueize (arg3
);
779 *rcode
= DECL_FUNCTION_CODE (decl
);
783 return gimple_resimplify3 (seq
, rcode
, type
, ops
, valueize
);
793 tree lhs
= gimple_cond_lhs (stmt
);
794 if (top_valueize
&& TREE_CODE (lhs
) == SSA_NAME
)
796 tree tem
= top_valueize (lhs
);
800 tree rhs
= gimple_cond_rhs (stmt
);
801 if (top_valueize
&& TREE_CODE (rhs
) == SSA_NAME
)
803 tree tem
= top_valueize (rhs
);
807 *rcode
= gimple_cond_code (stmt
);
810 return gimple_resimplify2 (seq
, rcode
, boolean_type_node
, ops
, valueize
);
821 /* Helper for the autogenerated code, valueize OP. */
824 do_valueize (tree (*valueize
)(tree
), tree op
)
826 if (valueize
&& TREE_CODE (op
) == SSA_NAME
)
827 return valueize (op
);
831 /* Routine to determine if the types T1 and T2 are effectively
832 the same for GIMPLE. If T1 or T2 is not a type, the test
833 applies to their TREE_TYPE. */
836 types_match (tree t1
, tree t2
)
843 return types_compatible_p (t1
, t2
);
846 /* Return if T has a single use. For GIMPLE, we also allow any
847 non-SSA_NAME (ie constants) and zero uses to cope with uses
848 that aren't linked up yet. */
853 return TREE_CODE (t
) != SSA_NAME
|| has_zero_uses (t
) || has_single_use (t
);