/* Preamble and helpers for the autogenerated gimple-match.c file.
   Copyright (C) 2014-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "calls.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "gimple-match.h"
#include "tree-pass.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimplify.h"

/* Forward declarations of the private auto-generated matchers.
   They expect valueized operands in canonical order and do not
   perform simplification of all-constant operands.  */
static bool gimple_simplify (code_helper *, tree *,
                             gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree);
static bool gimple_simplify (code_helper *, tree *,
                             gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree);
static bool gimple_simplify (code_helper *, tree *,
                             gimple_seq *, tree (*)(tree),
                             code_helper, tree, tree, tree, tree);

/* Return whether T is a constant that we'll dispatch to fold to
   evaluate fully constant expressions.  */

static inline bool
constant_for_folding (tree t)
{
  return (CONSTANT_CLASS_P (t)
          /* The following is only interesting to string builtins.  */
          || (TREE_CODE (t) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
}
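
/* Illustrative sketch only: the predicate above accepts plain constants
   and, as the special case, the address of a string literal, the latter
   so that string builtins such as strlen can be folded to a constant.
   example_constant_for_folding_cases is a hypothetical helper used only
   to demonstrate this; it is not part of the matcher machinery.  Assuming
   the usual tree.h constructors, both checks below should hold.  */

static inline bool
example_constant_for_folding_cases (void)
{
  tree ival = build_int_cst (integer_type_node, 42);
  /* build_string_literal yields an ADDR_EXPR wrapping a STRING_CST.  */
  tree sadr = build_string_literal (4, "foo");
  return constant_for_folding (ival) && constant_for_folding (sadr);
}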

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */

bool
gimple_resimplify1 (gimple_seq *seq,
                    code_helper *res_code, tree type, tree *res_ops,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
        tem = const_unop (*res_code, type, res_ops[0]);
      else
        tem = fold_const_call (combined_fn (*res_code), type, res_ops[0]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          res_ops[0] = tem;
          res_ops[1] = NULL_TREE;
          res_ops[2] = NULL_TREE;
          *res_code = TREE_CODE (res_ops[0]);
          return true;
        }
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
                       *res_code, type, res_ops[0]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return false;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */

bool
gimple_resimplify2 (gimple_seq *seq,
                    code_helper *res_code, tree type, tree *res_ops,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
        tem = const_binop (*res_code, type, res_ops[0], res_ops[1]);
      else
        tem = fold_const_call (combined_fn (*res_code), type,
                               res_ops[0], res_ops[1]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          res_ops[0] = tem;
          res_ops[1] = NULL_TREE;
          res_ops[2] = NULL_TREE;
          *res_code = TREE_CODE (res_ops[0]);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_code->is_tree_code ()
      && (TREE_CODE_CLASS ((enum tree_code) *res_code) == tcc_comparison
          || commutative_tree_code (*res_code))
      && tree_swap_operands_p (res_ops[0], res_ops[1], false))
    {
      std::swap (res_ops[0], res_ops[1]);
      if (TREE_CODE_CLASS ((enum tree_code) *res_code) == tcc_comparison)
        *res_code = swap_tree_comparison (*res_code);
      canonicalized = true;
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
                       *res_code, type, res_ops[0], res_ops[1]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return canonicalized;
}

/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   *RES_CODE and *RES_OPS with a simplified and/or canonicalized
   result and returns whether any change was made.  */

bool
gimple_resimplify3 (gimple_seq *seq,
                    code_helper *res_code, tree type, tree *res_ops,
                    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1])
      && constant_for_folding (res_ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
        tem = fold_ternary/*_to_constant*/ (*res_code, type, res_ops[0],
                                            res_ops[1], res_ops[2]);
      else
        tem = fold_const_call (combined_fn (*res_code), type,
                               res_ops[0], res_ops[1], res_ops[2]);
      if (tem != NULL_TREE
          && CONSTANT_CLASS_P (tem))
        {
          res_ops[0] = tem;
          res_ops[1] = NULL_TREE;
          res_ops[2] = NULL_TREE;
          *res_code = TREE_CODE (res_ops[0]);
          return true;
        }
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_code->is_tree_code ()
      && commutative_ternary_tree_code (*res_code)
      && tree_swap_operands_p (res_ops[0], res_ops[1], false))
    {
      std::swap (res_ops[0], res_ops[1]);
      canonicalized = true;
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
                       *res_code, type,
                       res_ops[0], res_ops[1], res_ops[2]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return canonicalized;
}
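
/* Illustrative sketch only: how a caller might drive the gimple_resimplify
   helpers above on an exploded expression.  example_resimplify_plus_zero is
   a hypothetical helper, and X is assumed to be an operand of some integral
   type; the helpers only promise to update *RES_CODE/RES_OPS and return
   whether anything changed.  */

static inline bool
example_resimplify_plus_zero (tree x, tree (*valueize)(tree))
{
  code_helper res_code = PLUS_EXPR;
  tree res_ops[3] = { x, build_zero_cst (TREE_TYPE (x)), NULL_TREE };
  /* With a NULL seq no new statements may be emitted; match.pd rules
     like "x + 0 -> x" can still rewrite the code/operand pair.  */
  return gimple_resimplify2 (NULL, &res_code, TREE_TYPE (x), res_ops,
                             valueize);
}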

/* If expressions with CODE go as single-rhs in GIMPLE, build
   a GENERIC tree for that expression and store it in OPS[0].  */

void
maybe_build_generic_op (enum tree_code code, tree type, tree *ops)
{
  switch (code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      ops[0] = build1 (code, type, ops[0]);
      break;
    case BIT_FIELD_REF:
      ops[0] = build3 (code, type, ops[0], ops[1], ops[2]);
      ops[1] = ops[2] = NULL_TREE;
      break;
    default:;
    }
}
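
/* Illustrative sketch only: REALPART_EXPR is one of the codes that appear
   as a single-rhs in GIMPLE, so the helper above wraps the operand into a
   GENERIC tree.  example_wrap_realpart is a hypothetical helper and
   COMPLEX_VAL is assumed to have COMPLEX_TYPE; after the call OPS[0] is a
   REALPART_EXPR tree of the element type.  */

static inline tree
example_wrap_realpart (tree complex_val)
{
  tree ops[3] = { complex_val, NULL_TREE, NULL_TREE };
  maybe_build_generic_op (REALPART_EXPR,
                          TREE_TYPE (TREE_TYPE (complex_val)), ops);
  return ops[0];
}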

tree (*mprts_hook) (code_helper, tree, tree *);

/* Try to build a call to FN with return type TYPE and the NARGS
   arguments given in OPS.  Return null if the target doesn't support
   the function.  */

static gcall *
build_call_internal (internal_fn fn, tree type, unsigned int nargs, tree *ops)
{
  if (direct_internal_fn_p (fn))
    {
      tree_pair types = direct_internal_fn_types (fn, type, ops);
      if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
        return NULL;
    }
  return gimple_build_call_internal (fn, nargs, ops[0], ops[1], ops[2]);
}

/* Push the exploded expression described by RCODE, TYPE and OPS
   as a statement to SEQ if necessary and return a gimple value
   denoting the value of the expression.  If RES is not NULL
   then the result will always be RES and even gimple values are
   pushed to SEQ.  */

tree
maybe_push_res_to_seq (code_helper rcode, tree type, tree *ops,
                       gimple_seq *seq, tree res)
{
  if (rcode.is_tree_code ())
    {
      if (!res
          && gimple_simplified_result_is_gimple_val (rcode, ops))
        return ops[0];
      if (mprts_hook)
        {
          tree tem = mprts_hook (rcode, type, ops);
          if (tem)
            return tem;
        }
      if (!seq)
        return NULL_TREE;
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      if ((TREE_CODE (ops[0]) == SSA_NAME
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
          || (ops[1]
              && TREE_CODE (ops[1]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
          || (ops[2]
              && TREE_CODE (ops[2]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2]))
          || (COMPARISON_CLASS_P (ops[0])
              && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
                   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0],
                                                                     0)))
                  || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
                      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0],
                                                                        1))))))
        return NULL_TREE;
      if (!res)
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (type);
          else
            res = create_tmp_reg (type);
        }
      maybe_build_generic_op (rcode, type, ops);
      gimple *new_stmt = gimple_build_assign (res, rcode,
                                              ops[0], ops[1], ops[2]);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
  else
    {
      if (!seq)
        return NULL_TREE;
      combined_fn fn = rcode;
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      unsigned nargs;
      for (nargs = 0; nargs < 3; ++nargs)
        {
          if (!ops[nargs])
            break;
          if (TREE_CODE (ops[nargs]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[nargs]))
            return NULL_TREE;
        }
      gcc_assert (nargs != 0);
      gcall *new_stmt = NULL;
      if (internal_fn_p (fn))
        {
          /* Generate the given function if we can.  */
          internal_fn ifn = as_internal_fn (fn);
          new_stmt = build_call_internal (ifn, type, nargs, ops);
          if (!new_stmt)
            return NULL_TREE;
        }
      else
        {
          /* Find the function we want to call.  */
          tree decl = builtin_decl_implicit (as_builtin_fn (fn));
          if (!decl)
            return NULL;

          /* We can't and should not emit calls to non-const functions.  */
          if (!(flags_from_decl_or_type (decl) & ECF_CONST))
            return NULL;

          new_stmt = gimple_build_call (decl, nargs, ops[0], ops[1], ops[2]);
        }
      if (!res)
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (type);
          else
            res = create_tmp_reg (type);
        }
      gimple_call_set_lhs (new_stmt, res);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
}
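
/* Illustrative sketch only: materializing a simplification result.
   RCODE/OPS would normally come from a gimple_simplify run; here a
   hypothetical PLUS_EXPR of A and B is pushed to SEQ and the defining
   gimple value (or NULL_TREE on failure, e.g. when abnormal SSA names
   are involved or SEQ is NULL) is returned.  */

static inline tree
example_materialize_plus (tree a, tree b, gimple_seq *seq)
{
  tree ops[3] = { a, b, NULL_TREE };
  return maybe_push_res_to_seq (PLUS_EXPR, TREE_TYPE (a), ops, seq);
}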

/* Public API overloads follow for the operation being a tree_code or
   a built_in_function and for one to three operands or arguments.
   They return NULL_TREE if nothing could be simplified, otherwise
   the resulting simplified value with parts pushed to SEQ.
   If SEQ is NULL, a simplification that needs to create new stmts
   will fail.  If VALUEIZE is non-NULL then all SSA names will be
   valueized using that hook prior to applying simplifications.  */
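
/* Illustrative sketch of the public API documented above, using the
   binary tree_code overload defined below: try to prove A * 1 simplifies
   without emitting statements.  example_simplify_mult_one is a
   hypothetical helper; SEQ is NULL so any simplification that would need
   new stmts fails, and VALUEIZE is NULL so SSA names are used as-is.
   Returns the simplified value or NULL_TREE.  */

static inline tree
example_simplify_mult_one (tree a)
{
  return gimple_simplify (MULT_EXPR, TREE_TYPE (a), a,
                          build_one_cst (TREE_TYPE (a)), NULL, NULL);
}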

/* Unary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0))
    {
      tree res = const_unop (code, type, op0);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        code, type, op0))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Binary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1))
    {
      tree res = const_binop (code, type, op0, op1);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if ((commutative_tree_code (code)
       || TREE_CODE_CLASS (code) == tcc_comparison)
      && tree_swap_operands_p (op0, op1, false))
    {
      std::swap (op0, op1);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
        code = swap_tree_comparison (code);
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        code, type, op0, op1))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Ternary ops.  */

tree
gimple_simplify (enum tree_code code, tree type,
                 tree op0, tree op1, tree op2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1)
      && constant_for_folding (op2))
    {
      tree res = fold_ternary/*_to_constant*/ (code, type, op0, op1, op2);
      if (res != NULL_TREE
          && CONSTANT_CLASS_P (res))
        return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1, false))
    std::swap (op0, op1);

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        code, type, op0, op1, op2))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin function with one argument.  */

tree
gimple_simplify (enum built_in_function fn, tree type,
                 tree arg0,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0))
    {
      tree res = fold_const_call (as_combined_fn (fn), type, arg0);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        as_combined_fn (fn), type, arg0))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin function with two arguments.  */

tree
gimple_simplify (enum built_in_function fn, tree type,
                 tree arg0, tree arg1,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1))
    {
      tree res = fold_const_call (as_combined_fn (fn), type, arg0, arg1);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        as_combined_fn (fn), type, arg0, arg1))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}

/* Builtin function with three arguments.  */

tree
gimple_simplify (enum built_in_function fn, tree type,
                 tree arg0, tree arg1, tree arg2,
                 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1)
      && constant_for_folding (arg2))
    {
      tree res = fold_const_call (as_combined_fn (fn), type, arg0, arg1, arg2);
      if (res && CONSTANT_CLASS_P (res))
        return res;
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
                        as_combined_fn (fn), type, arg0, arg1, arg2))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}
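
/* Illustrative sketch of the builtin overloads above: with a constant
   argument the call is dispatched to fold_const_call, so this hypothetical
   helper should yield the constant 3 for strlen ("foo") without needing a
   statement sequence (assuming the string-builtin support in
   fold-const-call.c applies).  */

static inline tree
example_fold_constant_strlen (void)
{
  tree arg = build_string_literal (4, "foo");
  return gimple_simplify (BUILT_IN_STRLEN, size_type_node, arg, NULL, NULL);
}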

/* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
   VALUEIZED to true if valueization changed OP.  */

static inline tree
do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    {
      tree tem = valueize (op);
      if (tem && tem != op)
        {
          op = tem;
          valueized = true;
        }
    }
  return op;
}

/* The main STMT-based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  */

bool
gimple_simplify (gimple *stmt,
                 code_helper *rcode, tree *ops,
                 gimple_seq *seq,
                 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree type = TREE_TYPE (gimple_assign_lhs (stmt));
        switch (gimple_assign_rhs_class (stmt))
          {
          case GIMPLE_SINGLE_RHS:
            if (code == REALPART_EXPR
                || code == IMAGPART_EXPR
                || code == VIEW_CONVERT_EXPR)
              {
                tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                *rcode = code;
                ops[0] = op0;
                return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
                        || valueized);
              }
            else if (code == BIT_FIELD_REF)
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                tree op0 = TREE_OPERAND (rhs1, 0);
                bool valueized = false;
                op0 = do_valueize (op0, top_valueize, valueized);
                *rcode = code;
                ops[0] = op0;
                ops[1] = TREE_OPERAND (rhs1, 1);
                ops[2] = TREE_OPERAND (rhs1, 2);
                return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
                        || valueized);
              }
            else if (code == SSA_NAME
                     && top_valueize)
              {
                tree op0 = gimple_assign_rhs1 (stmt);
                tree valueized = top_valueize (op0);
                if (!valueized || op0 == valueized)
                  return false;
                ops[0] = valueized;
                *rcode = TREE_CODE (op0);
                return true;
              }
            break;
          case GIMPLE_UNARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              *rcode = code;
              ops[0] = rhs1;
              return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
                      || valueized);
            }
          case GIMPLE_BINARY_RHS:
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              tree rhs2 = gimple_assign_rhs2 (stmt);
              bool valueized = false;
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              *rcode = code;
              ops[0] = rhs1;
              ops[1] = rhs2;
              return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
                      || valueized);
            }
          case GIMPLE_TERNARY_RHS:
            {
              bool valueized = false;
              tree rhs1 = gimple_assign_rhs1 (stmt);
              /* If this is a [VEC_]COND_EXPR first try to simplify an
                 embedded GENERIC condition.  */
              if (code == COND_EXPR
                  || code == VEC_COND_EXPR)
                {
                  if (COMPARISON_CLASS_P (rhs1))
                    {
                      tree lhs = TREE_OPERAND (rhs1, 0);
                      tree rhs = TREE_OPERAND (rhs1, 1);
                      lhs = do_valueize (lhs, top_valueize, valueized);
                      rhs = do_valueize (rhs, top_valueize, valueized);
                      code_helper rcode2 = TREE_CODE (rhs1);
                      tree ops2[3] = {};
                      ops2[0] = lhs;
                      ops2[1] = rhs;
                      if ((gimple_resimplify2 (seq, &rcode2, TREE_TYPE (rhs1),
                                               ops2, valueize)
                           || valueized)
                          && rcode2.is_tree_code ())
                        {
                          valueized = true;
                          if (TREE_CODE_CLASS ((enum tree_code) rcode2)
                              == tcc_comparison)
                            rhs1 = build2 (rcode2, TREE_TYPE (rhs1),
                                           ops2[0], ops2[1]);
                          else if (rcode2 == SSA_NAME
                                   || rcode2 == INTEGER_CST
                                   || rcode2 == VECTOR_CST)
                            rhs1 = ops2[0];
                          else
                            valueized = false;
                        }
                    }
                }
              tree rhs2 = gimple_assign_rhs2 (stmt);
              tree rhs3 = gimple_assign_rhs3 (stmt);
              rhs1 = do_valueize (rhs1, top_valueize, valueized);
              rhs2 = do_valueize (rhs2, top_valueize, valueized);
              rhs3 = do_valueize (rhs3, top_valueize, valueized);
              *rcode = code;
              ops[0] = rhs1;
              ops[1] = rhs2;
              ops[2] = rhs3;
              return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
                      || valueized);
            }
          default:
            gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_CALL:
      /* ??? This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
          && gimple_call_num_args (stmt) >= 1
          && gimple_call_num_args (stmt) <= 3)
        {
          bool valueized = false;
          if (gimple_call_internal_p (stmt))
            *rcode = as_combined_fn (gimple_call_internal_fn (stmt));
          else
            {
              tree fn = gimple_call_fn (stmt);
              if (!fn)
                return false;

              fn = do_valueize (fn, top_valueize, valueized);
              if (TREE_CODE (fn) != ADDR_EXPR
                  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
                return false;

              tree decl = TREE_OPERAND (fn, 0);
              if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
                  || !gimple_builtin_call_types_compatible_p (stmt, decl))
                return false;

              *rcode = as_combined_fn (DECL_FUNCTION_CODE (decl));
            }

          tree type = TREE_TYPE (gimple_call_lhs (stmt));
          for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              ops[i] = do_valueize (arg, top_valueize, valueized);
            }
          switch (gimple_call_num_args (stmt))
            {
            case 1:
              return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
                      || valueized);
            case 2:
              return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
                      || valueized);
            case 3:
              return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
                      || valueized);
            default:
              gcc_unreachable ();
            }
        }
      break;

    case GIMPLE_COND:
      {
        tree lhs = gimple_cond_lhs (stmt);
        tree rhs = gimple_cond_rhs (stmt);
        bool valueized = false;
        lhs = do_valueize (lhs, top_valueize, valueized);
        rhs = do_valueize (rhs, top_valueize, valueized);
        *rcode = gimple_cond_code (stmt);
        ops[0] = lhs;
        ops[1] = rhs;
        return (gimple_resimplify2 (seq, rcode,
                                    boolean_type_node, ops, valueize)
                || valueized);
      }

    default:
      break;
    }

  return false;
}
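
/* Illustrative sketch only: how a fold_stmt-style caller might drive the
   entry point above.  example_simplify_stmt is a hypothetical helper that
   explodes STMT, lets the matchers rewrite RCODE/OPS, then materializes
   the result (pushing any intermediate statements to SEQ).  Valueization
   hooks are omitted (NULL) for brevity.  */

static inline tree
example_simplify_stmt (gimple *stmt, gimple_seq *seq)
{
  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (stmt, &rcode, ops, seq, NULL, NULL))
    return NULL_TREE;
  if (gimple_has_lhs (stmt))
    return maybe_push_res_to_seq (rcode, TREE_TYPE (gimple_get_lhs (stmt)),
                                  ops, seq);
  return NULL_TREE;
}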

/* Helper for the autogenerated code, valueize OP.  */

inline tree
do_valueize (tree (*valueize)(tree), tree op)
{
  if (valueize && TREE_CODE (op) == SSA_NAME)
    return valueize (op);
  return op;
}

/* Routine to determine if the types T1 and T2 are effectively
   the same for GIMPLE.  If T1 or T2 is not a type, the test
   applies to their TREE_TYPE.  */

static inline bool
types_match (tree t1, tree t2)
{
  if (!TYPE_P (t1))
    t1 = TREE_TYPE (t1);
  if (!TYPE_P (t2))
    t2 = TREE_TYPE (t2);

  return types_compatible_p (t1, t2);
}

/* Return whether T has a single use.  For GIMPLE, we also allow any
   non-SSA_NAME (i.e. constants) and zero uses to cope with uses
   that aren't linked up yet.  */

static inline bool
single_use (tree t)
{
  return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
}

/* Return true if math operations should be canonicalized,
   e.g. sqrt(sqrt(x)) -> pow(x, 0.25).  */

static inline bool
canonicalize_math_p ()
{
  return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
}