gcc/gimple-low.c
1 /* GIMPLE lowering pass. Converts High GIMPLE into Low GIMPLE.
3 Copyright (C) 2003-2017 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "fold-const.h"
29 #include "tree-nested.h"
30 #include "calls.h"
31 #include "gimple-iterator.h"
32 #include "gimple-low.h"
33 #include "stor-layout.h"
34 #include "target.h"
35 #include "gimplify.h"
37 /* The differences between High GIMPLE and Low GIMPLE are the
38 following:
40 1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).
42 2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
43 flow and exception regions are built as an on-the-side region
44 hierarchy (See tree-eh.c:lower_eh_constructs).
46 3- Multiple identical return statements are grouped into a single
47 return, with gotos to that unique return site (see the sketch below). */
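/* As an illustration (a hand-written sketch, not the exact dump of any
   particular GCC version), a function such as

       int f (int b)
       {
         if (b)
           return 1;
         return 1;
       }

   enters this pass as a single GIMPLE_BIND containing the conditional and
   two separate GIMPLE_RETURN statements.  After lowering, the bind has been
   flattened into the statement list and the two identical returns share one
   return site at the end of the function, roughly:

       if (b != 0) goto <then>; else goto <else>;
       <then>: goto <retlab>;
       <else>: goto <retlab>;
       <retlab>: return 1;  */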
49 /* Match a return statement with a label. During lowering, we identify
50 identical return statements and replace duplicates with a jump to
51 the corresponding label. */
52 struct return_statements_t
54 tree label;
55 greturn *stmt;
57 typedef struct return_statements_t return_statements_t;
60 struct lower_data
62 /* Block the current statement belongs to. */
63 tree block;
65 /* A vector of label and return statements to be moved to the end
66 of the function. */
67 vec<return_statements_t> return_statements;
69 /* True if the current statement cannot fall through. */
70 bool cannot_fallthru;
73 static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
74 static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
75 static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
76 static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
77 static void lower_builtin_setjmp (gimple_stmt_iterator *);
78 static void lower_builtin_fpclassify (gimple_stmt_iterator *);
79 static void lower_builtin_isnan (gimple_stmt_iterator *);
80 static void lower_builtin_isinfinite (gimple_stmt_iterator *);
81 static void lower_builtin_isnormal (gimple_stmt_iterator *);
82 static void lower_builtin_iszero (gimple_stmt_iterator *);
83 static void lower_builtin_issubnormal (gimple_stmt_iterator *);
84 static void lower_builtin_isfinite (gimple_stmt_iterator *);
85 static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
88 /* Lower the body of current_function_decl from High GIMPLE into Low
89 GIMPLE. */
91 static unsigned int
92 lower_function_body (void)
94 struct lower_data data;
95 gimple_seq body = gimple_body (current_function_decl);
96 gimple_seq lowered_body;
97 gimple_stmt_iterator i;
98 gimple *bind;
99 gimple *x;
101 /* The gimplifier should've left a body of exactly one statement,
102 namely a GIMPLE_BIND. */
103 gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
104 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);
106 memset (&data, 0, sizeof (data));
107 data.block = DECL_INITIAL (current_function_decl);
108 BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
109 BLOCK_CHAIN (data.block) = NULL_TREE;
110 TREE_ASM_WRITTEN (data.block) = 1;
111 data.return_statements.create (8);
113 bind = gimple_seq_first_stmt (body);
114 lowered_body = NULL;
115 gimple_seq_add_stmt (&lowered_body, bind);
116 i = gsi_start (lowered_body);
117 lower_gimple_bind (&i, &data);
119 i = gsi_last (lowered_body);
121 /* If the function falls off the end, we need a null return statement.
122 If we've already got one in the return_statements vector, we don't
123 need to do anything special. Otherwise build one by hand. */
124 bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
125 if (may_fallthru
126 && (data.return_statements.is_empty ()
127 || (gimple_return_retval (data.return_statements.last().stmt)
128 != NULL)))
130 x = gimple_build_return (NULL);
131 gimple_set_location (x, cfun->function_end_locus);
132 gimple_set_block (x, DECL_INITIAL (current_function_decl));
133 gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
134 may_fallthru = false;
137 /* If we lowered any return statements, emit the representative
138 at the end of the function. */
139 while (!data.return_statements.is_empty ())
141 return_statements_t t = data.return_statements.pop ();
142 x = gimple_build_label (t.label);
143 gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
144 gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
145 if (may_fallthru)
147 /* Remove the line number from the representative return statement.
148 It now fills in for the fallthru too. Failure to remove this
149 will result in incorrect results for coverage analysis. */
150 gimple_set_location (t.stmt, UNKNOWN_LOCATION);
151 may_fallthru = false;
155 /* Once the old body has been lowered, replace it with the new
156 lowered sequence. */
157 gimple_set_body (current_function_decl, lowered_body);
159 gcc_assert (data.block == DECL_INITIAL (current_function_decl));
160 BLOCK_SUBBLOCKS (data.block)
161 = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));
163 clear_block_marks (data.block);
164 data.return_statements.release ();
165 return 0;
168 namespace {
170 const pass_data pass_data_lower_cf =
172 GIMPLE_PASS, /* type */
173 "lower", /* name */
174 OPTGROUP_NONE, /* optinfo_flags */
175 TV_NONE, /* tv_id */
176 PROP_gimple_any, /* properties_required */
177 PROP_gimple_lcf, /* properties_provided */
178 0, /* properties_destroyed */
179 0, /* todo_flags_start */
180 0, /* todo_flags_finish */
183 class pass_lower_cf : public gimple_opt_pass
185 public:
186 pass_lower_cf (gcc::context *ctxt)
187 : gimple_opt_pass (pass_data_lower_cf, ctxt)
190 /* opt_pass methods: */
191 virtual unsigned int execute (function *) { return lower_function_body (); }
193 }; // class pass_lower_cf
195 } // anon namespace
197 gimple_opt_pass *
198 make_pass_lower_cf (gcc::context *ctxt)
200 return new pass_lower_cf (ctxt);
203 /* Lower sequence SEQ. Unlike gimplification the statements are not relowered
204 when they are changed -- if this has to be done, the lowering routine must
205 do it explicitly. DATA is passed through the recursion. */
207 static void
208 lower_sequence (gimple_seq *seq, struct lower_data *data)
210 gimple_stmt_iterator gsi;
212 for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
213 lower_stmt (&gsi, data);
217 /* Lower the OpenMP directive statement pointed to by GSI. DATA is
218 passed through the recursion. */
220 static void
221 lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
223 gimple *stmt;
225 stmt = gsi_stmt (*gsi);
227 lower_sequence (gimple_omp_body_ptr (stmt), data);
228 gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
229 gimple_omp_set_body (stmt, NULL);
230 gsi_next (gsi);
234 /* Lower statement GSI. DATA is passed through the recursion. We try to
235 track the fallthruness of statements and get rid of unreachable return
236 statements in order to prevent the EH lowering pass from adding useless
237 edges that can cause bogus warnings to be issued later; this guess need
238 not be 100% accurate, simply be conservative and reset cannot_fallthru
239 to false if we don't know. */
241 static void
242 lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
244 gimple *stmt = gsi_stmt (*gsi);
246 gimple_set_block (stmt, data->block);
248 switch (gimple_code (stmt))
250 case GIMPLE_BIND:
251 lower_gimple_bind (gsi, data);
252 /* Propagate fallthruness. */
253 return;
255 case GIMPLE_COND:
256 case GIMPLE_GOTO:
257 case GIMPLE_SWITCH:
258 data->cannot_fallthru = true;
259 gsi_next (gsi);
260 return;
262 case GIMPLE_RETURN:
263 if (data->cannot_fallthru)
265 gsi_remove (gsi, false);
266 /* Propagate fallthruness. */
268 else
270 lower_gimple_return (gsi, data);
271 data->cannot_fallthru = true;
273 return;
275 case GIMPLE_TRY:
276 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
277 lower_try_catch (gsi, data);
278 else
280 /* It must be a GIMPLE_TRY_FINALLY. */
281 bool cannot_fallthru;
282 lower_sequence (gimple_try_eval_ptr (stmt), data);
283 cannot_fallthru = data->cannot_fallthru;
285 /* The finally clause is always executed after the try clause,
286 so if it does not fall through, then the try-finally will not
287 fall through. Otherwise, if the try clause does not fall
288 through, then when the finally clause falls through it will
289 resume execution wherever the try clause was going. So the
290 whole try-finally will only fall through if both the try
291 clause and the finally clause fall through. */
292 data->cannot_fallthru = false;
293 lower_sequence (gimple_try_cleanup_ptr (stmt), data);
294 data->cannot_fallthru |= cannot_fallthru;
295 gsi_next (gsi);
297 return;
299 case GIMPLE_EH_ELSE:
301 geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
302 lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
303 lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
305 break;
307 case GIMPLE_NOP:
308 case GIMPLE_ASM:
309 case GIMPLE_ASSIGN:
310 case GIMPLE_PREDICT:
311 case GIMPLE_LABEL:
312 case GIMPLE_EH_MUST_NOT_THROW:
313 case GIMPLE_OMP_FOR:
314 case GIMPLE_OMP_SECTIONS:
315 case GIMPLE_OMP_SECTIONS_SWITCH:
316 case GIMPLE_OMP_SECTION:
317 case GIMPLE_OMP_SINGLE:
318 case GIMPLE_OMP_MASTER:
319 case GIMPLE_OMP_TASKGROUP:
320 case GIMPLE_OMP_ORDERED:
321 case GIMPLE_OMP_CRITICAL:
322 case GIMPLE_OMP_RETURN:
323 case GIMPLE_OMP_ATOMIC_LOAD:
324 case GIMPLE_OMP_ATOMIC_STORE:
325 case GIMPLE_OMP_CONTINUE:
326 break;
328 case GIMPLE_CALL:
330 tree decl = gimple_call_fndecl (stmt);
331 unsigned i;
333 for (i = 0; i < gimple_call_num_args (stmt); i++)
335 tree arg = gimple_call_arg (stmt, i);
336 if (EXPR_P (arg))
337 TREE_SET_BLOCK (arg, data->block);
340 if (decl
341 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
343 switch (DECL_FUNCTION_CODE (decl))
345 case BUILT_IN_SETJMP:
346 lower_builtin_setjmp (gsi);
347 data->cannot_fallthru = false;
348 return;
350 case BUILT_IN_POSIX_MEMALIGN:
351 if (flag_tree_bit_ccp
352 && gimple_builtin_call_types_compatible_p (stmt, decl))
354 lower_builtin_posix_memalign (gsi);
355 return;
357 break;
359 case BUILT_IN_FPCLASSIFY:
360 lower_builtin_fpclassify (gsi);
361 data->cannot_fallthru = false;
362 return;
364 CASE_FLT_FN (BUILT_IN_ISINF):
365 case BUILT_IN_ISINFD32:
366 case BUILT_IN_ISINFD64:
367 case BUILT_IN_ISINFD128:
368 lower_builtin_isinfinite (gsi);
369 data->cannot_fallthru = false;
370 return;
372 case BUILT_IN_ISNAND32:
373 case BUILT_IN_ISNAND64:
374 case BUILT_IN_ISNAND128:
375 CASE_FLT_FN (BUILT_IN_ISNAN):
376 lower_builtin_isnan (gsi);
377 data->cannot_fallthru = false;
378 return;
380 case BUILT_IN_ISNORMAL:
381 lower_builtin_isnormal (gsi);
382 data->cannot_fallthru = false;
383 return;
385 case BUILT_IN_ISZERO:
386 lower_builtin_iszero (gsi);
387 data->cannot_fallthru = false;
388 return;
390 case BUILT_IN_ISSUBNORMAL:
391 lower_builtin_issubnormal (gsi);
392 data->cannot_fallthru = false;
393 return;
395 CASE_FLT_FN (BUILT_IN_FINITE):
396 case BUILT_IN_FINITED32:
397 case BUILT_IN_FINITED64:
398 case BUILT_IN_FINITED128:
399 case BUILT_IN_ISFINITE:
400 lower_builtin_isfinite (gsi);
401 data->cannot_fallthru = false;
402 return;
404 default:
405 break;
409 if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
411 data->cannot_fallthru = true;
412 gsi_next (gsi);
413 return;
416 break;
418 case GIMPLE_OMP_PARALLEL:
419 case GIMPLE_OMP_TASK:
420 case GIMPLE_OMP_TARGET:
421 case GIMPLE_OMP_TEAMS:
422 case GIMPLE_OMP_GRID_BODY:
423 data->cannot_fallthru = false;
424 lower_omp_directive (gsi, data);
425 data->cannot_fallthru = false;
426 return;
428 case GIMPLE_TRANSACTION:
429 lower_sequence (gimple_transaction_body_ptr (
430 as_a <gtransaction *> (stmt)),
431 data);
432 break;
434 default:
435 gcc_unreachable ();
438 data->cannot_fallthru = false;
439 gsi_next (gsi);
442 /* Lower a GIMPLE_BIND statement at GSI. DATA is passed through the recursion. */
444 static void
445 lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
447 tree old_block = data->block;
448 gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
449 tree new_block = gimple_bind_block (stmt);
451 if (new_block)
453 if (new_block == old_block)
455 /* The outermost block of the original function may not be the
456 outermost statement chain of the gimplified function. So we
457 may see the outermost block just inside the function. */
458 gcc_assert (new_block == DECL_INITIAL (current_function_decl));
459 new_block = NULL;
461 else
463 /* We do not expect to handle duplicate blocks. */
464 gcc_assert (!TREE_ASM_WRITTEN (new_block));
465 TREE_ASM_WRITTEN (new_block) = 1;
467 /* Block tree may get clobbered by inlining. Normally this would
468 be fixed in rest_of_decl_compilation using block notes, but
469 since we are not going to emit them, it is up to us. */
470 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
471 BLOCK_SUBBLOCKS (old_block) = new_block;
472 BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
473 BLOCK_SUPERCONTEXT (new_block) = old_block;
475 data->block = new_block;
479 record_vars (gimple_bind_vars (stmt));
481 /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
482 need gimple_bind_vars. */
483 tree next;
484 /* BLOCK_VARS and gimple_bind_vars share a common sub-chain. Find
485 it by marking all BLOCK_VARS. */
486 if (gimple_bind_block (stmt))
487 for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
488 TREE_VISITED (t) = 1;
489 for (tree var = gimple_bind_vars (stmt);
490 var && ! TREE_VISITED (var); var = next)
492 next = DECL_CHAIN (var);
493 DECL_CHAIN (var) = NULL_TREE;
495 /* Unmark BLOCK_VARS. */
496 if (gimple_bind_block (stmt))
497 for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
498 TREE_VISITED (t) = 0;
500 lower_sequence (gimple_bind_body_ptr (stmt), data);
502 if (new_block)
504 gcc_assert (data->block == new_block);
506 BLOCK_SUBBLOCKS (new_block)
507 = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
508 data->block = old_block;
511 /* The GIMPLE_BIND no longer carries any useful information -- kill it. */
512 gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
513 gsi_remove (gsi, false);
516 /* Same as above, but for a GIMPLE_TRY_CATCH. */
518 static void
519 lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
521 bool cannot_fallthru;
522 gimple *stmt = gsi_stmt (*gsi);
523 gimple_stmt_iterator i;
525 /* We don't handle GIMPLE_TRY_FINALLY. */
526 gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);
528 lower_sequence (gimple_try_eval_ptr (stmt), data);
529 cannot_fallthru = data->cannot_fallthru;
531 i = gsi_start (*gimple_try_cleanup_ptr (stmt));
532 switch (gimple_code (gsi_stmt (i)))
534 case GIMPLE_CATCH:
535 /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
536 catch expression and a body. The whole try/catch may fall
537 through iff any of the catch bodies falls through. */
538 for (; !gsi_end_p (i); gsi_next (&i))
540 data->cannot_fallthru = false;
541 lower_sequence (gimple_catch_handler_ptr (
542 as_a <gcatch *> (gsi_stmt (i))),
543 data);
544 if (!data->cannot_fallthru)
545 cannot_fallthru = false;
547 break;
549 case GIMPLE_EH_FILTER:
550 /* The exception filter expression only matters if there is an
551 exception. If the exception does not match EH_FILTER_TYPES,
552 we will execute EH_FILTER_FAILURE, and we will fall through
553 if that falls through. If the exception does match
554 EH_FILTER_TYPES, the stack unwinder will continue up the
555 stack, so we will not fall through. We don't know whether we
556 will throw an exception which matches EH_FILTER_TYPES or not,
557 so we just ignore EH_FILTER_TYPES and assume that we might
558 throw an exception which doesn't match. */
559 data->cannot_fallthru = false;
560 lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
561 if (!data->cannot_fallthru)
562 cannot_fallthru = false;
563 break;
565 default:
566 /* This case represents statements to be executed when an
567 exception occurs. Those statements are implicitly followed
568 by a GIMPLE_RESX to resume execution after the exception. So
569 in this case the try/catch never falls through. */
570 data->cannot_fallthru = false;
571 lower_sequence (gimple_try_cleanup_ptr (stmt), data);
572 break;
575 data->cannot_fallthru = cannot_fallthru;
576 gsi_next (gsi);
580 /* Try to determine whether a TRY_CATCH expression can fall through.
581 This is a subroutine of gimple_stmt_may_fallthru. */
583 static bool
584 gimple_try_catch_may_fallthru (gtry *stmt)
586 gimple_stmt_iterator i;
588 /* We don't handle GIMPLE_TRY_FINALLY. */
589 gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);
591 /* If the TRY block can fall through, the whole TRY_CATCH can
592 fall through. */
593 if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
594 return true;
596 i = gsi_start (*gimple_try_cleanup_ptr (stmt));
597 switch (gimple_code (gsi_stmt (i)))
599 case GIMPLE_CATCH:
600 /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
601 catch expression and a body. The whole try/catch may fall
602 through iff any of the catch bodies falls through. */
603 for (; !gsi_end_p (i); gsi_next (&i))
605 if (gimple_seq_may_fallthru (gimple_catch_handler (
606 as_a <gcatch *> (gsi_stmt (i)))))
607 return true;
609 return false;
611 case GIMPLE_EH_FILTER:
612 /* The exception filter expression only matters if there is an
613 exception. If the exception does not match EH_FILTER_TYPES,
614 we will execute EH_FILTER_FAILURE, and we will fall through
615 if that falls through. If the exception does match
616 EH_FILTER_TYPES, the stack unwinder will continue up the
617 stack, so we will not fall through. We don't know whether we
618 will throw an exception which matches EH_FILTER_TYPES or not,
619 so we just ignore EH_FILTER_TYPES and assume that we might
620 throw an exception which doesn't match. */
621 return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));
623 default:
624 /* This case represents statements to be executed when an
625 exception occurs. Those statements are implicitly followed
626 by a GIMPLE_RESX to resume execution after the exception. So
627 in this case the try/catch never falls through. */
628 return false;
633 /* Try to determine if we can continue executing the statement
634 immediately following STMT. This guess need not be 100% accurate;
635 simply be conservative and return true if we don't know. This is
636 used only to avoid stupidly generating extra code. If we're wrong,
637 we'll just delete the extra code later. */
639 bool
640 gimple_stmt_may_fallthru (gimple *stmt)
642 if (!stmt)
643 return true;
645 switch (gimple_code (stmt))
647 case GIMPLE_GOTO:
648 case GIMPLE_RETURN:
649 case GIMPLE_RESX:
650 /* Easy cases. If the last statement of the seq implies
651 control transfer, then we can't fall through. */
652 return false;
654 case GIMPLE_SWITCH:
655 /* Switch has already been lowered and represents a branch
656 to a selected label and hence can't fall through. */
657 return false;
659 case GIMPLE_COND:
660 /* GIMPLE_COND's are already lowered into a two-way branch. They
661 can't fall through. */
662 return false;
664 case GIMPLE_BIND:
665 return gimple_seq_may_fallthru (
666 gimple_bind_body (as_a <gbind *> (stmt)));
668 case GIMPLE_TRY:
669 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
670 return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));
672 /* It must be a GIMPLE_TRY_FINALLY. */
674 /* The finally clause is always executed after the try clause,
675 so if it does not fall through, then the try-finally will not
676 fall through. Otherwise, if the try clause does not fall
677 through, then when the finally clause falls through it will
678 resume execution wherever the try clause was going. So the
679 whole try-finally will only fall through if both the try
680 clause and the finally clause fall through. */
681 return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
682 && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));
684 case GIMPLE_EH_ELSE:
686 geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
687 return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
688 || gimple_seq_may_fallthru (gimple_eh_else_e_body (
689 eh_else_stmt)));
692 case GIMPLE_CALL:
693 /* Functions that do not return do not fall through. */
694 return !gimple_call_noreturn_p (stmt);
696 default:
697 return true;
702 /* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ. */
704 bool
705 gimple_seq_may_fallthru (gimple_seq seq)
707 return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
711 /* Lower a GIMPLE_RETURN GSI. DATA is passed through the recursion. */
713 static void
714 lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
716 greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
717 gimple *t;
718 int i;
719 return_statements_t tmp_rs;
721 /* Match this up with an existing return statement that's been created. */
722 for (i = data->return_statements.length () - 1;
723 i >= 0; i--)
725 tmp_rs = data->return_statements[i];
727 if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
729 /* Remove the line number from the representative return statement.
730 It now fills in for many such returns. Failure to remove this
731 will result in incorrect results for coverage analysis. */
732 gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);
734 goto found;
738 /* Not found. Create a new label and record the return statement. */
739 tmp_rs.label = create_artificial_label (cfun->function_end_locus);
740 tmp_rs.stmt = stmt;
741 data->return_statements.safe_push (tmp_rs);
743 /* Generate a goto statement and remove the return statement. */
744 found:
745 /* When not optimizing, make sure user returns are preserved. */
746 if (!optimize && gimple_has_location (stmt))
747 DECL_ARTIFICIAL (tmp_rs.label) = 0;
748 t = gimple_build_goto (tmp_rs.label);
749 gimple_set_location (t, gimple_location (stmt));
750 gimple_set_block (t, gimple_block (stmt));
751 gsi_insert_before (gsi, t, GSI_SAME_STMT);
752 gsi_remove (gsi, false);
755 /* Lower a __builtin_setjmp GSI.
757 __builtin_setjmp is passed a pointer to an array of five words (not
758 all will be used on all machines). It operates similarly to the C
759 library function of the same name, but is more efficient.
761 It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
762 __builtin_setjmp_receiver.
764 After full lowering, the body of the function should look like:
767 int D.1844;
768 int D.2844;
770 [...]
772 __builtin_setjmp_setup (&buf, &<D1847>);
773 D.1844 = 0;
774 goto <D1846>;
775 <D1847>:;
776 __builtin_setjmp_receiver (&<D1847>);
777 D.1844 = 1;
778 <D1846>:;
779 if (D.1844 == 0) goto <D1848>; else goto <D1849>;
781 [...]
783 __builtin_setjmp_setup (&buf, &<D2847>);
784 D.2844 = 0;
785 goto <D2846>;
786 <D2847>:;
787 __builtin_setjmp_receiver (&<D2847>);
788 D.2844 = 1;
789 <D2846>:;
790 if (D.2844 == 0) goto <D2848>; else goto <D2849>;
792 [...]
794 <D3850>:;
795 return;
798 During cfg creation an extra per-function (or per-OpenMP region)
799 block with ABNORMAL_DISPATCHER internal call will be added, unique
800 destination of all the abnormal call edges and the unique source of
801 all the abnormal edges to the receivers, thus keeping the complexity
802 explosion localized. */
804 static void
805 lower_builtin_setjmp (gimple_stmt_iterator *gsi)
807 gimple *stmt = gsi_stmt (*gsi);
808 location_t loc = gimple_location (stmt);
809 tree cont_label = create_artificial_label (loc);
810 tree next_label = create_artificial_label (loc);
811 tree dest, t, arg;
812 gimple *g;
814 /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
815 these builtins are modelled as non-local label jumps to the label
816 that is passed to these two builtins, so pretend we have a non-local
817 label during GIMPLE passes too. See PR60003. */
818 cfun->has_nonlocal_label = 1;
820 /* NEXT_LABEL is the label __builtin_longjmp will jump to. Its address is
821 passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver. */
822 FORCED_LABEL (next_label) = 1;
824 tree orig_dest = dest = gimple_call_lhs (stmt);
825 if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
826 dest = create_tmp_reg (TREE_TYPE (orig_dest));
828 /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert. */
829 arg = build_addr (next_label);
830 t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
831 g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
832 gimple_set_location (g, loc);
833 gimple_set_block (g, gimple_block (stmt));
834 gsi_insert_before (gsi, g, GSI_SAME_STMT);
836 /* Build 'DEST = 0' and insert. */
837 if (dest)
839 g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
840 gimple_set_location (g, loc);
841 gimple_set_block (g, gimple_block (stmt));
842 gsi_insert_before (gsi, g, GSI_SAME_STMT);
845 /* Build 'goto CONT_LABEL' and insert. */
846 g = gimple_build_goto (cont_label);
847 gsi_insert_before (gsi, g, GSI_SAME_STMT);
849 /* Build 'NEXT_LABEL:' and insert. */
850 g = gimple_build_label (next_label);
851 gsi_insert_before (gsi, g, GSI_SAME_STMT);
853 /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert. */
854 arg = build_addr (next_label);
855 t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
856 g = gimple_build_call (t, 1, arg);
857 gimple_set_location (g, loc);
858 gimple_set_block (g, gimple_block (stmt));
859 gsi_insert_before (gsi, g, GSI_SAME_STMT);
861 /* Build 'DEST = 1' and insert. */
862 if (dest)
864 g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
865 integer_one_node));
866 gimple_set_location (g, loc);
867 gimple_set_block (g, gimple_block (stmt));
868 gsi_insert_before (gsi, g, GSI_SAME_STMT);
871 /* Build 'CONT_LABEL:' and insert. */
872 g = gimple_build_label (cont_label);
873 gsi_insert_before (gsi, g, GSI_SAME_STMT);
875 /* Build orig_dest = dest if necessary. */
876 if (dest != orig_dest)
878 g = gimple_build_assign (orig_dest, dest);
879 gsi_insert_before (gsi, g, GSI_SAME_STMT);
882 /* Remove the call to __builtin_setjmp. */
883 gsi_remove (gsi, false);
886 /* If ARG is not already a variable or an SSA_NAME, create a new temporary
887 TMP, bind ARG to TMP and emit the binding into SEQ, then return TMP.
888 Otherwise return ARG unchanged. */
889 static tree
890 emit_tree_and_return_var (gimple_seq *seq, tree arg)
892 if (TREE_CODE (arg) == SSA_NAME || VAR_P (arg))
893 return arg;
895 tree tmp = create_tmp_reg (TREE_TYPE (arg));
896 gassign *stm = gimple_build_assign (tmp, arg);
897 gimple_seq_add_stmt (seq, stm);
898 return tmp;
901 /* This function builds an if statement that uses explicit branches instead
902 of becoming a ternary conditional select. It assumes that the false branch
903 simply falls through to the statements following the condition. The code
904 emitted looks like:
906 if (COND)
907 RESULT_VARIABLE = TRUE_BRANCH
908 GOTO EXIT_LABEL
909 else
912 SEQ is the gimple sequence/buffer to emit any new bindings to.
913 RESULT_VARIABLE is the variable to assign to if COND is true.
914 EXIT_LABEL is the label to jump to if COND is true.
915 COND is the condition to use in the if statement.
916 TRUE_BRANCH is the value to assign to RESULT_VARIABLE if COND is true. */
917 static void
918 emit_tree_cond (gimple_seq *seq, tree result_variable, tree exit_label,
919 tree cond, tree true_branch)
921 /* Create labels for fall through. */
922 tree true_label = create_artificial_label (UNKNOWN_LOCATION);
923 tree false_label = create_artificial_label (UNKNOWN_LOCATION);
924 gcond *stmt = gimple_build_cond_from_tree (cond, true_label, false_label);
925 gimple_seq_add_stmt (seq, stmt);
927 /* Build the true case. */
928 gimple_seq_add_stmt (seq, gimple_build_label (true_label));
929 tree value = TREE_CONSTANT (true_branch)
930 ? true_branch
931 : emit_tree_and_return_var (seq, true_branch);
932 gimple_seq_add_stmt (seq, gimple_build_assign (result_variable, value));
933 gimple_seq_add_stmt (seq, gimple_build_goto (exit_label));
935 /* Build the false case. */
936 gimple_seq_add_stmt (seq, gimple_build_label (false_label));
939 /* This function returns a variable containing ARG reinterpreted as an
940 integer.
942 SEQ is the gimple sequence/buffer to write any new bindings to.
943 ARG is the floating point number to reinterpret as an integer.
944 LOC is the location to use when doing folding operations. */
945 static tree
946 get_num_as_int (gimple_seq *seq, tree arg, location_t loc)
948 tree type = TREE_TYPE (arg);
950 const HOST_WIDE_INT type_width = TYPE_PRECISION (type);
952 /* Re-interpret the float as an unsigned integer type
953 with equal precision. */
954 tree int_arg_type = build_nonstandard_integer_type (type_width, true);
955 tree conv_arg = fold_build1_loc (loc, VIEW_CONVERT_EXPR, int_arg_type, arg);
956 return emit_tree_and_return_var (seq, conv_arg);
959 /* Check if ARG, the floating point number being classified, is laid out
960 closely enough to IEEE 754 format for the integer fast path to be used. */
961 static bool
962 use_ieee_int_mode (tree arg)
964 tree type = TREE_TYPE (arg);
965 machine_mode mode = TYPE_MODE (type);
967 const real_format *format = REAL_MODE_FORMAT (mode);
968 machine_mode imode = int_mode_for_mode (mode);
969 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
971 return (format->is_binary_ieee_compatible
972 && FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN
973 /* Check if there's a usable integer mode. */
974 && imode != BLKmode
975 && targetm.scalar_mode_supported_p (imode)
976 && !is_ibm_extended);
979 /* Perform some IBM extended format fixups on ARG for use by FP functions.
980 This is done by ignoring the lower 64 bits of the number.
982 MODE is the machine mode of ARG.
983 TYPE is the type of ARG.
984 LOC is the location to be used in fold functions; usually the location
985 of the definition of ARG. Returns true iff the fixups were applied. */
986 static bool
987 perform_ibm_extended_fixups (tree *arg, machine_mode *mode,
988 tree *type, location_t loc)
990 bool is_ibm_extended = MODE_COMPOSITE_P (*mode);
991 if (is_ibm_extended)
993 /* NaN and Inf are encoded in the high-order double value
994 only. The low-order value is not significant. */
995 *type = double_type_node;
996 *mode = DFmode;
997 *arg = fold_build1_loc (loc, NOP_EXPR, *type, *arg);
1000 return is_ibm_extended;
1003 /* Generates code to check if ARG is a normal number. For the FP case we check
1004 MIN_VALUE (ARG) <= ABS (ARG) < INF and for the INT case we check the exp and
1005 mantissa bits. Returns a variable containing a boolean which has the result
1006 of the check.
1008 SEQ is the buffer to use to emit the gimple instructions into.
1009 LOC is the location to use during fold calls. */
1010 static tree
1011 is_normal (gimple_seq *seq, tree arg, location_t loc)
1013 tree type = TREE_TYPE (arg);
1015 machine_mode mode = TYPE_MODE (type);
1016 const real_format *format = REAL_MODE_FORMAT (mode);
1017 const tree bool_type = boolean_type_node;
1020 /* If not using optimized route then exit early. */
1021 if (!use_ieee_int_mode (arg))
1023 tree orig_arg = arg;
1024 machine_mode orig_mode = mode;
1025 if (TREE_CODE (arg) != SSA_NAME
1026 && (TREE_ADDRESSABLE (arg) != 0
1027 || (TREE_CODE (arg) != PARM_DECL
1028 && (!VAR_P (arg) || TREE_STATIC (arg)))))
1029 orig_arg = save_expr (arg);
1031 /* Perform IBM extended format fixups if required. */
1032 bool is_ibm_extended = perform_ibm_extended_fixups (&arg, &mode,
1033 &type, loc);
1035 REAL_VALUE_TYPE rinf, rmin;
1036 tree arg_p = fold_build1_loc (loc, ABS_EXPR, type, arg);
1038 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
1039 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
1040 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
1042 char buf[128];
1043 real_inf (&rinf);
1044 get_min_float (REAL_MODE_FORMAT (orig_mode), buf, sizeof (buf));
1045 real_from_string (&rmin, buf);
1047 tree inf_exp = build_call_expr (islt_fn, 2, arg_p,
1048 build_real (type, rinf));
1049 tree min_exp = build_real (type, rmin);
1050 if (is_ibm_extended)
1052 /* Testing the high end of the range is done just using
1053 the high double, using the same test as isfinite().
1054 For the subnormal end of the range we first test the
1055 high double, then if its magnitude is equal to the
1056 limit of 0x1p-969, we test whether the low double is
1057 non-zero and opposite sign to the high double. */
1058 tree gt_min = build_call_expr (isgt_fn, 2, arg_p, min_exp);
1059 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
1060 arg_p, min_exp);
1061 tree as_complex = build1 (VIEW_CONVERT_EXPR,
1062 complex_double_type_node, orig_arg);
1063 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
1064 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
1065 tree zero = build_real (type, dconst0);
1066 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
1067 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
1068 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
1069 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
1070 fold_build3 (COND_EXPR,
1071 integer_type_node,
1072 hilt, logt, lolt));
1073 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
1074 eq_min, ok_lo);
1075 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
1076 gt_min, eq_min);
1078 else
1080 min_exp = build_call_expr (isge_fn, 2, arg_p, min_exp);
1083 push_gimplify_context ();
1084 gimplify_expr (&min_exp, seq, NULL, is_gimple_val, fb_either);
1085 gimplify_expr (&inf_exp, seq, NULL, is_gimple_val, fb_either);
1087 tree res
1088 = fold_build2_loc (loc, BIT_AND_EXPR, bool_type,
1089 emit_tree_and_return_var (seq,
1090 gimple_boolify (min_exp)),
1091 emit_tree_and_return_var (seq,
1092 gimple_boolify (inf_exp)));
1093 pop_gimplify_context (NULL);
1095 return emit_tree_and_return_var (seq, res);
1098 const tree int_type = unsigned_type_node;
1099 const int exp_bits = (GET_MODE_SIZE (mode) * BITS_PER_UNIT) - format->p;
1100 const int exp_mask = (1 << exp_bits) - 1;
1102 /* Get the number reinterpreted as an integer. */
1103 tree int_arg = get_num_as_int (seq, arg, loc);
1105 /* Extract the exponent bits from the float, at the position where we expect
1106 the exponent to be. We create a new type because BIT_FIELD_REF does not
1107 allow extracting fewer bits than the precision of the storage variable. */
1108 tree exp_tmp
1109 = fold_build3_loc (loc, BIT_FIELD_REF,
1110 build_nonstandard_integer_type (exp_bits, true),
1111 int_arg,
1112 build_int_cstu (int_type, exp_bits),
1113 build_int_cstu (int_type, format->p - 1));
1114 tree exp_bitfield = emit_tree_and_return_var (seq, exp_tmp);
1116 /* Re-interpret the extracted exponent bits as a 32 bit int.
1117 This allows us to continue doing operations as int_type. */
1118 tree exp
1119 = emit_tree_and_return_var (seq, fold_build1_loc (loc, NOP_EXPR, int_type,
1120 exp_bitfield));
1122 /* exp_mask & ~1. */
1123 tree mask_check
1124 = fold_build2_loc (loc, BIT_AND_EXPR, int_type,
1125 build_int_cstu (int_type, exp_mask),
1126 fold_build1_loc (loc, BIT_NOT_EXPR, int_type,
1127 build_int_cstu (int_type, 1)));
1129 /* (exp + 1) & mask_check.
1130 Check to see if exp is neither all 0s nor all 1s. */
1131 tree exp_check
1132 = fold_build2_loc (loc, BIT_AND_EXPR, int_type,
1133 emit_tree_and_return_var (seq,
1134 fold_build2_loc (loc, PLUS_EXPR, int_type, exp,
1135 build_int_cstu (int_type, 1))),
1136 mask_check);
1138 tree res = fold_build2_loc (loc, NE_EXPR, boolean_type_node,
1139 build_int_cstu (int_type, 0),
1140 emit_tree_and_return_var (seq, exp_check));
1142 return emit_tree_and_return_var (seq, res);
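/* A worked example of the integer fast path above for an IEEE binary32
   ARG (32-bit mode, format->p == 24; the concrete values are shown only
   for illustration):

     exp_bits   = 32 - 24      = 8
     exp_mask   = (1 << 8) - 1 = 0xff
     mask_check = 0xff & ~1    = 0xfe

   The BIT_FIELD_REF extracts the 8 exponent bits (bit positions 23..30 of
   the reinterpreted value), and ARG is classified as normal iff
   ((exp + 1) & 0xfe) != 0:

     exp == 0x00 (zero/subnormal):  (0x00 + 1) & 0xfe == 0  -> not normal
     exp == 0xff (Inf/NaN):         (0xff + 1) & 0xfe == 0  -> not normal
     0x01 <= exp <= 0xfe:           (exp  + 1) & 0xfe != 0  -> normal  */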
1145 /* Generates code to check if ARG is a zero. For both the FP and INT case we
1146 check if ARG == 0 (modulo sign bit). Returns a variable containing a boolean
1147 which has the result of the check.
1149 SEQ is the buffer to use to emit the gimple instructions into.
1150 LOC is the location to use during fold calls. */
1151 static tree
1152 is_zero (gimple_seq *seq, tree arg, location_t loc)
1154 tree type = TREE_TYPE (arg);
1156 /* If not using optimized route then exit early. */
1157 if (!use_ieee_int_mode (arg))
1159 machine_mode mode = TYPE_MODE (type);
1160 /* Perform IBM extended format fixups if required. */
1161 perform_ibm_extended_fixups (&arg, &mode, &type, loc);
1163 tree res = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, arg,
1164 build_real (type, dconst0));
1165 return emit_tree_and_return_var (seq, res);
1168 const HOST_WIDE_INT type_width = TYPE_PRECISION (type);
1170 tree int_arg_type = build_nonstandard_integer_type (type_width, true);
1172 /* Get the number reinterpreted as an integer.
1173 Shift left to remove the sign. */
1174 tree int_arg
1175 = fold_build2_loc (loc, LSHIFT_EXPR, int_arg_type,
1176 get_num_as_int (seq, arg, loc),
1177 build_int_cstu (int_arg_type, 1));
1179 /* num << 1 == 0.
1180 This checks to see if the number is zero. */
1181 tree zero_check
1182 = fold_build2_loc (loc, EQ_EXPR, boolean_type_node,
1183 build_int_cstu (int_arg_type, 0),
1184 emit_tree_and_return_var (seq, int_arg));
1186 return emit_tree_and_return_var (seq, zero_check);
1189 /* Generates code to check if ARG is a subnormal number. In the FP case we test
1190 fabs (ARG) != 0 && fabs (ARG) < MIN_VALUE (ARG) and in the INT case we check
1191 the exp and mantissa bits on ARG. Returns a variable containing a boolean
1192 which has the result of the check.
1194 SEQ is the buffer to use to emit the gimple instructions into.
1195 LOC is the location to use during fold calls. */
1196 static tree
1197 is_subnormal (gimple_seq *seq, tree arg, location_t loc)
1199 const tree bool_type = boolean_type_node;
1201 tree type = TREE_TYPE (arg);
1203 machine_mode mode = TYPE_MODE (type);
1204 const real_format *format = REAL_MODE_FORMAT (mode);
1205 const HOST_WIDE_INT type_width = TYPE_PRECISION (type);
1207 tree int_arg_type = build_nonstandard_integer_type (type_width, true);
1209 /* If not using optimized route then exit early. */
1210 if (!use_ieee_int_mode (arg))
1212 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
1213 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
1215 tree arg_p
1216 = emit_tree_and_return_var (seq, fold_build1_loc (loc, ABS_EXPR, type,
1217 arg));
1218 REAL_VALUE_TYPE r;
1219 char buf[128];
1220 get_min_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
1221 real_from_string (&r, buf);
1222 tree subnorm = build_call_expr (islt_fn, 2, arg_p, build_real (type, r));
1224 tree zero = build_call_expr (isgt_fn, 2, arg_p,
1225 build_real (type, dconst0));
1227 push_gimplify_context ();
1228 gimplify_expr (&subnorm, seq, NULL, is_gimple_val, fb_either);
1229 gimplify_expr (&zero, seq, NULL, is_gimple_val, fb_either);
1231 tree res
1232 = fold_build2_loc (loc, BIT_AND_EXPR, bool_type,
1233 emit_tree_and_return_var (seq,
1234 gimple_boolify (subnorm)),
1235 emit_tree_and_return_var (seq,
1236 gimple_boolify (zero)));
1237 pop_gimplify_context (NULL);
1239 return emit_tree_and_return_var (seq, res);
1242 /* Get the number reinterpreted as an integer.
1243 Shift left to remove the sign. */
1244 tree int_arg
1245 = fold_build2_loc (loc, LSHIFT_EXPR, int_arg_type,
1246 get_num_as_int (seq, arg, loc),
1247 build_int_cstu (int_arg_type, 1));
1249 /* Check for a zero exponent and a non-zero mantissa.
1250 This can be done with two comparisons: first remove the sign bit by
1251 shifting left, then check that the value is non-zero and no larger
1252 than the mantissa mask. */
1254 /* This creates a mask to be used to check the mantissa value in the shifted
1255 integer representation of the fpnum. */
1256 tree significant_bit = build_int_cstu (int_arg_type, format->p - 1);
1257 tree mantissa_mask
1258 = fold_build2_loc (loc, MINUS_EXPR, int_arg_type,
1259 fold_build2_loc (loc, LSHIFT_EXPR, int_arg_type,
1260 build_int_cstu (int_arg_type, 2),
1261 significant_bit),
1262 build_int_cstu (int_arg_type, 1));
1264 /* Check if exponent is zero and mantissa is not. */
1265 tree subnorm_cond_tmp
1266 = fold_build2_loc (loc, LE_EXPR, bool_type,
1267 emit_tree_and_return_var (seq, int_arg),
1268 mantissa_mask);
1270 tree subnorm_cond = emit_tree_and_return_var (seq, subnorm_cond_tmp);
1272 tree zero_cond
1273 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
1274 emit_tree_and_return_var (seq, int_arg),
1275 build_int_cstu (int_arg_type, 0));
1277 tree subnorm_check
1278 = fold_build2_loc (loc, BIT_AND_EXPR, boolean_type_node,
1279 emit_tree_and_return_var (seq, subnorm_cond),
1280 emit_tree_and_return_var (seq, zero_cond));
1282 return emit_tree_and_return_var (seq, subnorm_check);
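/* Continuing the IEEE binary32 example from is_normal above:
   significant_bit == 23, so mantissa_mask == (2 << 23) - 1 == 0xffffff.
   With the sign bit shifted out, a subnormal (exponent zero, mantissa
   non-zero) satisfies 0 < int_arg <= 0xffffff, while the smallest normal
   number (exponent 1, mantissa 0) already gives int_arg == 0x1000000 and
   fails the first comparison.  */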
1285 /* Generates code to check if ARG is an infinity. In the FP case we test
1286 FABS(ARG) == INF and in the INT case we check the bits on the exp and
1287 mantissa. Returns a variable containing a boolean which has the result
1288 of the check.
1290 SEQ is the buffer to use to emit the gimple instructions into.
1291 LOC is the location to use during fold calls. */
1292 static tree
1293 is_infinity (gimple_seq *seq, tree arg, location_t loc)
1295 tree type = TREE_TYPE (arg);
1297 machine_mode mode = TYPE_MODE (type);
1298 const tree bool_type = boolean_type_node;
1300 if (!HONOR_INFINITIES (mode))
1302 return build_int_cst (bool_type, false);
1305 /* If not using optimized route then exit early. */
1306 if (!use_ieee_int_mode (arg))
1308 /* Perform IBM extended format fixups if required. */
1309 perform_ibm_extended_fixups (&arg, &mode, &type, loc);
1311 tree arg_p
1312 = emit_tree_and_return_var (seq, fold_build1_loc (loc, ABS_EXPR, type,
1313 arg));
1314 REAL_VALUE_TYPE r;
1315 real_inf (&r);
1316 tree res = fold_build2_loc (loc, EQ_EXPR, bool_type, arg_p,
1317 build_real (type, r));
1319 return emit_tree_and_return_var (seq, res);
1322 const real_format *format = REAL_MODE_FORMAT (mode);
1323 const HOST_WIDE_INT type_width = TYPE_PRECISION (type);
1325 tree int_arg_type = build_nonstandard_integer_type (type_width, true);
1327 /* This creates a mask to be used to check the exp value in the shifted
1328 integer representation of the fpnum. */
1329 const int exp_bits = (GET_MODE_SIZE (mode) * BITS_PER_UNIT) - format->p;
1330 gcc_assert (format->p > 0);
1332 tree significant_bit = build_int_cstu (int_arg_type, format->p);
1333 tree exp_mask
1334 = fold_build2_loc (loc, MINUS_EXPR, int_arg_type,
1335 fold_build2_loc (loc, LSHIFT_EXPR, int_arg_type,
1336 build_int_cstu (int_arg_type, 2),
1337 build_int_cstu (int_arg_type,
1338 exp_bits - 1)),
1339 build_int_cstu (int_arg_type, 1));
1341 /* Get the number reinterpreted as an integer.
1342 Shift left to remove the sign. */
1343 tree int_arg
1344 = fold_build2_loc (loc, LSHIFT_EXPR, int_arg_type,
1345 get_num_as_int (seq, arg, loc),
1346 build_int_cstu (int_arg_type, 1));
1348 /* This constructs the bit pattern of a value whose exp has all bits set
1349 and whose mantissa has no bits set. */
1350 tree inf_mask
1351 = fold_build2_loc (loc, LSHIFT_EXPR, int_arg_type,
1352 exp_mask, significant_bit);
1354 /* Check if exponent has all bits set and mantissa is 0. */
1355 tree inf_check
1356 = emit_tree_and_return_var (seq,
1357 fold_build2_loc (loc, EQ_EXPR, bool_type,
1358 emit_tree_and_return_var (seq, int_arg),
1359 inf_mask));
1361 return emit_tree_and_return_var (seq, inf_check);
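/* Again for IEEE binary32: exp_bits == 8, so exp_mask == 0xff,
   significant_bit == 24 and inf_mask == 0xff000000.  With the sign bit
   shifted out, the comparison against inf_mask classifies the value as

     int_arg == 0xff000000   exponent all ones, mantissa zero     -> Inf
     int_arg >  0xff000000   exponent all ones, mantissa non-zero -> NaN
     int_arg <  0xff000000   exponent not all ones                -> finite

   which is why is_finite and is_nan below reuse the same mask with
   LT_EXPR and GT_EXPR respectively.  */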
1364 /* Generates code to check if ARG is a finite number. In the FP case we check
1365 if FABS(ARG) <= MAX_VALUE(ARG) and in the INT case we check the exp and
1366 mantissa bits. Returns a variable containing a boolean which has the result
1367 of the check.
1369 SEQ is the buffer to use to emit the gimple instructions into.
1370 LOC is the location to use during fold calls. */
1371 static tree
1372 is_finite (gimple_seq *seq, tree arg, location_t loc)
1374 tree type = TREE_TYPE (arg);
1376 machine_mode mode = TYPE_MODE (type);
1377 const tree bool_type = boolean_type_node;
1379 if (!HONOR_NANS (arg) && !HONOR_INFINITIES (arg))
1381 return build_int_cst (bool_type, true);
1384 /* If not using optimized route then exit early. */
1385 if (!use_ieee_int_mode (arg))
1388 /* Perform IBM extended format fixups if required. */
1389 perform_ibm_extended_fixups (&arg, &mode, &type, loc);
1391 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
1393 tree arg_p
1394 = emit_tree_and_return_var (seq, fold_build1_loc (loc, ABS_EXPR, type,
1395 arg));
1396 REAL_VALUE_TYPE rmax;
1397 char buf[128];
1398 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
1399 real_from_string (&rmax, buf);
1401 tree res = build_call_expr (isle_fn, 2, arg_p, build_real (type, rmax));
1403 push_gimplify_context ();
1404 gimplify_expr (&res, seq, NULL, is_gimple_val, fb_either);
1405 pop_gimplify_context (NULL);
1407 return emit_tree_and_return_var (seq, gimple_boolify (res));
1410 const real_format *format = REAL_MODE_FORMAT (mode);
1411 const HOST_WIDE_INT type_width = TYPE_PRECISION (type);
1413 tree int_arg_type = build_nonstandard_integer_type (type_width, true);
1415 /* This creates a mask to be used to check the exp value in the shifted
1416 integer representation of the fpnum. */
1417 const int exp_bits = (GET_MODE_SIZE (mode) * BITS_PER_UNIT) - format->p;
1418 gcc_assert (format->p > 0);
1420 tree significant_bit = build_int_cstu (int_arg_type, format->p);
1421 tree exp_mask
1422 = fold_build2_loc (loc, MINUS_EXPR, int_arg_type,
1423 fold_build2_loc (loc, LSHIFT_EXPR, int_arg_type,
1424 build_int_cstu (int_arg_type, 2),
1425 build_int_cstu (int_arg_type,
1426 exp_bits - 1)),
1427 build_int_cstu (int_arg_type, 1));
1429 /* Get the number reinterpreted as an integer.
1430 Shift left to remove the sign. */
1431 tree int_arg
1432 = fold_build2_loc (loc, LSHIFT_EXPR, int_arg_type,
1433 get_num_as_int (seq, arg, loc),
1434 build_int_cstu (int_arg_type, 1));
1436 /* This constructs the bit pattern of a value whose exp has all bits set
1437 and whose mantissa has no bits set. */
1438 tree inf_mask
1439 = fold_build2_loc (loc, LSHIFT_EXPR, int_arg_type,
1440 exp_mask, significant_bit);
1442 /* Check that the value is below the Inf/NaN bit pattern, i.e. that the exponent does not have all bits set. */
1443 tree inf_check_tmp
1444 = fold_build2_loc (loc, LT_EXPR, bool_type,
1445 emit_tree_and_return_var (seq, int_arg),
1446 inf_mask);
1448 tree inf_check = emit_tree_and_return_var (seq, inf_check_tmp);
1450 return emit_tree_and_return_var (seq, inf_check);
1453 /* Generates code to check if ARG is a NaN. In the FP case we simply check if
1454 ARG != ARG and in the INT case we check the bits in the exp and mantissa.
1455 Returns a variable containing a boolean which has the result of the check.
1457 SEQ is the buffer to use to emit the gimple instructions into.
1458 LOC is the location to use during fold calls. */
1459 static tree
1460 is_nan (gimple_seq *seq, tree arg, location_t loc)
1462 tree type = TREE_TYPE (arg);
1464 machine_mode mode = TYPE_MODE (type);
1465 const tree bool_type = boolean_type_node;
1467 if (!HONOR_NANS (mode))
1469 return build_int_cst (bool_type, false);
1472 const real_format *format = REAL_MODE_FORMAT (mode);
1474 /* If not using optimized route then exit early. */
1475 if (!use_ieee_int_mode (arg))
1477 /* Perform IBM extended format fixups if required. */
1478 perform_ibm_extended_fixups (&arg, &mode, &type, loc);
1480 tree arg_p
1481 = emit_tree_and_return_var (seq, fold_build1_loc (loc, ABS_EXPR, type,
1482 arg));
1483 tree res
1484 = fold_build2_loc (loc, UNORDERED_EXPR, bool_type, arg_p, arg_p);
1486 return emit_tree_and_return_var (seq, res);
1489 const HOST_WIDE_INT type_width = TYPE_PRECISION (type);
1490 tree int_arg_type = build_nonstandard_integer_type (type_width, true);
1492 /* This creates a mask to be used to check the exp value in the shifted
1493 integer representation of the fpnum. */
1494 const int exp_bits = (GET_MODE_SIZE (mode) * BITS_PER_UNIT) - format->p;
1495 tree significant_bit = build_int_cstu (int_arg_type, format->p);
1496 tree exp_mask
1497 = fold_build2_loc (loc, MINUS_EXPR, int_arg_type,
1498 fold_build2_loc (loc, LSHIFT_EXPR, int_arg_type,
1499 build_int_cstu (int_arg_type, 2),
1500 build_int_cstu (int_arg_type,
1501 exp_bits - 1)),
1502 build_int_cstu (int_arg_type, 1));
1504 /* Get the number reinterpreted as an integer.
1505 Shift left to remove the sign. */
1506 tree int_arg
1507 = fold_build2_loc (loc, LSHIFT_EXPR, int_arg_type,
1508 get_num_as_int (seq, arg, loc),
1509 build_int_cstu (int_arg_type, 1));
1511 /* This constructs the bit pattern of a value whose exp has all bits set
1512 and whose mantissa has no bits set. */
1513 tree inf_mask
1514 = fold_build2_loc (loc, LSHIFT_EXPR, int_arg_type,
1515 exp_mask, significant_bit);
1517 /* Check if exponent has all bits set and mantissa is not 0. */
1518 tree nan_check
1519 = emit_tree_and_return_var (seq,
1520 fold_build2_loc (loc, GT_EXPR, bool_type,
1521 emit_tree_and_return_var (seq, int_arg),
1522 inf_mask));
1524 return emit_tree_and_return_var (seq, nan_check);
1527 /* Validates a single argument from the argument list of CALL at position INDEX.
1528 The extracted parameter is compared against the expected type CODE.
1530 A boolean is returned indicating whether the parameter exists and is of
1531 the expected type. */
1532 static bool
1533 gimple_validate_arg (gimple* call, int index, enum tree_code code)
1535 const tree arg = gimple_call_arg (call, index);
1536 if (!arg)
1537 return false;
1538 else if (code == POINTER_TYPE)
1539 return POINTER_TYPE_P (TREE_TYPE (arg));
1540 else if (code == INTEGER_TYPE)
1541 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
1542 return code == TREE_CODE (TREE_TYPE (arg));
1545 /* Lowers calls to __builtin_fpclassify to
1546 fpclassify (x) ->
1547 isnormal(x) ? FP_NORMAL :
1548 iszero (x) ? FP_ZERO :
1549 isnan (x) ? FP_NAN :
1550 isinfinite (x) ? FP_INFINITE :
1551 FP_SUBNORMAL.
1553 The code may use integer arithmetic if it decides
1554 that the produced assembly would be faster. This can only be done
1555 for numbers that are similar to IEEE-754 in format.
1557 This builtin will generate code to return the appropriate floating
1558 point classification depending on the value of the floating point
1559 number passed in. The possible return values must be supplied as
1560 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
1561 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
1562 one floating point argument which is "type generic".
1564 GSI is the gimple iterator containing the fpclassify call to lower.
1565 The call will be expanded and replaced inline in the given GSI. */
1566 static void
1567 lower_builtin_fpclassify (gimple_stmt_iterator *gsi)
1569 gimple *call = gsi_stmt (*gsi);
1570 location_t loc = gimple_location (call);
1572 /* Verify the required arguments in the original call. */
1573 if (gimple_call_num_args (call) != 6
1574 || !gimple_validate_arg (call, 0, INTEGER_TYPE)
1575 || !gimple_validate_arg (call, 1, INTEGER_TYPE)
1576 || !gimple_validate_arg (call, 2, INTEGER_TYPE)
1577 || !gimple_validate_arg (call, 3, INTEGER_TYPE)
1578 || !gimple_validate_arg (call, 4, INTEGER_TYPE)
1579 || !gimple_validate_arg (call, 5, REAL_TYPE))
1580 return;
1582 /* Collect the arguments from the call. */
1583 tree fp_nan = gimple_call_arg (call, 0);
1584 tree fp_infinite = gimple_call_arg (call, 1);
1585 tree fp_normal = gimple_call_arg (call, 2);
1586 tree fp_subnormal = gimple_call_arg (call, 3);
1587 tree fp_zero = gimple_call_arg (call, 4);
1588 tree arg = gimple_call_arg (call, 5);
1590 gimple_seq body = NULL;
1592 /* Create the label to jump to in order to exit. */
1593 tree done_label = create_artificial_label (UNKNOWN_LOCATION);
1594 tree dest;
1595 tree orig_dest = dest = gimple_call_lhs (call);
1596 if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
1597 dest = create_tmp_reg (TREE_TYPE (orig_dest));
1599 emit_tree_cond (&body, dest, done_label,
1600 is_normal (&body, arg, loc), fp_normal);
1601 emit_tree_cond (&body, dest, done_label,
1602 is_zero (&body, arg, loc), fp_zero);
1603 emit_tree_cond (&body, dest, done_label,
1604 is_nan (&body, arg, loc), fp_nan);
1605 emit_tree_cond (&body, dest, done_label,
1606 is_infinity (&body, arg, loc), fp_infinite);
1608 /* And finally, emit the default case if nothing else matches.
1609 This replaces the call to is_subnormal. */
1610 gimple_seq_add_stmt (&body, gimple_build_assign (dest, fp_subnormal));
1611 gimple_seq_add_stmt (&body, gimple_build_label (done_label));
1613 /* Build orig_dest = dest if necessary. */
1614 if (dest != orig_dest)
1616 gimple_seq_add_stmt (&body, gimple_build_assign (orig_dest, dest));
1619 gsi_insert_seq_before (gsi, body, GSI_SAME_STMT);
1622 /* Remove the call to __builtin_fpclassify. */
1623 gsi_remove (gsi, false);
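/* For reference, the sequence built above for

     r = __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                               FP_SUBNORMAL, FP_ZERO, x);

   has roughly this shape (a hand-written sketch; label names are made up):

     if (<x is normal>)   { r = FP_NORMAL;   goto done; }
     if (<x is zero>)     { r = FP_ZERO;     goto done; }
     if (<x is NaN>)      { r = FP_NAN;      goto done; }
     if (<x is infinite>) { r = FP_INFINITE; goto done; }
     r = FP_SUBNORMAL;
   done:;

   Each test is emitted by emit_tree_cond as an explicit two-destination
   GIMPLE_COND rather than a conditional select, so only the checks up to
   the first match are executed at run time.  */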
1626 /* Generic wrapper for lowering the is_nan, is_normal, is_subnormal, is_zero,
1627 etc. checks. All these lowerings have the same setup: the wrapper validates
1628 the argument and creates the branches and labels required to invoke the
1629 check properly. The check to emit is passed as the function pointer FNDECL.
1631 GSI is the gimple iterator containing the builtin call to lower.
1632 The call will be expanded and replaced inline in the given GSI. */
1633 static void
1634 gen_call_fp_builtin (gimple_stmt_iterator *gsi,
1635 tree (*fndecl)(gimple_seq *, tree, location_t))
1637 gimple *call = gsi_stmt (*gsi);
1638 location_t loc = gimple_location (call);
1640 /* Verify the required arguments in the original call. */
1641 if (gimple_call_num_args (call) != 1
1642 || !gimple_validate_arg (call, 0, REAL_TYPE))
1643 return;
1645 tree arg = gimple_call_arg (call, 0);
1646 gimple_seq body = NULL;
1648 /* Create the label to jump to in order to exit. */
1649 tree done_label = create_artificial_label (UNKNOWN_LOCATION);
1650 tree dest;
1651 tree orig_dest = dest = gimple_call_lhs (call);
1652 tree type = TREE_TYPE (orig_dest);
1653 if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
1654 dest = create_tmp_reg (type);
1656 tree t_true = build_int_cst (type, true);
1657 tree t_false = build_int_cst (type, false);
1659 emit_tree_cond (&body, dest, done_label,
1660 fndecl (&body, arg, loc), t_true);
1662 /* And finally, emit the default case if nothing else matches.
1663 That is, the result defaults to false. */
1664 gimple_seq_add_stmt (&body, gimple_build_assign (dest, t_false));
1665 gimple_seq_add_stmt (&body, gimple_build_label (done_label));
1667 /* Build orig_dest = dest if necessary. */
1668 if (dest != orig_dest)
1670 gimple_seq_add_stmt (&body, gimple_build_assign (orig_dest, dest));
1673 gsi_insert_seq_before (gsi, body, GSI_SAME_STMT);
1675 /* Remove the call to the builtin. */
1676 gsi_remove (gsi, false);
1679 /* Lower and expand calls to __builtin_isnan in GSI. */
1680 static void
1681 lower_builtin_isnan (gimple_stmt_iterator *gsi)
1683 gen_call_fp_builtin (gsi, &is_nan);
1686 /* Lower and expand calls to __builtin_isinfinite in GSI. */
1687 static void
1688 lower_builtin_isinfinite (gimple_stmt_iterator *gsi)
1690 gen_call_fp_builtin (gsi, &is_infinity);
1693 /* Lower and expand calls to __builtin_isnormal in GSI. */
1694 static void
1695 lower_builtin_isnormal (gimple_stmt_iterator *gsi)
1697 gen_call_fp_builtin (gsi, &is_normal);
1700 /* Lower and expand calls to __builtin_iszero in GSI. */
1701 static void
1702 lower_builtin_iszero (gimple_stmt_iterator *gsi)
1704 gen_call_fp_builtin (gsi, &is_zero);
1707 /* Lower and expand calls to __builtin_issubnormal in GSI. */
1708 static void
1709 lower_builtin_issubnormal (gimple_stmt_iterator *gsi)
1711 gen_call_fp_builtin (gsi, &is_subnormal);
1714 /* Lower and expand calls to __builtin_isfinite in GSI. */
1715 static void
1716 lower_builtin_isfinite (gimple_stmt_iterator *gsi)
1718 gen_call_fp_builtin (gsi, &is_finite);
1721 /* Lower calls to posix_memalign to
1722 res = posix_memalign (ptr, align, size);
1723 if (res == 0)
1724 *ptr = __builtin_assume_aligned (*ptr, align);
1725 or to
1726 void *tem;
1727 res = posix_memalign (&tem, align, size);
1728 if (res == 0)
1729 ptr = __builtin_assume_aligned (tem, align);
1730 in case the first argument was &ptr. That way we can get at the
1731 alignment of the heap pointer in CCP. */
1733 static void
1734 lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
1736 gimple *stmt, *call = gsi_stmt (*gsi);
1737 tree pptr = gimple_call_arg (call, 0);
1738 tree align = gimple_call_arg (call, 1);
1739 tree res = gimple_call_lhs (call);
1740 tree ptr = create_tmp_reg (ptr_type_node);
1741 if (TREE_CODE (pptr) == ADDR_EXPR)
1743 tree tem = create_tmp_var (ptr_type_node);
1744 TREE_ADDRESSABLE (tem) = 1;
1745 gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
1746 stmt = gimple_build_assign (ptr, tem);
1748 else
1749 stmt = gimple_build_assign (ptr,
1750 fold_build2 (MEM_REF, ptr_type_node, pptr,
1751 build_int_cst (ptr_type_node, 0)));
1752 if (res == NULL_TREE)
1754 res = create_tmp_reg (integer_type_node);
1755 gimple_call_set_lhs (call, res);
1757 tree align_label = create_artificial_label (UNKNOWN_LOCATION);
1758 tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
1759 gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
1760 align_label, noalign_label);
1761 gsi_insert_after (gsi, cond, GSI_NEW_STMT);
1762 gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
1763 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1764 stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
1765 2, ptr, align);
1766 gimple_call_set_lhs (stmt, ptr);
1767 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1768 stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
1769 build_int_cst (ptr_type_node, 0)),
1770 ptr);
1771 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1772 gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
1776 /* Record the variables in VARS into function FN. */
1778 void
1779 record_vars_into (tree vars, tree fn)
1781 for (; vars; vars = DECL_CHAIN (vars))
1783 tree var = vars;
1785 /* BIND_EXPRs also contain function/type/constant declarations
1786 which we don't need to care about. */
1787 if (!VAR_P (var))
1788 continue;
1790 /* Nothing to do in this case. */
1791 if (DECL_EXTERNAL (var))
1792 continue;
1794 /* Record the variable. */
1795 add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
1800 /* Record the variables in VARS into current_function_decl. */
1802 void
1803 record_vars (tree vars)
1805 record_vars_into (vars, current_function_decl);