1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
51 /* Register mappings for target machines without register windows. */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
54 #endif
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
57 #endif
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 #endif
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) #X,
68 const char *const built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
72 #undef DEF_BUILTIN
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance the runtime is not
79 required to implement the function call in all cases).  */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 /* Trigonometric and mathematical constants used in builtin folding. */
83 static bool builtin_dconsts_init = 0;
84 static REAL_VALUE_TYPE dconstpi;
85 static REAL_VALUE_TYPE dconste;
87 static int get_pointer_alignment (tree, unsigned int);
88 static tree c_strlen (tree, int);
89 static const char *c_getstr (tree);
90 static rtx c_readstr (const char *, enum machine_mode);
91 static int target_char_cast (tree, char *);
92 static rtx get_memory_rtx (tree);
93 static int apply_args_size (void);
94 static int apply_result_size (void);
95 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
96 static rtx result_vector (int, rtx);
97 #endif
98 static rtx expand_builtin_setjmp (tree, rtx);
99 static void expand_builtin_prefetch (tree);
100 static rtx expand_builtin_apply_args (void);
101 static rtx expand_builtin_apply_args_1 (void);
102 static rtx expand_builtin_apply (rtx, rtx, rtx);
103 static void expand_builtin_return (rtx);
104 static enum type_class type_to_class (tree);
105 static rtx expand_builtin_classify_type (tree);
106 static void expand_errno_check (tree, rtx);
107 static rtx expand_builtin_mathfn (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
109 static rtx expand_builtin_constant_p (tree, enum machine_mode);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (tree);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_strcat (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode, int);
125 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_bcopy (tree);
127 static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
131 static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_alloca (tree, rtx);
141 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
142 static rtx expand_builtin_frame_address (tree, tree);
143 static rtx expand_builtin_fputs (tree, int, int);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_classify_type (tree);
149 static tree fold_builtin_inf (tree, int);
150 static tree fold_builtin_nan (tree, tree, int);
151 static int validate_arglist (tree, ...);
152 static tree fold_trunc_transparent_mathfn (tree);
153 static bool readonly_data_expr (tree);
154 static rtx expand_builtin_fabs (tree, rtx, rtx);
155 static rtx expand_builtin_cabs (tree, rtx);
156 static void init_builtin_dconsts (void);
157 static tree fold_builtin_cabs (tree, tree, tree);
159 /* Initialize mathematical constants for constant folding builtins.
160 These constants need to be given to at least 160 bits of precision.  */
162 static void
163 init_builtin_dconsts (void)
165 real_from_string (&dconstpi,
166 "3.1415926535897932384626433832795028841971693993751058209749445923078");
167 real_from_string (&dconste,
168 "2.7182818284590452353602874713526624977572470936999595749669676277241");
170 builtin_dconsts_init = true;
173 /* Return the alignment in bits of EXP, a pointer valued expression.
174 But don't return more than MAX_ALIGN no matter what.
175 The alignment returned is, by default, the alignment of the thing that
176 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
178 Otherwise, look at the expression to see if we can do better, i.e., if the
179 expression is actually pointing at an object whose alignment is tighter. */
181 static int
182 get_pointer_alignment (tree exp, unsigned int max_align)
184 unsigned int align, inner;
186 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
187 return 0;
189 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
190 align = MIN (align, max_align);
192 while (1)
194 switch (TREE_CODE (exp))
196 case NOP_EXPR:
197 case CONVERT_EXPR:
198 case NON_LVALUE_EXPR:
199 exp = TREE_OPERAND (exp, 0);
200 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
201 return align;
203 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
204 align = MIN (inner, max_align);
205 break;
207 case PLUS_EXPR:
208 /* If sum of pointer + int, restrict our maximum alignment to that
209 imposed by the integer. If not, we can't do any better than
210 ALIGN. */
211 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
212 return align;
214 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
215 & (max_align / BITS_PER_UNIT - 1))
216 != 0)
217 max_align >>= 1;
219 exp = TREE_OPERAND (exp, 0);
220 break;
222 case ADDR_EXPR:
223 /* See what we are pointing at and look at its alignment. */
224 exp = TREE_OPERAND (exp, 0);
225 if (TREE_CODE (exp) == FUNCTION_DECL)
226 align = FUNCTION_BOUNDARY;
227 else if (DECL_P (exp))
228 align = DECL_ALIGN (exp);
229 #ifdef CONSTANT_ALIGNMENT
230 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
231 align = CONSTANT_ALIGNMENT (exp, align);
232 #endif
233 return MIN (align, max_align);
235 default:
236 return align;
241 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
242 way, because it could contain a zero byte in the middle.
243 TREE_STRING_LENGTH is the size of the character array, not the string.
245 ONLY_VALUE should be nonzero if the result is not going to be emitted
246 into the instruction stream and zero if it is going to be expanded.
247 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
248 is returned, otherwise NULL, since
249 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
250 evaluate the side-effects.
252 The value returned is of type `ssizetype'.
254 Unfortunately, string_constant can't access the values of const char
255 arrays with initializers, so neither can we do so here. */
257 static tree
258 c_strlen (tree src, int only_value)
260 tree offset_node;
261 HOST_WIDE_INT offset;
262 int max;
263 const char *ptr;
265 STRIP_NOPS (src);
266 if (TREE_CODE (src) == COND_EXPR
267 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
269 tree len1, len2;
271 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
272 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
273 if (tree_int_cst_equal (len1, len2))
274 return len1;
277 if (TREE_CODE (src) == COMPOUND_EXPR
278 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
279 return c_strlen (TREE_OPERAND (src, 1), only_value);
281 src = string_constant (src, &offset_node);
282 if (src == 0)
283 return 0;
285 max = TREE_STRING_LENGTH (src) - 1;
286 ptr = TREE_STRING_POINTER (src);
288 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
290 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
291 compute the offset to the following null if we don't know where to
292 start searching for it. */
293 int i;
295 for (i = 0; i < max; i++)
296 if (ptr[i] == 0)
297 return 0;
299 /* We don't know the starting offset, but we do know that the string
300 has no internal zero bytes. We can assume that the offset falls
301 within the bounds of the string; otherwise, the programmer deserves
302 what he gets. Subtract the offset from the length of the string,
303 and return that. This would perhaps not be valid if we were dealing
304 with named arrays in addition to literal string constants. */
306 return size_diffop (size_int (max), offset_node);
309 /* We have a known offset into the string. Start searching there for
310 a null character if we can represent it as a single HOST_WIDE_INT. */
311 if (offset_node == 0)
312 offset = 0;
313 else if (! host_integerp (offset_node, 0))
314 offset = -1;
315 else
316 offset = tree_low_cst (offset_node, 0);
318 /* If the offset is known to be out of bounds, warn, and call strlen at
319 runtime. */
320 if (offset < 0 || offset > max)
322 warning ("offset outside bounds of constant string");
323 return 0;
326 /* Use strlen to search for the first zero byte. Since any strings
327 constructed with build_string will have nulls appended, we win even
328 if we get handed something like (char[4])"abcd".
330 Since OFFSET is our starting index into the string, no further
331 calculation is needed. */
332 return ssize_int (strlen (ptr + offset));
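/* Illustrative examples with assumed inputs, not taken from real callers:
   for SRC == "hello" with a known offset of 2 the code above returns
   ssize_int (3); for SRC == "foo\0bar" with a non-constant offset it
   returns 0, because the internal zero byte makes the length depend on
   where the search would have to start.  */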
335 /* Return a char pointer for a C string if it is a string constant
336 or sum of string constant and integer constant. */
338 static const char *
339 c_getstr (tree src)
341 tree offset_node;
343 src = string_constant (src, &offset_node);
344 if (src == 0)
345 return 0;
347 if (offset_node == 0)
348 return TREE_STRING_POINTER (src);
349 else if (!host_integerp (offset_node, 1)
350 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
351 return 0;
353 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
356 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
357 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
359 static rtx
360 c_readstr (const char *str, enum machine_mode mode)
362 HOST_WIDE_INT c[2];
363 HOST_WIDE_INT ch;
364 unsigned int i, j;
366 if (GET_MODE_CLASS (mode) != MODE_INT)
367 abort ();
368 c[0] = 0;
369 c[1] = 0;
370 ch = 1;
371 for (i = 0; i < GET_MODE_SIZE (mode); i++)
373 j = i;
374 if (WORDS_BIG_ENDIAN)
375 j = GET_MODE_SIZE (mode) - i - 1;
376 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
377 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
378 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
379 j *= BITS_PER_UNIT;
380 if (j > 2 * HOST_BITS_PER_WIDE_INT)
381 abort ();
382 if (ch)
383 ch = (unsigned char) str[i];
384 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
386 return immed_double_const (c[0], c[1], mode);
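/* Worked example, for illustration only and assuming SImode with 8-bit
   units: reading "abcd" packs one byte per BITS_PER_UNIT slot.  On a
   little-endian target the result is the CONST_INT 0x64636261 ('a' in the
   low byte); on a big-endian target it is 0x61626364.  Once a zero byte
   is seen, CH stays zero, so shorter strings are padded with zero
   bytes.  */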
389 /* Cast a target constant CST to target CHAR and if that value fits into
390 the host char type, return zero and put that value into the variable
391 pointed to by P.  */
393 static int
394 target_char_cast (tree cst, char *p)
396 unsigned HOST_WIDE_INT val, hostval;
398 if (!host_integerp (cst, 1)
399 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
400 return 1;
402 val = tree_low_cst (cst, 1);
403 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
404 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
406 hostval = val;
407 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
408 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
410 if (val != hostval)
411 return 1;
413 *p = hostval;
414 return 0;
417 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
418 times to get the address of either a higher stack frame, or a return
419 address located within it (depending on FNDECL_CODE). */
421 rtx
422 expand_builtin_return_addr (enum built_in_function fndecl_code, int count,
423 rtx tem)
425 int i;
427 /* Some machines need special handling before we can access
428 arbitrary frames. For example, on the sparc, we must first flush
429 all register windows to the stack. */
430 #ifdef SETUP_FRAME_ADDRESSES
431 if (count > 0)
432 SETUP_FRAME_ADDRESSES ();
433 #endif
435 /* On the sparc, the return address is not in the frame, it is in a
436 register. There is no way to access it off of the current frame
437 pointer, but it can be accessed off the previous frame pointer by
438 reading the value from the register window save area. */
439 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
440 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
441 count--;
442 #endif
444 /* Scan back COUNT frames to the specified frame. */
445 for (i = 0; i < count; i++)
447 /* Assume the dynamic chain pointer is in the word that the
448 frame address points to, unless otherwise specified. */
449 #ifdef DYNAMIC_CHAIN_ADDRESS
450 tem = DYNAMIC_CHAIN_ADDRESS (tem);
451 #endif
452 tem = memory_address (Pmode, tem);
453 tem = gen_rtx_MEM (Pmode, tem);
454 set_mem_alias_set (tem, get_frame_alias_set ());
455 tem = copy_to_reg (tem);
458 /* For __builtin_frame_address, return what we've got. */
459 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
460 return tem;
462 /* For __builtin_return_address, get the return address from that
463 frame. */
464 #ifdef RETURN_ADDR_RTX
465 tem = RETURN_ADDR_RTX (count, tem);
466 #else
467 tem = memory_address (Pmode,
468 plus_constant (tem, GET_MODE_SIZE (Pmode)));
469 tem = gen_rtx_MEM (Pmode, tem);
470 set_mem_alias_set (tem, get_frame_alias_set ());
471 #endif
472 return tem;
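/* At the source level this expansion backs the documented builtins, e.g.

     void *ret   = __builtin_return_address (0);
     void *frame = __builtin_frame_address (1);

   where the argument must be a constant frame count; the variable names
   here are only illustrative.  */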
475 /* Alias set used for setjmp buffer. */
476 static HOST_WIDE_INT setjmp_alias_set = -1;
478 /* Construct the leading half of a __builtin_setjmp call. Control will
479 return to RECEIVER_LABEL. This is used directly by sjlj exception
480 handling code. */
482 void
483 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
485 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
486 rtx stack_save;
487 rtx mem;
489 if (setjmp_alias_set == -1)
490 setjmp_alias_set = new_alias_set ();
492 #ifdef POINTERS_EXTEND_UNSIGNED
493 if (GET_MODE (buf_addr) != Pmode)
494 buf_addr = convert_memory_address (Pmode, buf_addr);
495 #endif
497 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
499 emit_queue ();
501 /* We store the frame pointer and the address of receiver_label in
502 the buffer and use the rest of it for the stack save area, which
503 is machine-dependent. */
505 #ifndef BUILTIN_SETJMP_FRAME_VALUE
506 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
507 #endif
509 mem = gen_rtx_MEM (Pmode, buf_addr);
510 set_mem_alias_set (mem, setjmp_alias_set);
511 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
513 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
514 set_mem_alias_set (mem, setjmp_alias_set);
516 emit_move_insn (validize_mem (mem),
517 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
519 stack_save = gen_rtx_MEM (sa_mode,
520 plus_constant (buf_addr,
521 2 * GET_MODE_SIZE (Pmode)));
522 set_mem_alias_set (stack_save, setjmp_alias_set);
523 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
525 /* If there is further processing to do, do it. */
526 #ifdef HAVE_builtin_setjmp_setup
527 if (HAVE_builtin_setjmp_setup)
528 emit_insn (gen_builtin_setjmp_setup (buf_addr));
529 #endif
531 /* Tell optimize_save_area_alloca that extra work is going to
532 need to go on during alloca. */
533 current_function_calls_setjmp = 1;
535 /* Set this so all the registers get saved in our frame; we need to be
536 able to copy the saved values for any registers from frames we unwind. */
537 current_function_has_nonlocal_label = 1;
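/* Summary of the buffer layout established above, derived from the stores
   just emitted: word 0 holds BUILTIN_SETJMP_FRAME_VALUE (by default
   virtual_stack_vars_rtx), word 1 holds the address of RECEIVER_LABEL,
   and the remaining words starting at offset 2 * GET_MODE_SIZE (Pmode)
   form the machine-dependent stack save area.  */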
540 /* Construct the trailing part of a __builtin_setjmp call.
541 This is used directly by sjlj exception handling code. */
543 void
544 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
546 /* Clobber the FP when we get here, so we have to make sure it's
547 marked as used by this function. */
548 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
550 /* Mark the static chain as clobbered here so life information
551 doesn't get messed up for it. */
552 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
554 /* Now put in the code to restore the frame pointer, and argument
555 pointer, if needed. The code below is from expand_end_bindings
556 in stmt.c; see detailed documentation there. */
557 #ifdef HAVE_nonlocal_goto
558 if (! HAVE_nonlocal_goto)
559 #endif
560 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
562 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
563 if (fixed_regs[ARG_POINTER_REGNUM])
565 #ifdef ELIMINABLE_REGS
566 size_t i;
567 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
569 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
570 if (elim_regs[i].from == ARG_POINTER_REGNUM
571 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
572 break;
574 if (i == ARRAY_SIZE (elim_regs))
575 #endif
577 /* Now restore our arg pointer from the address at which it
578 was saved in our stack frame. */
579 emit_move_insn (virtual_incoming_args_rtx,
580 copy_to_reg (get_arg_pointer_save_area (cfun)));
583 #endif
585 #ifdef HAVE_builtin_setjmp_receiver
586 if (HAVE_builtin_setjmp_receiver)
587 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
588 else
589 #endif
590 #ifdef HAVE_nonlocal_goto_receiver
591 if (HAVE_nonlocal_goto_receiver)
592 emit_insn (gen_nonlocal_goto_receiver ());
593 else
594 #endif
595 { /* Nothing */ }
597 /* @@@ This is a kludge. Not all machine descriptions define a blockage
598 insn, but we must not allow the code we just generated to be reordered
599 by scheduling. Specifically, the update of the frame pointer must
600 happen immediately, not later. So emit an ASM_INPUT to act as blockage
601 insn. */
602 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
605 /* __builtin_setjmp is passed a pointer to an array of five words (not
606 all will be used on all machines). It operates similarly to the C
607 library function of the same name, but is more efficient. Much of
608 the code below (and for longjmp) is copied from the handling of
609 non-local gotos.
611 NOTE: This is intended for use by GNAT and the exception handling
612 scheme in the compiler and will only work in the method used by
613 them. */
615 static rtx
616 expand_builtin_setjmp (tree arglist, rtx target)
618 rtx buf_addr, next_lab, cont_lab;
620 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
621 return NULL_RTX;
623 if (target == 0 || GET_CODE (target) != REG
624 || REGNO (target) < FIRST_PSEUDO_REGISTER)
625 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
627 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
629 next_lab = gen_label_rtx ();
630 cont_lab = gen_label_rtx ();
632 expand_builtin_setjmp_setup (buf_addr, next_lab);
634 /* Set TARGET to zero and branch to the continue label. */
635 emit_move_insn (target, const0_rtx);
636 emit_jump_insn (gen_jump (cont_lab));
637 emit_barrier ();
638 emit_label (next_lab);
640 expand_builtin_setjmp_receiver (next_lab);
642 /* Set TARGET to one. */
643 emit_move_insn (target, const1_rtx);
644 emit_label (cont_lab);
646 /* Tell flow about the strange goings on. Putting `next_lab' on
647 `nonlocal_goto_handler_labels' indicates that function
648 calls may traverse the arc back to this label. */
650 current_function_has_nonlocal_label = 1;
651 nonlocal_goto_handler_labels
652 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
654 return target;
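/* Hedged usage sketch (as noted above, these builtins are intended for
   internal exception handling use only):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);

   The second argument of __builtin_longjmp must be the constant 1, and
   control then re-enters after the __builtin_setjmp with a nonzero
   result.  */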
657 /* __builtin_longjmp is passed a pointer to an array of five words (not
658 all will be used on all machines). It operates similarly to the C
659 library function of the same name, but is more efficient. Much of
660 the code below is copied from the handling of non-local gotos.
662 NOTE: This is intended for use by GNAT and the exception handling
663 scheme in the compiler and will only work in the method used by
664 them. */
666 void
667 expand_builtin_longjmp (rtx buf_addr, rtx value)
669 rtx fp, lab, stack, insn, last;
670 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
672 if (setjmp_alias_set == -1)
673 setjmp_alias_set = new_alias_set ();
675 #ifdef POINTERS_EXTEND_UNSIGNED
676 if (GET_MODE (buf_addr) != Pmode)
677 buf_addr = convert_memory_address (Pmode, buf_addr);
678 #endif
680 buf_addr = force_reg (Pmode, buf_addr);
682 /* We used to store value in static_chain_rtx, but that fails if pointers
683 are smaller than integers. We instead require that the user must pass
684 a second argument of 1, because that is what builtin_setjmp will
685 return. This also makes EH slightly more efficient, since we are no
686 longer copying around a value that we don't care about. */
687 if (value != const1_rtx)
688 abort ();
690 current_function_calls_longjmp = 1;
692 last = get_last_insn ();
693 #ifdef HAVE_builtin_longjmp
694 if (HAVE_builtin_longjmp)
695 emit_insn (gen_builtin_longjmp (buf_addr));
696 else
697 #endif
699 fp = gen_rtx_MEM (Pmode, buf_addr);
700 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
701 GET_MODE_SIZE (Pmode)));
703 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
704 2 * GET_MODE_SIZE (Pmode)));
705 set_mem_alias_set (fp, setjmp_alias_set);
706 set_mem_alias_set (lab, setjmp_alias_set);
707 set_mem_alias_set (stack, setjmp_alias_set);
709 /* Pick up FP, label, and SP from the block and jump. This code is
710 from expand_goto in stmt.c; see there for detailed comments. */
711 #if HAVE_nonlocal_goto
712 if (HAVE_nonlocal_goto)
713 /* We have to pass a value to the nonlocal_goto pattern that will
714 get copied into the static_chain pointer, but it does not matter
715 what that value is, because builtin_setjmp does not use it. */
716 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
717 else
718 #endif
720 lab = copy_to_reg (lab);
722 emit_move_insn (hard_frame_pointer_rtx, fp);
723 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
725 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
726 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
727 emit_indirect_jump (lab);
731 /* Search backwards and mark the jump insn as a non-local goto.
732 Note that this precludes the use of __builtin_longjmp to a
733 __builtin_setjmp target in the same function. However, we've
734 already cautioned the user that these functions are for
735 internal exception handling use only. */
736 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
738 if (insn == last)
739 abort ();
740 if (GET_CODE (insn) == JUMP_INSN)
742 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
743 REG_NOTES (insn));
744 break;
746 else if (GET_CODE (insn) == CALL_INSN)
747 break;
751 /* Expand a call to __builtin_prefetch. For a target that does not support
752 data prefetch, evaluate the memory address argument in case it has side
753 effects. */
755 static void
756 expand_builtin_prefetch (tree arglist)
758 tree arg0, arg1, arg2;
759 rtx op0, op1, op2;
761 if (!validate_arglist (arglist, POINTER_TYPE, 0))
762 return;
764 arg0 = TREE_VALUE (arglist);
765 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
766 zero (read) and argument 2 (locality) defaults to 3 (high degree of
767 locality). */
768 if (TREE_CHAIN (arglist))
770 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
771 if (TREE_CHAIN (TREE_CHAIN (arglist)))
772 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
773 else
774 arg2 = build_int_2 (3, 0);
776 else
778 arg1 = integer_zero_node;
779 arg2 = build_int_2 (3, 0);
782 /* Argument 0 is an address. */
783 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
785 /* Argument 1 (read/write flag) must be a compile-time constant int. */
786 if (TREE_CODE (arg1) != INTEGER_CST)
788 error ("second arg to `__builtin_prefetch' must be a constant");
789 arg1 = integer_zero_node;
791 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
792 /* Argument 1 must be either zero or one. */
793 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
795 warning ("invalid second arg to __builtin_prefetch; using zero");
796 op1 = const0_rtx;
799 /* Argument 2 (locality) must be a compile-time constant int. */
800 if (TREE_CODE (arg2) != INTEGER_CST)
802 error ("third arg to `__builtin_prefetch' must be a constant");
803 arg2 = integer_zero_node;
805 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
806 /* Argument 2 must be 0, 1, 2, or 3. */
807 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
809 warning ("invalid third arg to __builtin_prefetch; using zero");
810 op2 = const0_rtx;
813 #ifdef HAVE_prefetch
814 if (HAVE_prefetch)
816 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
817 (op0,
818 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
819 || (GET_MODE (op0) != Pmode))
821 #ifdef POINTERS_EXTEND_UNSIGNED
822 if (GET_MODE (op0) != Pmode)
823 op0 = convert_memory_address (Pmode, op0);
824 #endif
825 op0 = force_reg (Pmode, op0);
827 emit_insn (gen_prefetch (op0, op1, op2));
829 else
830 #endif
831 op0 = protect_from_queue (op0, 0);
832 /* Don't do anything with direct references to volatile memory, but
833 generate code to handle other side effects. */
834 if (GET_CODE (op0) != MEM && side_effects_p (op0))
835 emit_insn (op0);
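/* Source-level usage this expansion supports, for illustration (the array
   and index names are assumed):

     __builtin_prefetch (&a[i + 8]);
     __builtin_prefetch (&a[i + 8], 1, 0);

   The first form defaults to a read prefetch with locality 3; the second
   requests a write prefetch with no temporal locality.  On targets
   without a prefetch pattern only the address's side effects survive.  */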
838 /* Get a MEM rtx for expression EXP which is the address of an operand
839 to be used in a string instruction (cmpstrsi, movstrsi, ...).  */
841 static rtx
842 get_memory_rtx (tree exp)
844 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
845 rtx mem;
847 #ifdef POINTERS_EXTEND_UNSIGNED
848 if (GET_MODE (addr) != Pmode)
849 addr = convert_memory_address (Pmode, addr);
850 #endif
852 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
854 /* Get an expression we can use to find the attributes to assign to MEM.
855 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
856 we can. First remove any nops. */
857 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
858 || TREE_CODE (exp) == NON_LVALUE_EXPR)
859 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
860 exp = TREE_OPERAND (exp, 0);
862 if (TREE_CODE (exp) == ADDR_EXPR)
864 exp = TREE_OPERAND (exp, 0);
865 set_mem_attributes (mem, exp, 0);
867 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
869 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
870 /* memcpy, memset and other builtin stringops can alias with anything. */
871 set_mem_alias_set (mem, 0);
874 return mem;
877 /* Built-in functions to perform an untyped call and return. */
879 /* For each register that may be used for calling a function, this
880 gives a mode used to copy the register's value. VOIDmode indicates
881 the register is not used for calling a function. If the machine
882 has register windows, this gives only the outbound registers.
883 INCOMING_REGNO gives the corresponding inbound register. */
884 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
886 /* For each register that may be used for returning values, this gives
887 a mode used to copy the register's value. VOIDmode indicates the
888 register is not used for returning values. If the machine has
889 register windows, this gives only the outbound registers.
890 INCOMING_REGNO gives the corresponding inbound register. */
891 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
893 /* For each register that may be used for calling a function, this
894 gives the offset of that register into the block returned by
895 __builtin_apply_args. 0 indicates that the register is not
896 used for calling a function. */
897 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
899 /* Return the offset of register REGNO into the block returned by
900 __builtin_apply_args. This is not declared static, since it is
901 needed in objc-act.c. */
903 int
904 apply_args_register_offset (int regno)
906 apply_args_size ();
908 /* Arguments are always put in outgoing registers (in the argument
909 block) if such make sense. */
910 #ifdef OUTGOING_REGNO
911 regno = OUTGOING_REGNO (regno);
912 #endif
913 return apply_args_reg_offset[regno];
916 /* Return the size required for the block returned by __builtin_apply_args,
917 and initialize apply_args_mode. */
919 static int
920 apply_args_size (void)
922 static int size = -1;
923 int align;
924 unsigned int regno;
925 enum machine_mode mode;
927 /* The values computed by this function never change. */
928 if (size < 0)
930 /* The first value is the incoming arg-pointer. */
931 size = GET_MODE_SIZE (Pmode);
933 /* The second value is the structure value address unless this is
934 passed as an "invisible" first argument. */
935 if (struct_value_rtx)
936 size += GET_MODE_SIZE (Pmode);
938 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
939 if (FUNCTION_ARG_REGNO_P (regno))
941 /* Search for the proper mode for copying this register's
942 value. I'm not sure this is right, but it works so far. */
943 enum machine_mode best_mode = VOIDmode;
945 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
946 mode != VOIDmode;
947 mode = GET_MODE_WIDER_MODE (mode))
948 if (HARD_REGNO_MODE_OK (regno, mode)
949 && HARD_REGNO_NREGS (regno, mode) == 1)
950 best_mode = mode;
952 if (best_mode == VOIDmode)
953 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
954 mode != VOIDmode;
955 mode = GET_MODE_WIDER_MODE (mode))
956 if (HARD_REGNO_MODE_OK (regno, mode)
957 && have_insn_for (SET, mode))
958 best_mode = mode;
960 if (best_mode == VOIDmode)
961 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
962 mode != VOIDmode;
963 mode = GET_MODE_WIDER_MODE (mode))
964 if (HARD_REGNO_MODE_OK (regno, mode)
965 && have_insn_for (SET, mode))
966 best_mode = mode;
968 if (best_mode == VOIDmode)
969 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
970 mode != VOIDmode;
971 mode = GET_MODE_WIDER_MODE (mode))
972 if (HARD_REGNO_MODE_OK (regno, mode)
973 && have_insn_for (SET, mode))
974 best_mode = mode;
976 mode = best_mode;
977 if (mode == VOIDmode)
978 abort ();
980 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
981 if (size % align != 0)
982 size = CEIL (size, align) * align;
983 apply_args_reg_offset[regno] = size;
984 size += GET_MODE_SIZE (mode);
985 apply_args_mode[regno] = mode;
987 else
989 apply_args_mode[regno] = VOIDmode;
990 apply_args_reg_offset[regno] = 0;
993 return size;
996 /* Return the size required for the block returned by __builtin_apply,
997 and initialize apply_result_mode. */
999 static int
1000 apply_result_size (void)
1002 static int size = -1;
1003 int align, regno;
1004 enum machine_mode mode;
1006 /* The values computed by this function never change. */
1007 if (size < 0)
1009 size = 0;
1011 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1012 if (FUNCTION_VALUE_REGNO_P (regno))
1014 /* Search for the proper mode for copying this register's
1015 value. I'm not sure this is right, but it works so far. */
1016 enum machine_mode best_mode = VOIDmode;
1018 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1019 mode != TImode;
1020 mode = GET_MODE_WIDER_MODE (mode))
1021 if (HARD_REGNO_MODE_OK (regno, mode))
1022 best_mode = mode;
1024 if (best_mode == VOIDmode)
1025 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1026 mode != VOIDmode;
1027 mode = GET_MODE_WIDER_MODE (mode))
1028 if (HARD_REGNO_MODE_OK (regno, mode)
1029 && have_insn_for (SET, mode))
1030 best_mode = mode;
1032 if (best_mode == VOIDmode)
1033 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1034 mode != VOIDmode;
1035 mode = GET_MODE_WIDER_MODE (mode))
1036 if (HARD_REGNO_MODE_OK (regno, mode)
1037 && have_insn_for (SET, mode))
1038 best_mode = mode;
1040 if (best_mode == VOIDmode)
1041 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1042 mode != VOIDmode;
1043 mode = GET_MODE_WIDER_MODE (mode))
1044 if (HARD_REGNO_MODE_OK (regno, mode)
1045 && have_insn_for (SET, mode))
1046 best_mode = mode;
1048 mode = best_mode;
1049 if (mode == VOIDmode)
1050 abort ();
1052 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1053 if (size % align != 0)
1054 size = CEIL (size, align) * align;
1055 size += GET_MODE_SIZE (mode);
1056 apply_result_mode[regno] = mode;
1058 else
1059 apply_result_mode[regno] = VOIDmode;
1061 /* Allow targets that use untyped_call and untyped_return to override
1062 the size so that machine-specific information can be stored here. */
1063 #ifdef APPLY_RESULT_SIZE
1064 size = APPLY_RESULT_SIZE;
1065 #endif
1067 return size;
1070 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1071 /* Create a vector describing the result block RESULT. If SAVEP is true,
1072 the result block is used to save the values; otherwise it is used to
1073 restore the values. */
1075 static rtx
1076 result_vector (int savep, rtx result)
1078 int regno, size, align, nelts;
1079 enum machine_mode mode;
1080 rtx reg, mem;
1081 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1083 size = nelts = 0;
1084 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1085 if ((mode = apply_result_mode[regno]) != VOIDmode)
1087 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1088 if (size % align != 0)
1089 size = CEIL (size, align) * align;
1090 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1091 mem = adjust_address (result, mode, size);
1092 savevec[nelts++] = (savep
1093 ? gen_rtx_SET (VOIDmode, mem, reg)
1094 : gen_rtx_SET (VOIDmode, reg, mem));
1095 size += GET_MODE_SIZE (mode);
1097 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1099 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1101 /* Save the state required to perform an untyped call with the same
1102 arguments as were passed to the current function. */
1104 static rtx
1105 expand_builtin_apply_args_1 (void)
1107 rtx registers;
1108 int size, align, regno;
1109 enum machine_mode mode;
1111 /* Create a block where the arg-pointer, structure value address,
1112 and argument registers can be saved. */
1113 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1115 /* Walk past the arg-pointer and structure value address. */
1116 size = GET_MODE_SIZE (Pmode);
1117 if (struct_value_rtx)
1118 size += GET_MODE_SIZE (Pmode);
1120 /* Save each register used in calling a function to the block. */
1121 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1122 if ((mode = apply_args_mode[regno]) != VOIDmode)
1124 rtx tem;
1126 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1127 if (size % align != 0)
1128 size = CEIL (size, align) * align;
1130 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1132 emit_move_insn (adjust_address (registers, mode, size), tem);
1133 size += GET_MODE_SIZE (mode);
1136 /* Save the arg pointer to the block. */
1137 emit_move_insn (adjust_address (registers, Pmode, 0),
1138 copy_to_reg (virtual_incoming_args_rtx));
1139 size = GET_MODE_SIZE (Pmode);
1141 /* Save the structure value address unless this is passed as an
1142 "invisible" first argument. */
1143 if (struct_value_incoming_rtx)
1145 emit_move_insn (adjust_address (registers, Pmode, size),
1146 copy_to_reg (struct_value_incoming_rtx));
1147 size += GET_MODE_SIZE (Pmode);
1150 /* Return the address of the block. */
1151 return copy_addr_to_reg (XEXP (registers, 0));
1154 /* __builtin_apply_args returns block of memory allocated on
1155 the stack into which is stored the arg pointer, structure
1156 value address, static chain, and all the registers that might
1157 possibly be used in performing a function call. The code is
1158 moved to the start of the function so the incoming values are
1159 saved. */
1161 static rtx
1162 expand_builtin_apply_args (void)
1164 /* Don't do __builtin_apply_args more than once in a function.
1165 Save the result of the first call and reuse it. */
1166 if (apply_args_value != 0)
1167 return apply_args_value;
1169 /* When this function is called, it means that registers must be
1170 saved on entry to this function. So we migrate the
1171 call to the first insn of this function. */
1172 rtx temp;
1173 rtx seq;
1175 start_sequence ();
1176 temp = expand_builtin_apply_args_1 ();
1177 seq = get_insns ();
1178 end_sequence ();
1180 apply_args_value = temp;
1182 /* Put the insns after the NOTE that starts the function.
1183 If this is inside a start_sequence, make the outer-level insn
1184 chain current, so the code is placed at the start of the
1185 function. */
1186 push_topmost_sequence ();
1187 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1188 pop_topmost_sequence ();
1189 return temp;
1193 /* Perform an untyped call and save the state required to perform an
1194 untyped return of whatever value was returned by the given function. */
1196 static rtx
1197 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1199 int size, align, regno;
1200 enum machine_mode mode;
1201 rtx incoming_args, result, reg, dest, src, call_insn;
1202 rtx old_stack_level = 0;
1203 rtx call_fusage = 0;
1205 #ifdef POINTERS_EXTEND_UNSIGNED
1206 if (GET_MODE (arguments) != Pmode)
1207 arguments = convert_memory_address (Pmode, arguments);
1208 #endif
1210 /* Create a block where the return registers can be saved. */
1211 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1213 /* Fetch the arg pointer from the ARGUMENTS block. */
1214 incoming_args = gen_reg_rtx (Pmode);
1215 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1216 #ifndef STACK_GROWS_DOWNWARD
1217 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1218 incoming_args, 0, OPTAB_LIB_WIDEN);
1219 #endif
1221 /* Perform postincrements before actually calling the function. */
1222 emit_queue ();
1224 /* Push a new argument block and copy the arguments. Do not allow
1225 the (potential) memcpy call below to interfere with our stack
1226 manipulations. */
1227 do_pending_stack_adjust ();
1228 NO_DEFER_POP;
1230 /* Save the stack with nonlocal if available. */
1231 #ifdef HAVE_save_stack_nonlocal
1232 if (HAVE_save_stack_nonlocal)
1233 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1234 else
1235 #endif
1236 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1238 /* Push a block of memory onto the stack to store the memory arguments.
1239 Save the address in a register, and copy the memory arguments. ??? I
1240 haven't figured out how the calling convention macros affect this,
1241 but it's likely that the source and/or destination addresses in
1242 the block copy will need updating in machine specific ways. */
1243 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1244 dest = gen_rtx_MEM (BLKmode, dest);
1245 set_mem_align (dest, PARM_BOUNDARY);
1246 src = gen_rtx_MEM (BLKmode, incoming_args);
1247 set_mem_align (src, PARM_BOUNDARY);
1248 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1250 /* Refer to the argument block. */
1251 apply_args_size ();
1252 arguments = gen_rtx_MEM (BLKmode, arguments);
1253 set_mem_align (arguments, PARM_BOUNDARY);
1255 /* Walk past the arg-pointer and structure value address. */
1256 size = GET_MODE_SIZE (Pmode);
1257 if (struct_value_rtx)
1258 size += GET_MODE_SIZE (Pmode);
1260 /* Restore each of the registers previously saved. Make USE insns
1261 for each of these registers for use in making the call. */
1262 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1263 if ((mode = apply_args_mode[regno]) != VOIDmode)
1265 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1266 if (size % align != 0)
1267 size = CEIL (size, align) * align;
1268 reg = gen_rtx_REG (mode, regno);
1269 emit_move_insn (reg, adjust_address (arguments, mode, size));
1270 use_reg (&call_fusage, reg);
1271 size += GET_MODE_SIZE (mode);
1274 /* Restore the structure value address unless this is passed as an
1275 "invisible" first argument. */
1276 size = GET_MODE_SIZE (Pmode);
1277 if (struct_value_rtx)
1279 rtx value = gen_reg_rtx (Pmode);
1280 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1281 emit_move_insn (struct_value_rtx, value);
1282 if (GET_CODE (struct_value_rtx) == REG)
1283 use_reg (&call_fusage, struct_value_rtx);
1284 size += GET_MODE_SIZE (Pmode);
1287 /* All arguments and registers used for the call are set up by now! */
1288 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1290 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1291 and we don't want to load it into a register as an optimization,
1292 because prepare_call_address already did it if it should be done. */
1293 if (GET_CODE (function) != SYMBOL_REF)
1294 function = memory_address (FUNCTION_MODE, function);
1296 /* Generate the actual call instruction and save the return value. */
1297 #ifdef HAVE_untyped_call
1298 if (HAVE_untyped_call)
1299 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1300 result, result_vector (1, result)));
1301 else
1302 #endif
1303 #ifdef HAVE_call_value
1304 if (HAVE_call_value)
1306 rtx valreg = 0;
1308 /* Locate the unique return register. It is not possible to
1309 express a call that sets more than one return register using
1310 call_value; use untyped_call for that. In fact, untyped_call
1311 only needs to save the return registers in the given block. */
1312 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1313 if ((mode = apply_result_mode[regno]) != VOIDmode)
1315 if (valreg)
1316 abort (); /* HAVE_untyped_call required. */
1317 valreg = gen_rtx_REG (mode, regno);
1320 emit_call_insn (GEN_CALL_VALUE (valreg,
1321 gen_rtx_MEM (FUNCTION_MODE, function),
1322 const0_rtx, NULL_RTX, const0_rtx));
1324 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1326 else
1327 #endif
1328 abort ();
1330 /* Find the CALL insn we just emitted, and attach the register usage
1331 information. */
1332 call_insn = last_call_insn ();
1333 add_function_usage_to (call_insn, call_fusage);
1335 /* Restore the stack. */
1336 #ifdef HAVE_save_stack_nonlocal
1337 if (HAVE_save_stack_nonlocal)
1338 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1339 else
1340 #endif
1341 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1343 OK_DEFER_POP;
1345 /* Return the address of the result block. */
1346 return copy_addr_to_reg (XEXP (result, 0));
1349 /* Perform an untyped return. */
1351 static void
1352 expand_builtin_return (rtx result)
1354 int size, align, regno;
1355 enum machine_mode mode;
1356 rtx reg;
1357 rtx call_fusage = 0;
1359 #ifdef POINTERS_EXTEND_UNSIGNED
1360 if (GET_MODE (result) != Pmode)
1361 result = convert_memory_address (Pmode, result);
1362 #endif
1364 apply_result_size ();
1365 result = gen_rtx_MEM (BLKmode, result);
1367 #ifdef HAVE_untyped_return
1368 if (HAVE_untyped_return)
1370 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1371 emit_barrier ();
1372 return;
1374 #endif
1376 /* Restore the return value and note that each value is used. */
1377 size = 0;
1378 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1379 if ((mode = apply_result_mode[regno]) != VOIDmode)
1381 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1382 if (size % align != 0)
1383 size = CEIL (size, align) * align;
1384 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1385 emit_move_insn (reg, adjust_address (result, mode, size));
1387 push_to_sequence (call_fusage);
1388 emit_insn (gen_rtx_USE (VOIDmode, reg));
1389 call_fusage = get_insns ();
1390 end_sequence ();
1391 size += GET_MODE_SIZE (mode);
1394 /* Put the USE insns before the return. */
1395 emit_insn (call_fusage);
1397 /* Return whatever values were restored by jumping directly to the end
1398 of the function. */
1399 expand_null_return ();
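/* Rough sketch of how the three builtins fit together in user code when
   forwarding a call (the callee, the cast and the argument-block size of
   64 bytes are assumptions for illustration only):

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*) ()) callee, args, 64);
     __builtin_return (res);

   __builtin_apply_args saves the incoming registers, __builtin_apply
   replays them for the callee together with the given number of bytes of
   stack arguments, and __builtin_return performs the untyped return
   expanded above.  */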
1402 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1404 static enum type_class
1405 type_to_class (tree type)
1407 switch (TREE_CODE (type))
1409 case VOID_TYPE: return void_type_class;
1410 case INTEGER_TYPE: return integer_type_class;
1411 case CHAR_TYPE: return char_type_class;
1412 case ENUMERAL_TYPE: return enumeral_type_class;
1413 case BOOLEAN_TYPE: return boolean_type_class;
1414 case POINTER_TYPE: return pointer_type_class;
1415 case REFERENCE_TYPE: return reference_type_class;
1416 case OFFSET_TYPE: return offset_type_class;
1417 case REAL_TYPE: return real_type_class;
1418 case COMPLEX_TYPE: return complex_type_class;
1419 case FUNCTION_TYPE: return function_type_class;
1420 case METHOD_TYPE: return method_type_class;
1421 case RECORD_TYPE: return record_type_class;
1422 case UNION_TYPE:
1423 case QUAL_UNION_TYPE: return union_type_class;
1424 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1425 ? string_type_class : array_type_class);
1426 case SET_TYPE: return set_type_class;
1427 case FILE_TYPE: return file_type_class;
1428 case LANG_TYPE: return lang_type_class;
1429 default: return no_type_class;
1433 /* Expand a call to __builtin_classify_type with arguments found in
1434 ARGLIST. */
1436 static rtx
1437 expand_builtin_classify_type (tree arglist)
1439 if (arglist != 0)
1440 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1441 return GEN_INT (no_type_class);
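/* As an illustration, __builtin_classify_type (1.0) should yield
   real_type_class, while __builtin_classify_type ("") should yield
   pointer_type_class because the string argument decays to a pointer;
   the numeric values of these classes come from enum type_class in
   typeclass.h.  */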
1444 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1446 static rtx
1447 expand_builtin_constant_p (tree arglist, enum machine_mode target_mode)
1449 rtx tmp;
1451 if (arglist == 0)
1452 return const0_rtx;
1453 arglist = TREE_VALUE (arglist);
1455 /* We have taken care of the easy cases during constant folding. This
1456 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1457 get a chance to see if it can deduce whether ARGLIST is constant. */
1459 current_function_calls_constant_p = 1;
1461 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1462 tmp = gen_rtx_CONSTANT_P_RTX (target_mode, tmp);
1463 return tmp;
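/* Typical source-level use that relies on this late resolution (the
   helper names are hypothetical):

     #define ILOG2(n) (__builtin_constant_p (n) \
                       ? const_ilog2 (n) : runtime_ilog2 (n))

   If the optimizers later prove the operand constant, the CONSTANT_P_RTX
   folds to 1 and the constant arm is selected; any that remain are
   eventually resolved to 0.  */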
1466 /* Return the mathematical function equivalent to FN, but operating
1467 directly on TYPE, if available.  */
1468 tree
1469 mathfn_built_in (tree type, enum built_in_function fn)
1471 enum built_in_function fcode = NOT_BUILT_IN;
1472 if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
1473 switch (fn)
1475 case BUILT_IN_SQRT:
1476 case BUILT_IN_SQRTF:
1477 case BUILT_IN_SQRTL:
1478 fcode = BUILT_IN_SQRT;
1479 break;
1480 case BUILT_IN_SIN:
1481 case BUILT_IN_SINF:
1482 case BUILT_IN_SINL:
1483 fcode = BUILT_IN_SIN;
1484 break;
1485 case BUILT_IN_COS:
1486 case BUILT_IN_COSF:
1487 case BUILT_IN_COSL:
1488 fcode = BUILT_IN_COS;
1489 break;
1490 case BUILT_IN_EXP:
1491 case BUILT_IN_EXPF:
1492 case BUILT_IN_EXPL:
1493 fcode = BUILT_IN_EXP;
1494 break;
1495 case BUILT_IN_LOG:
1496 case BUILT_IN_LOGF:
1497 case BUILT_IN_LOGL:
1498 fcode = BUILT_IN_LOG;
1499 break;
1500 case BUILT_IN_TAN:
1501 case BUILT_IN_TANF:
1502 case BUILT_IN_TANL:
1503 fcode = BUILT_IN_TAN;
1504 break;
1505 case BUILT_IN_ATAN:
1506 case BUILT_IN_ATANF:
1507 case BUILT_IN_ATANL:
1508 fcode = BUILT_IN_ATAN;
1509 break;
1510 case BUILT_IN_FLOOR:
1511 case BUILT_IN_FLOORF:
1512 case BUILT_IN_FLOORL:
1513 fcode = BUILT_IN_FLOOR;
1514 break;
1515 case BUILT_IN_CEIL:
1516 case BUILT_IN_CEILF:
1517 case BUILT_IN_CEILL:
1518 fcode = BUILT_IN_CEIL;
1519 break;
1520 case BUILT_IN_TRUNC:
1521 case BUILT_IN_TRUNCF:
1522 case BUILT_IN_TRUNCL:
1523 fcode = BUILT_IN_TRUNC;
1524 break;
1525 case BUILT_IN_ROUND:
1526 case BUILT_IN_ROUNDF:
1527 case BUILT_IN_ROUNDL:
1528 fcode = BUILT_IN_ROUND;
1529 break;
1530 case BUILT_IN_NEARBYINT:
1531 case BUILT_IN_NEARBYINTF:
1532 case BUILT_IN_NEARBYINTL:
1533 fcode = BUILT_IN_NEARBYINT;
1534 break;
1535 default:
1536 abort ();
1538 else if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
1539 switch (fn)
1541 case BUILT_IN_SQRT:
1542 case BUILT_IN_SQRTF:
1543 case BUILT_IN_SQRTL:
1544 fcode = BUILT_IN_SQRTF;
1545 break;
1546 case BUILT_IN_SIN:
1547 case BUILT_IN_SINF:
1548 case BUILT_IN_SINL:
1549 fcode = BUILT_IN_SINF;
1550 break;
1551 case BUILT_IN_COS:
1552 case BUILT_IN_COSF:
1553 case BUILT_IN_COSL:
1554 fcode = BUILT_IN_COSF;
1555 break;
1556 case BUILT_IN_EXP:
1557 case BUILT_IN_EXPF:
1558 case BUILT_IN_EXPL:
1559 fcode = BUILT_IN_EXPF;
1560 break;
1561 case BUILT_IN_LOG:
1562 case BUILT_IN_LOGF:
1563 case BUILT_IN_LOGL:
1564 fcode = BUILT_IN_LOGF;
1565 break;
1566 case BUILT_IN_TAN:
1567 case BUILT_IN_TANF:
1568 case BUILT_IN_TANL:
1569 fcode = BUILT_IN_TANF;
1570 break;
1571 case BUILT_IN_ATAN:
1572 case BUILT_IN_ATANF:
1573 case BUILT_IN_ATANL:
1574 fcode = BUILT_IN_ATANF;
1575 break;
1576 case BUILT_IN_FLOOR:
1577 case BUILT_IN_FLOORF:
1578 case BUILT_IN_FLOORL:
1579 fcode = BUILT_IN_FLOORF;
1580 break;
1581 case BUILT_IN_CEIL:
1582 case BUILT_IN_CEILF:
1583 case BUILT_IN_CEILL:
1584 fcode = BUILT_IN_CEILF;
1585 break;
1586 case BUILT_IN_TRUNC:
1587 case BUILT_IN_TRUNCF:
1588 case BUILT_IN_TRUNCL:
1589 fcode = BUILT_IN_TRUNCF;
1590 break;
1591 case BUILT_IN_ROUND:
1592 case BUILT_IN_ROUNDF:
1593 case BUILT_IN_ROUNDL:
1594 fcode = BUILT_IN_ROUNDF;
1595 break;
1596 case BUILT_IN_NEARBYINT:
1597 case BUILT_IN_NEARBYINTF:
1598 case BUILT_IN_NEARBYINTL:
1599 fcode = BUILT_IN_NEARBYINTF;
1600 break;
1601 default:
1602 abort ();
1604 else if (TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
1605 switch (fn)
1607 case BUILT_IN_SQRT:
1608 case BUILT_IN_SQRTF:
1609 case BUILT_IN_SQRTL:
1610 fcode = BUILT_IN_SQRTL;
1611 break;
1612 case BUILT_IN_SIN:
1613 case BUILT_IN_SINF:
1614 case BUILT_IN_SINL:
1615 fcode = BUILT_IN_SINL;
1616 break;
1617 case BUILT_IN_COS:
1618 case BUILT_IN_COSF:
1619 case BUILT_IN_COSL:
1620 fcode = BUILT_IN_COSL;
1621 break;
1622 case BUILT_IN_EXP:
1623 case BUILT_IN_EXPF:
1624 case BUILT_IN_EXPL:
1625 fcode = BUILT_IN_EXPL;
1626 break;
1627 case BUILT_IN_LOG:
1628 case BUILT_IN_LOGF:
1629 case BUILT_IN_LOGL:
1630 fcode = BUILT_IN_LOGL;
1631 break;
1632 case BUILT_IN_TAN:
1633 case BUILT_IN_TANF:
1634 case BUILT_IN_TANL:
1635 fcode = BUILT_IN_TANL;
1636 break;
1637 case BUILT_IN_ATAN:
1638 case BUILT_IN_ATANF:
1639 case BUILT_IN_ATANL:
1640 fcode = BUILT_IN_ATANL;
1641 break;
1642 case BUILT_IN_FLOOR:
1643 case BUILT_IN_FLOORF:
1644 case BUILT_IN_FLOORL:
1645 fcode = BUILT_IN_FLOORL;
1646 break;
1647 case BUILT_IN_CEIL:
1648 case BUILT_IN_CEILF:
1649 case BUILT_IN_CEILL:
1650 fcode = BUILT_IN_CEILL;
1651 break;
1652 case BUILT_IN_TRUNC:
1653 case BUILT_IN_TRUNCF:
1654 case BUILT_IN_TRUNCL:
1655 fcode = BUILT_IN_TRUNCL;
1656 break;
1657 case BUILT_IN_ROUND:
1658 case BUILT_IN_ROUNDF:
1659 case BUILT_IN_ROUNDL:
1660 fcode = BUILT_IN_ROUNDL;
1661 break;
1662 case BUILT_IN_NEARBYINT:
1663 case BUILT_IN_NEARBYINTF:
1664 case BUILT_IN_NEARBYINTL:
1665 fcode = BUILT_IN_NEARBYINTL;
1666 break;
1667 default:
1668 abort ();
1670 return implicit_built_in_decls[fcode];
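/* For example, mathfn_built_in (float_type_node, BUILT_IN_SQRT) and
   mathfn_built_in (float_type_node, BUILT_IN_SQRTL) both return the
   implicit declaration of BUILT_IN_SQRTF, since only the mode of TYPE
   decides which variant is chosen.  */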
1673 /* If errno must be maintained, expand the RTL to check if the result,
1674 TARGET, of a built-in function call, EXP, is NaN, and if so set
1675 errno to EDOM. */
1677 static void
1678 expand_errno_check (tree exp, rtx target)
1680 rtx lab = gen_label_rtx ();
1682 /* Test the result; if it is NaN, set errno=EDOM because
1683 the argument was not in the domain. */
1684 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1685 0, lab);
1687 #ifdef TARGET_EDOM
1688 /* If this built-in doesn't throw an exception, set errno directly. */
1689 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
1691 #ifdef GEN_ERRNO_RTX
1692 rtx errno_rtx = GEN_ERRNO_RTX;
1693 #else
1694 rtx errno_rtx
1695 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1696 #endif
1697 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1698 emit_label (lab);
1699 return;
1701 #endif
1703 /* We can't set errno=EDOM directly; let the library call do it.
1704 Pop the arguments right away in case the call gets deleted. */
1705 NO_DEFER_POP;
1706 expand_call (exp, target, 0);
1707 OK_DEFER_POP;
1708 emit_label (lab);
1712 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1713 Return 0 if a normal call should be emitted rather than expanding the
1714 function in-line. EXP is the expression that is a call to the builtin
1715 function; if convenient, the result should be placed in TARGET.
1716 SUBTARGET may be used as the target for computing one of EXP's operands. */
1718 static rtx
1719 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1721 optab builtin_optab;
1722 rtx op0, insns;
1723 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1724 tree arglist = TREE_OPERAND (exp, 1);
1725 enum machine_mode mode;
1726 bool errno_set = false;
1727 tree arg, narg;
1729 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1730 return 0;
1732 arg = TREE_VALUE (arglist);
1734 switch (DECL_FUNCTION_CODE (fndecl))
1736 case BUILT_IN_SIN:
1737 case BUILT_IN_SINF:
1738 case BUILT_IN_SINL:
1739 builtin_optab = sin_optab; break;
1740 case BUILT_IN_COS:
1741 case BUILT_IN_COSF:
1742 case BUILT_IN_COSL:
1743 builtin_optab = cos_optab; break;
1744 case BUILT_IN_SQRT:
1745 case BUILT_IN_SQRTF:
1746 case BUILT_IN_SQRTL:
1747 errno_set = ! tree_expr_nonnegative_p (arg);
1748 builtin_optab = sqrt_optab;
1749 break;
1750 case BUILT_IN_EXP:
1751 case BUILT_IN_EXPF:
1752 case BUILT_IN_EXPL:
1753 errno_set = true; builtin_optab = exp_optab; break;
1754 case BUILT_IN_LOG:
1755 case BUILT_IN_LOGF:
1756 case BUILT_IN_LOGL:
1757 errno_set = true; builtin_optab = log_optab; break;
1758 case BUILT_IN_TAN:
1759 case BUILT_IN_TANF:
1760 case BUILT_IN_TANL:
1761 builtin_optab = tan_optab; break;
1762 case BUILT_IN_ATAN:
1763 case BUILT_IN_ATANF:
1764 case BUILT_IN_ATANL:
1765 builtin_optab = atan_optab; break;
1766 case BUILT_IN_FLOOR:
1767 case BUILT_IN_FLOORF:
1768 case BUILT_IN_FLOORL:
1769 builtin_optab = floor_optab; break;
1770 case BUILT_IN_CEIL:
1771 case BUILT_IN_CEILF:
1772 case BUILT_IN_CEILL:
1773 builtin_optab = ceil_optab; break;
1774 case BUILT_IN_TRUNC:
1775 case BUILT_IN_TRUNCF:
1776 case BUILT_IN_TRUNCL:
1777 builtin_optab = trunc_optab; break;
1778 case BUILT_IN_ROUND:
1779 case BUILT_IN_ROUNDF:
1780 case BUILT_IN_ROUNDL:
1781 builtin_optab = round_optab; break;
1782 case BUILT_IN_NEARBYINT:
1783 case BUILT_IN_NEARBYINTF:
1784 case BUILT_IN_NEARBYINTL:
1785 builtin_optab = nearbyint_optab; break;
1786 default:
1787 abort ();
1790 /* Make a suitable register to place result in. */
1791 mode = TYPE_MODE (TREE_TYPE (exp));
1793 /* Before working hard, check whether the instruction is available. */
1794 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1795 return 0;
1796 target = gen_reg_rtx (mode);
1798 if (! flag_errno_math || ! HONOR_NANS (mode))
1799 errno_set = false;
1801 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1802 need to expand the argument again. This way, we will not perform
1803 side-effects more than once. */
1804 narg = save_expr (arg);
1805 if (narg != arg)
1807 arglist = build_tree_list (NULL_TREE, narg);
1808 exp = build_function_call_expr (fndecl, arglist);
1811 op0 = expand_expr (narg, subtarget, VOIDmode, 0);
1813 emit_queue ();
1814 start_sequence ();
1816 /* Compute into TARGET.
1817 Set TARGET to wherever the result comes back. */
1818 target = expand_unop (mode, builtin_optab, op0, target, 0);
1820 /* If we were unable to expand via the builtin, stop the sequence
1821 (without outputting the insns) and call the library function
1822 with the stabilized argument list. */
1823 if (target == 0)
1825 end_sequence ();
1826 return expand_call (exp, target, target == const0_rtx);
1829 if (errno_set)
1830 expand_errno_check (exp, target);
1832 /* Output the entire sequence. */
1833 insns = get_insns ();
1834 end_sequence ();
1835 emit_insn (insns);
1837 return target;
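/* Illustrative sketch (not part of the original source): on a target
   whose machine description provides a sqrt pattern for DFmode, a call
   such as

       double f (double x) { return __builtin_sqrt (x); }

   is expanded through sqrt_optab into that single pattern; with
   -fmath-errno in effect and an argument that may be negative, the
   expand_errno_check code above also stores EDOM when the result is a
   NaN.  If the optab has no handler for the mode, the function returns
   0 and a normal library call is emitted instead.  */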
1840 /* Expand a call to the builtin binary math functions (pow and atan2).
1841 Return 0 if a normal call should be emitted rather than expanding the
1842 function in-line. EXP is the expression that is a call to the builtin
1843 function; if convenient, the result should be placed in TARGET.
1844 SUBTARGET may be used as the target for computing one of EXP's
1845 operands. */
1847 static rtx
1848 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1850 optab builtin_optab;
1851 rtx op0, op1, insns;
1852 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1853 tree arglist = TREE_OPERAND (exp, 1);
1854 tree arg0, arg1, temp, narg;
1855 enum machine_mode mode;
1856 bool errno_set = true;
1857 bool stable = true;
1859 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1860 return 0;
1862 arg0 = TREE_VALUE (arglist);
1863 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1865 switch (DECL_FUNCTION_CODE (fndecl))
1867 case BUILT_IN_POW:
1868 case BUILT_IN_POWF:
1869 case BUILT_IN_POWL:
1870 builtin_optab = pow_optab; break;
1871 case BUILT_IN_ATAN2:
1872 case BUILT_IN_ATAN2F:
1873 case BUILT_IN_ATAN2L:
1874 builtin_optab = atan2_optab; break;
1875 default:
1876 abort ();
1879 /* Make a suitable register to place result in. */
1880 mode = TYPE_MODE (TREE_TYPE (exp));
1882 /* Before working hard, check whether the instruction is available. */
1883 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1884 return 0;
1886 target = gen_reg_rtx (mode);
1888 if (! flag_errno_math || ! HONOR_NANS (mode))
1889 errno_set = false;
1891 /* Always stabilize the argument list. */
1892 narg = save_expr (arg1);
1893 if (narg != arg1)
1895 temp = build_tree_list (NULL_TREE, narg);
1896 stable = false;
1898 else
1899 temp = TREE_CHAIN (arglist);
1901 narg = save_expr (arg0);
1902 if (narg != arg0)
1904 arglist = tree_cons (NULL_TREE, narg, temp);
1905 stable = false;
1907 else if (! stable)
1908 arglist = tree_cons (NULL_TREE, arg0, temp);
1910 if (! stable)
1911 exp = build_function_call_expr (fndecl, arglist);
1913 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1914 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1916 emit_queue ();
1917 start_sequence ();
1919 /* Compute into TARGET.
1920 Set TARGET to wherever the result comes back. */
1921 target = expand_binop (mode, builtin_optab, op0, op1,
1922 target, 0, OPTAB_DIRECT);
1924 /* If we were unable to expand via the builtin, stop the sequence
1925 (without outputting the insns) and call the library function
1926 with the stabilized argument list. */
1927 if (target == 0)
1929 end_sequence ();
1930 return expand_call (exp, target, target == const0_rtx);
1933 if (errno_set)
1934 expand_errno_check (exp, target);
1936 /* Output the entire sequence. */
1937 insns = get_insns ();
1938 end_sequence ();
1939 emit_insn (insns);
1941 return target;
1944 /* To evaluate powi(x,n), the floating point value x raised to the
1945 constant integer exponent n, we use a hybrid algorithm that
1946 combines the "window method" with look-up tables. For an
1947 introduction to exponentiation algorithms and "addition chains",
1948 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
1949 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
1950 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
1951 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
1953 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
1954 multiplications to inline before calling the system library's pow
1955 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
1956 so this default never requires calling pow, powf or powl. */
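/* (The 2*bits(n)-2 figure is the cost of the plain binary method: an
   exponent of B bits needs at most B-1 squarings plus at most B-1
   additional multiplications; the table-driven scheme below is assumed
   never to do worse.)  */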
1958 #ifndef POWI_MAX_MULTS
1959 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
1960 #endif
1962 /* The size of the "optimal power tree" lookup table. All
1963 exponents less than this value are simply looked up in the
1964 powi_table below. This threshold is also used to size the
1965 cache of pseudo registers that hold intermediate results. */
1966 #define POWI_TABLE_SIZE 256
1968 /* The size, in bits, of the window used in the "window method"
1969 exponentiation algorithm. This is equivalent to a radix of
1970 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
1971 #define POWI_WINDOW_SIZE 3
1973 /* The following table is an efficient representation of an
1974 "optimal power tree". For each value, i, the corresponding
1975 value, j, in the table states that an optimal evaluation
1976 sequence for calculating pow(x,i) can be found by evaluating
1977 pow(x,j)*pow(x,i-j). An optimal power tree for the first
1978 100 integers is given in Knuth's "Seminumerical algorithms". */
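/* For example, powi_table[5] below is 3, so pow(x,5) is computed as
   pow(x,3)*pow(x,2); powi_table[3] is 2, giving pow(x,3) =
   pow(x,2)*pow(x,1), and powi_table[2] is 1, giving pow(x,2) = x*x.
   Reusing the shared pow(x,2), the whole chain takes three
   multiplications.  */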
1980 static const unsigned char powi_table[POWI_TABLE_SIZE] =
1982 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
1983 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
1984 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
1985 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
1986 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
1987 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
1988 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
1989 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
1990 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
1991 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
1992 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
1993 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
1994 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
1995 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
1996 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
1997 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
1998 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
1999 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2000 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2001 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2002 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2003 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2004 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2005 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2006 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2007 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2008 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2009 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2010 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2011 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2012 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2013 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2017 /* Return the number of multiplications required to calculate
2018 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2019 subroutine of powi_cost. CACHE is an array indicating
2020 which exponents have already been calculated. */
2022 static int
2023 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2025 /* If we've already calculated this exponent, then this evaluation
2026 doesn't require any additional multiplications. */
2027 if (cache[n])
2028 return 0;
2030 cache[n] = true;
2031 return powi_lookup_cost (n - powi_table[n], cache)
2032 + powi_lookup_cost (powi_table[n], cache) + 1;
2035 /* Return the number of multiplications required to calculate
2036 powi(x,n) for an arbitrary x, given the exponent N. This
2037 function needs to be kept in sync with expand_powi below. */
2039 static int
2040 powi_cost (HOST_WIDE_INT n)
2042 bool cache[POWI_TABLE_SIZE];
2043 unsigned HOST_WIDE_INT digit;
2044 unsigned HOST_WIDE_INT val;
2045 int result;
2047 if (n == 0)
2048 return 0;
2050 /* Ignore the reciprocal when calculating the cost. */
2051 val = (n < 0) ? -n : n;
2053 /* Initialize the exponent cache. */
2054 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2055 cache[1] = true;
2057 result = 0;
2059 while (val >= POWI_TABLE_SIZE)
2061 if (val & 1)
2063 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2064 result += powi_lookup_cost (digit, cache)
2065 + POWI_WINDOW_SIZE + 1;
2066 val >>= POWI_WINDOW_SIZE;
2068 else
2070 val >>= 1;
2071 result++;
2075 return result + powi_lookup_cost (val, cache);
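/* Illustrative trace: powi_cost (5) skips the while loop since
   5 < POWI_TABLE_SIZE; powi_lookup_cost (5) then recurses into the
   exponents 2 and 3, each of which costs one multiplication on top of
   the pre-cached exponent 1, and adds one for the final product, for a
   total of 3 -- matching the pow(x,5) example above.  */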
2078 /* Recursive subroutine of expand_powi. This function takes the array,
2079 CACHE, of already calculated exponents and an exponent N and returns
2080 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2082 static rtx
2083 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2085 unsigned HOST_WIDE_INT digit;
2086 rtx target, result;
2087 rtx op0, op1;
2089 if (n < POWI_TABLE_SIZE)
2091 if (cache[n])
2092 return cache[n];
2094 target = gen_reg_rtx (mode);
2095 cache[n] = target;
2097 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2098 op1 = expand_powi_1 (mode, powi_table[n], cache);
2100 else if (n & 1)
2102 target = gen_reg_rtx (mode);
2103 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2104 op0 = expand_powi_1 (mode, n - digit, cache);
2105 op1 = expand_powi_1 (mode, digit, cache);
2107 else
2109 target = gen_reg_rtx (mode);
2110 op0 = expand_powi_1 (mode, n >> 1, cache);
2111 op1 = op0;
2114 result = expand_mult (mode, op0, op1, target, 0);
2115 if (result != target)
2116 emit_move_insn (target, result);
2117 return target;
2120 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2121 floating point operand in mode MODE, and N is the exponent. This
2122 function needs to be kept in sync with powi_cost above. */
2124 static rtx
2125 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2127 unsigned HOST_WIDE_INT val;
2128 rtx cache[POWI_TABLE_SIZE];
2129 rtx result;
2131 if (n == 0)
2132 return CONST1_RTX (mode);
2134 val = (n < 0) ? -n : n;
2136 memset (cache, 0, sizeof(cache));
2137 cache[1] = x;
2139 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2141 /* If the original exponent was negative, reciprocate the result. */
2142 if (n < 0)
2143 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2144 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2146 return result;
2149 /* Expand a call to the pow built-in mathematical function. Return 0 if
2150 a normal call should be emitted rather than expanding the function
2151 in-line. EXP is the expression that is a call to the builtin
2152 function; if convenient, the result should be placed in TARGET. */
2154 static rtx
2155 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2157 tree arglist = TREE_OPERAND (exp, 1);
2158 tree arg0, arg1;
2160 if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2161 return 0;
2163 arg0 = TREE_VALUE (arglist);
2164 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2166 if (flag_unsafe_math_optimizations
2167 && ! flag_errno_math
2168 && ! optimize_size
2169 && TREE_CODE (arg1) == REAL_CST
2170 && ! TREE_CONSTANT_OVERFLOW (arg1))
2172 REAL_VALUE_TYPE cint;
2173 REAL_VALUE_TYPE c;
2174 HOST_WIDE_INT n;
2176 c = TREE_REAL_CST (arg1);
2177 n = real_to_integer (&c);
2178 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2179 if (real_identical (&c, &cint)
2180 && powi_cost (n) <= POWI_MAX_MULTS)
2182 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2183 rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
2184 op = force_reg (mode, op);
2185 return expand_powi (op, mode, n);
2188 return expand_builtin_mathfn_2 (exp, target, NULL_RTX);
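/* Hedged example (not from the original source): compiled with
   -funsafe-math-optimizations and -fno-math-errno (both implied by
   -ffast-math) and not optimizing for size,

       double cube (double x) { return __builtin_pow (x, 3.0); }

   takes the constant-exponent path above: n is 3, powi_cost (3) is 2,
   so the call is expanded by expand_powi as two multiplications rather
   than a call to pow.  A non-constant or non-integral exponent falls
   through to expand_builtin_mathfn_2.  */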
2191 /* Expand expression EXP which is a call to the strlen builtin. Return 0
2192 if we failed and the caller should emit a normal call; otherwise
2193 try to get the result in TARGET, if convenient. */
2195 static rtx
2196 expand_builtin_strlen (tree arglist, rtx target,
2197 enum machine_mode target_mode)
2199 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
2200 return 0;
2201 else
2203 rtx pat;
2204 tree len, src = TREE_VALUE (arglist);
2205 rtx result, src_reg, char_rtx, before_strlen;
2206 enum machine_mode insn_mode = target_mode, char_mode;
2207 enum insn_code icode = CODE_FOR_nothing;
2208 int align;
2210 /* If the length can be computed at compile-time, return it. */
2211 len = c_strlen (src, 0);
2212 if (len)
2213 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2215 /* If the length can be computed at compile-time and is a constant
2216 integer, but there are side-effects in src, evaluate
2217 src for side-effects, then return len.
2218 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2219 can be optimized into: i++; x = 3; */
2220 len = c_strlen (src, 1);
2221 if (len && TREE_CODE (len) == INTEGER_CST)
2223 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2224 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2227 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2229 /* If SRC is not a pointer type, don't do this operation inline. */
2230 if (align == 0)
2231 return 0;
2233 /* Bail out if we can't compute strlen in the right mode. */
2234 while (insn_mode != VOIDmode)
2236 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
2237 if (icode != CODE_FOR_nothing)
2238 break;
2240 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2242 if (insn_mode == VOIDmode)
2243 return 0;
2245 /* Make a place to write the result of the instruction. */
2246 result = target;
2247 if (! (result != 0
2248 && GET_CODE (result) == REG
2249 && GET_MODE (result) == insn_mode
2250 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2251 result = gen_reg_rtx (insn_mode);
2253 /* Make a place to hold the source address. We will not expand
2254 the actual source until we are sure that the expansion will
2255 not fail -- there are trees that cannot be expanded twice. */
2256 src_reg = gen_reg_rtx (Pmode);
2258 /* Mark the beginning of the strlen sequence so we can emit the
2259 source operand later. */
2260 before_strlen = get_last_insn ();
2262 char_rtx = const0_rtx;
2263 char_mode = insn_data[(int) icode].operand[2].mode;
2264 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2265 char_mode))
2266 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
2268 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2269 char_rtx, GEN_INT (align));
2270 if (! pat)
2271 return 0;
2272 emit_insn (pat);
2274 /* Now that we are assured of success, expand the source. */
2275 start_sequence ();
2276 pat = memory_address (BLKmode,
2277 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
2278 if (pat != src_reg)
2279 emit_move_insn (src_reg, pat);
2280 pat = get_insns ();
2281 end_sequence ();
2283 if (before_strlen)
2284 emit_insn_after (pat, before_strlen);
2285 else
2286 emit_insn_before (pat, get_insns ());
2288 /* Return the value in the proper mode for this function. */
2289 if (GET_MODE (result) == target_mode)
2290 target = result;
2291 else if (target != 0)
2292 convert_move (target, result, 0);
2293 else
2294 target = convert_to_mode (target_mode, result, 0);
2296 return target;
2300 /* Expand a call to the strstr builtin. Return 0 if we failed and the
2301 caller should emit a normal call; otherwise try to get the result
2302 in TARGET, if convenient (and in mode MODE if that's convenient). */
2304 static rtx
2305 expand_builtin_strstr (tree arglist, rtx target, enum machine_mode mode)
2307 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2308 return 0;
2309 else
2311 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2312 tree fn;
2313 const char *p1, *p2;
2315 p2 = c_getstr (s2);
2316 if (p2 == NULL)
2317 return 0;
2319 p1 = c_getstr (s1);
2320 if (p1 != NULL)
2322 const char *r = strstr (p1, p2);
2324 if (r == NULL)
2325 return const0_rtx;
2327 /* Return an offset into the constant string argument. */
2328 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2329 s1, ssize_int (r - p1))),
2330 target, mode, EXPAND_NORMAL);
2333 if (p2[0] == '\0')
2334 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2336 if (p2[1] != '\0')
2337 return 0;
2339 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2340 if (!fn)
2341 return 0;
2343 /* New argument list transforming strstr(s1, s2) to
2344 strchr(s1, s2[0]). */
2345 arglist =
2346 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2347 arglist = tree_cons (NULL_TREE, s1, arglist);
2348 return expand_expr (build_function_call_expr (fn, arglist),
2349 target, mode, EXPAND_NORMAL);
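/* Examples of the transformations above: a constant haystack is folded
   to an offset into it at compile time, strstr (s, "") simplifies to S
   itself, and a single-character needle such as strstr (s, "a") is
   rewritten as strchr (s, 'a').  */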
2353 /* Expand a call to the strchr builtin. Return 0 if we failed and the
2354 caller should emit a normal call; otherwise try to get the result
2355 in TARGET, if convenient (and in mode MODE if that's convenient). */
2357 static rtx
2358 expand_builtin_strchr (tree arglist, rtx target, enum machine_mode mode)
2360 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2361 return 0;
2362 else
2364 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2365 const char *p1;
2367 if (TREE_CODE (s2) != INTEGER_CST)
2368 return 0;
2370 p1 = c_getstr (s1);
2371 if (p1 != NULL)
2373 char c;
2374 const char *r;
2376 if (target_char_cast (s2, &c))
2377 return 0;
2379 r = strchr (p1, c);
2381 if (r == NULL)
2382 return const0_rtx;
2384 /* Return an offset into the constant string argument. */
2385 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2386 s1, ssize_int (r - p1))),
2387 target, mode, EXPAND_NORMAL);
2390 /* FIXME: Should use the strchrM optab here so that ports can optimize
2391 this. */
2392 return 0;
2396 /* Expand a call to the strrchr builtin. Return 0 if we failed and the
2397 caller should emit a normal call; otherwise try to get the result
2398 in TARGET, if convenient (and in mode MODE if that's convenient). */
2400 static rtx
2401 expand_builtin_strrchr (tree arglist, rtx target, enum machine_mode mode)
2403 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2404 return 0;
2405 else
2407 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2408 tree fn;
2409 const char *p1;
2411 if (TREE_CODE (s2) != INTEGER_CST)
2412 return 0;
2414 p1 = c_getstr (s1);
2415 if (p1 != NULL)
2417 char c;
2418 const char *r;
2420 if (target_char_cast (s2, &c))
2421 return 0;
2423 r = strrchr (p1, c);
2425 if (r == NULL)
2426 return const0_rtx;
2428 /* Return an offset into the constant string argument. */
2429 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2430 s1, ssize_int (r - p1))),
2431 target, mode, EXPAND_NORMAL);
2434 if (! integer_zerop (s2))
2435 return 0;
2437 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2438 if (!fn)
2439 return 0;
2441 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2442 return expand_expr (build_function_call_expr (fn, arglist),
2443 target, mode, EXPAND_NORMAL);
2447 /* Expand a call to the strpbrk builtin. Return 0 if we failed and the
2448 caller should emit a normal call; otherwise try to get the result
2449 in TARGET, if convenient (and in mode MODE if that's convenient). */
2451 static rtx
2452 expand_builtin_strpbrk (tree arglist, rtx target, enum machine_mode mode)
2454 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2455 return 0;
2456 else
2458 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2459 tree fn;
2460 const char *p1, *p2;
2462 p2 = c_getstr (s2);
2463 if (p2 == NULL)
2464 return 0;
2466 p1 = c_getstr (s1);
2467 if (p1 != NULL)
2469 const char *r = strpbrk (p1, p2);
2471 if (r == NULL)
2472 return const0_rtx;
2474 /* Return an offset into the constant string argument. */
2475 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2476 s1, ssize_int (r - p1))),
2477 target, mode, EXPAND_NORMAL);
2480 if (p2[0] == '\0')
2482 /* strpbrk(x, "") == NULL.
2483 Evaluate and ignore the arguments in case they had
2484 side-effects. */
2485 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2486 return const0_rtx;
2489 if (p2[1] != '\0')
2490 return 0; /* Really call strpbrk. */
2492 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2493 if (!fn)
2494 return 0;
2496 /* New argument list transforming strpbrk(s1, s2) to
2497 strchr(s1, s2[0]). */
2498 arglist =
2499 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2500 arglist = tree_cons (NULL_TREE, s1, arglist);
2501 return expand_expr (build_function_call_expr (fn, arglist),
2502 target, mode, EXPAND_NORMAL);
2506 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2507 bytes from constant string DATA + OFFSET and return it as target
2508 constant. */
2510 static rtx
2511 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2512 enum machine_mode mode)
2514 const char *str = (const char *) data;
2516 if (offset < 0
2517 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2518 > strlen (str) + 1))
2519 abort (); /* Attempt to read past the end of constant string. */
2521 return c_readstr (str + offset, mode);
2524 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2525 Return 0 if we failed and the caller should emit a normal call;
2526 otherwise try to get the result in TARGET, if convenient (and in
2527 mode MODE if that's convenient). */
2528 static rtx
2529 expand_builtin_memcpy (tree arglist, rtx target, enum machine_mode mode)
2531 if (!validate_arglist (arglist,
2532 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2533 return 0;
2534 else
2536 tree dest = TREE_VALUE (arglist);
2537 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2538 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2539 const char *src_str;
2540 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2541 unsigned int dest_align
2542 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2543 rtx dest_mem, src_mem, dest_addr, len_rtx;
2545 /* If DEST is not a pointer type, call the normal function. */
2546 if (dest_align == 0)
2547 return 0;
2549 /* If the LEN parameter is zero, return DEST. */
2550 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2552 /* Evaluate and ignore SRC in case it has side-effects. */
2553 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2554 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2557 /* If SRC is not a pointer type, don't do this
2558 operation in-line. */
2559 if (src_align == 0)
2560 return 0;
2562 dest_mem = get_memory_rtx (dest);
2563 set_mem_align (dest_mem, dest_align);
2564 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2565 src_str = c_getstr (src);
2567 /* If SRC is a string constant and block move would be done
2568 by pieces, we can avoid loading the string from memory
2569 and only store the computed constants.
2570 if (src_str
2571 && GET_CODE (len_rtx) == CONST_INT
2572 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2573 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2574 (void *) src_str, dest_align))
2576 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2577 builtin_memcpy_read_str,
2578 (void *) src_str, dest_align, 0);
2579 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2580 #ifdef POINTERS_EXTEND_UNSIGNED
2581 if (GET_MODE (dest_mem) != ptr_mode)
2582 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2583 #endif
2584 return dest_mem;
2587 src_mem = get_memory_rtx (src);
2588 set_mem_align (src_mem, src_align);
2590 /* Copy word part most expediently. */
2591 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2592 BLOCK_OP_NORMAL);
2594 if (dest_addr == 0)
2596 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2597 #ifdef POINTERS_EXTEND_UNSIGNED
2598 if (GET_MODE (dest_addr) != ptr_mode)
2599 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2600 #endif
2602 return dest_addr;
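/* Illustrative example: for memcpy (buf, "abc", 4), the constant length
   does not exceed the source string plus its terminating NUL, so when
   store_by_pieces is possible for the destination alignment the bytes
   are emitted as direct stores and the address of BUF is returned;
   larger or variable lengths go through emit_block_move instead.  */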
2606 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2607 Return 0 if we failed and the caller should emit a normal call;
2608 otherwise try to get the result in TARGET, if convenient (and in
2609 mode MODE if that's convenient). If ENDP is 0 return the
2610 destination pointer, if ENDP is 1 return the end pointer ala
2611 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2612 stpcpy. */
2614 static rtx
2615 expand_builtin_mempcpy (tree arglist, rtx target, enum machine_mode mode,
2616 int endp)
2618 if (!validate_arglist (arglist,
2619 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2620 return 0;
2621 /* If return value is ignored, transform mempcpy into memcpy. */
2622 else if (target == const0_rtx)
2624 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2626 if (!fn)
2627 return 0;
2629 return expand_expr (build_function_call_expr (fn, arglist),
2630 target, mode, EXPAND_NORMAL);
2632 else
2634 tree dest = TREE_VALUE (arglist);
2635 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2636 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2637 const char *src_str;
2638 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2639 unsigned int dest_align
2640 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2641 rtx dest_mem, src_mem, len_rtx;
2643 /* If DEST is not a pointer type or LEN is not constant,
2644 call the normal function. */
2645 if (dest_align == 0 || !host_integerp (len, 1))
2646 return 0;
2648 /* If the LEN parameter is zero, return DEST. */
2649 if (tree_low_cst (len, 1) == 0)
2651 /* Evaluate and ignore SRC in case it has side-effects. */
2652 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2653 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2656 /* If SRC is not a pointer type, don't do this
2657 operation in-line. */
2658 if (src_align == 0)
2659 return 0;
2661 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2662 src_str = c_getstr (src);
2664 /* If SRC is a string constant and block move would be done
2665 by pieces, we can avoid loading the string from memory
2666 and only store the computed constants.
2667 if (src_str
2668 && GET_CODE (len_rtx) == CONST_INT
2669 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2670 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2671 (void *) src_str, dest_align))
2673 dest_mem = get_memory_rtx (dest);
2674 set_mem_align (dest_mem, dest_align);
2675 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2676 builtin_memcpy_read_str,
2677 (void *) src_str, dest_align, endp);
2678 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2679 #ifdef POINTERS_EXTEND_UNSIGNED
2680 if (GET_MODE (dest_mem) != ptr_mode)
2681 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2682 #endif
2683 return dest_mem;
2686 if (GET_CODE (len_rtx) == CONST_INT
2687 && can_move_by_pieces (INTVAL (len_rtx),
2688 MIN (dest_align, src_align)))
2690 dest_mem = get_memory_rtx (dest);
2691 set_mem_align (dest_mem, dest_align);
2692 src_mem = get_memory_rtx (src);
2693 set_mem_align (src_mem, src_align);
2694 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
2695 MIN (dest_align, src_align), endp);
2696 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2697 #ifdef POINTERS_EXTEND_UNSIGNED
2698 if (GET_MODE (dest_mem) != ptr_mode)
2699 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2700 #endif
2701 return dest_mem;
2704 return 0;
2708 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2709 if we failed and the caller should emit a normal call. */
2711 static rtx
2712 expand_builtin_memmove (tree arglist, rtx target, enum machine_mode mode)
2714 if (!validate_arglist (arglist,
2715 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2716 return 0;
2717 else
2719 tree dest = TREE_VALUE (arglist);
2720 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2721 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2723 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2724 unsigned int dest_align
2725 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2727 /* If DEST is not a pointer type, call the normal function. */
2728 if (dest_align == 0)
2729 return 0;
2731 /* If the LEN parameter is zero, return DEST. */
2732 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2734 /* Evaluate and ignore SRC in case it has side-effects. */
2735 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2736 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2739 /* If SRC is not a pointer type, don't do this
2740 operation in-line. */
2741 if (src_align == 0)
2742 return 0;
2744 /* If src is categorized for a readonly section we can use
2745 normal memcpy. */
2746 if (readonly_data_expr (src))
2748 tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2749 if (!fn)
2750 return 0;
2751 return expand_expr (build_function_call_expr (fn, arglist),
2752 target, mode, EXPAND_NORMAL);
2755 /* Otherwise, call the normal function. */
2756 return 0;
2760 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2761 if we failed and the caller should emit a normal call. */
2763 static rtx
2764 expand_builtin_bcopy (tree arglist)
2766 tree src, dest, size, newarglist;
2768 if (!validate_arglist (arglist,
2769 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2770 return NULL_RTX;
2772 src = TREE_VALUE (arglist);
2773 dest = TREE_VALUE (TREE_CHAIN (arglist));
2774 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2776 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2777 memmove(ptr y, ptr x, size_t z). This is done this way
2778 so that if it isn't expanded inline, we fall back to
2779 calling bcopy instead of memmove. */
2781 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2782 newarglist = tree_cons (NULL_TREE, src, newarglist);
2783 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2785 return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
2788 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2789 if we failed and the caller should emit a normal call; otherwise try to get
2790 the result in TARGET, if convenient (and in mode MODE if that's
2791 convenient). */
2793 static rtx
2794 expand_builtin_strcpy (tree arglist, rtx target, enum machine_mode mode)
2796 tree fn, len, src, dst;
2798 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2799 return 0;
2801 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2802 if (!fn)
2803 return 0;
2805 src = TREE_VALUE (TREE_CHAIN (arglist));
2806 len = c_strlen (src, 1);
2807 if (len == 0 || TREE_SIDE_EFFECTS (len))
2808 return 0;
2810 dst = TREE_VALUE (arglist);
2811 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2812 arglist = build_tree_list (NULL_TREE, len);
2813 arglist = tree_cons (NULL_TREE, src, arglist);
2814 arglist = tree_cons (NULL_TREE, dst, arglist);
2815 return expand_expr (build_function_call_expr (fn, arglist),
2816 target, mode, EXPAND_NORMAL);
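/* Illustrative example: "hi" has a known length and no side effects, so
   strcpy (d, "hi") is rewritten above as the equivalent
   memcpy (d, "hi", 3) -- the string length plus one for the terminating
   NUL -- and expanded through the memcpy path.  */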
2819 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2820 Return 0 if we failed and the caller should emit a normal call;
2821 otherwise try to get the result in TARGET, if convenient (and in
2822 mode MODE if that's convenient). */
2824 static rtx
2825 expand_builtin_stpcpy (tree arglist, rtx target, enum machine_mode mode)
2827 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2828 return 0;
2829 else
2831 tree dst, src, len;
2833 /* If return value is ignored, transform stpcpy into strcpy. */
2834 if (target == const0_rtx)
2836 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
2837 if (!fn)
2838 return 0;
2840 return expand_expr (build_function_call_expr (fn, arglist),
2841 target, mode, EXPAND_NORMAL);
2844 /* Ensure we get an actual string whose length can be evaluated at
2845 compile-time, not an expression containing a string. This is
2846 because the latter will potentially produce pessimized code
2847 when used to produce the return value. */
2848 src = TREE_VALUE (TREE_CHAIN (arglist));
2849 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
2850 return 0;
2852 dst = TREE_VALUE (arglist);
2853 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2854 arglist = build_tree_list (NULL_TREE, len);
2855 arglist = tree_cons (NULL_TREE, src, arglist);
2856 arglist = tree_cons (NULL_TREE, dst, arglist);
2857 return expand_builtin_mempcpy (arglist, target, mode, /*endp=*/2);
2861 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2862 bytes from constant string DATA + OFFSET and return it as target
2863 constant. */
2865 static rtx
2866 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
2867 enum machine_mode mode)
2869 const char *str = (const char *) data;
2871 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2872 return const0_rtx;
2874 return c_readstr (str + offset, mode);
2877 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2878 if we failed and the caller should emit a normal call. */
2880 static rtx
2881 expand_builtin_strncpy (tree arglist, rtx target, enum machine_mode mode)
2883 if (!validate_arglist (arglist,
2884 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2885 return 0;
2886 else
2888 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
2889 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2890 tree fn;
2892 /* We must be passed a constant len parameter. */
2893 if (TREE_CODE (len) != INTEGER_CST)
2894 return 0;
2896 /* If the len parameter is zero, return the dst parameter. */
2897 if (integer_zerop (len))
2899 /* Evaluate and ignore the src argument in case it has
2900 side-effects. */
2901 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2902 VOIDmode, EXPAND_NORMAL);
2903 /* Return the dst parameter. */
2904 return expand_expr (TREE_VALUE (arglist), target, mode,
2905 EXPAND_NORMAL);
2908 /* Now, we must be passed a constant src ptr parameter. */
2909 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2910 return 0;
2912 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2914 /* We're required to pad with trailing zeros if the requested
2915 len is greater than strlen(s2)+1. In that case try to
2916 use store_by_pieces; if it fails, punt. */
2917 if (tree_int_cst_lt (slen, len))
2919 tree dest = TREE_VALUE (arglist);
2920 unsigned int dest_align
2921 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2922 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2923 rtx dest_mem;
2925 if (!p || dest_align == 0 || !host_integerp (len, 1)
2926 || !can_store_by_pieces (tree_low_cst (len, 1),
2927 builtin_strncpy_read_str,
2928 (void *) p, dest_align))
2929 return 0;
2931 dest_mem = get_memory_rtx (dest);
2932 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2933 builtin_strncpy_read_str,
2934 (void *) p, dest_align, 0);
2935 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2936 #ifdef POINTERS_EXTEND_UNSIGNED
2937 if (GET_MODE (dest_mem) != ptr_mode)
2938 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2939 #endif
2940 return dest_mem;
2943 /* OK transform into builtin memcpy. */
2944 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2945 if (!fn)
2946 return 0;
2947 return expand_expr (build_function_call_expr (fn, arglist),
2948 target, mode, EXPAND_NORMAL);
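/* Illustrative example: strncpy (d, "ab", 5) must pad with trailing
   NULs, so (when store_by_pieces can handle it) it is expanded via
   builtin_strncpy_read_str, which supplies zeros for every offset past
   the end of "ab"; a call such as strncpy (d, "ab", 3), which needs no
   extra padding, is instead rewritten as the equivalent memcpy.  */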
2952 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2953 bytes from constant string DATA + OFFSET and return it as target
2954 constant. */
2956 static rtx
2957 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2958 enum machine_mode mode)
2960 const char *c = (const char *) data;
2961 char *p = alloca (GET_MODE_SIZE (mode));
2963 memset (p, *c, GET_MODE_SIZE (mode));
2965 return c_readstr (p, mode);
2968 /* Callback routine for store_by_pieces. Return the RTL of a register
2969 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2970 char value given in the RTL register data. For example, if mode is
2971 4 bytes wide, return the RTL for 0x01010101*data. */
2973 static rtx
2974 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2975 enum machine_mode mode)
2977 rtx target, coeff;
2978 size_t size;
2979 char *p;
2981 size = GET_MODE_SIZE (mode);
2982 if (size == 1)
2983 return (rtx) data;
2985 p = alloca (size);
2986 memset (p, 1, size);
2987 coeff = c_readstr (p, mode);
2989 target = convert_to_mode (mode, (rtx) data, 1);
2990 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2991 return force_reg (mode, target);
2994 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2995 if we failed and the caller should emit a normal call; otherwise try to get
2996 the result in TARGET, if convenient (and in mode MODE if that's
2997 convenient). */
2999 static rtx
3000 expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode)
3002 if (!validate_arglist (arglist,
3003 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3004 return 0;
3005 else
3007 tree dest = TREE_VALUE (arglist);
3008 tree val = TREE_VALUE (TREE_CHAIN (arglist));
3009 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3010 char c;
3012 unsigned int dest_align
3013 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3014 rtx dest_mem, dest_addr, len_rtx;
3016 /* If DEST is not a pointer type, don't do this
3017 operation in-line. */
3018 if (dest_align == 0)
3019 return 0;
3021 /* If the LEN parameter is zero, return DEST. */
3022 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
3024 /* Evaluate and ignore VAL in case it has side-effects. */
3025 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3026 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3029 if (TREE_CODE (val) != INTEGER_CST)
3031 rtx val_rtx;
3033 if (!host_integerp (len, 1))
3034 return 0;
3036 if (optimize_size && tree_low_cst (len, 1) > 1)
3037 return 0;
3039 /* Assume that we can memset by pieces if we can store
3040 * the coefficients by pieces (in the required modes).
3041 * We can't pass builtin_memset_gen_str as that emits RTL. */
3042 c = 1;
3043 if (!can_store_by_pieces (tree_low_cst (len, 1),
3044 builtin_memset_read_str,
3045 &c, dest_align))
3046 return 0;
3048 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
3049 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
3050 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3051 val_rtx);
3052 dest_mem = get_memory_rtx (dest);
3053 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3054 builtin_memset_gen_str,
3055 val_rtx, dest_align, 0);
3056 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3057 #ifdef POINTERS_EXTEND_UNSIGNED
3058 if (GET_MODE (dest_mem) != ptr_mode)
3059 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3060 #endif
3061 return dest_mem;
3064 if (target_char_cast (val, &c))
3065 return 0;
3067 if (c)
3069 if (!host_integerp (len, 1))
3070 return 0;
3071 if (!can_store_by_pieces (tree_low_cst (len, 1),
3072 builtin_memset_read_str, &c,
3073 dest_align))
3074 return 0;
3076 dest_mem = get_memory_rtx (dest);
3077 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3078 builtin_memset_read_str,
3079 &c, dest_align, 0);
3080 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3081 #ifdef POINTERS_EXTEND_UNSIGNED
3082 if (GET_MODE (dest_mem) != ptr_mode)
3083 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3084 #endif
3085 return dest_mem;
3088 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3090 dest_mem = get_memory_rtx (dest);
3091 set_mem_align (dest_mem, dest_align);
3092 dest_addr = clear_storage (dest_mem, len_rtx);
3094 if (dest_addr == 0)
3096 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3097 #ifdef POINTERS_EXTEND_UNSIGNED
3098 if (GET_MODE (dest_addr) != ptr_mode)
3099 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3100 #endif
3103 return dest_addr;
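/* Illustrative summary: memset (p, 0, n) with a variable N ends up in
   clear_storage above, while a small constant fill such as
   memset (p, 'x', 16) can be expanded by store_by_pieces, with
   builtin_memset_read_str replicating the fill byte into word-sized
   constants.  */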
3107 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3108 if we failed and the caller should emit a normal call. */
3110 static rtx
3111 expand_builtin_bzero (tree arglist)
3113 tree dest, size, newarglist;
3115 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3116 return NULL_RTX;
3118 dest = TREE_VALUE (arglist);
3119 size = TREE_VALUE (TREE_CHAIN (arglist));
3121 /* New argument list transforming bzero(ptr x, int y) to
3122 memset(ptr x, int 0, size_t y). This is done this way
3123 so that if it isn't expanded inline, we fall back to
3124 calling bzero instead of memset. */
3126 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
3127 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3128 newarglist = tree_cons (NULL_TREE, dest, newarglist);
3130 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
3133 /* Expand expression EXP, which is a call to the memcmp built-in function.
3134 ARGLIST is the argument list for this call. Return 0 if we failed and the
3135 caller should emit a normal call, otherwise try to get the result in
3136 TARGET, if convenient (and in mode MODE, if that's convenient). */
3138 static rtx
3139 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
3140 enum machine_mode mode)
3142 tree arg1, arg2, len;
3143 const char *p1, *p2;
3145 if (!validate_arglist (arglist,
3146 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3147 return 0;
3149 arg1 = TREE_VALUE (arglist);
3150 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3151 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3153 /* If the len parameter is zero, return zero. */
3154 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
3156 /* Evaluate and ignore arg1 and arg2 in case they have
3157 side-effects. */
3158 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3159 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3160 return const0_rtx;
3163 p1 = c_getstr (arg1);
3164 p2 = c_getstr (arg2);
3166 /* If all arguments are constant, and the value of len is not greater
3167 than the lengths of arg1 and arg2, evaluate at compile-time. */
3168 if (host_integerp (len, 1) && p1 && p2
3169 && compare_tree_int (len, strlen (p1) + 1) <= 0
3170 && compare_tree_int (len, strlen (p2) + 1) <= 0)
3172 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
3174 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3177 /* If len parameter is one, return an expression corresponding to
3178 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
3179 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
3181 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3182 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3183 tree ind1 =
3184 fold (build1 (CONVERT_EXPR, integer_type_node,
3185 build1 (INDIRECT_REF, cst_uchar_node,
3186 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3187 tree ind2 =
3188 fold (build1 (CONVERT_EXPR, integer_type_node,
3189 build1 (INDIRECT_REF, cst_uchar_node,
3190 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3191 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3192 return expand_expr (result, target, mode, EXPAND_NORMAL);
3195 #ifdef HAVE_cmpstrsi
3197 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3198 rtx result;
3199 rtx insn;
3201 int arg1_align
3202 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3203 int arg2_align
3204 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3205 enum machine_mode insn_mode
3206 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3208 /* If we don't have POINTER_TYPE, call the function. */
3209 if (arg1_align == 0 || arg2_align == 0)
3210 return 0;
3212 /* Make a place to write the result of the instruction. */
3213 result = target;
3214 if (! (result != 0
3215 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3216 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3217 result = gen_reg_rtx (insn_mode);
3219 arg1_rtx = get_memory_rtx (arg1);
3220 arg2_rtx = get_memory_rtx (arg2);
3221 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3222 if (!HAVE_cmpstrsi)
3223 insn = NULL_RTX;
3224 else
3225 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3226 GEN_INT (MIN (arg1_align, arg2_align)));
3228 if (insn)
3229 emit_insn (insn);
3230 else
3231 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
3232 TYPE_MODE (integer_type_node), 3,
3233 XEXP (arg1_rtx, 0), Pmode,
3234 XEXP (arg2_rtx, 0), Pmode,
3235 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3236 TREE_UNSIGNED (sizetype)),
3237 TYPE_MODE (sizetype));
3239 /* Return the value in the proper mode for this function. */
3240 mode = TYPE_MODE (TREE_TYPE (exp));
3241 if (GET_MODE (result) == mode)
3242 return result;
3243 else if (target != 0)
3245 convert_move (target, result, 0);
3246 return target;
3248 else
3249 return convert_to_mode (mode, result, 0);
3251 #endif
3253 return 0;
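/* Illustrative example: memcmp ("abc", "abd", 3) meets the
   constant-argument check above and is folded at compile time to -1,
   following the sign of the host memcmp result; a length of exactly 1
   is instead rewritten as the difference of the two leading bytes.  */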
3256 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
3257 if we failed and the caller should emit a normal call; otherwise try to get
3258 the result in TARGET, if convenient. */
3260 static rtx
3261 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
3263 tree arglist = TREE_OPERAND (exp, 1);
3264 tree arg1, arg2;
3265 const char *p1, *p2;
3267 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3268 return 0;
3270 arg1 = TREE_VALUE (arglist);
3271 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3273 p1 = c_getstr (arg1);
3274 p2 = c_getstr (arg2);
3276 if (p1 && p2)
3278 const int i = strcmp (p1, p2);
3279 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
3282 /* If either arg is "", return an expression corresponding to
3283 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
3284 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3286 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3287 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3288 tree ind1 =
3289 fold (build1 (CONVERT_EXPR, integer_type_node,
3290 build1 (INDIRECT_REF, cst_uchar_node,
3291 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3292 tree ind2 =
3293 fold (build1 (CONVERT_EXPR, integer_type_node,
3294 build1 (INDIRECT_REF, cst_uchar_node,
3295 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3296 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3297 return expand_expr (result, target, mode, EXPAND_NORMAL);
3300 #ifdef HAVE_cmpstrsi
3301 if (HAVE_cmpstrsi)
3303 tree len, len1, len2;
3304 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3305 rtx result, insn;
3307 int arg1_align
3308 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3309 int arg2_align
3310 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3311 enum machine_mode insn_mode
3312 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3314 len1 = c_strlen (arg1, 1);
3315 len2 = c_strlen (arg2, 1);
3317 if (len1)
3318 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3319 if (len2)
3320 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3322 /* If we don't have a constant length for the first, use the length
3323 of the second, if we know it. We don't require a constant for
3324 this case; some cost analysis could be done if both are available
3325 but neither is constant. For now, assume they're equally cheap,
3326 unless one has side effects. If both strings have constant lengths,
3327 use the smaller. */
3329 if (!len1)
3330 len = len2;
3331 else if (!len2)
3332 len = len1;
3333 else if (TREE_SIDE_EFFECTS (len1))
3334 len = len2;
3335 else if (TREE_SIDE_EFFECTS (len2))
3336 len = len1;
3337 else if (TREE_CODE (len1) != INTEGER_CST)
3338 len = len2;
3339 else if (TREE_CODE (len2) != INTEGER_CST)
3340 len = len1;
3341 else if (tree_int_cst_lt (len1, len2))
3342 len = len1;
3343 else
3344 len = len2;
3346 /* If both arguments have side effects, we cannot optimize. */
3347 if (!len || TREE_SIDE_EFFECTS (len))
3348 return 0;
3350 /* If we don't have POINTER_TYPE, call the function. */
3351 if (arg1_align == 0 || arg2_align == 0)
3352 return 0;
3354 /* Make a place to write the result of the instruction. */
3355 result = target;
3356 if (! (result != 0
3357 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3358 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3359 result = gen_reg_rtx (insn_mode);
3361 arg1_rtx = get_memory_rtx (arg1);
3362 arg2_rtx = get_memory_rtx (arg2);
3363 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3364 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3365 GEN_INT (MIN (arg1_align, arg2_align)));
3366 if (!insn)
3367 return 0;
3369 emit_insn (insn);
3371 /* Return the value in the proper mode for this function. */
3372 mode = TYPE_MODE (TREE_TYPE (exp));
3373 if (GET_MODE (result) == mode)
3374 return result;
3375 if (target == 0)
3376 return convert_to_mode (mode, result, 0);
3377 convert_move (target, result, 0);
3378 return target;
3380 #endif
3381 return 0;
3384 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3385 if we failed and the caller should emit a normal call; otherwise try to get
3386 the result in TARGET, if convenient. */
3388 static rtx
3389 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
3391 tree arglist = TREE_OPERAND (exp, 1);
3392 tree arg1, arg2, arg3;
3393 const char *p1, *p2;
3395 if (!validate_arglist (arglist,
3396 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3397 return 0;
3399 arg1 = TREE_VALUE (arglist);
3400 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3401 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3403 /* If the len parameter is zero, return zero. */
3404 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
3406 /* Evaluate and ignore arg1 and arg2 in case they have
3407 side-effects. */
3408 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3409 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3410 return const0_rtx;
3413 p1 = c_getstr (arg1);
3414 p2 = c_getstr (arg2);
3416 /* If all arguments are constant, evaluate at compile-time. */
3417 if (host_integerp (arg3, 1) && p1 && p2)
3419 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
3420 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3423 /* If len == 1 or (either string parameter is "" and (len >= 1)),
3424 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
3425 if (host_integerp (arg3, 1)
3426 && (tree_low_cst (arg3, 1) == 1
3427 || (tree_low_cst (arg3, 1) > 1
3428 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
3430 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3431 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3432 tree ind1 =
3433 fold (build1 (CONVERT_EXPR, integer_type_node,
3434 build1 (INDIRECT_REF, cst_uchar_node,
3435 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3436 tree ind2 =
3437 fold (build1 (CONVERT_EXPR, integer_type_node,
3438 build1 (INDIRECT_REF, cst_uchar_node,
3439 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3440 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3441 return expand_expr (result, target, mode, EXPAND_NORMAL);
3444 /* If c_strlen can determine an expression for one of the string
3445 lengths, and it doesn't have side effects, then emit cmpstrsi
3446 using length MIN(strlen(string)+1, arg3). */
3447 #ifdef HAVE_cmpstrsi
3448 if (HAVE_cmpstrsi)
3450 tree len, len1, len2;
3451 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3452 rtx result, insn;
3454 int arg1_align
3455 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3456 int arg2_align
3457 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3458 enum machine_mode insn_mode
3459 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3461 len1 = c_strlen (arg1, 1);
3462 len2 = c_strlen (arg2, 1);
3464 if (len1)
3465 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3466 if (len2)
3467 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3469 /* If we don't have a constant length for the first, use the length
3470 of the second, if we know it. We don't require a constant for
3471 this case; some cost analysis could be done if both are available
3472 but neither is constant. For now, assume they're equally cheap,
3473 unless one has side effects. If both strings have constant lengths,
3474 use the smaller. */
3476 if (!len1)
3477 len = len2;
3478 else if (!len2)
3479 len = len1;
3480 else if (TREE_SIDE_EFFECTS (len1))
3481 len = len2;
3482 else if (TREE_SIDE_EFFECTS (len2))
3483 len = len1;
3484 else if (TREE_CODE (len1) != INTEGER_CST)
3485 len = len2;
3486 else if (TREE_CODE (len2) != INTEGER_CST)
3487 len = len1;
3488 else if (tree_int_cst_lt (len1, len2))
3489 len = len1;
3490 else
3491 len = len2;
3493 /* If both arguments have side effects, we cannot optimize. */
3494 if (!len || TREE_SIDE_EFFECTS (len))
3495 return 0;
3497 /* The actual new length parameter is MIN(len,arg3). */
3498 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
3500 /* If we don't have POINTER_TYPE, call the function. */
3501 if (arg1_align == 0 || arg2_align == 0)
3502 return 0;
3504 /* Make a place to write the result of the instruction. */
3505 result = target;
3506 if (! (result != 0
3507 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3508 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3509 result = gen_reg_rtx (insn_mode);
3511 arg1_rtx = get_memory_rtx (arg1);
3512 arg2_rtx = get_memory_rtx (arg2);
3513 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3514 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3515 GEN_INT (MIN (arg1_align, arg2_align)));
3516 if (!insn)
3517 return 0;
3519 emit_insn (insn);
3521 /* Return the value in the proper mode for this function. */
3522 mode = TYPE_MODE (TREE_TYPE (exp));
3523 if (GET_MODE (result) == mode)
3524 return result;
3525 if (target == 0)
3526 return convert_to_mode (mode, result, 0);
3527 convert_move (target, result, 0);
3528 return target;
3530 #endif
3531 return 0;
3534 /* Expand expression EXP, which is a call to the strcat builtin.
3535 Return 0 if we failed and the caller should emit a normal call;
3536 otherwise try to get the result in TARGET, if convenient. */
3538 static rtx
3539 expand_builtin_strcat (tree arglist, rtx target, enum machine_mode mode)
3541 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3542 return 0;
3543 else
3545 tree dst = TREE_VALUE (arglist),
3546 src = TREE_VALUE (TREE_CHAIN (arglist));
3547 const char *p = c_getstr (src);
3549 /* If the string length is zero, return the dst parameter. */
3550 if (p && *p == '\0')
3551 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3553 return 0;
3557 /* Expand expression EXP, which is a call to the strncat builtin.
3558 Return 0 if we failed and the caller should emit a normal call;
3559 otherwise try to get the result in TARGET, if convenient. */
3561 static rtx
3562 expand_builtin_strncat (tree arglist, rtx target, enum machine_mode mode)
3564 if (!validate_arglist (arglist,
3565 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3566 return 0;
3567 else
3569 tree dst = TREE_VALUE (arglist),
3570 src = TREE_VALUE (TREE_CHAIN (arglist)),
3571 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3572 const char *p = c_getstr (src);
3574 /* If the requested length is zero, or the src parameter string
3575 length is zero, return the dst parameter. */
3576 if (integer_zerop (len) || (p && *p == '\0'))
3578 /* Evaluate and ignore the src and len parameters in case
3579 they have side-effects. */
3580 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3581 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3582 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3585 /* If the requested len is greater than or equal to the string
3586 length, call strcat. */
3587 if (TREE_CODE (len) == INTEGER_CST && p
3588 && compare_tree_int (len, strlen (p)) >= 0)
3590 tree newarglist
3591 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
3592 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
3594 /* If the replacement _DECL isn't initialized, don't do the
3595 transformation. */
3596 if (!fn)
3597 return 0;
3599 return expand_expr (build_function_call_expr (fn, newarglist),
3600 target, mode, EXPAND_NORMAL);
3602 return 0;
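/* Illustrative sketch (editor's addition, hypothetical user code): the folds
   performed by expand_builtin_strncat correspond to source rewrites such as

     strncat (buf, "", n);      -> buf                  (source string is "")
     strncat (buf, s, 0);       -> buf                  (requested length is 0)
     strncat (buf, "abc", 8);   -> strcat (buf, "abc")  (8 >= strlen ("abc"))

   where buf, s and n are placeholders, not names used in this file.  */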
3606 /* Expand a call to the strspn builtin, with arguments ARGLIST.
3607 Return 0 if we failed and the caller should emit a normal call,
3608 otherwise try to get the result in TARGET, if convenient. */
3610 static rtx
3611 expand_builtin_strspn (tree arglist, rtx target, enum machine_mode mode)
3613 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3614 return 0;
3615 else
3617 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3618 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3620 /* If both arguments are constants, evaluate at compile-time. */
3621 if (p1 && p2)
3623 const size_t r = strspn (p1, p2);
3624 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3627 /* If either argument is "", return 0. */
3628 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3630 /* Evaluate and ignore both arguments in case either one has
3631 side-effects. */
3632 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3633 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3634 return const0_rtx;
3636 return 0;
3640 /* Expand a call to the strcspn builtin, with arguments ARGLIST.
3641 Return 0 if we failed and the caller should emit a normal call,
3642 otherwise try to get the result in TARGET, if convenient. */
3644 static rtx
3645 expand_builtin_strcspn (tree arglist, rtx target, enum machine_mode mode)
3647 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3648 return 0;
3649 else
3651 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3652 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3654 /* If both arguments are constants, evaluate at compile-time. */
3655 if (p1 && p2)
3657 const size_t r = strcspn (p1, p2);
3658 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3661 /* If the first argument is "", return 0. */
3662 if (p1 && *p1 == '\0')
3664 /* Evaluate and ignore argument s2 in case it has
3665 side-effects. */
3666 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3667 return const0_rtx;
3670 /* If the second argument is "", return __builtin_strlen(s1). */
3671 if (p2 && *p2 == '\0')
3673 tree newarglist = build_tree_list (NULL_TREE, s1),
3674 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3676 /* If the replacement _DECL isn't initialized, don't do the
3677 transformation. */
3678 if (!fn)
3679 return 0;
3681 return expand_expr (build_function_call_expr (fn, newarglist),
3682 target, mode, EXPAND_NORMAL);
3684 return 0;
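/* Illustrative sketch (editor's addition): with constant arguments the two
   expanders above fold the call at compile time, e.g.

     strspn ("abcde", "abc")   -> 3    (longest initial run inside the set)
     strcspn ("abcde", "dx")   -> 3    (longest initial run outside the set)
     strcspn (s, "")           -> strlen (s)

   where s is a hypothetical non-constant string.  */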
3688 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3689 if that's convenient. */
3692 expand_builtin_saveregs (void)
3694 rtx val, seq;
3696 /* Don't do __builtin_saveregs more than once in a function.
3697 Save the result of the first call and reuse it. */
3698 if (saveregs_value != 0)
3699 return saveregs_value;
3701 /* When this function is called, it means that registers must be
3702 saved on entry to this function. So we migrate the call to the
3703 first insn of this function. */
3705 start_sequence ();
3707 #ifdef EXPAND_BUILTIN_SAVEREGS
3708 /* Do whatever the machine needs done in this case. */
3709 val = EXPAND_BUILTIN_SAVEREGS ();
3710 #else
3711 /* ??? We used to try and build up a call to the out of line function,
3712 guessing about what registers needed saving etc. This became much
3713 harder with __builtin_va_start, since we don't have a tree for a
3714 call to __builtin_saveregs to fall back on. There was exactly one
3715 port (i860) that used this code, and I'm unconvinced it could actually
3716 handle the general case. So we no longer try to handle anything
3717 weird and make the backend absorb the evil. */
3719 error ("__builtin_saveregs not supported by this target");
3720 val = const0_rtx;
3721 #endif
3723 seq = get_insns ();
3724 end_sequence ();
3726 saveregs_value = val;
3728 /* Put the insns after the NOTE that starts the function. If this
3729 is inside a start_sequence, make the outer-level insn chain current, so
3730 the code is placed at the start of the function. */
3731 push_topmost_sequence ();
3732 emit_insn_after (seq, get_insns ());
3733 pop_topmost_sequence ();
3735 return val;
3738 /* __builtin_args_info (N) returns word N of the arg space info
3739 for the current function. The number and meanings of the words
3740 are controlled by the definition of CUMULATIVE_ARGS. */
3742 static rtx
3743 expand_builtin_args_info (tree arglist)
3745 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3746 int *word_ptr = (int *) &current_function_args_info;
3748 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
3749 abort ();
3751 if (arglist != 0)
3753 if (!host_integerp (TREE_VALUE (arglist), 0))
3754 error ("argument of `__builtin_args_info' must be constant");
3755 else
3757 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3759 if (wordnum < 0 || wordnum >= nwords)
3760 error ("argument of `__builtin_args_info' out of range");
3761 else
3762 return GEN_INT (word_ptr[wordnum]);
3765 else
3766 error ("missing argument in `__builtin_args_info'");
3768 return const0_rtx;
3771 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3773 static rtx
3774 expand_builtin_next_arg (tree arglist)
3776 tree fntype = TREE_TYPE (current_function_decl);
3778 if (TYPE_ARG_TYPES (fntype) == 0
3779 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3780 == void_type_node))
3782 error ("`va_start' used in function with fixed args");
3783 return const0_rtx;
3786 if (arglist)
3788 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
3789 tree arg = TREE_VALUE (arglist);
3791 /* Strip off all nops for the sake of the comparison. This
3792 is not quite the same as STRIP_NOPS. It does more.
3793 We must also strip off INDIRECT_REF for C++ reference
3794 parameters. */
3795 while (TREE_CODE (arg) == NOP_EXPR
3796 || TREE_CODE (arg) == CONVERT_EXPR
3797 || TREE_CODE (arg) == NON_LVALUE_EXPR
3798 || TREE_CODE (arg) == INDIRECT_REF)
3799 arg = TREE_OPERAND (arg, 0);
3800 if (arg != last_parm)
3801 warning ("second parameter of `va_start' not last named argument");
3803 else
3804 /* Evidently an out of date version of <stdarg.h>; can't validate
3805 va_start's second argument, but can still work as intended. */
3806 warning ("`__builtin_next_arg' called without an argument");
3808 return expand_binop (Pmode, add_optab,
3809 current_function_internal_arg_pointer,
3810 current_function_arg_offset_rtx,
3811 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3814 /* Make it easier for the backends by protecting the valist argument
3815 from multiple evaluations. */
3817 static tree
3818 stabilize_va_list (tree valist, int needs_lvalue)
3820 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3822 if (TREE_SIDE_EFFECTS (valist))
3823 valist = save_expr (valist);
3825 /* For this case, the backends will be expecting a pointer to
3826 TREE_TYPE (va_list_type_node), but it's possible we've
3827 actually been given an array (an actual va_list_type_node).
3828 So fix it. */
3829 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3831 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3832 tree p2 = build_pointer_type (va_list_type_node);
3834 valist = build1 (ADDR_EXPR, p2, valist);
3835 valist = fold (build1 (NOP_EXPR, p1, valist));
3838 else
3840 tree pt;
3842 if (! needs_lvalue)
3844 if (! TREE_SIDE_EFFECTS (valist))
3845 return valist;
3847 pt = build_pointer_type (va_list_type_node);
3848 valist = fold (build1 (ADDR_EXPR, pt, valist));
3849 TREE_SIDE_EFFECTS (valist) = 1;
3852 if (TREE_SIDE_EFFECTS (valist))
3853 valist = save_expr (valist);
3854 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3855 valist));
3858 return valist;
3861 /* The "standard" implementation of va_start: just assign `nextarg' to
3862 the variable. */
3864 void
3865 std_expand_builtin_va_start (tree valist, rtx nextarg)
3867 tree t;
3869 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3870 make_tree (ptr_type_node, nextarg));
3871 TREE_SIDE_EFFECTS (t) = 1;
3873 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3876 /* Expand ARGLIST, from a call to __builtin_va_start. */
3878 static rtx
3879 expand_builtin_va_start (tree arglist)
3881 rtx nextarg;
3882 tree chain, valist;
3884 chain = TREE_CHAIN (arglist);
3886 if (TREE_CHAIN (chain))
3887 error ("too many arguments to function `va_start'");
3889 nextarg = expand_builtin_next_arg (chain);
3890 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3892 #ifdef EXPAND_BUILTIN_VA_START
3893 EXPAND_BUILTIN_VA_START (valist, nextarg);
3894 #else
3895 std_expand_builtin_va_start (valist, nextarg);
3896 #endif
3898 return const0_rtx;
3901 /* The "standard" implementation of va_arg: read the value from the
3902 current (padded) address and increment by the (padded) size. */
3905 std_expand_builtin_va_arg (tree valist, tree type)
3907 tree addr_tree, t, type_size = NULL;
3908 tree align, alignm1;
3909 tree rounded_size;
3910 rtx addr;
3912 /* Compute the rounded size of the type. */
3913 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3914 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3915 if (type == error_mark_node
3916 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3917 || TREE_OVERFLOW (type_size))
3918 rounded_size = size_zero_node;
3919 else
3920 rounded_size = fold (build (MULT_EXPR, sizetype,
3921 fold (build (TRUNC_DIV_EXPR, sizetype,
3922 fold (build (PLUS_EXPR, sizetype,
3923 type_size, alignm1)),
3924 align)),
3925 align));
3927 /* Get AP. */
3928 addr_tree = valist;
3929 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3931 /* Small args are padded downward. */
3932 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3933 fold (build (COND_EXPR, sizetype,
3934 fold (build (GT_EXPR, sizetype,
3935 rounded_size,
3936 align)),
3937 size_zero_node,
3938 fold (build (MINUS_EXPR, sizetype,
3939 rounded_size,
3940 type_size))))));
3943 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3944 addr = copy_to_reg (addr);
3946 /* Compute new value for AP. */
3947 if (! integer_zerop (rounded_size))
3949 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3950 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3951 rounded_size));
3952 TREE_SIDE_EFFECTS (t) = 1;
3953 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3956 return addr;
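/* Worked example (editor's addition), assuming PARM_BOUNDARY is 32 so that
   align == 4 bytes: for a 2-byte type the computation above gives

     rounded_size = ((2 + 3) / 4) * 4 = 4

   so AP advances by 4, and if PAD_VARARGS_DOWN is set the value is read at
   AP + (4 - 2), because rounded_size does not exceed align.  */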
3959 /* Expand __builtin_va_arg, which is not really a builtin function, but
3960 a very special sort of operator. */
3963 expand_builtin_va_arg (tree valist, tree type)
3965 rtx addr, result;
3966 tree promoted_type, want_va_type, have_va_type;
3968 /* Verify that valist is of the proper type. */
3970 want_va_type = va_list_type_node;
3971 have_va_type = TREE_TYPE (valist);
3972 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3974 /* If va_list is an array type, the argument may have decayed
3975 to a pointer type, e.g. by being passed to another function.
3976 In that case, unwrap both types so that we can compare the
3977 underlying records. */
3978 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3979 || TREE_CODE (have_va_type) == POINTER_TYPE)
3981 want_va_type = TREE_TYPE (want_va_type);
3982 have_va_type = TREE_TYPE (have_va_type);
3985 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3987 error ("first argument to `va_arg' not of type `va_list'");
3988 addr = const0_rtx;
3991 /* Generate a diagnostic for requesting data of a type that cannot
3992 be passed through `...' due to type promotion at the call site. */
3993 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3994 != type)
3996 const char *name = "<anonymous type>", *pname = 0;
3997 static bool gave_help;
3999 if (TYPE_NAME (type))
4001 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
4002 name = IDENTIFIER_POINTER (TYPE_NAME (type));
4003 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
4004 && DECL_NAME (TYPE_NAME (type)))
4005 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
4007 if (TYPE_NAME (promoted_type))
4009 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
4010 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
4011 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
4012 && DECL_NAME (TYPE_NAME (promoted_type)))
4013 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
4016 /* Unfortunately, this is merely undefined, rather than a constraint
4017 violation, so we cannot make this an error. If this call is never
4018 executed, the program is still strictly conforming. */
4019 warning ("`%s' is promoted to `%s' when passed through `...'",
4020 name, pname);
4021 if (! gave_help)
4023 gave_help = true;
4024 warning ("(so you should pass `%s' not `%s' to `va_arg')",
4025 pname, name);
4028 /* We can, however, treat "undefined" any way we please.
4029 Call abort to encourage the user to fix the program. */
4030 expand_builtin_trap ();
4032 /* This is dead code, but go ahead and finish so that the
4033 mode of the result comes out right. */
4034 addr = const0_rtx;
4036 else
4038 /* Make it easier for the backends by protecting the valist argument
4039 from multiple evaluations. */
4040 valist = stabilize_va_list (valist, 0);
4042 #ifdef EXPAND_BUILTIN_VA_ARG
4043 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
4044 #else
4045 addr = std_expand_builtin_va_arg (valist, type);
4046 #endif
4049 #ifdef POINTERS_EXTEND_UNSIGNED
4050 if (GET_MODE (addr) != Pmode)
4051 addr = convert_memory_address (Pmode, addr);
4052 #endif
4054 result = gen_rtx_MEM (TYPE_MODE (type), addr);
4055 set_mem_alias_set (result, get_varargs_alias_set ());
4057 return result;
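/* Illustrative sketch (editor's addition): the promotion diagnostic above is
   triggered by hypothetical user code such as

     short s = va_arg (ap, short);      <- undefined, short promotes to int

   the suggested fix being

     short s = (short) va_arg (ap, int);  */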
4060 /* Expand ARGLIST, from a call to __builtin_va_end. */
4062 static rtx
4063 expand_builtin_va_end (tree arglist)
4065 tree valist = TREE_VALUE (arglist);
4067 #ifdef EXPAND_BUILTIN_VA_END
4068 valist = stabilize_va_list (valist, 0);
4069 EXPAND_BUILTIN_VA_END (arglist);
4070 #else
4071 /* Evaluate for side effects, if needed. I hate macros that don't
4072 do that. */
4073 if (TREE_SIDE_EFFECTS (valist))
4074 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4075 #endif
4077 return const0_rtx;
4080 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
4081 builtin rather than just as an assignment in stdarg.h because of the
4082 nastiness of array-type va_list types. */
4084 static rtx
4085 expand_builtin_va_copy (tree arglist)
4087 tree dst, src, t;
4089 dst = TREE_VALUE (arglist);
4090 src = TREE_VALUE (TREE_CHAIN (arglist));
4092 dst = stabilize_va_list (dst, 1);
4093 src = stabilize_va_list (src, 0);
4095 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4097 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
4098 TREE_SIDE_EFFECTS (t) = 1;
4099 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4101 else
4103 rtx dstb, srcb, size;
4105 /* Evaluate to pointers. */
4106 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4107 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4108 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4109 VOIDmode, EXPAND_NORMAL);
4111 #ifdef POINTERS_EXTEND_UNSIGNED
4112 if (GET_MODE (dstb) != Pmode)
4113 dstb = convert_memory_address (Pmode, dstb);
4115 if (GET_MODE (srcb) != Pmode)
4116 srcb = convert_memory_address (Pmode, srcb);
4117 #endif
4119 /* "Dereference" to BLKmode memories. */
4120 dstb = gen_rtx_MEM (BLKmode, dstb);
4121 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4122 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4123 srcb = gen_rtx_MEM (BLKmode, srcb);
4124 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4125 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4127 /* Copy. */
4128 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4131 return const0_rtx;
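/* Illustrative sketch (editor's addition): when va_list is a scalar type such
   as 'char *', the first branch above emits the equivalent of 'dst = src'.
   On a hypothetical target declaring

     typedef struct __va_tag va_list[1];

   plain assignment of arrays is not valid C, so the second branch performs a
   block copy of sizeof (va_list) bytes instead.  */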
4134 /* Expand a call to one of the builtin functions __builtin_frame_address or
4135 __builtin_return_address. */
4137 static rtx
4138 expand_builtin_frame_address (tree fndecl, tree arglist)
4140 /* The argument must be a nonnegative integer constant.
4141 It counts the number of frames to scan up the stack.
4142 The value is the return address (or frame address) found in that frame. */
4143 if (arglist == 0)
4144 /* Warning about missing arg was already issued. */
4145 return const0_rtx;
4146 else if (! host_integerp (TREE_VALUE (arglist), 1))
4148 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4149 error ("invalid arg to `__builtin_frame_address'");
4150 else
4151 error ("invalid arg to `__builtin_return_address'");
4152 return const0_rtx;
4154 else
4156 rtx tem
4157 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4158 tree_low_cst (TREE_VALUE (arglist), 1),
4159 hard_frame_pointer_rtx);
4161 /* Some ports cannot access arbitrary stack frames. */
4162 if (tem == NULL)
4164 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4165 warning ("unsupported arg to `__builtin_frame_address'");
4166 else
4167 warning ("unsupported arg to `__builtin_return_address'");
4168 return const0_rtx;
4171 /* For __builtin_frame_address, return what we've got. */
4172 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4173 return tem;
4175 if (GET_CODE (tem) != REG
4176 && ! CONSTANT_P (tem))
4177 tem = copy_to_mode_reg (Pmode, tem);
4178 return tem;
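/* Illustrative sketch (editor's addition): the argument must be a constant,
   so of the hypothetical calls

     void *a = __builtin_return_address (0);   <- address we will return to
     void *b = __builtin_frame_address (n);    <- rejected, n is not constant

   only the first reaches expand_builtin_return_addr; the second draws the
   `invalid arg' error above.  */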
4182 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
4183 we failed and the caller should emit a normal call, otherwise try to get
4184 the result in TARGET, if convenient. */
4186 static rtx
4187 expand_builtin_alloca (tree arglist, rtx target)
4189 rtx op0;
4190 rtx result;
4192 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4193 return 0;
4195 /* Compute the argument. */
4196 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
4198 /* Allocate the desired space. */
4199 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4201 #ifdef POINTERS_EXTEND_UNSIGNED
4202 if (GET_MODE (result) != ptr_mode)
4203 result = convert_memory_address (ptr_mode, result);
4204 #endif
4206 return result;
4209 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
4210 Return 0 if a normal call should be emitted rather than expanding the
4211 function in-line. If convenient, the result should be placed in TARGET.
4212 SUBTARGET may be used as the target for computing one of EXP's operands. */
4214 static rtx
4215 expand_builtin_unop (enum machine_mode target_mode, tree arglist, rtx target,
4216 rtx subtarget, optab op_optab)
4218 rtx op0;
4219 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4220 return 0;
4222 /* Compute the argument. */
4223 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
4224 /* Compute op, into TARGET if possible.
4225 Set TARGET to wherever the result comes back. */
4226 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
4227 op_optab, op0, target, 1);
4228 if (target == 0)
4229 abort ();
4231 return convert_to_mode (target_mode, target, 0);
4234 /* If the string passed to fputs is a compile-time constant, we transform
4235 the call into __builtin_fputc (), __builtin_fwrite (), or delete it. */
4237 static rtx
4238 expand_builtin_fputs (tree arglist, int ignore, int unlocked)
4240 tree len, fn;
4241 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
4242 : implicit_built_in_decls[BUILT_IN_FPUTC];
4243 tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
4244 : implicit_built_in_decls[BUILT_IN_FWRITE];
4246 /* If the return value is used, or the replacement _DECL isn't
4247 initialized, don't do the transformation. */
4248 if (!ignore || !fn_fputc || !fn_fwrite)
4249 return 0;
4251 /* Verify the arguments in the original call. */
4252 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4253 return 0;
4255 /* Get the length of the string passed to fputs. If the length
4256 can't be determined, punt. */
4257 if (!(len = c_strlen (TREE_VALUE (arglist), 1))
4258 || TREE_CODE (len) != INTEGER_CST)
4259 return 0;
4261 switch (compare_tree_int (len, 1))
4263 case -1: /* length is 0, delete the call entirely. */
4265 /* Evaluate and ignore the argument in case it has
4266 side-effects. */
4267 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
4268 VOIDmode, EXPAND_NORMAL);
4269 return const0_rtx;
4271 case 0: /* length is 1, call fputc. */
4273 const char *p = c_getstr (TREE_VALUE (arglist));
4275 if (p != NULL)
4277 /* New argument list transforming fputs(string, stream) to
4278 fputc(string[0], stream). */
4279 arglist =
4280 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
4281 arglist =
4282 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
4283 fn = fn_fputc;
4284 break;
4287 /* FALLTHROUGH */
4288 case 1: /* length is greater than 1, call fwrite. */
4290 tree string_arg;
4292 /* If optimizing for size, keep fputs. */
4293 if (optimize_size)
4294 return 0;
4295 string_arg = TREE_VALUE (arglist);
4296 /* New argument list transforming fputs(string, stream) to
4297 fwrite(string, 1, len, stream). */
4298 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
4299 arglist = tree_cons (NULL_TREE, len, arglist);
4300 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
4301 arglist = tree_cons (NULL_TREE, string_arg, arglist);
4302 fn = fn_fwrite;
4303 break;
4305 default:
4306 abort ();
4309 return expand_expr (build_function_call_expr (fn, arglist),
4310 (ignore ? const0_rtx : NULL_RTX),
4311 VOIDmode, EXPAND_NORMAL);
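/* Illustrative sketch (editor's addition): for a constant string whose result
   is unused, the cases above correspond to

     fputs ("", f);     -> call removed, f still evaluated for side effects
     fputs ("x", f);    -> fputc ('x', f)
     fputs ("abc", f);  -> fwrite ("abc", 1, 3, f)    (skipped under -Os)

   where f is a hypothetical FILE * expression.  */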
4314 /* Expand a call to __builtin_expect. We return our argument and emit a
4315 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
4316 a non-jump context. */
4318 static rtx
4319 expand_builtin_expect (tree arglist, rtx target)
4321 tree exp, c;
4322 rtx note, rtx_c;
4324 if (arglist == NULL_TREE
4325 || TREE_CHAIN (arglist) == NULL_TREE)
4326 return const0_rtx;
4327 exp = TREE_VALUE (arglist);
4328 c = TREE_VALUE (TREE_CHAIN (arglist));
4330 if (TREE_CODE (c) != INTEGER_CST)
4332 error ("second arg to `__builtin_expect' must be a constant");
4333 c = integer_zero_node;
4336 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
4338 /* Don't bother with expected value notes for integral constants. */
4339 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
4341 /* We do need to force this into a register so that we can be
4342 moderately sure of correctly interpreting the branch
4343 condition later. */
4344 target = force_reg (GET_MODE (target), target);
4346 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
4348 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
4349 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
4352 return target;
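/* Illustrative sketch (editor's addition): in a non-jump context such as the
   hypothetical

     int hint = __builtin_expect (x, 1);

   the expansion above returns the value of x, forced into a register, and
   (under -fguess-branch-probability) attaches a NOTE_INSN_EXPECTED_VALUE
   note recording (eq (reg) (const_int 1)) for later branch prediction.  */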
4355 /* Like expand_builtin_expect, except do this in a jump context. This is
4356 called from do_jump if the conditional is a __builtin_expect. Return either
4357 a list of insns to emit the jump or NULL if we cannot optimize
4358 __builtin_expect. We need to optimize this at jump time so that machines
4359 like the PowerPC don't turn the test into a SCC operation, and then jump
4360 based on the test being 0/1. */
4363 expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
4365 tree arglist = TREE_OPERAND (exp, 1);
4366 tree arg0 = TREE_VALUE (arglist);
4367 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4368 rtx ret = NULL_RTX;
4370 /* Only handle __builtin_expect (test, 0) and
4371 __builtin_expect (test, 1). */
4372 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
4373 && (integer_zerop (arg1) || integer_onep (arg1)))
4375 int num_jumps = 0;
4376 rtx insn;
4378 /* If we fail to locate an appropriate conditional jump, we'll
4379 fall back to normal evaluation. Ensure that the expression
4380 can be re-evaluated. */
4381 switch (unsafe_for_reeval (arg0))
4383 case 0: /* Safe. */
4384 break;
4386 case 1: /* Mildly unsafe. */
4387 arg0 = unsave_expr (arg0);
4388 break;
4390 case 2: /* Wildly unsafe. */
4391 return NULL_RTX;
4394 /* Expand the jump insns. */
4395 start_sequence ();
4396 do_jump (arg0, if_false_label, if_true_label);
4397 ret = get_insns ();
4398 end_sequence ();
4400 /* Now that the __builtin_expect has been validated, go through and add
4401 the expected-value predictions to each of the conditional jumps. If we run into an
4402 error, just give up and generate the 'safe' code of doing a SCC
4403 operation and then doing a branch on that. */
4404 insn = ret;
4405 while (insn != NULL_RTX)
4407 rtx next = NEXT_INSN (insn);
4409 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn))
4411 rtx ifelse = SET_SRC (pc_set (insn));
4412 rtx label;
4413 int taken;
4415 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
4417 taken = 1;
4418 label = XEXP (XEXP (ifelse, 1), 0);
4420 /* An inverted jump reverses the probabilities. */
4421 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
4423 taken = 0;
4424 label = XEXP (XEXP (ifelse, 2), 0);
4426 /* We shouldn't have to worry about conditional returns during
4427 the expansion stage, but handle it gracefully anyway. */
4428 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
4430 taken = 1;
4431 label = NULL_RTX;
4433 /* An inverted return reverses the probabilities. */
4434 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
4436 taken = 0;
4437 label = NULL_RTX;
4439 else
4440 goto do_next_insn;
4442 /* If the test is expected to fail, reverse the
4443 probabilities. */
4444 if (integer_zerop (arg1))
4445 taken = 1 - taken;
4447 /* If we are jumping to the false label, reverse the
4448 probabilities. */
4449 if (label == NULL_RTX)
4450 ; /* conditional return */
4451 else if (label == if_false_label)
4452 taken = 1 - taken;
4453 else if (label != if_true_label)
4454 goto do_next_insn;
4456 num_jumps++;
4457 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
4460 do_next_insn:
4461 insn = next;
4464 /* If no jumps were modified, fail and do __builtin_expect the normal
4465 way. */
4466 if (num_jumps == 0)
4467 ret = NULL_RTX;
4470 return ret;
4473 void
4474 expand_builtin_trap (void)
4476 #ifdef HAVE_trap
4477 if (HAVE_trap)
4478 emit_insn (gen_trap ());
4479 else
4480 #endif
4481 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4482 emit_barrier ();
4485 /* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
4486 Return 0 if a normal call should be emitted rather than expanding
4487 the function inline. If convenient, the result should be placed
4488 in TARGET. SUBTARGET may be used as the target for computing
4489 the operand. */
4491 static rtx
4492 expand_builtin_fabs (tree arglist, rtx target, rtx subtarget)
4494 enum machine_mode mode;
4495 tree arg;
4496 rtx op0;
4498 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4499 return 0;
4501 arg = TREE_VALUE (arglist);
4502 mode = TYPE_MODE (TREE_TYPE (arg));
4503 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4504 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4507 /* Expand a call to cabs, cabsf or cabsl with arguments ARGLIST.
4508 Return 0 if a normal call should be emitted rather than expanding
4509 the function inline. If convenient, the result should be placed
4510 in target. */
4512 static rtx
4513 expand_builtin_cabs (tree arglist, rtx target)
4515 enum machine_mode mode;
4516 tree arg;
4517 rtx op0;
4519 if (arglist == 0 || TREE_CHAIN (arglist))
4520 return 0;
4521 arg = TREE_VALUE (arglist);
4522 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
4523 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
4524 return 0;
4526 mode = TYPE_MODE (TREE_TYPE (arg));
4527 op0 = expand_expr (arg, NULL_RTX, VOIDmode, 0);
4528 return expand_complex_abs (mode, op0, target, 0);
4531 /* Expand a call to sprintf with argument list ARGLIST. Return 0 if
4532 a normal call should be emitted rather than expanding the function
4533 inline. If convenient, the result should be placed in TARGET with
4534 mode MODE. */
4536 static rtx
4537 expand_builtin_sprintf (tree arglist, rtx target, enum machine_mode mode)
4539 tree orig_arglist, dest, fmt;
4540 const char *fmt_str;
4542 orig_arglist = arglist;
4544 /* Verify the required arguments in the original call. */
4545 if (! arglist)
4546 return 0;
4547 dest = TREE_VALUE (arglist);
4548 if (TREE_CODE (TREE_TYPE (dest)) != POINTER_TYPE)
4549 return 0;
4550 arglist = TREE_CHAIN (arglist);
4551 if (! arglist)
4552 return 0;
4553 fmt = TREE_VALUE (arglist);
4554 if (TREE_CODE (TREE_TYPE (fmt)) != POINTER_TYPE)
4555 return 0;
4556 arglist = TREE_CHAIN (arglist);
4558 /* Check whether the format is a literal string constant. */
4559 fmt_str = c_getstr (fmt);
4560 if (fmt_str == NULL)
4561 return 0;
4563 /* If the format doesn't contain % args or %%, use strcpy. */
4564 if (strchr (fmt_str, '%') == 0)
4566 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4567 tree exp;
4569 if (arglist || ! fn)
4570 return 0;
4571 expand_expr (build_function_call_expr (fn, orig_arglist),
4572 const0_rtx, VOIDmode, EXPAND_NORMAL);
4573 if (target == const0_rtx)
4574 return const0_rtx;
4575 exp = build_int_2 (strlen (fmt_str), 0);
4576 exp = fold (build1 (NOP_EXPR, integer_type_node, exp));
4577 return expand_expr (exp, target, mode, EXPAND_NORMAL);
4579 /* If the format is "%s", use strcpy if the result isn't used. */
4580 else if (strcmp (fmt_str, "%s") == 0)
4582 tree fn, arg, len;
4583 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4585 if (! fn)
4586 return 0;
4588 if (! arglist || TREE_CHAIN (arglist))
4589 return 0;
4590 arg = TREE_VALUE (arglist);
4591 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
4592 return 0;
4594 if (target != const0_rtx)
4596 len = c_strlen (arg, 1);
4597 if (! len || TREE_CODE (len) != INTEGER_CST)
4598 return 0;
4600 else
4601 len = NULL_TREE;
4603 arglist = build_tree_list (NULL_TREE, arg);
4604 arglist = tree_cons (NULL_TREE, dest, arglist);
4605 expand_expr (build_function_call_expr (fn, arglist),
4606 const0_rtx, VOIDmode, EXPAND_NORMAL);
4608 if (target == const0_rtx)
4609 return const0_rtx;
4610 return expand_expr (len, target, mode, EXPAND_NORMAL);
4613 return 0;
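/* Illustrative sketch (editor's addition): for hypothetical user calls

     sprintf (buf, "hello");   -> strcpy (buf, "hello"), result folded to 5
     sprintf (buf, "%s", s);   -> strcpy (buf, s), result only available when
                                  s has a known constant length
     sprintf (buf, "%d", i);   -> left as a normal library call

   where buf, s and i are placeholders, not names used in this file.  */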
4616 /* Expand an expression EXP that calls a built-in function,
4617 with result going to TARGET if that's convenient
4618 (and in mode MODE if that's convenient).
4619 SUBTARGET may be used as the target for computing one of EXP's operands.
4620 IGNORE is nonzero if the value is to be ignored. */
4623 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
4624 int ignore)
4626 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4627 tree arglist = TREE_OPERAND (exp, 1);
4628 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4629 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
4631 /* Perform postincrements before expanding builtin functions.  */
4632 emit_queue ();
4634 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4635 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
4637 /* When not optimizing, generate calls to library functions for a certain
4638 set of builtins. */
4639 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
4640 switch (fcode)
4642 case BUILT_IN_SQRT:
4643 case BUILT_IN_SQRTF:
4644 case BUILT_IN_SQRTL:
4645 case BUILT_IN_SIN:
4646 case BUILT_IN_SINF:
4647 case BUILT_IN_SINL:
4648 case BUILT_IN_COS:
4649 case BUILT_IN_COSF:
4650 case BUILT_IN_COSL:
4651 case BUILT_IN_EXP:
4652 case BUILT_IN_EXPF:
4653 case BUILT_IN_EXPL:
4654 case BUILT_IN_LOG:
4655 case BUILT_IN_LOGF:
4656 case BUILT_IN_LOGL:
4657 case BUILT_IN_TAN:
4658 case BUILT_IN_TANF:
4659 case BUILT_IN_TANL:
4660 case BUILT_IN_ATAN:
4661 case BUILT_IN_ATANF:
4662 case BUILT_IN_ATANL:
4663 case BUILT_IN_POW:
4664 case BUILT_IN_POWF:
4665 case BUILT_IN_POWL:
4666 case BUILT_IN_ATAN2:
4667 case BUILT_IN_ATAN2F:
4668 case BUILT_IN_ATAN2L:
4669 case BUILT_IN_MEMSET:
4670 case BUILT_IN_MEMCPY:
4671 case BUILT_IN_MEMCMP:
4672 case BUILT_IN_MEMPCPY:
4673 case BUILT_IN_MEMMOVE:
4674 case BUILT_IN_BCMP:
4675 case BUILT_IN_BZERO:
4676 case BUILT_IN_BCOPY:
4677 case BUILT_IN_INDEX:
4678 case BUILT_IN_RINDEX:
4679 case BUILT_IN_SPRINTF:
4680 case BUILT_IN_STPCPY:
4681 case BUILT_IN_STRCHR:
4682 case BUILT_IN_STRRCHR:
4683 case BUILT_IN_STRLEN:
4684 case BUILT_IN_STRCPY:
4685 case BUILT_IN_STRNCPY:
4686 case BUILT_IN_STRNCMP:
4687 case BUILT_IN_STRSTR:
4688 case BUILT_IN_STRPBRK:
4689 case BUILT_IN_STRCAT:
4690 case BUILT_IN_STRNCAT:
4691 case BUILT_IN_STRSPN:
4692 case BUILT_IN_STRCSPN:
4693 case BUILT_IN_STRCMP:
4694 case BUILT_IN_FFS:
4695 case BUILT_IN_PUTCHAR:
4696 case BUILT_IN_PUTS:
4697 case BUILT_IN_PRINTF:
4698 case BUILT_IN_FPUTC:
4699 case BUILT_IN_FPUTS:
4700 case BUILT_IN_FWRITE:
4701 case BUILT_IN_PUTCHAR_UNLOCKED:
4702 case BUILT_IN_PUTS_UNLOCKED:
4703 case BUILT_IN_PRINTF_UNLOCKED:
4704 case BUILT_IN_FPUTC_UNLOCKED:
4705 case BUILT_IN_FPUTS_UNLOCKED:
4706 case BUILT_IN_FWRITE_UNLOCKED:
4707 case BUILT_IN_FLOOR:
4708 case BUILT_IN_FLOORF:
4709 case BUILT_IN_FLOORL:
4710 case BUILT_IN_CEIL:
4711 case BUILT_IN_CEILF:
4712 case BUILT_IN_CEILL:
4713 case BUILT_IN_TRUNC:
4714 case BUILT_IN_TRUNCF:
4715 case BUILT_IN_TRUNCL:
4716 case BUILT_IN_ROUND:
4717 case BUILT_IN_ROUNDF:
4718 case BUILT_IN_ROUNDL:
4719 case BUILT_IN_NEARBYINT:
4720 case BUILT_IN_NEARBYINTF:
4721 case BUILT_IN_NEARBYINTL:
4722 return expand_call (exp, target, ignore);
4724 default:
4725 break;
4728 /* The built-in function expanders test for target == const0_rtx
4729 to determine whether the function's result will be ignored. */
4730 if (ignore)
4731 target = const0_rtx;
4733 /* If the result of a pure or const built-in function is ignored, and
4734 none of its arguments are volatile, we can avoid expanding the
4735 built-in call and just evaluate the arguments for side-effects. */
4736 if (target == const0_rtx
4737 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
4739 bool volatilep = false;
4740 tree arg;
4742 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4743 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
4745 volatilep = true;
4746 break;
4749 if (! volatilep)
4751 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4752 expand_expr (TREE_VALUE (arg), const0_rtx,
4753 VOIDmode, EXPAND_NORMAL);
4754 return const0_rtx;
4758 switch (fcode)
4760 case BUILT_IN_ABS:
4761 case BUILT_IN_LABS:
4762 case BUILT_IN_LLABS:
4763 case BUILT_IN_IMAXABS:
4764 /* build_function_call changes these into ABS_EXPR. */
4765 abort ();
4767 case BUILT_IN_FABS:
4768 case BUILT_IN_FABSF:
4769 case BUILT_IN_FABSL:
4770 target = expand_builtin_fabs (arglist, target, subtarget);
4771 if (target)
4772 return target;
4773 break;
4775 case BUILT_IN_CABS:
4776 case BUILT_IN_CABSF:
4777 case BUILT_IN_CABSL:
4778 if (flag_unsafe_math_optimizations)
4780 target = expand_builtin_cabs (arglist, target);
4781 if (target)
4782 return target;
4784 break;
4786 case BUILT_IN_CONJ:
4787 case BUILT_IN_CONJF:
4788 case BUILT_IN_CONJL:
4789 case BUILT_IN_CREAL:
4790 case BUILT_IN_CREALF:
4791 case BUILT_IN_CREALL:
4792 case BUILT_IN_CIMAG:
4793 case BUILT_IN_CIMAGF:
4794 case BUILT_IN_CIMAGL:
4795 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
4796 and IMAGPART_EXPR. */
4797 abort ();
4799 case BUILT_IN_SIN:
4800 case BUILT_IN_SINF:
4801 case BUILT_IN_SINL:
4802 case BUILT_IN_COS:
4803 case BUILT_IN_COSF:
4804 case BUILT_IN_COSL:
4805 case BUILT_IN_EXP:
4806 case BUILT_IN_EXPF:
4807 case BUILT_IN_EXPL:
4808 case BUILT_IN_LOG:
4809 case BUILT_IN_LOGF:
4810 case BUILT_IN_LOGL:
4811 case BUILT_IN_TAN:
4812 case BUILT_IN_TANF:
4813 case BUILT_IN_TANL:
4814 case BUILT_IN_ATAN:
4815 case BUILT_IN_ATANF:
4816 case BUILT_IN_ATANL:
4817 /* Treat these like sqrt only if unsafe math optimizations are allowed,
4818 because of possible accuracy problems. */
4819 if (! flag_unsafe_math_optimizations)
4820 break;
4821 case BUILT_IN_SQRT:
4822 case BUILT_IN_SQRTF:
4823 case BUILT_IN_SQRTL:
4824 case BUILT_IN_FLOOR:
4825 case BUILT_IN_FLOORF:
4826 case BUILT_IN_FLOORL:
4827 case BUILT_IN_CEIL:
4828 case BUILT_IN_CEILF:
4829 case BUILT_IN_CEILL:
4830 case BUILT_IN_TRUNC:
4831 case BUILT_IN_TRUNCF:
4832 case BUILT_IN_TRUNCL:
4833 case BUILT_IN_ROUND:
4834 case BUILT_IN_ROUNDF:
4835 case BUILT_IN_ROUNDL:
4836 case BUILT_IN_NEARBYINT:
4837 case BUILT_IN_NEARBYINTF:
4838 case BUILT_IN_NEARBYINTL:
4839 target = expand_builtin_mathfn (exp, target, subtarget);
4840 if (target)
4841 return target;
4842 break;
4844 case BUILT_IN_POW:
4845 case BUILT_IN_POWF:
4846 case BUILT_IN_POWL:
4847 if (! flag_unsafe_math_optimizations)
4848 break;
4849 target = expand_builtin_pow (exp, target, subtarget);
4850 if (target)
4851 return target;
4852 break;
4854 case BUILT_IN_ATAN2:
4855 case BUILT_IN_ATAN2F:
4856 case BUILT_IN_ATAN2L:
4857 if (! flag_unsafe_math_optimizations)
4858 break;
4859 target = expand_builtin_mathfn_2 (exp, target, subtarget);
4860 if (target)
4861 return target;
4862 break;
4864 case BUILT_IN_APPLY_ARGS:
4865 return expand_builtin_apply_args ();
4867 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
4868 FUNCTION with a copy of the parameters described by
4869 ARGUMENTS, and ARGSIZE. It returns a block of memory
4870 allocated on the stack into which is stored all the registers
4871 that might possibly be used for returning the result of a
4872 function. ARGUMENTS is the value returned by
4873 __builtin_apply_args. ARGSIZE is the number of bytes of
4874 arguments that must be copied. ??? How should this value be
4875 computed? We'll also need a safe worst case value for varargs
4876 functions. */
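/* Illustrative sketch (editor's addition, hypothetical user code): a typical
   forwarding wrapper looks like

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);

   where target_fn and the guessed 64-byte argument size are supplied by the
   user, per the open question in the comment above.  */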
4877 case BUILT_IN_APPLY:
4878 if (!validate_arglist (arglist, POINTER_TYPE,
4879 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4880 && !validate_arglist (arglist, REFERENCE_TYPE,
4881 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4882 return const0_rtx;
4883 else
4885 int i;
4886 tree t;
4887 rtx ops[3];
4889 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
4890 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
4892 return expand_builtin_apply (ops[0], ops[1], ops[2]);
4895 /* __builtin_return (RESULT) causes the function to return the
4896 value described by RESULT. RESULT is address of the block of
4897 memory returned by __builtin_apply. */
4898 case BUILT_IN_RETURN:
4899 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4900 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
4901 NULL_RTX, VOIDmode, 0));
4902 return const0_rtx;
4904 case BUILT_IN_SAVEREGS:
4905 return expand_builtin_saveregs ();
4907 case BUILT_IN_ARGS_INFO:
4908 return expand_builtin_args_info (arglist);
4910 /* Return the address of the first anonymous stack arg. */
4911 case BUILT_IN_NEXT_ARG:
4912 return expand_builtin_next_arg (arglist);
4914 case BUILT_IN_CLASSIFY_TYPE:
4915 return expand_builtin_classify_type (arglist);
4917 case BUILT_IN_CONSTANT_P:
4918 return expand_builtin_constant_p (arglist, target_mode);
4920 case BUILT_IN_FRAME_ADDRESS:
4921 case BUILT_IN_RETURN_ADDRESS:
4922 return expand_builtin_frame_address (fndecl, arglist);
4924 /* Returns the address of the area where the structure is returned.
4925 0 otherwise. */
4926 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
4927 if (arglist != 0
4928 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
4929 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
4930 return const0_rtx;
4931 else
4932 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4934 case BUILT_IN_ALLOCA:
4935 target = expand_builtin_alloca (arglist, target);
4936 if (target)
4937 return target;
4938 break;
4940 case BUILT_IN_FFS:
4941 case BUILT_IN_FFSL:
4942 case BUILT_IN_FFSLL:
4943 target = expand_builtin_unop (target_mode, arglist, target,
4944 subtarget, ffs_optab);
4945 if (target)
4946 return target;
4947 break;
4949 case BUILT_IN_CLZ:
4950 case BUILT_IN_CLZL:
4951 case BUILT_IN_CLZLL:
4952 target = expand_builtin_unop (target_mode, arglist, target,
4953 subtarget, clz_optab);
4954 if (target)
4955 return target;
4956 break;
4958 case BUILT_IN_CTZ:
4959 case BUILT_IN_CTZL:
4960 case BUILT_IN_CTZLL:
4961 target = expand_builtin_unop (target_mode, arglist, target,
4962 subtarget, ctz_optab);
4963 if (target)
4964 return target;
4965 break;
4967 case BUILT_IN_POPCOUNT:
4968 case BUILT_IN_POPCOUNTL:
4969 case BUILT_IN_POPCOUNTLL:
4970 target = expand_builtin_unop (target_mode, arglist, target,
4971 subtarget, popcount_optab);
4972 if (target)
4973 return target;
4974 break;
4976 case BUILT_IN_PARITY:
4977 case BUILT_IN_PARITYL:
4978 case BUILT_IN_PARITYLL:
4979 target = expand_builtin_unop (target_mode, arglist, target,
4980 subtarget, parity_optab);
4981 if (target)
4982 return target;
4983 break;
4985 case BUILT_IN_STRLEN:
4986 target = expand_builtin_strlen (arglist, target, target_mode);
4987 if (target)
4988 return target;
4989 break;
4991 case BUILT_IN_STRCPY:
4992 target = expand_builtin_strcpy (arglist, target, mode);
4993 if (target)
4994 return target;
4995 break;
4997 case BUILT_IN_STRNCPY:
4998 target = expand_builtin_strncpy (arglist, target, mode);
4999 if (target)
5000 return target;
5001 break;
5003 case BUILT_IN_STPCPY:
5004 target = expand_builtin_stpcpy (arglist, target, mode);
5005 if (target)
5006 return target;
5007 break;
5009 case BUILT_IN_STRCAT:
5010 target = expand_builtin_strcat (arglist, target, mode);
5011 if (target)
5012 return target;
5013 break;
5015 case BUILT_IN_STRNCAT:
5016 target = expand_builtin_strncat (arglist, target, mode);
5017 if (target)
5018 return target;
5019 break;
5021 case BUILT_IN_STRSPN:
5022 target = expand_builtin_strspn (arglist, target, mode);
5023 if (target)
5024 return target;
5025 break;
5027 case BUILT_IN_STRCSPN:
5028 target = expand_builtin_strcspn (arglist, target, mode);
5029 if (target)
5030 return target;
5031 break;
5033 case BUILT_IN_STRSTR:
5034 target = expand_builtin_strstr (arglist, target, mode);
5035 if (target)
5036 return target;
5037 break;
5039 case BUILT_IN_STRPBRK:
5040 target = expand_builtin_strpbrk (arglist, target, mode);
5041 if (target)
5042 return target;
5043 break;
5045 case BUILT_IN_INDEX:
5046 case BUILT_IN_STRCHR:
5047 target = expand_builtin_strchr (arglist, target, mode);
5048 if (target)
5049 return target;
5050 break;
5052 case BUILT_IN_RINDEX:
5053 case BUILT_IN_STRRCHR:
5054 target = expand_builtin_strrchr (arglist, target, mode);
5055 if (target)
5056 return target;
5057 break;
5059 case BUILT_IN_MEMCPY:
5060 target = expand_builtin_memcpy (arglist, target, mode);
5061 if (target)
5062 return target;
5063 break;
5065 case BUILT_IN_MEMPCPY:
5066 target = expand_builtin_mempcpy (arglist, target, mode, /*endp=*/ 1);
5067 if (target)
5068 return target;
5069 break;
5071 case BUILT_IN_MEMMOVE:
5072 target = expand_builtin_memmove (arglist, target, mode);
5073 if (target)
5074 return target;
5075 break;
5077 case BUILT_IN_BCOPY:
5078 target = expand_builtin_bcopy (arglist);
5079 if (target)
5080 return target;
5081 break;
5083 case BUILT_IN_MEMSET:
5084 target = expand_builtin_memset (arglist, target, mode);
5085 if (target)
5086 return target;
5087 break;
5089 case BUILT_IN_BZERO:
5090 target = expand_builtin_bzero (arglist);
5091 if (target)
5092 return target;
5093 break;
5095 case BUILT_IN_STRCMP:
5096 target = expand_builtin_strcmp (exp, target, mode);
5097 if (target)
5098 return target;
5099 break;
5101 case BUILT_IN_STRNCMP:
5102 target = expand_builtin_strncmp (exp, target, mode);
5103 if (target)
5104 return target;
5105 break;
5107 case BUILT_IN_BCMP:
5108 case BUILT_IN_MEMCMP:
5109 target = expand_builtin_memcmp (exp, arglist, target, mode);
5110 if (target)
5111 return target;
5112 break;
5114 case BUILT_IN_SETJMP:
5115 target = expand_builtin_setjmp (arglist, target);
5116 if (target)
5117 return target;
5118 break;
5120 /* __builtin_longjmp is passed a pointer to an array of five words.
5121 It's similar to the C library longjmp function but works with
5122 __builtin_setjmp above. */
5123 case BUILT_IN_LONGJMP:
5124 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5125 break;
5126 else
5128 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
5129 VOIDmode, 0);
5130 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
5131 NULL_RTX, VOIDmode, 0);
5133 if (value != const1_rtx)
5135 error ("__builtin_longjmp second argument must be 1");
5136 return const0_rtx;
5139 expand_builtin_longjmp (buf_addr, value);
5140 return const0_rtx;
5143 case BUILT_IN_TRAP:
5144 expand_builtin_trap ();
5145 return const0_rtx;
5147 case BUILT_IN_FPUTS:
5148 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 0);
5149 if (target)
5150 return target;
5151 break;
5152 case BUILT_IN_FPUTS_UNLOCKED:
5153 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 1);
5154 if (target)
5155 return target;
5156 break;
5158 case BUILT_IN_SPRINTF:
5159 target = expand_builtin_sprintf (arglist, target, mode);
5160 if (target)
5161 return target;
5162 break;
5164 /* Various hooks for the DWARF 2 __throw routine. */
5165 case BUILT_IN_UNWIND_INIT:
5166 expand_builtin_unwind_init ();
5167 return const0_rtx;
5168 case BUILT_IN_DWARF_CFA:
5169 return virtual_cfa_rtx;
5170 #ifdef DWARF2_UNWIND_INFO
5171 case BUILT_IN_DWARF_SP_COLUMN:
5172 return expand_builtin_dwarf_sp_column ();
5173 case BUILT_IN_INIT_DWARF_REG_SIZES:
5174 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
5175 return const0_rtx;
5176 #endif
5177 case BUILT_IN_FROB_RETURN_ADDR:
5178 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
5179 case BUILT_IN_EXTRACT_RETURN_ADDR:
5180 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
5181 case BUILT_IN_EH_RETURN:
5182 expand_builtin_eh_return (TREE_VALUE (arglist),
5183 TREE_VALUE (TREE_CHAIN (arglist)));
5184 return const0_rtx;
5185 #ifdef EH_RETURN_DATA_REGNO
5186 case BUILT_IN_EH_RETURN_DATA_REGNO:
5187 return expand_builtin_eh_return_data_regno (arglist);
5188 #endif
5189 case BUILT_IN_VA_START:
5190 case BUILT_IN_STDARG_START:
5191 return expand_builtin_va_start (arglist);
5192 case BUILT_IN_VA_END:
5193 return expand_builtin_va_end (arglist);
5194 case BUILT_IN_VA_COPY:
5195 return expand_builtin_va_copy (arglist);
5196 case BUILT_IN_EXPECT:
5197 return expand_builtin_expect (arglist, target);
5198 case BUILT_IN_PREFETCH:
5199 expand_builtin_prefetch (arglist);
5200 return const0_rtx;
5203 default: /* just do library call, if unknown builtin */
5204 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
5205 error ("built-in function `%s' not currently supported",
5206 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5209 /* The switch statement above can drop through to cause the function
5210 to be called normally. */
5211 return expand_call (exp, target, ignore);
5214 /* Determine whether a tree node represents a call to a built-in
5215 math function. If the tree T is a call to a built-in function
5216 taking a single real argument, then the return value is the
5217 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
5218 the return value is END_BUILTINS. */
5220 enum built_in_function
5221 builtin_mathfn_code (tree t)
5223 tree fndecl, arglist;
5225 if (TREE_CODE (t) != CALL_EXPR
5226 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
5227 return END_BUILTINS;
5229 fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
5230 if (TREE_CODE (fndecl) != FUNCTION_DECL
5231 || ! DECL_BUILT_IN (fndecl)
5232 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5233 return END_BUILTINS;
5235 arglist = TREE_OPERAND (t, 1);
5236 if (! arglist
5237 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5238 return END_BUILTINS;
5240 arglist = TREE_CHAIN (arglist);
5241 switch (DECL_FUNCTION_CODE (fndecl))
5243 case BUILT_IN_POW:
5244 case BUILT_IN_POWF:
5245 case BUILT_IN_POWL:
5246 case BUILT_IN_ATAN2:
5247 case BUILT_IN_ATAN2F:
5248 case BUILT_IN_ATAN2L:
5249 if (! arglist
5250 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
5251 || TREE_CHAIN (arglist))
5252 return END_BUILTINS;
5253 break;
5255 default:
5256 if (arglist)
5257 return END_BUILTINS;
5258 break;
5261 return DECL_FUNCTION_CODE (fndecl);
5264 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
5265 constant. ARGLIST is the argument list of the call. */
5267 static tree
5268 fold_builtin_constant_p (tree arglist)
5270 if (arglist == 0)
5271 return 0;
5273 arglist = TREE_VALUE (arglist);
5275 /* We return 1 for a numeric type that's known to be a constant
5276 value at compile-time or for an aggregate type that's a
5277 literal constant. */
5278 STRIP_NOPS (arglist);
5280 /* If we know this is a constant, return the integer constant one. */
5281 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
5282 || (TREE_CODE (arglist) == CONSTRUCTOR
5283 && TREE_CONSTANT (arglist))
5284 || (TREE_CODE (arglist) == ADDR_EXPR
5285 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
5286 return integer_one_node;
5288 /* If we aren't going to be running CSE or this expression
5289 has side effects, show we don't know it to be a constant.
5290 Likewise if it's a pointer or aggregate type, since in those
5291 cases we only want literals, which are only optimized
5292 when generating RTL, not later.
5293 And finally, if we are compiling an initializer, not code, we
5294 need to return a definite result now; there's not going to be any
5295 more optimization done. */
5296 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
5297 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
5298 || POINTER_TYPE_P (TREE_TYPE (arglist))
5299 || cfun == 0)
5300 return integer_zero_node;
5302 return 0;
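/* Illustrative sketch (editor's addition): at this stage the hypothetical
   expressions fold as

     __builtin_constant_p (42)      -> 1
     __builtin_constant_p ("abc")   -> 1   (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (i)       -> 0 once CSE is no longer expected,
                                       otherwise deferred for later folding

   where i is a non-constant int variable.  */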
5305 /* Fold a call to __builtin_classify_type. */
5307 static tree
5308 fold_builtin_classify_type (tree arglist)
5310 if (arglist == 0)
5311 return build_int_2 (no_type_class, 0);
5313 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
5316 /* Fold a call to __builtin_inf or __builtin_huge_val. */
5318 static tree
5319 fold_builtin_inf (tree type, int warn)
5321 REAL_VALUE_TYPE real;
5323 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
5324 warning ("target format does not support infinity");
5326 real_inf (&real);
5327 return build_real (type, real);
5330 /* Fold a call to __builtin_nan or __builtin_nans. */
5332 static tree
5333 fold_builtin_nan (tree arglist, tree type, int quiet)
5335 REAL_VALUE_TYPE real;
5336 const char *str;
5338 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5339 return 0;
5340 str = c_getstr (TREE_VALUE (arglist));
5341 if (!str)
5342 return 0;
5344 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
5345 return 0;
5347 return build_real (type, real);
5350 /* EXP is assumed to be a builtin call where truncation can be propagated
5351 across (for instance floor ((double) f) == (double) floorf (f)).
5352 Do the transformation. */
5353 static tree
5354 fold_trunc_transparent_mathfn (tree exp)
5356 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5357 tree arglist = TREE_OPERAND (exp, 1);
5358 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5360 if (optimize && validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5362 tree arg0 = strip_float_extensions (TREE_VALUE (arglist));
5363 tree ftype = TREE_TYPE (exp);
5364 tree newtype = TREE_TYPE (arg0);
5365 tree decl;
5367 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
5368 && (decl = mathfn_built_in (newtype, fcode)))
5370 arglist =
5371 build_tree_list (NULL_TREE, fold (convert (newtype, arg0)));
5372 return convert (ftype,
5373 build_function_call_expr (decl, arglist));
5376 return 0;
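/* Illustrative sketch (editor's addition): with a float operand f and
   optimization enabled, the transformation above rewrites

     floor ((double) f)   ->   (double) floorf (f)

   provided mathfn_built_in finds a floorf decl for the narrower type.  */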
5379 /* Fold function call to builtin cabs, cabsf or cabsl. FNDECL is the
5380 function's DECL, ARGLIST is the argument list and TYPE is the return
5381 type. Return NULL_TREE if no simplification can be made. */
5383 static tree
5384 fold_builtin_cabs (tree fndecl, tree arglist, tree type)
5386 tree arg;
5388 if (!arglist || TREE_CHAIN (arglist))
5389 return NULL_TREE;
5391 arg = TREE_VALUE (arglist);
5392 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
5393 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
5394 return NULL_TREE;
5396 /* Evaluate cabs of a constant at compile-time. */
5397 if (flag_unsafe_math_optimizations
5398 && TREE_CODE (arg) == COMPLEX_CST
5399 && TREE_CODE (TREE_REALPART (arg)) == REAL_CST
5400 && TREE_CODE (TREE_IMAGPART (arg)) == REAL_CST
5401 && ! TREE_CONSTANT_OVERFLOW (TREE_REALPART (arg))
5402 && ! TREE_CONSTANT_OVERFLOW (TREE_IMAGPART (arg)))
5404 REAL_VALUE_TYPE r, i;
5406 r = TREE_REAL_CST (TREE_REALPART (arg));
5407 i = TREE_REAL_CST (TREE_IMAGPART (arg));
5409 real_arithmetic (&r, MULT_EXPR, &r, &r);
5410 real_arithmetic (&i, MULT_EXPR, &i, &i);
5411 real_arithmetic (&r, PLUS_EXPR, &r, &i);
5412 if (real_sqrt (&r, TYPE_MODE (type), &r)
5413 || ! flag_trapping_math)
5414 return build_real (type, r);
5417 /* If either part is zero, cabs is fabs of the other. */
5418 if (TREE_CODE (arg) == COMPLEX_EXPR
5419 && real_zerop (TREE_OPERAND (arg, 0)))
5420 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1)));
5421 if (TREE_CODE (arg) == COMPLEX_EXPR
5422 && real_zerop (TREE_OPERAND (arg, 1)))
5423 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0)));
5425 if (flag_unsafe_math_optimizations)
5427 enum built_in_function fcode;
5428 tree sqrtfn;
5430 fcode = DECL_FUNCTION_CODE (fndecl);
5431 if (fcode == BUILT_IN_CABS)
5432 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
5433 else if (fcode == BUILT_IN_CABSF)
5434 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
5435 else if (fcode == BUILT_IN_CABSL)
5436 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
5437 else
5438 sqrtfn = NULL_TREE;
5440 if (sqrtfn != NULL_TREE)
5442 tree rpart, ipart, result, arglist;
5444 rpart = fold (build1 (REALPART_EXPR, type, arg));
5445 ipart = fold (build1 (IMAGPART_EXPR, type, arg));
5447 rpart = save_expr (rpart);
5448 ipart = save_expr (ipart);
5450 result = fold (build (PLUS_EXPR, type,
5451 fold (build (MULT_EXPR, type,
5452 rpart, rpart)),
5453 fold (build (MULT_EXPR, type,
5454 ipart, ipart))));
5456 arglist = build_tree_list (NULL_TREE, result);
5457 return build_function_call_expr (sqrtfn, arglist);
5461 return NULL_TREE;
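/* Illustrative sketch (editor's addition), for hypothetical doubles x and y:

     cabs (x + 0.0i)      -> fabs (x)              (one part is zero)
     cabs (3.0 + 4.0i)    -> 5.0                   (-funsafe-math-optimizations)
     cabs (x + y * 1.0i)  -> sqrt (x*x + y*y)      (-funsafe-math-optimizations)
   */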
5464 /* Used by constant folding to eliminate some builtin calls early. EXP is
5465 the CALL_EXPR of a call to a builtin function. */
5467 tree
5468 fold_builtin (tree exp)
5470 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5471 tree arglist = TREE_OPERAND (exp, 1);
5472 tree type = TREE_TYPE (TREE_TYPE (fndecl));
5474 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5475 return 0;
5477 switch (DECL_FUNCTION_CODE (fndecl))
5479 case BUILT_IN_CONSTANT_P:
5480 return fold_builtin_constant_p (arglist);
5482 case BUILT_IN_CLASSIFY_TYPE:
5483 return fold_builtin_classify_type (arglist);
5485 case BUILT_IN_STRLEN:
5486 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5488 tree len = c_strlen (TREE_VALUE (arglist), 0);
5489 if (len)
5491 /* Convert from the internal "sizetype" type to "size_t". */
5492 if (size_type_node)
5493 len = convert (size_type_node, len);
5494 return len;
5497 break;
5499 case BUILT_IN_FABS:
5500 case BUILT_IN_FABSF:
5501 case BUILT_IN_FABSL:
5502 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5503 return fold (build1 (ABS_EXPR, type, TREE_VALUE (arglist)));
5504 break;
5506 case BUILT_IN_CABS:
5507 case BUILT_IN_CABSF:
5508 case BUILT_IN_CABSL:
5509 return fold_builtin_cabs (fndecl, arglist, type);
5511 case BUILT_IN_SQRT:
5512 case BUILT_IN_SQRTF:
5513 case BUILT_IN_SQRTL:
5514 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5516 enum built_in_function fcode;
5517 tree arg = TREE_VALUE (arglist);
5519 /* Optimize sqrt of constant value. */
5520 if (TREE_CODE (arg) == REAL_CST
5521 && ! TREE_CONSTANT_OVERFLOW (arg))
5523 REAL_VALUE_TYPE r, x;
5525 x = TREE_REAL_CST (arg);
5526 if (real_sqrt (&r, TYPE_MODE (type), &x)
5527 || (!flag_trapping_math && !flag_errno_math))
5528 return build_real (type, r);
5531 /* Optimize sqrt(exp(x)) = exp(x*0.5). */
5532 fcode = builtin_mathfn_code (arg);
5533 if (flag_unsafe_math_optimizations
5534 && (fcode == BUILT_IN_EXP
5535 || fcode == BUILT_IN_EXPF
5536 || fcode == BUILT_IN_EXPL))
5538 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5539 arg = fold (build (MULT_EXPR, type,
5540 TREE_VALUE (TREE_OPERAND (arg, 1)),
5541 build_real (type, dconsthalf)));
5542 arglist = build_tree_list (NULL_TREE, arg);
5543 return build_function_call_expr (expfn, arglist);
5546 /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5). */
5547 if (flag_unsafe_math_optimizations
5548 && (fcode == BUILT_IN_POW
5549 || fcode == BUILT_IN_POWF
5550 || fcode == BUILT_IN_POWL))
5552 tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5553 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5554 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5555 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5556 build_real (type, dconsthalf)));
5557 arglist = tree_cons (NULL_TREE, arg0,
5558 build_tree_list (NULL_TREE, narg1));
5559 return build_function_call_expr (powfn, arglist);
5562 break;
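/* Taken together, the cases above fold sqrt of a numeric constant
   (e.g. sqrt (4.0) becomes 2.0) whenever that is safe for the current
   -ftrapping-math/-fmath-errno settings, and with
   -funsafe-math-optimizations rewrite sqrt (exp (x)) as exp (x*0.5)
   and sqrt (pow (x, y)) as pow (x, y*0.5). */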
5564 case BUILT_IN_SIN:
5565 case BUILT_IN_SINF:
5566 case BUILT_IN_SINL:
5567 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5569 tree arg = TREE_VALUE (arglist);
5571 /* Optimize sin(0.0) = 0.0. */
5572 if (real_zerop (arg))
5573 return arg;
5575 break;
5577 case BUILT_IN_COS:
5578 case BUILT_IN_COSF:
5579 case BUILT_IN_COSL:
5580 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5582 tree arg = TREE_VALUE (arglist);
5584 /* Optimize cos(0.0) = 1.0. */
5585 if (real_zerop (arg))
5586 return build_real (type, dconst1);
5588 /* Optimize cos(-x) into cos(x). */
5589 if (TREE_CODE (arg) == NEGATE_EXPR)
5591 tree arglist = build_tree_list (NULL_TREE,
5592 TREE_OPERAND (arg, 0));
5593 return build_function_call_expr (fndecl, arglist);
5596 break;
5598 case BUILT_IN_EXP:
5599 case BUILT_IN_EXPF:
5600 case BUILT_IN_EXPL:
5601 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5603 enum built_in_function fcode;
5604 tree arg = TREE_VALUE (arglist);
5606 /* Optimize exp(0.0) = 1.0. */
5607 if (real_zerop (arg))
5608 return build_real (type, dconst1);
5610 /* Optimize exp(1.0) = e. */
5611 if (real_onep (arg))
5613 REAL_VALUE_TYPE cst;
5615 if (! builtin_dconsts_init)
5616 init_builtin_dconsts ();
5617 real_convert (&cst, TYPE_MODE (type), &dconste);
5618 return build_real (type, cst);
5621 /* Attempt to evaluate exp at compile time. */
5622 if (flag_unsafe_math_optimizations
5623 && TREE_CODE (arg) == REAL_CST
5624 && ! TREE_CONSTANT_OVERFLOW (arg))
5626 REAL_VALUE_TYPE cint;
5627 REAL_VALUE_TYPE c;
5628 HOST_WIDE_INT n;
5630 c = TREE_REAL_CST (arg);
5631 n = real_to_integer (&c);
5632 real_from_integer (&cint, VOIDmode, n,
5633 n < 0 ? -1 : 0, 0);
5634 if (real_identical (&c, &cint))
5636 REAL_VALUE_TYPE x;
5638 if (! builtin_dconsts_init)
5639 init_builtin_dconsts ();
5640 real_powi (&x, TYPE_MODE (type), &dconste, n);
5641 return build_real (type, x);
5645 /* Optimize exp(log(x)) = x. */
5646 fcode = builtin_mathfn_code (arg);
5647 if (flag_unsafe_math_optimizations
5648 && (fcode == BUILT_IN_LOG
5649 || fcode == BUILT_IN_LOGF
5650 || fcode == BUILT_IN_LOGL))
5651 return TREE_VALUE (TREE_OPERAND (arg, 1));
5653 break;
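/* Net effect of the exp cases: exp (0.0) folds to 1.0 and exp (1.0)
   to e unconditionally, while -funsafe-math-optimizations additionally
   allows an integral constant argument to be evaluated at compile time
   (e.g. exp (2.0) becomes e*e) and collapses exp (log (x)) to x. */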
5655 case BUILT_IN_LOG:
5656 case BUILT_IN_LOGF:
5657 case BUILT_IN_LOGL:
5658 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5660 enum built_in_function fcode;
5661 tree arg = TREE_VALUE (arglist);
5663 /* Optimize log(1.0) = 0.0. */
5664 if (real_onep (arg))
5665 return build_real (type, dconst0);
5667 /* Optimize log(exp(x)) = x. */
5668 fcode = builtin_mathfn_code (arg);
5669 if (flag_unsafe_math_optimizations
5670 && (fcode == BUILT_IN_EXP
5671 || fcode == BUILT_IN_EXPF
5672 || fcode == BUILT_IN_EXPL))
5673 return TREE_VALUE (TREE_OPERAND (arg, 1));
5675 /* Optimize log(sqrt(x)) = log(x)*0.5. */
5676 if (flag_unsafe_math_optimizations
5677 && (fcode == BUILT_IN_SQRT
5678 || fcode == BUILT_IN_SQRTF
5679 || fcode == BUILT_IN_SQRTL))
5681 tree logfn = build_function_call_expr (fndecl,
5682 TREE_OPERAND (arg, 1));
5683 return fold (build (MULT_EXPR, type, logfn,
5684 build_real (type, dconsthalf)));
5687 /* Optimize log(pow(x,y)) = y*log(x). */
5688 if (flag_unsafe_math_optimizations
5689 && (fcode == BUILT_IN_POW
5690 || fcode == BUILT_IN_POWF
5691 || fcode == BUILT_IN_POWL))
5693 tree arg0, arg1, logfn;
5695 arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5696 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5697 arglist = build_tree_list (NULL_TREE, arg0);
5698 logfn = build_function_call_expr (fndecl, arglist);
5699 return fold (build (MULT_EXPR, type, arg1, logfn));
5702 break;
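/* The log cases mirror the exp ones: log (1.0) folds to 0.0
   unconditionally, and with -funsafe-math-optimizations log (exp (x))
   collapses to x, log (sqrt (x)) becomes log (x) * 0.5, and
   log (pow (x, y)) becomes y * log (x). */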
5704 case BUILT_IN_TAN:
5705 case BUILT_IN_TANF:
5706 case BUILT_IN_TANL:
5707 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5709 enum built_in_function fcode;
5710 tree arg = TREE_VALUE (arglist);
5712 /* Optimize tan(0.0) = 0.0. */
5713 if (real_zerop (arg))
5714 return arg;
5716 /* Optimize tan(atan(x)) = x. */
5717 fcode = builtin_mathfn_code (arg);
5718 if (flag_unsafe_math_optimizations
5719 && (fcode == BUILT_IN_ATAN
5720 || fcode == BUILT_IN_ATANF
5721 || fcode == BUILT_IN_ATANL))
5722 return TREE_VALUE (TREE_OPERAND (arg, 1));
5724 break;
5726 case BUILT_IN_ATAN:
5727 case BUILT_IN_ATANF:
5728 case BUILT_IN_ATANL:
5729 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5731 tree arg = TREE_VALUE (arglist);
5733 /* Optimize atan(0.0) = 0.0. */
5734 if (real_zerop (arg))
5735 return arg;
5737 /* Optimize atan(1.0) = pi/4. */
5738 if (real_onep (arg))
5740 REAL_VALUE_TYPE cst;
5742 if (! builtin_dconsts_init)
5743 init_builtin_dconsts ();
5744 real_convert (&cst, TYPE_MODE (type), &dconstpi);
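/* Compute pi/4 by lowering the binary exponent of pi by two. */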
5745 cst.exp -= 2;
5746 return build_real (type, cst);
5749 break;
5751 case BUILT_IN_POW:
5752 case BUILT_IN_POWF:
5753 case BUILT_IN_POWL:
5754 if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5756 enum built_in_function fcode;
5757 tree arg0 = TREE_VALUE (arglist);
5758 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5760 /* Optimize pow(1.0,y) = 1.0. */
5761 if (real_onep (arg0))
5762 return omit_one_operand (type, build_real (type, dconst1), arg1);
5764 if (TREE_CODE (arg1) == REAL_CST
5765 && ! TREE_CONSTANT_OVERFLOW (arg1))
5767 REAL_VALUE_TYPE c;
5768 c = TREE_REAL_CST (arg1);
5770 /* Optimize pow(x,0.0) = 1.0. */
5771 if (REAL_VALUES_EQUAL (c, dconst0))
5772 return omit_one_operand (type, build_real (type, dconst1),
5773 arg0);
5775 /* Optimize pow(x,1.0) = x. */
5776 if (REAL_VALUES_EQUAL (c, dconst1))
5777 return arg0;
5779 /* Optimize pow(x,-1.0) = 1.0/x. */
5780 if (REAL_VALUES_EQUAL (c, dconstm1))
5781 return fold (build (RDIV_EXPR, type,
5782 build_real (type, dconst1),
5783 arg0));
5785 /* Optimize pow(x,2.0) = x*x. */
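/* The two extra conditions below ensure that save_expr may introduce
   a temporary: we must not be at global scope and ARG0 must not
   contain a PLACEHOLDER_EXPR. */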
5786 if (REAL_VALUES_EQUAL (c, dconst2)
5787 && (*lang_hooks.decls.global_bindings_p) () == 0
5788 && ! CONTAINS_PLACEHOLDER_P (arg0))
5790 arg0 = save_expr (arg0);
5791 return fold (build (MULT_EXPR, type, arg0, arg0));
5794 /* Optimize pow(x,-2.0) = 1.0/(x*x). */
5795 if (flag_unsafe_math_optimizations
5796 && REAL_VALUES_EQUAL (c, dconstm2)
5797 && (*lang_hooks.decls.global_bindings_p) () == 0
5798 && ! CONTAINS_PLACEHOLDER_P (arg0))
5800 arg0 = save_expr (arg0);
5801 return fold (build (RDIV_EXPR, type,
5802 build_real (type, dconst1),
5803 fold (build (MULT_EXPR, type,
5804 arg0, arg0))));
5807 /* Optimize pow(x,0.5) = sqrt(x). */
5808 if (flag_unsafe_math_optimizations
5809 && REAL_VALUES_EQUAL (c, dconsthalf))
5811 tree sqrtfn;
5813 fcode = DECL_FUNCTION_CODE (fndecl);
5814 if (fcode == BUILT_IN_POW)
5815 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
5816 else if (fcode == BUILT_IN_POWF)
5817 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
5818 else if (fcode == BUILT_IN_POWL)
5819 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
5820 else
5821 sqrtfn = NULL_TREE;
5823 if (sqrtfn != NULL_TREE)
5825 tree arglist = build_tree_list (NULL_TREE, arg0);
5826 return build_function_call_expr (sqrtfn, arglist);
5830 /* Attempt to evaluate pow at compile time. */
5831 if (TREE_CODE (arg0) == REAL_CST
5832 && ! TREE_CONSTANT_OVERFLOW (arg0))
5834 REAL_VALUE_TYPE cint;
5835 HOST_WIDE_INT n;
5837 n = real_to_integer (&c);
5838 real_from_integer (&cint, VOIDmode, n,
5839 n < 0 ? -1 : 0, 0);
5840 if (real_identical (&c, &cint))
5842 REAL_VALUE_TYPE x;
5843 bool inexact;
5845 x = TREE_REAL_CST (arg0);
5846 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
5847 if (flag_unsafe_math_optimizations || !inexact)
5848 return build_real (type, x);
5853 /* Optimize pow(exp(x),y) = exp(x*y). */
5854 fcode = builtin_mathfn_code (arg0);
5855 if (flag_unsafe_math_optimizations
5856 && (fcode == BUILT_IN_EXP
5857 || fcode == BUILT_IN_EXPF
5858 || fcode == BUILT_IN_EXPL))
5860 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5861 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5862 arg = fold (build (MULT_EXPR, type, arg, arg1));
5863 arglist = build_tree_list (NULL_TREE, arg);
5864 return build_function_call_expr (expfn, arglist);
5867 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
5868 if (flag_unsafe_math_optimizations
5869 && (fcode == BUILT_IN_SQRT
5870 || fcode == BUILT_IN_SQRTF
5871 || fcode == BUILT_IN_SQRTL))
5873 tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5874 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5875 build_real (type, dconsthalf)));
5877 arglist = tree_cons (NULL_TREE, narg0,
5878 build_tree_list (NULL_TREE, narg1));
5879 return build_function_call_expr (fndecl, arglist);
5882 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
5883 if (flag_unsafe_math_optimizations
5884 && (fcode == BUILT_IN_POW
5885 || fcode == BUILT_IN_POWF
5886 || fcode == BUILT_IN_POWL))
5888 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5889 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
5890 tree narg1 = fold (build (MULT_EXPR, type, arg01, arg1));
5891 arglist = tree_cons (NULL_TREE, arg00,
5892 build_tree_list (NULL_TREE, narg1));
5893 return build_function_call_expr (fndecl, arglist);
5896 break;
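/* Summarizing the pow cases: pow (x, 0.0), pow (x, 1.0) and
   pow (x, -1.0) fold to 1.0, x and 1.0/x unconditionally, and
   pow (x, 2.0) becomes x*x when a temporary for x can be introduced.
   An integral constant exponent is evaluated at compile time (e.g.
   pow (2.0, 3.0) becomes 8.0) when the result is exact or unsafe math
   optimizations are on. With -funsafe-math-optimizations, pow (x, 0.5)
   becomes sqrt (x), pow (x, -2.0) becomes 1.0/(x*x), and nested calls
   such as pow (exp (x), y), pow (sqrt (x), y) and pow (pow (x, y), z)
   are combined into a single call. */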
5898 case BUILT_IN_INF:
5899 case BUILT_IN_INFF:
5900 case BUILT_IN_INFL:
5901 return fold_builtin_inf (type, true);
5903 case BUILT_IN_HUGE_VAL:
5904 case BUILT_IN_HUGE_VALF:
5905 case BUILT_IN_HUGE_VALL:
5906 return fold_builtin_inf (type, false);
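/* Both groups fold to an infinity constant of TYPE; the boolean
   argument to fold_builtin_inf presumably controls whether a warning
   is issued when the target format has no infinities, something that
   is wanted for INFINITY but not for HUGE_VAL. */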
5908 case BUILT_IN_NAN:
5909 case BUILT_IN_NANF:
5910 case BUILT_IN_NANL:
5911 return fold_builtin_nan (arglist, type, true);
5913 case BUILT_IN_NANS:
5914 case BUILT_IN_NANSF:
5915 case BUILT_IN_NANSL:
5916 return fold_builtin_nan (arglist, type, false);
5918 case BUILT_IN_FLOOR:
5919 case BUILT_IN_FLOORF:
5920 case BUILT_IN_FLOORL:
5921 case BUILT_IN_CEIL:
5922 case BUILT_IN_CEILF:
5923 case BUILT_IN_CEILL:
5924 case BUILT_IN_TRUNC:
5925 case BUILT_IN_TRUNCF:
5926 case BUILT_IN_TRUNCL:
5927 case BUILT_IN_ROUND:
5928 case BUILT_IN_ROUNDF:
5929 case BUILT_IN_ROUNDL:
5930 case BUILT_IN_NEARBYINT:
5931 case BUILT_IN_NEARBYINTF:
5932 case BUILT_IN_NEARBYINTL:
5933 return fold_trunc_transparent_mathfn (exp);
5935 default:
5936 break;
5939 return 0;
5942 /* Conveniently construct a function call expression. */
5944 tree
5945 build_function_call_expr (tree fn, tree arglist)
5947 tree call_expr;
5949 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
5950 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
5951 call_expr, arglist);
5952 TREE_SIDE_EFFECTS (call_expr) = 1;
5953 return fold (call_expr);
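/* Typical use, as seen in the folders above:

       arglist = build_tree_list (NULL_TREE, arg);
       return build_function_call_expr (sqrtfn, arglist);

   Note that the result has already been passed through fold, so
   further simplifications of the new call get a chance to apply. */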
5956 /* This function validates the types of a function call argument list
5957 represented as a tree chain of parameters against a specified list
5958 of tree_codes. If the last specifier is a 0, that represents an
5959 ellipsis; otherwise the last specifier must be a VOID_TYPE. */
5961 static int
5962 validate_arglist (tree arglist, ...)
5963 {
5964 enum tree_code code;
5965 int res = 0;
5966 va_list ap;
5968 va_start (ap, arglist);
5970 do
5971 {
5972 code = va_arg (ap, enum tree_code);
5973 switch (code)
5974 {
5975 case 0:
5976 /* This signifies an ellipsis; any further arguments are all ok. */
5977 res = 1;
5978 goto end;
5979 case VOID_TYPE:
5980 /* This signifies an endlink: if no arguments remain, return
5981 true, otherwise return false. */
5982 res = arglist == 0;
5983 goto end;
5984 default:
5985 /* If no parameters remain or the parameter's code does not
5986 match the specified code, return false. Otherwise continue
5987 checking any remaining arguments. */
5988 if (arglist == 0
5989 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
5990 goto end;
5991 break;
5992 }
5993 arglist = TREE_CHAIN (arglist);
5994 }
5995 while (1);
5997 /* We use gotos here so that every exit path reaches the single
5998 va_end call below. */
5999 end: ;
6000 va_end (ap);
6002 return res;
6003 }
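/* Example uses from fold_builtin above:

       validate_arglist (arglist, REAL_TYPE, VOID_TYPE)
       validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE)

   accept exactly one and exactly two floating point arguments
   respectively; ending the list with 0 instead of VOID_TYPE would
   allow any further arguments. */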
6005 /* Default version of target-specific builtin setup that does nothing. */
6007 void
6008 default_init_builtins (void)
6012 /* Default target-specific builtin expander that does nothing. */
6014 rtx
6015 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
6016 rtx target ATTRIBUTE_UNUSED,
6017 rtx subtarget ATTRIBUTE_UNUSED,
6018 enum machine_mode mode ATTRIBUTE_UNUSED,
6019 int ignore ATTRIBUTE_UNUSED)
6021 return NULL_RTX;
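/* These two defaults are used by targets that define no
   machine-specific builtins; backends that do provide builtins install
   their own routines through the targetm.init_builtins and
   targetm.expand_builtin hooks instead. */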
6024 /* Instantiate all remaining CONSTANT_P_RTX nodes. */
6026 void
6027 purge_builtin_constant_p (void)
6029 rtx insn, set, arg, new, note;
6031 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6032 if (INSN_P (insn)
6033 && (set = single_set (insn)) != NULL_RTX
6034 && (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
6035 || (GET_CODE (arg) == SUBREG
6036 && (GET_CODE (arg = SUBREG_REG (arg))
6037 == CONSTANT_P_RTX))))
6039 arg = XEXP (arg, 0);
6040 new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
6041 validate_change (insn, &SET_SRC (set), new, 0);
6043 /* Remove the REG_EQUAL note from the insn. */
6044 if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
6045 remove_note (insn, note);
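/* In other words, every __builtin_constant_p that survives to this
   point is resolved now: if optimization has turned its operand into
   something satisfying CONSTANT_P the insn now loads 1, otherwise 0,
   and the REG_EQUAL note that referred to the old source is
   dropped. */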
6049 /* Returns true if EXP represents data that would potentially reside
6050 in a readonly section. */
6052 static bool
6053 readonly_data_expr (tree exp)
6055 STRIP_NOPS (exp);
6057 if (TREE_CODE (exp) == ADDR_EXPR)
6058 return decl_readonly_section (TREE_OPERAND (exp, 0), 0);
6059 else
6060 return false;
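/* For example, the address of a string literal (without
   -fwritable-strings) or of a const static object satisfies this
   test, since such decls are placed in a read-only data section. */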