Missed one in last change.
[official-gcc.git] / gcc / builtins.c
blob94712150ad1f1d78754eaca9faaddaa1385e073c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
51 /* Register mappings for target machines without register windows. */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
54 #endif
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
57 #endif
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 #endif
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) #X,
68 const char *const built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
72 #undef DEF_BUILTIN
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases. */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 /* Trigonometric and mathematical constants used in builtin folding. */
83 static bool builtin_dconsts_init = 0;
84 static REAL_VALUE_TYPE dconstpi;
85 static REAL_VALUE_TYPE dconste;
87 static int get_pointer_alignment (tree, unsigned int);
88 static tree c_strlen (tree, int);
89 static const char *c_getstr (tree);
90 static rtx c_readstr (const char *, enum machine_mode);
91 static int target_char_cast (tree, char *);
92 static rtx get_memory_rtx (tree);
93 static int apply_args_size (void);
94 static int apply_result_size (void);
95 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
96 static rtx result_vector (int, rtx);
97 #endif
98 static rtx expand_builtin_setjmp (tree, rtx);
99 static void expand_builtin_prefetch (tree);
100 static rtx expand_builtin_apply_args (void);
101 static rtx expand_builtin_apply_args_1 (void);
102 static rtx expand_builtin_apply (rtx, rtx, rtx);
103 static void expand_builtin_return (rtx);
104 static enum type_class type_to_class (tree);
105 static rtx expand_builtin_classify_type (tree);
106 static void expand_errno_check (tree, rtx);
107 static rtx expand_builtin_mathfn (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
109 static rtx expand_builtin_constant_p (tree, enum machine_mode);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (tree);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_strcat (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode, int);
125 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_bcopy (tree);
127 static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
131 static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_alloca (tree, rtx);
141 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
142 static rtx expand_builtin_frame_address (tree, tree);
143 static rtx expand_builtin_fputs (tree, int, int);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_classify_type (tree);
149 static tree fold_builtin_inf (tree, int);
150 static tree fold_builtin_nan (tree, tree, int);
151 static int validate_arglist (tree, ...);
152 static bool integer_valued_real_p (tree);
153 static tree fold_trunc_transparent_mathfn (tree);
154 static bool readonly_data_expr (tree);
155 static rtx expand_builtin_fabs (tree, rtx, rtx);
156 static rtx expand_builtin_cabs (tree, rtx);
157 static void init_builtin_dconsts (void);
158 static tree fold_builtin_cabs (tree, tree, tree);
159 static tree fold_builtin_trunc (tree);
160 static tree fold_builtin_floor (tree);
161 static tree fold_builtin_ceil (tree);
163 /* Initialize mathematical constants for constant folding builtins.
164 These constants need to be given to at least 160 bits precision. */
166 static void
167 init_builtin_dconsts (void)
169 real_from_string (&dconstpi,
170 "3.1415926535897932384626433832795028841971693993751058209749445923078");
171 real_from_string (&dconste,
172 "2.7182818284590452353602874713526624977572470936999595749669676277241");
174 builtin_dconsts_init = true;
177 /* Return the alignment in bits of EXP, a pointer valued expression.
178 But don't return more than MAX_ALIGN no matter what.
179 The alignment returned is, by default, the alignment of the thing that
180 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
182 Otherwise, look at the expression to see if we can do better, i.e., if the
183 expression is actually pointing at an object whose alignment is tighter. */
185 static int
186 get_pointer_alignment (tree exp, unsigned int max_align)
188 unsigned int align, inner;
190 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
191 return 0;
193 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
194 align = MIN (align, max_align);
196 while (1)
198 switch (TREE_CODE (exp))
200 case NOP_EXPR:
201 case CONVERT_EXPR:
202 case NON_LVALUE_EXPR:
203 exp = TREE_OPERAND (exp, 0);
204 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
205 return align;
207 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
208 align = MIN (inner, max_align);
209 break;
211 case PLUS_EXPR:
212 /* If sum of pointer + int, restrict our maximum alignment to that
213 imposed by the integer. If not, we can't do any better than
214 ALIGN. */
215 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
216 return align;
218 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
219 & (max_align / BITS_PER_UNIT - 1))
220 != 0)
221 max_align >>= 1;
223 exp = TREE_OPERAND (exp, 0);
224 break;
226 case ADDR_EXPR:
227 /* See what we are pointing at and look at its alignment. */
228 exp = TREE_OPERAND (exp, 0);
229 if (TREE_CODE (exp) == FUNCTION_DECL)
230 align = FUNCTION_BOUNDARY;
231 else if (DECL_P (exp))
232 align = DECL_ALIGN (exp);
233 #ifdef CONSTANT_ALIGNMENT
234 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
235 align = CONSTANT_ALIGNMENT (exp, align);
236 #endif
237 return MIN (align, max_align);
239 default:
240 return align;
245 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
246 way, because it could contain a zero byte in the middle.
247 TREE_STRING_LENGTH is the size of the character array, not the string.
249 ONLY_VALUE should be nonzero if the result is not going to be emitted
250 into the instruction stream and zero if it is going to be expanded.
251 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
252 is returned, otherwise NULL, since
253 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
254 evaluate the side-effects.
256 The value returned is of type `ssizetype'.
258 Unfortunately, string_constant can't access the values of const char
259 arrays with initializers, so neither can we do so here. */
261 static tree
262 c_strlen (tree src, int only_value)
264 tree offset_node;
265 HOST_WIDE_INT offset;
266 int max;
267 const char *ptr;
269 STRIP_NOPS (src);
270 if (TREE_CODE (src) == COND_EXPR
271 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
273 tree len1, len2;
275 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
276 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
277 if (tree_int_cst_equal (len1, len2))
278 return len1;
281 if (TREE_CODE (src) == COMPOUND_EXPR
282 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
283 return c_strlen (TREE_OPERAND (src, 1), only_value);
285 src = string_constant (src, &offset_node);
286 if (src == 0)
287 return 0;
289 max = TREE_STRING_LENGTH (src) - 1;
290 ptr = TREE_STRING_POINTER (src);
292 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
294 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
295 compute the offset to the following null if we don't know where to
296 start searching for it. */
297 int i;
299 for (i = 0; i < max; i++)
300 if (ptr[i] == 0)
301 return 0;
303 /* We don't know the starting offset, but we do know that the string
304 has no internal zero bytes. We can assume that the offset falls
305 within the bounds of the string; otherwise, the programmer deserves
306 what he gets. Subtract the offset from the length of the string,
307 and return that. This would perhaps not be valid if we were dealing
308 with named arrays in addition to literal string constants. */
310 return size_diffop (size_int (max), offset_node);
313 /* We have a known offset into the string. Start searching there for
314 a null character if we can represent it as a single HOST_WIDE_INT. */
315 if (offset_node == 0)
316 offset = 0;
317 else if (! host_integerp (offset_node, 0))
318 offset = -1;
319 else
320 offset = tree_low_cst (offset_node, 0);
322 /* If the offset is known to be out of bounds, warn, and call strlen at
323 runtime. */
324 if (offset < 0 || offset > max)
326 warning ("offset outside bounds of constant string");
327 return 0;
330 /* Use strlen to search for the first zero byte. Since any strings
331 constructed with build_string will have nulls appended, we win even
332 if we get handed something like (char[4])"abcd".
334 Since OFFSET is our starting index into the string, no further
335 calculation is needed. */
336 return ssize_int (strlen (ptr + offset));
339 /* Return a char pointer for a C string if it is a string constant
340 or sum of string constant and integer constant. */
342 static const char *
343 c_getstr (tree src)
345 tree offset_node;
347 src = string_constant (src, &offset_node);
348 if (src == 0)
349 return 0;
351 if (offset_node == 0)
352 return TREE_STRING_POINTER (src);
353 else if (!host_integerp (offset_node, 1)
354 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
355 return 0;
357 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
360 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
361 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
363 static rtx
364 c_readstr (const char *str, enum machine_mode mode)
366 HOST_WIDE_INT c[2];
367 HOST_WIDE_INT ch;
368 unsigned int i, j;
370 if (GET_MODE_CLASS (mode) != MODE_INT)
371 abort ();
372 c[0] = 0;
373 c[1] = 0;
374 ch = 1;
375 for (i = 0; i < GET_MODE_SIZE (mode); i++)
377 j = i;
378 if (WORDS_BIG_ENDIAN)
379 j = GET_MODE_SIZE (mode) - i - 1;
380 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
381 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
382 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
383 j *= BITS_PER_UNIT;
384 if (j > 2 * HOST_BITS_PER_WIDE_INT)
385 abort ();
386 if (ch)
387 ch = (unsigned char) str[i];
388 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
390 return immed_double_const (c[0], c[1], mode);
393 /* Cast a target constant CST to target CHAR and if that value fits into
394 host char type, return zero and put that value into variable pointed by
395 P. */
397 static int
398 target_char_cast (tree cst, char *p)
400 unsigned HOST_WIDE_INT val, hostval;
402 if (!host_integerp (cst, 1)
403 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
404 return 1;
406 val = tree_low_cst (cst, 1);
407 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
408 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
410 hostval = val;
411 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
412 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
414 if (val != hostval)
415 return 1;
417 *p = hostval;
418 return 0;
421 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
422 times to get the address of either a higher stack frame, or a return
423 address located within it (depending on FNDECL_CODE). */
426 expand_builtin_return_addr (enum built_in_function fndecl_code, int count,
427 rtx tem)
429 int i;
431 /* Some machines need special handling before we can access
432 arbitrary frames. For example, on the sparc, we must first flush
433 all register windows to the stack. */
434 #ifdef SETUP_FRAME_ADDRESSES
435 if (count > 0)
436 SETUP_FRAME_ADDRESSES ();
437 #endif
439 /* On the sparc, the return address is not in the frame, it is in a
440 register. There is no way to access it off of the current frame
441 pointer, but it can be accessed off the previous frame pointer by
442 reading the value from the register window save area. */
443 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
444 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
445 count--;
446 #endif
448 /* Scan back COUNT frames to the specified frame. */
449 for (i = 0; i < count; i++)
451 /* Assume the dynamic chain pointer is in the word that the
452 frame address points to, unless otherwise specified. */
453 #ifdef DYNAMIC_CHAIN_ADDRESS
454 tem = DYNAMIC_CHAIN_ADDRESS (tem);
455 #endif
456 tem = memory_address (Pmode, tem);
457 tem = gen_rtx_MEM (Pmode, tem);
458 set_mem_alias_set (tem, get_frame_alias_set ());
459 tem = copy_to_reg (tem);
462 /* For __builtin_frame_address, return what we've got. */
463 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
464 return tem;
466 /* For __builtin_return_address, Get the return address from that
467 frame. */
468 #ifdef RETURN_ADDR_RTX
469 tem = RETURN_ADDR_RTX (count, tem);
470 #else
471 tem = memory_address (Pmode,
472 plus_constant (tem, GET_MODE_SIZE (Pmode)));
473 tem = gen_rtx_MEM (Pmode, tem);
474 set_mem_alias_set (tem, get_frame_alias_set ());
475 #endif
476 return tem;
479 /* Alias set used for setjmp buffer. */
480 static HOST_WIDE_INT setjmp_alias_set = -1;
482 /* Construct the leading half of a __builtin_setjmp call. Control will
483 return to RECEIVER_LABEL. This is used directly by sjlj exception
484 handling code. */
486 void
487 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
489 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
490 rtx stack_save;
491 rtx mem;
493 if (setjmp_alias_set == -1)
494 setjmp_alias_set = new_alias_set ();
496 #ifdef POINTERS_EXTEND_UNSIGNED
497 if (GET_MODE (buf_addr) != Pmode)
498 buf_addr = convert_memory_address (Pmode, buf_addr);
499 #endif
501 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
503 emit_queue ();
505 /* We store the frame pointer and the address of receiver_label in
506 the buffer and use the rest of it for the stack save area, which
507 is machine-dependent. */
509 #ifndef BUILTIN_SETJMP_FRAME_VALUE
510 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
511 #endif
513 mem = gen_rtx_MEM (Pmode, buf_addr);
514 set_mem_alias_set (mem, setjmp_alias_set);
515 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
517 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
518 set_mem_alias_set (mem, setjmp_alias_set);
520 emit_move_insn (validize_mem (mem),
521 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
523 stack_save = gen_rtx_MEM (sa_mode,
524 plus_constant (buf_addr,
525 2 * GET_MODE_SIZE (Pmode)));
526 set_mem_alias_set (stack_save, setjmp_alias_set);
527 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
529 /* If there is further processing to do, do it. */
530 #ifdef HAVE_builtin_setjmp_setup
531 if (HAVE_builtin_setjmp_setup)
532 emit_insn (gen_builtin_setjmp_setup (buf_addr));
533 #endif
535 /* Tell optimize_save_area_alloca that extra work is going to
536 need to go on during alloca. */
537 current_function_calls_setjmp = 1;
539 /* Set this so all the registers get saved in our frame; we need to be
540 able to copy the saved values for any registers from frames we unwind. */
541 current_function_has_nonlocal_label = 1;
544 /* Construct the trailing part of a __builtin_setjmp call.
545 This is used directly by sjlj exception handling code. */
547 void
548 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
550 /* Clobber the FP when we get here, so we have to make sure it's
551 marked as used by this function. */
552 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
554 /* Mark the static chain as clobbered here so life information
555 doesn't get messed up for it. */
556 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
558 /* Now put in the code to restore the frame pointer, and argument
559 pointer, if needed. The code below is from expand_end_bindings
560 in stmt.c; see detailed documentation there. */
561 #ifdef HAVE_nonlocal_goto
562 if (! HAVE_nonlocal_goto)
563 #endif
564 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
566 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
567 if (fixed_regs[ARG_POINTER_REGNUM])
569 #ifdef ELIMINABLE_REGS
570 size_t i;
571 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
573 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
574 if (elim_regs[i].from == ARG_POINTER_REGNUM
575 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
576 break;
578 if (i == ARRAY_SIZE (elim_regs))
579 #endif
581 /* Now restore our arg pointer from the address at which it
582 was saved in our stack frame. */
583 emit_move_insn (virtual_incoming_args_rtx,
584 copy_to_reg (get_arg_pointer_save_area (cfun)));
587 #endif
589 #ifdef HAVE_builtin_setjmp_receiver
590 if (HAVE_builtin_setjmp_receiver)
591 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
592 else
593 #endif
594 #ifdef HAVE_nonlocal_goto_receiver
595 if (HAVE_nonlocal_goto_receiver)
596 emit_insn (gen_nonlocal_goto_receiver ());
597 else
598 #endif
599 { /* Nothing */ }
601 /* @@@ This is a kludge. Not all machine descriptions define a blockage
602 insn, but we must not allow the code we just generated to be reordered
603 by scheduling. Specifically, the update of the frame pointer must
604 happen immediately, not later. So emit an ASM_INPUT to act as blockage
605 insn. */
606 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
609 /* __builtin_setjmp is passed a pointer to an array of five words (not
610 all will be used on all machines). It operates similarly to the C
611 library function of the same name, but is more efficient. Much of
612 the code below (and for longjmp) is copied from the handling of
613 non-local gotos.
615 NOTE: This is intended for use by GNAT and the exception handling
616 scheme in the compiler and will only work in the method used by
617 them. */
619 static rtx
620 expand_builtin_setjmp (tree arglist, rtx target)
622 rtx buf_addr, next_lab, cont_lab;
624 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
625 return NULL_RTX;
627 if (target == 0 || GET_CODE (target) != REG
628 || REGNO (target) < FIRST_PSEUDO_REGISTER)
629 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
631 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
633 next_lab = gen_label_rtx ();
634 cont_lab = gen_label_rtx ();
636 expand_builtin_setjmp_setup (buf_addr, next_lab);
638 /* Set TARGET to zero and branch to the continue label. */
639 emit_move_insn (target, const0_rtx);
640 emit_jump_insn (gen_jump (cont_lab));
641 emit_barrier ();
642 emit_label (next_lab);
644 expand_builtin_setjmp_receiver (next_lab);
646 /* Set TARGET to one. */
647 emit_move_insn (target, const1_rtx);
648 emit_label (cont_lab);
650 /* Tell flow about the strange goings on. Putting `next_lab' on
651 `nonlocal_goto_handler_labels' to indicates that function
652 calls may traverse the arc back to this label. */
654 current_function_has_nonlocal_label = 1;
655 nonlocal_goto_handler_labels
656 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
658 return target;
661 /* __builtin_longjmp is passed a pointer to an array of five words (not
662 all will be used on all machines). It operates similarly to the C
663 library function of the same name, but is more efficient. Much of
664 the code below is copied from the handling of non-local gotos.
666 NOTE: This is intended for use by GNAT and the exception handling
667 scheme in the compiler and will only work in the method used by
668 them. */
670 void
671 expand_builtin_longjmp (rtx buf_addr, rtx value)
673 rtx fp, lab, stack, insn, last;
674 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
676 if (setjmp_alias_set == -1)
677 setjmp_alias_set = new_alias_set ();
679 #ifdef POINTERS_EXTEND_UNSIGNED
680 if (GET_MODE (buf_addr) != Pmode)
681 buf_addr = convert_memory_address (Pmode, buf_addr);
682 #endif
684 buf_addr = force_reg (Pmode, buf_addr);
686 /* We used to store value in static_chain_rtx, but that fails if pointers
687 are smaller than integers. We instead require that the user must pass
688 a second argument of 1, because that is what builtin_setjmp will
689 return. This also makes EH slightly more efficient, since we are no
690 longer copying around a value that we don't care about. */
691 if (value != const1_rtx)
692 abort ();
694 current_function_calls_longjmp = 1;
696 last = get_last_insn ();
697 #ifdef HAVE_builtin_longjmp
698 if (HAVE_builtin_longjmp)
699 emit_insn (gen_builtin_longjmp (buf_addr));
700 else
701 #endif
703 fp = gen_rtx_MEM (Pmode, buf_addr);
704 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
705 GET_MODE_SIZE (Pmode)));
707 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
708 2 * GET_MODE_SIZE (Pmode)));
709 set_mem_alias_set (fp, setjmp_alias_set);
710 set_mem_alias_set (lab, setjmp_alias_set);
711 set_mem_alias_set (stack, setjmp_alias_set);
713 /* Pick up FP, label, and SP from the block and jump. This code is
714 from expand_goto in stmt.c; see there for detailed comments. */
715 #if HAVE_nonlocal_goto
716 if (HAVE_nonlocal_goto)
717 /* We have to pass a value to the nonlocal_goto pattern that will
718 get copied into the static_chain pointer, but it does not matter
719 what that value is, because builtin_setjmp does not use it. */
720 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
721 else
722 #endif
724 lab = copy_to_reg (lab);
726 emit_move_insn (hard_frame_pointer_rtx, fp);
727 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
729 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
730 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
731 emit_indirect_jump (lab);
735 /* Search backwards and mark the jump insn as a non-local goto.
736 Note that this precludes the use of __builtin_longjmp to a
737 __builtin_setjmp target in the same function. However, we've
738 already cautioned the user that these functions are for
739 internal exception handling use only. */
740 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
742 if (insn == last)
743 abort ();
744 if (GET_CODE (insn) == JUMP_INSN)
746 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
747 REG_NOTES (insn));
748 break;
750 else if (GET_CODE (insn) == CALL_INSN)
751 break;
755 /* Expand a call to __builtin_prefetch. For a target that does not support
756 data prefetch, evaluate the memory address argument in case it has side
757 effects. */
759 static void
760 expand_builtin_prefetch (tree arglist)
762 tree arg0, arg1, arg2;
763 rtx op0, op1, op2;
765 if (!validate_arglist (arglist, POINTER_TYPE, 0))
766 return;
768 arg0 = TREE_VALUE (arglist);
769 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
770 zero (read) and argument 2 (locality) defaults to 3 (high degree of
771 locality). */
772 if (TREE_CHAIN (arglist))
774 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
775 if (TREE_CHAIN (TREE_CHAIN (arglist)))
776 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
777 else
778 arg2 = build_int_2 (3, 0);
780 else
782 arg1 = integer_zero_node;
783 arg2 = build_int_2 (3, 0);
786 /* Argument 0 is an address. */
787 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
789 /* Argument 1 (read/write flag) must be a compile-time constant int. */
790 if (TREE_CODE (arg1) != INTEGER_CST)
792 error ("second arg to `__builtin_prefetch' must be a constant");
793 arg1 = integer_zero_node;
795 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
796 /* Argument 1 must be either zero or one. */
797 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
799 warning ("invalid second arg to __builtin_prefetch; using zero");
800 op1 = const0_rtx;
803 /* Argument 2 (locality) must be a compile-time constant int. */
804 if (TREE_CODE (arg2) != INTEGER_CST)
806 error ("third arg to `__builtin_prefetch' must be a constant");
807 arg2 = integer_zero_node;
809 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
810 /* Argument 2 must be 0, 1, 2, or 3. */
811 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
813 warning ("invalid third arg to __builtin_prefetch; using zero");
814 op2 = const0_rtx;
817 #ifdef HAVE_prefetch
818 if (HAVE_prefetch)
820 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
821 (op0,
822 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
823 || (GET_MODE(op0) != Pmode))
825 #ifdef POINTERS_EXTEND_UNSIGNED
826 if (GET_MODE(op0) != Pmode)
827 op0 = convert_memory_address (Pmode, op0);
828 #endif
829 op0 = force_reg (Pmode, op0);
831 emit_insn (gen_prefetch (op0, op1, op2));
833 else
834 #endif
835 op0 = protect_from_queue (op0, 0);
836 /* Don't do anything with direct references to volatile memory, but
837 generate code to handle other side effects. */
838 if (GET_CODE (op0) != MEM && side_effects_p (op0))
839 emit_insn (op0);
842 /* Get a MEM rtx for expression EXP which is the address of an operand
843 to be used to be used in a string instruction (cmpstrsi, movstrsi, ..). */
845 static rtx
846 get_memory_rtx (tree exp)
848 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
849 rtx mem;
851 #ifdef POINTERS_EXTEND_UNSIGNED
852 if (GET_MODE (addr) != Pmode)
853 addr = convert_memory_address (Pmode, addr);
854 #endif
856 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
858 /* Get an expression we can use to find the attributes to assign to MEM.
859 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
860 we can. First remove any nops. */
861 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
862 || TREE_CODE (exp) == NON_LVALUE_EXPR)
863 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
864 exp = TREE_OPERAND (exp, 0);
866 if (TREE_CODE (exp) == ADDR_EXPR)
868 exp = TREE_OPERAND (exp, 0);
869 set_mem_attributes (mem, exp, 0);
871 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
873 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
874 /* memcpy, memset and other builtin stringops can alias with anything. */
875 set_mem_alias_set (mem, 0);
878 return mem;
881 /* Built-in functions to perform an untyped call and return. */
883 /* For each register that may be used for calling a function, this
884 gives a mode used to copy the register's value. VOIDmode indicates
885 the register is not used for calling a function. If the machine
886 has register windows, this gives only the outbound registers.
887 INCOMING_REGNO gives the corresponding inbound register. */
888 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
890 /* For each register that may be used for returning values, this gives
891 a mode used to copy the register's value. VOIDmode indicates the
892 register is not used for returning values. If the machine has
893 register windows, this gives only the outbound registers.
894 INCOMING_REGNO gives the corresponding inbound register. */
895 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
897 /* For each register that may be used for calling a function, this
898 gives the offset of that register into the block returned by
899 __builtin_apply_args. 0 indicates that the register is not
900 used for calling a function. */
901 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
903 /* Return the offset of register REGNO into the block returned by
904 __builtin_apply_args. This is not declared static, since it is
905 needed in objc-act.c. */
908 apply_args_register_offset (int regno)
910 apply_args_size ();
912 /* Arguments are always put in outgoing registers (in the argument
913 block) if such make sense. */
914 #ifdef OUTGOING_REGNO
915 regno = OUTGOING_REGNO (regno);
916 #endif
917 return apply_args_reg_offset[regno];
920 /* Return the size required for the block returned by __builtin_apply_args,
921 and initialize apply_args_mode. */
923 static int
924 apply_args_size (void)
926 static int size = -1;
927 int align;
928 unsigned int regno;
929 enum machine_mode mode;
931 /* The values computed by this function never change. */
932 if (size < 0)
934 /* The first value is the incoming arg-pointer. */
935 size = GET_MODE_SIZE (Pmode);
937 /* The second value is the structure value address unless this is
938 passed as an "invisible" first argument. */
939 if (struct_value_rtx)
940 size += GET_MODE_SIZE (Pmode);
942 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
943 if (FUNCTION_ARG_REGNO_P (regno))
945 /* Search for the proper mode for copying this register's
946 value. I'm not sure this is right, but it works so far. */
947 enum machine_mode best_mode = VOIDmode;
949 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
950 mode != VOIDmode;
951 mode = GET_MODE_WIDER_MODE (mode))
952 if (HARD_REGNO_MODE_OK (regno, mode)
953 && HARD_REGNO_NREGS (regno, mode) == 1)
954 best_mode = mode;
956 if (best_mode == VOIDmode)
957 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
958 mode != VOIDmode;
959 mode = GET_MODE_WIDER_MODE (mode))
960 if (HARD_REGNO_MODE_OK (regno, mode)
961 && have_insn_for (SET, mode))
962 best_mode = mode;
964 if (best_mode == VOIDmode)
965 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
966 mode != VOIDmode;
967 mode = GET_MODE_WIDER_MODE (mode))
968 if (HARD_REGNO_MODE_OK (regno, mode)
969 && have_insn_for (SET, mode))
970 best_mode = mode;
972 if (best_mode == VOIDmode)
973 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
974 mode != VOIDmode;
975 mode = GET_MODE_WIDER_MODE (mode))
976 if (HARD_REGNO_MODE_OK (regno, mode)
977 && have_insn_for (SET, mode))
978 best_mode = mode;
980 mode = best_mode;
981 if (mode == VOIDmode)
982 abort ();
984 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
985 if (size % align != 0)
986 size = CEIL (size, align) * align;
987 apply_args_reg_offset[regno] = size;
988 size += GET_MODE_SIZE (mode);
989 apply_args_mode[regno] = mode;
991 else
993 apply_args_mode[regno] = VOIDmode;
994 apply_args_reg_offset[regno] = 0;
997 return size;
1000 /* Return the size required for the block returned by __builtin_apply,
1001 and initialize apply_result_mode. */
1003 static int
1004 apply_result_size (void)
1006 static int size = -1;
1007 int align, regno;
1008 enum machine_mode mode;
1010 /* The values computed by this function never change. */
1011 if (size < 0)
1013 size = 0;
1015 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1016 if (FUNCTION_VALUE_REGNO_P (regno))
1018 /* Search for the proper mode for copying this register's
1019 value. I'm not sure this is right, but it works so far. */
1020 enum machine_mode best_mode = VOIDmode;
1022 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1023 mode != TImode;
1024 mode = GET_MODE_WIDER_MODE (mode))
1025 if (HARD_REGNO_MODE_OK (regno, mode))
1026 best_mode = mode;
1028 if (best_mode == VOIDmode)
1029 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1030 mode != VOIDmode;
1031 mode = GET_MODE_WIDER_MODE (mode))
1032 if (HARD_REGNO_MODE_OK (regno, mode)
1033 && have_insn_for (SET, mode))
1034 best_mode = mode;
1036 if (best_mode == VOIDmode)
1037 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1038 mode != VOIDmode;
1039 mode = GET_MODE_WIDER_MODE (mode))
1040 if (HARD_REGNO_MODE_OK (regno, mode)
1041 && have_insn_for (SET, mode))
1042 best_mode = mode;
1044 if (best_mode == VOIDmode)
1045 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1046 mode != VOIDmode;
1047 mode = GET_MODE_WIDER_MODE (mode))
1048 if (HARD_REGNO_MODE_OK (regno, mode)
1049 && have_insn_for (SET, mode))
1050 best_mode = mode;
1052 mode = best_mode;
1053 if (mode == VOIDmode)
1054 abort ();
1056 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1057 if (size % align != 0)
1058 size = CEIL (size, align) * align;
1059 size += GET_MODE_SIZE (mode);
1060 apply_result_mode[regno] = mode;
1062 else
1063 apply_result_mode[regno] = VOIDmode;
1065 /* Allow targets that use untyped_call and untyped_return to override
1066 the size so that machine-specific information can be stored here. */
1067 #ifdef APPLY_RESULT_SIZE
1068 size = APPLY_RESULT_SIZE;
1069 #endif
1071 return size;
1074 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1075 /* Create a vector describing the result block RESULT. If SAVEP is true,
1076 the result block is used to save the values; otherwise it is used to
1077 restore the values. */
1079 static rtx
1080 result_vector (int savep, rtx result)
1082 int regno, size, align, nelts;
1083 enum machine_mode mode;
1084 rtx reg, mem;
1085 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1087 size = nelts = 0;
1088 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1089 if ((mode = apply_result_mode[regno]) != VOIDmode)
1091 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1092 if (size % align != 0)
1093 size = CEIL (size, align) * align;
1094 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1095 mem = adjust_address (result, mode, size);
1096 savevec[nelts++] = (savep
1097 ? gen_rtx_SET (VOIDmode, mem, reg)
1098 : gen_rtx_SET (VOIDmode, reg, mem));
1099 size += GET_MODE_SIZE (mode);
1101 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1103 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1105 /* Save the state required to perform an untyped call with the same
1106 arguments as were passed to the current function. */
1108 static rtx
1109 expand_builtin_apply_args_1 (void)
1111 rtx registers;
1112 int size, align, regno;
1113 enum machine_mode mode;
1115 /* Create a block where the arg-pointer, structure value address,
1116 and argument registers can be saved. */
1117 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1119 /* Walk past the arg-pointer and structure value address. */
1120 size = GET_MODE_SIZE (Pmode);
1121 if (struct_value_rtx)
1122 size += GET_MODE_SIZE (Pmode);
1124 /* Save each register used in calling a function to the block. */
1125 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1126 if ((mode = apply_args_mode[regno]) != VOIDmode)
1128 rtx tem;
1130 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1131 if (size % align != 0)
1132 size = CEIL (size, align) * align;
1134 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1136 emit_move_insn (adjust_address (registers, mode, size), tem);
1137 size += GET_MODE_SIZE (mode);
1140 /* Save the arg pointer to the block. */
1141 emit_move_insn (adjust_address (registers, Pmode, 0),
1142 copy_to_reg (virtual_incoming_args_rtx));
1143 size = GET_MODE_SIZE (Pmode);
1145 /* Save the structure value address unless this is passed as an
1146 "invisible" first argument. */
1147 if (struct_value_incoming_rtx)
1149 emit_move_insn (adjust_address (registers, Pmode, size),
1150 copy_to_reg (struct_value_incoming_rtx));
1151 size += GET_MODE_SIZE (Pmode);
1154 /* Return the address of the block. */
1155 return copy_addr_to_reg (XEXP (registers, 0));
1158 /* __builtin_apply_args returns block of memory allocated on
1159 the stack into which is stored the arg pointer, structure
1160 value address, static chain, and all the registers that might
1161 possibly be used in performing a function call. The code is
1162 moved to the start of the function so the incoming values are
1163 saved. */
1165 static rtx
1166 expand_builtin_apply_args (void)
1168 /* Don't do __builtin_apply_args more than once in a function.
1169 Save the result of the first call and reuse it. */
1170 if (apply_args_value != 0)
1171 return apply_args_value;
1173 /* When this function is called, it means that registers must be
1174 saved on entry to this function. So we migrate the
1175 call to the first insn of this function. */
1176 rtx temp;
1177 rtx seq;
1179 start_sequence ();
1180 temp = expand_builtin_apply_args_1 ();
1181 seq = get_insns ();
1182 end_sequence ();
1184 apply_args_value = temp;
1186 /* Put the insns after the NOTE that starts the function.
1187 If this is inside a start_sequence, make the outer-level insn
1188 chain current, so the code is placed at the start of the
1189 function. */
1190 push_topmost_sequence ();
1191 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1192 pop_topmost_sequence ();
1193 return temp;
1197 /* Perform an untyped call and save the state required to perform an
1198 untyped return of whatever value was returned by the given function. */
1200 static rtx
1201 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1203 int size, align, regno;
1204 enum machine_mode mode;
1205 rtx incoming_args, result, reg, dest, src, call_insn;
1206 rtx old_stack_level = 0;
1207 rtx call_fusage = 0;
1209 #ifdef POINTERS_EXTEND_UNSIGNED
1210 if (GET_MODE (arguments) != Pmode)
1211 arguments = convert_memory_address (Pmode, arguments);
1212 #endif
1214 /* Create a block where the return registers can be saved. */
1215 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1217 /* Fetch the arg pointer from the ARGUMENTS block. */
1218 incoming_args = gen_reg_rtx (Pmode);
1219 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1220 #ifndef STACK_GROWS_DOWNWARD
1221 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1222 incoming_args, 0, OPTAB_LIB_WIDEN);
1223 #endif
1225 /* Perform postincrements before actually calling the function. */
1226 emit_queue ();
1228 /* Push a new argument block and copy the arguments. Do not allow
1229 the (potential) memcpy call below to interfere with our stack
1230 manipulations. */
1231 do_pending_stack_adjust ();
1232 NO_DEFER_POP;
1234 /* Save the stack with nonlocal if available. */
1235 #ifdef HAVE_save_stack_nonlocal
1236 if (HAVE_save_stack_nonlocal)
1237 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1238 else
1239 #endif
1240 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1242 /* Push a block of memory onto the stack to store the memory arguments.
1243 Save the address in a register, and copy the memory arguments. ??? I
1244 haven't figured out how the calling convention macros effect this,
1245 but it's likely that the source and/or destination addresses in
1246 the block copy will need updating in machine specific ways. */
1247 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1248 dest = gen_rtx_MEM (BLKmode, dest);
1249 set_mem_align (dest, PARM_BOUNDARY);
1250 src = gen_rtx_MEM (BLKmode, incoming_args);
1251 set_mem_align (src, PARM_BOUNDARY);
1252 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1254 /* Refer to the argument block. */
1255 apply_args_size ();
1256 arguments = gen_rtx_MEM (BLKmode, arguments);
1257 set_mem_align (arguments, PARM_BOUNDARY);
1259 /* Walk past the arg-pointer and structure value address. */
1260 size = GET_MODE_SIZE (Pmode);
1261 if (struct_value_rtx)
1262 size += GET_MODE_SIZE (Pmode);
1264 /* Restore each of the registers previously saved. Make USE insns
1265 for each of these registers for use in making the call. */
1266 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1267 if ((mode = apply_args_mode[regno]) != VOIDmode)
1269 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1270 if (size % align != 0)
1271 size = CEIL (size, align) * align;
1272 reg = gen_rtx_REG (mode, regno);
1273 emit_move_insn (reg, adjust_address (arguments, mode, size));
1274 use_reg (&call_fusage, reg);
1275 size += GET_MODE_SIZE (mode);
1278 /* Restore the structure value address unless this is passed as an
1279 "invisible" first argument. */
1280 size = GET_MODE_SIZE (Pmode);
1281 if (struct_value_rtx)
1283 rtx value = gen_reg_rtx (Pmode);
1284 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1285 emit_move_insn (struct_value_rtx, value);
1286 if (GET_CODE (struct_value_rtx) == REG)
1287 use_reg (&call_fusage, struct_value_rtx);
1288 size += GET_MODE_SIZE (Pmode);
1291 /* All arguments and registers used for the call are set up by now! */
1292 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1294 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1295 and we don't want to load it into a register as an optimization,
1296 because prepare_call_address already did it if it should be done. */
1297 if (GET_CODE (function) != SYMBOL_REF)
1298 function = memory_address (FUNCTION_MODE, function);
1300 /* Generate the actual call instruction and save the return value. */
1301 #ifdef HAVE_untyped_call
1302 if (HAVE_untyped_call)
1303 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1304 result, result_vector (1, result)));
1305 else
1306 #endif
1307 #ifdef HAVE_call_value
1308 if (HAVE_call_value)
1310 rtx valreg = 0;
1312 /* Locate the unique return register. It is not possible to
1313 express a call that sets more than one return register using
1314 call_value; use untyped_call for that. In fact, untyped_call
1315 only needs to save the return registers in the given block. */
1316 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1317 if ((mode = apply_result_mode[regno]) != VOIDmode)
1319 if (valreg)
1320 abort (); /* HAVE_untyped_call required. */
1321 valreg = gen_rtx_REG (mode, regno);
1324 emit_call_insn (GEN_CALL_VALUE (valreg,
1325 gen_rtx_MEM (FUNCTION_MODE, function),
1326 const0_rtx, NULL_RTX, const0_rtx));
1328 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1330 else
1331 #endif
1332 abort ();
1334 /* Find the CALL insn we just emitted, and attach the register usage
1335 information. */
1336 call_insn = last_call_insn ();
1337 add_function_usage_to (call_insn, call_fusage);
1339 /* Restore the stack. */
1340 #ifdef HAVE_save_stack_nonlocal
1341 if (HAVE_save_stack_nonlocal)
1342 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1343 else
1344 #endif
1345 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1347 OK_DEFER_POP;
1349 /* Return the address of the result block. */
1350 return copy_addr_to_reg (XEXP (result, 0));
1353 /* Perform an untyped return. */
1355 static void
1356 expand_builtin_return (rtx result)
1358 int size, align, regno;
1359 enum machine_mode mode;
1360 rtx reg;
1361 rtx call_fusage = 0;
1363 #ifdef POINTERS_EXTEND_UNSIGNED
1364 if (GET_MODE (result) != Pmode)
1365 result = convert_memory_address (Pmode, result);
1366 #endif
1368 apply_result_size ();
1369 result = gen_rtx_MEM (BLKmode, result);
1371 #ifdef HAVE_untyped_return
1372 if (HAVE_untyped_return)
1374 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1375 emit_barrier ();
1376 return;
1378 #endif
1380 /* Restore the return value and note that each value is used. */
1381 size = 0;
1382 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1383 if ((mode = apply_result_mode[regno]) != VOIDmode)
1385 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1386 if (size % align != 0)
1387 size = CEIL (size, align) * align;
1388 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1389 emit_move_insn (reg, adjust_address (result, mode, size));
1391 push_to_sequence (call_fusage);
1392 emit_insn (gen_rtx_USE (VOIDmode, reg));
1393 call_fusage = get_insns ();
1394 end_sequence ();
1395 size += GET_MODE_SIZE (mode);
1398 /* Put the USE insns before the return. */
1399 emit_insn (call_fusage);
1401 /* Return whatever values was restored by jumping directly to the end
1402 of the function. */
1403 expand_null_return ();
1406 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1408 static enum type_class
1409 type_to_class (tree type)
1411 switch (TREE_CODE (type))
1413 case VOID_TYPE: return void_type_class;
1414 case INTEGER_TYPE: return integer_type_class;
1415 case CHAR_TYPE: return char_type_class;
1416 case ENUMERAL_TYPE: return enumeral_type_class;
1417 case BOOLEAN_TYPE: return boolean_type_class;
1418 case POINTER_TYPE: return pointer_type_class;
1419 case REFERENCE_TYPE: return reference_type_class;
1420 case OFFSET_TYPE: return offset_type_class;
1421 case REAL_TYPE: return real_type_class;
1422 case COMPLEX_TYPE: return complex_type_class;
1423 case FUNCTION_TYPE: return function_type_class;
1424 case METHOD_TYPE: return method_type_class;
1425 case RECORD_TYPE: return record_type_class;
1426 case UNION_TYPE:
1427 case QUAL_UNION_TYPE: return union_type_class;
1428 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1429 ? string_type_class : array_type_class);
1430 case SET_TYPE: return set_type_class;
1431 case FILE_TYPE: return file_type_class;
1432 case LANG_TYPE: return lang_type_class;
1433 default: return no_type_class;
1437 /* Expand a call to __builtin_classify_type with arguments found in
1438 ARGLIST. */
1440 static rtx
1441 expand_builtin_classify_type (tree arglist)
1443 if (arglist != 0)
1444 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1445 return GEN_INT (no_type_class);
1448 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1450 static rtx
1451 expand_builtin_constant_p (tree arglist, enum machine_mode target_mode)
1453 rtx tmp;
1455 if (arglist == 0)
1456 return const0_rtx;
1457 arglist = TREE_VALUE (arglist);
1459 /* We have taken care of the easy cases during constant folding. This
1460 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1461 get a chance to see if it can deduce whether ARGLIST is constant. */
1463 current_function_calls_constant_p = 1;
1465 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1466 tmp = gen_rtx_CONSTANT_P_RTX (target_mode, tmp);
1467 return tmp;
1470 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1471 if available. */
1472 tree
1473 mathfn_built_in (tree type, enum built_in_function fn)
1475 enum built_in_function fcode = NOT_BUILT_IN;
1476 if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
1477 switch (fn)
1479 case BUILT_IN_SQRT:
1480 case BUILT_IN_SQRTF:
1481 case BUILT_IN_SQRTL:
1482 fcode = BUILT_IN_SQRT;
1483 break;
1484 case BUILT_IN_SIN:
1485 case BUILT_IN_SINF:
1486 case BUILT_IN_SINL:
1487 fcode = BUILT_IN_SIN;
1488 break;
1489 case BUILT_IN_COS:
1490 case BUILT_IN_COSF:
1491 case BUILT_IN_COSL:
1492 fcode = BUILT_IN_COS;
1493 break;
1494 case BUILT_IN_EXP:
1495 case BUILT_IN_EXPF:
1496 case BUILT_IN_EXPL:
1497 fcode = BUILT_IN_EXP;
1498 break;
1499 case BUILT_IN_LOG:
1500 case BUILT_IN_LOGF:
1501 case BUILT_IN_LOGL:
1502 fcode = BUILT_IN_LOG;
1503 break;
1504 case BUILT_IN_TAN:
1505 case BUILT_IN_TANF:
1506 case BUILT_IN_TANL:
1507 fcode = BUILT_IN_TAN;
1508 break;
1509 case BUILT_IN_ATAN:
1510 case BUILT_IN_ATANF:
1511 case BUILT_IN_ATANL:
1512 fcode = BUILT_IN_ATAN;
1513 break;
1514 case BUILT_IN_FLOOR:
1515 case BUILT_IN_FLOORF:
1516 case BUILT_IN_FLOORL:
1517 fcode = BUILT_IN_FLOOR;
1518 break;
1519 case BUILT_IN_CEIL:
1520 case BUILT_IN_CEILF:
1521 case BUILT_IN_CEILL:
1522 fcode = BUILT_IN_CEIL;
1523 break;
1524 case BUILT_IN_TRUNC:
1525 case BUILT_IN_TRUNCF:
1526 case BUILT_IN_TRUNCL:
1527 fcode = BUILT_IN_TRUNC;
1528 break;
1529 case BUILT_IN_ROUND:
1530 case BUILT_IN_ROUNDF:
1531 case BUILT_IN_ROUNDL:
1532 fcode = BUILT_IN_ROUND;
1533 break;
1534 case BUILT_IN_NEARBYINT:
1535 case BUILT_IN_NEARBYINTF:
1536 case BUILT_IN_NEARBYINTL:
1537 fcode = BUILT_IN_NEARBYINT;
1538 break;
1539 default:
1540 abort ();
1542 else if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
1543 switch (fn)
1545 case BUILT_IN_SQRT:
1546 case BUILT_IN_SQRTF:
1547 case BUILT_IN_SQRTL:
1548 fcode = BUILT_IN_SQRTF;
1549 break;
1550 case BUILT_IN_SIN:
1551 case BUILT_IN_SINF:
1552 case BUILT_IN_SINL:
1553 fcode = BUILT_IN_SINF;
1554 break;
1555 case BUILT_IN_COS:
1556 case BUILT_IN_COSF:
1557 case BUILT_IN_COSL:
1558 fcode = BUILT_IN_COSF;
1559 break;
1560 case BUILT_IN_EXP:
1561 case BUILT_IN_EXPF:
1562 case BUILT_IN_EXPL:
1563 fcode = BUILT_IN_EXPF;
1564 break;
1565 case BUILT_IN_LOG:
1566 case BUILT_IN_LOGF:
1567 case BUILT_IN_LOGL:
1568 fcode = BUILT_IN_LOGF;
1569 break;
1570 case BUILT_IN_TAN:
1571 case BUILT_IN_TANF:
1572 case BUILT_IN_TANL:
1573 fcode = BUILT_IN_TANF;
1574 break;
1575 case BUILT_IN_ATAN:
1576 case BUILT_IN_ATANF:
1577 case BUILT_IN_ATANL:
1578 fcode = BUILT_IN_ATANF;
1579 break;
1580 case BUILT_IN_FLOOR:
1581 case BUILT_IN_FLOORF:
1582 case BUILT_IN_FLOORL:
1583 fcode = BUILT_IN_FLOORF;
1584 break;
1585 case BUILT_IN_CEIL:
1586 case BUILT_IN_CEILF:
1587 case BUILT_IN_CEILL:
1588 fcode = BUILT_IN_CEILF;
1589 break;
1590 case BUILT_IN_TRUNC:
1591 case BUILT_IN_TRUNCF:
1592 case BUILT_IN_TRUNCL:
1593 fcode = BUILT_IN_TRUNCF;
1594 break;
1595 case BUILT_IN_ROUND:
1596 case BUILT_IN_ROUNDF:
1597 case BUILT_IN_ROUNDL:
1598 fcode = BUILT_IN_ROUNDF;
1599 break;
1600 case BUILT_IN_NEARBYINT:
1601 case BUILT_IN_NEARBYINTF:
1602 case BUILT_IN_NEARBYINTL:
1603 fcode = BUILT_IN_NEARBYINTF;
1604 break;
1605 default:
1606 abort ();
1608 else if (TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
1609 switch (fn)
1611 case BUILT_IN_SQRT:
1612 case BUILT_IN_SQRTF:
1613 case BUILT_IN_SQRTL:
1614 fcode = BUILT_IN_SQRTL;
1615 break;
1616 case BUILT_IN_SIN:
1617 case BUILT_IN_SINF:
1618 case BUILT_IN_SINL:
1619 fcode = BUILT_IN_SINL;
1620 break;
1621 case BUILT_IN_COS:
1622 case BUILT_IN_COSF:
1623 case BUILT_IN_COSL:
1624 fcode = BUILT_IN_COSL;
1625 break;
1626 case BUILT_IN_EXP:
1627 case BUILT_IN_EXPF:
1628 case BUILT_IN_EXPL:
1629 fcode = BUILT_IN_EXPL;
1630 break;
1631 case BUILT_IN_LOG:
1632 case BUILT_IN_LOGF:
1633 case BUILT_IN_LOGL:
1634 fcode = BUILT_IN_LOGL;
1635 break;
1636 case BUILT_IN_TAN:
1637 case BUILT_IN_TANF:
1638 case BUILT_IN_TANL:
1639 fcode = BUILT_IN_TANL;
1640 break;
1641 case BUILT_IN_ATAN:
1642 case BUILT_IN_ATANF:
1643 case BUILT_IN_ATANL:
1644 fcode = BUILT_IN_ATANL;
1645 break;
1646 case BUILT_IN_FLOOR:
1647 case BUILT_IN_FLOORF:
1648 case BUILT_IN_FLOORL:
1649 fcode = BUILT_IN_FLOORL;
1650 break;
1651 case BUILT_IN_CEIL:
1652 case BUILT_IN_CEILF:
1653 case BUILT_IN_CEILL:
1654 fcode = BUILT_IN_CEILL;
1655 break;
1656 case BUILT_IN_TRUNC:
1657 case BUILT_IN_TRUNCF:
1658 case BUILT_IN_TRUNCL:
1659 fcode = BUILT_IN_TRUNCL;
1660 break;
1661 case BUILT_IN_ROUND:
1662 case BUILT_IN_ROUNDF:
1663 case BUILT_IN_ROUNDL:
1664 fcode = BUILT_IN_ROUNDL;
1665 break;
1666 case BUILT_IN_NEARBYINT:
1667 case BUILT_IN_NEARBYINTF:
1668 case BUILT_IN_NEARBYINTL:
1669 fcode = BUILT_IN_NEARBYINTL;
1670 break;
1671 default:
1672 abort ();
1674 return implicit_built_in_decls[fcode];
1677 /* If errno must be maintained, expand the RTL to check if the result,
1678 TARGET, of a built-in function call, EXP, is NaN, and if so set
1679 errno to EDOM. */
1681 static void
1682 expand_errno_check (tree exp, rtx target)
1684 rtx lab = gen_label_rtx ();
1686 /* Test the result; if it is NaN, set errno=EDOM because
1687 the argument was not in the domain. */
1688 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1689 0, lab);
1691 #ifdef TARGET_EDOM
1692 /* If this built-in doesn't throw an exception, set errno directly. */
1693 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
1695 #ifdef GEN_ERRNO_RTX
1696 rtx errno_rtx = GEN_ERRNO_RTX;
1697 #else
1698 rtx errno_rtx
1699 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1700 #endif
1701 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1702 emit_label (lab);
1703 return;
1705 #endif
1707 /* We can't set errno=EDOM directly; let the library call do it.
1708 Pop the arguments right away in case the call gets deleted. */
1709 NO_DEFER_POP;
1710 expand_call (exp, target, 0);
1711 OK_DEFER_POP;
1712 emit_label (lab);
1716 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1717 Return 0 if a normal call should be emitted rather than expanding the
1718 function in-line. EXP is the expression that is a call to the builtin
1719 function; if convenient, the result should be placed in TARGET.
1720 SUBTARGET may be used as the target for computing one of EXP's operands. */
1722 static rtx
1723 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1725 optab builtin_optab;
1726 rtx op0, insns;
1727 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1728 tree arglist = TREE_OPERAND (exp, 1);
1729 enum machine_mode mode;
1730 bool errno_set = false;
1731 tree arg, narg;
1733 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1734 return 0;
1736 arg = TREE_VALUE (arglist);
1738 switch (DECL_FUNCTION_CODE (fndecl))
1740 case BUILT_IN_SIN:
1741 case BUILT_IN_SINF:
1742 case BUILT_IN_SINL:
1743 builtin_optab = sin_optab; break;
1744 case BUILT_IN_COS:
1745 case BUILT_IN_COSF:
1746 case BUILT_IN_COSL:
1747 builtin_optab = cos_optab; break;
1748 case BUILT_IN_SQRT:
1749 case BUILT_IN_SQRTF:
1750 case BUILT_IN_SQRTL:
1751 errno_set = ! tree_expr_nonnegative_p (arg);
1752 builtin_optab = sqrt_optab;
1753 break;
1754 case BUILT_IN_EXP:
1755 case BUILT_IN_EXPF:
1756 case BUILT_IN_EXPL:
1757 errno_set = true; builtin_optab = exp_optab; break;
1758 case BUILT_IN_LOG:
1759 case BUILT_IN_LOGF:
1760 case BUILT_IN_LOGL:
1761 errno_set = true; builtin_optab = log_optab; break;
1762 case BUILT_IN_TAN:
1763 case BUILT_IN_TANF:
1764 case BUILT_IN_TANL:
1765 builtin_optab = tan_optab; break;
1766 case BUILT_IN_ATAN:
1767 case BUILT_IN_ATANF:
1768 case BUILT_IN_ATANL:
1769 builtin_optab = atan_optab; break;
1770 case BUILT_IN_FLOOR:
1771 case BUILT_IN_FLOORF:
1772 case BUILT_IN_FLOORL:
1773 builtin_optab = floor_optab; break;
1774 case BUILT_IN_CEIL:
1775 case BUILT_IN_CEILF:
1776 case BUILT_IN_CEILL:
1777 builtin_optab = ceil_optab; break;
1778 case BUILT_IN_TRUNC:
1779 case BUILT_IN_TRUNCF:
1780 case BUILT_IN_TRUNCL:
1781 builtin_optab = trunc_optab; break;
1782 case BUILT_IN_ROUND:
1783 case BUILT_IN_ROUNDF:
1784 case BUILT_IN_ROUNDL:
1785 builtin_optab = round_optab; break;
1786 case BUILT_IN_NEARBYINT:
1787 case BUILT_IN_NEARBYINTF:
1788 case BUILT_IN_NEARBYINTL:
1789 builtin_optab = nearbyint_optab; break;
1790 default:
1791 abort ();
1794 /* Make a suitable register to place result in. */
1795 mode = TYPE_MODE (TREE_TYPE (exp));
1797 /* Before working hard, check whether the instruction is available. */
1798 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1799 return 0;
1800 target = gen_reg_rtx (mode);
1802 if (! flag_errno_math || ! HONOR_NANS (mode))
1803 errno_set = false;
1805 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1806 need to expand the argument again. This way, we will not perform
1807 side-effects more the once. */
1808 narg = save_expr (arg);
1809 if (narg != arg)
1811 arglist = build_tree_list (NULL_TREE, arg);
1812 exp = build_function_call_expr (fndecl, arglist);
1815 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1817 emit_queue ();
1818 start_sequence ();
1820 /* Compute into TARGET.
1821 Set TARGET to wherever the result comes back. */
1822 target = expand_unop (mode, builtin_optab, op0, target, 0);
1824 /* If we were unable to expand via the builtin, stop the sequence
1825 (without outputting the insns) and call to the library function
1826 with the stabilized argument list. */
1827 if (target == 0)
1829 end_sequence ();
1830 return expand_call (exp, target, target == const0_rtx);
1833 if (errno_set)
1834 expand_errno_check (exp, target);
1836 /* Output the entire sequence. */
1837 insns = get_insns ();
1838 end_sequence ();
1839 emit_insn (insns);
1841 return target;
1844 /* Expand a call to the builtin binary math functions (pow and atan2).
1845 Return 0 if a normal call should be emitted rather than expanding the
1846 function in-line. EXP is the expression that is a call to the builtin
1847 function; if convenient, the result should be placed in TARGET.
1848 SUBTARGET may be used as the target for computing one of EXP's
1849 operands. */
1851 static rtx
1852 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1854 optab builtin_optab;
1855 rtx op0, op1, insns;
1856 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1857 tree arglist = TREE_OPERAND (exp, 1);
1858 tree arg0, arg1, temp, narg;
1859 enum machine_mode mode;
1860 bool errno_set = true;
1861 bool stable = true;
1863 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1864 return 0;
1866 arg0 = TREE_VALUE (arglist);
1867 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1869 switch (DECL_FUNCTION_CODE (fndecl))
1871 case BUILT_IN_POW:
1872 case BUILT_IN_POWF:
1873 case BUILT_IN_POWL:
1874 builtin_optab = pow_optab; break;
1875 case BUILT_IN_ATAN2:
1876 case BUILT_IN_ATAN2F:
1877 case BUILT_IN_ATAN2L:
1878 builtin_optab = atan2_optab; break;
1879 default:
1880 abort ();
1883 /* Make a suitable register to place result in. */
1884 mode = TYPE_MODE (TREE_TYPE (exp));
1886 /* Before working hard, check whether the instruction is available. */
1887 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1888 return 0;
1890 target = gen_reg_rtx (mode);
1892 if (! flag_errno_math || ! HONOR_NANS (mode))
1893 errno_set = false;
1895 /* Alway stabilize the argument list. */
1896 narg = save_expr (arg1);
1897 if (narg != arg1)
1899 temp = build_tree_list (NULL_TREE, narg);
1900 stable = false;
1902 else
1903 temp = TREE_CHAIN (arglist);
1905 narg = save_expr (arg0);
1906 if (narg != arg0)
1908 arglist = tree_cons (NULL_TREE, narg, temp);
1909 stable = false;
1911 else if (! stable)
1912 arglist = tree_cons (NULL_TREE, arg0, temp);
1914 if (! stable)
1915 exp = build_function_call_expr (fndecl, arglist);
1917 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1918 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1920 emit_queue ();
1921 start_sequence ();
1923 /* Compute into TARGET.
1924 Set TARGET to wherever the result comes back. */
1925 target = expand_binop (mode, builtin_optab, op0, op1,
1926 target, 0, OPTAB_DIRECT);
1928 /* If we were unable to expand via the builtin, stop the sequence
1929 (without outputting the insns) and call to the library function
1930 with the stabilized argument list. */
1931 if (target == 0)
1933 end_sequence ();
1934 return expand_call (exp, target, target == const0_rtx);
1937 if (errno_set)
1938 expand_errno_check (exp, target);
1940 /* Output the entire sequence. */
1941 insns = get_insns ();
1942 end_sequence ();
1943 emit_insn (insns);
1945 return target;
1948 /* To evaluate powi(x,n), the floating point value x raised to the
1949 constant integer exponent n, we use a hybrid algorithm that
1950 combines the "window method" with look-up tables. For an
1951 introduction to exponentiation algorithms and "addition chains",
1952 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
1953 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
1954 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
1955 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
1957 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
1958 multiplications to inline before calling the system library's pow
1959 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
1960 so this default never requires calling pow, powf or powl. */
1962 #ifndef POWI_MAX_MULTS
1963 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
1964 #endif
1966 /* The size of the "optimal power tree" lookup table. All
1967 exponents less than this value are simply looked up in the
1968 powi_table below. This threshold is also used to size the
1969 cache of pseudo registers that hold intermediate results. */
1970 #define POWI_TABLE_SIZE 256
1972 /* The size, in bits of the window, used in the "window method"
1973 exponentiation algorithm. This is equivalent to a radix of
1974 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
1975 #define POWI_WINDOW_SIZE 3
1977 /* The following table is an efficient representation of an
1978 "optimal power tree". For each value, i, the corresponding
1979 value, j, in the table states than an optimal evaluation
1980 sequence for calculating pow(x,i) can be found by evaluating
1981 pow(x,j)*pow(x,i-j). An optimal power tree for the first
1982 100 integers is given in Knuth's "Seminumerical algorithms". */
1984 static const unsigned char powi_table[POWI_TABLE_SIZE] =
1986 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
1987 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
1988 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
1989 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
1990 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
1991 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
1992 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
1993 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
1994 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
1995 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
1996 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
1997 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
1998 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
1999 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2000 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2001 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2002 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2003 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2004 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2005 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2006 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2007 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2008 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2009 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2010 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2011 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2012 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2013 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2014 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2015 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2016 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2017 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2021 /* Return the number of multiplications required to calculate
2022 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2023 subroutine of powi_cost. CACHE is an array indicating
2024 which exponents have already been calculated. */
2026 static int
2027 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2029 /* If we've already calculated this exponent, then this evaluation
2030 doesn't require any additional multiplications. */
2031 if (cache[n])
2032 return 0;
2034 cache[n] = true;
2035 return powi_lookup_cost (n - powi_table[n], cache)
2036 + powi_lookup_cost (powi_table[n], cache) + 1;
2039 /* Return the number of multiplications required to calculate
2040 powi(x,n) for an arbitrary x, given the exponent N. This
2041 function needs to be kept in sync with expand_powi below. */
2043 static int
2044 powi_cost (HOST_WIDE_INT n)
2046 bool cache[POWI_TABLE_SIZE];
2047 unsigned HOST_WIDE_INT digit;
2048 unsigned HOST_WIDE_INT val;
2049 int result;
2051 if (n == 0)
2052 return 0;
2054 /* Ignore the reciprocal when calculating the cost. */
2055 val = (n < 0) ? -n : n;
2057 /* Initialize the exponent cache. */
2058 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2059 cache[1] = true;
2061 result = 0;
2063 while (val >= POWI_TABLE_SIZE)
2065 if (val & 1)
2067 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2068 result += powi_lookup_cost (digit, cache)
2069 + POWI_WINDOW_SIZE + 1;
2070 val >>= POWI_WINDOW_SIZE;
2072 else
2074 val >>= 1;
2075 result++;
2079 return result + powi_lookup_cost (val, cache);
2082 /* Recursive subroutine of expand_powi. This function takes the array,
2083 CACHE, of already calculated exponents and an exponent N and returns
2084 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2086 static rtx
2087 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2089 unsigned HOST_WIDE_INT digit;
2090 rtx target, result;
2091 rtx op0, op1;
2093 if (n < POWI_TABLE_SIZE)
2095 if (cache[n])
2096 return cache[n];
2098 target = gen_reg_rtx (mode);
2099 cache[n] = target;
2101 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2102 op1 = expand_powi_1 (mode, powi_table[n], cache);
2104 else if (n & 1)
2106 target = gen_reg_rtx (mode);
2107 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2108 op0 = expand_powi_1 (mode, n - digit, cache);
2109 op1 = expand_powi_1 (mode, digit, cache);
2111 else
2113 target = gen_reg_rtx (mode);
2114 op0 = expand_powi_1 (mode, n >> 1, cache);
2115 op1 = op0;
2118 result = expand_mult (mode, op0, op1, target, 0);
2119 if (result != target)
2120 emit_move_insn (target, result);
2121 return target;
2124 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2125 floating point operand in mode MODE, and N is the exponent. This
2126 function needs to be kept in sync with powi_cost above. */
2128 static rtx
2129 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2131 unsigned HOST_WIDE_INT val;
2132 rtx cache[POWI_TABLE_SIZE];
2133 rtx result;
2135 if (n == 0)
2136 return CONST1_RTX (mode);
2138 val = (n < 0) ? -n : n;
2140 memset (cache, 0, sizeof(cache));
2141 cache[1] = x;
2143 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2145 /* If the original exponent was negative, reciprocate the result. */
2146 if (n < 0)
2147 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2148 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2150 return result;
2153 /* Expand a call to the pow built-in mathematical function. Return 0 if
2154 a normal call should be emitted rather than expanding the function
2155 in-line. EXP is the expression that is a call to the builtin
2156 function; if convenient, the result should be placed in TARGET. */
2158 static rtx
2159 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2161 tree arglist = TREE_OPERAND (exp, 1);
2162 tree arg0, arg1;
2164 if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2165 return 0;
2167 arg0 = TREE_VALUE (arglist);
2168 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2170 if (flag_unsafe_math_optimizations
2171 && ! flag_errno_math
2172 && ! optimize_size
2173 && TREE_CODE (arg1) == REAL_CST
2174 && ! TREE_CONSTANT_OVERFLOW (arg1))
2176 REAL_VALUE_TYPE cint;
2177 REAL_VALUE_TYPE c;
2178 HOST_WIDE_INT n;
2180 c = TREE_REAL_CST (arg1);
2181 n = real_to_integer (&c);
2182 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2183 if (real_identical (&c, &cint)
2184 && powi_cost (n) <= POWI_MAX_MULTS)
2186 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2187 rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
2188 op = force_reg (mode, op);
2189 return expand_powi (op, mode, n);
2192 return expand_builtin_mathfn_2 (exp, target, NULL_RTX);
2195 /* Expand expression EXP which is a call to the strlen builtin. Return 0
2196 if we failed the caller should emit a normal call, otherwise
2197 try to get the result in TARGET, if convenient. */
2199 static rtx
2200 expand_builtin_strlen (tree arglist, rtx target,
2201 enum machine_mode target_mode)
2203 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
2204 return 0;
2205 else
2207 rtx pat;
2208 tree len, src = TREE_VALUE (arglist);
2209 rtx result, src_reg, char_rtx, before_strlen;
2210 enum machine_mode insn_mode = target_mode, char_mode;
2211 enum insn_code icode = CODE_FOR_nothing;
2212 int align;
2214 /* If the length can be computed at compile-time, return it. */
2215 len = c_strlen (src, 0);
2216 if (len)
2217 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2219 /* If the length can be computed at compile-time and is constant
2220 integer, but there are side-effects in src, evaluate
2221 src for side-effects, then return len.
2222 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2223 can be optimized into: i++; x = 3; */
2224 len = c_strlen (src, 1);
2225 if (len && TREE_CODE (len) == INTEGER_CST)
2227 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2228 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2231 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2233 /* If SRC is not a pointer type, don't do this operation inline. */
2234 if (align == 0)
2235 return 0;
2237 /* Bail out if we can't compute strlen in the right mode. */
2238 while (insn_mode != VOIDmode)
2240 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
2241 if (icode != CODE_FOR_nothing)
2242 break;
2244 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2246 if (insn_mode == VOIDmode)
2247 return 0;
2249 /* Make a place to write the result of the instruction. */
2250 result = target;
2251 if (! (result != 0
2252 && GET_CODE (result) == REG
2253 && GET_MODE (result) == insn_mode
2254 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2255 result = gen_reg_rtx (insn_mode);
2257 /* Make a place to hold the source address. We will not expand
2258 the actual source until we are sure that the expansion will
2259 not fail -- there are trees that cannot be expanded twice. */
2260 src_reg = gen_reg_rtx (Pmode);
2262 /* Mark the beginning of the strlen sequence so we can emit the
2263 source operand later. */
2264 before_strlen = get_last_insn ();
2266 char_rtx = const0_rtx;
2267 char_mode = insn_data[(int) icode].operand[2].mode;
2268 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2269 char_mode))
2270 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
2272 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2273 char_rtx, GEN_INT (align));
2274 if (! pat)
2275 return 0;
2276 emit_insn (pat);
2278 /* Now that we are assured of success, expand the source. */
2279 start_sequence ();
2280 pat = memory_address (BLKmode,
2281 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
2282 if (pat != src_reg)
2283 emit_move_insn (src_reg, pat);
2284 pat = get_insns ();
2285 end_sequence ();
2287 if (before_strlen)
2288 emit_insn_after (pat, before_strlen);
2289 else
2290 emit_insn_before (pat, get_insns ());
2292 /* Return the value in the proper mode for this function. */
2293 if (GET_MODE (result) == target_mode)
2294 target = result;
2295 else if (target != 0)
2296 convert_move (target, result, 0);
2297 else
2298 target = convert_to_mode (target_mode, result, 0);
2300 return target;
2304 /* Expand a call to the strstr builtin. Return 0 if we failed the
2305 caller should emit a normal call, otherwise try to get the result
2306 in TARGET, if convenient (and in mode MODE if that's convenient). */
2308 static rtx
2309 expand_builtin_strstr (tree arglist, rtx target, enum machine_mode mode)
2311 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2312 return 0;
2313 else
2315 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2316 tree fn;
2317 const char *p1, *p2;
2319 p2 = c_getstr (s2);
2320 if (p2 == NULL)
2321 return 0;
2323 p1 = c_getstr (s1);
2324 if (p1 != NULL)
2326 const char *r = strstr (p1, p2);
2328 if (r == NULL)
2329 return const0_rtx;
2331 /* Return an offset into the constant string argument. */
2332 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2333 s1, ssize_int (r - p1))),
2334 target, mode, EXPAND_NORMAL);
2337 if (p2[0] == '\0')
2338 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2340 if (p2[1] != '\0')
2341 return 0;
2343 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2344 if (!fn)
2345 return 0;
2347 /* New argument list transforming strstr(s1, s2) to
2348 strchr(s1, s2[0]). */
2349 arglist =
2350 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2351 arglist = tree_cons (NULL_TREE, s1, arglist);
2352 return expand_expr (build_function_call_expr (fn, arglist),
2353 target, mode, EXPAND_NORMAL);
2357 /* Expand a call to the strchr builtin. Return 0 if we failed the
2358 caller should emit a normal call, otherwise try to get the result
2359 in TARGET, if convenient (and in mode MODE if that's convenient). */
2361 static rtx
2362 expand_builtin_strchr (tree arglist, rtx target, enum machine_mode mode)
2364 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2365 return 0;
2366 else
2368 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2369 const char *p1;
2371 if (TREE_CODE (s2) != INTEGER_CST)
2372 return 0;
2374 p1 = c_getstr (s1);
2375 if (p1 != NULL)
2377 char c;
2378 const char *r;
2380 if (target_char_cast (s2, &c))
2381 return 0;
2383 r = strchr (p1, c);
2385 if (r == NULL)
2386 return const0_rtx;
2388 /* Return an offset into the constant string argument. */
2389 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2390 s1, ssize_int (r - p1))),
2391 target, mode, EXPAND_NORMAL);
2394 /* FIXME: Should use here strchrM optab so that ports can optimize
2395 this. */
2396 return 0;
2400 /* Expand a call to the strrchr builtin. Return 0 if we failed the
2401 caller should emit a normal call, otherwise try to get the result
2402 in TARGET, if convenient (and in mode MODE if that's convenient). */
2404 static rtx
2405 expand_builtin_strrchr (tree arglist, rtx target, enum machine_mode mode)
2407 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2408 return 0;
2409 else
2411 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2412 tree fn;
2413 const char *p1;
2415 if (TREE_CODE (s2) != INTEGER_CST)
2416 return 0;
2418 p1 = c_getstr (s1);
2419 if (p1 != NULL)
2421 char c;
2422 const char *r;
2424 if (target_char_cast (s2, &c))
2425 return 0;
2427 r = strrchr (p1, c);
2429 if (r == NULL)
2430 return const0_rtx;
2432 /* Return an offset into the constant string argument. */
2433 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2434 s1, ssize_int (r - p1))),
2435 target, mode, EXPAND_NORMAL);
2438 if (! integer_zerop (s2))
2439 return 0;
2441 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2442 if (!fn)
2443 return 0;
2445 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2446 return expand_expr (build_function_call_expr (fn, arglist),
2447 target, mode, EXPAND_NORMAL);
2451 /* Expand a call to the strpbrk builtin. Return 0 if we failed the
2452 caller should emit a normal call, otherwise try to get the result
2453 in TARGET, if convenient (and in mode MODE if that's convenient). */
2455 static rtx
2456 expand_builtin_strpbrk (tree arglist, rtx target, enum machine_mode mode)
2458 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2459 return 0;
2460 else
2462 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2463 tree fn;
2464 const char *p1, *p2;
2466 p2 = c_getstr (s2);
2467 if (p2 == NULL)
2468 return 0;
2470 p1 = c_getstr (s1);
2471 if (p1 != NULL)
2473 const char *r = strpbrk (p1, p2);
2475 if (r == NULL)
2476 return const0_rtx;
2478 /* Return an offset into the constant string argument. */
2479 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2480 s1, ssize_int (r - p1))),
2481 target, mode, EXPAND_NORMAL);
2484 if (p2[0] == '\0')
2486 /* strpbrk(x, "") == NULL.
2487 Evaluate and ignore the arguments in case they had
2488 side-effects. */
2489 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2490 return const0_rtx;
2493 if (p2[1] != '\0')
2494 return 0; /* Really call strpbrk. */
2496 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2497 if (!fn)
2498 return 0;
2500 /* New argument list transforming strpbrk(s1, s2) to
2501 strchr(s1, s2[0]). */
2502 arglist =
2503 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2504 arglist = tree_cons (NULL_TREE, s1, arglist);
2505 return expand_expr (build_function_call_expr (fn, arglist),
2506 target, mode, EXPAND_NORMAL);
2510 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2511 bytes from constant string DATA + OFFSET and return it as target
2512 constant. */
2514 static rtx
2515 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2516 enum machine_mode mode)
2518 const char *str = (const char *) data;
2520 if (offset < 0
2521 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2522 > strlen (str) + 1))
2523 abort (); /* Attempt to read past the end of constant string. */
2525 return c_readstr (str + offset, mode);
2528 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2529 Return 0 if we failed, the caller should emit a normal call,
2530 otherwise try to get the result in TARGET, if convenient (and in
2531 mode MODE if that's convenient). */
2532 static rtx
2533 expand_builtin_memcpy (tree arglist, rtx target, enum machine_mode mode)
2535 if (!validate_arglist (arglist,
2536 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2537 return 0;
2538 else
2540 tree dest = TREE_VALUE (arglist);
2541 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2542 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2543 const char *src_str;
2544 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2545 unsigned int dest_align
2546 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2547 rtx dest_mem, src_mem, dest_addr, len_rtx;
2549 /* If DEST is not a pointer type, call the normal function. */
2550 if (dest_align == 0)
2551 return 0;
2553 /* If the LEN parameter is zero, return DEST. */
2554 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2556 /* Evaluate and ignore SRC in case it has side-effects. */
2557 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2558 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2561 /* If either SRC is not a pointer type, don't do this
2562 operation in-line. */
2563 if (src_align == 0)
2564 return 0;
2566 dest_mem = get_memory_rtx (dest);
2567 set_mem_align (dest_mem, dest_align);
2568 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2569 src_str = c_getstr (src);
2571 /* If SRC is a string constant and block move would be done
2572 by pieces, we can avoid loading the string from memory
2573 and only stored the computed constants. */
2574 if (src_str
2575 && GET_CODE (len_rtx) == CONST_INT
2576 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2577 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2578 (void *) src_str, dest_align))
2580 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2581 builtin_memcpy_read_str,
2582 (void *) src_str, dest_align, 0);
2583 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2584 #ifdef POINTERS_EXTEND_UNSIGNED
2585 if (GET_MODE (dest_mem) != ptr_mode)
2586 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2587 #endif
2588 return dest_mem;
2591 src_mem = get_memory_rtx (src);
2592 set_mem_align (src_mem, src_align);
2594 /* Copy word part most expediently. */
2595 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2596 BLOCK_OP_NORMAL);
2598 if (dest_addr == 0)
2600 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2601 #ifdef POINTERS_EXTEND_UNSIGNED
2602 if (GET_MODE (dest_addr) != ptr_mode)
2603 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2604 #endif
2606 return dest_addr;
2610 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2611 Return 0 if we failed the caller should emit a normal call,
2612 otherwise try to get the result in TARGET, if convenient (and in
2613 mode MODE if that's convenient). If ENDP is 0 return the
2614 destination pointer, if ENDP is 1 return the end pointer ala
2615 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2616 stpcpy. */
2618 static rtx
2619 expand_builtin_mempcpy (tree arglist, rtx target, enum machine_mode mode,
2620 int endp)
2622 if (!validate_arglist (arglist,
2623 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2624 return 0;
2625 /* If return value is ignored, transform mempcpy into memcpy. */
2626 else if (target == const0_rtx)
2628 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2630 if (!fn)
2631 return 0;
2633 return expand_expr (build_function_call_expr (fn, arglist),
2634 target, mode, EXPAND_NORMAL);
2636 else
2638 tree dest = TREE_VALUE (arglist);
2639 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2640 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2641 const char *src_str;
2642 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2643 unsigned int dest_align
2644 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2645 rtx dest_mem, src_mem, len_rtx;
2647 /* If DEST is not a pointer type or LEN is not constant,
2648 call the normal function. */
2649 if (dest_align == 0 || !host_integerp (len, 1))
2650 return 0;
2652 /* If the LEN parameter is zero, return DEST. */
2653 if (tree_low_cst (len, 1) == 0)
2655 /* Evaluate and ignore SRC in case it has side-effects. */
2656 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2657 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2660 /* If either SRC is not a pointer type, don't do this
2661 operation in-line. */
2662 if (src_align == 0)
2663 return 0;
2665 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2666 src_str = c_getstr (src);
2668 /* If SRC is a string constant and block move would be done
2669 by pieces, we can avoid loading the string from memory
2670 and only stored the computed constants. */
2671 if (src_str
2672 && GET_CODE (len_rtx) == CONST_INT
2673 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2674 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2675 (void *) src_str, dest_align))
2677 dest_mem = get_memory_rtx (dest);
2678 set_mem_align (dest_mem, dest_align);
2679 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2680 builtin_memcpy_read_str,
2681 (void *) src_str, dest_align, endp);
2682 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2683 #ifdef POINTERS_EXTEND_UNSIGNED
2684 if (GET_MODE (dest_mem) != ptr_mode)
2685 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2686 #endif
2687 return dest_mem;
2690 if (GET_CODE (len_rtx) == CONST_INT
2691 && can_move_by_pieces (INTVAL (len_rtx),
2692 MIN (dest_align, src_align)))
2694 dest_mem = get_memory_rtx (dest);
2695 set_mem_align (dest_mem, dest_align);
2696 src_mem = get_memory_rtx (src);
2697 set_mem_align (src_mem, src_align);
2698 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
2699 MIN (dest_align, src_align), endp);
2700 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2701 #ifdef POINTERS_EXTEND_UNSIGNED
2702 if (GET_MODE (dest_mem) != ptr_mode)
2703 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2704 #endif
2705 return dest_mem;
2708 return 0;
2712 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2713 if we failed the caller should emit a normal call. */
2715 static rtx
2716 expand_builtin_memmove (tree arglist, rtx target, enum machine_mode mode)
2718 if (!validate_arglist (arglist,
2719 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2720 return 0;
2721 else
2723 tree dest = TREE_VALUE (arglist);
2724 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2725 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2727 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2728 unsigned int dest_align
2729 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2731 /* If DEST is not a pointer type, call the normal function. */
2732 if (dest_align == 0)
2733 return 0;
2735 /* If the LEN parameter is zero, return DEST. */
2736 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2738 /* Evaluate and ignore SRC in case it has side-effects. */
2739 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2740 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2743 /* If either SRC is not a pointer type, don't do this
2744 operation in-line. */
2745 if (src_align == 0)
2746 return 0;
2748 /* If src is categorized for a readonly section we can use
2749 normal memcpy. */
2750 if (readonly_data_expr (src))
2752 tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2753 if (!fn)
2754 return 0;
2755 return expand_expr (build_function_call_expr (fn, arglist),
2756 target, mode, EXPAND_NORMAL);
2759 /* Otherwise, call the normal function. */
2760 return 0;
2764 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2765 if we failed the caller should emit a normal call. */
2767 static rtx
2768 expand_builtin_bcopy (tree arglist)
2770 tree src, dest, size, newarglist;
2772 if (!validate_arglist (arglist,
2773 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2774 return NULL_RTX;
2776 src = TREE_VALUE (arglist);
2777 dest = TREE_VALUE (TREE_CHAIN (arglist));
2778 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2780 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2781 memmove(ptr y, ptr x, size_t z). This is done this way
2782 so that if it isn't expanded inline, we fallback to
2783 calling bcopy instead of memmove. */
2785 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2786 newarglist = tree_cons (NULL_TREE, src, newarglist);
2787 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2789 return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
2792 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2793 if we failed the caller should emit a normal call, otherwise try to get
2794 the result in TARGET, if convenient (and in mode MODE if that's
2795 convenient). */
2797 static rtx
2798 expand_builtin_strcpy (tree arglist, rtx target, enum machine_mode mode)
2800 tree fn, len, src, dst;
2802 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2803 return 0;
2805 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2806 if (!fn)
2807 return 0;
2809 src = TREE_VALUE (TREE_CHAIN (arglist));
2810 len = c_strlen (src, 1);
2811 if (len == 0 || TREE_SIDE_EFFECTS (len))
2812 return 0;
2814 dst = TREE_VALUE (arglist);
2815 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2816 arglist = build_tree_list (NULL_TREE, len);
2817 arglist = tree_cons (NULL_TREE, src, arglist);
2818 arglist = tree_cons (NULL_TREE, dst, arglist);
2819 return expand_expr (build_function_call_expr (fn, arglist),
2820 target, mode, EXPAND_NORMAL);
2823 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2824 Return 0 if we failed the caller should emit a normal call,
2825 otherwise try to get the result in TARGET, if convenient (and in
2826 mode MODE if that's convenient). */
2828 static rtx
2829 expand_builtin_stpcpy (tree arglist, rtx target, enum machine_mode mode)
2831 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2832 return 0;
2833 else
2835 tree dst, src, len;
2837 /* If return value is ignored, transform stpcpy into strcpy. */
2838 if (target == const0_rtx)
2840 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
2841 if (!fn)
2842 return 0;
2844 return expand_expr (build_function_call_expr (fn, arglist),
2845 target, mode, EXPAND_NORMAL);
2848 /* Ensure we get an actual string whose length can be evaluated at
2849 compile-time, not an expression containing a string. This is
2850 because the latter will potentially produce pessimized code
2851 when used to produce the return value. */
2852 src = TREE_VALUE (TREE_CHAIN (arglist));
2853 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
2854 return 0;
2856 dst = TREE_VALUE (arglist);
2857 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2858 arglist = build_tree_list (NULL_TREE, len);
2859 arglist = tree_cons (NULL_TREE, src, arglist);
2860 arglist = tree_cons (NULL_TREE, dst, arglist);
2861 return expand_builtin_mempcpy (arglist, target, mode, /*endp=*/2);
2865 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2866 bytes from constant string DATA + OFFSET and return it as target
2867 constant. */
2869 static rtx
2870 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
2871 enum machine_mode mode)
2873 const char *str = (const char *) data;
2875 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2876 return const0_rtx;
2878 return c_readstr (str + offset, mode);
2881 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2882 if we failed the caller should emit a normal call. */
2884 static rtx
2885 expand_builtin_strncpy (tree arglist, rtx target, enum machine_mode mode)
2887 if (!validate_arglist (arglist,
2888 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2889 return 0;
2890 else
2892 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
2893 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2894 tree fn;
2896 /* We must be passed a constant len parameter. */
2897 if (TREE_CODE (len) != INTEGER_CST)
2898 return 0;
2900 /* If the len parameter is zero, return the dst parameter. */
2901 if (integer_zerop (len))
2903 /* Evaluate and ignore the src argument in case it has
2904 side-effects. */
2905 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2906 VOIDmode, EXPAND_NORMAL);
2907 /* Return the dst parameter. */
2908 return expand_expr (TREE_VALUE (arglist), target, mode,
2909 EXPAND_NORMAL);
2912 /* Now, we must be passed a constant src ptr parameter. */
2913 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2914 return 0;
2916 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2918 /* We're required to pad with trailing zeros if the requested
2919 len is greater than strlen(s2)+1. In that case try to
2920 use store_by_pieces, if it fails, punt. */
2921 if (tree_int_cst_lt (slen, len))
2923 tree dest = TREE_VALUE (arglist);
2924 unsigned int dest_align
2925 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2926 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2927 rtx dest_mem;
2929 if (!p || dest_align == 0 || !host_integerp (len, 1)
2930 || !can_store_by_pieces (tree_low_cst (len, 1),
2931 builtin_strncpy_read_str,
2932 (void *) p, dest_align))
2933 return 0;
2935 dest_mem = get_memory_rtx (dest);
2936 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2937 builtin_strncpy_read_str,
2938 (void *) p, dest_align, 0);
2939 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2940 #ifdef POINTERS_EXTEND_UNSIGNED
2941 if (GET_MODE (dest_mem) != ptr_mode)
2942 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2943 #endif
2944 return dest_mem;
2947 /* OK transform into builtin memcpy. */
2948 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2949 if (!fn)
2950 return 0;
2951 return expand_expr (build_function_call_expr (fn, arglist),
2952 target, mode, EXPAND_NORMAL);
2956 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2957 bytes from constant string DATA + OFFSET and return it as target
2958 constant. */
2960 static rtx
2961 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2962 enum machine_mode mode)
2964 const char *c = (const char *) data;
2965 char *p = alloca (GET_MODE_SIZE (mode));
2967 memset (p, *c, GET_MODE_SIZE (mode));
2969 return c_readstr (p, mode);
2972 /* Callback routine for store_by_pieces. Return the RTL of a register
2973 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2974 char value given in the RTL register data. For example, if mode is
2975 4 bytes wide, return the RTL for 0x01010101*data. */
2977 static rtx
2978 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2979 enum machine_mode mode)
2981 rtx target, coeff;
2982 size_t size;
2983 char *p;
2985 size = GET_MODE_SIZE (mode);
2986 if (size == 1)
2987 return (rtx) data;
2989 p = alloca (size);
2990 memset (p, 1, size);
2991 coeff = c_readstr (p, mode);
2993 target = convert_to_mode (mode, (rtx) data, 1);
2994 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2995 return force_reg (mode, target);
2998 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2999 if we failed the caller should emit a normal call, otherwise try to get
3000 the result in TARGET, if convenient (and in mode MODE if that's
3001 convenient). */
3003 static rtx
3004 expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode)
3006 if (!validate_arglist (arglist,
3007 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3008 return 0;
3009 else
3011 tree dest = TREE_VALUE (arglist);
3012 tree val = TREE_VALUE (TREE_CHAIN (arglist));
3013 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3014 char c;
3016 unsigned int dest_align
3017 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3018 rtx dest_mem, dest_addr, len_rtx;
3020 /* If DEST is not a pointer type, don't do this
3021 operation in-line. */
3022 if (dest_align == 0)
3023 return 0;
3025 /* If the LEN parameter is zero, return DEST. */
3026 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
3028 /* Evaluate and ignore VAL in case it has side-effects. */
3029 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3030 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3033 if (TREE_CODE (val) != INTEGER_CST)
3035 rtx val_rtx;
3037 if (!host_integerp (len, 1))
3038 return 0;
3040 if (optimize_size && tree_low_cst (len, 1) > 1)
3041 return 0;
3043 /* Assume that we can memset by pieces if we can store the
3044 * the coefficients by pieces (in the required modes).
3045 * We can't pass builtin_memset_gen_str as that emits RTL. */
3046 c = 1;
3047 if (!can_store_by_pieces (tree_low_cst (len, 1),
3048 builtin_memset_read_str,
3049 &c, dest_align))
3050 return 0;
3052 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
3053 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
3054 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3055 val_rtx);
3056 dest_mem = get_memory_rtx (dest);
3057 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3058 builtin_memset_gen_str,
3059 val_rtx, dest_align, 0);
3060 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3061 #ifdef POINTERS_EXTEND_UNSIGNED
3062 if (GET_MODE (dest_mem) != ptr_mode)
3063 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3064 #endif
3065 return dest_mem;
3068 if (target_char_cast (val, &c))
3069 return 0;
3071 if (c)
3073 if (!host_integerp (len, 1))
3074 return 0;
3075 if (!can_store_by_pieces (tree_low_cst (len, 1),
3076 builtin_memset_read_str, &c,
3077 dest_align))
3078 return 0;
3080 dest_mem = get_memory_rtx (dest);
3081 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3082 builtin_memset_read_str,
3083 &c, dest_align, 0);
3084 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3085 #ifdef POINTERS_EXTEND_UNSIGNED
3086 if (GET_MODE (dest_mem) != ptr_mode)
3087 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3088 #endif
3089 return dest_mem;
3092 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3094 dest_mem = get_memory_rtx (dest);
3095 set_mem_align (dest_mem, dest_align);
3096 dest_addr = clear_storage (dest_mem, len_rtx);
3098 if (dest_addr == 0)
3100 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3101 #ifdef POINTERS_EXTEND_UNSIGNED
3102 if (GET_MODE (dest_addr) != ptr_mode)
3103 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3104 #endif
3107 return dest_addr;
3111 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3112 if we failed the caller should emit a normal call. */
3114 static rtx
3115 expand_builtin_bzero (tree arglist)
3117 tree dest, size, newarglist;
3119 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3120 return NULL_RTX;
3122 dest = TREE_VALUE (arglist);
3123 size = TREE_VALUE (TREE_CHAIN (arglist));
3125 /* New argument list transforming bzero(ptr x, int y) to
3126 memset(ptr x, int 0, size_t y). This is done this way
3127 so that if it isn't expanded inline, we fallback to
3128 calling bzero instead of memset. */
3130 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
3131 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3132 newarglist = tree_cons (NULL_TREE, dest, newarglist);
3134 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
3137 /* Expand expression EXP, which is a call to the memcmp built-in function.
3138 ARGLIST is the argument list for this call. Return 0 if we failed and the
3139 caller should emit a normal call, otherwise try to get the result in
3140 TARGET, if convenient (and in mode MODE, if that's convenient). */
3142 static rtx
3143 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
3144 enum machine_mode mode)
3146 tree arg1, arg2, len;
3147 const char *p1, *p2;
3149 if (!validate_arglist (arglist,
3150 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3151 return 0;
3153 arg1 = TREE_VALUE (arglist);
3154 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3155 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3157 /* If the len parameter is zero, return zero. */
3158 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
3160 /* Evaluate and ignore arg1 and arg2 in case they have
3161 side-effects. */
3162 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3163 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3164 return const0_rtx;
3167 p1 = c_getstr (arg1);
3168 p2 = c_getstr (arg2);
3170 /* If all arguments are constant, and the value of len is not greater
3171 than the lengths of arg1 and arg2, evaluate at compile-time. */
3172 if (host_integerp (len, 1) && p1 && p2
3173 && compare_tree_int (len, strlen (p1) + 1) <= 0
3174 && compare_tree_int (len, strlen (p2) + 1) <= 0)
3176 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
3178 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3181 /* If len parameter is one, return an expression corresponding to
3182 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
3183 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
3185 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3186 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3187 tree ind1 =
3188 fold (build1 (CONVERT_EXPR, integer_type_node,
3189 build1 (INDIRECT_REF, cst_uchar_node,
3190 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3191 tree ind2 =
3192 fold (build1 (CONVERT_EXPR, integer_type_node,
3193 build1 (INDIRECT_REF, cst_uchar_node,
3194 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3195 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3196 return expand_expr (result, target, mode, EXPAND_NORMAL);
3199 #ifdef HAVE_cmpstrsi
3201 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3202 rtx result;
3203 rtx insn;
3205 int arg1_align
3206 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3207 int arg2_align
3208 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3209 enum machine_mode insn_mode
3210 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3212 /* If we don't have POINTER_TYPE, call the function. */
3213 if (arg1_align == 0 || arg2_align == 0)
3214 return 0;
3216 /* Make a place to write the result of the instruction. */
3217 result = target;
3218 if (! (result != 0
3219 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3220 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3221 result = gen_reg_rtx (insn_mode);
3223 arg1_rtx = get_memory_rtx (arg1);
3224 arg2_rtx = get_memory_rtx (arg2);
3225 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3226 if (!HAVE_cmpstrsi)
3227 insn = NULL_RTX;
3228 else
3229 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3230 GEN_INT (MIN (arg1_align, arg2_align)));
3232 if (insn)
3233 emit_insn (insn);
3234 else
3235 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
3236 TYPE_MODE (integer_type_node), 3,
3237 XEXP (arg1_rtx, 0), Pmode,
3238 XEXP (arg2_rtx, 0), Pmode,
3239 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3240 TREE_UNSIGNED (sizetype)),
3241 TYPE_MODE (sizetype));
3243 /* Return the value in the proper mode for this function. */
3244 mode = TYPE_MODE (TREE_TYPE (exp));
3245 if (GET_MODE (result) == mode)
3246 return result;
3247 else if (target != 0)
3249 convert_move (target, result, 0);
3250 return target;
3252 else
3253 return convert_to_mode (mode, result, 0);
3255 #endif
3257 return 0;
3260 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
3261 if we failed the caller should emit a normal call, otherwise try to get
3262 the result in TARGET, if convenient. */
3264 static rtx
3265 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
3267 tree arglist = TREE_OPERAND (exp, 1);
3268 tree arg1, arg2;
3269 const char *p1, *p2;
3271 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3272 return 0;
3274 arg1 = TREE_VALUE (arglist);
3275 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3277 p1 = c_getstr (arg1);
3278 p2 = c_getstr (arg2);
3280 if (p1 && p2)
3282 const int i = strcmp (p1, p2);
3283 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
3286 /* If either arg is "", return an expression corresponding to
3287 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
3288 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3290 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3291 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3292 tree ind1 =
3293 fold (build1 (CONVERT_EXPR, integer_type_node,
3294 build1 (INDIRECT_REF, cst_uchar_node,
3295 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3296 tree ind2 =
3297 fold (build1 (CONVERT_EXPR, integer_type_node,
3298 build1 (INDIRECT_REF, cst_uchar_node,
3299 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3300 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3301 return expand_expr (result, target, mode, EXPAND_NORMAL);
3304 #ifdef HAVE_cmpstrsi
3305 if (HAVE_cmpstrsi)
3307 tree len, len1, len2;
3308 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3309 rtx result, insn;
3311 int arg1_align
3312 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3313 int arg2_align
3314 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3315 enum machine_mode insn_mode
3316 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3318 len1 = c_strlen (arg1, 1);
3319 len2 = c_strlen (arg2, 1);
3321 if (len1)
3322 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3323 if (len2)
3324 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3326 /* If we don't have a constant length for the first, use the length
3327 of the second, if we know it. We don't require a constant for
3328 this case; some cost analysis could be done if both are available
3329 but neither is constant. For now, assume they're equally cheap,
3330 unless one has side effects. If both strings have constant lengths,
3331 use the smaller. */
3333 if (!len1)
3334 len = len2;
3335 else if (!len2)
3336 len = len1;
3337 else if (TREE_SIDE_EFFECTS (len1))
3338 len = len2;
3339 else if (TREE_SIDE_EFFECTS (len2))
3340 len = len1;
3341 else if (TREE_CODE (len1) != INTEGER_CST)
3342 len = len2;
3343 else if (TREE_CODE (len2) != INTEGER_CST)
3344 len = len1;
3345 else if (tree_int_cst_lt (len1, len2))
3346 len = len1;
3347 else
3348 len = len2;
3350 /* If both arguments have side effects, we cannot optimize. */
3351 if (!len || TREE_SIDE_EFFECTS (len))
3352 return 0;
3354 /* If we don't have POINTER_TYPE, call the function. */
3355 if (arg1_align == 0 || arg2_align == 0)
3356 return 0;
3358 /* Make a place to write the result of the instruction. */
3359 result = target;
3360 if (! (result != 0
3361 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3362 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3363 result = gen_reg_rtx (insn_mode);
3365 arg1_rtx = get_memory_rtx (arg1);
3366 arg2_rtx = get_memory_rtx (arg2);
3367 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3368 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3369 GEN_INT (MIN (arg1_align, arg2_align)));
3370 if (!insn)
3371 return 0;
3373 emit_insn (insn);
3375 /* Return the value in the proper mode for this function. */
3376 mode = TYPE_MODE (TREE_TYPE (exp));
3377 if (GET_MODE (result) == mode)
3378 return result;
3379 if (target == 0)
3380 return convert_to_mode (mode, result, 0);
3381 convert_move (target, result, 0);
3382 return target;
3384 #endif
3385 return 0;
3388 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3389 if we failed the caller should emit a normal call, otherwise try to get
3390 the result in TARGET, if convenient. */
3392 static rtx
3393 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
3395 tree arglist = TREE_OPERAND (exp, 1);
3396 tree arg1, arg2, arg3;
3397 const char *p1, *p2;
3399 if (!validate_arglist (arglist,
3400 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3401 return 0;
3403 arg1 = TREE_VALUE (arglist);
3404 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3405 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3407 /* If the len parameter is zero, return zero. */
3408 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
3410 /* Evaluate and ignore arg1 and arg2 in case they have
3411 side-effects. */
3412 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3413 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3414 return const0_rtx;
3417 p1 = c_getstr (arg1);
3418 p2 = c_getstr (arg2);
3420 /* If all arguments are constant, evaluate at compile-time. */
3421 if (host_integerp (arg3, 1) && p1 && p2)
3423 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
3424 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3427 /* If len == 1 or (either string parameter is "" and (len >= 1)),
3428 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
3429 if (host_integerp (arg3, 1)
3430 && (tree_low_cst (arg3, 1) == 1
3431 || (tree_low_cst (arg3, 1) > 1
3432 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
3434 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3435 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3436 tree ind1 =
3437 fold (build1 (CONVERT_EXPR, integer_type_node,
3438 build1 (INDIRECT_REF, cst_uchar_node,
3439 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3440 tree ind2 =
3441 fold (build1 (CONVERT_EXPR, integer_type_node,
3442 build1 (INDIRECT_REF, cst_uchar_node,
3443 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3444 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3445 return expand_expr (result, target, mode, EXPAND_NORMAL);
3448 /* If c_strlen can determine an expression for one of the string
3449 lengths, and it doesn't have side effects, then emit cmpstrsi
3450 using length MIN(strlen(string)+1, arg3). */
3451 #ifdef HAVE_cmpstrsi
3452 if (HAVE_cmpstrsi)
3454 tree len, len1, len2;
3455 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3456 rtx result, insn;
3458 int arg1_align
3459 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3460 int arg2_align
3461 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3462 enum machine_mode insn_mode
3463 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3465 len1 = c_strlen (arg1, 1);
3466 len2 = c_strlen (arg2, 1);
3468 if (len1)
3469 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3470 if (len2)
3471 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3473 /* If we don't have a constant length for the first, use the length
3474 of the second, if we know it. We don't require a constant for
3475 this case; some cost analysis could be done if both are available
3476 but neither is constant. For now, assume they're equally cheap,
3477 unless one has side effects. If both strings have constant lengths,
3478 use the smaller. */
3480 if (!len1)
3481 len = len2;
3482 else if (!len2)
3483 len = len1;
3484 else if (TREE_SIDE_EFFECTS (len1))
3485 len = len2;
3486 else if (TREE_SIDE_EFFECTS (len2))
3487 len = len1;
3488 else if (TREE_CODE (len1) != INTEGER_CST)
3489 len = len2;
3490 else if (TREE_CODE (len2) != INTEGER_CST)
3491 len = len1;
3492 else if (tree_int_cst_lt (len1, len2))
3493 len = len1;
3494 else
3495 len = len2;
3497 /* If both arguments have side effects, we cannot optimize. */
3498 if (!len || TREE_SIDE_EFFECTS (len))
3499 return 0;
3501 /* The actual new length parameter is MIN(len,arg3). */
3502 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
3504 /* If we don't have POINTER_TYPE, call the function. */
3505 if (arg1_align == 0 || arg2_align == 0)
3506 return 0;
3508 /* Make a place to write the result of the instruction. */
3509 result = target;
3510 if (! (result != 0
3511 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3512 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3513 result = gen_reg_rtx (insn_mode);
3515 arg1_rtx = get_memory_rtx (arg1);
3516 arg2_rtx = get_memory_rtx (arg2);
3517 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3518 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3519 GEN_INT (MIN (arg1_align, arg2_align)));
3520 if (!insn)
3521 return 0;
3523 emit_insn (insn);
3525 /* Return the value in the proper mode for this function. */
3526 mode = TYPE_MODE (TREE_TYPE (exp));
3527 if (GET_MODE (result) == mode)
3528 return result;
3529 if (target == 0)
3530 return convert_to_mode (mode, result, 0);
3531 convert_move (target, result, 0);
3532 return target;
3534 #endif
3535 return 0;
3538 /* Expand expression EXP, which is a call to the strcat builtin.
3539 Return 0 if we failed the caller should emit a normal call,
3540 otherwise try to get the result in TARGET, if convenient. */
3542 static rtx
3543 expand_builtin_strcat (tree arglist, rtx target, enum machine_mode mode)
3545 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3546 return 0;
3547 else
3549 tree dst = TREE_VALUE (arglist),
3550 src = TREE_VALUE (TREE_CHAIN (arglist));
3551 const char *p = c_getstr (src);
3553 /* If the string length is zero, return the dst parameter. */
3554 if (p && *p == '\0')
3555 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3557 return 0;
3561 /* Expand expression EXP, which is a call to the strncat builtin.
3562 Return 0 if we failed the caller should emit a normal call,
3563 otherwise try to get the result in TARGET, if convenient. */
3565 static rtx
3566 expand_builtin_strncat (tree arglist, rtx target, enum machine_mode mode)
3568 if (!validate_arglist (arglist,
3569 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3570 return 0;
3571 else
3573 tree dst = TREE_VALUE (arglist),
3574 src = TREE_VALUE (TREE_CHAIN (arglist)),
3575 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3576 const char *p = c_getstr (src);
3578 /* If the requested length is zero, or the src parameter string
3579 length is zero, return the dst parameter. */
3580 if (integer_zerop (len) || (p && *p == '\0'))
3582 /* Evaluate and ignore the src and len parameters in case
3583 they have side-effects. */
3584 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3585 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3586 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3589 /* If the requested len is greater than or equal to the string
3590 length, call strcat. */
3591 if (TREE_CODE (len) == INTEGER_CST && p
3592 && compare_tree_int (len, strlen (p)) >= 0)
3594 tree newarglist
3595 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
3596 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
3598 /* If the replacement _DECL isn't initialized, don't do the
3599 transformation. */
3600 if (!fn)
3601 return 0;
3603 return expand_expr (build_function_call_expr (fn, newarglist),
3604 target, mode, EXPAND_NORMAL);
3606 return 0;
3610 /* Expand expression EXP, which is a call to the strspn builtin.
3611 Return 0 if we failed the caller should emit a normal call,
3612 otherwise try to get the result in TARGET, if convenient. */
3614 static rtx
3615 expand_builtin_strspn (tree arglist, rtx target, enum machine_mode mode)
3617 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3618 return 0;
3619 else
3621 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3622 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3624 /* If both arguments are constants, evaluate at compile-time. */
3625 if (p1 && p2)
3627 const size_t r = strspn (p1, p2);
3628 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3631 /* If either argument is "", return 0. */
3632 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3634 /* Evaluate and ignore both arguments in case either one has
3635 side-effects. */
3636 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3637 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3638 return const0_rtx;
3640 return 0;
3644 /* Expand expression EXP, which is a call to the strcspn builtin.
3645 Return 0 if we failed the caller should emit a normal call,
3646 otherwise try to get the result in TARGET, if convenient. */
3648 static rtx
3649 expand_builtin_strcspn (tree arglist, rtx target, enum machine_mode mode)
3651 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3652 return 0;
3653 else
3655 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3656 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3658 /* If both arguments are constants, evaluate at compile-time. */
3659 if (p1 && p2)
3661 const size_t r = strcspn (p1, p2);
3662 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3665 /* If the first argument is "", return 0. */
3666 if (p1 && *p1 == '\0')
3668 /* Evaluate and ignore argument s2 in case it has
3669 side-effects. */
3670 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3671 return const0_rtx;
3674 /* If the second argument is "", return __builtin_strlen(s1). */
3675 if (p2 && *p2 == '\0')
3677 tree newarglist = build_tree_list (NULL_TREE, s1),
3678 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3680 /* If the replacement _DECL isn't initialized, don't do the
3681 transformation. */
3682 if (!fn)
3683 return 0;
3685 return expand_expr (build_function_call_expr (fn, newarglist),
3686 target, mode, EXPAND_NORMAL);
3688 return 0;
3692 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3693 if that's convenient. */
3696 expand_builtin_saveregs (void)
3698 rtx val, seq;
3700 /* Don't do __builtin_saveregs more than once in a function.
3701 Save the result of the first call and reuse it. */
3702 if (saveregs_value != 0)
3703 return saveregs_value;
3705 /* When this function is called, it means that registers must be
3706 saved on entry to this function. So we migrate the call to the
3707 first insn of this function. */
3709 start_sequence ();
3711 #ifdef EXPAND_BUILTIN_SAVEREGS
3712 /* Do whatever the machine needs done in this case. */
3713 val = EXPAND_BUILTIN_SAVEREGS ();
3714 #else
3715 /* ??? We used to try and build up a call to the out of line function,
3716 guessing about what registers needed saving etc. This became much
3717 harder with __builtin_va_start, since we don't have a tree for a
3718 call to __builtin_saveregs to fall back on. There was exactly one
3719 port (i860) that used this code, and I'm unconvinced it could actually
3720 handle the general case. So we no longer try to handle anything
3721 weird and make the backend absorb the evil. */
3723 error ("__builtin_saveregs not supported by this target");
3724 val = const0_rtx;
3725 #endif
3727 seq = get_insns ();
3728 end_sequence ();
3730 saveregs_value = val;
3732 /* Put the insns after the NOTE that starts the function. If this
3733 is inside a start_sequence, make the outer-level insn chain current, so
3734 the code is placed at the start of the function. */
3735 push_topmost_sequence ();
3736 emit_insn_after (seq, get_insns ());
3737 pop_topmost_sequence ();
3739 return val;
3742 /* __builtin_args_info (N) returns word N of the arg space info
3743 for the current function. The number and meanings of words
3744 is controlled by the definition of CUMULATIVE_ARGS. */
3746 static rtx
3747 expand_builtin_args_info (tree arglist)
3749 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3750 int *word_ptr = (int *) &current_function_args_info;
3752 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
3753 abort ();
3755 if (arglist != 0)
3757 if (!host_integerp (TREE_VALUE (arglist), 0))
3758 error ("argument of `__builtin_args_info' must be constant");
3759 else
3761 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3763 if (wordnum < 0 || wordnum >= nwords)
3764 error ("argument of `__builtin_args_info' out of range");
3765 else
3766 return GEN_INT (word_ptr[wordnum]);
3769 else
3770 error ("missing argument in `__builtin_args_info'");
3772 return const0_rtx;
3775 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3777 static rtx
3778 expand_builtin_next_arg (tree arglist)
3780 tree fntype = TREE_TYPE (current_function_decl);
3782 if (TYPE_ARG_TYPES (fntype) == 0
3783 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3784 == void_type_node))
3786 error ("`va_start' used in function with fixed args");
3787 return const0_rtx;
3790 if (arglist)
3792 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
3793 tree arg = TREE_VALUE (arglist);
3795 /* Strip off all nops for the sake of the comparison. This
3796 is not quite the same as STRIP_NOPS. It does more.
3797 We must also strip off INDIRECT_EXPR for C++ reference
3798 parameters. */
3799 while (TREE_CODE (arg) == NOP_EXPR
3800 || TREE_CODE (arg) == CONVERT_EXPR
3801 || TREE_CODE (arg) == NON_LVALUE_EXPR
3802 || TREE_CODE (arg) == INDIRECT_REF)
3803 arg = TREE_OPERAND (arg, 0);
3804 if (arg != last_parm)
3805 warning ("second parameter of `va_start' not last named argument");
3807 else
3808 /* Evidently an out of date version of <stdarg.h>; can't validate
3809 va_start's second argument, but can still work as intended. */
3810 warning ("`__builtin_next_arg' called without an argument");
3812 return expand_binop (Pmode, add_optab,
3813 current_function_internal_arg_pointer,
3814 current_function_arg_offset_rtx,
3815 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3818 /* Make it easier for the backends by protecting the valist argument
3819 from multiple evaluations. */
3821 static tree
3822 stabilize_va_list (tree valist, int needs_lvalue)
3824 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3826 if (TREE_SIDE_EFFECTS (valist))
3827 valist = save_expr (valist);
3829 /* For this case, the backends will be expecting a pointer to
3830 TREE_TYPE (va_list_type_node), but it's possible we've
3831 actually been given an array (an actual va_list_type_node).
3832 So fix it. */
3833 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3835 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3836 tree p2 = build_pointer_type (va_list_type_node);
3838 valist = build1 (ADDR_EXPR, p2, valist);
3839 valist = fold (build1 (NOP_EXPR, p1, valist));
3842 else
3844 tree pt;
3846 if (! needs_lvalue)
3848 if (! TREE_SIDE_EFFECTS (valist))
3849 return valist;
3851 pt = build_pointer_type (va_list_type_node);
3852 valist = fold (build1 (ADDR_EXPR, pt, valist));
3853 TREE_SIDE_EFFECTS (valist) = 1;
3856 if (TREE_SIDE_EFFECTS (valist))
3857 valist = save_expr (valist);
3858 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3859 valist));
3862 return valist;
3865 /* The "standard" implementation of va_start: just assign `nextarg' to
3866 the variable. */
3868 void
3869 std_expand_builtin_va_start (tree valist, rtx nextarg)
3871 tree t;
3873 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3874 make_tree (ptr_type_node, nextarg));
3875 TREE_SIDE_EFFECTS (t) = 1;
3877 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3880 /* Expand ARGLIST, from a call to __builtin_va_start. */
3882 static rtx
3883 expand_builtin_va_start (tree arglist)
3885 rtx nextarg;
3886 tree chain, valist;
3888 chain = TREE_CHAIN (arglist);
3890 if (TREE_CHAIN (chain))
3891 error ("too many arguments to function `va_start'");
3893 nextarg = expand_builtin_next_arg (chain);
3894 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3896 #ifdef EXPAND_BUILTIN_VA_START
3897 EXPAND_BUILTIN_VA_START (valist, nextarg);
3898 #else
3899 std_expand_builtin_va_start (valist, nextarg);
3900 #endif
3902 return const0_rtx;
3905 /* The "standard" implementation of va_arg: read the value from the
3906 current (padded) address and increment by the (padded) size. */
3909 std_expand_builtin_va_arg (tree valist, tree type)
3911 tree addr_tree, t, type_size = NULL;
3912 tree align, alignm1;
3913 tree rounded_size;
3914 rtx addr;
3916 /* Compute the rounded size of the type. */
3917 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3918 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3919 if (type == error_mark_node
3920 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3921 || TREE_OVERFLOW (type_size))
3922 rounded_size = size_zero_node;
3923 else
3924 rounded_size = fold (build (MULT_EXPR, sizetype,
3925 fold (build (TRUNC_DIV_EXPR, sizetype,
3926 fold (build (PLUS_EXPR, sizetype,
3927 type_size, alignm1)),
3928 align)),
3929 align));
3931 /* Get AP. */
3932 addr_tree = valist;
3933 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3935 /* Small args are padded downward. */
3936 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3937 fold (build (COND_EXPR, sizetype,
3938 fold (build (GT_EXPR, sizetype,
3939 rounded_size,
3940 align)),
3941 size_zero_node,
3942 fold (build (MINUS_EXPR, sizetype,
3943 rounded_size,
3944 type_size))))));
3947 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3948 addr = copy_to_reg (addr);
3950 /* Compute new value for AP. */
3951 if (! integer_zerop (rounded_size))
3953 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3954 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3955 rounded_size));
3956 TREE_SIDE_EFFECTS (t) = 1;
3957 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3960 return addr;
3963 /* Expand __builtin_va_arg, which is not really a builtin function, but
3964 a very special sort of operator. */
3967 expand_builtin_va_arg (tree valist, tree type)
3969 rtx addr, result;
3970 tree promoted_type, want_va_type, have_va_type;
3972 /* Verify that valist is of the proper type. */
3974 want_va_type = va_list_type_node;
3975 have_va_type = TREE_TYPE (valist);
3976 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3978 /* If va_list is an array type, the argument may have decayed
3979 to a pointer type, e.g. by being passed to another function.
3980 In that case, unwrap both types so that we can compare the
3981 underlying records. */
3982 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3983 || TREE_CODE (have_va_type) == POINTER_TYPE)
3985 want_va_type = TREE_TYPE (want_va_type);
3986 have_va_type = TREE_TYPE (have_va_type);
3989 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3991 error ("first argument to `va_arg' not of type `va_list'");
3992 addr = const0_rtx;
3995 /* Generate a diagnostic for requesting data of a type that cannot
3996 be passed through `...' due to type promotion at the call site. */
3997 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3998 != type)
4000 const char *name = "<anonymous type>", *pname = 0;
4001 static bool gave_help;
4003 if (TYPE_NAME (type))
4005 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
4006 name = IDENTIFIER_POINTER (TYPE_NAME (type));
4007 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
4008 && DECL_NAME (TYPE_NAME (type)))
4009 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
4011 if (TYPE_NAME (promoted_type))
4013 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
4014 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
4015 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
4016 && DECL_NAME (TYPE_NAME (promoted_type)))
4017 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
4020 /* Unfortunately, this is merely undefined, rather than a constraint
4021 violation, so we cannot make this an error. If this call is never
4022 executed, the program is still strictly conforming. */
4023 warning ("`%s' is promoted to `%s' when passed through `...'",
4024 name, pname);
4025 if (! gave_help)
4027 gave_help = true;
4028 warning ("(so you should pass `%s' not `%s' to `va_arg')",
4029 pname, name);
4032 /* We can, however, treat "undefined" any way we please.
4033 Call abort to encourage the user to fix the program. */
4034 expand_builtin_trap ();
4036 /* This is dead code, but go ahead and finish so that the
4037 mode of the result comes out right. */
4038 addr = const0_rtx;
4040 else
4042 /* Make it easier for the backends by protecting the valist argument
4043 from multiple evaluations. */
4044 valist = stabilize_va_list (valist, 0);
4046 #ifdef EXPAND_BUILTIN_VA_ARG
4047 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
4048 #else
4049 addr = std_expand_builtin_va_arg (valist, type);
4050 #endif
4053 #ifdef POINTERS_EXTEND_UNSIGNED
4054 if (GET_MODE (addr) != Pmode)
4055 addr = convert_memory_address (Pmode, addr);
4056 #endif
4058 result = gen_rtx_MEM (TYPE_MODE (type), addr);
4059 set_mem_alias_set (result, get_varargs_alias_set ());
4061 return result;
4064 /* Expand ARGLIST, from a call to __builtin_va_end. */
4066 static rtx
4067 expand_builtin_va_end (tree arglist)
4069 tree valist = TREE_VALUE (arglist);
4071 #ifdef EXPAND_BUILTIN_VA_END
4072 valist = stabilize_va_list (valist, 0);
4073 EXPAND_BUILTIN_VA_END (arglist);
4074 #else
4075 /* Evaluate for side effects, if needed. I hate macros that don't
4076 do that. */
4077 if (TREE_SIDE_EFFECTS (valist))
4078 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4079 #endif
4081 return const0_rtx;
4084 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
4085 builtin rather than just as an assignment in stdarg.h because of the
4086 nastiness of array-type va_list types. */
4088 static rtx
4089 expand_builtin_va_copy (tree arglist)
4091 tree dst, src, t;
4093 dst = TREE_VALUE (arglist);
4094 src = TREE_VALUE (TREE_CHAIN (arglist));
4096 dst = stabilize_va_list (dst, 1);
4097 src = stabilize_va_list (src, 0);
4099 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4101 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
4102 TREE_SIDE_EFFECTS (t) = 1;
4103 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4105 else
4107 rtx dstb, srcb, size;
4109 /* Evaluate to pointers. */
4110 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4111 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4112 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4113 VOIDmode, EXPAND_NORMAL);
4115 #ifdef POINTERS_EXTEND_UNSIGNED
4116 if (GET_MODE (dstb) != Pmode)
4117 dstb = convert_memory_address (Pmode, dstb);
4119 if (GET_MODE (srcb) != Pmode)
4120 srcb = convert_memory_address (Pmode, srcb);
4121 #endif
4123 /* "Dereference" to BLKmode memories. */
4124 dstb = gen_rtx_MEM (BLKmode, dstb);
4125 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4126 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4127 srcb = gen_rtx_MEM (BLKmode, srcb);
4128 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4129 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4131 /* Copy. */
4132 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4135 return const0_rtx;
4138 /* Expand a call to one of the builtin functions __builtin_frame_address or
4139 __builtin_return_address. */
4141 static rtx
4142 expand_builtin_frame_address (tree fndecl, tree arglist)
4144 /* The argument must be a nonnegative integer constant.
4145 It counts the number of frames to scan up the stack.
4146 The value is the return address saved in that frame. */
4147 if (arglist == 0)
4148 /* Warning about missing arg was already issued. */
4149 return const0_rtx;
4150 else if (! host_integerp (TREE_VALUE (arglist), 1))
4152 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4153 error ("invalid arg to `__builtin_frame_address'");
4154 else
4155 error ("invalid arg to `__builtin_return_address'");
4156 return const0_rtx;
4158 else
4160 rtx tem
4161 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4162 tree_low_cst (TREE_VALUE (arglist), 1),
4163 hard_frame_pointer_rtx);
4165 /* Some ports cannot access arbitrary stack frames. */
4166 if (tem == NULL)
4168 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4169 warning ("unsupported arg to `__builtin_frame_address'");
4170 else
4171 warning ("unsupported arg to `__builtin_return_address'");
4172 return const0_rtx;
4175 /* For __builtin_frame_address, return what we've got. */
4176 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4177 return tem;
4179 if (GET_CODE (tem) != REG
4180 && ! CONSTANT_P (tem))
4181 tem = copy_to_mode_reg (Pmode, tem);
4182 return tem;
4186 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
4187 we failed and the caller should emit a normal call, otherwise try to get
4188 the result in TARGET, if convenient. */
4190 static rtx
4191 expand_builtin_alloca (tree arglist, rtx target)
4193 rtx op0;
4194 rtx result;
4196 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4197 return 0;
4199 /* Compute the argument. */
4200 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
4202 /* Allocate the desired space. */
4203 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4205 #ifdef POINTERS_EXTEND_UNSIGNED
4206 if (GET_MODE (result) != ptr_mode)
4207 result = convert_memory_address (ptr_mode, result);
4208 #endif
4210 return result;
4213 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
4214 Return 0 if a normal call should be emitted rather than expanding the
4215 function in-line. If convenient, the result should be placed in TARGET.
4216 SUBTARGET may be used as the target for computing one of EXP's operands. */
4218 static rtx
4219 expand_builtin_unop (enum machine_mode target_mode, tree arglist, rtx target,
4220 rtx subtarget, optab op_optab)
4222 rtx op0;
4223 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4224 return 0;
4226 /* Compute the argument. */
4227 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
4228 /* Compute op, into TARGET if possible.
4229 Set TARGET to wherever the result comes back. */
4230 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
4231 op_optab, op0, target, 1);
4232 if (target == 0)
4233 abort ();
4235 return convert_to_mode (target_mode, target, 0);
4238 /* If the string passed to fputs is a constant and is one character
4239 long, we attempt to transform this call into __builtin_fputc(). */
4241 static rtx
4242 expand_builtin_fputs (tree arglist, int ignore, int unlocked)
4244 tree len, fn;
4245 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
4246 : implicit_built_in_decls[BUILT_IN_FPUTC];
4247 tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
4248 : implicit_built_in_decls[BUILT_IN_FWRITE];
4250 /* If the return value is used, or the replacement _DECL isn't
4251 initialized, don't do the transformation. */
4252 if (!ignore || !fn_fputc || !fn_fwrite)
4253 return 0;
4255 /* Verify the arguments in the original call. */
4256 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4257 return 0;
4259 /* Get the length of the string passed to fputs. If the length
4260 can't be determined, punt. */
4261 if (!(len = c_strlen (TREE_VALUE (arglist), 1))
4262 || TREE_CODE (len) != INTEGER_CST)
4263 return 0;
4265 switch (compare_tree_int (len, 1))
4267 case -1: /* length is 0, delete the call entirely . */
4269 /* Evaluate and ignore the argument in case it has
4270 side-effects. */
4271 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
4272 VOIDmode, EXPAND_NORMAL);
4273 return const0_rtx;
4275 case 0: /* length is 1, call fputc. */
4277 const char *p = c_getstr (TREE_VALUE (arglist));
4279 if (p != NULL)
4281 /* New argument list transforming fputs(string, stream) to
4282 fputc(string[0], stream). */
4283 arglist =
4284 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
4285 arglist =
4286 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
4287 fn = fn_fputc;
4288 break;
4291 /* FALLTHROUGH */
4292 case 1: /* length is greater than 1, call fwrite. */
4294 tree string_arg;
4296 /* If optimizing for size keep fputs. */
4297 if (optimize_size)
4298 return 0;
4299 string_arg = TREE_VALUE (arglist);
4300 /* New argument list transforming fputs(string, stream) to
4301 fwrite(string, 1, len, stream). */
4302 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
4303 arglist = tree_cons (NULL_TREE, len, arglist);
4304 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
4305 arglist = tree_cons (NULL_TREE, string_arg, arglist);
4306 fn = fn_fwrite;
4307 break;
4309 default:
4310 abort ();
4313 return expand_expr (build_function_call_expr (fn, arglist),
4314 (ignore ? const0_rtx : NULL_RTX),
4315 VOIDmode, EXPAND_NORMAL);
4318 /* Expand a call to __builtin_expect. We return our argument and emit a
4319 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
4320 a non-jump context. */
4322 static rtx
4323 expand_builtin_expect (tree arglist, rtx target)
4325 tree exp, c;
4326 rtx note, rtx_c;
4328 if (arglist == NULL_TREE
4329 || TREE_CHAIN (arglist) == NULL_TREE)
4330 return const0_rtx;
4331 exp = TREE_VALUE (arglist);
4332 c = TREE_VALUE (TREE_CHAIN (arglist));
4334 if (TREE_CODE (c) != INTEGER_CST)
4336 error ("second arg to `__builtin_expect' must be a constant");
4337 c = integer_zero_node;
4340 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
4342 /* Don't bother with expected value notes for integral constants. */
4343 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
4345 /* We do need to force this into a register so that we can be
4346 moderately sure to be able to correctly interpret the branch
4347 condition later. */
4348 target = force_reg (GET_MODE (target), target);
4350 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
4352 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
4353 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
4356 return target;
4359 /* Like expand_builtin_expect, except do this in a jump context. This is
4360 called from do_jump if the conditional is a __builtin_expect. Return either
4361 a list of insns to emit the jump or NULL if we cannot optimize
4362 __builtin_expect. We need to optimize this at jump time so that machines
4363 like the PowerPC don't turn the test into a SCC operation, and then jump
4364 based on the test being 0/1. */
4367 expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
4369 tree arglist = TREE_OPERAND (exp, 1);
4370 tree arg0 = TREE_VALUE (arglist);
4371 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4372 rtx ret = NULL_RTX;
4374 /* Only handle __builtin_expect (test, 0) and
4375 __builtin_expect (test, 1). */
4376 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
4377 && (integer_zerop (arg1) || integer_onep (arg1)))
4379 int num_jumps = 0;
4380 rtx insn;
4382 /* If we fail to locate an appropriate conditional jump, we'll
4383 fall back to normal evaluation. Ensure that the expression
4384 can be re-evaluated. */
4385 switch (unsafe_for_reeval (arg0))
4387 case 0: /* Safe. */
4388 break;
4390 case 1: /* Mildly unsafe. */
4391 arg0 = unsave_expr (arg0);
4392 break;
4394 case 2: /* Wildly unsafe. */
4395 return NULL_RTX;
4398 /* Expand the jump insns. */
4399 start_sequence ();
4400 do_jump (arg0, if_false_label, if_true_label);
4401 ret = get_insns ();
4402 end_sequence ();
4404 /* Now that the __builtin_expect has been validated, go through and add
4405 the expect's to each of the conditional jumps. If we run into an
4406 error, just give up and generate the 'safe' code of doing a SCC
4407 operation and then doing a branch on that. */
4408 insn = ret;
4409 while (insn != NULL_RTX)
4411 rtx next = NEXT_INSN (insn);
4413 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn))
4415 rtx ifelse = SET_SRC (pc_set (insn));
4416 rtx label;
4417 int taken;
4419 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
4421 taken = 1;
4422 label = XEXP (XEXP (ifelse, 1), 0);
4424 /* An inverted jump reverses the probabilities. */
4425 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
4427 taken = 0;
4428 label = XEXP (XEXP (ifelse, 2), 0);
4430 /* We shouldn't have to worry about conditional returns during
4431 the expansion stage, but handle it gracefully anyway. */
4432 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
4434 taken = 1;
4435 label = NULL_RTX;
4437 /* An inverted return reverses the probabilities. */
4438 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
4440 taken = 0;
4441 label = NULL_RTX;
4443 else
4444 goto do_next_insn;
4446 /* If the test is expected to fail, reverse the
4447 probabilities. */
4448 if (integer_zerop (arg1))
4449 taken = 1 - taken;
4451 /* If we are jumping to the false label, reverse the
4452 probabilities. */
4453 if (label == NULL_RTX)
4454 ; /* conditional return */
4455 else if (label == if_false_label)
4456 taken = 1 - taken;
4457 else if (label != if_true_label)
4458 goto do_next_insn;
4460 num_jumps++;
4461 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
4464 do_next_insn:
4465 insn = next;
4468 /* If no jumps were modified, fail and do __builtin_expect the normal
4469 way. */
4470 if (num_jumps == 0)
4471 ret = NULL_RTX;
4474 return ret;
4477 void
4478 expand_builtin_trap (void)
4480 #ifdef HAVE_trap
4481 if (HAVE_trap)
4482 emit_insn (gen_trap ());
4483 else
4484 #endif
4485 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4486 emit_barrier ();
4489 /* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
4490 Return 0 if a normal call should be emitted rather than expanding
4491 the function inline. If convenient, the result should be placed
4492 in TARGET. SUBTARGET may be used as the target for computing
4493 the operand. */
4495 static rtx
4496 expand_builtin_fabs (tree arglist, rtx target, rtx subtarget)
4498 enum machine_mode mode;
4499 tree arg;
4500 rtx op0;
4502 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4503 return 0;
4505 arg = TREE_VALUE (arglist);
4506 mode = TYPE_MODE (TREE_TYPE (arg));
4507 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4508 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4511 /* Expand a call to cabs, cabsf or cabsl with arguments ARGLIST.
4512 Return 0 if a normal call should be emitted rather than expanding
4513 the function inline. If convenient, the result should be placed
4514 in target. */
4516 static rtx
4517 expand_builtin_cabs (tree arglist, rtx target)
4519 enum machine_mode mode;
4520 tree arg;
4521 rtx op0;
4523 if (arglist == 0 || TREE_CHAIN (arglist))
4524 return 0;
4525 arg = TREE_VALUE (arglist);
4526 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
4527 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
4528 return 0;
4530 mode = TYPE_MODE (TREE_TYPE (arg));
4531 op0 = expand_expr (arg, NULL_RTX, VOIDmode, 0);
4532 return expand_complex_abs (mode, op0, target, 0);
4535 /* Expand a call to sprintf with argument list ARGLIST. Return 0 if
4536 a normal call should be emitted rather than expanding the function
4537 inline. If convenient, the result should be placed in TARGET with
4538 mode MODE. */
4540 static rtx
4541 expand_builtin_sprintf (tree arglist, rtx target, enum machine_mode mode)
4543 tree orig_arglist, dest, fmt;
4544 const char *fmt_str;
4546 orig_arglist = arglist;
4548 /* Verify the required arguments in the original call. */
4549 if (! arglist)
4550 return 0;
4551 dest = TREE_VALUE (arglist);
4552 if (TREE_CODE (TREE_TYPE (dest)) != POINTER_TYPE)
4553 return 0;
4554 arglist = TREE_CHAIN (arglist);
4555 if (! arglist)
4556 return 0;
4557 fmt = TREE_VALUE (arglist);
4558 if (TREE_CODE (TREE_TYPE (dest)) != POINTER_TYPE)
4559 return 0;
4560 arglist = TREE_CHAIN (arglist);
4562 /* Check whether the format is a literal string constant. */
4563 fmt_str = c_getstr (fmt);
4564 if (fmt_str == NULL)
4565 return 0;
4567 /* If the format doesn't contain % args or %%, use strcpy. */
4568 if (strchr (fmt_str, '%') == 0)
4570 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4571 tree exp;
4573 if (arglist || ! fn)
4574 return 0;
4575 expand_expr (build_function_call_expr (fn, orig_arglist),
4576 const0_rtx, VOIDmode, EXPAND_NORMAL);
4577 if (target == const0_rtx)
4578 return const0_rtx;
4579 exp = build_int_2 (strlen (fmt_str), 0);
4580 exp = fold (build1 (NOP_EXPR, integer_type_node, exp));
4581 return expand_expr (exp, target, mode, EXPAND_NORMAL);
4583 /* If the format is "%s", use strcpy if the result isn't used. */
4584 else if (strcmp (fmt_str, "%s") == 0)
4586 tree fn, arg, len;
4587 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4589 if (! fn)
4590 return 0;
4592 if (! arglist || TREE_CHAIN (arglist))
4593 return 0;
4594 arg = TREE_VALUE (arglist);
4595 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
4596 return 0;
4598 if (target != const0_rtx)
4600 len = c_strlen (arg, 1);
4601 if (! len || TREE_CODE (len) != INTEGER_CST)
4602 return 0;
4604 else
4605 len = NULL_TREE;
4607 arglist = build_tree_list (NULL_TREE, arg);
4608 arglist = tree_cons (NULL_TREE, dest, arglist);
4609 expand_expr (build_function_call_expr (fn, arglist),
4610 const0_rtx, VOIDmode, EXPAND_NORMAL);
4612 if (target == const0_rtx)
4613 return const0_rtx;
4614 return expand_expr (len, target, mode, EXPAND_NORMAL);
4617 return 0;
4620 /* Expand an expression EXP that calls a built-in function,
4621 with result going to TARGET if that's convenient
4622 (and in mode MODE if that's convenient).
4623 SUBTARGET may be used as the target for computing one of EXP's operands.
4624 IGNORE is nonzero if the value is to be ignored. */
4627 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
4628 int ignore)
4630 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4631 tree arglist = TREE_OPERAND (exp, 1);
4632 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4633 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
4635 /* Perform postincrements before expanding builtin functions.  */
4636 emit_queue ();
4638 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4639 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
4641 /* When not optimizing, generate calls to library functions for a certain
4642 set of builtins. */
4643 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
4644 switch (fcode)
4646 case BUILT_IN_SQRT:
4647 case BUILT_IN_SQRTF:
4648 case BUILT_IN_SQRTL:
4649 case BUILT_IN_SIN:
4650 case BUILT_IN_SINF:
4651 case BUILT_IN_SINL:
4652 case BUILT_IN_COS:
4653 case BUILT_IN_COSF:
4654 case BUILT_IN_COSL:
4655 case BUILT_IN_EXP:
4656 case BUILT_IN_EXPF:
4657 case BUILT_IN_EXPL:
4658 case BUILT_IN_LOG:
4659 case BUILT_IN_LOGF:
4660 case BUILT_IN_LOGL:
4661 case BUILT_IN_TAN:
4662 case BUILT_IN_TANF:
4663 case BUILT_IN_TANL:
4664 case BUILT_IN_ATAN:
4665 case BUILT_IN_ATANF:
4666 case BUILT_IN_ATANL:
4667 case BUILT_IN_POW:
4668 case BUILT_IN_POWF:
4669 case BUILT_IN_POWL:
4670 case BUILT_IN_ATAN2:
4671 case BUILT_IN_ATAN2F:
4672 case BUILT_IN_ATAN2L:
4673 case BUILT_IN_MEMSET:
4674 case BUILT_IN_MEMCPY:
4675 case BUILT_IN_MEMCMP:
4676 case BUILT_IN_MEMPCPY:
4677 case BUILT_IN_MEMMOVE:
4678 case BUILT_IN_BCMP:
4679 case BUILT_IN_BZERO:
4680 case BUILT_IN_BCOPY:
4681 case BUILT_IN_INDEX:
4682 case BUILT_IN_RINDEX:
4683 case BUILT_IN_SPRINTF:
4684 case BUILT_IN_STPCPY:
4685 case BUILT_IN_STRCHR:
4686 case BUILT_IN_STRRCHR:
4687 case BUILT_IN_STRLEN:
4688 case BUILT_IN_STRCPY:
4689 case BUILT_IN_STRNCPY:
4690 case BUILT_IN_STRNCMP:
4691 case BUILT_IN_STRSTR:
4692 case BUILT_IN_STRPBRK:
4693 case BUILT_IN_STRCAT:
4694 case BUILT_IN_STRNCAT:
4695 case BUILT_IN_STRSPN:
4696 case BUILT_IN_STRCSPN:
4697 case BUILT_IN_STRCMP:
4698 case BUILT_IN_FFS:
4699 case BUILT_IN_PUTCHAR:
4700 case BUILT_IN_PUTS:
4701 case BUILT_IN_PRINTF:
4702 case BUILT_IN_FPUTC:
4703 case BUILT_IN_FPUTS:
4704 case BUILT_IN_FWRITE:
4705 case BUILT_IN_PUTCHAR_UNLOCKED:
4706 case BUILT_IN_PUTS_UNLOCKED:
4707 case BUILT_IN_PRINTF_UNLOCKED:
4708 case BUILT_IN_FPUTC_UNLOCKED:
4709 case BUILT_IN_FPUTS_UNLOCKED:
4710 case BUILT_IN_FWRITE_UNLOCKED:
4711 case BUILT_IN_FLOOR:
4712 case BUILT_IN_FLOORF:
4713 case BUILT_IN_FLOORL:
4714 case BUILT_IN_CEIL:
4715 case BUILT_IN_CEILF:
4716 case BUILT_IN_CEILL:
4717 case BUILT_IN_TRUNC:
4718 case BUILT_IN_TRUNCF:
4719 case BUILT_IN_TRUNCL:
4720 case BUILT_IN_ROUND:
4721 case BUILT_IN_ROUNDF:
4722 case BUILT_IN_ROUNDL:
4723 case BUILT_IN_NEARBYINT:
4724 case BUILT_IN_NEARBYINTF:
4725 case BUILT_IN_NEARBYINTL:
4726 return expand_call (exp, target, ignore);
4728 default:
4729 break;
4732 /* The built-in function expanders test for target == const0_rtx
4733 to determine whether the function's result will be ignored. */
4734 if (ignore)
4735 target = const0_rtx;
4737 /* If the result of a pure or const built-in function is ignored, and
4738 none of its arguments are volatile, we can avoid expanding the
4739 built-in call and just evaluate the arguments for side-effects. */
4740 if (target == const0_rtx
4741 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
4743 bool volatilep = false;
4744 tree arg;
4746 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4747 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
4749 volatilep = true;
4750 break;
4753 if (! volatilep)
4755 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4756 expand_expr (TREE_VALUE (arg), const0_rtx,
4757 VOIDmode, EXPAND_NORMAL);
4758 return const0_rtx;
4762 switch (fcode)
4764 case BUILT_IN_ABS:
4765 case BUILT_IN_LABS:
4766 case BUILT_IN_LLABS:
4767 case BUILT_IN_IMAXABS:
4768 /* build_function_call changes these into ABS_EXPR. */
4769 abort ();
4771 case BUILT_IN_FABS:
4772 case BUILT_IN_FABSF:
4773 case BUILT_IN_FABSL:
4774 target = expand_builtin_fabs (arglist, target, subtarget);
4775 if (target)
4776 return target;
4777 break;
4779 case BUILT_IN_CABS:
4780 case BUILT_IN_CABSF:
4781 case BUILT_IN_CABSL:
4782 if (flag_unsafe_math_optimizations)
4784 target = expand_builtin_cabs (arglist, target);
4785 if (target)
4786 return target;
4788 break;
4790 case BUILT_IN_CONJ:
4791 case BUILT_IN_CONJF:
4792 case BUILT_IN_CONJL:
4793 case BUILT_IN_CREAL:
4794 case BUILT_IN_CREALF:
4795 case BUILT_IN_CREALL:
4796 case BUILT_IN_CIMAG:
4797 case BUILT_IN_CIMAGF:
4798 case BUILT_IN_CIMAGL:
4799 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
4800 and IMAGPART_EXPR. */
4801 abort ();
4803 case BUILT_IN_SIN:
4804 case BUILT_IN_SINF:
4805 case BUILT_IN_SINL:
4806 case BUILT_IN_COS:
4807 case BUILT_IN_COSF:
4808 case BUILT_IN_COSL:
4809 case BUILT_IN_EXP:
4810 case BUILT_IN_EXPF:
4811 case BUILT_IN_EXPL:
4812 case BUILT_IN_LOG:
4813 case BUILT_IN_LOGF:
4814 case BUILT_IN_LOGL:
4815 case BUILT_IN_TAN:
4816 case BUILT_IN_TANF:
4817 case BUILT_IN_TANL:
4818 case BUILT_IN_ATAN:
4819 case BUILT_IN_ATANF:
4820 case BUILT_IN_ATANL:
4821 /* Treat these like sqrt only if unsafe math optimizations are allowed,
4822 because of possible accuracy problems. */
4823 if (! flag_unsafe_math_optimizations)
4824 break;
4825 case BUILT_IN_SQRT:
4826 case BUILT_IN_SQRTF:
4827 case BUILT_IN_SQRTL:
4828 case BUILT_IN_FLOOR:
4829 case BUILT_IN_FLOORF:
4830 case BUILT_IN_FLOORL:
4831 case BUILT_IN_CEIL:
4832 case BUILT_IN_CEILF:
4833 case BUILT_IN_CEILL:
4834 case BUILT_IN_TRUNC:
4835 case BUILT_IN_TRUNCF:
4836 case BUILT_IN_TRUNCL:
4837 case BUILT_IN_ROUND:
4838 case BUILT_IN_ROUNDF:
4839 case BUILT_IN_ROUNDL:
4840 case BUILT_IN_NEARBYINT:
4841 case BUILT_IN_NEARBYINTF:
4842 case BUILT_IN_NEARBYINTL:
4843 target = expand_builtin_mathfn (exp, target, subtarget);
4844 if (target)
4845 return target;
4846 break;
4848 case BUILT_IN_POW:
4849 case BUILT_IN_POWF:
4850 case BUILT_IN_POWL:
4851 if (! flag_unsafe_math_optimizations)
4852 break;
4853 target = expand_builtin_pow (exp, target, subtarget);
4854 if (target)
4855 return target;
4856 break;
4858 case BUILT_IN_ATAN2:
4859 case BUILT_IN_ATAN2F:
4860 case BUILT_IN_ATAN2L:
4861 if (! flag_unsafe_math_optimizations)
4862 break;
4863 target = expand_builtin_mathfn_2 (exp, target, subtarget);
4864 if (target)
4865 return target;
4866 break;
4868 case BUILT_IN_APPLY_ARGS:
4869 return expand_builtin_apply_args ();
4871 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
4872 FUNCTION with a copy of the parameters described by
4873 ARGUMENTS, and ARGSIZE. It returns a block of memory
4874 allocated on the stack into which is stored all the registers
4875 that might possibly be used for returning the result of a
4876 function. ARGUMENTS is the value returned by
4877 __builtin_apply_args. ARGSIZE is the number of bytes of
4878 arguments that must be copied. ??? How should this value be
4879 computed? We'll also need a safe worst case value for varargs
4880 functions. */
4881 case BUILT_IN_APPLY:
4882 if (!validate_arglist (arglist, POINTER_TYPE,
4883 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4884 && !validate_arglist (arglist, REFERENCE_TYPE,
4885 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4886 return const0_rtx;
4887 else
4889 int i;
4890 tree t;
4891 rtx ops[3];
4893 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
4894 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
4896 return expand_builtin_apply (ops[0], ops[1], ops[2]);
4899 /* __builtin_return (RESULT) causes the function to return the
4900 value described by RESULT. RESULT is address of the block of
4901 memory returned by __builtin_apply. */
4902 case BUILT_IN_RETURN:
4903 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4904 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
4905 NULL_RTX, VOIDmode, 0));
4906 return const0_rtx;
4908 case BUILT_IN_SAVEREGS:
4909 return expand_builtin_saveregs ();
4911 case BUILT_IN_ARGS_INFO:
4912 return expand_builtin_args_info (arglist);
4914 /* Return the address of the first anonymous stack arg. */
4915 case BUILT_IN_NEXT_ARG:
4916 return expand_builtin_next_arg (arglist);
4918 case BUILT_IN_CLASSIFY_TYPE:
4919 return expand_builtin_classify_type (arglist);
4921 case BUILT_IN_CONSTANT_P:
4922 return expand_builtin_constant_p (arglist, target_mode);
4924 case BUILT_IN_FRAME_ADDRESS:
4925 case BUILT_IN_RETURN_ADDRESS:
4926 return expand_builtin_frame_address (fndecl, arglist);
4928 /* Returns the address of the area where the structure is returned.
4929 0 otherwise. */
4930 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
4931 if (arglist != 0
4932 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
4933 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
4934 return const0_rtx;
4935 else
4936 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4938 case BUILT_IN_ALLOCA:
4939 target = expand_builtin_alloca (arglist, target);
4940 if (target)
4941 return target;
4942 break;
4944 case BUILT_IN_FFS:
4945 case BUILT_IN_FFSL:
4946 case BUILT_IN_FFSLL:
4947 target = expand_builtin_unop (target_mode, arglist, target,
4948 subtarget, ffs_optab);
4949 if (target)
4950 return target;
4951 break;
4953 case BUILT_IN_CLZ:
4954 case BUILT_IN_CLZL:
4955 case BUILT_IN_CLZLL:
4956 target = expand_builtin_unop (target_mode, arglist, target,
4957 subtarget, clz_optab);
4958 if (target)
4959 return target;
4960 break;
4962 case BUILT_IN_CTZ:
4963 case BUILT_IN_CTZL:
4964 case BUILT_IN_CTZLL:
4965 target = expand_builtin_unop (target_mode, arglist, target,
4966 subtarget, ctz_optab);
4967 if (target)
4968 return target;
4969 break;
4971 case BUILT_IN_POPCOUNT:
4972 case BUILT_IN_POPCOUNTL:
4973 case BUILT_IN_POPCOUNTLL:
4974 target = expand_builtin_unop (target_mode, arglist, target,
4975 subtarget, popcount_optab);
4976 if (target)
4977 return target;
4978 break;
4980 case BUILT_IN_PARITY:
4981 case BUILT_IN_PARITYL:
4982 case BUILT_IN_PARITYLL:
4983 target = expand_builtin_unop (target_mode, arglist, target,
4984 subtarget, parity_optab);
4985 if (target)
4986 return target;
4987 break;
4989 case BUILT_IN_STRLEN:
4990 target = expand_builtin_strlen (arglist, target, target_mode);
4991 if (target)
4992 return target;
4993 break;
4995 case BUILT_IN_STRCPY:
4996 target = expand_builtin_strcpy (arglist, target, mode);
4997 if (target)
4998 return target;
4999 break;
5001 case BUILT_IN_STRNCPY:
5002 target = expand_builtin_strncpy (arglist, target, mode);
5003 if (target)
5004 return target;
5005 break;
5007 case BUILT_IN_STPCPY:
5008 target = expand_builtin_stpcpy (arglist, target, mode);
5009 if (target)
5010 return target;
5011 break;
5013 case BUILT_IN_STRCAT:
5014 target = expand_builtin_strcat (arglist, target, mode);
5015 if (target)
5016 return target;
5017 break;
5019 case BUILT_IN_STRNCAT:
5020 target = expand_builtin_strncat (arglist, target, mode);
5021 if (target)
5022 return target;
5023 break;
5025 case BUILT_IN_STRSPN:
5026 target = expand_builtin_strspn (arglist, target, mode);
5027 if (target)
5028 return target;
5029 break;
5031 case BUILT_IN_STRCSPN:
5032 target = expand_builtin_strcspn (arglist, target, mode);
5033 if (target)
5034 return target;
5035 break;
5037 case BUILT_IN_STRSTR:
5038 target = expand_builtin_strstr (arglist, target, mode);
5039 if (target)
5040 return target;
5041 break;
5043 case BUILT_IN_STRPBRK:
5044 target = expand_builtin_strpbrk (arglist, target, mode);
5045 if (target)
5046 return target;
5047 break;
5049 case BUILT_IN_INDEX:
5050 case BUILT_IN_STRCHR:
5051 target = expand_builtin_strchr (arglist, target, mode);
5052 if (target)
5053 return target;
5054 break;
5056 case BUILT_IN_RINDEX:
5057 case BUILT_IN_STRRCHR:
5058 target = expand_builtin_strrchr (arglist, target, mode);
5059 if (target)
5060 return target;
5061 break;
5063 case BUILT_IN_MEMCPY:
5064 target = expand_builtin_memcpy (arglist, target, mode);
5065 if (target)
5066 return target;
5067 break;
5069 case BUILT_IN_MEMPCPY:
5070 target = expand_builtin_mempcpy (arglist, target, mode, /*endp=*/ 1);
5071 if (target)
5072 return target;
5073 break;
5075 case BUILT_IN_MEMMOVE:
5076 target = expand_builtin_memmove (arglist, target, mode);
5077 if (target)
5078 return target;
5079 break;
5081 case BUILT_IN_BCOPY:
5082 target = expand_builtin_bcopy (arglist);
5083 if (target)
5084 return target;
5085 break;
5087 case BUILT_IN_MEMSET:
5088 target = expand_builtin_memset (arglist, target, mode);
5089 if (target)
5090 return target;
5091 break;
5093 case BUILT_IN_BZERO:
5094 target = expand_builtin_bzero (arglist);
5095 if (target)
5096 return target;
5097 break;
5099 case BUILT_IN_STRCMP:
5100 target = expand_builtin_strcmp (exp, target, mode);
5101 if (target)
5102 return target;
5103 break;
5105 case BUILT_IN_STRNCMP:
5106 target = expand_builtin_strncmp (exp, target, mode);
5107 if (target)
5108 return target;
5109 break;
5111 case BUILT_IN_BCMP:
5112 case BUILT_IN_MEMCMP:
5113 target = expand_builtin_memcmp (exp, arglist, target, mode);
5114 if (target)
5115 return target;
5116 break;
5118 case BUILT_IN_SETJMP:
5119 target = expand_builtin_setjmp (arglist, target);
5120 if (target)
5121 return target;
5122 break;
5124 /* __builtin_longjmp is passed a pointer to an array of five words.
5125 It's similar to the C library longjmp function but works with
5126 __builtin_setjmp above. */
5127 case BUILT_IN_LONGJMP:
5128 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5129 break;
5130 else
5132 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
5133 VOIDmode, 0);
5134 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
5135 NULL_RTX, VOIDmode, 0);
5137 if (value != const1_rtx)
5139 error ("__builtin_longjmp second argument must be 1");
5140 return const0_rtx;
5143 expand_builtin_longjmp (buf_addr, value);
5144 return const0_rtx;
5147 case BUILT_IN_TRAP:
5148 expand_builtin_trap ();
5149 return const0_rtx;
5151 case BUILT_IN_FPUTS:
5152 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 0);
5153 if (target)
5154 return target;
5155 break;
5156 case BUILT_IN_FPUTS_UNLOCKED:
5157 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 1);
5158 if (target)
5159 return target;
5160 break;
5162 case BUILT_IN_SPRINTF:
5163 target = expand_builtin_sprintf (arglist, target, mode);
5164 if (target)
5165 return target;
5166 break;
5168 /* Various hooks for the DWARF 2 __throw routine. */
5169 case BUILT_IN_UNWIND_INIT:
5170 expand_builtin_unwind_init ();
5171 return const0_rtx;
5172 case BUILT_IN_DWARF_CFA:
5173 return virtual_cfa_rtx;
5174 #ifdef DWARF2_UNWIND_INFO
5175 case BUILT_IN_DWARF_SP_COLUMN:
5176 return expand_builtin_dwarf_sp_column ();
5177 case BUILT_IN_INIT_DWARF_REG_SIZES:
5178 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
5179 return const0_rtx;
5180 #endif
5181 case BUILT_IN_FROB_RETURN_ADDR:
5182 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
5183 case BUILT_IN_EXTRACT_RETURN_ADDR:
5184 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
5185 case BUILT_IN_EH_RETURN:
5186 expand_builtin_eh_return (TREE_VALUE (arglist),
5187 TREE_VALUE (TREE_CHAIN (arglist)));
5188 return const0_rtx;
5189 #ifdef EH_RETURN_DATA_REGNO
5190 case BUILT_IN_EH_RETURN_DATA_REGNO:
5191 return expand_builtin_eh_return_data_regno (arglist);
5192 #endif
5193 case BUILT_IN_VA_START:
5194 case BUILT_IN_STDARG_START:
5195 return expand_builtin_va_start (arglist);
5196 case BUILT_IN_VA_END:
5197 return expand_builtin_va_end (arglist);
5198 case BUILT_IN_VA_COPY:
5199 return expand_builtin_va_copy (arglist);
5200 case BUILT_IN_EXPECT:
5201 return expand_builtin_expect (arglist, target);
5202 case BUILT_IN_PREFETCH:
5203 expand_builtin_prefetch (arglist);
5204 return const0_rtx;
5207 default: /* just do library call, if unknown builtin */
5208 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
5209 error ("built-in function `%s' not currently supported",
5210 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5213 /* The switch statement above can drop through to cause the function
5214 to be called normally. */
5215 return expand_call (exp, target, ignore);
5218 /* Determine whether a tree node represents a call to a built-in
5219 math function. If the tree T is a call to a built-in function
5220 taking a single real argument, then the return value is the
5221 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
5222 the return value is END_BUILTINS. */
5224 enum built_in_function
5225 builtin_mathfn_code (tree t)
5227 tree fndecl, arglist;
5229 if (TREE_CODE (t) != CALL_EXPR
5230 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
5231 return END_BUILTINS;
5233 fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
5234 if (TREE_CODE (fndecl) != FUNCTION_DECL
5235 || ! DECL_BUILT_IN (fndecl)
5236 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5237 return END_BUILTINS;
5239 arglist = TREE_OPERAND (t, 1);
5240 if (! arglist
5241 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5242 return END_BUILTINS;
5244 arglist = TREE_CHAIN (arglist);
5245 switch (DECL_FUNCTION_CODE (fndecl))
5247 case BUILT_IN_POW:
5248 case BUILT_IN_POWF:
5249 case BUILT_IN_POWL:
5250 case BUILT_IN_ATAN2:
5251 case BUILT_IN_ATAN2F:
5252 case BUILT_IN_ATAN2L:
5253 if (! arglist
5254 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
5255 || TREE_CHAIN (arglist))
5256 return END_BUILTINS;
5257 break;
5259 default:
5260 if (arglist)
5261 return END_BUILTINS;
5262 break;
5265 return DECL_FUNCTION_CODE (fndecl);
5268 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
5269 constant. ARGLIST is the argument list of the call. */
5271 static tree
5272 fold_builtin_constant_p (tree arglist)
5274 if (arglist == 0)
5275 return 0;
5277 arglist = TREE_VALUE (arglist);
5279 /* We return 1 for a numeric type that's known to be a constant
5280 value at compile-time or for an aggregate type that's a
5281 literal constant. */
5282 STRIP_NOPS (arglist);
5284 /* If we know this is a constant, emit the constant of one. */
5285 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
5286 || (TREE_CODE (arglist) == CONSTRUCTOR
5287 && TREE_CONSTANT (arglist))
5288 || (TREE_CODE (arglist) == ADDR_EXPR
5289 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
5290 return integer_one_node;
5292 /* If we aren't going to be running CSE or this expression
5293 has side effects, show we don't know it to be a constant.
5294 Likewise if it's a pointer or aggregate type since in those
5295 case we only want literals, since those are only optimized
5296 when generating RTL, not later.
5297 And finally, if we are compiling an initializer, not code, we
5298 need to return a definite result now; there's not going to be any
5299 more optimization done. */
5300 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
5301 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
5302 || POINTER_TYPE_P (TREE_TYPE (arglist))
5303 || cfun == 0)
5304 return integer_zero_node;
5306 return 0;
5309 /* Fold a call to __builtin_classify_type. */
5311 static tree
5312 fold_builtin_classify_type (tree arglist)
5314 if (arglist == 0)
5315 return build_int_2 (no_type_class, 0);
5317 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
5320 /* Fold a call to __builtin_inf or __builtin_huge_val. */
5322 static tree
5323 fold_builtin_inf (tree type, int warn)
5325 REAL_VALUE_TYPE real;
5327 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
5328 warning ("target format does not support infinity");
5330 real_inf (&real);
5331 return build_real (type, real);
5334 /* Fold a call to __builtin_nan or __builtin_nans. */
5336 static tree
5337 fold_builtin_nan (tree arglist, tree type, int quiet)
5339 REAL_VALUE_TYPE real;
5340 const char *str;
5342 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5343 return 0;
5344 str = c_getstr (TREE_VALUE (arglist));
5345 if (!str)
5346 return 0;
5348 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
5349 return 0;
5351 return build_real (type, real);
5354 /* Return true if the floating point expression T has an integer value.
5355 We also allow +Inf, -Inf and NaN to be considered integer values. */
5357 static bool
5358 integer_valued_real_p (tree t)
5360 switch (TREE_CODE (t))
5362 case FLOAT_EXPR:
5363 return true;
5365 case ABS_EXPR:
5366 case SAVE_EXPR:
5367 case NON_LVALUE_EXPR:
5368 return integer_valued_real_p (TREE_OPERAND (t, 0));
5370 case COMPOUND_EXPR:
5371 case MODIFY_EXPR:
5372 case BIND_EXPR:
5373 return integer_valued_real_p (TREE_OPERAND (t, 1));
5375 case PLUS_EXPR:
5376 case MINUS_EXPR:
5377 case MULT_EXPR:
5378 case MIN_EXPR:
5379 case MAX_EXPR:
5380 return integer_valued_real_p (TREE_OPERAND (t, 0))
5381 && integer_valued_real_p (TREE_OPERAND (t, 1));
5383 case COND_EXPR:
5384 return integer_valued_real_p (TREE_OPERAND (t, 1))
5385 && integer_valued_real_p (TREE_OPERAND (t, 2));
5387 case REAL_CST:
5388 if (! TREE_CONSTANT_OVERFLOW (t))
5390 REAL_VALUE_TYPE c, cint;
5392 c = TREE_REAL_CST (t);
5393 real_trunc (&cint, TYPE_MODE (TREE_TYPE (t)), &c);
5394 return real_identical (&c, &cint);
5397 case NOP_EXPR:
5399 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
5400 if (TREE_CODE (type) == INTEGER_TYPE)
5401 return true;
5402 if (TREE_CODE (type) == REAL_TYPE)
5403 return integer_valued_real_p (TREE_OPERAND (t, 0));
5404 break;
5407 case CALL_EXPR:
5408 switch (builtin_mathfn_code (t))
5410 case BUILT_IN_CEIL:
5411 case BUILT_IN_CEILF:
5412 case BUILT_IN_CEILL:
5413 case BUILT_IN_FLOOR:
5414 case BUILT_IN_FLOORF:
5415 case BUILT_IN_FLOORL:
5416 case BUILT_IN_NEARBYINT:
5417 case BUILT_IN_NEARBYINTF:
5418 case BUILT_IN_NEARBYINTL:
5419 case BUILT_IN_ROUND:
5420 case BUILT_IN_ROUNDF:
5421 case BUILT_IN_ROUNDL:
5422 case BUILT_IN_TRUNC:
5423 case BUILT_IN_TRUNCF:
5424 case BUILT_IN_TRUNCL:
5425 return true;
5427 default:
5428 break;
5430 break;
5432 default:
5433 break;
5435 return false;
5438 /* EXP is assumed to be builtin call where truncation can be propagated
5439 across (for instance floor((double)f) == (double)floorf (f).
5440 Do the transformation. */
5442 static tree
5443 fold_trunc_transparent_mathfn (tree exp)
5445 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5446 tree arglist = TREE_OPERAND (exp, 1);
5447 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5448 tree arg;
5450 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5451 return 0;
5453 arg = TREE_VALUE (arglist);
5454 /* Integer rounding functions are idempotent. */
5455 if (fcode == builtin_mathfn_code (arg))
5456 return arg;
5458 /* If argument is already integer valued, and we don't need to worry
5459 about setting errno, there's no need to perform rounding. */
5460 if (! flag_errno_math && integer_valued_real_p (arg))
5461 return arg;
5463 if (optimize)
5465 tree arg0 = strip_float_extensions (arg);
5466 tree ftype = TREE_TYPE (exp);
5467 tree newtype = TREE_TYPE (arg0);
5468 tree decl;
5470 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
5471 && (decl = mathfn_built_in (newtype, fcode)))
5473 arglist =
5474 build_tree_list (NULL_TREE, fold (convert (newtype, arg0)));
5475 return convert (ftype,
5476 build_function_call_expr (decl, arglist));
5479 return 0;
5482 /* Fold function call to builtin cabs, cabsf or cabsl. FNDECL is the
5483 function's DECL, ARGLIST is the argument list and TYPE is the return
5484 type. Return NULL_TREE if no simplification can be made. */
5486 static tree
5487 fold_builtin_cabs (tree fndecl, tree arglist, tree type)
5489 tree arg;
5491 if (!arglist || TREE_CHAIN (arglist))
5492 return NULL_TREE;
5494 arg = TREE_VALUE (arglist);
5495 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
5496 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
5497 return NULL_TREE;
5499 /* Evaluate cabs of a constant at compile-time. */
5500 if (flag_unsafe_math_optimizations
5501 && TREE_CODE (arg) == COMPLEX_CST
5502 && TREE_CODE (TREE_REALPART (arg)) == REAL_CST
5503 && TREE_CODE (TREE_IMAGPART (arg)) == REAL_CST
5504 && ! TREE_CONSTANT_OVERFLOW (TREE_REALPART (arg))
5505 && ! TREE_CONSTANT_OVERFLOW (TREE_IMAGPART (arg)))
5507 REAL_VALUE_TYPE r, i;
5509 r = TREE_REAL_CST (TREE_REALPART (arg));
5510 i = TREE_REAL_CST (TREE_IMAGPART (arg));
5512 real_arithmetic (&r, MULT_EXPR, &r, &r);
5513 real_arithmetic (&i, MULT_EXPR, &i, &i);
5514 real_arithmetic (&r, PLUS_EXPR, &r, &i);
5515 if (real_sqrt (&r, TYPE_MODE (type), &r)
5516 || ! flag_trapping_math)
5517 return build_real (type, r);
5520 /* If either part is zero, cabs is fabs of the other. */
5521 if (TREE_CODE (arg) == COMPLEX_EXPR
5522 && real_zerop (TREE_OPERAND (arg, 0)))
5523 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1)));
5524 if (TREE_CODE (arg) == COMPLEX_EXPR
5525 && real_zerop (TREE_OPERAND (arg, 1)))
5526 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0)));
5528 if (flag_unsafe_math_optimizations)
5530 enum built_in_function fcode;
5531 tree sqrtfn;
5533 fcode = DECL_FUNCTION_CODE (fndecl);
5534 if (fcode == BUILT_IN_CABS)
5535 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
5536 else if (fcode == BUILT_IN_CABSF)
5537 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
5538 else if (fcode == BUILT_IN_CABSL)
5539 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
5540 else
5541 sqrtfn = NULL_TREE;
5543 if (sqrtfn != NULL_TREE)
5545 tree rpart, ipart, result, arglist;
5547 rpart = fold (build1 (REALPART_EXPR, type, arg));
5548 ipart = fold (build1 (IMAGPART_EXPR, type, arg));
5550 rpart = save_expr (rpart);
5551 ipart = save_expr (ipart);
5553 result = fold (build (PLUS_EXPR, type,
5554 fold (build (MULT_EXPR, type,
5555 rpart, rpart)),
5556 fold (build (MULT_EXPR, type,
5557 ipart, ipart))));
5559 arglist = build_tree_list (NULL_TREE, result);
5560 return build_function_call_expr (sqrtfn, arglist);
5564 return NULL_TREE;
5567 /* Fold function call to builtin trunc, truncf or truncl. Return
5568 NULL_TREE if no simplification can be made. */
5570 static tree
5571 fold_builtin_trunc (tree exp)
5573 tree arglist = TREE_OPERAND (exp, 1);
5574 tree arg;
5576 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5577 return 0;
5579 /* Optimize trunc of constant value. */
5580 arg = TREE_VALUE (arglist);
5581 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
5583 REAL_VALUE_TYPE r, x;
5584 tree type = TREE_TYPE (exp);
5586 x = TREE_REAL_CST (arg);
5587 real_trunc (&r, TYPE_MODE (type), &x);
5588 return build_real (type, r);
5591 return fold_trunc_transparent_mathfn (exp);
5594 /* Fold function call to builtin floor, floorf or floorl. Return
5595 NULL_TREE if no simplification can be made. */
5597 static tree
5598 fold_builtin_floor (tree exp)
5600 tree arglist = TREE_OPERAND (exp, 1);
5601 tree arg;
5603 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5604 return 0;
5606 /* Optimize floor of constant value. */
5607 arg = TREE_VALUE (arglist);
5608 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
5610 REAL_VALUE_TYPE x;
5612 x = TREE_REAL_CST (arg);
5613 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
5615 tree type = TREE_TYPE (exp);
5616 REAL_VALUE_TYPE r;
5618 real_floor (&r, TYPE_MODE (type), &x);
5619 return build_real (type, r);
5623 return fold_trunc_transparent_mathfn (exp);
5626 /* Fold function call to builtin ceil, ceilf or ceill. Return
5627 NULL_TREE if no simplification can be made. */
5629 static tree
5630 fold_builtin_ceil (tree exp)
5632 tree arglist = TREE_OPERAND (exp, 1);
5633 tree arg;
5635 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5636 return 0;
5638 /* Optimize ceil of constant value. */
5639 arg = TREE_VALUE (arglist);
5640 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
5642 REAL_VALUE_TYPE x;
5644 x = TREE_REAL_CST (arg);
5645 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
5647 tree type = TREE_TYPE (exp);
5648 REAL_VALUE_TYPE r;
5650 real_ceil (&r, TYPE_MODE (type), &x);
5651 return build_real (type, r);
5655 return fold_trunc_transparent_mathfn (exp);
5658 /* Used by constant folding to eliminate some builtin calls early. EXP is
5659 the CALL_EXPR of a call to a builtin function. */
5661 tree
5662 fold_builtin (tree exp)
5664 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5665 tree arglist = TREE_OPERAND (exp, 1);
5666 tree type = TREE_TYPE (TREE_TYPE (fndecl));
5668 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5669 return 0;
5671 switch (DECL_FUNCTION_CODE (fndecl))
5673 case BUILT_IN_CONSTANT_P:
5674 return fold_builtin_constant_p (arglist);
5676 case BUILT_IN_CLASSIFY_TYPE:
5677 return fold_builtin_classify_type (arglist);
5679 case BUILT_IN_STRLEN:
5680 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5682 tree len = c_strlen (TREE_VALUE (arglist), 0);
5683 if (len)
5685 /* Convert from the internal "sizetype" type to "size_t". */
5686 if (size_type_node)
5687 len = convert (size_type_node, len);
5688 return len;
5691 break;
5693 case BUILT_IN_FABS:
5694 case BUILT_IN_FABSF:
5695 case BUILT_IN_FABSL:
5696 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5697 return fold (build1 (ABS_EXPR, type, TREE_VALUE (arglist)));
5698 break;
5700 case BUILT_IN_CABS:
5701 case BUILT_IN_CABSF:
5702 case BUILT_IN_CABSL:
5703 return fold_builtin_cabs (fndecl, arglist, type);
5705 case BUILT_IN_SQRT:
5706 case BUILT_IN_SQRTF:
5707 case BUILT_IN_SQRTL:
5708 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5710 enum built_in_function fcode;
5711 tree arg = TREE_VALUE (arglist);
5713 /* Optimize sqrt of constant value. */
5714 if (TREE_CODE (arg) == REAL_CST
5715 && ! TREE_CONSTANT_OVERFLOW (arg))
5717 REAL_VALUE_TYPE r, x;
5719 x = TREE_REAL_CST (arg);
5720 if (real_sqrt (&r, TYPE_MODE (type), &x)
5721 || (!flag_trapping_math && !flag_errno_math))
5722 return build_real (type, r);
5725 /* Optimize sqrt(exp(x)) = exp(x*0.5). */
5726 fcode = builtin_mathfn_code (arg);
5727 if (flag_unsafe_math_optimizations
5728 && (fcode == BUILT_IN_EXP
5729 || fcode == BUILT_IN_EXPF
5730 || fcode == BUILT_IN_EXPL))
5732 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5733 arg = fold (build (MULT_EXPR, type,
5734 TREE_VALUE (TREE_OPERAND (arg, 1)),
5735 build_real (type, dconsthalf)));
5736 arglist = build_tree_list (NULL_TREE, arg);
5737 return build_function_call_expr (expfn, arglist);
5740 /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5). */
5741 if (flag_unsafe_math_optimizations
5742 && (fcode == BUILT_IN_POW
5743 || fcode == BUILT_IN_POWF
5744 || fcode == BUILT_IN_POWL))
5746 tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5747 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5748 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5749 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5750 build_real (type, dconsthalf)));
5751 arglist = tree_cons (NULL_TREE, arg0,
5752 build_tree_list (NULL_TREE, narg1));
5753 return build_function_call_expr (powfn, arglist);
5756 break;
5758 case BUILT_IN_SIN:
5759 case BUILT_IN_SINF:
5760 case BUILT_IN_SINL:
5761 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5763 tree arg = TREE_VALUE (arglist);
5765 /* Optimize sin(0.0) = 0.0. */
5766 if (real_zerop (arg))
5767 return arg;
5769 break;
5771 case BUILT_IN_COS:
5772 case BUILT_IN_COSF:
5773 case BUILT_IN_COSL:
5774 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5776 tree arg = TREE_VALUE (arglist);
5778 /* Optimize cos(0.0) = 1.0. */
5779 if (real_zerop (arg))
5780 return build_real (type, dconst1);
5782 /* Optimize cos(-x) into cos(x). */
5783 if (TREE_CODE (arg) == NEGATE_EXPR)
5785 tree arglist = build_tree_list (NULL_TREE,
5786 TREE_OPERAND (arg, 0));
5787 return build_function_call_expr (fndecl, arglist);
5790 break;
5792 case BUILT_IN_EXP:
5793 case BUILT_IN_EXPF:
5794 case BUILT_IN_EXPL:
5795 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5797 enum built_in_function fcode;
5798 tree arg = TREE_VALUE (arglist);
5800 /* Optimize exp(0.0) = 1.0. */
5801 if (real_zerop (arg))
5802 return build_real (type, dconst1);
5804 /* Optimize exp(1.0) = e. */
5805 if (real_onep (arg))
5807 REAL_VALUE_TYPE cst;
5809 if (! builtin_dconsts_init)
5810 init_builtin_dconsts ();
5811 real_convert (&cst, TYPE_MODE (type), &dconste);
5812 return build_real (type, cst);
5815 /* Attempt to evaluate exp at compile-time. */
5816 if (flag_unsafe_math_optimizations
5817 && TREE_CODE (arg) == REAL_CST
5818 && ! TREE_CONSTANT_OVERFLOW (arg))
5820 REAL_VALUE_TYPE cint;
5821 REAL_VALUE_TYPE c;
5822 HOST_WIDE_INT n;
5824 c = TREE_REAL_CST (arg);
5825 n = real_to_integer (&c);
5826 real_from_integer (&cint, VOIDmode, n,
5827 n < 0 ? -1 : 0, 0);
5828 if (real_identical (&c, &cint))
5830 REAL_VALUE_TYPE x;
5832 if (! builtin_dconsts_init)
5833 init_builtin_dconsts ();
5834 real_powi (&x, TYPE_MODE (type), &dconste, n);
5835 return build_real (type, x);
5839 /* Optimize exp(log(x)) = x. */
5840 fcode = builtin_mathfn_code (arg);
5841 if (flag_unsafe_math_optimizations
5842 && (fcode == BUILT_IN_LOG
5843 || fcode == BUILT_IN_LOGF
5844 || fcode == BUILT_IN_LOGL))
5845 return TREE_VALUE (TREE_OPERAND (arg, 1));
5847 break;
5849 case BUILT_IN_LOG:
5850 case BUILT_IN_LOGF:
5851 case BUILT_IN_LOGL:
5852 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5854 enum built_in_function fcode;
5855 tree arg = TREE_VALUE (arglist);
5857 /* Optimize log(1.0) = 0.0. */
5858 if (real_onep (arg))
5859 return build_real (type, dconst0);
5861 /* Optimize log(exp(x)) = x. */
5862 fcode = builtin_mathfn_code (arg);
5863 if (flag_unsafe_math_optimizations
5864 && (fcode == BUILT_IN_EXP
5865 || fcode == BUILT_IN_EXPF
5866 || fcode == BUILT_IN_EXPL))
5867 return TREE_VALUE (TREE_OPERAND (arg, 1));
5869 /* Optimize log(sqrt(x)) = log(x)*0.5. */
5870 if (flag_unsafe_math_optimizations
5871 && (fcode == BUILT_IN_SQRT
5872 || fcode == BUILT_IN_SQRTF
5873 || fcode == BUILT_IN_SQRTL))
5875 tree logfn = build_function_call_expr (fndecl,
5876 TREE_OPERAND (arg, 1));
5877 return fold (build (MULT_EXPR, type, logfn,
5878 build_real (type, dconsthalf)));
5881 /* Optimize log(pow(x,y)) = y*log(x). */
5882 if (flag_unsafe_math_optimizations
5883 && (fcode == BUILT_IN_POW
5884 || fcode == BUILT_IN_POWF
5885 || fcode == BUILT_IN_POWL))
5887 tree arg0, arg1, logfn;
5889 arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5890 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5891 arglist = build_tree_list (NULL_TREE, arg0);
5892 logfn = build_function_call_expr (fndecl, arglist);
5893 return fold (build (MULT_EXPR, type, arg1, logfn));
5896 break;
5898 case BUILT_IN_TAN:
5899 case BUILT_IN_TANF:
5900 case BUILT_IN_TANL:
5901 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5903 enum built_in_function fcode;
5904 tree arg = TREE_VALUE (arglist);
5906 /* Optimize tan(0.0) = 0.0. */
5907 if (real_zerop (arg))
5908 return arg;
5910 /* Optimize tan(atan(x)) = x. */
5911 fcode = builtin_mathfn_code (arg);
5912 if (flag_unsafe_math_optimizations
5913 && (fcode == BUILT_IN_ATAN
5914 || fcode == BUILT_IN_ATANF
5915 || fcode == BUILT_IN_ATANL))
5916 return TREE_VALUE (TREE_OPERAND (arg, 1));
5918 break;
5920 case BUILT_IN_ATAN:
5921 case BUILT_IN_ATANF:
5922 case BUILT_IN_ATANL:
5923 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5925 tree arg = TREE_VALUE (arglist);
5927 /* Optimize atan(0.0) = 0.0. */
5928 if (real_zerop (arg))
5929 return arg;
5931 /* Optimize atan(1.0) = pi/4. */
5932 if (real_onep (arg))
5934 REAL_VALUE_TYPE cst;
5936 if (! builtin_dconsts_init)
5937 init_builtin_dconsts ();
5938 real_convert (&cst, TYPE_MODE (type), &dconstpi);
5939 cst.exp -= 2;
5940 return build_real (type, cst);
5943 break;
5945 case BUILT_IN_POW:
5946 case BUILT_IN_POWF:
5947 case BUILT_IN_POWL:
5948 if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5950 enum built_in_function fcode;
5951 tree arg0 = TREE_VALUE (arglist);
5952 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5954 /* Optimize pow(1.0,y) = 1.0. */
5955 if (real_onep (arg0))
5956 return omit_one_operand (type, build_real (type, dconst1), arg1);
5958 if (TREE_CODE (arg1) == REAL_CST
5959 && ! TREE_CONSTANT_OVERFLOW (arg1))
5961 REAL_VALUE_TYPE c;
5962 c = TREE_REAL_CST (arg1);
5964 /* Optimize pow(x,0.0) = 1.0. */
5965 if (REAL_VALUES_EQUAL (c, dconst0))
5966 return omit_one_operand (type, build_real (type, dconst1),
5967 arg0);
5969 /* Optimize pow(x,1.0) = x. */
5970 if (REAL_VALUES_EQUAL (c, dconst1))
5971 return arg0;
5973 /* Optimize pow(x,-1.0) = 1.0/x. */
5974 if (REAL_VALUES_EQUAL (c, dconstm1))
5975 return fold (build (RDIV_EXPR, type,
5976 build_real (type, dconst1),
5977 arg0));
5979 /* Optimize pow(x,2.0) = x*x. */
5980 if (REAL_VALUES_EQUAL (c, dconst2)
5981 && (*lang_hooks.decls.global_bindings_p) () == 0
5982 && ! CONTAINS_PLACEHOLDER_P (arg0))
5984 arg0 = save_expr (arg0);
5985 return fold (build (MULT_EXPR, type, arg0, arg0));
5988 /* Optimize pow(x,-2.0) = 1.0/(x*x). */
5989 if (flag_unsafe_math_optimizations
5990 && REAL_VALUES_EQUAL (c, dconstm2)
5991 && (*lang_hooks.decls.global_bindings_p) () == 0
5992 && ! CONTAINS_PLACEHOLDER_P (arg0))
5994 arg0 = save_expr (arg0);
5995 return fold (build (RDIV_EXPR, type,
5996 build_real (type, dconst1),
5997 fold (build (MULT_EXPR, type,
5998 arg0, arg0))));
6001 /* Optimize pow(x,0.5) = sqrt(x). */
6002 if (flag_unsafe_math_optimizations
6003 && REAL_VALUES_EQUAL (c, dconsthalf))
6005 tree sqrtfn;
6007 fcode = DECL_FUNCTION_CODE (fndecl);
6008 if (fcode == BUILT_IN_POW)
6009 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
6010 else if (fcode == BUILT_IN_POWF)
6011 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
6012 else if (fcode == BUILT_IN_POWL)
6013 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
6014 else
6015 sqrtfn = NULL_TREE;
6017 if (sqrtfn != NULL_TREE)
6019 tree arglist = build_tree_list (NULL_TREE, arg0);
6020 return build_function_call_expr (sqrtfn, arglist);
6024 /* Attempt to evaluate pow at compile-time. */
6025 if (TREE_CODE (arg0) == REAL_CST
6026 && ! TREE_CONSTANT_OVERFLOW (arg0))
6028 REAL_VALUE_TYPE cint;
6029 HOST_WIDE_INT n;
6031 n = real_to_integer (&c);
6032 real_from_integer (&cint, VOIDmode, n,
6033 n < 0 ? -1 : 0, 0);
6034 if (real_identical (&c, &cint))
6036 REAL_VALUE_TYPE x;
6037 bool inexact;
6039 x = TREE_REAL_CST (arg0);
6040 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
6041 if (flag_unsafe_math_optimizations || !inexact)
6042 return build_real (type, x);
6047 /* Optimize pow(exp(x),y) = exp(x*y). */
6048 fcode = builtin_mathfn_code (arg0);
6049 if (flag_unsafe_math_optimizations
6050 && (fcode == BUILT_IN_EXP
6051 || fcode == BUILT_IN_EXPF
6052 || fcode == BUILT_IN_EXPL))
6054 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6055 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
6056 arg = fold (build (MULT_EXPR, type, arg, arg1));
6057 arglist = build_tree_list (NULL_TREE, arg);
6058 return build_function_call_expr (expfn, arglist);
6061 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
6062 if (flag_unsafe_math_optimizations
6063 && (fcode == BUILT_IN_SQRT
6064 || fcode == BUILT_IN_SQRTF
6065 || fcode == BUILT_IN_SQRTL))
6067 tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6068 tree narg1 = fold (build (MULT_EXPR, type, arg1,
6069 build_real (type, dconsthalf)));
6071 arglist = tree_cons (NULL_TREE, narg0,
6072 build_tree_list (NULL_TREE, narg1));
6073 return build_function_call_expr (fndecl, arglist);
6076 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
6077 if (flag_unsafe_math_optimizations
6078 && (fcode == BUILT_IN_POW
6079 || fcode == BUILT_IN_POWF
6080 || fcode == BUILT_IN_POWL))
6082 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6083 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6084 tree narg1 = fold (build (MULT_EXPR, type, arg01, arg1));
6085 arglist = tree_cons (NULL_TREE, arg00,
6086 build_tree_list (NULL_TREE, narg1));
6087 return build_function_call_expr (fndecl, arglist);
6090 break;
6092 case BUILT_IN_INF:
6093 case BUILT_IN_INFF:
6094 case BUILT_IN_INFL:
6095 return fold_builtin_inf (type, true);
6097 case BUILT_IN_HUGE_VAL:
6098 case BUILT_IN_HUGE_VALF:
6099 case BUILT_IN_HUGE_VALL:
6100 return fold_builtin_inf (type, false);
6102 case BUILT_IN_NAN:
6103 case BUILT_IN_NANF:
6104 case BUILT_IN_NANL:
6105 return fold_builtin_nan (arglist, type, true);
6107 case BUILT_IN_NANS:
6108 case BUILT_IN_NANSF:
6109 case BUILT_IN_NANSL:
6110 return fold_builtin_nan (arglist, type, false);
6112 case BUILT_IN_FLOOR:
6113 case BUILT_IN_FLOORF:
6114 case BUILT_IN_FLOORL:
6115 return fold_builtin_floor (exp);
6117 case BUILT_IN_CEIL:
6118 case BUILT_IN_CEILF:
6119 case BUILT_IN_CEILL:
6120 return fold_builtin_ceil (exp);
6122 case BUILT_IN_TRUNC:
6123 case BUILT_IN_TRUNCF:
6124 case BUILT_IN_TRUNCL:
6125 return fold_builtin_trunc (exp);
6127 case BUILT_IN_ROUND:
6128 case BUILT_IN_ROUNDF:
6129 case BUILT_IN_ROUNDL:
6130 case BUILT_IN_NEARBYINT:
6131 case BUILT_IN_NEARBYINTF:
6132 case BUILT_IN_NEARBYINTL:
6133 return fold_trunc_transparent_mathfn (exp);
6135 default:
6136 break;
6139 return 0;
6142 /* Conveniently construct a function call expression. */
6144 tree
6145 build_function_call_expr (tree fn, tree arglist)
6147 tree call_expr;
6149 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
6150 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
6151 call_expr, arglist);
6152 TREE_SIDE_EFFECTS (call_expr) = 1;
6153 return fold (call_expr);
6156 /* This function validates the types of a function call argument list
6157 represented as a tree chain of parameters against a specified list
6158 of tree_codes. If the last specifier is a 0, that represents an
6159 ellipses, otherwise the last specifier must be a VOID_TYPE. */
6161 static int
6162 validate_arglist (tree arglist, ...)
6164 enum tree_code code;
6165 int res = 0;
6166 va_list ap;
6168 va_start (ap, arglist);
6172 code = va_arg (ap, enum tree_code);
6173 switch (code)
6175 case 0:
6176 /* This signifies an ellipses, any further arguments are all ok. */
6177 res = 1;
6178 goto end;
6179 case VOID_TYPE:
6180 /* This signifies an endlink, if no arguments remain, return
6181 true, otherwise return false. */
6182 res = arglist == 0;
6183 goto end;
6184 default:
6185 /* If no parameters remain or the parameter's code does not
6186 match the specified code, return false. Otherwise continue
6187 checking any remaining arguments. */
6188 if (arglist == 0
6189 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
6190 goto end;
6191 break;
6193 arglist = TREE_CHAIN (arglist);
6195 while (1);
6197 /* We need gotos here since we can only have one VA_CLOSE in a
6198 function. */
6199 end: ;
6200 va_end (ap);
6202 return res;
6205 /* Default version of target-specific builtin setup that does nothing. */
6207 void
6208 default_init_builtins (void)
6212 /* Default target-specific builtin expander that does nothing. */
6215 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
6216 rtx target ATTRIBUTE_UNUSED,
6217 rtx subtarget ATTRIBUTE_UNUSED,
6218 enum machine_mode mode ATTRIBUTE_UNUSED,
6219 int ignore ATTRIBUTE_UNUSED)
6221 return NULL_RTX;
6224 /* Instantiate all remaining CONSTANT_P_RTX nodes. */
6226 void
6227 purge_builtin_constant_p (void)
6229 rtx insn, set, arg, new, note;
6231 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6232 if (INSN_P (insn)
6233 && (set = single_set (insn)) != NULL_RTX
6234 && (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
6235 || (GET_CODE (arg) == SUBREG
6236 && (GET_CODE (arg = SUBREG_REG (arg))
6237 == CONSTANT_P_RTX))))
6239 arg = XEXP (arg, 0);
6240 new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
6241 validate_change (insn, &SET_SRC (set), new, 0);
6243 /* Remove the REG_EQUAL note from the insn. */
6244 if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
6245 remove_note (insn, note);
6249 /* Returns true is EXP represents data that would potentially reside
6250 in a readonly section. */
6252 static bool
6253 readonly_data_expr (tree exp)
6255 STRIP_NOPS (exp);
6257 if (TREE_CODE (exp) == ADDR_EXPR)
6258 return decl_readonly_section (TREE_OPERAND (exp, 0), 0);
6259 else
6260 return false;