* fixinc/inclhack.def (avoid_bool_define, avoid_bool_type): Bypass
[official-gcc.git] / gcc / builtins.c
blob90cd6acd1655dc98300b6a9ee32567716f36e13f
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
51 /* Register mappings for target machines without register windows. */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
54 #endif
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
57 #endif
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 #endif
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) #X,
68 const char *const built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
72 #undef DEF_BUILTIN
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases. */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 /* Trigonometric and mathematical constants used in builtin folding. */
83 static bool builtin_dconsts_init = 0;
84 static REAL_VALUE_TYPE dconstpi;
85 static REAL_VALUE_TYPE dconste;
87 static int get_pointer_alignment (tree, unsigned int);
88 static tree c_strlen (tree, int);
89 static const char *c_getstr (tree);
90 static rtx c_readstr (const char *, enum machine_mode);
91 static int target_char_cast (tree, char *);
92 static rtx get_memory_rtx (tree);
93 static int apply_args_size (void);
94 static int apply_result_size (void);
95 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
96 static rtx result_vector (int, rtx);
97 #endif
98 static rtx expand_builtin_setjmp (tree, rtx);
99 static void expand_builtin_prefetch (tree);
100 static rtx expand_builtin_apply_args (void);
101 static rtx expand_builtin_apply_args_1 (void);
102 static rtx expand_builtin_apply (rtx, rtx, rtx);
103 static void expand_builtin_return (rtx);
104 static enum type_class type_to_class (tree);
105 static rtx expand_builtin_classify_type (tree);
106 static void expand_errno_check (tree, rtx);
107 static rtx expand_builtin_mathfn (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
109 static rtx expand_builtin_constant_p (tree, enum machine_mode);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (tree);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_strcat (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode, int);
125 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_bcopy (tree);
127 static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
131 static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_alloca (tree, rtx);
141 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
142 static rtx expand_builtin_frame_address (tree, tree);
143 static rtx expand_builtin_fputs (tree, int, int);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_classify_type (tree);
149 static tree fold_builtin_inf (tree, int);
150 static tree fold_builtin_nan (tree, tree, int);
151 static int validate_arglist (tree, ...);
152 static bool integer_valued_real_p (tree);
153 static tree fold_trunc_transparent_mathfn (tree);
154 static bool readonly_data_expr (tree);
155 static rtx expand_builtin_fabs (tree, rtx, rtx);
156 static rtx expand_builtin_cabs (tree, rtx);
157 static void init_builtin_dconsts (void);
158 static tree fold_builtin_cabs (tree, tree, tree);
159 static tree fold_builtin_trunc (tree);
160 static tree fold_builtin_floor (tree);
161 static tree fold_builtin_ceil (tree);
163 /* Initialize mathematical constants for constant folding builtins.
164 These constants need to be given to at least 160 bits precision. */
166 static void
167 init_builtin_dconsts (void)
169 real_from_string (&dconstpi,
170 "3.1415926535897932384626433832795028841971693993751058209749445923078");
171 real_from_string (&dconste,
172 "2.7182818284590452353602874713526624977572470936999595749669676277241");
174 builtin_dconsts_init = true;
177 /* Return the alignment in bits of EXP, a pointer valued expression.
178 But don't return more than MAX_ALIGN no matter what.
179 The alignment returned is, by default, the alignment of the thing that
180 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
182 Otherwise, look at the expression to see if we can do better, i.e., if the
183 expression is actually pointing at an object whose alignment is tighter. */
185 static int
186 get_pointer_alignment (tree exp, unsigned int max_align)
188 unsigned int align, inner;
190 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
191 return 0;
193 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
194 align = MIN (align, max_align);
196 while (1)
198 switch (TREE_CODE (exp))
200 case NOP_EXPR:
201 case CONVERT_EXPR:
202 case NON_LVALUE_EXPR:
203 exp = TREE_OPERAND (exp, 0);
204 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
205 return align;
207 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
208 align = MIN (inner, max_align);
209 break;
211 case PLUS_EXPR:
212 /* If sum of pointer + int, restrict our maximum alignment to that
213 imposed by the integer. If not, we can't do any better than
214 ALIGN. */
215 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
216 return align;
218 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
219 & (max_align / BITS_PER_UNIT - 1))
220 != 0)
221 max_align >>= 1;
223 exp = TREE_OPERAND (exp, 0);
224 break;
226 case ADDR_EXPR:
227 /* See what we are pointing at and look at its alignment. */
228 exp = TREE_OPERAND (exp, 0);
229 if (TREE_CODE (exp) == FUNCTION_DECL)
230 align = FUNCTION_BOUNDARY;
231 else if (DECL_P (exp))
232 align = DECL_ALIGN (exp);
233 #ifdef CONSTANT_ALIGNMENT
234 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
235 align = CONSTANT_ALIGNMENT (exp, align);
236 #endif
237 return MIN (align, max_align);
239 default:
240 return align;
245 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
246 way, because it could contain a zero byte in the middle.
247 TREE_STRING_LENGTH is the size of the character array, not the string.
249 ONLY_VALUE should be nonzero if the result is not going to be emitted
250 into the instruction stream and zero if it is going to be expanded.
251 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
252 is returned, otherwise NULL, since
253 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
254 evaluate the side-effects.
256 The value returned is of type `ssizetype'.
258 Unfortunately, string_constant can't access the values of const char
259 arrays with initializers, so neither can we do so here. */
261 static tree
262 c_strlen (tree src, int only_value)
264 tree offset_node;
265 HOST_WIDE_INT offset;
266 int max;
267 const char *ptr;
269 STRIP_NOPS (src);
270 if (TREE_CODE (src) == COND_EXPR
271 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
273 tree len1, len2;
275 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
276 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
277 if (tree_int_cst_equal (len1, len2))
278 return len1;
281 if (TREE_CODE (src) == COMPOUND_EXPR
282 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
283 return c_strlen (TREE_OPERAND (src, 1), only_value);
285 src = string_constant (src, &offset_node);
286 if (src == 0)
287 return 0;
289 max = TREE_STRING_LENGTH (src) - 1;
290 ptr = TREE_STRING_POINTER (src);
292 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
294 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
295 compute the offset to the following null if we don't know where to
296 start searching for it. */
297 int i;
299 for (i = 0; i < max; i++)
300 if (ptr[i] == 0)
301 return 0;
303 /* We don't know the starting offset, but we do know that the string
304 has no internal zero bytes. We can assume that the offset falls
305 within the bounds of the string; otherwise, the programmer deserves
306 what he gets. Subtract the offset from the length of the string,
307 and return that. This would perhaps not be valid if we were dealing
308 with named arrays in addition to literal string constants. */
310 return size_diffop (size_int (max), offset_node);
313 /* We have a known offset into the string. Start searching there for
314 a null character if we can represent it as a single HOST_WIDE_INT. */
315 if (offset_node == 0)
316 offset = 0;
317 else if (! host_integerp (offset_node, 0))
318 offset = -1;
319 else
320 offset = tree_low_cst (offset_node, 0);
322 /* If the offset is known to be out of bounds, warn, and call strlen at
323 runtime. */
324 if (offset < 0 || offset > max)
326 warning ("offset outside bounds of constant string");
327 return 0;
330 /* Use strlen to search for the first zero byte. Since any strings
331 constructed with build_string will have nulls appended, we win even
332 if we get handed something like (char[4])"abcd".
334 Since OFFSET is our starting index into the string, no further
335 calculation is needed. */
336 return ssize_int (strlen (ptr + offset));
339 /* Return a char pointer for a C string if it is a string constant
340 or sum of string constant and integer constant. */
342 static const char *
343 c_getstr (tree src)
345 tree offset_node;
347 src = string_constant (src, &offset_node);
348 if (src == 0)
349 return 0;
351 if (offset_node == 0)
352 return TREE_STRING_POINTER (src);
353 else if (!host_integerp (offset_node, 1)
354 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
355 return 0;
357 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
360 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
361 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
363 static rtx
364 c_readstr (const char *str, enum machine_mode mode)
366 HOST_WIDE_INT c[2];
367 HOST_WIDE_INT ch;
368 unsigned int i, j;
370 if (GET_MODE_CLASS (mode) != MODE_INT)
371 abort ();
372 c[0] = 0;
373 c[1] = 0;
374 ch = 1;
375 for (i = 0; i < GET_MODE_SIZE (mode); i++)
377 j = i;
378 if (WORDS_BIG_ENDIAN)
379 j = GET_MODE_SIZE (mode) - i - 1;
380 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
381 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
382 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
383 j *= BITS_PER_UNIT;
384 if (j > 2 * HOST_BITS_PER_WIDE_INT)
385 abort ();
386 if (ch)
387 ch = (unsigned char) str[i];
388 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
390 return immed_double_const (c[0], c[1], mode);
393 /* Cast a target constant CST to target CHAR and if that value fits into
394 host char type, return zero and put that value into variable pointed by
395 P. */
397 static int
398 target_char_cast (tree cst, char *p)
400 unsigned HOST_WIDE_INT val, hostval;
402 if (!host_integerp (cst, 1)
403 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
404 return 1;
406 val = tree_low_cst (cst, 1);
407 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
408 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
410 hostval = val;
411 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
412 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
414 if (val != hostval)
415 return 1;
417 *p = hostval;
418 return 0;
421 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
422 times to get the address of either a higher stack frame, or a return
423 address located within it (depending on FNDECL_CODE). */
426 expand_builtin_return_addr (enum built_in_function fndecl_code, int count,
427 rtx tem)
429 int i;
431 /* Some machines need special handling before we can access
432 arbitrary frames. For example, on the sparc, we must first flush
433 all register windows to the stack. */
434 #ifdef SETUP_FRAME_ADDRESSES
435 if (count > 0)
436 SETUP_FRAME_ADDRESSES ();
437 #endif
439 /* On the sparc, the return address is not in the frame, it is in a
440 register. There is no way to access it off of the current frame
441 pointer, but it can be accessed off the previous frame pointer by
442 reading the value from the register window save area. */
443 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
444 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
445 count--;
446 #endif
448 /* Scan back COUNT frames to the specified frame. */
449 for (i = 0; i < count; i++)
451 /* Assume the dynamic chain pointer is in the word that the
452 frame address points to, unless otherwise specified. */
453 #ifdef DYNAMIC_CHAIN_ADDRESS
454 tem = DYNAMIC_CHAIN_ADDRESS (tem);
455 #endif
456 tem = memory_address (Pmode, tem);
457 tem = gen_rtx_MEM (Pmode, tem);
458 set_mem_alias_set (tem, get_frame_alias_set ());
459 tem = copy_to_reg (tem);
462 /* For __builtin_frame_address, return what we've got. */
463 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
464 return tem;
466 /* For __builtin_return_address, Get the return address from that
467 frame. */
468 #ifdef RETURN_ADDR_RTX
469 tem = RETURN_ADDR_RTX (count, tem);
470 #else
471 tem = memory_address (Pmode,
472 plus_constant (tem, GET_MODE_SIZE (Pmode)));
473 tem = gen_rtx_MEM (Pmode, tem);
474 set_mem_alias_set (tem, get_frame_alias_set ());
475 #endif
476 return tem;
479 /* Alias set used for setjmp buffer. */
480 static HOST_WIDE_INT setjmp_alias_set = -1;
482 /* Construct the leading half of a __builtin_setjmp call. Control will
483 return to RECEIVER_LABEL. This is used directly by sjlj exception
484 handling code. */
486 void
487 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
489 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
490 rtx stack_save;
491 rtx mem;
493 if (setjmp_alias_set == -1)
494 setjmp_alias_set = new_alias_set ();
496 #ifdef POINTERS_EXTEND_UNSIGNED
497 if (GET_MODE (buf_addr) != Pmode)
498 buf_addr = convert_memory_address (Pmode, buf_addr);
499 #endif
501 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
503 emit_queue ();
505 /* We store the frame pointer and the address of receiver_label in
506 the buffer and use the rest of it for the stack save area, which
507 is machine-dependent. */
509 #ifndef BUILTIN_SETJMP_FRAME_VALUE
510 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
511 #endif
513 mem = gen_rtx_MEM (Pmode, buf_addr);
514 set_mem_alias_set (mem, setjmp_alias_set);
515 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
517 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
518 set_mem_alias_set (mem, setjmp_alias_set);
520 emit_move_insn (validize_mem (mem),
521 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
523 stack_save = gen_rtx_MEM (sa_mode,
524 plus_constant (buf_addr,
525 2 * GET_MODE_SIZE (Pmode)));
526 set_mem_alias_set (stack_save, setjmp_alias_set);
527 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
529 /* If there is further processing to do, do it. */
530 #ifdef HAVE_builtin_setjmp_setup
531 if (HAVE_builtin_setjmp_setup)
532 emit_insn (gen_builtin_setjmp_setup (buf_addr));
533 #endif
535 /* Tell optimize_save_area_alloca that extra work is going to
536 need to go on during alloca. */
537 current_function_calls_setjmp = 1;
539 /* Set this so all the registers get saved in our frame; we need to be
540 able to copy the saved values for any registers from frames we unwind. */
541 current_function_has_nonlocal_label = 1;
544 /* Construct the trailing part of a __builtin_setjmp call.
545 This is used directly by sjlj exception handling code. */
547 void
548 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
550 /* Clobber the FP when we get here, so we have to make sure it's
551 marked as used by this function. */
552 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
554 /* Mark the static chain as clobbered here so life information
555 doesn't get messed up for it. */
556 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
558 /* Now put in the code to restore the frame pointer, and argument
559 pointer, if needed. The code below is from expand_end_bindings
560 in stmt.c; see detailed documentation there. */
561 #ifdef HAVE_nonlocal_goto
562 if (! HAVE_nonlocal_goto)
563 #endif
564 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
566 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
567 if (fixed_regs[ARG_POINTER_REGNUM])
569 #ifdef ELIMINABLE_REGS
570 size_t i;
571 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
573 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
574 if (elim_regs[i].from == ARG_POINTER_REGNUM
575 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
576 break;
578 if (i == ARRAY_SIZE (elim_regs))
579 #endif
581 /* Now restore our arg pointer from the address at which it
582 was saved in our stack frame. */
583 emit_move_insn (virtual_incoming_args_rtx,
584 copy_to_reg (get_arg_pointer_save_area (cfun)));
587 #endif
589 #ifdef HAVE_builtin_setjmp_receiver
590 if (HAVE_builtin_setjmp_receiver)
591 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
592 else
593 #endif
594 #ifdef HAVE_nonlocal_goto_receiver
595 if (HAVE_nonlocal_goto_receiver)
596 emit_insn (gen_nonlocal_goto_receiver ());
597 else
598 #endif
599 { /* Nothing */ }
601 /* @@@ This is a kludge. Not all machine descriptions define a blockage
602 insn, but we must not allow the code we just generated to be reordered
603 by scheduling. Specifically, the update of the frame pointer must
604 happen immediately, not later. So emit an ASM_INPUT to act as blockage
605 insn. */
606 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
609 /* __builtin_setjmp is passed a pointer to an array of five words (not
610 all will be used on all machines). It operates similarly to the C
611 library function of the same name, but is more efficient. Much of
612 the code below (and for longjmp) is copied from the handling of
613 non-local gotos.
615 NOTE: This is intended for use by GNAT and the exception handling
616 scheme in the compiler and will only work in the method used by
617 them. */
619 static rtx
620 expand_builtin_setjmp (tree arglist, rtx target)
622 rtx buf_addr, next_lab, cont_lab;
624 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
625 return NULL_RTX;
627 if (target == 0 || GET_CODE (target) != REG
628 || REGNO (target) < FIRST_PSEUDO_REGISTER)
629 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
631 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
633 next_lab = gen_label_rtx ();
634 cont_lab = gen_label_rtx ();
636 expand_builtin_setjmp_setup (buf_addr, next_lab);
638 /* Set TARGET to zero and branch to the continue label. */
639 emit_move_insn (target, const0_rtx);
640 emit_jump_insn (gen_jump (cont_lab));
641 emit_barrier ();
642 emit_label (next_lab);
644 expand_builtin_setjmp_receiver (next_lab);
646 /* Set TARGET to one. */
647 emit_move_insn (target, const1_rtx);
648 emit_label (cont_lab);
650 /* Tell flow about the strange goings on. Putting `next_lab' on
651 `nonlocal_goto_handler_labels' to indicates that function
652 calls may traverse the arc back to this label. */
654 current_function_has_nonlocal_label = 1;
655 nonlocal_goto_handler_labels
656 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
658 return target;
661 /* __builtin_longjmp is passed a pointer to an array of five words (not
662 all will be used on all machines). It operates similarly to the C
663 library function of the same name, but is more efficient. Much of
664 the code below is copied from the handling of non-local gotos.
666 NOTE: This is intended for use by GNAT and the exception handling
667 scheme in the compiler and will only work in the method used by
668 them. */
670 void
671 expand_builtin_longjmp (rtx buf_addr, rtx value)
673 rtx fp, lab, stack, insn, last;
674 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
676 if (setjmp_alias_set == -1)
677 setjmp_alias_set = new_alias_set ();
679 #ifdef POINTERS_EXTEND_UNSIGNED
680 if (GET_MODE (buf_addr) != Pmode)
681 buf_addr = convert_memory_address (Pmode, buf_addr);
682 #endif
684 buf_addr = force_reg (Pmode, buf_addr);
686 /* We used to store value in static_chain_rtx, but that fails if pointers
687 are smaller than integers. We instead require that the user must pass
688 a second argument of 1, because that is what builtin_setjmp will
689 return. This also makes EH slightly more efficient, since we are no
690 longer copying around a value that we don't care about. */
691 if (value != const1_rtx)
692 abort ();
694 current_function_calls_longjmp = 1;
696 last = get_last_insn ();
697 #ifdef HAVE_builtin_longjmp
698 if (HAVE_builtin_longjmp)
699 emit_insn (gen_builtin_longjmp (buf_addr));
700 else
701 #endif
703 fp = gen_rtx_MEM (Pmode, buf_addr);
704 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
705 GET_MODE_SIZE (Pmode)));
707 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
708 2 * GET_MODE_SIZE (Pmode)));
709 set_mem_alias_set (fp, setjmp_alias_set);
710 set_mem_alias_set (lab, setjmp_alias_set);
711 set_mem_alias_set (stack, setjmp_alias_set);
713 /* Pick up FP, label, and SP from the block and jump. This code is
714 from expand_goto in stmt.c; see there for detailed comments. */
715 #if HAVE_nonlocal_goto
716 if (HAVE_nonlocal_goto)
717 /* We have to pass a value to the nonlocal_goto pattern that will
718 get copied into the static_chain pointer, but it does not matter
719 what that value is, because builtin_setjmp does not use it. */
720 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
721 else
722 #endif
724 lab = copy_to_reg (lab);
726 emit_move_insn (hard_frame_pointer_rtx, fp);
727 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
729 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
730 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
731 emit_indirect_jump (lab);
735 /* Search backwards and mark the jump insn as a non-local goto.
736 Note that this precludes the use of __builtin_longjmp to a
737 __builtin_setjmp target in the same function. However, we've
738 already cautioned the user that these functions are for
739 internal exception handling use only. */
740 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
742 if (insn == last)
743 abort ();
744 if (GET_CODE (insn) == JUMP_INSN)
746 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
747 REG_NOTES (insn));
748 break;
750 else if (GET_CODE (insn) == CALL_INSN)
751 break;
755 /* Expand a call to __builtin_prefetch. For a target that does not support
756 data prefetch, evaluate the memory address argument in case it has side
757 effects. */
759 static void
760 expand_builtin_prefetch (tree arglist)
762 tree arg0, arg1, arg2;
763 rtx op0, op1, op2;
765 if (!validate_arglist (arglist, POINTER_TYPE, 0))
766 return;
768 arg0 = TREE_VALUE (arglist);
769 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
770 zero (read) and argument 2 (locality) defaults to 3 (high degree of
771 locality). */
772 if (TREE_CHAIN (arglist))
774 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
775 if (TREE_CHAIN (TREE_CHAIN (arglist)))
776 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
777 else
778 arg2 = build_int_2 (3, 0);
780 else
782 arg1 = integer_zero_node;
783 arg2 = build_int_2 (3, 0);
786 /* Argument 0 is an address. */
787 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
789 /* Argument 1 (read/write flag) must be a compile-time constant int. */
790 if (TREE_CODE (arg1) != INTEGER_CST)
792 error ("second arg to `__builtin_prefetch' must be a constant");
793 arg1 = integer_zero_node;
795 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
796 /* Argument 1 must be either zero or one. */
797 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
799 warning ("invalid second arg to __builtin_prefetch; using zero");
800 op1 = const0_rtx;
803 /* Argument 2 (locality) must be a compile-time constant int. */
804 if (TREE_CODE (arg2) != INTEGER_CST)
806 error ("third arg to `__builtin_prefetch' must be a constant");
807 arg2 = integer_zero_node;
809 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
810 /* Argument 2 must be 0, 1, 2, or 3. */
811 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
813 warning ("invalid third arg to __builtin_prefetch; using zero");
814 op2 = const0_rtx;
817 #ifdef HAVE_prefetch
818 if (HAVE_prefetch)
820 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
821 (op0,
822 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
823 || (GET_MODE(op0) != Pmode))
825 #ifdef POINTERS_EXTEND_UNSIGNED
826 if (GET_MODE(op0) != Pmode)
827 op0 = convert_memory_address (Pmode, op0);
828 #endif
829 op0 = force_reg (Pmode, op0);
831 emit_insn (gen_prefetch (op0, op1, op2));
833 else
834 #endif
835 op0 = protect_from_queue (op0, 0);
836 /* Don't do anything with direct references to volatile memory, but
837 generate code to handle other side effects. */
838 if (GET_CODE (op0) != MEM && side_effects_p (op0))
839 emit_insn (op0);
842 /* Get a MEM rtx for expression EXP which is the address of an operand
843 to be used to be used in a string instruction (cmpstrsi, movstrsi, ..). */
845 static rtx
846 get_memory_rtx (tree exp)
848 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
849 rtx mem;
851 #ifdef POINTERS_EXTEND_UNSIGNED
852 if (GET_MODE (addr) != Pmode)
853 addr = convert_memory_address (Pmode, addr);
854 #endif
856 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
858 /* Get an expression we can use to find the attributes to assign to MEM.
859 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
860 we can. First remove any nops. */
861 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
862 || TREE_CODE (exp) == NON_LVALUE_EXPR)
863 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
864 exp = TREE_OPERAND (exp, 0);
866 if (TREE_CODE (exp) == ADDR_EXPR)
868 exp = TREE_OPERAND (exp, 0);
869 set_mem_attributes (mem, exp, 0);
871 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
873 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
874 /* memcpy, memset and other builtin stringops can alias with anything. */
875 set_mem_alias_set (mem, 0);
878 return mem;
881 /* Built-in functions to perform an untyped call and return. */
883 /* For each register that may be used for calling a function, this
884 gives a mode used to copy the register's value. VOIDmode indicates
885 the register is not used for calling a function. If the machine
886 has register windows, this gives only the outbound registers.
887 INCOMING_REGNO gives the corresponding inbound register. */
888 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
890 /* For each register that may be used for returning values, this gives
891 a mode used to copy the register's value. VOIDmode indicates the
892 register is not used for returning values. If the machine has
893 register windows, this gives only the outbound registers.
894 INCOMING_REGNO gives the corresponding inbound register. */
895 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
897 /* For each register that may be used for calling a function, this
898 gives the offset of that register into the block returned by
899 __builtin_apply_args. 0 indicates that the register is not
900 used for calling a function. */
901 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
903 /* Return the offset of register REGNO into the block returned by
904 __builtin_apply_args. This is not declared static, since it is
905 needed in objc-act.c. */
908 apply_args_register_offset (int regno)
910 apply_args_size ();
912 /* Arguments are always put in outgoing registers (in the argument
913 block) if such make sense. */
914 #ifdef OUTGOING_REGNO
915 regno = OUTGOING_REGNO (regno);
916 #endif
917 return apply_args_reg_offset[regno];
920 /* Return the size required for the block returned by __builtin_apply_args,
921 and initialize apply_args_mode. */
923 static int
924 apply_args_size (void)
926 static int size = -1;
927 int align;
928 unsigned int regno;
929 enum machine_mode mode;
931 /* The values computed by this function never change. */
932 if (size < 0)
934 /* The first value is the incoming arg-pointer. */
935 size = GET_MODE_SIZE (Pmode);
937 /* The second value is the structure value address unless this is
938 passed as an "invisible" first argument. */
939 if (struct_value_rtx)
940 size += GET_MODE_SIZE (Pmode);
942 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
943 if (FUNCTION_ARG_REGNO_P (regno))
945 /* Search for the proper mode for copying this register's
946 value. I'm not sure this is right, but it works so far. */
947 enum machine_mode best_mode = VOIDmode;
949 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
950 mode != VOIDmode;
951 mode = GET_MODE_WIDER_MODE (mode))
952 if (HARD_REGNO_MODE_OK (regno, mode)
953 && HARD_REGNO_NREGS (regno, mode) == 1)
954 best_mode = mode;
956 if (best_mode == VOIDmode)
957 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
958 mode != VOIDmode;
959 mode = GET_MODE_WIDER_MODE (mode))
960 if (HARD_REGNO_MODE_OK (regno, mode)
961 && have_insn_for (SET, mode))
962 best_mode = mode;
964 if (best_mode == VOIDmode)
965 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
966 mode != VOIDmode;
967 mode = GET_MODE_WIDER_MODE (mode))
968 if (HARD_REGNO_MODE_OK (regno, mode)
969 && have_insn_for (SET, mode))
970 best_mode = mode;
972 if (best_mode == VOIDmode)
973 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
974 mode != VOIDmode;
975 mode = GET_MODE_WIDER_MODE (mode))
976 if (HARD_REGNO_MODE_OK (regno, mode)
977 && have_insn_for (SET, mode))
978 best_mode = mode;
980 mode = best_mode;
981 if (mode == VOIDmode)
982 abort ();
984 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
985 if (size % align != 0)
986 size = CEIL (size, align) * align;
987 apply_args_reg_offset[regno] = size;
988 size += GET_MODE_SIZE (mode);
989 apply_args_mode[regno] = mode;
991 else
993 apply_args_mode[regno] = VOIDmode;
994 apply_args_reg_offset[regno] = 0;
997 return size;
1000 /* Return the size required for the block returned by __builtin_apply,
1001 and initialize apply_result_mode. */
1003 static int
1004 apply_result_size (void)
1006 static int size = -1;
1007 int align, regno;
1008 enum machine_mode mode;
1010 /* The values computed by this function never change. */
1011 if (size < 0)
1013 size = 0;
1015 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1016 if (FUNCTION_VALUE_REGNO_P (regno))
1018 /* Search for the proper mode for copying this register's
1019 value. I'm not sure this is right, but it works so far. */
1020 enum machine_mode best_mode = VOIDmode;
1022 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1023 mode != TImode;
1024 mode = GET_MODE_WIDER_MODE (mode))
1025 if (HARD_REGNO_MODE_OK (regno, mode))
1026 best_mode = mode;
1028 if (best_mode == VOIDmode)
1029 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1030 mode != VOIDmode;
1031 mode = GET_MODE_WIDER_MODE (mode))
1032 if (HARD_REGNO_MODE_OK (regno, mode)
1033 && have_insn_for (SET, mode))
1034 best_mode = mode;
1036 if (best_mode == VOIDmode)
1037 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1038 mode != VOIDmode;
1039 mode = GET_MODE_WIDER_MODE (mode))
1040 if (HARD_REGNO_MODE_OK (regno, mode)
1041 && have_insn_for (SET, mode))
1042 best_mode = mode;
1044 if (best_mode == VOIDmode)
1045 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1046 mode != VOIDmode;
1047 mode = GET_MODE_WIDER_MODE (mode))
1048 if (HARD_REGNO_MODE_OK (regno, mode)
1049 && have_insn_for (SET, mode))
1050 best_mode = mode;
1052 mode = best_mode;
1053 if (mode == VOIDmode)
1054 abort ();
1056 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1057 if (size % align != 0)
1058 size = CEIL (size, align) * align;
1059 size += GET_MODE_SIZE (mode);
1060 apply_result_mode[regno] = mode;
1062 else
1063 apply_result_mode[regno] = VOIDmode;
1065 /* Allow targets that use untyped_call and untyped_return to override
1066 the size so that machine-specific information can be stored here. */
1067 #ifdef APPLY_RESULT_SIZE
1068 size = APPLY_RESULT_SIZE;
1069 #endif
1071 return size;
1074 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1075 /* Create a vector describing the result block RESULT. If SAVEP is true,
1076 the result block is used to save the values; otherwise it is used to
1077 restore the values. */
1079 static rtx
1080 result_vector (int savep, rtx result)
1082 int regno, size, align, nelts;
1083 enum machine_mode mode;
1084 rtx reg, mem;
1085 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1087 size = nelts = 0;
1088 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1089 if ((mode = apply_result_mode[regno]) != VOIDmode)
1091 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1092 if (size % align != 0)
1093 size = CEIL (size, align) * align;
1094 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1095 mem = adjust_address (result, mode, size);
1096 savevec[nelts++] = (savep
1097 ? gen_rtx_SET (VOIDmode, mem, reg)
1098 : gen_rtx_SET (VOIDmode, reg, mem));
1099 size += GET_MODE_SIZE (mode);
1101 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1103 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1105 /* Save the state required to perform an untyped call with the same
1106 arguments as were passed to the current function. */
1108 static rtx
1109 expand_builtin_apply_args_1 (void)
1111 rtx registers;
1112 int size, align, regno;
1113 enum machine_mode mode;
1115 /* Create a block where the arg-pointer, structure value address,
1116 and argument registers can be saved. */
1117 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1119 /* Walk past the arg-pointer and structure value address. */
1120 size = GET_MODE_SIZE (Pmode);
1121 if (struct_value_rtx)
1122 size += GET_MODE_SIZE (Pmode);
1124 /* Save each register used in calling a function to the block. */
1125 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1126 if ((mode = apply_args_mode[regno]) != VOIDmode)
1128 rtx tem;
1130 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1131 if (size % align != 0)
1132 size = CEIL (size, align) * align;
1134 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1136 emit_move_insn (adjust_address (registers, mode, size), tem);
1137 size += GET_MODE_SIZE (mode);
1140 /* Save the arg pointer to the block. */
1141 emit_move_insn (adjust_address (registers, Pmode, 0),
1142 copy_to_reg (virtual_incoming_args_rtx));
1143 size = GET_MODE_SIZE (Pmode);
1145 /* Save the structure value address unless this is passed as an
1146 "invisible" first argument. */
1147 if (struct_value_incoming_rtx)
1149 emit_move_insn (adjust_address (registers, Pmode, size),
1150 copy_to_reg (struct_value_incoming_rtx));
1151 size += GET_MODE_SIZE (Pmode);
1154 /* Return the address of the block. */
1155 return copy_addr_to_reg (XEXP (registers, 0));
1158 /* __builtin_apply_args returns block of memory allocated on
1159 the stack into which is stored the arg pointer, structure
1160 value address, static chain, and all the registers that might
1161 possibly be used in performing a function call. The code is
1162 moved to the start of the function so the incoming values are
1163 saved. */
1165 static rtx
1166 expand_builtin_apply_args (void)
1168 /* Don't do __builtin_apply_args more than once in a function.
1169 Save the result of the first call and reuse it. */
1170 if (apply_args_value != 0)
1171 return apply_args_value;
1173 /* When this function is called, it means that registers must be
1174 saved on entry to this function. So we migrate the
1175 call to the first insn of this function. */
1176 rtx temp;
1177 rtx seq;
1179 start_sequence ();
1180 temp = expand_builtin_apply_args_1 ();
1181 seq = get_insns ();
1182 end_sequence ();
1184 apply_args_value = temp;
1186 /* Put the insns after the NOTE that starts the function.
1187 If this is inside a start_sequence, make the outer-level insn
1188 chain current, so the code is placed at the start of the
1189 function. */
1190 push_topmost_sequence ();
1191 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1192 pop_topmost_sequence ();
1193 return temp;
1197 /* Perform an untyped call and save the state required to perform an
1198 untyped return of whatever value was returned by the given function. */
1200 static rtx
1201 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1203 int size, align, regno;
1204 enum machine_mode mode;
1205 rtx incoming_args, result, reg, dest, src, call_insn;
1206 rtx old_stack_level = 0;
1207 rtx call_fusage = 0;
1209 #ifdef POINTERS_EXTEND_UNSIGNED
1210 if (GET_MODE (arguments) != Pmode)
1211 arguments = convert_memory_address (Pmode, arguments);
1212 #endif
1214 /* Create a block where the return registers can be saved. */
1215 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1217 /* Fetch the arg pointer from the ARGUMENTS block. */
1218 incoming_args = gen_reg_rtx (Pmode);
1219 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1220 #ifndef STACK_GROWS_DOWNWARD
1221 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1222 incoming_args, 0, OPTAB_LIB_WIDEN);
1223 #endif
1225 /* Perform postincrements before actually calling the function. */
1226 emit_queue ();
1228 /* Push a new argument block and copy the arguments. Do not allow
1229 the (potential) memcpy call below to interfere with our stack
1230 manipulations. */
1231 do_pending_stack_adjust ();
1232 NO_DEFER_POP;
1234 /* Save the stack with nonlocal if available. */
1235 #ifdef HAVE_save_stack_nonlocal
1236 if (HAVE_save_stack_nonlocal)
1237 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1238 else
1239 #endif
1240 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1242 /* Push a block of memory onto the stack to store the memory arguments.
1243 Save the address in a register, and copy the memory arguments. ??? I
1244 haven't figured out how the calling convention macros effect this,
1245 but it's likely that the source and/or destination addresses in
1246 the block copy will need updating in machine specific ways. */
1247 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1248 dest = gen_rtx_MEM (BLKmode, dest);
1249 set_mem_align (dest, PARM_BOUNDARY);
1250 src = gen_rtx_MEM (BLKmode, incoming_args);
1251 set_mem_align (src, PARM_BOUNDARY);
1252 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1254 /* Refer to the argument block. */
1255 apply_args_size ();
1256 arguments = gen_rtx_MEM (BLKmode, arguments);
1257 set_mem_align (arguments, PARM_BOUNDARY);
1259 /* Walk past the arg-pointer and structure value address. */
1260 size = GET_MODE_SIZE (Pmode);
1261 if (struct_value_rtx)
1262 size += GET_MODE_SIZE (Pmode);
1264 /* Restore each of the registers previously saved. Make USE insns
1265 for each of these registers for use in making the call. */
1266 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1267 if ((mode = apply_args_mode[regno]) != VOIDmode)
1269 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1270 if (size % align != 0)
1271 size = CEIL (size, align) * align;
1272 reg = gen_rtx_REG (mode, regno);
1273 emit_move_insn (reg, adjust_address (arguments, mode, size));
1274 use_reg (&call_fusage, reg);
1275 size += GET_MODE_SIZE (mode);
1278 /* Restore the structure value address unless this is passed as an
1279 "invisible" first argument. */
1280 size = GET_MODE_SIZE (Pmode);
1281 if (struct_value_rtx)
1283 rtx value = gen_reg_rtx (Pmode);
1284 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1285 emit_move_insn (struct_value_rtx, value);
1286 if (GET_CODE (struct_value_rtx) == REG)
1287 use_reg (&call_fusage, struct_value_rtx);
1288 size += GET_MODE_SIZE (Pmode);
1291 /* All arguments and registers used for the call are set up by now! */
1292 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1294 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1295 and we don't want to load it into a register as an optimization,
1296 because prepare_call_address already did it if it should be done. */
1297 if (GET_CODE (function) != SYMBOL_REF)
1298 function = memory_address (FUNCTION_MODE, function);
1300 /* Generate the actual call instruction and save the return value. */
1301 #ifdef HAVE_untyped_call
1302 if (HAVE_untyped_call)
1303 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1304 result, result_vector (1, result)));
1305 else
1306 #endif
1307 #ifdef HAVE_call_value
1308 if (HAVE_call_value)
1310 rtx valreg = 0;
1312 /* Locate the unique return register. It is not possible to
1313 express a call that sets more than one return register using
1314 call_value; use untyped_call for that. In fact, untyped_call
1315 only needs to save the return registers in the given block. */
1316 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1317 if ((mode = apply_result_mode[regno]) != VOIDmode)
1319 if (valreg)
1320 abort (); /* HAVE_untyped_call required. */
1321 valreg = gen_rtx_REG (mode, regno);
1324 emit_call_insn (GEN_CALL_VALUE (valreg,
1325 gen_rtx_MEM (FUNCTION_MODE, function),
1326 const0_rtx, NULL_RTX, const0_rtx));
1328 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1330 else
1331 #endif
1332 abort ();
1334 /* Find the CALL insn we just emitted, and attach the register usage
1335 information. */
1336 call_insn = last_call_insn ();
1337 add_function_usage_to (call_insn, call_fusage);
1339 /* Restore the stack. */
1340 #ifdef HAVE_save_stack_nonlocal
1341 if (HAVE_save_stack_nonlocal)
1342 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1343 else
1344 #endif
1345 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1347 OK_DEFER_POP;
1349 /* Return the address of the result block. */
1350 return copy_addr_to_reg (XEXP (result, 0));
1353 /* Perform an untyped return. */
1355 static void
1356 expand_builtin_return (rtx result)
1358 int size, align, regno;
1359 enum machine_mode mode;
1360 rtx reg;
1361 rtx call_fusage = 0;
1363 #ifdef POINTERS_EXTEND_UNSIGNED
1364 if (GET_MODE (result) != Pmode)
1365 result = convert_memory_address (Pmode, result);
1366 #endif
1368 apply_result_size ();
1369 result = gen_rtx_MEM (BLKmode, result);
1371 #ifdef HAVE_untyped_return
1372 if (HAVE_untyped_return)
1374 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1375 emit_barrier ();
1376 return;
1378 #endif
1380 /* Restore the return value and note that each value is used. */
1381 size = 0;
1382 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1383 if ((mode = apply_result_mode[regno]) != VOIDmode)
1385 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1386 if (size % align != 0)
1387 size = CEIL (size, align) * align;
1388 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1389 emit_move_insn (reg, adjust_address (result, mode, size));
1391 push_to_sequence (call_fusage);
1392 emit_insn (gen_rtx_USE (VOIDmode, reg));
1393 call_fusage = get_insns ();
1394 end_sequence ();
1395 size += GET_MODE_SIZE (mode);
1398 /* Put the USE insns before the return. */
1399 emit_insn (call_fusage);
1401 /* Return whatever values was restored by jumping directly to the end
1402 of the function. */
1403 expand_null_return ();
1406 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1408 static enum type_class
1409 type_to_class (tree type)
1411 switch (TREE_CODE (type))
1413 case VOID_TYPE: return void_type_class;
1414 case INTEGER_TYPE: return integer_type_class;
1415 case CHAR_TYPE: return char_type_class;
1416 case ENUMERAL_TYPE: return enumeral_type_class;
1417 case BOOLEAN_TYPE: return boolean_type_class;
1418 case POINTER_TYPE: return pointer_type_class;
1419 case REFERENCE_TYPE: return reference_type_class;
1420 case OFFSET_TYPE: return offset_type_class;
1421 case REAL_TYPE: return real_type_class;
1422 case COMPLEX_TYPE: return complex_type_class;
1423 case FUNCTION_TYPE: return function_type_class;
1424 case METHOD_TYPE: return method_type_class;
1425 case RECORD_TYPE: return record_type_class;
1426 case UNION_TYPE:
1427 case QUAL_UNION_TYPE: return union_type_class;
1428 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1429 ? string_type_class : array_type_class);
1430 case SET_TYPE: return set_type_class;
1431 case FILE_TYPE: return file_type_class;
1432 case LANG_TYPE: return lang_type_class;
1433 default: return no_type_class;
1437 /* Expand a call to __builtin_classify_type with arguments found in
1438 ARGLIST. */
1440 static rtx
1441 expand_builtin_classify_type (tree arglist)
1443 if (arglist != 0)
1444 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1445 return GEN_INT (no_type_class);
1448 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1450 static rtx
1451 expand_builtin_constant_p (tree arglist, enum machine_mode target_mode)
1453 rtx tmp;
1455 if (arglist == 0)
1456 return const0_rtx;
1457 arglist = TREE_VALUE (arglist);
1459 /* We have taken care of the easy cases during constant folding. This
1460 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1461 get a chance to see if it can deduce whether ARGLIST is constant. */
1463 current_function_calls_constant_p = 1;
1465 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1466 tmp = gen_rtx_CONSTANT_P_RTX (target_mode, tmp);
1467 return tmp;
1470 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1471 if available. */
1472 tree
1473 mathfn_built_in (tree type, enum built_in_function fn)
1475 enum built_in_function fcode = NOT_BUILT_IN;
1476 if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
1477 switch (fn)
1479 case BUILT_IN_SQRT:
1480 case BUILT_IN_SQRTF:
1481 case BUILT_IN_SQRTL:
1482 fcode = BUILT_IN_SQRT;
1483 break;
1484 case BUILT_IN_SIN:
1485 case BUILT_IN_SINF:
1486 case BUILT_IN_SINL:
1487 fcode = BUILT_IN_SIN;
1488 break;
1489 case BUILT_IN_COS:
1490 case BUILT_IN_COSF:
1491 case BUILT_IN_COSL:
1492 fcode = BUILT_IN_COS;
1493 break;
1494 case BUILT_IN_EXP:
1495 case BUILT_IN_EXPF:
1496 case BUILT_IN_EXPL:
1497 fcode = BUILT_IN_EXP;
1498 break;
1499 case BUILT_IN_LOG:
1500 case BUILT_IN_LOGF:
1501 case BUILT_IN_LOGL:
1502 fcode = BUILT_IN_LOG;
1503 break;
1504 case BUILT_IN_TAN:
1505 case BUILT_IN_TANF:
1506 case BUILT_IN_TANL:
1507 fcode = BUILT_IN_TAN;
1508 break;
1509 case BUILT_IN_ATAN:
1510 case BUILT_IN_ATANF:
1511 case BUILT_IN_ATANL:
1512 fcode = BUILT_IN_ATAN;
1513 break;
1514 case BUILT_IN_FLOOR:
1515 case BUILT_IN_FLOORF:
1516 case BUILT_IN_FLOORL:
1517 fcode = BUILT_IN_FLOOR;
1518 break;
1519 case BUILT_IN_CEIL:
1520 case BUILT_IN_CEILF:
1521 case BUILT_IN_CEILL:
1522 fcode = BUILT_IN_CEIL;
1523 break;
1524 case BUILT_IN_TRUNC:
1525 case BUILT_IN_TRUNCF:
1526 case BUILT_IN_TRUNCL:
1527 fcode = BUILT_IN_TRUNC;
1528 break;
1529 case BUILT_IN_ROUND:
1530 case BUILT_IN_ROUNDF:
1531 case BUILT_IN_ROUNDL:
1532 fcode = BUILT_IN_ROUND;
1533 break;
1534 case BUILT_IN_NEARBYINT:
1535 case BUILT_IN_NEARBYINTF:
1536 case BUILT_IN_NEARBYINTL:
1537 fcode = BUILT_IN_NEARBYINT;
1538 break;
1539 default:
1540 abort ();
1542 else if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
1543 switch (fn)
1545 case BUILT_IN_SQRT:
1546 case BUILT_IN_SQRTF:
1547 case BUILT_IN_SQRTL:
1548 fcode = BUILT_IN_SQRTF;
1549 break;
1550 case BUILT_IN_SIN:
1551 case BUILT_IN_SINF:
1552 case BUILT_IN_SINL:
1553 fcode = BUILT_IN_SINF;
1554 break;
1555 case BUILT_IN_COS:
1556 case BUILT_IN_COSF:
1557 case BUILT_IN_COSL:
1558 fcode = BUILT_IN_COSF;
1559 break;
1560 case BUILT_IN_EXP:
1561 case BUILT_IN_EXPF:
1562 case BUILT_IN_EXPL:
1563 fcode = BUILT_IN_EXPF;
1564 break;
1565 case BUILT_IN_LOG:
1566 case BUILT_IN_LOGF:
1567 case BUILT_IN_LOGL:
1568 fcode = BUILT_IN_LOGF;
1569 break;
1570 case BUILT_IN_TAN:
1571 case BUILT_IN_TANF:
1572 case BUILT_IN_TANL:
1573 fcode = BUILT_IN_TANF;
1574 break;
1575 case BUILT_IN_ATAN:
1576 case BUILT_IN_ATANF:
1577 case BUILT_IN_ATANL:
1578 fcode = BUILT_IN_ATANF;
1579 break;
1580 case BUILT_IN_FLOOR:
1581 case BUILT_IN_FLOORF:
1582 case BUILT_IN_FLOORL:
1583 fcode = BUILT_IN_FLOORF;
1584 break;
1585 case BUILT_IN_CEIL:
1586 case BUILT_IN_CEILF:
1587 case BUILT_IN_CEILL:
1588 fcode = BUILT_IN_CEILF;
1589 break;
1590 case BUILT_IN_TRUNC:
1591 case BUILT_IN_TRUNCF:
1592 case BUILT_IN_TRUNCL:
1593 fcode = BUILT_IN_TRUNCF;
1594 break;
1595 case BUILT_IN_ROUND:
1596 case BUILT_IN_ROUNDF:
1597 case BUILT_IN_ROUNDL:
1598 fcode = BUILT_IN_ROUNDF;
1599 break;
1600 case BUILT_IN_NEARBYINT:
1601 case BUILT_IN_NEARBYINTF:
1602 case BUILT_IN_NEARBYINTL:
1603 fcode = BUILT_IN_NEARBYINTF;
1604 break;
1605 default:
1606 abort ();
1608 else if (TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
1609 switch (fn)
1611 case BUILT_IN_SQRT:
1612 case BUILT_IN_SQRTF:
1613 case BUILT_IN_SQRTL:
1614 fcode = BUILT_IN_SQRTL;
1615 break;
1616 case BUILT_IN_SIN:
1617 case BUILT_IN_SINF:
1618 case BUILT_IN_SINL:
1619 fcode = BUILT_IN_SINL;
1620 break;
1621 case BUILT_IN_COS:
1622 case BUILT_IN_COSF:
1623 case BUILT_IN_COSL:
1624 fcode = BUILT_IN_COSL;
1625 break;
1626 case BUILT_IN_EXP:
1627 case BUILT_IN_EXPF:
1628 case BUILT_IN_EXPL:
1629 fcode = BUILT_IN_EXPL;
1630 break;
1631 case BUILT_IN_LOG:
1632 case BUILT_IN_LOGF:
1633 case BUILT_IN_LOGL:
1634 fcode = BUILT_IN_LOGL;
1635 break;
1636 case BUILT_IN_TAN:
1637 case BUILT_IN_TANF:
1638 case BUILT_IN_TANL:
1639 fcode = BUILT_IN_TANL;
1640 break;
1641 case BUILT_IN_ATAN:
1642 case BUILT_IN_ATANF:
1643 case BUILT_IN_ATANL:
1644 fcode = BUILT_IN_ATANL;
1645 break;
1646 case BUILT_IN_FLOOR:
1647 case BUILT_IN_FLOORF:
1648 case BUILT_IN_FLOORL:
1649 fcode = BUILT_IN_FLOORL;
1650 break;
1651 case BUILT_IN_CEIL:
1652 case BUILT_IN_CEILF:
1653 case BUILT_IN_CEILL:
1654 fcode = BUILT_IN_CEILL;
1655 break;
1656 case BUILT_IN_TRUNC:
1657 case BUILT_IN_TRUNCF:
1658 case BUILT_IN_TRUNCL:
1659 fcode = BUILT_IN_TRUNCL;
1660 break;
1661 case BUILT_IN_ROUND:
1662 case BUILT_IN_ROUNDF:
1663 case BUILT_IN_ROUNDL:
1664 fcode = BUILT_IN_ROUNDL;
1665 break;
1666 case BUILT_IN_NEARBYINT:
1667 case BUILT_IN_NEARBYINTF:
1668 case BUILT_IN_NEARBYINTL:
1669 fcode = BUILT_IN_NEARBYINTL;
1670 break;
1671 default:
1672 abort ();
1674 return implicit_built_in_decls[fcode];
1677 /* If errno must be maintained, expand the RTL to check if the result,
1678 TARGET, of a built-in function call, EXP, is NaN, and if so set
1679 errno to EDOM. */
1681 static void
1682 expand_errno_check (tree exp, rtx target)
1684 rtx lab = gen_label_rtx ();
1686 /* Test the result; if it is NaN, set errno=EDOM because
1687 the argument was not in the domain. */
1688 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1689 0, lab);
1691 #ifdef TARGET_EDOM
1692 /* If this built-in doesn't throw an exception, set errno directly. */
1693 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
1695 #ifdef GEN_ERRNO_RTX
1696 rtx errno_rtx = GEN_ERRNO_RTX;
1697 #else
1698 rtx errno_rtx
1699 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1700 #endif
1701 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1702 emit_label (lab);
1703 return;
1705 #endif
1707 /* We can't set errno=EDOM directly; let the library call do it.
1708 Pop the arguments right away in case the call gets deleted. */
1709 NO_DEFER_POP;
1710 expand_call (exp, target, 0);
1711 OK_DEFER_POP;
1712 emit_label (lab);
1716 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1717 Return 0 if a normal call should be emitted rather than expanding the
1718 function in-line. EXP is the expression that is a call to the builtin
1719 function; if convenient, the result should be placed in TARGET.
1720 SUBTARGET may be used as the target for computing one of EXP's operands. */
1722 static rtx
1723 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1725 optab builtin_optab;
1726 rtx op0, insns;
1727 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1728 tree arglist = TREE_OPERAND (exp, 1);
1729 enum machine_mode mode;
1730 bool errno_set = false;
1731 tree arg, narg;
1733 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1734 return 0;
1736 arg = TREE_VALUE (arglist);
1738 switch (DECL_FUNCTION_CODE (fndecl))
1740 case BUILT_IN_SIN:
1741 case BUILT_IN_SINF:
1742 case BUILT_IN_SINL:
1743 builtin_optab = sin_optab; break;
1744 case BUILT_IN_COS:
1745 case BUILT_IN_COSF:
1746 case BUILT_IN_COSL:
1747 builtin_optab = cos_optab; break;
1748 case BUILT_IN_SQRT:
1749 case BUILT_IN_SQRTF:
1750 case BUILT_IN_SQRTL:
1751 errno_set = ! tree_expr_nonnegative_p (arg);
1752 builtin_optab = sqrt_optab;
1753 break;
1754 case BUILT_IN_EXP:
1755 case BUILT_IN_EXPF:
1756 case BUILT_IN_EXPL:
1757 errno_set = true; builtin_optab = exp_optab; break;
1758 case BUILT_IN_LOG:
1759 case BUILT_IN_LOGF:
1760 case BUILT_IN_LOGL:
1761 errno_set = true; builtin_optab = log_optab; break;
1762 case BUILT_IN_TAN:
1763 case BUILT_IN_TANF:
1764 case BUILT_IN_TANL:
1765 builtin_optab = tan_optab; break;
1766 case BUILT_IN_ATAN:
1767 case BUILT_IN_ATANF:
1768 case BUILT_IN_ATANL:
1769 builtin_optab = atan_optab; break;
1770 case BUILT_IN_FLOOR:
1771 case BUILT_IN_FLOORF:
1772 case BUILT_IN_FLOORL:
1773 builtin_optab = floor_optab; break;
1774 case BUILT_IN_CEIL:
1775 case BUILT_IN_CEILF:
1776 case BUILT_IN_CEILL:
1777 builtin_optab = ceil_optab; break;
1778 case BUILT_IN_TRUNC:
1779 case BUILT_IN_TRUNCF:
1780 case BUILT_IN_TRUNCL:
1781 builtin_optab = trunc_optab; break;
1782 case BUILT_IN_ROUND:
1783 case BUILT_IN_ROUNDF:
1784 case BUILT_IN_ROUNDL:
1785 builtin_optab = round_optab; break;
1786 case BUILT_IN_NEARBYINT:
1787 case BUILT_IN_NEARBYINTF:
1788 case BUILT_IN_NEARBYINTL:
1789 builtin_optab = nearbyint_optab; break;
1790 default:
1791 abort ();
1794 /* Make a suitable register to place result in. */
1795 mode = TYPE_MODE (TREE_TYPE (exp));
1797 /* Before working hard, check whether the instruction is available. */
1798 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1799 return 0;
1800 target = gen_reg_rtx (mode);
1802 if (! flag_errno_math || ! HONOR_NANS (mode))
1803 errno_set = false;
1805 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1806 need to expand the argument again. This way, we will not perform
1807 side-effects more the once. */
1808 narg = save_expr (arg);
1809 if (narg != arg)
1811 arglist = build_tree_list (NULL_TREE, arg);
1812 exp = build_function_call_expr (fndecl, arglist);
1815 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1817 emit_queue ();
1818 start_sequence ();
1820 /* Compute into TARGET.
1821 Set TARGET to wherever the result comes back. */
1822 target = expand_unop (mode, builtin_optab, op0, target, 0);
1824 /* If we were unable to expand via the builtin, stop the sequence
1825 (without outputting the insns) and call to the library function
1826 with the stabilized argument list. */
1827 if (target == 0)
1829 end_sequence ();
1830 return expand_call (exp, target, target == const0_rtx);
1833 if (errno_set)
1834 expand_errno_check (exp, target);
1836 /* Output the entire sequence. */
1837 insns = get_insns ();
1838 end_sequence ();
1839 emit_insn (insns);
1841 return target;
1844 /* Expand a call to the builtin binary math functions (pow and atan2).
1845 Return 0 if a normal call should be emitted rather than expanding the
1846 function in-line. EXP is the expression that is a call to the builtin
1847 function; if convenient, the result should be placed in TARGET.
1848 SUBTARGET may be used as the target for computing one of EXP's
1849 operands. */
1851 static rtx
1852 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1854 optab builtin_optab;
1855 rtx op0, op1, insns;
1856 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1857 tree arglist = TREE_OPERAND (exp, 1);
1858 tree arg0, arg1, temp, narg;
1859 enum machine_mode mode;
1860 bool errno_set = true;
1861 bool stable = true;
1863 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1864 return 0;
1866 arg0 = TREE_VALUE (arglist);
1867 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1869 switch (DECL_FUNCTION_CODE (fndecl))
1871 case BUILT_IN_POW:
1872 case BUILT_IN_POWF:
1873 case BUILT_IN_POWL:
1874 builtin_optab = pow_optab; break;
1875 case BUILT_IN_ATAN2:
1876 case BUILT_IN_ATAN2F:
1877 case BUILT_IN_ATAN2L:
1878 builtin_optab = atan2_optab; break;
1879 default:
1880 abort ();
1883 /* Make a suitable register to place result in. */
1884 mode = TYPE_MODE (TREE_TYPE (exp));
1886 /* Before working hard, check whether the instruction is available. */
1887 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1888 return 0;
1890 target = gen_reg_rtx (mode);
1892 if (! flag_errno_math || ! HONOR_NANS (mode))
1893 errno_set = false;
1895 /* Alway stabilize the argument list. */
1896 narg = save_expr (arg1);
1897 if (narg != arg1)
1899 temp = build_tree_list (NULL_TREE, narg);
1900 stable = false;
1902 else
1903 temp = TREE_CHAIN (arglist);
1905 narg = save_expr (arg0);
1906 if (narg != arg0)
1908 arglist = tree_cons (NULL_TREE, narg, temp);
1909 stable = false;
1911 else if (! stable)
1912 arglist = tree_cons (NULL_TREE, arg0, temp);
1914 if (! stable)
1915 exp = build_function_call_expr (fndecl, arglist);
1917 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1918 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1920 emit_queue ();
1921 start_sequence ();
1923 /* Compute into TARGET.
1924 Set TARGET to wherever the result comes back. */
1925 target = expand_binop (mode, builtin_optab, op0, op1,
1926 target, 0, OPTAB_DIRECT);
1928 /* If we were unable to expand via the builtin, stop the sequence
1929 (without outputting the insns) and call to the library function
1930 with the stabilized argument list. */
1931 if (target == 0)
1933 end_sequence ();
1934 return expand_call (exp, target, target == const0_rtx);
1937 if (errno_set)
1938 expand_errno_check (exp, target);
1940 /* Output the entire sequence. */
1941 insns = get_insns ();
1942 end_sequence ();
1943 emit_insn (insns);
1945 return target;
1948 /* To evaluate powi(x,n), the floating point value x raised to the
1949 constant integer exponent n, we use a hybrid algorithm that
1950 combines the "window method" with look-up tables. For an
1951 introduction to exponentiation algorithms and "addition chains",
1952 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
1953 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
1954 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
1955 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
1957 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
1958 multiplications to inline before calling the system library's pow
1959 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
1960 so this default never requires calling pow, powf or powl. */
1962 #ifndef POWI_MAX_MULTS
1963 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
1964 #endif
1966 /* The size of the "optimal power tree" lookup table. All
1967 exponents less than this value are simply looked up in the
1968 powi_table below. This threshold is also used to size the
1969 cache of pseudo registers that hold intermediate results. */
1970 #define POWI_TABLE_SIZE 256
1972 /* The size, in bits of the window, used in the "window method"
1973 exponentiation algorithm. This is equivalent to a radix of
1974 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
1975 #define POWI_WINDOW_SIZE 3
1977 /* The following table is an efficient representation of an
1978 "optimal power tree". For each value, i, the corresponding
1979 value, j, in the table states than an optimal evaluation
1980 sequence for calculating pow(x,i) can be found by evaluating
1981 pow(x,j)*pow(x,i-j). An optimal power tree for the first
1982 100 integers is given in Knuth's "Seminumerical algorithms". */
1984 static const unsigned char powi_table[POWI_TABLE_SIZE] =
1986 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
1987 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
1988 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
1989 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
1990 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
1991 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
1992 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
1993 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
1994 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
1995 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
1996 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
1997 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
1998 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
1999 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2000 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2001 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2002 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2003 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2004 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2005 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2006 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2007 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2008 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2009 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2010 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2011 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2012 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2013 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2014 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2015 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2016 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2017 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2021 /* Return the number of multiplications required to calculate
2022 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2023 subroutine of powi_cost. CACHE is an array indicating
2024 which exponents have already been calculated. */
2026 static int
2027 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2029 /* If we've already calculated this exponent, then this evaluation
2030 doesn't require any additional multiplications. */
2031 if (cache[n])
2032 return 0;
2034 cache[n] = true;
2035 return powi_lookup_cost (n - powi_table[n], cache)
2036 + powi_lookup_cost (powi_table[n], cache) + 1;
2039 /* Return the number of multiplications required to calculate
2040 powi(x,n) for an arbitrary x, given the exponent N. This
2041 function needs to be kept in sync with expand_powi below. */
2043 static int
2044 powi_cost (HOST_WIDE_INT n)
2046 bool cache[POWI_TABLE_SIZE];
2047 unsigned HOST_WIDE_INT digit;
2048 unsigned HOST_WIDE_INT val;
2049 int result;
2051 if (n == 0)
2052 return 0;
2054 /* Ignore the reciprocal when calculating the cost. */
2055 val = (n < 0) ? -n : n;
2057 /* Initialize the exponent cache. */
2058 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2059 cache[1] = true;
2061 result = 0;
2063 while (val >= POWI_TABLE_SIZE)
2065 if (val & 1)
2067 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2068 result += powi_lookup_cost (digit, cache)
2069 + POWI_WINDOW_SIZE + 1;
2070 val >>= POWI_WINDOW_SIZE;
2072 else
2074 val >>= 1;
2075 result++;
2079 return result + powi_lookup_cost (val, cache);
2082 /* Recursive subroutine of expand_powi. This function takes the array,
2083 CACHE, of already calculated exponents and an exponent N and returns
2084 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2086 static rtx
2087 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2089 unsigned HOST_WIDE_INT digit;
2090 rtx target, result;
2091 rtx op0, op1;
2093 if (n < POWI_TABLE_SIZE)
2095 if (cache[n])
2096 return cache[n];
2098 target = gen_reg_rtx (mode);
2099 cache[n] = target;
2101 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2102 op1 = expand_powi_1 (mode, powi_table[n], cache);
2104 else if (n & 1)
2106 target = gen_reg_rtx (mode);
2107 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2108 op0 = expand_powi_1 (mode, n - digit, cache);
2109 op1 = expand_powi_1 (mode, digit, cache);
2111 else
2113 target = gen_reg_rtx (mode);
2114 op0 = expand_powi_1 (mode, n >> 1, cache);
2115 op1 = op0;
2118 result = expand_mult (mode, op0, op1, target, 0);
2119 if (result != target)
2120 emit_move_insn (target, result);
2121 return target;
2124 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2125 floating point operand in mode MODE, and N is the exponent. This
2126 function needs to be kept in sync with powi_cost above. */
2128 static rtx
2129 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2131 unsigned HOST_WIDE_INT val;
2132 rtx cache[POWI_TABLE_SIZE];
2133 rtx result;
2135 if (n == 0)
2136 return CONST1_RTX (mode);
2138 val = (n < 0) ? -n : n;
2140 memset (cache, 0, sizeof(cache));
2141 cache[1] = x;
2143 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2145 /* If the original exponent was negative, reciprocate the result. */
2146 if (n < 0)
2147 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2148 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2150 return result;
2153 /* Expand a call to the pow built-in mathematical function. Return 0 if
2154 a normal call should be emitted rather than expanding the function
2155 in-line. EXP is the expression that is a call to the builtin
2156 function; if convenient, the result should be placed in TARGET. */
2158 static rtx
2159 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2161 tree arglist = TREE_OPERAND (exp, 1);
2162 tree arg0, arg1;
2164 if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2165 return 0;
2167 arg0 = TREE_VALUE (arglist);
2168 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2170 if (flag_unsafe_math_optimizations
2171 && ! flag_errno_math
2172 && ! optimize_size
2173 && TREE_CODE (arg1) == REAL_CST
2174 && ! TREE_CONSTANT_OVERFLOW (arg1))
2176 REAL_VALUE_TYPE cint;
2177 REAL_VALUE_TYPE c;
2178 HOST_WIDE_INT n;
2180 c = TREE_REAL_CST (arg1);
2181 n = real_to_integer (&c);
2182 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2183 if (real_identical (&c, &cint)
2184 && powi_cost (n) <= POWI_MAX_MULTS)
2186 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2187 rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
2188 op = force_reg (mode, op);
2189 return expand_powi (op, mode, n);
2192 return expand_builtin_mathfn_2 (exp, target, NULL_RTX);
2195 /* Expand expression EXP which is a call to the strlen builtin. Return 0
2196 if we failed the caller should emit a normal call, otherwise
2197 try to get the result in TARGET, if convenient. */
2199 static rtx
2200 expand_builtin_strlen (tree arglist, rtx target,
2201 enum machine_mode target_mode)
2203 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
2204 return 0;
2205 else
2207 rtx pat;
2208 tree len, src = TREE_VALUE (arglist);
2209 rtx result, src_reg, char_rtx, before_strlen;
2210 enum machine_mode insn_mode = target_mode, char_mode;
2211 enum insn_code icode = CODE_FOR_nothing;
2212 int align;
2214 /* If the length can be computed at compile-time, return it. */
2215 len = c_strlen (src, 0);
2216 if (len)
2217 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2219 /* If the length can be computed at compile-time and is constant
2220 integer, but there are side-effects in src, evaluate
2221 src for side-effects, then return len.
2222 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2223 can be optimized into: i++; x = 3; */
2224 len = c_strlen (src, 1);
2225 if (len && TREE_CODE (len) == INTEGER_CST)
2227 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2228 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2231 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2233 /* If SRC is not a pointer type, don't do this operation inline. */
2234 if (align == 0)
2235 return 0;
2237 /* Bail out if we can't compute strlen in the right mode. */
2238 while (insn_mode != VOIDmode)
2240 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
2241 if (icode != CODE_FOR_nothing)
2242 break;
2244 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2246 if (insn_mode == VOIDmode)
2247 return 0;
2249 /* Make a place to write the result of the instruction. */
2250 result = target;
2251 if (! (result != 0
2252 && GET_CODE (result) == REG
2253 && GET_MODE (result) == insn_mode
2254 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2255 result = gen_reg_rtx (insn_mode);
2257 /* Make a place to hold the source address. We will not expand
2258 the actual source until we are sure that the expansion will
2259 not fail -- there are trees that cannot be expanded twice. */
2260 src_reg = gen_reg_rtx (Pmode);
2262 /* Mark the beginning of the strlen sequence so we can emit the
2263 source operand later. */
2264 before_strlen = get_last_insn ();
2266 char_rtx = const0_rtx;
2267 char_mode = insn_data[(int) icode].operand[2].mode;
2268 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2269 char_mode))
2270 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
2272 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2273 char_rtx, GEN_INT (align));
2274 if (! pat)
2275 return 0;
2276 emit_insn (pat);
2278 /* Now that we are assured of success, expand the source. */
2279 start_sequence ();
2280 pat = memory_address (BLKmode,
2281 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
2282 if (pat != src_reg)
2283 emit_move_insn (src_reg, pat);
2284 pat = get_insns ();
2285 end_sequence ();
2287 if (before_strlen)
2288 emit_insn_after (pat, before_strlen);
2289 else
2290 emit_insn_before (pat, get_insns ());
2292 /* Return the value in the proper mode for this function. */
2293 if (GET_MODE (result) == target_mode)
2294 target = result;
2295 else if (target != 0)
2296 convert_move (target, result, 0);
2297 else
2298 target = convert_to_mode (target_mode, result, 0);
2300 return target;
2304 /* Expand a call to the strstr builtin. Return 0 if we failed the
2305 caller should emit a normal call, otherwise try to get the result
2306 in TARGET, if convenient (and in mode MODE if that's convenient). */
2308 static rtx
2309 expand_builtin_strstr (tree arglist, rtx target, enum machine_mode mode)
2311 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2312 return 0;
2313 else
2315 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2316 tree fn;
2317 const char *p1, *p2;
2319 p2 = c_getstr (s2);
2320 if (p2 == NULL)
2321 return 0;
2323 p1 = c_getstr (s1);
2324 if (p1 != NULL)
2326 const char *r = strstr (p1, p2);
2328 if (r == NULL)
2329 return const0_rtx;
2331 /* Return an offset into the constant string argument. */
2332 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2333 s1, ssize_int (r - p1))),
2334 target, mode, EXPAND_NORMAL);
2337 if (p2[0] == '\0')
2338 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2340 if (p2[1] != '\0')
2341 return 0;
2343 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2344 if (!fn)
2345 return 0;
2347 /* New argument list transforming strstr(s1, s2) to
2348 strchr(s1, s2[0]). */
2349 arglist =
2350 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2351 arglist = tree_cons (NULL_TREE, s1, arglist);
2352 return expand_expr (build_function_call_expr (fn, arglist),
2353 target, mode, EXPAND_NORMAL);
2357 /* Expand a call to the strchr builtin. Return 0 if we failed the
2358 caller should emit a normal call, otherwise try to get the result
2359 in TARGET, if convenient (and in mode MODE if that's convenient). */
2361 static rtx
2362 expand_builtin_strchr (tree arglist, rtx target, enum machine_mode mode)
2364 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2365 return 0;
2366 else
2368 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2369 const char *p1;
2371 if (TREE_CODE (s2) != INTEGER_CST)
2372 return 0;
2374 p1 = c_getstr (s1);
2375 if (p1 != NULL)
2377 char c;
2378 const char *r;
2380 if (target_char_cast (s2, &c))
2381 return 0;
2383 r = strchr (p1, c);
2385 if (r == NULL)
2386 return const0_rtx;
2388 /* Return an offset into the constant string argument. */
2389 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2390 s1, ssize_int (r - p1))),
2391 target, mode, EXPAND_NORMAL);
2394 /* FIXME: Should use here strchrM optab so that ports can optimize
2395 this. */
2396 return 0;
2400 /* Expand a call to the strrchr builtin. Return 0 if we failed the
2401 caller should emit a normal call, otherwise try to get the result
2402 in TARGET, if convenient (and in mode MODE if that's convenient). */
2404 static rtx
2405 expand_builtin_strrchr (tree arglist, rtx target, enum machine_mode mode)
2407 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2408 return 0;
2409 else
2411 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2412 tree fn;
2413 const char *p1;
2415 if (TREE_CODE (s2) != INTEGER_CST)
2416 return 0;
2418 p1 = c_getstr (s1);
2419 if (p1 != NULL)
2421 char c;
2422 const char *r;
2424 if (target_char_cast (s2, &c))
2425 return 0;
2427 r = strrchr (p1, c);
2429 if (r == NULL)
2430 return const0_rtx;
2432 /* Return an offset into the constant string argument. */
2433 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2434 s1, ssize_int (r - p1))),
2435 target, mode, EXPAND_NORMAL);
2438 if (! integer_zerop (s2))
2439 return 0;
2441 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2442 if (!fn)
2443 return 0;
2445 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2446 return expand_expr (build_function_call_expr (fn, arglist),
2447 target, mode, EXPAND_NORMAL);
2451 /* Expand a call to the strpbrk builtin. Return 0 if we failed the
2452 caller should emit a normal call, otherwise try to get the result
2453 in TARGET, if convenient (and in mode MODE if that's convenient). */
2455 static rtx
2456 expand_builtin_strpbrk (tree arglist, rtx target, enum machine_mode mode)
2458 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2459 return 0;
2460 else
2462 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2463 tree fn;
2464 const char *p1, *p2;
2466 p2 = c_getstr (s2);
2467 if (p2 == NULL)
2468 return 0;
2470 p1 = c_getstr (s1);
2471 if (p1 != NULL)
2473 const char *r = strpbrk (p1, p2);
2475 if (r == NULL)
2476 return const0_rtx;
2478 /* Return an offset into the constant string argument. */
2479 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2480 s1, ssize_int (r - p1))),
2481 target, mode, EXPAND_NORMAL);
2484 if (p2[0] == '\0')
2486 /* strpbrk(x, "") == NULL.
2487 Evaluate and ignore the arguments in case they had
2488 side-effects. */
2489 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2490 return const0_rtx;
2493 if (p2[1] != '\0')
2494 return 0; /* Really call strpbrk. */
2496 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2497 if (!fn)
2498 return 0;
2500 /* New argument list transforming strpbrk(s1, s2) to
2501 strchr(s1, s2[0]). */
2502 arglist =
2503 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2504 arglist = tree_cons (NULL_TREE, s1, arglist);
2505 return expand_expr (build_function_call_expr (fn, arglist),
2506 target, mode, EXPAND_NORMAL);
2510 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2511 bytes from constant string DATA + OFFSET and return it as target
2512 constant. */
2514 static rtx
2515 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2516 enum machine_mode mode)
2518 const char *str = (const char *) data;
2520 if (offset < 0
2521 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2522 > strlen (str) + 1))
2523 abort (); /* Attempt to read past the end of constant string. */
2525 return c_readstr (str + offset, mode);
2528 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2529 Return 0 if we failed, the caller should emit a normal call,
2530 otherwise try to get the result in TARGET, if convenient (and in
2531 mode MODE if that's convenient). */
2532 static rtx
2533 expand_builtin_memcpy (tree arglist, rtx target, enum machine_mode mode)
2535 if (!validate_arglist (arglist,
2536 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2537 return 0;
2538 else
2540 tree dest = TREE_VALUE (arglist);
2541 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2542 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2543 const char *src_str;
2544 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2545 unsigned int dest_align
2546 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2547 rtx dest_mem, src_mem, dest_addr, len_rtx;
2549 /* If DEST is not a pointer type, call the normal function. */
2550 if (dest_align == 0)
2551 return 0;
2553 /* If the LEN parameter is zero, return DEST. */
2554 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2556 /* Evaluate and ignore SRC in case it has side-effects. */
2557 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2558 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2561 /* If either SRC is not a pointer type, don't do this
2562 operation in-line. */
2563 if (src_align == 0)
2564 return 0;
2566 dest_mem = get_memory_rtx (dest);
2567 set_mem_align (dest_mem, dest_align);
2568 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2569 src_str = c_getstr (src);
2571 /* If SRC is a string constant and block move would be done
2572 by pieces, we can avoid loading the string from memory
2573 and only stored the computed constants. */
2574 if (src_str
2575 && GET_CODE (len_rtx) == CONST_INT
2576 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2577 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2578 (void *) src_str, dest_align))
2580 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2581 builtin_memcpy_read_str,
2582 (void *) src_str, dest_align, 0);
2583 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2584 #ifdef POINTERS_EXTEND_UNSIGNED
2585 if (GET_MODE (dest_mem) != ptr_mode)
2586 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2587 #endif
2588 return dest_mem;
2591 src_mem = get_memory_rtx (src);
2592 set_mem_align (src_mem, src_align);
2594 /* Copy word part most expediently. */
2595 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2596 BLOCK_OP_NORMAL);
2598 if (dest_addr == 0)
2600 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2601 #ifdef POINTERS_EXTEND_UNSIGNED
2602 if (GET_MODE (dest_addr) != ptr_mode)
2603 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2604 #endif
2606 return dest_addr;
2610 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2611 Return 0 if we failed the caller should emit a normal call,
2612 otherwise try to get the result in TARGET, if convenient (and in
2613 mode MODE if that's convenient). If ENDP is 0 return the
2614 destination pointer, if ENDP is 1 return the end pointer ala
2615 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2616 stpcpy. */
2618 static rtx
2619 expand_builtin_mempcpy (tree arglist, rtx target, enum machine_mode mode,
2620 int endp)
2622 if (!validate_arglist (arglist,
2623 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2624 return 0;
2625 /* If return value is ignored, transform mempcpy into memcpy. */
2626 else if (target == const0_rtx)
2628 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2630 if (!fn)
2631 return 0;
2633 return expand_expr (build_function_call_expr (fn, arglist),
2634 target, mode, EXPAND_NORMAL);
2636 else
2638 tree dest = TREE_VALUE (arglist);
2639 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2640 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2641 const char *src_str;
2642 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2643 unsigned int dest_align
2644 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2645 rtx dest_mem, src_mem, len_rtx;
2647 /* If DEST is not a pointer type or LEN is not constant,
2648 call the normal function. */
2649 if (dest_align == 0 || !host_integerp (len, 1))
2650 return 0;
2652 /* If the LEN parameter is zero, return DEST. */
2653 if (tree_low_cst (len, 1) == 0)
2655 /* Evaluate and ignore SRC in case it has side-effects. */
2656 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2657 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2660 /* If either SRC is not a pointer type, don't do this
2661 operation in-line. */
2662 if (src_align == 0)
2663 return 0;
2665 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2666 src_str = c_getstr (src);
2668 /* If SRC is a string constant and block move would be done
2669 by pieces, we can avoid loading the string from memory
2670 and only stored the computed constants. */
2671 if (src_str
2672 && GET_CODE (len_rtx) == CONST_INT
2673 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2674 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2675 (void *) src_str, dest_align))
2677 dest_mem = get_memory_rtx (dest);
2678 set_mem_align (dest_mem, dest_align);
2679 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2680 builtin_memcpy_read_str,
2681 (void *) src_str, dest_align, endp);
2682 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2683 #ifdef POINTERS_EXTEND_UNSIGNED
2684 if (GET_MODE (dest_mem) != ptr_mode)
2685 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2686 #endif
2687 return dest_mem;
2690 if (GET_CODE (len_rtx) == CONST_INT
2691 && can_move_by_pieces (INTVAL (len_rtx),
2692 MIN (dest_align, src_align)))
2694 dest_mem = get_memory_rtx (dest);
2695 set_mem_align (dest_mem, dest_align);
2696 src_mem = get_memory_rtx (src);
2697 set_mem_align (src_mem, src_align);
2698 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
2699 MIN (dest_align, src_align), endp);
2700 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2701 #ifdef POINTERS_EXTEND_UNSIGNED
2702 if (GET_MODE (dest_mem) != ptr_mode)
2703 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2704 #endif
2705 return dest_mem;
2708 return 0;
2712 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2713 if we failed the caller should emit a normal call. */
2715 static rtx
2716 expand_builtin_memmove (tree arglist, rtx target, enum machine_mode mode)
2718 if (!validate_arglist (arglist,
2719 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2720 return 0;
2721 else
2723 tree dest = TREE_VALUE (arglist);
2724 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2725 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2727 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2728 unsigned int dest_align
2729 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2731 /* If DEST is not a pointer type, call the normal function. */
2732 if (dest_align == 0)
2733 return 0;
2735 /* If the LEN parameter is zero, return DEST. */
2736 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2738 /* Evaluate and ignore SRC in case it has side-effects. */
2739 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2740 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2743 /* If either SRC is not a pointer type, don't do this
2744 operation in-line. */
2745 if (src_align == 0)
2746 return 0;
2748 /* If src is categorized for a readonly section we can use
2749 normal memcpy. */
2750 if (readonly_data_expr (src))
2752 tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2753 if (!fn)
2754 return 0;
2755 return expand_expr (build_function_call_expr (fn, arglist),
2756 target, mode, EXPAND_NORMAL);
2759 /* Otherwise, call the normal function. */
2760 return 0;
2764 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2765 if we failed the caller should emit a normal call. */
2767 static rtx
2768 expand_builtin_bcopy (tree arglist)
2770 tree src, dest, size, newarglist;
2772 if (!validate_arglist (arglist,
2773 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2774 return NULL_RTX;
2776 src = TREE_VALUE (arglist);
2777 dest = TREE_VALUE (TREE_CHAIN (arglist));
2778 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2780 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2781 memmove(ptr y, ptr x, size_t z). This is done this way
2782 so that if it isn't expanded inline, we fallback to
2783 calling bcopy instead of memmove. */
2785 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2786 newarglist = tree_cons (NULL_TREE, src, newarglist);
2787 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2789 return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
2792 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2793 if we failed the caller should emit a normal call, otherwise try to get
2794 the result in TARGET, if convenient (and in mode MODE if that's
2795 convenient). */
2797 static rtx
2798 expand_builtin_strcpy (tree arglist, rtx target, enum machine_mode mode)
2800 tree fn, len, src, dst;
2802 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2803 return 0;
2805 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2806 if (!fn)
2807 return 0;
2809 src = TREE_VALUE (TREE_CHAIN (arglist));
2810 len = c_strlen (src, 1);
2811 if (len == 0 || TREE_SIDE_EFFECTS (len))
2812 return 0;
2814 dst = TREE_VALUE (arglist);
2815 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2816 arglist = build_tree_list (NULL_TREE, len);
2817 arglist = tree_cons (NULL_TREE, src, arglist);
2818 arglist = tree_cons (NULL_TREE, dst, arglist);
2819 return expand_expr (build_function_call_expr (fn, arglist),
2820 target, mode, EXPAND_NORMAL);
2823 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2824 Return 0 if we failed the caller should emit a normal call,
2825 otherwise try to get the result in TARGET, if convenient (and in
2826 mode MODE if that's convenient). */
2828 static rtx
2829 expand_builtin_stpcpy (tree arglist, rtx target, enum machine_mode mode)
2831 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2832 return 0;
2833 else
2835 tree dst, src, len;
2837 /* If return value is ignored, transform stpcpy into strcpy. */
2838 if (target == const0_rtx)
2840 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
2841 if (!fn)
2842 return 0;
2844 return expand_expr (build_function_call_expr (fn, arglist),
2845 target, mode, EXPAND_NORMAL);
2848 /* Ensure we get an actual string whose length can be evaluated at
2849 compile-time, not an expression containing a string. This is
2850 because the latter will potentially produce pessimized code
2851 when used to produce the return value. */
2852 src = TREE_VALUE (TREE_CHAIN (arglist));
2853 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
2854 return 0;
2856 dst = TREE_VALUE (arglist);
2857 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2858 arglist = build_tree_list (NULL_TREE, len);
2859 arglist = tree_cons (NULL_TREE, src, arglist);
2860 arglist = tree_cons (NULL_TREE, dst, arglist);
2861 return expand_builtin_mempcpy (arglist, target, mode, /*endp=*/2);
2865 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2866 bytes from constant string DATA + OFFSET and return it as target
2867 constant. */
2869 static rtx
2870 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
2871 enum machine_mode mode)
2873 const char *str = (const char *) data;
2875 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2876 return const0_rtx;
2878 return c_readstr (str + offset, mode);
2881 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2882 if we failed the caller should emit a normal call. */
2884 static rtx
2885 expand_builtin_strncpy (tree arglist, rtx target, enum machine_mode mode)
2887 if (!validate_arglist (arglist,
2888 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2889 return 0;
2890 else
2892 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
2893 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2894 tree fn;
2896 /* We must be passed a constant len parameter. */
2897 if (TREE_CODE (len) != INTEGER_CST)
2898 return 0;
2900 /* If the len parameter is zero, return the dst parameter. */
2901 if (integer_zerop (len))
2903 /* Evaluate and ignore the src argument in case it has
2904 side-effects. */
2905 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2906 VOIDmode, EXPAND_NORMAL);
2907 /* Return the dst parameter. */
2908 return expand_expr (TREE_VALUE (arglist), target, mode,
2909 EXPAND_NORMAL);
2912 /* Now, we must be passed a constant src ptr parameter. */
2913 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2914 return 0;
2916 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2918 /* We're required to pad with trailing zeros if the requested
2919 len is greater than strlen(s2)+1. In that case try to
2920 use store_by_pieces, if it fails, punt. */
2921 if (tree_int_cst_lt (slen, len))
2923 tree dest = TREE_VALUE (arglist);
2924 unsigned int dest_align
2925 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2926 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2927 rtx dest_mem;
2929 if (!p || dest_align == 0 || !host_integerp (len, 1)
2930 || !can_store_by_pieces (tree_low_cst (len, 1),
2931 builtin_strncpy_read_str,
2932 (void *) p, dest_align))
2933 return 0;
2935 dest_mem = get_memory_rtx (dest);
2936 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2937 builtin_strncpy_read_str,
2938 (void *) p, dest_align, 0);
2939 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2940 #ifdef POINTERS_EXTEND_UNSIGNED
2941 if (GET_MODE (dest_mem) != ptr_mode)
2942 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2943 #endif
2944 return dest_mem;
2947 /* OK transform into builtin memcpy. */
2948 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2949 if (!fn)
2950 return 0;
2951 return expand_expr (build_function_call_expr (fn, arglist),
2952 target, mode, EXPAND_NORMAL);
2956 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2957 bytes from constant string DATA + OFFSET and return it as target
2958 constant. */
2960 static rtx
2961 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2962 enum machine_mode mode)
2964 const char *c = (const char *) data;
2965 char *p = alloca (GET_MODE_SIZE (mode));
2967 memset (p, *c, GET_MODE_SIZE (mode));
2969 return c_readstr (p, mode);
2972 /* Callback routine for store_by_pieces. Return the RTL of a register
2973 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2974 char value given in the RTL register data. For example, if mode is
2975 4 bytes wide, return the RTL for 0x01010101*data. */
2977 static rtx
2978 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2979 enum machine_mode mode)
2981 rtx target, coeff;
2982 size_t size;
2983 char *p;
2985 size = GET_MODE_SIZE (mode);
2986 if (size == 1)
2987 return (rtx) data;
2989 p = alloca (size);
2990 memset (p, 1, size);
2991 coeff = c_readstr (p, mode);
2993 target = convert_to_mode (mode, (rtx) data, 1);
2994 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2995 return force_reg (mode, target);
2998 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2999 if we failed the caller should emit a normal call, otherwise try to get
3000 the result in TARGET, if convenient (and in mode MODE if that's
3001 convenient). */
3003 static rtx
3004 expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode)
3006 if (!validate_arglist (arglist,
3007 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3008 return 0;
3009 else
3011 tree dest = TREE_VALUE (arglist);
3012 tree val = TREE_VALUE (TREE_CHAIN (arglist));
3013 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3014 char c;
3016 unsigned int dest_align
3017 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3018 rtx dest_mem, dest_addr, len_rtx;
3020 /* If DEST is not a pointer type, don't do this
3021 operation in-line. */
3022 if (dest_align == 0)
3023 return 0;
3025 /* If the LEN parameter is zero, return DEST. */
3026 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
3028 /* Evaluate and ignore VAL in case it has side-effects. */
3029 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3030 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3033 if (TREE_CODE (val) != INTEGER_CST)
3035 rtx val_rtx;
3037 if (!host_integerp (len, 1))
3038 return 0;
3040 if (optimize_size && tree_low_cst (len, 1) > 1)
3041 return 0;
3043 /* Assume that we can memset by pieces if we can store the
3044 * the coefficients by pieces (in the required modes).
3045 * We can't pass builtin_memset_gen_str as that emits RTL. */
3046 c = 1;
3047 if (!can_store_by_pieces (tree_low_cst (len, 1),
3048 builtin_memset_read_str,
3049 &c, dest_align))
3050 return 0;
3052 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
3053 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
3054 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3055 val_rtx);
3056 dest_mem = get_memory_rtx (dest);
3057 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3058 builtin_memset_gen_str,
3059 val_rtx, dest_align, 0);
3060 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3061 #ifdef POINTERS_EXTEND_UNSIGNED
3062 if (GET_MODE (dest_mem) != ptr_mode)
3063 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3064 #endif
3065 return dest_mem;
3068 if (target_char_cast (val, &c))
3069 return 0;
3071 if (c)
3073 if (!host_integerp (len, 1))
3074 return 0;
3075 if (!can_store_by_pieces (tree_low_cst (len, 1),
3076 builtin_memset_read_str, &c,
3077 dest_align))
3078 return 0;
3080 dest_mem = get_memory_rtx (dest);
3081 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3082 builtin_memset_read_str,
3083 &c, dest_align, 0);
3084 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3085 #ifdef POINTERS_EXTEND_UNSIGNED
3086 if (GET_MODE (dest_mem) != ptr_mode)
3087 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3088 #endif
3089 return dest_mem;
3092 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3094 dest_mem = get_memory_rtx (dest);
3095 set_mem_align (dest_mem, dest_align);
3096 dest_addr = clear_storage (dest_mem, len_rtx);
3098 if (dest_addr == 0)
3100 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3101 #ifdef POINTERS_EXTEND_UNSIGNED
3102 if (GET_MODE (dest_addr) != ptr_mode)
3103 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3104 #endif
3107 return dest_addr;
3111 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3112 if we failed the caller should emit a normal call. */
3114 static rtx
3115 expand_builtin_bzero (tree arglist)
3117 tree dest, size, newarglist;
3119 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3120 return NULL_RTX;
3122 dest = TREE_VALUE (arglist);
3123 size = TREE_VALUE (TREE_CHAIN (arglist));
3125 /* New argument list transforming bzero(ptr x, int y) to
3126 memset(ptr x, int 0, size_t y). This is done this way
3127 so that if it isn't expanded inline, we fallback to
3128 calling bzero instead of memset. */
3130 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
3131 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3132 newarglist = tree_cons (NULL_TREE, dest, newarglist);
3134 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
3137 /* Expand expression EXP, which is a call to the memcmp built-in function.
3138 ARGLIST is the argument list for this call. Return 0 if we failed and the
3139 caller should emit a normal call, otherwise try to get the result in
3140 TARGET, if convenient (and in mode MODE, if that's convenient). */
3142 static rtx
3143 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
3144 enum machine_mode mode)
3146 tree arg1, arg2, len;
3147 const char *p1, *p2;
3149 if (!validate_arglist (arglist,
3150 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3151 return 0;
3153 arg1 = TREE_VALUE (arglist);
3154 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3155 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3157 /* If the len parameter is zero, return zero. */
3158 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
3160 /* Evaluate and ignore arg1 and arg2 in case they have
3161 side-effects. */
3162 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3163 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3164 return const0_rtx;
3167 p1 = c_getstr (arg1);
3168 p2 = c_getstr (arg2);
3170 /* If all arguments are constant, and the value of len is not greater
3171 than the lengths of arg1 and arg2, evaluate at compile-time. */
3172 if (host_integerp (len, 1) && p1 && p2
3173 && compare_tree_int (len, strlen (p1) + 1) <= 0
3174 && compare_tree_int (len, strlen (p2) + 1) <= 0)
3176 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
3178 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3181 /* If len parameter is one, return an expression corresponding to
3182 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
3183 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
3185 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3186 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3187 tree ind1 =
3188 fold (build1 (CONVERT_EXPR, integer_type_node,
3189 build1 (INDIRECT_REF, cst_uchar_node,
3190 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3191 tree ind2 =
3192 fold (build1 (CONVERT_EXPR, integer_type_node,
3193 build1 (INDIRECT_REF, cst_uchar_node,
3194 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3195 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3196 return expand_expr (result, target, mode, EXPAND_NORMAL);
3199 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrsi
3201 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3202 rtx result;
3203 rtx insn;
3205 int arg1_align
3206 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3207 int arg2_align
3208 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3209 enum machine_mode insn_mode;
3211 #ifdef HAVE_cmpmemsi
3212 if (HAVE_cmpmemsi)
3213 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3214 else
3215 #endif
3216 #ifdef HAVE_cmpstrsi
3217 if (HAVE_cmpstrsi)
3218 insn_mode = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3219 else
3220 #endif
3221 return 0;
3223 /* If we don't have POINTER_TYPE, call the function. */
3224 if (arg1_align == 0 || arg2_align == 0)
3225 return 0;
3227 /* Make a place to write the result of the instruction. */
3228 result = target;
3229 if (! (result != 0
3230 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3231 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3232 result = gen_reg_rtx (insn_mode);
3234 arg1_rtx = get_memory_rtx (arg1);
3235 arg2_rtx = get_memory_rtx (arg2);
3236 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3237 #ifdef HAVE_cmpmemsi
3238 if (HAVE_cmpmemsi)
3239 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3240 GEN_INT (MIN (arg1_align, arg2_align)));
3241 else
3242 #endif
3243 #ifdef HAVE_cmpstrsi
3244 if (HAVE_cmpstrsi)
3245 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3246 GEN_INT (MIN (arg1_align, arg2_align)));
3247 else
3248 #endif
3249 abort ();
3251 if (insn)
3252 emit_insn (insn);
3253 else
3254 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
3255 TYPE_MODE (integer_type_node), 3,
3256 XEXP (arg1_rtx, 0), Pmode,
3257 XEXP (arg2_rtx, 0), Pmode,
3258 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3259 TREE_UNSIGNED (sizetype)),
3260 TYPE_MODE (sizetype));
3262 /* Return the value in the proper mode for this function. */
3263 mode = TYPE_MODE (TREE_TYPE (exp));
3264 if (GET_MODE (result) == mode)
3265 return result;
3266 else if (target != 0)
3268 convert_move (target, result, 0);
3269 return target;
3271 else
3272 return convert_to_mode (mode, result, 0);
3274 #endif
3276 return 0;
3279 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
3280 if we failed the caller should emit a normal call, otherwise try to get
3281 the result in TARGET, if convenient. */
3283 static rtx
3284 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
3286 tree arglist = TREE_OPERAND (exp, 1);
3287 tree arg1, arg2;
3288 const char *p1, *p2;
3290 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3291 return 0;
3293 arg1 = TREE_VALUE (arglist);
3294 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3296 p1 = c_getstr (arg1);
3297 p2 = c_getstr (arg2);
3299 if (p1 && p2)
3301 const int i = strcmp (p1, p2);
3302 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
3305 /* If either arg is "", return an expression corresponding to
3306 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
3307 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3309 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3310 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3311 tree ind1 =
3312 fold (build1 (CONVERT_EXPR, integer_type_node,
3313 build1 (INDIRECT_REF, cst_uchar_node,
3314 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3315 tree ind2 =
3316 fold (build1 (CONVERT_EXPR, integer_type_node,
3317 build1 (INDIRECT_REF, cst_uchar_node,
3318 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3319 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3320 return expand_expr (result, target, mode, EXPAND_NORMAL);
3323 #ifdef HAVE_cmpstrsi
3324 if (HAVE_cmpstrsi)
3326 tree len, len1, len2;
3327 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3328 rtx result, insn;
3330 int arg1_align
3331 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3332 int arg2_align
3333 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3334 enum machine_mode insn_mode
3335 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3337 len1 = c_strlen (arg1, 1);
3338 len2 = c_strlen (arg2, 1);
3340 if (len1)
3341 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3342 if (len2)
3343 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3345 /* If we don't have a constant length for the first, use the length
3346 of the second, if we know it. We don't require a constant for
3347 this case; some cost analysis could be done if both are available
3348 but neither is constant. For now, assume they're equally cheap,
3349 unless one has side effects. If both strings have constant lengths,
3350 use the smaller. */
3352 if (!len1)
3353 len = len2;
3354 else if (!len2)
3355 len = len1;
3356 else if (TREE_SIDE_EFFECTS (len1))
3357 len = len2;
3358 else if (TREE_SIDE_EFFECTS (len2))
3359 len = len1;
3360 else if (TREE_CODE (len1) != INTEGER_CST)
3361 len = len2;
3362 else if (TREE_CODE (len2) != INTEGER_CST)
3363 len = len1;
3364 else if (tree_int_cst_lt (len1, len2))
3365 len = len1;
3366 else
3367 len = len2;
3369 /* If both arguments have side effects, we cannot optimize. */
3370 if (!len || TREE_SIDE_EFFECTS (len))
3371 return 0;
3373 /* If we don't have POINTER_TYPE, call the function. */
3374 if (arg1_align == 0 || arg2_align == 0)
3375 return 0;
3377 /* Make a place to write the result of the instruction. */
3378 result = target;
3379 if (! (result != 0
3380 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3381 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3382 result = gen_reg_rtx (insn_mode);
3384 arg1_rtx = get_memory_rtx (arg1);
3385 arg2_rtx = get_memory_rtx (arg2);
3386 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3387 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3388 GEN_INT (MIN (arg1_align, arg2_align)));
3389 if (!insn)
3390 return 0;
3392 emit_insn (insn);
3394 /* Return the value in the proper mode for this function. */
3395 mode = TYPE_MODE (TREE_TYPE (exp));
3396 if (GET_MODE (result) == mode)
3397 return result;
3398 if (target == 0)
3399 return convert_to_mode (mode, result, 0);
3400 convert_move (target, result, 0);
3401 return target;
3403 #endif
3404 return 0;
3407 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3408 if we failed the caller should emit a normal call, otherwise try to get
3409 the result in TARGET, if convenient. */
3411 static rtx
3412 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
3414 tree arglist = TREE_OPERAND (exp, 1);
3415 tree arg1, arg2, arg3;
3416 const char *p1, *p2;
3418 if (!validate_arglist (arglist,
3419 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3420 return 0;
3422 arg1 = TREE_VALUE (arglist);
3423 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3424 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3426 /* If the len parameter is zero, return zero. */
3427 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
3429 /* Evaluate and ignore arg1 and arg2 in case they have
3430 side-effects. */
3431 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3432 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3433 return const0_rtx;
3436 p1 = c_getstr (arg1);
3437 p2 = c_getstr (arg2);
3439 /* If all arguments are constant, evaluate at compile-time. */
3440 if (host_integerp (arg3, 1) && p1 && p2)
3442 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
3443 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3446 /* If len == 1 or (either string parameter is "" and (len >= 1)),
3447 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
3448 if (host_integerp (arg3, 1)
3449 && (tree_low_cst (arg3, 1) == 1
3450 || (tree_low_cst (arg3, 1) > 1
3451 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
3453 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3454 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3455 tree ind1 =
3456 fold (build1 (CONVERT_EXPR, integer_type_node,
3457 build1 (INDIRECT_REF, cst_uchar_node,
3458 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3459 tree ind2 =
3460 fold (build1 (CONVERT_EXPR, integer_type_node,
3461 build1 (INDIRECT_REF, cst_uchar_node,
3462 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3463 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3464 return expand_expr (result, target, mode, EXPAND_NORMAL);
3467 /* If c_strlen can determine an expression for one of the string
3468 lengths, and it doesn't have side effects, then emit cmpstrsi
3469 using length MIN(strlen(string)+1, arg3). */
3470 #ifdef HAVE_cmpstrsi
3471 if (HAVE_cmpstrsi)
3473 tree len, len1, len2;
3474 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3475 rtx result, insn;
3477 int arg1_align
3478 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3479 int arg2_align
3480 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3481 enum machine_mode insn_mode
3482 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3484 len1 = c_strlen (arg1, 1);
3485 len2 = c_strlen (arg2, 1);
3487 if (len1)
3488 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3489 if (len2)
3490 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3492 /* If we don't have a constant length for the first, use the length
3493 of the second, if we know it. We don't require a constant for
3494 this case; some cost analysis could be done if both are available
3495 but neither is constant. For now, assume they're equally cheap,
3496 unless one has side effects. If both strings have constant lengths,
3497 use the smaller. */
3499 if (!len1)
3500 len = len2;
3501 else if (!len2)
3502 len = len1;
3503 else if (TREE_SIDE_EFFECTS (len1))
3504 len = len2;
3505 else if (TREE_SIDE_EFFECTS (len2))
3506 len = len1;
3507 else if (TREE_CODE (len1) != INTEGER_CST)
3508 len = len2;
3509 else if (TREE_CODE (len2) != INTEGER_CST)
3510 len = len1;
3511 else if (tree_int_cst_lt (len1, len2))
3512 len = len1;
3513 else
3514 len = len2;
3516 /* If both arguments have side effects, we cannot optimize. */
3517 if (!len || TREE_SIDE_EFFECTS (len))
3518 return 0;
3520 /* The actual new length parameter is MIN(len,arg3). */
3521 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
3523 /* If we don't have POINTER_TYPE, call the function. */
3524 if (arg1_align == 0 || arg2_align == 0)
3525 return 0;
3527 /* Make a place to write the result of the instruction. */
3528 result = target;
3529 if (! (result != 0
3530 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3531 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3532 result = gen_reg_rtx (insn_mode);
3534 arg1_rtx = get_memory_rtx (arg1);
3535 arg2_rtx = get_memory_rtx (arg2);
3536 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3537 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3538 GEN_INT (MIN (arg1_align, arg2_align)));
3539 if (!insn)
3540 return 0;
3542 emit_insn (insn);
3544 /* Return the value in the proper mode for this function. */
3545 mode = TYPE_MODE (TREE_TYPE (exp));
3546 if (GET_MODE (result) == mode)
3547 return result;
3548 if (target == 0)
3549 return convert_to_mode (mode, result, 0);
3550 convert_move (target, result, 0);
3551 return target;
3553 #endif
3554 return 0;
3557 /* Expand expression EXP, which is a call to the strcat builtin.
3558 Return 0 if we failed the caller should emit a normal call,
3559 otherwise try to get the result in TARGET, if convenient. */
3561 static rtx
3562 expand_builtin_strcat (tree arglist, rtx target, enum machine_mode mode)
3564 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3565 return 0;
3566 else
3568 tree dst = TREE_VALUE (arglist),
3569 src = TREE_VALUE (TREE_CHAIN (arglist));
3570 const char *p = c_getstr (src);
3572 /* If the string length is zero, return the dst parameter. */
3573 if (p && *p == '\0')
3574 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3576 return 0;
3580 /* Expand expression EXP, which is a call to the strncat builtin.
3581 Return 0 if we failed the caller should emit a normal call,
3582 otherwise try to get the result in TARGET, if convenient. */
3584 static rtx
3585 expand_builtin_strncat (tree arglist, rtx target, enum machine_mode mode)
3587 if (!validate_arglist (arglist,
3588 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3589 return 0;
3590 else
3592 tree dst = TREE_VALUE (arglist),
3593 src = TREE_VALUE (TREE_CHAIN (arglist)),
3594 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3595 const char *p = c_getstr (src);
3597 /* If the requested length is zero, or the src parameter string
3598 length is zero, return the dst parameter. */
3599 if (integer_zerop (len) || (p && *p == '\0'))
3601 /* Evaluate and ignore the src and len parameters in case
3602 they have side-effects. */
3603 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3604 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3605 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3608 /* If the requested len is greater than or equal to the string
3609 length, call strcat. */
3610 if (TREE_CODE (len) == INTEGER_CST && p
3611 && compare_tree_int (len, strlen (p)) >= 0)
3613 tree newarglist
3614 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
3615 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
3617 /* If the replacement _DECL isn't initialized, don't do the
3618 transformation. */
3619 if (!fn)
3620 return 0;
3622 return expand_expr (build_function_call_expr (fn, newarglist),
3623 target, mode, EXPAND_NORMAL);
3625 return 0;
3629 /* Expand expression EXP, which is a call to the strspn builtin.
3630 Return 0 if we failed the caller should emit a normal call,
3631 otherwise try to get the result in TARGET, if convenient. */
3633 static rtx
3634 expand_builtin_strspn (tree arglist, rtx target, enum machine_mode mode)
3636 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3637 return 0;
3638 else
3640 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3641 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3643 /* If both arguments are constants, evaluate at compile-time. */
3644 if (p1 && p2)
3646 const size_t r = strspn (p1, p2);
3647 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3650 /* If either argument is "", return 0. */
3651 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3653 /* Evaluate and ignore both arguments in case either one has
3654 side-effects. */
3655 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3656 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3657 return const0_rtx;
3659 return 0;
3663 /* Expand expression EXP, which is a call to the strcspn builtin.
3664 Return 0 if we failed the caller should emit a normal call,
3665 otherwise try to get the result in TARGET, if convenient. */
3667 static rtx
3668 expand_builtin_strcspn (tree arglist, rtx target, enum machine_mode mode)
3670 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3671 return 0;
3672 else
3674 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3675 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3677 /* If both arguments are constants, evaluate at compile-time. */
3678 if (p1 && p2)
3680 const size_t r = strcspn (p1, p2);
3681 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3684 /* If the first argument is "", return 0. */
3685 if (p1 && *p1 == '\0')
3687 /* Evaluate and ignore argument s2 in case it has
3688 side-effects. */
3689 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3690 return const0_rtx;
3693 /* If the second argument is "", return __builtin_strlen(s1). */
3694 if (p2 && *p2 == '\0')
3696 tree newarglist = build_tree_list (NULL_TREE, s1),
3697 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3699 /* If the replacement _DECL isn't initialized, don't do the
3700 transformation. */
3701 if (!fn)
3702 return 0;
3704 return expand_expr (build_function_call_expr (fn, newarglist),
3705 target, mode, EXPAND_NORMAL);
3707 return 0;
3711 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3712 if that's convenient. */
3715 expand_builtin_saveregs (void)
3717 rtx val, seq;
3719 /* Don't do __builtin_saveregs more than once in a function.
3720 Save the result of the first call and reuse it. */
3721 if (saveregs_value != 0)
3722 return saveregs_value;
3724 /* When this function is called, it means that registers must be
3725 saved on entry to this function. So we migrate the call to the
3726 first insn of this function. */
3728 start_sequence ();
3730 #ifdef EXPAND_BUILTIN_SAVEREGS
3731 /* Do whatever the machine needs done in this case. */
3732 val = EXPAND_BUILTIN_SAVEREGS ();
3733 #else
3734 /* ??? We used to try and build up a call to the out of line function,
3735 guessing about what registers needed saving etc. This became much
3736 harder with __builtin_va_start, since we don't have a tree for a
3737 call to __builtin_saveregs to fall back on. There was exactly one
3738 port (i860) that used this code, and I'm unconvinced it could actually
3739 handle the general case. So we no longer try to handle anything
3740 weird and make the backend absorb the evil. */
3742 error ("__builtin_saveregs not supported by this target");
3743 val = const0_rtx;
3744 #endif
3746 seq = get_insns ();
3747 end_sequence ();
3749 saveregs_value = val;
3751 /* Put the insns after the NOTE that starts the function. If this
3752 is inside a start_sequence, make the outer-level insn chain current, so
3753 the code is placed at the start of the function. */
3754 push_topmost_sequence ();
3755 emit_insn_after (seq, get_insns ());
3756 pop_topmost_sequence ();
3758 return val;
3761 /* __builtin_args_info (N) returns word N of the arg space info
3762 for the current function. The number and meanings of words
3763 is controlled by the definition of CUMULATIVE_ARGS. */
3765 static rtx
3766 expand_builtin_args_info (tree arglist)
3768 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3769 int *word_ptr = (int *) &current_function_args_info;
3771 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
3772 abort ();
3774 if (arglist != 0)
3776 if (!host_integerp (TREE_VALUE (arglist), 0))
3777 error ("argument of `__builtin_args_info' must be constant");
3778 else
3780 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3782 if (wordnum < 0 || wordnum >= nwords)
3783 error ("argument of `__builtin_args_info' out of range");
3784 else
3785 return GEN_INT (word_ptr[wordnum]);
3788 else
3789 error ("missing argument in `__builtin_args_info'");
3791 return const0_rtx;
3794 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3796 static rtx
3797 expand_builtin_next_arg (tree arglist)
3799 tree fntype = TREE_TYPE (current_function_decl);
3801 if (TYPE_ARG_TYPES (fntype) == 0
3802 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3803 == void_type_node))
3805 error ("`va_start' used in function with fixed args");
3806 return const0_rtx;
3809 if (arglist)
3811 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
3812 tree arg = TREE_VALUE (arglist);
3814 /* Strip off all nops for the sake of the comparison. This
3815 is not quite the same as STRIP_NOPS. It does more.
3816 We must also strip off INDIRECT_EXPR for C++ reference
3817 parameters. */
3818 while (TREE_CODE (arg) == NOP_EXPR
3819 || TREE_CODE (arg) == CONVERT_EXPR
3820 || TREE_CODE (arg) == NON_LVALUE_EXPR
3821 || TREE_CODE (arg) == INDIRECT_REF)
3822 arg = TREE_OPERAND (arg, 0);
3823 if (arg != last_parm)
3824 warning ("second parameter of `va_start' not last named argument");
3826 else
3827 /* Evidently an out of date version of <stdarg.h>; can't validate
3828 va_start's second argument, but can still work as intended. */
3829 warning ("`__builtin_next_arg' called without an argument");
3831 return expand_binop (Pmode, add_optab,
3832 current_function_internal_arg_pointer,
3833 current_function_arg_offset_rtx,
3834 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3837 /* Make it easier for the backends by protecting the valist argument
3838 from multiple evaluations. */
3840 static tree
3841 stabilize_va_list (tree valist, int needs_lvalue)
3843 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3845 if (TREE_SIDE_EFFECTS (valist))
3846 valist = save_expr (valist);
3848 /* For this case, the backends will be expecting a pointer to
3849 TREE_TYPE (va_list_type_node), but it's possible we've
3850 actually been given an array (an actual va_list_type_node).
3851 So fix it. */
3852 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3854 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3855 tree p2 = build_pointer_type (va_list_type_node);
3857 valist = build1 (ADDR_EXPR, p2, valist);
3858 valist = fold (build1 (NOP_EXPR, p1, valist));
3861 else
3863 tree pt;
3865 if (! needs_lvalue)
3867 if (! TREE_SIDE_EFFECTS (valist))
3868 return valist;
3870 pt = build_pointer_type (va_list_type_node);
3871 valist = fold (build1 (ADDR_EXPR, pt, valist));
3872 TREE_SIDE_EFFECTS (valist) = 1;
3875 if (TREE_SIDE_EFFECTS (valist))
3876 valist = save_expr (valist);
3877 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3878 valist));
3881 return valist;
3884 /* The "standard" implementation of va_start: just assign `nextarg' to
3885 the variable. */
3887 void
3888 std_expand_builtin_va_start (tree valist, rtx nextarg)
3890 tree t;
3892 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3893 make_tree (ptr_type_node, nextarg));
3894 TREE_SIDE_EFFECTS (t) = 1;
3896 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3899 /* Expand ARGLIST, from a call to __builtin_va_start. */
3901 static rtx
3902 expand_builtin_va_start (tree arglist)
3904 rtx nextarg;
3905 tree chain, valist;
3907 chain = TREE_CHAIN (arglist);
3909 if (TREE_CHAIN (chain))
3910 error ("too many arguments to function `va_start'");
3912 nextarg = expand_builtin_next_arg (chain);
3913 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3915 #ifdef EXPAND_BUILTIN_VA_START
3916 EXPAND_BUILTIN_VA_START (valist, nextarg);
3917 #else
3918 std_expand_builtin_va_start (valist, nextarg);
3919 #endif
3921 return const0_rtx;
3924 /* The "standard" implementation of va_arg: read the value from the
3925 current (padded) address and increment by the (padded) size. */
3928 std_expand_builtin_va_arg (tree valist, tree type)
3930 tree addr_tree, t, type_size = NULL;
3931 tree align, alignm1;
3932 tree rounded_size;
3933 rtx addr;
3935 /* Compute the rounded size of the type. */
3936 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3937 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3938 if (type == error_mark_node
3939 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3940 || TREE_OVERFLOW (type_size))
3941 rounded_size = size_zero_node;
3942 else
3943 rounded_size = fold (build (MULT_EXPR, sizetype,
3944 fold (build (TRUNC_DIV_EXPR, sizetype,
3945 fold (build (PLUS_EXPR, sizetype,
3946 type_size, alignm1)),
3947 align)),
3948 align));
3950 /* Get AP. */
3951 addr_tree = valist;
3952 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3954 /* Small args are padded downward. */
3955 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3956 fold (build (COND_EXPR, sizetype,
3957 fold (build (GT_EXPR, sizetype,
3958 rounded_size,
3959 align)),
3960 size_zero_node,
3961 fold (build (MINUS_EXPR, sizetype,
3962 rounded_size,
3963 type_size))))));
3966 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3967 addr = copy_to_reg (addr);
3969 /* Compute new value for AP. */
3970 if (! integer_zerop (rounded_size))
3972 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3973 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3974 rounded_size));
3975 TREE_SIDE_EFFECTS (t) = 1;
3976 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3979 return addr;
3982 /* Expand __builtin_va_arg, which is not really a builtin function, but
3983 a very special sort of operator. */
3986 expand_builtin_va_arg (tree valist, tree type)
3988 rtx addr, result;
3989 tree promoted_type, want_va_type, have_va_type;
3991 /* Verify that valist is of the proper type. */
3993 want_va_type = va_list_type_node;
3994 have_va_type = TREE_TYPE (valist);
3995 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3997 /* If va_list is an array type, the argument may have decayed
3998 to a pointer type, e.g. by being passed to another function.
3999 In that case, unwrap both types so that we can compare the
4000 underlying records. */
4001 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4002 || TREE_CODE (have_va_type) == POINTER_TYPE)
4004 want_va_type = TREE_TYPE (want_va_type);
4005 have_va_type = TREE_TYPE (have_va_type);
4008 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4010 error ("first argument to `va_arg' not of type `va_list'");
4011 addr = const0_rtx;
4014 /* Generate a diagnostic for requesting data of a type that cannot
4015 be passed through `...' due to type promotion at the call site. */
4016 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
4017 != type)
4019 const char *name = "<anonymous type>", *pname = 0;
4020 static bool gave_help;
4022 if (TYPE_NAME (type))
4024 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
4025 name = IDENTIFIER_POINTER (TYPE_NAME (type));
4026 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
4027 && DECL_NAME (TYPE_NAME (type)))
4028 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
4030 if (TYPE_NAME (promoted_type))
4032 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
4033 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
4034 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
4035 && DECL_NAME (TYPE_NAME (promoted_type)))
4036 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
4039 /* Unfortunately, this is merely undefined, rather than a constraint
4040 violation, so we cannot make this an error. If this call is never
4041 executed, the program is still strictly conforming. */
4042 warning ("`%s' is promoted to `%s' when passed through `...'",
4043 name, pname);
4044 if (! gave_help)
4046 gave_help = true;
4047 warning ("(so you should pass `%s' not `%s' to `va_arg')",
4048 pname, name);
4051 /* We can, however, treat "undefined" any way we please.
4052 Call abort to encourage the user to fix the program. */
4053 expand_builtin_trap ();
4055 /* This is dead code, but go ahead and finish so that the
4056 mode of the result comes out right. */
4057 addr = const0_rtx;
4059 else
4061 /* Make it easier for the backends by protecting the valist argument
4062 from multiple evaluations. */
4063 valist = stabilize_va_list (valist, 0);
4065 #ifdef EXPAND_BUILTIN_VA_ARG
4066 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
4067 #else
4068 addr = std_expand_builtin_va_arg (valist, type);
4069 #endif
4072 #ifdef POINTERS_EXTEND_UNSIGNED
4073 if (GET_MODE (addr) != Pmode)
4074 addr = convert_memory_address (Pmode, addr);
4075 #endif
4077 result = gen_rtx_MEM (TYPE_MODE (type), addr);
4078 set_mem_alias_set (result, get_varargs_alias_set ());
4080 return result;
4083 /* Expand ARGLIST, from a call to __builtin_va_end. */
4085 static rtx
4086 expand_builtin_va_end (tree arglist)
4088 tree valist = TREE_VALUE (arglist);
4090 #ifdef EXPAND_BUILTIN_VA_END
4091 valist = stabilize_va_list (valist, 0);
4092 EXPAND_BUILTIN_VA_END (arglist);
4093 #else
4094 /* Evaluate for side effects, if needed. I hate macros that don't
4095 do that. */
4096 if (TREE_SIDE_EFFECTS (valist))
4097 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4098 #endif
4100 return const0_rtx;
4103 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
4104 builtin rather than just as an assignment in stdarg.h because of the
4105 nastiness of array-type va_list types. */
4107 static rtx
4108 expand_builtin_va_copy (tree arglist)
4110 tree dst, src, t;
4112 dst = TREE_VALUE (arglist);
4113 src = TREE_VALUE (TREE_CHAIN (arglist));
4115 dst = stabilize_va_list (dst, 1);
4116 src = stabilize_va_list (src, 0);
4118 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4120 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
4121 TREE_SIDE_EFFECTS (t) = 1;
4122 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4124 else
4126 rtx dstb, srcb, size;
4128 /* Evaluate to pointers. */
4129 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4130 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4131 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4132 VOIDmode, EXPAND_NORMAL);
4134 #ifdef POINTERS_EXTEND_UNSIGNED
4135 if (GET_MODE (dstb) != Pmode)
4136 dstb = convert_memory_address (Pmode, dstb);
4138 if (GET_MODE (srcb) != Pmode)
4139 srcb = convert_memory_address (Pmode, srcb);
4140 #endif
4142 /* "Dereference" to BLKmode memories. */
4143 dstb = gen_rtx_MEM (BLKmode, dstb);
4144 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4145 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4146 srcb = gen_rtx_MEM (BLKmode, srcb);
4147 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4148 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4150 /* Copy. */
4151 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4154 return const0_rtx;
4157 /* Expand a call to one of the builtin functions __builtin_frame_address or
4158 __builtin_return_address. */
4160 static rtx
4161 expand_builtin_frame_address (tree fndecl, tree arglist)
4163 /* The argument must be a nonnegative integer constant.
4164 It counts the number of frames to scan up the stack.
4165 The value is the return address saved in that frame. */
4166 if (arglist == 0)
4167 /* Warning about missing arg was already issued. */
4168 return const0_rtx;
4169 else if (! host_integerp (TREE_VALUE (arglist), 1))
4171 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4172 error ("invalid arg to `__builtin_frame_address'");
4173 else
4174 error ("invalid arg to `__builtin_return_address'");
4175 return const0_rtx;
4177 else
4179 rtx tem
4180 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4181 tree_low_cst (TREE_VALUE (arglist), 1),
4182 hard_frame_pointer_rtx);
4184 /* Some ports cannot access arbitrary stack frames. */
4185 if (tem == NULL)
4187 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4188 warning ("unsupported arg to `__builtin_frame_address'");
4189 else
4190 warning ("unsupported arg to `__builtin_return_address'");
4191 return const0_rtx;
4194 /* For __builtin_frame_address, return what we've got. */
4195 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4196 return tem;
4198 if (GET_CODE (tem) != REG
4199 && ! CONSTANT_P (tem))
4200 tem = copy_to_mode_reg (Pmode, tem);
4201 return tem;
4205 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
4206 we failed and the caller should emit a normal call, otherwise try to get
4207 the result in TARGET, if convenient. */
4209 static rtx
4210 expand_builtin_alloca (tree arglist, rtx target)
4212 rtx op0;
4213 rtx result;
4215 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4216 return 0;
4218 /* Compute the argument. */
4219 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
4221 /* Allocate the desired space. */
4222 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4224 #ifdef POINTERS_EXTEND_UNSIGNED
4225 if (GET_MODE (result) != ptr_mode)
4226 result = convert_memory_address (ptr_mode, result);
4227 #endif
4229 return result;
4232 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
4233 Return 0 if a normal call should be emitted rather than expanding the
4234 function in-line. If convenient, the result should be placed in TARGET.
4235 SUBTARGET may be used as the target for computing one of EXP's operands. */
4237 static rtx
4238 expand_builtin_unop (enum machine_mode target_mode, tree arglist, rtx target,
4239 rtx subtarget, optab op_optab)
4241 rtx op0;
4242 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4243 return 0;
4245 /* Compute the argument. */
4246 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
4247 /* Compute op, into TARGET if possible.
4248 Set TARGET to wherever the result comes back. */
4249 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
4250 op_optab, op0, target, 1);
4251 if (target == 0)
4252 abort ();
4254 return convert_to_mode (target_mode, target, 0);
4257 /* If the string passed to fputs is a constant and is one character
4258 long, we attempt to transform this call into __builtin_fputc(). */
4260 static rtx
4261 expand_builtin_fputs (tree arglist, int ignore, int unlocked)
4263 tree len, fn;
4264 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
4265 : implicit_built_in_decls[BUILT_IN_FPUTC];
4266 tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
4267 : implicit_built_in_decls[BUILT_IN_FWRITE];
4269 /* If the return value is used, or the replacement _DECL isn't
4270 initialized, don't do the transformation. */
4271 if (!ignore || !fn_fputc || !fn_fwrite)
4272 return 0;
4274 /* Verify the arguments in the original call. */
4275 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4276 return 0;
4278 /* Get the length of the string passed to fputs. If the length
4279 can't be determined, punt. */
4280 if (!(len = c_strlen (TREE_VALUE (arglist), 1))
4281 || TREE_CODE (len) != INTEGER_CST)
4282 return 0;
4284 switch (compare_tree_int (len, 1))
4286 case -1: /* length is 0, delete the call entirely . */
4288 /* Evaluate and ignore the argument in case it has
4289 side-effects. */
4290 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
4291 VOIDmode, EXPAND_NORMAL);
4292 return const0_rtx;
4294 case 0: /* length is 1, call fputc. */
4296 const char *p = c_getstr (TREE_VALUE (arglist));
4298 if (p != NULL)
4300 /* New argument list transforming fputs(string, stream) to
4301 fputc(string[0], stream). */
4302 arglist =
4303 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
4304 arglist =
4305 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
4306 fn = fn_fputc;
4307 break;
4310 /* FALLTHROUGH */
4311 case 1: /* length is greater than 1, call fwrite. */
4313 tree string_arg;
4315 /* If optimizing for size keep fputs. */
4316 if (optimize_size)
4317 return 0;
4318 string_arg = TREE_VALUE (arglist);
4319 /* New argument list transforming fputs(string, stream) to
4320 fwrite(string, 1, len, stream). */
4321 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
4322 arglist = tree_cons (NULL_TREE, len, arglist);
4323 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
4324 arglist = tree_cons (NULL_TREE, string_arg, arglist);
4325 fn = fn_fwrite;
4326 break;
4328 default:
4329 abort ();
4332 return expand_expr (build_function_call_expr (fn, arglist),
4333 (ignore ? const0_rtx : NULL_RTX),
4334 VOIDmode, EXPAND_NORMAL);
4337 /* Expand a call to __builtin_expect. We return our argument and emit a
4338 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
4339 a non-jump context. */
4341 static rtx
4342 expand_builtin_expect (tree arglist, rtx target)
4344 tree exp, c;
4345 rtx note, rtx_c;
4347 if (arglist == NULL_TREE
4348 || TREE_CHAIN (arglist) == NULL_TREE)
4349 return const0_rtx;
4350 exp = TREE_VALUE (arglist);
4351 c = TREE_VALUE (TREE_CHAIN (arglist));
4353 if (TREE_CODE (c) != INTEGER_CST)
4355 error ("second arg to `__builtin_expect' must be a constant");
4356 c = integer_zero_node;
4359 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
4361 /* Don't bother with expected value notes for integral constants. */
4362 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
4364 /* We do need to force this into a register so that we can be
4365 moderately sure to be able to correctly interpret the branch
4366 condition later. */
4367 target = force_reg (GET_MODE (target), target);
4369 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
4371 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
4372 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
4375 return target;
4378 /* Like expand_builtin_expect, except do this in a jump context. This is
4379 called from do_jump if the conditional is a __builtin_expect. Return either
4380 a list of insns to emit the jump or NULL if we cannot optimize
4381 __builtin_expect. We need to optimize this at jump time so that machines
4382 like the PowerPC don't turn the test into a SCC operation, and then jump
4383 based on the test being 0/1. */
4386 expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
4388 tree arglist = TREE_OPERAND (exp, 1);
4389 tree arg0 = TREE_VALUE (arglist);
4390 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4391 rtx ret = NULL_RTX;
4393 /* Only handle __builtin_expect (test, 0) and
4394 __builtin_expect (test, 1). */
4395 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
4396 && (integer_zerop (arg1) || integer_onep (arg1)))
4398 int num_jumps = 0;
4399 rtx insn;
4401 /* If we fail to locate an appropriate conditional jump, we'll
4402 fall back to normal evaluation. Ensure that the expression
4403 can be re-evaluated. */
4404 switch (unsafe_for_reeval (arg0))
4406 case 0: /* Safe. */
4407 break;
4409 case 1: /* Mildly unsafe. */
4410 arg0 = unsave_expr (arg0);
4411 break;
4413 case 2: /* Wildly unsafe. */
4414 return NULL_RTX;
4417 /* Expand the jump insns. */
4418 start_sequence ();
4419 do_jump (arg0, if_false_label, if_true_label);
4420 ret = get_insns ();
4421 end_sequence ();
4423 /* Now that the __builtin_expect has been validated, go through and add
4424 the expect's to each of the conditional jumps. If we run into an
4425 error, just give up and generate the 'safe' code of doing a SCC
4426 operation and then doing a branch on that. */
4427 insn = ret;
4428 while (insn != NULL_RTX)
4430 rtx next = NEXT_INSN (insn);
4432 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn))
4434 rtx ifelse = SET_SRC (pc_set (insn));
4435 rtx label;
4436 int taken;
4438 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
4440 taken = 1;
4441 label = XEXP (XEXP (ifelse, 1), 0);
4443 /* An inverted jump reverses the probabilities. */
4444 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
4446 taken = 0;
4447 label = XEXP (XEXP (ifelse, 2), 0);
4449 /* We shouldn't have to worry about conditional returns during
4450 the expansion stage, but handle it gracefully anyway. */
4451 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
4453 taken = 1;
4454 label = NULL_RTX;
4456 /* An inverted return reverses the probabilities. */
4457 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
4459 taken = 0;
4460 label = NULL_RTX;
4462 else
4463 goto do_next_insn;
4465 /* If the test is expected to fail, reverse the
4466 probabilities. */
4467 if (integer_zerop (arg1))
4468 taken = 1 - taken;
4470 /* If we are jumping to the false label, reverse the
4471 probabilities. */
4472 if (label == NULL_RTX)
4473 ; /* conditional return */
4474 else if (label == if_false_label)
4475 taken = 1 - taken;
4476 else if (label != if_true_label)
4477 goto do_next_insn;
4479 num_jumps++;
4480 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
4483 do_next_insn:
4484 insn = next;
4487 /* If no jumps were modified, fail and do __builtin_expect the normal
4488 way. */
4489 if (num_jumps == 0)
4490 ret = NULL_RTX;
4493 return ret;
4496 void
4497 expand_builtin_trap (void)
4499 #ifdef HAVE_trap
4500 if (HAVE_trap)
4501 emit_insn (gen_trap ());
4502 else
4503 #endif
4504 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4505 emit_barrier ();
4508 /* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
4509 Return 0 if a normal call should be emitted rather than expanding
4510 the function inline. If convenient, the result should be placed
4511 in TARGET. SUBTARGET may be used as the target for computing
4512 the operand. */
4514 static rtx
4515 expand_builtin_fabs (tree arglist, rtx target, rtx subtarget)
4517 enum machine_mode mode;
4518 tree arg;
4519 rtx op0;
4521 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4522 return 0;
4524 arg = TREE_VALUE (arglist);
4525 mode = TYPE_MODE (TREE_TYPE (arg));
4526 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4527 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4530 /* Expand a call to cabs, cabsf or cabsl with arguments ARGLIST.
4531 Return 0 if a normal call should be emitted rather than expanding
4532 the function inline. If convenient, the result should be placed
4533 in target. */
4535 static rtx
4536 expand_builtin_cabs (tree arglist, rtx target)
4538 enum machine_mode mode;
4539 tree arg;
4540 rtx op0;
4542 if (arglist == 0 || TREE_CHAIN (arglist))
4543 return 0;
4544 arg = TREE_VALUE (arglist);
4545 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
4546 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
4547 return 0;
4549 mode = TYPE_MODE (TREE_TYPE (arg));
4550 op0 = expand_expr (arg, NULL_RTX, VOIDmode, 0);
4551 return expand_complex_abs (mode, op0, target, 0);
4554 /* Expand a call to sprintf with argument list ARGLIST. Return 0 if
4555 a normal call should be emitted rather than expanding the function
4556 inline. If convenient, the result should be placed in TARGET with
4557 mode MODE. */
4559 static rtx
4560 expand_builtin_sprintf (tree arglist, rtx target, enum machine_mode mode)
4562 tree orig_arglist, dest, fmt;
4563 const char *fmt_str;
4565 orig_arglist = arglist;
4567 /* Verify the required arguments in the original call. */
4568 if (! arglist)
4569 return 0;
4570 dest = TREE_VALUE (arglist);
4571 if (TREE_CODE (TREE_TYPE (dest)) != POINTER_TYPE)
4572 return 0;
4573 arglist = TREE_CHAIN (arglist);
4574 if (! arglist)
4575 return 0;
4576 fmt = TREE_VALUE (arglist);
4577 if (TREE_CODE (TREE_TYPE (dest)) != POINTER_TYPE)
4578 return 0;
4579 arglist = TREE_CHAIN (arglist);
4581 /* Check whether the format is a literal string constant. */
4582 fmt_str = c_getstr (fmt);
4583 if (fmt_str == NULL)
4584 return 0;
4586 /* If the format doesn't contain % args or %%, use strcpy. */
4587 if (strchr (fmt_str, '%') == 0)
4589 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4590 tree exp;
4592 if (arglist || ! fn)
4593 return 0;
4594 expand_expr (build_function_call_expr (fn, orig_arglist),
4595 const0_rtx, VOIDmode, EXPAND_NORMAL);
4596 if (target == const0_rtx)
4597 return const0_rtx;
4598 exp = build_int_2 (strlen (fmt_str), 0);
4599 exp = fold (build1 (NOP_EXPR, integer_type_node, exp));
4600 return expand_expr (exp, target, mode, EXPAND_NORMAL);
4602 /* If the format is "%s", use strcpy if the result isn't used. */
4603 else if (strcmp (fmt_str, "%s") == 0)
4605 tree fn, arg, len;
4606 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4608 if (! fn)
4609 return 0;
4611 if (! arglist || TREE_CHAIN (arglist))
4612 return 0;
4613 arg = TREE_VALUE (arglist);
4614 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
4615 return 0;
4617 if (target != const0_rtx)
4619 len = c_strlen (arg, 1);
4620 if (! len || TREE_CODE (len) != INTEGER_CST)
4621 return 0;
4623 else
4624 len = NULL_TREE;
4626 arglist = build_tree_list (NULL_TREE, arg);
4627 arglist = tree_cons (NULL_TREE, dest, arglist);
4628 expand_expr (build_function_call_expr (fn, arglist),
4629 const0_rtx, VOIDmode, EXPAND_NORMAL);
4631 if (target == const0_rtx)
4632 return const0_rtx;
4633 return expand_expr (len, target, mode, EXPAND_NORMAL);
4636 return 0;
4639 /* Expand an expression EXP that calls a built-in function,
4640 with result going to TARGET if that's convenient
4641 (and in mode MODE if that's convenient).
4642 SUBTARGET may be used as the target for computing one of EXP's operands.
4643 IGNORE is nonzero if the value is to be ignored. */
4646 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
4647 int ignore)
4649 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4650 tree arglist = TREE_OPERAND (exp, 1);
4651 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4652 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
4654 /* Perform postincrements before expanding builtin functions.  */
4655 emit_queue ();
4657 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4658 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
4660 /* When not optimizing, generate calls to library functions for a certain
4661 set of builtins. */
4662 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
4663 switch (fcode)
4665 case BUILT_IN_SQRT:
4666 case BUILT_IN_SQRTF:
4667 case BUILT_IN_SQRTL:
4668 case BUILT_IN_SIN:
4669 case BUILT_IN_SINF:
4670 case BUILT_IN_SINL:
4671 case BUILT_IN_COS:
4672 case BUILT_IN_COSF:
4673 case BUILT_IN_COSL:
4674 case BUILT_IN_EXP:
4675 case BUILT_IN_EXPF:
4676 case BUILT_IN_EXPL:
4677 case BUILT_IN_LOG:
4678 case BUILT_IN_LOGF:
4679 case BUILT_IN_LOGL:
4680 case BUILT_IN_TAN:
4681 case BUILT_IN_TANF:
4682 case BUILT_IN_TANL:
4683 case BUILT_IN_ATAN:
4684 case BUILT_IN_ATANF:
4685 case BUILT_IN_ATANL:
4686 case BUILT_IN_POW:
4687 case BUILT_IN_POWF:
4688 case BUILT_IN_POWL:
4689 case BUILT_IN_ATAN2:
4690 case BUILT_IN_ATAN2F:
4691 case BUILT_IN_ATAN2L:
4692 case BUILT_IN_MEMSET:
4693 case BUILT_IN_MEMCPY:
4694 case BUILT_IN_MEMCMP:
4695 case BUILT_IN_MEMPCPY:
4696 case BUILT_IN_MEMMOVE:
4697 case BUILT_IN_BCMP:
4698 case BUILT_IN_BZERO:
4699 case BUILT_IN_BCOPY:
4700 case BUILT_IN_INDEX:
4701 case BUILT_IN_RINDEX:
4702 case BUILT_IN_SPRINTF:
4703 case BUILT_IN_STPCPY:
4704 case BUILT_IN_STRCHR:
4705 case BUILT_IN_STRRCHR:
4706 case BUILT_IN_STRLEN:
4707 case BUILT_IN_STRCPY:
4708 case BUILT_IN_STRNCPY:
4709 case BUILT_IN_STRNCMP:
4710 case BUILT_IN_STRSTR:
4711 case BUILT_IN_STRPBRK:
4712 case BUILT_IN_STRCAT:
4713 case BUILT_IN_STRNCAT:
4714 case BUILT_IN_STRSPN:
4715 case BUILT_IN_STRCSPN:
4716 case BUILT_IN_STRCMP:
4717 case BUILT_IN_FFS:
4718 case BUILT_IN_PUTCHAR:
4719 case BUILT_IN_PUTS:
4720 case BUILT_IN_PRINTF:
4721 case BUILT_IN_FPUTC:
4722 case BUILT_IN_FPUTS:
4723 case BUILT_IN_FWRITE:
4724 case BUILT_IN_PUTCHAR_UNLOCKED:
4725 case BUILT_IN_PUTS_UNLOCKED:
4726 case BUILT_IN_PRINTF_UNLOCKED:
4727 case BUILT_IN_FPUTC_UNLOCKED:
4728 case BUILT_IN_FPUTS_UNLOCKED:
4729 case BUILT_IN_FWRITE_UNLOCKED:
4730 case BUILT_IN_FLOOR:
4731 case BUILT_IN_FLOORF:
4732 case BUILT_IN_FLOORL:
4733 case BUILT_IN_CEIL:
4734 case BUILT_IN_CEILF:
4735 case BUILT_IN_CEILL:
4736 case BUILT_IN_TRUNC:
4737 case BUILT_IN_TRUNCF:
4738 case BUILT_IN_TRUNCL:
4739 case BUILT_IN_ROUND:
4740 case BUILT_IN_ROUNDF:
4741 case BUILT_IN_ROUNDL:
4742 case BUILT_IN_NEARBYINT:
4743 case BUILT_IN_NEARBYINTF:
4744 case BUILT_IN_NEARBYINTL:
4745 return expand_call (exp, target, ignore);
4747 default:
4748 break;
4751 /* The built-in function expanders test for target == const0_rtx
4752 to determine whether the function's result will be ignored. */
4753 if (ignore)
4754 target = const0_rtx;
4756 /* If the result of a pure or const built-in function is ignored, and
4757 none of its arguments are volatile, we can avoid expanding the
4758 built-in call and just evaluate the arguments for side-effects. */
4759 if (target == const0_rtx
4760 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
4762 bool volatilep = false;
4763 tree arg;
4765 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4766 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
4768 volatilep = true;
4769 break;
4772 if (! volatilep)
4774 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4775 expand_expr (TREE_VALUE (arg), const0_rtx,
4776 VOIDmode, EXPAND_NORMAL);
4777 return const0_rtx;
4781 switch (fcode)
4783 case BUILT_IN_ABS:
4784 case BUILT_IN_LABS:
4785 case BUILT_IN_LLABS:
4786 case BUILT_IN_IMAXABS:
4787 /* build_function_call changes these into ABS_EXPR. */
4788 abort ();
4790 case BUILT_IN_FABS:
4791 case BUILT_IN_FABSF:
4792 case BUILT_IN_FABSL:
4793 target = expand_builtin_fabs (arglist, target, subtarget);
4794 if (target)
4795 return target;
4796 break;
4798 case BUILT_IN_CABS:
4799 case BUILT_IN_CABSF:
4800 case BUILT_IN_CABSL:
4801 if (flag_unsafe_math_optimizations)
4803 target = expand_builtin_cabs (arglist, target);
4804 if (target)
4805 return target;
4807 break;
4809 case BUILT_IN_CONJ:
4810 case BUILT_IN_CONJF:
4811 case BUILT_IN_CONJL:
4812 case BUILT_IN_CREAL:
4813 case BUILT_IN_CREALF:
4814 case BUILT_IN_CREALL:
4815 case BUILT_IN_CIMAG:
4816 case BUILT_IN_CIMAGF:
4817 case BUILT_IN_CIMAGL:
4818 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
4819 and IMAGPART_EXPR. */
4820 abort ();
4822 case BUILT_IN_SIN:
4823 case BUILT_IN_SINF:
4824 case BUILT_IN_SINL:
4825 case BUILT_IN_COS:
4826 case BUILT_IN_COSF:
4827 case BUILT_IN_COSL:
4828 case BUILT_IN_EXP:
4829 case BUILT_IN_EXPF:
4830 case BUILT_IN_EXPL:
4831 case BUILT_IN_LOG:
4832 case BUILT_IN_LOGF:
4833 case BUILT_IN_LOGL:
4834 case BUILT_IN_TAN:
4835 case BUILT_IN_TANF:
4836 case BUILT_IN_TANL:
4837 case BUILT_IN_ATAN:
4838 case BUILT_IN_ATANF:
4839 case BUILT_IN_ATANL:
4840 /* Treat these like sqrt only if unsafe math optimizations are allowed,
4841 because of possible accuracy problems. */
4842 if (! flag_unsafe_math_optimizations)
4843 break;
4844 case BUILT_IN_SQRT:
4845 case BUILT_IN_SQRTF:
4846 case BUILT_IN_SQRTL:
4847 case BUILT_IN_FLOOR:
4848 case BUILT_IN_FLOORF:
4849 case BUILT_IN_FLOORL:
4850 case BUILT_IN_CEIL:
4851 case BUILT_IN_CEILF:
4852 case BUILT_IN_CEILL:
4853 case BUILT_IN_TRUNC:
4854 case BUILT_IN_TRUNCF:
4855 case BUILT_IN_TRUNCL:
4856 case BUILT_IN_ROUND:
4857 case BUILT_IN_ROUNDF:
4858 case BUILT_IN_ROUNDL:
4859 case BUILT_IN_NEARBYINT:
4860 case BUILT_IN_NEARBYINTF:
4861 case BUILT_IN_NEARBYINTL:
4862 target = expand_builtin_mathfn (exp, target, subtarget);
4863 if (target)
4864 return target;
4865 break;
4867 case BUILT_IN_POW:
4868 case BUILT_IN_POWF:
4869 case BUILT_IN_POWL:
4870 if (! flag_unsafe_math_optimizations)
4871 break;
4872 target = expand_builtin_pow (exp, target, subtarget);
4873 if (target)
4874 return target;
4875 break;
4877 case BUILT_IN_ATAN2:
4878 case BUILT_IN_ATAN2F:
4879 case BUILT_IN_ATAN2L:
4880 if (! flag_unsafe_math_optimizations)
4881 break;
4882 target = expand_builtin_mathfn_2 (exp, target, subtarget);
4883 if (target)
4884 return target;
4885 break;
4887 case BUILT_IN_APPLY_ARGS:
4888 return expand_builtin_apply_args ();
4890 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
4891 FUNCTION with a copy of the parameters described by
4892 ARGUMENTS, and ARGSIZE. It returns a block of memory
4893 allocated on the stack into which is stored all the registers
4894 that might possibly be used for returning the result of a
4895 function. ARGUMENTS is the value returned by
4896 __builtin_apply_args. ARGSIZE is the number of bytes of
4897 arguments that must be copied. ??? How should this value be
4898 computed? We'll also need a safe worst case value for varargs
4899 functions. */
4900 case BUILT_IN_APPLY:
4901 if (!validate_arglist (arglist, POINTER_TYPE,
4902 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4903 && !validate_arglist (arglist, REFERENCE_TYPE,
4904 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4905 return const0_rtx;
4906 else
4908 int i;
4909 tree t;
4910 rtx ops[3];
4912 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
4913 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
4915 return expand_builtin_apply (ops[0], ops[1], ops[2]);
4918 /* __builtin_return (RESULT) causes the function to return the
4919 value described by RESULT. RESULT is address of the block of
4920 memory returned by __builtin_apply. */
4921 case BUILT_IN_RETURN:
4922 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4923 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
4924 NULL_RTX, VOIDmode, 0));
4925 return const0_rtx;
4927 case BUILT_IN_SAVEREGS:
4928 return expand_builtin_saveregs ();
4930 case BUILT_IN_ARGS_INFO:
4931 return expand_builtin_args_info (arglist);
4933 /* Return the address of the first anonymous stack arg. */
4934 case BUILT_IN_NEXT_ARG:
4935 return expand_builtin_next_arg (arglist);
4937 case BUILT_IN_CLASSIFY_TYPE:
4938 return expand_builtin_classify_type (arglist);
4940 case BUILT_IN_CONSTANT_P:
4941 return expand_builtin_constant_p (arglist, target_mode);
4943 case BUILT_IN_FRAME_ADDRESS:
4944 case BUILT_IN_RETURN_ADDRESS:
4945 return expand_builtin_frame_address (fndecl, arglist);
4947 /* Returns the address of the area where the structure is returned.
4948 0 otherwise. */
4949 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
4950 if (arglist != 0
4951 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
4952 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
4953 return const0_rtx;
4954 else
4955 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4957 case BUILT_IN_ALLOCA:
4958 target = expand_builtin_alloca (arglist, target);
4959 if (target)
4960 return target;
4961 break;
4963 case BUILT_IN_FFS:
4964 case BUILT_IN_FFSL:
4965 case BUILT_IN_FFSLL:
4966 target = expand_builtin_unop (target_mode, arglist, target,
4967 subtarget, ffs_optab);
4968 if (target)
4969 return target;
4970 break;
4972 case BUILT_IN_CLZ:
4973 case BUILT_IN_CLZL:
4974 case BUILT_IN_CLZLL:
4975 target = expand_builtin_unop (target_mode, arglist, target,
4976 subtarget, clz_optab);
4977 if (target)
4978 return target;
4979 break;
4981 case BUILT_IN_CTZ:
4982 case BUILT_IN_CTZL:
4983 case BUILT_IN_CTZLL:
4984 target = expand_builtin_unop (target_mode, arglist, target,
4985 subtarget, ctz_optab);
4986 if (target)
4987 return target;
4988 break;
4990 case BUILT_IN_POPCOUNT:
4991 case BUILT_IN_POPCOUNTL:
4992 case BUILT_IN_POPCOUNTLL:
4993 target = expand_builtin_unop (target_mode, arglist, target,
4994 subtarget, popcount_optab);
4995 if (target)
4996 return target;
4997 break;
4999 case BUILT_IN_PARITY:
5000 case BUILT_IN_PARITYL:
5001 case BUILT_IN_PARITYLL:
5002 target = expand_builtin_unop (target_mode, arglist, target,
5003 subtarget, parity_optab);
5004 if (target)
5005 return target;
5006 break;
5008 case BUILT_IN_STRLEN:
5009 target = expand_builtin_strlen (arglist, target, target_mode);
5010 if (target)
5011 return target;
5012 break;
5014 case BUILT_IN_STRCPY:
5015 target = expand_builtin_strcpy (arglist, target, mode);
5016 if (target)
5017 return target;
5018 break;
5020 case BUILT_IN_STRNCPY:
5021 target = expand_builtin_strncpy (arglist, target, mode);
5022 if (target)
5023 return target;
5024 break;
5026 case BUILT_IN_STPCPY:
5027 target = expand_builtin_stpcpy (arglist, target, mode);
5028 if (target)
5029 return target;
5030 break;
5032 case BUILT_IN_STRCAT:
5033 target = expand_builtin_strcat (arglist, target, mode);
5034 if (target)
5035 return target;
5036 break;
5038 case BUILT_IN_STRNCAT:
5039 target = expand_builtin_strncat (arglist, target, mode);
5040 if (target)
5041 return target;
5042 break;
5044 case BUILT_IN_STRSPN:
5045 target = expand_builtin_strspn (arglist, target, mode);
5046 if (target)
5047 return target;
5048 break;
5050 case BUILT_IN_STRCSPN:
5051 target = expand_builtin_strcspn (arglist, target, mode);
5052 if (target)
5053 return target;
5054 break;
5056 case BUILT_IN_STRSTR:
5057 target = expand_builtin_strstr (arglist, target, mode);
5058 if (target)
5059 return target;
5060 break;
5062 case BUILT_IN_STRPBRK:
5063 target = expand_builtin_strpbrk (arglist, target, mode);
5064 if (target)
5065 return target;
5066 break;
5068 case BUILT_IN_INDEX:
5069 case BUILT_IN_STRCHR:
5070 target = expand_builtin_strchr (arglist, target, mode);
5071 if (target)
5072 return target;
5073 break;
5075 case BUILT_IN_RINDEX:
5076 case BUILT_IN_STRRCHR:
5077 target = expand_builtin_strrchr (arglist, target, mode);
5078 if (target)
5079 return target;
5080 break;
5082 case BUILT_IN_MEMCPY:
5083 target = expand_builtin_memcpy (arglist, target, mode);
5084 if (target)
5085 return target;
5086 break;
5088 case BUILT_IN_MEMPCPY:
5089 target = expand_builtin_mempcpy (arglist, target, mode, /*endp=*/ 1);
5090 if (target)
5091 return target;
5092 break;
5094 case BUILT_IN_MEMMOVE:
5095 target = expand_builtin_memmove (arglist, target, mode);
5096 if (target)
5097 return target;
5098 break;
5100 case BUILT_IN_BCOPY:
5101 target = expand_builtin_bcopy (arglist);
5102 if (target)
5103 return target;
5104 break;
5106 case BUILT_IN_MEMSET:
5107 target = expand_builtin_memset (arglist, target, mode);
5108 if (target)
5109 return target;
5110 break;
5112 case BUILT_IN_BZERO:
5113 target = expand_builtin_bzero (arglist);
5114 if (target)
5115 return target;
5116 break;
5118 case BUILT_IN_STRCMP:
5119 target = expand_builtin_strcmp (exp, target, mode);
5120 if (target)
5121 return target;
5122 break;
5124 case BUILT_IN_STRNCMP:
5125 target = expand_builtin_strncmp (exp, target, mode);
5126 if (target)
5127 return target;
5128 break;
5130 case BUILT_IN_BCMP:
5131 case BUILT_IN_MEMCMP:
5132 target = expand_builtin_memcmp (exp, arglist, target, mode);
5133 if (target)
5134 return target;
5135 break;
5137 case BUILT_IN_SETJMP:
5138 target = expand_builtin_setjmp (arglist, target);
5139 if (target)
5140 return target;
5141 break;
5143 /* __builtin_longjmp is passed a pointer to an array of five words.
5144 It's similar to the C library longjmp function but works with
5145 __builtin_setjmp above. */
5146 case BUILT_IN_LONGJMP:
5147 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5148 break;
5149 else
5151 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
5152 VOIDmode, 0);
5153 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
5154 NULL_RTX, VOIDmode, 0);
5156 if (value != const1_rtx)
5158 error ("__builtin_longjmp second argument must be 1");
5159 return const0_rtx;
5162 expand_builtin_longjmp (buf_addr, value);
5163 return const0_rtx;
5166 case BUILT_IN_TRAP:
5167 expand_builtin_trap ();
5168 return const0_rtx;
5170 case BUILT_IN_FPUTS:
5171 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 0);
5172 if (target)
5173 return target;
5174 break;
5175 case BUILT_IN_FPUTS_UNLOCKED:
5176 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 1);
5177 if (target)
5178 return target;
5179 break;
5181 case BUILT_IN_SPRINTF:
5182 target = expand_builtin_sprintf (arglist, target, mode);
5183 if (target)
5184 return target;
5185 break;
5187 /* Various hooks for the DWARF 2 __throw routine. */
5188 case BUILT_IN_UNWIND_INIT:
5189 expand_builtin_unwind_init ();
5190 return const0_rtx;
5191 case BUILT_IN_DWARF_CFA:
5192 return virtual_cfa_rtx;
5193 #ifdef DWARF2_UNWIND_INFO
5194 case BUILT_IN_DWARF_SP_COLUMN:
5195 return expand_builtin_dwarf_sp_column ();
5196 case BUILT_IN_INIT_DWARF_REG_SIZES:
5197 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
5198 return const0_rtx;
5199 #endif
5200 case BUILT_IN_FROB_RETURN_ADDR:
5201 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
5202 case BUILT_IN_EXTRACT_RETURN_ADDR:
5203 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
5204 case BUILT_IN_EH_RETURN:
5205 expand_builtin_eh_return (TREE_VALUE (arglist),
5206 TREE_VALUE (TREE_CHAIN (arglist)));
5207 return const0_rtx;
5208 #ifdef EH_RETURN_DATA_REGNO
5209 case BUILT_IN_EH_RETURN_DATA_REGNO:
5210 return expand_builtin_eh_return_data_regno (arglist);
5211 #endif
5212 case BUILT_IN_VA_START:
5213 case BUILT_IN_STDARG_START:
5214 return expand_builtin_va_start (arglist);
5215 case BUILT_IN_VA_END:
5216 return expand_builtin_va_end (arglist);
5217 case BUILT_IN_VA_COPY:
5218 return expand_builtin_va_copy (arglist);
5219 case BUILT_IN_EXPECT:
5220 return expand_builtin_expect (arglist, target);
5221 case BUILT_IN_PREFETCH:
5222 expand_builtin_prefetch (arglist);
5223 return const0_rtx;
5226 default: /* just do library call, if unknown builtin */
5227 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
5228 error ("built-in function `%s' not currently supported",
5229 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5232 /* The switch statement above can drop through to cause the function
5233 to be called normally. */
5234 return expand_call (exp, target, ignore);
5237 /* Determine whether a tree node represents a call to a built-in
5238 math function. If the tree T is a call to a built-in function
5239 taking a single real argument, then the return value is the
5240 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
5241 the return value is END_BUILTINS. */
5243 enum built_in_function
5244 builtin_mathfn_code (tree t)
5246 tree fndecl, arglist;
5248 if (TREE_CODE (t) != CALL_EXPR
5249 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
5250 return END_BUILTINS;
5252 fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
5253 if (TREE_CODE (fndecl) != FUNCTION_DECL
5254 || ! DECL_BUILT_IN (fndecl)
5255 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5256 return END_BUILTINS;
5258 arglist = TREE_OPERAND (t, 1);
5259 if (! arglist
5260 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5261 return END_BUILTINS;
5263 arglist = TREE_CHAIN (arglist);
5264 switch (DECL_FUNCTION_CODE (fndecl))
5266 case BUILT_IN_POW:
5267 case BUILT_IN_POWF:
5268 case BUILT_IN_POWL:
5269 case BUILT_IN_ATAN2:
5270 case BUILT_IN_ATAN2F:
5271 case BUILT_IN_ATAN2L:
5272 if (! arglist
5273 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
5274 || TREE_CHAIN (arglist))
5275 return END_BUILTINS;
5276 break;
5278 default:
5279 if (arglist)
5280 return END_BUILTINS;
5281 break;
5284 return DECL_FUNCTION_CODE (fndecl);
5287 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
5288 constant. ARGLIST is the argument list of the call. */
5290 static tree
5291 fold_builtin_constant_p (tree arglist)
5293 if (arglist == 0)
5294 return 0;
5296 arglist = TREE_VALUE (arglist);
5298 /* We return 1 for a numeric type that's known to be a constant
5299 value at compile-time or for an aggregate type that's a
5300 literal constant. */
5301 STRIP_NOPS (arglist);
5303 /* If we know this is a constant, emit the constant of one. */
5304 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
5305 || (TREE_CODE (arglist) == CONSTRUCTOR
5306 && TREE_CONSTANT (arglist))
5307 || (TREE_CODE (arglist) == ADDR_EXPR
5308 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
5309 return integer_one_node;
5311 /* If we aren't going to be running CSE or this expression
5312 has side effects, show we don't know it to be a constant.
5313 Likewise if it's a pointer or aggregate type since in those
5314 case we only want literals, since those are only optimized
5315 when generating RTL, not later.
5316 And finally, if we are compiling an initializer, not code, we
5317 need to return a definite result now; there's not going to be any
5318 more optimization done. */
5319 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
5320 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
5321 || POINTER_TYPE_P (TREE_TYPE (arglist))
5322 || cfun == 0)
5323 return integer_zero_node;
5325 return 0;
5328 /* Fold a call to __builtin_classify_type. */
5330 static tree
5331 fold_builtin_classify_type (tree arglist)
5333 if (arglist == 0)
5334 return build_int_2 (no_type_class, 0);
5336 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
5339 /* Fold a call to __builtin_inf or __builtin_huge_val. */
5341 static tree
5342 fold_builtin_inf (tree type, int warn)
5344 REAL_VALUE_TYPE real;
5346 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
5347 warning ("target format does not support infinity");
5349 real_inf (&real);
5350 return build_real (type, real);
5353 /* Fold a call to __builtin_nan or __builtin_nans. */
5355 static tree
5356 fold_builtin_nan (tree arglist, tree type, int quiet)
5358 REAL_VALUE_TYPE real;
5359 const char *str;
5361 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5362 return 0;
5363 str = c_getstr (TREE_VALUE (arglist));
5364 if (!str)
5365 return 0;
5367 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
5368 return 0;
5370 return build_real (type, real);
5373 /* Return true if the floating point expression T has an integer value.
5374 We also allow +Inf, -Inf and NaN to be considered integer values. */
5376 static bool
5377 integer_valued_real_p (tree t)
5379 switch (TREE_CODE (t))
5381 case FLOAT_EXPR:
5382 return true;
5384 case ABS_EXPR:
5385 case SAVE_EXPR:
5386 case NON_LVALUE_EXPR:
5387 return integer_valued_real_p (TREE_OPERAND (t, 0));
5389 case COMPOUND_EXPR:
5390 case MODIFY_EXPR:
5391 case BIND_EXPR:
5392 return integer_valued_real_p (TREE_OPERAND (t, 1));
5394 case PLUS_EXPR:
5395 case MINUS_EXPR:
5396 case MULT_EXPR:
5397 case MIN_EXPR:
5398 case MAX_EXPR:
5399 return integer_valued_real_p (TREE_OPERAND (t, 0))
5400 && integer_valued_real_p (TREE_OPERAND (t, 1));
5402 case COND_EXPR:
5403 return integer_valued_real_p (TREE_OPERAND (t, 1))
5404 && integer_valued_real_p (TREE_OPERAND (t, 2));
5406 case REAL_CST:
5407 if (! TREE_CONSTANT_OVERFLOW (t))
5409 REAL_VALUE_TYPE c, cint;
5411 c = TREE_REAL_CST (t);
5412 real_trunc (&cint, TYPE_MODE (TREE_TYPE (t)), &c);
5413 return real_identical (&c, &cint);
5416 case NOP_EXPR:
5418 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
5419 if (TREE_CODE (type) == INTEGER_TYPE)
5420 return true;
5421 if (TREE_CODE (type) == REAL_TYPE)
5422 return integer_valued_real_p (TREE_OPERAND (t, 0));
5423 break;
5426 case CALL_EXPR:
5427 switch (builtin_mathfn_code (t))
5429 case BUILT_IN_CEIL:
5430 case BUILT_IN_CEILF:
5431 case BUILT_IN_CEILL:
5432 case BUILT_IN_FLOOR:
5433 case BUILT_IN_FLOORF:
5434 case BUILT_IN_FLOORL:
5435 case BUILT_IN_NEARBYINT:
5436 case BUILT_IN_NEARBYINTF:
5437 case BUILT_IN_NEARBYINTL:
5438 case BUILT_IN_ROUND:
5439 case BUILT_IN_ROUNDF:
5440 case BUILT_IN_ROUNDL:
5441 case BUILT_IN_TRUNC:
5442 case BUILT_IN_TRUNCF:
5443 case BUILT_IN_TRUNCL:
5444 return true;
5446 default:
5447 break;
5449 break;
5451 default:
5452 break;
5454 return false;
5457 /* EXP is assumed to be builtin call where truncation can be propagated
5458 across (for instance floor((double)f) == (double)floorf (f).
5459 Do the transformation. */
5461 static tree
5462 fold_trunc_transparent_mathfn (tree exp)
5464 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5465 tree arglist = TREE_OPERAND (exp, 1);
5466 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5467 tree arg;
5469 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5470 return 0;
5472 arg = TREE_VALUE (arglist);
5473 /* Integer rounding functions are idempotent. */
5474 if (fcode == builtin_mathfn_code (arg))
5475 return arg;
5477 /* If argument is already integer valued, and we don't need to worry
5478 about setting errno, there's no need to perform rounding. */
5479 if (! flag_errno_math && integer_valued_real_p (arg))
5480 return arg;
5482 if (optimize)
5484 tree arg0 = strip_float_extensions (arg);
5485 tree ftype = TREE_TYPE (exp);
5486 tree newtype = TREE_TYPE (arg0);
5487 tree decl;
5489 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
5490 && (decl = mathfn_built_in (newtype, fcode)))
5492 arglist =
5493 build_tree_list (NULL_TREE, fold (convert (newtype, arg0)));
5494 return convert (ftype,
5495 build_function_call_expr (decl, arglist));
5498 return 0;
5501 /* Fold function call to builtin cabs, cabsf or cabsl. FNDECL is the
5502 function's DECL, ARGLIST is the argument list and TYPE is the return
5503 type. Return NULL_TREE if no simplification can be made. */
5505 static tree
5506 fold_builtin_cabs (tree fndecl, tree arglist, tree type)
5508 tree arg;
5510 if (!arglist || TREE_CHAIN (arglist))
5511 return NULL_TREE;
5513 arg = TREE_VALUE (arglist);
5514 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
5515 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
5516 return NULL_TREE;
5518 /* Evaluate cabs of a constant at compile-time. */
5519 if (flag_unsafe_math_optimizations
5520 && TREE_CODE (arg) == COMPLEX_CST
5521 && TREE_CODE (TREE_REALPART (arg)) == REAL_CST
5522 && TREE_CODE (TREE_IMAGPART (arg)) == REAL_CST
5523 && ! TREE_CONSTANT_OVERFLOW (TREE_REALPART (arg))
5524 && ! TREE_CONSTANT_OVERFLOW (TREE_IMAGPART (arg)))
5526 REAL_VALUE_TYPE r, i;
5528 r = TREE_REAL_CST (TREE_REALPART (arg));
5529 i = TREE_REAL_CST (TREE_IMAGPART (arg));
5531 real_arithmetic (&r, MULT_EXPR, &r, &r);
5532 real_arithmetic (&i, MULT_EXPR, &i, &i);
5533 real_arithmetic (&r, PLUS_EXPR, &r, &i);
5534 if (real_sqrt (&r, TYPE_MODE (type), &r)
5535 || ! flag_trapping_math)
5536 return build_real (type, r);
5539 /* If either part is zero, cabs is fabs of the other. */
5540 if (TREE_CODE (arg) == COMPLEX_EXPR
5541 && real_zerop (TREE_OPERAND (arg, 0)))
5542 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1)));
5543 if (TREE_CODE (arg) == COMPLEX_EXPR
5544 && real_zerop (TREE_OPERAND (arg, 1)))
5545 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0)));
5547 if (flag_unsafe_math_optimizations)
5549 enum built_in_function fcode;
5550 tree sqrtfn;
5552 fcode = DECL_FUNCTION_CODE (fndecl);
5553 if (fcode == BUILT_IN_CABS)
5554 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
5555 else if (fcode == BUILT_IN_CABSF)
5556 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
5557 else if (fcode == BUILT_IN_CABSL)
5558 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
5559 else
5560 sqrtfn = NULL_TREE;
5562 if (sqrtfn != NULL_TREE)
5564 tree rpart, ipart, result, arglist;
5566 rpart = fold (build1 (REALPART_EXPR, type, arg));
5567 ipart = fold (build1 (IMAGPART_EXPR, type, arg));
5569 rpart = save_expr (rpart);
5570 ipart = save_expr (ipart);
5572 result = fold (build (PLUS_EXPR, type,
5573 fold (build (MULT_EXPR, type,
5574 rpart, rpart)),
5575 fold (build (MULT_EXPR, type,
5576 ipart, ipart))));
5578 arglist = build_tree_list (NULL_TREE, result);
5579 return build_function_call_expr (sqrtfn, arglist);
5583 return NULL_TREE;
5586 /* Fold function call to builtin trunc, truncf or truncl. Return
5587 NULL_TREE if no simplification can be made. */
5589 static tree
5590 fold_builtin_trunc (tree exp)
5592 tree arglist = TREE_OPERAND (exp, 1);
5593 tree arg;
5595 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5596 return 0;
5598 /* Optimize trunc of constant value. */
5599 arg = TREE_VALUE (arglist);
5600 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
5602 REAL_VALUE_TYPE r, x;
5603 tree type = TREE_TYPE (exp);
5605 x = TREE_REAL_CST (arg);
5606 real_trunc (&r, TYPE_MODE (type), &x);
5607 return build_real (type, r);
5610 return fold_trunc_transparent_mathfn (exp);
5613 /* Fold function call to builtin floor, floorf or floorl. Return
5614 NULL_TREE if no simplification can be made. */
5616 static tree
5617 fold_builtin_floor (tree exp)
5619 tree arglist = TREE_OPERAND (exp, 1);
5620 tree arg;
5622 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5623 return 0;
5625 /* Optimize floor of constant value. */
5626 arg = TREE_VALUE (arglist);
5627 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
5629 REAL_VALUE_TYPE x;
5631 x = TREE_REAL_CST (arg);
5632 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
5634 tree type = TREE_TYPE (exp);
5635 REAL_VALUE_TYPE r;
5637 real_floor (&r, TYPE_MODE (type), &x);
5638 return build_real (type, r);
5642 return fold_trunc_transparent_mathfn (exp);
5645 /* Fold function call to builtin ceil, ceilf or ceill. Return
5646 NULL_TREE if no simplification can be made. */
5648 static tree
5649 fold_builtin_ceil (tree exp)
5651 tree arglist = TREE_OPERAND (exp, 1);
5652 tree arg;
5654 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5655 return 0;
5657 /* Optimize ceil of constant value. */
5658 arg = TREE_VALUE (arglist);
5659 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
5661 REAL_VALUE_TYPE x;
5663 x = TREE_REAL_CST (arg);
5664 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
5666 tree type = TREE_TYPE (exp);
5667 REAL_VALUE_TYPE r;
5669 real_ceil (&r, TYPE_MODE (type), &x);
5670 return build_real (type, r);
5674 return fold_trunc_transparent_mathfn (exp);
5677 /* Used by constant folding to eliminate some builtin calls early. EXP is
5678 the CALL_EXPR of a call to a builtin function. */
5680 tree
5681 fold_builtin (tree exp)
5683 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5684 tree arglist = TREE_OPERAND (exp, 1);
5685 tree type = TREE_TYPE (TREE_TYPE (fndecl));
5687 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5688 return 0;
5690 switch (DECL_FUNCTION_CODE (fndecl))
5692 case BUILT_IN_CONSTANT_P:
5693 return fold_builtin_constant_p (arglist);
5695 case BUILT_IN_CLASSIFY_TYPE:
5696 return fold_builtin_classify_type (arglist);
5698 case BUILT_IN_STRLEN:
5699 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5701 tree len = c_strlen (TREE_VALUE (arglist), 0);
5702 if (len)
5704 /* Convert from the internal "sizetype" type to "size_t". */
5705 if (size_type_node)
5706 len = convert (size_type_node, len);
5707 return len;
5710 break;
5712 case BUILT_IN_FABS:
5713 case BUILT_IN_FABSF:
5714 case BUILT_IN_FABSL:
5715 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5716 return fold (build1 (ABS_EXPR, type, TREE_VALUE (arglist)));
5717 break;
5719 case BUILT_IN_CABS:
5720 case BUILT_IN_CABSF:
5721 case BUILT_IN_CABSL:
5722 return fold_builtin_cabs (fndecl, arglist, type);
5724 case BUILT_IN_SQRT:
5725 case BUILT_IN_SQRTF:
5726 case BUILT_IN_SQRTL:
5727 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5729 enum built_in_function fcode;
5730 tree arg = TREE_VALUE (arglist);
5732 /* Optimize sqrt of constant value. */
5733 if (TREE_CODE (arg) == REAL_CST
5734 && ! TREE_CONSTANT_OVERFLOW (arg))
5736 REAL_VALUE_TYPE r, x;
5738 x = TREE_REAL_CST (arg);
5739 if (real_sqrt (&r, TYPE_MODE (type), &x)
5740 || (!flag_trapping_math && !flag_errno_math))
5741 return build_real (type, r);
5744 /* Optimize sqrt(exp(x)) = exp(x*0.5). */
5745 fcode = builtin_mathfn_code (arg);
5746 if (flag_unsafe_math_optimizations
5747 && (fcode == BUILT_IN_EXP
5748 || fcode == BUILT_IN_EXPF
5749 || fcode == BUILT_IN_EXPL))
5751 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5752 arg = fold (build (MULT_EXPR, type,
5753 TREE_VALUE (TREE_OPERAND (arg, 1)),
5754 build_real (type, dconsthalf)));
5755 arglist = build_tree_list (NULL_TREE, arg);
5756 return build_function_call_expr (expfn, arglist);
5759 /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5). */
5760 if (flag_unsafe_math_optimizations
5761 && (fcode == BUILT_IN_POW
5762 || fcode == BUILT_IN_POWF
5763 || fcode == BUILT_IN_POWL))
5765 tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5766 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5767 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5768 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5769 build_real (type, dconsthalf)));
5770 arglist = tree_cons (NULL_TREE, arg0,
5771 build_tree_list (NULL_TREE, narg1));
5772 return build_function_call_expr (powfn, arglist);
5775 break;
5777 case BUILT_IN_SIN:
5778 case BUILT_IN_SINF:
5779 case BUILT_IN_SINL:
5780 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5782 tree arg = TREE_VALUE (arglist);
5784 /* Optimize sin(0.0) = 0.0. */
5785 if (real_zerop (arg))
5786 return arg;
5788 break;
5790 case BUILT_IN_COS:
5791 case BUILT_IN_COSF:
5792 case BUILT_IN_COSL:
5793 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5795 tree arg = TREE_VALUE (arglist);
5797 /* Optimize cos(0.0) = 1.0. */
5798 if (real_zerop (arg))
5799 return build_real (type, dconst1);
5801 /* Optimize cos(-x) into cos(x). */
5802 if (TREE_CODE (arg) == NEGATE_EXPR)
5804 tree arglist = build_tree_list (NULL_TREE,
5805 TREE_OPERAND (arg, 0));
5806 return build_function_call_expr (fndecl, arglist);
5809 break;
5811 case BUILT_IN_EXP:
5812 case BUILT_IN_EXPF:
5813 case BUILT_IN_EXPL:
5814 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5816 enum built_in_function fcode;
5817 tree arg = TREE_VALUE (arglist);
5819 /* Optimize exp(0.0) = 1.0. */
5820 if (real_zerop (arg))
5821 return build_real (type, dconst1);
5823 /* Optimize exp(1.0) = e. */
5824 if (real_onep (arg))
5826 REAL_VALUE_TYPE cst;
5828 if (! builtin_dconsts_init)
5829 init_builtin_dconsts ();
5830 real_convert (&cst, TYPE_MODE (type), &dconste);
5831 return build_real (type, cst);
5834 /* Attempt to evaluate exp at compile-time. */
5835 if (flag_unsafe_math_optimizations
5836 && TREE_CODE (arg) == REAL_CST
5837 && ! TREE_CONSTANT_OVERFLOW (arg))
5839 REAL_VALUE_TYPE cint;
5840 REAL_VALUE_TYPE c;
5841 HOST_WIDE_INT n;
5843 c = TREE_REAL_CST (arg);
5844 n = real_to_integer (&c);
5845 real_from_integer (&cint, VOIDmode, n,
5846 n < 0 ? -1 : 0, 0);
5847 if (real_identical (&c, &cint))
5849 REAL_VALUE_TYPE x;
5851 if (! builtin_dconsts_init)
5852 init_builtin_dconsts ();
5853 real_powi (&x, TYPE_MODE (type), &dconste, n);
5854 return build_real (type, x);
5858 /* Optimize exp(log(x)) = x. */
5859 fcode = builtin_mathfn_code (arg);
5860 if (flag_unsafe_math_optimizations
5861 && (fcode == BUILT_IN_LOG
5862 || fcode == BUILT_IN_LOGF
5863 || fcode == BUILT_IN_LOGL))
5864 return TREE_VALUE (TREE_OPERAND (arg, 1));
5866 break;
5868 case BUILT_IN_LOG:
5869 case BUILT_IN_LOGF:
5870 case BUILT_IN_LOGL:
5871 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5873 enum built_in_function fcode;
5874 tree arg = TREE_VALUE (arglist);
5876 /* Optimize log(1.0) = 0.0. */
5877 if (real_onep (arg))
5878 return build_real (type, dconst0);
5880 /* Optimize log(exp(x)) = x. */
5881 fcode = builtin_mathfn_code (arg);
5882 if (flag_unsafe_math_optimizations
5883 && (fcode == BUILT_IN_EXP
5884 || fcode == BUILT_IN_EXPF
5885 || fcode == BUILT_IN_EXPL))
5886 return TREE_VALUE (TREE_OPERAND (arg, 1));
5888 /* Optimize log(sqrt(x)) = log(x)*0.5. */
5889 if (flag_unsafe_math_optimizations
5890 && (fcode == BUILT_IN_SQRT
5891 || fcode == BUILT_IN_SQRTF
5892 || fcode == BUILT_IN_SQRTL))
5894 tree logfn = build_function_call_expr (fndecl,
5895 TREE_OPERAND (arg, 1));
5896 return fold (build (MULT_EXPR, type, logfn,
5897 build_real (type, dconsthalf)));
5900 /* Optimize log(pow(x,y)) = y*log(x). */
5901 if (flag_unsafe_math_optimizations
5902 && (fcode == BUILT_IN_POW
5903 || fcode == BUILT_IN_POWF
5904 || fcode == BUILT_IN_POWL))
5906 tree arg0, arg1, logfn;
5908 arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5909 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5910 arglist = build_tree_list (NULL_TREE, arg0);
5911 logfn = build_function_call_expr (fndecl, arglist);
5912 return fold (build (MULT_EXPR, type, arg1, logfn));
5915 break;
5917 case BUILT_IN_TAN:
5918 case BUILT_IN_TANF:
5919 case BUILT_IN_TANL:
5920 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5922 enum built_in_function fcode;
5923 tree arg = TREE_VALUE (arglist);
5925 /* Optimize tan(0.0) = 0.0. */
5926 if (real_zerop (arg))
5927 return arg;
5929 /* Optimize tan(atan(x)) = x. */
5930 fcode = builtin_mathfn_code (arg);
5931 if (flag_unsafe_math_optimizations
5932 && (fcode == BUILT_IN_ATAN
5933 || fcode == BUILT_IN_ATANF
5934 || fcode == BUILT_IN_ATANL))
5935 return TREE_VALUE (TREE_OPERAND (arg, 1));
5937 break;
5939 case BUILT_IN_ATAN:
5940 case BUILT_IN_ATANF:
5941 case BUILT_IN_ATANL:
5942 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5944 tree arg = TREE_VALUE (arglist);
5946 /* Optimize atan(0.0) = 0.0. */
5947 if (real_zerop (arg))
5948 return arg;
5950 /* Optimize atan(1.0) = pi/4. */
5951 if (real_onep (arg))
5953 REAL_VALUE_TYPE cst;
5955 if (! builtin_dconsts_init)
5956 init_builtin_dconsts ();
5957 real_convert (&cst, TYPE_MODE (type), &dconstpi);
5958 cst.exp -= 2;
5959 return build_real (type, cst);
5962 break;
5964 case BUILT_IN_POW:
5965 case BUILT_IN_POWF:
5966 case BUILT_IN_POWL:
5967 if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5969 enum built_in_function fcode;
5970 tree arg0 = TREE_VALUE (arglist);
5971 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5973 /* Optimize pow(1.0,y) = 1.0. */
5974 if (real_onep (arg0))
5975 return omit_one_operand (type, build_real (type, dconst1), arg1);
5977 if (TREE_CODE (arg1) == REAL_CST
5978 && ! TREE_CONSTANT_OVERFLOW (arg1))
5980 REAL_VALUE_TYPE c;
5981 c = TREE_REAL_CST (arg1);
5983 /* Optimize pow(x,0.0) = 1.0. */
5984 if (REAL_VALUES_EQUAL (c, dconst0))
5985 return omit_one_operand (type, build_real (type, dconst1),
5986 arg0);
5988 /* Optimize pow(x,1.0) = x. */
5989 if (REAL_VALUES_EQUAL (c, dconst1))
5990 return arg0;
5992 /* Optimize pow(x,-1.0) = 1.0/x. */
5993 if (REAL_VALUES_EQUAL (c, dconstm1))
5994 return fold (build (RDIV_EXPR, type,
5995 build_real (type, dconst1),
5996 arg0));
5998 /* Optimize pow(x,2.0) = x*x. */
5999 if (REAL_VALUES_EQUAL (c, dconst2)
6000 && (*lang_hooks.decls.global_bindings_p) () == 0
6001 && ! CONTAINS_PLACEHOLDER_P (arg0))
6003 arg0 = save_expr (arg0);
6004 return fold (build (MULT_EXPR, type, arg0, arg0));
6007 /* Optimize pow(x,-2.0) = 1.0/(x*x). */
6008 if (flag_unsafe_math_optimizations
6009 && REAL_VALUES_EQUAL (c, dconstm2)
6010 && (*lang_hooks.decls.global_bindings_p) () == 0
6011 && ! CONTAINS_PLACEHOLDER_P (arg0))
6013 arg0 = save_expr (arg0);
6014 return fold (build (RDIV_EXPR, type,
6015 build_real (type, dconst1),
6016 fold (build (MULT_EXPR, type,
6017 arg0, arg0))));
6020 /* Optimize pow(x,0.5) = sqrt(x). */
6021 if (flag_unsafe_math_optimizations
6022 && REAL_VALUES_EQUAL (c, dconsthalf))
6024 tree sqrtfn;
6026 fcode = DECL_FUNCTION_CODE (fndecl);
6027 if (fcode == BUILT_IN_POW)
6028 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
6029 else if (fcode == BUILT_IN_POWF)
6030 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
6031 else if (fcode == BUILT_IN_POWL)
6032 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
6033 else
6034 sqrtfn = NULL_TREE;
6036 if (sqrtfn != NULL_TREE)
6038 tree arglist = build_tree_list (NULL_TREE, arg0);
6039 return build_function_call_expr (sqrtfn, arglist);
6043 /* Attempt to evaluate pow at compile-time. */
6044 if (TREE_CODE (arg0) == REAL_CST
6045 && ! TREE_CONSTANT_OVERFLOW (arg0))
6047 REAL_VALUE_TYPE cint;
6048 HOST_WIDE_INT n;
6050 n = real_to_integer (&c);
6051 real_from_integer (&cint, VOIDmode, n,
6052 n < 0 ? -1 : 0, 0);
6053 if (real_identical (&c, &cint))
6055 REAL_VALUE_TYPE x;
6056 bool inexact;
6058 x = TREE_REAL_CST (arg0);
6059 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
6060 if (flag_unsafe_math_optimizations || !inexact)
6061 return build_real (type, x);
6066 /* Optimize pow(exp(x),y) = exp(x*y). */
6067 fcode = builtin_mathfn_code (arg0);
6068 if (flag_unsafe_math_optimizations
6069 && (fcode == BUILT_IN_EXP
6070 || fcode == BUILT_IN_EXPF
6071 || fcode == BUILT_IN_EXPL))
6073 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6074 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
6075 arg = fold (build (MULT_EXPR, type, arg, arg1));
6076 arglist = build_tree_list (NULL_TREE, arg);
6077 return build_function_call_expr (expfn, arglist);
6080 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
6081 if (flag_unsafe_math_optimizations
6082 && (fcode == BUILT_IN_SQRT
6083 || fcode == BUILT_IN_SQRTF
6084 || fcode == BUILT_IN_SQRTL))
6086 tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6087 tree narg1 = fold (build (MULT_EXPR, type, arg1,
6088 build_real (type, dconsthalf)));
6090 arglist = tree_cons (NULL_TREE, narg0,
6091 build_tree_list (NULL_TREE, narg1));
6092 return build_function_call_expr (fndecl, arglist);
6095 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
6096 if (flag_unsafe_math_optimizations
6097 && (fcode == BUILT_IN_POW
6098 || fcode == BUILT_IN_POWF
6099 || fcode == BUILT_IN_POWL))
6101 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6102 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6103 tree narg1 = fold (build (MULT_EXPR, type, arg01, arg1));
6104 arglist = tree_cons (NULL_TREE, arg00,
6105 build_tree_list (NULL_TREE, narg1));
6106 return build_function_call_expr (fndecl, arglist);
6109 break;
6111 case BUILT_IN_INF:
6112 case BUILT_IN_INFF:
6113 case BUILT_IN_INFL:
6114 return fold_builtin_inf (type, true);
6116 case BUILT_IN_HUGE_VAL:
6117 case BUILT_IN_HUGE_VALF:
6118 case BUILT_IN_HUGE_VALL:
6119 return fold_builtin_inf (type, false);
6121 case BUILT_IN_NAN:
6122 case BUILT_IN_NANF:
6123 case BUILT_IN_NANL:
6124 return fold_builtin_nan (arglist, type, true);
6126 case BUILT_IN_NANS:
6127 case BUILT_IN_NANSF:
6128 case BUILT_IN_NANSL:
6129 return fold_builtin_nan (arglist, type, false);
6131 case BUILT_IN_FLOOR:
6132 case BUILT_IN_FLOORF:
6133 case BUILT_IN_FLOORL:
6134 return fold_builtin_floor (exp);
6136 case BUILT_IN_CEIL:
6137 case BUILT_IN_CEILF:
6138 case BUILT_IN_CEILL:
6139 return fold_builtin_ceil (exp);
6141 case BUILT_IN_TRUNC:
6142 case BUILT_IN_TRUNCF:
6143 case BUILT_IN_TRUNCL:
6144 return fold_builtin_trunc (exp);
6146 case BUILT_IN_ROUND:
6147 case BUILT_IN_ROUNDF:
6148 case BUILT_IN_ROUNDL:
6149 case BUILT_IN_NEARBYINT:
6150 case BUILT_IN_NEARBYINTF:
6151 case BUILT_IN_NEARBYINTL:
6152 return fold_trunc_transparent_mathfn (exp);
6154 default:
6155 break;
6158 return 0;
6161 /* Conveniently construct a function call expression. */
6163 tree
6164 build_function_call_expr (tree fn, tree arglist)
6166 tree call_expr;
6168 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
6169 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
6170 call_expr, arglist);
6171 TREE_SIDE_EFFECTS (call_expr) = 1;
6172 return fold (call_expr);
6175 /* This function validates the types of a function call argument list
6176 represented as a tree chain of parameters against a specified list
6177 of tree_codes. If the last specifier is a 0, that represents an
6178 ellipses, otherwise the last specifier must be a VOID_TYPE. */
6180 static int
6181 validate_arglist (tree arglist, ...)
6183 enum tree_code code;
6184 int res = 0;
6185 va_list ap;
6187 va_start (ap, arglist);
6191 code = va_arg (ap, enum tree_code);
6192 switch (code)
6194 case 0:
6195 /* This signifies an ellipses, any further arguments are all ok. */
6196 res = 1;
6197 goto end;
6198 case VOID_TYPE:
6199 /* This signifies an endlink, if no arguments remain, return
6200 true, otherwise return false. */
6201 res = arglist == 0;
6202 goto end;
6203 default:
6204 /* If no parameters remain or the parameter's code does not
6205 match the specified code, return false. Otherwise continue
6206 checking any remaining arguments. */
6207 if (arglist == 0
6208 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
6209 goto end;
6210 break;
6212 arglist = TREE_CHAIN (arglist);
6214 while (1);
6216 /* We need gotos here since we can only have one VA_CLOSE in a
6217 function. */
6218 end: ;
6219 va_end (ap);
6221 return res;
6224 /* Default version of target-specific builtin setup that does nothing. */
6226 void
6227 default_init_builtins (void)
6231 /* Default target-specific builtin expander that does nothing. */
6234 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
6235 rtx target ATTRIBUTE_UNUSED,
6236 rtx subtarget ATTRIBUTE_UNUSED,
6237 enum machine_mode mode ATTRIBUTE_UNUSED,
6238 int ignore ATTRIBUTE_UNUSED)
6240 return NULL_RTX;
6243 /* Instantiate all remaining CONSTANT_P_RTX nodes. */
6245 void
6246 purge_builtin_constant_p (void)
6248 rtx insn, set, arg, new, note;
6250 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6251 if (INSN_P (insn)
6252 && (set = single_set (insn)) != NULL_RTX
6253 && (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
6254 || (GET_CODE (arg) == SUBREG
6255 && (GET_CODE (arg = SUBREG_REG (arg))
6256 == CONSTANT_P_RTX))))
6258 arg = XEXP (arg, 0);
6259 new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
6260 validate_change (insn, &SET_SRC (set), new, 0);
6262 /* Remove the REG_EQUAL note from the insn. */
6263 if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
6264 remove_note (insn, note);
6268 /* Returns true is EXP represents data that would potentially reside
6269 in a readonly section. */
6271 static bool
6272 readonly_data_expr (tree exp)
6274 STRIP_NOPS (exp);
6276 if (TREE_CODE (exp) == ADDR_EXPR)
6277 return decl_readonly_section (TREE_OPERAND (exp, 0), 0);
6278 else
6279 return false;