1 /* Target machine subroutines for Altera Nios II.
2 Copyright (C) 2012-2014 Free Software Foundation, Inc.
3 Contributed by Jonah Graham (jgraham@altera.com),
4 Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
5 Contributed by Mentor Graphics, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 3, or (at your
12 option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
34 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "diagnostic-core.h"
45 #include "target-def.h"
47 #include "langhooks.h"
52 #include "stor-layout.h"
56 /* Forward function declarations. */
57 static bool prologue_saved_reg_p (unsigned);
58 static void nios2_load_pic_register (void);
59 static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code
, int);
60 static const char *nios2_unspec_reloc_name (int);
61 static void nios2_register_builtin_fndecl (unsigned, tree
);
63 /* Threshold for data being put into the small data/bss area, instead
64 of the normal data area (references to the small data/bss area take
65 1 instruction, and use the global pointer, references to the normal
66 data area takes 2 instructions). */
67 unsigned HOST_WIDE_INT nios2_section_threshold
= NIOS2_DEFAULT_GVALUE
;
69 struct GTY (()) machine_function
71 /* Current frame information, to be filled in by nios2_compute_frame_layout
72 with register save masks, and offsets for the current function. */
74 /* Mask of registers to save. */
75 unsigned int save_mask
;
76 /* Number of bytes that the entire frame takes up. */
78 /* Number of bytes that variables take up. */
80 /* Number of bytes that outgoing arguments take up. */
82 /* Number of bytes needed to store registers in frame. */
84 /* Offset from new stack pointer to store registers. */
86 /* Offset from save_regs_offset to store frame pointer register. */
88 /* != 0 if frame layout already calculated. */
92 /* State to track the assignment of custom codes to FPU/custom builtins. */
93 static enum nios2_ccs_code custom_code_status
[256];
94 static int custom_code_index
[256];
95 /* Set to true if any conflicts (re-use of a code between 0-255) are found. */
96 static bool custom_code_conflict
= false;
/* Definition of builtin function types for nios2.  Each N2_FTYPE entry
   gives the operand count and the (return, args...) mode tuple; the list
   is expanded twice below: once into the nios2_ftcode enumeration and
   once (in nios2_ftype) into build_function_type_list calls.  */
#define N2_FTYPES                        \
  N2_FTYPE(1, (VOID))                    \
  N2_FTYPE(2, (DF, DF))                  \
  N2_FTYPE(3, (DF, DF, DF))              \
  N2_FTYPE(2, (DF, SF))                  \
  N2_FTYPE(2, (DF, SI))                  \
  N2_FTYPE(2, (DF, UI))                  \
  N2_FTYPE(2, (SF, DF))                  \
  N2_FTYPE(2, (SF, SF))                  \
  N2_FTYPE(3, (SF, SF, SF))              \
  N2_FTYPE(2, (SF, SI))                  \
  N2_FTYPE(2, (SF, UI))                  \
  N2_FTYPE(2, (SI, CVPTR))               \
  N2_FTYPE(2, (SI, DF))                  \
  N2_FTYPE(3, (SI, DF, DF))              \
  N2_FTYPE(2, (SI, SF))                  \
  N2_FTYPE(3, (SI, SF, SF))              \
  N2_FTYPE(2, (SI, SI))                  \
  N2_FTYPE(2, (UI, CVPTR))               \
  N2_FTYPE(2, (UI, DF))                  \
  N2_FTYPE(2, (UI, SF))                  \
  N2_FTYPE(2, (VOID, DF))                \
  N2_FTYPE(2, (VOID, SF))                \
  N2_FTYPE(3, (VOID, SI, SI))            \
  N2_FTYPE(3, (VOID, VPTR, SI))

#define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
#define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
#define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2

/* Expand ftcode enumeration.  */
enum nios2_ftcode {
#define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
N2_FTYPES
#undef N2_FTYPE
N2_FTYPE_MAX
};
140 /* Return the tree function type, based on the ftcode. */
142 nios2_ftype (enum nios2_ftcode ftcode
)
144 static tree types
[(int) N2_FTYPE_MAX
];
146 tree N2_TYPE_SF
= float_type_node
;
147 tree N2_TYPE_DF
= double_type_node
;
148 tree N2_TYPE_SI
= integer_type_node
;
149 tree N2_TYPE_UI
= unsigned_type_node
;
150 tree N2_TYPE_VOID
= void_type_node
;
152 static const_tree N2_TYPE_CVPTR
, N2_TYPE_VPTR
;
155 /* const volatile void *. */
157 = build_pointer_type (build_qualified_type (void_type_node
,
159 | TYPE_QUAL_VOLATILE
)));
160 /* volatile void *. */
162 = build_pointer_type (build_qualified_type (void_type_node
,
163 TYPE_QUAL_VOLATILE
));
165 if (types
[(int) ftcode
] == NULL_TREE
)
168 #define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
169 #define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
170 #define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
171 #define N2_FTYPE(N,ARGS) \
172 case N2_FTYPE_OP ## N ARGS: \
173 types[(int) ftcode] \
174 = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE); \
178 default: gcc_unreachable ();
180 return types
[(int) ftcode
];
184 /* Definition of FPU instruction descriptions. */
186 struct nios2_fpu_insn_info
189 int num_operands
, *optvar
;
192 #define N2F_DFREQ 0x2
193 #define N2F_UNSAFE 0x4
194 #define N2F_FINITE 0x8
196 enum insn_code icode
;
197 enum nios2_ftcode ftcode
;
200 /* Base macro for defining FPU instructions. */
201 #define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args) \
202 { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_, \
203 OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode, \
204 N2_FTYPE_OP ## nop args }
206 /* Arithmetic and math functions; 2 or 3 operand FP operations. */
207 #define N2FPU_OP2(mode) (mode, mode)
208 #define N2FPU_OP3(mode) (mode, mode, mode)
209 #define N2FPU_INSN_DEF(code, icode, nop, flags, m, M) \
210 N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags, \
211 icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
212 #define N2FPU_INSN_SF(code, nop, flags) \
213 N2FPU_INSN_DEF (code, code, nop, flags, s, S)
214 #define N2FPU_INSN_DF(code, nop, flags) \
215 N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)
217 /* Compare instructions, 3 operand FP operation with a SI result. */
218 #define N2FPU_CMP_DEF(code, flags, m, M) \
219 N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags, \
220 nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
221 #define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
222 #define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)
224 /* The order of definition needs to be maintained consistent with
225 enum n2fpu_code in nios2-opts.h. */
226 struct nios2_fpu_insn_info nios2_fpu_insn
[] =
228 /* Single precision instructions. */
229 N2FPU_INSN_SF (add
, 3, 0),
230 N2FPU_INSN_SF (sub
, 3, 0),
231 N2FPU_INSN_SF (mul
, 3, 0),
232 N2FPU_INSN_SF (div
, 3, 0),
233 /* Due to textual difference between min/max and smin/smax. */
234 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, s
, S
),
235 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, s
, S
),
236 N2FPU_INSN_SF (neg
, 2, 0),
237 N2FPU_INSN_SF (abs
, 2, 0),
238 N2FPU_INSN_SF (sqrt
, 2, 0),
239 N2FPU_INSN_SF (sin
, 2, N2F_UNSAFE
),
240 N2FPU_INSN_SF (cos
, 2, N2F_UNSAFE
),
241 N2FPU_INSN_SF (tan
, 2, N2F_UNSAFE
),
242 N2FPU_INSN_SF (atan
, 2, N2F_UNSAFE
),
243 N2FPU_INSN_SF (exp
, 2, N2F_UNSAFE
),
244 N2FPU_INSN_SF (log
, 2, N2F_UNSAFE
),
245 /* Single precision compares. */
246 N2FPU_CMP_SF (eq
), N2FPU_CMP_SF (ne
),
247 N2FPU_CMP_SF (lt
), N2FPU_CMP_SF (le
),
248 N2FPU_CMP_SF (gt
), N2FPU_CMP_SF (ge
),
250 /* Double precision instructions. */
251 N2FPU_INSN_DF (add
, 3, 0),
252 N2FPU_INSN_DF (sub
, 3, 0),
253 N2FPU_INSN_DF (mul
, 3, 0),
254 N2FPU_INSN_DF (div
, 3, 0),
255 /* Due to textual difference between min/max and smin/smax. */
256 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, d
, D
),
257 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, d
, D
),
258 N2FPU_INSN_DF (neg
, 2, 0),
259 N2FPU_INSN_DF (abs
, 2, 0),
260 N2FPU_INSN_DF (sqrt
, 2, 0),
261 N2FPU_INSN_DF (sin
, 2, N2F_UNSAFE
),
262 N2FPU_INSN_DF (cos
, 2, N2F_UNSAFE
),
263 N2FPU_INSN_DF (tan
, 2, N2F_UNSAFE
),
264 N2FPU_INSN_DF (atan
, 2, N2F_UNSAFE
),
265 N2FPU_INSN_DF (exp
, 2, N2F_UNSAFE
),
266 N2FPU_INSN_DF (log
, 2, N2F_UNSAFE
),
267 /* Double precision compares. */
268 N2FPU_CMP_DF (eq
), N2FPU_CMP_DF (ne
),
269 N2FPU_CMP_DF (lt
), N2FPU_CMP_DF (le
),
270 N2FPU_CMP_DF (gt
), N2FPU_CMP_DF (ge
),
272 /* Conversion instructions. */
273 N2FPU_INSN_DEF_BASE (floatis
, 2, 0, floatsisf2
, (SF
, SI
)),
274 N2FPU_INSN_DEF_BASE (floatus
, 2, 0, floatunssisf2
, (SF
, UI
)),
275 N2FPU_INSN_DEF_BASE (floatid
, 2, 0, floatsidf2
, (DF
, SI
)),
276 N2FPU_INSN_DEF_BASE (floatud
, 2, 0, floatunssidf2
, (DF
, UI
)),
277 N2FPU_INSN_DEF_BASE (fixsi
, 2, 0, fix_truncsfsi2
, (SI
, SF
)),
278 N2FPU_INSN_DEF_BASE (fixsu
, 2, 0, fixuns_truncsfsi2
, (UI
, SF
)),
279 N2FPU_INSN_DEF_BASE (fixdi
, 2, 0, fix_truncdfsi2
, (SI
, DF
)),
280 N2FPU_INSN_DEF_BASE (fixdu
, 2, 0, fixuns_truncdfsi2
, (UI
, DF
)),
281 N2FPU_INSN_DEF_BASE (fextsd
, 2, 0, extendsfdf2
, (DF
, SF
)),
282 N2FPU_INSN_DEF_BASE (ftruncds
, 2, 0, truncdfsf2
, (SF
, DF
)),
284 /* X, Y access instructions. */
285 N2FPU_INSN_DEF_BASE (fwrx
, 2, N2F_DFREQ
, nios2_fwrx
, (VOID
, DF
)),
286 N2FPU_INSN_DEF_BASE (fwry
, 2, N2F_DFREQ
, nios2_fwry
, (VOID
, SF
)),
287 N2FPU_INSN_DEF_BASE (frdxlo
, 1, N2F_DFREQ
, nios2_frdxlo
, (SF
)),
288 N2FPU_INSN_DEF_BASE (frdxhi
, 1, N2F_DFREQ
, nios2_frdxhi
, (SF
)),
289 N2FPU_INSN_DEF_BASE (frdy
, 1, N2F_DFREQ
, nios2_frdy
, (SF
))
/* Some macros for ease of access.  */
#define N2FPU(code) nios2_fpu_insn[(int) code]
#define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
#define N2FPU_N(code) (*N2FPU(code).optvar)
#define N2FPU_NAME(code) (N2FPU(code).name)
#define N2FPU_ICODE(code) (N2FPU(code).icode)
#define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
#define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
#define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
#define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
#define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)

/* Same as above, but for cases where using only the op part is shorter.  */
#define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
#define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
#define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
309 /* Export the FPU insn enabled predicate to nios2.md. */
311 nios2_fpu_insn_enabled (enum n2fpu_code code
)
313 return N2FPU_ENABLED_P (code
);
316 /* Return true if COND comparison for mode MODE is enabled under current
320 nios2_fpu_compare_enabled (enum rtx_code cond
, enum machine_mode mode
)
325 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqs
);
326 case NE
: return N2FPU_OP_ENABLED_P (fcmpnes
);
327 case GT
: return N2FPU_OP_ENABLED_P (fcmpgts
);
328 case GE
: return N2FPU_OP_ENABLED_P (fcmpges
);
329 case LT
: return N2FPU_OP_ENABLED_P (fcmplts
);
330 case LE
: return N2FPU_OP_ENABLED_P (fcmples
);
333 else if (mode
== DFmode
)
336 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqd
);
337 case NE
: return N2FPU_OP_ENABLED_P (fcmpned
);
338 case GT
: return N2FPU_OP_ENABLED_P (fcmpgtd
);
339 case GE
: return N2FPU_OP_ENABLED_P (fcmpged
);
340 case LT
: return N2FPU_OP_ENABLED_P (fcmpltd
);
341 case LE
: return N2FPU_OP_ENABLED_P (fcmpled
);
/* Stack layout and calling conventions.  */

/* Round LOC up to the preferred stack boundary.  */
#define NIOS2_STACK_ALIGN(LOC)                                          \
  (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))           \
   & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))
353 /* Return the bytes needed to compute the frame pointer from the current
356 nios2_compute_frame_layout (void)
359 unsigned int save_mask
= 0;
365 if (cfun
->machine
->initialized
)
366 return cfun
->machine
->total_size
;
368 var_size
= NIOS2_STACK_ALIGN (get_frame_size ());
369 out_args_size
= NIOS2_STACK_ALIGN (crtl
->outgoing_args_size
);
370 total_size
= var_size
+ out_args_size
;
372 /* Calculate space needed for gp registers. */
374 for (regno
= 0; regno
<= LAST_GP_REG
; regno
++)
375 if (prologue_saved_reg_p (regno
))
377 save_mask
|= 1 << regno
;
381 /* If we call eh_return, we need to save the EH data registers. */
382 if (crtl
->calls_eh_return
)
387 for (i
= 0; (r
= EH_RETURN_DATA_REGNO (i
)) != INVALID_REGNUM
; i
++)
388 if (!(save_mask
& (1 << r
)))
395 cfun
->machine
->fp_save_offset
= 0;
396 if (save_mask
& (1 << HARD_FRAME_POINTER_REGNUM
))
398 int fp_save_offset
= 0;
399 for (regno
= 0; regno
< HARD_FRAME_POINTER_REGNUM
; regno
++)
400 if (save_mask
& (1 << regno
))
403 cfun
->machine
->fp_save_offset
= fp_save_offset
;
406 save_reg_size
= NIOS2_STACK_ALIGN (save_reg_size
);
407 total_size
+= save_reg_size
;
408 total_size
+= NIOS2_STACK_ALIGN (crtl
->args
.pretend_args_size
);
410 /* Save other computed information. */
411 cfun
->machine
->save_mask
= save_mask
;
412 cfun
->machine
->total_size
= total_size
;
413 cfun
->machine
->var_size
= var_size
;
414 cfun
->machine
->args_size
= out_args_size
;
415 cfun
->machine
->save_reg_size
= save_reg_size
;
416 cfun
->machine
->initialized
= reload_completed
;
417 cfun
->machine
->save_regs_offset
= out_args_size
+ var_size
;
422 /* Generate save/restore of register REGNO at SP + OFFSET. Used by the
423 prologue/epilogue expand routines. */
425 save_reg (int regno
, unsigned offset
)
427 rtx reg
= gen_rtx_REG (SImode
, regno
);
428 rtx addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
429 gen_int_mode (offset
, Pmode
));
430 rtx insn
= emit_move_insn (gen_frame_mem (Pmode
, addr
), reg
);
431 RTX_FRAME_RELATED_P (insn
) = 1;
435 restore_reg (int regno
, unsigned offset
)
437 rtx reg
= gen_rtx_REG (SImode
, regno
);
438 rtx addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
439 gen_int_mode (offset
, Pmode
));
440 rtx insn
= emit_move_insn (reg
, gen_frame_mem (Pmode
, addr
));
441 /* Tag epilogue unwind note. */
442 add_reg_note (insn
, REG_CFA_RESTORE
, reg
);
443 RTX_FRAME_RELATED_P (insn
) = 1;
446 /* Emit conditional trap for checking stack limit. */
448 nios2_emit_stack_limit_check (void)
450 if (REG_P (stack_limit_rtx
))
451 emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode
, stack_pointer_rtx
,
453 stack_pointer_rtx
, stack_limit_rtx
, GEN_INT (3)));
455 sorry ("only register based stack limit is supported");
458 /* Temp regno used inside prologue/epilogue. */
459 #define TEMP_REG_NUM 8
462 nios2_expand_prologue (void)
465 int total_frame_size
, save_offset
;
466 int sp_offset
; /* offset from base_reg to final stack value. */
467 int save_regs_base
; /* offset from base_reg to register save area. */
470 total_frame_size
= nios2_compute_frame_layout ();
472 if (flag_stack_usage_info
)
473 current_function_static_stack_size
= total_frame_size
;
475 /* Decrement the stack pointer. */
476 if (!SMALL_INT (total_frame_size
))
478 /* We need an intermediary point, this will point at the spill block. */
480 (gen_add2_insn (stack_pointer_rtx
,
481 gen_int_mode (cfun
->machine
->save_regs_offset
482 - total_frame_size
, Pmode
)));
483 RTX_FRAME_RELATED_P (insn
) = 1;
485 sp_offset
= -cfun
->machine
->save_regs_offset
;
487 else if (total_frame_size
)
489 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
,
490 gen_int_mode (-total_frame_size
,
492 RTX_FRAME_RELATED_P (insn
) = 1;
493 save_regs_base
= cfun
->machine
->save_regs_offset
;
497 save_regs_base
= sp_offset
= 0;
499 if (crtl
->limit_stack
)
500 nios2_emit_stack_limit_check ();
502 save_offset
= save_regs_base
+ cfun
->machine
->save_reg_size
;
504 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
505 if (cfun
->machine
->save_mask
& (1 << regno
))
508 save_reg (regno
, save_offset
);
511 if (frame_pointer_needed
)
513 int fp_save_offset
= save_regs_base
+ cfun
->machine
->fp_save_offset
;
514 insn
= emit_insn (gen_add3_insn (hard_frame_pointer_rtx
,
516 gen_int_mode (fp_save_offset
, Pmode
)));
517 RTX_FRAME_RELATED_P (insn
) = 1;
523 = gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
524 plus_constant (Pmode
, stack_pointer_rtx
, sp_offset
));
525 if (SMALL_INT (sp_offset
))
526 insn
= emit_insn (sp_adjust
);
529 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
530 emit_move_insn (tmp
, gen_int_mode (sp_offset
, Pmode
));
531 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, tmp
));
532 /* Attach the sp_adjust as a note indicating what happened. */
533 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
, sp_adjust
);
535 RTX_FRAME_RELATED_P (insn
) = 1;
537 if (crtl
->limit_stack
)
538 nios2_emit_stack_limit_check ();
541 /* Load the PIC register if needed. */
542 if (crtl
->uses_pic_offset_table
)
543 nios2_load_pic_register ();
545 /* If we are profiling, make sure no instructions are scheduled before
546 the call to mcount. */
548 emit_insn (gen_blockage ());
552 nios2_expand_epilogue (bool sibcall_p
)
555 int total_frame_size
;
556 int sp_adjust
, save_offset
;
559 if (!sibcall_p
&& nios2_can_use_return_insn ())
561 emit_jump_insn (gen_return ());
565 emit_insn (gen_blockage ());
567 total_frame_size
= nios2_compute_frame_layout ();
568 if (frame_pointer_needed
)
570 /* Recover the stack pointer. */
571 insn
= emit_insn (gen_add3_insn
572 (stack_pointer_rtx
, hard_frame_pointer_rtx
,
573 gen_int_mode (-cfun
->machine
->fp_save_offset
, Pmode
)));
574 cfa_adj
= plus_constant (Pmode
, stack_pointer_rtx
,
576 - cfun
->machine
->save_regs_offset
));
577 add_reg_note (insn
, REG_CFA_DEF_CFA
, cfa_adj
);
578 RTX_FRAME_RELATED_P (insn
) = 1;
581 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
583 else if (!SMALL_INT (total_frame_size
))
585 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
586 emit_move_insn (tmp
, gen_int_mode (cfun
->machine
->save_regs_offset
,
588 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, tmp
));
589 cfa_adj
= gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
590 plus_constant (Pmode
, stack_pointer_rtx
,
591 cfun
->machine
->save_regs_offset
));
592 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, cfa_adj
);
593 RTX_FRAME_RELATED_P (insn
) = 1;
595 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
599 save_offset
= cfun
->machine
->save_regs_offset
;
600 sp_adjust
= total_frame_size
;
603 save_offset
+= cfun
->machine
->save_reg_size
;
605 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
606 if (cfun
->machine
->save_mask
& (1 << regno
))
609 restore_reg (regno
, save_offset
);
614 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
,
615 gen_int_mode (sp_adjust
, Pmode
)));
616 cfa_adj
= gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
617 plus_constant (Pmode
, stack_pointer_rtx
,
619 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, cfa_adj
);
620 RTX_FRAME_RELATED_P (insn
) = 1;
623 /* Add in the __builtin_eh_return stack adjustment. */
624 if (crtl
->calls_eh_return
)
625 emit_insn (gen_add2_insn (stack_pointer_rtx
, EH_RETURN_STACKADJ_RTX
));
628 emit_jump_insn (gen_simple_return ());
631 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
632 back to a previous frame. */
634 nios2_get_return_address (int count
)
639 return get_hard_reg_initial_val (Pmode
, RA_REGNO
);
642 /* Emit code to change the current function's return address to
643 ADDRESS. SCRATCH is available as a scratch register, if needed.
644 ADDRESS and SCRATCH are both word-mode GPRs. */
646 nios2_set_return_address (rtx address
, rtx scratch
)
648 nios2_compute_frame_layout ();
649 if (cfun
->machine
->save_mask
& (1 << RA_REGNO
))
651 unsigned offset
= cfun
->machine
->save_reg_size
- 4;
654 if (frame_pointer_needed
)
655 base
= hard_frame_pointer_rtx
;
658 base
= stack_pointer_rtx
;
659 offset
+= cfun
->machine
->save_regs_offset
;
661 if (!SMALL_INT (offset
))
663 emit_move_insn (scratch
, gen_int_mode (offset
, Pmode
));
664 emit_insn (gen_add2_insn (scratch
, base
));
670 base
= plus_constant (Pmode
, base
, offset
);
671 emit_move_insn (gen_rtx_MEM (Pmode
, base
), address
);
674 emit_move_insn (gen_rtx_REG (Pmode
, RA_REGNO
), address
);
677 /* Implement FUNCTION_PROFILER macro. */
679 nios2_function_profiler (FILE *file
, int labelno ATTRIBUTE_UNUSED
)
681 fprintf (file
, "\tmov\tr8, ra\n");
684 fprintf (file
, "\tnextpc\tr2\n");
685 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
686 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
687 fprintf (file
, "\tadd\tr2, r2, r3\n");
688 fprintf (file
, "\tldw\tr2, %%call(_mcount)(r2)\n");
689 fprintf (file
, "\tcallr\tr2\n");
691 else if (flag_pic
== 2)
693 fprintf (file
, "\tnextpc\tr2\n");
694 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
695 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
696 fprintf (file
, "\tadd\tr2, r2, r3\n");
697 fprintf (file
, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
698 fprintf (file
, "\taddi\tr3, %%call_lo(_mcount)\n");
699 fprintf (file
, "\tadd\tr3, r2, r3\n");
700 fprintf (file
, "\tldw\tr2, 0(r3)\n");
701 fprintf (file
, "\tcallr\tr2\n");
704 fprintf (file
, "\tcall\t_mcount\n");
705 fprintf (file
, "\tmov\tra, r8\n");
708 /* Dump stack layout. */
710 nios2_dump_frame_layout (FILE *file
)
712 fprintf (file
, "\t%s Current Frame Info\n", ASM_COMMENT_START
);
713 fprintf (file
, "\t%s total_size = %d\n", ASM_COMMENT_START
,
714 cfun
->machine
->total_size
);
715 fprintf (file
, "\t%s var_size = %d\n", ASM_COMMENT_START
,
716 cfun
->machine
->var_size
);
717 fprintf (file
, "\t%s args_size = %d\n", ASM_COMMENT_START
,
718 cfun
->machine
->args_size
);
719 fprintf (file
, "\t%s save_reg_size = %d\n", ASM_COMMENT_START
,
720 cfun
->machine
->save_reg_size
);
721 fprintf (file
, "\t%s initialized = %d\n", ASM_COMMENT_START
,
722 cfun
->machine
->initialized
);
723 fprintf (file
, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START
,
724 cfun
->machine
->save_regs_offset
);
725 fprintf (file
, "\t%s is_leaf = %d\n", ASM_COMMENT_START
,
727 fprintf (file
, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START
,
728 frame_pointer_needed
);
729 fprintf (file
, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START
,
730 crtl
->args
.pretend_args_size
);
733 /* Return true if REGNO should be saved in the prologue. */
735 prologue_saved_reg_p (unsigned regno
)
737 gcc_assert (GP_REG_P (regno
));
739 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
742 if (regno
== HARD_FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
745 if (regno
== PIC_OFFSET_TABLE_REGNUM
&& crtl
->uses_pic_offset_table
)
748 if (regno
== RA_REGNO
&& df_regs_ever_live_p (RA_REGNO
))
754 /* Implement TARGET_CAN_ELIMINATE. */
756 nios2_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
758 if (to
== STACK_POINTER_REGNUM
)
759 return !frame_pointer_needed
;
763 /* Implement INITIAL_ELIMINATION_OFFSET macro. */
765 nios2_initial_elimination_offset (int from
, int to
)
769 nios2_compute_frame_layout ();
771 /* Set OFFSET to the offset from the stack pointer. */
774 case FRAME_POINTER_REGNUM
:
775 offset
= cfun
->machine
->args_size
;
778 case ARG_POINTER_REGNUM
:
779 offset
= cfun
->machine
->total_size
;
780 offset
-= crtl
->args
.pretend_args_size
;
787 /* If we are asked for the frame pointer offset, then adjust OFFSET
788 by the offset from the frame pointer to the stack pointer. */
789 if (to
== HARD_FRAME_POINTER_REGNUM
)
790 offset
-= (cfun
->machine
->save_regs_offset
791 + cfun
->machine
->fp_save_offset
);
796 /* Return nonzero if this function is known to have a null epilogue.
797 This allows the optimizer to omit jumps to jumps if no stack
800 nios2_can_use_return_insn (void)
802 if (!reload_completed
|| crtl
->profile
)
805 return nios2_compute_frame_layout () == 0;
809 /* Check and signal some warnings/errors on FPU insn options. */
811 nios2_custom_check_insns (void)
816 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
817 if (N2FPU_ENABLED_P (i
) && N2FPU_DOUBLE_P (i
))
819 for (j
= 0; j
< ARRAY_SIZE (nios2_fpu_insn
); j
++)
820 if (N2FPU_DOUBLE_REQUIRED_P (j
) && ! N2FPU_ENABLED_P (j
))
822 error ("switch %<-mcustom-%s%> is required for double "
823 "precision floating point", N2FPU_NAME (j
));
829 /* Warn if the user has certain exotic operations that won't get used
830 without -funsafe-math-optimizations. See expand_builtin () in
832 if (!flag_unsafe_math_optimizations
)
833 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
834 if (N2FPU_ENABLED_P (i
) && N2FPU_UNSAFE_P (i
))
835 warning (0, "switch %<-mcustom-%s%> has no effect unless "
836 "-funsafe-math-optimizations is specified", N2FPU_NAME (i
));
838 /* Warn if the user is trying to use -mcustom-fmins et. al, that won't
839 get used without -ffinite-math-only. See fold_builtin_fmin_fmax ()
841 if (!flag_finite_math_only
)
842 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
843 if (N2FPU_ENABLED_P (i
) && N2FPU_FINITE_P (i
))
844 warning (0, "switch %<-mcustom-%s%> has no effect unless "
845 "-ffinite-math-only is specified", N2FPU_NAME (i
));
847 if (errors
|| custom_code_conflict
)
848 fatal_error ("conflicting use of -mcustom switches, target attributes, "
849 "and/or __builtin_custom_ functions");
853 nios2_set_fpu_custom_code (enum n2fpu_code code
, int n
, bool override_p
)
855 if (override_p
|| N2FPU_N (code
) == -1)
857 nios2_register_custom_code (n
, CCS_FPU
, (int) code
);
860 /* Type to represent a standard FPU config. */
861 struct nios2_fpu_config
864 bool set_sp_constants
;
865 int code
[n2fpu_code_num
];
868 #define NIOS2_FPU_CONFIG_NUM 3
869 static struct nios2_fpu_config custom_fpu_config
[NIOS2_FPU_CONFIG_NUM
];
872 nios2_init_fpu_configs (void)
874 struct nios2_fpu_config
* cfg
;
876 #define NEXT_FPU_CONFIG \
878 cfg = &custom_fpu_config[i++]; \
879 memset (cfg, -1, sizeof (struct nios2_fpu_config));\
884 cfg
->set_sp_constants
= true;
885 cfg
->code
[n2fpu_fmuls
] = 252;
886 cfg
->code
[n2fpu_fadds
] = 253;
887 cfg
->code
[n2fpu_fsubs
] = 254;
891 cfg
->set_sp_constants
= true;
892 cfg
->code
[n2fpu_fmuls
] = 252;
893 cfg
->code
[n2fpu_fadds
] = 253;
894 cfg
->code
[n2fpu_fsubs
] = 254;
895 cfg
->code
[n2fpu_fdivs
] = 255;
899 cfg
->set_sp_constants
= true;
900 cfg
->code
[n2fpu_floatus
] = 243;
901 cfg
->code
[n2fpu_fixsi
] = 244;
902 cfg
->code
[n2fpu_floatis
] = 245;
903 cfg
->code
[n2fpu_fcmpgts
] = 246;
904 cfg
->code
[n2fpu_fcmples
] = 249;
905 cfg
->code
[n2fpu_fcmpeqs
] = 250;
906 cfg
->code
[n2fpu_fcmpnes
] = 251;
907 cfg
->code
[n2fpu_fmuls
] = 252;
908 cfg
->code
[n2fpu_fadds
] = 253;
909 cfg
->code
[n2fpu_fsubs
] = 254;
910 cfg
->code
[n2fpu_fdivs
] = 255;
912 #undef NEXT_FPU_CONFIG
913 gcc_assert (i
== NIOS2_FPU_CONFIG_NUM
);
916 static struct nios2_fpu_config
*
917 nios2_match_custom_fpu_cfg (const char *cfgname
, const char *endp
)
920 for (i
= 0; i
< NIOS2_FPU_CONFIG_NUM
; i
++)
922 bool match
= !(endp
!= NULL
923 ? strncmp (custom_fpu_config
[i
].name
, cfgname
,
925 : strcmp (custom_fpu_config
[i
].name
, cfgname
));
927 return &custom_fpu_config
[i
];
932 /* Use CFGNAME to lookup FPU config, ENDP if not NULL marks end of string.
933 OVERRIDE is true if loaded config codes should overwrite current state. */
935 nios2_handle_custom_fpu_cfg (const char *cfgname
, const char *endp
,
938 struct nios2_fpu_config
*cfg
= nios2_match_custom_fpu_cfg (cfgname
, endp
);
942 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
943 if (cfg
->code
[i
] >= 0)
944 nios2_set_fpu_custom_code ((enum n2fpu_code
) i
, cfg
->code
[i
],
946 if (cfg
->set_sp_constants
)
947 flag_single_precision_constant
= 1;
950 warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
951 "value %<%s%>", cfgname
);
953 /* Guard against errors in the standard configurations. */
954 nios2_custom_check_insns ();
957 /* Check individual FPU insn options, and register custom code. */
959 nios2_handle_custom_fpu_insn_option (int fpu_insn_index
)
961 int param
= N2FPU_N (fpu_insn_index
);
963 if (0 <= param
&& param
<= 255)
964 nios2_register_custom_code (param
, CCS_FPU
, fpu_insn_index
);
966 /* Valid values are 0-255, but also allow -1 so that the
967 -mno-custom-<opt> switches work. */
968 else if (param
!= -1)
969 error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
970 N2FPU_NAME (fpu_insn_index
), param
);
/* Allocate a chunk of memory for per-function machine-dependent data.  */
static struct machine_function *
nios2_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
980 /* Implement TARGET_OPTION_OVERRIDE. */
982 nios2_option_override (void)
986 #ifdef SUBTARGET_OVERRIDE_OPTIONS
987 SUBTARGET_OVERRIDE_OPTIONS
;
990 /* Check for unsupported options. */
991 if (flag_pic
&& !TARGET_LINUX_ABI
)
992 sorry ("position-independent code requires the Linux ABI");
994 /* Function to allocate machine-dependent function status. */
995 init_machine_status
= &nios2_init_machine_status
;
997 nios2_section_threshold
998 = (global_options_set
.x_g_switch_value
999 ? g_switch_value
: NIOS2_DEFAULT_GVALUE
);
1001 /* Default to -mgpopt unless -fpic or -fPIC. */
1002 if (TARGET_GPOPT
== -1 && flag_pic
)
1005 /* If we don't have mul, we don't have mulx either! */
1006 if (!TARGET_HAS_MUL
&& TARGET_HAS_MULX
)
1007 target_flags
&= ~MASK_HAS_MULX
;
1009 /* Initialize default FPU configurations. */
1010 nios2_init_fpu_configs ();
1012 /* Set up default handling for floating point custom instructions.
1014 Putting things in this order means that the -mcustom-fpu-cfg=
1015 switch will always be overridden by individual -mcustom-fadds=
1016 switches, regardless of the order in which they were specified
1017 on the command line.
1019 This behavior of prioritization of individual -mcustom-<insn>=
1020 options before the -mcustom-fpu-cfg= switch is maintained for
1022 if (nios2_custom_fpu_cfg_string
&& *nios2_custom_fpu_cfg_string
)
1023 nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string
, NULL
, false);
1025 /* Handle options for individual FPU insns. */
1026 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1027 nios2_handle_custom_fpu_insn_option (i
);
1029 nios2_custom_check_insns ();
1031 /* Save the initial options in case the user does function specific
1033 target_option_default_node
= target_option_current_node
1034 = build_target_option_node (&global_options
);
1038 /* Return true if CST is a constant within range of movi/movui/movhi. */
1040 nios2_simple_const_p (const_rtx cst
)
1042 HOST_WIDE_INT val
= INTVAL (cst
);
1043 return SMALL_INT (val
) || SMALL_INT_UNSIGNED (val
) || UPPER16_INT (val
);
1046 /* Compute a (partial) cost for rtx X. Return true if the complete
1047 cost has been computed, and false if subexpressions should be
1048 scanned. In either case, *TOTAL contains the cost result. */
1050 nios2_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
,
1051 int opno ATTRIBUTE_UNUSED
,
1052 int *total
, bool speed ATTRIBUTE_UNUSED
)
1057 if (INTVAL (x
) == 0)
1059 *total
= COSTS_N_INSNS (0);
1062 else if (nios2_simple_const_p (x
))
1064 *total
= COSTS_N_INSNS (2);
1069 *total
= COSTS_N_INSNS (4);
1078 *total
= COSTS_N_INSNS (4);
1084 /* Recognize 'nor' insn pattern. */
1085 if (GET_CODE (XEXP (x
, 0)) == NOT
1086 && GET_CODE (XEXP (x
, 1)) == NOT
)
1088 *total
= COSTS_N_INSNS (1);
1096 *total
= COSTS_N_INSNS (1);
1101 *total
= COSTS_N_INSNS (3);
1106 *total
= COSTS_N_INSNS (1);
1115 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1117 nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, reg_class_t regclass
)
1119 return regclass
== NO_REGS
? GENERAL_REGS
: regclass
;
1122 /* Emit a call to __tls_get_addr. TI is the argument to this function.
1123 RET is an RTX for the return value location. The entire insn sequence
1125 static GTY(()) rtx nios2_tls_symbol
;
1128 nios2_call_tls_get_addr (rtx ti
)
1130 rtx arg
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
);
1131 rtx ret
= gen_rtx_REG (Pmode
, FIRST_RETVAL_REGNO
);
1134 if (!nios2_tls_symbol
)
1135 nios2_tls_symbol
= init_one_libfunc ("__tls_get_addr");
1137 emit_move_insn (arg
, ti
);
1138 fn
= gen_rtx_MEM (QImode
, nios2_tls_symbol
);
1139 insn
= emit_call_insn (gen_call_value (ret
, fn
, const0_rtx
));
1140 RTL_CONST_CALL_P (insn
) = 1;
1141 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), ret
);
1142 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), arg
);
1147 /* Return true for large offsets requiring hiadj/lo relocation pairs. */
1149 nios2_large_offset_p (int unspec
)
1151 gcc_assert (nios2_unspec_reloc_name (unspec
) != NULL
);
1154 /* FIXME: TLS GOT offset relocations will eventually also get this
1155 treatment, after binutils support for those are also completed. */
1156 && (unspec
== UNSPEC_PIC_SYM
|| unspec
== UNSPEC_PIC_CALL_SYM
))
1159 /* 'gotoff' offsets are always hiadj/lo. */
1160 if (unspec
== UNSPEC_PIC_GOTOFF_SYM
)
1166 /* Return true for conforming unspec relocations. Also used in
1167 constraints.md and predicates.md. */
1169 nios2_unspec_reloc_p (rtx op
)
1171 return (GET_CODE (op
) == CONST
1172 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1173 && ! nios2_large_offset_p (XINT (XEXP (op
, 0), 1)));
1176 /* Helper to generate unspec constant. */
1178 nios2_unspec_offset (rtx loc
, int unspec
)
1180 return gen_rtx_CONST (Pmode
, gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, loc
),
1184 /* Generate GOT pointer based address with large offset. */
1186 nios2_large_got_address (rtx sym
, rtx offset
)
1188 rtx addr
= gen_reg_rtx (Pmode
);
1189 emit_insn (gen_add3_insn (addr
, pic_offset_table_rtx
,
1190 force_reg (Pmode
, offset
)));
1194 /* Generate a GOT pointer based address. */
1196 nios2_got_address (rtx loc
, int unspec
)
1198 rtx offset
= nios2_unspec_offset (loc
, unspec
);
1199 crtl
->uses_pic_offset_table
= 1;
1201 if (nios2_large_offset_p (unspec
))
1202 return nios2_large_got_address (loc
, offset
);
1204 return gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, offset
);
1207 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
1208 return value will be a valid address and move_operand (either a REG
1211 nios2_legitimize_tls_address (rtx loc
)
1214 enum tls_model model
= SYMBOL_REF_TLS_MODEL (loc
);
1218 case TLS_MODEL_GLOBAL_DYNAMIC
:
1219 tmp
= gen_reg_rtx (Pmode
);
1220 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_GD
));
1221 return nios2_call_tls_get_addr (tmp
);
1223 case TLS_MODEL_LOCAL_DYNAMIC
:
1224 tmp
= gen_reg_rtx (Pmode
);
1225 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_LDM
));
1226 return gen_rtx_PLUS (Pmode
, nios2_call_tls_get_addr (tmp
),
1227 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LDO
));
1229 case TLS_MODEL_INITIAL_EXEC
:
1230 tmp
= gen_reg_rtx (Pmode
);
1231 mem
= gen_const_mem (Pmode
, nios2_got_address (loc
, UNSPEC_LOAD_TLS_IE
));
1232 emit_move_insn (tmp
, mem
);
1233 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1234 return gen_rtx_PLUS (Pmode
, tp
, tmp
);
1236 case TLS_MODEL_LOCAL_EXEC
:
1237 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1238 return gen_rtx_PLUS (Pmode
, tp
,
1239 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LE
));
1247 If -O3 is used, we want to output a table lookup for
1248 divides between small numbers (both num and den >= 0
1249 and < 0x10). The overhead of this method in the worst
1250 case is 40 bytes in the text section (10 insns) and
1251 256 bytes in the data section. Additional divides do
1252 not incur additional penalties in the data section.
1254 Code speed is improved for small divides by about 5x
1255 when using this method in the worse case (~9 cycles
1256 vs ~45). And in the worst case divides not within the
1257 table are penalized by about 10% (~5 cycles vs ~45).
1258 However in the typical case the penalty is not as bad
1259 because doing the long divide in only 45 cycles is
1262 ??? would be nice to have some benchmarks other
1263 than Dhrystone to back this up.
1265 This bit of expansion is to create this instruction
1272 add $12, $11, divide_table
1278 # continue here with result in $2
1280 ??? Ideally I would like the libcall block to contain all
1281 of this code, but I don't know how to do that. What it
1282 means is that if the divide can be eliminated, it may not
1283 completely disappear.
1285 ??? The __divsi3_table label should ideally be moved out
1286 of this block and into a global. If it is placed into the
1287 sdata section we can save even more cycles by doing things
1290 nios2_emit_expensive_div (rtx
*operands
, enum machine_mode mode
)
1292 rtx or_result
, shift_left_result
;
1301 /* It may look a little generic, but only SImode is supported for now. */
1302 gcc_assert (mode
== SImode
);
1303 libfunc
= optab_libfunc (sdiv_optab
, SImode
);
1305 lab1
= gen_label_rtx ();
1306 lab3
= gen_label_rtx ();
1308 or_result
= expand_simple_binop (SImode
, IOR
,
1309 operands
[1], operands
[2],
1310 0, 0, OPTAB_LIB_WIDEN
);
1312 emit_cmp_and_jump_insns (or_result
, GEN_INT (15), GTU
, 0,
1313 GET_MODE (or_result
), 0, lab3
);
1314 JUMP_LABEL (get_last_insn ()) = lab3
;
1316 shift_left_result
= expand_simple_binop (SImode
, ASHIFT
,
1317 operands
[1], GEN_INT (4),
1318 0, 0, OPTAB_LIB_WIDEN
);
1320 lookup_value
= expand_simple_binop (SImode
, IOR
,
1321 shift_left_result
, operands
[2],
1322 0, 0, OPTAB_LIB_WIDEN
);
1323 table
= gen_rtx_PLUS (SImode
, lookup_value
,
1324 gen_rtx_SYMBOL_REF (SImode
, "__divsi3_table"));
1325 convert_move (operands
[0], gen_rtx_MEM (QImode
, table
), 1);
1327 tmp
= emit_jump_insn (gen_jump (lab1
));
1328 JUMP_LABEL (tmp
) = lab1
;
1332 LABEL_NUSES (lab3
) = 1;
1335 final_result
= emit_library_call_value (libfunc
, NULL_RTX
,
1336 LCT_CONST
, SImode
, 2,
1337 operands
[1], SImode
,
1338 operands
[2], SImode
);
1340 insns
= get_insns ();
1342 emit_libcall_block (insns
, operands
[0], final_result
,
1343 gen_rtx_DIV (SImode
, operands
[1], operands
[2]));
1346 LABEL_NUSES (lab1
) = 1;
1350 /* Branches and compares. */
1352 /* Return in *ALT_CODE and *ALT_OP, an alternate equivalent constant
1353 comparison, e.g. >= 1 into > 0. */
1355 nios2_alternate_compare_const (enum rtx_code code
, rtx op
,
1356 enum rtx_code
*alt_code
, rtx
*alt_op
,
1357 enum machine_mode mode
)
1359 HOST_WIDE_INT opval
= INTVAL (op
);
1360 enum rtx_code scode
= signed_condition (code
);
1361 bool dec_p
= (scode
== LT
|| scode
== GE
);
1363 if (code
== EQ
|| code
== NE
)
1371 ? gen_int_mode (opval
- 1, mode
)
1372 : gen_int_mode (opval
+ 1, mode
));
1374 /* The required conversion between [>,>=] and [<,<=] is captured
1375 by a reverse + swap of condition codes. */
1376 *alt_code
= reverse_condition (swap_condition (code
));
1379 /* Test if the incremented/decremented value crosses the over/underflow
1380 boundary. Supposedly, such boundary cases should already be transformed
1381 into always-true/false or EQ conditions, so use an assertion here. */
1382 unsigned HOST_WIDE_INT alt_opval
= INTVAL (*alt_op
);
1384 alt_opval
^= (1 << (GET_MODE_BITSIZE (mode
) - 1));
1385 alt_opval
&= GET_MODE_MASK (mode
);
1386 gcc_assert (dec_p
? alt_opval
!= GET_MODE_MASK (mode
) : alt_opval
!= 0);
1390 /* Return true if the constant comparison is supported by nios2. */
1392 nios2_valid_compare_const_p (enum rtx_code code
, rtx op
)
1396 case EQ
: case NE
: case GE
: case LT
:
1397 return SMALL_INT (INTVAL (op
));
1399 return SMALL_INT_UNSIGNED (INTVAL (op
));
1405 /* Checks if the FPU comparison in *CMP, *OP1, and *OP2 can be supported in
1406 the current configuration. Perform modifications if MODIFY_P is true.
1407 Returns true if FPU compare can be done. */
1410 nios2_validate_fpu_compare (enum machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
,
1414 enum rtx_code code
= GET_CODE (*cmp
);
1416 if (!nios2_fpu_compare_enabled (code
, mode
))
1418 code
= swap_condition (code
);
1419 if (nios2_fpu_compare_enabled (code
, mode
))
1433 *op1
= force_reg (mode
, *op1
);
1434 *op2
= force_reg (mode
, *op2
);
1435 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
1440 /* Checks and modifies the comparison in *CMP, *OP1, and *OP2 into valid
1441 nios2 supported form. Returns true if success. */
1443 nios2_validate_compare (enum machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
)
1445 enum rtx_code code
= GET_CODE (*cmp
);
1446 enum rtx_code alt_code
;
1449 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
1450 return nios2_validate_fpu_compare (mode
, cmp
, op1
, op2
, true);
1452 if (!reg_or_0_operand (*op2
, mode
))
1454 /* Create alternate constant compare. */
1455 nios2_alternate_compare_const (code
, *op2
, &alt_code
, &alt_op2
, mode
);
1457 /* If alterate op2 is zero(0), we can use it directly, possibly
1458 swapping the compare code. */
1459 if (alt_op2
== const0_rtx
)
1463 goto check_rebuild_cmp
;
1466 /* Check if either constant compare can be used. */
1467 if (nios2_valid_compare_const_p (code
, *op2
))
1469 else if (nios2_valid_compare_const_p (alt_code
, alt_op2
))
1476 /* We have to force op2 into a register now. Try to pick one
1477 with a lower cost. */
1478 if (! nios2_simple_const_p (*op2
)
1479 && nios2_simple_const_p (alt_op2
))
1484 *op2
= force_reg (SImode
, *op2
);
1487 if (code
== GT
|| code
== GTU
|| code
== LE
|| code
== LEU
)
1489 rtx t
= *op1
; *op1
= *op2
; *op2
= t
;
1490 code
= swap_condition (code
);
1493 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
1498 /* Addressing Modes. */
1500 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1502 nios2_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1505 split_const (x
, &base
, &offset
);
1506 return GET_CODE (base
) != SYMBOL_REF
|| !SYMBOL_REF_TLS_MODEL (base
);
1509 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
1511 nios2_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1513 return nios2_legitimate_constant_p (mode
, x
) == false;
1516 /* Return true if register REGNO is a valid base register.
1517 STRICT_P is true if REG_OK_STRICT is in effect. */
1520 nios2_regno_ok_for_base_p (int regno
, bool strict_p
)
1522 if (!HARD_REGISTER_NUM_P (regno
))
1530 regno
= reg_renumber
[regno
];
1533 /* The fake registers will be eliminated to either the stack or
1534 hard frame pointer, both of which are usually valid base registers.
1535 Reload deals with the cases where the eliminated form isn't valid. */
1536 return (GP_REG_P (regno
)
1537 || regno
== FRAME_POINTER_REGNUM
1538 || regno
== ARG_POINTER_REGNUM
);
1541 /* Return true if the address expression formed by BASE + OFFSET is
1544 nios2_valid_addr_expr_p (rtx base
, rtx offset
, bool strict_p
)
1546 if (!strict_p
&& GET_CODE (base
) == SUBREG
)
1547 base
= SUBREG_REG (base
);
1548 return (REG_P (base
)
1549 && nios2_regno_ok_for_base_p (REGNO (base
), strict_p
)
1550 && (offset
== NULL_RTX
1551 || const_arith_operand (offset
, Pmode
)
1552 || nios2_unspec_reloc_p (offset
)));
1555 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1557 nios2_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
,
1558 rtx operand
, bool strict_p
)
1560 switch (GET_CODE (operand
))
1564 if (SYMBOL_REF_TLS_MODEL (operand
))
1567 if (nios2_symbol_ref_in_small_data_p (operand
))
1570 /* Else, fall through. */
1577 /* Register indirect. */
1579 return nios2_regno_ok_for_base_p (REGNO (operand
), strict_p
);
1581 /* Register indirect with displacement. */
1584 rtx op0
= XEXP (operand
, 0);
1585 rtx op1
= XEXP (operand
, 1);
1587 return (nios2_valid_addr_expr_p (op0
, op1
, strict_p
)
1588 || nios2_valid_addr_expr_p (op1
, op0
, strict_p
));
/* Return true if SECTION is a small section name: exactly .sbss or
   .sdata, or any of their dot-suffixed subsections.  */
static bool
nios2_small_section_name_p (const char *section)
{
  if (strcmp (section, ".sbss") == 0 || strcmp (section, ".sdata") == 0)
    return true;
  return (strncmp (section, ".sbss.", 6) == 0
	  || strncmp (section, ".sdata.", 7) == 0);
}
1607 /* Return true if EXP should be placed in the small data section. */
1609 nios2_in_small_data_p (const_tree exp
)
1611 /* We want to merge strings, so we never consider them small data. */
1612 if (TREE_CODE (exp
) == STRING_CST
)
1615 if (TREE_CODE (exp
) == VAR_DECL
)
1617 if (DECL_SECTION_NAME (exp
))
1619 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (exp
));
1620 if (nios2_section_threshold
> 0
1621 && nios2_small_section_name_p (section
))
1626 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
1628 /* If this is an incomplete type with size 0, then we can't put it
1629 in sdata because it might be too big when completed. */
1631 && (unsigned HOST_WIDE_INT
) size
<= nios2_section_threshold
)
1639 /* Return true if symbol is in small data section. */
1642 nios2_symbol_ref_in_small_data_p (rtx sym
)
1644 gcc_assert (GET_CODE (sym
) == SYMBOL_REF
);
1647 /* GP-relative access cannot be used for externally defined symbols,
1648 because the compilation unit that defines the symbol may place it
1649 in a section that cannot be reached from GP. */
1650 && !SYMBOL_REF_EXTERNAL_P (sym
)
1651 /* True if a symbol is both small and not weak. */
1652 && SYMBOL_REF_SMALL_P (sym
)
1653 && !(SYMBOL_REF_DECL (sym
) && DECL_WEAK (SYMBOL_REF_DECL (sym
)))
1654 /* TLS variables are not accessed through the GP. */
1655 && SYMBOL_REF_TLS_MODEL (sym
) == 0);
1659 /* Implement TARGET_SECTION_TYPE_FLAGS. */
1662 nios2_section_type_flags (tree decl
, const char *name
, int reloc
)
1666 flags
= default_section_type_flags (decl
, name
, reloc
);
1668 if (nios2_small_section_name_p (name
))
1669 flags
|= SECTION_SMALL
;
1674 /* Return true if SYMBOL_REF X binds locally. */
1677 nios2_symbol_binds_local_p (const_rtx x
)
1679 return (SYMBOL_REF_DECL (x
)
1680 ? targetm
.binds_local_p (SYMBOL_REF_DECL (x
))
1681 : SYMBOL_REF_LOCAL_P (x
));
1684 /* Position independent code related. */
1686 /* Emit code to load the PIC register. */
1688 nios2_load_pic_register (void)
1690 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
1692 emit_insn (gen_load_got_register (pic_offset_table_rtx
, tmp
));
1693 emit_insn (gen_add3_insn (pic_offset_table_rtx
, pic_offset_table_rtx
, tmp
));
1696 /* Generate a PIC address as a MEM rtx. */
1698 nios2_load_pic_address (rtx sym
, int unspec
)
1701 && GET_CODE (sym
) == SYMBOL_REF
1702 && nios2_symbol_binds_local_p (sym
))
1703 /* Under -fPIC, generate a GOTOFF address for local symbols. */
1704 return nios2_got_address (sym
, UNSPEC_PIC_GOTOFF_SYM
);
1706 return gen_const_mem (Pmode
, nios2_got_address (sym
, unspec
));
1709 /* Nonzero if the constant value X is a legitimate general operand
1710 when generating PIC code. It is given that flag_pic is on and
1711 that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
1713 nios2_legitimate_pic_operand_p (rtx x
)
1715 if (GET_CODE (x
) == CONST
1716 && GET_CODE (XEXP (x
, 0)) == UNSPEC
1717 && nios2_large_offset_p (XINT (XEXP (x
, 0), 1)))
1720 return ! (GET_CODE (x
) == SYMBOL_REF
1721 || GET_CODE (x
) == LABEL_REF
|| GET_CODE (x
) == CONST
);
1724 /* Return TRUE if X is a thread-local symbol. */
1726 nios2_tls_symbol_p (rtx x
)
1728 return (targetm
.have_tls
&& GET_CODE (x
) == SYMBOL_REF
1729 && SYMBOL_REF_TLS_MODEL (x
) != 0);
1732 /* Legitimize addresses that are CONSTANT_P expressions. */
1734 nios2_legitimize_constant_address (rtx addr
)
1737 split_const (addr
, &base
, &offset
);
1739 if (nios2_tls_symbol_p (base
))
1740 base
= nios2_legitimize_tls_address (base
);
1742 base
= nios2_load_pic_address (base
, UNSPEC_PIC_SYM
);
1746 if (offset
!= const0_rtx
)
1748 gcc_assert (can_create_pseudo_p ());
1749 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, base
),
1750 (CONST_INT_P (offset
)
1751 ? (SMALL_INT (INTVAL (offset
))
1752 ? offset
: force_reg (Pmode
, offset
))
1758 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
1760 nios2_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
1761 enum machine_mode mode ATTRIBUTE_UNUSED
)
1764 return nios2_legitimize_constant_address (x
);
1766 /* For the TLS LE (Local Exec) model, the compiler may try to
1767 combine constant offsets with unspec relocs, creating address RTXs
1769 (plus:SI (reg:SI 23 r23)
1772 (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
1773 (const_int 48 [0x30]))))
1775 This usually happens when 'var' is a thread-local struct variable,
1776 and access of a field in var causes the addend.
1778 We typically want this combining, so transform the above into this
1779 form, which is allowed:
1780 (plus:SI (reg:SI 23 r23)
1784 (plus:SI (symbol_ref:SI ("var"))
1785 (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))
1787 Which will be output as '%tls_le(var+48)(r23)' in assembly. */
1788 if (GET_CODE (x
) == PLUS
1789 && GET_CODE (XEXP (x
, 0)) == REG
1790 && GET_CODE (XEXP (x
, 1)) == CONST
)
1792 rtx unspec
, offset
, reg
= XEXP (x
, 0);
1793 split_const (XEXP (x
, 1), &unspec
, &offset
);
1794 if (GET_CODE (unspec
) == UNSPEC
1795 && !nios2_large_offset_p (XINT (unspec
, 1))
1796 && offset
!= const0_rtx
)
1798 unspec
= copy_rtx (unspec
);
1799 XVECEXP (unspec
, 0, 0)
1800 = plus_constant (Pmode
, XVECEXP (unspec
, 0, 0), INTVAL (offset
));
1801 x
= gen_rtx_PLUS (Pmode
, reg
, gen_rtx_CONST (Pmode
, unspec
));
1808 /* Main expander function for RTL moves. */
1810 nios2_emit_move_sequence (rtx
*operands
, enum machine_mode mode
)
1812 rtx to
= operands
[0];
1813 rtx from
= operands
[1];
1815 if (!register_operand (to
, mode
) && !reg_or_0_operand (from
, mode
))
1817 gcc_assert (can_create_pseudo_p ());
1818 from
= copy_to_mode_reg (mode
, from
);
1821 if (GET_CODE (from
) == SYMBOL_REF
|| GET_CODE (from
) == LABEL_REF
1822 || (GET_CODE (from
) == CONST
1823 && GET_CODE (XEXP (from
, 0)) != UNSPEC
))
1824 from
= nios2_legitimize_constant_address (from
);
1831 /* The function with address *ADDR is being called. If the address
1832 needs to be loaded from the GOT, emit the instruction to do so and
1833 update *ADDR to point to the rtx for the loaded value. */
1835 nios2_adjust_call_address (rtx
*call_op
)
1838 gcc_assert (MEM_P (*call_op
));
1839 addr
= XEXP (*call_op
, 0);
1840 if (flag_pic
&& CONSTANT_P (addr
))
1842 rtx reg
= gen_reg_rtx (Pmode
);
1843 emit_move_insn (reg
, nios2_load_pic_address (addr
, UNSPEC_PIC_CALL_SYM
));
1844 XEXP (*call_op
, 0) = reg
;
1849 /* Output assembly language related definitions. */
1851 /* Print the operand OP to file stream FILE modified by LETTER.
1852 LETTER can be one of:
1854 i: print "i" if OP is an immediate, except 0
1855 o: print "io" if OP is volatile
1856 z: for const0_rtx print $0 instead of 0
1859 U: for upper half of 32 bit value
1860 D: for the upper 32-bits of a 64-bit double value
1861 R: prints reverse condition.
1864 nios2_print_operand (FILE *file
, rtx op
, int letter
)
1870 if (CONSTANT_P (op
) && op
!= const0_rtx
)
1871 fprintf (file
, "i");
1875 if (GET_CODE (op
) == MEM
1876 && ((MEM_VOLATILE_P (op
) && TARGET_BYPASS_CACHE_VOLATILE
)
1877 || TARGET_BYPASS_CACHE
))
1878 fprintf (file
, "io");
1885 if (comparison_operator (op
, VOIDmode
))
1887 enum rtx_code cond
= GET_CODE (op
);
1890 fprintf (file
, "%s", GET_RTX_NAME (cond
));
1895 fprintf (file
, "%s", GET_RTX_NAME (reverse_condition (cond
)));
1900 switch (GET_CODE (op
))
1903 if (letter
== 0 || letter
== 'z')
1905 fprintf (file
, "%s", reg_names
[REGNO (op
)]);
1908 else if (letter
== 'D')
1910 fprintf (file
, "%s", reg_names
[REGNO (op
)+1]);
1916 if (INTVAL (op
) == 0 && letter
== 'z')
1918 fprintf (file
, "zero");
1924 HOST_WIDE_INT val
= INTVAL (op
);
1925 val
= (val
>> 16) & 0xFFFF;
1926 output_addr_const (file
, gen_int_mode (val
, SImode
));
1929 /* Else, fall through. */
1935 if (letter
== 0 || letter
== 'z')
1937 output_addr_const (file
, op
);
1940 else if (letter
== 'H' || letter
== 'L')
1942 fprintf (file
, "%%");
1943 if (GET_CODE (op
) == CONST
1944 && GET_CODE (XEXP (op
, 0)) == UNSPEC
)
1946 rtx unspec
= XEXP (op
, 0);
1947 int unspec_reloc
= XINT (unspec
, 1);
1948 gcc_assert (nios2_large_offset_p (unspec_reloc
));
1949 fprintf (file
, "%s_", nios2_unspec_reloc_name (unspec_reloc
));
1950 op
= XVECEXP (unspec
, 0, 0);
1952 fprintf (file
, letter
== 'H' ? "hiadj(" : "lo(");
1953 output_addr_const (file
, op
);
1954 fprintf (file
, ")");
1963 output_address (op
);
1971 output_addr_const (file
, op
);
1980 output_operand_lossage ("Unsupported operand for code '%c'", letter
);
1984 /* Return true if this is a GP-relative accessible reference. */
1986 gprel_constant_p (rtx op
)
1988 if (GET_CODE (op
) == SYMBOL_REF
1989 && nios2_symbol_ref_in_small_data_p (op
))
1991 else if (GET_CODE (op
) == CONST
1992 && GET_CODE (XEXP (op
, 0)) == PLUS
)
1993 return gprel_constant_p (XEXP (XEXP (op
, 0), 0));
1998 /* Return the name string for a supported unspec reloc offset. */
2000 nios2_unspec_reloc_name (int unspec
)
2004 case UNSPEC_PIC_SYM
:
2006 case UNSPEC_PIC_CALL_SYM
:
2008 case UNSPEC_PIC_GOTOFF_SYM
:
2010 case UNSPEC_LOAD_TLS_IE
:
2012 case UNSPEC_ADD_TLS_LE
:
2014 case UNSPEC_ADD_TLS_GD
:
2016 case UNSPEC_ADD_TLS_LDM
:
2018 case UNSPEC_ADD_TLS_LDO
:
2025 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
2027 nios2_output_addr_const_extra (FILE *file
, rtx op
)
2030 gcc_assert (GET_CODE (op
) == UNSPEC
);
2032 /* Support for printing out const unspec relocations. */
2033 name
= nios2_unspec_reloc_name (XINT (op
, 1));
2036 fprintf (file
, "%%%s(", name
);
2037 output_addr_const (file
, XVECEXP (op
, 0, 0));
2038 fprintf (file
, ")");
2044 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
2046 nios2_print_operand_address (FILE *file
, rtx op
)
2048 switch (GET_CODE (op
))
2055 if (gprel_constant_p (op
))
2057 fprintf (file
, "%%gprel(");
2058 output_addr_const (file
, op
);
2059 fprintf (file
, ")(%s)", reg_names
[GP_REGNO
]);
2067 rtx op0
= XEXP (op
, 0);
2068 rtx op1
= XEXP (op
, 1);
2070 if (REG_P (op0
) && CONSTANT_P (op1
))
2072 output_addr_const (file
, op1
);
2073 fprintf (file
, "(%s)", reg_names
[REGNO (op0
)]);
2076 else if (REG_P (op1
) && CONSTANT_P (op0
))
2078 output_addr_const (file
, op0
);
2079 fprintf (file
, "(%s)", reg_names
[REGNO (op1
)]);
2086 fprintf (file
, "0(%s)", reg_names
[REGNO (op
)]);
2091 rtx base
= XEXP (op
, 0);
2092 nios2_print_operand_address (file
, base
);
2099 fprintf (stderr
, "Missing way to print address\n");
2104 /* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL. */
2106 nios2_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
2108 gcc_assert (size
== 4);
2109 fprintf (file
, "\t.4byte\t%%tls_ldo(");
2110 output_addr_const (file
, x
);
2111 fprintf (file
, ")");
2114 /* Implement TARGET_ASM_FUNCTION_PROLOGUE. */
2116 nios2_asm_function_prologue (FILE *file
, HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
2118 if (flag_verbose_asm
|| flag_debug_asm
)
2120 nios2_compute_frame_layout ();
2121 nios2_dump_frame_layout (file
);
2125 /* Emit assembly of custom FPU instructions. */
2127 nios2_fpu_insn_asm (enum n2fpu_code code
)
2129 static char buf
[256];
2130 const char *op1
, *op2
, *op3
;
2131 int ln
= 256, n
= 0;
2133 int N
= N2FPU_N (code
);
2134 int num_operands
= N2FPU (code
).num_operands
;
2135 const char *insn_name
= N2FPU_NAME (code
);
2136 tree ftype
= nios2_ftype (N2FPU_FTCODE (code
));
2137 enum machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (ftype
));
2138 enum machine_mode src_mode
= TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype
)));
2140 /* Prepare X register for DF input operands. */
2141 if (GET_MODE_SIZE (src_mode
) == 8 && num_operands
== 3)
2142 n
= snprintf (buf
, ln
, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
2143 N2FPU_N (n2fpu_fwrx
));
2145 if (src_mode
== SFmode
)
2147 if (dst_mode
== VOIDmode
)
2149 /* The fwry case. */
2156 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2158 op3
= (num_operands
== 2 ? "zero" : "%2");
2161 else if (src_mode
== DFmode
)
2163 if (dst_mode
== VOIDmode
)
2165 /* The fwrx case. */
2173 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2174 op2
= (num_operands
== 2 ? "%1" : "%2");
2175 op3
= (num_operands
== 2 ? "%D1" : "%D2");
2178 else if (src_mode
== VOIDmode
)
2180 /* frdxlo, frdxhi, frdy cases. */
2181 gcc_assert (dst_mode
== SFmode
);
2185 else if (src_mode
== SImode
)
2187 /* Conversion operators. */
2188 gcc_assert (num_operands
== 2);
2189 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2196 /* Main instruction string. */
2197 n
+= snprintf (buf
+ n
, ln
- n
, "custom\t%d, %s, %s, %s # %s %%0%s%s",
2198 N
, op1
, op2
, op3
, insn_name
,
2199 (num_operands
>= 2 ? ", %1" : ""),
2200 (num_operands
== 3 ? ", %2" : ""));
2202 /* Extraction of Y register for DF results. */
2203 if (dst_mode
== DFmode
)
2204 snprintf (buf
+ n
, ln
- n
, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
2205 N2FPU_N (n2fpu_frdy
));
2211 /* Function argument related. */
2213 /* Define where to put the arguments to a function. Value is zero to
2214 push the argument on the stack, or a hard register in which to
2217 MODE is the argument's machine mode.
2218 TYPE is the data type of the argument (as a tree).
2219 This is null for libcalls where that information may
2221 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2222 the preceding args and about the function being called.
2223 NAMED is nonzero if this argument is a named parameter
2224 (otherwise it is an extra parameter matching an ellipsis). */
2227 nios2_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2228 const_tree type ATTRIBUTE_UNUSED
,
2229 bool named ATTRIBUTE_UNUSED
)
2231 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2232 rtx return_rtx
= NULL_RTX
;
2234 if (cum
->regs_used
< NUM_ARG_REGS
)
2235 return_rtx
= gen_rtx_REG (mode
, FIRST_ARG_REGNO
+ cum
->regs_used
);
2240 /* Return number of bytes, at the beginning of the argument, that must be
2241 put in registers. 0 is the argument is entirely in registers or entirely
2245 nios2_arg_partial_bytes (cumulative_args_t cum_v
,
2246 enum machine_mode mode
, tree type ATTRIBUTE_UNUSED
,
2247 bool named ATTRIBUTE_UNUSED
)
2249 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2250 HOST_WIDE_INT param_size
;
2252 if (mode
== BLKmode
)
2254 param_size
= int_size_in_bytes (type
);
2255 gcc_assert (param_size
>= 0);
2258 param_size
= GET_MODE_SIZE (mode
);
2260 /* Convert to words (round up). */
2261 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
2263 if (cum
->regs_used
< NUM_ARG_REGS
2264 && cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
2265 return (NUM_ARG_REGS
- cum
->regs_used
) * UNITS_PER_WORD
;
2270 /* Update the data in CUM to advance over an argument of mode MODE
2271 and data type TYPE; TYPE is null for libcalls where that information
2272 may not be available. */
2275 nios2_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2276 const_tree type ATTRIBUTE_UNUSED
,
2277 bool named ATTRIBUTE_UNUSED
)
2279 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2280 HOST_WIDE_INT param_size
;
2282 if (mode
== BLKmode
)
2284 param_size
= int_size_in_bytes (type
);
2285 gcc_assert (param_size
>= 0);
2288 param_size
= GET_MODE_SIZE (mode
);
2290 /* Convert to words (round up). */
2291 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
2293 if (cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
2294 cum
->regs_used
= NUM_ARG_REGS
;
2296 cum
->regs_used
+= param_size
;
2300 nios2_function_arg_padding (enum machine_mode mode
, const_tree type
)
2302 /* On little-endian targets, the first byte of every stack argument
2303 is passed in the first byte of the stack slot. */
2304 if (!BYTES_BIG_ENDIAN
)
2307 /* Otherwise, integral types are padded downward: the last byte of a
2308 stack argument is passed in the last byte of the stack slot. */
2310 ? INTEGRAL_TYPE_P (type
) || POINTER_TYPE_P (type
)
2311 : GET_MODE_CLASS (mode
) == MODE_INT
)
2314 /* Arguments smaller than a stack slot are padded downward. */
2315 if (mode
!= BLKmode
)
2316 return (GET_MODE_BITSIZE (mode
) >= PARM_BOUNDARY
) ? upward
: downward
;
2318 return ((int_size_in_bytes (type
) >= (PARM_BOUNDARY
/ BITS_PER_UNIT
))
2319 ? upward
: downward
);
2323 nios2_block_reg_padding (enum machine_mode mode
, tree type
,
2324 int first ATTRIBUTE_UNUSED
)
2326 return nios2_function_arg_padding (mode
, type
);
2329 /* Emit RTL insns to initialize the variable parts of a trampoline.
2330 FNADDR is an RTX for the address of the function's pure code.
2331 CXT is an RTX for the static chain value for the function.
2332 On Nios II, we handle this by a library call. */
2334 nios2_trampoline_init (rtx m_tramp
, tree fndecl
, rtx cxt
)
2336 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
2337 rtx ctx_reg
= force_reg (Pmode
, cxt
);
2338 rtx addr
= force_reg (Pmode
, XEXP (m_tramp
, 0));
2340 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__trampoline_setup"),
2341 LCT_NORMAL
, VOIDmode
, 3, addr
, Pmode
, fnaddr
, Pmode
,
2345 /* Implement TARGET_FUNCTION_VALUE. */
2347 nios2_function_value (const_tree ret_type
, const_tree fn ATTRIBUTE_UNUSED
,
2348 bool outgoing ATTRIBUTE_UNUSED
)
2350 return gen_rtx_REG (TYPE_MODE (ret_type
), FIRST_RETVAL_REGNO
);
2353 /* Implement TARGET_LIBCALL_VALUE. */
2355 nios2_libcall_value (enum machine_mode mode
, const_rtx fun ATTRIBUTE_UNUSED
)
2357 return gen_rtx_REG (mode
, FIRST_RETVAL_REGNO
);
2360 /* Implement TARGET_FUNCTION_VALUE_REGNO_P. */
2362 nios2_function_value_regno_p (const unsigned int regno
)
2364 return regno
== FIRST_RETVAL_REGNO
;
2367 /* Implement TARGET_RETURN_IN_MEMORY. */
2369 nios2_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2371 return (int_size_in_bytes (type
) > (2 * UNITS_PER_WORD
)
2372 || int_size_in_bytes (type
) == -1);
2375 /* TODO: It may be possible to eliminate the copyback and implement
2378 nios2_setup_incoming_varargs (cumulative_args_t cum_v
,
2379 enum machine_mode mode
, tree type
,
2380 int *pretend_size
, int second_time
)
2382 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2383 CUMULATIVE_ARGS local_cum
;
2384 cumulative_args_t local_cum_v
= pack_cumulative_args (&local_cum
);
2389 nios2_function_arg_advance (local_cum_v
, mode
, type
, 1);
2391 regs_to_push
= NUM_ARG_REGS
- local_cum
.regs_used
;
2393 if (!second_time
&& regs_to_push
> 0)
2395 rtx ptr
= virtual_incoming_args_rtx
;
2396 rtx mem
= gen_rtx_MEM (BLKmode
, ptr
);
2397 emit_insn (gen_blockage ());
2398 move_block_from_reg (local_cum
.regs_used
+ FIRST_ARG_REGNO
, mem
,
2400 emit_insn (gen_blockage ());
2403 pret_size
= regs_to_push
* UNITS_PER_WORD
;
2405 *pretend_size
= pret_size
;
2410 /* Init FPU builtins. */
2412 nios2_init_fpu_builtins (int start_code
)
2415 char builtin_name
[64] = "__builtin_custom_";
2416 unsigned int i
, n
= strlen ("__builtin_custom_");
2418 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
2420 snprintf (builtin_name
+ n
, sizeof (builtin_name
) - n
,
2421 "%s", N2FPU_NAME (i
));
2423 add_builtin_function (builtin_name
, nios2_ftype (N2FPU_FTCODE (i
)),
2424 start_code
+ i
, BUILT_IN_MD
, NULL
, NULL_TREE
);
2425 nios2_register_builtin_fndecl (start_code
+ i
, fndecl
);
2429 /* Helper function for expanding FPU builtins. */
2431 nios2_expand_fpu_builtin (tree exp
, unsigned int code
, rtx target
)
2433 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
2434 enum insn_code icode
= N2FPU_ICODE (code
);
2435 int nargs
, argno
, opno
= 0;
2436 int num_operands
= N2FPU (code
).num_operands
;
2437 enum machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (exp
));
2438 bool has_target_p
= (dst_mode
!= VOIDmode
);
2440 if (N2FPU_N (code
) < 0)
2441 fatal_error ("Cannot call %<__builtin_custom_%s%> without specifying switch"
2442 " %<-mcustom-%s%>", N2FPU_NAME (code
), N2FPU_NAME (code
));
2444 create_output_operand (&ops
[opno
++], target
, dst_mode
);
2446 /* Subtract away the count of the VOID return, mainly for fwrx/fwry. */
2448 nargs
= call_expr_nargs (exp
);
2449 for (argno
= 0; argno
< nargs
; argno
++)
2451 tree arg
= CALL_EXPR_ARG (exp
, argno
);
2452 create_input_operand (&ops
[opno
++], expand_normal (arg
),
2453 TYPE_MODE (TREE_TYPE (arg
)));
2455 if (!maybe_expand_insn (icode
, num_operands
, ops
))
2457 error ("invalid argument to built-in function");
2458 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
2460 return has_target_p
? ops
[0].value
: const0_rtx
;
2463 /* Nios II has custom instruction built-in functions of the forms:
2466 __builtin_custom_nXX
2468 __builtin_custom_XnX
2469 __builtin_custom_XnXX
2471 where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
2472 Therefore with 0-1 return values, and 0-2 arguments, we have a
2473 total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.
2475 #define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))
2476 static char custom_builtin_name
[NUM_CUSTOM_BUILTINS
][5];
/* Create and register the __builtin_custom_XnXX family of builtins,
   assigning them function codes starting at START_CODE.
   NOTE(review): this extract is garbled and some original lines are
   missing; comments describe only the code visible here.  */
2479 nios2_init_custom_builtins (int start_code
)
2481 tree builtin_ftype
, ret_type
, fndecl
;
/* Name buffer; the fixed "__builtin_custom_" prefix is filled in once and
   the per-signature suffix is rewritten at offset N each iteration.  */
2482 char builtin_name
[32] = "__builtin_custom_";
2483 int n
= strlen ("__builtin_custom_");
2484 int builtin_code
= 0;
/* Indices into OP[] for the result (lhs) and the two argument slots.  */
2485 int lhs
, rhs1
, rhs2
;
/* Table of the four possible operand kinds: absent, float, int, pointer.
   C holds the letter used in the builtin's name; TYPE its tree type
   (NULL_TREE for "absent").  */
2487 struct { tree type
; const char *c
; } op
[4];
2488 /* z */ op
[0].c
= ""; op
[0].type
= NULL_TREE
;
2489 /* f */ op
[1].c
= "f"; op
[1].type
= float_type_node
;
2490 /* i */ op
[2].c
= "i"; op
[2].type
= integer_type_node
;
2491 /* p */ op
[3].c
= "p"; op
[3].type
= ptr_type_node
;
2493 /* We enumerate through the possible operand types to create all the
2494 __builtin_custom_XnXX function tree types. Note that these may slightly
2495 overlap with the function types created for other fixed builtins. */
2497 for (lhs
= 0; lhs
< 4; lhs
++)
2498 for (rhs1
= 0; rhs1
< 4; rhs1
++)
2499 for (rhs2
= 0; rhs2
< 4; rhs2
++)
/* A second argument without a first is not a valid signature.  */
2501 if (rhs1
== 0 && rhs2
!= 0)
/* Builtins with no result slot return void.  */
2503 ret_type
= (op
[lhs
].type
? op
[lhs
].type
: void_type_node
);
/* First parameter is always the integer opcode 'n'.  */
2505 = build_function_type_list (ret_type
, integer_type_node
,
2506 op
[rhs1
].type
, op
[rhs2
].type
,
/* Compose the name suffix, e.g. "ini" or "fnff".  */
2508 snprintf (builtin_name
+ n
, 32 - n
, "%sn%s%s",
2509 op
[lhs
].c
, op
[rhs1
].c
, op
[rhs2
].c
);
2510 /* Save copy of parameter string into custom_builtin_name[]. */
2511 strncpy (custom_builtin_name
[builtin_code
], builtin_name
+ n
, 5);
2513 add_builtin_function (builtin_name
, builtin_ftype
,
2514 start_code
+ builtin_code
,
2515 BUILT_IN_MD
, NULL
, NULL_TREE
);
/* Remember the fndecl so TARGET_BUILTIN_DECL can find it later.  */
2516 nios2_register_builtin_fndecl (start_code
+ builtin_code
, fndecl
);
2521 /* Helper function for expanding custom builtins. */
/* Expand __builtin_custom_XnXX call EXP (signature INDEX into
   custom_builtin_name[]) into an UNSPEC_VOLATILE custom insn, storing a
   produced value into TARGET.  Returns TARGET or const0_rtx for the void
   forms.  NOTE(review): this extract is garbled and some original lines
   are missing; comments describe only the code visible here.  */
2523 nios2_expand_custom_builtin (tree exp
, unsigned int index
, rtx target
)
/* Non-void return type means the XnXX (value-producing) form.  */
2525 bool has_target_p
= (TREE_TYPE (exp
) != void_type_node
);
2526 enum machine_mode tmode
= VOIDmode
;
/* The custom insn always takes exactly three unspec operands; unused
   slots are zero-filled below.  */
2528 rtx value
, insn
, unspec_args
[3];
/* Make sure TARGET exists and has the result's mode.  */
2534 tmode
= TYPE_MODE (TREE_TYPE (exp
));
2535 if (!target
|| GET_MODE (target
) != tmode
2537 target
= gen_reg_rtx (tmode
);
/* Expand each call argument into unspec_args[].  */
2540 nargs
= call_expr_nargs (exp
);
2541 for (argno
= 0; argno
< nargs
; argno
++)
2543 arg
= CALL_EXPR_ARG (exp
, argno
);
2544 value
= expand_normal (arg
);
2545 unspec_args
[argno
] = value
;
/* The opcode argument must be a compile-time constant in 0-255 so it can
   be encoded directly in the custom instruction.  */
2548 if (!custom_insn_opcode (value
, VOIDmode
))
2549 error ("custom instruction opcode must be compile time "
2550 "constant in the range 0-255 for __builtin_custom_%s",
2551 custom_builtin_name
[index
]);
2554 /* For other arguments, force into a register. */
2555 unspec_args
[argno
] = force_reg (TYPE_MODE (TREE_TYPE (arg
)),
2556 unspec_args
[argno
]);
2558 /* Fill remaining unspec operands with zero. */
2559 for (; argno
< 3; argno
++)
2560 unspec_args
[argno
] = const0_rtx
;
/* Value-producing forms wrap the unspec in a SET of TARGET; void forms
   emit the bare UNSPEC_VOLATILE.  */
2562 insn
= (has_target_p
2563 ? gen_rtx_SET (VOIDmode
, target
,
2564 gen_rtx_UNSPEC_VOLATILE (tmode
,
2565 gen_rtvec_v (3, unspec_args
),
2566 UNSPECV_CUSTOM_XNXX
))
2567 : gen_rtx_UNSPEC_VOLATILE (VOIDmode
, gen_rtvec_v (3, unspec_args
),
2568 UNSPECV_CUSTOM_NXX
));
2570 return has_target_p
? target
: const0_rtx
;
2576 /* Main definition of built-in functions. Nios II has a small number of fixed
2577 builtins, plus a large number of FPU insn builtins, and builtins for
2578 generating custom instructions. */
/* Descriptor for one fixed builtin: its insn pattern, its function-type
   code, and (elsewhere in the initializer) its source-level name.  */
2580 struct nios2_builtin_desc
2582 enum insn_code icode
;
2583 enum nios2_ftcode ftype
;
/* X-macro list of the fixed builtins: sync, I/O-variant loads/stores,
   and control-register access.  Expanded twice below, once for the enum
   and once for the descriptor table.  */
2587 #define N2_BUILTINS \
2588 N2_BUILTIN_DEF (sync, N2_FTYPE_VOID_VOID) \
2589 N2_BUILTIN_DEF (ldbio, N2_FTYPE_SI_CVPTR) \
2590 N2_BUILTIN_DEF (ldbuio, N2_FTYPE_UI_CVPTR) \
2591 N2_BUILTIN_DEF (ldhio, N2_FTYPE_SI_CVPTR) \
2592 N2_BUILTIN_DEF (ldhuio, N2_FTYPE_UI_CVPTR) \
2593 N2_BUILTIN_DEF (ldwio, N2_FTYPE_SI_CVPTR) \
2594 N2_BUILTIN_DEF (stbio, N2_FTYPE_VOID_VPTR_SI) \
2595 N2_BUILTIN_DEF (sthio, N2_FTYPE_VOID_VPTR_SI) \
2596 N2_BUILTIN_DEF (stwio, N2_FTYPE_VOID_VPTR_SI) \
2597 N2_BUILTIN_DEF (rdctl, N2_FTYPE_SI_SI) \
2598 N2_BUILTIN_DEF (wrctl, N2_FTYPE_VOID_SI_SI)
/* Function codes NIOS2_BUILTIN_<name> for the fixed builtins.  */
2600 enum nios2_builtin_code
{
2601 #define N2_BUILTIN_DEF(name, ftype) NIOS2_BUILTIN_ ## name,
2603 #undef N2_BUILTIN_DEF
2604 NUM_FIXED_NIOS2_BUILTINS
/* Descriptor table for the fixed builtins, indexed by function code.  */
2607 static const struct nios2_builtin_desc nios2_builtins
[] = {
2608 #define N2_BUILTIN_DEF(name, ftype) \
2609 { CODE_FOR_ ## name, ftype, "__builtin_" #name },
2611 #undef N2_BUILTIN_DEF
2614 /* Start/ends of FPU/custom insn builtin index ranges. */
2615 static unsigned int nios2_fpu_builtin_base
;
2616 static unsigned int nios2_custom_builtin_base
;
2617 static unsigned int nios2_custom_builtin_end
;
2619 /* Implement TARGET_INIT_BUILTINS. */
/* Register all Nios II builtins: the fixed ones first (codes 0..), then
   the FPU builtins, then the custom-insn builtins, recording the base of
   each index range.  NOTE(review): this extract is garbled and some
   original lines are missing; comments describe only the visible code.  */
2621 nios2_init_builtins (void)
2625 /* Initialize fixed builtins. */
2626 for (i
= 0; i
< ARRAY_SIZE (nios2_builtins
); i
++)
2628 const struct nios2_builtin_desc
*d
= &nios2_builtins
[i
];
2630 add_builtin_function (d
->name
, nios2_ftype (d
->ftype
), i
,
2631 BUILT_IN_MD
, NULL
, NULL
);
2632 nios2_register_builtin_fndecl (i
, fndecl
);
2635 /* Initialize FPU builtins. */
/* FPU builtin codes start right after the fixed ones.  */
2636 nios2_fpu_builtin_base
= ARRAY_SIZE (nios2_builtins
);
2637 nios2_init_fpu_builtins (nios2_fpu_builtin_base
);
2639 /* Initialize custom insn builtins. */
/* Custom-insn builtin codes follow the FPU range; record the end so
   TARGET_BUILTIN_DECL can range-check codes.  */
2640 nios2_custom_builtin_base
2641 = nios2_fpu_builtin_base
+ ARRAY_SIZE (nios2_fpu_insn
);
2642 nios2_custom_builtin_end
2643 = nios2_custom_builtin_base
+ NUM_CUSTOM_BUILTINS
;
2644 nios2_init_custom_builtins (nios2_custom_builtin_base
);
2647 /* Array of fndecls for TARGET_BUILTIN_DECL. */
2648 #define NIOS2_NUM_BUILTINS \
2649 (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
2650 static GTY(()) tree nios2_builtin_decls
[NIOS2_NUM_BUILTINS
];
/* Record FNDECL as the declaration for builtin function CODE so that
   nios2_builtin_decl can retrieve it later.  */
2653 nios2_register_builtin_fndecl (unsigned code
, tree fndecl
)
2655 nios2_builtin_decls
[code
] = fndecl
;
2658 /* Implement TARGET_BUILTIN_DECL. */
2660 nios2_builtin_decl (unsigned code
, bool initialize_p ATTRIBUTE_UNUSED
)
2662 gcc_assert (nios2_custom_builtin_end
== ARRAY_SIZE (nios2_builtin_decls
));
2664 if (code
>= nios2_custom_builtin_end
)
2665 return error_mark_node
;
2667 if (code
>= nios2_fpu_builtin_base
2668 && code
< nios2_custom_builtin_base
2669 && ! N2FPU_ENABLED_P (code
- nios2_fpu_builtin_base
))
2670 return error_mark_node
;
2672 return nios2_builtin_decls
[code
];
2676 /* Low-level built-in expand routine. */
2678 nios2_expand_builtin_insn (const struct nios2_builtin_desc
*d
, int n
,
2679 struct expand_operand
*ops
, bool has_target_p
)
2681 if (maybe_expand_insn (d
->icode
, n
, ops
))
2682 return has_target_p
? ops
[0].value
: const0_rtx
;
2685 error ("invalid argument to built-in function %s", d
->name
);
2686 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
2690 /* Expand ldio/stio form load-store instruction builtins. */
/* Expand one of the __builtin_{ld,st}{b,h,w}[u]io builtins described by D
   for call EXP, producing the loaded value in TARGET for the load forms.
   NOTE(review): this extract is garbled and some original lines are
   missing; comments describe only the code visible here.  */
2692 nios2_expand_ldstio_builtin (tree exp
, rtx target
,
2693 const struct nios2_builtin_desc
*d
)
2697 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
/* The access mode comes from the insn pattern's operand 0.  */
2698 enum machine_mode mode
= insn_data
[d
->icode
].operand
[0].mode
;
/* First argument is the address; build the MEM to access.  */
2700 addr
= expand_normal (CALL_EXPR_ARG (exp
, 0));
2701 mem
= gen_rtx_MEM (mode
, addr
);
/* Operand 0 allowing a MEM distinguishes the store patterns.  */
2703 if (insn_data
[d
->icode
].operand
[0].allows_mem
)
/* Store form: second argument is the value; normalize constants and
   narrow to the access mode.  */
2706 val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
2707 if (CONST_INT_P (val
))
2708 val
= force_reg (mode
, gen_int_mode (INTVAL (val
), mode
));
2709 val
= simplify_gen_subreg (mode
, val
, GET_MODE (val
), 0);
2710 create_output_operand (&ops
[0], mem
, mode
);
2711 create_input_operand (&ops
[1], val
, mode
);
2712 has_target_p
= false;
/* Load form: MEM is the input, TARGET receives the value.  */
2717 create_output_operand (&ops
[0], target
, mode
);
2718 create_input_operand (&ops
[1], mem
, mode
);
2719 has_target_p
= true;
2721 return nios2_expand_builtin_insn (d
, 2, ops
, has_target_p
);
2724 /* Expand rdctl/wrctl builtins. */
/* Expand __builtin_rdctl or __builtin_wrctl described by D for call EXP.
   NOTE(review): this extract is garbled and some original lines are
   missing; comments describe only the code visible here.  */
2726 nios2_expand_rdwrctl_builtin (tree exp
, rtx target
,
2727 const struct nios2_builtin_desc
*d
)
/* rdctl's operand 0 is a register (it produces a value); wrctl's is not.  */
2729 bool has_target_p
= (insn_data
[d
->icode
].operand
[0].predicate
2730 == register_operand
);
/* First argument selects the control register; it must be a
   compile-time constant in 0-31.  */
2731 rtx ctlcode
= expand_normal (CALL_EXPR_ARG (exp
, 0));
2732 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
2733 if (!rdwrctl_operand (ctlcode
, VOIDmode
))
2735 error ("Control register number must be in range 0-31 for %s",
/* Diagnosed: return a placeholder of the right shape.  */
2737 return has_target_p
? gen_reg_rtx (SImode
) : const0_rtx
;
/* rdctl: TARGET receives the control register's value.  */
2741 create_output_operand (&ops
[0], target
, SImode
);
2742 create_integer_operand (&ops
[1], INTVAL (ctlcode
));
/* wrctl: second argument is the value to store.  */
2746 rtx val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
2747 create_integer_operand (&ops
[0], INTVAL (ctlcode
));
2748 create_input_operand (&ops
[1], val
, SImode
);
2750 return nios2_expand_builtin_insn (d
, 2, ops
, has_target_p
);
2753 /* Implement TARGET_EXPAND_BUILTIN. Expand an expression EXP that calls
2754 a built-in function, with result going to TARGET if that's convenient
2755 (and in mode MODE if that's convenient).
2756 SUBTARGET may be used as the target for computing one of EXP's operands.
2757 IGNORE is nonzero if the value is to be ignored. */
/* Dispatch on the builtin's function code: fixed builtins first, then the
   FPU range, then the custom-insn range (see nios2_init_builtins for the
   range layout).  NOTE(review): this extract is garbled and some original
   lines (including the switch skeleton) are missing; comments describe
   only the code visible here.  */
2760 nios2_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
2761 enum machine_mode mode ATTRIBUTE_UNUSED
,
2762 int ignore ATTRIBUTE_UNUSED
)
2764 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
2765 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
/* Fixed builtin range: look up its descriptor and dispatch by code.  */
2767 if (fcode
< nios2_fpu_builtin_base
)
2769 const struct nios2_builtin_desc
*d
= &nios2_builtins
[fcode
];
2773 case NIOS2_BUILTIN_sync
:
2774 emit_insn (gen_sync ());
2777 case NIOS2_BUILTIN_ldbio
:
2778 case NIOS2_BUILTIN_ldbuio
:
2779 case NIOS2_BUILTIN_ldhio
:
2780 case NIOS2_BUILTIN_ldhuio
:
2781 case NIOS2_BUILTIN_ldwio
:
2782 case NIOS2_BUILTIN_stbio
:
2783 case NIOS2_BUILTIN_sthio
:
2784 case NIOS2_BUILTIN_stwio
:
2785 return nios2_expand_ldstio_builtin (exp
, target
, d
);
2787 case NIOS2_BUILTIN_rdctl
:
2788 case NIOS2_BUILTIN_wrctl
:
2789 return nios2_expand_rdwrctl_builtin (exp
, target
, d
);
2795 else if (fcode
< nios2_custom_builtin_base
)
2796 /* FPU builtin range. */
2797 return nios2_expand_fpu_builtin (exp
, fcode
- nios2_fpu_builtin_base
,
2799 else if (fcode
< nios2_custom_builtin_end
)
2800 /* Custom insn builtin range. */
2801 return nios2_expand_custom_builtin (exp
, fcode
- nios2_custom_builtin_base
,
2807 /* Implement TARGET_INIT_LIBFUNCS. */
2809 nios2_init_libfuncs (void)
2811 /* For Linux, we have access to kernel support for atomic operations. */
2812 if (TARGET_LINUX_ABI
)
2813 init_sync_libfuncs (UNITS_PER_WORD
);
2818 /* Register a custom code use, and signal error if a conflict was found. */
/* Record that custom instruction number N is used with the given STATUS
   (FPU switch vs. explicit __builtin_custom call), diagnosing conflicting
   uses of the same N.  NOTE(review): this extract is garbled and some
   original lines are missing; comments describe only the visible code.  */
2820 nios2_register_custom_code (unsigned int N
, enum nios2_ccs_code status
,
/* Custom instruction numbers are 8-bit.  */
2823 gcc_assert (N
<= 255);
/* New use comes from an -mcustom-<insn> switch.  */
2825 if (status
== CCS_FPU
)
/* Same N already claimed by a different FPU insn switch.  */
2827 if (custom_code_status
[N
] == CCS_FPU
&& index
!= custom_code_index
[N
])
2829 custom_code_conflict
= true;
2830 error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
2831 N2FPU_NAME (custom_code_index
[N
]), N2FPU_NAME (index
));
/* Same N already claimed by a __builtin_custom call.  */
2833 else if (custom_code_status
[N
] == CCS_BUILTIN_CALL
)
2835 custom_code_conflict
= true;
2836 error ("call to %<__builtin_custom_%s%> conflicts with switch "
2837 "%<-mcustom-%s%>", custom_builtin_name
[custom_code_index
[N
]],
2838 N2FPU_NAME (index
));
/* New use comes from an explicit __builtin_custom call.  */
2841 else if (status
== CCS_BUILTIN_CALL
)
2843 if (custom_code_status
[N
] == CCS_FPU
)
2845 custom_code_conflict
= true;
2846 error ("call to %<__builtin_custom_%s%> conflicts with switch "
2847 "%<-mcustom-%s%>", custom_builtin_name
[index
],
2848 N2FPU_NAME (custom_code_index
[N
]));
2852 /* Note that code conflicts between different __builtin_custom_xnxx
2853 calls are not checked. */
/* Record the (possibly conflicting) new use.  */
2859 custom_code_status
[N
] = status
;
2860 custom_code_index
[N
] = index
;
2863 /* Mark a custom code as not in use. */
/* Clear the tracking state for custom instruction number N.
   NOTE(review): original lines 2866-2868 are missing from this extract;
   the two visible assignments appear to sit inside an enclosing construct
   (likely a range guard) that is not shown — confirm against the full
   source before editing.  */
2865 nios2_deregister_custom_code (unsigned int N
)
2869 custom_code_status
[N
] = CCS_UNUSED
;
2870 custom_code_index
[N
] = 0;
2874 /* Target attributes can affect per-function option state, so we need to
2875 save/restore the custom code tracking info using the
2876 TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks. */
2879 nios2_option_save (struct cl_target_option
*ptr
,
2880 struct gcc_options
*opts ATTRIBUTE_UNUSED
)
2883 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
2884 ptr
->saved_fpu_custom_code
[i
] = N2FPU_N (i
);
2885 memcpy (ptr
->saved_custom_code_status
, custom_code_status
,
2886 sizeof (custom_code_status
));
2887 memcpy (ptr
->saved_custom_code_index
, custom_code_index
,
2888 sizeof (custom_code_index
));
2892 nios2_option_restore (struct gcc_options
*opts ATTRIBUTE_UNUSED
,
2893 struct cl_target_option
*ptr
)
2896 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
2897 N2FPU_N (i
) = ptr
->saved_fpu_custom_code
[i
];
2898 memcpy (custom_code_status
, ptr
->saved_custom_code_status
,
2899 sizeof (custom_code_status
));
2900 memcpy (custom_code_index
, ptr
->saved_custom_code_index
,
2901 sizeof (custom_code_index
));
2904 /* Inner function to process the attribute((target(...))), take an argument and
2905 set the current options from the argument. If we have a list, recursively
2906 go over the list. */
/* Parse one attribute argument (a TREE_LIST is recursed over; a
   STRING_CST is split on commas into "custom-<insn>=N", "no-custom-<insn>"
   and "custom-fpu-cfg=<name>" items) and update the FPU option state.
   NOTE(review): this extract is heavily garbled — many original lines,
   including most control-flow skeleton and return statements, are
   missing.  Comments describe only the code visible here; code is left
   byte-identical.  */
2909 nios2_valid_target_attribute_rec (tree args
)
/* List form: validate every element, failing if any element fails.  */
2911 if (TREE_CODE (args
) == TREE_LIST
)
2914 for (; args
; args
= TREE_CHAIN (args
))
2915 if (TREE_VALUE (args
)
2916 && !nios2_valid_target_attribute_rec (TREE_VALUE (args
)))
/* String form: work on a scratch copy so it can be tokenized in place.  */
2920 else if (TREE_CODE (args
) == STRING_CST
)
2922 char *argstr
= ASTRDUP (TREE_STRING_POINTER (args
));
2923 while (argstr
&& *argstr
!= '\0')
2925 bool no_opt
= false, end_p
= false;
/* EQ remembers the '=' separator within the current item, if any.  */
2926 char *eq
= NULL
, *p
;
2927 while (ISSPACE (*argstr
))
/* Scan to the end of this comma-separated item.  */
2930 while (*p
!= '\0' && *p
!= ',')
2932 if (!eq
&& *p
== '=')
/* A "no-" prefix negates the option.  */
2942 if (!strncmp (argstr
, "no-", 3))
2947 if (!strncmp (argstr
, "custom-fpu-cfg", 14))
2952 error ("custom-fpu-cfg option does not support %<no-%>");
2957 error ("custom-fpu-cfg option requires configuration"
2961 /* Increment and skip whitespace. */
2962 while (ISSPACE (*(++eq
))) ;
2963 /* Decrement and skip to before any trailing whitespace. */
2964 while (ISSPACE (*(--end_eq
))) ;
2966 nios2_handle_custom_fpu_cfg (eq
, end_eq
+ 1, true);
/* Individual custom-<insn> option: match the insn name against the
   FPU insn table.  */
2968 else if (!strncmp (argstr
, "custom-", 7))
2972 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
2973 if (!strncmp (argstr
+ 7, N2FPU_NAME (i
),
2974 strlen (N2FPU_NAME (i
))))
2986 error ("%<no-custom-%s%> does not accept arguments",
2990 /* Disable option by setting to -1. */
2991 nios2_deregister_custom_code (N2FPU_N (code
));
2992 N2FPU_N (code
) = -1;
2998 while (ISSPACE (*(++eq
))) ;
3001 error ("%<custom-%s=%> requires argument",
/* The argument after '=' must be all decimal digits.  */
3005 for (t
= eq
; t
!= p
; ++t
)
3011 error ("`custom-%s=' argument requires "
3012 "numeric digits", N2FPU_NAME (code
));
3016 /* Set option to argument. */
3017 N2FPU_N (code
) = atoi (eq
);
3018 nios2_handle_custom_fpu_insn_option (code
);
3023 error ("%<custom-%s=%> is not recognised as FPU instruction",
3030 error ("%<%s%> is unknown", argstr
);
3045 /* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
/* Validate ARGS via nios2_valid_target_attribute_rec, re-run the custom
   code consistency checks, and capture the resulting global option state
   as a target-option node.  NOTE(review): some original lines (including
   the failure return) are missing from this extract.  */
3048 nios2_valid_target_attribute_tree (tree args
)
3050 if (!nios2_valid_target_attribute_rec (args
))
3052 nios2_custom_check_insns ();
3053 return build_target_option_node (&global_options
);
3056 /* Hook to validate attribute((target("string"))). */
/* Implement TARGET_OPTION_VALID_ATTRIBUTE_P for FNDECL: apply ARGS to the
   global options, attach the resulting target/optimize nodes to FNDECL,
   then restore the caller's global state.  NOTE(review): this extract is
   garbled and some original lines are missing; comments describe only the
   visible code.  */
3059 nios2_valid_target_attribute_p (tree fndecl
, tree
ARG_UNUSED (name
),
3060 tree args
, int ARG_UNUSED (flags
))
/* Scratch copy of the current target options, restored before return.  */
3062 struct cl_target_option cur_target
;
3064 tree old_optimize
= build_optimization_node (&global_options
);
3065 tree new_target
, new_optimize
;
3066 tree func_optimize
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
);
3068 /* If the function changed the optimization levels as well as setting target
3069 options, start with the optimizations specified. */
3070 if (func_optimize
&& func_optimize
!= old_optimize
)
3071 cl_optimization_restore (&global_options
,
3072 TREE_OPTIMIZATION (func_optimize
));
3074 /* The target attributes may also change some optimization flags, so update
3075 the optimization options if necessary. */
3076 cl_target_option_save (&cur_target
, &global_options
);
3077 new_target
= nios2_valid_target_attribute_tree (args
);
3078 new_optimize
= build_optimization_node (&global_options
);
/* Attach the computed option nodes to the function declaration.  */
3085 DECL_FUNCTION_SPECIFIC_TARGET (fndecl
) = new_target
;
3087 if (old_optimize
!= new_optimize
)
3088 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
) = new_optimize
;
/* Undo the temporary changes to the global option state.  */
3091 cl_target_option_restore (&global_options
, &cur_target
);
3093 if (old_optimize
!= new_optimize
)
3094 cl_optimization_restore (&global_options
,
3095 TREE_OPTIMIZATION (old_optimize
));
3099 /* Remember the last target of nios2_set_current_function. */
3100 static GTY(()) tree nios2_previous_fndecl
;
3102 /* Establish appropriate back-end context for processing the function
3103 FNDECL. The argument might be NULL to indicate processing at top
3104 level, outside of any function scope. */
/* Implement TARGET_SET_CURRENT_FUNCTION: switch the global target-option
   state to FNDECL's per-function options (or back to the defaults),
   caching the previous fndecl to avoid redundant restores.
   NOTE(review): this extract is garbled and some original lines are
   missing; comments describe only the visible code.  */
3106 nios2_set_current_function (tree fndecl
)
/* Target options of the function we were last processing, if any.  */
3108 tree old_tree
= (nios2_previous_fndecl
3109 ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl
)
/* Target options of the function we are switching to, if any.  */
3112 tree new_tree
= (fndecl
3113 ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl
)
3116 if (fndecl
&& fndecl
!= nios2_previous_fndecl
)
3118 nios2_previous_fndecl
= fndecl
;
/* Nothing to do when the option state is unchanged.  */
3119 if (old_tree
== new_tree
)
3124 cl_target_option_restore (&global_options
,
3125 TREE_TARGET_OPTION (new_tree
));
/* Fall back to the options current at top level.  */
3131 struct cl_target_option
*def
3132 = TREE_TARGET_OPTION (target_option_current_node
);
3134 cl_target_option_restore (&global_options
, def
);
3140 /* Hook to validate the current #pragma GCC target and set the FPU custom
3141 code option state. If ARGS is NULL, then POP_TARGET is used to reset
/* Implement TARGET_OPTION_PRAGMA_PARSE.  With ARGS, validate and apply
   the pragma's options; without ARGS, restore POP_TARGET (or the default
   target options).  The surviving option node becomes the current one.
   NOTE(review): this extract is garbled and some original lines are
   missing; comments describe only the visible code.  */
3144 nios2_pragma_target_parse (tree args
, tree pop_target
)
/* Reset path: pop back to POP_TARGET or the built-in defaults.  */
3149 cur_tree
= ((pop_target
)
3151 : target_option_default_node
);
3152 cl_target_option_restore (&global_options
,
3153 TREE_TARGET_OPTION (cur_tree
));
/* Apply path: validate ARGS and capture the resulting options.  */
3157 cur_tree
= nios2_valid_target_attribute_tree (args
);
3162 target_option_current_node
= cur_tree
;
3166 /* Implement TARGET_MERGE_DECL_ATTRIBUTES.
3167 We are just using this hook to add some additional error checking to
3168 the default behavior. GCC does not provide a target hook for merging
3169 the target options, and only correctly handles merging empty vs non-empty
3170 option data; see merge_decls() in c-decl.c.
3171 So here we require either that at least one of the decls has empty
3172 target options, or that the target options/data be identical. */
/* NOTE(review): this extract is garbled and some original lines are
   missing; comments describe only the visible code.  */
3174 nios2_merge_decl_attributes (tree olddecl
, tree newdecl
)
/* Only interesting when both declarations carry distinct "target"
   attributes.  */
3176 tree oldopts
= lookup_attribute ("target", DECL_ATTRIBUTES (olddecl
));
3177 tree newopts
= lookup_attribute ("target", DECL_ATTRIBUTES (newdecl
));
3178 if (newopts
&& oldopts
&& newopts
!= oldopts
)
3180 tree oldtree
= DECL_FUNCTION_SPECIFIC_TARGET (olddecl
);
3181 tree newtree
= DECL_FUNCTION_SPECIFIC_TARGET (newdecl
);
3182 if (oldtree
&& newtree
&& oldtree
!= newtree
)
/* Distinct option nodes with differing payloads mean a genuinely
   conflicting redeclaration.  */
3184 struct cl_target_option
*olddata
= TREE_TARGET_OPTION (oldtree
);
3185 struct cl_target_option
*newdata
= TREE_TARGET_OPTION (newtree
);
3186 if (olddata
!= newdata
3187 && memcmp (olddata
, newdata
, sizeof (struct cl_target_option
)))
3188 error ("%qE redeclared with conflicting %qs attributes",
3189 DECL_NAME (newdecl
), "target");
/* Defer the actual merge to the generic attribute machinery.  */
3192 return merge_attributes (DECL_ATTRIBUTES (olddecl
),
3193 DECL_ATTRIBUTES (newdecl
));
3197 /* Initialize the GCC target structure. */
/* Assembly output and data placement hooks.  */
3198 #undef TARGET_ASM_FUNCTION_PROLOGUE
3199 #define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue
3201 #undef TARGET_IN_SMALL_DATA_P
3202 #define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p
3204 #undef TARGET_SECTION_TYPE_FLAGS
3205 #define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags
/* Builtin function support (see nios2_init_builtins above).  */
3207 #undef TARGET_INIT_BUILTINS
3208 #define TARGET_INIT_BUILTINS nios2_init_builtins
3209 #undef TARGET_EXPAND_BUILTIN
3210 #define TARGET_EXPAND_BUILTIN nios2_expand_builtin
3211 #undef TARGET_BUILTIN_DECL
3212 #define TARGET_BUILTIN_DECL nios2_builtin_decl
3214 #undef TARGET_INIT_LIBFUNCS
3215 #define TARGET_INIT_LIBFUNCS nios2_init_libfuncs
/* Calling convention, argument passing, and return value hooks.  */
3217 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
3218 #define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true
3220 #undef TARGET_CAN_ELIMINATE
3221 #define TARGET_CAN_ELIMINATE nios2_can_eliminate
3223 #undef TARGET_FUNCTION_ARG
3224 #define TARGET_FUNCTION_ARG nios2_function_arg
3226 #undef TARGET_FUNCTION_ARG_ADVANCE
3227 #define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance
3229 #undef TARGET_ARG_PARTIAL_BYTES
3230 #define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes
3232 #undef TARGET_TRAMPOLINE_INIT
3233 #define TARGET_TRAMPOLINE_INIT nios2_trampoline_init
3235 #undef TARGET_FUNCTION_VALUE
3236 #define TARGET_FUNCTION_VALUE nios2_function_value
3238 #undef TARGET_LIBCALL_VALUE
3239 #define TARGET_LIBCALL_VALUE nios2_libcall_value
3241 #undef TARGET_FUNCTION_VALUE_REGNO_P
3242 #define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p
3244 #undef TARGET_RETURN_IN_MEMORY
3245 #define TARGET_RETURN_IN_MEMORY nios2_return_in_memory
3247 #undef TARGET_PROMOTE_PROTOTYPES
3248 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3250 #undef TARGET_SETUP_INCOMING_VARARGS
3251 #define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs
3253 #undef TARGET_MUST_PASS_IN_STACK
3254 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
/* Address legitimization, costs, and TLS hooks.  */
3256 #undef TARGET_LEGITIMATE_CONSTANT_P
3257 #define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p
3259 #undef TARGET_LEGITIMIZE_ADDRESS
3260 #define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address
3262 #undef TARGET_LEGITIMATE_ADDRESS_P
3263 #define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p
3265 #undef TARGET_PREFERRED_RELOAD_CLASS
3266 #define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class
3268 #undef TARGET_RTX_COSTS
3269 #define TARGET_RTX_COSTS nios2_rtx_costs
3271 #undef TARGET_HAVE_TLS
3272 #define TARGET_HAVE_TLS TARGET_LINUX_ABI
3274 #undef TARGET_CANNOT_FORCE_CONST_MEM
3275 #define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem
3277 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
3278 #define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel
/* Operand printing hooks.  */
3280 #undef TARGET_PRINT_OPERAND
3281 #define TARGET_PRINT_OPERAND nios2_print_operand
3283 #undef TARGET_PRINT_OPERAND_ADDRESS
3284 #define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address
3286 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3287 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra
/* Option handling and target-attribute hooks (see functions above).  */
3289 #undef TARGET_OPTION_OVERRIDE
3290 #define TARGET_OPTION_OVERRIDE nios2_option_override
3292 #undef TARGET_OPTION_SAVE
3293 #define TARGET_OPTION_SAVE nios2_option_save
3295 #undef TARGET_OPTION_RESTORE
3296 #define TARGET_OPTION_RESTORE nios2_option_restore
3298 #undef TARGET_SET_CURRENT_FUNCTION
3299 #define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function
3301 #undef TARGET_OPTION_VALID_ATTRIBUTE_P
3302 #define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p
3304 #undef TARGET_OPTION_PRAGMA_PARSE
3305 #define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse
3307 #undef TARGET_MERGE_DECL_ATTRIBUTES
3308 #define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes
/* Instantiate the target vector from the hook macros above.  */
3310 struct gcc_target targetm
= TARGET_INITIALIZER
;
/* Garbage-collector roots generated for this file's GTY(()) data.  */
3312 #include "gt-nios2.h"