1 /* Subroutines used for MIPS code generation.
2 Copyright (C) 1989, 1990, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5 Contributed by A. Lichnewsky, lich@inria.inria.fr.
6 Changes by Michael Meissner, meissner@osf.org.
7 64-bit r4000 support by Ian Lance Taylor, ian@cygnus.com, and
8 Brendan Eich, brendan@microunity.com.
10 This file is part of GCC.
12 GCC is free software; you can redistribute it and/or modify
13 it under the terms of the GNU General Public License as published by
14 the Free Software Foundation; either version 3, or (at your option)
any later version.
17 GCC is distributed in the hope that it will be useful,
18 but WITHOUT ANY WARRANTY; without even the implied warranty of
19 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 GNU General Public License for more details.
22 You should have received a copy of the GNU General Public License
23 along with GCC; see the file COPYING3. If not see
24 <http://www.gnu.org/licenses/>. */
28 #include "coretypes.h"
33 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "conditions.h"
37 #include "insn-attr.h"
53 #include "target-def.h"
54 #include "integrate.h"
55 #include "langhooks.h"
56 #include "cfglayout.h"
57 #include "sched-int.h"
58 #include "tree-gimple.h"
60 #include "diagnostic.h"
62 /* True if X is an unspec wrapper around a SYMBOL_REF or LABEL_REF. */
63 #define UNSPEC_ADDRESS_P(X) \
64 (GET_CODE (X) == UNSPEC \
65 && XINT (X, 1) >= UNSPEC_ADDRESS_FIRST \
66 && XINT (X, 1) < UNSPEC_ADDRESS_FIRST + NUM_SYMBOL_TYPES)
68 /* Extract the symbol or label from UNSPEC wrapper X. */
/* NOTE(review): the expansion body of UNSPEC_ADDRESS is missing from
   this copy of the file -- the trailing backslash below continues the
   definition into the next comment.  Restore the body from the
   original source before compiling.  */
69 #define UNSPEC_ADDRESS(X) \
72 /* Extract the symbol type from UNSPEC wrapper X. */
73 #define UNSPEC_ADDRESS_TYPE(X) \
74 ((enum mips_symbol_type) (XINT (X, 1) - UNSPEC_ADDRESS_FIRST))
76 /* The maximum distance between the top of the stack frame and the
77 value $sp has when we save and restore registers.
79 The value for normal-mode code must be a SMALL_OPERAND and must
80 preserve the maximum stack alignment. We therefore use a value
81 of 0x7ff0 in this case.
83 MIPS16e SAVE and RESTORE instructions can adjust the stack pointer by
84 up to 0x7f8 bytes and can usually save or restore all the registers
85 that we need to save or restore. (Note that we can only use these
86 instructions for o32, for which the stack alignment is 8 bytes.)
88 We use a maximum gap of 0x100 or 0x400 for MIPS16 code when SAVE and
89 RESTORE are not available. We can then use unextended instructions
90 to save and restore registers, and to allocate and deallocate the top
part of the frame.  */
92 #define MIPS_MAX_FIRST_STACK_STEP \
93 (!TARGET_MIPS16 ? 0x7ff0 \
94 : GENERATE_MIPS16E_SAVE_RESTORE ? 0x7f8 \
95 : TARGET_64BIT ? 0x100 : 0x400)
97 /* True if INSN is a mips.md pattern or asm statement. */
/* NOTE(review): the first operand of the && chain below (presumably an
   INSN_P (INSN) test) is missing from this copy; confirm against the
   original source.  */
98 #define USEFUL_INSN_P(INSN) \
100 && GET_CODE (PATTERN (INSN)) != USE \
101 && GET_CODE (PATTERN (INSN)) != CLOBBER \
102 && GET_CODE (PATTERN (INSN)) != ADDR_VEC \
103 && GET_CODE (PATTERN (INSN)) != ADDR_DIFF_VEC)
105 /* If INSN is a delayed branch sequence, return the first instruction
106 in the sequence, otherwise return INSN itself. */
/* NOTE(review): the ": (INSN))" else-arm of this conditional expression
   is missing from this copy -- the trailing backslash continues the
   macro into the comment below.  Restore from the original source.  */
107 #define SEQ_BEGIN(INSN) \
108 (INSN_P (INSN) && GET_CODE (PATTERN (INSN)) == SEQUENCE \
109 ? XVECEXP (PATTERN (INSN), 0, 0) \
112 /* Likewise for the last instruction in a delayed branch sequence. */
/* NOTE(review): same truncation as SEQ_BEGIN -- the ": (INSN))"
   else-arm is missing here.  */
113 #define SEQ_END(INSN) \
114 (INSN_P (INSN) && GET_CODE (PATTERN (INSN)) == SEQUENCE \
115 ? XVECEXP (PATTERN (INSN), 0, XVECLEN (PATTERN (INSN), 0) - 1) \
118 /* Execute the following loop body with SUBINSN set to each instruction
119 between SEQ_BEGIN (INSN) and SEQ_END (INSN) inclusive. */
120 #define FOR_EACH_SUBINSN(SUBINSN, INSN) \
121 for ((SUBINSN) = SEQ_BEGIN (INSN); \
122 (SUBINSN) != NEXT_INSN (SEQ_END (INSN)); \
123 (SUBINSN) = NEXT_INSN (SUBINSN))
125 /* True if bit BIT is set in VALUE. */
126 #define BITSET_P(VALUE, BIT) (((VALUE) & (1 << (BIT))) != 0)
128 /* Classifies an address.
131 A natural register + offset address. The register satisfies
132 mips_valid_base_register_p and the offset is a const_arith_operand.
135 A LO_SUM rtx. The first operand is a valid base register and
136 the second operand is a symbolic address.
139 A signed 16-bit constant address.
142 A constant symbolic address (equivalent to CONSTANT_SYMBOLIC). */
/* NOTE(review): the enumerator names described by the comment above
   (and the closing "};") are missing from this copy of the file; the
   brace below opens an empty body.  Restore from the original source.  */
143 enum mips_address_type
{
150 /* Classifies the prototype of a builtin function. */
/* NOTE(review): the opening brace of this enum, a number of enumerators
   (visible as gaps in the fused line numbering) and the closing "};"
   are missing from this copy; restore from the original source.  */
151 enum mips_function_type
153 MIPS_V2SF_FTYPE_V2SF
,
154 MIPS_V2SF_FTYPE_V2SF_V2SF
,
155 MIPS_V2SF_FTYPE_V2SF_V2SF_INT
,
156 MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF
,
157 MIPS_V2SF_FTYPE_SF_SF
,
158 MIPS_INT_FTYPE_V2SF_V2SF
,
159 MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF
,
160 MIPS_INT_FTYPE_SF_SF
,
161 MIPS_INT_FTYPE_DF_DF
,
168 /* For MIPS DSP ASE */
170 MIPS_DI_FTYPE_DI_SI_SI
,
171 MIPS_DI_FTYPE_DI_V2HI_V2HI
,
172 MIPS_DI_FTYPE_DI_V4QI_V4QI
,
174 MIPS_SI_FTYPE_PTR_SI
,
178 MIPS_SI_FTYPE_V2HI_V2HI
,
180 MIPS_SI_FTYPE_V4QI_V4QI
,
183 MIPS_V2HI_FTYPE_SI_SI
,
184 MIPS_V2HI_FTYPE_V2HI
,
185 MIPS_V2HI_FTYPE_V2HI_SI
,
186 MIPS_V2HI_FTYPE_V2HI_V2HI
,
187 MIPS_V2HI_FTYPE_V4QI
,
188 MIPS_V2HI_FTYPE_V4QI_V2HI
,
190 MIPS_V4QI_FTYPE_V2HI_V2HI
,
191 MIPS_V4QI_FTYPE_V4QI_SI
,
192 MIPS_V4QI_FTYPE_V4QI_V4QI
,
193 MIPS_VOID_FTYPE_SI_SI
,
194 MIPS_VOID_FTYPE_V2HI_V2HI
,
195 MIPS_VOID_FTYPE_V4QI_V4QI
,
197 /* For MIPS DSP REV 2 ASE. */
198 MIPS_V4QI_FTYPE_V4QI
,
199 MIPS_SI_FTYPE_SI_SI_SI
,
200 MIPS_DI_FTYPE_DI_USI_USI
,
202 MIPS_DI_FTYPE_USI_USI
,
203 MIPS_V2HI_FTYPE_SI_SI_SI
,
209 /* Specifies how a builtin function should be converted into rtl. */
/* NOTE(review): the opening brace of this enum, the enumerator that the
   first comment describes (presumably MIPS_BUILTIN_DIRECT), the movt/movf
   enumerators described below, and the closing "};" are missing from this
   copy; restore from the original source.  */
210 enum mips_builtin_type
212 /* The builtin corresponds directly to an .md pattern. The return
213 value is mapped to operand 0 and the arguments are mapped to
214 operands 1 and above. */
217 /* The builtin corresponds directly to an .md pattern. There is no return
218 value and the arguments are mapped to operands 0 and above. */
219 MIPS_BUILTIN_DIRECT_NO_TARGET
,
221 /* The builtin corresponds to a comparison instruction followed by
222 a mips_cond_move_tf_ps pattern. The first two arguments are the
223 values to compare and the second two arguments are the vector
224 operands for the movt.ps or movf.ps instruction (in assembly order). */
228 /* The builtin corresponds to a V2SF comparison instruction. Operand 0
229 of this instruction is the result of the comparison, which has mode
230 CCV2 or CCV4. The function arguments are mapped to operands 1 and
231 above. The function's return value is an SImode boolean that is
232 true under the following conditions:
234 MIPS_BUILTIN_CMP_ANY: one of the registers is true
235 MIPS_BUILTIN_CMP_ALL: all of the registers are true
236 MIPS_BUILTIN_CMP_LOWER: the first register is true
237 MIPS_BUILTIN_CMP_UPPER: the second register is true. */
238 MIPS_BUILTIN_CMP_ANY
,
239 MIPS_BUILTIN_CMP_ALL
,
240 MIPS_BUILTIN_CMP_UPPER
,
241 MIPS_BUILTIN_CMP_LOWER
,
243 /* As above, but the instruction only sets a single $fcc register. */
244 MIPS_BUILTIN_CMP_SINGLE
,
246 /* For generating bposge32 branch instructions in MIPS32 DSP ASE. */
247 MIPS_BUILTIN_BPOSGE32
250 /* Invokes MACRO (COND) for each c.cond.fmt condition. */
/* NOTE(review): the expansion of MIPS_FP_CONDITIONS -- the list of
   c.cond.fmt condition names -- is missing from this copy; the trailing
   backslash below makes the following comment part of the macro.  The
   closing "};" of the enum and of the string table below are also
   missing.  Restore all of these from the original source.  */
251 #define MIPS_FP_CONDITIONS(MACRO) \
269 /* Enumerates the codes above as MIPS_FP_COND_<X>. */
270 #define DECLARE_MIPS_COND(X) MIPS_FP_COND_ ## X
271 enum mips_fp_condition
{
272 MIPS_FP_CONDITIONS (DECLARE_MIPS_COND
)
275 /* Index X provides the string representation of MIPS_FP_COND_<X>. */
276 #define STRINGIFY(X) #X
277 static const char *const mips_fp_conditions
[] = {
278 MIPS_FP_CONDITIONS (STRINGIFY
)
281 /* A function to save or store a register. The first argument is the
282 register and the second is the stack slot. */
283 typedef void (*mips_save_restore_fn
) (rtx
, rtx
);
/* Forward declarations of structures defined later in this file.  */
285 struct mips16_constant
;
286 struct mips_arg_info
;
287 struct mips_address_info
;
288 struct mips_integer_op
;
291 static bool mips_valid_base_register_p (rtx
, enum machine_mode
, int);
292 static bool mips_classify_address (struct mips_address_info
*, rtx
,
293 enum machine_mode
, int);
294 static bool mips_cannot_force_const_mem (rtx
);
295 static bool mips_use_blocks_for_constant_p (enum machine_mode
, const_rtx
);
296 static int mips_symbol_insns (enum mips_symbol_type
, enum machine_mode
);
297 static bool mips16_unextended_reference_p (enum machine_mode mode
, rtx
, rtx
);
298 static rtx
mips_force_temporary (rtx
, rtx
);
299 static rtx
mips_unspec_offset_high (rtx
, rtx
, rtx
, enum mips_symbol_type
);
300 static rtx
mips_add_offset (rtx
, rtx
, HOST_WIDE_INT
);
301 static unsigned int mips_build_shift (struct mips_integer_op
*, HOST_WIDE_INT
);
302 static unsigned int mips_build_lower (struct mips_integer_op
*,
303 unsigned HOST_WIDE_INT
);
304 static unsigned int mips_build_integer (struct mips_integer_op
*,
305 unsigned HOST_WIDE_INT
);
306 static void mips_legitimize_const_move (enum machine_mode
, rtx
, rtx
);
307 static int m16_check_op (rtx
, int, int, int);
308 static bool mips_rtx_costs (rtx
, int, int, int *);
309 static int mips_address_cost (rtx
);
310 static void mips_emit_compare (enum rtx_code
*, rtx
*, rtx
*, bool);
311 static void mips_load_call_address (rtx
, rtx
, int);
312 static bool mips_function_ok_for_sibcall (tree
, tree
);
313 static void mips_block_move_straight (rtx
, rtx
, HOST_WIDE_INT
);
314 static void mips_adjust_block_mem (rtx
, HOST_WIDE_INT
, rtx
*, rtx
*);
315 static void mips_block_move_loop (rtx
, rtx
, HOST_WIDE_INT
);
316 static void mips_arg_info (const CUMULATIVE_ARGS
*, enum machine_mode
,
317 tree
, int, struct mips_arg_info
*);
318 static bool mips_get_unaligned_mem (rtx
*, unsigned int, int, rtx
*, rtx
*);
319 static void mips_set_architecture (const struct mips_cpu_info
*);
320 static void mips_set_tune (const struct mips_cpu_info
*);
321 static bool mips_handle_option (size_t, const char *, int);
322 static struct machine_function
*mips_init_machine_status (void);
323 static void print_operand_reloc (FILE *, rtx
, enum mips_symbol_context
,
325 static void mips_file_start (void);
326 static int mips_small_data_pattern_1 (rtx
*, void *);
327 static int mips_rewrite_small_data_1 (rtx
*, void *);
328 static bool mips_function_has_gp_insn (void);
329 static unsigned int mips_global_pointer (void);
330 static bool mips_save_reg_p (unsigned int);
331 static void mips_save_restore_reg (enum machine_mode
, int, HOST_WIDE_INT
,
332 mips_save_restore_fn
);
333 static void mips_for_each_saved_reg (HOST_WIDE_INT
, mips_save_restore_fn
);
334 static void mips_output_cplocal (void);
335 static void mips_emit_loadgp (void);
336 static void mips_output_function_prologue (FILE *, HOST_WIDE_INT
);
337 static void mips_set_frame_expr (rtx
);
338 static rtx
mips_frame_set (rtx
, rtx
);
339 static void mips_save_reg (rtx
, rtx
);
340 static void mips_output_function_epilogue (FILE *, HOST_WIDE_INT
);
341 static void mips_restore_reg (rtx
, rtx
);
342 static void mips_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
,
343 HOST_WIDE_INT
, tree
);
344 static section
*mips_select_rtx_section (enum machine_mode
, rtx
,
345 unsigned HOST_WIDE_INT
);
346 static section
*mips_function_rodata_section (tree
);
347 static bool mips_in_small_data_p (const_tree
);
348 static bool mips_use_anchors_for_symbol_p (const_rtx
);
349 static int mips_fpr_return_fields (const_tree
, tree
*);
350 static bool mips_return_in_msb (const_tree
);
351 static rtx
mips_return_fpr_pair (enum machine_mode mode
,
352 enum machine_mode mode1
, HOST_WIDE_INT
,
353 enum machine_mode mode2
, HOST_WIDE_INT
);
354 static rtx
mips16_gp_pseudo_reg (void);
355 static void mips16_fp_args (FILE *, int, int);
356 static void build_mips16_function_stub (FILE *);
357 static rtx
dump_constants_1 (enum machine_mode
, rtx
, rtx
);
358 static void dump_constants (struct mips16_constant
*, rtx
);
359 static int mips16_insn_length (rtx
);
360 static int mips16_rewrite_pool_refs (rtx
*, void *);
361 static void mips16_lay_out_constants (void);
362 static void mips_sim_reset (struct mips_sim
*);
363 static void mips_sim_init (struct mips_sim
*, state_t
);
364 static void mips_sim_next_cycle (struct mips_sim
*);
365 static void mips_sim_wait_reg (struct mips_sim
*, rtx
, rtx
);
366 static int mips_sim_wait_regs_2 (rtx
*, void *);
367 static void mips_sim_wait_regs_1 (rtx
*, void *);
368 static void mips_sim_wait_regs (struct mips_sim
*, rtx
);
369 static void mips_sim_wait_units (struct mips_sim
*, rtx
);
370 static void mips_sim_wait_insn (struct mips_sim
*, rtx
);
371 static void mips_sim_record_set (rtx
, const_rtx
, void *);
372 static void mips_sim_issue_insn (struct mips_sim
*, rtx
);
373 static void mips_sim_issue_nop (struct mips_sim
*);
374 static void mips_sim_finish_insn (struct mips_sim
*, rtx
);
375 static void vr4130_avoid_branch_rt_conflict (rtx
);
376 static void vr4130_align_insns (void);
377 static void mips_avoid_hazard (rtx
, rtx
, int *, rtx
*, rtx
);
378 static void mips_avoid_hazards (void);
379 static void mips_reorg (void);
380 static bool mips_strict_matching_cpu_name_p (const char *, const char *);
381 static bool mips_matching_cpu_name_p (const char *, const char *);
382 static const struct mips_cpu_info
*mips_parse_cpu (const char *);
383 static const struct mips_cpu_info
*mips_cpu_info_from_isa (int);
384 static bool mips_return_in_memory (const_tree
, const_tree
);
385 static bool mips_strict_argument_naming (CUMULATIVE_ARGS
*);
386 static void mips_macc_chains_record (rtx
);
387 static void mips_macc_chains_reorder (rtx
*, int);
388 static void vr4130_true_reg_dependence_p_1 (rtx
, const_rtx
, void *);
389 static bool vr4130_true_reg_dependence_p (rtx
);
390 static bool vr4130_swap_insns_p (rtx
, rtx
);
391 static void vr4130_reorder (rtx
*, int);
392 static void mips_promote_ready (rtx
*, int, int);
393 static void mips_sched_init (FILE *, int, int);
394 static int mips_sched_reorder (FILE *, int, rtx
*, int *, int);
395 static int mips_variable_issue (FILE *, int, rtx
, int);
396 static int mips_adjust_cost (rtx
, rtx
, rtx
, int);
397 static int mips_issue_rate (void);
398 static int mips_multipass_dfa_lookahead (void);
399 static void mips_init_libfuncs (void);
400 static void mips_setup_incoming_varargs (CUMULATIVE_ARGS
*, enum machine_mode
,
402 static tree
mips_build_builtin_va_list (void);
403 static tree
mips_gimplify_va_arg_expr (tree
, tree
, tree
*, tree
*);
404 static bool mips_pass_by_reference (CUMULATIVE_ARGS
*, enum machine_mode mode
,
406 static bool mips_callee_copies (CUMULATIVE_ARGS
*, enum machine_mode mode
,
408 static int mips_arg_partial_bytes (CUMULATIVE_ARGS
*, enum machine_mode mode
,
410 static bool mips_valid_pointer_mode (enum machine_mode
);
411 static bool mips_scalar_mode_supported_p (enum machine_mode
);
412 static bool mips_vector_mode_supported_p (enum machine_mode
);
413 static rtx
mips_prepare_builtin_arg (enum insn_code
, unsigned int, tree
, unsigned int);
414 static rtx
mips_prepare_builtin_target (enum insn_code
, unsigned int, rtx
);
415 static rtx
mips_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
416 static void mips_init_builtins (void);
417 static rtx
mips_expand_builtin_direct (enum insn_code
, rtx
, tree
, bool);
418 static rtx
mips_expand_builtin_movtf (enum mips_builtin_type
,
419 enum insn_code
, enum mips_fp_condition
,
421 static rtx
mips_expand_builtin_compare (enum mips_builtin_type
,
422 enum insn_code
, enum mips_fp_condition
,
424 static rtx
mips_expand_builtin_bposge (enum mips_builtin_type
, rtx
);
425 static void mips_encode_section_info (tree
, rtx
, int);
426 static void mips_extra_live_on_entry (bitmap
);
427 static int mips_comp_type_attributes (const_tree
, const_tree
);
428 static void mips_set_mips16_mode (int);
429 static void mips_insert_attributes (tree
, tree
*);
430 static tree
mips_merge_decl_attributes (tree
, tree
);
431 static void mips_set_current_function (tree
);
432 static int mips_mode_rep_extended (enum machine_mode
, enum machine_mode
);
433 static bool mips_offset_within_alignment_p (rtx
, HOST_WIDE_INT
);
434 static void mips_output_dwarf_dtprel (FILE *, int, rtx
) ATTRIBUTE_UNUSED
;
436 /* Structure to be filled in by compute_frame_size with register
437 save masks, and offsets for the current function. */
439 struct mips_frame_info
GTY(())
441 HOST_WIDE_INT total_size
; /* # bytes that the entire frame takes up */
442 HOST_WIDE_INT var_size
; /* # bytes that variables take up */
443 HOST_WIDE_INT args_size
; /* # bytes that outgoing arguments take up */
444 HOST_WIDE_INT cprestore_size
; /* # bytes that the .cprestore slot takes up */
445 HOST_WIDE_INT gp_reg_size
; /* # bytes needed to store gp regs */
446 HOST_WIDE_INT fp_reg_size
; /* # bytes needed to store fp regs */
447 unsigned int mask
; /* mask of saved gp registers */
448 unsigned int fmask
; /* mask of saved fp registers */
449 HOST_WIDE_INT gp_save_offset
; /* offset from vfp to store gp registers */
450 HOST_WIDE_INT fp_save_offset
; /* offset from vfp to store fp registers */
451 HOST_WIDE_INT gp_sp_offset
; /* offset from new sp to store gp registers */
452 HOST_WIDE_INT fp_sp_offset
; /* offset from new sp to store fp registers */
453 bool initialized
; /* true if frame size already calculated */
454 int num_gp
; /* number of gp registers saved */
455 int num_fp
; /* number of fp registers saved */
458 struct machine_function
GTY(()) {
459 /* Pseudo-reg holding the value of $28 in a mips16 function which
460 refers to GP relative global variables. */
461 rtx mips16_gp_pseudo_rtx
;
463 /* The number of extra stack bytes taken up by register varargs.
464 This area is allocated by the callee at the very top of the frame. */
467 /* Current frame information, calculated by compute_frame_size. */
468 struct mips_frame_info frame
;
470 /* The register to use as the global pointer within this function. */
471 unsigned int global_pointer
;
473 /* True if mips_adjust_insn_length should ignore an instruction's
hazard barrier.  */
475 bool ignore_hazard_length_p
;
477 /* True if the whole function is suitable for .set noreorder and
.set nomacro.  */
479 bool all_noreorder_p
;
481 /* True if the function is known to have an instruction that needs $gp. */
484 /* True if we have emitted an instruction to initialize
485 mips16_gp_pseudo_rtx. */
486 bool initialized_mips16_gp_pseudo_p
;
489 /* Information about a single argument. */
492 /* True if the argument is passed in a floating-point register, or
493 would have been if we hadn't run out of registers. */
496 /* The number of words passed in registers, rounded up. */
497 unsigned int reg_words
;
499 /* For EABI, the offset of the first register from GP_ARG_FIRST or
500 FP_ARG_FIRST. For other ABIs, the offset of the first register from
501 the start of the ABI's argument structure (see the CUMULATIVE_ARGS
502 comment for details).
504 The value is MAX_ARGS_IN_REGISTERS if the argument is passed entirely
on the stack.  */
506 unsigned int reg_offset
;
508 /* The number of words that must be passed on the stack, rounded up. */
509 unsigned int stack_words
;
511 /* The offset from the start of the stack overflow area of the argument's
512 first stack word. Only meaningful when STACK_WORDS is nonzero. */
513 unsigned int stack_offset
;
517 /* Information about an address described by mips_address_type.
523 REG is the base register and OFFSET is the constant offset.
526 REG is the register that contains the high part of the address,
527 OFFSET is the symbolic address being referenced and SYMBOL_TYPE
528 is the type of OFFSET's symbol.
531 SYMBOL_TYPE is the type of symbol being referenced. */
/* NOTE(review): the opening brace of this struct, the REG/OFFSET member
   declarations that the comment above describes, and the closing "};"
   are missing from this copy; restore from the original source.  */
533 struct mips_address_info
535 enum mips_address_type type
;
538 enum mips_symbol_type symbol_type
;
542 /* One stage in a constant building sequence. These sequences have
546 A = A CODE[1] VALUE[1]
547 A = A CODE[2] VALUE[2]
550 where A is an accumulator, each CODE[i] is a binary rtl operation
551 and each VALUE[i] is a constant integer. */
/* NOTE(review): the CODE member (presumably an enum rtx_code, per the
   comment above) and the closing "};" of this struct are missing from
   this copy; restore from the original source.  */
552 struct mips_integer_op
{
554 unsigned HOST_WIDE_INT value
;
558 /* The largest number of operations needed to load an integer constant.
559 The worst accepted case for 64-bit constants is LUI,ORI,SLL,ORI,SLL,ORI.
560 When the lowest bit is clear, we can try, but reject a sequence with
561 an extra SLL at the end. */
562 #define MIPS_MAX_INTEGER_OPS 7
564 /* Information about a MIPS16e SAVE or RESTORE instruction. */
/* NOTE(review): the member declarations that each comment below
   describes, and the closing "};", are missing from this copy;
   restore from the original source.  */
565 struct mips16e_save_restore_info
{
566 /* The number of argument registers saved by a SAVE instruction.
567 0 for RESTORE instructions. */
570 /* Bit X is set if the instruction saves or restores GPR X. */
573 /* The total number of bytes to allocate. */
577 /* Global variables for machine-dependent things. */
579 /* Threshold for data being put into the small data/bss area, instead
580 of the normal data area. */
581 int mips_section_threshold
= -1;
583 /* Count the number of .file directives, so that .loc is up to date. */
584 int num_source_filenames
= 0;
586 /* Count the number of sdb related labels are generated (to find block
587 start and end boundaries). */
588 int sdb_label_count
= 0;
590 /* Next label # for each statement for Silicon Graphics IRIS systems. */
593 /* Name of the file containing the current function. */
594 const char *current_function_file
= "";
596 /* Number of nested .set noreorder, noat, nomacro, and volatile requests. */
602 /* The next branch instruction is a branch likely, not branch normal. */
603 int mips_branch_likely
;
605 /* The operands passed to the last cmpMM expander. */
608 /* The target cpu for code generation. */
609 enum processor_type mips_arch
;
610 const struct mips_cpu_info
*mips_arch_info
;
612 /* The target cpu for optimization and scheduling. */
613 enum processor_type mips_tune
;
614 const struct mips_cpu_info
*mips_tune_info
;
616 /* Which instruction set architecture to use. */
619 /* Which ABI to use. */
620 int mips_abi
= MIPS_ABI_DEFAULT
;
622 /* Cost information to use. */
623 const struct mips_rtx_cost_data
*mips_cost
;
625 /* Remember the ambient target flags, excluding mips16. */
626 static int mips_base_target_flags
;
627 /* The mips16 command-line target flags only. */
628 static bool mips_base_mips16
;
629 /* Similar copies of option settings. */
630 static int mips_base_schedule_insns
; /* flag_schedule_insns */
631 static int mips_base_reorder_blocks_and_partition
; /* flag_reorder... */
632 static int mips_base_move_loop_invariants
; /* flag_move_loop_invariants */
633 static int mips_base_align_loops
; /* align_loops */
634 static int mips_base_align_jumps
; /* align_jumps */
635 static int mips_base_align_functions
; /* align_functions */
636 static GTY(()) int mips16_flipper
;
638 /* The -mtext-loads setting. */
639 enum mips_code_readable_setting mips_code_readable
= CODE_READABLE_YES
;
641 /* The -mllsc setting. */
642 enum mips_llsc_setting mips_llsc
= LLSC_DEFAULT
;
644 /* The architecture selected by -mipsN. */
645 static const struct mips_cpu_info
*mips_isa_info
;
647 /* If TRUE, we split addresses into their high and low parts in the RTL. */
648 int mips_split_addresses
;
650 /* Mode used for saving/restoring general purpose registers. */
651 static enum machine_mode gpr_mode
;
653 /* Array giving truth value on whether or not a given hard register
654 can support a given mode. */
655 char mips_hard_regno_mode_ok
[(int)MAX_MACHINE_MODE
][FIRST_PSEUDO_REGISTER
];
657 /* List of all MIPS punctuation characters used by print_operand. */
658 char mips_print_operand_punct
[256];
660 /* Map GCC register number to debugger register number. */
661 int mips_dbx_regno
[FIRST_PSEUDO_REGISTER
];
662 int mips_dwarf_regno
[FIRST_PSEUDO_REGISTER
];
664 /* A copy of the original flag_delayed_branch: see override_options. */
665 static int mips_flag_delayed_branch
;
667 static GTY (()) int mips_output_filename_first_time
= 1;
669 /* mips_split_p[X] is true if symbols of type X can be split by
670 mips_split_symbol(). */
671 bool mips_split_p
[NUM_SYMBOL_TYPES
];
673 /* mips_lo_relocs[X] is the relocation to use when a symbol of type X
674 appears in a LO_SUM. It can be null if such LO_SUMs aren't valid or
675 if they are matched by a special .md file pattern. */
676 static const char *mips_lo_relocs
[NUM_SYMBOL_TYPES
];
678 /* Likewise for HIGHs. */
679 static const char *mips_hi_relocs
[NUM_SYMBOL_TYPES
];
681 /* Map hard register number to register class */
682 const enum reg_class mips_regno_to_class
[] =
684 LEA_REGS
, LEA_REGS
, M16_NA_REGS
, V1_REG
,
685 M16_REGS
, M16_REGS
, M16_REGS
, M16_REGS
,
686 LEA_REGS
, LEA_REGS
, LEA_REGS
, LEA_REGS
,
687 LEA_REGS
, LEA_REGS
, LEA_REGS
, LEA_REGS
,
688 M16_NA_REGS
, M16_NA_REGS
, LEA_REGS
, LEA_REGS
,
689 LEA_REGS
, LEA_REGS
, LEA_REGS
, LEA_REGS
,
690 T_REG
, PIC_FN_ADDR_REG
, LEA_REGS
, LEA_REGS
,
691 LEA_REGS
, LEA_REGS
, LEA_REGS
, LEA_REGS
,
692 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
693 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
694 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
695 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
696 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
697 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
698 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
699 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
700 MD0_REG
, MD1_REG
, NO_REGS
, ST_REGS
,
701 ST_REGS
, ST_REGS
, ST_REGS
, ST_REGS
,
702 ST_REGS
, ST_REGS
, ST_REGS
, NO_REGS
,
703 NO_REGS
, ALL_REGS
, ALL_REGS
, NO_REGS
,
704 COP0_REGS
, COP0_REGS
, COP0_REGS
, COP0_REGS
,
705 COP0_REGS
, COP0_REGS
, COP0_REGS
, COP0_REGS
,
706 COP0_REGS
, COP0_REGS
, COP0_REGS
, COP0_REGS
,
707 COP0_REGS
, COP0_REGS
, COP0_REGS
, COP0_REGS
,
708 COP0_REGS
, COP0_REGS
, COP0_REGS
, COP0_REGS
,
709 COP0_REGS
, COP0_REGS
, COP0_REGS
, COP0_REGS
,
710 COP0_REGS
, COP0_REGS
, COP0_REGS
, COP0_REGS
,
711 COP0_REGS
, COP0_REGS
, COP0_REGS
, COP0_REGS
,
712 COP2_REGS
, COP2_REGS
, COP2_REGS
, COP2_REGS
,
713 COP2_REGS
, COP2_REGS
, COP2_REGS
, COP2_REGS
,
714 COP2_REGS
, COP2_REGS
, COP2_REGS
, COP2_REGS
,
715 COP2_REGS
, COP2_REGS
, COP2_REGS
, COP2_REGS
,
716 COP2_REGS
, COP2_REGS
, COP2_REGS
, COP2_REGS
,
717 COP2_REGS
, COP2_REGS
, COP2_REGS
, COP2_REGS
,
718 COP2_REGS
, COP2_REGS
, COP2_REGS
, COP2_REGS
,
719 COP2_REGS
, COP2_REGS
, COP2_REGS
, COP2_REGS
,
720 COP3_REGS
, COP3_REGS
, COP3_REGS
, COP3_REGS
,
721 COP3_REGS
, COP3_REGS
, COP3_REGS
, COP3_REGS
,
722 COP3_REGS
, COP3_REGS
, COP3_REGS
, COP3_REGS
,
723 COP3_REGS
, COP3_REGS
, COP3_REGS
, COP3_REGS
,
724 COP3_REGS
, COP3_REGS
, COP3_REGS
, COP3_REGS
,
725 COP3_REGS
, COP3_REGS
, COP3_REGS
, COP3_REGS
,
726 COP3_REGS
, COP3_REGS
, COP3_REGS
, COP3_REGS
,
727 COP3_REGS
, COP3_REGS
, COP3_REGS
, COP3_REGS
,
728 DSP_ACC_REGS
, DSP_ACC_REGS
, DSP_ACC_REGS
, DSP_ACC_REGS
,
729 DSP_ACC_REGS
, DSP_ACC_REGS
, ALL_REGS
, ALL_REGS
,
730 ALL_REGS
, ALL_REGS
, ALL_REGS
, ALL_REGS
733 /* Table of machine dependent attributes. */
734 const struct attribute_spec mips_attribute_table
[] =
736 { "long_call", 0, 0, false, true, true, NULL
},
737 { "far", 0, 0, false, true, true, NULL
},
738 { "near", 0, 0, false, true, true, NULL
},
739 /* Switch MIPS16 ASE on and off per-function. We would really like
740 to make these type attributes, but GCC doesn't provide the hooks
741 we need to support the right conversion rules. As declaration
742 attributes, they affect code generation but don't carry other
semantics.  */
744 { "mips16", 0, 0, true, false, false, NULL
},
745 { "nomips16", 0, 0, true, false, false, NULL
},
746 { NULL
, 0, 0, false, false, false, NULL
}
749 /* A table describing all the processors gcc knows about. Names are
750 matched in the order listed. The first mention of an ISA level is
751 taken as the canonical name for that ISA.
753 To ease comparison, please keep this table in the same order
754 as gas's mips_cpu_info_table[]. Please also make sure that
755 MIPS_ISA_LEVEL_SPEC and MIPS_ARCH_FLOAT_SPEC handle all -march
756 options correctly. */
757 const struct mips_cpu_info mips_cpu_info_table
[] = {
758 /* Entries for generic ISAs */
759 { "mips1", PROCESSOR_R3000
, 1 },
760 { "mips2", PROCESSOR_R6000
, 2 },
761 { "mips3", PROCESSOR_R4000
, 3 },
762 { "mips4", PROCESSOR_R8000
, 4 },
763 { "mips32", PROCESSOR_4KC
, 32 },
764 { "mips32r2", PROCESSOR_M4K
, 33 },
765 { "mips64", PROCESSOR_5KC
, 64 },
768 { "r3000", PROCESSOR_R3000
, 1 },
769 { "r2000", PROCESSOR_R3000
, 1 }, /* = r3000 */
770 { "r3900", PROCESSOR_R3900
, 1 },
773 { "r6000", PROCESSOR_R6000
, 2 },
776 { "r4000", PROCESSOR_R4000
, 3 },
777 { "vr4100", PROCESSOR_R4100
, 3 },
778 { "vr4111", PROCESSOR_R4111
, 3 },
779 { "vr4120", PROCESSOR_R4120
, 3 },
780 { "vr4130", PROCESSOR_R4130
, 3 },
781 { "vr4300", PROCESSOR_R4300
, 3 },
782 { "r4400", PROCESSOR_R4000
, 3 }, /* = r4000 */
783 { "r4600", PROCESSOR_R4600
, 3 },
784 { "orion", PROCESSOR_R4600
, 3 }, /* = r4600 */
785 { "r4650", PROCESSOR_R4650
, 3 },
788 { "r8000", PROCESSOR_R8000
, 4 },
789 { "vr5000", PROCESSOR_R5000
, 4 },
790 { "vr5400", PROCESSOR_R5400
, 4 },
791 { "vr5500", PROCESSOR_R5500
, 4 },
792 { "rm7000", PROCESSOR_R7000
, 4 },
793 { "rm9000", PROCESSOR_R9000
, 4 },
796 { "4kc", PROCESSOR_4KC
, 32 },
797 { "4km", PROCESSOR_4KC
, 32 }, /* = 4kc */
798 { "4kp", PROCESSOR_4KP
, 32 },
799 { "4ksc", PROCESSOR_4KC
, 32 },
801 /* MIPS32 Release 2 */
802 { "m4k", PROCESSOR_M4K
, 33 },
803 { "4kec", PROCESSOR_4KC
, 33 },
804 { "4kem", PROCESSOR_4KC
, 33 },
805 { "4kep", PROCESSOR_4KP
, 33 },
806 { "4ksd", PROCESSOR_4KC
, 33 },
808 { "24kc", PROCESSOR_24KC
, 33 },
809 { "24kf2_1", PROCESSOR_24KF2_1
, 33 },
810 { "24kf", PROCESSOR_24KF2_1
, 33 },
811 { "24kf1_1", PROCESSOR_24KF1_1
, 33 },
812 { "24kfx", PROCESSOR_24KF1_1
, 33 },
813 { "24kx", PROCESSOR_24KF1_1
, 33 },
815 { "24kec", PROCESSOR_24KC
, 33 }, /* 24K with DSP */
816 { "24kef2_1", PROCESSOR_24KF2_1
, 33 },
817 { "24kef", PROCESSOR_24KF2_1
, 33 },
818 { "24kef1_1", PROCESSOR_24KF1_1
, 33 },
819 { "24kefx", PROCESSOR_24KF1_1
, 33 },
820 { "24kex", PROCESSOR_24KF1_1
, 33 },
822 { "34kc", PROCESSOR_24KC
, 33 }, /* 34K with MT/DSP */
823 { "34kf2_1", PROCESSOR_24KF2_1
, 33 },
824 { "34kf", PROCESSOR_24KF2_1
, 33 },
825 { "34kf1_1", PROCESSOR_24KF1_1
, 33 },
826 { "34kfx", PROCESSOR_24KF1_1
, 33 },
827 { "34kx", PROCESSOR_24KF1_1
, 33 },
829 { "74kc", PROCESSOR_74KC
, 33 }, /* 74K with DSPr2 */
830 { "74kf2_1", PROCESSOR_74KF2_1
, 33 },
831 { "74kf", PROCESSOR_74KF2_1
, 33 },
832 { "74kf1_1", PROCESSOR_74KF1_1
, 33 },
833 { "74kfx", PROCESSOR_74KF1_1
, 33 },
834 { "74kx", PROCESSOR_74KF1_1
, 33 },
835 { "74kf3_2", PROCESSOR_74KF3_2
, 33 },
838 { "5kc", PROCESSOR_5KC
, 64 },
839 { "5kf", PROCESSOR_5KF
, 64 },
840 { "20kc", PROCESSOR_20KC
, 64 },
841 { "sb1", PROCESSOR_SB1
, 64 },
842 { "sb1a", PROCESSOR_SB1A
, 64 },
843 { "sr71000", PROCESSOR_SR71000
, 64 },
849 /* Default costs. If these are used for a processor we should look
850 up the actual costs. */
851 #define DEFAULT_COSTS COSTS_N_INSNS (6), /* fp_add */ \
852 COSTS_N_INSNS (7), /* fp_mult_sf */ \
853 COSTS_N_INSNS (8), /* fp_mult_df */ \
854 COSTS_N_INSNS (23), /* fp_div_sf */ \
855 COSTS_N_INSNS (36), /* fp_div_df */ \
856 COSTS_N_INSNS (10), /* int_mult_si */ \
857 COSTS_N_INSNS (10), /* int_mult_di */ \
858 COSTS_N_INSNS (69), /* int_div_si */ \
859 COSTS_N_INSNS (69), /* int_div_di */ \
860 2, /* branch_cost */ \
861 4 /* memory_latency */
863 /* Need to replace these with the costs of calling the appropriate
libgcc function instead.  */
865 #define SOFT_FP_COSTS COSTS_N_INSNS (256), /* fp_add */ \
866 COSTS_N_INSNS (256), /* fp_mult_sf */ \
867 COSTS_N_INSNS (256), /* fp_mult_df */ \
868 COSTS_N_INSNS (256), /* fp_div_sf */ \
869 COSTS_N_INSNS (256) /* fp_div_df */
871 static struct mips_rtx_cost_data
const mips_rtx_cost_optimize_size
=
873 COSTS_N_INSNS (1), /* fp_add */
874 COSTS_N_INSNS (1), /* fp_mult_sf */
875 COSTS_N_INSNS (1), /* fp_mult_df */
876 COSTS_N_INSNS (1), /* fp_div_sf */
877 COSTS_N_INSNS (1), /* fp_div_df */
878 COSTS_N_INSNS (1), /* int_mult_si */
879 COSTS_N_INSNS (1), /* int_mult_di */
880 COSTS_N_INSNS (1), /* int_div_si */
881 COSTS_N_INSNS (1), /* int_div_di */
883 4 /* memory_latency */
886 static struct mips_rtx_cost_data
const mips_rtx_cost_data
[PROCESSOR_MAX
] =
889 COSTS_N_INSNS (2), /* fp_add */
890 COSTS_N_INSNS (4), /* fp_mult_sf */
891 COSTS_N_INSNS (5), /* fp_mult_df */
892 COSTS_N_INSNS (12), /* fp_div_sf */
893 COSTS_N_INSNS (19), /* fp_div_df */
894 COSTS_N_INSNS (12), /* int_mult_si */
895 COSTS_N_INSNS (12), /* int_mult_di */
896 COSTS_N_INSNS (35), /* int_div_si */
897 COSTS_N_INSNS (35), /* int_div_di */
899 4 /* memory_latency */
904 COSTS_N_INSNS (6), /* int_mult_si */
905 COSTS_N_INSNS (6), /* int_mult_di */
906 COSTS_N_INSNS (36), /* int_div_si */
907 COSTS_N_INSNS (36), /* int_div_di */
909 4 /* memory_latency */
913 COSTS_N_INSNS (36), /* int_mult_si */
914 COSTS_N_INSNS (36), /* int_mult_di */
915 COSTS_N_INSNS (37), /* int_div_si */
916 COSTS_N_INSNS (37), /* int_div_di */
918 4 /* memory_latency */
922 COSTS_N_INSNS (4), /* int_mult_si */
923 COSTS_N_INSNS (11), /* int_mult_di */
924 COSTS_N_INSNS (36), /* int_div_si */
925 COSTS_N_INSNS (68), /* int_div_di */
927 4 /* memory_latency */
930 COSTS_N_INSNS (4), /* fp_add */
931 COSTS_N_INSNS (4), /* fp_mult_sf */
932 COSTS_N_INSNS (5), /* fp_mult_df */
933 COSTS_N_INSNS (17), /* fp_div_sf */
934 COSTS_N_INSNS (32), /* fp_div_df */
935 COSTS_N_INSNS (4), /* int_mult_si */
936 COSTS_N_INSNS (11), /* int_mult_di */
937 COSTS_N_INSNS (36), /* int_div_si */
938 COSTS_N_INSNS (68), /* int_div_di */
940 4 /* memory_latency */
943 COSTS_N_INSNS (4), /* fp_add */
944 COSTS_N_INSNS (4), /* fp_mult_sf */
945 COSTS_N_INSNS (5), /* fp_mult_df */
946 COSTS_N_INSNS (17), /* fp_div_sf */
947 COSTS_N_INSNS (32), /* fp_div_df */
948 COSTS_N_INSNS (4), /* int_mult_si */
949 COSTS_N_INSNS (7), /* int_mult_di */
950 COSTS_N_INSNS (42), /* int_div_si */
951 COSTS_N_INSNS (72), /* int_div_di */
953 4 /* memory_latency */
957 COSTS_N_INSNS (5), /* int_mult_si */
958 COSTS_N_INSNS (5), /* int_mult_di */
959 COSTS_N_INSNS (41), /* int_div_si */
960 COSTS_N_INSNS (41), /* int_div_di */
962 4 /* memory_latency */
965 COSTS_N_INSNS (8), /* fp_add */
966 COSTS_N_INSNS (8), /* fp_mult_sf */
967 COSTS_N_INSNS (10), /* fp_mult_df */
968 COSTS_N_INSNS (34), /* fp_div_sf */
969 COSTS_N_INSNS (64), /* fp_div_df */
970 COSTS_N_INSNS (5), /* int_mult_si */
971 COSTS_N_INSNS (5), /* int_mult_di */
972 COSTS_N_INSNS (41), /* int_div_si */
973 COSTS_N_INSNS (41), /* int_div_di */
975 4 /* memory_latency */
978 COSTS_N_INSNS (4), /* fp_add */
979 COSTS_N_INSNS (4), /* fp_mult_sf */
980 COSTS_N_INSNS (5), /* fp_mult_df */
981 COSTS_N_INSNS (17), /* fp_div_sf */
982 COSTS_N_INSNS (32), /* fp_div_df */
983 COSTS_N_INSNS (5), /* int_mult_si */
984 COSTS_N_INSNS (5), /* int_mult_di */
985 COSTS_N_INSNS (41), /* int_div_si */
986 COSTS_N_INSNS (41), /* int_div_di */
988 4 /* memory_latency */
992 COSTS_N_INSNS (5), /* int_mult_si */
993 COSTS_N_INSNS (5), /* int_mult_di */
994 COSTS_N_INSNS (41), /* int_div_si */
995 COSTS_N_INSNS (41), /* int_div_di */
997 4 /* memory_latency */
1000 COSTS_N_INSNS (8), /* fp_add */
1001 COSTS_N_INSNS (8), /* fp_mult_sf */
1002 COSTS_N_INSNS (10), /* fp_mult_df */
1003 COSTS_N_INSNS (34), /* fp_div_sf */
1004 COSTS_N_INSNS (64), /* fp_div_df */
1005 COSTS_N_INSNS (5), /* int_mult_si */
1006 COSTS_N_INSNS (5), /* int_mult_di */
1007 COSTS_N_INSNS (41), /* int_div_si */
1008 COSTS_N_INSNS (41), /* int_div_di */
1009 1, /* branch_cost */
1010 4 /* memory_latency */
1013 COSTS_N_INSNS (4), /* fp_add */
1014 COSTS_N_INSNS (4), /* fp_mult_sf */
1015 COSTS_N_INSNS (5), /* fp_mult_df */
1016 COSTS_N_INSNS (17), /* fp_div_sf */
1017 COSTS_N_INSNS (32), /* fp_div_df */
1018 COSTS_N_INSNS (5), /* int_mult_si */
1019 COSTS_N_INSNS (5), /* int_mult_di */
1020 COSTS_N_INSNS (41), /* int_div_si */
1021 COSTS_N_INSNS (41), /* int_div_di */
1022 1, /* branch_cost */
1023 4 /* memory_latency */
1026 COSTS_N_INSNS (6), /* fp_add */
1027 COSTS_N_INSNS (6), /* fp_mult_sf */
1028 COSTS_N_INSNS (7), /* fp_mult_df */
1029 COSTS_N_INSNS (25), /* fp_div_sf */
1030 COSTS_N_INSNS (48), /* fp_div_df */
1031 COSTS_N_INSNS (5), /* int_mult_si */
1032 COSTS_N_INSNS (5), /* int_mult_di */
1033 COSTS_N_INSNS (41), /* int_div_si */
1034 COSTS_N_INSNS (41), /* int_div_di */
1035 1, /* branch_cost */
1036 4 /* memory_latency */
1042 COSTS_N_INSNS (2), /* fp_add */
1043 COSTS_N_INSNS (4), /* fp_mult_sf */
1044 COSTS_N_INSNS (5), /* fp_mult_df */
1045 COSTS_N_INSNS (12), /* fp_div_sf */
1046 COSTS_N_INSNS (19), /* fp_div_df */
1047 COSTS_N_INSNS (2), /* int_mult_si */
1048 COSTS_N_INSNS (2), /* int_mult_di */
1049 COSTS_N_INSNS (35), /* int_div_si */
1050 COSTS_N_INSNS (35), /* int_div_di */
1051 1, /* branch_cost */
1052 4 /* memory_latency */
1055 COSTS_N_INSNS (3), /* fp_add */
1056 COSTS_N_INSNS (5), /* fp_mult_sf */
1057 COSTS_N_INSNS (6), /* fp_mult_df */
1058 COSTS_N_INSNS (15), /* fp_div_sf */
1059 COSTS_N_INSNS (16), /* fp_div_df */
1060 COSTS_N_INSNS (17), /* int_mult_si */
1061 COSTS_N_INSNS (17), /* int_mult_di */
1062 COSTS_N_INSNS (38), /* int_div_si */
1063 COSTS_N_INSNS (38), /* int_div_di */
1064 2, /* branch_cost */
1065 6 /* memory_latency */
1068 COSTS_N_INSNS (6), /* fp_add */
1069 COSTS_N_INSNS (7), /* fp_mult_sf */
1070 COSTS_N_INSNS (8), /* fp_mult_df */
1071 COSTS_N_INSNS (23), /* fp_div_sf */
1072 COSTS_N_INSNS (36), /* fp_div_df */
1073 COSTS_N_INSNS (10), /* int_mult_si */
1074 COSTS_N_INSNS (10), /* int_mult_di */
1075 COSTS_N_INSNS (69), /* int_div_si */
1076 COSTS_N_INSNS (69), /* int_div_di */
1077 2, /* branch_cost */
1078 6 /* memory_latency */
1090 /* The only costs that appear to be updated here are
1091 integer multiplication. */
1093 COSTS_N_INSNS (4), /* int_mult_si */
1094 COSTS_N_INSNS (6), /* int_mult_di */
1095 COSTS_N_INSNS (69), /* int_div_si */
1096 COSTS_N_INSNS (69), /* int_div_di */
1097 1, /* branch_cost */
1098 4 /* memory_latency */
1110 COSTS_N_INSNS (6), /* fp_add */
1111 COSTS_N_INSNS (4), /* fp_mult_sf */
1112 COSTS_N_INSNS (5), /* fp_mult_df */
1113 COSTS_N_INSNS (23), /* fp_div_sf */
1114 COSTS_N_INSNS (36), /* fp_div_df */
1115 COSTS_N_INSNS (5), /* int_mult_si */
1116 COSTS_N_INSNS (5), /* int_mult_di */
1117 COSTS_N_INSNS (36), /* int_div_si */
1118 COSTS_N_INSNS (36), /* int_div_di */
1119 1, /* branch_cost */
1120 4 /* memory_latency */
1123 COSTS_N_INSNS (6), /* fp_add */
1124 COSTS_N_INSNS (5), /* fp_mult_sf */
1125 COSTS_N_INSNS (6), /* fp_mult_df */
1126 COSTS_N_INSNS (30), /* fp_div_sf */
1127 COSTS_N_INSNS (59), /* fp_div_df */
1128 COSTS_N_INSNS (3), /* int_mult_si */
1129 COSTS_N_INSNS (4), /* int_mult_di */
1130 COSTS_N_INSNS (42), /* int_div_si */
1131 COSTS_N_INSNS (74), /* int_div_di */
1132 1, /* branch_cost */
1133 4 /* memory_latency */
1136 COSTS_N_INSNS (6), /* fp_add */
1137 COSTS_N_INSNS (5), /* fp_mult_sf */
1138 COSTS_N_INSNS (6), /* fp_mult_df */
1139 COSTS_N_INSNS (30), /* fp_div_sf */
1140 COSTS_N_INSNS (59), /* fp_div_df */
1141 COSTS_N_INSNS (5), /* int_mult_si */
1142 COSTS_N_INSNS (9), /* int_mult_di */
1143 COSTS_N_INSNS (42), /* int_div_si */
1144 COSTS_N_INSNS (74), /* int_div_di */
1145 1, /* branch_cost */
1146 4 /* memory_latency */
1149 /* The only costs that are changed here are
1150 integer multiplication. */
1151 COSTS_N_INSNS (6), /* fp_add */
1152 COSTS_N_INSNS (7), /* fp_mult_sf */
1153 COSTS_N_INSNS (8), /* fp_mult_df */
1154 COSTS_N_INSNS (23), /* fp_div_sf */
1155 COSTS_N_INSNS (36), /* fp_div_df */
1156 COSTS_N_INSNS (5), /* int_mult_si */
1157 COSTS_N_INSNS (9), /* int_mult_di */
1158 COSTS_N_INSNS (69), /* int_div_si */
1159 COSTS_N_INSNS (69), /* int_div_di */
1160 1, /* branch_cost */
1161 4 /* memory_latency */
1167 /* The only costs that are changed here are
1168 integer multiplication. */
1169 COSTS_N_INSNS (6), /* fp_add */
1170 COSTS_N_INSNS (7), /* fp_mult_sf */
1171 COSTS_N_INSNS (8), /* fp_mult_df */
1172 COSTS_N_INSNS (23), /* fp_div_sf */
1173 COSTS_N_INSNS (36), /* fp_div_df */
1174 COSTS_N_INSNS (3), /* int_mult_si */
1175 COSTS_N_INSNS (8), /* int_mult_di */
1176 COSTS_N_INSNS (69), /* int_div_si */
1177 COSTS_N_INSNS (69), /* int_div_di */
1178 1, /* branch_cost */
1179 4 /* memory_latency */
1182 /* These costs are the same as the SB-1A below. */
1183 COSTS_N_INSNS (4), /* fp_add */
1184 COSTS_N_INSNS (4), /* fp_mult_sf */
1185 COSTS_N_INSNS (4), /* fp_mult_df */
1186 COSTS_N_INSNS (24), /* fp_div_sf */
1187 COSTS_N_INSNS (32), /* fp_div_df */
1188 COSTS_N_INSNS (3), /* int_mult_si */
1189 COSTS_N_INSNS (4), /* int_mult_di */
1190 COSTS_N_INSNS (36), /* int_div_si */
1191 COSTS_N_INSNS (68), /* int_div_di */
1192 1, /* branch_cost */
1193 4 /* memory_latency */
1196 /* These costs are the same as the SB-1 above. */
1197 COSTS_N_INSNS (4), /* fp_add */
1198 COSTS_N_INSNS (4), /* fp_mult_sf */
1199 COSTS_N_INSNS (4), /* fp_mult_df */
1200 COSTS_N_INSNS (24), /* fp_div_sf */
1201 COSTS_N_INSNS (32), /* fp_div_df */
1202 COSTS_N_INSNS (3), /* int_mult_si */
1203 COSTS_N_INSNS (4), /* int_mult_di */
1204 COSTS_N_INSNS (36), /* int_div_si */
1205 COSTS_N_INSNS (68), /* int_div_di */
1206 1, /* branch_cost */
1207 4 /* memory_latency */
/* If a MIPS16e SAVE or RESTORE instruction saves or restores register
   mips16e_s2_s8_regs[X], it must also save the registers in indexes
   X + 1 onwards.  Likewise mips16e_a0_a3_regs.  */
static const unsigned char mips16e_s2_s8_regs[] = {
  30, 23, 22, 21, 20, 19, 18
};
/* NOTE(review): the a0-a3 list was lost in this copy; reconstructed as the
   ascending argument registers $4-$7 — confirm against upstream mips.c.  */
static const unsigned char mips16e_a0_a3_regs[] = {
  4, 5, 6, 7
};

/* A list of the registers that can be saved by the MIPS16e SAVE instruction,
   ordered from the uppermost in memory to the lowest in memory.  */
static const unsigned char mips16e_save_restore_regs[] = {
  31, 30, 23, 22, 21, 20, 19, 18, 17, 16, 7, 6, 5, 4
};
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.half\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.dword\t"

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE mips_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE mips_output_function_epilogue
#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION mips_select_rtx_section
#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION mips_function_rodata_section

#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT mips_sched_init
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER mips_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 mips_sched_reorder
#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE mips_variable_issue
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST mips_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE mips_issue_rate
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD \
  mips_multipass_dfa_lookahead

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS		\
  (TARGET_DEFAULT				\
   | TARGET_CPU_DEFAULT				\
   | TARGET_ENDIAN_DEFAULT			\
   | TARGET_FP_EXCEPTIONS_DEFAULT		\
   | MASK_CHECK_ZERO_DIV			\
   | MASK_FUSED_MADD)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION mips_handle_option

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL mips_function_ok_for_sibcall

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES mips_insert_attributes
#undef TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES mips_merge_decl_attributes
#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION mips_set_current_function

#undef TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE mips_valid_pointer_mode
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS mips_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST mips_address_cost

#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P mips_in_small_data_p

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG mips_reorg

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START mips_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS mips_init_libfuncs

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST mips_build_builtin_va_list
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR mips_gimplify_va_arg_expr

#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY mips_return_in_memory
#undef TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB mips_return_in_msb

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK mips_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS mips_setup_incoming_varargs
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING mips_strict_argument_naming
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mips_pass_by_reference
#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES mips_callee_copies
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES mips_arg_partial_bytes

#undef TARGET_MODE_REP_EXTENDED
#define TARGET_MODE_REP_EXTENDED mips_mode_rep_extended

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P mips_vector_mode_supported_p

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P mips_scalar_mode_supported_p

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS mips_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN mips_expand_builtin

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM mips_cannot_force_const_mem

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO mips_encode_section_info

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE mips_attribute_table
/* All our function attributes are related to how out-of-line copies should
   be compiled or called.  They don't in themselves prevent inlining.  */
#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P hook_bool_const_tree_true

#undef TARGET_EXTRA_LIVE_ON_ENTRY
#define TARGET_EXTRA_LIVE_ON_ENTRY mips_extra_live_on_entry

#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -32768
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 32767
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P mips_use_blocks_for_constant_p
#undef TARGET_USE_ANCHORS_FOR_SYMBOL_P
#define TARGET_USE_ANCHORS_FOR_SYMBOL_P mips_use_anchors_for_symbol_p

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES mips_comp_type_attributes

#ifdef HAVE_AS_DTPRELWORD
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL mips_output_dwarf_dtprel
#endif
1390 struct gcc_target targetm
= TARGET_INITIALIZER
;
1393 /* Predicates to test for presence of "near" and "far"/"long_call"
1394 attributes on the given TYPE. */
1397 mips_near_type_p (const_tree type
)
1399 return lookup_attribute ("near", TYPE_ATTRIBUTES (type
)) != NULL
;
1403 mips_far_type_p (const_tree type
)
1405 return (lookup_attribute ("long_call", TYPE_ATTRIBUTES (type
)) != NULL
1406 || lookup_attribute ("far", TYPE_ATTRIBUTES (type
)) != NULL
);
1409 /* Similar predicates for "mips16"/"nomips16" attributes. */
1412 mips_mips16_decl_p (const_tree decl
)
1414 return lookup_attribute ("mips16", DECL_ATTRIBUTES (decl
)) != NULL
;
1418 mips_nomips16_decl_p (const_tree decl
)
1420 return lookup_attribute ("nomips16", DECL_ATTRIBUTES (decl
)) != NULL
;
1423 /* Return 0 if the attributes for two types are incompatible, 1 if they
1424 are compatible, and 2 if they are nearly compatible (which causes a
1425 warning to be generated). */
1428 mips_comp_type_attributes (const_tree type1
, const_tree type2
)
1430 /* Check for mismatch of non-default calling convention. */
1431 if (TREE_CODE (type1
) != FUNCTION_TYPE
)
1434 /* Disallow mixed near/far attributes. */
1435 if (mips_far_type_p (type1
) && mips_near_type_p (type2
))
1437 if (mips_near_type_p (type1
) && mips_far_type_p (type2
))
1443 /* If X is a PLUS of a CONST_INT, return the two terms in *BASE_PTR
1444 and *OFFSET_PTR. Return X in *BASE_PTR and 0 in *OFFSET_PTR otherwise. */
1447 mips_split_plus (rtx x
, rtx
*base_ptr
, HOST_WIDE_INT
*offset_ptr
)
1449 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1451 *base_ptr
= XEXP (x
, 0);
1452 *offset_ptr
= INTVAL (XEXP (x
, 1));
1461 /* Return true if SYMBOL_REF X is associated with a global symbol
1462 (in the STB_GLOBAL sense). */
1465 mips_global_symbol_p (const_rtx x
)
1467 const_tree
const decl
= SYMBOL_REF_DECL (x
);
1470 return !SYMBOL_REF_LOCAL_P (x
);
1472 /* Weakref symbols are not TREE_PUBLIC, but their targets are global
1473 or weak symbols. Relocations in the object file will be against
1474 the target symbol, so it's that symbol's binding that matters here. */
1475 return DECL_P (decl
) && (TREE_PUBLIC (decl
) || DECL_WEAK (decl
));
1478 /* Return true if SYMBOL_REF X binds locally. */
1481 mips_symbol_binds_local_p (const_rtx x
)
1483 return (SYMBOL_REF_DECL (x
)
1484 ? targetm
.binds_local_p (SYMBOL_REF_DECL (x
))
1485 : SYMBOL_REF_LOCAL_P (x
));
1488 /* Return true if rtx constants of mode MODE should be put into a small
1492 mips_rtx_constant_in_small_data_p (enum machine_mode mode
)
1494 return (!TARGET_EMBEDDED_DATA
1495 && TARGET_LOCAL_SDATA
1496 && GET_MODE_SIZE (mode
) <= mips_section_threshold
);
1499 /* Return the method that should be used to access SYMBOL_REF or
1500 LABEL_REF X in context CONTEXT. */
1502 static enum mips_symbol_type
1503 mips_classify_symbol (const_rtx x
, enum mips_symbol_context context
)
1506 return SYMBOL_GOT_DISP
;
1508 if (GET_CODE (x
) == LABEL_REF
)
1510 /* LABEL_REFs are used for jump tables as well as text labels.
1511 Only return SYMBOL_PC_RELATIVE if we know the label is in
1512 the text section. */
1513 if (TARGET_MIPS16_SHORT_JUMP_TABLES
)
1514 return SYMBOL_PC_RELATIVE
;
1515 if (TARGET_ABICALLS
&& !TARGET_ABSOLUTE_ABICALLS
)
1516 return SYMBOL_GOT_PAGE_OFST
;
1517 return SYMBOL_ABSOLUTE
;
1520 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
1522 if (SYMBOL_REF_TLS_MODEL (x
))
1525 if (CONSTANT_POOL_ADDRESS_P (x
))
1527 if (TARGET_MIPS16_TEXT_LOADS
)
1528 return SYMBOL_PC_RELATIVE
;
1530 if (TARGET_MIPS16_PCREL_LOADS
&& context
== SYMBOL_CONTEXT_MEM
)
1531 return SYMBOL_PC_RELATIVE
;
1533 if (mips_rtx_constant_in_small_data_p (get_pool_mode (x
)))
1534 return SYMBOL_GP_RELATIVE
;
1537 /* Do not use small-data accesses for weak symbols; they may end up
1540 && SYMBOL_REF_SMALL_P (x
)
1541 && !SYMBOL_REF_WEAK (x
))
1542 return SYMBOL_GP_RELATIVE
;
1544 /* Don't use GOT accesses for locally-binding symbols when -mno-shared
1547 && !(TARGET_ABSOLUTE_ABICALLS
&& mips_symbol_binds_local_p (x
)))
1549 /* There are three cases to consider:
1551 - o32 PIC (either with or without explicit relocs)
1552 - n32/n64 PIC without explicit relocs
1553 - n32/n64 PIC with explicit relocs
1555 In the first case, both local and global accesses will use an
1556 R_MIPS_GOT16 relocation. We must correctly predict which of
1557 the two semantics (local or global) the assembler and linker
1558 will apply. The choice depends on the symbol's binding rather
1559 than its visibility.
1561 In the second case, the assembler will not use R_MIPS_GOT16
1562 relocations, but it chooses between local and global accesses
1563 in the same way as for o32 PIC.
1565 In the third case we have more freedom since both forms of
1566 access will work for any kind of symbol. However, there seems
1567 little point in doing things differently. */
1568 if (mips_global_symbol_p (x
))
1569 return SYMBOL_GOT_DISP
;
1571 return SYMBOL_GOT_PAGE_OFST
;
1574 if (TARGET_MIPS16_PCREL_LOADS
&& context
!= SYMBOL_CONTEXT_CALL
)
1575 return SYMBOL_FORCE_TO_MEM
;
1576 return SYMBOL_ABSOLUTE
;
1579 /* Classify symbolic expression X, given that it appears in context
1582 static enum mips_symbol_type
1583 mips_classify_symbolic_expression (rtx x
, enum mips_symbol_context context
)
1587 split_const (x
, &x
, &offset
);
1588 if (UNSPEC_ADDRESS_P (x
))
1589 return UNSPEC_ADDRESS_TYPE (x
);
1591 return mips_classify_symbol (x
, context
);
1594 /* Return true if OFFSET is within the range [0, ALIGN), where ALIGN
1595 is the alignment (in bytes) of SYMBOL_REF X. */
1598 mips_offset_within_alignment_p (rtx x
, HOST_WIDE_INT offset
)
1600 /* If for some reason we can't get the alignment for the
1601 symbol, initializing this to one means we will only accept
1603 HOST_WIDE_INT align
= 1;
1606 /* Get the alignment of the symbol we're referring to. */
1607 t
= SYMBOL_REF_DECL (x
);
1609 align
= DECL_ALIGN_UNIT (t
);
1611 return offset
>= 0 && offset
< align
;
1614 /* Return true if X is a symbolic constant that can be used in context
1615 CONTEXT. If it is, store the type of the symbol in *SYMBOL_TYPE. */
1618 mips_symbolic_constant_p (rtx x
, enum mips_symbol_context context
,
1619 enum mips_symbol_type
*symbol_type
)
1623 split_const (x
, &x
, &offset
);
1624 if (UNSPEC_ADDRESS_P (x
))
1626 *symbol_type
= UNSPEC_ADDRESS_TYPE (x
);
1627 x
= UNSPEC_ADDRESS (x
);
1629 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
)
1631 *symbol_type
= mips_classify_symbol (x
, context
);
1632 if (*symbol_type
== SYMBOL_TLS
)
1638 if (offset
== const0_rtx
)
1641 /* Check whether a nonzero offset is valid for the underlying
1643 switch (*symbol_type
)
1645 case SYMBOL_ABSOLUTE
:
1646 case SYMBOL_FORCE_TO_MEM
:
1647 case SYMBOL_32_HIGH
:
1648 case SYMBOL_64_HIGH
:
1651 /* If the target has 64-bit pointers and the object file only
1652 supports 32-bit symbols, the values of those symbols will be
1653 sign-extended. In this case we can't allow an arbitrary offset
1654 in case the 32-bit value X + OFFSET has a different sign from X. */
1655 if (Pmode
== DImode
&& !ABI_HAS_64BIT_SYMBOLS
)
1656 return offset_within_block_p (x
, INTVAL (offset
));
1658 /* In other cases the relocations can handle any offset. */
1661 case SYMBOL_PC_RELATIVE
:
1662 /* Allow constant pool references to be converted to LABEL+CONSTANT.
1663 In this case, we no longer have access to the underlying constant,
1664 but the original symbol-based access was known to be valid. */
1665 if (GET_CODE (x
) == LABEL_REF
)
1670 case SYMBOL_GP_RELATIVE
:
1671 /* Make sure that the offset refers to something within the
1672 same object block. This should guarantee that the final
1673 PC- or GP-relative offset is within the 16-bit limit. */
1674 return offset_within_block_p (x
, INTVAL (offset
));
1676 case SYMBOL_GOT_PAGE_OFST
:
1677 case SYMBOL_GOTOFF_PAGE
:
1678 /* If the symbol is global, the GOT entry will contain the symbol's
1679 address, and we will apply a 16-bit offset after loading it.
1680 If the symbol is local, the linker should provide enough local
1681 GOT entries for a 16-bit offset, but larger offsets may lead
1683 return SMALL_INT (offset
);
1687 /* There is no carry between the HI and LO REL relocations, so the
1688 offset is only valid if we know it won't lead to such a carry. */
1689 return mips_offset_within_alignment_p (x
, INTVAL (offset
));
1691 case SYMBOL_GOT_DISP
:
1692 case SYMBOL_GOTOFF_DISP
:
1693 case SYMBOL_GOTOFF_CALL
:
1694 case SYMBOL_GOTOFF_LOADGP
:
1697 case SYMBOL_GOTTPREL
:
1706 /* This function is used to implement REG_MODE_OK_FOR_BASE_P. */
1709 mips_regno_mode_ok_for_base_p (int regno
, enum machine_mode mode
, int strict
)
1711 if (!HARD_REGISTER_NUM_P (regno
))
1715 regno
= reg_renumber
[regno
];
1718 /* These fake registers will be eliminated to either the stack or
1719 hard frame pointer, both of which are usually valid base registers.
1720 Reload deals with the cases where the eliminated form isn't valid. */
1721 if (regno
== ARG_POINTER_REGNUM
|| regno
== FRAME_POINTER_REGNUM
)
1724 /* In mips16 mode, the stack pointer can only address word and doubleword
1725 values, nothing smaller. There are two problems here:
1727 (a) Instantiating virtual registers can introduce new uses of the
1728 stack pointer. If these virtual registers are valid addresses,
1729 the stack pointer should be too.
1731 (b) Most uses of the stack pointer are not made explicit until
1732 FRAME_POINTER_REGNUM and ARG_POINTER_REGNUM have been eliminated.
1733 We don't know until that stage whether we'll be eliminating to the
1734 stack pointer (which needs the restriction) or the hard frame
1735 pointer (which doesn't).
1737 All in all, it seems more consistent to only enforce this restriction
1738 during and after reload. */
1739 if (TARGET_MIPS16
&& regno
== STACK_POINTER_REGNUM
)
1740 return !strict
|| GET_MODE_SIZE (mode
) == 4 || GET_MODE_SIZE (mode
) == 8;
1742 return TARGET_MIPS16
? M16_REG_P (regno
) : GP_REG_P (regno
);
1746 /* Return true if X is a valid base register for the given mode.
1747 Allow only hard registers if STRICT. */
1750 mips_valid_base_register_p (rtx x
, enum machine_mode mode
, int strict
)
1752 if (!strict
&& GET_CODE (x
) == SUBREG
)
1756 && mips_regno_mode_ok_for_base_p (REGNO (x
), mode
, strict
));
1760 /* Return true if X is a valid address for machine mode MODE. If it is,
1761 fill in INFO appropriately. STRICT is true if we should only accept
1762 hard base registers. */
1765 mips_classify_address (struct mips_address_info
*info
, rtx x
,
1766 enum machine_mode mode
, int strict
)
1768 switch (GET_CODE (x
))
1772 info
->type
= ADDRESS_REG
;
1774 info
->offset
= const0_rtx
;
1775 return mips_valid_base_register_p (info
->reg
, mode
, strict
);
1778 info
->type
= ADDRESS_REG
;
1779 info
->reg
= XEXP (x
, 0);
1780 info
->offset
= XEXP (x
, 1);
1781 return (mips_valid_base_register_p (info
->reg
, mode
, strict
)
1782 && const_arith_operand (info
->offset
, VOIDmode
));
1785 info
->type
= ADDRESS_LO_SUM
;
1786 info
->reg
= XEXP (x
, 0);
1787 info
->offset
= XEXP (x
, 1);
1788 /* We have to trust the creator of the LO_SUM to do something vaguely
1789 sane. Target-independent code that creates a LO_SUM should also
1790 create and verify the matching HIGH. Target-independent code that
1791 adds an offset to a LO_SUM must prove that the offset will not
1792 induce a carry. Failure to do either of these things would be
1793 a bug, and we are not required to check for it here. The MIPS
1794 backend itself should only create LO_SUMs for valid symbolic
1795 constants, with the high part being either a HIGH or a copy
1798 = mips_classify_symbolic_expression (info
->offset
, SYMBOL_CONTEXT_MEM
);
1799 return (mips_valid_base_register_p (info
->reg
, mode
, strict
)
1800 && mips_symbol_insns (info
->symbol_type
, mode
) > 0
1801 && mips_lo_relocs
[info
->symbol_type
] != 0);
1804 /* Small-integer addresses don't occur very often, but they
1805 are legitimate if $0 is a valid base register. */
1806 info
->type
= ADDRESS_CONST_INT
;
1807 return !TARGET_MIPS16
&& SMALL_INT (x
);
1812 info
->type
= ADDRESS_SYMBOLIC
;
1813 return (mips_symbolic_constant_p (x
, SYMBOL_CONTEXT_MEM
,
1815 && mips_symbol_insns (info
->symbol_type
, mode
) > 0
1816 && !mips_split_p
[info
->symbol_type
]);
1823 /* Return true if X is a thread-local symbol. */
1826 mips_tls_operand_p (rtx x
)
1828 return GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_TLS_MODEL (x
) != 0;
1831 /* Return true if X can not be forced into a constant pool. */
1834 mips_tls_symbol_ref_1 (rtx
*x
, void *data ATTRIBUTE_UNUSED
)
1836 return mips_tls_operand_p (*x
);
1839 /* Return true if X can not be forced into a constant pool. */
1842 mips_cannot_force_const_mem (rtx x
)
1848 /* As an optimization, reject constants that mips_legitimize_move
1851 Suppose we have a multi-instruction sequence that loads constant C
1852 into register R. If R does not get allocated a hard register, and
1853 R is used in an operand that allows both registers and memory
1854 references, reload will consider forcing C into memory and using
1855 one of the instruction's memory alternatives. Returning false
1856 here will force it to use an input reload instead. */
1857 if (GET_CODE (x
) == CONST_INT
)
1860 split_const (x
, &base
, &offset
);
1861 if (symbolic_operand (base
, VOIDmode
) && SMALL_INT (offset
))
1865 if (TARGET_HAVE_TLS
&& for_each_rtx (&x
, &mips_tls_symbol_ref_1
, 0))
1871 /* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. We can't use blocks for
1872 constants when we're using a per-function constant pool. */
1875 mips_use_blocks_for_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
,
1876 const_rtx x ATTRIBUTE_UNUSED
)
1878 return !TARGET_MIPS16_PCREL_LOADS
;
1881 /* Like mips_symbol_insns, but treat extended MIPS16 instructions as a
1882 single instruction. We rely on the fact that, in the worst case,
1883 all instructions involved in a MIPS16 address calculation are usually
1887 mips_symbol_insns_1 (enum mips_symbol_type type
, enum machine_mode mode
)
1891 case SYMBOL_ABSOLUTE
:
1892 /* When using 64-bit symbols, we need 5 preparatory instructions,
1895 lui $at,%highest(symbol)
1896 daddiu $at,$at,%higher(symbol)
1898 daddiu $at,$at,%hi(symbol)
1901 The final address is then $at + %lo(symbol). With 32-bit
1902 symbols we just need a preparatory lui for normal mode and
1903 a preparatory "li; sll" for MIPS16. */
1904 return ABI_HAS_64BIT_SYMBOLS
? 6 : TARGET_MIPS16
? 3 : 2;
1906 case SYMBOL_GP_RELATIVE
:
1907 /* Treat GP-relative accesses as taking a single instruction on
1908 MIPS16 too; the copy of $gp can often be shared. */
1911 case SYMBOL_PC_RELATIVE
:
1912 /* PC-relative constants can be only be used with addiupc,
1914 if (mode
== MAX_MACHINE_MODE
1915 || GET_MODE_SIZE (mode
) == 4
1916 || GET_MODE_SIZE (mode
) == 8)
1919 /* The constant must be loaded using addiupc first. */
1922 case SYMBOL_FORCE_TO_MEM
:
1923 /* LEAs will be converted into constant-pool references by
1925 if (mode
== MAX_MACHINE_MODE
)
1928 /* The constant must be loaded from the constant pool. */
1931 case SYMBOL_GOT_DISP
:
1932 /* The constant will have to be loaded from the GOT before it
1933 is used in an address. */
1934 if (mode
!= MAX_MACHINE_MODE
)
1939 case SYMBOL_GOT_PAGE_OFST
:
1940 /* Unless -funit-at-a-time is in effect, we can't be sure whether
1941 the local/global classification is accurate. See override_options
1944 The worst cases are:
1946 (1) For local symbols when generating o32 or o64 code. The assembler
1952 ...and the final address will be $at + %lo(symbol).
1954 (2) For global symbols when -mxgot. The assembler will use:
1956 lui $at,%got_hi(symbol)
1959 ...and the final address will be $at + %got_lo(symbol). */
1962 case SYMBOL_GOTOFF_PAGE
:
1963 case SYMBOL_GOTOFF_DISP
:
1964 case SYMBOL_GOTOFF_CALL
:
1965 case SYMBOL_GOTOFF_LOADGP
:
1966 case SYMBOL_32_HIGH
:
1967 case SYMBOL_64_HIGH
:
1973 case SYMBOL_GOTTPREL
:
1976 /* A 16-bit constant formed by a single relocation, or a 32-bit
1977 constant formed from a high 16-bit relocation and a low 16-bit
1978 relocation. Use mips_split_p to determine which. */
1979 return !mips_split_p
[type
] ? 1 : TARGET_MIPS16
? 3 : 2;
1982 /* We don't treat a bare TLS symbol as a constant. */
1988 /* If MODE is MAX_MACHINE_MODE, return the number of instructions needed
1989 to load symbols of type TYPE into a register. Return 0 if the given
1990 type of symbol cannot be used as an immediate operand.
1992 Otherwise, return the number of instructions needed to load or store
1993 values of mode MODE to or from addresses of type TYPE. Return 0 if
1994 the given type of symbol is not valid in addresses.
1996 In both cases, treat extended MIPS16 instructions as two instructions. */
1999 mips_symbol_insns (enum mips_symbol_type type
, enum machine_mode mode
)
2001 return mips_symbol_insns_1 (type
, mode
) * (TARGET_MIPS16
? 2 : 1);
2004 /* Return true if X is a legitimate $sp-based address for mode MDOE. */
2007 mips_stack_address_p (rtx x
, enum machine_mode mode
)
2009 struct mips_address_info addr
;
2011 return (mips_classify_address (&addr
, x
, mode
, false)
2012 && addr
.type
== ADDRESS_REG
2013 && addr
.reg
== stack_pointer_rtx
);
2016 /* Return true if a value at OFFSET bytes from BASE can be accessed
2017 using an unextended mips16 instruction. MODE is the mode of the
2020 Usually the offset in an unextended instruction is a 5-bit field.
2021 The offset is unsigned and shifted left once for HIs, twice
2022 for SIs, and so on. An exception is SImode accesses off the
2023 stack pointer, which have an 8-bit immediate field. */
2026 mips16_unextended_reference_p (enum machine_mode mode
, rtx base
, rtx offset
)
2029 && GET_CODE (offset
) == CONST_INT
2030 && INTVAL (offset
) >= 0
2031 && (INTVAL (offset
) & (GET_MODE_SIZE (mode
) - 1)) == 0)
2033 if (GET_MODE_SIZE (mode
) == 4 && base
== stack_pointer_rtx
)
2034 return INTVAL (offset
) < 256 * GET_MODE_SIZE (mode
);
2035 return INTVAL (offset
) < 32 * GET_MODE_SIZE (mode
);
2041 /* Return the number of instructions needed to load or store a value
2042 of mode MODE at X. Return 0 if X isn't valid for MODE. Assume that
2043 multiword moves may need to be split into word moves if MIGHT_SPLIT_P,
2044 otherwise assume that a single load or store is enough.
2046 For mips16 code, count extended instructions as two instructions. */
2049 mips_address_insns (rtx x
, enum machine_mode mode
, bool might_split_p
)
2051 struct mips_address_info addr
;
2054 /* BLKmode is used for single unaligned loads and stores and should
2055 not count as a multiword mode. (GET_MODE_SIZE (BLKmode) is pretty
2056 meaningless, so we have to single it out as a special case one way
2058 if (mode
!= BLKmode
&& might_split_p
)
2059 factor
= (GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
2063 if (mips_classify_address (&addr
, x
, mode
, false))
2068 && !mips16_unextended_reference_p (mode
, addr
.reg
, addr
.offset
))
2072 case ADDRESS_LO_SUM
:
2073 return (TARGET_MIPS16
? factor
* 2 : factor
);
2075 case ADDRESS_CONST_INT
:
2078 case ADDRESS_SYMBOLIC
:
2079 return factor
* mips_symbol_insns (addr
.symbol_type
, mode
);
2085 /* Likewise for constant X. */
2088 mips_const_insns (rtx x
)
2090 struct mips_integer_op codes
[MIPS_MAX_INTEGER_OPS
];
2091 enum mips_symbol_type symbol_type
;
2094 switch (GET_CODE (x
))
2097 if (!mips_symbolic_constant_p (XEXP (x
, 0), SYMBOL_CONTEXT_LEA
,
2099 || !mips_split_p
[symbol_type
])
2102 /* This is simply an lui for normal mode. It is an extended
2103 "li" followed by an extended "sll" for MIPS16. */
2104 return TARGET_MIPS16
? 4 : 1;
2108 /* Unsigned 8-bit constants can be loaded using an unextended
2109 LI instruction. Unsigned 16-bit constants can be loaded
2110 using an extended LI. Negative constants must be loaded
2111 using LI and then negated. */
2112 return (INTVAL (x
) >= 0 && INTVAL (x
) < 256 ? 1
2113 : SMALL_OPERAND_UNSIGNED (INTVAL (x
)) ? 2
2114 : INTVAL (x
) > -256 && INTVAL (x
) < 0 ? 2
2115 : SMALL_OPERAND_UNSIGNED (-INTVAL (x
)) ? 3
2118 return mips_build_integer (codes
, INTVAL (x
));
2122 return (!TARGET_MIPS16
&& x
== CONST0_RTX (GET_MODE (x
)) ? 1 : 0);
2128 /* See if we can refer to X directly. */
2129 if (mips_symbolic_constant_p (x
, SYMBOL_CONTEXT_LEA
, &symbol_type
))
2130 return mips_symbol_insns (symbol_type
, MAX_MACHINE_MODE
);
2132 /* Otherwise try splitting the constant into a base and offset.
2133 16-bit offsets can be added using an extra addiu. Larger offsets
2134 must be calculated separately and then added to the base. */
2135 split_const (x
, &x
, &offset
);
2138 int n
= mips_const_insns (x
);
2141 if (SMALL_INT (offset
))
2144 return n
+ 1 + mips_build_integer (codes
, INTVAL (offset
));
2151 return mips_symbol_insns (mips_classify_symbol (x
, SYMBOL_CONTEXT_LEA
),
2160 /* Return the number of instructions needed to implement INSN,
2161 given that it loads from or stores to MEM. Count extended
2162 mips16 instructions as two instructions. */
2165 mips_load_store_insns (rtx mem
, rtx insn
)
2167 enum machine_mode mode
;
2171 gcc_assert (MEM_P (mem
));
2172 mode
= GET_MODE (mem
);
2174 /* Try to prove that INSN does not need to be split. */
2175 might_split_p
= true;
2176 if (GET_MODE_BITSIZE (mode
) == 64)
2178 set
= single_set (insn
);
2179 if (set
&& !mips_split_64bit_move_p (SET_DEST (set
), SET_SRC (set
)))
2180 might_split_p
= false;
2183 return mips_address_insns (XEXP (mem
, 0), mode
, might_split_p
);
2187 /* Return the number of instructions needed for an integer division. */
2190 mips_idiv_insns (void)
2195 if (TARGET_CHECK_ZERO_DIV
)
2197 if (GENERATE_DIVIDE_TRAPS
)
2203 if (TARGET_FIX_R4000
|| TARGET_FIX_R4400
)
2208 /* This function is used to implement GO_IF_LEGITIMATE_ADDRESS. It
2209 returns a nonzero value if X is a legitimate address for a memory
2210 operand of the indicated MODE. STRICT is nonzero if this function
2211 is called during reload. */
2214 mips_legitimate_address_p (enum machine_mode mode
, rtx x
, int strict
)
2216 struct mips_address_info addr
;
2218 return mips_classify_address (&addr
, x
, mode
, strict
);
2221 /* Emit a move from SRC to DEST. Assume that the move expanders can
2222 handle all moves if !can_create_pseudo_p (). The distinction is
2223 important because, unlike emit_move_insn, the move expanders know
2224 how to force Pmode objects into the constant pool even when the
2225 constant pool address is not itself legitimate. */
2228 mips_emit_move (rtx dest
, rtx src
)
2230 return (can_create_pseudo_p ()
2231 ? emit_move_insn (dest
, src
)
2232 : emit_move_insn_1 (dest
, src
));
2235 /* Copy VALUE to a register and return that register. If new psuedos
2236 are allowed, copy it into a new register, otherwise use DEST. */
2239 mips_force_temporary (rtx dest
, rtx value
)
2241 if (can_create_pseudo_p ())
2242 return force_reg (Pmode
, value
);
2245 mips_emit_move (copy_rtx (dest
), value
);
2251 /* If MODE is MAX_MACHINE_MODE, ADDR appears as a move operand, otherwise
2252 it appears in a MEM of that mode. Return true if ADDR is a legitimate
2253 constant in that context and can be split into a high part and a LO_SUM.
2254 If so, and if LO_SUM_OUT is nonnull, emit the high part and return
2255 the LO_SUM in *LO_SUM_OUT. Leave *LO_SUM_OUT unchanged otherwise.
2257 TEMP is as for mips_force_temporary and is used to load the high
2258 part into a register. */
2261 mips_split_symbol (rtx temp
, rtx addr
, enum machine_mode mode
, rtx
*lo_sum_out
)
2263 enum mips_symbol_context context
;
2264 enum mips_symbol_type symbol_type
;
2267 context
= (mode
== MAX_MACHINE_MODE
2268 ? SYMBOL_CONTEXT_LEA
2269 : SYMBOL_CONTEXT_MEM
);
2270 if (!mips_symbolic_constant_p (addr
, context
, &symbol_type
)
2271 || mips_symbol_insns (symbol_type
, mode
) == 0
2272 || !mips_split_p
[symbol_type
])
2277 if (symbol_type
== SYMBOL_GP_RELATIVE
)
2279 if (!can_create_pseudo_p ())
2281 emit_insn (gen_load_const_gp (copy_rtx (temp
)));
2285 high
= mips16_gp_pseudo_reg ();
2289 high
= gen_rtx_HIGH (Pmode
, copy_rtx (addr
));
2290 high
= mips_force_temporary (temp
, high
);
2292 *lo_sum_out
= gen_rtx_LO_SUM (Pmode
, high
, addr
);
2298 /* Wrap symbol or label BASE in an unspec address of type SYMBOL_TYPE
2299 and add CONST_INT OFFSET to the result. */
2302 mips_unspec_address_offset (rtx base
, rtx offset
,
2303 enum mips_symbol_type symbol_type
)
2305 base
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, base
),
2306 UNSPEC_ADDRESS_FIRST
+ symbol_type
);
2307 if (offset
!= const0_rtx
)
2308 base
= gen_rtx_PLUS (Pmode
, base
, offset
);
2309 return gen_rtx_CONST (Pmode
, base
);
2312 /* Return an UNSPEC address with underlying address ADDRESS and symbol
2313 type SYMBOL_TYPE. */
2316 mips_unspec_address (rtx address
, enum mips_symbol_type symbol_type
)
2320 split_const (address
, &base
, &offset
);
2321 return mips_unspec_address_offset (base
, offset
, symbol_type
);
2325 /* If mips_unspec_address (ADDR, SYMBOL_TYPE) is a 32-bit value, add the
2326 high part to BASE and return the result. Just return BASE otherwise.
2327 TEMP is available as a temporary register if needed.
2329 The returned expression can be used as the first operand to a LO_SUM. */
2332 mips_unspec_offset_high (rtx temp
, rtx base
, rtx addr
,
2333 enum mips_symbol_type symbol_type
)
2335 if (mips_split_p
[symbol_type
])
2337 addr
= gen_rtx_HIGH (Pmode
, mips_unspec_address (addr
, symbol_type
));
2338 addr
= mips_force_temporary (temp
, addr
);
2339 return mips_force_temporary (temp
, gen_rtx_PLUS (Pmode
, addr
, base
));
2345 /* Return a legitimate address for REG + OFFSET. TEMP is as for
2346 mips_force_temporary; it is only needed when OFFSET is not a
2350 mips_add_offset (rtx temp
, rtx reg
, HOST_WIDE_INT offset
)
2352 if (!SMALL_OPERAND (offset
))
2357 /* Load the full offset into a register so that we can use
2358 an unextended instruction for the address itself. */
2359 high
= GEN_INT (offset
);
2364 /* Leave OFFSET as a 16-bit offset and put the excess in HIGH. */
2365 high
= GEN_INT (CONST_HIGH_PART (offset
));
2366 offset
= CONST_LOW_PART (offset
);
2368 high
= mips_force_temporary (temp
, high
);
2369 reg
= mips_force_temporary (temp
, gen_rtx_PLUS (Pmode
, high
, reg
));
2371 return plus_constant (reg
, offset
);
2374 /* Emit a call to __tls_get_addr. SYM is the TLS symbol we are
2375 referencing, and TYPE is the symbol type to use (either global
2376 dynamic or local dynamic). V0 is an RTX for the return value
2377 location. The entire insn sequence is returned. */
2379 static GTY(()) rtx mips_tls_symbol
;
2382 mips_call_tls_get_addr (rtx sym
, enum mips_symbol_type type
, rtx v0
)
2384 rtx insn
, loc
, tga
, a0
;
2386 a0
= gen_rtx_REG (Pmode
, GP_ARG_FIRST
);
2388 if (!mips_tls_symbol
)
2389 mips_tls_symbol
= init_one_libfunc ("__tls_get_addr");
2391 loc
= mips_unspec_address (sym
, type
);
2395 emit_insn (gen_rtx_SET (Pmode
, a0
,
2396 gen_rtx_LO_SUM (Pmode
, pic_offset_table_rtx
, loc
)));
2397 tga
= gen_rtx_MEM (Pmode
, mips_tls_symbol
);
2398 insn
= emit_call_insn (gen_call_value (v0
, tga
, const0_rtx
, const0_rtx
));
2399 CONST_OR_PURE_CALL_P (insn
) = 1;
2400 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), v0
);
2401 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), a0
);
2402 insn
= get_insns ();
2409 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
2410 return value will be a valid address and move_operand (either a REG
2414 mips_legitimize_tls_address (rtx loc
)
2416 rtx dest
, insn
, v0
, v1
, tmp1
, tmp2
, eqv
;
2417 enum tls_model model
;
2421 sorry ("MIPS16 TLS");
2422 return gen_reg_rtx (Pmode
);
2425 v0
= gen_rtx_REG (Pmode
, GP_RETURN
);
2426 v1
= gen_rtx_REG (Pmode
, GP_RETURN
+ 1);
2428 model
= SYMBOL_REF_TLS_MODEL (loc
);
2429 /* Only TARGET_ABICALLS code can have more than one module; other
2430 code must be be static and should not use a GOT. All TLS models
2431 reduce to local exec in this situation. */
2432 if (!TARGET_ABICALLS
)
2433 model
= TLS_MODEL_LOCAL_EXEC
;
2437 case TLS_MODEL_GLOBAL_DYNAMIC
:
2438 insn
= mips_call_tls_get_addr (loc
, SYMBOL_TLSGD
, v0
);
2439 dest
= gen_reg_rtx (Pmode
);
2440 emit_libcall_block (insn
, dest
, v0
, loc
);
2443 case TLS_MODEL_LOCAL_DYNAMIC
:
2444 insn
= mips_call_tls_get_addr (loc
, SYMBOL_TLSLDM
, v0
);
2445 tmp1
= gen_reg_rtx (Pmode
);
2447 /* Attach a unique REG_EQUIV, to allow the RTL optimizers to
2448 share the LDM result with other LD model accesses. */
2449 eqv
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
2451 emit_libcall_block (insn
, tmp1
, v0
, eqv
);
2453 tmp2
= mips_unspec_offset_high (NULL
, tmp1
, loc
, SYMBOL_DTPREL
);
2454 dest
= gen_rtx_LO_SUM (Pmode
, tmp2
,
2455 mips_unspec_address (loc
, SYMBOL_DTPREL
));
2458 case TLS_MODEL_INITIAL_EXEC
:
2459 tmp1
= gen_reg_rtx (Pmode
);
2460 tmp2
= mips_unspec_address (loc
, SYMBOL_GOTTPREL
);
2461 if (Pmode
== DImode
)
2463 emit_insn (gen_tls_get_tp_di (v1
));
2464 emit_insn (gen_load_gotdi (tmp1
, pic_offset_table_rtx
, tmp2
));
2468 emit_insn (gen_tls_get_tp_si (v1
));
2469 emit_insn (gen_load_gotsi (tmp1
, pic_offset_table_rtx
, tmp2
));
2471 dest
= gen_reg_rtx (Pmode
);
2472 emit_insn (gen_add3_insn (dest
, tmp1
, v1
));
2475 case TLS_MODEL_LOCAL_EXEC
:
2476 if (Pmode
== DImode
)
2477 emit_insn (gen_tls_get_tp_di (v1
));
2479 emit_insn (gen_tls_get_tp_si (v1
));
2481 tmp1
= mips_unspec_offset_high (NULL
, v1
, loc
, SYMBOL_TPREL
);
2482 dest
= gen_rtx_LO_SUM (Pmode
, tmp1
,
2483 mips_unspec_address (loc
, SYMBOL_TPREL
));
2493 /* This function is used to implement LEGITIMIZE_ADDRESS. If *XLOC can
2494 be legitimized in a way that the generic machinery might not expect,
2495 put the new address in *XLOC and return true. MODE is the mode of
2496 the memory being accessed. */
2499 mips_legitimize_address (rtx
*xloc
, enum machine_mode mode
)
2501 if (mips_tls_operand_p (*xloc
))
2503 *xloc
= mips_legitimize_tls_address (*xloc
);
2507 /* See if the address can split into a high part and a LO_SUM. */
2508 if (mips_split_symbol (NULL
, *xloc
, mode
, xloc
))
2511 if (GET_CODE (*xloc
) == PLUS
&& GET_CODE (XEXP (*xloc
, 1)) == CONST_INT
)
2513 /* Handle REG + CONSTANT using mips_add_offset. */
2516 reg
= XEXP (*xloc
, 0);
2517 if (!mips_valid_base_register_p (reg
, mode
, 0))
2518 reg
= copy_to_mode_reg (Pmode
, reg
);
2519 *xloc
= mips_add_offset (0, reg
, INTVAL (XEXP (*xloc
, 1)));
2527 /* Subroutine of mips_build_integer (with the same interface).
2528 Assume that the final action in the sequence should be a left shift. */
2531 mips_build_shift (struct mips_integer_op
*codes
, HOST_WIDE_INT value
)
2533 unsigned int i
, shift
;
2535 /* Shift VALUE right until its lowest bit is set. Shift arithmetically
2536 since signed numbers are easier to load than unsigned ones. */
2538 while ((value
& 1) == 0)
2539 value
/= 2, shift
++;
2541 i
= mips_build_integer (codes
, value
);
2542 codes
[i
].code
= ASHIFT
;
2543 codes
[i
].value
= shift
;
2548 /* As for mips_build_shift, but assume that the final action will be
2549 an IOR or PLUS operation. */
2552 mips_build_lower (struct mips_integer_op
*codes
, unsigned HOST_WIDE_INT value
)
2554 unsigned HOST_WIDE_INT high
;
2557 high
= value
& ~(unsigned HOST_WIDE_INT
) 0xffff;
2558 if (!LUI_OPERAND (high
) && (value
& 0x18000) == 0x18000)
2560 /* The constant is too complex to load with a simple lui/ori pair
2561 so our goal is to clear as many trailing zeros as possible.
2562 In this case, we know bit 16 is set and that the low 16 bits
2563 form a negative number. If we subtract that number from VALUE,
2564 we will clear at least the lowest 17 bits, maybe more. */
2565 i
= mips_build_integer (codes
, CONST_HIGH_PART (value
));
2566 codes
[i
].code
= PLUS
;
2567 codes
[i
].value
= CONST_LOW_PART (value
);
2571 i
= mips_build_integer (codes
, high
);
2572 codes
[i
].code
= IOR
;
2573 codes
[i
].value
= value
& 0xffff;
2579 /* Fill CODES with a sequence of rtl operations to load VALUE.
2580 Return the number of operations needed. */
2583 mips_build_integer (struct mips_integer_op
*codes
,
2584 unsigned HOST_WIDE_INT value
)
2586 if (SMALL_OPERAND (value
)
2587 || SMALL_OPERAND_UNSIGNED (value
)
2588 || LUI_OPERAND (value
))
2590 /* The value can be loaded with a single instruction. */
2591 codes
[0].code
= UNKNOWN
;
2592 codes
[0].value
= value
;
2595 else if ((value
& 1) != 0 || LUI_OPERAND (CONST_HIGH_PART (value
)))
2597 /* Either the constant is a simple LUI/ORI combination or its
2598 lowest bit is set. We don't want to shift in this case. */
2599 return mips_build_lower (codes
, value
);
2601 else if ((value
& 0xffff) == 0)
2603 /* The constant will need at least three actions. The lowest
2604 16 bits are clear, so the final action will be a shift. */
2605 return mips_build_shift (codes
, value
);
2609 /* The final action could be a shift, add or inclusive OR.
2610 Rather than use a complex condition to select the best
2611 approach, try both mips_build_shift and mips_build_lower
2612 and pick the one that gives the shortest sequence.
2613 Note that this case is only used once per constant. */
2614 struct mips_integer_op alt_codes
[MIPS_MAX_INTEGER_OPS
];
2615 unsigned int cost
, alt_cost
;
2617 cost
= mips_build_shift (codes
, value
);
2618 alt_cost
= mips_build_lower (alt_codes
, value
);
2619 if (alt_cost
< cost
)
2621 memcpy (codes
, alt_codes
, alt_cost
* sizeof (codes
[0]));
2629 /* Load VALUE into DEST, using TEMP as a temporary register if need be. */
2632 mips_move_integer (rtx dest
, rtx temp
, unsigned HOST_WIDE_INT value
)
2634 struct mips_integer_op codes
[MIPS_MAX_INTEGER_OPS
];
2635 enum machine_mode mode
;
2636 unsigned int i
, cost
;
2639 mode
= GET_MODE (dest
);
2640 cost
= mips_build_integer (codes
, value
);
2642 /* Apply each binary operation to X. Invariant: X is a legitimate
2643 source operand for a SET pattern. */
2644 x
= GEN_INT (codes
[0].value
);
2645 for (i
= 1; i
< cost
; i
++)
2647 if (!can_create_pseudo_p ())
2649 emit_insn (gen_rtx_SET (VOIDmode
, temp
, x
));
2653 x
= force_reg (mode
, x
);
2654 x
= gen_rtx_fmt_ee (codes
[i
].code
, mode
, x
, GEN_INT (codes
[i
].value
));
2657 emit_insn (gen_rtx_SET (VOIDmode
, dest
, x
));
2661 /* Subroutine of mips_legitimize_move. Move constant SRC into register
2662 DEST given that SRC satisfies immediate_operand but doesn't satisfy
2666 mips_legitimize_const_move (enum machine_mode mode
, rtx dest
, rtx src
)
2670 /* Split moves of big integers into smaller pieces. */
2671 if (splittable_const_int_operand (src
, mode
))
2673 mips_move_integer (dest
, dest
, INTVAL (src
));
2677 /* Split moves of symbolic constants into high/low pairs. */
2678 if (mips_split_symbol (dest
, src
, MAX_MACHINE_MODE
, &src
))
2680 emit_insn (gen_rtx_SET (VOIDmode
, dest
, src
));
2684 if (mips_tls_operand_p (src
))
2686 mips_emit_move (dest
, mips_legitimize_tls_address (src
));
2690 /* If we have (const (plus symbol offset)), and that expression cannot
2691 be forced into memory, load the symbol first and add in the offset.
2692 In non-MIPS16 mode, prefer to do this even if the constant _can_ be
2693 forced into memory, as it usually produces better code. */
2694 split_const (src
, &base
, &offset
);
2695 if (offset
!= const0_rtx
2696 && (targetm
.cannot_force_const_mem (src
)
2697 || (!TARGET_MIPS16
&& can_create_pseudo_p ())))
2699 base
= mips_force_temporary (dest
, base
);
2700 mips_emit_move (dest
, mips_add_offset (0, base
, INTVAL (offset
)));
2704 src
= force_const_mem (mode
, src
);
2706 /* When using explicit relocs, constant pool references are sometimes
2707 not legitimate addresses. */
2708 mips_split_symbol (dest
, XEXP (src
, 0), mode
, &XEXP (src
, 0));
2709 mips_emit_move (dest
, src
);
2713 /* If (set DEST SRC) is not a valid instruction, emit an equivalent
2714 sequence that is valid. */
2717 mips_legitimize_move (enum machine_mode mode
, rtx dest
, rtx src
)
2719 if (!register_operand (dest
, mode
) && !reg_or_0_operand (src
, mode
))
2721 mips_emit_move (dest
, force_reg (mode
, src
));
2725 /* Check for individual, fully-reloaded mflo and mfhi instructions. */
2726 if (GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
2727 && REG_P (src
) && MD_REG_P (REGNO (src
))
2728 && REG_P (dest
) && GP_REG_P (REGNO (dest
)))
2730 int other_regno
= REGNO (src
) == HI_REGNUM
? LO_REGNUM
: HI_REGNUM
;
2731 if (GET_MODE_SIZE (mode
) <= 4)
2732 emit_insn (gen_mfhilo_si (gen_rtx_REG (SImode
, REGNO (dest
)),
2733 gen_rtx_REG (SImode
, REGNO (src
)),
2734 gen_rtx_REG (SImode
, other_regno
)));
2736 emit_insn (gen_mfhilo_di (gen_rtx_REG (DImode
, REGNO (dest
)),
2737 gen_rtx_REG (DImode
, REGNO (src
)),
2738 gen_rtx_REG (DImode
, other_regno
)));
2742 /* We need to deal with constants that would be legitimate
2743 immediate_operands but not legitimate move_operands. */
2744 if (CONSTANT_P (src
) && !move_operand (src
, mode
))
2746 mips_legitimize_const_move (mode
, dest
, src
);
2747 set_unique_reg_note (get_last_insn (), REG_EQUAL
, copy_rtx (src
));
2753 /* We need a lot of little routines to check constant values on the
2754 mips16. These are used to figure out how long the instruction will
2755 be. It would be much better to do this using constraints, but
2756 there aren't nearly enough letters available. */
2759 m16_check_op (rtx op
, int low
, int high
, int mask
)
2761 return (GET_CODE (op
) == CONST_INT
2762 && INTVAL (op
) >= low
2763 && INTVAL (op
) <= high
2764 && (INTVAL (op
) & mask
) == 0);
2768 m16_uimm3_b (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2770 return m16_check_op (op
, 0x1, 0x8, 0);
2774 m16_simm4_1 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2776 return m16_check_op (op
, - 0x8, 0x7, 0);
2780 m16_nsimm4_1 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2782 return m16_check_op (op
, - 0x7, 0x8, 0);
2786 m16_simm5_1 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2788 return m16_check_op (op
, - 0x10, 0xf, 0);
2792 m16_nsimm5_1 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2794 return m16_check_op (op
, - 0xf, 0x10, 0);
2798 m16_uimm5_4 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2800 return m16_check_op (op
, (- 0x10) << 2, 0xf << 2, 3);
2804 m16_nuimm5_4 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2806 return m16_check_op (op
, (- 0xf) << 2, 0x10 << 2, 3);
2810 m16_simm8_1 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2812 return m16_check_op (op
, - 0x80, 0x7f, 0);
2816 m16_nsimm8_1 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2818 return m16_check_op (op
, - 0x7f, 0x80, 0);
2822 m16_uimm8_1 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2824 return m16_check_op (op
, 0x0, 0xff, 0);
2828 m16_nuimm8_1 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2830 return m16_check_op (op
, - 0xff, 0x0, 0);
2834 m16_uimm8_m1_1 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2836 return m16_check_op (op
, - 0x1, 0xfe, 0);
2840 m16_uimm8_4 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2842 return m16_check_op (op
, 0x0, 0xff << 2, 3);
2846 m16_nuimm8_4 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2848 return m16_check_op (op
, (- 0xff) << 2, 0x0, 3);
2852 m16_simm8_8 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2854 return m16_check_op (op
, (- 0x80) << 3, 0x7f << 3, 7);
2858 m16_nsimm8_8 (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2860 return m16_check_op (op
, (- 0x7f) << 3, 0x80 << 3, 7);
2863 /* Return true if ADDR matches the pattern for the lwxs load scaled indexed
2864 address instruction. */
2867 mips_lwxs_address_p (rtx addr
)
2870 && GET_CODE (addr
) == PLUS
2871 && REG_P (XEXP (addr
, 1)))
2873 rtx offset
= XEXP (addr
, 0);
2874 if (GET_CODE (offset
) == MULT
2875 && REG_P (XEXP (offset
, 0))
2876 && GET_CODE (XEXP (offset
, 1)) == CONST_INT
2877 && INTVAL (XEXP (offset
, 1)) == 4)
2883 /* The cost of loading values from the constant pool. It should be
2884 larger than the cost of any constant we want to synthesize inline. */
2886 #define CONSTANT_POOL_COST COSTS_N_INSNS (TARGET_MIPS16 ? 4 : 8)
2888 /* Return the cost of X when used as an operand to the MIPS16 instruction
2889 that implements CODE. Return -1 if there is no such instruction, or if
2890 X is not a valid immediate operand for it. */
2893 mips16_constant_cost (int code
, HOST_WIDE_INT x
)
2900 /* Shifts by between 1 and 8 bits (inclusive) are unextended,
2901 other shifts are extended. The shift patterns truncate the shift
2902 count to the right size, so there are no out-of-range values. */
2903 if (IN_RANGE (x
, 1, 8))
2905 return COSTS_N_INSNS (1);
2908 if (IN_RANGE (x
, -128, 127))
2910 if (SMALL_OPERAND (x
))
2911 return COSTS_N_INSNS (1);
2915 /* Like LE, but reject the always-true case. */
2919 /* We add 1 to the immediate and use SLT. */
2922 /* We can use CMPI for an xor with an unsigned 16-bit X. */
2925 if (IN_RANGE (x
, 0, 255))
2927 if (SMALL_OPERAND_UNSIGNED (x
))
2928 return COSTS_N_INSNS (1);
2933 /* Equality comparisons with 0 are cheap. */
2943 /* Return true if there is a non-MIPS16 instruction that implements CODE
2944 and if that instruction accepts X as an immediate operand. */
2947 mips_immediate_operand_p (int code
, HOST_WIDE_INT x
)
2954 /* All shift counts are truncated to a valid constant. */
2959 /* Likewise rotates, if the target supports rotates at all. */
2965 /* These instructions take 16-bit unsigned immediates. */
2966 return SMALL_OPERAND_UNSIGNED (x
);
2971 /* These instructions take 16-bit signed immediates. */
2972 return SMALL_OPERAND (x
);
2978 /* The "immediate" forms of these instructions are really
2979 implemented as comparisons with register 0. */
2984 /* Likewise, meaning that the only valid immediate operand is 1. */
2988 /* We add 1 to the immediate and use SLT. */
2989 return SMALL_OPERAND (x
+ 1);
2992 /* Likewise SLTU, but reject the always-true case. */
2993 return SMALL_OPERAND (x
+ 1) && x
+ 1 != 0;
2997 /* The bit position and size are immediate operands. */
2998 return ISA_HAS_EXT_INS
;
3001 /* By default assume that $0 can be used for 0. */
3006 /* Return the cost of binary operation X, given that the instruction
3007 sequence for a word-sized or smaller operation has cost SINGLE_COST
3008 and that the sequence of a double-word operation has cost DOUBLE_COST. */
3011 mips_binary_cost (rtx x
, int single_cost
, int double_cost
)
3015 if (GET_MODE_SIZE (GET_MODE (x
)) == UNITS_PER_WORD
* 2)
3020 + rtx_cost (XEXP (x
, 0), 0)
3021 + rtx_cost (XEXP (x
, 1), GET_CODE (x
)));
3024 /* Return the cost of floating-point multiplications of mode MODE. */
3027 mips_fp_mult_cost (enum machine_mode mode
)
3029 return mode
== DFmode
? mips_cost
->fp_mult_df
: mips_cost
->fp_mult_sf
;
3032 /* Return the cost of floating-point divisions of mode MODE. */
3035 mips_fp_div_cost (enum machine_mode mode
)
3037 return mode
== DFmode
? mips_cost
->fp_div_df
: mips_cost
->fp_div_sf
;
3040 /* Return the cost of sign-extending OP to mode MODE, not including the
3041 cost of OP itself. */
3044 mips_sign_extend_cost (enum machine_mode mode
, rtx op
)
3047 /* Extended loads are as cheap as unextended ones. */
3050 if (TARGET_64BIT
&& mode
== DImode
&& GET_MODE (op
) == SImode
)
3051 /* A sign extension from SImode to DImode in 64-bit mode is free. */
3054 if (ISA_HAS_SEB_SEH
|| GENERATE_MIPS16E
)
3055 /* We can use SEB or SEH. */
3056 return COSTS_N_INSNS (1);
3058 /* We need to use a shift left and a shift right. */
3059 return COSTS_N_INSNS (TARGET_MIPS16
? 4 : 2);
3062 /* Return the cost of zero-extending OP to mode MODE, not including the
3063 cost of OP itself. */
3066 mips_zero_extend_cost (enum machine_mode mode
, rtx op
)
3069 /* Extended loads are as cheap as unextended ones. */
3072 if (TARGET_64BIT
&& mode
== DImode
&& GET_MODE (op
) == SImode
)
3073 /* We need a shift left by 32 bits and a shift right by 32 bits. */
3074 return COSTS_N_INSNS (TARGET_MIPS16
? 4 : 2);
3076 if (GENERATE_MIPS16E
)
3077 /* We can use ZEB or ZEH. */
3078 return COSTS_N_INSNS (1);
3081 /* We need to load 0xff or 0xffff into a register and use AND. */
3082 return COSTS_N_INSNS (GET_MODE (op
) == QImode
? 2 : 3);
3084 /* We can use ANDI. */
3085 return COSTS_N_INSNS (1);
3088 /* Implement TARGET_RTX_COSTS. */
3091 mips_rtx_costs (rtx x
, int code
, int outer_code
, int *total
)
3093 enum machine_mode mode
= GET_MODE (x
);
3094 bool float_mode_p
= FLOAT_MODE_P (mode
);
3098 /* The cost of a COMPARE is hard to define for MIPS. COMPAREs don't
3099 appear in the instruction stream, and the cost of a comparison is
3100 really the cost of the branch or scc condition. At the time of
3101 writing, gcc only uses an explicit outer COMPARE code when optabs
3102 is testing whether a constant is expensive enough to force into a
3103 register. We want optabs to pass such constants through the MIPS
3104 expanders instead, so make all constants very cheap here. */
3105 if (outer_code
== COMPARE
)
3107 gcc_assert (CONSTANT_P (x
));
3115 /* Treat *clear_upper32-style ANDs as having zero cost in the
3116 second operand. The cost is entirely in the first operand.
3118 ??? This is needed because we would otherwise try to CSE
3119 the constant operand. Although that's the right thing for
3120 instructions that continue to be a register operation throughout
3121 compilation, it is disastrous for instructions that could
3122 later be converted into a memory operation. */
3124 && outer_code
== AND
3125 && UINTVAL (x
) == 0xffffffff)
3133 cost
= mips16_constant_cost (outer_code
, INTVAL (x
));
3142 /* When not optimizing for size, we care more about the cost
3143 of hot code, and hot code is often in a loop. If a constant
3144 operand needs to be forced into a register, we will often be
3145 able to hoist the constant load out of the loop, so the load
3146 should not contribute to the cost. */
3148 || mips_immediate_operand_p (outer_code
, INTVAL (x
)))
3160 if (force_to_mem_operand (x
, VOIDmode
))
3162 *total
= COSTS_N_INSNS (1);
3165 cost
= mips_const_insns (x
);
3168 /* If the constant is likely to be stored in a GPR, SETs of
3169 single-insn constants are as cheap as register sets; we
3170 never want to CSE them.
3172 Don't reduce the cost of storing a floating-point zero in
3173 FPRs. If we have a zero in an FPR for other reasons, we
3174 can get better cfg-cleanup and delayed-branch results by
3175 using it consistently, rather than using $0 sometimes and
3176 an FPR at other times. Also, moves between floating-point
3177 registers are sometimes cheaper than (D)MTC1 $0. */
3179 && outer_code
== SET
3180 && !(float_mode_p
&& TARGET_HARD_FLOAT
))
3182 /* When non-MIPS16 code loads a constant N>1 times, we rarely
3183 want to CSE the constant itself. It is usually better to
3184 have N copies of the last operation in the sequence and one
3185 shared copy of the other operations. (Note that this is
3186 not true for MIPS16 code, where the final operation in the
3187 sequence is often an extended instruction.)
3189 Also, if we have a CONST_INT, we don't know whether it is
3190 for a word or doubleword operation, so we cannot rely on
3191 the result of mips_build_integer. */
3192 else if (!TARGET_MIPS16
3193 && (outer_code
== SET
|| mode
== VOIDmode
))
3195 *total
= COSTS_N_INSNS (cost
);
3198 /* The value will need to be fetched from the constant pool. */
3199 *total
= CONSTANT_POOL_COST
;
3203 /* If the address is legitimate, return the number of
3204 instructions it needs. */
3206 cost
= mips_address_insns (addr
, mode
, true);
3209 *total
= COSTS_N_INSNS (cost
+ 1);
3212 /* Check for a scaled indexed address. */
3213 if (mips_lwxs_address_p (addr
))
3215 *total
= COSTS_N_INSNS (2);
3218 /* Otherwise use the default handling. */
3222 *total
= COSTS_N_INSNS (6);
3226 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
? 2 : 1);
3230 /* Check for a *clear_upper32 pattern and treat it like a zero
3231 extension. See the pattern's comment for details. */
3234 && CONST_INT_P (XEXP (x
, 1))
3235 && UINTVAL (XEXP (x
, 1)) == 0xffffffff)
3237 *total
= (mips_zero_extend_cost (mode
, XEXP (x
, 0))
3238 + rtx_cost (XEXP (x
, 0), 0));
3245 /* Double-word operations use two single-word operations. */
3246 *total
= mips_binary_cost (x
, COSTS_N_INSNS (1), COSTS_N_INSNS (2));
3254 if (CONSTANT_P (XEXP (x
, 1)))
3255 *total
= mips_binary_cost (x
, COSTS_N_INSNS (1), COSTS_N_INSNS (4));
3257 *total
= mips_binary_cost (x
, COSTS_N_INSNS (1), COSTS_N_INSNS (12));
3262 *total
= mips_cost
->fp_add
;
3264 *total
= COSTS_N_INSNS (4);
3268 /* Low-part immediates need an extended MIPS16 instruction. */
3269 *total
= (COSTS_N_INSNS (TARGET_MIPS16
? 2 : 1)
3270 + rtx_cost (XEXP (x
, 0), 0));
3285 /* Branch comparisons have VOIDmode, so use the first operand's
3287 mode
= GET_MODE (XEXP (x
, 0));
3288 if (FLOAT_MODE_P (mode
))
3290 *total
= mips_cost
->fp_add
;
3293 *total
= mips_binary_cost (x
, COSTS_N_INSNS (1), COSTS_N_INSNS (4));
3298 && ISA_HAS_NMADD_NMSUB
3299 && TARGET_FUSED_MADD
3300 && !HONOR_NANS (mode
)
3301 && !HONOR_SIGNED_ZEROS (mode
))
3303 /* See if we can use NMADD or NMSUB. See mips.md for the
3304 associated patterns. */
3305 rtx op0
= XEXP (x
, 0);
3306 rtx op1
= XEXP (x
, 1);
3307 if (GET_CODE (op0
) == MULT
&& GET_CODE (XEXP (op0
, 0)) == NEG
)
3309 *total
= (mips_fp_mult_cost (mode
)
3310 + rtx_cost (XEXP (XEXP (op0
, 0), 0), 0)
3311 + rtx_cost (XEXP (op0
, 1), 0)
3312 + rtx_cost (op1
, 0));
3315 if (GET_CODE (op1
) == MULT
)
3317 *total
= (mips_fp_mult_cost (mode
)
3319 + rtx_cost (XEXP (op1
, 0), 0)
3320 + rtx_cost (XEXP (op1
, 1), 0));
3330 && TARGET_FUSED_MADD
3331 && GET_CODE (XEXP (x
, 0)) == MULT
)
3334 *total
= mips_cost
->fp_add
;
3338 /* Double-word operations require three single-word operations and
3339 an SLTU. The MIPS16 version then needs to move the result of
3340 the SLTU from $24 to a MIPS16 register. */
3341 *total
= mips_binary_cost (x
, COSTS_N_INSNS (1),
3342 COSTS_N_INSNS (TARGET_MIPS16
? 5 : 4));
3347 && ISA_HAS_NMADD_NMSUB
3348 && TARGET_FUSED_MADD
3349 && !HONOR_NANS (mode
)
3350 && HONOR_SIGNED_ZEROS (mode
))
3352 /* See if we can use NMADD or NMSUB. See mips.md for the
3353 associated patterns. */
3354 rtx op
= XEXP (x
, 0);
3355 if ((GET_CODE (op
) == PLUS
|| GET_CODE (op
) == MINUS
)
3356 && GET_CODE (XEXP (op
, 0)) == MULT
)
3358 *total
= (mips_fp_mult_cost (mode
)
3359 + rtx_cost (XEXP (XEXP (op
, 0), 0), 0)
3360 + rtx_cost (XEXP (XEXP (op
, 0), 1), 0)
3361 + rtx_cost (XEXP (op
, 1), 0));
3367 *total
= mips_cost
->fp_add
;
3369 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
? 4 : 1);
3374 *total
= mips_fp_mult_cost (mode
);
3375 else if (mode
== DImode
&& !TARGET_64BIT
)
3376 /* Synthesized from 2 mulsi3s, 1 mulsidi3 and two additions,
3377 where the mulsidi3 always includes an MFHI and an MFLO. */
3378 *total
= (optimize_size
3379 ? COSTS_N_INSNS (ISA_HAS_MUL3
? 7 : 9)
3380 : mips_cost
->int_mult_si
* 3 + 6);
3381 else if (optimize_size
)
3382 *total
= (ISA_HAS_MUL3
? 1 : 2);
3383 else if (mode
== DImode
)
3384 *total
= mips_cost
->int_mult_di
;
3386 *total
= mips_cost
->int_mult_si
;
3390 /* Check for a reciprocal. */
3391 if (float_mode_p
&& XEXP (x
, 0) == CONST1_RTX (mode
))
3394 && flag_unsafe_math_optimizations
3395 && (outer_code
== SQRT
|| GET_CODE (XEXP (x
, 1)) == SQRT
))
3397 /* An rsqrt<mode>a or rsqrt<mode>b pattern. Count the
3398 division as being free. */
3399 *total
= rtx_cost (XEXP (x
, 1), 0);
3404 *total
= mips_fp_div_cost (mode
) + rtx_cost (XEXP (x
, 1), 0);
3414 *total
= mips_fp_div_cost (mode
);
3423 /* It is our responsibility to make division by a power of 2
3424 as cheap as 2 register additions if we want the division
3425 expanders to be used for such operations; see the setting
3426 of sdiv_pow2_cheap in optabs.c. Using (D)DIV for MIPS16
3427 should always produce shorter code than using
3428 expand_sdiv2_pow2. */
3430 && CONST_INT_P (XEXP (x
, 1))
3431 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0)
3433 *total
= COSTS_N_INSNS (2) + rtx_cost (XEXP (x
, 0), 0);
3436 *total
= COSTS_N_INSNS (mips_idiv_insns ());
3438 else if (mode
== DImode
)
3439 *total
= mips_cost
->int_div_di
;
3441 *total
= mips_cost
->int_div_si
;
3445 *total
= mips_sign_extend_cost (mode
, XEXP (x
, 0));
3449 *total
= mips_zero_extend_cost (mode
, XEXP (x
, 0));
3453 case UNSIGNED_FLOAT
:
3456 case FLOAT_TRUNCATE
:
3457 *total
= mips_cost
->fp_add
;
3465 /* Provide the costs of an addressing mode that contains ADDR.
3466 If ADDR is not a valid address, its cost is irrelevant. */
3469 mips_address_cost (rtx addr
)
3471 return mips_address_insns (addr
, SImode
, false);
3474 /* Return one word of double-word value OP, taking into account the fixed
3475 endianness of certain registers. HIGH_P is true to select the high part,
3476 false to select the low part. */
3479 mips_subword (rtx op
, int high_p
)
3482 enum machine_mode mode
;
3484 mode
= GET_MODE (op
);
3485 if (mode
== VOIDmode
)
3488 if (TARGET_BIG_ENDIAN
? !high_p
: high_p
)
3489 byte
= UNITS_PER_WORD
;
3493 if (FP_REG_RTX_P (op
))
3494 return gen_rtx_REG (word_mode
, high_p
? REGNO (op
) + 1 : REGNO (op
));
3497 return mips_rewrite_small_data (adjust_address (op
, word_mode
, byte
));
3499 return simplify_gen_subreg (word_mode
, op
, mode
, byte
);
3503 /* Return true if a 64-bit move from SRC to DEST should be split into two. */
3506 mips_split_64bit_move_p (rtx dest
, rtx src
)
3511 /* FP->FP moves can be done in a single instruction. */
3512 if (FP_REG_RTX_P (src
) && FP_REG_RTX_P (dest
))
3515 /* Check for floating-point loads and stores. They can be done using
3516 ldc1 and sdc1 on MIPS II and above. */
3519 if (FP_REG_RTX_P (dest
) && MEM_P (src
))
3521 if (FP_REG_RTX_P (src
) && MEM_P (dest
))
3528 /* Split a 64-bit move from SRC to DEST assuming that
3529 mips_split_64bit_move_p holds.
3531 Moves into and out of FPRs cause some difficulty here. Such moves
3532 will always be DFmode, since paired FPRs are not allowed to store
3533 DImode values. The most natural representation would be two separate
3534 32-bit moves, such as:
3536 (set (reg:SI $f0) (mem:SI ...))
3537 (set (reg:SI $f1) (mem:SI ...))
3539 However, the second insn is invalid because odd-numbered FPRs are
3540 not allowed to store independent values. Use the patterns load_df_low,
3541 load_df_high and store_df_high instead. */
3544 mips_split_64bit_move (rtx dest
, rtx src
)
3546 if (FP_REG_RTX_P (dest
))
3548 /* Loading an FPR from memory or from GPRs. */
3551 dest
= gen_lowpart (DFmode
, dest
);
3552 emit_insn (gen_load_df_low (dest
, mips_subword (src
, 0)));
3553 emit_insn (gen_mthc1 (dest
, mips_subword (src
, 1),
3558 emit_insn (gen_load_df_low (copy_rtx (dest
),
3559 mips_subword (src
, 0)));
3560 emit_insn (gen_load_df_high (dest
, mips_subword (src
, 1),
3564 else if (FP_REG_RTX_P (src
))
3566 /* Storing an FPR into memory or GPRs. */
3569 src
= gen_lowpart (DFmode
, src
);
3570 mips_emit_move (mips_subword (dest
, 0), mips_subword (src
, 0));
3571 emit_insn (gen_mfhc1 (mips_subword (dest
, 1), src
));
3575 mips_emit_move (mips_subword (dest
, 0), mips_subword (src
, 0));
3576 emit_insn (gen_store_df_high (mips_subword (dest
, 1), src
));
3581 /* The operation can be split into two normal moves. Decide in
3582 which order to do them. */
3585 low_dest
= mips_subword (dest
, 0);
3586 if (REG_P (low_dest
)
3587 && reg_overlap_mentioned_p (low_dest
, src
))
3589 mips_emit_move (mips_subword (dest
, 1), mips_subword (src
, 1));
3590 mips_emit_move (low_dest
, mips_subword (src
, 0));
3594 mips_emit_move (low_dest
, mips_subword (src
, 0));
3595 mips_emit_move (mips_subword (dest
, 1), mips_subword (src
, 1));
3600 /* Return the appropriate instructions to move SRC into DEST. Assume
3601 that SRC is operand 1 and DEST is operand 0. */
3604 mips_output_move (rtx dest
, rtx src
)
3606 enum rtx_code dest_code
, src_code
;
3607 enum mips_symbol_type symbol_type
;
3610 dest_code
= GET_CODE (dest
);
3611 src_code
= GET_CODE (src
);
3612 dbl_p
= (GET_MODE_SIZE (GET_MODE (dest
)) == 8);
3614 if (dbl_p
&& mips_split_64bit_move_p (dest
, src
))
3617 if ((src_code
== REG
&& GP_REG_P (REGNO (src
)))
3618 || (!TARGET_MIPS16
&& src
== CONST0_RTX (GET_MODE (dest
))))
3620 if (dest_code
== REG
)
3622 if (GP_REG_P (REGNO (dest
)))
3623 return "move\t%0,%z1";
3625 if (MD_REG_P (REGNO (dest
)))
3628 if (DSP_ACC_REG_P (REGNO (dest
)))
3630 static char retval
[] = "mt__\t%z1,%q0";
3631 retval
[2] = reg_names
[REGNO (dest
)][4];
3632 retval
[3] = reg_names
[REGNO (dest
)][5];
3636 if (FP_REG_P (REGNO (dest
)))
3637 return (dbl_p
? "dmtc1\t%z1,%0" : "mtc1\t%z1,%0");
3639 if (ALL_COP_REG_P (REGNO (dest
)))
3641 static char retval
[] = "dmtc_\t%z1,%0";
3643 retval
[4] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (dest
));
3644 return (dbl_p
? retval
: retval
+ 1);
3647 if (dest_code
== MEM
)
3648 return (dbl_p
? "sd\t%z1,%0" : "sw\t%z1,%0");
3650 if (dest_code
== REG
&& GP_REG_P (REGNO (dest
)))
3652 if (src_code
== REG
)
3654 if (DSP_ACC_REG_P (REGNO (src
)))
3656 static char retval
[] = "mf__\t%0,%q1";
3657 retval
[2] = reg_names
[REGNO (src
)][4];
3658 retval
[3] = reg_names
[REGNO (src
)][5];
3662 if (ST_REG_P (REGNO (src
)) && ISA_HAS_8CC
)
3663 return "lui\t%0,0x3f80\n\tmovf\t%0,%.,%1";
3665 if (FP_REG_P (REGNO (src
)))
3666 return (dbl_p
? "dmfc1\t%0,%1" : "mfc1\t%0,%1");
3668 if (ALL_COP_REG_P (REGNO (src
)))
3670 static char retval
[] = "dmfc_\t%0,%1";
3672 retval
[4] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (src
));
3673 return (dbl_p
? retval
: retval
+ 1);
3677 if (src_code
== MEM
)
3678 return (dbl_p
? "ld\t%0,%1" : "lw\t%0,%1");
3680 if (src_code
== CONST_INT
)
3682 /* Don't use the X format, because that will give out of
3683 range numbers for 64-bit hosts and 32-bit targets. */
3685 return "li\t%0,%1\t\t\t# %X1";
3687 if (INTVAL (src
) >= 0 && INTVAL (src
) <= 0xffff)
3690 if (INTVAL (src
) < 0 && INTVAL (src
) >= -0xffff)
3694 if (src_code
== HIGH
)
3695 return TARGET_MIPS16
? "#" : "lui\t%0,%h1";
3697 if (CONST_GP_P (src
))
3698 return "move\t%0,%1";
3700 if (mips_symbolic_constant_p (src
, SYMBOL_CONTEXT_LEA
, &symbol_type
)
3701 && mips_lo_relocs
[symbol_type
] != 0)
3703 /* A signed 16-bit constant formed by applying a relocation
3704 operator to a symbolic address. */
3705 gcc_assert (!mips_split_p
[symbol_type
]);
3706 return "li\t%0,%R1";
3709 if (symbolic_operand (src
, VOIDmode
))
3711 gcc_assert (TARGET_MIPS16
3712 ? TARGET_MIPS16_TEXT_LOADS
3713 : !TARGET_EXPLICIT_RELOCS
);
3714 return (dbl_p
? "dla\t%0,%1" : "la\t%0,%1");
3717 if (src_code
== REG
&& FP_REG_P (REGNO (src
)))
3719 if (dest_code
== REG
&& FP_REG_P (REGNO (dest
)))
3721 if (GET_MODE (dest
) == V2SFmode
)
3722 return "mov.ps\t%0,%1";
3724 return (dbl_p
? "mov.d\t%0,%1" : "mov.s\t%0,%1");
3727 if (dest_code
== MEM
)
3728 return (dbl_p
? "sdc1\t%1,%0" : "swc1\t%1,%0");
3730 if (dest_code
== REG
&& FP_REG_P (REGNO (dest
)))
3732 if (src_code
== MEM
)
3733 return (dbl_p
? "ldc1\t%0,%1" : "lwc1\t%0,%1");
3735 if (dest_code
== REG
&& ALL_COP_REG_P (REGNO (dest
)) && src_code
== MEM
)
3737 static char retval
[] = "l_c_\t%0,%1";
3739 retval
[1] = (dbl_p
? 'd' : 'w');
3740 retval
[3] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (dest
));
3743 if (dest_code
== MEM
&& src_code
== REG
&& ALL_COP_REG_P (REGNO (src
)))
3745 static char retval
[] = "s_c_\t%1,%0";
3747 retval
[1] = (dbl_p
? 'd' : 'w');
3748 retval
[3] = COPNUM_AS_CHAR_FROM_REGNUM (REGNO (src
));
3754 /* Restore $gp from its save slot. Valid only when using o32 or
3758 mips_restore_gp (void)
3762 gcc_assert (TARGET_ABICALLS
&& TARGET_OLDABI
);
3764 address
= mips_add_offset (pic_offset_table_rtx
,
3765 frame_pointer_needed
3766 ? hard_frame_pointer_rtx
3767 : stack_pointer_rtx
,
3768 current_function_outgoing_args_size
);
3769 slot
= gen_rtx_MEM (Pmode
, address
);
3771 mips_emit_move (pic_offset_table_rtx
, slot
);
3772 if (!TARGET_EXPLICIT_RELOCS
)
3773 emit_insn (gen_blockage ());
3776 /* Emit an instruction of the form (set TARGET (CODE OP0 OP1)). */
3779 mips_emit_binary (enum rtx_code code
, rtx target
, rtx op0
, rtx op1
)
3781 emit_insn (gen_rtx_SET (VOIDmode
, target
,
3782 gen_rtx_fmt_ee (code
, GET_MODE (target
), op0
, op1
)));
3785 /* Return true if CMP1 is a suitable second operand for relational
3786 operator CODE. See also the *sCC patterns in mips.md. */
3789 mips_relational_operand_ok_p (enum rtx_code code
, rtx cmp1
)
3795 return reg_or_0_operand (cmp1
, VOIDmode
);
3799 return !TARGET_MIPS16
&& cmp1
== const1_rtx
;
3803 return arith_operand (cmp1
, VOIDmode
);
3806 return sle_operand (cmp1
, VOIDmode
);
3809 return sleu_operand (cmp1
, VOIDmode
);
3816 /* Canonicalize LE or LEU comparisons into LT comparisons when
3817 possible to avoid extra instructions or inverting the
3821 mips_canonicalize_comparison (enum rtx_code
*code
, rtx
*cmp1
,
3822 enum machine_mode mode
)
3824 HOST_WIDE_INT original
, plus_one
;
3826 if (GET_CODE (*cmp1
) != CONST_INT
)
3829 original
= INTVAL (*cmp1
);
3830 plus_one
= trunc_int_for_mode ((unsigned HOST_WIDE_INT
) original
+ 1, mode
);
3835 if (original
< plus_one
)
3838 *cmp1
= force_reg (mode
, GEN_INT (plus_one
));
3847 *cmp1
= force_reg (mode
, GEN_INT (plus_one
));
3860 /* Compare CMP0 and CMP1 using relational operator CODE and store the
3861 result in TARGET. CMP0 and TARGET are register_operands that have
3862 the same integer mode. If INVERT_PTR is nonnull, it's OK to set
3863 TARGET to the inverse of the result and flip *INVERT_PTR instead. */
3866 mips_emit_int_relational (enum rtx_code code
, bool *invert_ptr
,
3867 rtx target
, rtx cmp0
, rtx cmp1
)
3869 /* First see if there is a MIPS instruction that can do this operation
3870 with CMP1 in its current form. If not, try to canonicalize the
3871 comparison to LT. If that fails, try doing the same for the
3872 inverse operation. If that also fails, force CMP1 into a register
3874 if (mips_relational_operand_ok_p (code
, cmp1
))
3875 mips_emit_binary (code
, target
, cmp0
, cmp1
);
3876 else if (mips_canonicalize_comparison (&code
, &cmp1
, GET_MODE (target
)))
3877 mips_emit_binary (code
, target
, cmp0
, cmp1
);
3880 enum rtx_code inv_code
= reverse_condition (code
);
3881 if (!mips_relational_operand_ok_p (inv_code
, cmp1
))
3883 cmp1
= force_reg (GET_MODE (cmp0
), cmp1
);
3884 mips_emit_int_relational (code
, invert_ptr
, target
, cmp0
, cmp1
);
3886 else if (invert_ptr
== 0)
3888 rtx inv_target
= gen_reg_rtx (GET_MODE (target
));
3889 mips_emit_binary (inv_code
, inv_target
, cmp0
, cmp1
);
3890 mips_emit_binary (XOR
, target
, inv_target
, const1_rtx
);
3894 *invert_ptr
= !*invert_ptr
;
3895 mips_emit_binary (inv_code
, target
, cmp0
, cmp1
);
3900 /* Return a register that is zero iff CMP0 and CMP1 are equal.
3901 The register will have the same mode as CMP0. */
3904 mips_zero_if_equal (rtx cmp0
, rtx cmp1
)
3906 if (cmp1
== const0_rtx
)
3909 if (uns_arith_operand (cmp1
, VOIDmode
))
3910 return expand_binop (GET_MODE (cmp0
), xor_optab
,
3911 cmp0
, cmp1
, 0, 0, OPTAB_DIRECT
);
3913 return expand_binop (GET_MODE (cmp0
), sub_optab
,
3914 cmp0
, cmp1
, 0, 0, OPTAB_DIRECT
);
3917 /* Convert *CODE into a code that can be used in a floating-point
3918 scc instruction (c.<cond>.<fmt>). Return true if the values of
3919 the condition code registers will be inverted, with 0 indicating
3920 that the condition holds. */
3923 mips_reverse_fp_cond_p (enum rtx_code
*code
)
3930 *code
= reverse_condition_maybe_unordered (*code
);
3938 /* Convert a comparison into something that can be used in a branch or
3939 conditional move. cmp_operands[0] and cmp_operands[1] are the values
3940 being compared and *CODE is the code used to compare them.
3942 Update *CODE, *OP0 and *OP1 so that they describe the final comparison.
3943 If NEED_EQ_NE_P, then only EQ/NE comparisons against zero are possible,
3944 otherwise any standard branch condition can be used. The standard branch
3947 - EQ/NE between two registers.
3948 - any comparison between a register and zero. */
3951 mips_emit_compare (enum rtx_code
*code
, rtx
*op0
, rtx
*op1
, bool need_eq_ne_p
)
3953 if (GET_MODE_CLASS (GET_MODE (cmp_operands
[0])) == MODE_INT
)
3955 if (!need_eq_ne_p
&& cmp_operands
[1] == const0_rtx
)
3957 *op0
= cmp_operands
[0];
3958 *op1
= cmp_operands
[1];
3960 else if (*code
== EQ
|| *code
== NE
)
3964 *op0
= mips_zero_if_equal (cmp_operands
[0], cmp_operands
[1]);
3969 *op0
= cmp_operands
[0];
3970 *op1
= force_reg (GET_MODE (*op0
), cmp_operands
[1]);
3975 /* The comparison needs a separate scc instruction. Store the
3976 result of the scc in *OP0 and compare it against zero. */
3977 bool invert
= false;
3978 *op0
= gen_reg_rtx (GET_MODE (cmp_operands
[0]));
3980 mips_emit_int_relational (*code
, &invert
, *op0
,
3981 cmp_operands
[0], cmp_operands
[1]);
3982 *code
= (invert
? EQ
: NE
);
3985 else if (ALL_FIXED_POINT_MODE_P (GET_MODE (cmp_operands
[0])))
3987 *op0
= gen_rtx_REG (CCDSPmode
, CCDSP_CC_REGNUM
);
3988 mips_emit_binary (*code
, *op0
, cmp_operands
[0], cmp_operands
[1]);
3994 enum rtx_code cmp_code
;
3996 /* Floating-point tests use a separate c.cond.fmt comparison to
3997 set a condition code register. The branch or conditional move
3998 will then compare that register against zero.
4000 Set CMP_CODE to the code of the comparison instruction and
4001 *CODE to the code that the branch or move should use. */
4003 *code
= mips_reverse_fp_cond_p (&cmp_code
) ? EQ
: NE
;
4005 ? gen_reg_rtx (CCmode
)
4006 : gen_rtx_REG (CCmode
, FPSW_REGNUM
));
4008 mips_emit_binary (cmp_code
, *op0
, cmp_operands
[0], cmp_operands
[1]);
4012 /* Try comparing cmp_operands[0] and cmp_operands[1] using rtl code CODE.
4013 Store the result in TARGET and return true if successful.
4015 On 64-bit targets, TARGET may be wider than cmp_operands[0]. */
4018 mips_emit_scc (enum rtx_code code
, rtx target
)
4020 if (GET_MODE_CLASS (GET_MODE (cmp_operands
[0])) != MODE_INT
)
4023 target
= gen_lowpart (GET_MODE (cmp_operands
[0]), target
);
4024 if (code
== EQ
|| code
== NE
)
4026 rtx zie
= mips_zero_if_equal (cmp_operands
[0], cmp_operands
[1]);
4027 mips_emit_binary (code
, target
, zie
, const0_rtx
);
4030 mips_emit_int_relational (code
, 0, target
,
4031 cmp_operands
[0], cmp_operands
[1]);
4035 /* Emit the common code for doing conditional branches.
4036 operand[0] is the label to jump to.
4037 The comparison operands are saved away by cmp{si,di,sf,df}. */
4040 gen_conditional_branch (rtx
*operands
, enum rtx_code code
)
4042 rtx op0
, op1
, condition
;
4044 mips_emit_compare (&code
, &op0
, &op1
, TARGET_MIPS16
);
4045 condition
= gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
4046 emit_jump_insn (gen_condjump (condition
, operands
[0]));
4051 (set temp (COND:CCV2 CMP_OP0 CMP_OP1))
4052 (set DEST (unspec [TRUE_SRC FALSE_SRC temp] UNSPEC_MOVE_TF_PS)) */
4055 mips_expand_vcondv2sf (rtx dest
, rtx true_src
, rtx false_src
,
4056 enum rtx_code cond
, rtx cmp_op0
, rtx cmp_op1
)
4061 reversed_p
= mips_reverse_fp_cond_p (&cond
);
4062 cmp_result
= gen_reg_rtx (CCV2mode
);
4063 emit_insn (gen_scc_ps (cmp_result
,
4064 gen_rtx_fmt_ee (cond
, VOIDmode
, cmp_op0
, cmp_op1
)));
4066 emit_insn (gen_mips_cond_move_tf_ps (dest
, false_src
, true_src
,
4069 emit_insn (gen_mips_cond_move_tf_ps (dest
, true_src
, false_src
,
4073 /* Emit the common code for conditional moves. OPERANDS is the array
4074 of operands passed to the conditional move define_expand. */
4077 gen_conditional_move (rtx
*operands
)
4082 code
= GET_CODE (operands
[1]);
4083 mips_emit_compare (&code
, &op0
, &op1
, true);
4084 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0],
4085 gen_rtx_IF_THEN_ELSE (GET_MODE (operands
[0]),
4086 gen_rtx_fmt_ee (code
,
4089 operands
[2], operands
[3])));
4092 /* Emit a conditional trap. OPERANDS is the array of operands passed to
4093 the conditional_trap expander. */
4096 mips_gen_conditional_trap (rtx
*operands
)
4099 enum rtx_code cmp_code
= GET_CODE (operands
[0]);
4100 enum machine_mode mode
= GET_MODE (cmp_operands
[0]);
4102 /* MIPS conditional trap machine instructions don't have GT or LE
4103 flavors, so we must invert the comparison and convert to LT and
4104 GE, respectively. */
4107 case GT
: cmp_code
= LT
; break;
4108 case LE
: cmp_code
= GE
; break;
4109 case GTU
: cmp_code
= LTU
; break;
4110 case LEU
: cmp_code
= GEU
; break;
4113 if (cmp_code
== GET_CODE (operands
[0]))
4115 op0
= cmp_operands
[0];
4116 op1
= cmp_operands
[1];
4120 op0
= cmp_operands
[1];
4121 op1
= cmp_operands
[0];
4123 op0
= force_reg (mode
, op0
);
4124 if (!arith_operand (op1
, mode
))
4125 op1
= force_reg (mode
, op1
);
4127 emit_insn (gen_rtx_TRAP_IF (VOIDmode
,
4128 gen_rtx_fmt_ee (cmp_code
, mode
, op0
, op1
),
4132 /* Return true if function DECL is a MIPS16 function. Return the ambient
4133 setting if DECL is null. */
4136 mips_use_mips16_mode_p (tree decl
)
4140 /* Nested functions must use the same frame pointer as their
4141 parent and must therefore use the same ISA mode. */
4142 tree parent
= decl_function_context (decl
);
4145 if (mips_mips16_decl_p (decl
))
4147 if (mips_nomips16_decl_p (decl
))
4150 return mips_base_mips16
;
4153 /* Return true if calls to X can use R_MIPS_CALL* relocations. */
4156 mips_ok_for_lazy_binding_p (rtx x
)
4158 return (TARGET_USE_GOT
4159 && GET_CODE (x
) == SYMBOL_REF
4160 && !mips_symbol_binds_local_p (x
));
4163 /* Load function address ADDR into register DEST. SIBCALL_P is true
4164 if the address is needed for a sibling call. */
4167 mips_load_call_address (rtx dest
, rtx addr
, int sibcall_p
)
4169 /* If we're generating PIC, and this call is to a global function,
4170 try to allow its address to be resolved lazily. This isn't
4171 possible if TARGET_CALL_SAVED_GP since the value of $gp on entry
4172 to the stub would be our caller's gp, not ours. */
4173 if (TARGET_EXPLICIT_RELOCS
4174 && !(sibcall_p
&& TARGET_CALL_SAVED_GP
)
4175 && mips_ok_for_lazy_binding_p (addr
))
4177 rtx high
, lo_sum_symbol
;
4179 high
= mips_unspec_offset_high (dest
, pic_offset_table_rtx
,
4180 addr
, SYMBOL_GOTOFF_CALL
);
4181 lo_sum_symbol
= mips_unspec_address (addr
, SYMBOL_GOTOFF_CALL
);
4182 if (Pmode
== SImode
)
4183 emit_insn (gen_load_callsi (dest
, high
, lo_sum_symbol
));
4185 emit_insn (gen_load_calldi (dest
, high
, lo_sum_symbol
));
4188 mips_emit_move (dest
, addr
);
4192 /* Expand a call or call_value instruction. RESULT is where the
4193 result will go (null for calls), ADDR is the address of the
4194 function, ARGS_SIZE is the size of the arguments and AUX is
4195 the value passed to us by mips_function_arg. SIBCALL_P is true
4196 if we are expanding a sibling call, false if we're expanding
4200 mips_expand_call (rtx result
, rtx addr
, rtx args_size
, rtx aux
, int sibcall_p
)
4202 rtx orig_addr
, pattern
, insn
;
4205 if (!call_insn_operand (addr
, VOIDmode
))
4207 addr
= gen_reg_rtx (Pmode
);
4208 mips_load_call_address (addr
, orig_addr
, sibcall_p
);
4212 && TARGET_HARD_FLOAT_ABI
4213 && build_mips16_call_stub (result
, addr
, args_size
,
4214 aux
== 0 ? 0 : (int) GET_MODE (aux
)))
4218 pattern
= (sibcall_p
4219 ? gen_sibcall_internal (addr
, args_size
)
4220 : gen_call_internal (addr
, args_size
));
4221 else if (GET_CODE (result
) == PARALLEL
&& XVECLEN (result
, 0) == 2)
4225 reg1
= XEXP (XVECEXP (result
, 0, 0), 0);
4226 reg2
= XEXP (XVECEXP (result
, 0, 1), 0);
4229 ? gen_sibcall_value_multiple_internal (reg1
, addr
, args_size
, reg2
)
4230 : gen_call_value_multiple_internal (reg1
, addr
, args_size
, reg2
));
4233 pattern
= (sibcall_p
4234 ? gen_sibcall_value_internal (result
, addr
, args_size
)
4235 : gen_call_value_internal (result
, addr
, args_size
));
4237 insn
= emit_call_insn (pattern
);
4239 /* Lazy-binding stubs require $gp to be valid on entry. */
4240 if (mips_ok_for_lazy_binding_p (orig_addr
))
4241 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), pic_offset_table_rtx
);
4245 /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL. */
4248 mips_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
4250 if (!TARGET_SIBCALLS
)
4253 /* We can't do a sibcall if the called function is a MIPS16 function
4254 because there is no direct "jx" instruction equivalent to "jalx" to
4255 switch the ISA mode. */
4256 if (mips_use_mips16_mode_p (decl
))
4259 /* ...and when -minterlink-mips16 is in effect, assume that external
4260 functions could be MIPS16 ones unless an attribute explicitly
4261 tells us otherwise. We only care about cases where the sibling
4262 and normal calls would both be direct. */
4263 if (TARGET_INTERLINK_MIPS16
4265 && DECL_EXTERNAL (decl
)
4266 && !mips_nomips16_decl_p (decl
)
4267 && const_call_insn_operand (XEXP (DECL_RTL (decl
), 0), VOIDmode
))
4274 /* Emit code to move general operand SRC into condition-code
4275 register DEST. SCRATCH is a scratch TFmode float register.
4282 where FP1 and FP2 are single-precision float registers
4283 taken from SCRATCH. */
4286 mips_emit_fcc_reload (rtx dest
, rtx src
, rtx scratch
)
4290 /* Change the source to SFmode. */
4292 src
= adjust_address (src
, SFmode
, 0);
4293 else if (REG_P (src
) || GET_CODE (src
) == SUBREG
)
4294 src
= gen_rtx_REG (SFmode
, true_regnum (src
));
4296 fp1
= gen_rtx_REG (SFmode
, REGNO (scratch
));
4297 fp2
= gen_rtx_REG (SFmode
, REGNO (scratch
) + MAX_FPRS_PER_FMT
);
4299 mips_emit_move (copy_rtx (fp1
), src
);
4300 mips_emit_move (copy_rtx (fp2
), CONST0_RTX (SFmode
));
4301 emit_insn (gen_slt_sf (dest
, fp2
, fp1
));
4304 /* Emit code to change the current function's return address to
4305 ADDRESS. SCRATCH is available as a scratch register, if needed.
4306 ADDRESS and SCRATCH are both word-mode GPRs. */
4309 mips_set_return_address (rtx address
, rtx scratch
)
4313 compute_frame_size (get_frame_size ());
4314 gcc_assert ((cfun
->machine
->frame
.mask
>> 31) & 1);
4315 slot_address
= mips_add_offset (scratch
, stack_pointer_rtx
,
4316 cfun
->machine
->frame
.gp_sp_offset
);
4318 mips_emit_move (gen_rtx_MEM (GET_MODE (address
), slot_address
), address
);
4321 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
4322 Assume that the areas do not overlap. */
4325 mips_block_move_straight (rtx dest
, rtx src
, HOST_WIDE_INT length
)
4327 HOST_WIDE_INT offset
, delta
;
4328 unsigned HOST_WIDE_INT bits
;
4330 enum machine_mode mode
;
4333 /* Work out how many bits to move at a time. If both operands have
4334 half-word alignment, it is usually better to move in half words.
4335 For instance, lh/lh/sh/sh is usually better than lwl/lwr/swl/swr
4336 and lw/lw/sw/sw is usually better than ldl/ldr/sdl/sdr.
4337 Otherwise move word-sized chunks. */
4338 if (MEM_ALIGN (src
) == BITS_PER_WORD
/ 2
4339 && MEM_ALIGN (dest
) == BITS_PER_WORD
/ 2)
4340 bits
= BITS_PER_WORD
/ 2;
4342 bits
= BITS_PER_WORD
;
4344 mode
= mode_for_size (bits
, MODE_INT
, 0);
4345 delta
= bits
/ BITS_PER_UNIT
;
4347 /* Allocate a buffer for the temporary registers. */
4348 regs
= alloca (sizeof (rtx
) * length
/ delta
);
4350 /* Load as many BITS-sized chunks as possible. Use a normal load if
4351 the source has enough alignment, otherwise use left/right pairs. */
4352 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
4354 regs
[i
] = gen_reg_rtx (mode
);
4355 if (MEM_ALIGN (src
) >= bits
)
4356 mips_emit_move (regs
[i
], adjust_address (src
, mode
, offset
));
4359 rtx part
= adjust_address (src
, BLKmode
, offset
);
4360 if (!mips_expand_unaligned_load (regs
[i
], part
, bits
, 0))
4365 /* Copy the chunks to the destination. */
4366 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
4367 if (MEM_ALIGN (dest
) >= bits
)
4368 mips_emit_move (adjust_address (dest
, mode
, offset
), regs
[i
]);
4371 rtx part
= adjust_address (dest
, BLKmode
, offset
);
4372 if (!mips_expand_unaligned_store (part
, regs
[i
], bits
, 0))
4376 /* Mop up any left-over bytes. */
4377 if (offset
< length
)
4379 src
= adjust_address (src
, BLKmode
, offset
);
4380 dest
= adjust_address (dest
, BLKmode
, offset
);
4381 move_by_pieces (dest
, src
, length
- offset
,
4382 MIN (MEM_ALIGN (src
), MEM_ALIGN (dest
)), 0);
4386 #define MAX_MOVE_REGS 4
4387 #define MAX_MOVE_BYTES (MAX_MOVE_REGS * UNITS_PER_WORD)
4390 /* Helper function for doing a loop-based block operation on memory
4391 reference MEM. Each iteration of the loop will operate on LENGTH
4394 Create a new base register for use within the loop and point it to
4395 the start of MEM. Create a new memory reference that uses this
4396 register. Store them in *LOOP_REG and *LOOP_MEM respectively. */
4399 mips_adjust_block_mem (rtx mem
, HOST_WIDE_INT length
,
4400 rtx
*loop_reg
, rtx
*loop_mem
)
4402 *loop_reg
= copy_addr_to_reg (XEXP (mem
, 0));
4404 /* Although the new mem does not refer to a known location,
4405 it does keep up to LENGTH bytes of alignment. */
4406 *loop_mem
= change_address (mem
, BLKmode
, *loop_reg
);
4407 set_mem_align (*loop_mem
, MIN (MEM_ALIGN (mem
), length
* BITS_PER_UNIT
));
4411 /* Move LENGTH bytes from SRC to DEST using a loop that moves MAX_MOVE_BYTES
4412 per iteration. LENGTH must be at least MAX_MOVE_BYTES. Assume that the
4413 memory regions do not overlap. */
4416 mips_block_move_loop (rtx dest
, rtx src
, HOST_WIDE_INT length
)
4418 rtx label
, src_reg
, dest_reg
, final_src
;
4419 HOST_WIDE_INT leftover
;
4421 leftover
= length
% MAX_MOVE_BYTES
;
4424 /* Create registers and memory references for use within the loop. */
4425 mips_adjust_block_mem (src
, MAX_MOVE_BYTES
, &src_reg
, &src
);
4426 mips_adjust_block_mem (dest
, MAX_MOVE_BYTES
, &dest_reg
, &dest
);
4428 /* Calculate the value that SRC_REG should have after the last iteration
4430 final_src
= expand_simple_binop (Pmode
, PLUS
, src_reg
, GEN_INT (length
),
4433 /* Emit the start of the loop. */
4434 label
= gen_label_rtx ();
4437 /* Emit the loop body. */
4438 mips_block_move_straight (dest
, src
, MAX_MOVE_BYTES
);
4440 /* Move on to the next block. */
4441 mips_emit_move (src_reg
, plus_constant (src_reg
, MAX_MOVE_BYTES
));
4442 mips_emit_move (dest_reg
, plus_constant (dest_reg
, MAX_MOVE_BYTES
));
4444 /* Emit the loop condition. */
4445 if (Pmode
== DImode
)
4446 emit_insn (gen_cmpdi (src_reg
, final_src
));
4448 emit_insn (gen_cmpsi (src_reg
, final_src
));
4449 emit_jump_insn (gen_bne (label
));
4451 /* Mop up any left-over bytes. */
4453 mips_block_move_straight (dest
, src
, leftover
);
4457 /* Expand a loop of synci insns for the address range [BEGIN, END). */
4460 mips_expand_synci_loop (rtx begin
, rtx end
)
4462 rtx inc
, label
, cmp
, cmp_result
;
4464 /* Load INC with the cache line size (rdhwr INC,$1). */
4465 inc
= gen_reg_rtx (SImode
);
4466 emit_insn (gen_rdhwr (inc
, const1_rtx
));
4468 /* Loop back to here. */
4469 label
= gen_label_rtx ();
4472 emit_insn (gen_synci (begin
));
4474 cmp
= gen_reg_rtx (Pmode
);
4475 mips_emit_binary (GTU
, cmp
, begin
, end
);
4477 mips_emit_binary (PLUS
, begin
, begin
, inc
);
4479 cmp_result
= gen_rtx_EQ (VOIDmode
, cmp
, const0_rtx
);
4480 emit_jump_insn (gen_condjump (cmp_result
, label
));
4483 /* Expand a movmemsi instruction. */
4486 mips_expand_block_move (rtx dest
, rtx src
, rtx length
)
4488 if (GET_CODE (length
) == CONST_INT
)
4490 if (INTVAL (length
) <= 2 * MAX_MOVE_BYTES
)
4492 mips_block_move_straight (dest
, src
, INTVAL (length
));
4497 mips_block_move_loop (dest
, src
, INTVAL (length
));
4504 /* Argument support functions. */
4506 /* Initialize CUMULATIVE_ARGS for a function. */
4509 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
,
4510 rtx libname ATTRIBUTE_UNUSED
)
4512 static CUMULATIVE_ARGS zero_cum
;
4513 tree param
, next_param
;
4516 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
4518 /* Determine if this function has variable arguments. This is
4519 indicated by the last argument being 'void_type_mode' if there
4520 are no variable arguments. The standard MIPS calling sequence
4521 passes all arguments in the general purpose registers in this case. */
4523 for (param
= fntype
? TYPE_ARG_TYPES (fntype
) : 0;
4524 param
!= 0; param
= next_param
)
4526 next_param
= TREE_CHAIN (param
);
4527 if (next_param
== 0 && TREE_VALUE (param
) != void_type_node
)
4528 cum
->gp_reg_found
= 1;
4533 /* Fill INFO with information about a single argument. CUM is the
4534 cumulative state for earlier arguments. MODE is the mode of this
4535 argument and TYPE is its type (if known). NAMED is true if this
4536 is a named (fixed) argument rather than a variable one. */
4539 mips_arg_info (const CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4540 tree type
, int named
, struct mips_arg_info
*info
)
4542 bool doubleword_aligned_p
;
4543 unsigned int num_bytes
, num_words
, max_regs
;
4545 /* Work out the size of the argument. */
4546 num_bytes
= type
? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
4547 num_words
= (num_bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
4549 /* Decide whether it should go in a floating-point register, assuming
4550 one is free. Later code checks for availability.
4552 The checks against UNITS_PER_FPVALUE handle the soft-float and
4553 single-float cases. */
4557 /* The EABI conventions have traditionally been defined in terms
4558 of TYPE_MODE, regardless of the actual type. */
4559 info
->fpr_p
= ((GET_MODE_CLASS (mode
) == MODE_FLOAT
4560 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
)
4561 && GET_MODE_SIZE (mode
) <= UNITS_PER_FPVALUE
);
4566 /* Only leading floating-point scalars are passed in
4567 floating-point registers. We also handle vector floats the same
4568 say, which is OK because they are not covered by the standard ABI. */
4569 info
->fpr_p
= (!cum
->gp_reg_found
4570 && cum
->arg_number
< 2
4571 && (type
== 0 || SCALAR_FLOAT_TYPE_P (type
)
4572 || VECTOR_FLOAT_TYPE_P (type
))
4573 && (GET_MODE_CLASS (mode
) == MODE_FLOAT
4574 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
)
4575 && GET_MODE_SIZE (mode
) <= UNITS_PER_FPVALUE
);
4580 /* Scalar and complex floating-point types are passed in
4581 floating-point registers. */
4582 info
->fpr_p
= (named
4583 && (type
== 0 || FLOAT_TYPE_P (type
))
4584 && (GET_MODE_CLASS (mode
) == MODE_FLOAT
4585 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
4586 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
)
4587 && GET_MODE_UNIT_SIZE (mode
) <= UNITS_PER_FPVALUE
);
4589 /* ??? According to the ABI documentation, the real and imaginary
4590 parts of complex floats should be passed in individual registers.
4591 The real and imaginary parts of stack arguments are supposed
4592 to be contiguous and there should be an extra word of padding
4595 This has two problems. First, it makes it impossible to use a
4596 single "void *" va_list type, since register and stack arguments
4597 are passed differently. (At the time of writing, MIPSpro cannot
4598 handle complex float varargs correctly.) Second, it's unclear
4599 what should happen when there is only one register free.
4601 For now, we assume that named complex floats should go into FPRs
4602 if there are two FPRs free, otherwise they should be passed in the
4603 same way as a struct containing two floats. */
4605 && GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
4606 && GET_MODE_UNIT_SIZE (mode
) < UNITS_PER_FPVALUE
)
4608 if (cum
->num_gprs
>= MAX_ARGS_IN_REGISTERS
- 1)
4609 info
->fpr_p
= false;
4619 /* See whether the argument has doubleword alignment. */
4620 doubleword_aligned_p
= FUNCTION_ARG_BOUNDARY (mode
, type
) > BITS_PER_WORD
;
4622 /* Set REG_OFFSET to the register count we're interested in.
4623 The EABI allocates the floating-point registers separately,
4624 but the other ABIs allocate them like integer registers. */
4625 info
->reg_offset
= (mips_abi
== ABI_EABI
&& info
->fpr_p
4629 /* Advance to an even register if the argument is doubleword-aligned. */
4630 if (doubleword_aligned_p
)
4631 info
->reg_offset
+= info
->reg_offset
& 1;
4633 /* Work out the offset of a stack argument. */
4634 info
->stack_offset
= cum
->stack_words
;
4635 if (doubleword_aligned_p
)
4636 info
->stack_offset
+= info
->stack_offset
& 1;
4638 max_regs
= MAX_ARGS_IN_REGISTERS
- info
->reg_offset
;
4640 /* Partition the argument between registers and stack. */
4641 info
->reg_words
= MIN (num_words
, max_regs
);
4642 info
->stack_words
= num_words
- info
->reg_words
;
4646 /* INFO describes an argument that is passed in a single-register value.
4647 Return the register it uses, assuming that FPRs are available if
4651 mips_arg_regno (const struct mips_arg_info
*info
, bool hard_float_p
)
4653 if (!info
->fpr_p
|| !hard_float_p
)
4654 return GP_ARG_FIRST
+ info
->reg_offset
;
4655 else if (mips_abi
== ABI_32
&& TARGET_DOUBLE_FLOAT
&& info
->reg_offset
> 0)
4656 /* In o32, the second argument is always passed in $f14
4657 for TARGET_DOUBLE_FLOAT, regardless of whether the
4658 first argument was a word or doubleword. */
4659 return FP_ARG_FIRST
+ 2;
4661 return FP_ARG_FIRST
+ info
->reg_offset
;
4664 /* Implement FUNCTION_ARG_ADVANCE. */
4667 function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4668 tree type
, int named
)
4670 struct mips_arg_info info
;
4672 mips_arg_info (cum
, mode
, type
, named
, &info
);
4675 cum
->gp_reg_found
= true;
4677 /* See the comment above the cumulative args structure in mips.h
4678 for an explanation of what this code does. It assumes the O32
4679 ABI, which passes at most 2 arguments in float registers. */
4680 if (cum
->arg_number
< 2 && info
.fpr_p
)
4681 cum
->fp_code
+= (mode
== SFmode
? 1 : 2) << (cum
->arg_number
* 2);
4683 if (mips_abi
!= ABI_EABI
|| !info
.fpr_p
)
4684 cum
->num_gprs
= info
.reg_offset
+ info
.reg_words
;
4685 else if (info
.reg_words
> 0)
4686 cum
->num_fprs
+= MAX_FPRS_PER_FMT
;
4688 if (info
.stack_words
> 0)
4689 cum
->stack_words
= info
.stack_offset
+ info
.stack_words
;
4694 /* Implement FUNCTION_ARG. */
4697 function_arg (const CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4698 tree type
, int named
)
4700 struct mips_arg_info info
;
4702 /* We will be called with a mode of VOIDmode after the last argument
4703 has been seen. Whatever we return will be passed to the call
4704 insn. If we need a mips16 fp_code, return a REG with the code
4705 stored as the mode. */
4706 if (mode
== VOIDmode
)
4708 if (TARGET_MIPS16
&& cum
->fp_code
!= 0)
4709 return gen_rtx_REG ((enum machine_mode
) cum
->fp_code
, 0);
4715 mips_arg_info (cum
, mode
, type
, named
, &info
);
4717 /* Return straight away if the whole argument is passed on the stack. */
4718 if (info
.reg_offset
== MAX_ARGS_IN_REGISTERS
)
4722 && TREE_CODE (type
) == RECORD_TYPE
4724 && TYPE_SIZE_UNIT (type
)
4725 && host_integerp (TYPE_SIZE_UNIT (type
), 1)
4728 /* The Irix 6 n32/n64 ABIs say that if any 64-bit chunk of the
4729 structure contains a double in its entirety, then that 64-bit
4730 chunk is passed in a floating point register. */
4733 /* First check to see if there is any such field. */
4734 for (field
= TYPE_FIELDS (type
); field
; field
= TREE_CHAIN (field
))
4735 if (TREE_CODE (field
) == FIELD_DECL
4736 && TREE_CODE (TREE_TYPE (field
)) == REAL_TYPE
4737 && TYPE_PRECISION (TREE_TYPE (field
)) == BITS_PER_WORD
4738 && host_integerp (bit_position (field
), 0)
4739 && int_bit_position (field
) % BITS_PER_WORD
== 0)
4744 /* Now handle the special case by returning a PARALLEL
4745 indicating where each 64-bit chunk goes. INFO.REG_WORDS
4746 chunks are passed in registers. */
4748 HOST_WIDE_INT bitpos
;
4751 /* assign_parms checks the mode of ENTRY_PARM, so we must
4752 use the actual mode here. */
4753 ret
= gen_rtx_PARALLEL (mode
, rtvec_alloc (info
.reg_words
));
4756 field
= TYPE_FIELDS (type
);
4757 for (i
= 0; i
< info
.reg_words
; i
++)
4761 for (; field
; field
= TREE_CHAIN (field
))
4762 if (TREE_CODE (field
) == FIELD_DECL
4763 && int_bit_position (field
) >= bitpos
)
4767 && int_bit_position (field
) == bitpos
4768 && TREE_CODE (TREE_TYPE (field
)) == REAL_TYPE
4769 && !TARGET_SOFT_FLOAT
4770 && TYPE_PRECISION (TREE_TYPE (field
)) == BITS_PER_WORD
)
4771 reg
= gen_rtx_REG (DFmode
, FP_ARG_FIRST
+ info
.reg_offset
+ i
);
4773 reg
= gen_rtx_REG (DImode
, GP_ARG_FIRST
+ info
.reg_offset
+ i
);
4776 = gen_rtx_EXPR_LIST (VOIDmode
, reg
,
4777 GEN_INT (bitpos
/ BITS_PER_UNIT
));
4779 bitpos
+= BITS_PER_WORD
;
4785 /* Handle the n32/n64 conventions for passing complex floating-point
4786 arguments in FPR pairs. The real part goes in the lower register
4787 and the imaginary part goes in the upper register. */
4790 && GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
4793 enum machine_mode inner
;
4796 inner
= GET_MODE_INNER (mode
);
4797 reg
= FP_ARG_FIRST
+ info
.reg_offset
;
4798 if (info
.reg_words
* UNITS_PER_WORD
== GET_MODE_SIZE (inner
))
4800 /* Real part in registers, imaginary part on stack. */
4801 gcc_assert (info
.stack_words
== info
.reg_words
);
4802 return gen_rtx_REG (inner
, reg
);
4806 gcc_assert (info
.stack_words
== 0);
4807 real
= gen_rtx_EXPR_LIST (VOIDmode
,
4808 gen_rtx_REG (inner
, reg
),
4810 imag
= gen_rtx_EXPR_LIST (VOIDmode
,
4812 reg
+ info
.reg_words
/ 2),
4813 GEN_INT (GET_MODE_SIZE (inner
)));
4814 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, real
, imag
));
4818 return gen_rtx_REG (mode
, mips_arg_regno (&info
, TARGET_HARD_FLOAT
));
4822 /* Implement TARGET_ARG_PARTIAL_BYTES. */
4825 mips_arg_partial_bytes (CUMULATIVE_ARGS
*cum
,
4826 enum machine_mode mode
, tree type
, bool named
)
4828 struct mips_arg_info info
;
4830 mips_arg_info (cum
, mode
, type
, named
, &info
);
4831 return info
.stack_words
> 0 ? info
.reg_words
* UNITS_PER_WORD
: 0;
4835 /* Implement FUNCTION_ARG_BOUNDARY. Every parameter gets at least
4836 PARM_BOUNDARY bits of alignment, but will be given anything up
4837 to STACK_BOUNDARY bits if the type requires it. */
4840 function_arg_boundary (enum machine_mode mode
, tree type
)
4842 unsigned int alignment
;
4844 alignment
= type
? TYPE_ALIGN (type
) : GET_MODE_ALIGNMENT (mode
);
4845 if (alignment
< PARM_BOUNDARY
)
4846 alignment
= PARM_BOUNDARY
;
4847 if (alignment
> STACK_BOUNDARY
)
4848 alignment
= STACK_BOUNDARY
;
4852 /* Return true if FUNCTION_ARG_PADDING (MODE, TYPE) should return
4853 upward rather than downward. In other words, return true if the
4854 first byte of the stack slot has useful data, false if the last
4858 mips_pad_arg_upward (enum machine_mode mode
, const_tree type
)
4860 /* On little-endian targets, the first byte of every stack argument
4861 is passed in the first byte of the stack slot. */
4862 if (!BYTES_BIG_ENDIAN
)
4865 /* Otherwise, integral types are padded downward: the last byte of a
4866 stack argument is passed in the last byte of the stack slot. */
4868 ? (INTEGRAL_TYPE_P (type
)
4869 || POINTER_TYPE_P (type
)
4870 || FIXED_POINT_TYPE_P (type
))
4871 : (GET_MODE_CLASS (mode
) == MODE_INT
4872 || ALL_SCALAR_FIXED_POINT_MODE_P (mode
)))
4875 /* Big-endian o64 pads floating-point arguments downward. */
4876 if (mips_abi
== ABI_O64
)
4877 if (type
!= 0 ? FLOAT_TYPE_P (type
) : GET_MODE_CLASS (mode
) == MODE_FLOAT
)
4880 /* Other types are padded upward for o32, o64, n32 and n64. */
4881 if (mips_abi
!= ABI_EABI
)
4884 /* Arguments smaller than a stack slot are padded downward. */
4885 if (mode
!= BLKmode
)
4886 return (GET_MODE_BITSIZE (mode
) >= PARM_BOUNDARY
);
4888 return (int_size_in_bytes (type
) >= (PARM_BOUNDARY
/ BITS_PER_UNIT
));
4892 /* Likewise BLOCK_REG_PADDING (MODE, TYPE, ...). Return !BYTES_BIG_ENDIAN
4893 if the least significant byte of the register has useful data. Return
4894 the opposite if the most significant byte does. */
4897 mips_pad_reg_upward (enum machine_mode mode
, tree type
)
4899 /* No shifting is required for floating-point arguments. */
4900 if (type
!= 0 ? FLOAT_TYPE_P (type
) : GET_MODE_CLASS (mode
) == MODE_FLOAT
)
4901 return !BYTES_BIG_ENDIAN
;
4903 /* Otherwise, apply the same padding to register arguments as we do
4904 to stack arguments. */
4905 return mips_pad_arg_upward (mode
, type
);
4909 mips_setup_incoming_varargs (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4910 tree type
, int *pretend_size ATTRIBUTE_UNUSED
,
4913 CUMULATIVE_ARGS local_cum
;
4914 int gp_saved
, fp_saved
;
4916 /* The caller has advanced CUM up to, but not beyond, the last named
4917 argument. Advance a local copy of CUM past the last "real" named
4918 argument, to find out how many registers are left over. */
4921 FUNCTION_ARG_ADVANCE (local_cum
, mode
, type
, 1);
4923 /* Found out how many registers we need to save. */
4924 gp_saved
= MAX_ARGS_IN_REGISTERS
- local_cum
.num_gprs
;
4925 fp_saved
= (EABI_FLOAT_VARARGS_P
4926 ? MAX_ARGS_IN_REGISTERS
- local_cum
.num_fprs
4935 ptr
= plus_constant (virtual_incoming_args_rtx
,
4936 REG_PARM_STACK_SPACE (cfun
->decl
)
4937 - gp_saved
* UNITS_PER_WORD
);
4938 mem
= gen_rtx_MEM (BLKmode
, ptr
);
4939 set_mem_alias_set (mem
, get_varargs_alias_set ());
4941 move_block_from_reg (local_cum
.num_gprs
+ GP_ARG_FIRST
,
4946 /* We can't use move_block_from_reg, because it will use
4948 enum machine_mode mode
;
4951 /* Set OFF to the offset from virtual_incoming_args_rtx of
4952 the first float register. The FP save area lies below
4953 the integer one, and is aligned to UNITS_PER_FPVALUE bytes. */
4954 off
= -gp_saved
* UNITS_PER_WORD
;
4955 off
&= ~(UNITS_PER_FPVALUE
- 1);
4956 off
-= fp_saved
* UNITS_PER_FPREG
;
4958 mode
= TARGET_SINGLE_FLOAT
? SFmode
: DFmode
;
4960 for (i
= local_cum
.num_fprs
; i
< MAX_ARGS_IN_REGISTERS
;
4961 i
+= MAX_FPRS_PER_FMT
)
4965 ptr
= plus_constant (virtual_incoming_args_rtx
, off
);
4966 mem
= gen_rtx_MEM (mode
, ptr
);
4967 set_mem_alias_set (mem
, get_varargs_alias_set ());
4968 mips_emit_move (mem
, gen_rtx_REG (mode
, FP_ARG_FIRST
+ i
));
4969 off
+= UNITS_PER_HWFPVALUE
;
4973 if (REG_PARM_STACK_SPACE (cfun
->decl
) == 0)
4974 cfun
->machine
->varargs_size
= (gp_saved
* UNITS_PER_WORD
4975 + fp_saved
* UNITS_PER_FPREG
);
4978 /* Create the va_list data type.
4979 We keep 3 pointers, and two offsets.
4980 Two pointers are to the overflow area, which starts at the CFA.
4981 One of these is constant, for addressing into the GPR save area below it.
4982 The other is advanced up the stack through the overflow region.
4983 The third pointer is to the GPR save area. Since the FPR save area
4984 is just below it, we can address FPR slots off this pointer.
4985 We also keep two one-byte offsets, which are to be subtracted from the
4986 constant pointers to yield addresses in the GPR and FPR save areas.
4987 These are downcounted as float or non-float arguments are used,
4988 and when they get to zero, the argument must be obtained from the
4990 If !EABI_FLOAT_VARARGS_P, then no FPR save area exists, and a single
4991 pointer is enough. It's started at the GPR save area, and is
4993 Note that the GPR save area is not constant size, due to optimization
4994 in the prologue. Hence, we can't use a design with two pointers
4995 and two offsets, although we could have designed this with two pointers
4996 and three offsets. */
4999 mips_build_builtin_va_list (void)
5001 if (EABI_FLOAT_VARARGS_P
)
5003 tree f_ovfl
, f_gtop
, f_ftop
, f_goff
, f_foff
, f_res
, record
;
5006 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
5008 f_ovfl
= build_decl (FIELD_DECL
, get_identifier ("__overflow_argptr"),
5010 f_gtop
= build_decl (FIELD_DECL
, get_identifier ("__gpr_top"),
5012 f_ftop
= build_decl (FIELD_DECL
, get_identifier ("__fpr_top"),
5014 f_goff
= build_decl (FIELD_DECL
, get_identifier ("__gpr_offset"),
5015 unsigned_char_type_node
);
5016 f_foff
= build_decl (FIELD_DECL
, get_identifier ("__fpr_offset"),
5017 unsigned_char_type_node
);
5018 /* Explicitly pad to the size of a pointer, so that -Wpadded won't
5019 warn on every user file. */
5020 index
= build_int_cst (NULL_TREE
, GET_MODE_SIZE (ptr_mode
) - 2 - 1);
5021 array
= build_array_type (unsigned_char_type_node
,
5022 build_index_type (index
));
5023 f_res
= build_decl (FIELD_DECL
, get_identifier ("__reserved"), array
);
5025 DECL_FIELD_CONTEXT (f_ovfl
) = record
;
5026 DECL_FIELD_CONTEXT (f_gtop
) = record
;
5027 DECL_FIELD_CONTEXT (f_ftop
) = record
;
5028 DECL_FIELD_CONTEXT (f_goff
) = record
;
5029 DECL_FIELD_CONTEXT (f_foff
) = record
;
5030 DECL_FIELD_CONTEXT (f_res
) = record
;
5032 TYPE_FIELDS (record
) = f_ovfl
;
5033 TREE_CHAIN (f_ovfl
) = f_gtop
;
5034 TREE_CHAIN (f_gtop
) = f_ftop
;
5035 TREE_CHAIN (f_ftop
) = f_goff
;
5036 TREE_CHAIN (f_goff
) = f_foff
;
5037 TREE_CHAIN (f_foff
) = f_res
;
5039 layout_type (record
);
5042 else if (TARGET_IRIX
&& TARGET_IRIX6
)
5043 /* On IRIX 6, this type is 'char *'. */
5044 return build_pointer_type (char_type_node
);
5046 /* Otherwise, we use 'void *'. */
5047 return ptr_type_node
;
5050 /* Implement va_start. */
5053 mips_va_start (tree valist
, rtx nextarg
)
5055 if (EABI_FLOAT_VARARGS_P
)
5057 const CUMULATIVE_ARGS
*cum
;
5058 tree f_ovfl
, f_gtop
, f_ftop
, f_goff
, f_foff
;
5059 tree ovfl
, gtop
, ftop
, goff
, foff
;
5061 int gpr_save_area_size
;
5062 int fpr_save_area_size
;
5065 cum
= ¤t_function_args_info
;
5067 = (MAX_ARGS_IN_REGISTERS
- cum
->num_gprs
) * UNITS_PER_WORD
;
5069 = (MAX_ARGS_IN_REGISTERS
- cum
->num_fprs
) * UNITS_PER_FPREG
;
5071 f_ovfl
= TYPE_FIELDS (va_list_type_node
);
5072 f_gtop
= TREE_CHAIN (f_ovfl
);
5073 f_ftop
= TREE_CHAIN (f_gtop
);
5074 f_goff
= TREE_CHAIN (f_ftop
);
5075 f_foff
= TREE_CHAIN (f_goff
);
5077 ovfl
= build3 (COMPONENT_REF
, TREE_TYPE (f_ovfl
), valist
, f_ovfl
,
5079 gtop
= build3 (COMPONENT_REF
, TREE_TYPE (f_gtop
), valist
, f_gtop
,
5081 ftop
= build3 (COMPONENT_REF
, TREE_TYPE (f_ftop
), valist
, f_ftop
,
5083 goff
= build3 (COMPONENT_REF
, TREE_TYPE (f_goff
), valist
, f_goff
,
5085 foff
= build3 (COMPONENT_REF
, TREE_TYPE (f_foff
), valist
, f_foff
,
5088 /* Emit code to initialize OVFL, which points to the next varargs
5089 stack argument. CUM->STACK_WORDS gives the number of stack
5090 words used by named arguments. */
5091 t
= make_tree (TREE_TYPE (ovfl
), virtual_incoming_args_rtx
);
5092 if (cum
->stack_words
> 0)
5093 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (ovfl
), t
,
5094 size_int (cum
->stack_words
* UNITS_PER_WORD
));
5095 t
= build2 (GIMPLE_MODIFY_STMT
, TREE_TYPE (ovfl
), ovfl
, t
);
5096 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5098 /* Emit code to initialize GTOP, the top of the GPR save area. */
5099 t
= make_tree (TREE_TYPE (gtop
), virtual_incoming_args_rtx
);
5100 t
= build2 (GIMPLE_MODIFY_STMT
, TREE_TYPE (gtop
), gtop
, t
);
5101 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5103 /* Emit code to initialize FTOP, the top of the FPR save area.
5104 This address is gpr_save_area_bytes below GTOP, rounded
5105 down to the next fp-aligned boundary. */
5106 t
= make_tree (TREE_TYPE (ftop
), virtual_incoming_args_rtx
);
5107 fpr_offset
= gpr_save_area_size
+ UNITS_PER_FPVALUE
- 1;
5108 fpr_offset
&= ~(UNITS_PER_FPVALUE
- 1);
5110 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (ftop
), t
,
5111 size_int (-fpr_offset
));
5112 t
= build2 (GIMPLE_MODIFY_STMT
, TREE_TYPE (ftop
), ftop
, t
);
5113 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5115 /* Emit code to initialize GOFF, the offset from GTOP of the
5116 next GPR argument. */
5117 t
= build2 (GIMPLE_MODIFY_STMT
, TREE_TYPE (goff
), goff
,
5118 build_int_cst (NULL_TREE
, gpr_save_area_size
));
5119 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5121 /* Likewise emit code to initialize FOFF, the offset from FTOP
5122 of the next FPR argument. */
5123 t
= build2 (GIMPLE_MODIFY_STMT
, TREE_TYPE (foff
), foff
,
5124 build_int_cst (NULL_TREE
, fpr_save_area_size
));
5125 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5129 nextarg
= plus_constant (nextarg
, -cfun
->machine
->varargs_size
);
5130 std_expand_builtin_va_start (valist
, nextarg
);
5134 /* Implement va_arg. */
5137 mips_gimplify_va_arg_expr (tree valist
, tree type
, tree
*pre_p
, tree
*post_p
)
5139 HOST_WIDE_INT size
, rsize
;
5143 indirect
= pass_by_reference (NULL
, TYPE_MODE (type
), type
, 0);
5146 type
= build_pointer_type (type
);
5148 size
= int_size_in_bytes (type
);
5149 rsize
= (size
+ UNITS_PER_WORD
- 1) & -UNITS_PER_WORD
;
5151 if (mips_abi
!= ABI_EABI
|| !EABI_FLOAT_VARARGS_P
)
5152 addr
= std_gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
5155 /* Not a simple merged stack. */
5157 tree f_ovfl
, f_gtop
, f_ftop
, f_goff
, f_foff
;
5158 tree ovfl
, top
, off
, align
;
5159 HOST_WIDE_INT osize
;
5162 f_ovfl
= TYPE_FIELDS (va_list_type_node
);
5163 f_gtop
= TREE_CHAIN (f_ovfl
);
5164 f_ftop
= TREE_CHAIN (f_gtop
);
5165 f_goff
= TREE_CHAIN (f_ftop
);
5166 f_foff
= TREE_CHAIN (f_goff
);
5168 /* We maintain separate pointers and offsets for floating-point
5169 and integer arguments, but we need similar code in both cases.
5172 TOP be the top of the register save area;
5173 OFF be the offset from TOP of the next register;
5174 ADDR_RTX be the address of the argument;
5175 RSIZE be the number of bytes used to store the argument
5176 when it's in the register save area;
5177 OSIZE be the number of bytes used to store it when it's
5178 in the stack overflow area; and
5179 PADDING be (BYTES_BIG_ENDIAN ? OSIZE - RSIZE : 0)
5181 The code we want is:
5183 1: off &= -rsize; // round down
5186 4: addr_rtx = top - off;
5191 9: ovfl += ((intptr_t) ovfl + osize - 1) & -osize;
5192 10: addr_rtx = ovfl + PADDING;
5196 [1] and [9] can sometimes be optimized away. */
5198 ovfl
= build3 (COMPONENT_REF
, TREE_TYPE (f_ovfl
), valist
, f_ovfl
,
5201 if (GET_MODE_CLASS (TYPE_MODE (type
)) == MODE_FLOAT
5202 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_FPVALUE
)
5204 top
= build3 (COMPONENT_REF
, TREE_TYPE (f_ftop
), valist
, f_ftop
,
5206 off
= build3 (COMPONENT_REF
, TREE_TYPE (f_foff
), valist
, f_foff
,
5209 /* When floating-point registers are saved to the stack,
5210 each one will take up UNITS_PER_HWFPVALUE bytes, regardless
5211 of the float's precision. */
5212 rsize
= UNITS_PER_HWFPVALUE
;
5214 /* Overflow arguments are padded to UNITS_PER_WORD bytes
5215 (= PARM_BOUNDARY bits). This can be different from RSIZE
5218 (1) On 32-bit targets when TYPE is a structure such as:
5220 struct s { float f; };
5222 Such structures are passed in paired FPRs, so RSIZE
5223 will be 8 bytes. However, the structure only takes
5224 up 4 bytes of memory, so OSIZE will only be 4.
5226 (2) In combinations such as -mgp64 -msingle-float
5227 -fshort-double. Doubles passed in registers
5228 will then take up 4 (UNITS_PER_HWFPVALUE) bytes,
5229 but those passed on the stack take up
5230 UNITS_PER_WORD bytes. */
5231 osize
= MAX (GET_MODE_SIZE (TYPE_MODE (type
)), UNITS_PER_WORD
);
5235 top
= build3 (COMPONENT_REF
, TREE_TYPE (f_gtop
), valist
, f_gtop
,
5237 off
= build3 (COMPONENT_REF
, TREE_TYPE (f_goff
), valist
, f_goff
,
5239 if (rsize
> UNITS_PER_WORD
)
5241 /* [1] Emit code for: off &= -rsize. */
5242 t
= build2 (BIT_AND_EXPR
, TREE_TYPE (off
), off
,
5243 build_int_cst (NULL_TREE
, -rsize
));
5244 t
= build2 (GIMPLE_MODIFY_STMT
, TREE_TYPE (off
), off
, t
);
5245 gimplify_and_add (t
, pre_p
);
5250 /* [2] Emit code to branch if off == 0. */
5251 t
= build2 (NE_EXPR
, boolean_type_node
, off
,
5252 build_int_cst (TREE_TYPE (off
), 0));
5253 addr
= build3 (COND_EXPR
, ptr_type_node
, t
, NULL_TREE
, NULL_TREE
);
5255 /* [5] Emit code for: off -= rsize. We do this as a form of
5256 post-increment not available to C. Also widen for the
5257 coming pointer arithmetic. */
5258 t
= fold_convert (TREE_TYPE (off
), build_int_cst (NULL_TREE
, rsize
));
5259 t
= build2 (POSTDECREMENT_EXPR
, TREE_TYPE (off
), off
, t
);
5260 t
= fold_convert (sizetype
, t
);
5261 t
= fold_build1 (NEGATE_EXPR
, sizetype
, t
);
5263 /* [4] Emit code for: addr_rtx = top - off. On big endian machines,
5264 the argument has RSIZE - SIZE bytes of leading padding. */
5265 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (top
), top
, t
);
5266 if (BYTES_BIG_ENDIAN
&& rsize
> size
)
5268 u
= size_int (rsize
- size
);
5269 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, u
);
5271 COND_EXPR_THEN (addr
) = t
;
5273 if (osize
> UNITS_PER_WORD
)
5275 /* [9] Emit: ovfl += ((intptr_t) ovfl + osize - 1) & -osize. */
5276 u
= size_int (osize
- 1);
5277 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (ovfl
), ovfl
, u
);
5278 t
= fold_convert (sizetype
, t
);
5279 u
= size_int (-osize
);
5280 t
= build2 (BIT_AND_EXPR
, sizetype
, t
, u
);
5281 t
= fold_convert (TREE_TYPE (ovfl
), t
);
5282 align
= build2 (GIMPLE_MODIFY_STMT
, TREE_TYPE (ovfl
), ovfl
, t
);
5287 /* [10, 11]. Emit code to store ovfl in addr_rtx, then
5288 post-increment ovfl by osize. On big-endian machines,
5289 the argument has OSIZE - SIZE bytes of leading padding. */
5290 u
= fold_convert (TREE_TYPE (ovfl
),
5291 build_int_cst (NULL_TREE
, osize
));
5292 t
= build2 (POSTINCREMENT_EXPR
, TREE_TYPE (ovfl
), ovfl
, u
);
5293 if (BYTES_BIG_ENDIAN
&& osize
> size
)
5295 u
= size_int (osize
- size
);
5296 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, u
);
5299 /* String [9] and [10,11] together. */
5301 t
= build2 (COMPOUND_EXPR
, TREE_TYPE (t
), align
, t
);
5302 COND_EXPR_ELSE (addr
) = t
;
5304 addr
= fold_convert (build_pointer_type (type
), addr
);
5305 addr
= build_va_arg_indirect_ref (addr
);
5309 addr
= build_va_arg_indirect_ref (addr
);
5314 /* Return true if it is possible to use left/right accesses for a
5315 bitfield of WIDTH bits starting BITPOS bits into *OP. When
5316 returning true, update *OP, *LEFT and *RIGHT as follows:
5318 *OP is a BLKmode reference to the whole field.
5320 *LEFT is a QImode reference to the first byte if big endian or
5321 the last byte if little endian. This address can be used in the
5322 left-side instructions (lwl, swl, ldl, sdl).
5324 *RIGHT is a QImode reference to the opposite end of the field and
5325 can be used in the patterning right-side instruction. */
5328 mips_get_unaligned_mem (rtx
*op
, unsigned int width
, int bitpos
,
5329 rtx
*left
, rtx
*right
)
5333 /* Check that the operand really is a MEM. Not all the extv and
5334 extzv predicates are checked. */
5338 /* Check that the size is valid. */
5339 if (width
!= 32 && (!TARGET_64BIT
|| width
!= 64))
5342 /* We can only access byte-aligned values. Since we are always passed
5343 a reference to the first byte of the field, it is not necessary to
5344 do anything with BITPOS after this check. */
5345 if (bitpos
% BITS_PER_UNIT
!= 0)
5348 /* Reject aligned bitfields: we want to use a normal load or store
5349 instead of a left/right pair. */
5350 if (MEM_ALIGN (*op
) >= width
)
5353 /* Adjust *OP to refer to the whole field. This also has the effect
5354 of legitimizing *OP's address for BLKmode, possibly simplifying it. */
5355 *op
= adjust_address (*op
, BLKmode
, 0);
5356 set_mem_size (*op
, GEN_INT (width
/ BITS_PER_UNIT
));
5358 /* Get references to both ends of the field. We deliberately don't
5359 use the original QImode *OP for FIRST since the new BLKmode one
5360 might have a simpler address. */
5361 first
= adjust_address (*op
, QImode
, 0);
5362 last
= adjust_address (*op
, QImode
, width
/ BITS_PER_UNIT
- 1);
5364 /* Allocate to LEFT and RIGHT according to endianness. LEFT should
5365 be the upper word and RIGHT the lower word. */
5366 if (TARGET_BIG_ENDIAN
)
5367 *left
= first
, *right
= last
;
5369 *left
= last
, *right
= first
;
5375 /* Try to emit the equivalent of (set DEST (zero_extract SRC WIDTH BITPOS)).
5376 Return true on success. We only handle cases where zero_extract is
5377 equivalent to sign_extract. */
5380 mips_expand_unaligned_load (rtx dest
, rtx src
, unsigned int width
, int bitpos
)
5382 rtx left
, right
, temp
;
5384 /* If TARGET_64BIT, the destination of a 32-bit load will be a
5385 paradoxical word_mode subreg. This is the only case in which
5386 we allow the destination to be larger than the source. */
5387 if (GET_CODE (dest
) == SUBREG
5388 && GET_MODE (dest
) == DImode
5389 && SUBREG_BYTE (dest
) == 0
5390 && GET_MODE (SUBREG_REG (dest
)) == SImode
)
5391 dest
= SUBREG_REG (dest
);
5393 /* After the above adjustment, the destination must be the same
5394 width as the source. */
5395 if (GET_MODE_BITSIZE (GET_MODE (dest
)) != width
)
5398 if (!mips_get_unaligned_mem (&src
, width
, bitpos
, &left
, &right
))
5401 temp
= gen_reg_rtx (GET_MODE (dest
));
5402 if (GET_MODE (dest
) == DImode
)
5404 emit_insn (gen_mov_ldl (temp
, src
, left
));
5405 emit_insn (gen_mov_ldr (dest
, copy_rtx (src
), right
, temp
));
5409 emit_insn (gen_mov_lwl (temp
, src
, left
));
5410 emit_insn (gen_mov_lwr (dest
, copy_rtx (src
), right
, temp
));
5416 /* Try to expand (set (zero_extract DEST WIDTH BITPOS) SRC). Return
5420 mips_expand_unaligned_store (rtx dest
, rtx src
, unsigned int width
, int bitpos
)
5423 enum machine_mode mode
;
5425 if (!mips_get_unaligned_mem (&dest
, width
, bitpos
, &left
, &right
))
5428 mode
= mode_for_size (width
, MODE_INT
, 0);
5429 src
= gen_lowpart (mode
, src
);
5433 emit_insn (gen_mov_sdl (dest
, src
, left
));
5434 emit_insn (gen_mov_sdr (copy_rtx (dest
), copy_rtx (src
), right
));
5438 emit_insn (gen_mov_swl (dest
, src
, left
));
5439 emit_insn (gen_mov_swr (copy_rtx (dest
), copy_rtx (src
), right
));
5444 /* Return true if X is a MEM with the same size as MODE. */
5447 mips_mem_fits_mode_p (enum machine_mode mode
, rtx x
)
5454 size
= MEM_SIZE (x
);
5455 return size
&& INTVAL (size
) == GET_MODE_SIZE (mode
);
5458 /* Return true if (zero_extract OP SIZE POSITION) can be used as the
5459 source of an "ext" instruction or the destination of an "ins"
5460 instruction. OP must be a register operand and the following
5461 conditions must hold:
5463 0 <= POSITION < GET_MODE_BITSIZE (GET_MODE (op))
5464 0 < SIZE <= GET_MODE_BITSIZE (GET_MODE (op))
5465 0 < POSITION + SIZE <= GET_MODE_BITSIZE (GET_MODE (op))
5467 Also reject lengths equal to a word as they are better handled
5468 by the move patterns. */
5471 mips_use_ins_ext_p (rtx op
, rtx size
, rtx position
)
5473 HOST_WIDE_INT len
, pos
;
5475 if (!ISA_HAS_EXT_INS
5476 || !register_operand (op
, VOIDmode
)
5477 || GET_MODE_BITSIZE (GET_MODE (op
)) > BITS_PER_WORD
)
5480 len
= INTVAL (size
);
5481 pos
= INTVAL (position
);
5483 if (len
<= 0 || len
>= GET_MODE_BITSIZE (GET_MODE (op
))
5484 || pos
< 0 || pos
+ len
> GET_MODE_BITSIZE (GET_MODE (op
)))
5490 /* Set up globals to generate code for the ISA or processor
5491 described by INFO. */
5494 mips_set_architecture (const struct mips_cpu_info
*info
)
5498 mips_arch_info
= info
;
5499 mips_arch
= info
->cpu
;
5500 mips_isa
= info
->isa
;
5505 /* Likewise for tuning. */
5508 mips_set_tune (const struct mips_cpu_info
*info
)
5512 mips_tune_info
= info
;
5513 mips_tune
= info
->cpu
;
5517 /* Initialize mips_split_addresses from the associated command-line
5520 mips_split_addresses is a half-way house between explicit
5521 relocations and the traditional assembler macros. It can
5522 split absolute 32-bit symbolic constants into a high/lo_sum
5523 pair but uses macros for other sorts of access.
5525 Like explicit relocation support for REL targets, it relies
5526 on GNU extensions in the assembler and the linker.
5528 Although this code should work for -O0, it has traditionally
5529 been treated as an optimization. */
5532 mips_init_split_addresses (void)
5534 if (!TARGET_MIPS16
&& TARGET_SPLIT_ADDRESSES
5535 && optimize
&& !flag_pic
5536 && !ABI_HAS_64BIT_SYMBOLS
)
5537 mips_split_addresses
= 1;
5539 mips_split_addresses
= 0;
5542 /* (Re-)Initialize information about relocs. */
5545 mips_init_relocs (void)
5547 memset (mips_split_p
, '\0', sizeof (mips_split_p
));
5548 memset (mips_hi_relocs
, '\0', sizeof (mips_hi_relocs
));
5549 memset (mips_lo_relocs
, '\0', sizeof (mips_lo_relocs
));
5551 if (ABI_HAS_64BIT_SYMBOLS
)
5553 if (TARGET_EXPLICIT_RELOCS
)
5555 mips_split_p
[SYMBOL_64_HIGH
] = true;
5556 mips_hi_relocs
[SYMBOL_64_HIGH
] = "%highest(";
5557 mips_lo_relocs
[SYMBOL_64_HIGH
] = "%higher(";
5559 mips_split_p
[SYMBOL_64_MID
] = true;
5560 mips_hi_relocs
[SYMBOL_64_MID
] = "%higher(";
5561 mips_lo_relocs
[SYMBOL_64_MID
] = "%hi(";
5563 mips_split_p
[SYMBOL_64_LOW
] = true;
5564 mips_hi_relocs
[SYMBOL_64_LOW
] = "%hi(";
5565 mips_lo_relocs
[SYMBOL_64_LOW
] = "%lo(";
5567 mips_split_p
[SYMBOL_ABSOLUTE
] = true;
5568 mips_lo_relocs
[SYMBOL_ABSOLUTE
] = "%lo(";
5573 if (TARGET_EXPLICIT_RELOCS
|| mips_split_addresses
|| TARGET_MIPS16
)
5575 mips_split_p
[SYMBOL_ABSOLUTE
] = true;
5576 mips_hi_relocs
[SYMBOL_ABSOLUTE
] = "%hi(";
5577 mips_lo_relocs
[SYMBOL_ABSOLUTE
] = "%lo(";
5579 mips_lo_relocs
[SYMBOL_32_HIGH
] = "%hi(";
5585 /* The high part is provided by a pseudo copy of $gp. */
5586 mips_split_p
[SYMBOL_GP_RELATIVE
] = true;
5587 mips_lo_relocs
[SYMBOL_GP_RELATIVE
] = "%gprel(";
5590 if (TARGET_EXPLICIT_RELOCS
)
5592 /* Small data constants are kept whole until after reload,
5593 then lowered by mips_rewrite_small_data. */
5594 mips_lo_relocs
[SYMBOL_GP_RELATIVE
] = "%gp_rel(";
5596 mips_split_p
[SYMBOL_GOT_PAGE_OFST
] = true;
5599 mips_lo_relocs
[SYMBOL_GOTOFF_PAGE
] = "%got_page(";
5600 mips_lo_relocs
[SYMBOL_GOT_PAGE_OFST
] = "%got_ofst(";
5604 mips_lo_relocs
[SYMBOL_GOTOFF_PAGE
] = "%got(";
5605 mips_lo_relocs
[SYMBOL_GOT_PAGE_OFST
] = "%lo(";
5610 /* The HIGH and LO_SUM are matched by special .md patterns. */
5611 mips_split_p
[SYMBOL_GOT_DISP
] = true;
5613 mips_split_p
[SYMBOL_GOTOFF_DISP
] = true;
5614 mips_hi_relocs
[SYMBOL_GOTOFF_DISP
] = "%got_hi(";
5615 mips_lo_relocs
[SYMBOL_GOTOFF_DISP
] = "%got_lo(";
5617 mips_split_p
[SYMBOL_GOTOFF_CALL
] = true;
5618 mips_hi_relocs
[SYMBOL_GOTOFF_CALL
] = "%call_hi(";
5619 mips_lo_relocs
[SYMBOL_GOTOFF_CALL
] = "%call_lo(";
5624 mips_lo_relocs
[SYMBOL_GOTOFF_DISP
] = "%got_disp(";
5626 mips_lo_relocs
[SYMBOL_GOTOFF_DISP
] = "%got(";
5627 mips_lo_relocs
[SYMBOL_GOTOFF_CALL
] = "%call16(";
5633 mips_split_p
[SYMBOL_GOTOFF_LOADGP
] = true;
5634 mips_hi_relocs
[SYMBOL_GOTOFF_LOADGP
] = "%hi(%neg(%gp_rel(";
5635 mips_lo_relocs
[SYMBOL_GOTOFF_LOADGP
] = "%lo(%neg(%gp_rel(";
5638 /* Thread-local relocation operators. */
5639 mips_lo_relocs
[SYMBOL_TLSGD
] = "%tlsgd(";
5640 mips_lo_relocs
[SYMBOL_TLSLDM
] = "%tlsldm(";
5641 mips_split_p
[SYMBOL_DTPREL
] = 1;
5642 mips_hi_relocs
[SYMBOL_DTPREL
] = "%dtprel_hi(";
5643 mips_lo_relocs
[SYMBOL_DTPREL
] = "%dtprel_lo(";
5644 mips_lo_relocs
[SYMBOL_GOTTPREL
] = "%gottprel(";
5645 mips_split_p
[SYMBOL_TPREL
] = 1;
5646 mips_hi_relocs
[SYMBOL_TPREL
] = "%tprel_hi(";
5647 mips_lo_relocs
[SYMBOL_TPREL
] = "%tprel_lo(";
5649 mips_lo_relocs
[SYMBOL_HALF
] = "%half(";
5652 static GTY(()) int was_mips16_p
= -1;
5654 /* Set up the target-dependent global state so that it matches the
5655 current function's ISA mode. */
5658 mips_set_mips16_mode (int mips16_p
)
5660 if (mips16_p
== was_mips16_p
)
5663 /* Restore base settings of various flags. */
5664 target_flags
= mips_base_target_flags
;
5665 align_loops
= mips_base_align_loops
;
5666 align_jumps
= mips_base_align_jumps
;
5667 align_functions
= mips_base_align_functions
;
5668 flag_schedule_insns
= mips_base_schedule_insns
;
5669 flag_reorder_blocks_and_partition
= mips_base_reorder_blocks_and_partition
;
5670 flag_move_loop_invariants
= mips_base_move_loop_invariants
;
5671 flag_delayed_branch
= mips_flag_delayed_branch
;
5675 /* Select mips16 instruction set. */
5676 target_flags
|= MASK_MIPS16
;
5678 /* Don't run the scheduler before reload, since it tends to
5679 increase register pressure. */
5680 flag_schedule_insns
= 0;
5682 /* Don't do hot/cold partitioning. The constant layout code expects
5683 the whole function to be in a single section. */
5684 flag_reorder_blocks_and_partition
= 0;
5686 /* Don't move loop invariants, because it tends to increase
5687 register pressure. It also introduces an extra move in cases
5688 where the constant is the first operand in a two-operand binary
5689 instruction, or when it forms a register argument to a functon
5691 flag_move_loop_invariants
= 0;
5693 /* Silently disable -mexplicit-relocs since it doesn't apply
5694 to mips16 code. Even so, it would overly pedantic to warn
5695 about "-mips16 -mexplicit-relocs", especially given that
5696 we use a %gprel() operator. */
5697 target_flags
&= ~MASK_EXPLICIT_RELOCS
;
5699 /* Experiments suggest we get the best overall results from using
5700 the range of an unextended lw or sw. Code that makes heavy use
5701 of byte or short accesses can do better with ranges of 0...31
5702 and 0...63 respectively, but most code is sensitive to the range
5703 of lw and sw instead. */
5704 targetm
.min_anchor_offset
= 0;
5705 targetm
.max_anchor_offset
= 127;
5707 if (flag_pic
|| TARGET_ABICALLS
)
5708 sorry ("MIPS16 PIC");
5712 /* Reset to select base non-mips16 ISA. */
5713 target_flags
&= ~MASK_MIPS16
;
5715 /* When using explicit relocs, we call dbr_schedule from within
5717 if (TARGET_EXPLICIT_RELOCS
)
5718 flag_delayed_branch
= 0;
5720 /* Provide default values for align_* for 64-bit targets. */
5723 if (align_loops
== 0)
5725 if (align_jumps
== 0)
5727 if (align_functions
== 0)
5728 align_functions
= 8;
5731 targetm
.min_anchor_offset
= TARGET_MIN_ANCHOR_OFFSET
;
5732 targetm
.max_anchor_offset
= TARGET_MAX_ANCHOR_OFFSET
;
5735 /* (Re)initialize mips target internals for new ISA. */
5736 mips_init_split_addresses ();
5737 mips_init_relocs ();
5739 if (was_mips16_p
>= 0)
5740 /* Reinitialize target-dependent state. */
5743 was_mips16_p
= TARGET_MIPS16
;
5746 /* Use a hash table to keep track of implicit mips16/nomips16 attributes
5747 for -mflip_mips16. It maps decl names onto a boolean mode setting. */
5749 struct mflip_mips16_entry
GTY (()) {
5753 static GTY ((param_is (struct mflip_mips16_entry
))) htab_t mflip_mips16_htab
;
5755 /* Hash table callbacks for mflip_mips16_htab. */
5758 mflip_mips16_htab_hash (const void *entry
)
5760 return htab_hash_string (((const struct mflip_mips16_entry
*) entry
)->name
);
5764 mflip_mips16_htab_eq (const void *entry
, const void *name
)
5766 return strcmp (((const struct mflip_mips16_entry
*) entry
)->name
,
5767 (const char *) name
) == 0;
5770 /* DECL is a function that needs a default "mips16" or "nomips16" attribute
5771 for -mflip-mips16. Return true if it should use "mips16" and false if
5772 it should use "nomips16". */
5775 mflip_mips16_use_mips16_p (tree decl
)
5777 struct mflip_mips16_entry
*entry
;
5782 /* Use the opposite of the command-line setting for anonymous decls. */
5783 if (!DECL_NAME (decl
))
5784 return !mips_base_mips16
;
5786 if (!mflip_mips16_htab
)
5787 mflip_mips16_htab
= htab_create_ggc (37, mflip_mips16_htab_hash
,
5788 mflip_mips16_htab_eq
, NULL
);
5790 name
= IDENTIFIER_POINTER (DECL_NAME (decl
));
5791 hash
= htab_hash_string (name
);
5792 slot
= htab_find_slot_with_hash (mflip_mips16_htab
, name
, hash
, INSERT
);
5793 entry
= (struct mflip_mips16_entry
*) *slot
;
5796 mips16_flipper
= !mips16_flipper
;
5797 entry
= GGC_NEW (struct mflip_mips16_entry
);
5799 entry
->mips16_p
= mips16_flipper
? !mips_base_mips16
: mips_base_mips16
;
5802 return entry
->mips16_p
;
5805 /* Implement TARGET_INSERT_ATTRIBUTES. */
5808 mips_insert_attributes (tree decl
, tree
*attributes
)
5811 bool mips16_p
, nomips16_p
;
5813 /* Check for "mips16" and "nomips16" attributes. */
5814 mips16_p
= lookup_attribute ("mips16", *attributes
) != NULL
;
5815 nomips16_p
= lookup_attribute ("nomips16", *attributes
) != NULL
;
5816 if (TREE_CODE (decl
) != FUNCTION_DECL
)
5819 error ("%qs attribute only applies to functions", "mips16");
5821 error ("%qs attribute only applies to functions", "nomips16");
5825 mips16_p
|= mips_mips16_decl_p (decl
);
5826 nomips16_p
|= mips_nomips16_decl_p (decl
);
5827 if (mips16_p
|| nomips16_p
)
5829 /* DECL cannot be simultaneously mips16 and nomips16. */
5830 if (mips16_p
&& nomips16_p
)
5831 error ("%qs cannot have both %<mips16%> and "
5832 "%<nomips16%> attributes",
5833 IDENTIFIER_POINTER (DECL_NAME (decl
)));
5835 else if (TARGET_FLIP_MIPS16
&& !DECL_ARTIFICIAL (decl
))
5837 /* Implement -mflip-mips16. If DECL has neither a "nomips16" nor a
5838 "mips16" attribute, arbitrarily pick one. We must pick the same
5839 setting for duplicate declarations of a function. */
5840 name
= mflip_mips16_use_mips16_p (decl
) ? "mips16" : "nomips16";
5841 *attributes
= tree_cons (get_identifier (name
), NULL
, *attributes
);
5846 /* Implement TARGET_MERGE_DECL_ATTRIBUTES. */
5849 mips_merge_decl_attributes (tree olddecl
, tree newdecl
)
5851 /* The decls' "mips16" and "nomips16" attributes must match exactly. */
5852 if (mips_mips16_decl_p (olddecl
) != mips_mips16_decl_p (newdecl
))
5853 error ("%qs redeclared with conflicting %qs attributes",
5854 IDENTIFIER_POINTER (DECL_NAME (newdecl
)), "mips16");
5855 if (mips_nomips16_decl_p (olddecl
) != mips_nomips16_decl_p (newdecl
))
5856 error ("%qs redeclared with conflicting %qs attributes",
5857 IDENTIFIER_POINTER (DECL_NAME (newdecl
)), "nomips16");
5859 return merge_attributes (DECL_ATTRIBUTES (olddecl
),
5860 DECL_ATTRIBUTES (newdecl
));
5863 /* Implement TARGET_SET_CURRENT_FUNCTION. Decide whether the current
5864 function should use the MIPS16 ISA and switch modes accordingly. */
5867 mips_set_current_function (tree fndecl
)
5869 mips_set_mips16_mode (mips_use_mips16_mode_p (fndecl
));
5872 /* Implement TARGET_HANDLE_OPTION. */
5875 mips_handle_option (size_t code
, const char *arg
, int value
)
5880 if (strcmp (arg
, "32") == 0)
5882 else if (strcmp (arg
, "o64") == 0)
5884 else if (strcmp (arg
, "n32") == 0)
5886 else if (strcmp (arg
, "64") == 0)
5888 else if (strcmp (arg
, "eabi") == 0)
5889 mips_abi
= ABI_EABI
;
5896 return mips_parse_cpu (arg
) != 0;
5899 mips_isa_info
= mips_parse_cpu (ACONCAT (("mips", arg
, NULL
)));
5900 return mips_isa_info
!= 0;
5902 case OPT_mno_flush_func
:
5903 mips_cache_flush_func
= NULL
;
5906 case OPT_mcode_readable_
:
5907 if (strcmp (arg
, "yes") == 0)
5908 mips_code_readable
= CODE_READABLE_YES
;
5909 else if (strcmp (arg
, "pcrel") == 0)
5910 mips_code_readable
= CODE_READABLE_PCREL
;
5911 else if (strcmp (arg
, "no") == 0)
5912 mips_code_readable
= CODE_READABLE_NO
;
5918 mips_llsc
= value
? LLSC_YES
: LLSC_NO
;
5926 /* Set up the threshold for data to go into the small data area, instead
5927 of the normal data area, and detect any conflicts in the switches. */
5930 override_options (void)
5932 int i
, start
, regno
;
5933 enum machine_mode mode
;
5935 #ifdef SUBTARGET_OVERRIDE_OPTIONS
5936 SUBTARGET_OVERRIDE_OPTIONS
;
5939 mips_section_threshold
= g_switch_set
? g_switch_value
: MIPS_DEFAULT_GVALUE
;
5941 /* The following code determines the architecture and register size.
5942 Similar code was added to GAS 2.14 (see tc-mips.c:md_after_parse_args()).
5943 The GAS and GCC code should be kept in sync as much as possible. */
5945 if (mips_arch_string
!= 0)
5946 mips_set_architecture (mips_parse_cpu (mips_arch_string
));
5948 if (mips_isa_info
!= 0)
5950 if (mips_arch_info
== 0)
5951 mips_set_architecture (mips_isa_info
);
5952 else if (mips_arch_info
->isa
!= mips_isa_info
->isa
)
5953 error ("-%s conflicts with the other architecture options, "
5954 "which specify a %s processor",
5955 mips_isa_info
->name
,
5956 mips_cpu_info_from_isa (mips_arch_info
->isa
)->name
);
5959 if (mips_arch_info
== 0)
5961 #ifdef MIPS_CPU_STRING_DEFAULT
5962 mips_set_architecture (mips_parse_cpu (MIPS_CPU_STRING_DEFAULT
));
5964 mips_set_architecture (mips_cpu_info_from_isa (MIPS_ISA_DEFAULT
));
5968 if (ABI_NEEDS_64BIT_REGS
&& !ISA_HAS_64BIT_REGS
)
5969 error ("-march=%s is not compatible with the selected ABI",
5970 mips_arch_info
->name
);
5972 /* Optimize for mips_arch, unless -mtune selects a different processor. */
5973 if (mips_tune_string
!= 0)
5974 mips_set_tune (mips_parse_cpu (mips_tune_string
));
5976 if (mips_tune_info
== 0)
5977 mips_set_tune (mips_arch_info
);
5979 /* Set cost structure for the processor. */
5981 mips_cost
= &mips_rtx_cost_optimize_size
;
5983 mips_cost
= &mips_rtx_cost_data
[mips_tune
];
5985 /* If the user hasn't specified a branch cost, use the processor's
5987 if (mips_branch_cost
== 0)
5988 mips_branch_cost
= mips_cost
->branch_cost
;
5990 if ((target_flags_explicit
& MASK_64BIT
) != 0)
5992 /* The user specified the size of the integer registers. Make sure
5993 it agrees with the ABI and ISA. */
5994 if (TARGET_64BIT
&& !ISA_HAS_64BIT_REGS
)
5995 error ("-mgp64 used with a 32-bit processor");
5996 else if (!TARGET_64BIT
&& ABI_NEEDS_64BIT_REGS
)
5997 error ("-mgp32 used with a 64-bit ABI");
5998 else if (TARGET_64BIT
&& ABI_NEEDS_32BIT_REGS
)
5999 error ("-mgp64 used with a 32-bit ABI");
6003 /* Infer the integer register size from the ABI and processor.
6004 Restrict ourselves to 32-bit registers if that's all the
6005 processor has, or if the ABI cannot handle 64-bit registers. */
6006 if (ABI_NEEDS_32BIT_REGS
|| !ISA_HAS_64BIT_REGS
)
6007 target_flags
&= ~MASK_64BIT
;
6009 target_flags
|= MASK_64BIT
;
6012 if ((target_flags_explicit
& MASK_FLOAT64
) != 0)
6014 /* Really, -mfp32 and -mfp64 are ornamental options. There's
6015 only one right answer here. */
6016 if (TARGET_64BIT
&& TARGET_DOUBLE_FLOAT
&& !TARGET_FLOAT64
)
6017 error ("unsupported combination: %s", "-mgp64 -mfp32 -mdouble-float");
6018 else if (!TARGET_64BIT
&& TARGET_FLOAT64
6019 && !(ISA_HAS_MXHC1
&& mips_abi
== ABI_32
))
6020 error ("-mgp32 and -mfp64 can only be combined if the target"
6021 " supports the mfhc1 and mthc1 instructions");
6022 else if (TARGET_SINGLE_FLOAT
&& TARGET_FLOAT64
)
6023 error ("unsupported combination: %s", "-mfp64 -msingle-float");
6027 /* -msingle-float selects 32-bit float registers. Otherwise the
6028 float registers should be the same size as the integer ones. */
6029 if (TARGET_64BIT
&& TARGET_DOUBLE_FLOAT
)
6030 target_flags
|= MASK_FLOAT64
;
6032 target_flags
&= ~MASK_FLOAT64
;
6035 /* End of code shared with GAS. */
6037 if ((target_flags_explicit
& MASK_LONG64
) == 0)
6039 if ((mips_abi
== ABI_EABI
&& TARGET_64BIT
) || mips_abi
== ABI_64
)
6040 target_flags
|= MASK_LONG64
;
6042 target_flags
&= ~MASK_LONG64
;
6046 flag_pcc_struct_return
= 0;
6048 if ((target_flags_explicit
& MASK_BRANCHLIKELY
) == 0)
6050 /* If neither -mbranch-likely nor -mno-branch-likely was given
6051 on the command line, set MASK_BRANCHLIKELY based on the target
6054 By default, we enable use of Branch Likely instructions on
6055 all architectures which support them with the following
6056 exceptions: when creating MIPS32 or MIPS64 code, and when
6057 tuning for architectures where their use tends to hurt
6060 The MIPS32 and MIPS64 architecture specifications say "Software
6061 is strongly encouraged to avoid use of Branch Likely
6062 instructions, as they will be removed from a future revision
6063 of the [MIPS32 and MIPS64] architecture." Therefore, we do not
6064 issue those instructions unless instructed to do so by
6066 if (ISA_HAS_BRANCHLIKELY
6067 && !(ISA_MIPS32
|| ISA_MIPS32R2
|| ISA_MIPS64
)
6068 && !(TUNE_MIPS5500
|| TUNE_SB1
))
6069 target_flags
|= MASK_BRANCHLIKELY
;
6071 target_flags
&= ~MASK_BRANCHLIKELY
;
6073 if (TARGET_BRANCHLIKELY
&& !ISA_HAS_BRANCHLIKELY
)
6074 warning (0, "generation of Branch Likely instructions enabled, but not supported by architecture");
6076 /* The effect of -mabicalls isn't defined for the EABI. */
6077 if (mips_abi
== ABI_EABI
&& TARGET_ABICALLS
)
6079 error ("unsupported combination: %s", "-mabicalls -mabi=eabi");
6080 target_flags
&= ~MASK_ABICALLS
;
6083 /* MIPS16 cannot generate PIC yet. */
6084 if (TARGET_MIPS16
&& (flag_pic
|| TARGET_ABICALLS
))
6086 sorry ("MIPS16 PIC");
6087 target_flags
&= ~MASK_ABICALLS
;
6088 flag_pic
= flag_pie
= flag_shlib
= 0;
6091 if (TARGET_ABICALLS
)
6092 /* We need to set flag_pic for executables as well as DSOs
6093 because we may reference symbols that are not defined in
6094 the final executable. (MIPS does not use things like
6095 copy relocs, for example.)
6097 Also, there is a body of code that uses __PIC__ to distinguish
6098 between -mabicalls and -mno-abicalls code. */
6101 /* -mvr4130-align is a "speed over size" optimization: it usually produces
6102 faster code, but at the expense of more nops. Enable it at -O3 and
6104 if (optimize
> 2 && (target_flags_explicit
& MASK_VR4130_ALIGN
) == 0)
6105 target_flags
|= MASK_VR4130_ALIGN
;
6107 /* Prefer a call to memcpy over inline code when optimizing for size,
6108 though see MOVE_RATIO in mips.h. */
6109 if (optimize_size
&& (target_flags_explicit
& MASK_MEMCPY
) == 0)
6110 target_flags
|= MASK_MEMCPY
;
6112 /* If we have a nonzero small-data limit, check that the -mgpopt
6113 setting is consistent with the other target flags. */
6114 if (mips_section_threshold
> 0)
6118 if (!TARGET_MIPS16
&& !TARGET_EXPLICIT_RELOCS
)
6119 error ("%<-mno-gpopt%> needs %<-mexplicit-relocs%>");
6121 TARGET_LOCAL_SDATA
= false;
6122 TARGET_EXTERN_SDATA
= false;
6126 if (TARGET_VXWORKS_RTP
)
6127 warning (0, "cannot use small-data accesses for %qs", "-mrtp");
6129 if (TARGET_ABICALLS
)
6130 warning (0, "cannot use small-data accesses for %qs",
6135 #ifdef MIPS_TFMODE_FORMAT
6136 REAL_MODE_FORMAT (TFmode
) = &MIPS_TFMODE_FORMAT
;
6139 /* Make sure that the user didn't turn off paired single support when
6140 MIPS-3D support is requested. */
6141 if (TARGET_MIPS3D
&& (target_flags_explicit
& MASK_PAIRED_SINGLE_FLOAT
)
6142 && !TARGET_PAIRED_SINGLE_FLOAT
)
6143 error ("-mips3d requires -mpaired-single");
6145 /* If TARGET_MIPS3D, enable MASK_PAIRED_SINGLE_FLOAT. */
6147 target_flags
|= MASK_PAIRED_SINGLE_FLOAT
;
6149 /* Make sure that when TARGET_PAIRED_SINGLE_FLOAT is true, TARGET_FLOAT64
6150 and TARGET_HARD_FLOAT_ABI are both true. */
6151 if (TARGET_PAIRED_SINGLE_FLOAT
&& !(TARGET_FLOAT64
&& TARGET_HARD_FLOAT_ABI
))
6152 error ("-mips3d/-mpaired-single must be used with -mfp64 -mhard-float");
6154 /* Make sure that the ISA supports TARGET_PAIRED_SINGLE_FLOAT when it is
6156 if (TARGET_PAIRED_SINGLE_FLOAT
&& !ISA_MIPS64
)
6157 error ("-mips3d/-mpaired-single must be used with -mips64");
6159 /* If TARGET_DSPR2, enable MASK_DSP. */
6161 target_flags
|= MASK_DSP
;
6163 mips_print_operand_punct
['?'] = 1;
6164 mips_print_operand_punct
['#'] = 1;
6165 mips_print_operand_punct
['/'] = 1;
6166 mips_print_operand_punct
['&'] = 1;
6167 mips_print_operand_punct
['!'] = 1;
6168 mips_print_operand_punct
['*'] = 1;
6169 mips_print_operand_punct
['@'] = 1;
6170 mips_print_operand_punct
['.'] = 1;
6171 mips_print_operand_punct
['('] = 1;
6172 mips_print_operand_punct
[')'] = 1;
6173 mips_print_operand_punct
['['] = 1;
6174 mips_print_operand_punct
[']'] = 1;
6175 mips_print_operand_punct
['<'] = 1;
6176 mips_print_operand_punct
['>'] = 1;
6177 mips_print_operand_punct
['{'] = 1;
6178 mips_print_operand_punct
['}'] = 1;
6179 mips_print_operand_punct
['^'] = 1;
6180 mips_print_operand_punct
['$'] = 1;
6181 mips_print_operand_punct
['+'] = 1;
6182 mips_print_operand_punct
['~'] = 1;
6183 mips_print_operand_punct
['|'] = 1;
6184 mips_print_operand_punct
['-'] = 1;
6186 /* Set up array to map GCC register number to debug register number.
6187 Ignore the special purpose register numbers. */
6189 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
6191 mips_dbx_regno
[i
] = INVALID_REGNUM
;
6192 if (GP_REG_P (i
) || FP_REG_P (i
) || ALL_COP_REG_P (i
))
6193 mips_dwarf_regno
[i
] = i
;
6195 mips_dwarf_regno
[i
] = INVALID_REGNUM
;
6198 start
= GP_DBX_FIRST
- GP_REG_FIRST
;
6199 for (i
= GP_REG_FIRST
; i
<= GP_REG_LAST
; i
++)
6200 mips_dbx_regno
[i
] = i
+ start
;
6202 start
= FP_DBX_FIRST
- FP_REG_FIRST
;
6203 for (i
= FP_REG_FIRST
; i
<= FP_REG_LAST
; i
++)
6204 mips_dbx_regno
[i
] = i
+ start
;
6206 /* HI and LO debug registers use big-endian ordering. */
6207 mips_dbx_regno
[HI_REGNUM
] = MD_DBX_FIRST
+ 0;
6208 mips_dbx_regno
[LO_REGNUM
] = MD_DBX_FIRST
+ 1;
6209 mips_dwarf_regno
[HI_REGNUM
] = MD_REG_FIRST
+ 0;
6210 mips_dwarf_regno
[LO_REGNUM
] = MD_REG_FIRST
+ 1;
6211 for (i
= DSP_ACC_REG_FIRST
; i
<= DSP_ACC_REG_LAST
; i
+= 2)
6213 mips_dwarf_regno
[i
+ TARGET_LITTLE_ENDIAN
] = i
;
6214 mips_dwarf_regno
[i
+ TARGET_BIG_ENDIAN
] = i
+ 1;
6217 /* Set up array giving whether a given register can hold a given mode. */
6219 for (mode
= VOIDmode
;
6220 mode
!= MAX_MACHINE_MODE
;
6221 mode
= (enum machine_mode
) ((int)mode
+ 1))
6223 register int size
= GET_MODE_SIZE (mode
);
6224 register enum mode_class
class = GET_MODE_CLASS (mode
);
6226 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
6230 if (mode
== CCV2mode
)
6233 && (regno
- ST_REG_FIRST
) % 2 == 0);
6235 else if (mode
== CCV4mode
)
6238 && (regno
- ST_REG_FIRST
) % 4 == 0);
6240 else if (mode
== CCmode
)
6243 temp
= (regno
== FPSW_REGNUM
);
6245 temp
= (ST_REG_P (regno
) || GP_REG_P (regno
)
6246 || FP_REG_P (regno
));
6249 else if (GP_REG_P (regno
))
6250 temp
= ((regno
& 1) == 0 || size
<= UNITS_PER_WORD
);
6252 else if (FP_REG_P (regno
))
6253 temp
= ((((regno
% MAX_FPRS_PER_FMT
) == 0)
6254 || (MIN_FPRS_PER_FMT
== 1
6255 && size
<= UNITS_PER_FPREG
))
6256 && (((class == MODE_FLOAT
|| class == MODE_COMPLEX_FLOAT
6257 || class == MODE_VECTOR_FLOAT
)
6258 && size
<= UNITS_PER_FPVALUE
)
6259 /* Allow integer modes that fit into a single
6260 register. We need to put integers into FPRs
6261 when using instructions like cvt and trunc.
6262 We can't allow sizes smaller than a word,
6263 the FPU has no appropriate load/store
6264 instructions for those. */
6265 || (class == MODE_INT
6266 && size
>= MIN_UNITS_PER_WORD
6267 && size
<= UNITS_PER_FPREG
)
6268 /* Allow TFmode for CCmode reloads. */
6269 || (ISA_HAS_8CC
&& mode
== TFmode
)));
6271 else if (ACC_REG_P (regno
))
6272 temp
= ((INTEGRAL_MODE_P (mode
) || ALL_FIXED_POINT_MODE_P (mode
))
6273 && size
<= UNITS_PER_WORD
* 2
6274 && (size
<= UNITS_PER_WORD
6275 || regno
== MD_REG_FIRST
6276 || (DSP_ACC_REG_P (regno
)
6277 && ((regno
- DSP_ACC_REG_FIRST
) & 1) == 0)));
6279 else if (ALL_COP_REG_P (regno
))
6280 temp
= (class == MODE_INT
&& size
<= UNITS_PER_WORD
);
6284 mips_hard_regno_mode_ok
[(int)mode
][regno
] = temp
;
6288 /* Save GPR registers in word_mode sized hunks. word_mode hasn't been
6289 initialized yet, so we can't use that here. */
6290 gpr_mode
= TARGET_64BIT
? DImode
: SImode
;
6292 /* Function to allocate machine-dependent function status. */
6293 init_machine_status
= &mips_init_machine_status
;
6295 /* Default to working around R4000 errata only if the processor
6296 was selected explicitly. */
6297 if ((target_flags_explicit
& MASK_FIX_R4000
) == 0
6298 && mips_matching_cpu_name_p (mips_arch_info
->name
, "r4000"))
6299 target_flags
|= MASK_FIX_R4000
;
6301 /* Default to working around R4400 errata only if the processor
6302 was selected explicitly. */
6303 if ((target_flags_explicit
& MASK_FIX_R4400
) == 0
6304 && mips_matching_cpu_name_p (mips_arch_info
->name
, "r4400"))
6305 target_flags
|= MASK_FIX_R4400
;
6307 /* Save base state of options. */
6308 mips_base_mips16
= TARGET_MIPS16
;
6309 mips_base_target_flags
= target_flags
;
6310 mips_base_schedule_insns
= flag_schedule_insns
;
6311 mips_base_reorder_blocks_and_partition
= flag_reorder_blocks_and_partition
;
6312 mips_base_move_loop_invariants
= flag_move_loop_invariants
;
6313 mips_base_align_loops
= align_loops
;
6314 mips_base_align_jumps
= align_jumps
;
6315 mips_base_align_functions
= align_functions
;
6316 mips_flag_delayed_branch
= flag_delayed_branch
;
6318 /* Now select the mips16 or 32-bit instruction set, as requested. */
6319 mips_set_mips16_mode (mips_base_mips16
);
6322 /* Swap the register information for registers I and I + 1, which
6323 currently have the wrong endianness. Note that the registers'
6324 fixedness and call-clobberedness might have been set on the
6328 mips_swap_registers (unsigned int i
)
6333 #define SWAP_INT(X, Y) (tmpi = (X), (X) = (Y), (Y) = tmpi)
6334 #define SWAP_STRING(X, Y) (tmps = (X), (X) = (Y), (Y) = tmps)
6336 SWAP_INT (fixed_regs
[i
], fixed_regs
[i
+ 1]);
6337 SWAP_INT (call_used_regs
[i
], call_used_regs
[i
+ 1]);
6338 SWAP_INT (call_really_used_regs
[i
], call_really_used_regs
[i
+ 1]);
6339 SWAP_STRING (reg_names
[i
], reg_names
[i
+ 1]);
6345 /* Implement CONDITIONAL_REGISTER_USAGE. */
6348 mips_conditional_register_usage (void)
6354 for (regno
= DSP_ACC_REG_FIRST
; regno
<= DSP_ACC_REG_LAST
; regno
++)
6355 fixed_regs
[regno
] = call_used_regs
[regno
] = 1;
6357 if (!TARGET_HARD_FLOAT
)
6361 for (regno
= FP_REG_FIRST
; regno
<= FP_REG_LAST
; regno
++)
6362 fixed_regs
[regno
] = call_used_regs
[regno
] = 1;
6363 for (regno
= ST_REG_FIRST
; regno
<= ST_REG_LAST
; regno
++)
6364 fixed_regs
[regno
] = call_used_regs
[regno
] = 1;
6366 else if (! ISA_HAS_8CC
)
6370 /* We only have a single condition code register. We
6371 implement this by hiding all the condition code registers,
6372 and generating RTL that refers directly to ST_REG_FIRST. */
6373 for (regno
= ST_REG_FIRST
; regno
<= ST_REG_LAST
; regno
++)
6374 fixed_regs
[regno
] = call_used_regs
[regno
] = 1;
6376 /* In mips16 mode, we permit the $t temporary registers to be used
6377 for reload. We prohibit the unused $s registers, since they
6378 are caller saved, and saving them via a mips16 register would
6379 probably waste more time than just reloading the value. */
6382 fixed_regs
[18] = call_used_regs
[18] = 1;
6383 fixed_regs
[19] = call_used_regs
[19] = 1;
6384 fixed_regs
[20] = call_used_regs
[20] = 1;
6385 fixed_regs
[21] = call_used_regs
[21] = 1;
6386 fixed_regs
[22] = call_used_regs
[22] = 1;
6387 fixed_regs
[23] = call_used_regs
[23] = 1;
6388 fixed_regs
[26] = call_used_regs
[26] = 1;
6389 fixed_regs
[27] = call_used_regs
[27] = 1;
6390 fixed_regs
[30] = call_used_regs
[30] = 1;
6392 /* fp20-23 are now caller saved. */
6393 if (mips_abi
== ABI_64
)
6396 for (regno
= FP_REG_FIRST
+ 20; regno
< FP_REG_FIRST
+ 24; regno
++)
6397 call_really_used_regs
[regno
] = call_used_regs
[regno
] = 1;
6399 /* Odd registers from fp21 to fp31 are now caller saved. */
6400 if (mips_abi
== ABI_N32
)
6403 for (regno
= FP_REG_FIRST
+ 21; regno
<= FP_REG_FIRST
+ 31; regno
+=2)
6404 call_really_used_regs
[regno
] = call_used_regs
[regno
] = 1;
6406 /* Make sure that double-register accumulator values are correctly
6407 ordered for the current endianness. */
6408 if (TARGET_LITTLE_ENDIAN
)
6411 mips_swap_registers (MD_REG_FIRST
);
6412 for (regno
= DSP_ACC_REG_FIRST
; regno
<= DSP_ACC_REG_LAST
; regno
+= 2)
6413 mips_swap_registers (regno
);
6417 /* Allocate a chunk of memory for per-function machine-dependent data. */
6418 static struct machine_function
*
6419 mips_init_machine_status (void)
6421 return ((struct machine_function
*)
6422 ggc_alloc_cleared (sizeof (struct machine_function
)));
6425 /* On the mips16, we want to allocate $24 (T_REG) before other
6426 registers for instructions for which it is possible. This helps
6427 avoid shuffling registers around in order to set up for an xor,
6428 encouraging the compiler to use a cmp instead. */
6431 mips_order_regs_for_local_alloc (void)
6435 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
6436 reg_alloc_order
[i
] = i
;
6440 /* It really doesn't matter where we put register 0, since it is
6441 a fixed register anyhow. */
6442 reg_alloc_order
[0] = 24;
6443 reg_alloc_order
[24] = 0;
6448 /* The MIPS debug format wants all automatic variables and arguments
6449 to be in terms of the virtual frame pointer (stack pointer before
6450 any adjustment in the function), while the MIPS 3.0 linker wants
6451 the frame pointer to be the stack pointer after the initial
6452 adjustment. So, we do the adjustment here. The arg pointer (which
6453 is eliminated) points to the virtual frame pointer, while the frame
6454 pointer (which may be eliminated) points to the stack pointer after
6455 the initial adjustments. */
6458 mips_debugger_offset (rtx addr
, HOST_WIDE_INT offset
)
6460 rtx offset2
= const0_rtx
;
6461 rtx reg
= eliminate_constant_term (addr
, &offset2
);
6464 offset
= INTVAL (offset2
);
6466 if (reg
== stack_pointer_rtx
|| reg
== frame_pointer_rtx
6467 || reg
== hard_frame_pointer_rtx
)
6469 HOST_WIDE_INT frame_size
= (!cfun
->machine
->frame
.initialized
)
6470 ? compute_frame_size (get_frame_size ())
6471 : cfun
->machine
->frame
.total_size
;
6473 /* MIPS16 frame is smaller */
6474 if (frame_pointer_needed
&& TARGET_MIPS16
)
6475 frame_size
-= cfun
->machine
->frame
.args_size
;
6477 offset
= offset
- frame_size
;
6480 /* sdbout_parms does not want this to crash for unrecognized cases. */
6482 else if (reg
!= arg_pointer_rtx
)
6483 fatal_insn ("mips_debugger_offset called with non stack/frame/arg pointer",
6490 /* If OP is an UNSPEC address, return the address to which it refers,
6491 otherwise return OP itself. */
6494 mips_strip_unspec_address (rtx op
)
6498 split_const (op
, &base
, &offset
);
6499 if (UNSPEC_ADDRESS_P (base
))
6500 op
= plus_constant (UNSPEC_ADDRESS (base
), INTVAL (offset
));
6504 /* Implement the PRINT_OPERAND macro. The MIPS-specific operand codes are:
6506 'X' OP is CONST_INT, prints 32 bits in hexadecimal format = "0x%08x",
6507 'x' OP is CONST_INT, prints 16 bits in hexadecimal format = "0x%04x",
6508 'h' OP is HIGH, prints %hi(X),
6509 'd' output integer constant in decimal,
6510 'z' if the operand is 0, use $0 instead of normal operand.
6511 'D' print second part of double-word register or memory operand.
6512 'L' print low-order register of double-word register operand.
6513 'M' print high-order register of double-word register operand.
6514 'C' print part of opcode for a branch condition.
6515 'F' print part of opcode for a floating-point branch condition.
6516 'N' print part of opcode for a branch condition, inverted.
6517 'W' print part of opcode for a floating-point branch condition, inverted.
6518 'T' print 'f' for (eq:CC ...), 't' for (ne:CC ...),
6519 'z' for (eq:?I ...), 'n' for (ne:?I ...).
6520 't' like 'T', but with the EQ/NE cases reversed
6521 'Y' for a CONST_INT X, print mips_fp_conditions[X]
6522 'Z' print the operand and a comma for ISA_HAS_8CC, otherwise print nothing
6523 'R' print the reloc associated with LO_SUM
6524 'q' print DSP accumulator registers
6526 The punctuation characters are:
6528 '(' Turn on .set noreorder
6529 ')' Turn on .set reorder
6530 '[' Turn on .set noat
6532 '<' Turn on .set nomacro
6533 '>' Turn on .set macro
6534 '{' Turn on .set volatile (not GAS)
6535 '}' Turn on .set novolatile (not GAS)
6536 '&' Turn on .set noreorder if filling delay slots
6537 '*' Turn on both .set noreorder and .set nomacro if filling delay slots
6538 '!' Turn on .set nomacro if filling delay slots
6539 '#' Print nop if in a .set noreorder section.
6540 '/' Like '#', but does nothing within a delayed branch sequence
6541 '?' Print 'l' if we are to use a branch likely instead of normal branch.
6542 '@' Print the name of the assembler temporary register (at or $1).
6543 '.' Print the name of the register with a hard-wired zero (zero or $0).
6544 '^' Print the name of the pic call-through register (t9 or $25).
6545 '$' Print the name of the stack pointer register (sp or $29).
6546 '+' Print the name of the gp register (usually gp or $28).
6547 '~' Output a branch alignment to LABEL_ALIGN(NULL).
6548 '|' Print .set push; .set mips2 if mips_llsc == LLSC_YES
6550 '-' Print .set pop under the same conditions for '|'. */
6553 print_operand (FILE *file
, rtx op
, int letter
)
6555 register enum rtx_code code
;
6557 if (PRINT_OPERAND_PUNCT_VALID_P (letter
))
6562 if (mips_branch_likely
)
6567 fputs (reg_names
[GP_REG_FIRST
+ 1], file
);
6571 fputs (reg_names
[PIC_FUNCTION_ADDR_REGNUM
], file
);
6575 fputs (reg_names
[GP_REG_FIRST
+ 0], file
);
6579 fputs (reg_names
[STACK_POINTER_REGNUM
], file
);
6583 fputs (reg_names
[PIC_OFFSET_TABLE_REGNUM
], file
);
6587 if (final_sequence
!= 0 && set_noreorder
++ == 0)
6588 fputs (".set\tnoreorder\n\t", file
);
6592 if (final_sequence
!= 0)
6594 if (set_noreorder
++ == 0)
6595 fputs (".set\tnoreorder\n\t", file
);
6597 if (set_nomacro
++ == 0)
6598 fputs (".set\tnomacro\n\t", file
);
6603 if (final_sequence
!= 0 && set_nomacro
++ == 0)
6604 fputs ("\n\t.set\tnomacro", file
);
6608 if (set_noreorder
!= 0)
6609 fputs ("\n\tnop", file
);
6613 /* Print an extra newline so that the delayed insn is separated
6614 from the following ones. This looks neater and is consistent
6615 with non-nop delayed sequences. */
6616 if (set_noreorder
!= 0 && final_sequence
== 0)
6617 fputs ("\n\tnop\n", file
);
6621 if (set_noreorder
++ == 0)
6622 fputs (".set\tnoreorder\n\t", file
);
6626 if (set_noreorder
== 0)
6627 error ("internal error: %%) found without a %%( in assembler pattern");
6629 else if (--set_noreorder
== 0)
6630 fputs ("\n\t.set\treorder", file
);
6635 if (set_noat
++ == 0)
6636 fputs (".set\tnoat\n\t", file
);
6641 error ("internal error: %%] found without a %%[ in assembler pattern");
6642 else if (--set_noat
== 0)
6643 fputs ("\n\t.set\tat", file
);
6648 if (set_nomacro
++ == 0)
6649 fputs (".set\tnomacro\n\t", file
);
6653 if (set_nomacro
== 0)
6654 error ("internal error: %%> found without a %%< in assembler pattern");
6655 else if (--set_nomacro
== 0)
6656 fputs ("\n\t.set\tmacro", file
);
6661 if (set_volatile
++ == 0)
6662 fputs ("#.set\tvolatile\n\t", file
);
6666 if (set_volatile
== 0)
6667 error ("internal error: %%} found without a %%{ in assembler pattern");
6668 else if (--set_volatile
== 0)
6669 fputs ("\n\t#.set\tnovolatile", file
);
6675 if (align_labels_log
> 0)
6676 ASM_OUTPUT_ALIGN (file
, align_labels_log
);
6682 fputs (".set\tpush\n\t.set\tmips2\n\t", file
);
6687 fputs ("\n\t.set\tpop", file
);
6691 error ("PRINT_OPERAND: unknown punctuation '%c'", letter
);
6700 error ("PRINT_OPERAND null pointer");
6704 code
= GET_CODE (op
);
6709 case EQ
: fputs ("eq", file
); break;
6710 case NE
: fputs ("ne", file
); break;
6711 case GT
: fputs ("gt", file
); break;
6712 case GE
: fputs ("ge", file
); break;
6713 case LT
: fputs ("lt", file
); break;
6714 case LE
: fputs ("le", file
); break;
6715 case GTU
: fputs ("gtu", file
); break;
6716 case GEU
: fputs ("geu", file
); break;
6717 case LTU
: fputs ("ltu", file
); break;
6718 case LEU
: fputs ("leu", file
); break;
6720 fatal_insn ("PRINT_OPERAND, invalid insn for %%C", op
);
6723 else if (letter
== 'N')
6726 case EQ
: fputs ("ne", file
); break;
6727 case NE
: fputs ("eq", file
); break;
6728 case GT
: fputs ("le", file
); break;
6729 case GE
: fputs ("lt", file
); break;
6730 case LT
: fputs ("ge", file
); break;
6731 case LE
: fputs ("gt", file
); break;
6732 case GTU
: fputs ("leu", file
); break;
6733 case GEU
: fputs ("ltu", file
); break;
6734 case LTU
: fputs ("geu", file
); break;
6735 case LEU
: fputs ("gtu", file
); break;
6737 fatal_insn ("PRINT_OPERAND, invalid insn for %%N", op
);
6740 else if (letter
== 'F')
6743 case EQ
: fputs ("c1f", file
); break;
6744 case NE
: fputs ("c1t", file
); break;
6746 fatal_insn ("PRINT_OPERAND, invalid insn for %%F", op
);
6749 else if (letter
== 'W')
6752 case EQ
: fputs ("c1t", file
); break;
6753 case NE
: fputs ("c1f", file
); break;
6755 fatal_insn ("PRINT_OPERAND, invalid insn for %%W", op
);
6758 else if (letter
== 'h')
6760 if (GET_CODE (op
) == HIGH
)
6763 print_operand_reloc (file
, op
, SYMBOL_CONTEXT_LEA
, mips_hi_relocs
);
6766 else if (letter
== 'R')
6767 print_operand_reloc (file
, op
, SYMBOL_CONTEXT_LEA
, mips_lo_relocs
);
6769 else if (letter
== 'Y')
6771 if (GET_CODE (op
) == CONST_INT
6772 && ((unsigned HOST_WIDE_INT
) INTVAL (op
)
6773 < ARRAY_SIZE (mips_fp_conditions
)))
6774 fputs (mips_fp_conditions
[INTVAL (op
)], file
);
6776 output_operand_lossage ("invalid %%Y value");
6779 else if (letter
== 'Z')
6783 print_operand (file
, op
, 0);
6788 else if (letter
== 'q')
6793 fatal_insn ("PRINT_OPERAND, invalid insn for %%q", op
);
6795 regnum
= REGNO (op
);
6796 if (MD_REG_P (regnum
))
6797 fprintf (file
, "$ac0");
6798 else if (DSP_ACC_REG_P (regnum
))
6799 fprintf (file
, "$ac%c", reg_names
[regnum
][3]);
6801 fatal_insn ("PRINT_OPERAND, invalid insn for %%q", op
);
6804 else if (code
== REG
|| code
== SUBREG
)
6806 register int regnum
;
6809 regnum
= REGNO (op
);
6811 regnum
= true_regnum (op
);
6813 if ((letter
== 'M' && ! WORDS_BIG_ENDIAN
)
6814 || (letter
== 'L' && WORDS_BIG_ENDIAN
)
6818 fprintf (file
, "%s", reg_names
[regnum
]);
6821 else if (code
== MEM
)
6824 output_address (plus_constant (XEXP (op
, 0), 4));
6826 output_address (XEXP (op
, 0));
6829 else if (letter
== 'x' && GET_CODE (op
) == CONST_INT
)
6830 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, 0xffff & INTVAL(op
));
6832 else if (letter
== 'X' && GET_CODE(op
) == CONST_INT
)
6833 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, INTVAL (op
));
6835 else if (letter
== 'd' && GET_CODE(op
) == CONST_INT
)
6836 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (INTVAL(op
)));
6838 else if (letter
== 'z' && op
== CONST0_RTX (GET_MODE (op
)))
6839 fputs (reg_names
[GP_REG_FIRST
], file
);
6841 else if (letter
== 'd' || letter
== 'x' || letter
== 'X')
6842 output_operand_lossage ("invalid use of %%d, %%x, or %%X");
6844 else if (letter
== 'T' || letter
== 't')
6846 int truth
= (code
== NE
) == (letter
== 'T');
6847 fputc ("zfnt"[truth
* 2 + (GET_MODE (op
) == CCmode
)], file
);
6850 else if (CONST_GP_P (op
))
6851 fputs (reg_names
[GLOBAL_POINTER_REGNUM
], file
);
6854 output_addr_const (file
, mips_strip_unspec_address (op
));
6858 /* Print symbolic operand OP, which is part of a HIGH or LO_SUM
6859 in context CONTEXT. RELOCS is the array of relocations to use. */
6862 print_operand_reloc (FILE *file
, rtx op
, enum mips_symbol_context context
,
6863 const char **relocs
)
6865 enum mips_symbol_type symbol_type
;
6868 symbol_type
= mips_classify_symbolic_expression (op
, context
);
6869 if (relocs
[symbol_type
] == 0)
6870 fatal_insn ("PRINT_OPERAND, invalid operand for relocation", op
);
6872 fputs (relocs
[symbol_type
], file
);
6873 output_addr_const (file
, mips_strip_unspec_address (op
));
6874 for (p
= relocs
[symbol_type
]; *p
!= 0; p
++)
6879 /* Output address operand X to FILE. */
6882 print_operand_address (FILE *file
, rtx x
)
6884 struct mips_address_info addr
;
6886 if (mips_classify_address (&addr
, x
, word_mode
, true))
6890 print_operand (file
, addr
.offset
, 0);
6891 fprintf (file
, "(%s)", reg_names
[REGNO (addr
.reg
)]);
6894 case ADDRESS_LO_SUM
:
6895 print_operand_reloc (file
, addr
.offset
, SYMBOL_CONTEXT_MEM
,
6897 fprintf (file
, "(%s)", reg_names
[REGNO (addr
.reg
)]);
6900 case ADDRESS_CONST_INT
:
6901 output_addr_const (file
, x
);
6902 fprintf (file
, "(%s)", reg_names
[0]);
6905 case ADDRESS_SYMBOLIC
:
6906 output_addr_const (file
, mips_strip_unspec_address (x
));
6912 /* When using assembler macros, keep track of all of small-data externs
6913 so that mips_file_end can emit the appropriate declarations for them.
6915 In most cases it would be safe (though pointless) to emit .externs
6916 for other symbols too. One exception is when an object is within
6917 the -G limit but declared by the user to be in a section other
6918 than .sbss or .sdata. */
6921 mips_output_external (FILE *file
, tree decl
, const char *name
)
6923 default_elf_asm_output_external (file
, decl
, name
);
6925 /* We output the name if and only if TREE_SYMBOL_REFERENCED is
6926 set in order to avoid putting out names that are never really
6928 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl
)))
6930 if (!TARGET_EXPLICIT_RELOCS
&& mips_in_small_data_p (decl
))
6932 fputs ("\t.extern\t", file
);
6933 assemble_name (file
, name
);
6934 fprintf (file
, ", " HOST_WIDE_INT_PRINT_DEC
"\n",
6935 int_size_in_bytes (TREE_TYPE (decl
)));
6937 else if (TARGET_IRIX
6938 && mips_abi
== ABI_32
6939 && TREE_CODE (decl
) == FUNCTION_DECL
)
6941 /* In IRIX 5 or IRIX 6 for the O32 ABI, we must output a
6942 `.global name .text' directive for every used but
6943 undefined function. If we don't, the linker may perform
6944 an optimization (skipping over the insns that set $gp)
6945 when it is unsafe. */
6946 fputs ("\t.globl ", file
);
6947 assemble_name (file
, name
);
6948 fputs (" .text\n", file
);
6953 /* Emit a new filename to a stream. If we are smuggling stabs, try to
6954 put out a MIPS ECOFF file and a stab. */
6957 mips_output_filename (FILE *stream
, const char *name
)
6960 /* If we are emitting DWARF-2, let dwarf2out handle the ".file"
6962 if (write_symbols
== DWARF2_DEBUG
)
6964 else if (mips_output_filename_first_time
)
6966 mips_output_filename_first_time
= 0;
6967 num_source_filenames
+= 1;
6968 current_function_file
= name
;
6969 fprintf (stream
, "\t.file\t%d ", num_source_filenames
);
6970 output_quoted_string (stream
, name
);
6971 putc ('\n', stream
);
6974 /* If we are emitting stabs, let dbxout.c handle this (except for
6975 the mips_output_filename_first_time case). */
6976 else if (write_symbols
== DBX_DEBUG
)
6979 else if (name
!= current_function_file
6980 && strcmp (name
, current_function_file
) != 0)
6982 num_source_filenames
+= 1;
6983 current_function_file
= name
;
6984 fprintf (stream
, "\t.file\t%d ", num_source_filenames
);
6985 output_quoted_string (stream
, name
);
6986 putc ('\n', stream
);
6990 /* Output an ASCII string, in a space-saving way. PREFIX is the string
6991 that should be written before the opening quote, such as "\t.ascii\t"
6992 for real string data or "\t# " for a comment. */
6995 mips_output_ascii (FILE *stream
, const char *string_param
, size_t len
,
7000 register const unsigned char *string
=
7001 (const unsigned char *)string_param
;
7003 fprintf (stream
, "%s\"", prefix
);
7004 for (i
= 0; i
< len
; i
++)
7006 register int c
= string
[i
];
7010 if (c
== '\\' || c
== '\"')
7012 putc ('\\', stream
);
7020 fprintf (stream
, "\\%03o", c
);
7024 if (cur_pos
> 72 && i
+1 < len
)
7027 fprintf (stream
, "\"\n%s\"", prefix
);
7030 fprintf (stream
, "\"\n");
7033 /* Implement TARGET_ASM_FILE_START. */
7036 mips_file_start (void)
7038 default_file_start ();
7042 /* Generate a special section to describe the ABI switches used to
7043 produce the resultant binary. This used to be done by the assembler
7044 setting bits in the ELF header's flags field, but we have run out of
7045 bits. GDB needs this information in order to be able to correctly
7046 debug these binaries. See the function mips_gdbarch_init() in
7047 gdb/mips-tdep.c. This is unnecessary for the IRIX 5/6 ABIs and
7048 causes unnecessary IRIX 6 ld warnings. */
7049 const char * abi_string
= NULL
;
7053 case ABI_32
: abi_string
= "abi32"; break;
7054 case ABI_N32
: abi_string
= "abiN32"; break;
7055 case ABI_64
: abi_string
= "abi64"; break;
7056 case ABI_O64
: abi_string
= "abiO64"; break;
7057 case ABI_EABI
: abi_string
= TARGET_64BIT
? "eabi64" : "eabi32"; break;
7061 /* Note - we use fprintf directly rather than calling switch_to_section
7062 because in this way we can avoid creating an allocated section. We
7063 do not want this section to take up any space in the running
7065 fprintf (asm_out_file
, "\t.section .mdebug.%s\n\t.previous\n",
7068 /* There is no ELF header flag to distinguish long32 forms of the
7069 EABI from long64 forms. Emit a special section to help tools
7070 such as GDB. Do the same for o64, which is sometimes used with
7072 if (mips_abi
== ABI_EABI
|| mips_abi
== ABI_O64
)
7073 fprintf (asm_out_file
, "\t.section .gcc_compiled_long%d\n"
7074 "\t.previous\n", TARGET_LONG64
? 64 : 32);
7076 #ifdef HAVE_AS_GNU_ATTRIBUTE
7077 fprintf (asm_out_file
, "\t.gnu_attribute 4, %d\n",
7078 TARGET_HARD_FLOAT_ABI
? (TARGET_DOUBLE_FLOAT
? 1 : 2) : 3);
7082 /* Generate the pseudo ops that System V.4 wants. */
7083 if (TARGET_ABICALLS
)
7084 fprintf (asm_out_file
, "\t.abicalls\n");
7086 if (flag_verbose_asm
)
7087 fprintf (asm_out_file
, "\n%s -G value = %d, Arch = %s, ISA = %d\n",
7089 mips_section_threshold
, mips_arch_info
->name
, mips_isa
);
7092 #ifdef BSS_SECTION_ASM_OP
7093 /* Implement ASM_OUTPUT_ALIGNED_BSS. This differs from the default only
7094 in the use of sbss. */
7097 mips_output_aligned_bss (FILE *stream
, tree decl
, const char *name
,
7098 unsigned HOST_WIDE_INT size
, int align
)
7100 extern tree last_assemble_variable_decl
;
7102 if (mips_in_small_data_p (decl
))
7103 switch_to_section (get_named_section (NULL
, ".sbss", 0));
7105 switch_to_section (bss_section
);
7106 ASM_OUTPUT_ALIGN (stream
, floor_log2 (align
/ BITS_PER_UNIT
));
7107 last_assemble_variable_decl
= decl
;
7108 ASM_DECLARE_OBJECT_NAME (stream
, name
, decl
);
7109 ASM_OUTPUT_SKIP (stream
, size
!= 0 ? size
: 1);
7113 /* Implement ASM_OUTPUT_ALIGNED_DECL_COMMON. This is usually the same as the
7114 elfos.h version, but we also need to handle -muninit-const-in-rodata. */
7117 mips_output_aligned_decl_common (FILE *stream
, tree decl
, const char *name
,
7118 unsigned HOST_WIDE_INT size
,
7121 /* If the target wants uninitialized const declarations in
7122 .rdata then don't put them in .comm. */
7123 if (TARGET_EMBEDDED_DATA
&& TARGET_UNINIT_CONST_IN_RODATA
7124 && TREE_CODE (decl
) == VAR_DECL
&& TREE_READONLY (decl
)
7125 && (DECL_INITIAL (decl
) == 0 || DECL_INITIAL (decl
) == error_mark_node
))
7127 if (TREE_PUBLIC (decl
) && DECL_NAME (decl
))
7128 targetm
.asm_out
.globalize_label (stream
, name
);
7130 switch_to_section (readonly_data_section
);
7131 ASM_OUTPUT_ALIGN (stream
, floor_log2 (align
/ BITS_PER_UNIT
));
7132 mips_declare_object (stream
, name
, "",
7133 ":\n\t.space\t" HOST_WIDE_INT_PRINT_UNSIGNED
"\n",
7137 mips_declare_common_object (stream
, name
, "\n\t.comm\t",
7141 /* Declare a common object of SIZE bytes using asm directive INIT_STRING.
7142 NAME is the name of the object and ALIGN is the required alignment
7143 in bytes. TAKES_ALIGNMENT_P is true if the directive takes a third
7144 alignment argument. */
7147 mips_declare_common_object (FILE *stream
, const char *name
,
7148 const char *init_string
,
7149 unsigned HOST_WIDE_INT size
,
7150 unsigned int align
, bool takes_alignment_p
)
7152 if (!takes_alignment_p
)
7154 size
+= (align
/ BITS_PER_UNIT
) - 1;
7155 size
-= size
% (align
/ BITS_PER_UNIT
);
7156 mips_declare_object (stream
, name
, init_string
,
7157 "," HOST_WIDE_INT_PRINT_UNSIGNED
"\n", size
);
7160 mips_declare_object (stream
, name
, init_string
,
7161 "," HOST_WIDE_INT_PRINT_UNSIGNED
",%u\n",
7162 size
, align
/ BITS_PER_UNIT
);
7165 /* Emit either a label, .comm, or .lcomm directive. When using assembler
7166 macros, mark the symbol as written so that mips_file_end won't emit an
7167 .extern for it. STREAM is the output file, NAME is the name of the
7168 symbol, INIT_STRING is the string that should be written before the
7169 symbol and FINAL_STRING is the string that should be written after it.
7170 FINAL_STRING is a printf() format that consumes the remaining arguments. */
7173 mips_declare_object (FILE *stream
, const char *name
, const char *init_string
,
7174 const char *final_string
, ...)
7178 fputs (init_string
, stream
);
7179 assemble_name (stream
, name
);
7180 va_start (ap
, final_string
);
7181 vfprintf (stream
, final_string
, ap
);
7184 if (!TARGET_EXPLICIT_RELOCS
)
7186 tree name_tree
= get_identifier (name
);
7187 TREE_ASM_WRITTEN (name_tree
) = 1;
7191 #ifdef ASM_OUTPUT_SIZE_DIRECTIVE
7192 extern int size_directive_output
;
7194 /* Implement ASM_DECLARE_OBJECT_NAME. This is like most of the standard ELF
7195 definitions except that it uses mips_declare_object() to emit the label. */
7198 mips_declare_object_name (FILE *stream
, const char *name
,
7199 tree decl ATTRIBUTE_UNUSED
)
7201 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
7202 ASM_OUTPUT_TYPE_DIRECTIVE (stream
, name
, "object");
7205 size_directive_output
= 0;
7206 if (!flag_inhibit_size_directive
&& DECL_SIZE (decl
))
7210 size_directive_output
= 1;
7211 size
= int_size_in_bytes (TREE_TYPE (decl
));
7212 ASM_OUTPUT_SIZE_DIRECTIVE (stream
, name
, size
);
7215 mips_declare_object (stream
, name
, "", ":\n");
7218 /* Implement ASM_FINISH_DECLARE_OBJECT. This is generic ELF stuff. */
7221 mips_finish_declare_object (FILE *stream
, tree decl
, int top_level
, int at_end
)
7225 name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
7226 if (!flag_inhibit_size_directive
7227 && DECL_SIZE (decl
) != 0
7228 && !at_end
&& top_level
7229 && DECL_INITIAL (decl
) == error_mark_node
7230 && !size_directive_output
)
7234 size_directive_output
= 1;
7235 size
= int_size_in_bytes (TREE_TYPE (decl
));
7236 ASM_OUTPUT_SIZE_DIRECTIVE (stream
, name
, size
);
7241 /* Return true if X in context CONTEXT is a small data address that can
7242 be rewritten as a LO_SUM. */
7245 mips_rewrite_small_data_p (rtx x
, enum mips_symbol_context context
)
7247 enum mips_symbol_type symbol_type
;
7249 return (TARGET_EXPLICIT_RELOCS
7250 && mips_symbolic_constant_p (x
, context
, &symbol_type
)
7251 && symbol_type
== SYMBOL_GP_RELATIVE
);
7255 /* A for_each_rtx callback for mips_small_data_pattern_p. DATA is the
7256 containing MEM, or null if none. */
7259 mips_small_data_pattern_1 (rtx
*loc
, void *data
)
7261 enum mips_symbol_context context
;
7263 if (GET_CODE (*loc
) == LO_SUM
)
7268 if (for_each_rtx (&XEXP (*loc
, 0), mips_small_data_pattern_1
, *loc
))
7273 context
= data
? SYMBOL_CONTEXT_MEM
: SYMBOL_CONTEXT_LEA
;
7274 return mips_rewrite_small_data_p (*loc
, context
);
7277 /* Return true if OP refers to small data symbols directly, not through
7281 mips_small_data_pattern_p (rtx op
)
7283 return for_each_rtx (&op
, mips_small_data_pattern_1
, 0);
7286 /* A for_each_rtx callback, used by mips_rewrite_small_data.
7287 DATA is the containing MEM, or null if none. */
7290 mips_rewrite_small_data_1 (rtx
*loc
, void *data
)
7292 enum mips_symbol_context context
;
7296 for_each_rtx (&XEXP (*loc
, 0), mips_rewrite_small_data_1
, *loc
);
7300 context
= data
? SYMBOL_CONTEXT_MEM
: SYMBOL_CONTEXT_LEA
;
7301 if (mips_rewrite_small_data_p (*loc
, context
))
7302 *loc
= gen_rtx_LO_SUM (Pmode
, pic_offset_table_rtx
, *loc
);
7304 if (GET_CODE (*loc
) == LO_SUM
)
7310 /* If possible, rewrite OP so that it refers to small data using
7311 explicit relocations. */
7314 mips_rewrite_small_data (rtx op
)
7316 op
= copy_insn (op
);
7317 for_each_rtx (&op
, mips_rewrite_small_data_1
, 0);
7321 /* Return true if the current function has an insn that implicitly
7325 mips_function_has_gp_insn (void)
7327 /* Don't bother rechecking if we found one last time. */
7328 if (!cfun
->machine
->has_gp_insn_p
)
7332 push_topmost_sequence ();
7333 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
7335 && GET_CODE (PATTERN (insn
)) != USE
7336 && GET_CODE (PATTERN (insn
)) != CLOBBER
7337 && (get_attr_got (insn
) != GOT_UNSET
7338 || small_data_pattern (PATTERN (insn
), VOIDmode
)))
7340 pop_topmost_sequence ();
7342 cfun
->machine
->has_gp_insn_p
= (insn
!= 0);
7344 return cfun
->machine
->has_gp_insn_p
;
7348 /* Return the register that should be used as the global pointer
7349 within this function. Return 0 if the function doesn't need
7350 a global pointer. */
7353 mips_global_pointer (void)
7357 /* $gp is always available unless we're using a GOT. */
7358 if (!TARGET_USE_GOT
)
7359 return GLOBAL_POINTER_REGNUM
;
7361 /* We must always provide $gp when it is used implicitly. */
7362 if (!TARGET_EXPLICIT_RELOCS
)
7363 return GLOBAL_POINTER_REGNUM
;
7365 /* FUNCTION_PROFILER includes a jal macro, so we need to give it
7367 if (current_function_profile
)
7368 return GLOBAL_POINTER_REGNUM
;
7370 /* If the function has a nonlocal goto, $gp must hold the correct
7371 global pointer for the target function. */
7372 if (current_function_has_nonlocal_goto
)
7373 return GLOBAL_POINTER_REGNUM
;
7375 /* If the gp is never referenced, there's no need to initialize it.
7376 Note that reload can sometimes introduce constant pool references
7377 into a function that otherwise didn't need them. For example,
7378 suppose we have an instruction like:
7380 (set (reg:DF R1) (float:DF (reg:SI R2)))
7382 If R2 turns out to be constant such as 1, the instruction may have a
7383 REG_EQUAL note saying that R1 == 1.0. Reload then has the option of
7384 using this constant if R2 doesn't get allocated to a register.
7386 In cases like these, reload will have added the constant to the pool
7387 but no instruction will yet refer to it. */
7388 if (!df_regs_ever_live_p (GLOBAL_POINTER_REGNUM
)
7389 && !current_function_uses_const_pool
7390 && !mips_function_has_gp_insn ())
7393 /* We need a global pointer, but perhaps we can use a call-clobbered
7394 register instead of $gp. */
7395 if (TARGET_CALL_SAVED_GP
&& current_function_is_leaf
)
7396 for (regno
= GP_REG_FIRST
; regno
<= GP_REG_LAST
; regno
++)
7397 if (!df_regs_ever_live_p (regno
)
7398 && call_really_used_regs
[regno
]
7399 && !fixed_regs
[regno
]
7400 && regno
!= PIC_FUNCTION_ADDR_REGNUM
)
7403 return GLOBAL_POINTER_REGNUM
;
7407 /* Return true if the function return value MODE will get returned in a
7408 floating-point register. */
7411 mips_return_mode_in_fpr_p (enum machine_mode mode
)
7413 return ((GET_MODE_CLASS (mode
) == MODE_FLOAT
7414 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
7415 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
7416 && GET_MODE_UNIT_SIZE (mode
) <= UNITS_PER_HWFPVALUE
);
7419 /* Return a two-character string representing a function floating-point
7420 return mode, used to name MIPS16 function stubs. */
7423 mips16_call_stub_mode_suffix (enum machine_mode mode
)
7427 else if (mode
== DFmode
)
7429 else if (mode
== SCmode
)
7431 else if (mode
== DCmode
)
7433 else if (mode
== V2SFmode
)
7439 /* Return true if the current function returns its value in a floating-point
7440 register in MIPS16 mode. */
7443 mips16_cfun_returns_in_fpr_p (void)
7445 tree return_type
= DECL_RESULT (current_function_decl
);
7446 return (TARGET_MIPS16
7447 && TARGET_HARD_FLOAT_ABI
7448 && !aggregate_value_p (return_type
, current_function_decl
)
7449 && mips_return_mode_in_fpr_p (DECL_MODE (return_type
)));
7453 /* Return true if the current function must save REGNO. */
7456 mips_save_reg_p (unsigned int regno
)
7458 /* We only need to save $gp if TARGET_CALL_SAVED_GP and only then
7459 if we have not chosen a call-clobbered substitute. */
7460 if (regno
== GLOBAL_POINTER_REGNUM
)
7461 return TARGET_CALL_SAVED_GP
&& cfun
->machine
->global_pointer
== regno
;
7463 /* Check call-saved registers. */
7464 if ((current_function_saves_all_registers
|| df_regs_ever_live_p (regno
))
7465 && !call_really_used_regs
[regno
])
7468 /* Save both registers in an FPR pair if either one is used. This is
7469 needed for the case when MIN_FPRS_PER_FMT == 1, which allows the odd
7470 register to be used without the even register. */
7471 if (FP_REG_P (regno
)
7472 && MAX_FPRS_PER_FMT
== 2
7473 && df_regs_ever_live_p (regno
+ 1)
7474 && !call_really_used_regs
[regno
+ 1])
7477 /* We need to save the old frame pointer before setting up a new one. */
7478 if (regno
== HARD_FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
7481 /* Check for registers that must be saved for FUNCTION_PROFILER. */
7482 if (current_function_profile
&& MIPS_SAVE_REG_FOR_PROFILING_P (regno
))
7485 /* We need to save the incoming return address if it is ever clobbered
7486 within the function, if __builtin_eh_return is being used to set a
7487 different return address, or if a stub is being used to return a
7489 if (regno
== GP_REG_FIRST
+ 31
7490 && (df_regs_ever_live_p (regno
)
7491 || current_function_calls_eh_return
7492 || mips16_cfun_returns_in_fpr_p ()))
7498 /* Return the index of the lowest X in the range [0, SIZE) for which
7499 bit REGS[X] is set in MASK. Return SIZE if there is no such X. */
7502 mips16e_find_first_register (unsigned int mask
, const unsigned char *regs
,
7507 for (i
= 0; i
< size
; i
++)
7508 if (BITSET_P (mask
, regs
[i
]))
7514 /* *MASK_PTR is a mask of general purpose registers and *GP_REG_SIZE_PTR
7515 is the number of bytes that they occupy. If *MASK_PTR contains REGS[X]
7516 for some X in [0, SIZE), adjust *MASK_PTR and *GP_REG_SIZE_PTR so that
7517 the same is true for all indexes (X, SIZE). */
7520 mips16e_mask_registers (unsigned int *mask_ptr
, const unsigned char *regs
,
7521 unsigned int size
, HOST_WIDE_INT
*gp_reg_size_ptr
)
7525 i
= mips16e_find_first_register (*mask_ptr
, regs
, size
);
7526 for (i
++; i
< size
; i
++)
7527 if (!BITSET_P (*mask_ptr
, regs
[i
]))
7529 *gp_reg_size_ptr
+= GET_MODE_SIZE (gpr_mode
);
7530 *mask_ptr
|= 1 << regs
[i
];
7534 /* Return the bytes needed to compute the frame pointer from the current
7535 stack pointer. SIZE is the size (in bytes) of the local variables.
7537 MIPS stack frames look like:
7539 Before call After call
7540 high +-----------------------+ +-----------------------+
7542 | caller's temps. | | caller's temps. |
7544 +-----------------------+ +-----------------------+
7546 | arguments on stack. | | arguments on stack. |
7548 +-----------------------+ +-----------------------+
7549 | 4 words to save | | 4 words to save |
7550 | arguments passed | | arguments passed |
7551 | in registers, even | | in registers, even |
7552 | if not passed. | | if not passed. |
7553 SP->+-----------------------+ VFP->+-----------------------+
7554 (VFP = SP+fp_sp_offset) | |\
7555 | fp register save | | fp_reg_size
7557 SP+gp_sp_offset->+-----------------------+
7559 | | gp register save | | gp_reg_size
7560 gp_reg_rounded | | |/
7561 | +-----------------------+
7562 \| alignment padding |
7563 +-----------------------+
7565 | local variables | | var_size
7567 +-----------------------+
7569 | alloca allocations |
7571 +-----------------------+
7573 cprestore_size | | GP save for V.4 abi |
7575 +-----------------------+
7577 | arguments on stack | |
7579 +-----------------------+ |
7580 | 4 words to save | | args_size
7581 | arguments passed | |
7582 | in registers, even | |
7583 | if not passed. | |
7584 low | (TARGET_OLDABI only) |/
7585 memory SP->+-----------------------+
7590 compute_frame_size (HOST_WIDE_INT size
)
7593 HOST_WIDE_INT total_size
; /* # bytes that the entire frame takes up */
7594 HOST_WIDE_INT var_size
; /* # bytes that variables take up */
7595 HOST_WIDE_INT args_size
; /* # bytes that outgoing arguments take up */
7596 HOST_WIDE_INT cprestore_size
; /* # bytes that the cprestore slot takes up */
7597 HOST_WIDE_INT gp_reg_rounded
; /* # bytes needed to store gp after rounding */
7598 HOST_WIDE_INT gp_reg_size
; /* # bytes needed to store gp regs */
7599 HOST_WIDE_INT fp_reg_size
; /* # bytes needed to store fp regs */
7600 unsigned int mask
; /* mask of saved gp registers */
7601 unsigned int fmask
; /* mask of saved fp registers */
7603 cfun
->machine
->global_pointer
= mips_global_pointer ();
7609 var_size
= MIPS_STACK_ALIGN (size
);
7610 args_size
= current_function_outgoing_args_size
;
7611 cprestore_size
= MIPS_STACK_ALIGN (STARTING_FRAME_OFFSET
) - args_size
;
7613 /* The space set aside by STARTING_FRAME_OFFSET isn't needed in leaf
7614 functions. If the function has local variables, we're committed
7615 to allocating it anyway. Otherwise reclaim it here. */
7616 if (var_size
== 0 && current_function_is_leaf
)
7617 cprestore_size
= args_size
= 0;
7619 /* The MIPS 3.0 linker does not like functions that dynamically
7620 allocate the stack and have 0 for STACK_DYNAMIC_OFFSET, since it
7621 looks like we are trying to create a second frame pointer to the
7622 function, so allocate some stack space to make it happy. */
7624 if (args_size
== 0 && current_function_calls_alloca
)
7625 args_size
= 4 * UNITS_PER_WORD
;
7627 total_size
= var_size
+ args_size
+ cprestore_size
;
7629 /* Calculate space needed for gp registers. */
7630 for (regno
= GP_REG_FIRST
; regno
<= GP_REG_LAST
; regno
++)
7631 if (mips_save_reg_p (regno
))
7633 gp_reg_size
+= GET_MODE_SIZE (gpr_mode
);
7634 mask
|= 1 << (regno
- GP_REG_FIRST
);
7637 /* We need to restore these for the handler. */
7638 if (current_function_calls_eh_return
)
7643 regno
= EH_RETURN_DATA_REGNO (i
);
7644 if (regno
== INVALID_REGNUM
)
7646 gp_reg_size
+= GET_MODE_SIZE (gpr_mode
);
7647 mask
|= 1 << (regno
- GP_REG_FIRST
);
7651 /* The MIPS16e SAVE and RESTORE instructions have two ranges of registers:
7652 $a3-$a0 and $s2-$s8. If we save one register in the range, we must
7653 save all later registers too. */
7654 if (GENERATE_MIPS16E_SAVE_RESTORE
)
7656 mips16e_mask_registers (&mask
, mips16e_s2_s8_regs
,
7657 ARRAY_SIZE (mips16e_s2_s8_regs
), &gp_reg_size
);
7658 mips16e_mask_registers (&mask
, mips16e_a0_a3_regs
,
7659 ARRAY_SIZE (mips16e_a0_a3_regs
), &gp_reg_size
);
7662 /* This loop must iterate over the same space as its companion in
7663 mips_for_each_saved_reg. */
7664 if (TARGET_HARD_FLOAT
)
7665 for (regno
= (FP_REG_LAST
- MAX_FPRS_PER_FMT
+ 1);
7666 regno
>= FP_REG_FIRST
;
7667 regno
-= MAX_FPRS_PER_FMT
)
7668 if (mips_save_reg_p (regno
))
7670 fp_reg_size
+= MAX_FPRS_PER_FMT
* UNITS_PER_FPREG
;
7671 fmask
|= ((1 << MAX_FPRS_PER_FMT
) - 1) << (regno
- FP_REG_FIRST
);
7674 gp_reg_rounded
= MIPS_STACK_ALIGN (gp_reg_size
);
7675 total_size
+= gp_reg_rounded
+ MIPS_STACK_ALIGN (fp_reg_size
);
7677 /* Add in the space required for saving incoming register arguments. */
7678 total_size
+= current_function_pretend_args_size
;
7679 total_size
+= MIPS_STACK_ALIGN (cfun
->machine
->varargs_size
);
7681 /* Save other computed information. */
7682 cfun
->machine
->frame
.total_size
= total_size
;
7683 cfun
->machine
->frame
.var_size
= var_size
;
7684 cfun
->machine
->frame
.args_size
= args_size
;
7685 cfun
->machine
->frame
.cprestore_size
= cprestore_size
;
7686 cfun
->machine
->frame
.gp_reg_size
= gp_reg_size
;
7687 cfun
->machine
->frame
.fp_reg_size
= fp_reg_size
;
7688 cfun
->machine
->frame
.mask
= mask
;
7689 cfun
->machine
->frame
.fmask
= fmask
;
7690 cfun
->machine
->frame
.initialized
= reload_completed
;
7691 cfun
->machine
->frame
.num_gp
= gp_reg_size
/ UNITS_PER_WORD
;
7692 cfun
->machine
->frame
.num_fp
= (fp_reg_size
7693 / (MAX_FPRS_PER_FMT
* UNITS_PER_FPREG
));
7697 HOST_WIDE_INT offset
;
7699 if (GENERATE_MIPS16E_SAVE_RESTORE
)
7700 /* MIPS16e SAVE and RESTORE instructions require the GP save area
7701 to be aligned at the high end with any padding at the low end.
7702 It is only safe to use this calculation for o32, where we never
7703 have pretend arguments, and where any varargs will be saved in
7704 the caller-allocated area rather than at the top of the frame. */
7705 offset
= (total_size
- GET_MODE_SIZE (gpr_mode
));
7707 offset
= (args_size
+ cprestore_size
+ var_size
7708 + gp_reg_size
- GET_MODE_SIZE (gpr_mode
));
7709 cfun
->machine
->frame
.gp_sp_offset
= offset
;
7710 cfun
->machine
->frame
.gp_save_offset
= offset
- total_size
;
7714 cfun
->machine
->frame
.gp_sp_offset
= 0;
7715 cfun
->machine
->frame
.gp_save_offset
= 0;
7720 HOST_WIDE_INT offset
;
7722 offset
= (args_size
+ cprestore_size
+ var_size
7723 + gp_reg_rounded
+ fp_reg_size
7724 - MAX_FPRS_PER_FMT
* UNITS_PER_FPREG
);
7725 cfun
->machine
->frame
.fp_sp_offset
= offset
;
7726 cfun
->machine
->frame
.fp_save_offset
= offset
- total_size
;
7730 cfun
->machine
->frame
.fp_sp_offset
= 0;
7731 cfun
->machine
->frame
.fp_save_offset
= 0;
7734 /* Ok, we're done. */
7738 /* Implement INITIAL_ELIMINATION_OFFSET. FROM is either the frame
7739 pointer or argument pointer. TO is either the stack pointer or
7740 hard frame pointer. */
7743 mips_initial_elimination_offset (int from
, int to
)
7745 HOST_WIDE_INT offset
;
7747 compute_frame_size (get_frame_size ());
7749 /* Set OFFSET to the offset from the stack pointer. */
7752 case FRAME_POINTER_REGNUM
:
7756 case ARG_POINTER_REGNUM
:
7757 offset
= (cfun
->machine
->frame
.total_size
7758 - current_function_pretend_args_size
);
7765 if (TARGET_MIPS16
&& to
== HARD_FRAME_POINTER_REGNUM
)
7766 offset
-= cfun
->machine
->frame
.args_size
;
7771 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
7772 back to a previous frame. */
7774 mips_return_addr (int count
, rtx frame ATTRIBUTE_UNUSED
)
7779 return get_hard_reg_initial_val (Pmode
, GP_REG_FIRST
+ 31);
7782 /* Use FN to save or restore register REGNO. MODE is the register's
7783 mode and OFFSET is the offset of its save slot from the current
7787 mips_save_restore_reg (enum machine_mode mode
, int regno
,
7788 HOST_WIDE_INT offset
, mips_save_restore_fn fn
)
7792 mem
= gen_frame_mem (mode
, plus_constant (stack_pointer_rtx
, offset
));
7794 fn (gen_rtx_REG (mode
, regno
), mem
);
7798 /* Call FN for each register that is saved by the current function.
7799 SP_OFFSET is the offset of the current stack pointer from the start
7803 mips_for_each_saved_reg (HOST_WIDE_INT sp_offset
, mips_save_restore_fn fn
)
7805 enum machine_mode fpr_mode
;
7806 HOST_WIDE_INT offset
;
7809 /* Save registers starting from high to low. The debuggers prefer at least
7810 the return register be stored at func+4, and also it allows us not to
7811 need a nop in the epilogue if at least one register is reloaded in
7812 addition to return address. */
7813 offset
= cfun
->machine
->frame
.gp_sp_offset
- sp_offset
;
7814 for (regno
= GP_REG_LAST
; regno
>= GP_REG_FIRST
; regno
--)
7815 if (BITSET_P (cfun
->machine
->frame
.mask
, regno
- GP_REG_FIRST
))
7817 mips_save_restore_reg (gpr_mode
, regno
, offset
, fn
);
7818 offset
-= GET_MODE_SIZE (gpr_mode
);
7821 /* This loop must iterate over the same space as its companion in
7822 compute_frame_size. */
7823 offset
= cfun
->machine
->frame
.fp_sp_offset
- sp_offset
;
7824 fpr_mode
= (TARGET_SINGLE_FLOAT
? SFmode
: DFmode
);
7825 for (regno
= (FP_REG_LAST
- MAX_FPRS_PER_FMT
+ 1);
7826 regno
>= FP_REG_FIRST
;
7827 regno
-= MAX_FPRS_PER_FMT
)
7828 if (BITSET_P (cfun
->machine
->frame
.fmask
, regno
- FP_REG_FIRST
))
7830 mips_save_restore_reg (fpr_mode
, regno
, offset
, fn
);
7831 offset
-= GET_MODE_SIZE (fpr_mode
);
7835 /* If we're generating n32 or n64 abicalls, and the current function
7836 does not use $28 as its global pointer, emit a cplocal directive.
7837 Use pic_offset_table_rtx as the argument to the directive. */
7840 mips_output_cplocal (void)
7842 if (!TARGET_EXPLICIT_RELOCS
7843 && cfun
->machine
->global_pointer
> 0
7844 && cfun
->machine
->global_pointer
!= GLOBAL_POINTER_REGNUM
)
7845 output_asm_insn (".cplocal %+", 0);
7848 /* Return the style of GP load sequence that is being used for the
7849 current function. */
7851 enum mips_loadgp_style
7852 mips_current_loadgp_style (void)
7854 if (!TARGET_USE_GOT
|| cfun
->machine
->global_pointer
== 0)
7860 if (TARGET_ABSOLUTE_ABICALLS
)
7861 return LOADGP_ABSOLUTE
;
7863 return TARGET_NEWABI
? LOADGP_NEWABI
: LOADGP_OLDABI
;
7866 /* The __gnu_local_gp symbol. */
7868 static GTY(()) rtx mips_gnu_local_gp
;
7870 /* If we're generating n32 or n64 abicalls, emit instructions
7871 to set up the global pointer. */
7874 mips_emit_loadgp (void)
7876 rtx addr
, offset
, incoming_address
, base
, index
;
7878 switch (mips_current_loadgp_style ())
7880 case LOADGP_ABSOLUTE
:
7881 if (mips_gnu_local_gp
== NULL
)
7883 mips_gnu_local_gp
= gen_rtx_SYMBOL_REF (Pmode
, "__gnu_local_gp");
7884 SYMBOL_REF_FLAGS (mips_gnu_local_gp
) |= SYMBOL_FLAG_LOCAL
;
7886 emit_insn (gen_loadgp_absolute (mips_gnu_local_gp
));
7890 addr
= XEXP (DECL_RTL (current_function_decl
), 0);
7891 offset
= mips_unspec_address (addr
, SYMBOL_GOTOFF_LOADGP
);
7892 incoming_address
= gen_rtx_REG (Pmode
, PIC_FUNCTION_ADDR_REGNUM
);
7893 emit_insn (gen_loadgp_newabi (offset
, incoming_address
));
7894 if (!TARGET_EXPLICIT_RELOCS
)
7895 emit_insn (gen_loadgp_blockage ());
7899 base
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (VXWORKS_GOTT_BASE
));
7900 index
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (VXWORKS_GOTT_INDEX
));
7901 emit_insn (gen_loadgp_rtp (base
, index
));
7902 if (!TARGET_EXPLICIT_RELOCS
)
7903 emit_insn (gen_loadgp_blockage ());
7911 /* Set up the stack and frame (if desired) for the function. */
7914 mips_output_function_prologue (FILE *file
, HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
7917 HOST_WIDE_INT tsize
= cfun
->machine
->frame
.total_size
;
7919 #ifdef SDB_DEBUGGING_INFO
7920 if (debug_info_level
!= DINFO_LEVEL_TERSE
&& write_symbols
== SDB_DEBUG
)
7921 SDB_OUTPUT_SOURCE_LINE (file
, DECL_SOURCE_LINE (current_function_decl
));
7924 /* In mips16 mode, we may need to generate a 32 bit to handle
7925 floating point arguments. The linker will arrange for any 32-bit
7926 functions to call this stub, which will then jump to the 16-bit
7929 && TARGET_HARD_FLOAT_ABI
7930 && current_function_args_info
.fp_code
!= 0)
7931 build_mips16_function_stub (file
);
7933 /* Select the mips16 mode for this function. */
7935 fprintf (file
, "\t.set\tmips16\n");
7937 fprintf (file
, "\t.set\tnomips16\n");
7939 if (!FUNCTION_NAME_ALREADY_DECLARED
)
7941 /* Get the function name the same way that toplev.c does before calling
7942 assemble_start_function. This is needed so that the name used here
7943 exactly matches the name used in ASM_DECLARE_FUNCTION_NAME. */
7944 fnname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
7946 if (!flag_inhibit_size_directive
)
7948 fputs ("\t.ent\t", file
);
7949 assemble_name (file
, fnname
);
7953 assemble_name (file
, fnname
);
7954 fputs (":\n", file
);
7957 /* Stop mips_file_end from treating this function as external. */
7958 if (TARGET_IRIX
&& mips_abi
== ABI_32
)
7959 TREE_ASM_WRITTEN (DECL_NAME (cfun
->decl
)) = 1;
7961 if (!flag_inhibit_size_directive
)
7963 /* .frame FRAMEREG, FRAMESIZE, RETREG */
7965 "\t.frame\t%s," HOST_WIDE_INT_PRINT_DEC
",%s\t\t"
7966 "# vars= " HOST_WIDE_INT_PRINT_DEC
", regs= %d/%d"
7967 ", args= " HOST_WIDE_INT_PRINT_DEC
7968 ", gp= " HOST_WIDE_INT_PRINT_DEC
"\n",
7969 (reg_names
[(frame_pointer_needed
)
7970 ? HARD_FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
]),
7971 ((frame_pointer_needed
&& TARGET_MIPS16
)
7972 ? tsize
- cfun
->machine
->frame
.args_size
7974 reg_names
[GP_REG_FIRST
+ 31],
7975 cfun
->machine
->frame
.var_size
,
7976 cfun
->machine
->frame
.num_gp
,
7977 cfun
->machine
->frame
.num_fp
,
7978 cfun
->machine
->frame
.args_size
,
7979 cfun
->machine
->frame
.cprestore_size
);
7981 /* .mask MASK, GPOFFSET; .fmask FPOFFSET */
7982 fprintf (file
, "\t.mask\t0x%08x," HOST_WIDE_INT_PRINT_DEC
"\n",
7983 cfun
->machine
->frame
.mask
,
7984 cfun
->machine
->frame
.gp_save_offset
);
7985 fprintf (file
, "\t.fmask\t0x%08x," HOST_WIDE_INT_PRINT_DEC
"\n",
7986 cfun
->machine
->frame
.fmask
,
7987 cfun
->machine
->frame
.fp_save_offset
);
7990 OLD_SP == *FRAMEREG + FRAMESIZE => can find old_sp from nominated FP reg.
7991 HIGHEST_GP_SAVED == *FRAMEREG + FRAMESIZE + GPOFFSET => can find saved regs. */
7994 if (mips_current_loadgp_style () == LOADGP_OLDABI
)
7996 /* Handle the initialization of $gp for SVR4 PIC. */
7997 if (!cfun
->machine
->all_noreorder_p
)
7998 output_asm_insn ("%(.cpload\t%^%)", 0);
8000 output_asm_insn ("%(.cpload\t%^\n\t%<", 0);
8002 else if (cfun
->machine
->all_noreorder_p
)
8003 output_asm_insn ("%(%<", 0);
8005 /* Tell the assembler which register we're using as the global
8006 pointer. This is needed for thunks, since they can use either
8007 explicit relocs or assembler macros. */
8008 mips_output_cplocal ();
8011 /* Make the last instruction frame related and note that it performs
8012 the operation described by FRAME_PATTERN. */
8015 mips_set_frame_expr (rtx frame_pattern
)
8019 insn
= get_last_insn ();
8020 RTX_FRAME_RELATED_P (insn
) = 1;
8021 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
8027 /* Return a frame-related rtx that stores REG at MEM.
8028 REG must be a single register. */
8031 mips_frame_set (rtx mem
, rtx reg
)
8035 /* If we're saving the return address register and the dwarf return
8036 address column differs from the hard register number, adjust the
8037 note reg to refer to the former. */
8038 if (REGNO (reg
) == GP_REG_FIRST
+ 31
8039 && DWARF_FRAME_RETURN_COLUMN
!= GP_REG_FIRST
+ 31)
8040 reg
= gen_rtx_REG (GET_MODE (reg
), DWARF_FRAME_RETURN_COLUMN
);
8042 set
= gen_rtx_SET (VOIDmode
, mem
, reg
);
8043 RTX_FRAME_RELATED_P (set
) = 1;
8049 /* Save register REG to MEM. Make the instruction frame-related. */
8052 mips_save_reg (rtx reg
, rtx mem
)
8054 if (GET_MODE (reg
) == DFmode
&& !TARGET_FLOAT64
)
8058 if (mips_split_64bit_move_p (mem
, reg
))
8059 mips_split_64bit_move (mem
, reg
);
8061 mips_emit_move (mem
, reg
);
8063 x1
= mips_frame_set (mips_subword (mem
, 0), mips_subword (reg
, 0));
8064 x2
= mips_frame_set (mips_subword (mem
, 1), mips_subword (reg
, 1));
8065 mips_set_frame_expr (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, x1
, x2
)));
8070 && REGNO (reg
) != GP_REG_FIRST
+ 31
8071 && !M16_REG_P (REGNO (reg
)))
8073 /* Save a non-mips16 register by moving it through a temporary.
8074 We don't need to do this for $31 since there's a special
8075 instruction for it. */
8076 mips_emit_move (MIPS_PROLOGUE_TEMP (GET_MODE (reg
)), reg
);
8077 mips_emit_move (mem
, MIPS_PROLOGUE_TEMP (GET_MODE (reg
)));
8080 mips_emit_move (mem
, reg
);
8082 mips_set_frame_expr (mips_frame_set (mem
, reg
));
8086 /* Return a move between register REGNO and memory location SP + OFFSET.
8087 Make the move a load if RESTORE_P, otherwise make it a frame-related
8091 mips16e_save_restore_reg (bool restore_p
, HOST_WIDE_INT offset
,
8096 mem
= gen_frame_mem (SImode
, plus_constant (stack_pointer_rtx
, offset
));
8097 reg
= gen_rtx_REG (SImode
, regno
);
8099 ? gen_rtx_SET (VOIDmode
, reg
, mem
)
8100 : mips_frame_set (mem
, reg
));
8103 /* Return RTL for a MIPS16e SAVE or RESTORE instruction; RESTORE_P says which.
8104 The instruction must:
8106 - Allocate or deallocate SIZE bytes in total; SIZE is known
8109 - Save or restore as many registers in *MASK_PTR as possible.
8110 The instruction saves the first registers at the top of the
8111 allocated area, with the other registers below it.
8113 - Save NARGS argument registers above the allocated area.
8115 (NARGS is always zero if RESTORE_P.)
8117 The SAVE and RESTORE instructions cannot save and restore all general
8118 registers, so there may be some registers left over for the caller to
8119 handle. Destructively modify *MASK_PTR so that it contains the registers
8120 that still need to be saved or restored. The caller can save these
8121 registers in the memory immediately below *OFFSET_PTR, which is a
8122 byte offset from the bottom of the allocated stack area. */
8125 mips16e_build_save_restore (bool restore_p
, unsigned int *mask_ptr
,
8126 HOST_WIDE_INT
*offset_ptr
, unsigned int nargs
,
8130 HOST_WIDE_INT offset
, top_offset
;
8131 unsigned int i
, regno
;
8134 gcc_assert (cfun
->machine
->frame
.fp_reg_size
== 0);
8136 /* Calculate the number of elements in the PARALLEL. We need one element
8137 for the stack adjustment, one for each argument register save, and one
8138 for each additional register move. */
8140 for (i
= 0; i
< ARRAY_SIZE (mips16e_save_restore_regs
); i
++)
8141 if (BITSET_P (*mask_ptr
, mips16e_save_restore_regs
[i
]))
8144 /* Create the final PARALLEL. */
8145 pattern
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (n
));
8148 /* Add the stack pointer adjustment. */
8149 set
= gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
8150 plus_constant (stack_pointer_rtx
,
8151 restore_p
? size
: -size
));
8152 RTX_FRAME_RELATED_P (set
) = 1;
8153 XVECEXP (pattern
, 0, n
++) = set
;
8155 /* Stack offsets in the PARALLEL are relative to the old stack pointer. */
8156 top_offset
= restore_p
? size
: 0;
8158 /* Save the arguments. */
8159 for (i
= 0; i
< nargs
; i
++)
8161 offset
= top_offset
+ i
* GET_MODE_SIZE (gpr_mode
);
8162 set
= mips16e_save_restore_reg (restore_p
, offset
, GP_ARG_FIRST
+ i
);
8163 XVECEXP (pattern
, 0, n
++) = set
;
8166 /* Then fill in the other register moves. */
8167 offset
= top_offset
;
8168 for (i
= 0; i
< ARRAY_SIZE (mips16e_save_restore_regs
); i
++)
8170 regno
= mips16e_save_restore_regs
[i
];
8171 if (BITSET_P (*mask_ptr
, regno
))
8173 offset
-= UNITS_PER_WORD
;
8174 set
= mips16e_save_restore_reg (restore_p
, offset
, regno
);
8175 XVECEXP (pattern
, 0, n
++) = set
;
8176 *mask_ptr
&= ~(1 << regno
);
8180 /* Tell the caller what offset it should use for the remaining registers. */
8181 *offset_ptr
= size
+ (offset
- top_offset
) + size
;
8183 gcc_assert (n
== XVECLEN (pattern
, 0));
8188 /* PATTERN is a PARALLEL whose first element adds ADJUST to the stack
8189 pointer. Return true if PATTERN matches the kind of instruction
8190 generated by mips16e_build_save_restore. If INFO is nonnull,
8191 initialize it when returning true. */
8194 mips16e_save_restore_pattern_p (rtx pattern
, HOST_WIDE_INT adjust
,
8195 struct mips16e_save_restore_info
*info
)
8197 unsigned int i
, nargs
, mask
;
8198 HOST_WIDE_INT top_offset
, save_offset
, offset
, extra
;
8199 rtx set
, reg
, mem
, base
;
8202 if (!GENERATE_MIPS16E_SAVE_RESTORE
)
8205 /* Stack offsets in the PARALLEL are relative to the old stack pointer. */
8206 top_offset
= adjust
> 0 ? adjust
: 0;
8208 /* Interpret all other members of the PARALLEL. */
8209 save_offset
= top_offset
- GET_MODE_SIZE (gpr_mode
);
8213 for (n
= 1; n
< XVECLEN (pattern
, 0); n
++)
8215 /* Check that we have a SET. */
8216 set
= XVECEXP (pattern
, 0, n
);
8217 if (GET_CODE (set
) != SET
)
8220 /* Check that the SET is a load (if restoring) or a store
8222 mem
= adjust
> 0 ? SET_SRC (set
) : SET_DEST (set
);
8226 /* Check that the address is the sum of the stack pointer and a
8227 possibly-zero constant offset. */
8228 mips_split_plus (XEXP (mem
, 0), &base
, &offset
);
8229 if (base
!= stack_pointer_rtx
)
8232 /* Check that SET's other operand is a register. */
8233 reg
= adjust
> 0 ? SET_DEST (set
) : SET_SRC (set
);
8237 /* Check for argument saves. */
8238 if (offset
== top_offset
+ nargs
* GET_MODE_SIZE (gpr_mode
)
8239 && REGNO (reg
) == GP_ARG_FIRST
+ nargs
)
8241 else if (offset
== save_offset
)
8243 while (mips16e_save_restore_regs
[i
++] != REGNO (reg
))
8244 if (i
== ARRAY_SIZE (mips16e_save_restore_regs
))
8247 mask
|= 1 << REGNO (reg
);
8248 save_offset
-= GET_MODE_SIZE (gpr_mode
);
8254 /* Check that the restrictions on register ranges are met. */
8256 mips16e_mask_registers (&mask
, mips16e_s2_s8_regs
,
8257 ARRAY_SIZE (mips16e_s2_s8_regs
), &extra
);
8258 mips16e_mask_registers (&mask
, mips16e_a0_a3_regs
,
8259 ARRAY_SIZE (mips16e_a0_a3_regs
), &extra
);
8263 /* Make sure that the topmost argument register is not saved twice.
8264 The checks above ensure that the same is then true for the other
8265 argument registers. */
8266 if (nargs
> 0 && BITSET_P (mask
, GP_ARG_FIRST
+ nargs
- 1))
8269 /* Pass back information, if requested. */
8272 info
->nargs
= nargs
;
8274 info
->size
= (adjust
> 0 ? adjust
: -adjust
);
8280 /* Add a MIPS16e SAVE or RESTORE register-range argument to string S
8281 for the register range [MIN_REG, MAX_REG]. Return a pointer to
8282 the null terminator. */
8285 mips16e_add_register_range (char *s
, unsigned int min_reg
,
8286 unsigned int max_reg
)
8288 if (min_reg
!= max_reg
)
8289 s
+= sprintf (s
, ",%s-%s", reg_names
[min_reg
], reg_names
[max_reg
]);
8291 s
+= sprintf (s
, ",%s", reg_names
[min_reg
]);
8295 /* Return the assembly instruction for a MIPS16e SAVE or RESTORE instruction.
8296 PATTERN and ADJUST are as for mips16e_save_restore_pattern_p. */
8299 mips16e_output_save_restore (rtx pattern
, HOST_WIDE_INT adjust
)
8301 static char buffer
[300];
8303 struct mips16e_save_restore_info info
;
8304 unsigned int i
, end
;
8307 /* Parse the pattern. */
8308 if (!mips16e_save_restore_pattern_p (pattern
, adjust
, &info
))
8311 /* Add the mnemonic. */
8312 s
= strcpy (buffer
, adjust
> 0 ? "restore\t" : "save\t");
8315 /* Save the arguments. */
8317 s
+= sprintf (s
, "%s-%s,", reg_names
[GP_ARG_FIRST
],
8318 reg_names
[GP_ARG_FIRST
+ info
.nargs
- 1]);
8319 else if (info
.nargs
== 1)
8320 s
+= sprintf (s
, "%s,", reg_names
[GP_ARG_FIRST
]);
8322 /* Emit the amount of stack space to allocate or deallocate. */
8323 s
+= sprintf (s
, "%d", (int) info
.size
);
8325 /* Save or restore $16. */
8326 if (BITSET_P (info
.mask
, 16))
8327 s
+= sprintf (s
, ",%s", reg_names
[GP_REG_FIRST
+ 16]);
8329 /* Save or restore $17. */
8330 if (BITSET_P (info
.mask
, 17))
8331 s
+= sprintf (s
, ",%s", reg_names
[GP_REG_FIRST
+ 17]);
8333 /* Save or restore registers in the range $s2...$s8, which
8334 mips16e_s2_s8_regs lists in decreasing order. Note that this
8335 is a software register range; the hardware registers are not
8336 numbered consecutively. */
8337 end
= ARRAY_SIZE (mips16e_s2_s8_regs
);
8338 i
= mips16e_find_first_register (info
.mask
, mips16e_s2_s8_regs
, end
);
8340 s
= mips16e_add_register_range (s
, mips16e_s2_s8_regs
[end
- 1],
8341 mips16e_s2_s8_regs
[i
]);
8343 /* Save or restore registers in the range $a0...$a3. */
8344 end
= ARRAY_SIZE (mips16e_a0_a3_regs
);
8345 i
= mips16e_find_first_register (info
.mask
, mips16e_a0_a3_regs
, end
);
8347 s
= mips16e_add_register_range (s
, mips16e_a0_a3_regs
[i
],
8348 mips16e_a0_a3_regs
[end
- 1]);
8350 /* Save or restore $31. */
8351 if (BITSET_P (info
.mask
, 31))
8352 s
+= sprintf (s
, ",%s", reg_names
[GP_REG_FIRST
+ 31]);
8357 /* Return a simplified form of X using the register values in REG_VALUES.
8358 REG_VALUES[R] is the last value assigned to hard register R, or null
8359 if R has not been modified.
8361 This function is rather limited, but is good enough for our purposes. */
8364 mips16e_collect_propagate_value (rtx x
, rtx
*reg_values
)
8368 x
= avoid_constant_pool_reference (x
);
8372 x0
= mips16e_collect_propagate_value (XEXP (x
, 0), reg_values
);
8373 return simplify_gen_unary (GET_CODE (x
), GET_MODE (x
),
8374 x0
, GET_MODE (XEXP (x
, 0)));
8377 if (ARITHMETIC_P (x
))
8379 x0
= mips16e_collect_propagate_value (XEXP (x
, 0), reg_values
);
8380 x1
= mips16e_collect_propagate_value (XEXP (x
, 1), reg_values
);
8381 return simplify_gen_binary (GET_CODE (x
), GET_MODE (x
), x0
, x1
);
8385 && reg_values
[REGNO (x
)]
8386 && !rtx_unstable_p (reg_values
[REGNO (x
)]))
8387 return reg_values
[REGNO (x
)];
8392 /* Return true if (set DEST SRC) stores an argument register into its
8393 caller-allocated save slot, storing the number of that argument
8394 register in *REGNO_PTR if so. REG_VALUES is as for
8395 mips16e_collect_propagate_value. */
8398 mips16e_collect_argument_save_p (rtx dest
, rtx src
, rtx
*reg_values
,
8399 unsigned int *regno_ptr
)
8401 unsigned int argno
, regno
;
8402 HOST_WIDE_INT offset
, required_offset
;
8405 /* Check that this is a word-mode store. */
8406 if (!MEM_P (dest
) || !REG_P (src
) || GET_MODE (dest
) != word_mode
)
8409 /* Check that the register being saved is an unmodified argument
8411 regno
= REGNO (src
);
8412 if (regno
< GP_ARG_FIRST
|| regno
> GP_ARG_LAST
|| reg_values
[regno
])
8414 argno
= regno
- GP_ARG_FIRST
;
8416 /* Check whether the address is an appropriate stack pointer or
8417 frame pointer access. The frame pointer is offset from the
8418 stack pointer by the size of the outgoing arguments. */
8419 addr
= mips16e_collect_propagate_value (XEXP (dest
, 0), reg_values
);
8420 mips_split_plus (addr
, &base
, &offset
);
8421 required_offset
= cfun
->machine
->frame
.total_size
+ argno
* UNITS_PER_WORD
;
8422 if (base
== hard_frame_pointer_rtx
)
8423 required_offset
-= cfun
->machine
->frame
.args_size
;
8424 else if (base
!= stack_pointer_rtx
)
8426 if (offset
!= required_offset
)
8433 /* A subroutine of mips_expand_prologue, called only when generating
8434 MIPS16e SAVE instructions. Search the start of the function for any
8435 instructions that save argument registers into their caller-allocated
8436 save slots. Delete such instructions and return a value N such that
8437 saving [GP_ARG_FIRST, GP_ARG_FIRST + N) would make all the deleted
8438 instructions redundant. */
8441 mips16e_collect_argument_saves (void)
8443 rtx reg_values
[FIRST_PSEUDO_REGISTER
];
8444 rtx insn
, next
, set
, dest
, src
;
8445 unsigned int nargs
, regno
;
8447 push_topmost_sequence ();
8449 memset (reg_values
, 0, sizeof (reg_values
));
8450 for (insn
= get_insns (); insn
; insn
= next
)
8452 next
= NEXT_INSN (insn
);
8459 set
= PATTERN (insn
);
8460 if (GET_CODE (set
) != SET
)
8463 dest
= SET_DEST (set
);
8464 src
= SET_SRC (set
);
8465 if (mips16e_collect_argument_save_p (dest
, src
, reg_values
, ®no
))
8467 if (!BITSET_P (cfun
->machine
->frame
.mask
, regno
))
8470 nargs
= MAX (nargs
, (regno
- GP_ARG_FIRST
) + 1);
8473 else if (REG_P (dest
) && GET_MODE (dest
) == word_mode
)
8474 reg_values
[REGNO (dest
)]
8475 = mips16e_collect_propagate_value (src
, reg_values
);
8479 pop_topmost_sequence ();
8484 /* Expand the prologue into a bunch of separate insns. */
8487 mips_expand_prologue (void)
8493 if (cfun
->machine
->global_pointer
> 0)
8494 SET_REGNO (pic_offset_table_rtx
, cfun
->machine
->global_pointer
);
8496 size
= compute_frame_size (get_frame_size ());
8498 /* Save the registers. Allocate up to MIPS_MAX_FIRST_STACK_STEP
8499 bytes beforehand; this is enough to cover the register save area
8500 without going out of range. */
8501 if ((cfun
->machine
->frame
.mask
| cfun
->machine
->frame
.fmask
) != 0)
8503 HOST_WIDE_INT step1
;
8505 step1
= MIN (size
, MIPS_MAX_FIRST_STACK_STEP
);
8507 if (GENERATE_MIPS16E_SAVE_RESTORE
)
8509 HOST_WIDE_INT offset
;
8510 unsigned int mask
, regno
;
8512 /* Try to merge argument stores into the save instruction. */
8513 nargs
= mips16e_collect_argument_saves ();
8515 /* Build the save instruction. */
8516 mask
= cfun
->machine
->frame
.mask
;
8517 insn
= mips16e_build_save_restore (false, &mask
, &offset
,
8519 RTX_FRAME_RELATED_P (emit_insn (insn
)) = 1;
8522 /* Check if we need to save other registers. */
8523 for (regno
= GP_REG_FIRST
; regno
< GP_REG_LAST
; regno
++)
8524 if (BITSET_P (mask
, regno
- GP_REG_FIRST
))
8526 offset
-= GET_MODE_SIZE (gpr_mode
);
8527 mips_save_restore_reg (gpr_mode
, regno
, offset
, mips_save_reg
);
8532 insn
= gen_add3_insn (stack_pointer_rtx
,
8535 RTX_FRAME_RELATED_P (emit_insn (insn
)) = 1;
8537 mips_for_each_saved_reg (size
, mips_save_reg
);
8541 /* Allocate the rest of the frame. */
8544 if (SMALL_OPERAND (-size
))
8545 RTX_FRAME_RELATED_P (emit_insn (gen_add3_insn (stack_pointer_rtx
,
8547 GEN_INT (-size
)))) = 1;
8550 mips_emit_move (MIPS_PROLOGUE_TEMP (Pmode
), GEN_INT (size
));
8553 /* There are no instructions to add or subtract registers
8554 from the stack pointer, so use the frame pointer as a
8555 temporary. We should always be using a frame pointer
8556 in this case anyway. */
8557 gcc_assert (frame_pointer_needed
);
8558 mips_emit_move (hard_frame_pointer_rtx
, stack_pointer_rtx
);
8559 emit_insn (gen_sub3_insn (hard_frame_pointer_rtx
,
8560 hard_frame_pointer_rtx
,
8561 MIPS_PROLOGUE_TEMP (Pmode
)));
8562 mips_emit_move (stack_pointer_rtx
, hard_frame_pointer_rtx
);
8565 emit_insn (gen_sub3_insn (stack_pointer_rtx
,
8567 MIPS_PROLOGUE_TEMP (Pmode
)));
8569 /* Describe the combined effect of the previous instructions. */
8571 (gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
8572 plus_constant (stack_pointer_rtx
, -size
)));
8576 /* Set up the frame pointer, if we're using one. In mips16 code,
8577 we point the frame pointer ahead of the outgoing argument area.
8578 This should allow more variables & incoming arguments to be
8579 accessed with unextended instructions. */
8580 if (frame_pointer_needed
)
8582 if (TARGET_MIPS16
&& cfun
->machine
->frame
.args_size
!= 0)
8584 rtx offset
= GEN_INT (cfun
->machine
->frame
.args_size
);
8585 if (SMALL_OPERAND (cfun
->machine
->frame
.args_size
))
8587 (emit_insn (gen_add3_insn (hard_frame_pointer_rtx
,
8592 mips_emit_move (MIPS_PROLOGUE_TEMP (Pmode
), offset
);
8593 mips_emit_move (hard_frame_pointer_rtx
, stack_pointer_rtx
);
8594 emit_insn (gen_add3_insn (hard_frame_pointer_rtx
,
8595 hard_frame_pointer_rtx
,
8596 MIPS_PROLOGUE_TEMP (Pmode
)));
8598 (gen_rtx_SET (VOIDmode
, hard_frame_pointer_rtx
,
8599 plus_constant (stack_pointer_rtx
,
8600 cfun
->machine
->frame
.args_size
)));
8604 RTX_FRAME_RELATED_P (mips_emit_move (hard_frame_pointer_rtx
,
8605 stack_pointer_rtx
)) = 1;
8608 mips_emit_loadgp ();
8610 /* If generating o32/o64 abicalls, save $gp on the stack. */
8611 if (TARGET_ABICALLS
&& TARGET_OLDABI
&& !current_function_is_leaf
)
8612 emit_insn (gen_cprestore (GEN_INT (current_function_outgoing_args_size
)));
8614 /* If we are profiling, make sure no instructions are scheduled before
8615 the call to mcount. */
8617 if (current_function_profile
)
8618 emit_insn (gen_blockage ());
8621 /* Do any necessary cleanup after a function to restore stack, frame,
8624 #define RA_MASK BITMASK_HIGH /* 1 << 31 */
8627 mips_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED
,
8628 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
8630 /* Reinstate the normal $gp. */
8631 SET_REGNO (pic_offset_table_rtx
, GLOBAL_POINTER_REGNUM
);
8632 mips_output_cplocal ();
8634 if (cfun
->machine
->all_noreorder_p
)
8636 /* Avoid using %>%) since it adds excess whitespace. */
8637 output_asm_insn (".set\tmacro", 0);
8638 output_asm_insn (".set\treorder", 0);
8639 set_noreorder
= set_nomacro
= 0;
8642 if (!FUNCTION_NAME_ALREADY_DECLARED
&& !flag_inhibit_size_directive
)
8646 /* Get the function name the same way that toplev.c does before calling
8647 assemble_start_function. This is needed so that the name used here
8648 exactly matches the name used in ASM_DECLARE_FUNCTION_NAME. */
8649 fnname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
8650 fputs ("\t.end\t", file
);
8651 assemble_name (file
, fnname
);
8656 /* Emit instructions to restore register REG from slot MEM. */
8659 mips_restore_reg (rtx reg
, rtx mem
)
8661 /* There's no mips16 instruction to load $31 directly. Load into
8662 $7 instead and adjust the return insn appropriately. */
8663 if (TARGET_MIPS16
&& REGNO (reg
) == GP_REG_FIRST
+ 31)
8664 reg
= gen_rtx_REG (GET_MODE (reg
), 7);
8666 if (TARGET_MIPS16
&& !M16_REG_P (REGNO (reg
)))
8668 /* Can't restore directly; move through a temporary. */
8669 mips_emit_move (MIPS_EPILOGUE_TEMP (GET_MODE (reg
)), mem
);
8670 mips_emit_move (reg
, MIPS_EPILOGUE_TEMP (GET_MODE (reg
)));
8673 mips_emit_move (reg
, mem
);
8677 /* Expand the epilogue into a bunch of separate insns. SIBCALL_P is true
8678 if this epilogue precedes a sibling call, false if it is for a normal
8679 "epilogue" pattern. */
8682 mips_expand_epilogue (int sibcall_p
)
8684 HOST_WIDE_INT step1
, step2
;
8687 if (!sibcall_p
&& mips_can_use_return_insn ())
8689 emit_jump_insn (gen_return ());
8693 /* In mips16 mode, if the return value should go into a floating-point
8694 register, we need to call a helper routine to copy it over. */
8695 if (mips16_cfun_returns_in_fpr_p ())
8704 enum machine_mode return_mode
;
8706 return_type
= DECL_RESULT (current_function_decl
);
8707 return_mode
= DECL_MODE (return_type
);
8709 name
= ACONCAT (("__mips16_ret_",
8710 mips16_call_stub_mode_suffix (return_mode
),
8712 id
= get_identifier (name
);
8713 func
= gen_rtx_SYMBOL_REF (Pmode
, IDENTIFIER_POINTER (id
));
8714 retval
= gen_rtx_REG (return_mode
, GP_RETURN
);
8715 call
= gen_call_value_internal (retval
, func
, const0_rtx
);
8716 insn
= emit_call_insn (call
);
8717 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), retval
);
8720 /* Split the frame into two. STEP1 is the amount of stack we should
8721 deallocate before restoring the registers. STEP2 is the amount we
8722 should deallocate afterwards.
8724 Start off by assuming that no registers need to be restored. */
8725 step1
= cfun
->machine
->frame
.total_size
;
8728 /* Work out which register holds the frame address. Account for the
8729 frame pointer offset used by mips16 code. */
8730 if (!frame_pointer_needed
)
8731 base
= stack_pointer_rtx
;
8734 base
= hard_frame_pointer_rtx
;
8736 step1
-= cfun
->machine
->frame
.args_size
;
8739 /* If we need to restore registers, deallocate as much stack as
8740 possible in the second step without going out of range. */
8741 if ((cfun
->machine
->frame
.mask
| cfun
->machine
->frame
.fmask
) != 0)
8743 step2
= MIN (step1
, MIPS_MAX_FIRST_STACK_STEP
);
8747 /* Set TARGET to BASE + STEP1. */
8753 /* Get an rtx for STEP1 that we can add to BASE. */
8754 adjust
= GEN_INT (step1
);
8755 if (!SMALL_OPERAND (step1
))
8757 mips_emit_move (MIPS_EPILOGUE_TEMP (Pmode
), adjust
);
8758 adjust
= MIPS_EPILOGUE_TEMP (Pmode
);
8761 /* Normal mode code can copy the result straight into $sp. */
8763 target
= stack_pointer_rtx
;
8765 emit_insn (gen_add3_insn (target
, base
, adjust
));
8768 /* Copy TARGET into the stack pointer. */
8769 if (target
!= stack_pointer_rtx
)
8770 mips_emit_move (stack_pointer_rtx
, target
);
8772 /* If we're using addressing macros, $gp is implicitly used by all
8773 SYMBOL_REFs. We must emit a blockage insn before restoring $gp
8775 if (TARGET_CALL_SAVED_GP
&& !TARGET_EXPLICIT_RELOCS
)
8776 emit_insn (gen_blockage ());
8778 if (GENERATE_MIPS16E_SAVE_RESTORE
&& cfun
->machine
->frame
.mask
!= 0)
8780 unsigned int regno
, mask
;
8781 HOST_WIDE_INT offset
;
8784 /* Generate the restore instruction. */
8785 mask
= cfun
->machine
->frame
.mask
;
8786 restore
= mips16e_build_save_restore (true, &mask
, &offset
, 0, step2
);
8788 /* Restore any other registers manually. */
8789 for (regno
= GP_REG_FIRST
; regno
< GP_REG_LAST
; regno
++)
8790 if (BITSET_P (mask
, regno
- GP_REG_FIRST
))
8792 offset
-= GET_MODE_SIZE (gpr_mode
);
8793 mips_save_restore_reg (gpr_mode
, regno
, offset
, mips_restore_reg
);
8796 /* Restore the remaining registers and deallocate the final bit
8798 emit_insn (restore
);
8802 /* Restore the registers. */
8803 mips_for_each_saved_reg (cfun
->machine
->frame
.total_size
- step2
,
8806 /* Deallocate the final bit of the frame. */
8808 emit_insn (gen_add3_insn (stack_pointer_rtx
,
8813 /* Add in the __builtin_eh_return stack adjustment. We need to
8814 use a temporary in mips16 code. */
8815 if (current_function_calls_eh_return
)
8819 mips_emit_move (MIPS_EPILOGUE_TEMP (Pmode
), stack_pointer_rtx
);
8820 emit_insn (gen_add3_insn (MIPS_EPILOGUE_TEMP (Pmode
),
8821 MIPS_EPILOGUE_TEMP (Pmode
),
8822 EH_RETURN_STACKADJ_RTX
));
8823 mips_emit_move (stack_pointer_rtx
, MIPS_EPILOGUE_TEMP (Pmode
));
8826 emit_insn (gen_add3_insn (stack_pointer_rtx
,
8828 EH_RETURN_STACKADJ_RTX
));
8833 /* When generating MIPS16 code, the normal mips_for_each_saved_reg
8834 path will restore the return address into $7 rather than $31. */
8836 && !GENERATE_MIPS16E_SAVE_RESTORE
8837 && (cfun
->machine
->frame
.mask
& RA_MASK
) != 0)
8838 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
,
8839 GP_REG_FIRST
+ 7)));
8841 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
,
8842 GP_REG_FIRST
+ 31)));
8846 /* Return nonzero if this function is known to have a null epilogue.
8847 This allows the optimizer to omit jumps to jumps if no stack
8851 mips_can_use_return_insn (void)
8853 if (! reload_completed
)
8856 if (df_regs_ever_live_p (31) || current_function_profile
)
8859 /* In mips16 mode, a function that returns a floating point value
8860 needs to arrange to copy the return value into the floating point
8862 if (mips16_cfun_returns_in_fpr_p ())
8865 if (cfun
->machine
->frame
.initialized
)
8866 return cfun
->machine
->frame
.total_size
== 0;
8868 return compute_frame_size (get_frame_size ()) == 0;
8871 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. Generate rtl rather than asm text
8872 in order to avoid duplicating too much logic from elsewhere. */
8875 mips_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
8876 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
8879 rtx
this, temp1
, temp2
, insn
, fnaddr
;
8882 /* Pretend to be a post-reload pass while generating rtl. */
8883 reload_completed
= 1;
8885 /* Mark the end of the (empty) prologue. */
8886 emit_note (NOTE_INSN_PROLOGUE_END
);
8888 /* Determine if we can use a sibcall to call FUNCTION directly. */
8889 fnaddr
= XEXP (DECL_RTL (function
), 0);
8890 use_sibcall_p
= (mips_function_ok_for_sibcall (function
, NULL
)
8891 && const_call_insn_operand (fnaddr
, Pmode
));
8893 /* Determine if we need to load FNADDR from the GOT. */
8895 switch (mips_classify_symbol (fnaddr
, SYMBOL_CONTEXT_LEA
))
8897 case SYMBOL_GOT_PAGE_OFST
:
8898 case SYMBOL_GOT_DISP
:
8899 /* Pick a global pointer. Use a call-clobbered register if
8900 TARGET_CALL_SAVED_GP. */
8901 cfun
->machine
->global_pointer
=
8902 TARGET_CALL_SAVED_GP
? 15 : GLOBAL_POINTER_REGNUM
;
8903 SET_REGNO (pic_offset_table_rtx
, cfun
->machine
->global_pointer
);
8905 /* Set up the global pointer for n32 or n64 abicalls. */
8906 mips_emit_loadgp ();
8913 /* We need two temporary registers in some cases. */
8914 temp1
= gen_rtx_REG (Pmode
, 2);
8915 temp2
= gen_rtx_REG (Pmode
, 3);
8917 /* Find out which register contains the "this" pointer. */
8918 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
8919 this = gen_rtx_REG (Pmode
, GP_ARG_FIRST
+ 1);
8921 this = gen_rtx_REG (Pmode
, GP_ARG_FIRST
);
8923 /* Add DELTA to THIS. */
8926 rtx offset
= GEN_INT (delta
);
8927 if (!SMALL_OPERAND (delta
))
8929 mips_emit_move (temp1
, offset
);
8932 emit_insn (gen_add3_insn (this, this, offset
));
8935 /* If needed, add *(*THIS + VCALL_OFFSET) to THIS. */
8936 if (vcall_offset
!= 0)
8940 /* Set TEMP1 to *THIS. */
8941 mips_emit_move (temp1
, gen_rtx_MEM (Pmode
, this));
8943 /* Set ADDR to a legitimate address for *THIS + VCALL_OFFSET. */
8944 addr
= mips_add_offset (temp2
, temp1
, vcall_offset
);
8946 /* Load the offset and add it to THIS. */
8947 mips_emit_move (temp1
, gen_rtx_MEM (Pmode
, addr
));
8948 emit_insn (gen_add3_insn (this, this, temp1
));
8951 /* Jump to the target function. Use a sibcall if direct jumps are
8952 allowed, otherwise load the address into a register first. */
8955 insn
= emit_call_insn (gen_sibcall_internal (fnaddr
, const0_rtx
));
8956 SIBLING_CALL_P (insn
) = 1;
8960 /* This is messy. gas treats "la $25,foo" as part of a call
8961 sequence and may allow a global "foo" to be lazily bound.
8962 The general move patterns therefore reject this combination.
8964 In this context, lazy binding would actually be OK
8965 for TARGET_CALL_CLOBBERED_GP, but it's still wrong for
8966 TARGET_CALL_SAVED_GP; see mips_load_call_address.
8967 We must therefore load the address via a temporary
8968 register if mips_dangerous_for_la25_p.
8970 If we jump to the temporary register rather than $25, the assembler
8971 can use the move insn to fill the jump's delay slot. */
8972 if (TARGET_USE_PIC_FN_ADDR_REG
8973 && !mips_dangerous_for_la25_p (fnaddr
))
8974 temp1
= gen_rtx_REG (Pmode
, PIC_FUNCTION_ADDR_REGNUM
);
8975 mips_load_call_address (temp1
, fnaddr
, true);
8977 if (TARGET_USE_PIC_FN_ADDR_REG
8978 && REGNO (temp1
) != PIC_FUNCTION_ADDR_REGNUM
)
8979 mips_emit_move (gen_rtx_REG (Pmode
, PIC_FUNCTION_ADDR_REGNUM
), temp1
);
8980 emit_jump_insn (gen_indirect_jump (temp1
));
8983 /* Run just enough of rest_of_compilation. This sequence was
8984 "borrowed" from alpha.c. */
8985 insn
= get_insns ();
8986 insn_locators_alloc ();
8987 split_all_insns_noflow ();
8988 mips16_lay_out_constants ();
8989 shorten_branches (insn
);
8990 final_start_function (insn
, file
, 1);
8991 final (insn
, file
, 1);
8992 final_end_function ();
8994 /* Clean up the vars set above. Note that final_end_function resets
8995 the global pointer for us. */
8996 reload_completed
= 0;
8999 /* Implement TARGET_SELECT_RTX_SECTION. */
9002 mips_select_rtx_section (enum machine_mode mode
, rtx x
,
9003 unsigned HOST_WIDE_INT align
)
9005 /* ??? Consider using mergeable small data sections. */
9006 if (mips_rtx_constant_in_small_data_p (mode
))
9007 return get_named_section (NULL
, ".sdata", 0);
9009 return default_elf_select_rtx_section (mode
, x
, align
);
9012 /* Implement TARGET_ASM_FUNCTION_RODATA_SECTION.
9014 The complication here is that, with the combination TARGET_ABICALLS
9015 && !TARGET_GPWORD, jump tables will use absolute addresses, and should
9016 therefore not be included in the read-only part of a DSO. Handle such
9017 cases by selecting a normal data section instead of a read-only one.
9018 The logic apes that in default_function_rodata_section. */
9021 mips_function_rodata_section (tree decl
)
9023 if (!TARGET_ABICALLS
|| TARGET_GPWORD
)
9024 return default_function_rodata_section (decl
);
9026 if (decl
&& DECL_SECTION_NAME (decl
))
9028 const char *name
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
9029 if (DECL_ONE_ONLY (decl
) && strncmp (name
, ".gnu.linkonce.t.", 16) == 0)
9031 char *rname
= ASTRDUP (name
);
9033 return get_section (rname
, SECTION_LINKONCE
| SECTION_WRITE
, decl
);
9035 else if (flag_function_sections
&& flag_data_sections
9036 && strncmp (name
, ".text.", 6) == 0)
9038 char *rname
= ASTRDUP (name
);
9039 memcpy (rname
+ 1, "data", 4);
9040 return get_section (rname
, SECTION_WRITE
, decl
);
9043 return data_section
;
9046 /* Implement TARGET_IN_SMALL_DATA_P. This function controls whether
9047 locally-defined objects go in a small data section. It also controls
9048 the setting of the SYMBOL_REF_SMALL_P flag, which in turn helps
9049 mips_classify_symbol decide when to use %gp_rel(...)($gp) accesses. */
9052 mips_in_small_data_p (const_tree decl
)
9056 if (TREE_CODE (decl
) == STRING_CST
|| TREE_CODE (decl
) == FUNCTION_DECL
)
9059 /* We don't yet generate small-data references for -mabicalls or
9060 VxWorks RTP code. See the related -G handling in override_options. */
9061 if (TARGET_ABICALLS
|| TARGET_VXWORKS_RTP
)
9064 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
) != 0)
9068 /* Reject anything that isn't in a known small-data section. */
9069 name
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
9070 if (strcmp (name
, ".sdata") != 0 && strcmp (name
, ".sbss") != 0)
9073 /* If a symbol is defined externally, the assembler will use the
9074 usual -G rules when deciding how to implement macros. */
9075 if (mips_lo_relocs
[SYMBOL_GP_RELATIVE
] || !DECL_EXTERNAL (decl
))
9078 else if (TARGET_EMBEDDED_DATA
)
9080 /* Don't put constants into the small data section: we want them
9081 to be in ROM rather than RAM. */
9082 if (TREE_CODE (decl
) != VAR_DECL
)
9085 if (TREE_READONLY (decl
)
9086 && !TREE_SIDE_EFFECTS (decl
)
9087 && (!DECL_INITIAL (decl
) || TREE_CONSTANT (DECL_INITIAL (decl
))))
9091 /* Enforce -mlocal-sdata. */
9092 if (!TARGET_LOCAL_SDATA
&& !TREE_PUBLIC (decl
))
9095 /* Enforce -mextern-sdata. */
9096 if (!TARGET_EXTERN_SDATA
&& DECL_P (decl
))
9098 if (DECL_EXTERNAL (decl
))
9100 if (DECL_COMMON (decl
) && DECL_INITIAL (decl
) == NULL
)
9104 size
= int_size_in_bytes (TREE_TYPE (decl
));
9105 return (size
> 0 && size
<= mips_section_threshold
);
9108 /* Implement TARGET_USE_ANCHORS_FOR_SYMBOL_P. We don't want to use
9109 anchors for small data: the GP register acts as an anchor in that
9110 case. We also don't want to use them for PC-relative accesses,
9111 where the PC acts as an anchor. */
9114 mips_use_anchors_for_symbol_p (const_rtx symbol
)
9116 switch (mips_classify_symbol (symbol
, SYMBOL_CONTEXT_MEM
))
9118 case SYMBOL_PC_RELATIVE
:
9119 case SYMBOL_GP_RELATIVE
:
9123 return default_use_anchors_for_symbol_p (symbol
);
9127 /* See whether VALTYPE is a record whose fields should be returned in
9128 floating-point registers. If so, return the number of fields and
9129 list them in FIELDS (which should have two elements). Return 0
9132 For n32 & n64, a structure with one or two fields is returned in
9133 floating-point registers as long as every field has a floating-point
9137 mips_fpr_return_fields (const_tree valtype
, tree
*fields
)
9145 if (TREE_CODE (valtype
) != RECORD_TYPE
)
9149 for (field
= TYPE_FIELDS (valtype
); field
!= 0; field
= TREE_CHAIN (field
))
9151 if (TREE_CODE (field
) != FIELD_DECL
)
9154 if (TREE_CODE (TREE_TYPE (field
)) != REAL_TYPE
)
9160 fields
[i
++] = field
;
9166 /* Implement TARGET_RETURN_IN_MSB. For n32 & n64, we should return
9167 a value in the most significant part of $2/$3 if:
9169 - the target is big-endian;
9171 - the value has a structure or union type (we generalize this to
9172 cover aggregates from other languages too); and
9174 - the structure is not returned in floating-point registers. */
9177 mips_return_in_msb (const_tree valtype
)
9181 return (TARGET_NEWABI
9182 && TARGET_BIG_ENDIAN
9183 && AGGREGATE_TYPE_P (valtype
)
9184 && mips_fpr_return_fields (valtype
, fields
) == 0);
9188 /* Return a composite value in a pair of floating-point registers.
9189 MODE1 and OFFSET1 are the mode and byte offset for the first value,
9190 likewise MODE2 and OFFSET2 for the second. MODE is the mode of the
9193 For n32 & n64, $f0 always holds the first value and $f2 the second.
9194 Otherwise the values are packed together as closely as possible. */
9197 mips_return_fpr_pair (enum machine_mode mode
,
9198 enum machine_mode mode1
, HOST_WIDE_INT offset1
,
9199 enum machine_mode mode2
, HOST_WIDE_INT offset2
)
9203 inc
= (TARGET_NEWABI
? 2 : MAX_FPRS_PER_FMT
);
9204 return gen_rtx_PARALLEL
9207 gen_rtx_EXPR_LIST (VOIDmode
,
9208 gen_rtx_REG (mode1
, FP_RETURN
),
9210 gen_rtx_EXPR_LIST (VOIDmode
,
9211 gen_rtx_REG (mode2
, FP_RETURN
+ inc
),
9212 GEN_INT (offset2
))));
9217 /* Implement FUNCTION_VALUE and LIBCALL_VALUE. For normal calls,
9218 VALTYPE is the return type and MODE is VOIDmode. For libcalls,
9219 VALTYPE is null and MODE is the mode of the return value. */
9222 mips_function_value (const_tree valtype
, const_tree func ATTRIBUTE_UNUSED
,
9223 enum machine_mode mode
)
9230 mode
= TYPE_MODE (valtype
);
9231 unsignedp
= TYPE_UNSIGNED (valtype
);
9233 /* Since we define TARGET_PROMOTE_FUNCTION_RETURN that returns
9234 true, we must promote the mode just as PROMOTE_MODE does. */
9235 mode
= promote_mode (valtype
, mode
, &unsignedp
, 1);
9237 /* Handle structures whose fields are returned in $f0/$f2. */
9238 switch (mips_fpr_return_fields (valtype
, fields
))
9241 return gen_rtx_REG (mode
, FP_RETURN
);
9244 return mips_return_fpr_pair (mode
,
9245 TYPE_MODE (TREE_TYPE (fields
[0])),
9246 int_byte_position (fields
[0]),
9247 TYPE_MODE (TREE_TYPE (fields
[1])),
9248 int_byte_position (fields
[1]));
9251 /* If a value is passed in the most significant part of a register, see
9252 whether we have to round the mode up to a whole number of words. */
9253 if (mips_return_in_msb (valtype
))
9255 HOST_WIDE_INT size
= int_size_in_bytes (valtype
);
9256 if (size
% UNITS_PER_WORD
!= 0)
9258 size
+= UNITS_PER_WORD
- size
% UNITS_PER_WORD
;
9259 mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0);
9263 /* For EABI, the class of return register depends entirely on MODE.
9264 For example, "struct { some_type x; }" and "union { some_type x; }"
9265 are returned in the same way as a bare "some_type" would be.
9266 Other ABIs only use FPRs for scalar, complex or vector types. */
9267 if (mips_abi
!= ABI_EABI
&& !FLOAT_TYPE_P (valtype
))
9268 return gen_rtx_REG (mode
, GP_RETURN
);
9273 /* Handle long doubles for n32 & n64. */
9275 return mips_return_fpr_pair (mode
,
9277 DImode
, GET_MODE_SIZE (mode
) / 2);
9279 if (mips_return_mode_in_fpr_p (mode
))
9281 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
9282 return mips_return_fpr_pair (mode
,
9283 GET_MODE_INNER (mode
), 0,
9284 GET_MODE_INNER (mode
),
9285 GET_MODE_SIZE (mode
) / 2);
9287 return gen_rtx_REG (mode
, FP_RETURN
);
9291 return gen_rtx_REG (mode
, GP_RETURN
);
9294 /* Return nonzero when an argument must be passed by reference. */
9297 mips_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
9298 enum machine_mode mode
, const_tree type
,
9299 bool named ATTRIBUTE_UNUSED
)
9301 if (mips_abi
== ABI_EABI
)
9305 /* ??? How should SCmode be handled? */
9306 if (mode
== DImode
|| mode
== DFmode
9307 || mode
== DQmode
|| mode
== UDQmode
9308 || mode
== DAmode
|| mode
== UDAmode
)
9311 size
= type
? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
9312 return size
== -1 || size
> UNITS_PER_WORD
;
9316 /* If we have a variable-sized parameter, we have no choice. */
9317 return targetm
.calls
.must_pass_in_stack (mode
, type
);
9322 mips_callee_copies (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
9323 enum machine_mode mode ATTRIBUTE_UNUSED
,
9324 const_tree type ATTRIBUTE_UNUSED
, bool named
)
9326 return mips_abi
== ABI_EABI
&& named
;
9329 /* Return true if registers of class CLASS cannot change from mode FROM
9333 mips_cannot_change_mode_class (enum machine_mode from
,
9334 enum machine_mode to
, enum reg_class
class)
9336 if (MIN (GET_MODE_SIZE (from
), GET_MODE_SIZE (to
)) <= UNITS_PER_WORD
9337 && MAX (GET_MODE_SIZE (from
), GET_MODE_SIZE (to
)) > UNITS_PER_WORD
)
9339 if (TARGET_BIG_ENDIAN
)
9341 /* When a multi-word value is stored in paired floating-point
9342 registers, the first register always holds the low word.
9343 We therefore can't allow FPRs to change between single-word
9344 and multi-word modes. */
9345 if (MAX_FPRS_PER_FMT
> 1 && reg_classes_intersect_p (FP_REGS
, class))
9350 /* gcc assumes that each word of a multiword register can be accessed
9351 individually using SUBREGs. This is not true for floating-point
9352 registers if they are bigger than a word. */
9353 if (UNITS_PER_FPREG
> UNITS_PER_WORD
9354 && GET_MODE_SIZE (from
) > UNITS_PER_WORD
9355 && GET_MODE_SIZE (to
) < UNITS_PER_FPREG
9356 && reg_classes_intersect_p (FP_REGS
, class))
9359 /* Loading a 32-bit value into a 64-bit floating-point register
9360 will not sign-extend the value, despite what LOAD_EXTEND_OP says.
9361 We can't allow 64-bit float registers to change from SImode to
9366 && GET_MODE_SIZE (to
) >= UNITS_PER_WORD
9367 && reg_classes_intersect_p (FP_REGS
, class))
9373 /* Return true if X should not be moved directly into register $25.
9374 We need this because many versions of GAS will treat "la $25,foo" as
9375 part of a call sequence and so allow a global "foo" to be lazily bound. */
9378 mips_dangerous_for_la25_p (rtx x
)
9380 return (!TARGET_EXPLICIT_RELOCS
9382 && GET_CODE (x
) == SYMBOL_REF
9383 && mips_global_symbol_p (x
));
9386 /* Implement PREFERRED_RELOAD_CLASS. */
9389 mips_preferred_reload_class (rtx x
, enum reg_class
class)
9391 if (mips_dangerous_for_la25_p (x
) && reg_class_subset_p (LEA_REGS
, class))
9394 if (TARGET_HARD_FLOAT
9395 && FLOAT_MODE_P (GET_MODE (x
))
9396 && reg_class_subset_p (FP_REGS
, class))
9399 if (reg_class_subset_p (GR_REGS
, class))
9402 if (TARGET_MIPS16
&& reg_class_subset_p (M16_REGS
, class))
9408 /* This function returns the register class required for a secondary
9409 register when copying between one of the registers in CLASS, and X,
9410 using MODE. If IN_P is nonzero, the copy is going from X to the
9411 register, otherwise the register is the source. A return value of
9412 NO_REGS means that no secondary register is required. */
9415 mips_secondary_reload_class (enum reg_class
class,
9416 enum machine_mode mode
, rtx x
, int in_p
)
9418 enum reg_class gr_regs
= TARGET_MIPS16
? M16_REGS
: GR_REGS
;
9422 if (REG_P (x
)|| GET_CODE (x
) == SUBREG
)
9423 regno
= true_regnum (x
);
9425 gp_reg_p
= TARGET_MIPS16
? M16_REG_P (regno
) : GP_REG_P (regno
);
9427 if (mips_dangerous_for_la25_p (x
))
9430 if (TEST_HARD_REG_BIT (reg_class_contents
[(int) class], 25))
9434 /* Copying from HI or LO to anywhere other than a general register
9435 requires a general register.
9436 This rule applies to both the original HI/LO pair and the new
9437 DSP accumulators. */
9438 if (reg_class_subset_p (class, ACC_REGS
))
9440 if (TARGET_MIPS16
&& in_p
)
9442 /* We can't really copy to HI or LO at all in mips16 mode. */
9445 return gp_reg_p
? NO_REGS
: gr_regs
;
9447 if (ACC_REG_P (regno
))
9449 if (TARGET_MIPS16
&& ! in_p
)
9451 /* We can't really copy to HI or LO at all in mips16 mode. */
9454 return class == gr_regs
? NO_REGS
: gr_regs
;
9457 /* We can only copy a value to a condition code register from a
9458 floating point register, and even then we require a scratch
9459 floating point register. We can only copy a value out of a
9460 condition code register into a general register. */
9461 if (class == ST_REGS
)
9465 return gp_reg_p
? NO_REGS
: gr_regs
;
9467 if (ST_REG_P (regno
))
9471 return class == gr_regs
? NO_REGS
: gr_regs
;
9474 if (class == FP_REGS
)
9478 /* In this case we can use lwc1, swc1, ldc1 or sdc1. */
9481 else if (CONSTANT_P (x
) && GET_MODE_CLASS (mode
) == MODE_FLOAT
)
9483 /* We can use the l.s and l.d macros to load floating-point
9484 constants. ??? For l.s, we could probably get better
9485 code by returning GR_REGS here. */
9488 else if (gp_reg_p
|| x
== CONST0_RTX (mode
))
9490 /* In this case we can use mtc1, mfc1, dmtc1 or dmfc1. */
9493 else if (FP_REG_P (regno
))
9495 /* In this case we can use mov.s or mov.d. */
9500 /* Otherwise, we need to reload through an integer register. */
9505 /* In mips16 mode, going between memory and anything but M16_REGS
9506 requires an M16_REG. */
9509 if (class != M16_REGS
&& class != M16_NA_REGS
)
9517 if (class == M16_REGS
|| class == M16_NA_REGS
)
9526 /* Implement CLASS_MAX_NREGS.
9528 - UNITS_PER_FPREG controls the number of registers needed by FP_REGS.
9530 - ST_REGS are always hold CCmode values, and CCmode values are
9531 considered to be 4 bytes wide.
9533 All other register classes are covered by UNITS_PER_WORD. Note that
9534 this is true even for unions of integer and float registers when the
9535 latter are smaller than the former. The only supported combination
9536 in which case this occurs is -mgp64 -msingle-float, which has 64-bit
9537 words but 32-bit float registers. A word-based calculation is correct
9538 in that case since -msingle-float disallows multi-FPR values. */
9541 mips_class_max_nregs (enum reg_class
class ATTRIBUTE_UNUSED
,
9542 enum machine_mode mode
)
9544 if (class == ST_REGS
)
9545 return (GET_MODE_SIZE (mode
) + 3) / 4;
9546 else if (class == FP_REGS
)
9547 return (GET_MODE_SIZE (mode
) + UNITS_PER_FPREG
- 1) / UNITS_PER_FPREG
;
9549 return (GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
9553 mips_valid_pointer_mode (enum machine_mode mode
)
9555 return (mode
== SImode
|| (TARGET_64BIT
&& mode
== DImode
));
9558 /* Target hook for vector_mode_supported_p. */
9561 mips_vector_mode_supported_p (enum machine_mode mode
)
9566 return TARGET_PAIRED_SINGLE_FLOAT
;
9583 /* Implement TARGET_SCALAR_MODE_SUPPORTED_P. */
9586 mips_scalar_mode_supported_p (enum machine_mode mode
)
9588 if (ALL_FIXED_POINT_MODE_P (mode
)
9589 && GET_MODE_PRECISION (mode
) <= 2 * BITS_PER_WORD
)
9592 return default_scalar_mode_supported_p (mode
);
9595 /* If we can access small data directly (using gp-relative relocation
9596 operators) return the small data pointer, otherwise return null.
9598 For each mips16 function which refers to GP relative symbols, we
9599 use a pseudo register, initialized at the start of the function, to
9600 hold the $gp value. */
9603 mips16_gp_pseudo_reg (void)
9605 if (cfun
->machine
->mips16_gp_pseudo_rtx
== NULL_RTX
)
9606 cfun
->machine
->mips16_gp_pseudo_rtx
= gen_reg_rtx (Pmode
);
9608 /* Don't initialize the pseudo register if we are being called from
9609 the tree optimizers' cost-calculation routines. */
9610 if (!cfun
->machine
->initialized_mips16_gp_pseudo_p
9611 && (current_ir_type () != IR_GIMPLE
|| currently_expanding_to_rtl
))
9615 /* We want to initialize this to a value which gcc will believe
9617 insn
= gen_load_const_gp (cfun
->machine
->mips16_gp_pseudo_rtx
);
9619 push_topmost_sequence ();
9620 /* We need to emit the initialization after the FUNCTION_BEG
9621 note, so that it will be integrated. */
9622 for (scan
= get_insns (); scan
!= NULL_RTX
; scan
= NEXT_INSN (scan
))
9624 && NOTE_KIND (scan
) == NOTE_INSN_FUNCTION_BEG
)
9626 if (scan
== NULL_RTX
)
9627 scan
= get_insns ();
9628 insn
= emit_insn_after (insn
, scan
);
9629 pop_topmost_sequence ();
9631 cfun
->machine
->initialized_mips16_gp_pseudo_p
= true;
9634 return cfun
->machine
->mips16_gp_pseudo_rtx
;
9637 /* Write out code to move floating point arguments in or out of
9638 general registers. Output the instructions to FILE. FP_CODE is
9639 the code describing which arguments are present (see the comment at
9640 the definition of CUMULATIVE_ARGS in mips.h). FROM_FP_P is nonzero if
9641 we are copying from the floating point registers. */
9644 mips16_fp_args (FILE *file
, int fp_code
, int from_fp_p
)
9649 CUMULATIVE_ARGS cum
;
9651 /* This code only works for the original 32-bit ABI and the O64 ABI. */
9652 gcc_assert (TARGET_OLDABI
);
9659 init_cumulative_args (&cum
, NULL
, NULL
);
9661 for (f
= (unsigned int) fp_code
; f
!= 0; f
>>= 2)
9663 enum machine_mode mode
;
9664 struct mips_arg_info info
;
9668 else if ((f
& 3) == 2)
9673 mips_arg_info (&cum
, mode
, NULL
, true, &info
);
9674 gparg
= mips_arg_regno (&info
, false);
9675 fparg
= mips_arg_regno (&info
, true);
9678 fprintf (file
, "\t%s\t%s,%s\n", s
,
9679 reg_names
[gparg
], reg_names
[fparg
]);
9680 else if (TARGET_64BIT
)
9681 fprintf (file
, "\td%s\t%s,%s\n", s
,
9682 reg_names
[gparg
], reg_names
[fparg
]);
9683 else if (ISA_HAS_MXHC1
)
9684 /* -mips32r2 -mfp64 */
9685 fprintf (file
, "\t%s\t%s,%s\n\t%s\t%s,%s\n",
9687 reg_names
[gparg
+ (WORDS_BIG_ENDIAN
? 1 : 0)],
9689 from_fp_p
? "mfhc1" : "mthc1",
9690 reg_names
[gparg
+ (WORDS_BIG_ENDIAN
? 0 : 1)],
9692 else if (TARGET_BIG_ENDIAN
)
9693 fprintf (file
, "\t%s\t%s,%s\n\t%s\t%s,%s\n", s
,
9694 reg_names
[gparg
], reg_names
[fparg
+ 1], s
,
9695 reg_names
[gparg
+ 1], reg_names
[fparg
]);
9697 fprintf (file
, "\t%s\t%s,%s\n\t%s\t%s,%s\n", s
,
9698 reg_names
[gparg
], reg_names
[fparg
], s
,
9699 reg_names
[gparg
+ 1], reg_names
[fparg
+ 1]);
9701 function_arg_advance (&cum
, mode
, NULL
, true);
9705 /* Build a mips16 function stub. This is used for functions which
9706 take arguments in the floating point registers. It is 32-bit code
9707 that moves the floating point args into the general registers, and
9708 then jumps to the 16-bit code. */
9711 build_mips16_function_stub (FILE *file
)
9714 char *secname
, *stubname
;
9715 tree stubid
, stubdecl
;
9719 fnname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
9720 fnname
= targetm
.strip_name_encoding (fnname
);
9721 secname
= (char *) alloca (strlen (fnname
) + 20);
9722 sprintf (secname
, ".mips16.fn.%s", fnname
);
9723 stubname
= (char *) alloca (strlen (fnname
) + 20);
9724 sprintf (stubname
, "__fn_stub_%s", fnname
);
9725 stubid
= get_identifier (stubname
);
9726 stubdecl
= build_decl (FUNCTION_DECL
, stubid
,
9727 build_function_type (void_type_node
, NULL_TREE
));
9728 DECL_SECTION_NAME (stubdecl
) = build_string (strlen (secname
), secname
);
9729 DECL_RESULT (stubdecl
) = build_decl (RESULT_DECL
, NULL_TREE
, void_type_node
);
9731 fprintf (file
, "\t# Stub function for %s (", current_function_name ());
9733 for (f
= (unsigned int) current_function_args_info
.fp_code
; f
!= 0; f
>>= 2)
9735 fprintf (file
, "%s%s",
9736 need_comma
? ", " : "",
9737 (f
& 3) == 1 ? "float" : "double");
9740 fprintf (file
, ")\n");
9742 fprintf (file
, "\t.set\tnomips16\n");
9743 switch_to_section (function_section (stubdecl
));
9744 ASM_OUTPUT_ALIGN (file
, floor_log2 (FUNCTION_BOUNDARY
/ BITS_PER_UNIT
));
9746 /* ??? If FUNCTION_NAME_ALREADY_DECLARED is defined, then we are
9747 within a .ent, and we cannot emit another .ent. */
9748 if (!FUNCTION_NAME_ALREADY_DECLARED
)
9750 fputs ("\t.ent\t", file
);
9751 assemble_name (file
, stubname
);
9755 assemble_name (file
, stubname
);
9756 fputs (":\n", file
);
9758 /* We don't want the assembler to insert any nops here. */
9759 fprintf (file
, "\t.set\tnoreorder\n");
9761 mips16_fp_args (file
, current_function_args_info
.fp_code
, 1);
9763 fprintf (asm_out_file
, "\t.set\tnoat\n");
9764 fprintf (asm_out_file
, "\tla\t%s,", reg_names
[GP_REG_FIRST
+ 1]);
9765 assemble_name (file
, fnname
);
9766 fprintf (file
, "\n");
9767 fprintf (asm_out_file
, "\tjr\t%s\n", reg_names
[GP_REG_FIRST
+ 1]);
9768 fprintf (asm_out_file
, "\t.set\tat\n");
9770 /* Unfortunately, we can't fill the jump delay slot. We can't fill
9771 with one of the mfc1 instructions, because the result is not
9772 available for one instruction, so if the very first instruction
9773 in the function refers to the register, it will see the wrong
9775 fprintf (file
, "\tnop\n");
9777 fprintf (file
, "\t.set\treorder\n");
9779 if (!FUNCTION_NAME_ALREADY_DECLARED
)
9781 fputs ("\t.end\t", file
);
9782 assemble_name (file
, stubname
);
9786 switch_to_section (function_section (current_function_decl
));
/* We keep a list of functions for which we have already built stubs
   in build_mips16_call_stub.  */

struct mips16_stub
{
  /* Next entry in the singly-linked list of stubs.  */
  struct mips16_stub *next;
  /* The stripped name of the function the stub was built for
     (heap-allocated; compared with strcmp in build_mips16_call_stub).  */
  char *name;
  /* Nonzero if the stub handles a floating-point return value.  */
  int fpret;
};

static struct mips16_stub *mips16_stubs;
9801 /* Emit code to return a double value from a mips16 stub. GPREG is the
9802 first GP reg to use, FPREG is the first FP reg to use. */
9805 mips16_fpret_double (int gpreg
, int fpreg
)
9808 fprintf (asm_out_file
, "\tdmfc1\t%s,%s\n",
9809 reg_names
[gpreg
], reg_names
[fpreg
]);
9810 else if (TARGET_FLOAT64
)
9812 fprintf (asm_out_file
, "\tmfc1\t%s,%s\n",
9813 reg_names
[gpreg
+ WORDS_BIG_ENDIAN
],
9815 fprintf (asm_out_file
, "\tmfhc1\t%s,%s\n",
9816 reg_names
[gpreg
+ !WORDS_BIG_ENDIAN
],
9821 if (TARGET_BIG_ENDIAN
)
9823 fprintf (asm_out_file
, "\tmfc1\t%s,%s\n",
9824 reg_names
[gpreg
+ 0],
9825 reg_names
[fpreg
+ 1]);
9826 fprintf (asm_out_file
, "\tmfc1\t%s,%s\n",
9827 reg_names
[gpreg
+ 1],
9828 reg_names
[fpreg
+ 0]);
9832 fprintf (asm_out_file
, "\tmfc1\t%s,%s\n",
9833 reg_names
[gpreg
+ 0],
9834 reg_names
[fpreg
+ 0]);
9835 fprintf (asm_out_file
, "\tmfc1\t%s,%s\n",
9836 reg_names
[gpreg
+ 1],
9837 reg_names
[fpreg
+ 1]);
9842 /* Build a call stub for a mips16 call. A stub is needed if we are
9843 passing any floating point values which should go into the floating
9844 point registers. If we are, and the call turns out to be to a
9845 32-bit function, the stub will be used to move the values into the
9846 floating point registers before calling the 32-bit function. The
9847 linker will magically adjust the function call to either the 16-bit
9848 function or the 32-bit stub, depending upon where the function call
9849 is actually defined.
9851 Similarly, we need a stub if the return value might come back in a
9852 floating point register.
9854 RETVAL is the location of the return value, or null if this is
9855 a call rather than a call_value. FN is the address of the
9856 function and ARG_SIZE is the size of the arguments. FP_CODE
9857 is the code built by function_arg. This function returns a nonzero
9858 value if it builds the call instruction itself. */
9861 build_mips16_call_stub (rtx retval
, rtx fn
, rtx arg_size
, int fp_code
)
9865 char *secname
, *stubname
;
9866 struct mips16_stub
*l
;
9867 tree stubid
, stubdecl
;
9872 /* We don't need to do anything if we aren't in mips16 mode, or if
9873 we were invoked with the -msoft-float option. */
9874 if (!TARGET_MIPS16
|| TARGET_SOFT_FLOAT_ABI
)
9877 /* Figure out whether the value might come back in a floating point
9880 fpret
= mips_return_mode_in_fpr_p (GET_MODE (retval
));
9882 /* We don't need to do anything if there were no floating point
9883 arguments and the value will not be returned in a floating point
9885 if (fp_code
== 0 && ! fpret
)
9888 /* We don't need to do anything if this is a call to a special
9889 mips16 support function. */
9890 if (GET_CODE (fn
) == SYMBOL_REF
9891 && strncmp (XSTR (fn
, 0), "__mips16_", 9) == 0)
9894 /* This code will only work for o32 and o64 abis. The other ABI's
9895 require more sophisticated support. */
9896 gcc_assert (TARGET_OLDABI
);
9898 /* If we're calling via a function pointer, then we must always call
9899 via a stub. There are magic stubs provided in libgcc.a for each
9900 of the required cases. Each of them expects the function address
9901 to arrive in register $2. */
9903 if (GET_CODE (fn
) != SYMBOL_REF
)
9909 /* ??? If this code is modified to support other ABI's, we need
9910 to handle PARALLEL return values here. */
9913 sprintf (buf
, "__mips16_call_stub_%s_%d",
9914 mips16_call_stub_mode_suffix (GET_MODE (retval
)),
9917 sprintf (buf
, "__mips16_call_stub_%d",
9920 id
= get_identifier (buf
);
9921 stub_fn
= gen_rtx_SYMBOL_REF (Pmode
, IDENTIFIER_POINTER (id
));
9923 mips_emit_move (gen_rtx_REG (Pmode
, 2), fn
);
9925 if (retval
== NULL_RTX
)
9926 insn
= gen_call_internal (stub_fn
, arg_size
);
9928 insn
= gen_call_value_internal (retval
, stub_fn
, arg_size
);
9929 insn
= emit_call_insn (insn
);
9931 /* Put the register usage information on the CALL. */
9932 CALL_INSN_FUNCTION_USAGE (insn
) =
9933 gen_rtx_EXPR_LIST (VOIDmode
,
9934 gen_rtx_USE (VOIDmode
, gen_rtx_REG (Pmode
, 2)),
9935 CALL_INSN_FUNCTION_USAGE (insn
));
9937 /* If we are handling a floating point return value, we need to
9938 save $18 in the function prologue. Putting a note on the
9939 call will mean that df_regs_ever_live_p ($18) will be true if the
9940 call is not eliminated, and we can check that in the prologue
9943 CALL_INSN_FUNCTION_USAGE (insn
) =
9944 gen_rtx_EXPR_LIST (VOIDmode
,
9945 gen_rtx_USE (VOIDmode
,
9946 gen_rtx_REG (word_mode
, 18)),
9947 CALL_INSN_FUNCTION_USAGE (insn
));
9949 /* Return 1 to tell the caller that we've generated the call
9954 /* We know the function we are going to call. If we have already
9955 built a stub, we don't need to do anything further. */
9957 fnname
= targetm
.strip_name_encoding (XSTR (fn
, 0));
9958 for (l
= mips16_stubs
; l
!= NULL
; l
= l
->next
)
9959 if (strcmp (l
->name
, fnname
) == 0)
9964 /* Build a special purpose stub. When the linker sees a
9965 function call in mips16 code, it will check where the target
9966 is defined. If the target is a 32-bit call, the linker will
9967 search for the section defined here. It can tell which
9968 symbol this section is associated with by looking at the
9969 relocation information (the name is unreliable, since this
9970 might be a static function). If such a section is found, the
9971 linker will redirect the call to the start of the magic
9974 If the function does not return a floating point value, the
9975 special stub section is named
9978 If the function does return a floating point value, the stub
9980 .mips16.call.fp.FNNAME
9983 secname
= (char *) alloca (strlen (fnname
) + 40);
9984 sprintf (secname
, ".mips16.call.%s%s",
9987 stubname
= (char *) alloca (strlen (fnname
) + 20);
9988 sprintf (stubname
, "__call_stub_%s%s",
9991 stubid
= get_identifier (stubname
);
9992 stubdecl
= build_decl (FUNCTION_DECL
, stubid
,
9993 build_function_type (void_type_node
, NULL_TREE
));
9994 DECL_SECTION_NAME (stubdecl
) = build_string (strlen (secname
), secname
);
9995 DECL_RESULT (stubdecl
) = build_decl (RESULT_DECL
, NULL_TREE
, void_type_node
);
9997 fprintf (asm_out_file
, "\t# Stub function to call %s%s (",
9999 ? (GET_MODE (retval
) == SFmode
? "float " : "double ")
10003 for (f
= (unsigned int) fp_code
; f
!= 0; f
>>= 2)
10005 fprintf (asm_out_file
, "%s%s",
10006 need_comma
? ", " : "",
10007 (f
& 3) == 1 ? "float" : "double");
10010 fprintf (asm_out_file
, ")\n");
10012 fprintf (asm_out_file
, "\t.set\tnomips16\n");
10013 assemble_start_function (stubdecl
, stubname
);
10015 if (!FUNCTION_NAME_ALREADY_DECLARED
)
10017 fputs ("\t.ent\t", asm_out_file
);
10018 assemble_name (asm_out_file
, stubname
);
10019 fputs ("\n", asm_out_file
);
10021 assemble_name (asm_out_file
, stubname
);
10022 fputs (":\n", asm_out_file
);
10025 /* We build the stub code by hand. That's the only way we can
10026 do it, since we can't generate 32-bit code during a 16-bit
10029 /* We don't want the assembler to insert any nops here. */
10030 fprintf (asm_out_file
, "\t.set\tnoreorder\n");
10032 mips16_fp_args (asm_out_file
, fp_code
, 0);
10036 fprintf (asm_out_file
, "\t.set\tnoat\n");
10037 fprintf (asm_out_file
, "\tla\t%s,%s\n", reg_names
[GP_REG_FIRST
+ 1],
10039 fprintf (asm_out_file
, "\tjr\t%s\n", reg_names
[GP_REG_FIRST
+ 1]);
10040 fprintf (asm_out_file
, "\t.set\tat\n");
10041 /* Unfortunately, we can't fill the jump delay slot. We
10042 can't fill with one of the mtc1 instructions, because the
10043 result is not available for one instruction, so if the
10044 very first instruction in the function refers to the
10045 register, it will see the wrong value. */
10046 fprintf (asm_out_file
, "\tnop\n");
10050 fprintf (asm_out_file
, "\tmove\t%s,%s\n",
10051 reg_names
[GP_REG_FIRST
+ 18], reg_names
[GP_REG_FIRST
+ 31]);
10052 fprintf (asm_out_file
, "\tjal\t%s\n", fnname
);
10053 /* As above, we can't fill the delay slot. */
10054 fprintf (asm_out_file
, "\tnop\n");
10055 switch (GET_MODE (retval
))
10058 fprintf (asm_out_file
, "\tmfc1\t%s,%s\n",
10059 reg_names
[GP_REG_FIRST
+ 3],
10060 reg_names
[FP_REG_FIRST
+ MAX_FPRS_PER_FMT
]);
10063 fprintf (asm_out_file
, "\tmfc1\t%s,%s\n",
10064 reg_names
[GP_REG_FIRST
+ 2],
10065 reg_names
[FP_REG_FIRST
+ 0]);
10069 mips16_fpret_double (GP_REG_FIRST
+ 2 + (8 / UNITS_PER_WORD
),
10070 FP_REG_FIRST
+ MAX_FPRS_PER_FMT
);
10074 mips16_fpret_double (GP_REG_FIRST
+ 2, FP_REG_FIRST
+ 0);
10078 gcc_unreachable ();
10080 fprintf (asm_out_file
, "\tj\t%s\n", reg_names
[GP_REG_FIRST
+ 18]);
10081 /* As above, we can't fill the delay slot. */
10082 fprintf (asm_out_file
, "\tnop\n");
10085 fprintf (asm_out_file
, "\t.set\treorder\n");
10087 #ifdef ASM_DECLARE_FUNCTION_SIZE
10088 ASM_DECLARE_FUNCTION_SIZE (asm_out_file
, stubname
, stubdecl
);
10091 if (!FUNCTION_NAME_ALREADY_DECLARED
)
10093 fputs ("\t.end\t", asm_out_file
);
10094 assemble_name (asm_out_file
, stubname
);
10095 fputs ("\n", asm_out_file
);
10098 /* Record this stub. */
10099 l
= (struct mips16_stub
*) xmalloc (sizeof *l
);
10100 l
->name
= xstrdup (fnname
);
10102 l
->next
= mips16_stubs
;
10106 /* If we expect a floating point return value, but we've built a
10107 stub which does not expect one, then we're in trouble. We can't
10108 use the existing stub, because it won't handle the floating point
10109 value. We can't build a new stub, because the linker won't know
10110 which stub to use for the various calls in this object file.
10111 Fortunately, this case is illegal, since it means that a function
10112 was declared in two different ways in a single compilation. */
10113 if (fpret
&& ! l
->fpret
)
10114 error ("cannot handle inconsistent calls to %qs", fnname
);
10116 if (retval
== NULL_RTX
)
10117 insn
= gen_call_internal_direct (fn
, arg_size
);
10119 insn
= gen_call_value_internal_direct (retval
, fn
, arg_size
);
10120 insn
= emit_call_insn (insn
);
10122 /* If we are calling a stub which handles a floating point return
10123 value, we need to arrange to save $18 in the prologue. We do
10124 this by marking the function call as using the register. The
10125 prologue will later see that it is used, and emit code to save
10128 CALL_INSN_FUNCTION_USAGE (insn
) =
10129 gen_rtx_EXPR_LIST (VOIDmode
,
10130 gen_rtx_USE (VOIDmode
, gen_rtx_REG (word_mode
, 18)),
10131 CALL_INSN_FUNCTION_USAGE (insn
));
10133 /* Return 1 to tell the caller that we've generated the call
/* NOTE(review): this excerpt is a reflowed extraction with original line
   numbers fused into the text; original lines 10143-10144 -- presumably the
   VALUE and LABEL fields named in the comment below -- are missing here.
   Confirm against the full mips.c before relying on this definition.  */
10138 /* An entry in the mips16 constant pool. VALUE is the pool constant,
10139 MODE is its mode, and LABEL is the CODE_LABEL associated with it. */
10141 struct mips16_constant
{
10142 struct mips16_constant
*next
;
10145 enum machine_mode mode
;
/* NOTE(review): garbled excerpt -- the tail of the comment and the
   INSN_ADDRESS field (referenced later as pool->insn_address, around
   original lines 10151-10156) are missing here; verify against the
   full source.  */
10148 /* Information about an incomplete mips16 constant pool. FIRST is the
10149 first constant, HIGHEST_ADDRESS is the highest address that the first
10150 byte of the pool can have, and INSN_ADDRESS is the current instruction
10153 struct mips16_constant_pool
{
10154 struct mips16_constant
*first
;
10155 int highest_address
;
/* Pool-insertion helper for the MIPS16 constant pool.  Walks the
   size-sorted pool list for an existing (mode, value) entry; otherwise
   tightens the pool's worst-case address budget and allocates a new entry.
   NOTE(review): garbled excerpt -- the return-type line, several braces,
   and the tail (orig lines ~10206-10214: linking C into the list and
   returning c->label) are missing; confirm against the full source.  */
10159 /* Add constant VALUE to POOL and return its label. MODE is the
10160 value's mode (used for CONST_INTs, etc.). */
10163 add_constant (struct mips16_constant_pool
*pool
,
10164 rtx value
, enum machine_mode mode
)
10166 struct mips16_constant
**p
, *c
;
10167 bool first_of_size_p
;
10169 /* See whether the constant is already in the pool. If so, return the
10170 existing label, otherwise leave P pointing to the place where the
10171 constant should be added.
10173 Keep the pool sorted in increasing order of mode size so that we can
10174 reduce the number of alignments needed. */
10175 first_of_size_p
= true;
10176 for (p
= &pool
->first
; *p
!= 0; p
= &(*p
)->next
)
10178 if (mode
== (*p
)->mode
&& rtx_equal_p (value
, (*p
)->value
))
10179 return (*p
)->label
;
10180 if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE ((*p
)->mode
))
10182 if (GET_MODE_SIZE (mode
) == GET_MODE_SIZE ((*p
)->mode
))
10183 first_of_size_p
= false;
10186 /* In the worst case, the constant needed by the earliest instruction
10187 will end up at the end of the pool. The entire pool must then be
10188 accessible from that instruction.
10190 When adding the first constant, set the pool's highest address to
10191 the address of the first out-of-range byte. Adjust this address
10192 downwards each time a new constant is added. */
10193 if (pool
->first
== 0)
10194 /* For pc-relative lw, addiu and daddiu instructions, the base PC value
10195 is the address of the instruction with the lowest two bits clear.
10196 The base PC value for ld has the lowest three bits clear. Assume
10197 the worst case here. */
10198 pool
->highest_address
= pool
->insn_address
- (UNITS_PER_WORD
- 2) + 0x8000;
10199 pool
->highest_address
-= GET_MODE_SIZE (mode
);
10200 if (first_of_size_p
)
10201 /* Take into account the worst possible padding due to alignment. */
10202 pool
->highest_address
-= GET_MODE_SIZE (mode
) - 1;
10204 /* Create a new entry. */
10205 c
= (struct mips16_constant
*) xmalloc (sizeof *c
);
10208 c
->label
= gen_label_rtx ();
/* Emits one pool constant as a consttable_int / consttable_float insn,
   recursing element-wise for vector modes.  NOTE(review): garbled excerpt;
   braces, the `int i' declaration, the vector-case `return insn;' and the
   final closing lines are missing -- confirm against the full source.  */
10215 /* Output constant VALUE after instruction INSN and return the last
10216 instruction emitted. MODE is the mode of the constant. */
10219 dump_constants_1 (enum machine_mode mode
, rtx value
, rtx insn
)
10221 if (SCALAR_INT_MODE_P (mode
)
10222 || ALL_SCALAR_FRACT_MODE_P (mode
)
10223 || ALL_SCALAR_ACCUM_MODE_P (mode
))
10225 rtx size
= GEN_INT (GET_MODE_SIZE (mode
));
10226 return emit_insn_after (gen_consttable_int (value
, size
), insn
);
10229 if (SCALAR_FLOAT_MODE_P (mode
))
10230 return emit_insn_after (gen_consttable_float (value
), insn
);
10232 if (VECTOR_MODE_P (mode
))
10236 for (i
= 0; i
< CONST_VECTOR_NUNITS (value
); i
++)
10237 insn
= dump_constants_1 (GET_MODE_INNER (mode
),
10238 CONST_VECTOR_ELT (value
, i
), insn
);
/* Any other mode is unexpected for a MIPS16 pool constant.  */
10242 gcc_unreachable ();
/* Walks the pool list, emitting an alignment directive whenever the mode
   size grows, then the constant's label and value, finishing with a
   barrier.  NOTE(review): garbled excerpt; the `align' initialization,
   braces, and the free()/next bookkeeping lines are missing from view.  */
10246 /* Dump out the constants in CONSTANTS after INSN. */
10249 dump_constants (struct mips16_constant
*constants
, rtx insn
)
10251 struct mips16_constant
*c
, *next
;
10255 for (c
= constants
; c
!= NULL
; c
= next
)
10257 /* If necessary, increase the alignment of PC. */
10258 if (align
< GET_MODE_SIZE (c
->mode
))
10260 int align_log
= floor_log2 (GET_MODE_SIZE (c
->mode
));
10261 insn
= emit_insn_after (gen_align (GEN_INT (align_log
)), insn
);
10263 align
= GET_MODE_SIZE (c
->mode
);
10265 insn
= emit_label_after (c
->label
, insn
);
10266 insn
= dump_constants_1 (c
->mode
, c
->value
, insn
);
10272 emit_barrier_after (insn
);
/* Length in bytes of INSN.  Jump tables (ADDR_VEC / ADDR_DIFF_VEC) are
   sized as entry-size times entry-count; everything else defers to the
   md-file length attribute.  NOTE(review): garbled excerpt -- the lines
   guarding the PATTERN access (presumably a JUMP_P test, orig 10279-10281)
   are missing; confirm against the full source.  */
10275 /* Return the length of instruction INSN. */
10278 mips16_insn_length (rtx insn
)
10282 rtx body
= PATTERN (insn
);
10283 if (GET_CODE (body
) == ADDR_VEC
)
10284 return GET_MODE_SIZE (GET_MODE (body
)) * XVECLEN (body
, 0);
10285 if (GET_CODE (body
) == ADDR_DIFF_VEC
)
/* ADDR_DIFF_VEC keeps its entries in operand 1, not operand 0.  */
10286 return GET_MODE_SIZE (GET_MODE (body
)) * XVECLEN (body
, 1);
10288 return get_attr_length (insn
);
/* Splits *X into base+offset; when the base is a constant-pool SYMBOL_REF,
   moves the constant into POOL (via add_constant) and rewrites *X as a
   pc-relative reference to the pool entry's label.  */
10291 /* If *X is a symbolic constant that refers to the constant pool, add
10292 the constant to POOL and rewrite *X to use the constant's label. */
10295 mips16_rewrite_pool_constant (struct mips16_constant_pool
*pool
, rtx
*x
)
10297 rtx base
, offset
, label
;
10299 split_const (*x
, &base
, &offset
);
10300 if (GET_CODE (base
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (base
))
10302 label
= add_constant (pool
, get_pool_constant (base
),
10303 get_pool_mode (base
));
10304 base
= gen_rtx_LABEL_REF (Pmode
, label
);
10305 *x
= mips_unspec_address_offset (base
, offset
, SYMBOL_PC_RELATIVE
);
/* NOTE(review): garbled excerpt -- the INSN field (used later as
   info->insn) and the end of the comment are missing from this view.  */
10309 /* This structure is used to communicate with mips16_rewrite_pool_refs.
10310 INSN is the instruction we're rewriting and POOL points to the current
10312 struct mips16_rewrite_pool_refs_info
{
10314 struct mips16_constant_pool
*pool
;
/* for_each_rtx callback: forces force_to_mem operands into the constant
   pool, then redirects pool references (inside MEM addresses and, with
   -mcode-readable, bare constants) to pool labels.  Returns -1 for CONST
   so the walk does not descend into already-rewritten subexpressions.
   NOTE(review): garbled excerpt; braces and the MEM_P test that guards the
   XEXP (*x, 0) rewrite (orig ~10332-10333) are missing from view.  */
10317 /* Rewrite *X so that constant pool references refer to the constant's
10318 label instead. DATA points to a mips16_rewrite_pool_refs_info
10322 mips16_rewrite_pool_refs (rtx
*x
, void *data
)
10324 struct mips16_rewrite_pool_refs_info
*info
= data
;
10326 if (force_to_mem_operand (*x
, Pmode
))
10328 rtx mem
= force_const_mem (GET_MODE (*x
), *x
);
10329 validate_change (info
->insn
, x
, mem
, false);
10334 mips16_rewrite_pool_constant (info
->pool
, &XEXP (*x
, 0));
10338 if (TARGET_MIPS16_TEXT_LOADS
)
10339 mips16_rewrite_pool_constant (info
->pool
, x
);
10341 return GET_CODE (*x
) == CONST
? -1 : 0;
/* Main driver for MIPS16 constant-pool layout: rewrites pool references in
   every insn, tracks the running instruction address, and flushes the pool
   (after an existing barrier, or after a jump-around it synthesizes) before
   the pool would go out of pc-relative range.  NOTE(review): garbled
   excerpt; `rtx insn, barrier, label, jump' declarations, several braces,
   and the pool-reset statements after each dump are missing from view.  */
10344 /* Build MIPS16 constant pools. */
10347 mips16_lay_out_constants (void)
10349 struct mips16_constant_pool pool
;
10350 struct mips16_rewrite_pool_refs_info info
;
10353 if (!TARGET_MIPS16_PCREL_LOADS
)
10357 memset (&pool
, 0, sizeof (pool
));
10358 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
10360 /* Rewrite constant pool references in INSN. */
10365 for_each_rtx (&PATTERN (insn
), mips16_rewrite_pool_refs
, &info
);
10368 pool
.insn_address
+= mips16_insn_length (insn
);
10370 if (pool
.first
!= NULL
)
10372 /* If there are no natural barriers between the first user of
10373 the pool and the highest acceptable address, we'll need to
10374 create a new instruction to jump around the constant pool.
10375 In the worst case, this instruction will be 4 bytes long.
10377 If it's too late to do this transformation after INSN,
10378 do it immediately before INSN. */
10379 if (barrier
== 0 && pool
.insn_address
+ 4 > pool
.highest_address
)
10383 label
= gen_label_rtx ();
10385 jump
= emit_jump_insn_before (gen_jump (label
), insn
);
10386 JUMP_LABEL (jump
) = label
;
10387 LABEL_NUSES (label
) = 1;
10388 barrier
= emit_barrier_after (jump
);
10390 emit_label_after (label
, barrier
);
10391 pool
.insn_address
+= 4;
10394 /* See whether the constant pool is now out of range of the first
10395 user. If so, output the constants after the previous barrier.
10396 Note that any instructions between BARRIER and INSN (inclusive)
10397 will use negative offsets to refer to the pool. */
10398 if (pool
.insn_address
> pool
.highest_address
)
10400 dump_constants (pool
.first
, barrier
);
10404 else if (BARRIER_P (insn
))
/* Any constants still pending at end of function go after the last insn.  */
10408 dump_constants (pool
.first
, get_last_insn ());
/* Holds the insn currently being simulated so that for_each_rtx /
   note_stores callbacks (which only get a void *data) can reach it.  */
10411 /* A temporary variable used by for_each_rtx callbacks, etc. */
10412 static rtx mips_sim_insn
;
/* NOTE(review): garbled excerpt -- the `struct mips_sim {' header line,
   the TIME member (referenced later as state->time), the LAST_SET member
   struct, and the DFA_STATE member declaration are all missing from this
   view; only the comments and a few field lines survive.  */
10414 /* A structure representing the state of the processor pipeline.
10415 Used by the mips_sim_* family of functions. */
10417 /* The maximum number of instructions that can be issued in a cycle.
10418 (Caches mips_issue_rate.) */
10419 unsigned int issue_rate
;
10421 /* The current simulation time. */
10424 /* How many more instructions can be issued in the current cycle. */
10425 unsigned int insns_left
;
10427 /* LAST_SET[X].INSN is the last instruction to set register X.
10428 LAST_SET[X].TIME is the time at which that instruction was issued.
10429 INSN is null if no instruction has yet set register X. */
10433 } last_set
[FIRST_PSEUDO_REGISTER
];
10435 /* The pipeline's current DFA state. */
/* Clears the per-register last-set table and the scheduler DFA state and
   refills the issue budget.  NOTE(review): the statement resetting
   state->time (orig ~10444) is missing from this excerpt.  */
10439 /* Reset STATE to the initial simulation state. */
10442 mips_sim_reset (struct mips_sim
*state
)
10445 state
->insns_left
= state
->issue_rate
;
10446 memset (&state
->last_set
, 0, sizeof (state
->last_set
));
10447 state_reset (state
->dfa_state
);
/* One-time setup: caches the issue rate, adopts the caller-allocated DFA
   state buffer (caller retains ownership), then resets everything.  */
10450 /* Initialize STATE before its first use. DFA_STATE points to an
10451 allocated but uninitialized DFA state. */
10454 mips_sim_init (struct mips_sim
*state
, state_t dfa_state
)
10456 state
->issue_rate
= mips_issue_rate ();
10457 state
->dfa_state
= dfa_state
;
10458 mips_sim_reset (state
);
/* Refills the issue budget and advances the DFA one cycle (insn 0 means
   "cycle advance" to state_transition).  NOTE(review): the state->time++
   statement (orig ~10466) is missing from this excerpt.  */
10461 /* Advance STATE by one clock cycle. */
10464 mips_sim_next_cycle (struct mips_sim
*state
)
10467 state
->insns_left
= state
->issue_rate
;
10468 state_transition (state
->dfa_state
, 0);
/* For each hard register covered by REG, if it was written earlier, spins
   the simulated clock until writer-issue-time + insn_latency has elapsed,
   i.e. until INSN can read the value without stalling.  NOTE(review):
   garbled excerpt -- the `unsigned int i; unsigned int t;' declarations
   and several braces are missing from view.  */
10471 /* Advance simulation state STATE until instruction INSN can read
10475 mips_sim_wait_reg (struct mips_sim
*state
, rtx insn
, rtx reg
)
10479 for (i
= 0; i
< HARD_REGNO_NREGS (REGNO (reg
), GET_MODE (reg
)); i
++)
10480 if (state
->last_set
[REGNO (reg
) + i
].insn
!= 0)
10484 t
= state
->last_set
[REGNO (reg
) + i
].time
;
10485 t
+= insn_latency (state
->last_set
[REGNO (reg
) + i
].insn
, insn
);
10486 while (state
->time
< t
)
10487 mips_sim_next_cycle (state
);
/* Thin adapter: DATA is the struct mips_sim *, and mips_sim_insn carries
   the insn being simulated (set by mips_sim_wait_regs).  NOTE(review):
   the REG_P (*x) guard and return statement are missing from this
   garbled excerpt.  */
10491 /* A for_each_rtx callback. If *X is a register, advance simulation state
10492 DATA until mips_sim_insn can read the register's value. */
10495 mips_sim_wait_regs_2 (rtx
*x
, void *data
)
10498 mips_sim_wait_reg (data
, mips_sim_insn
, *x
);
/* note_uses callback: fans each used expression out to the per-register
   helper above via for_each_rtx.  */
10502 /* Call mips_sim_wait_regs_2 (R, DATA) for each register R mentioned in *X. */
10505 mips_sim_wait_regs_1 (rtx
*x
, void *data
)
10507 for_each_rtx (x
, mips_sim_wait_regs_2
, data
);
/* Stashes INSN in the file-static mips_sim_insn (note_uses callbacks get
   no insn argument) and waits on every register INSN's pattern uses.  */
10510 /* Advance simulation state STATE until all of INSN's register
10511 dependencies are satisfied. */
10514 mips_sim_wait_regs (struct mips_sim
*state
, rtx insn
)
10516 mips_sim_insn
= insn
;
10517 note_uses (&PATTERN (insn
), mips_sim_wait_regs_1
, state
);
/* Probes the DFA on a scratch copy (so the real state is untouched) and
   advances cycles until INSN can issue: either the issue budget is empty
   or the trial state_transition reports a structural conflict (>= 0).
   NOTE(review): the `state_t tmp_state;' declaration line is missing
   from this garbled excerpt.  */
10520 /* Advance simulation state STATE until the units required by
10521 instruction INSN are available. */
10524 mips_sim_wait_units (struct mips_sim
*state
, rtx insn
)
10528 tmp_state
= alloca (state_size ());
10529 while (state
->insns_left
== 0
10530 || (memcpy (tmp_state
, state
->dfa_state
, state_size ()),
10531 state_transition (tmp_state
, insn
) >= 0))
10532 mips_sim_next_cycle (state
);
/* Combined wait: first register (data) dependencies, then functional
   units / issue slots.  */
10535 /* Advance simulation state STATE until INSN is ready to issue. */
10538 mips_sim_wait_insn (struct mips_sim
*state
, rtx insn
)
10540 mips_sim_wait_regs (state
, insn
);
10541 mips_sim_wait_units (state
, insn
);
/* note_stores callback: records mips_sim_insn and the current time as the
   last writer of every hard register covered by X.  NOTE(review): garbled
   excerpt -- the REG_P (x) guard, `unsigned int i;' declaration, the
   `state = data;' assignment and braces are missing from view.  */
10544 /* mips_sim_insn has just set X. Update the LAST_SET array
10545 in simulation state DATA. */
10548 mips_sim_record_set (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
, void *data
)
10550 struct mips_sim
*state
;
10555 for (i
= 0; i
< HARD_REGNO_NREGS (REGNO (x
), GET_MODE (x
)); i
++)
10557 state
->last_set
[REGNO (x
) + i
].insn
= mips_sim_insn
;
10558 state
->last_set
[REGNO (x
) + i
].time
= state
->time
;
/* Commits INSN to the real DFA state, charges one issue slot, then records
   its register writes (mips_sim_insn is the side channel for the
   note_stores callback).  */
10562 /* Issue instruction INSN in scheduler state STATE. Assume that INSN
10563 can issue immediately (i.e., that mips_sim_wait_insn has already
10567 mips_sim_issue_insn (struct mips_sim
*state
, rtx insn
)
10569 state_transition (state
->dfa_state
, insn
);
10570 state
->insns_left
--;
10572 mips_sim_insn
= insn
;
10573 note_stores (PATTERN (insn
), mips_sim_record_set
, state
);
/* A nop consumes an issue slot but touches no registers or units; roll
   to the next cycle first if the current one is exhausted.  */
10576 /* Simulate issuing a NOP in state STATE. */
10579 mips_sim_issue_nop (struct mips_sim
*state
)
10581 if (state
->insns_left
== 0)
10582 mips_sim_next_cycle (state
);
10583 state
->insns_left
--;
/* Post-issue bookkeeping: simulates the implicit delay-slot nop of bare
   jumps, and resets the pipeline model at points where its state is
   unknowable (calls, labels, annulled branch-likely insns).
   NOTE(review): garbled excerpt -- the delay-slot condition guarding the
   nop, the case labels (presumably CODE_LABEL/CALL_INSN and JUMP_INSN),
   breaks, default, and closing braces are missing from view.  */
10586 /* Update simulation state STATE so that it's ready to accept the instruction
10587 after INSN. INSN should be part of the main rtl chain, not a member of a
10591 mips_sim_finish_insn (struct mips_sim
*state
, rtx insn
)
10593 /* If INSN is a jump with an implicit delay slot, simulate a nop. */
10595 mips_sim_issue_nop (state
);
10597 switch (GET_CODE (SEQ_BEGIN (insn
)))
10601 /* We can't predict the processor state after a call or label. */
10602 mips_sim_reset (state
);
10606 /* The delay slots of branch likely instructions are only executed
10607 when the branch is taken. Therefore, if the caller has simulated
10608 the delay slot instruction, STATE does not really reflect the state
10609 of the pipeline for the instruction after the delay slot. Also,
10610 branch likely instructions tend to incur a penalty when not taken,
10611 so there will probably be an extra delay between the branch and
10612 the instruction after the delay slot. */
10613 if (INSN_ANNULLED_BRANCH_P (SEQ_BEGIN (insn
)))
10614 mips_sim_reset (state
);
/* Swaps the rs/rt operands of an eq/ne branch condition when only rt is
   also referenced by the delay-slot insn, removing a false VR4130 pairing
   dependence.  Swapping is safe because EQ and NE are commutative.
   NOTE(review): garbled excerpt -- the `rtx first, second;' declarations,
   the leading JUMP_P (first) conjunct of the big condition (orig ~10641),
   and closing braces are missing from view.  */
10622 /* The VR4130 pipeline issues aligned pairs of instructions together,
10623 but it stalls the second instruction if it depends on the first.
10624 In order to cut down the amount of logic required, this dependence
10625 check is not based on a full instruction decode. Instead, any non-SPECIAL
10626 instruction is assumed to modify the register specified by bits 20-16
10627 (which is usually the "rt" field).
10629 In beq, beql, bne and bnel instructions, the rt field is actually an
10630 input, so we can end up with a false dependence between the branch
10631 and its delay slot. If this situation occurs in instruction INSN,
10632 try to avoid it by swapping rs and rt. */
10635 vr4130_avoid_branch_rt_conflict (rtx insn
)
10639 first
= SEQ_BEGIN (insn
);
10640 second
= SEQ_END (insn
);
10642 && NONJUMP_INSN_P (second
)
10643 && GET_CODE (PATTERN (first
)) == SET
10644 && GET_CODE (SET_DEST (PATTERN (first
))) == PC
10645 && GET_CODE (SET_SRC (PATTERN (first
))) == IF_THEN_ELSE
)
10647 /* Check for the right kind of condition. */
10648 rtx cond
= XEXP (SET_SRC (PATTERN (first
)), 0);
10649 if ((GET_CODE (cond
) == EQ
|| GET_CODE (cond
) == NE
)
10650 && REG_P (XEXP (cond
, 0))
10651 && REG_P (XEXP (cond
, 1))
10652 && reg_referenced_p (XEXP (cond
, 1), PATTERN (second
))
10653 && !reg_referenced_p (XEXP (cond
, 0), PATTERN (second
)))
10655 /* SECOND mentions the rt register but not the rs register. */
10656 rtx tmp
= XEXP (cond
, 0);
10657 XEXP (cond
, 0) = XEXP (cond
, 1);
10658 XEXP (cond
, 1) = tmp
;
/* Pipeline-driven alignment pass for -mvr4130-align: replays the function
   through the mips_sim model and inserts nops so that any insn that could
   dual-issue with its predecessor sits on an 8-byte boundary.
   NOTE(review): garbled excerpt -- the `bool aligned_p' declaration and
   initializations of last/last2/aligned_p, several braces, and the
   bookkeeping that shifts last2 <- last <- insn are missing from view.  */
10663 /* Implement -mvr4130-align. Go through each basic block and simulate the
10664 processor pipeline. If we find that a pair of instructions could execute
10665 in parallel, and the first of those instruction is not 8-byte aligned,
10666 insert a nop to make it aligned. */
10669 vr4130_align_insns (void)
10671 struct mips_sim state
;
10672 rtx insn
, subinsn
, last
, last2
, next
;
10677 /* LAST is the last instruction before INSN to have a nonzero length.
10678 LAST2 is the last such instruction before LAST. */
10682 /* ALIGNED_P is true if INSN is known to be at an aligned address. */
10685 mips_sim_init (&state
, alloca (state_size ()));
10686 for (insn
= get_insns (); insn
!= 0; insn
= next
)
10688 unsigned int length
;
10690 next
= NEXT_INSN (insn
);
10692 /* See the comment above vr4130_avoid_branch_rt_conflict for details.
10693 This isn't really related to the alignment pass, but we do it on
10694 the fly to avoid a separate instruction walk. */
10695 vr4130_avoid_branch_rt_conflict (insn
);
10697 if (USEFUL_INSN_P (insn
))
10698 FOR_EACH_SUBINSN (subinsn
, insn
)
10700 mips_sim_wait_insn (&state
, subinsn
);
10702 /* If we want this instruction to issue in parallel with the
10703 previous one, make sure that the previous instruction is
10704 aligned. There are several reasons why this isn't worthwhile
10705 when the second instruction is a call:
10707 - Calls are less likely to be performance critical,
10708 - There's a good chance that the delay slot can execute
10709 in parallel with the call.
10710 - The return address would then be unaligned.
10712 In general, if we're going to insert a nop between instructions
10713 X and Y, it's better to insert it immediately after X. That
10714 way, if the nop makes Y aligned, it will also align any labels
10715 between X and Y. */
10716 if (state
.insns_left
!= state
.issue_rate
10717 && !CALL_P (subinsn
))
10719 if (subinsn
== SEQ_BEGIN (insn
) && aligned_p
)
10721 /* SUBINSN is the first instruction in INSN and INSN is
10722 aligned. We want to align the previous instruction
10723 instead, so insert a nop between LAST2 and LAST.
10725 Note that LAST could be either a single instruction
10726 or a branch with a delay slot. In the latter case,
10727 LAST, like INSN, is already aligned, but the delay
10728 slot must have some extra delay that stops it from
10729 issuing at the same time as the branch. We therefore
10730 insert a nop before the branch in order to align its
10732 emit_insn_after (gen_nop (), last2
);
10735 else if (subinsn
!= SEQ_BEGIN (insn
) && !aligned_p
)
10737 /* SUBINSN is the delay slot of INSN, but INSN is
10738 currently unaligned. Insert a nop between
10739 LAST and INSN to align it. */
10740 emit_insn_after (gen_nop (), last
);
10744 mips_sim_issue_insn (&state
, subinsn
);
10746 mips_sim_finish_insn (&state
, insn
);
10748 /* Update LAST, LAST2 and ALIGNED_P for the next instruction. */
10749 length
= get_attr_length (insn
);
10752 /* If the instruction is an asm statement or multi-instruction
10753 mips.md patern, the length is only an estimate. Insert an
10754 8 byte alignment after it so that the following instructions
10755 can be handled correctly. */
10756 if (NONJUMP_INSN_P (SEQ_BEGIN (insn
))
10757 && (recog_memoized (insn
) < 0 || length
>= 8))
10759 next
= emit_insn_after (gen_align (GEN_INT (3)), insn
);
10760 next
= NEXT_INSN (next
);
10761 mips_sim_next_cycle (&state
);
10764 else if (length
& 4)
/* A 4-byte (odd-word) insn flips the 8-byte alignment parity.  */
10765 aligned_p
= !aligned_p
;
10770 /* See whether INSN is an aligned label. */
10771 if (LABEL_P (insn
) && label_to_alignment (insn
) >= 3)
/* Per-insn hazard avoidance for .set noreorder code: emits up to two
   hazard nops after AFTER when INSN reads a delayed register or sets LO
   too soon after an mflo/mfhi, then refreshes the *HILO_DELAY /
   *DELAYED_REG state for the caller's next iteration.
   NOTE(review): garbled excerpt -- `int nops, ninsns; rtx set;'
   declarations, the early return for !INSN_P and for ninsns == 0, the
   `else nops = 0;' arm, the nop-emitting loop header, the
   `*delayed_reg = 0;' reset, and the HAZARD_* case labels around the
   switch are missing from view.  */
10777 /* Subroutine of mips_reorg. If there is a hazard between INSN
10778 and a previous instruction, avoid it by inserting nops after
10781 *DELAYED_REG and *HILO_DELAY describe the hazards that apply at
10782 this point. If *DELAYED_REG is non-null, INSN must wait a cycle
10783 before using the value of that register. *HILO_DELAY counts the
10784 number of instructions since the last hilo hazard (that is,
10785 the number of instructions since the last mflo or mfhi).
10787 After inserting nops for INSN, update *DELAYED_REG and *HILO_DELAY
10788 for the next instruction.
10790 LO_REG is an rtx for the LO register, used in dependence checking. */
10793 mips_avoid_hazard (rtx after
, rtx insn
, int *hilo_delay
,
10794 rtx
*delayed_reg
, rtx lo_reg
)
10799 if (!INSN_P (insn
))
10802 pattern
= PATTERN (insn
);
10804 /* Do not put the whole function in .set noreorder if it contains
10805 an asm statement. We don't know whether there will be hazards
10806 between the asm statement and the gcc-generated code. */
10807 if (GET_CODE (pattern
) == ASM_INPUT
|| asm_noperands (pattern
) >= 0)
10808 cfun
->machine
->all_noreorder_p
= false;
10810 /* Ignore zero-length instructions (barriers and the like). */
10811 ninsns
= get_attr_length (insn
) / 4;
10815 /* Work out how many nops are needed. Note that we only care about
10816 registers that are explicitly mentioned in the instruction's pattern.
10817 It doesn't matter that calls use the argument registers or that they
10818 clobber hi and lo. */
10819 if (*hilo_delay
< 2 && reg_set_p (lo_reg
, pattern
))
10820 nops
= 2 - *hilo_delay
;
10821 else if (*delayed_reg
!= 0 && reg_referenced_p (*delayed_reg
, pattern
))
10826 /* Insert the nops between this instruction and the previous one.
10827 Each new nop takes us further from the last hilo hazard. */
10828 *hilo_delay
+= nops
;
10830 emit_insn_after (gen_hazard_nop (), after
);
10832 /* Set up the state for the next instruction. */
10833 *hilo_delay
+= ninsns
;
10835 if (INSN_CODE (insn
) >= 0)
10836 switch (get_attr_hazard (insn
))
10846 set
= single_set (insn
);
10847 gcc_assert (set
!= 0);
10848 *delayed_reg
= SET_DEST (set
);
/* Driver for hazard-nop insertion: splits insns, recomputes lengths with
   hazard nops ignored, decides whether the whole function can run under
   .set noreorder (asm/profiling/VR412x errata force it off), then calls
   mips_avoid_hazard on every insn and on each element of SEQUENCEs.
   NOTE(review): garbled excerpt -- `int hilo_delay; int i;' declarations,
   the initializations of insn/last_insn/hilo_delay/delayed_reg, the
   INSN_P guard, the else before the non-SEQUENCE call, and the
   last_insn update are missing from view.  */
10854 /* Go through the instruction stream and insert nops where necessary.
10855 See if the whole function can then be put into .set noreorder &
10859 mips_avoid_hazards (void)
10861 rtx insn
, last_insn
, lo_reg
, delayed_reg
;
10864 /* Force all instructions to be split into their final form. */
10865 split_all_insns_noflow ();
10867 /* Recalculate instruction lengths without taking nops into account. */
10868 cfun
->machine
->ignore_hazard_length_p
= true;
10869 shorten_branches (get_insns ());
10871 cfun
->machine
->all_noreorder_p
= true;
10873 /* Profiled functions can't be all noreorder because the profiler
10874 support uses assembler macros. */
10875 if (current_function_profile
)
10876 cfun
->machine
->all_noreorder_p
= false;
10878 /* Code compiled with -mfix-vr4120 can't be all noreorder because
10879 we rely on the assembler to work around some errata. */
10880 if (TARGET_FIX_VR4120
)
10881 cfun
->machine
->all_noreorder_p
= false;
10883 /* The same is true for -mfix-vr4130 if we might generate mflo or
10884 mfhi instructions. Note that we avoid using mflo and mfhi if
10885 the VR4130 macc and dmacc instructions are available instead;
10886 see the *mfhilo_{si,di}_macc patterns. */
10887 if (TARGET_FIX_VR4130
&& !ISA_HAS_MACCHI
)
10888 cfun
->machine
->all_noreorder_p
= false;
10893 lo_reg
= gen_rtx_REG (SImode
, LO_REGNUM
);
10895 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
10898 if (GET_CODE (PATTERN (insn
)) == SEQUENCE
)
10899 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
10900 mips_avoid_hazard (last_insn
, XVECEXP (PATTERN (insn
), 0, i
),
10901 &hilo_delay
, &delayed_reg
, lo_reg
);
10903 mips_avoid_hazard (last_insn
, insn
, &hilo_delay
,
10904 &delayed_reg
, lo_reg
);
/* Machine-dependent reorg pass: MIPS16 constant pools, then delayed-branch
   scheduling / hazard-nop insertion, then optional VR4130 alignment.
   NOTE(review): garbled excerpt -- the `static void mips_reorg (void) {'
   header (orig ~10913-10915), the call under TARGET_EXPLICIT_RELOCS
   (orig ~10918), and the closing lines are missing from view.  */
10911 /* Implement TARGET_MACHINE_DEPENDENT_REORG. */
10916 mips16_lay_out_constants ();
10917 if (TARGET_EXPLICIT_RELOCS
)
10919 if (mips_flag_delayed_branch
)
10920 dbr_schedule (get_insns ());
10921 mips_avoid_hazards ();
10922 if (TUNE_MIPS4130
&& TARGET_VR4130_ALIGN
)
10923 vr4130_align_insns ();
/* Registers MIPS-specific runtime library routines: VR4120 div/mod
   workarounds, the __mips16_* hard-float helper stubs (single-precision
   always; double-precision only for TARGET_DOUBLE_FLOAT), and finally the
   gofast hooks.  The string names must match the entry points provided by
   libgcc / mips16.S.  */
10927 /* This function does three things:
10929 - Register the special divsi3 and modsi3 functions if -mfix-vr4120.
10930 - Register the mips16 hardware floating point stubs.
10931 - Register the gofast functions if selected using --enable-gofast. */
10933 #include "config/gofast.h"
10936 mips_init_libfuncs (void)
10938 if (TARGET_FIX_VR4120
)
10940 set_optab_libfunc (sdiv_optab
, SImode
, "__vr4120_divsi3");
10941 set_optab_libfunc (smod_optab
, SImode
, "__vr4120_modsi3");
10944 if (TARGET_MIPS16
&& TARGET_HARD_FLOAT_ABI
)
10946 set_optab_libfunc (add_optab
, SFmode
, "__mips16_addsf3");
10947 set_optab_libfunc (sub_optab
, SFmode
, "__mips16_subsf3");
10948 set_optab_libfunc (smul_optab
, SFmode
, "__mips16_mulsf3");
10949 set_optab_libfunc (sdiv_optab
, SFmode
, "__mips16_divsf3");
10951 set_optab_libfunc (eq_optab
, SFmode
, "__mips16_eqsf2");
10952 set_optab_libfunc (ne_optab
, SFmode
, "__mips16_nesf2");
10953 set_optab_libfunc (gt_optab
, SFmode
, "__mips16_gtsf2");
10954 set_optab_libfunc (ge_optab
, SFmode
, "__mips16_gesf2");
10955 set_optab_libfunc (lt_optab
, SFmode
, "__mips16_ltsf2");
10956 set_optab_libfunc (le_optab
, SFmode
, "__mips16_lesf2");
10957 set_optab_libfunc (unord_optab
, SFmode
, "__mips16_unordsf2");
10959 set_conv_libfunc (sfix_optab
, SImode
, SFmode
, "__mips16_fix_truncsfsi");
10960 set_conv_libfunc (sfloat_optab
, SFmode
, SImode
, "__mips16_floatsisf");
10961 set_conv_libfunc (ufloat_optab
, SFmode
, SImode
, "__mips16_floatunsisf");
10963 if (TARGET_DOUBLE_FLOAT
)
10965 set_optab_libfunc (add_optab
, DFmode
, "__mips16_adddf3");
10966 set_optab_libfunc (sub_optab
, DFmode
, "__mips16_subdf3");
10967 set_optab_libfunc (smul_optab
, DFmode
, "__mips16_muldf3");
10968 set_optab_libfunc (sdiv_optab
, DFmode
, "__mips16_divdf3");
10970 set_optab_libfunc (eq_optab
, DFmode
, "__mips16_eqdf2");
10971 set_optab_libfunc (ne_optab
, DFmode
, "__mips16_nedf2");
10972 set_optab_libfunc (gt_optab
, DFmode
, "__mips16_gtdf2");
10973 set_optab_libfunc (ge_optab
, DFmode
, "__mips16_gedf2");
10974 set_optab_libfunc (lt_optab
, DFmode
, "__mips16_ltdf2");
10975 set_optab_libfunc (le_optab
, DFmode
, "__mips16_ledf2");
10976 set_optab_libfunc (unord_optab
, DFmode
, "__mips16_unorddf2");
10978 set_conv_libfunc (sext_optab
, DFmode
, SFmode
, "__mips16_extendsfdf2");
10979 set_conv_libfunc (trunc_optab
, SFmode
, DFmode
, "__mips16_truncdfsf2");
10981 set_conv_libfunc (sfix_optab
, SImode
, DFmode
, "__mips16_fix_truncdfsi");
10982 set_conv_libfunc (sfloat_optab
, DFmode
, SImode
, "__mips16_floatsidf");
10983 set_conv_libfunc (ufloat_optab
, DFmode
, SImode
, "__mips16_floatunsidf");
10987 gofast_maybe_init_libfuncs ();
/* Register-class move-cost hook (REGISTER_MOVE_COST).  Note the argument
   order here is (mode, TO, FROM).  NOTE(review): garbled excerpt -- every
   `return N;' line and most braces are missing, so the actual cost values
   for each branch cannot be read from this view; only the class-test
   structure survives.  Confirm the numbers against the full source.  */
10990 /* Return a number assessing the cost of moving a register in class
10991 FROM to class TO. The classes are expressed using the enumeration
10992 values such as `GENERAL_REGS'. A value of 2 is the default; other
10993 values are interpreted relative to that.
10995 It is not required that the cost always equal 2 when FROM is the
10996 same as TO; on some machines it is expensive to move between
10997 registers if they are not general registers.
10999 If reload sees an insn consisting of a single `set' between two
11000 hard registers, and if `REGISTER_MOVE_COST' applied to their
11001 classes returns a value of 2, reload does not check to ensure that
11002 the constraints of the insn are met. Setting a cost of other than
11003 2 will allow reload to verify that the constraints are met. You
11004 should do this if the `movM' pattern's constraints do not allow
11007 ??? We make the cost of moving from HI/LO into general
11008 registers the same as for one of moving general registers to
11009 HI/LO for TARGET_MIPS16 in order to prevent allocating a
11010 pseudo to HI/LO. This might hurt optimizations though, it
11011 isn't clear if it is wise. And it might not work in all cases. We
11012 could solve the DImode LO reg problem by using a multiply, just
11013 like reload_{in,out}si. We could solve the SImode/HImode HI reg
11014 problem by using divide instructions. divu puts the remainder in
11015 the HI reg, so doing a divide by -1 will move the value in the HI
11016 reg for all values except -1. We could handle that case by using a
11017 signed divide, e.g. -1 / 2 (or maybe 1 / -2?). We'd have to emit
11018 a compare/branch to test the input value to see which instruction
11019 we need to use. This gets pretty messy, but it is feasible. */
11022 mips_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
11023 enum reg_class to
, enum reg_class from
)
11025 if (from
== M16_REGS
&& reg_class_subset_p (to
, GENERAL_REGS
))
11027 else if (from
== M16_NA_REGS
&& reg_class_subset_p (to
, GENERAL_REGS
))
11029 else if (reg_class_subset_p (from
, GENERAL_REGS
))
11031 if (to
== M16_REGS
)
11033 else if (to
== M16_NA_REGS
)
11035 else if (reg_class_subset_p (to
, GENERAL_REGS
))
11042 else if (to
== FP_REGS
)
11044 else if (reg_class_subset_p (to
, ACC_REGS
))
11051 else if (reg_class_subset_p (to
, ALL_COP_REGS
))
11056 else if (from
== FP_REGS
)
11058 if (reg_class_subset_p (to
, GENERAL_REGS
))
11060 else if (to
== FP_REGS
)
11062 else if (to
== ST_REGS
)
11065 else if (reg_class_subset_p (from
, ACC_REGS
))
11067 if (reg_class_subset_p (to
, GENERAL_REGS
))
11075 else if (from
== ST_REGS
&& reg_class_subset_p (to
, GENERAL_REGS
))
11077 else if (reg_class_subset_p (from
, ALL_COP_REGS
))
11083 ??? What cases are these? Shouldn't we return 2 here? */
/* ADJUST_INSN_LENGTH hook: grows LENGTH for unfilled delay slots, hazard
   nops, and (on MIPS16) halves/adjusts for 2-byte encodings.
   NOTE(review): heavily garbled excerpt -- the return type, the length
   adjustments themselves (orig ~11098-11115 and 11117-11121, including
   the switch case bodies and final return) are missing from this view;
   only the guards survive.  Confirm against the full source.  */
11088 /* Return the length of INSN. LENGTH is the initial length computed by
11089 attributes in the machine-description file. */
11092 mips_adjust_insn_length (rtx insn
, int length
)
11094 /* A unconditional jump has an unfilled delay slot if it is not part
11095 of a sequence. A conditional jump normally has a delay slot, but
11096 does not on MIPS16. */
11097 if (CALL_P (insn
) || (TARGET_MIPS16
? simplejump_p (insn
) : JUMP_P (insn
)))
11100 /* See how many nops might be needed to avoid hardware hazards. */
11101 if (!cfun
->machine
->ignore_hazard_length_p
&& INSN_CODE (insn
) >= 0)
11102 switch (get_attr_hazard (insn
))
11116 /* All MIPS16 instructions are a measly two bytes. */
/* Selects the asm template that opens a noat region (%[) and loads a
   label's address into $at (%@): GOT-page/offset pairs under explicit
   relocs (lw vs. ld chosen by pointer width), plain %got/%lo otherwise
   (with a %# nop on load-delay ISAs), or a simple la/dla fallback.
   The caller is responsible for the closing %].  NOTE(review): garbled
   excerpt -- the Pmode == DImode test selecting the ld variant and
   several braces are missing from view.  */
11124 /* Return an asm sequence to start a noat block and load the address
11125 of a label into $1. */
11128 mips_output_load_label (void)
11130 if (TARGET_EXPLICIT_RELOCS
)
11134 return "%[lw\t%@,%%got_page(%0)(%+)\n\taddiu\t%@,%@,%%got_ofst(%0)";
11137 return "%[ld\t%@,%%got_page(%0)(%+)\n\tdaddiu\t%@,%@,%%got_ofst(%0)";
11140 if (ISA_HAS_LOAD_DELAY
)
11141 return "%[lw\t%@,%%got(%0)(%+)%#\n\taddiu\t%@,%@,%%lo(%0)";
11142 return "%[lw\t%@,%%got(%0)(%+)\n\taddiu\t%@,%@,%%lo(%0)";
11146 if (Pmode
== DImode
)
11147 return "%[dla\t%@,%0";
11149 return "%[la\t%@,%0";
/* Emits a conditional branch, falling back for out-of-range targets to an
   inverted branch around an unconditional jump (direct `j' or a loaded
   register jump), carefully re-emitting INSN's delay-slot insn on
   whichever path should execute it and padding the other with a nop.
   NOTE(review): garbled excerpt -- the length-based dispatch (the case
   labels / if tests selecting short vs. long forms), the branches
   choosing `j' vs. the load-label sequence, closing braces, and the final
   `return "";' are missing from this view.  */
11153 /* Return the assembly code for INSN, which has the operands given by
11154 OPERANDS, and which branches to OPERANDS[1] if some condition is true.
11155 BRANCH_IF_TRUE is the asm template that should be used if OPERANDS[1]
11156 is in range of a direct branch. BRANCH_IF_FALSE is an inverted
11157 version of BRANCH_IF_TRUE. */
11160 mips_output_conditional_branch (rtx insn
, rtx
*operands
,
11161 const char *branch_if_true
,
11162 const char *branch_if_false
)
11164 unsigned int length
;
11165 rtx taken
, not_taken
;
11167 length
= get_attr_length (insn
);
11170 /* Just a simple conditional branch. */
11171 mips_branch_likely
= (final_sequence
&& INSN_ANNULLED_BRANCH_P (insn
));
11172 return branch_if_true
;
11175 /* Generate a reversed branch around a direct jump. This fallback does
11176 not use branch-likely instructions. */
11177 mips_branch_likely
= false;
11178 not_taken
= gen_label_rtx ();
11179 taken
= operands
[1];
11181 /* Generate the reversed branch to NOT_TAKEN. */
11182 operands
[1] = not_taken
;
11183 output_asm_insn (branch_if_false
, operands
);
11185 /* If INSN has a delay slot, we must provide delay slots for both the
11186 branch to NOT_TAKEN and the conditional jump. We must also ensure
11187 that INSN's delay slot is executed in the appropriate cases. */
11188 if (final_sequence
)
11190 /* This first delay slot will always be executed, so use INSN's
11191 delay slot if is not annulled. */
11192 if (!INSN_ANNULLED_BRANCH_P (insn
))
11194 final_scan_insn (XVECEXP (final_sequence
, 0, 1),
11195 asm_out_file
, optimize
, 1, NULL
);
11196 INSN_DELETED_P (XVECEXP (final_sequence
, 0, 1)) = 1;
11199 output_asm_insn ("nop", 0);
11200 fprintf (asm_out_file
, "\n");
11203 /* Output the unconditional branch to TAKEN. */
11205 output_asm_insn ("j\t%0%/", &taken
);
11208 output_asm_insn (mips_output_load_label (), &taken
);
11209 output_asm_insn ("jr\t%@%]%/", 0);
11212 /* Now deal with its delay slot; see above. */
11213 if (final_sequence
)
11215 /* This delay slot will only be executed if the branch is taken.
11216 Use INSN's delay slot if is annulled. */
11217 if (INSN_ANNULLED_BRANCH_P (insn
))
11219 final_scan_insn (XVECEXP (final_sequence
, 0, 1),
11220 asm_out_file
, optimize
, 1, NULL
);
11221 INSN_DELETED_P (XVECEXP (final_sequence
, 0, 1)) = 1;
11224 output_asm_insn ("nop", 0);
11225 fprintf (asm_out_file
, "\n");
11228 /* Output NOT_TAKEN. */
11229 (*targetm
.asm_out
.internal_label
) (asm_out_file
, "L",
11230 CODE_LABEL_NUMBER (not_taken
));
11234 /* Return the assembly code for INSN, which branches to OPERANDS[1]
11235 if some ordered condition is true. The condition is given by
11236 OPERANDS[0] if !INVERTED_P, otherwise it is the inverse of
11237 OPERANDS[0]. OPERANDS[2] is the comparison's first operand;
11238 its second is always zero. */
11241 mips_output_order_conditional_branch (rtx insn
, rtx
*operands
, bool inverted_p
)
11243 const char *branch
[2];
11245 /* Make BRANCH[1] branch to OPERANDS[1] when the condition is true.
11246 Make BRANCH[0] branch on the inverse condition. */
11247 switch (GET_CODE (operands
[0]))
11249 /* These cases are equivalent to comparisons against zero. */
11251 inverted_p
= !inverted_p
;
11252 /* Fall through. */
11254 branch
[!inverted_p
] = MIPS_BRANCH ("bne", "%2,%.,%1");
11255 branch
[inverted_p
] = MIPS_BRANCH ("beq", "%2,%.,%1");
11258 /* These cases are always true or always false. */
11260 inverted_p
= !inverted_p
;
11261 /* Fall through. */
11263 branch
[!inverted_p
] = MIPS_BRANCH ("beq", "%.,%.,%1");
11264 branch
[inverted_p
] = MIPS_BRANCH ("bne", "%.,%.,%1");
11268 branch
[!inverted_p
] = MIPS_BRANCH ("b%C0z", "%2,%1");
11269 branch
[inverted_p
] = MIPS_BRANCH ("b%N0z", "%2,%1");
11272 return mips_output_conditional_branch (insn
, operands
, branch
[1], branch
[0]);
11275 /* Used to output div or ddiv instruction DIVISION, which has the operands
11276 given by OPERANDS. Add in a divide-by-zero check if needed.
11278 When working around R4000 and R4400 errata, we need to make sure that
11279 the division is not immediately followed by a shift[1][2]. We also
11280 need to stop the division from being put into a branch delay slot[3].
11281 The easiest way to avoid both problems is to add a nop after the
11282 division. When a divide-by-zero check is needed, this nop can be
11283 used to fill the branch delay slot.
11285 [1] If a double-word or a variable shift executes immediately
11286 after starting an integer division, the shift may give an
11287 incorrect result. See quotations of errata #16 and #28 from
11288 "MIPS R4000PC/SC Errata, Processor Revision 2.2 and 3.0"
11289 in mips.md for details.
11291 [2] A similar bug to [1] exists for all revisions of the
11292 R4000 and the R4400 when run in an MC configuration.
11293 From "MIPS R4000MC Errata, Processor Revision 2.2 and 3.0":
11295 "19. In this following sequence:
11297 ddiv (or ddivu or div or divu)
11298 dsll32 (or dsrl32, dsra32)
11300 if an MPT stall occurs, while the divide is slipping the cpu
11301 pipeline, then the following double shift would end up with an
11304 Workaround: The compiler needs to avoid generating any
11305 sequence with divide followed by extended double shift."
11307 This erratum is also present in "MIPS R4400MC Errata, Processor
11308 Revision 1.0" and "MIPS R4400MC Errata, Processor Revision 2.0
11309 & 3.0" as errata #10 and #4, respectively.
11311 [3] From "MIPS R4000PC/SC Errata, Processor Revision 2.2 and 3.0"
11312 (also valid for MIPS R4000MC processors):
11314 "52. R4000SC: This bug does not apply for the R4000PC.
11316 There are two flavors of this bug:
11318 1) If the instruction just after divide takes an RF exception
11319 (tlb-refill, tlb-invalid) and gets an instruction cache
11320 miss (both primary and secondary) and the line which is
11321 currently in secondary cache at this index had the first
11322 data word, where the bits 5..2 are set, then R4000 would
11323 get a wrong result for the div.
11328 ------------------- # end-of page. -tlb-refill
11333 ------------------- # end-of page. -tlb-invalid
11336 2) If the divide is in the taken branch delay slot, where the
11337 target takes RF exception and gets an I-cache miss for the
11338 exception vector or where I-cache miss occurs for the
11339 target address, under the above mentioned scenarios, the
11340 div would get wrong results.
11343 j r2 # to next page mapped or unmapped
11344 div r8,r9 # this bug would be there as long
11345 # as there is an ICache miss and
11346 nop # the "data pattern" is present
11349 beq r0, r0, NextPage # to Next page
11353 This bug is present for div, divu, ddiv, and ddivu
11356 Workaround: For item 1), OS could make sure that the next page
11357 after the divide instruction is also mapped. For item 2), the
11358 compiler could make sure that the divide instruction is not in
11359 the branch delay slot."
11361 These processors have PRId values of 0x00004220 and 0x00004300 for
11362 the R4000 and 0x00004400, 0x00004500 and 0x00004600 for the R4400. */
11365 mips_output_division (const char *division
, rtx
*operands
)
11370 if (TARGET_FIX_R4000
|| TARGET_FIX_R4400
)
11372 output_asm_insn (s
, operands
);
11375 if (TARGET_CHECK_ZERO_DIV
)
11379 output_asm_insn (s
, operands
);
11380 s
= "bnez\t%2,1f\n\tbreak\t7\n1:";
11382 else if (GENERATE_DIVIDE_TRAPS
)
11384 output_asm_insn (s
, operands
);
11385 s
= "teq\t%2,%.,7";
11389 output_asm_insn ("%(bne\t%2,%.,1f", operands
);
11390 output_asm_insn (s
, operands
);
11391 s
= "break\t7%)\n1:";
11397 /* Return true if GIVEN is the same as CANONICAL, or if it is CANONICAL
11398 with a final "000" replaced by "k". Ignore case.
11400 Note: this function is shared between GCC and GAS. */
11403 mips_strict_matching_cpu_name_p (const char *canonical
, const char *given
)
11405 while (*given
!= 0 && TOLOWER (*given
) == TOLOWER (*canonical
))
11406 given
++, canonical
++;
11408 return ((*given
== 0 && *canonical
== 0)
11409 || (strcmp (canonical
, "000") == 0 && strcasecmp (given
, "k") == 0));
11413 /* Return true if GIVEN matches CANONICAL, where GIVEN is a user-supplied
11414 CPU name. We've traditionally allowed a lot of variation here.
11416 Note: this function is shared between GCC and GAS. */
11419 mips_matching_cpu_name_p (const char *canonical
, const char *given
)
11421 /* First see if the name matches exactly, or with a final "000"
11422 turned into "k". */
11423 if (mips_strict_matching_cpu_name_p (canonical
, given
))
11426 /* If not, try comparing based on numerical designation alone.
11427 See if GIVEN is an unadorned number, or 'r' followed by a number. */
11428 if (TOLOWER (*given
) == 'r')
11430 if (!ISDIGIT (*given
))
11433 /* Skip over some well-known prefixes in the canonical name,
11434 hoping to find a number there too. */
11435 if (TOLOWER (canonical
[0]) == 'v' && TOLOWER (canonical
[1]) == 'r')
11437 else if (TOLOWER (canonical
[0]) == 'r' && TOLOWER (canonical
[1]) == 'm')
11439 else if (TOLOWER (canonical
[0]) == 'r')
11442 return mips_strict_matching_cpu_name_p (canonical
, given
);
11446 /* Return the mips_cpu_info entry for the processor or ISA given
11447 by CPU_STRING. Return null if the string isn't recognized.
11449 A similar function exists in GAS. */
11451 static const struct mips_cpu_info
*
11452 mips_parse_cpu (const char *cpu_string
)
11454 const struct mips_cpu_info
*p
;
11457 /* In the past, we allowed upper-case CPU names, but it doesn't
11458 work well with the multilib machinery. */
11459 for (s
= cpu_string
; *s
!= 0; s
++)
11462 warning (0, "the cpu name must be lower case");
11466 /* 'from-abi' selects the most compatible architecture for the given
11467 ABI: MIPS I for 32-bit ABIs and MIPS III for 64-bit ABIs. For the
11468 EABIs, we have to decide whether we're using the 32-bit or 64-bit
11469 version. Look first at the -mgp options, if given, otherwise base
11470 the choice on MASK_64BIT in TARGET_DEFAULT. */
11471 if (strcasecmp (cpu_string
, "from-abi") == 0)
11472 return mips_cpu_info_from_isa (ABI_NEEDS_32BIT_REGS
? 1
11473 : ABI_NEEDS_64BIT_REGS
? 3
11474 : (TARGET_64BIT
? 3 : 1));
11476 /* 'default' has traditionally been a no-op. Probably not very useful. */
11477 if (strcasecmp (cpu_string
, "default") == 0)
11480 for (p
= mips_cpu_info_table
; p
->name
!= 0; p
++)
11481 if (mips_matching_cpu_name_p (p
->name
, cpu_string
))
11488 /* Return the processor associated with the given ISA level, or null
11489 if the ISA isn't valid. */
11491 static const struct mips_cpu_info
*
11492 mips_cpu_info_from_isa (int isa
)
11494 const struct mips_cpu_info
*p
;
11496 for (p
= mips_cpu_info_table
; p
->name
!= 0; p
++)
11503 /* Implement HARD_REGNO_NREGS. The size of FP registers is controlled
11504 by UNITS_PER_FPREG. The size of FP status registers is always 4, because
11505 they only hold condition code modes, and CCmode is always considered to
11506 be 4 bytes wide. All other registers are word sized. */
11509 mips_hard_regno_nregs (int regno
, enum machine_mode mode
)
11511 if (ST_REG_P (regno
))
11512 return ((GET_MODE_SIZE (mode
) + 3) / 4);
11513 else if (! FP_REG_P (regno
))
11514 return ((GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
11516 return ((GET_MODE_SIZE (mode
) + UNITS_PER_FPREG
- 1) / UNITS_PER_FPREG
);
11519 /* Implement TARGET_RETURN_IN_MEMORY. Under the old (i.e., 32 and O64 ABIs)
11520 all BLKmode objects are returned in memory. Under the new (N32 and
11521 64-bit MIPS ABIs) small structures are returned in a register.
11522 Objects with varying size must still be returned in memory, of
11526 mips_return_in_memory (const_tree type
, const_tree fndecl ATTRIBUTE_UNUSED
)
11529 return (TYPE_MODE (type
) == BLKmode
);
11531 return ((int_size_in_bytes (type
) > (2 * UNITS_PER_WORD
))
11532 || (int_size_in_bytes (type
) == -1));
11536 mips_strict_argument_naming (CUMULATIVE_ARGS
*ca ATTRIBUTE_UNUSED
)
11538 return !TARGET_OLDABI
;
11541 /* Return true if INSN is a multiply-add or multiply-subtract
11542 instruction and PREV assigns to the accumulator operand. */
11545 mips_linked_madd_p (rtx prev
, rtx insn
)
11549 x
= single_set (insn
);
11555 if (GET_CODE (x
) == PLUS
11556 && GET_CODE (XEXP (x
, 0)) == MULT
11557 && reg_set_p (XEXP (x
, 1), prev
))
11560 if (GET_CODE (x
) == MINUS
11561 && GET_CODE (XEXP (x
, 1)) == MULT
11562 && reg_set_p (XEXP (x
, 0), prev
))
11568 /* Used by TUNE_MACC_CHAINS to record the last scheduled instruction
11569 that may clobber hi or lo. */
11571 static rtx mips_macc_chains_last_hilo
;
11573 /* A TUNE_MACC_CHAINS helper function. Record that instruction INSN has
11574 been scheduled, updating mips_macc_chains_last_hilo appropriately. */
11577 mips_macc_chains_record (rtx insn
)
11579 if (get_attr_may_clobber_hilo (insn
))
11580 mips_macc_chains_last_hilo
= insn
;
11583 /* A TUNE_MACC_CHAINS helper function. Search ready queue READY, which
11584 has NREADY elements, looking for a multiply-add or multiply-subtract
11585 instruction that is cumulative with mips_macc_chains_last_hilo.
11586 If there is one, promote it ahead of anything else that might
11587 clobber hi or lo. */
11590 mips_macc_chains_reorder (rtx
*ready
, int nready
)
11594 if (mips_macc_chains_last_hilo
!= 0)
11595 for (i
= nready
- 1; i
>= 0; i
--)
11596 if (mips_linked_madd_p (mips_macc_chains_last_hilo
, ready
[i
]))
11598 for (j
= nready
- 1; j
> i
; j
--)
11599 if (recog_memoized (ready
[j
]) >= 0
11600 && get_attr_may_clobber_hilo (ready
[j
]))
11602 mips_promote_ready (ready
, i
, j
);
11609 /* The last instruction to be scheduled. */
11611 static rtx vr4130_last_insn
;
11613 /* A note_stores callback used by vr4130_true_reg_dependence_p. DATA
11614 points to an rtx that is initially an instruction. Nullify the rtx
11615 if the instruction uses the value of register X. */
11618 vr4130_true_reg_dependence_p_1 (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
, void *data
)
11620 rtx
*insn_ptr
= data
;
11623 && reg_referenced_p (x
, PATTERN (*insn_ptr
)))
11627 /* Return true if there is true register dependence between vr4130_last_insn
11631 vr4130_true_reg_dependence_p (rtx insn
)
11633 note_stores (PATTERN (vr4130_last_insn
),
11634 vr4130_true_reg_dependence_p_1
, &insn
);
11638 /* A TUNE_MIPS4130 helper function. Given that INSN1 is at the head of
11639 the ready queue and that INSN2 is the instruction after it, return
11640 true if it is worth promoting INSN2 ahead of INSN1. Look for cases
11641 in which INSN1 and INSN2 can probably issue in parallel, but for
11642 which (INSN2, INSN1) should be less sensitive to instruction
11643 alignment than (INSN1, INSN2). See 4130.md for more details. */
11646 vr4130_swap_insns_p (rtx insn1
, rtx insn2
)
11648 sd_iterator_def sd_it
;
11651 /* Check for the following case:
11653 1) there is some other instruction X with an anti dependence on INSN1;
11654 2) X has a higher priority than INSN2; and
11655 3) X is an arithmetic instruction (and thus has no unit restrictions).
11657 If INSN1 is the last instruction blocking X, it would better to
11658 choose (INSN1, X) over (INSN2, INSN1). */
11659 FOR_EACH_DEP (insn1
, SD_LIST_FORW
, sd_it
, dep
)
11660 if (DEP_TYPE (dep
) == REG_DEP_ANTI
11661 && INSN_PRIORITY (DEP_CON (dep
)) > INSN_PRIORITY (insn2
)
11662 && recog_memoized (DEP_CON (dep
)) >= 0
11663 && get_attr_vr4130_class (DEP_CON (dep
)) == VR4130_CLASS_ALU
)
11666 if (vr4130_last_insn
!= 0
11667 && recog_memoized (insn1
) >= 0
11668 && recog_memoized (insn2
) >= 0)
11670 /* See whether INSN1 and INSN2 use different execution units,
11671 or if they are both ALU-type instructions. If so, they can
11672 probably execute in parallel. */
11673 enum attr_vr4130_class class1
= get_attr_vr4130_class (insn1
);
11674 enum attr_vr4130_class class2
= get_attr_vr4130_class (insn2
);
11675 if (class1
!= class2
|| class1
== VR4130_CLASS_ALU
)
11677 /* If only one of the instructions has a dependence on
11678 vr4130_last_insn, prefer to schedule the other one first. */
11679 bool dep1
= vr4130_true_reg_dependence_p (insn1
);
11680 bool dep2
= vr4130_true_reg_dependence_p (insn2
);
11684 /* Prefer to schedule INSN2 ahead of INSN1 if vr4130_last_insn
11685 is not an ALU-type instruction and if INSN1 uses the same
11686 execution unit. (Note that if this condition holds, we already
11687 know that INSN2 uses a different execution unit.) */
11688 if (class1
!= VR4130_CLASS_ALU
11689 && recog_memoized (vr4130_last_insn
) >= 0
11690 && class1
== get_attr_vr4130_class (vr4130_last_insn
))
11697 /* A TUNE_MIPS4130 helper function. (READY, NREADY) describes a ready
11698 queue with at least two instructions. Swap the first two if
11699 vr4130_swap_insns_p says that it could be worthwhile. */
11702 vr4130_reorder (rtx
*ready
, int nready
)
11704 if (vr4130_swap_insns_p (ready
[nready
- 1], ready
[nready
- 2]))
11705 mips_promote_ready (ready
, nready
- 2, nready
- 1);
11708 /* Remove the instruction at index LOWER from ready queue READY and
11709 reinsert it in front of the instruction at index HIGHER. LOWER must
11713 mips_promote_ready (rtx
*ready
, int lower
, int higher
)
11718 new_head
= ready
[lower
];
11719 for (i
= lower
; i
< higher
; i
++)
11720 ready
[i
] = ready
[i
+ 1];
11721 ready
[i
] = new_head
;
11724 /* If the priority of the instruction at POS2 in the ready queue READY
11725 is within LIMIT units of that of the instruction at POS1, swap the
11726 instructions if POS2 is not already less than POS1. */
11729 mips_maybe_swap_ready (rtx
*ready
, int pos1
, int pos2
, int limit
)
11732 && INSN_PRIORITY (ready
[pos1
]) + limit
>= INSN_PRIORITY (ready
[pos2
]))
11735 temp
= ready
[pos1
];
11736 ready
[pos1
] = ready
[pos2
];
11737 ready
[pos2
] = temp
;
11741 /* Record whether last 74k AGEN instruction was a load or store. */
11743 static enum attr_type mips_last_74k_agen_insn
= TYPE_UNKNOWN
;
11745 /* Initialize mips_last_74k_agen_insn from INSN. A null argument
11746 resets to TYPE_UNKNOWN state. */
11749 mips_74k_agen_init (rtx insn
)
11751 if (!insn
|| !NONJUMP_INSN_P (insn
))
11752 mips_last_74k_agen_insn
= TYPE_UNKNOWN
;
11753 else if (USEFUL_INSN_P (insn
))
11755 enum attr_type type
= get_attr_type (insn
);
11756 if (type
== TYPE_LOAD
|| type
== TYPE_STORE
)
11757 mips_last_74k_agen_insn
= type
;
11761 /* A TUNE_74K helper function. The 74K AGEN pipeline likes multiple
11762 loads to be grouped together, and multiple stores to be grouped
11763 together. Swap things around in the ready queue to make this happen. */
11766 mips_74k_agen_reorder (rtx
*ready
, int nready
)
11769 int store_pos
, load_pos
;
11774 for (i
= nready
- 1; i
>= 0; i
--)
11776 rtx insn
= ready
[i
];
11777 if (USEFUL_INSN_P (insn
))
11778 switch (get_attr_type (insn
))
11781 if (store_pos
== -1)
11786 if (load_pos
== -1)
11795 if (load_pos
== -1 || store_pos
== -1)
11798 switch (mips_last_74k_agen_insn
)
11801 /* Prefer to schedule loads since they have a higher latency. */
11803 /* Swap loads to the front of the queue. */
11804 mips_maybe_swap_ready (ready
, load_pos
, store_pos
, 4);
11807 /* Swap stores to the front of the queue. */
11808 mips_maybe_swap_ready (ready
, store_pos
, load_pos
, 4);
11815 /* Implement TARGET_SCHED_INIT. */
11818 mips_sched_init (FILE *file ATTRIBUTE_UNUSED
, int verbose ATTRIBUTE_UNUSED
,
11819 int max_ready ATTRIBUTE_UNUSED
)
11821 mips_macc_chains_last_hilo
= 0;
11822 vr4130_last_insn
= 0;
11823 mips_74k_agen_init (NULL_RTX
);
11826 /* Implement TARGET_SCHED_REORDER and TARG_SCHED_REORDER2. */
11829 mips_sched_reorder (FILE *file ATTRIBUTE_UNUSED
, int verbose ATTRIBUTE_UNUSED
,
11830 rtx
*ready
, int *nreadyp
, int cycle ATTRIBUTE_UNUSED
)
11832 if (!reload_completed
11833 && TUNE_MACC_CHAINS
11835 mips_macc_chains_reorder (ready
, *nreadyp
);
11836 if (reload_completed
11838 && !TARGET_VR4130_ALIGN
11840 vr4130_reorder (ready
, *nreadyp
);
11842 mips_74k_agen_reorder (ready
, *nreadyp
);
11843 return mips_issue_rate ();
11846 /* Implement TARGET_SCHED_VARIABLE_ISSUE. */
11849 mips_variable_issue (FILE *file ATTRIBUTE_UNUSED
, int verbose ATTRIBUTE_UNUSED
,
11850 rtx insn
, int more
)
11853 mips_74k_agen_init (insn
);
11854 switch (GET_CODE (PATTERN (insn
)))
11858 /* Don't count USEs and CLOBBERs against the issue rate. */
11863 if (!reload_completed
&& TUNE_MACC_CHAINS
)
11864 mips_macc_chains_record (insn
);
11865 vr4130_last_insn
= insn
;
11871 /* Implement TARGET_SCHED_ADJUST_COST. We assume that anti and output
11872 dependencies have no cost, except on the 20Kc where output-dependence
11873 is treated like input-dependence. */
11876 mips_adjust_cost (rtx insn ATTRIBUTE_UNUSED
, rtx link
,
11877 rtx dep ATTRIBUTE_UNUSED
, int cost
)
11879 if (REG_NOTE_KIND (link
) == REG_DEP_OUTPUT
11882 if (REG_NOTE_KIND (link
) != 0)
11887 /* Return the number of instructions that can be issued per cycle. */
11890 mips_issue_rate (void)
11894 case PROCESSOR_74KC
:
11895 case PROCESSOR_74KF2_1
:
11896 case PROCESSOR_74KF1_1
:
11897 case PROCESSOR_74KF3_2
:
11898 /* The 74k is not strictly quad-issue cpu, but can be seen as one
11899 by the scheduler. It can issue 1 ALU, 1 AGEN and 2 FPU insns,
11900 but in reality only a maximum of 3 insns can be issued as the
11901 floating point load/stores also require a slot in the AGEN pipe. */
11904 case PROCESSOR_20KC
:
11905 case PROCESSOR_R4130
:
11906 case PROCESSOR_R5400
:
11907 case PROCESSOR_R5500
:
11908 case PROCESSOR_R7000
:
11909 case PROCESSOR_R9000
:
11912 case PROCESSOR_SB1
:
11913 case PROCESSOR_SB1A
:
11914 /* This is actually 4, but we get better performance if we claim 3.
11915 This is partly because of unwanted speculative code motion with the
11916 larger number, and partly because in most common cases we can't
11917 reach the theoretical max of 4. */
/* Implements TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD.  This should
   be as wide as the scheduling freedom in the DFA.  */

static int
mips_multipass_dfa_lookahead (void)
{
  /* Can schedule up to 4 of the 6 function units in any one cycle.  */
  return 4;
}
11938 /* Implements a store data bypass check. We need this because the cprestore
11939 pattern is type store, but defined using an UNSPEC. This UNSPEC causes the
11940 default routine to abort. We just return false for that case. */
11941 /* ??? Should try to give a better result here than assuming false. */
11944 mips_store_data_bypass_p (rtx out_insn
, rtx in_insn
)
11946 if (GET_CODE (PATTERN (in_insn
)) == UNSPEC_VOLATILE
)
11949 return ! store_data_bypass_p (out_insn
, in_insn
);
11952 /* Given that we have an rtx of the form (prefetch ... WRITE LOCALITY),
11953 return the first operand of the associated "pref" or "prefx" insn. */
11956 mips_prefetch_cookie (rtx write
, rtx locality
)
11958 /* store_streamed / load_streamed. */
11959 if (INTVAL (locality
) <= 0)
11960 return GEN_INT (INTVAL (write
) + 4);
11962 /* store / load. */
11963 if (INTVAL (locality
) <= 2)
11966 /* store_retained / load_retained. */
11967 return GEN_INT (INTVAL (write
) + 6);
11970 /* MIPS builtin function support. */
11972 struct builtin_description
11974 /* The code of the main .md file instruction. See mips_builtin_type
11975 for more information. */
11976 enum insn_code icode
;
11978 /* The floating-point comparison code to use with ICODE, if any. */
11979 enum mips_fp_condition cond
;
11981 /* The name of the builtin function. */
11984 /* Specifies how the function should be expanded. */
11985 enum mips_builtin_type builtin_type
;
11987 /* The function's prototype. */
11988 enum mips_function_type function_type
;
11990 /* The target flags required for this function. */
/* Define a MIPS_BUILTIN_DIRECT function for instruction CODE_FOR_mips_<INSN>.
   FUNCTION_TYPE and TARGET_FLAGS are builtin_description fields.  */
#define DIRECT_BUILTIN(INSN, FUNCTION_TYPE, TARGET_FLAGS)		\
  { CODE_FOR_mips_ ## INSN, 0, "__builtin_mips_" #INSN,			\
    MIPS_BUILTIN_DIRECT, FUNCTION_TYPE, TARGET_FLAGS }

/* Define __builtin_mips_<INSN>_<COND>_{s,d}, both of which require
   TARGET_FLAGS.  */
#define CMP_SCALAR_BUILTINS(INSN, COND, TARGET_FLAGS)			\
  { CODE_FOR_mips_ ## INSN ## _cond_s, MIPS_FP_COND_ ## COND,		\
    "__builtin_mips_" #INSN "_" #COND "_s",				\
    MIPS_BUILTIN_CMP_SINGLE, MIPS_INT_FTYPE_SF_SF, TARGET_FLAGS },	\
  { CODE_FOR_mips_ ## INSN ## _cond_d, MIPS_FP_COND_ ## COND,		\
    "__builtin_mips_" #INSN "_" #COND "_d",				\
    MIPS_BUILTIN_CMP_SINGLE, MIPS_INT_FTYPE_DF_DF, TARGET_FLAGS }

/* Define __builtin_mips_{any,all,upper,lower}_<INSN>_<COND>_ps.
   The lower and upper forms require TARGET_FLAGS while the any and all
   forms require MASK_MIPS3D.  */
#define CMP_PS_BUILTINS(INSN, COND, TARGET_FLAGS)			\
  { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND,		\
    "__builtin_mips_any_" #INSN "_" #COND "_ps",			\
    MIPS_BUILTIN_CMP_ANY, MIPS_INT_FTYPE_V2SF_V2SF, MASK_MIPS3D },	\
  { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND,		\
    "__builtin_mips_all_" #INSN "_" #COND "_ps",			\
    MIPS_BUILTIN_CMP_ALL, MIPS_INT_FTYPE_V2SF_V2SF, MASK_MIPS3D },	\
  { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND,		\
    "__builtin_mips_lower_" #INSN "_" #COND "_ps",			\
    MIPS_BUILTIN_CMP_LOWER, MIPS_INT_FTYPE_V2SF_V2SF, TARGET_FLAGS },	\
  { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND,		\
    "__builtin_mips_upper_" #INSN "_" #COND "_ps",			\
    MIPS_BUILTIN_CMP_UPPER, MIPS_INT_FTYPE_V2SF_V2SF, TARGET_FLAGS }

/* Define __builtin_mips_{any,all}_<INSN>_<COND>_4s.  The functions
   require MASK_MIPS3D.  */
#define CMP_4S_BUILTINS(INSN, COND)					\
  { CODE_FOR_mips_ ## INSN ## _cond_4s, MIPS_FP_COND_ ## COND,		\
    "__builtin_mips_any_" #INSN "_" #COND "_4s",			\
    MIPS_BUILTIN_CMP_ANY, MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF,		\
    MASK_MIPS3D },							\
  { CODE_FOR_mips_ ## INSN ## _cond_4s, MIPS_FP_COND_ ## COND,		\
    "__builtin_mips_all_" #INSN "_" #COND "_4s",			\
    MIPS_BUILTIN_CMP_ALL, MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF,		\
    MASK_MIPS3D }

/* Define __builtin_mips_mov{t,f}_<INSN>_<COND>_ps.  The comparison
   instruction requires TARGET_FLAGS.  */
#define MOVTF_BUILTINS(INSN, COND, TARGET_FLAGS)			\
  { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND,		\
    "__builtin_mips_movt_" #INSN "_" #COND "_ps",			\
    MIPS_BUILTIN_MOVT, MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF,		\
    TARGET_FLAGS },							\
  { CODE_FOR_mips_ ## INSN ## _cond_ps, MIPS_FP_COND_ ## COND,		\
    "__builtin_mips_movf_" #INSN "_" #COND "_ps",			\
    MIPS_BUILTIN_MOVF, MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF,		\
    TARGET_FLAGS }

/* Define all the builtins related to c.cond.fmt condition COND.  */
#define CMP_BUILTINS(COND)						\
  MOVTF_BUILTINS (c, COND, MASK_PAIRED_SINGLE_FLOAT),			\
  MOVTF_BUILTINS (cabs, COND, MASK_MIPS3D),				\
  CMP_SCALAR_BUILTINS (cabs, COND, MASK_MIPS3D),			\
  CMP_PS_BUILTINS (c, COND, MASK_PAIRED_SINGLE_FLOAT),			\
  CMP_PS_BUILTINS (cabs, COND, MASK_MIPS3D),				\
  CMP_4S_BUILTINS (c, COND),						\
  CMP_4S_BUILTINS (cabs, COND)
12061 static const struct builtin_description mips_bdesc
[] =
12063 DIRECT_BUILTIN (pll_ps
, MIPS_V2SF_FTYPE_V2SF_V2SF
, MASK_PAIRED_SINGLE_FLOAT
),
12064 DIRECT_BUILTIN (pul_ps
, MIPS_V2SF_FTYPE_V2SF_V2SF
, MASK_PAIRED_SINGLE_FLOAT
),
12065 DIRECT_BUILTIN (plu_ps
, MIPS_V2SF_FTYPE_V2SF_V2SF
, MASK_PAIRED_SINGLE_FLOAT
),
12066 DIRECT_BUILTIN (puu_ps
, MIPS_V2SF_FTYPE_V2SF_V2SF
, MASK_PAIRED_SINGLE_FLOAT
),
12067 DIRECT_BUILTIN (cvt_ps_s
, MIPS_V2SF_FTYPE_SF_SF
, MASK_PAIRED_SINGLE_FLOAT
),
12068 DIRECT_BUILTIN (cvt_s_pl
, MIPS_SF_FTYPE_V2SF
, MASK_PAIRED_SINGLE_FLOAT
),
12069 DIRECT_BUILTIN (cvt_s_pu
, MIPS_SF_FTYPE_V2SF
, MASK_PAIRED_SINGLE_FLOAT
),
12070 DIRECT_BUILTIN (abs_ps
, MIPS_V2SF_FTYPE_V2SF
, MASK_PAIRED_SINGLE_FLOAT
),
12072 DIRECT_BUILTIN (alnv_ps
, MIPS_V2SF_FTYPE_V2SF_V2SF_INT
,
12073 MASK_PAIRED_SINGLE_FLOAT
),
12074 DIRECT_BUILTIN (addr_ps
, MIPS_V2SF_FTYPE_V2SF_V2SF
, MASK_MIPS3D
),
12075 DIRECT_BUILTIN (mulr_ps
, MIPS_V2SF_FTYPE_V2SF_V2SF
, MASK_MIPS3D
),
12076 DIRECT_BUILTIN (cvt_pw_ps
, MIPS_V2SF_FTYPE_V2SF
, MASK_MIPS3D
),
12077 DIRECT_BUILTIN (cvt_ps_pw
, MIPS_V2SF_FTYPE_V2SF
, MASK_MIPS3D
),
12079 DIRECT_BUILTIN (recip1_s
, MIPS_SF_FTYPE_SF
, MASK_MIPS3D
),
12080 DIRECT_BUILTIN (recip1_d
, MIPS_DF_FTYPE_DF
, MASK_MIPS3D
),
12081 DIRECT_BUILTIN (recip1_ps
, MIPS_V2SF_FTYPE_V2SF
, MASK_MIPS3D
),
12082 DIRECT_BUILTIN (recip2_s
, MIPS_SF_FTYPE_SF_SF
, MASK_MIPS3D
),
12083 DIRECT_BUILTIN (recip2_d
, MIPS_DF_FTYPE_DF_DF
, MASK_MIPS3D
),
12084 DIRECT_BUILTIN (recip2_ps
, MIPS_V2SF_FTYPE_V2SF_V2SF
, MASK_MIPS3D
),
12086 DIRECT_BUILTIN (rsqrt1_s
, MIPS_SF_FTYPE_SF
, MASK_MIPS3D
),
12087 DIRECT_BUILTIN (rsqrt1_d
, MIPS_DF_FTYPE_DF
, MASK_MIPS3D
),
12088 DIRECT_BUILTIN (rsqrt1_ps
, MIPS_V2SF_FTYPE_V2SF
, MASK_MIPS3D
),
12089 DIRECT_BUILTIN (rsqrt2_s
, MIPS_SF_FTYPE_SF_SF
, MASK_MIPS3D
),
12090 DIRECT_BUILTIN (rsqrt2_d
, MIPS_DF_FTYPE_DF_DF
, MASK_MIPS3D
),
12091 DIRECT_BUILTIN (rsqrt2_ps
, MIPS_V2SF_FTYPE_V2SF_V2SF
, MASK_MIPS3D
),
12093 MIPS_FP_CONDITIONS (CMP_BUILTINS
)
12096 /* Builtin functions for the SB-1 processor. */
12098 #define CODE_FOR_mips_sqrt_ps CODE_FOR_sqrtv2sf2
12100 static const struct builtin_description sb1_bdesc
[] =
12102 DIRECT_BUILTIN (sqrt_ps
, MIPS_V2SF_FTYPE_V2SF
, MASK_PAIRED_SINGLE_FLOAT
)
/* Builtin functions for DSP ASE.  */

#define CODE_FOR_mips_addq_ph CODE_FOR_addv2hi3
#define CODE_FOR_mips_addu_qb CODE_FOR_addv4qi3
#define CODE_FOR_mips_subq_ph CODE_FOR_subv2hi3
#define CODE_FOR_mips_subu_qb CODE_FOR_subv4qi3
#define CODE_FOR_mips_mul_ph CODE_FOR_mulv2hi3

/* Define a MIPS_BUILTIN_DIRECT_NO_TARGET function for instruction
   CODE_FOR_mips_<INSN>.  FUNCTION_TYPE and TARGET_FLAGS are
   builtin_description fields.  */
#define DIRECT_NO_TARGET_BUILTIN(INSN, FUNCTION_TYPE, TARGET_FLAGS)	\
  { CODE_FOR_mips_ ## INSN, 0, "__builtin_mips_" #INSN,			\
    MIPS_BUILTIN_DIRECT_NO_TARGET, FUNCTION_TYPE, TARGET_FLAGS }

/* Define __builtin_mips_bposge<VALUE>.  <VALUE> is 32 for the MIPS32 DSP
   branch instruction.  TARGET_FLAGS is a builtin_description field.  */
#define BPOSGE_BUILTIN(VALUE, TARGET_FLAGS)				\
  { CODE_FOR_mips_bposge, 0, "__builtin_mips_bposge" #VALUE,		\
    MIPS_BUILTIN_BPOSGE ## VALUE, MIPS_SI_FTYPE_VOID, TARGET_FLAGS }
12126 static const struct builtin_description dsp_bdesc
[] =
12128 DIRECT_BUILTIN (addq_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSP
),
12129 DIRECT_BUILTIN (addq_s_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSP
),
12130 DIRECT_BUILTIN (addq_s_w
, MIPS_SI_FTYPE_SI_SI
, MASK_DSP
),
12131 DIRECT_BUILTIN (addu_qb
, MIPS_V4QI_FTYPE_V4QI_V4QI
, MASK_DSP
),
12132 DIRECT_BUILTIN (addu_s_qb
, MIPS_V4QI_FTYPE_V4QI_V4QI
, MASK_DSP
),
12133 DIRECT_BUILTIN (subq_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSP
),
12134 DIRECT_BUILTIN (subq_s_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSP
),
12135 DIRECT_BUILTIN (subq_s_w
, MIPS_SI_FTYPE_SI_SI
, MASK_DSP
),
12136 DIRECT_BUILTIN (subu_qb
, MIPS_V4QI_FTYPE_V4QI_V4QI
, MASK_DSP
),
12137 DIRECT_BUILTIN (subu_s_qb
, MIPS_V4QI_FTYPE_V4QI_V4QI
, MASK_DSP
),
12138 DIRECT_BUILTIN (addsc
, MIPS_SI_FTYPE_SI_SI
, MASK_DSP
),
12139 DIRECT_BUILTIN (addwc
, MIPS_SI_FTYPE_SI_SI
, MASK_DSP
),
12140 DIRECT_BUILTIN (modsub
, MIPS_SI_FTYPE_SI_SI
, MASK_DSP
),
12141 DIRECT_BUILTIN (raddu_w_qb
, MIPS_SI_FTYPE_V4QI
, MASK_DSP
),
12142 DIRECT_BUILTIN (absq_s_ph
, MIPS_V2HI_FTYPE_V2HI
, MASK_DSP
),
12143 DIRECT_BUILTIN (absq_s_w
, MIPS_SI_FTYPE_SI
, MASK_DSP
),
12144 DIRECT_BUILTIN (precrq_qb_ph
, MIPS_V4QI_FTYPE_V2HI_V2HI
, MASK_DSP
),
12145 DIRECT_BUILTIN (precrq_ph_w
, MIPS_V2HI_FTYPE_SI_SI
, MASK_DSP
),
12146 DIRECT_BUILTIN (precrq_rs_ph_w
, MIPS_V2HI_FTYPE_SI_SI
, MASK_DSP
),
12147 DIRECT_BUILTIN (precrqu_s_qb_ph
, MIPS_V4QI_FTYPE_V2HI_V2HI
, MASK_DSP
),
12148 DIRECT_BUILTIN (preceq_w_phl
, MIPS_SI_FTYPE_V2HI
, MASK_DSP
),
12149 DIRECT_BUILTIN (preceq_w_phr
, MIPS_SI_FTYPE_V2HI
, MASK_DSP
),
12150 DIRECT_BUILTIN (precequ_ph_qbl
, MIPS_V2HI_FTYPE_V4QI
, MASK_DSP
),
12151 DIRECT_BUILTIN (precequ_ph_qbr
, MIPS_V2HI_FTYPE_V4QI
, MASK_DSP
),
12152 DIRECT_BUILTIN (precequ_ph_qbla
, MIPS_V2HI_FTYPE_V4QI
, MASK_DSP
),
12153 DIRECT_BUILTIN (precequ_ph_qbra
, MIPS_V2HI_FTYPE_V4QI
, MASK_DSP
),
12154 DIRECT_BUILTIN (preceu_ph_qbl
, MIPS_V2HI_FTYPE_V4QI
, MASK_DSP
),
12155 DIRECT_BUILTIN (preceu_ph_qbr
, MIPS_V2HI_FTYPE_V4QI
, MASK_DSP
),
12156 DIRECT_BUILTIN (preceu_ph_qbla
, MIPS_V2HI_FTYPE_V4QI
, MASK_DSP
),
12157 DIRECT_BUILTIN (preceu_ph_qbra
, MIPS_V2HI_FTYPE_V4QI
, MASK_DSP
),
12158 DIRECT_BUILTIN (shll_qb
, MIPS_V4QI_FTYPE_V4QI_SI
, MASK_DSP
),
12159 DIRECT_BUILTIN (shll_ph
, MIPS_V2HI_FTYPE_V2HI_SI
, MASK_DSP
),
12160 DIRECT_BUILTIN (shll_s_ph
, MIPS_V2HI_FTYPE_V2HI_SI
, MASK_DSP
),
12161 DIRECT_BUILTIN (shll_s_w
, MIPS_SI_FTYPE_SI_SI
, MASK_DSP
),
12162 DIRECT_BUILTIN (shrl_qb
, MIPS_V4QI_FTYPE_V4QI_SI
, MASK_DSP
),
12163 DIRECT_BUILTIN (shra_ph
, MIPS_V2HI_FTYPE_V2HI_SI
, MASK_DSP
),
12164 DIRECT_BUILTIN (shra_r_ph
, MIPS_V2HI_FTYPE_V2HI_SI
, MASK_DSP
),
12165 DIRECT_BUILTIN (shra_r_w
, MIPS_SI_FTYPE_SI_SI
, MASK_DSP
),
12166 DIRECT_BUILTIN (muleu_s_ph_qbl
, MIPS_V2HI_FTYPE_V4QI_V2HI
, MASK_DSP
),
12167 DIRECT_BUILTIN (muleu_s_ph_qbr
, MIPS_V2HI_FTYPE_V4QI_V2HI
, MASK_DSP
),
12168 DIRECT_BUILTIN (mulq_rs_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSP
),
12169 DIRECT_BUILTIN (muleq_s_w_phl
, MIPS_SI_FTYPE_V2HI_V2HI
, MASK_DSP
),
12170 DIRECT_BUILTIN (muleq_s_w_phr
, MIPS_SI_FTYPE_V2HI_V2HI
, MASK_DSP
),
12171 DIRECT_BUILTIN (bitrev
, MIPS_SI_FTYPE_SI
, MASK_DSP
),
12172 DIRECT_BUILTIN (insv
, MIPS_SI_FTYPE_SI_SI
, MASK_DSP
),
12173 DIRECT_BUILTIN (repl_qb
, MIPS_V4QI_FTYPE_SI
, MASK_DSP
),
12174 DIRECT_BUILTIN (repl_ph
, MIPS_V2HI_FTYPE_SI
, MASK_DSP
),
12175 DIRECT_NO_TARGET_BUILTIN (cmpu_eq_qb
, MIPS_VOID_FTYPE_V4QI_V4QI
, MASK_DSP
),
12176 DIRECT_NO_TARGET_BUILTIN (cmpu_lt_qb
, MIPS_VOID_FTYPE_V4QI_V4QI
, MASK_DSP
),
12177 DIRECT_NO_TARGET_BUILTIN (cmpu_le_qb
, MIPS_VOID_FTYPE_V4QI_V4QI
, MASK_DSP
),
12178 DIRECT_BUILTIN (cmpgu_eq_qb
, MIPS_SI_FTYPE_V4QI_V4QI
, MASK_DSP
),
12179 DIRECT_BUILTIN (cmpgu_lt_qb
, MIPS_SI_FTYPE_V4QI_V4QI
, MASK_DSP
),
12180 DIRECT_BUILTIN (cmpgu_le_qb
, MIPS_SI_FTYPE_V4QI_V4QI
, MASK_DSP
),
12181 DIRECT_NO_TARGET_BUILTIN (cmp_eq_ph
, MIPS_VOID_FTYPE_V2HI_V2HI
, MASK_DSP
),
12182 DIRECT_NO_TARGET_BUILTIN (cmp_lt_ph
, MIPS_VOID_FTYPE_V2HI_V2HI
, MASK_DSP
),
12183 DIRECT_NO_TARGET_BUILTIN (cmp_le_ph
, MIPS_VOID_FTYPE_V2HI_V2HI
, MASK_DSP
),
12184 DIRECT_BUILTIN (pick_qb
, MIPS_V4QI_FTYPE_V4QI_V4QI
, MASK_DSP
),
12185 DIRECT_BUILTIN (pick_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSP
),
12186 DIRECT_BUILTIN (packrl_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSP
),
12187 DIRECT_NO_TARGET_BUILTIN (wrdsp
, MIPS_VOID_FTYPE_SI_SI
, MASK_DSP
),
12188 DIRECT_BUILTIN (rddsp
, MIPS_SI_FTYPE_SI
, MASK_DSP
),
12189 DIRECT_BUILTIN (lbux
, MIPS_SI_FTYPE_PTR_SI
, MASK_DSP
),
12190 DIRECT_BUILTIN (lhx
, MIPS_SI_FTYPE_PTR_SI
, MASK_DSP
),
12191 DIRECT_BUILTIN (lwx
, MIPS_SI_FTYPE_PTR_SI
, MASK_DSP
),
12192 BPOSGE_BUILTIN (32, MASK_DSP
),
12194 /* The following are for the MIPS DSP ASE REV 2. */
12195 DIRECT_BUILTIN (absq_s_qb
, MIPS_V4QI_FTYPE_V4QI
, MASK_DSPR2
),
12196 DIRECT_BUILTIN (addu_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSPR2
),
12197 DIRECT_BUILTIN (addu_s_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSPR2
),
12198 DIRECT_BUILTIN (adduh_qb
, MIPS_V4QI_FTYPE_V4QI_V4QI
, MASK_DSPR2
),
12199 DIRECT_BUILTIN (adduh_r_qb
, MIPS_V4QI_FTYPE_V4QI_V4QI
, MASK_DSPR2
),
12200 DIRECT_BUILTIN (append
, MIPS_SI_FTYPE_SI_SI_SI
, MASK_DSPR2
),
12201 DIRECT_BUILTIN (balign
, MIPS_SI_FTYPE_SI_SI_SI
, MASK_DSPR2
),
12202 DIRECT_BUILTIN (cmpgdu_eq_qb
, MIPS_SI_FTYPE_V4QI_V4QI
, MASK_DSPR2
),
12203 DIRECT_BUILTIN (cmpgdu_lt_qb
, MIPS_SI_FTYPE_V4QI_V4QI
, MASK_DSPR2
),
12204 DIRECT_BUILTIN (cmpgdu_le_qb
, MIPS_SI_FTYPE_V4QI_V4QI
, MASK_DSPR2
),
12205 DIRECT_BUILTIN (mul_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSPR2
),
12206 DIRECT_BUILTIN (mul_s_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSPR2
),
12207 DIRECT_BUILTIN (mulq_rs_w
, MIPS_SI_FTYPE_SI_SI
, MASK_DSPR2
),
12208 DIRECT_BUILTIN (mulq_s_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSPR2
),
12209 DIRECT_BUILTIN (mulq_s_w
, MIPS_SI_FTYPE_SI_SI
, MASK_DSPR2
),
12210 DIRECT_BUILTIN (precr_qb_ph
, MIPS_V4QI_FTYPE_V2HI_V2HI
, MASK_DSPR2
),
12211 DIRECT_BUILTIN (precr_sra_ph_w
, MIPS_V2HI_FTYPE_SI_SI_SI
, MASK_DSPR2
),
12212 DIRECT_BUILTIN (precr_sra_r_ph_w
, MIPS_V2HI_FTYPE_SI_SI_SI
, MASK_DSPR2
),
12213 DIRECT_BUILTIN (prepend
, MIPS_SI_FTYPE_SI_SI_SI
, MASK_DSPR2
),
12214 DIRECT_BUILTIN (shra_qb
, MIPS_V4QI_FTYPE_V4QI_SI
, MASK_DSPR2
),
12215 DIRECT_BUILTIN (shra_r_qb
, MIPS_V4QI_FTYPE_V4QI_SI
, MASK_DSPR2
),
12216 DIRECT_BUILTIN (shrl_ph
, MIPS_V2HI_FTYPE_V2HI_SI
, MASK_DSPR2
),
12217 DIRECT_BUILTIN (subu_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSPR2
),
12218 DIRECT_BUILTIN (subu_s_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSPR2
),
12219 DIRECT_BUILTIN (subuh_qb
, MIPS_V4QI_FTYPE_V4QI_V4QI
, MASK_DSPR2
),
12220 DIRECT_BUILTIN (subuh_r_qb
, MIPS_V4QI_FTYPE_V4QI_V4QI
, MASK_DSPR2
),
12221 DIRECT_BUILTIN (addqh_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSPR2
),
12222 DIRECT_BUILTIN (addqh_r_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSPR2
),
12223 DIRECT_BUILTIN (addqh_w
, MIPS_SI_FTYPE_SI_SI
, MASK_DSPR2
),
12224 DIRECT_BUILTIN (addqh_r_w
, MIPS_SI_FTYPE_SI_SI
, MASK_DSPR2
),
12225 DIRECT_BUILTIN (subqh_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSPR2
),
12226 DIRECT_BUILTIN (subqh_r_ph
, MIPS_V2HI_FTYPE_V2HI_V2HI
, MASK_DSPR2
),
12227 DIRECT_BUILTIN (subqh_w
, MIPS_SI_FTYPE_SI_SI
, MASK_DSPR2
),
12228 DIRECT_BUILTIN (subqh_r_w
, MIPS_SI_FTYPE_SI_SI
, MASK_DSPR2
)
12231 static const struct builtin_description dsp_32only_bdesc
[] =
12233 DIRECT_BUILTIN (dpau_h_qbl
, MIPS_DI_FTYPE_DI_V4QI_V4QI
, MASK_DSP
),
12234 DIRECT_BUILTIN (dpau_h_qbr
, MIPS_DI_FTYPE_DI_V4QI_V4QI
, MASK_DSP
),
12235 DIRECT_BUILTIN (dpsu_h_qbl
, MIPS_DI_FTYPE_DI_V4QI_V4QI
, MASK_DSP
),
12236 DIRECT_BUILTIN (dpsu_h_qbr
, MIPS_DI_FTYPE_DI_V4QI_V4QI
, MASK_DSP
),
12237 DIRECT_BUILTIN (dpaq_s_w_ph
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSP
),
12238 DIRECT_BUILTIN (dpsq_s_w_ph
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSP
),
12239 DIRECT_BUILTIN (mulsaq_s_w_ph
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSP
),
12240 DIRECT_BUILTIN (dpaq_sa_l_w
, MIPS_DI_FTYPE_DI_SI_SI
, MASK_DSP
),
12241 DIRECT_BUILTIN (dpsq_sa_l_w
, MIPS_DI_FTYPE_DI_SI_SI
, MASK_DSP
),
12242 DIRECT_BUILTIN (maq_s_w_phl
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSP
),
12243 DIRECT_BUILTIN (maq_s_w_phr
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSP
),
12244 DIRECT_BUILTIN (maq_sa_w_phl
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSP
),
12245 DIRECT_BUILTIN (maq_sa_w_phr
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSP
),
12246 DIRECT_BUILTIN (extr_w
, MIPS_SI_FTYPE_DI_SI
, MASK_DSP
),
12247 DIRECT_BUILTIN (extr_r_w
, MIPS_SI_FTYPE_DI_SI
, MASK_DSP
),
12248 DIRECT_BUILTIN (extr_rs_w
, MIPS_SI_FTYPE_DI_SI
, MASK_DSP
),
12249 DIRECT_BUILTIN (extr_s_h
, MIPS_SI_FTYPE_DI_SI
, MASK_DSP
),
12250 DIRECT_BUILTIN (extp
, MIPS_SI_FTYPE_DI_SI
, MASK_DSP
),
12251 DIRECT_BUILTIN (extpdp
, MIPS_SI_FTYPE_DI_SI
, MASK_DSP
),
12252 DIRECT_BUILTIN (shilo
, MIPS_DI_FTYPE_DI_SI
, MASK_DSP
),
12253 DIRECT_BUILTIN (mthlip
, MIPS_DI_FTYPE_DI_SI
, MASK_DSP
),
12255 /* The following are for the MIPS DSP ASE REV 2. */
12256 DIRECT_BUILTIN (dpa_w_ph
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSPR2
),
12257 DIRECT_BUILTIN (dps_w_ph
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSPR2
),
12258 DIRECT_BUILTIN (madd
, MIPS_DI_FTYPE_DI_SI_SI
, MASK_DSPR2
),
12259 DIRECT_BUILTIN (maddu
, MIPS_DI_FTYPE_DI_USI_USI
, MASK_DSPR2
),
12260 DIRECT_BUILTIN (msub
, MIPS_DI_FTYPE_DI_SI_SI
, MASK_DSPR2
),
12261 DIRECT_BUILTIN (msubu
, MIPS_DI_FTYPE_DI_USI_USI
, MASK_DSPR2
),
12262 DIRECT_BUILTIN (mulsa_w_ph
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSPR2
),
12263 DIRECT_BUILTIN (mult
, MIPS_DI_FTYPE_SI_SI
, MASK_DSPR2
),
12264 DIRECT_BUILTIN (multu
, MIPS_DI_FTYPE_USI_USI
, MASK_DSPR2
),
12265 DIRECT_BUILTIN (dpax_w_ph
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSPR2
),
12266 DIRECT_BUILTIN (dpsx_w_ph
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSPR2
),
12267 DIRECT_BUILTIN (dpaqx_s_w_ph
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSPR2
),
12268 DIRECT_BUILTIN (dpaqx_sa_w_ph
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSPR2
),
12269 DIRECT_BUILTIN (dpsqx_s_w_ph
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSPR2
),
12270 DIRECT_BUILTIN (dpsqx_sa_w_ph
, MIPS_DI_FTYPE_DI_V2HI_V2HI
, MASK_DSPR2
)
12273 /* This helps provide a mapping from builtin function codes to bdesc
12278 /* The builtin function table that this entry describes. */
12279 const struct builtin_description
*bdesc
;
12281 /* The number of entries in the builtin function table. */
12284 /* The target processor that supports these builtin functions.
12285 PROCESSOR_MAX means we enable them for all processors. */
12286 enum processor_type proc
;
12288 /* If the target has these flags, this builtin function table
12289 will not be supported. */
12290 int unsupported_target_flags
;
12293 static const struct bdesc_map bdesc_arrays
[] =
12295 { mips_bdesc
, ARRAY_SIZE (mips_bdesc
), PROCESSOR_MAX
, 0 },
12296 { sb1_bdesc
, ARRAY_SIZE (sb1_bdesc
), PROCESSOR_SB1
, 0 },
12297 { dsp_bdesc
, ARRAY_SIZE (dsp_bdesc
), PROCESSOR_MAX
, 0 },
12298 { dsp_32only_bdesc
, ARRAY_SIZE (dsp_32only_bdesc
), PROCESSOR_MAX
,
12302 /* Take the argument ARGNUM of the arglist of EXP and convert it into a form
12303 suitable for input operand OP of instruction ICODE. Return the value. */
12306 mips_prepare_builtin_arg (enum insn_code icode
,
12307 unsigned int op
, tree exp
, unsigned int argnum
)
12310 enum machine_mode mode
;
12312 value
= expand_normal (CALL_EXPR_ARG (exp
, argnum
));
12313 mode
= insn_data
[icode
].operand
[op
].mode
;
12314 if (!insn_data
[icode
].operand
[op
].predicate (value
, mode
))
12316 value
= copy_to_mode_reg (mode
, value
);
12317 /* Check the predicate again. */
12318 if (!insn_data
[icode
].operand
[op
].predicate (value
, mode
))
12320 error ("invalid argument to builtin function");
12328 /* Return an rtx suitable for output operand OP of instruction ICODE.
12329 If TARGET is non-null, try to use it where possible. */
12332 mips_prepare_builtin_target (enum insn_code icode
, unsigned int op
, rtx target
)
12334 enum machine_mode mode
;
12336 mode
= insn_data
[icode
].operand
[op
].mode
;
12337 if (target
== 0 || !insn_data
[icode
].operand
[op
].predicate (target
, mode
))
12338 target
= gen_reg_rtx (mode
);
12343 /* Expand builtin functions. This is called from TARGET_EXPAND_BUILTIN. */
12346 mips_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
12347 enum machine_mode mode ATTRIBUTE_UNUSED
,
12348 int ignore ATTRIBUTE_UNUSED
)
12350 enum insn_code icode
;
12351 enum mips_builtin_type type
;
12353 unsigned int fcode
;
12354 const struct builtin_description
*bdesc
;
12355 const struct bdesc_map
*m
;
12357 fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
12358 fcode
= DECL_FUNCTION_CODE (fndecl
);
12362 error ("built-in function %qs not supported for MIPS16",
12363 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
12368 for (m
= bdesc_arrays
; m
< &bdesc_arrays
[ARRAY_SIZE (bdesc_arrays
)]; m
++)
12370 if (fcode
< m
->size
)
12373 icode
= bdesc
[fcode
].icode
;
12374 type
= bdesc
[fcode
].builtin_type
;
12384 case MIPS_BUILTIN_DIRECT
:
12385 return mips_expand_builtin_direct (icode
, target
, exp
, true);
12387 case MIPS_BUILTIN_DIRECT_NO_TARGET
:
12388 return mips_expand_builtin_direct (icode
, target
, exp
, false);
12390 case MIPS_BUILTIN_MOVT
:
12391 case MIPS_BUILTIN_MOVF
:
12392 return mips_expand_builtin_movtf (type
, icode
, bdesc
[fcode
].cond
,
12395 case MIPS_BUILTIN_CMP_ANY
:
12396 case MIPS_BUILTIN_CMP_ALL
:
12397 case MIPS_BUILTIN_CMP_UPPER
:
12398 case MIPS_BUILTIN_CMP_LOWER
:
12399 case MIPS_BUILTIN_CMP_SINGLE
:
12400 return mips_expand_builtin_compare (type
, icode
, bdesc
[fcode
].cond
,
12403 case MIPS_BUILTIN_BPOSGE32
:
12404 return mips_expand_builtin_bposge (type
, target
);
12411 /* Init builtin functions. This is called from TARGET_INIT_BUILTIN. */
12414 mips_init_builtins (void)
12416 const struct builtin_description
*d
;
12417 const struct bdesc_map
*m
;
12418 tree types
[(int) MIPS_MAX_FTYPE_MAX
];
12419 tree V2SF_type_node
;
12420 tree V2HI_type_node
;
12421 tree V4QI_type_node
;
12422 unsigned int offset
;
12424 /* We have only builtins for -mpaired-single, -mips3d and -mdsp. */
12425 if (!TARGET_PAIRED_SINGLE_FLOAT
&& !TARGET_DSP
)
12428 if (TARGET_PAIRED_SINGLE_FLOAT
)
12430 V2SF_type_node
= build_vector_type_for_mode (float_type_node
, V2SFmode
);
12432 types
[MIPS_V2SF_FTYPE_V2SF
]
12433 = build_function_type_list (V2SF_type_node
, V2SF_type_node
, NULL_TREE
);
12435 types
[MIPS_V2SF_FTYPE_V2SF_V2SF
]
12436 = build_function_type_list (V2SF_type_node
,
12437 V2SF_type_node
, V2SF_type_node
, NULL_TREE
);
12439 types
[MIPS_V2SF_FTYPE_V2SF_V2SF_INT
]
12440 = build_function_type_list (V2SF_type_node
,
12441 V2SF_type_node
, V2SF_type_node
,
12442 integer_type_node
, NULL_TREE
);
12444 types
[MIPS_V2SF_FTYPE_V2SF_V2SF_V2SF_V2SF
]
12445 = build_function_type_list (V2SF_type_node
,
12446 V2SF_type_node
, V2SF_type_node
,
12447 V2SF_type_node
, V2SF_type_node
, NULL_TREE
);
12449 types
[MIPS_V2SF_FTYPE_SF_SF
]
12450 = build_function_type_list (V2SF_type_node
,
12451 float_type_node
, float_type_node
, NULL_TREE
);
12453 types
[MIPS_INT_FTYPE_V2SF_V2SF
]
12454 = build_function_type_list (integer_type_node
,
12455 V2SF_type_node
, V2SF_type_node
, NULL_TREE
);
12457 types
[MIPS_INT_FTYPE_V2SF_V2SF_V2SF_V2SF
]
12458 = build_function_type_list (integer_type_node
,
12459 V2SF_type_node
, V2SF_type_node
,
12460 V2SF_type_node
, V2SF_type_node
, NULL_TREE
);
12462 types
[MIPS_INT_FTYPE_SF_SF
]
12463 = build_function_type_list (integer_type_node
,
12464 float_type_node
, float_type_node
, NULL_TREE
);
12466 types
[MIPS_INT_FTYPE_DF_DF
]
12467 = build_function_type_list (integer_type_node
,
12468 double_type_node
, double_type_node
, NULL_TREE
);
12470 types
[MIPS_SF_FTYPE_V2SF
]
12471 = build_function_type_list (float_type_node
, V2SF_type_node
, NULL_TREE
);
12473 types
[MIPS_SF_FTYPE_SF
]
12474 = build_function_type_list (float_type_node
,
12475 float_type_node
, NULL_TREE
);
12477 types
[MIPS_SF_FTYPE_SF_SF
]
12478 = build_function_type_list (float_type_node
,
12479 float_type_node
, float_type_node
, NULL_TREE
);
12481 types
[MIPS_DF_FTYPE_DF
]
12482 = build_function_type_list (double_type_node
,
12483 double_type_node
, NULL_TREE
);
12485 types
[MIPS_DF_FTYPE_DF_DF
]
12486 = build_function_type_list (double_type_node
,
12487 double_type_node
, double_type_node
, NULL_TREE
);
12492 V2HI_type_node
= build_vector_type_for_mode (intHI_type_node
, V2HImode
);
12493 V4QI_type_node
= build_vector_type_for_mode (intQI_type_node
, V4QImode
);
12495 types
[MIPS_V2HI_FTYPE_V2HI_V2HI
]
12496 = build_function_type_list (V2HI_type_node
,
12497 V2HI_type_node
, V2HI_type_node
,
12500 types
[MIPS_SI_FTYPE_SI_SI
]
12501 = build_function_type_list (intSI_type_node
,
12502 intSI_type_node
, intSI_type_node
,
12505 types
[MIPS_V4QI_FTYPE_V4QI_V4QI
]
12506 = build_function_type_list (V4QI_type_node
,
12507 V4QI_type_node
, V4QI_type_node
,
12510 types
[MIPS_SI_FTYPE_V4QI
]
12511 = build_function_type_list (intSI_type_node
,
12515 types
[MIPS_V2HI_FTYPE_V2HI
]
12516 = build_function_type_list (V2HI_type_node
,
12520 types
[MIPS_SI_FTYPE_SI
]
12521 = build_function_type_list (intSI_type_node
,
12525 types
[MIPS_V4QI_FTYPE_V2HI_V2HI
]
12526 = build_function_type_list (V4QI_type_node
,
12527 V2HI_type_node
, V2HI_type_node
,
12530 types
[MIPS_V2HI_FTYPE_SI_SI
]
12531 = build_function_type_list (V2HI_type_node
,
12532 intSI_type_node
, intSI_type_node
,
12535 types
[MIPS_SI_FTYPE_V2HI
]
12536 = build_function_type_list (intSI_type_node
,
12540 types
[MIPS_V2HI_FTYPE_V4QI
]
12541 = build_function_type_list (V2HI_type_node
,
12545 types
[MIPS_V4QI_FTYPE_V4QI_SI
]
12546 = build_function_type_list (V4QI_type_node
,
12547 V4QI_type_node
, intSI_type_node
,
12550 types
[MIPS_V2HI_FTYPE_V2HI_SI
]
12551 = build_function_type_list (V2HI_type_node
,
12552 V2HI_type_node
, intSI_type_node
,
12555 types
[MIPS_V2HI_FTYPE_V4QI_V2HI
]
12556 = build_function_type_list (V2HI_type_node
,
12557 V4QI_type_node
, V2HI_type_node
,
12560 types
[MIPS_SI_FTYPE_V2HI_V2HI
]
12561 = build_function_type_list (intSI_type_node
,
12562 V2HI_type_node
, V2HI_type_node
,
12565 types
[MIPS_DI_FTYPE_DI_V4QI_V4QI
]
12566 = build_function_type_list (intDI_type_node
,
12567 intDI_type_node
, V4QI_type_node
, V4QI_type_node
,
12570 types
[MIPS_DI_FTYPE_DI_V2HI_V2HI
]
12571 = build_function_type_list (intDI_type_node
,
12572 intDI_type_node
, V2HI_type_node
, V2HI_type_node
,
12575 types
[MIPS_DI_FTYPE_DI_SI_SI
]
12576 = build_function_type_list (intDI_type_node
,
12577 intDI_type_node
, intSI_type_node
, intSI_type_node
,
12580 types
[MIPS_V4QI_FTYPE_SI
]
12581 = build_function_type_list (V4QI_type_node
,
12585 types
[MIPS_V2HI_FTYPE_SI
]
12586 = build_function_type_list (V2HI_type_node
,
12590 types
[MIPS_VOID_FTYPE_V4QI_V4QI
]
12591 = build_function_type_list (void_type_node
,
12592 V4QI_type_node
, V4QI_type_node
,
12595 types
[MIPS_SI_FTYPE_V4QI_V4QI
]
12596 = build_function_type_list (intSI_type_node
,
12597 V4QI_type_node
, V4QI_type_node
,
12600 types
[MIPS_VOID_FTYPE_V2HI_V2HI
]
12601 = build_function_type_list (void_type_node
,
12602 V2HI_type_node
, V2HI_type_node
,
12605 types
[MIPS_SI_FTYPE_DI_SI
]
12606 = build_function_type_list (intSI_type_node
,
12607 intDI_type_node
, intSI_type_node
,
12610 types
[MIPS_DI_FTYPE_DI_SI
]
12611 = build_function_type_list (intDI_type_node
,
12612 intDI_type_node
, intSI_type_node
,
12615 types
[MIPS_VOID_FTYPE_SI_SI
]
12616 = build_function_type_list (void_type_node
,
12617 intSI_type_node
, intSI_type_node
,
12620 types
[MIPS_SI_FTYPE_PTR_SI
]
12621 = build_function_type_list (intSI_type_node
,
12622 ptr_type_node
, intSI_type_node
,
12625 types
[MIPS_SI_FTYPE_VOID
]
12626 = build_function_type (intSI_type_node
, void_list_node
);
12630 types
[MIPS_V4QI_FTYPE_V4QI
]
12631 = build_function_type_list (V4QI_type_node
,
12635 types
[MIPS_SI_FTYPE_SI_SI_SI
]
12636 = build_function_type_list (intSI_type_node
,
12637 intSI_type_node
, intSI_type_node
,
12638 intSI_type_node
, NULL_TREE
);
12640 types
[MIPS_DI_FTYPE_DI_USI_USI
]
12641 = build_function_type_list (intDI_type_node
,
12643 unsigned_intSI_type_node
,
12644 unsigned_intSI_type_node
, NULL_TREE
);
12646 types
[MIPS_DI_FTYPE_SI_SI
]
12647 = build_function_type_list (intDI_type_node
,
12648 intSI_type_node
, intSI_type_node
,
12651 types
[MIPS_DI_FTYPE_USI_USI
]
12652 = build_function_type_list (intDI_type_node
,
12653 unsigned_intSI_type_node
,
12654 unsigned_intSI_type_node
, NULL_TREE
);
12656 types
[MIPS_V2HI_FTYPE_SI_SI_SI
]
12657 = build_function_type_list (V2HI_type_node
,
12658 intSI_type_node
, intSI_type_node
,
12659 intSI_type_node
, NULL_TREE
);
12664 /* Iterate through all of the bdesc arrays, initializing all of the
12665 builtin functions. */
12668 for (m
= bdesc_arrays
; m
< &bdesc_arrays
[ARRAY_SIZE (bdesc_arrays
)]; m
++)
12670 if ((m
->proc
== PROCESSOR_MAX
|| (m
->proc
== mips_arch
))
12671 && (m
->unsupported_target_flags
& target_flags
) == 0)
12672 for (d
= m
->bdesc
; d
< &m
->bdesc
[m
->size
]; d
++)
12673 if ((d
->target_flags
& target_flags
) == d
->target_flags
)
12674 add_builtin_function (d
->name
, types
[d
->function_type
],
12675 d
- m
->bdesc
+ offset
,
12676 BUILT_IN_MD
, NULL
, NULL
);
12681 /* Expand a MIPS_BUILTIN_DIRECT function. ICODE is the code of the
12682 .md pattern and CALL is the function expr with arguments. TARGET,
12683 if nonnull, suggests a good place to put the result.
12684 HAS_TARGET indicates the function must return something. */
12687 mips_expand_builtin_direct (enum insn_code icode
, rtx target
, tree exp
,
12690 rtx ops
[MAX_RECOG_OPERANDS
];
12696 /* We save target to ops[0]. */
12697 ops
[0] = mips_prepare_builtin_target (icode
, 0, target
);
12701 /* We need to test if the arglist is not zero. Some instructions have extra
12702 clobber registers. */
12703 for (; i
< insn_data
[icode
].n_operands
&& i
<= call_expr_nargs (exp
); i
++, j
++)
12704 ops
[i
] = mips_prepare_builtin_arg (icode
, i
, exp
, j
);
12709 emit_insn (GEN_FCN (icode
) (ops
[0], ops
[1]));
12713 emit_insn (GEN_FCN (icode
) (ops
[0], ops
[1], ops
[2]));
12717 emit_insn (GEN_FCN (icode
) (ops
[0], ops
[1], ops
[2], ops
[3]));
12721 gcc_unreachable ();
12726 /* Expand a __builtin_mips_movt_*_ps() or __builtin_mips_movf_*_ps()
12727 function (TYPE says which). EXP is the tree for the function
12728 function, ICODE is the instruction that should be used to compare
12729 the first two arguments, and COND is the condition it should test.
12730 TARGET, if nonnull, suggests a good place to put the result. */
12733 mips_expand_builtin_movtf (enum mips_builtin_type type
,
12734 enum insn_code icode
, enum mips_fp_condition cond
,
12735 rtx target
, tree exp
)
12737 rtx cmp_result
, op0
, op1
;
12739 cmp_result
= mips_prepare_builtin_target (icode
, 0, 0);
12740 op0
= mips_prepare_builtin_arg (icode
, 1, exp
, 0);
12741 op1
= mips_prepare_builtin_arg (icode
, 2, exp
, 1);
12742 emit_insn (GEN_FCN (icode
) (cmp_result
, op0
, op1
, GEN_INT (cond
)));
12744 icode
= CODE_FOR_mips_cond_move_tf_ps
;
12745 target
= mips_prepare_builtin_target (icode
, 0, target
);
12746 if (type
== MIPS_BUILTIN_MOVT
)
12748 op1
= mips_prepare_builtin_arg (icode
, 2, exp
, 2);
12749 op0
= mips_prepare_builtin_arg (icode
, 1, exp
, 3);
12753 op0
= mips_prepare_builtin_arg (icode
, 1, exp
, 2);
12754 op1
= mips_prepare_builtin_arg (icode
, 2, exp
, 3);
12756 emit_insn (gen_mips_cond_move_tf_ps (target
, op0
, op1
, cmp_result
));
12760 /* Move VALUE_IF_TRUE into TARGET if CONDITION is true; move VALUE_IF_FALSE
12761 into TARGET otherwise. Return TARGET. */
12764 mips_builtin_branch_and_move (rtx condition
, rtx target
,
12765 rtx value_if_true
, rtx value_if_false
)
12767 rtx true_label
, done_label
;
12769 true_label
= gen_label_rtx ();
12770 done_label
= gen_label_rtx ();
12772 /* First assume that CONDITION is false. */
12773 mips_emit_move (target
, value_if_false
);
12775 /* Branch to TRUE_LABEL if CONDITION is true and DONE_LABEL otherwise. */
12776 emit_jump_insn (gen_condjump (condition
, true_label
));
12777 emit_jump_insn (gen_jump (done_label
));
12780 /* Fix TARGET if CONDITION is true. */
12781 emit_label (true_label
);
12782 mips_emit_move (target
, value_if_true
);
12784 emit_label (done_label
);
12788 /* Expand a comparison builtin of type BUILTIN_TYPE. ICODE is the code
12789 of the comparison instruction and COND is the condition it should test.
12790 EXP is the function call and arguments and TARGET, if nonnull,
12791 suggests a good place to put the boolean result. */
12794 mips_expand_builtin_compare (enum mips_builtin_type builtin_type
,
12795 enum insn_code icode
, enum mips_fp_condition cond
,
12796 rtx target
, tree exp
)
12798 rtx offset
, condition
, cmp_result
, ops
[MAX_RECOG_OPERANDS
];
12802 if (target
== 0 || GET_MODE (target
) != SImode
)
12803 target
= gen_reg_rtx (SImode
);
12805 /* Prepare the operands to the comparison. */
12806 cmp_result
= mips_prepare_builtin_target (icode
, 0, 0);
12807 for (i
= 1; i
< insn_data
[icode
].n_operands
- 1; i
++, j
++)
12808 ops
[i
] = mips_prepare_builtin_arg (icode
, i
, exp
, j
);
12810 switch (insn_data
[icode
].n_operands
)
12813 emit_insn (GEN_FCN (icode
) (cmp_result
, ops
[1], ops
[2], GEN_INT (cond
)));
12817 emit_insn (GEN_FCN (icode
) (cmp_result
, ops
[1], ops
[2],
12818 ops
[3], ops
[4], GEN_INT (cond
)));
12822 gcc_unreachable ();
12825 /* If the comparison sets more than one register, we define the result
12826 to be 0 if all registers are false and -1 if all registers are true.
12827 The value of the complete result is indeterminate otherwise. */
12828 switch (builtin_type
)
12830 case MIPS_BUILTIN_CMP_ALL
:
12831 condition
= gen_rtx_NE (VOIDmode
, cmp_result
, constm1_rtx
);
12832 return mips_builtin_branch_and_move (condition
, target
,
12833 const0_rtx
, const1_rtx
);
12835 case MIPS_BUILTIN_CMP_UPPER
:
12836 case MIPS_BUILTIN_CMP_LOWER
:
12837 offset
= GEN_INT (builtin_type
== MIPS_BUILTIN_CMP_UPPER
);
12838 condition
= gen_single_cc (cmp_result
, offset
);
12839 return mips_builtin_branch_and_move (condition
, target
,
12840 const1_rtx
, const0_rtx
);
12843 condition
= gen_rtx_NE (VOIDmode
, cmp_result
, const0_rtx
);
12844 return mips_builtin_branch_and_move (condition
, target
,
12845 const1_rtx
, const0_rtx
);
12849 /* Expand a bposge builtin of type BUILTIN_TYPE. TARGET, if nonnull,
12850 suggests a good place to put the boolean result. */
12853 mips_expand_builtin_bposge (enum mips_builtin_type builtin_type
, rtx target
)
12855 rtx condition
, cmp_result
;
12858 if (target
== 0 || GET_MODE (target
) != SImode
)
12859 target
= gen_reg_rtx (SImode
);
12861 cmp_result
= gen_rtx_REG (CCDSPmode
, CCDSP_PO_REGNUM
);
12863 if (builtin_type
== MIPS_BUILTIN_BPOSGE32
)
12868 condition
= gen_rtx_GE (VOIDmode
, cmp_result
, GEN_INT (cmp_value
));
12869 return mips_builtin_branch_and_move (condition
, target
,
12870 const1_rtx
, const0_rtx
);
12873 /* Set SYMBOL_REF_FLAGS for the SYMBOL_REF inside RTL, which belongs to DECL.
12874 FIRST is true if this is the first time handling this decl. */
12877 mips_encode_section_info (tree decl
, rtx rtl
, int first
)
12879 default_encode_section_info (decl
, rtl
, first
);
12881 if (TREE_CODE (decl
) == FUNCTION_DECL
)
12883 rtx symbol
= XEXP (rtl
, 0);
12884 tree type
= TREE_TYPE (decl
);
12886 if ((TARGET_LONG_CALLS
&& !mips_near_type_p (type
))
12887 || mips_far_type_p (type
))
12888 SYMBOL_REF_FLAGS (symbol
) |= SYMBOL_FLAG_LONG_CALL
;
12892 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. Some code models use the incoming
12893 value of PIC_FUNCTION_ADDR_REGNUM to set up the global pointer. */
12896 mips_extra_live_on_entry (bitmap regs
)
12898 if (TARGET_USE_GOT
&& !TARGET_ABSOLUTE_ABICALLS
)
12899 bitmap_set_bit (regs
, PIC_FUNCTION_ADDR_REGNUM
);
12902 /* SImode values are represented as sign-extended to DImode. */
12905 mips_mode_rep_extended (enum machine_mode mode
, enum machine_mode mode_rep
)
12907 if (TARGET_64BIT
&& mode
== SImode
&& mode_rep
== DImode
)
12908 return SIGN_EXTEND
;
12913 /* MIPS implementation of TARGET_ASM_OUTPUT_DWARF_DTPREL. */
12916 mips_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
12921 fputs ("\t.dtprelword\t", file
);
12925 fputs ("\t.dtpreldword\t", file
);
12929 gcc_unreachable ();
12931 output_addr_const (file
, x
);
12932 fputs ("+0x8000", file
);
12935 #include "gt-mips.h"