1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
4 Copyright (C) 2009-2024 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #define IN_TARGET_CODE 1
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "attribs.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "output.h"
#include "calls.h"
#include "alias.h"
#include "explow.h"
#include "expr.h"
#include "tm-constrs.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"
51 struct lm32_frame_info
53 HOST_WIDE_INT total_size
; /* number of bytes of entire frame. */
54 HOST_WIDE_INT callee_size
; /* number of bytes to save callee saves. */
55 HOST_WIDE_INT pretend_size
; /* number of bytes we pretend caller did. */
56 HOST_WIDE_INT args_size
; /* number of bytes for outgoing arguments. */
57 HOST_WIDE_INT locals_size
; /* number of bytes for local variables. */
58 unsigned int reg_save_mask
; /* mask of saved registers. */
61 /* Prototypes for static functions. */
62 static rtx
emit_add (rtx dest
, rtx src0
, rtx src1
);
63 static void expand_save_restore (struct lm32_frame_info
*info
, int op
);
64 static void stack_adjust (HOST_WIDE_INT amount
);
65 static bool lm32_in_small_data_p (const_tree
);
66 static void lm32_setup_incoming_varargs (cumulative_args_t cum
,
67 const function_arg_info
&,
68 int *pretend_size
, int no_rtl
);
69 static bool lm32_rtx_costs (rtx x
, machine_mode mode
, int outer_code
, int opno
,
70 int *total
, bool speed
);
71 static bool lm32_can_eliminate (const int, const int);
72 static bool lm32_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
,
73 code_helper
= ERROR_MARK
);
74 static HOST_WIDE_INT
lm32_compute_frame_size (int size
);
75 static void lm32_option_override (void);
76 static rtx
lm32_function_arg (cumulative_args_t
, const function_arg_info
&);
77 static void lm32_function_arg_advance (cumulative_args_t cum
,
78 const function_arg_info
&);
79 static bool lm32_hard_regno_mode_ok (unsigned int, machine_mode
);
80 static bool lm32_modes_tieable_p (machine_mode
, machine_mode
);
81 static HOST_WIDE_INT
lm32_starting_frame_offset (void);
83 #undef TARGET_OPTION_OVERRIDE
84 #define TARGET_OPTION_OVERRIDE lm32_option_override
85 #undef TARGET_ADDRESS_COST
86 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
87 #undef TARGET_RTX_COSTS
88 #define TARGET_RTX_COSTS lm32_rtx_costs
89 #undef TARGET_IN_SMALL_DATA_P
90 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
91 #undef TARGET_PROMOTE_FUNCTION_MODE
92 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
93 #undef TARGET_SETUP_INCOMING_VARARGS
94 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
95 #undef TARGET_FUNCTION_ARG
96 #define TARGET_FUNCTION_ARG lm32_function_arg
97 #undef TARGET_FUNCTION_ARG_ADVANCE
98 #define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
99 #undef TARGET_PROMOTE_PROTOTYPES
100 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
101 #undef TARGET_MIN_ANCHOR_OFFSET
102 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
103 #undef TARGET_MAX_ANCHOR_OFFSET
104 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
105 #undef TARGET_CAN_ELIMINATE
106 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
108 #define TARGET_LRA_P hook_bool_void_false
109 #undef TARGET_LEGITIMATE_ADDRESS_P
110 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
111 #undef TARGET_HARD_REGNO_MODE_OK
112 #define TARGET_HARD_REGNO_MODE_OK lm32_hard_regno_mode_ok
113 #undef TARGET_MODES_TIEABLE_P
114 #define TARGET_MODES_TIEABLE_P lm32_modes_tieable_p
116 #undef TARGET_CONSTANT_ALIGNMENT
117 #define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings
119 #undef TARGET_STARTING_FRAME_OFFSET
120 #define TARGET_STARTING_FRAME_OFFSET lm32_starting_frame_offset
122 struct gcc_target targetm
= TARGET_INITIALIZER
;
124 /* Current frame information calculated by lm32_compute_frame_size. */
125 static struct lm32_frame_info current_frame_info
;
127 /* Return non-zero if the given return type should be returned in memory. */
130 lm32_return_in_memory (tree type
)
134 if (!AGGREGATE_TYPE_P (type
))
136 /* All simple types are returned in registers. */
140 size
= int_size_in_bytes (type
);
141 if (size
>= 0 && size
<= UNITS_PER_WORD
)
143 /* If it can fit in one register. */
150 /* Generate an emit a word sized add instruction. */
153 emit_add (rtx dest
, rtx src0
, rtx src1
)
156 insn
= emit_insn (gen_addsi3 (dest
, src0
, src1
));
160 /* Generate the code to compare (and possibly branch) two integer values
161 TEST_CODE is the comparison code we are trying to emulate
162 (or implement directly)
163 RESULT is where to store the result of the comparison,
164 or null to emit a branch
165 CMP0 CMP1 are the two comparison operands
166 DESTINATION is the destination of the branch, or null to only compare
170 gen_int_relational (enum rtx_code code
,
179 mode
= GET_MODE (cmp0
);
180 if (mode
== VOIDmode
)
181 mode
= GET_MODE (cmp1
);
183 /* Is this a branch or compare. */
184 branch_p
= (destination
!= 0);
186 /* Instruction set doesn't support LE or LT, so swap operands and use
197 code
= swap_condition (code
);
209 rtx insn
, cond
, label
;
211 /* Operands must be in registers. */
212 if (!register_operand (cmp0
, mode
))
213 cmp0
= force_reg (mode
, cmp0
);
214 if (!register_operand (cmp1
, mode
))
215 cmp1
= force_reg (mode
, cmp1
);
217 /* Generate conditional branch instruction. */
218 cond
= gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
);
219 label
= gen_rtx_LABEL_REF (VOIDmode
, destination
);
220 insn
= gen_rtx_SET (pc_rtx
, gen_rtx_IF_THEN_ELSE (VOIDmode
,
221 cond
, label
, pc_rtx
));
222 emit_jump_insn (insn
);
226 /* We can't have const_ints in cmp0, other than 0. */
227 if ((GET_CODE (cmp0
) == CONST_INT
) && (INTVAL (cmp0
) != 0))
228 cmp0
= force_reg (mode
, cmp0
);
230 /* If the comparison is against an int not in legal range
231 move it into a register. */
232 if (GET_CODE (cmp1
) == CONST_INT
)
242 if (!satisfies_constraint_K (cmp1
))
243 cmp1
= force_reg (mode
, cmp1
);
249 if (!satisfies_constraint_L (cmp1
))
250 cmp1
= force_reg (mode
, cmp1
);
257 /* Generate compare instruction. */
258 emit_move_insn (result
, gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
));
262 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
263 and OPERAND[3]. Store the result in OPERANDS[0]. */
266 lm32_expand_scc (rtx operands
[])
268 rtx target
= operands
[0];
269 enum rtx_code code
= GET_CODE (operands
[1]);
270 rtx op0
= operands
[2];
271 rtx op1
= operands
[3];
273 gen_int_relational (code
, target
, op0
, op1
, NULL_RTX
);
276 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
277 CODE and jump to OPERANDS[3] if the condition holds. */
280 lm32_expand_conditional_branch (rtx operands
[])
282 enum rtx_code code
= GET_CODE (operands
[0]);
283 rtx op0
= operands
[1];
284 rtx op1
= operands
[2];
285 rtx destination
= operands
[3];
287 gen_int_relational (code
, NULL_RTX
, op0
, op1
, destination
);
290 /* Generate and emit RTL to save or restore callee save registers. */
292 expand_save_restore (struct lm32_frame_info
*info
, int op
)
294 unsigned int reg_save_mask
= info
->reg_save_mask
;
296 HOST_WIDE_INT offset
;
299 /* Callee saves are below locals and above outgoing arguments. */
300 offset
= info
->args_size
+ info
->callee_size
;
301 for (regno
= 0; regno
<= 31; regno
++)
303 if ((reg_save_mask
& (1 << regno
)) != 0)
308 offset_rtx
= GEN_INT (offset
);
309 if (satisfies_constraint_K (offset_rtx
))
311 mem
= gen_rtx_MEM (word_mode
,
318 /* r10 is caller saved so it can be used as a temp reg. */
321 r10
= gen_rtx_REG (word_mode
, 10);
322 insn
= emit_move_insn (r10
, offset_rtx
);
324 RTX_FRAME_RELATED_P (insn
) = 1;
325 insn
= emit_add (r10
, r10
, stack_pointer_rtx
);
327 RTX_FRAME_RELATED_P (insn
) = 1;
328 mem
= gen_rtx_MEM (word_mode
, r10
);
332 insn
= emit_move_insn (mem
, gen_rtx_REG (word_mode
, regno
));
334 insn
= emit_move_insn (gen_rtx_REG (word_mode
, regno
), mem
);
336 /* only prologue instructions which set the sp fp or save a
337 register should be marked as frame related. */
339 RTX_FRAME_RELATED_P (insn
) = 1;
340 offset
-= UNITS_PER_WORD
;
346 stack_adjust (HOST_WIDE_INT amount
)
350 if (!IN_RANGE (amount
, -32776, 32768))
352 /* r10 is caller saved so it can be used as a temp reg. */
354 r10
= gen_rtx_REG (word_mode
, 10);
355 insn
= emit_move_insn (r10
, GEN_INT (amount
));
357 RTX_FRAME_RELATED_P (insn
) = 1;
358 insn
= emit_add (stack_pointer_rtx
, stack_pointer_rtx
, r10
);
360 RTX_FRAME_RELATED_P (insn
) = 1;
364 insn
= emit_add (stack_pointer_rtx
,
365 stack_pointer_rtx
, GEN_INT (amount
));
367 RTX_FRAME_RELATED_P (insn
) = 1;
372 /* Create and emit instructions for a functions prologue. */
374 lm32_expand_prologue (void)
378 lm32_compute_frame_size (get_frame_size ());
380 if (current_frame_info
.total_size
> 0)
382 /* Add space on stack new frame. */
383 stack_adjust (-current_frame_info
.total_size
);
385 /* Save callee save registers. */
386 if (current_frame_info
.reg_save_mask
!= 0)
387 expand_save_restore (¤t_frame_info
, 0);
389 /* Setup frame pointer if it's needed. */
390 if (frame_pointer_needed
== 1)
393 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
394 RTX_FRAME_RELATED_P (insn
) = 1;
396 /* Add offset - Don't use total_size, as that includes pretend_size,
397 which isn't part of this frame? */
398 insn
= emit_add (frame_pointer_rtx
,
400 GEN_INT (current_frame_info
.args_size
+
401 current_frame_info
.callee_size
+
402 current_frame_info
.locals_size
));
403 RTX_FRAME_RELATED_P (insn
) = 1;
406 /* Prevent prologue from being scheduled into function body. */
407 emit_insn (gen_blockage ());
411 /* Create an emit instructions for a functions epilogue. */
413 lm32_expand_epilogue (void)
415 rtx ra_rtx
= gen_rtx_REG (Pmode
, RA_REGNUM
);
417 lm32_compute_frame_size (get_frame_size ());
419 if (current_frame_info
.total_size
> 0)
421 /* Prevent stack code from being reordered. */
422 emit_insn (gen_blockage ());
424 /* Restore callee save registers. */
425 if (current_frame_info
.reg_save_mask
!= 0)
426 expand_save_restore (¤t_frame_info
, 1);
428 /* Deallocate stack. */
429 stack_adjust (current_frame_info
.total_size
);
431 /* Return to calling function. */
432 emit_jump_insn (gen_return_internal (ra_rtx
));
436 /* Return to calling function. */
437 emit_jump_insn (gen_return_internal (ra_rtx
));
441 /* Return the bytes needed to compute the frame pointer from the current
444 lm32_compute_frame_size (int size
)
447 HOST_WIDE_INT total_size
, locals_size
, args_size
, pretend_size
, callee_size
;
448 unsigned int reg_save_mask
;
451 args_size
= crtl
->outgoing_args_size
;
452 pretend_size
= crtl
->args
.pretend_args_size
;
456 /* Build mask that actually determines which regsiters we save
457 and calculate size required to store them in the stack. */
458 for (regno
= 1; regno
< SP_REGNUM
; regno
++)
460 if (df_regs_ever_live_p (regno
) && !call_used_or_fixed_reg_p (regno
))
462 reg_save_mask
|= 1 << regno
;
463 callee_size
+= UNITS_PER_WORD
;
466 if (df_regs_ever_live_p (RA_REGNUM
) || ! crtl
->is_leaf
469 reg_save_mask
|= 1 << RA_REGNUM
;
470 callee_size
+= UNITS_PER_WORD
;
472 if (!(reg_save_mask
& (1 << FP_REGNUM
)) && frame_pointer_needed
)
474 reg_save_mask
|= 1 << FP_REGNUM
;
475 callee_size
+= UNITS_PER_WORD
;
478 /* Compute total frame size. */
479 total_size
= pretend_size
+ args_size
+ locals_size
+ callee_size
;
481 /* Align frame to appropriate boundary. */
482 total_size
= (total_size
+ 3) & ~3;
484 /* Save computed information. */
485 current_frame_info
.total_size
= total_size
;
486 current_frame_info
.callee_size
= callee_size
;
487 current_frame_info
.pretend_size
= pretend_size
;
488 current_frame_info
.locals_size
= locals_size
;
489 current_frame_info
.args_size
= args_size
;
490 current_frame_info
.reg_save_mask
= reg_save_mask
;
496 lm32_print_operand (FILE * file
, rtx op
, int letter
)
500 code
= GET_CODE (op
);
502 if (code
== SIGN_EXTEND
)
503 op
= XEXP (op
, 0), code
= GET_CODE (op
);
504 else if (code
== REG
|| code
== SUBREG
)
511 regnum
= true_regnum (op
);
513 fprintf (file
, "%s", reg_names
[regnum
]);
515 else if (code
== HIGH
)
516 output_addr_const (file
, XEXP (op
, 0));
517 else if (code
== MEM
)
518 output_address (GET_MODE (op
), XEXP (op
, 0));
519 else if (letter
== 'z' && GET_CODE (op
) == CONST_INT
&& INTVAL (op
) == 0)
520 fprintf (file
, "%s", reg_names
[0]);
521 else if (GET_CODE (op
) == CONST_DOUBLE
)
523 if ((CONST_DOUBLE_LOW (op
) != 0) || (CONST_DOUBLE_HIGH (op
) != 0))
524 output_operand_lossage ("only 0.0 can be loaded as an immediate");
529 fprintf (file
, "e ");
531 fprintf (file
, "ne ");
533 fprintf (file
, "g ");
534 else if (code
== GTU
)
535 fprintf (file
, "gu ");
537 fprintf (file
, "l ");
538 else if (code
== LTU
)
539 fprintf (file
, "lu ");
541 fprintf (file
, "ge ");
542 else if (code
== GEU
)
543 fprintf (file
, "geu");
545 fprintf (file
, "le ");
546 else if (code
== LEU
)
547 fprintf (file
, "leu");
549 output_addr_const (file
, op
);
552 /* A C compound statement to output to stdio stream STREAM the
553 assembler syntax for an instruction operand that is a memory
554 reference whose address is ADDR. ADDR is an RTL expression.
556 On some machines, the syntax for a symbolic address depends on
557 the section that the address refers to. On these machines,
558 define the macro `ENCODE_SECTION_INFO' to store the information
559 into the `symbol_ref', and then check for it here. */
562 lm32_print_operand_address (FILE * file
, rtx addr
)
564 switch (GET_CODE (addr
))
567 fprintf (file
, "(%s+0)", reg_names
[REGNO (addr
)]);
571 output_address (VOIDmode
, XEXP (addr
, 0));
576 rtx arg0
= XEXP (addr
, 0);
577 rtx arg1
= XEXP (addr
, 1);
579 if (GET_CODE (arg0
) == REG
&& CONSTANT_P (arg1
))
581 if (GET_CODE (arg1
) == CONST_INT
)
582 fprintf (file
, "(%s+%ld)", reg_names
[REGNO (arg0
)],
586 fprintf (file
, "(%s+", reg_names
[REGNO (arg0
)]);
587 output_addr_const (file
, arg1
);
591 else if (CONSTANT_P (arg0
) && CONSTANT_P (arg1
))
592 output_addr_const (file
, addr
);
594 fatal_insn ("bad operand", addr
);
599 if (SYMBOL_REF_SMALL_P (addr
))
601 fprintf (file
, "gp(");
602 output_addr_const (file
, addr
);
606 fatal_insn ("can't use non gp relative absolute address", addr
);
610 fatal_insn ("invalid addressing mode", addr
);
615 /* Determine where to put an argument to a function.
616 Value is zero to push the argument on the stack,
617 or a hard register in which to store the argument.
619 CUM is a variable of type CUMULATIVE_ARGS which gives info about
620 the preceding args and about the function being called.
621 ARG is a description of the argument. */
624 lm32_function_arg (cumulative_args_t cum_v
, const function_arg_info
&arg
)
626 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
628 if (arg
.end_marker_p ())
629 /* Compute operand 2 of the call insn. */
632 if (targetm
.calls
.must_pass_in_stack (arg
))
636 || *cum
+ LM32_NUM_REGS2 (arg
.mode
, arg
.type
) > LM32_NUM_ARG_REGS
)
639 return gen_rtx_REG (arg
.mode
, *cum
+ LM32_FIRST_ARG_REG
);
643 lm32_function_arg_advance (cumulative_args_t cum
,
644 const function_arg_info
&arg
)
646 *get_cumulative_args (cum
) += LM32_NUM_REGS2 (arg
.mode
, arg
.type
);
650 lm32_compute_initial_elimination_offset (int from
, int to
)
652 HOST_WIDE_INT offset
= 0;
656 case ARG_POINTER_REGNUM
:
659 case FRAME_POINTER_REGNUM
:
662 case STACK_POINTER_REGNUM
:
664 lm32_compute_frame_size (get_frame_size ()) -
665 current_frame_info
.pretend_size
;
679 lm32_setup_incoming_varargs (cumulative_args_t cum_v
,
680 const function_arg_info
&arg
,
681 int *pretend_size
, int no_rtl
)
683 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
687 fntype
= TREE_TYPE (current_function_decl
);
689 if (stdarg_p (fntype
))
690 first_anon_arg
= *cum
+ LM32_FIRST_ARG_REG
;
693 /* this is the common case, we have been passed details setup
694 for the last named argument, we want to skip over the
695 registers, if any used in passing this named parameter in
696 order to determine which is the first registers used to pass
697 anonymous arguments. */
698 int size
= arg
.promoted_size_in_bytes ();
701 *cum
+ LM32_FIRST_ARG_REG
+
702 ((size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
705 if ((first_anon_arg
< (LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
)) && !no_rtl
)
707 int first_reg_offset
= first_anon_arg
;
708 int size
= LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
- first_anon_arg
;
711 regblock
= gen_rtx_MEM (BLKmode
,
712 plus_constant (Pmode
, arg_pointer_rtx
,
713 FIRST_PARM_OFFSET (0)));
714 move_block_from_reg (first_reg_offset
, regblock
, size
);
716 *pretend_size
= size
* UNITS_PER_WORD
;
720 /* Override command line options. */
722 lm32_option_override (void)
724 /* We must have sign-extend enabled if barrel-shift isn't. */
725 if (!TARGET_BARREL_SHIFT_ENABLED
&& !TARGET_SIGN_EXTEND_ENABLED
)
726 target_flags
|= MASK_SIGN_EXTEND_ENABLED
;
729 /* Return nonzero if this function is known to have a null epilogue.
730 This allows the optimizer to omit jumps to jumps if no stack
733 lm32_can_use_return (void)
735 if (!reload_completed
)
738 if (df_regs_ever_live_p (RA_REGNUM
) || crtl
->profile
)
741 if (lm32_compute_frame_size (get_frame_size ()) != 0)
747 /* Support function to determine the return address of the function
748 'count' frames back up the stack. */
750 lm32_return_addr_rtx (int count
, rtx frame
)
755 if (!df_regs_ever_live_p (RA_REGNUM
))
756 r
= gen_rtx_REG (Pmode
, RA_REGNUM
);
759 r
= gen_rtx_MEM (Pmode
,
760 gen_rtx_PLUS (Pmode
, frame
,
761 GEN_INT (-2 * UNITS_PER_WORD
)));
762 set_mem_alias_set (r
, get_frame_alias_set ());
765 else if (flag_omit_frame_pointer
)
769 r
= gen_rtx_MEM (Pmode
,
770 gen_rtx_PLUS (Pmode
, frame
,
771 GEN_INT (-2 * UNITS_PER_WORD
)));
772 set_mem_alias_set (r
, get_frame_alias_set ());
777 /* Return true if EXP should be placed in the small data section. */
780 lm32_in_small_data_p (const_tree exp
)
782 /* We want to merge strings, so we never consider them small data. */
783 if (TREE_CODE (exp
) == STRING_CST
)
786 /* Functions are never in the small data area. Duh. */
787 if (TREE_CODE (exp
) == FUNCTION_DECL
)
790 if (VAR_P (exp
) && DECL_SECTION_NAME (exp
))
792 const char *section
= DECL_SECTION_NAME (exp
);
793 if (strcmp (section
, ".sdata") == 0 || strcmp (section
, ".sbss") == 0)
798 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
800 /* If this is an incomplete type with size 0, then we can't put it
801 in sdata because it might be too big when completed. */
802 if (size
> 0 && size
<= g_switch_value
)
809 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
810 Assume that the areas do not overlap. */
813 lm32_block_move_inline (rtx dest
, rtx src
, HOST_WIDE_INT length
,
814 HOST_WIDE_INT alignment
)
816 HOST_WIDE_INT offset
, delta
;
817 unsigned HOST_WIDE_INT bits
;
822 /* Work out how many bits to move at a time. */
836 mode
= int_mode_for_size (bits
, 0).require ();
837 delta
= bits
/ BITS_PER_UNIT
;
839 /* Allocate a buffer for the temporary registers. */
840 regs
= XALLOCAVEC (rtx
, length
/ delta
);
842 /* Load as many BITS-sized chunks as possible. */
843 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
845 regs
[i
] = gen_reg_rtx (mode
);
846 emit_move_insn (regs
[i
], adjust_address (src
, mode
, offset
));
849 /* Copy the chunks to the destination. */
850 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
851 emit_move_insn (adjust_address (dest
, mode
, offset
), regs
[i
]);
853 /* Mop up any left-over bytes. */
856 src
= adjust_address (src
, BLKmode
, offset
);
857 dest
= adjust_address (dest
, BLKmode
, offset
);
858 move_by_pieces (dest
, src
, length
- offset
,
859 MIN (MEM_ALIGN (src
), MEM_ALIGN (dest
)), RETURN_BEGIN
);
863 /* Expand string/block move operations.
865 operands[0] is the pointer to the destination.
866 operands[1] is the pointer to the source.
867 operands[2] is the number of bytes to move.
868 operands[3] is the alignment. */
871 lm32_expand_block_move (rtx
* operands
)
873 if ((GET_CODE (operands
[2]) == CONST_INT
) && (INTVAL (operands
[2]) <= 32))
875 lm32_block_move_inline (operands
[0], operands
[1], INTVAL (operands
[2]),
876 INTVAL (operands
[3]));
882 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
883 isn't protected by a PIC unspec. */
885 nonpic_symbol_mentioned_p (rtx x
)
890 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
891 || GET_CODE (x
) == PC
)
894 /* We don't want to look into the possible MEM location of a
895 CONST_DOUBLE, since we're not going to use it, in general. */
896 if (GET_CODE (x
) == CONST_DOUBLE
)
899 if (GET_CODE (x
) == UNSPEC
)
902 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
903 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
909 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
910 if (nonpic_symbol_mentioned_p (XVECEXP (x
, i
, j
)))
913 else if (fmt
[i
] == 'e' && nonpic_symbol_mentioned_p (XEXP (x
, i
)))
920 /* Compute a (partial) cost for rtx X. Return true if the complete
921 cost has been computed, and false if subexpressions should be
922 scanned. In either case, *TOTAL contains the cost result. */
925 lm32_rtx_costs (rtx x
, machine_mode mode
, int outer_code
,
926 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
928 int code
= GET_CODE (x
);
931 const int arithmetic_latency
= 1;
932 const int shift_latency
= 1;
933 const int compare_latency
= 2;
934 const int multiply_latency
= 3;
935 const int load_latency
= 3;
936 const int libcall_size_cost
= 5;
938 /* Determine if we can handle the given mode size in a single instruction. */
939 small_mode
= (mode
== QImode
) || (mode
== HImode
) || (mode
== SImode
);
952 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
));
955 COSTS_N_INSNS (arithmetic_latency
+ (LM32_NUM_REGS (mode
) - 1));
962 *total
= COSTS_N_INSNS (1);
964 *total
= COSTS_N_INSNS (compare_latency
);
968 /* FIXME. Guessing here. */
969 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * (2 + 3) / 2);
976 if (TARGET_BARREL_SHIFT_ENABLED
&& small_mode
)
979 *total
= COSTS_N_INSNS (1);
981 *total
= COSTS_N_INSNS (shift_latency
);
983 else if (TARGET_BARREL_SHIFT_ENABLED
)
985 /* FIXME: Guessing here. */
986 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * 4);
988 else if (small_mode
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
990 *total
= COSTS_N_INSNS (INTVAL (XEXP (x
, 1)));
996 *total
= COSTS_N_INSNS (libcall_size_cost
);
998 *total
= COSTS_N_INSNS (100);
1003 if (TARGET_MULTIPLY_ENABLED
&& small_mode
)
1006 *total
= COSTS_N_INSNS (1);
1008 *total
= COSTS_N_INSNS (multiply_latency
);
1014 *total
= COSTS_N_INSNS (libcall_size_cost
);
1016 *total
= COSTS_N_INSNS (100);
1024 if (TARGET_DIVIDE_ENABLED
&& small_mode
)
1027 *total
= COSTS_N_INSNS (1);
1030 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1033 unsigned HOST_WIDE_INT i
= INTVAL (XEXP (x
, 1));
1040 if (IN_RANGE (i
, 0, 65536))
1041 *total
= COSTS_N_INSNS (1 + 1 + cycles
);
1043 *total
= COSTS_N_INSNS (2 + 1 + cycles
);
1046 else if (GET_CODE (XEXP (x
, 1)) == REG
)
1048 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1053 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1062 *total
= COSTS_N_INSNS (libcall_size_cost
);
1064 *total
= COSTS_N_INSNS (100);
1071 *total
= COSTS_N_INSNS (1);
1073 *total
= COSTS_N_INSNS (arithmetic_latency
);
1077 if (MEM_P (XEXP (x
, 0)))
1078 *total
= COSTS_N_INSNS (0);
1079 else if (small_mode
)
1082 *total
= COSTS_N_INSNS (1);
1084 *total
= COSTS_N_INSNS (arithmetic_latency
);
1087 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) / 2);
1096 *total
= COSTS_N_INSNS (0);
1107 if (satisfies_constraint_L (x
))
1108 *total
= COSTS_N_INSNS (0);
1110 *total
= COSTS_N_INSNS (2);
1117 if (satisfies_constraint_K (x
))
1118 *total
= COSTS_N_INSNS (0);
1120 *total
= COSTS_N_INSNS (2);
1124 if (TARGET_MULTIPLY_ENABLED
)
1126 if (satisfies_constraint_K (x
))
1127 *total
= COSTS_N_INSNS (0);
1129 *total
= COSTS_N_INSNS (2);
1135 if (satisfies_constraint_K (x
))
1136 *total
= COSTS_N_INSNS (1);
1138 *total
= COSTS_N_INSNS (2);
1149 *total
= COSTS_N_INSNS (0);
1156 *total
= COSTS_N_INSNS (0);
1165 *total
= COSTS_N_INSNS (2);
1169 *total
= COSTS_N_INSNS (1);
1174 *total
= COSTS_N_INSNS (1);
1176 *total
= COSTS_N_INSNS (load_latency
);
1184 /* Implemenent TARGET_CAN_ELIMINATE. */
1187 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1189 return (to
== STACK_POINTER_REGNUM
&& frame_pointer_needed
) ? false : true;
1192 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1195 lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
,
1196 bool strict
, code_helper
)
1199 if (strict
&& REG_P (x
) && STRICT_REG_OK_FOR_BASE_P (x
))
1201 if (!strict
&& REG_P (x
) && NONSTRICT_REG_OK_FOR_BASE_P (x
))
1205 if (GET_CODE (x
) == PLUS
1206 && REG_P (XEXP (x
, 0))
1207 && ((strict
&& STRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0)))
1208 || (!strict
&& NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0))))
1209 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1210 && satisfies_constraint_K (XEXP ((x
), 1)))
1214 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_SMALL_P (x
))
1220 /* Check a move is not memory to memory. */
1223 lm32_move_ok (machine_mode mode
, rtx operands
[2]) {
1224 if (memory_operand (operands
[0], mode
))
1225 return register_or_zero_operand (operands
[1], mode
);
1229 /* Implement TARGET_HARD_REGNO_MODE_OK. */
1232 lm32_hard_regno_mode_ok (unsigned int regno
, machine_mode
)
1234 return G_REG_P (regno
);
1237 /* Implement TARGET_MODES_TIEABLE_P. */
1240 lm32_modes_tieable_p (machine_mode mode1
, machine_mode mode2
)
1242 return (GET_MODE_CLASS (mode1
) == MODE_INT
1243 && GET_MODE_CLASS (mode2
) == MODE_INT
1244 && GET_MODE_SIZE (mode1
) <= UNITS_PER_WORD
1245 && GET_MODE_SIZE (mode2
) <= UNITS_PER_WORD
);
1248 /* Implement TARGET_STARTING_FRAME_OFFSET. */
1250 static HOST_WIDE_INT
1251 lm32_starting_frame_offset (void)
1253 return UNITS_PER_WORD
;