/* Subroutines used for code generation on the Lattice Mico32 architecture.
   Contributed by Jon Beniston <jon@beniston.com>

   Copyright (C) 2009-2018 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#define IN_TARGET_CODE 1

/* NOTE(review): the extracted text had dropped most of the include list;
   restored per the upstream layout of this file — verify against the
   repository copy.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "attribs.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "output.h"
#include "calls.h"
#include "alias.h"
#include "explow.h"
#include "expr.h"
#include "tm-constrs.h"

/* This file should be included last.  */
#include "target-def.h"
51 struct lm32_frame_info
53 HOST_WIDE_INT total_size
; /* number of bytes of entire frame. */
54 HOST_WIDE_INT callee_size
; /* number of bytes to save callee saves. */
55 HOST_WIDE_INT pretend_size
; /* number of bytes we pretend caller did. */
56 HOST_WIDE_INT args_size
; /* number of bytes for outgoing arguments. */
57 HOST_WIDE_INT locals_size
; /* number of bytes for local variables. */
58 unsigned int reg_save_mask
; /* mask of saved registers. */
61 /* Prototypes for static functions. */
62 static rtx
emit_add (rtx dest
, rtx src0
, rtx src1
);
63 static void expand_save_restore (struct lm32_frame_info
*info
, int op
);
64 static void stack_adjust (HOST_WIDE_INT amount
);
65 static bool lm32_in_small_data_p (const_tree
);
66 static void lm32_setup_incoming_varargs (cumulative_args_t cum
,
67 machine_mode mode
, tree type
,
68 int *pretend_size
, int no_rtl
);
69 static bool lm32_rtx_costs (rtx x
, machine_mode mode
, int outer_code
, int opno
,
70 int *total
, bool speed
);
71 static bool lm32_can_eliminate (const int, const int);
73 lm32_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
);
74 static HOST_WIDE_INT
lm32_compute_frame_size (int size
);
75 static void lm32_option_override (void);
76 static rtx
lm32_function_arg (cumulative_args_t cum
,
77 machine_mode mode
, const_tree type
,
79 static void lm32_function_arg_advance (cumulative_args_t cum
,
81 const_tree type
, bool named
);
82 static bool lm32_hard_regno_mode_ok (unsigned int, machine_mode
);
83 static bool lm32_modes_tieable_p (machine_mode
, machine_mode
);
84 static HOST_WIDE_INT
lm32_starting_frame_offset (void);
86 #undef TARGET_OPTION_OVERRIDE
87 #define TARGET_OPTION_OVERRIDE lm32_option_override
88 #undef TARGET_ADDRESS_COST
89 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
90 #undef TARGET_RTX_COSTS
91 #define TARGET_RTX_COSTS lm32_rtx_costs
92 #undef TARGET_IN_SMALL_DATA_P
93 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
94 #undef TARGET_PROMOTE_FUNCTION_MODE
95 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
96 #undef TARGET_SETUP_INCOMING_VARARGS
97 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
98 #undef TARGET_FUNCTION_ARG
99 #define TARGET_FUNCTION_ARG lm32_function_arg
100 #undef TARGET_FUNCTION_ARG_ADVANCE
101 #define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
102 #undef TARGET_PROMOTE_PROTOTYPES
103 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
104 #undef TARGET_MIN_ANCHOR_OFFSET
105 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
106 #undef TARGET_MAX_ANCHOR_OFFSET
107 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
108 #undef TARGET_CAN_ELIMINATE
109 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
111 #define TARGET_LRA_P hook_bool_void_false
112 #undef TARGET_LEGITIMATE_ADDRESS_P
113 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
114 #undef TARGET_HARD_REGNO_MODE_OK
115 #define TARGET_HARD_REGNO_MODE_OK lm32_hard_regno_mode_ok
116 #undef TARGET_MODES_TIEABLE_P
117 #define TARGET_MODES_TIEABLE_P lm32_modes_tieable_p
119 #undef TARGET_CONSTANT_ALIGNMENT
120 #define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings
122 #undef TARGET_STARTING_FRAME_OFFSET
123 #define TARGET_STARTING_FRAME_OFFSET lm32_starting_frame_offset
125 struct gcc_target targetm
= TARGET_INITIALIZER
;
127 /* Current frame information calculated by lm32_compute_frame_size. */
128 static struct lm32_frame_info current_frame_info
;
130 /* Return non-zero if the given return type should be returned in memory. */
133 lm32_return_in_memory (tree type
)
137 if (!AGGREGATE_TYPE_P (type
))
139 /* All simple types are returned in registers. */
143 size
= int_size_in_bytes (type
);
144 if (size
>= 0 && size
<= UNITS_PER_WORD
)
146 /* If it can fit in one register. */
153 /* Generate an emit a word sized add instruction. */
156 emit_add (rtx dest
, rtx src0
, rtx src1
)
159 insn
= emit_insn (gen_addsi3 (dest
, src0
, src1
));
163 /* Generate the code to compare (and possibly branch) two integer values
164 TEST_CODE is the comparison code we are trying to emulate
165 (or implement directly)
166 RESULT is where to store the result of the comparison,
167 or null to emit a branch
168 CMP0 CMP1 are the two comparison operands
169 DESTINATION is the destination of the branch, or null to only compare
173 gen_int_relational (enum rtx_code code
,
182 mode
= GET_MODE (cmp0
);
183 if (mode
== VOIDmode
)
184 mode
= GET_MODE (cmp1
);
186 /* Is this a branch or compare. */
187 branch_p
= (destination
!= 0);
189 /* Instruction set doesn't support LE or LT, so swap operands and use
200 code
= swap_condition (code
);
212 rtx insn
, cond
, label
;
214 /* Operands must be in registers. */
215 if (!register_operand (cmp0
, mode
))
216 cmp0
= force_reg (mode
, cmp0
);
217 if (!register_operand (cmp1
, mode
))
218 cmp1
= force_reg (mode
, cmp1
);
220 /* Generate conditional branch instruction. */
221 cond
= gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
);
222 label
= gen_rtx_LABEL_REF (VOIDmode
, destination
);
223 insn
= gen_rtx_SET (pc_rtx
, gen_rtx_IF_THEN_ELSE (VOIDmode
,
224 cond
, label
, pc_rtx
));
225 emit_jump_insn (insn
);
229 /* We can't have const_ints in cmp0, other than 0. */
230 if ((GET_CODE (cmp0
) == CONST_INT
) && (INTVAL (cmp0
) != 0))
231 cmp0
= force_reg (mode
, cmp0
);
233 /* If the comparison is against an int not in legal range
234 move it into a register. */
235 if (GET_CODE (cmp1
) == CONST_INT
)
245 if (!satisfies_constraint_K (cmp1
))
246 cmp1
= force_reg (mode
, cmp1
);
252 if (!satisfies_constraint_L (cmp1
))
253 cmp1
= force_reg (mode
, cmp1
);
260 /* Generate compare instruction. */
261 emit_move_insn (result
, gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
));
265 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
266 and OPERAND[3]. Store the result in OPERANDS[0]. */
269 lm32_expand_scc (rtx operands
[])
271 rtx target
= operands
[0];
272 enum rtx_code code
= GET_CODE (operands
[1]);
273 rtx op0
= operands
[2];
274 rtx op1
= operands
[3];
276 gen_int_relational (code
, target
, op0
, op1
, NULL_RTX
);
279 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
280 CODE and jump to OPERANDS[3] if the condition holds. */
283 lm32_expand_conditional_branch (rtx operands
[])
285 enum rtx_code code
= GET_CODE (operands
[0]);
286 rtx op0
= operands
[1];
287 rtx op1
= operands
[2];
288 rtx destination
= operands
[3];
290 gen_int_relational (code
, NULL_RTX
, op0
, op1
, destination
);
293 /* Generate and emit RTL to save or restore callee save registers. */
295 expand_save_restore (struct lm32_frame_info
*info
, int op
)
297 unsigned int reg_save_mask
= info
->reg_save_mask
;
299 HOST_WIDE_INT offset
;
302 /* Callee saves are below locals and above outgoing arguments. */
303 offset
= info
->args_size
+ info
->callee_size
;
304 for (regno
= 0; regno
<= 31; regno
++)
306 if ((reg_save_mask
& (1 << regno
)) != 0)
311 offset_rtx
= GEN_INT (offset
);
312 if (satisfies_constraint_K (offset_rtx
))
314 mem
= gen_rtx_MEM (word_mode
,
321 /* r10 is caller saved so it can be used as a temp reg. */
324 r10
= gen_rtx_REG (word_mode
, 10);
325 insn
= emit_move_insn (r10
, offset_rtx
);
327 RTX_FRAME_RELATED_P (insn
) = 1;
328 insn
= emit_add (r10
, r10
, stack_pointer_rtx
);
330 RTX_FRAME_RELATED_P (insn
) = 1;
331 mem
= gen_rtx_MEM (word_mode
, r10
);
335 insn
= emit_move_insn (mem
, gen_rtx_REG (word_mode
, regno
));
337 insn
= emit_move_insn (gen_rtx_REG (word_mode
, regno
), mem
);
339 /* only prologue instructions which set the sp fp or save a
340 register should be marked as frame related. */
342 RTX_FRAME_RELATED_P (insn
) = 1;
343 offset
-= UNITS_PER_WORD
;
349 stack_adjust (HOST_WIDE_INT amount
)
353 if (!IN_RANGE (amount
, -32776, 32768))
355 /* r10 is caller saved so it can be used as a temp reg. */
357 r10
= gen_rtx_REG (word_mode
, 10);
358 insn
= emit_move_insn (r10
, GEN_INT (amount
));
360 RTX_FRAME_RELATED_P (insn
) = 1;
361 insn
= emit_add (stack_pointer_rtx
, stack_pointer_rtx
, r10
);
363 RTX_FRAME_RELATED_P (insn
) = 1;
367 insn
= emit_add (stack_pointer_rtx
,
368 stack_pointer_rtx
, GEN_INT (amount
));
370 RTX_FRAME_RELATED_P (insn
) = 1;
375 /* Create and emit instructions for a functions prologue. */
377 lm32_expand_prologue (void)
381 lm32_compute_frame_size (get_frame_size ());
383 if (current_frame_info
.total_size
> 0)
385 /* Add space on stack new frame. */
386 stack_adjust (-current_frame_info
.total_size
);
388 /* Save callee save registers. */
389 if (current_frame_info
.reg_save_mask
!= 0)
390 expand_save_restore (¤t_frame_info
, 0);
392 /* Setup frame pointer if it's needed. */
393 if (frame_pointer_needed
== 1)
396 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
397 RTX_FRAME_RELATED_P (insn
) = 1;
399 /* Add offset - Don't use total_size, as that includes pretend_size,
400 which isn't part of this frame? */
401 insn
= emit_add (frame_pointer_rtx
,
403 GEN_INT (current_frame_info
.args_size
+
404 current_frame_info
.callee_size
+
405 current_frame_info
.locals_size
));
406 RTX_FRAME_RELATED_P (insn
) = 1;
409 /* Prevent prologue from being scheduled into function body. */
410 emit_insn (gen_blockage ());
414 /* Create an emit instructions for a functions epilogue. */
416 lm32_expand_epilogue (void)
418 rtx ra_rtx
= gen_rtx_REG (Pmode
, RA_REGNUM
);
420 lm32_compute_frame_size (get_frame_size ());
422 if (current_frame_info
.total_size
> 0)
424 /* Prevent stack code from being reordered. */
425 emit_insn (gen_blockage ());
427 /* Restore callee save registers. */
428 if (current_frame_info
.reg_save_mask
!= 0)
429 expand_save_restore (¤t_frame_info
, 1);
431 /* Deallocate stack. */
432 stack_adjust (current_frame_info
.total_size
);
434 /* Return to calling function. */
435 emit_jump_insn (gen_return_internal (ra_rtx
));
439 /* Return to calling function. */
440 emit_jump_insn (gen_return_internal (ra_rtx
));
444 /* Return the bytes needed to compute the frame pointer from the current
447 lm32_compute_frame_size (int size
)
450 HOST_WIDE_INT total_size
, locals_size
, args_size
, pretend_size
, callee_size
;
451 unsigned int reg_save_mask
;
454 args_size
= crtl
->outgoing_args_size
;
455 pretend_size
= crtl
->args
.pretend_args_size
;
459 /* Build mask that actually determines which regsiters we save
460 and calculate size required to store them in the stack. */
461 for (regno
= 1; regno
< SP_REGNUM
; regno
++)
463 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
465 reg_save_mask
|= 1 << regno
;
466 callee_size
+= UNITS_PER_WORD
;
469 if (df_regs_ever_live_p (RA_REGNUM
) || ! crtl
->is_leaf
472 reg_save_mask
|= 1 << RA_REGNUM
;
473 callee_size
+= UNITS_PER_WORD
;
475 if (!(reg_save_mask
& (1 << FP_REGNUM
)) && frame_pointer_needed
)
477 reg_save_mask
|= 1 << FP_REGNUM
;
478 callee_size
+= UNITS_PER_WORD
;
481 /* Compute total frame size. */
482 total_size
= pretend_size
+ args_size
+ locals_size
+ callee_size
;
484 /* Align frame to appropriate boundary. */
485 total_size
= (total_size
+ 3) & ~3;
487 /* Save computed information. */
488 current_frame_info
.total_size
= total_size
;
489 current_frame_info
.callee_size
= callee_size
;
490 current_frame_info
.pretend_size
= pretend_size
;
491 current_frame_info
.locals_size
= locals_size
;
492 current_frame_info
.args_size
= args_size
;
493 current_frame_info
.reg_save_mask
= reg_save_mask
;
499 lm32_print_operand (FILE * file
, rtx op
, int letter
)
503 code
= GET_CODE (op
);
505 if (code
== SIGN_EXTEND
)
506 op
= XEXP (op
, 0), code
= GET_CODE (op
);
507 else if (code
== REG
|| code
== SUBREG
)
514 regnum
= true_regnum (op
);
516 fprintf (file
, "%s", reg_names
[regnum
]);
518 else if (code
== HIGH
)
519 output_addr_const (file
, XEXP (op
, 0));
520 else if (code
== MEM
)
521 output_address (GET_MODE (op
), XEXP (op
, 0));
522 else if (letter
== 'z' && GET_CODE (op
) == CONST_INT
&& INTVAL (op
) == 0)
523 fprintf (file
, "%s", reg_names
[0]);
524 else if (GET_CODE (op
) == CONST_DOUBLE
)
526 if ((CONST_DOUBLE_LOW (op
) != 0) || (CONST_DOUBLE_HIGH (op
) != 0))
527 output_operand_lossage ("only 0.0 can be loaded as an immediate");
532 fprintf (file
, "e ");
534 fprintf (file
, "ne ");
536 fprintf (file
, "g ");
537 else if (code
== GTU
)
538 fprintf (file
, "gu ");
540 fprintf (file
, "l ");
541 else if (code
== LTU
)
542 fprintf (file
, "lu ");
544 fprintf (file
, "ge ");
545 else if (code
== GEU
)
546 fprintf (file
, "geu");
548 fprintf (file
, "le ");
549 else if (code
== LEU
)
550 fprintf (file
, "leu");
552 output_addr_const (file
, op
);
555 /* A C compound statement to output to stdio stream STREAM the
556 assembler syntax for an instruction operand that is a memory
557 reference whose address is ADDR. ADDR is an RTL expression.
559 On some machines, the syntax for a symbolic address depends on
560 the section that the address refers to. On these machines,
561 define the macro `ENCODE_SECTION_INFO' to store the information
562 into the `symbol_ref', and then check for it here. */
565 lm32_print_operand_address (FILE * file
, rtx addr
)
567 switch (GET_CODE (addr
))
570 fprintf (file
, "(%s+0)", reg_names
[REGNO (addr
)]);
574 output_address (VOIDmode
, XEXP (addr
, 0));
579 rtx arg0
= XEXP (addr
, 0);
580 rtx arg1
= XEXP (addr
, 1);
582 if (GET_CODE (arg0
) == REG
&& CONSTANT_P (arg1
))
584 if (GET_CODE (arg1
) == CONST_INT
)
585 fprintf (file
, "(%s+%ld)", reg_names
[REGNO (arg0
)],
589 fprintf (file
, "(%s+", reg_names
[REGNO (arg0
)]);
590 output_addr_const (file
, arg1
);
594 else if (CONSTANT_P (arg0
) && CONSTANT_P (arg1
))
595 output_addr_const (file
, addr
);
597 fatal_insn ("bad operand", addr
);
602 if (SYMBOL_REF_SMALL_P (addr
))
604 fprintf (file
, "gp(");
605 output_addr_const (file
, addr
);
609 fatal_insn ("can't use non gp relative absolute address", addr
);
613 fatal_insn ("invalid addressing mode", addr
);
618 /* Determine where to put an argument to a function.
619 Value is zero to push the argument on the stack,
620 or a hard register in which to store the argument.
622 MODE is the argument's machine mode.
623 TYPE is the data type of the argument (as a tree).
624 This is null for libcalls where that information may
626 CUM is a variable of type CUMULATIVE_ARGS which gives info about
627 the preceding args and about the function being called.
628 NAMED is nonzero if this argument is a named parameter
629 (otherwise it is an extra parameter matching an ellipsis). */
632 lm32_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
633 const_tree type
, bool named
)
635 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
637 if (mode
== VOIDmode
)
638 /* Compute operand 2 of the call insn. */
641 if (targetm
.calls
.must_pass_in_stack (mode
, type
))
644 if (!named
|| (*cum
+ LM32_NUM_REGS2 (mode
, type
) > LM32_NUM_ARG_REGS
))
647 return gen_rtx_REG (mode
, *cum
+ LM32_FIRST_ARG_REG
);
651 lm32_function_arg_advance (cumulative_args_t cum
, machine_mode mode
,
652 const_tree type
, bool named ATTRIBUTE_UNUSED
)
654 *get_cumulative_args (cum
) += LM32_NUM_REGS2 (mode
, type
);
658 lm32_compute_initial_elimination_offset (int from
, int to
)
660 HOST_WIDE_INT offset
= 0;
664 case ARG_POINTER_REGNUM
:
667 case FRAME_POINTER_REGNUM
:
670 case STACK_POINTER_REGNUM
:
672 lm32_compute_frame_size (get_frame_size ()) -
673 current_frame_info
.pretend_size
;
687 lm32_setup_incoming_varargs (cumulative_args_t cum_v
, machine_mode mode
,
688 tree type
, int *pretend_size
, int no_rtl
)
690 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
694 fntype
= TREE_TYPE (current_function_decl
);
696 if (stdarg_p (fntype
))
697 first_anon_arg
= *cum
+ LM32_FIRST_ARG_REG
;
700 /* this is the common case, we have been passed details setup
701 for the last named argument, we want to skip over the
702 registers, if any used in passing this named paramter in
703 order to determine which is the first registers used to pass
704 anonymous arguments. */
708 size
= int_size_in_bytes (type
);
710 size
= GET_MODE_SIZE (mode
);
713 *cum
+ LM32_FIRST_ARG_REG
+
714 ((size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
717 if ((first_anon_arg
< (LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
)) && !no_rtl
)
719 int first_reg_offset
= first_anon_arg
;
720 int size
= LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
- first_anon_arg
;
723 regblock
= gen_rtx_MEM (BLKmode
,
724 plus_constant (Pmode
, arg_pointer_rtx
,
725 FIRST_PARM_OFFSET (0)));
726 move_block_from_reg (first_reg_offset
, regblock
, size
);
728 *pretend_size
= size
* UNITS_PER_WORD
;
732 /* Override command line options. */
734 lm32_option_override (void)
736 /* We must have sign-extend enabled if barrel-shift isn't. */
737 if (!TARGET_BARREL_SHIFT_ENABLED
&& !TARGET_SIGN_EXTEND_ENABLED
)
738 target_flags
|= MASK_SIGN_EXTEND_ENABLED
;
741 /* Return nonzero if this function is known to have a null epilogue.
742 This allows the optimizer to omit jumps to jumps if no stack
745 lm32_can_use_return (void)
747 if (!reload_completed
)
750 if (df_regs_ever_live_p (RA_REGNUM
) || crtl
->profile
)
753 if (lm32_compute_frame_size (get_frame_size ()) != 0)
759 /* Support function to determine the return address of the function
760 'count' frames back up the stack. */
762 lm32_return_addr_rtx (int count
, rtx frame
)
767 if (!df_regs_ever_live_p (RA_REGNUM
))
768 r
= gen_rtx_REG (Pmode
, RA_REGNUM
);
771 r
= gen_rtx_MEM (Pmode
,
772 gen_rtx_PLUS (Pmode
, frame
,
773 GEN_INT (-2 * UNITS_PER_WORD
)));
774 set_mem_alias_set (r
, get_frame_alias_set ());
777 else if (flag_omit_frame_pointer
)
781 r
= gen_rtx_MEM (Pmode
,
782 gen_rtx_PLUS (Pmode
, frame
,
783 GEN_INT (-2 * UNITS_PER_WORD
)));
784 set_mem_alias_set (r
, get_frame_alias_set ());
789 /* Return true if EXP should be placed in the small data section. */
792 lm32_in_small_data_p (const_tree exp
)
794 /* We want to merge strings, so we never consider them small data. */
795 if (TREE_CODE (exp
) == STRING_CST
)
798 /* Functions are never in the small data area. Duh. */
799 if (TREE_CODE (exp
) == FUNCTION_DECL
)
802 if (TREE_CODE (exp
) == VAR_DECL
&& DECL_SECTION_NAME (exp
))
804 const char *section
= DECL_SECTION_NAME (exp
);
805 if (strcmp (section
, ".sdata") == 0 || strcmp (section
, ".sbss") == 0)
810 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
812 /* If this is an incomplete type with size 0, then we can't put it
813 in sdata because it might be too big when completed. */
814 if (size
> 0 && size
<= g_switch_value
)
821 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
822 Assume that the areas do not overlap. */
825 lm32_block_move_inline (rtx dest
, rtx src
, HOST_WIDE_INT length
,
826 HOST_WIDE_INT alignment
)
828 HOST_WIDE_INT offset
, delta
;
829 unsigned HOST_WIDE_INT bits
;
834 /* Work out how many bits to move at a time. */
848 mode
= int_mode_for_size (bits
, 0).require ();
849 delta
= bits
/ BITS_PER_UNIT
;
851 /* Allocate a buffer for the temporary registers. */
852 regs
= XALLOCAVEC (rtx
, length
/ delta
);
854 /* Load as many BITS-sized chunks as possible. */
855 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
857 regs
[i
] = gen_reg_rtx (mode
);
858 emit_move_insn (regs
[i
], adjust_address (src
, mode
, offset
));
861 /* Copy the chunks to the destination. */
862 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
863 emit_move_insn (adjust_address (dest
, mode
, offset
), regs
[i
]);
865 /* Mop up any left-over bytes. */
868 src
= adjust_address (src
, BLKmode
, offset
);
869 dest
= adjust_address (dest
, BLKmode
, offset
);
870 move_by_pieces (dest
, src
, length
- offset
,
871 MIN (MEM_ALIGN (src
), MEM_ALIGN (dest
)), 0);
875 /* Expand string/block move operations.
877 operands[0] is the pointer to the destination.
878 operands[1] is the pointer to the source.
879 operands[2] is the number of bytes to move.
880 operands[3] is the alignment. */
883 lm32_expand_block_move (rtx
* operands
)
885 if ((GET_CODE (operands
[2]) == CONST_INT
) && (INTVAL (operands
[2]) <= 32))
887 lm32_block_move_inline (operands
[0], operands
[1], INTVAL (operands
[2]),
888 INTVAL (operands
[3]));
894 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
895 isn't protected by a PIC unspec. */
897 nonpic_symbol_mentioned_p (rtx x
)
902 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
903 || GET_CODE (x
) == PC
)
906 /* We don't want to look into the possible MEM location of a
907 CONST_DOUBLE, since we're not going to use it, in general. */
908 if (GET_CODE (x
) == CONST_DOUBLE
)
911 if (GET_CODE (x
) == UNSPEC
)
914 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
915 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
921 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
922 if (nonpic_symbol_mentioned_p (XVECEXP (x
, i
, j
)))
925 else if (fmt
[i
] == 'e' && nonpic_symbol_mentioned_p (XEXP (x
, i
)))
932 /* Compute a (partial) cost for rtx X. Return true if the complete
933 cost has been computed, and false if subexpressions should be
934 scanned. In either case, *TOTAL contains the cost result. */
937 lm32_rtx_costs (rtx x
, machine_mode mode
, int outer_code
,
938 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
940 int code
= GET_CODE (x
);
943 const int arithmetic_latency
= 1;
944 const int shift_latency
= 1;
945 const int compare_latency
= 2;
946 const int multiply_latency
= 3;
947 const int load_latency
= 3;
948 const int libcall_size_cost
= 5;
950 /* Determine if we can handle the given mode size in a single instruction. */
951 small_mode
= (mode
== QImode
) || (mode
== HImode
) || (mode
== SImode
);
964 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
));
967 COSTS_N_INSNS (arithmetic_latency
+ (LM32_NUM_REGS (mode
) - 1));
974 *total
= COSTS_N_INSNS (1);
976 *total
= COSTS_N_INSNS (compare_latency
);
980 /* FIXME. Guessing here. */
981 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * (2 + 3) / 2);
988 if (TARGET_BARREL_SHIFT_ENABLED
&& small_mode
)
991 *total
= COSTS_N_INSNS (1);
993 *total
= COSTS_N_INSNS (shift_latency
);
995 else if (TARGET_BARREL_SHIFT_ENABLED
)
997 /* FIXME: Guessing here. */
998 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * 4);
1000 else if (small_mode
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1002 *total
= COSTS_N_INSNS (INTVAL (XEXP (x
, 1)));
1008 *total
= COSTS_N_INSNS (libcall_size_cost
);
1010 *total
= COSTS_N_INSNS (100);
1015 if (TARGET_MULTIPLY_ENABLED
&& small_mode
)
1018 *total
= COSTS_N_INSNS (1);
1020 *total
= COSTS_N_INSNS (multiply_latency
);
1026 *total
= COSTS_N_INSNS (libcall_size_cost
);
1028 *total
= COSTS_N_INSNS (100);
1036 if (TARGET_DIVIDE_ENABLED
&& small_mode
)
1039 *total
= COSTS_N_INSNS (1);
1042 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1045 unsigned HOST_WIDE_INT i
= INTVAL (XEXP (x
, 1));
1052 if (IN_RANGE (i
, 0, 65536))
1053 *total
= COSTS_N_INSNS (1 + 1 + cycles
);
1055 *total
= COSTS_N_INSNS (2 + 1 + cycles
);
1058 else if (GET_CODE (XEXP (x
, 1)) == REG
)
1060 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1065 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1074 *total
= COSTS_N_INSNS (libcall_size_cost
);
1076 *total
= COSTS_N_INSNS (100);
1083 *total
= COSTS_N_INSNS (1);
1085 *total
= COSTS_N_INSNS (arithmetic_latency
);
1089 if (MEM_P (XEXP (x
, 0)))
1090 *total
= COSTS_N_INSNS (0);
1091 else if (small_mode
)
1094 *total
= COSTS_N_INSNS (1);
1096 *total
= COSTS_N_INSNS (arithmetic_latency
);
1099 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) / 2);
1108 *total
= COSTS_N_INSNS (0);
1119 if (satisfies_constraint_L (x
))
1120 *total
= COSTS_N_INSNS (0);
1122 *total
= COSTS_N_INSNS (2);
1129 if (satisfies_constraint_K (x
))
1130 *total
= COSTS_N_INSNS (0);
1132 *total
= COSTS_N_INSNS (2);
1136 if (TARGET_MULTIPLY_ENABLED
)
1138 if (satisfies_constraint_K (x
))
1139 *total
= COSTS_N_INSNS (0);
1141 *total
= COSTS_N_INSNS (2);
1147 if (satisfies_constraint_K (x
))
1148 *total
= COSTS_N_INSNS (1);
1150 *total
= COSTS_N_INSNS (2);
1161 *total
= COSTS_N_INSNS (0);
1168 *total
= COSTS_N_INSNS (0);
1177 *total
= COSTS_N_INSNS (2);
1181 *total
= COSTS_N_INSNS (1);
1186 *total
= COSTS_N_INSNS (1);
1188 *total
= COSTS_N_INSNS (load_latency
);
1196 /* Implemenent TARGET_CAN_ELIMINATE. */
1199 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1201 return (to
== STACK_POINTER_REGNUM
&& frame_pointer_needed
) ? false : true;
1204 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1207 lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
, bool strict
)
1210 if (strict
&& REG_P (x
) && STRICT_REG_OK_FOR_BASE_P (x
))
1212 if (!strict
&& REG_P (x
) && NONSTRICT_REG_OK_FOR_BASE_P (x
))
1216 if (GET_CODE (x
) == PLUS
1217 && REG_P (XEXP (x
, 0))
1218 && ((strict
&& STRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0)))
1219 || (!strict
&& NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0))))
1220 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1221 && satisfies_constraint_K (XEXP ((x
), 1)))
1225 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_SMALL_P (x
))
1231 /* Check a move is not memory to memory. */
1234 lm32_move_ok (machine_mode mode
, rtx operands
[2]) {
1235 if (memory_operand (operands
[0], mode
))
1236 return register_or_zero_operand (operands
[1], mode
);
1240 /* Implement TARGET_HARD_REGNO_MODE_OK. */
1243 lm32_hard_regno_mode_ok (unsigned int regno
, machine_mode
)
1245 return G_REG_P (regno
);
1248 /* Implement TARGET_MODES_TIEABLE_P. */
1251 lm32_modes_tieable_p (machine_mode mode1
, machine_mode mode2
)
1253 return (GET_MODE_CLASS (mode1
) == MODE_INT
1254 && GET_MODE_CLASS (mode2
) == MODE_INT
1255 && GET_MODE_SIZE (mode1
) <= UNITS_PER_WORD
1256 && GET_MODE_SIZE (mode2
) <= UNITS_PER_WORD
);
1259 /* Implement TARGET_STARTING_FRAME_OFFSET. */
1261 static HOST_WIDE_INT
1262 lm32_starting_frame_offset (void)
1264 return UNITS_PER_WORD
;