1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
4 Copyright (C) 2009-2017 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "stringpool.h"
43 #include "tm-constrs.h"
46 /* This file should be included last. */
47 #include "target-def.h"
49 struct lm32_frame_info
51 HOST_WIDE_INT total_size
; /* number of bytes of entire frame. */
52 HOST_WIDE_INT callee_size
; /* number of bytes to save callee saves. */
53 HOST_WIDE_INT pretend_size
; /* number of bytes we pretend caller did. */
54 HOST_WIDE_INT args_size
; /* number of bytes for outgoing arguments. */
55 HOST_WIDE_INT locals_size
; /* number of bytes for local variables. */
56 unsigned int reg_save_mask
; /* mask of saved registers. */
59 /* Prototypes for static functions. */
60 static rtx
emit_add (rtx dest
, rtx src0
, rtx src1
);
61 static void expand_save_restore (struct lm32_frame_info
*info
, int op
);
62 static void stack_adjust (HOST_WIDE_INT amount
);
63 static bool lm32_in_small_data_p (const_tree
);
64 static void lm32_setup_incoming_varargs (cumulative_args_t cum
,
65 machine_mode mode
, tree type
,
66 int *pretend_size
, int no_rtl
);
67 static bool lm32_rtx_costs (rtx x
, machine_mode mode
, int outer_code
, int opno
,
68 int *total
, bool speed
);
69 static bool lm32_can_eliminate (const int, const int);
71 lm32_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
);
72 static HOST_WIDE_INT
lm32_compute_frame_size (int size
);
73 static void lm32_option_override (void);
74 static rtx
lm32_function_arg (cumulative_args_t cum
,
75 machine_mode mode
, const_tree type
,
77 static void lm32_function_arg_advance (cumulative_args_t cum
,
79 const_tree type
, bool named
);
80 static bool lm32_hard_regno_mode_ok (unsigned int, machine_mode
);
/* Target hook macro definitions; each hook is #undef'd first so this file
   can override any default from target-def.h.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK lm32_hard_regno_mode_ok
113 struct gcc_target targetm
= TARGET_INITIALIZER
;
115 /* Current frame information calculated by lm32_compute_frame_size. */
116 static struct lm32_frame_info current_frame_info
;
118 /* Return non-zero if the given return type should be returned in memory. */
121 lm32_return_in_memory (tree type
)
125 if (!AGGREGATE_TYPE_P (type
))
127 /* All simple types are returned in registers. */
131 size
= int_size_in_bytes (type
);
132 if (size
>= 0 && size
<= UNITS_PER_WORD
)
134 /* If it can fit in one register. */
141 /* Generate an emit a word sized add instruction. */
144 emit_add (rtx dest
, rtx src0
, rtx src1
)
147 insn
= emit_insn (gen_addsi3 (dest
, src0
, src1
));
151 /* Generate the code to compare (and possibly branch) two integer values
152 TEST_CODE is the comparison code we are trying to emulate
153 (or implement directly)
154 RESULT is where to store the result of the comparison,
155 or null to emit a branch
156 CMP0 CMP1 are the two comparison operands
157 DESTINATION is the destination of the branch, or null to only compare
161 gen_int_relational (enum rtx_code code
,
170 mode
= GET_MODE (cmp0
);
171 if (mode
== VOIDmode
)
172 mode
= GET_MODE (cmp1
);
174 /* Is this a branch or compare. */
175 branch_p
= (destination
!= 0);
177 /* Instruction set doesn't support LE or LT, so swap operands and use
188 code
= swap_condition (code
);
200 rtx insn
, cond
, label
;
202 /* Operands must be in registers. */
203 if (!register_operand (cmp0
, mode
))
204 cmp0
= force_reg (mode
, cmp0
);
205 if (!register_operand (cmp1
, mode
))
206 cmp1
= force_reg (mode
, cmp1
);
208 /* Generate conditional branch instruction. */
209 cond
= gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
);
210 label
= gen_rtx_LABEL_REF (VOIDmode
, destination
);
211 insn
= gen_rtx_SET (pc_rtx
, gen_rtx_IF_THEN_ELSE (VOIDmode
,
212 cond
, label
, pc_rtx
));
213 emit_jump_insn (insn
);
217 /* We can't have const_ints in cmp0, other than 0. */
218 if ((GET_CODE (cmp0
) == CONST_INT
) && (INTVAL (cmp0
) != 0))
219 cmp0
= force_reg (mode
, cmp0
);
221 /* If the comparison is against an int not in legal range
222 move it into a register. */
223 if (GET_CODE (cmp1
) == CONST_INT
)
233 if (!satisfies_constraint_K (cmp1
))
234 cmp1
= force_reg (mode
, cmp1
);
240 if (!satisfies_constraint_L (cmp1
))
241 cmp1
= force_reg (mode
, cmp1
);
248 /* Generate compare instruction. */
249 emit_move_insn (result
, gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
));
253 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
254 and OPERAND[3]. Store the result in OPERANDS[0]. */
257 lm32_expand_scc (rtx operands
[])
259 rtx target
= operands
[0];
260 enum rtx_code code
= GET_CODE (operands
[1]);
261 rtx op0
= operands
[2];
262 rtx op1
= operands
[3];
264 gen_int_relational (code
, target
, op0
, op1
, NULL_RTX
);
267 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
268 CODE and jump to OPERANDS[3] if the condition holds. */
271 lm32_expand_conditional_branch (rtx operands
[])
273 enum rtx_code code
= GET_CODE (operands
[0]);
274 rtx op0
= operands
[1];
275 rtx op1
= operands
[2];
276 rtx destination
= operands
[3];
278 gen_int_relational (code
, NULL_RTX
, op0
, op1
, destination
);
281 /* Generate and emit RTL to save or restore callee save registers. */
283 expand_save_restore (struct lm32_frame_info
*info
, int op
)
285 unsigned int reg_save_mask
= info
->reg_save_mask
;
287 HOST_WIDE_INT offset
;
290 /* Callee saves are below locals and above outgoing arguments. */
291 offset
= info
->args_size
+ info
->callee_size
;
292 for (regno
= 0; regno
<= 31; regno
++)
294 if ((reg_save_mask
& (1 << regno
)) != 0)
299 offset_rtx
= GEN_INT (offset
);
300 if (satisfies_constraint_K (offset_rtx
))
302 mem
= gen_rtx_MEM (word_mode
,
309 /* r10 is caller saved so it can be used as a temp reg. */
312 r10
= gen_rtx_REG (word_mode
, 10);
313 insn
= emit_move_insn (r10
, offset_rtx
);
315 RTX_FRAME_RELATED_P (insn
) = 1;
316 insn
= emit_add (r10
, r10
, stack_pointer_rtx
);
318 RTX_FRAME_RELATED_P (insn
) = 1;
319 mem
= gen_rtx_MEM (word_mode
, r10
);
323 insn
= emit_move_insn (mem
, gen_rtx_REG (word_mode
, regno
));
325 insn
= emit_move_insn (gen_rtx_REG (word_mode
, regno
), mem
);
327 /* only prologue instructions which set the sp fp or save a
328 register should be marked as frame related. */
330 RTX_FRAME_RELATED_P (insn
) = 1;
331 offset
-= UNITS_PER_WORD
;
337 stack_adjust (HOST_WIDE_INT amount
)
341 if (!IN_RANGE (amount
, -32776, 32768))
343 /* r10 is caller saved so it can be used as a temp reg. */
345 r10
= gen_rtx_REG (word_mode
, 10);
346 insn
= emit_move_insn (r10
, GEN_INT (amount
));
348 RTX_FRAME_RELATED_P (insn
) = 1;
349 insn
= emit_add (stack_pointer_rtx
, stack_pointer_rtx
, r10
);
351 RTX_FRAME_RELATED_P (insn
) = 1;
355 insn
= emit_add (stack_pointer_rtx
,
356 stack_pointer_rtx
, GEN_INT (amount
));
358 RTX_FRAME_RELATED_P (insn
) = 1;
363 /* Create and emit instructions for a functions prologue. */
365 lm32_expand_prologue (void)
369 lm32_compute_frame_size (get_frame_size ());
371 if (current_frame_info
.total_size
> 0)
373 /* Add space on stack new frame. */
374 stack_adjust (-current_frame_info
.total_size
);
376 /* Save callee save registers. */
377 if (current_frame_info
.reg_save_mask
!= 0)
378 expand_save_restore (¤t_frame_info
, 0);
380 /* Setup frame pointer if it's needed. */
381 if (frame_pointer_needed
== 1)
384 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
385 RTX_FRAME_RELATED_P (insn
) = 1;
387 /* Add offset - Don't use total_size, as that includes pretend_size,
388 which isn't part of this frame? */
389 insn
= emit_add (frame_pointer_rtx
,
391 GEN_INT (current_frame_info
.args_size
+
392 current_frame_info
.callee_size
+
393 current_frame_info
.locals_size
));
394 RTX_FRAME_RELATED_P (insn
) = 1;
397 /* Prevent prologue from being scheduled into function body. */
398 emit_insn (gen_blockage ());
402 /* Create an emit instructions for a functions epilogue. */
404 lm32_expand_epilogue (void)
406 rtx ra_rtx
= gen_rtx_REG (Pmode
, RA_REGNUM
);
408 lm32_compute_frame_size (get_frame_size ());
410 if (current_frame_info
.total_size
> 0)
412 /* Prevent stack code from being reordered. */
413 emit_insn (gen_blockage ());
415 /* Restore callee save registers. */
416 if (current_frame_info
.reg_save_mask
!= 0)
417 expand_save_restore (¤t_frame_info
, 1);
419 /* Deallocate stack. */
420 stack_adjust (current_frame_info
.total_size
);
422 /* Return to calling function. */
423 emit_jump_insn (gen_return_internal (ra_rtx
));
427 /* Return to calling function. */
428 emit_jump_insn (gen_return_internal (ra_rtx
));
432 /* Return the bytes needed to compute the frame pointer from the current
435 lm32_compute_frame_size (int size
)
438 HOST_WIDE_INT total_size
, locals_size
, args_size
, pretend_size
, callee_size
;
439 unsigned int reg_save_mask
;
442 args_size
= crtl
->outgoing_args_size
;
443 pretend_size
= crtl
->args
.pretend_args_size
;
447 /* Build mask that actually determines which regsiters we save
448 and calculate size required to store them in the stack. */
449 for (regno
= 1; regno
< SP_REGNUM
; regno
++)
451 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
453 reg_save_mask
|= 1 << regno
;
454 callee_size
+= UNITS_PER_WORD
;
457 if (df_regs_ever_live_p (RA_REGNUM
) || ! crtl
->is_leaf
460 reg_save_mask
|= 1 << RA_REGNUM
;
461 callee_size
+= UNITS_PER_WORD
;
463 if (!(reg_save_mask
& (1 << FP_REGNUM
)) && frame_pointer_needed
)
465 reg_save_mask
|= 1 << FP_REGNUM
;
466 callee_size
+= UNITS_PER_WORD
;
469 /* Compute total frame size. */
470 total_size
= pretend_size
+ args_size
+ locals_size
+ callee_size
;
472 /* Align frame to appropriate boundary. */
473 total_size
= (total_size
+ 3) & ~3;
475 /* Save computed information. */
476 current_frame_info
.total_size
= total_size
;
477 current_frame_info
.callee_size
= callee_size
;
478 current_frame_info
.pretend_size
= pretend_size
;
479 current_frame_info
.locals_size
= locals_size
;
480 current_frame_info
.args_size
= args_size
;
481 current_frame_info
.reg_save_mask
= reg_save_mask
;
487 lm32_print_operand (FILE * file
, rtx op
, int letter
)
491 code
= GET_CODE (op
);
493 if (code
== SIGN_EXTEND
)
494 op
= XEXP (op
, 0), code
= GET_CODE (op
);
495 else if (code
== REG
|| code
== SUBREG
)
502 regnum
= true_regnum (op
);
504 fprintf (file
, "%s", reg_names
[regnum
]);
506 else if (code
== HIGH
)
507 output_addr_const (file
, XEXP (op
, 0));
508 else if (code
== MEM
)
509 output_address (GET_MODE (op
), XEXP (op
, 0));
510 else if (letter
== 'z' && GET_CODE (op
) == CONST_INT
&& INTVAL (op
) == 0)
511 fprintf (file
, "%s", reg_names
[0]);
512 else if (GET_CODE (op
) == CONST_DOUBLE
)
514 if ((CONST_DOUBLE_LOW (op
) != 0) || (CONST_DOUBLE_HIGH (op
) != 0))
515 output_operand_lossage ("only 0.0 can be loaded as an immediate");
520 fprintf (file
, "e ");
522 fprintf (file
, "ne ");
524 fprintf (file
, "g ");
525 else if (code
== GTU
)
526 fprintf (file
, "gu ");
528 fprintf (file
, "l ");
529 else if (code
== LTU
)
530 fprintf (file
, "lu ");
532 fprintf (file
, "ge ");
533 else if (code
== GEU
)
534 fprintf (file
, "geu");
536 fprintf (file
, "le ");
537 else if (code
== LEU
)
538 fprintf (file
, "leu");
540 output_addr_const (file
, op
);
543 /* A C compound statement to output to stdio stream STREAM the
544 assembler syntax for an instruction operand that is a memory
545 reference whose address is ADDR. ADDR is an RTL expression.
547 On some machines, the syntax for a symbolic address depends on
548 the section that the address refers to. On these machines,
549 define the macro `ENCODE_SECTION_INFO' to store the information
550 into the `symbol_ref', and then check for it here. */
553 lm32_print_operand_address (FILE * file
, rtx addr
)
555 switch (GET_CODE (addr
))
558 fprintf (file
, "(%s+0)", reg_names
[REGNO (addr
)]);
562 output_address (VOIDmode
, XEXP (addr
, 0));
567 rtx arg0
= XEXP (addr
, 0);
568 rtx arg1
= XEXP (addr
, 1);
570 if (GET_CODE (arg0
) == REG
&& CONSTANT_P (arg1
))
572 if (GET_CODE (arg1
) == CONST_INT
)
573 fprintf (file
, "(%s+%ld)", reg_names
[REGNO (arg0
)],
577 fprintf (file
, "(%s+", reg_names
[REGNO (arg0
)]);
578 output_addr_const (file
, arg1
);
582 else if (CONSTANT_P (arg0
) && CONSTANT_P (arg1
))
583 output_addr_const (file
, addr
);
585 fatal_insn ("bad operand", addr
);
590 if (SYMBOL_REF_SMALL_P (addr
))
592 fprintf (file
, "gp(");
593 output_addr_const (file
, addr
);
597 fatal_insn ("can't use non gp relative absolute address", addr
);
601 fatal_insn ("invalid addressing mode", addr
);
606 /* Determine where to put an argument to a function.
607 Value is zero to push the argument on the stack,
608 or a hard register in which to store the argument.
610 MODE is the argument's machine mode.
611 TYPE is the data type of the argument (as a tree).
612 This is null for libcalls where that information may
614 CUM is a variable of type CUMULATIVE_ARGS which gives info about
615 the preceding args and about the function being called.
616 NAMED is nonzero if this argument is a named parameter
617 (otherwise it is an extra parameter matching an ellipsis). */
620 lm32_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
621 const_tree type
, bool named
)
623 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
625 if (mode
== VOIDmode
)
626 /* Compute operand 2 of the call insn. */
629 if (targetm
.calls
.must_pass_in_stack (mode
, type
))
632 if (!named
|| (*cum
+ LM32_NUM_REGS2 (mode
, type
) > LM32_NUM_ARG_REGS
))
635 return gen_rtx_REG (mode
, *cum
+ LM32_FIRST_ARG_REG
);
639 lm32_function_arg_advance (cumulative_args_t cum
, machine_mode mode
,
640 const_tree type
, bool named ATTRIBUTE_UNUSED
)
642 *get_cumulative_args (cum
) += LM32_NUM_REGS2 (mode
, type
);
646 lm32_compute_initial_elimination_offset (int from
, int to
)
648 HOST_WIDE_INT offset
= 0;
652 case ARG_POINTER_REGNUM
:
655 case FRAME_POINTER_REGNUM
:
658 case STACK_POINTER_REGNUM
:
660 lm32_compute_frame_size (get_frame_size ()) -
661 current_frame_info
.pretend_size
;
675 lm32_setup_incoming_varargs (cumulative_args_t cum_v
, machine_mode mode
,
676 tree type
, int *pretend_size
, int no_rtl
)
678 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
682 fntype
= TREE_TYPE (current_function_decl
);
684 if (stdarg_p (fntype
))
685 first_anon_arg
= *cum
+ LM32_FIRST_ARG_REG
;
688 /* this is the common case, we have been passed details setup
689 for the last named argument, we want to skip over the
690 registers, if any used in passing this named paramter in
691 order to determine which is the first registers used to pass
692 anonymous arguments. */
696 size
= int_size_in_bytes (type
);
698 size
= GET_MODE_SIZE (mode
);
701 *cum
+ LM32_FIRST_ARG_REG
+
702 ((size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
705 if ((first_anon_arg
< (LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
)) && !no_rtl
)
707 int first_reg_offset
= first_anon_arg
;
708 int size
= LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
- first_anon_arg
;
711 regblock
= gen_rtx_MEM (BLKmode
,
712 plus_constant (Pmode
, arg_pointer_rtx
,
713 FIRST_PARM_OFFSET (0)));
714 move_block_from_reg (first_reg_offset
, regblock
, size
);
716 *pretend_size
= size
* UNITS_PER_WORD
;
720 /* Override command line options. */
722 lm32_option_override (void)
724 /* We must have sign-extend enabled if barrel-shift isn't. */
725 if (!TARGET_BARREL_SHIFT_ENABLED
&& !TARGET_SIGN_EXTEND_ENABLED
)
726 target_flags
|= MASK_SIGN_EXTEND_ENABLED
;
729 /* Return nonzero if this function is known to have a null epilogue.
730 This allows the optimizer to omit jumps to jumps if no stack
733 lm32_can_use_return (void)
735 if (!reload_completed
)
738 if (df_regs_ever_live_p (RA_REGNUM
) || crtl
->profile
)
741 if (lm32_compute_frame_size (get_frame_size ()) != 0)
747 /* Support function to determine the return address of the function
748 'count' frames back up the stack. */
750 lm32_return_addr_rtx (int count
, rtx frame
)
755 if (!df_regs_ever_live_p (RA_REGNUM
))
756 r
= gen_rtx_REG (Pmode
, RA_REGNUM
);
759 r
= gen_rtx_MEM (Pmode
,
760 gen_rtx_PLUS (Pmode
, frame
,
761 GEN_INT (-2 * UNITS_PER_WORD
)));
762 set_mem_alias_set (r
, get_frame_alias_set ());
765 else if (flag_omit_frame_pointer
)
769 r
= gen_rtx_MEM (Pmode
,
770 gen_rtx_PLUS (Pmode
, frame
,
771 GEN_INT (-2 * UNITS_PER_WORD
)));
772 set_mem_alias_set (r
, get_frame_alias_set ());
777 /* Return true if EXP should be placed in the small data section. */
780 lm32_in_small_data_p (const_tree exp
)
782 /* We want to merge strings, so we never consider them small data. */
783 if (TREE_CODE (exp
) == STRING_CST
)
786 /* Functions are never in the small data area. Duh. */
787 if (TREE_CODE (exp
) == FUNCTION_DECL
)
790 if (TREE_CODE (exp
) == VAR_DECL
&& DECL_SECTION_NAME (exp
))
792 const char *section
= DECL_SECTION_NAME (exp
);
793 if (strcmp (section
, ".sdata") == 0 || strcmp (section
, ".sbss") == 0)
798 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
800 /* If this is an incomplete type with size 0, then we can't put it
801 in sdata because it might be too big when completed. */
802 if (size
> 0 && size
<= g_switch_value
)
809 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
810 Assume that the areas do not overlap. */
813 lm32_block_move_inline (rtx dest
, rtx src
, HOST_WIDE_INT length
,
814 HOST_WIDE_INT alignment
)
816 HOST_WIDE_INT offset
, delta
;
817 unsigned HOST_WIDE_INT bits
;
822 /* Work out how many bits to move at a time. */
836 mode
= mode_for_size (bits
, MODE_INT
, 0);
837 delta
= bits
/ BITS_PER_UNIT
;
839 /* Allocate a buffer for the temporary registers. */
840 regs
= XALLOCAVEC (rtx
, length
/ delta
);
842 /* Load as many BITS-sized chunks as possible. */
843 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
845 regs
[i
] = gen_reg_rtx (mode
);
846 emit_move_insn (regs
[i
], adjust_address (src
, mode
, offset
));
849 /* Copy the chunks to the destination. */
850 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
851 emit_move_insn (adjust_address (dest
, mode
, offset
), regs
[i
]);
853 /* Mop up any left-over bytes. */
856 src
= adjust_address (src
, BLKmode
, offset
);
857 dest
= adjust_address (dest
, BLKmode
, offset
);
858 move_by_pieces (dest
, src
, length
- offset
,
859 MIN (MEM_ALIGN (src
), MEM_ALIGN (dest
)), 0);
863 /* Expand string/block move operations.
865 operands[0] is the pointer to the destination.
866 operands[1] is the pointer to the source.
867 operands[2] is the number of bytes to move.
868 operands[3] is the alignment. */
871 lm32_expand_block_move (rtx
* operands
)
873 if ((GET_CODE (operands
[2]) == CONST_INT
) && (INTVAL (operands
[2]) <= 32))
875 lm32_block_move_inline (operands
[0], operands
[1], INTVAL (operands
[2]),
876 INTVAL (operands
[3]));
882 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
883 isn't protected by a PIC unspec. */
885 nonpic_symbol_mentioned_p (rtx x
)
890 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
891 || GET_CODE (x
) == PC
)
894 /* We don't want to look into the possible MEM location of a
895 CONST_DOUBLE, since we're not going to use it, in general. */
896 if (GET_CODE (x
) == CONST_DOUBLE
)
899 if (GET_CODE (x
) == UNSPEC
)
902 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
903 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
909 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
910 if (nonpic_symbol_mentioned_p (XVECEXP (x
, i
, j
)))
913 else if (fmt
[i
] == 'e' && nonpic_symbol_mentioned_p (XEXP (x
, i
)))
920 /* Compute a (partial) cost for rtx X. Return true if the complete
921 cost has been computed, and false if subexpressions should be
922 scanned. In either case, *TOTAL contains the cost result. */
925 lm32_rtx_costs (rtx x
, machine_mode mode
, int outer_code
,
926 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
928 int code
= GET_CODE (x
);
931 const int arithmetic_latency
= 1;
932 const int shift_latency
= 1;
933 const int compare_latency
= 2;
934 const int multiply_latency
= 3;
935 const int load_latency
= 3;
936 const int libcall_size_cost
= 5;
938 /* Determine if we can handle the given mode size in a single instruction. */
939 small_mode
= (mode
== QImode
) || (mode
== HImode
) || (mode
== SImode
);
952 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
));
955 COSTS_N_INSNS (arithmetic_latency
+ (LM32_NUM_REGS (mode
) - 1));
962 *total
= COSTS_N_INSNS (1);
964 *total
= COSTS_N_INSNS (compare_latency
);
968 /* FIXME. Guessing here. */
969 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * (2 + 3) / 2);
976 if (TARGET_BARREL_SHIFT_ENABLED
&& small_mode
)
979 *total
= COSTS_N_INSNS (1);
981 *total
= COSTS_N_INSNS (shift_latency
);
983 else if (TARGET_BARREL_SHIFT_ENABLED
)
985 /* FIXME: Guessing here. */
986 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * 4);
988 else if (small_mode
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
990 *total
= COSTS_N_INSNS (INTVAL (XEXP (x
, 1)));
996 *total
= COSTS_N_INSNS (libcall_size_cost
);
998 *total
= COSTS_N_INSNS (100);
1003 if (TARGET_MULTIPLY_ENABLED
&& small_mode
)
1006 *total
= COSTS_N_INSNS (1);
1008 *total
= COSTS_N_INSNS (multiply_latency
);
1014 *total
= COSTS_N_INSNS (libcall_size_cost
);
1016 *total
= COSTS_N_INSNS (100);
1024 if (TARGET_DIVIDE_ENABLED
&& small_mode
)
1027 *total
= COSTS_N_INSNS (1);
1030 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1033 unsigned HOST_WIDE_INT i
= INTVAL (XEXP (x
, 1));
1040 if (IN_RANGE (i
, 0, 65536))
1041 *total
= COSTS_N_INSNS (1 + 1 + cycles
);
1043 *total
= COSTS_N_INSNS (2 + 1 + cycles
);
1046 else if (GET_CODE (XEXP (x
, 1)) == REG
)
1048 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1053 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1062 *total
= COSTS_N_INSNS (libcall_size_cost
);
1064 *total
= COSTS_N_INSNS (100);
1071 *total
= COSTS_N_INSNS (1);
1073 *total
= COSTS_N_INSNS (arithmetic_latency
);
1077 if (MEM_P (XEXP (x
, 0)))
1078 *total
= COSTS_N_INSNS (0);
1079 else if (small_mode
)
1082 *total
= COSTS_N_INSNS (1);
1084 *total
= COSTS_N_INSNS (arithmetic_latency
);
1087 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) / 2);
1096 *total
= COSTS_N_INSNS (0);
1107 if (satisfies_constraint_L (x
))
1108 *total
= COSTS_N_INSNS (0);
1110 *total
= COSTS_N_INSNS (2);
1117 if (satisfies_constraint_K (x
))
1118 *total
= COSTS_N_INSNS (0);
1120 *total
= COSTS_N_INSNS (2);
1124 if (TARGET_MULTIPLY_ENABLED
)
1126 if (satisfies_constraint_K (x
))
1127 *total
= COSTS_N_INSNS (0);
1129 *total
= COSTS_N_INSNS (2);
1135 if (satisfies_constraint_K (x
))
1136 *total
= COSTS_N_INSNS (1);
1138 *total
= COSTS_N_INSNS (2);
1149 *total
= COSTS_N_INSNS (0);
1156 *total
= COSTS_N_INSNS (0);
1165 *total
= COSTS_N_INSNS (2);
1169 *total
= COSTS_N_INSNS (1);
1174 *total
= COSTS_N_INSNS (1);
1176 *total
= COSTS_N_INSNS (load_latency
);
1184 /* Implemenent TARGET_CAN_ELIMINATE. */
1187 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1189 return (to
== STACK_POINTER_REGNUM
&& frame_pointer_needed
) ? false : true;
1192 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1195 lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
, bool strict
)
1198 if (strict
&& REG_P (x
) && STRICT_REG_OK_FOR_BASE_P (x
))
1200 if (!strict
&& REG_P (x
) && NONSTRICT_REG_OK_FOR_BASE_P (x
))
1204 if (GET_CODE (x
) == PLUS
1205 && REG_P (XEXP (x
, 0))
1206 && ((strict
&& STRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0)))
1207 || (!strict
&& NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0))))
1208 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1209 && satisfies_constraint_K (XEXP ((x
), 1)))
1213 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_SMALL_P (x
))
1219 /* Check a move is not memory to memory. */
1222 lm32_move_ok (machine_mode mode
, rtx operands
[2]) {
1223 if (memory_operand (operands
[0], mode
))
1224 return register_or_zero_operand (operands
[1], mode
);
1228 /* Implement TARGET_HARD_REGNO_MODE_OK. */
1231 lm32_hard_regno_mode_ok (unsigned int regno
, machine_mode
)
1233 return G_REG_P (regno
);