1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
4 Copyright (C) 2009-2013 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
43 #include "diagnostic-core.h"
48 #include "target-def.h"
49 #include "langhooks.h"
50 #include "tm-constrs.h"
53 struct lm32_frame_info
55 HOST_WIDE_INT total_size
; /* number of bytes of entire frame. */
56 HOST_WIDE_INT callee_size
; /* number of bytes to save callee saves. */
57 HOST_WIDE_INT pretend_size
; /* number of bytes we pretend caller did. */
58 HOST_WIDE_INT args_size
; /* number of bytes for outgoing arguments. */
59 HOST_WIDE_INT locals_size
; /* number of bytes for local variables. */
60 unsigned int reg_save_mask
; /* mask of saved registers. */
63 /* Prototypes for static functions. */
64 static rtx
emit_add (rtx dest
, rtx src0
, rtx src1
);
65 static void expand_save_restore (struct lm32_frame_info
*info
, int op
);
66 static void stack_adjust (HOST_WIDE_INT amount
);
67 static bool lm32_in_small_data_p (const_tree
);
68 static void lm32_setup_incoming_varargs (cumulative_args_t cum
,
69 enum machine_mode mode
, tree type
,
70 int *pretend_size
, int no_rtl
);
71 static bool lm32_rtx_costs (rtx x
, int code
, int outer_code
, int opno
,
72 int *total
, bool speed
);
73 static bool lm32_can_eliminate (const int, const int);
75 lm32_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
);
76 static HOST_WIDE_INT
lm32_compute_frame_size (int size
);
77 static void lm32_option_override (void);
78 static rtx
lm32_function_arg (cumulative_args_t cum
,
79 enum machine_mode mode
, const_tree type
,
81 static void lm32_function_arg_advance (cumulative_args_t cum
,
82 enum machine_mode mode
,
83 const_tree type
, bool named
);
84 static bool lm32_legitimate_constant_p (enum machine_mode
, rtx
);
/* Initialize the GCC target structure: hook the lm32 implementations
   into the generic target vector.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P lm32_legitimate_constant_p
115 struct gcc_target targetm
= TARGET_INITIALIZER
;
117 /* Current frame information calculated by lm32_compute_frame_size. */
118 static struct lm32_frame_info current_frame_info
;
120 /* Return non-zero if the given return type should be returned in memory. */
123 lm32_return_in_memory (tree type
)
127 if (!AGGREGATE_TYPE_P (type
))
129 /* All simple types are returned in registers. */
133 size
= int_size_in_bytes (type
);
134 if (size
>= 0 && size
<= UNITS_PER_WORD
)
136 /* If it can fit in one register. */
143 /* Generate an emit a word sized add instruction. */
146 emit_add (rtx dest
, rtx src0
, rtx src1
)
149 insn
= emit_insn (gen_addsi3 (dest
, src0
, src1
));
153 /* Generate the code to compare (and possibly branch) two integer values
154 TEST_CODE is the comparison code we are trying to emulate
155 (or implement directly)
156 RESULT is where to store the result of the comparison,
157 or null to emit a branch
158 CMP0 CMP1 are the two comparison operands
159 DESTINATION is the destination of the branch, or null to only compare
163 gen_int_relational (enum rtx_code code
,
169 enum machine_mode mode
;
172 mode
= GET_MODE (cmp0
);
173 if (mode
== VOIDmode
)
174 mode
= GET_MODE (cmp1
);
176 /* Is this a branch or compare. */
177 branch_p
= (destination
!= 0);
179 /* Instruction set doesn't support LE or LT, so swap operands and use
190 code
= swap_condition (code
);
202 rtx insn
, cond
, label
;
204 /* Operands must be in registers. */
205 if (!register_operand (cmp0
, mode
))
206 cmp0
= force_reg (mode
, cmp0
);
207 if (!register_operand (cmp1
, mode
))
208 cmp1
= force_reg (mode
, cmp1
);
210 /* Generate conditional branch instruction. */
211 cond
= gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
);
212 label
= gen_rtx_LABEL_REF (VOIDmode
, destination
);
213 insn
= gen_rtx_SET (VOIDmode
, pc_rtx
,
214 gen_rtx_IF_THEN_ELSE (VOIDmode
,
215 cond
, label
, pc_rtx
));
216 emit_jump_insn (insn
);
220 /* We can't have const_ints in cmp0, other than 0. */
221 if ((GET_CODE (cmp0
) == CONST_INT
) && (INTVAL (cmp0
) != 0))
222 cmp0
= force_reg (mode
, cmp0
);
224 /* If the comparison is against an int not in legal range
225 move it into a register. */
226 if (GET_CODE (cmp1
) == CONST_INT
)
236 if (!satisfies_constraint_K (cmp1
))
237 cmp1
= force_reg (mode
, cmp1
);
243 if (!satisfies_constraint_L (cmp1
))
244 cmp1
= force_reg (mode
, cmp1
);
251 /* Generate compare instruction. */
252 emit_move_insn (result
, gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
));
256 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
257 and OPERAND[3]. Store the result in OPERANDS[0]. */
260 lm32_expand_scc (rtx operands
[])
262 rtx target
= operands
[0];
263 enum rtx_code code
= GET_CODE (operands
[1]);
264 rtx op0
= operands
[2];
265 rtx op1
= operands
[3];
267 gen_int_relational (code
, target
, op0
, op1
, NULL_RTX
);
270 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
271 CODE and jump to OPERANDS[3] if the condition holds. */
274 lm32_expand_conditional_branch (rtx operands
[])
276 enum rtx_code code
= GET_CODE (operands
[0]);
277 rtx op0
= operands
[1];
278 rtx op1
= operands
[2];
279 rtx destination
= operands
[3];
281 gen_int_relational (code
, NULL_RTX
, op0
, op1
, destination
);
284 /* Generate and emit RTL to save or restore callee save registers. */
286 expand_save_restore (struct lm32_frame_info
*info
, int op
)
288 unsigned int reg_save_mask
= info
->reg_save_mask
;
290 HOST_WIDE_INT offset
;
293 /* Callee saves are below locals and above outgoing arguments. */
294 offset
= info
->args_size
+ info
->callee_size
;
295 for (regno
= 0; regno
<= 31; regno
++)
297 if ((reg_save_mask
& (1 << regno
)) != 0)
302 offset_rtx
= GEN_INT (offset
);
303 if (satisfies_constraint_K (offset_rtx
))
305 mem
= gen_rtx_MEM (word_mode
,
312 /* r10 is caller saved so it can be used as a temp reg. */
315 r10
= gen_rtx_REG (word_mode
, 10);
316 insn
= emit_move_insn (r10
, offset_rtx
);
318 RTX_FRAME_RELATED_P (insn
) = 1;
319 insn
= emit_add (r10
, r10
, stack_pointer_rtx
);
321 RTX_FRAME_RELATED_P (insn
) = 1;
322 mem
= gen_rtx_MEM (word_mode
, r10
);
326 insn
= emit_move_insn (mem
, gen_rtx_REG (word_mode
, regno
));
328 insn
= emit_move_insn (gen_rtx_REG (word_mode
, regno
), mem
);
330 /* only prologue instructions which set the sp fp or save a
331 register should be marked as frame related. */
333 RTX_FRAME_RELATED_P (insn
) = 1;
334 offset
-= UNITS_PER_WORD
;
340 stack_adjust (HOST_WIDE_INT amount
)
344 if (!IN_RANGE (amount
, -32776, 32768))
346 /* r10 is caller saved so it can be used as a temp reg. */
348 r10
= gen_rtx_REG (word_mode
, 10);
349 insn
= emit_move_insn (r10
, GEN_INT (amount
));
351 RTX_FRAME_RELATED_P (insn
) = 1;
352 insn
= emit_add (stack_pointer_rtx
, stack_pointer_rtx
, r10
);
354 RTX_FRAME_RELATED_P (insn
) = 1;
358 insn
= emit_add (stack_pointer_rtx
,
359 stack_pointer_rtx
, GEN_INT (amount
));
361 RTX_FRAME_RELATED_P (insn
) = 1;
366 /* Create and emit instructions for a functions prologue. */
368 lm32_expand_prologue (void)
372 lm32_compute_frame_size (get_frame_size ());
374 if (current_frame_info
.total_size
> 0)
376 /* Add space on stack new frame. */
377 stack_adjust (-current_frame_info
.total_size
);
379 /* Save callee save registers. */
380 if (current_frame_info
.reg_save_mask
!= 0)
381 expand_save_restore (¤t_frame_info
, 0);
383 /* Setup frame pointer if it's needed. */
384 if (frame_pointer_needed
== 1)
387 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
388 RTX_FRAME_RELATED_P (insn
) = 1;
390 /* Add offset - Don't use total_size, as that includes pretend_size,
391 which isn't part of this frame? */
392 insn
= emit_add (frame_pointer_rtx
,
394 GEN_INT (current_frame_info
.args_size
+
395 current_frame_info
.callee_size
+
396 current_frame_info
.locals_size
));
397 RTX_FRAME_RELATED_P (insn
) = 1;
400 /* Prevent prologue from being scheduled into function body. */
401 emit_insn (gen_blockage ());
405 /* Create an emit instructions for a functions epilogue. */
407 lm32_expand_epilogue (void)
409 rtx ra_rtx
= gen_rtx_REG (Pmode
, RA_REGNUM
);
411 lm32_compute_frame_size (get_frame_size ());
413 if (current_frame_info
.total_size
> 0)
415 /* Prevent stack code from being reordered. */
416 emit_insn (gen_blockage ());
418 /* Restore callee save registers. */
419 if (current_frame_info
.reg_save_mask
!= 0)
420 expand_save_restore (¤t_frame_info
, 1);
422 /* Deallocate stack. */
423 stack_adjust (current_frame_info
.total_size
);
425 /* Return to calling function. */
426 emit_jump_insn (gen_return_internal (ra_rtx
));
430 /* Return to calling function. */
431 emit_jump_insn (gen_return_internal (ra_rtx
));
435 /* Return the bytes needed to compute the frame pointer from the current
438 lm32_compute_frame_size (int size
)
441 HOST_WIDE_INT total_size
, locals_size
, args_size
, pretend_size
, callee_size
;
442 unsigned int reg_save_mask
;
445 args_size
= crtl
->outgoing_args_size
;
446 pretend_size
= crtl
->args
.pretend_args_size
;
450 /* Build mask that actually determines which regsiters we save
451 and calculate size required to store them in the stack. */
452 for (regno
= 1; regno
< SP_REGNUM
; regno
++)
454 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
456 reg_save_mask
|= 1 << regno
;
457 callee_size
+= UNITS_PER_WORD
;
460 if (df_regs_ever_live_p (RA_REGNUM
) || ! crtl
->is_leaf
463 reg_save_mask
|= 1 << RA_REGNUM
;
464 callee_size
+= UNITS_PER_WORD
;
466 if (!(reg_save_mask
& (1 << FP_REGNUM
)) && frame_pointer_needed
)
468 reg_save_mask
|= 1 << FP_REGNUM
;
469 callee_size
+= UNITS_PER_WORD
;
472 /* Compute total frame size. */
473 total_size
= pretend_size
+ args_size
+ locals_size
+ callee_size
;
475 /* Align frame to appropriate boundary. */
476 total_size
= (total_size
+ 3) & ~3;
478 /* Save computed information. */
479 current_frame_info
.total_size
= total_size
;
480 current_frame_info
.callee_size
= callee_size
;
481 current_frame_info
.pretend_size
= pretend_size
;
482 current_frame_info
.locals_size
= locals_size
;
483 current_frame_info
.args_size
= args_size
;
484 current_frame_info
.reg_save_mask
= reg_save_mask
;
490 lm32_print_operand (FILE * file
, rtx op
, int letter
)
494 code
= GET_CODE (op
);
496 if (code
== SIGN_EXTEND
)
497 op
= XEXP (op
, 0), code
= GET_CODE (op
);
498 else if (code
== REG
|| code
== SUBREG
)
505 regnum
= true_regnum (op
);
507 fprintf (file
, "%s", reg_names
[regnum
]);
509 else if (code
== HIGH
)
510 output_addr_const (file
, XEXP (op
, 0));
511 else if (code
== MEM
)
512 output_address (XEXP (op
, 0));
513 else if (letter
== 'z' && GET_CODE (op
) == CONST_INT
&& INTVAL (op
) == 0)
514 fprintf (file
, "%s", reg_names
[0]);
515 else if (GET_CODE (op
) == CONST_DOUBLE
)
517 if ((CONST_DOUBLE_LOW (op
) != 0) || (CONST_DOUBLE_HIGH (op
) != 0))
518 output_operand_lossage ("only 0.0 can be loaded as an immediate");
523 fprintf (file
, "e ");
525 fprintf (file
, "ne ");
527 fprintf (file
, "g ");
528 else if (code
== GTU
)
529 fprintf (file
, "gu ");
531 fprintf (file
, "l ");
532 else if (code
== LTU
)
533 fprintf (file
, "lu ");
535 fprintf (file
, "ge ");
536 else if (code
== GEU
)
537 fprintf (file
, "geu");
539 fprintf (file
, "le ");
540 else if (code
== LEU
)
541 fprintf (file
, "leu");
543 output_addr_const (file
, op
);
546 /* A C compound statement to output to stdio stream STREAM the
547 assembler syntax for an instruction operand that is a memory
548 reference whose address is ADDR. ADDR is an RTL expression.
550 On some machines, the syntax for a symbolic address depends on
551 the section that the address refers to. On these machines,
552 define the macro `ENCODE_SECTION_INFO' to store the information
553 into the `symbol_ref', and then check for it here. */
556 lm32_print_operand_address (FILE * file
, rtx addr
)
558 switch (GET_CODE (addr
))
561 fprintf (file
, "(%s+0)", reg_names
[REGNO (addr
)]);
565 output_address (XEXP (addr
, 0));
570 rtx arg0
= XEXP (addr
, 0);
571 rtx arg1
= XEXP (addr
, 1);
573 if (GET_CODE (arg0
) == REG
&& CONSTANT_P (arg1
))
575 if (GET_CODE (arg1
) == CONST_INT
)
576 fprintf (file
, "(%s+%ld)", reg_names
[REGNO (arg0
)],
580 fprintf (file
, "(%s+", reg_names
[REGNO (arg0
)]);
581 output_addr_const (file
, arg1
);
585 else if (CONSTANT_P (arg0
) && CONSTANT_P (arg1
))
586 output_addr_const (file
, addr
);
588 fatal_insn ("bad operand", addr
);
593 if (SYMBOL_REF_SMALL_P (addr
))
595 fprintf (file
, "gp(");
596 output_addr_const (file
, addr
);
600 fatal_insn ("can't use non gp relative absolute address", addr
);
604 fatal_insn ("invalid addressing mode", addr
);
609 /* Determine where to put an argument to a function.
610 Value is zero to push the argument on the stack,
611 or a hard register in which to store the argument.
613 MODE is the argument's machine mode.
614 TYPE is the data type of the argument (as a tree).
615 This is null for libcalls where that information may
617 CUM is a variable of type CUMULATIVE_ARGS which gives info about
618 the preceding args and about the function being called.
619 NAMED is nonzero if this argument is a named parameter
620 (otherwise it is an extra parameter matching an ellipsis). */
623 lm32_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
624 const_tree type
, bool named
)
626 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
628 if (mode
== VOIDmode
)
629 /* Compute operand 2 of the call insn. */
632 if (targetm
.calls
.must_pass_in_stack (mode
, type
))
635 if (!named
|| (*cum
+ LM32_NUM_REGS2 (mode
, type
) > LM32_NUM_ARG_REGS
))
638 return gen_rtx_REG (mode
, *cum
+ LM32_FIRST_ARG_REG
);
642 lm32_function_arg_advance (cumulative_args_t cum
, enum machine_mode mode
,
643 const_tree type
, bool named ATTRIBUTE_UNUSED
)
645 *get_cumulative_args (cum
) += LM32_NUM_REGS2 (mode
, type
);
649 lm32_compute_initial_elimination_offset (int from
, int to
)
651 HOST_WIDE_INT offset
= 0;
655 case ARG_POINTER_REGNUM
:
658 case FRAME_POINTER_REGNUM
:
661 case STACK_POINTER_REGNUM
:
663 lm32_compute_frame_size (get_frame_size ()) -
664 current_frame_info
.pretend_size
;
678 lm32_setup_incoming_varargs (cumulative_args_t cum_v
, enum machine_mode mode
,
679 tree type
, int *pretend_size
, int no_rtl
)
681 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
685 fntype
= TREE_TYPE (current_function_decl
);
687 if (stdarg_p (fntype
))
688 first_anon_arg
= *cum
+ LM32_FIRST_ARG_REG
;
691 /* this is the common case, we have been passed details setup
692 for the last named argument, we want to skip over the
693 registers, if any used in passing this named paramter in
694 order to determine which is the first registers used to pass
695 anonymous arguments. */
699 size
= int_size_in_bytes (type
);
701 size
= GET_MODE_SIZE (mode
);
704 *cum
+ LM32_FIRST_ARG_REG
+
705 ((size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
708 if ((first_anon_arg
< (LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
)) && !no_rtl
)
710 int first_reg_offset
= first_anon_arg
;
711 int size
= LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
- first_anon_arg
;
714 regblock
= gen_rtx_MEM (BLKmode
,
715 plus_constant (Pmode
, arg_pointer_rtx
,
716 FIRST_PARM_OFFSET (0)));
717 move_block_from_reg (first_reg_offset
, regblock
, size
);
719 *pretend_size
= size
* UNITS_PER_WORD
;
723 /* Override command line options. */
725 lm32_option_override (void)
727 /* We must have sign-extend enabled if barrel-shift isn't. */
728 if (!TARGET_BARREL_SHIFT_ENABLED
&& !TARGET_SIGN_EXTEND_ENABLED
)
729 target_flags
|= MASK_SIGN_EXTEND_ENABLED
;
732 /* Return nonzero if this function is known to have a null epilogue.
733 This allows the optimizer to omit jumps to jumps if no stack
736 lm32_can_use_return (void)
738 if (!reload_completed
)
741 if (df_regs_ever_live_p (RA_REGNUM
) || crtl
->profile
)
744 if (lm32_compute_frame_size (get_frame_size ()) != 0)
750 /* Support function to determine the return address of the function
751 'count' frames back up the stack. */
753 lm32_return_addr_rtx (int count
, rtx frame
)
758 if (!df_regs_ever_live_p (RA_REGNUM
))
759 r
= gen_rtx_REG (Pmode
, RA_REGNUM
);
762 r
= gen_rtx_MEM (Pmode
,
763 gen_rtx_PLUS (Pmode
, frame
,
764 GEN_INT (-2 * UNITS_PER_WORD
)));
765 set_mem_alias_set (r
, get_frame_alias_set ());
768 else if (flag_omit_frame_pointer
)
772 r
= gen_rtx_MEM (Pmode
,
773 gen_rtx_PLUS (Pmode
, frame
,
774 GEN_INT (-2 * UNITS_PER_WORD
)));
775 set_mem_alias_set (r
, get_frame_alias_set ());
780 /* Return true if EXP should be placed in the small data section. */
783 lm32_in_small_data_p (const_tree exp
)
785 /* We want to merge strings, so we never consider them small data. */
786 if (TREE_CODE (exp
) == STRING_CST
)
789 /* Functions are never in the small data area. Duh. */
790 if (TREE_CODE (exp
) == FUNCTION_DECL
)
793 if (TREE_CODE (exp
) == VAR_DECL
&& DECL_SECTION_NAME (exp
))
795 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (exp
));
796 if (strcmp (section
, ".sdata") == 0 || strcmp (section
, ".sbss") == 0)
801 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
803 /* If this is an incomplete type with size 0, then we can't put it
804 in sdata because it might be too big when completed. */
805 if (size
> 0 && size
<= g_switch_value
)
812 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
813 Assume that the areas do not overlap. */
816 lm32_block_move_inline (rtx dest
, rtx src
, HOST_WIDE_INT length
,
817 HOST_WIDE_INT alignment
)
819 HOST_WIDE_INT offset
, delta
;
820 unsigned HOST_WIDE_INT bits
;
822 enum machine_mode mode
;
825 /* Work out how many bits to move at a time. */
839 mode
= mode_for_size (bits
, MODE_INT
, 0);
840 delta
= bits
/ BITS_PER_UNIT
;
842 /* Allocate a buffer for the temporary registers. */
843 regs
= XALLOCAVEC (rtx
, length
/ delta
);
845 /* Load as many BITS-sized chunks as possible. */
846 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
848 regs
[i
] = gen_reg_rtx (mode
);
849 emit_move_insn (regs
[i
], adjust_address (src
, mode
, offset
));
852 /* Copy the chunks to the destination. */
853 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
854 emit_move_insn (adjust_address (dest
, mode
, offset
), regs
[i
]);
856 /* Mop up any left-over bytes. */
859 src
= adjust_address (src
, BLKmode
, offset
);
860 dest
= adjust_address (dest
, BLKmode
, offset
);
861 move_by_pieces (dest
, src
, length
- offset
,
862 MIN (MEM_ALIGN (src
), MEM_ALIGN (dest
)), 0);
866 /* Expand string/block move operations.
868 operands[0] is the pointer to the destination.
869 operands[1] is the pointer to the source.
870 operands[2] is the number of bytes to move.
871 operands[3] is the alignment. */
874 lm32_expand_block_move (rtx
* operands
)
876 if ((GET_CODE (operands
[2]) == CONST_INT
) && (INTVAL (operands
[2]) <= 32))
878 lm32_block_move_inline (operands
[0], operands
[1], INTVAL (operands
[2]),
879 INTVAL (operands
[3]));
885 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
886 isn't protected by a PIC unspec. */
888 nonpic_symbol_mentioned_p (rtx x
)
893 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
894 || GET_CODE (x
) == PC
)
897 /* We don't want to look into the possible MEM location of a
898 CONST_DOUBLE, since we're not going to use it, in general. */
899 if (GET_CODE (x
) == CONST_DOUBLE
)
902 if (GET_CODE (x
) == UNSPEC
)
905 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
906 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
912 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
913 if (nonpic_symbol_mentioned_p (XVECEXP (x
, i
, j
)))
916 else if (fmt
[i
] == 'e' && nonpic_symbol_mentioned_p (XEXP (x
, i
)))
923 /* Compute a (partial) cost for rtx X. Return true if the complete
924 cost has been computed, and false if subexpressions should be
925 scanned. In either case, *TOTAL contains the cost result. */
928 lm32_rtx_costs (rtx x
, int code
, int outer_code
, int opno ATTRIBUTE_UNUSED
,
929 int *total
, bool speed
)
931 enum machine_mode mode
= GET_MODE (x
);
934 const int arithmetic_latency
= 1;
935 const int shift_latency
= 1;
936 const int compare_latency
= 2;
937 const int multiply_latency
= 3;
938 const int load_latency
= 3;
939 const int libcall_size_cost
= 5;
941 /* Determine if we can handle the given mode size in a single instruction. */
942 small_mode
= (mode
== QImode
) || (mode
== HImode
) || (mode
== SImode
);
955 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
));
958 COSTS_N_INSNS (arithmetic_latency
+ (LM32_NUM_REGS (mode
) - 1));
965 *total
= COSTS_N_INSNS (1);
967 *total
= COSTS_N_INSNS (compare_latency
);
971 /* FIXME. Guessing here. */
972 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * (2 + 3) / 2);
979 if (TARGET_BARREL_SHIFT_ENABLED
&& small_mode
)
982 *total
= COSTS_N_INSNS (1);
984 *total
= COSTS_N_INSNS (shift_latency
);
986 else if (TARGET_BARREL_SHIFT_ENABLED
)
988 /* FIXME: Guessing here. */
989 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * 4);
991 else if (small_mode
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
993 *total
= COSTS_N_INSNS (INTVAL (XEXP (x
, 1)));
999 *total
= COSTS_N_INSNS (libcall_size_cost
);
1001 *total
= COSTS_N_INSNS (100);
1006 if (TARGET_MULTIPLY_ENABLED
&& small_mode
)
1009 *total
= COSTS_N_INSNS (1);
1011 *total
= COSTS_N_INSNS (multiply_latency
);
1017 *total
= COSTS_N_INSNS (libcall_size_cost
);
1019 *total
= COSTS_N_INSNS (100);
1027 if (TARGET_DIVIDE_ENABLED
&& small_mode
)
1030 *total
= COSTS_N_INSNS (1);
1033 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1036 unsigned HOST_WIDE_INT i
= INTVAL (XEXP (x
, 1));
1043 if (IN_RANGE (i
, 0, 65536))
1044 *total
= COSTS_N_INSNS (1 + 1 + cycles
);
1046 *total
= COSTS_N_INSNS (2 + 1 + cycles
);
1049 else if (GET_CODE (XEXP (x
, 1)) == REG
)
1051 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1056 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1065 *total
= COSTS_N_INSNS (libcall_size_cost
);
1067 *total
= COSTS_N_INSNS (100);
1074 *total
= COSTS_N_INSNS (1);
1076 *total
= COSTS_N_INSNS (arithmetic_latency
);
1080 if (MEM_P (XEXP (x
, 0)))
1081 *total
= COSTS_N_INSNS (0);
1082 else if (small_mode
)
1085 *total
= COSTS_N_INSNS (1);
1087 *total
= COSTS_N_INSNS (arithmetic_latency
);
1090 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) / 2);
1099 *total
= COSTS_N_INSNS (0);
1110 if (satisfies_constraint_L (x
))
1111 *total
= COSTS_N_INSNS (0);
1113 *total
= COSTS_N_INSNS (2);
1120 if (satisfies_constraint_K (x
))
1121 *total
= COSTS_N_INSNS (0);
1123 *total
= COSTS_N_INSNS (2);
1127 if (TARGET_MULTIPLY_ENABLED
)
1129 if (satisfies_constraint_K (x
))
1130 *total
= COSTS_N_INSNS (0);
1132 *total
= COSTS_N_INSNS (2);
1138 if (satisfies_constraint_K (x
))
1139 *total
= COSTS_N_INSNS (1);
1141 *total
= COSTS_N_INSNS (2);
1152 *total
= COSTS_N_INSNS (0);
1159 *total
= COSTS_N_INSNS (0);
1168 *total
= COSTS_N_INSNS (2);
1172 *total
= COSTS_N_INSNS (1);
1177 *total
= COSTS_N_INSNS (1);
1179 *total
= COSTS_N_INSNS (load_latency
);
1187 /* Implemenent TARGET_CAN_ELIMINATE. */
1190 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1192 return (to
== STACK_POINTER_REGNUM
&& frame_pointer_needed
) ? false : true;
1195 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1198 lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
, bool strict
)
1201 if (strict
&& REG_P (x
) && STRICT_REG_OK_FOR_BASE_P (x
))
1203 if (!strict
&& REG_P (x
) && NONSTRICT_REG_OK_FOR_BASE_P (x
))
1207 if (GET_CODE (x
) == PLUS
1208 && REG_P (XEXP (x
, 0))
1209 && ((strict
&& STRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0)))
1210 || (!strict
&& NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0))))
1211 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1212 && satisfies_constraint_K (XEXP ((x
), 1)))
1216 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_SMALL_P (x
))
1222 /* Check a move is not memory to memory. */
1225 lm32_move_ok (enum machine_mode mode
, rtx operands
[2]) {
1226 if (memory_operand (operands
[0], mode
))
1227 return register_or_zero_operand (operands
[1], mode
);
1231 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1234 lm32_legitimate_constant_p (enum machine_mode mode
, rtx x
)
1236 /* 32-bit addresses require multiple instructions. */
1237 if (!flag_pic
&& reloc_operand (x
, mode
))