1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
4 Copyright (C) 2009, 2010, 2011 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
43 #include "diagnostic-core.h"
48 #include "target-def.h"
49 #include "langhooks.h"
50 #include "tm-constrs.h"
53 struct lm32_frame_info
55 HOST_WIDE_INT total_size
; /* number of bytes of entire frame. */
56 HOST_WIDE_INT callee_size
; /* number of bytes to save callee saves. */
57 HOST_WIDE_INT pretend_size
; /* number of bytes we pretend caller did. */
58 HOST_WIDE_INT args_size
; /* number of bytes for outgoing arguments. */
59 HOST_WIDE_INT locals_size
; /* number of bytes for local variables. */
60 unsigned int reg_save_mask
; /* mask of saved registers. */
63 /* Prototypes for static functions. */
64 static rtx
emit_add (rtx dest
, rtx src0
, rtx src1
);
65 static void expand_save_restore (struct lm32_frame_info
*info
, int op
);
66 static void stack_adjust (HOST_WIDE_INT amount
);
67 static bool lm32_in_small_data_p (const_tree
);
68 static void lm32_setup_incoming_varargs (cumulative_args_t cum
,
69 enum machine_mode mode
, tree type
,
70 int *pretend_size
, int no_rtl
);
71 static bool lm32_rtx_costs (rtx x
, int code
, int outer_code
, int opno
,
72 int *total
, bool speed
);
73 static bool lm32_can_eliminate (const int, const int);
75 lm32_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
);
76 static HOST_WIDE_INT
lm32_compute_frame_size (int size
);
77 static void lm32_option_override (void);
78 static rtx
lm32_function_arg (cumulative_args_t cum
,
79 enum machine_mode mode
, const_tree type
,
81 static void lm32_function_arg_advance (cumulative_args_t cum
,
82 enum machine_mode mode
,
83 const_tree type
, bool named
);
84 static bool lm32_legitimate_constant_p (enum machine_mode
, rtx
);
86 #undef TARGET_OPTION_OVERRIDE
87 #define TARGET_OPTION_OVERRIDE lm32_option_override
88 #undef TARGET_ADDRESS_COST
89 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
90 #undef TARGET_RTX_COSTS
91 #define TARGET_RTX_COSTS lm32_rtx_costs
92 #undef TARGET_IN_SMALL_DATA_P
93 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
94 #undef TARGET_PROMOTE_FUNCTION_MODE
95 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
96 #undef TARGET_SETUP_INCOMING_VARARGS
97 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
98 #undef TARGET_FUNCTION_ARG
99 #define TARGET_FUNCTION_ARG lm32_function_arg
100 #undef TARGET_FUNCTION_ARG_ADVANCE
101 #define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
102 #undef TARGET_PROMOTE_PROTOTYPES
103 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
104 #undef TARGET_MIN_ANCHOR_OFFSET
105 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
106 #undef TARGET_MAX_ANCHOR_OFFSET
107 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
108 #undef TARGET_CAN_ELIMINATE
109 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
110 #undef TARGET_LEGITIMATE_ADDRESS_P
111 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
112 #undef TARGET_LEGITIMATE_CONSTANT_P
113 #define TARGET_LEGITIMATE_CONSTANT_P lm32_legitimate_constant_p
115 struct gcc_target targetm
= TARGET_INITIALIZER
;
117 /* Current frame information calculated by lm32_compute_frame_size. */
118 static struct lm32_frame_info current_frame_info
;
120 /* Return non-zero if the given return type should be returned in memory. */
123 lm32_return_in_memory (tree type
)
127 if (!AGGREGATE_TYPE_P (type
))
129 /* All simple types are returned in registers. */
133 size
= int_size_in_bytes (type
);
134 if (size
>= 0 && size
<= UNITS_PER_WORD
)
136 /* If it can fit in one register. */
143 /* Generate an emit a word sized add instruction. */
146 emit_add (rtx dest
, rtx src0
, rtx src1
)
149 insn
= emit_insn (gen_addsi3 (dest
, src0
, src1
));
153 /* Generate the code to compare (and possibly branch) two integer values
154 TEST_CODE is the comparison code we are trying to emulate
155 (or implement directly)
156 RESULT is where to store the result of the comparison,
157 or null to emit a branch
158 CMP0 CMP1 are the two comparison operands
159 DESTINATION is the destination of the branch, or null to only compare
163 gen_int_relational (enum rtx_code code
,
169 enum machine_mode mode
;
175 mode
= GET_MODE (cmp0
);
176 if (mode
== VOIDmode
)
177 mode
= GET_MODE (cmp1
);
179 /* Is this a branch or compare. */
180 branch_p
= (destination
!= 0);
182 /* Instruction set doesn't support LE or LT, so swap operands and use
193 code
= swap_condition (code
);
205 rtx insn
, cond
, label
;
207 /* Operands must be in registers. */
208 if (!register_operand (cmp0
, mode
))
209 cmp0
= force_reg (mode
, cmp0
);
210 if (!register_operand (cmp1
, mode
))
211 cmp1
= force_reg (mode
, cmp1
);
213 /* Generate conditional branch instruction. */
214 cond
= gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
);
215 label
= gen_rtx_LABEL_REF (VOIDmode
, destination
);
216 insn
= gen_rtx_SET (VOIDmode
, pc_rtx
,
217 gen_rtx_IF_THEN_ELSE (VOIDmode
,
218 cond
, label
, pc_rtx
));
219 emit_jump_insn (insn
);
223 /* We can't have const_ints in cmp0, other than 0. */
224 if ((GET_CODE (cmp0
) == CONST_INT
) && (INTVAL (cmp0
) != 0))
225 cmp0
= force_reg (mode
, cmp0
);
227 /* If the comparison is against an int not in legal range
228 move it into a register. */
229 if (GET_CODE (cmp1
) == CONST_INT
)
239 if (!satisfies_constraint_K (cmp1
))
240 cmp1
= force_reg (mode
, cmp1
);
246 if (!satisfies_constraint_L (cmp1
))
247 cmp1
= force_reg (mode
, cmp1
);
254 /* Generate compare instruction. */
255 emit_move_insn (result
, gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
));
259 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
260 and OPERAND[3]. Store the result in OPERANDS[0]. */
263 lm32_expand_scc (rtx operands
[])
265 rtx target
= operands
[0];
266 enum rtx_code code
= GET_CODE (operands
[1]);
267 rtx op0
= operands
[2];
268 rtx op1
= operands
[3];
270 gen_int_relational (code
, target
, op0
, op1
, NULL_RTX
);
273 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
274 CODE and jump to OPERANDS[3] if the condition holds. */
277 lm32_expand_conditional_branch (rtx operands
[])
279 enum rtx_code code
= GET_CODE (operands
[0]);
280 rtx op0
= operands
[1];
281 rtx op1
= operands
[2];
282 rtx destination
= operands
[3];
284 gen_int_relational (code
, NULL_RTX
, op0
, op1
, destination
);
287 /* Generate and emit RTL to save or restore callee save registers. */
289 expand_save_restore (struct lm32_frame_info
*info
, int op
)
291 unsigned int reg_save_mask
= info
->reg_save_mask
;
293 HOST_WIDE_INT offset
;
296 /* Callee saves are below locals and above outgoing arguments. */
297 offset
= info
->args_size
+ info
->callee_size
;
298 for (regno
= 0; regno
<= 31; regno
++)
300 if ((reg_save_mask
& (1 << regno
)) != 0)
305 offset_rtx
= GEN_INT (offset
);
306 if (satisfies_constraint_K (offset_rtx
))
308 mem
= gen_rtx_MEM (word_mode
,
315 /* r10 is caller saved so it can be used as a temp reg. */
318 r10
= gen_rtx_REG (word_mode
, 10);
319 insn
= emit_move_insn (r10
, offset_rtx
);
321 RTX_FRAME_RELATED_P (insn
) = 1;
322 insn
= emit_add (r10
, r10
, stack_pointer_rtx
);
324 RTX_FRAME_RELATED_P (insn
) = 1;
325 mem
= gen_rtx_MEM (word_mode
, r10
);
329 insn
= emit_move_insn (mem
, gen_rtx_REG (word_mode
, regno
));
331 insn
= emit_move_insn (gen_rtx_REG (word_mode
, regno
), mem
);
333 /* only prologue instructions which set the sp fp or save a
334 register should be marked as frame related. */
336 RTX_FRAME_RELATED_P (insn
) = 1;
337 offset
-= UNITS_PER_WORD
;
343 stack_adjust (HOST_WIDE_INT amount
)
347 if (!IN_RANGE (amount
, -32776, 32768))
349 /* r10 is caller saved so it can be used as a temp reg. */
351 r10
= gen_rtx_REG (word_mode
, 10);
352 insn
= emit_move_insn (r10
, GEN_INT (amount
));
354 RTX_FRAME_RELATED_P (insn
) = 1;
355 insn
= emit_add (stack_pointer_rtx
, stack_pointer_rtx
, r10
);
357 RTX_FRAME_RELATED_P (insn
) = 1;
361 insn
= emit_add (stack_pointer_rtx
,
362 stack_pointer_rtx
, GEN_INT (amount
));
364 RTX_FRAME_RELATED_P (insn
) = 1;
369 /* Create and emit instructions for a functions prologue. */
371 lm32_expand_prologue (void)
375 lm32_compute_frame_size (get_frame_size ());
377 if (current_frame_info
.total_size
> 0)
379 /* Add space on stack new frame. */
380 stack_adjust (-current_frame_info
.total_size
);
382 /* Save callee save registers. */
383 if (current_frame_info
.reg_save_mask
!= 0)
384 expand_save_restore (¤t_frame_info
, 0);
386 /* Setup frame pointer if it's needed. */
387 if (frame_pointer_needed
== 1)
390 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
391 RTX_FRAME_RELATED_P (insn
) = 1;
393 /* Add offset - Don't use total_size, as that includes pretend_size,
394 which isn't part of this frame? */
395 insn
= emit_add (frame_pointer_rtx
,
397 GEN_INT (current_frame_info
.args_size
+
398 current_frame_info
.callee_size
+
399 current_frame_info
.locals_size
));
400 RTX_FRAME_RELATED_P (insn
) = 1;
403 /* Prevent prologue from being scheduled into function body. */
404 emit_insn (gen_blockage ());
408 /* Create an emit instructions for a functions epilogue. */
410 lm32_expand_epilogue (void)
412 rtx ra_rtx
= gen_rtx_REG (Pmode
, RA_REGNUM
);
414 lm32_compute_frame_size (get_frame_size ());
416 if (current_frame_info
.total_size
> 0)
418 /* Prevent stack code from being reordered. */
419 emit_insn (gen_blockage ());
421 /* Restore callee save registers. */
422 if (current_frame_info
.reg_save_mask
!= 0)
423 expand_save_restore (¤t_frame_info
, 1);
425 /* Deallocate stack. */
426 stack_adjust (current_frame_info
.total_size
);
428 /* Return to calling function. */
429 emit_jump_insn (gen_return_internal (ra_rtx
));
433 /* Return to calling function. */
434 emit_jump_insn (gen_return_internal (ra_rtx
));
438 /* Return the bytes needed to compute the frame pointer from the current
441 lm32_compute_frame_size (int size
)
444 HOST_WIDE_INT total_size
, locals_size
, args_size
, pretend_size
, callee_size
;
445 unsigned int reg_save_mask
;
448 args_size
= crtl
->outgoing_args_size
;
449 pretend_size
= crtl
->args
.pretend_args_size
;
453 /* Build mask that actually determines which regsiters we save
454 and calculate size required to store them in the stack. */
455 for (regno
= 1; regno
< SP_REGNUM
; regno
++)
457 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
459 reg_save_mask
|= 1 << regno
;
460 callee_size
+= UNITS_PER_WORD
;
463 if (df_regs_ever_live_p (RA_REGNUM
) || ! crtl
->is_leaf
466 reg_save_mask
|= 1 << RA_REGNUM
;
467 callee_size
+= UNITS_PER_WORD
;
469 if (!(reg_save_mask
& (1 << FP_REGNUM
)) && frame_pointer_needed
)
471 reg_save_mask
|= 1 << FP_REGNUM
;
472 callee_size
+= UNITS_PER_WORD
;
475 /* Compute total frame size. */
476 total_size
= pretend_size
+ args_size
+ locals_size
+ callee_size
;
478 /* Align frame to appropriate boundary. */
479 total_size
= (total_size
+ 3) & ~3;
481 /* Save computed information. */
482 current_frame_info
.total_size
= total_size
;
483 current_frame_info
.callee_size
= callee_size
;
484 current_frame_info
.pretend_size
= pretend_size
;
485 current_frame_info
.locals_size
= locals_size
;
486 current_frame_info
.args_size
= args_size
;
487 current_frame_info
.reg_save_mask
= reg_save_mask
;
493 lm32_print_operand (FILE * file
, rtx op
, int letter
)
497 code
= GET_CODE (op
);
499 if (code
== SIGN_EXTEND
)
500 op
= XEXP (op
, 0), code
= GET_CODE (op
);
501 else if (code
== REG
|| code
== SUBREG
)
508 regnum
= true_regnum (op
);
510 fprintf (file
, "%s", reg_names
[regnum
]);
512 else if (code
== HIGH
)
513 output_addr_const (file
, XEXP (op
, 0));
514 else if (code
== MEM
)
515 output_address (XEXP (op
, 0));
516 else if (letter
== 'z' && GET_CODE (op
) == CONST_INT
&& INTVAL (op
) == 0)
517 fprintf (file
, "%s", reg_names
[0]);
518 else if (GET_CODE (op
) == CONST_DOUBLE
)
520 if ((CONST_DOUBLE_LOW (op
) != 0) || (CONST_DOUBLE_HIGH (op
) != 0))
521 output_operand_lossage ("only 0.0 can be loaded as an immediate");
526 fprintf (file
, "e ");
528 fprintf (file
, "ne ");
530 fprintf (file
, "g ");
531 else if (code
== GTU
)
532 fprintf (file
, "gu ");
534 fprintf (file
, "l ");
535 else if (code
== LTU
)
536 fprintf (file
, "lu ");
538 fprintf (file
, "ge ");
539 else if (code
== GEU
)
540 fprintf (file
, "geu");
542 fprintf (file
, "le ");
543 else if (code
== LEU
)
544 fprintf (file
, "leu");
546 output_addr_const (file
, op
);
549 /* A C compound statement to output to stdio stream STREAM the
550 assembler syntax for an instruction operand that is a memory
551 reference whose address is ADDR. ADDR is an RTL expression.
553 On some machines, the syntax for a symbolic address depends on
554 the section that the address refers to. On these machines,
555 define the macro `ENCODE_SECTION_INFO' to store the information
556 into the `symbol_ref', and then check for it here. */
559 lm32_print_operand_address (FILE * file
, rtx addr
)
561 switch (GET_CODE (addr
))
564 fprintf (file
, "(%s+0)", reg_names
[REGNO (addr
)]);
568 output_address (XEXP (addr
, 0));
573 rtx arg0
= XEXP (addr
, 0);
574 rtx arg1
= XEXP (addr
, 1);
576 if (GET_CODE (arg0
) == REG
&& CONSTANT_P (arg1
))
578 if (GET_CODE (arg1
) == CONST_INT
)
579 fprintf (file
, "(%s+%ld)", reg_names
[REGNO (arg0
)],
583 fprintf (file
, "(%s+", reg_names
[REGNO (arg0
)]);
584 output_addr_const (file
, arg1
);
588 else if (CONSTANT_P (arg0
) && CONSTANT_P (arg1
))
589 output_addr_const (file
, addr
);
591 fatal_insn ("bad operand", addr
);
596 if (SYMBOL_REF_SMALL_P (addr
))
598 fprintf (file
, "gp(");
599 output_addr_const (file
, addr
);
603 fatal_insn ("can't use non gp relative absolute address", addr
);
607 fatal_insn ("invalid addressing mode", addr
);
612 /* Determine where to put an argument to a function.
613 Value is zero to push the argument on the stack,
614 or a hard register in which to store the argument.
616 MODE is the argument's machine mode.
617 TYPE is the data type of the argument (as a tree).
618 This is null for libcalls where that information may
620 CUM is a variable of type CUMULATIVE_ARGS which gives info about
621 the preceding args and about the function being called.
622 NAMED is nonzero if this argument is a named parameter
623 (otherwise it is an extra parameter matching an ellipsis). */
626 lm32_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
627 const_tree type
, bool named
)
629 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
631 if (mode
== VOIDmode
)
632 /* Compute operand 2 of the call insn. */
635 if (targetm
.calls
.must_pass_in_stack (mode
, type
))
638 if (!named
|| (*cum
+ LM32_NUM_REGS2 (mode
, type
) > LM32_NUM_ARG_REGS
))
641 return gen_rtx_REG (mode
, *cum
+ LM32_FIRST_ARG_REG
);
645 lm32_function_arg_advance (cumulative_args_t cum
, enum machine_mode mode
,
646 const_tree type
, bool named ATTRIBUTE_UNUSED
)
648 *get_cumulative_args (cum
) += LM32_NUM_REGS2 (mode
, type
);
652 lm32_compute_initial_elimination_offset (int from
, int to
)
654 HOST_WIDE_INT offset
= 0;
658 case ARG_POINTER_REGNUM
:
661 case FRAME_POINTER_REGNUM
:
664 case STACK_POINTER_REGNUM
:
666 lm32_compute_frame_size (get_frame_size ()) -
667 current_frame_info
.pretend_size
;
681 lm32_setup_incoming_varargs (cumulative_args_t cum_v
, enum machine_mode mode
,
682 tree type
, int *pretend_size
, int no_rtl
)
684 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
688 fntype
= TREE_TYPE (current_function_decl
);
690 if (stdarg_p (fntype
))
691 first_anon_arg
= *cum
+ LM32_FIRST_ARG_REG
;
694 /* this is the common case, we have been passed details setup
695 for the last named argument, we want to skip over the
696 registers, if any used in passing this named paramter in
697 order to determine which is the first registers used to pass
698 anonymous arguments. */
702 size
= int_size_in_bytes (type
);
704 size
= GET_MODE_SIZE (mode
);
707 *cum
+ LM32_FIRST_ARG_REG
+
708 ((size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
711 if ((first_anon_arg
< (LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
)) && !no_rtl
)
713 int first_reg_offset
= first_anon_arg
;
714 int size
= LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
- first_anon_arg
;
717 regblock
= gen_rtx_MEM (BLKmode
,
718 plus_constant (Pmode
, arg_pointer_rtx
,
719 FIRST_PARM_OFFSET (0)));
720 move_block_from_reg (first_reg_offset
, regblock
, size
);
722 *pretend_size
= size
* UNITS_PER_WORD
;
726 /* Override command line options. */
728 lm32_option_override (void)
730 /* We must have sign-extend enabled if barrel-shift isn't. */
731 if (!TARGET_BARREL_SHIFT_ENABLED
&& !TARGET_SIGN_EXTEND_ENABLED
)
732 target_flags
|= MASK_SIGN_EXTEND_ENABLED
;
735 /* Return nonzero if this function is known to have a null epilogue.
736 This allows the optimizer to omit jumps to jumps if no stack
739 lm32_can_use_return (void)
741 if (!reload_completed
)
744 if (df_regs_ever_live_p (RA_REGNUM
) || crtl
->profile
)
747 if (lm32_compute_frame_size (get_frame_size ()) != 0)
753 /* Support function to determine the return address of the function
754 'count' frames back up the stack. */
756 lm32_return_addr_rtx (int count
, rtx frame
)
761 if (!df_regs_ever_live_p (RA_REGNUM
))
762 r
= gen_rtx_REG (Pmode
, RA_REGNUM
);
765 r
= gen_rtx_MEM (Pmode
,
766 gen_rtx_PLUS (Pmode
, frame
,
767 GEN_INT (-2 * UNITS_PER_WORD
)));
768 set_mem_alias_set (r
, get_frame_alias_set ());
771 else if (flag_omit_frame_pointer
)
775 r
= gen_rtx_MEM (Pmode
,
776 gen_rtx_PLUS (Pmode
, frame
,
777 GEN_INT (-2 * UNITS_PER_WORD
)));
778 set_mem_alias_set (r
, get_frame_alias_set ());
783 /* Return true if EXP should be placed in the small data section. */
786 lm32_in_small_data_p (const_tree exp
)
788 /* We want to merge strings, so we never consider them small data. */
789 if (TREE_CODE (exp
) == STRING_CST
)
792 /* Functions are never in the small data area. Duh. */
793 if (TREE_CODE (exp
) == FUNCTION_DECL
)
796 if (TREE_CODE (exp
) == VAR_DECL
&& DECL_SECTION_NAME (exp
))
798 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (exp
));
799 if (strcmp (section
, ".sdata") == 0 || strcmp (section
, ".sbss") == 0)
804 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
806 /* If this is an incomplete type with size 0, then we can't put it
807 in sdata because it might be too big when completed. */
808 if (size
> 0 && size
<= g_switch_value
)
815 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
816 Assume that the areas do not overlap. */
819 lm32_block_move_inline (rtx dest
, rtx src
, HOST_WIDE_INT length
,
820 HOST_WIDE_INT alignment
)
822 HOST_WIDE_INT offset
, delta
;
823 unsigned HOST_WIDE_INT bits
;
825 enum machine_mode mode
;
828 /* Work out how many bits to move at a time. */
842 mode
= mode_for_size (bits
, MODE_INT
, 0);
843 delta
= bits
/ BITS_PER_UNIT
;
845 /* Allocate a buffer for the temporary registers. */
846 regs
= XALLOCAVEC (rtx
, length
/ delta
);
848 /* Load as many BITS-sized chunks as possible. */
849 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
851 regs
[i
] = gen_reg_rtx (mode
);
852 emit_move_insn (regs
[i
], adjust_address (src
, mode
, offset
));
855 /* Copy the chunks to the destination. */
856 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
857 emit_move_insn (adjust_address (dest
, mode
, offset
), regs
[i
]);
859 /* Mop up any left-over bytes. */
862 src
= adjust_address (src
, BLKmode
, offset
);
863 dest
= adjust_address (dest
, BLKmode
, offset
);
864 move_by_pieces (dest
, src
, length
- offset
,
865 MIN (MEM_ALIGN (src
), MEM_ALIGN (dest
)), 0);
869 /* Expand string/block move operations.
871 operands[0] is the pointer to the destination.
872 operands[1] is the pointer to the source.
873 operands[2] is the number of bytes to move.
874 operands[3] is the alignment. */
877 lm32_expand_block_move (rtx
* operands
)
879 if ((GET_CODE (operands
[2]) == CONST_INT
) && (INTVAL (operands
[2]) <= 32))
881 lm32_block_move_inline (operands
[0], operands
[1], INTVAL (operands
[2]),
882 INTVAL (operands
[3]));
888 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
889 isn't protected by a PIC unspec. */
891 nonpic_symbol_mentioned_p (rtx x
)
896 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
897 || GET_CODE (x
) == PC
)
900 /* We don't want to look into the possible MEM location of a
901 CONST_DOUBLE, since we're not going to use it, in general. */
902 if (GET_CODE (x
) == CONST_DOUBLE
)
905 if (GET_CODE (x
) == UNSPEC
)
908 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
909 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
915 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
916 if (nonpic_symbol_mentioned_p (XVECEXP (x
, i
, j
)))
919 else if (fmt
[i
] == 'e' && nonpic_symbol_mentioned_p (XEXP (x
, i
)))
926 /* Compute a (partial) cost for rtx X. Return true if the complete
927 cost has been computed, and false if subexpressions should be
928 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): this block is an extraction fragment - the switch-arm
   labels (PLUS/COMPARE/shift/MULT/DIV/constant cases) and several
   interior lines are missing, so it cannot be safely reconstructed
   here; restore it from upstream gcc/config/lm32/lm32.c.  */
931 lm32_rtx_costs (rtx x
, int code
, int outer_code
, int opno ATTRIBUTE_UNUSED
,
932 int *total
, bool speed
)
/* Latency constants model the LM32 pipeline when optimizing for speed;
   libcall_size_cost models the call overhead when optimizing for size.  */
934 enum machine_mode mode
= GET_MODE (x
);
937 const int arithmetic_latency
= 1;
938 const int shift_latency
= 1;
939 const int compare_latency
= 2;
940 const int multiply_latency
= 3;
941 const int load_latency
= 3;
942 const int libcall_size_cost
= 5;
944 /* Determine if we can handle the given mode size in a single instruction. */
945 small_mode
= (mode
== QImode
) || (mode
== HImode
) || (mode
== SImode
)
958 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
));
961 COSTS_N_INSNS (arithmetic_latency
+ (LM32_NUM_REGS (mode
) - 1));
/* Compare: single insn for small modes, estimated for wider ones.  */
968 *total
= COSTS_N_INSNS (1);
970 *total
= COSTS_N_INSNS (compare_latency
);
974 /* FIXME. Guessing here. */
975 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * (2 + 3) / 2);
/* Shifts: cheap with the barrel shifter, otherwise one insn per bit
   for constant counts, else a libcall.  */
982 if (TARGET_BARREL_SHIFT_ENABLED
&& small_mode
)
985 *total
= COSTS_N_INSNS (1);
987 *total
= COSTS_N_INSNS (shift_latency
);
989 else if (TARGET_BARREL_SHIFT_ENABLED
)
991 /* FIXME: Guessing here. */
992 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * 4);
994 else if (small_mode
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
996 *total
= COSTS_N_INSNS (INTVAL (XEXP (x
, 1)));
1002 *total
= COSTS_N_INSNS (libcall_size_cost
);
1004 *total
= COSTS_N_INSNS (100);
/* Multiply: hardware multiplier if enabled, otherwise a libcall.  */
1009 if (TARGET_MULTIPLY_ENABLED
&& small_mode
)
1012 *total
= COSTS_N_INSNS (1);
1014 *total
= COSTS_N_INSNS (multiply_latency
);
1020 *total
= COSTS_N_INSNS (libcall_size_cost
);
1022 *total
= COSTS_N_INSNS (100);
/* Divide/modulo: hardware divider if enabled.
   NOTE(review): the computation of `cycles' from the constant divisor
   (original lines ~1040-1045) is missing from this fragment.  */
1030 if (TARGET_DIVIDE_ENABLED
&& small_mode
)
1033 *total
= COSTS_N_INSNS (1);
1036 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1039 unsigned HOST_WIDE_INT i
= INTVAL (XEXP (x
, 1));
1046 if (IN_RANGE (i
, 0, 65536))
1047 *total
= COSTS_N_INSNS (1 + 1 + cycles
);
1049 *total
= COSTS_N_INSNS (2 + 1 + cycles
);
1052 else if (GET_CODE (XEXP (x
, 1)) == REG
)
1054 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1059 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1068 *total
= COSTS_N_INSNS (libcall_size_cost
);
1070 *total
= COSTS_N_INSNS (100);
/* Sign/zero extension.  */
1077 *total
= COSTS_N_INSNS (1);
1079 *total
= COSTS_N_INSNS (arithmetic_latency
);
1083 if (MEM_P (XEXP (x
, 0)))
1084 *total
= COSTS_N_INSNS (0);
1085 else if (small_mode
)
1088 *total
= COSTS_N_INSNS (1);
1090 *total
= COSTS_N_INSNS (arithmetic_latency
);
1093 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) / 2);
/* Constants: free when they satisfy the instruction's immediate
   constraint (L = unsigned 16-bit, K = signed 16-bit), else they need
   an extra insn to build.  */
1102 *total
= COSTS_N_INSNS (0);
1113 if (satisfies_constraint_L (x
))
1114 *total
= COSTS_N_INSNS (0);
1116 *total
= COSTS_N_INSNS (2);
1123 if (satisfies_constraint_K (x
))
1124 *total
= COSTS_N_INSNS (0);
1126 *total
= COSTS_N_INSNS (2);
1130 if (TARGET_MULTIPLY_ENABLED
)
1132 if (satisfies_constraint_K (x
))
1133 *total
= COSTS_N_INSNS (0);
1135 *total
= COSTS_N_INSNS (2);
1141 if (satisfies_constraint_K (x
))
1142 *total
= COSTS_N_INSNS (1);
1144 *total
= COSTS_N_INSNS (2);
/* Symbols, SET and memory accesses.  */
1155 *total
= COSTS_N_INSNS (0);
1162 *total
= COSTS_N_INSNS (0);
1171 *total
= COSTS_N_INSNS (2);
1175 *total
= COSTS_N_INSNS (1);
1180 *total
= COSTS_N_INSNS (1);
1182 *total
= COSTS_N_INSNS (load_latency
);
1190 /* Implemenent TARGET_CAN_ELIMINATE. */
1193 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1195 return (to
== STACK_POINTER_REGNUM
&& frame_pointer_needed
) ? false : true;
1198 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1201 lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
, bool strict
)
1204 if (strict
&& REG_P (x
) && STRICT_REG_OK_FOR_BASE_P (x
))
1206 if (!strict
&& REG_P (x
) && NONSTRICT_REG_OK_FOR_BASE_P (x
))
1210 if (GET_CODE (x
) == PLUS
1211 && REG_P (XEXP (x
, 0))
1212 && ((strict
&& STRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0)))
1213 || (!strict
&& NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0))))
1214 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1215 && satisfies_constraint_K (XEXP ((x
), 1)))
1219 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_SMALL_P (x
))
1225 /* Check a move is not memory to memory. */
1228 lm32_move_ok (enum machine_mode mode
, rtx operands
[2]) {
1229 if (memory_operand (operands
[0], mode
))
1230 return register_or_zero_operand (operands
[1], mode
);
1234 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1237 lm32_legitimate_constant_p (enum machine_mode mode
, rtx x
)
1239 /* 32-bit addresses require multiple instructions. */
1240 if (!flag_pic
&& reloc_operand (x
, mode
))