1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
4 Copyright (C) 2009-2015 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
36 #include "dominance.h"
42 #include "cfgcleanup.h"
43 #include "basic-block.h"
44 #include "insn-config.h"
45 #include "conditions.h"
46 #include "insn-flags.h"
47 #include "insn-attr.h"
48 #include "insn-codes.h"
55 #include "fold-const.h"
58 #include "statistics.h"
59 #include "double-int.h"
61 #include "fixed-value.h"
72 #include "diagnostic-core.h"
77 #include "target-def.h"
78 #include "langhooks.h"
79 #include "tm-constrs.h"
83 struct lm32_frame_info
85 HOST_WIDE_INT total_size
; /* number of bytes of entire frame. */
86 HOST_WIDE_INT callee_size
; /* number of bytes to save callee saves. */
87 HOST_WIDE_INT pretend_size
; /* number of bytes we pretend caller did. */
88 HOST_WIDE_INT args_size
; /* number of bytes for outgoing arguments. */
89 HOST_WIDE_INT locals_size
; /* number of bytes for local variables. */
90 unsigned int reg_save_mask
; /* mask of saved registers. */
93 /* Prototypes for static functions. */
94 static rtx
emit_add (rtx dest
, rtx src0
, rtx src1
);
95 static void expand_save_restore (struct lm32_frame_info
*info
, int op
);
96 static void stack_adjust (HOST_WIDE_INT amount
);
97 static bool lm32_in_small_data_p (const_tree
);
98 static void lm32_setup_incoming_varargs (cumulative_args_t cum
,
99 machine_mode mode
, tree type
,
100 int *pretend_size
, int no_rtl
);
101 static bool lm32_rtx_costs (rtx x
, int code
, int outer_code
, int opno
,
102 int *total
, bool speed
);
103 static bool lm32_can_eliminate (const int, const int);
105 lm32_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
);
106 static HOST_WIDE_INT
lm32_compute_frame_size (int size
);
107 static void lm32_option_override (void);
108 static rtx
lm32_function_arg (cumulative_args_t cum
,
109 machine_mode mode
, const_tree type
,
111 static void lm32_function_arg_advance (cumulative_args_t cum
,
113 const_tree type
, bool named
);
115 #undef TARGET_OPTION_OVERRIDE
116 #define TARGET_OPTION_OVERRIDE lm32_option_override
117 #undef TARGET_ADDRESS_COST
118 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
119 #undef TARGET_RTX_COSTS
120 #define TARGET_RTX_COSTS lm32_rtx_costs
121 #undef TARGET_IN_SMALL_DATA_P
122 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
123 #undef TARGET_PROMOTE_FUNCTION_MODE
124 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
125 #undef TARGET_SETUP_INCOMING_VARARGS
126 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
127 #undef TARGET_FUNCTION_ARG
128 #define TARGET_FUNCTION_ARG lm32_function_arg
129 #undef TARGET_FUNCTION_ARG_ADVANCE
130 #define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
131 #undef TARGET_PROMOTE_PROTOTYPES
132 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
133 #undef TARGET_MIN_ANCHOR_OFFSET
134 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
135 #undef TARGET_MAX_ANCHOR_OFFSET
136 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
137 #undef TARGET_CAN_ELIMINATE
138 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
139 #undef TARGET_LEGITIMATE_ADDRESS_P
140 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
142 struct gcc_target targetm
= TARGET_INITIALIZER
;
144 /* Current frame information calculated by lm32_compute_frame_size. */
145 static struct lm32_frame_info current_frame_info
;
147 /* Return non-zero if the given return type should be returned in memory. */
150 lm32_return_in_memory (tree type
)
154 if (!AGGREGATE_TYPE_P (type
))
156 /* All simple types are returned in registers. */
160 size
= int_size_in_bytes (type
);
161 if (size
>= 0 && size
<= UNITS_PER_WORD
)
163 /* If it can fit in one register. */
170 /* Generate an emit a word sized add instruction. */
173 emit_add (rtx dest
, rtx src0
, rtx src1
)
176 insn
= emit_insn (gen_addsi3 (dest
, src0
, src1
));
180 /* Generate the code to compare (and possibly branch) two integer values
181 TEST_CODE is the comparison code we are trying to emulate
182 (or implement directly)
183 RESULT is where to store the result of the comparison,
184 or null to emit a branch
185 CMP0 CMP1 are the two comparison operands
186 DESTINATION is the destination of the branch, or null to only compare
190 gen_int_relational (enum rtx_code code
,
199 mode
= GET_MODE (cmp0
);
200 if (mode
== VOIDmode
)
201 mode
= GET_MODE (cmp1
);
203 /* Is this a branch or compare. */
204 branch_p
= (destination
!= 0);
206 /* Instruction set doesn't support LE or LT, so swap operands and use
217 code
= swap_condition (code
);
229 rtx insn
, cond
, label
;
231 /* Operands must be in registers. */
232 if (!register_operand (cmp0
, mode
))
233 cmp0
= force_reg (mode
, cmp0
);
234 if (!register_operand (cmp1
, mode
))
235 cmp1
= force_reg (mode
, cmp1
);
237 /* Generate conditional branch instruction. */
238 cond
= gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
);
239 label
= gen_rtx_LABEL_REF (VOIDmode
, destination
);
240 insn
= gen_rtx_SET (pc_rtx
, gen_rtx_IF_THEN_ELSE (VOIDmode
,
241 cond
, label
, pc_rtx
));
242 emit_jump_insn (insn
);
246 /* We can't have const_ints in cmp0, other than 0. */
247 if ((GET_CODE (cmp0
) == CONST_INT
) && (INTVAL (cmp0
) != 0))
248 cmp0
= force_reg (mode
, cmp0
);
250 /* If the comparison is against an int not in legal range
251 move it into a register. */
252 if (GET_CODE (cmp1
) == CONST_INT
)
262 if (!satisfies_constraint_K (cmp1
))
263 cmp1
= force_reg (mode
, cmp1
);
269 if (!satisfies_constraint_L (cmp1
))
270 cmp1
= force_reg (mode
, cmp1
);
277 /* Generate compare instruction. */
278 emit_move_insn (result
, gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
));
282 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
283 and OPERAND[3]. Store the result in OPERANDS[0]. */
286 lm32_expand_scc (rtx operands
[])
288 rtx target
= operands
[0];
289 enum rtx_code code
= GET_CODE (operands
[1]);
290 rtx op0
= operands
[2];
291 rtx op1
= operands
[3];
293 gen_int_relational (code
, target
, op0
, op1
, NULL_RTX
);
296 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
297 CODE and jump to OPERANDS[3] if the condition holds. */
300 lm32_expand_conditional_branch (rtx operands
[])
302 enum rtx_code code
= GET_CODE (operands
[0]);
303 rtx op0
= operands
[1];
304 rtx op1
= operands
[2];
305 rtx destination
= operands
[3];
307 gen_int_relational (code
, NULL_RTX
, op0
, op1
, destination
);
310 /* Generate and emit RTL to save or restore callee save registers. */
312 expand_save_restore (struct lm32_frame_info
*info
, int op
)
314 unsigned int reg_save_mask
= info
->reg_save_mask
;
316 HOST_WIDE_INT offset
;
319 /* Callee saves are below locals and above outgoing arguments. */
320 offset
= info
->args_size
+ info
->callee_size
;
321 for (regno
= 0; regno
<= 31; regno
++)
323 if ((reg_save_mask
& (1 << regno
)) != 0)
328 offset_rtx
= GEN_INT (offset
);
329 if (satisfies_constraint_K (offset_rtx
))
331 mem
= gen_rtx_MEM (word_mode
,
338 /* r10 is caller saved so it can be used as a temp reg. */
341 r10
= gen_rtx_REG (word_mode
, 10);
342 insn
= emit_move_insn (r10
, offset_rtx
);
344 RTX_FRAME_RELATED_P (insn
) = 1;
345 insn
= emit_add (r10
, r10
, stack_pointer_rtx
);
347 RTX_FRAME_RELATED_P (insn
) = 1;
348 mem
= gen_rtx_MEM (word_mode
, r10
);
352 insn
= emit_move_insn (mem
, gen_rtx_REG (word_mode
, regno
));
354 insn
= emit_move_insn (gen_rtx_REG (word_mode
, regno
), mem
);
356 /* only prologue instructions which set the sp fp or save a
357 register should be marked as frame related. */
359 RTX_FRAME_RELATED_P (insn
) = 1;
360 offset
-= UNITS_PER_WORD
;
366 stack_adjust (HOST_WIDE_INT amount
)
370 if (!IN_RANGE (amount
, -32776, 32768))
372 /* r10 is caller saved so it can be used as a temp reg. */
374 r10
= gen_rtx_REG (word_mode
, 10);
375 insn
= emit_move_insn (r10
, GEN_INT (amount
));
377 RTX_FRAME_RELATED_P (insn
) = 1;
378 insn
= emit_add (stack_pointer_rtx
, stack_pointer_rtx
, r10
);
380 RTX_FRAME_RELATED_P (insn
) = 1;
384 insn
= emit_add (stack_pointer_rtx
,
385 stack_pointer_rtx
, GEN_INT (amount
));
387 RTX_FRAME_RELATED_P (insn
) = 1;
392 /* Create and emit instructions for a functions prologue. */
394 lm32_expand_prologue (void)
398 lm32_compute_frame_size (get_frame_size ());
400 if (current_frame_info
.total_size
> 0)
402 /* Add space on stack new frame. */
403 stack_adjust (-current_frame_info
.total_size
);
405 /* Save callee save registers. */
406 if (current_frame_info
.reg_save_mask
!= 0)
407 expand_save_restore (¤t_frame_info
, 0);
409 /* Setup frame pointer if it's needed. */
410 if (frame_pointer_needed
== 1)
413 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
414 RTX_FRAME_RELATED_P (insn
) = 1;
416 /* Add offset - Don't use total_size, as that includes pretend_size,
417 which isn't part of this frame? */
418 insn
= emit_add (frame_pointer_rtx
,
420 GEN_INT (current_frame_info
.args_size
+
421 current_frame_info
.callee_size
+
422 current_frame_info
.locals_size
));
423 RTX_FRAME_RELATED_P (insn
) = 1;
426 /* Prevent prologue from being scheduled into function body. */
427 emit_insn (gen_blockage ());
431 /* Create an emit instructions for a functions epilogue. */
433 lm32_expand_epilogue (void)
435 rtx ra_rtx
= gen_rtx_REG (Pmode
, RA_REGNUM
);
437 lm32_compute_frame_size (get_frame_size ());
439 if (current_frame_info
.total_size
> 0)
441 /* Prevent stack code from being reordered. */
442 emit_insn (gen_blockage ());
444 /* Restore callee save registers. */
445 if (current_frame_info
.reg_save_mask
!= 0)
446 expand_save_restore (¤t_frame_info
, 1);
448 /* Deallocate stack. */
449 stack_adjust (current_frame_info
.total_size
);
451 /* Return to calling function. */
452 emit_jump_insn (gen_return_internal (ra_rtx
));
456 /* Return to calling function. */
457 emit_jump_insn (gen_return_internal (ra_rtx
));
461 /* Return the bytes needed to compute the frame pointer from the current
464 lm32_compute_frame_size (int size
)
467 HOST_WIDE_INT total_size
, locals_size
, args_size
, pretend_size
, callee_size
;
468 unsigned int reg_save_mask
;
471 args_size
= crtl
->outgoing_args_size
;
472 pretend_size
= crtl
->args
.pretend_args_size
;
476 /* Build mask that actually determines which regsiters we save
477 and calculate size required to store them in the stack. */
478 for (regno
= 1; regno
< SP_REGNUM
; regno
++)
480 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
482 reg_save_mask
|= 1 << regno
;
483 callee_size
+= UNITS_PER_WORD
;
486 if (df_regs_ever_live_p (RA_REGNUM
) || ! crtl
->is_leaf
489 reg_save_mask
|= 1 << RA_REGNUM
;
490 callee_size
+= UNITS_PER_WORD
;
492 if (!(reg_save_mask
& (1 << FP_REGNUM
)) && frame_pointer_needed
)
494 reg_save_mask
|= 1 << FP_REGNUM
;
495 callee_size
+= UNITS_PER_WORD
;
498 /* Compute total frame size. */
499 total_size
= pretend_size
+ args_size
+ locals_size
+ callee_size
;
501 /* Align frame to appropriate boundary. */
502 total_size
= (total_size
+ 3) & ~3;
504 /* Save computed information. */
505 current_frame_info
.total_size
= total_size
;
506 current_frame_info
.callee_size
= callee_size
;
507 current_frame_info
.pretend_size
= pretend_size
;
508 current_frame_info
.locals_size
= locals_size
;
509 current_frame_info
.args_size
= args_size
;
510 current_frame_info
.reg_save_mask
= reg_save_mask
;
516 lm32_print_operand (FILE * file
, rtx op
, int letter
)
520 code
= GET_CODE (op
);
522 if (code
== SIGN_EXTEND
)
523 op
= XEXP (op
, 0), code
= GET_CODE (op
);
524 else if (code
== REG
|| code
== SUBREG
)
531 regnum
= true_regnum (op
);
533 fprintf (file
, "%s", reg_names
[regnum
]);
535 else if (code
== HIGH
)
536 output_addr_const (file
, XEXP (op
, 0));
537 else if (code
== MEM
)
538 output_address (XEXP (op
, 0));
539 else if (letter
== 'z' && GET_CODE (op
) == CONST_INT
&& INTVAL (op
) == 0)
540 fprintf (file
, "%s", reg_names
[0]);
541 else if (GET_CODE (op
) == CONST_DOUBLE
)
543 if ((CONST_DOUBLE_LOW (op
) != 0) || (CONST_DOUBLE_HIGH (op
) != 0))
544 output_operand_lossage ("only 0.0 can be loaded as an immediate");
549 fprintf (file
, "e ");
551 fprintf (file
, "ne ");
553 fprintf (file
, "g ");
554 else if (code
== GTU
)
555 fprintf (file
, "gu ");
557 fprintf (file
, "l ");
558 else if (code
== LTU
)
559 fprintf (file
, "lu ");
561 fprintf (file
, "ge ");
562 else if (code
== GEU
)
563 fprintf (file
, "geu");
565 fprintf (file
, "le ");
566 else if (code
== LEU
)
567 fprintf (file
, "leu");
569 output_addr_const (file
, op
);
572 /* A C compound statement to output to stdio stream STREAM the
573 assembler syntax for an instruction operand that is a memory
574 reference whose address is ADDR. ADDR is an RTL expression.
576 On some machines, the syntax for a symbolic address depends on
577 the section that the address refers to. On these machines,
578 define the macro `ENCODE_SECTION_INFO' to store the information
579 into the `symbol_ref', and then check for it here. */
582 lm32_print_operand_address (FILE * file
, rtx addr
)
584 switch (GET_CODE (addr
))
587 fprintf (file
, "(%s+0)", reg_names
[REGNO (addr
)]);
591 output_address (XEXP (addr
, 0));
596 rtx arg0
= XEXP (addr
, 0);
597 rtx arg1
= XEXP (addr
, 1);
599 if (GET_CODE (arg0
) == REG
&& CONSTANT_P (arg1
))
601 if (GET_CODE (arg1
) == CONST_INT
)
602 fprintf (file
, "(%s+%ld)", reg_names
[REGNO (arg0
)],
606 fprintf (file
, "(%s+", reg_names
[REGNO (arg0
)]);
607 output_addr_const (file
, arg1
);
611 else if (CONSTANT_P (arg0
) && CONSTANT_P (arg1
))
612 output_addr_const (file
, addr
);
614 fatal_insn ("bad operand", addr
);
619 if (SYMBOL_REF_SMALL_P (addr
))
621 fprintf (file
, "gp(");
622 output_addr_const (file
, addr
);
626 fatal_insn ("can't use non gp relative absolute address", addr
);
630 fatal_insn ("invalid addressing mode", addr
);
635 /* Determine where to put an argument to a function.
636 Value is zero to push the argument on the stack,
637 or a hard register in which to store the argument.
639 MODE is the argument's machine mode.
640 TYPE is the data type of the argument (as a tree).
641 This is null for libcalls where that information may
643 CUM is a variable of type CUMULATIVE_ARGS which gives info about
644 the preceding args and about the function being called.
645 NAMED is nonzero if this argument is a named parameter
646 (otherwise it is an extra parameter matching an ellipsis). */
649 lm32_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
650 const_tree type
, bool named
)
652 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
654 if (mode
== VOIDmode
)
655 /* Compute operand 2 of the call insn. */
658 if (targetm
.calls
.must_pass_in_stack (mode
, type
))
661 if (!named
|| (*cum
+ LM32_NUM_REGS2 (mode
, type
) > LM32_NUM_ARG_REGS
))
664 return gen_rtx_REG (mode
, *cum
+ LM32_FIRST_ARG_REG
);
668 lm32_function_arg_advance (cumulative_args_t cum
, machine_mode mode
,
669 const_tree type
, bool named ATTRIBUTE_UNUSED
)
671 *get_cumulative_args (cum
) += LM32_NUM_REGS2 (mode
, type
);
675 lm32_compute_initial_elimination_offset (int from
, int to
)
677 HOST_WIDE_INT offset
= 0;
681 case ARG_POINTER_REGNUM
:
684 case FRAME_POINTER_REGNUM
:
687 case STACK_POINTER_REGNUM
:
689 lm32_compute_frame_size (get_frame_size ()) -
690 current_frame_info
.pretend_size
;
704 lm32_setup_incoming_varargs (cumulative_args_t cum_v
, machine_mode mode
,
705 tree type
, int *pretend_size
, int no_rtl
)
707 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
711 fntype
= TREE_TYPE (current_function_decl
);
713 if (stdarg_p (fntype
))
714 first_anon_arg
= *cum
+ LM32_FIRST_ARG_REG
;
717 /* this is the common case, we have been passed details setup
718 for the last named argument, we want to skip over the
719 registers, if any used in passing this named paramter in
720 order to determine which is the first registers used to pass
721 anonymous arguments. */
725 size
= int_size_in_bytes (type
);
727 size
= GET_MODE_SIZE (mode
);
730 *cum
+ LM32_FIRST_ARG_REG
+
731 ((size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
734 if ((first_anon_arg
< (LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
)) && !no_rtl
)
736 int first_reg_offset
= first_anon_arg
;
737 int size
= LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
- first_anon_arg
;
740 regblock
= gen_rtx_MEM (BLKmode
,
741 plus_constant (Pmode
, arg_pointer_rtx
,
742 FIRST_PARM_OFFSET (0)));
743 move_block_from_reg (first_reg_offset
, regblock
, size
);
745 *pretend_size
= size
* UNITS_PER_WORD
;
749 /* Override command line options. */
751 lm32_option_override (void)
753 /* We must have sign-extend enabled if barrel-shift isn't. */
754 if (!TARGET_BARREL_SHIFT_ENABLED
&& !TARGET_SIGN_EXTEND_ENABLED
)
755 target_flags
|= MASK_SIGN_EXTEND_ENABLED
;
758 /* Return nonzero if this function is known to have a null epilogue.
759 This allows the optimizer to omit jumps to jumps if no stack
762 lm32_can_use_return (void)
764 if (!reload_completed
)
767 if (df_regs_ever_live_p (RA_REGNUM
) || crtl
->profile
)
770 if (lm32_compute_frame_size (get_frame_size ()) != 0)
776 /* Support function to determine the return address of the function
777 'count' frames back up the stack. */
779 lm32_return_addr_rtx (int count
, rtx frame
)
784 if (!df_regs_ever_live_p (RA_REGNUM
))
785 r
= gen_rtx_REG (Pmode
, RA_REGNUM
);
788 r
= gen_rtx_MEM (Pmode
,
789 gen_rtx_PLUS (Pmode
, frame
,
790 GEN_INT (-2 * UNITS_PER_WORD
)));
791 set_mem_alias_set (r
, get_frame_alias_set ());
794 else if (flag_omit_frame_pointer
)
798 r
= gen_rtx_MEM (Pmode
,
799 gen_rtx_PLUS (Pmode
, frame
,
800 GEN_INT (-2 * UNITS_PER_WORD
)));
801 set_mem_alias_set (r
, get_frame_alias_set ());
806 /* Return true if EXP should be placed in the small data section. */
809 lm32_in_small_data_p (const_tree exp
)
811 /* We want to merge strings, so we never consider them small data. */
812 if (TREE_CODE (exp
) == STRING_CST
)
815 /* Functions are never in the small data area. Duh. */
816 if (TREE_CODE (exp
) == FUNCTION_DECL
)
819 if (TREE_CODE (exp
) == VAR_DECL
&& DECL_SECTION_NAME (exp
))
821 const char *section
= DECL_SECTION_NAME (exp
);
822 if (strcmp (section
, ".sdata") == 0 || strcmp (section
, ".sbss") == 0)
827 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
829 /* If this is an incomplete type with size 0, then we can't put it
830 in sdata because it might be too big when completed. */
831 if (size
> 0 && size
<= g_switch_value
)
838 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
839 Assume that the areas do not overlap. */
842 lm32_block_move_inline (rtx dest
, rtx src
, HOST_WIDE_INT length
,
843 HOST_WIDE_INT alignment
)
845 HOST_WIDE_INT offset
, delta
;
846 unsigned HOST_WIDE_INT bits
;
851 /* Work out how many bits to move at a time. */
865 mode
= mode_for_size (bits
, MODE_INT
, 0);
866 delta
= bits
/ BITS_PER_UNIT
;
868 /* Allocate a buffer for the temporary registers. */
869 regs
= XALLOCAVEC (rtx
, length
/ delta
);
871 /* Load as many BITS-sized chunks as possible. */
872 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
874 regs
[i
] = gen_reg_rtx (mode
);
875 emit_move_insn (regs
[i
], adjust_address (src
, mode
, offset
));
878 /* Copy the chunks to the destination. */
879 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
880 emit_move_insn (adjust_address (dest
, mode
, offset
), regs
[i
]);
882 /* Mop up any left-over bytes. */
885 src
= adjust_address (src
, BLKmode
, offset
);
886 dest
= adjust_address (dest
, BLKmode
, offset
);
887 move_by_pieces (dest
, src
, length
- offset
,
888 MIN (MEM_ALIGN (src
), MEM_ALIGN (dest
)), 0);
892 /* Expand string/block move operations.
894 operands[0] is the pointer to the destination.
895 operands[1] is the pointer to the source.
896 operands[2] is the number of bytes to move.
897 operands[3] is the alignment. */
900 lm32_expand_block_move (rtx
* operands
)
902 if ((GET_CODE (operands
[2]) == CONST_INT
) && (INTVAL (operands
[2]) <= 32))
904 lm32_block_move_inline (operands
[0], operands
[1], INTVAL (operands
[2]),
905 INTVAL (operands
[3]));
911 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
912 isn't protected by a PIC unspec. */
914 nonpic_symbol_mentioned_p (rtx x
)
919 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
920 || GET_CODE (x
) == PC
)
923 /* We don't want to look into the possible MEM location of a
924 CONST_DOUBLE, since we're not going to use it, in general. */
925 if (GET_CODE (x
) == CONST_DOUBLE
)
928 if (GET_CODE (x
) == UNSPEC
)
931 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
932 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
938 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
939 if (nonpic_symbol_mentioned_p (XVECEXP (x
, i
, j
)))
942 else if (fmt
[i
] == 'e' && nonpic_symbol_mentioned_p (XEXP (x
, i
)))
949 /* Compute a (partial) cost for rtx X. Return true if the complete
950 cost has been computed, and false if subexpressions should be
951 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): this function body is badly truncated by the extraction —
   the entire switch skeleton (all `case` labels such as PLUS/COMPARE/
   ASHIFT/MULT/DIV, the braces, `if (!speed)`/`else` heads, and the final
   returns) is missing, and statements are split across lines.  The
   surviving fragments below are the cost assignments only; do NOT edit
   this function without diffing against upstream gcc/config/lm32/lm32.c.
   The named latency constants presumably model the LM32 pipeline — TODO
   confirm against the Lattice Mico32 processor reference.  */
954 lm32_rtx_costs (rtx x
, int code
, int outer_code
, int opno ATTRIBUTE_UNUSED
,
955 int *total
, bool speed
)
957 machine_mode mode
= GET_MODE (x
);
960 const int arithmetic_latency
= 1;
961 const int shift_latency
= 1;
962 const int compare_latency
= 2;
963 const int multiply_latency
= 3;
964 const int load_latency
= 3;
965 const int libcall_size_cost
= 5;
967 /* Determine if we can handle the given mode size in a single instruction. */
968 small_mode
= (mode
== QImode
) || (mode
== HImode
) || (mode
== SImode
);
981 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
));
984 COSTS_N_INSNS (arithmetic_latency
+ (LM32_NUM_REGS (mode
) - 1));
991 *total
= COSTS_N_INSNS (1);
993 *total
= COSTS_N_INSNS (compare_latency
);
997 /* FIXME. Guessing here. */
998 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * (2 + 3) / 2);
1005 if (TARGET_BARREL_SHIFT_ENABLED
&& small_mode
)
1008 *total
= COSTS_N_INSNS (1);
1010 *total
= COSTS_N_INSNS (shift_latency
);
1012 else if (TARGET_BARREL_SHIFT_ENABLED
)
1014 /* FIXME: Guessing here. */
1015 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * 4);
1017 else if (small_mode
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1019 *total
= COSTS_N_INSNS (INTVAL (XEXP (x
, 1)));
1025 *total
= COSTS_N_INSNS (libcall_size_cost
);
1027 *total
= COSTS_N_INSNS (100);
1032 if (TARGET_MULTIPLY_ENABLED
&& small_mode
)
1035 *total
= COSTS_N_INSNS (1);
1037 *total
= COSTS_N_INSNS (multiply_latency
);
1043 *total
= COSTS_N_INSNS (libcall_size_cost
);
1045 *total
= COSTS_N_INSNS (100);
1053 if (TARGET_DIVIDE_ENABLED
&& small_mode
)
1056 *total
= COSTS_N_INSNS (1);
1059 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1062 unsigned HOST_WIDE_INT i
= INTVAL (XEXP (x
, 1));
/* NOTE(review): the loop computing `cycles` (original lines 1063-1068)
   was dropped by the extraction.  */
1069 if (IN_RANGE (i
, 0, 65536))
1070 *total
= COSTS_N_INSNS (1 + 1 + cycles
);
1072 *total
= COSTS_N_INSNS (2 + 1 + cycles
);
1075 else if (GET_CODE (XEXP (x
, 1)) == REG
)
1077 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1082 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1091 *total
= COSTS_N_INSNS (libcall_size_cost
);
1093 *total
= COSTS_N_INSNS (100);
1100 *total
= COSTS_N_INSNS (1);
1102 *total
= COSTS_N_INSNS (arithmetic_latency
);
1106 if (MEM_P (XEXP (x
, 0)))
1107 *total
= COSTS_N_INSNS (0);
1108 else if (small_mode
)
1111 *total
= COSTS_N_INSNS (1);
1113 *total
= COSTS_N_INSNS (arithmetic_latency
);
1116 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) / 2);
1125 *total
= COSTS_N_INSNS (0);
1136 if (satisfies_constraint_L (x
))
1137 *total
= COSTS_N_INSNS (0);
1139 *total
= COSTS_N_INSNS (2);
1146 if (satisfies_constraint_K (x
))
1147 *total
= COSTS_N_INSNS (0);
1149 *total
= COSTS_N_INSNS (2);
1153 if (TARGET_MULTIPLY_ENABLED
)
1155 if (satisfies_constraint_K (x
))
1156 *total
= COSTS_N_INSNS (0);
1158 *total
= COSTS_N_INSNS (2);
1164 if (satisfies_constraint_K (x
))
1165 *total
= COSTS_N_INSNS (1);
1167 *total
= COSTS_N_INSNS (2);
1178 *total
= COSTS_N_INSNS (0);
1185 *total
= COSTS_N_INSNS (0);
1194 *total
= COSTS_N_INSNS (2);
1198 *total
= COSTS_N_INSNS (1);
1203 *total
= COSTS_N_INSNS (1);
1205 *total
= COSTS_N_INSNS (load_latency
);
1213 /* Implemenent TARGET_CAN_ELIMINATE. */
1216 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1218 return (to
== STACK_POINTER_REGNUM
&& frame_pointer_needed
) ? false : true;
1221 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1224 lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
, bool strict
)
1227 if (strict
&& REG_P (x
) && STRICT_REG_OK_FOR_BASE_P (x
))
1229 if (!strict
&& REG_P (x
) && NONSTRICT_REG_OK_FOR_BASE_P (x
))
1233 if (GET_CODE (x
) == PLUS
1234 && REG_P (XEXP (x
, 0))
1235 && ((strict
&& STRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0)))
1236 || (!strict
&& NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0))))
1237 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1238 && satisfies_constraint_K (XEXP ((x
), 1)))
1242 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_SMALL_P (x
))
1248 /* Check a move is not memory to memory. */
1251 lm32_move_ok (machine_mode mode
, rtx operands
[2]) {
1252 if (memory_operand (operands
[0], mode
))
1253 return register_or_zero_operand (operands
[1], mode
);