/* Subroutines used for code generation on the Lattice Mico32 architecture.
   Contributed by Jon Beniston <jon@beniston.com>

   Copyright (C) 2009-2014 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "recog.h"
#include "output.h"
#include "tree.h"
#include "calls.h"
#include "expr.h"
#include "flags.h"
#include "reload.h"
#include "tm_p.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "diagnostic-core.h"
#include "optabs.h"
#include "libfuncs.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "tm-constrs.h"
#include "df.h"
#include "builtins.h"

struct lm32_frame_info
{
  HOST_WIDE_INT total_size;	/* number of bytes of entire frame.  */
  HOST_WIDE_INT callee_size;	/* number of bytes to save callee saves.  */
  HOST_WIDE_INT pretend_size;	/* number of bytes we pretend caller did.  */
  HOST_WIDE_INT args_size;	/* number of bytes for outgoing arguments.  */
  HOST_WIDE_INT locals_size;	/* number of bytes for local variables.  */
  unsigned int reg_save_mask;	/* mask of saved registers.  */
};
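
/* Approximate stack frame layout implied by lm32_compute_frame_size and
   expand_save_restore (a sketch only; exact offsets are computed at
   expansion time).  Lowest address first:

     sp -> outgoing arguments       (args_size bytes)
           callee-saved registers   (callee_size bytes)
           local variables          (locals_size bytes)
     fp -> pretend (varargs) area   (pretend_size bytes)

   total_size is the sum of the four areas, rounded up to a 4-byte
   boundary.  */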

/* Prototypes for static functions.  */
static rtx emit_add (rtx dest, rtx src0, rtx src1);
static void expand_save_restore (struct lm32_frame_info *info, int op);
static void stack_adjust (HOST_WIDE_INT amount);
static bool lm32_in_small_data_p (const_tree);
static void lm32_setup_incoming_varargs (cumulative_args_t cum,
					 enum machine_mode mode, tree type,
					 int *pretend_size, int no_rtl);
static bool lm32_rtx_costs (rtx x, int code, int outer_code, int opno,
			    int *total, bool speed);
static bool lm32_can_eliminate (const int, const int);
static bool
lm32_legitimate_address_p (enum machine_mode mode, rtx x, bool strict);
static HOST_WIDE_INT lm32_compute_frame_size (int size);
static void lm32_option_override (void);
static rtx lm32_function_arg (cumulative_args_t cum,
			      enum machine_mode mode, const_tree type,
			      bool named);
static void lm32_function_arg_advance (cumulative_args_t cum,
				       enum machine_mode mode,
				       const_tree type, bool named);

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p

struct gcc_target targetm = TARGET_INITIALIZER;

/* Current frame information calculated by lm32_compute_frame_size.  */
static struct lm32_frame_info current_frame_info;

/* Return non-zero if the given return type should be returned in memory.  */

int
lm32_return_in_memory (tree type)
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    {
      /* All simple types are returned in registers.  */
      return 0;
    }

  size = int_size_in_bytes (type);
  if (size >= 0 && size <= UNITS_PER_WORD)
    {
      /* If it can fit in one register.  */
      return 0;
    }

  return 1;
}

/* Generate and emit a word sized add instruction.  */

static rtx
emit_add (rtx dest, rtx src0, rtx src1)
{
  rtx insn;

  insn = emit_insn (gen_addsi3 (dest, src0, src1));
  return insn;
}

/* Generate the code to compare (and possibly branch) two integer values.
   CODE is the comparison code we are trying to emulate
     (or implement directly).
   RESULT is where to store the result of the comparison,
     or null to emit a branch.
   CMP0 and CMP1 are the two comparison operands.
   DESTINATION is the destination of the branch, or null to only compare.  */

static void
gen_int_relational (enum rtx_code code,
		    rtx result,
		    rtx cmp0,
		    rtx cmp1,
		    rtx destination)
{
  enum machine_mode mode;
  int branch_p;

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Is this a branch or a compare?  */
  branch_p = (destination != 0);

  /* Instruction set doesn't support LE or LT, so swap operands and use
     GE, GT.  */
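  /* For example, (a < b) is emitted as (b > a), and (a <= b) as (b >= a).  */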
  switch (code)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      {
	rtx temp;

	code = swap_condition (code);
	temp = cmp0;
	cmp0 = cmp1;
	cmp1 = temp;
	break;
      }
    default:
      break;
    }

  if (branch_p)
    {
      rtx insn, cond, label;

      /* Operands must be in registers.  */
      if (!register_operand (cmp0, mode))
	cmp0 = force_reg (mode, cmp0);
      if (!register_operand (cmp1, mode))
	cmp1 = force_reg (mode, cmp1);

      /* Generate conditional branch instruction.  */
      cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
      label = gen_rtx_LABEL_REF (VOIDmode, destination);
      insn = gen_rtx_SET (VOIDmode, pc_rtx,
			  gen_rtx_IF_THEN_ELSE (VOIDmode,
						cond, label, pc_rtx));
      emit_jump_insn (insn);
    }
  else
    {
      /* We can't have const_ints in cmp0, other than 0.  */
      if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
	cmp0 = force_reg (mode, cmp0);

      /* If the comparison is against an int not in legal range
	 move it into a register.  */
      if (GET_CODE (cmp1) == CONST_INT)
	{
	  switch (code)
	    {
	    case EQ:
	    case NE:
	    case LE:
	    case LT:
	    case GE:
	    case GT:
	      if (!satisfies_constraint_K (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    case LEU:
	    case LTU:
	    case GEU:
	    case GTU:
	      if (!satisfies_constraint_L (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* Generate compare instruction.  */
      emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
    }
}

/* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
   and OPERANDS[3].  Store the result in OPERANDS[0].  */

void
lm32_expand_scc (rtx operands[])
{
  rtx target = operands[0];
  enum rtx_code code = GET_CODE (operands[1]);
  rtx op0 = operands[2];
  rtx op1 = operands[3];

  gen_int_relational (code, target, op0, op1, NULL_RTX);
}

/* Compare OPERANDS[1] with OPERANDS[2] using the comparison code in
   OPERANDS[0] and jump to OPERANDS[3] if the condition holds.  */

void
lm32_expand_conditional_branch (rtx operands[])
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx destination = operands[3];

  gen_int_relational (code, NULL_RTX, op0, op1, destination);
}

/* Generate and emit RTL to save or restore callee save registers.  */
static void
expand_save_restore (struct lm32_frame_info *info, int op)
{
  unsigned int reg_save_mask = info->reg_save_mask;
  int regno;
  HOST_WIDE_INT offset;
  rtx insn;

  /* Callee saves are below locals and above outgoing arguments.  */
  offset = info->args_size + info->callee_size;
  for (regno = 0; regno <= 31; regno++)
    {
      if ((reg_save_mask & (1 << regno)) != 0)
	{
	  rtx offset_rtx;
	  rtx mem;

	  offset_rtx = GEN_INT (offset);
	  if (satisfies_constraint_K (offset_rtx))
	    {
	      mem = gen_rtx_MEM (word_mode,
				 gen_rtx_PLUS (Pmode,
					       stack_pointer_rtx,
					       offset_rtx));
	    }
	  else
	    {
	      /* r10 is caller saved so it can be used as a temp reg.  */
	      rtx r10;

	      r10 = gen_rtx_REG (word_mode, 10);
	      insn = emit_move_insn (r10, offset_rtx);
	      if (op == 0)
		RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_add (r10, r10, stack_pointer_rtx);
	      if (op == 0)
		RTX_FRAME_RELATED_P (insn) = 1;
	      mem = gen_rtx_MEM (word_mode, r10);
	    }

	  if (op == 0)
	    insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
	  else
	    insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);

	  /* Only prologue instructions which set the sp or fp, or save a
	     register, should be marked as frame related.  */
	  if (op == 0)
	    RTX_FRAME_RELATED_P (insn) = 1;
	  offset -= UNITS_PER_WORD;
	}
    }
}
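
/* Adjust the stack pointer by AMOUNT bytes.  Adjustments that are too
   large for a single add immediate are staged through r10, which is
   caller saved and therefore free for use as a temporary here.  */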
static void
stack_adjust (HOST_WIDE_INT amount)
{
  rtx insn;

  if (!IN_RANGE (amount, -32776, 32768))
    {
      /* r10 is caller saved so it can be used as a temp reg.  */
      rtx r10;

      r10 = gen_rtx_REG (word_mode, 10);
      insn = emit_move_insn (r10, GEN_INT (amount));
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      insn = emit_add (stack_pointer_rtx,
		       stack_pointer_rtx, GEN_INT (amount));
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
    }
}

/* Create and emit instructions for a function's prologue.  */
void
lm32_expand_prologue (void)
{
  rtx insn;

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Add space on the stack for the new frame.  */
      stack_adjust (-current_frame_info.total_size);

      /* Save callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
	expand_save_restore (&current_frame_info, 0);

      /* Set up the frame pointer if it's needed.  */
      if (frame_pointer_needed == 1)
	{
	  /* Move sp to fp.  */
	  insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
	  RTX_FRAME_RELATED_P (insn) = 1;

	  /* Add offset - don't use total_size, as that includes pretend_size,
	     which isn't part of this frame.  */
	  insn = emit_add (frame_pointer_rtx,
			   frame_pointer_rtx,
			   GEN_INT (current_frame_info.args_size +
				    current_frame_info.callee_size +
				    current_frame_info.locals_size));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Prevent prologue from being scheduled into function body.  */
      emit_insn (gen_blockage ());
    }
}

/* Create and emit instructions for a function's epilogue.  */
void
lm32_expand_epilogue (void)
{
  rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Prevent stack code from being reordered.  */
      emit_insn (gen_blockage ());

      /* Restore callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
	expand_save_restore (&current_frame_info, 1);

      /* Deallocate stack.  */
      stack_adjust (current_frame_info.total_size);

      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
  else
    {
      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
}

/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  */
static HOST_WIDE_INT
lm32_compute_frame_size (int size)
{
  int regno;
  HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
  unsigned int reg_save_mask;

  locals_size = size;
  args_size = crtl->outgoing_args_size;
  pretend_size = crtl->args.pretend_args_size;
  callee_size = 0;
  reg_save_mask = 0;

  /* Build mask that actually determines which registers we save
     and calculate size required to store them in the stack.  */
  for (regno = 1; regno < SP_REGNUM; regno++)
    {
      if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
	{
	  reg_save_mask |= 1 << regno;
	  callee_size += UNITS_PER_WORD;
	}
    }
  if (df_regs_ever_live_p (RA_REGNUM) || ! crtl->is_leaf
      || !optimize)
    {
      reg_save_mask |= 1 << RA_REGNUM;
      callee_size += UNITS_PER_WORD;
    }
  if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
    {
      reg_save_mask |= 1 << FP_REGNUM;
      callee_size += UNITS_PER_WORD;
    }

  /* Compute total frame size.  */
  total_size = pretend_size + args_size + locals_size + callee_size;

  /* Align frame to appropriate boundary.  */
  total_size = (total_size + 3) & ~3;

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.callee_size = callee_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.locals_size = locals_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_save_mask = reg_save_mask;

  return total_size;
}
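
/* Print operand OP to FILE.  LETTER is the operand-letter from the
   instruction template; 'z' prints register r0 for a constant zero.
   Condition codes are printed as the corresponding branch suffix.  */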
void
lm32_print_operand (FILE * file, rtx op, int letter)
{
  enum rtx_code code;

  code = GET_CODE (op);

  if (code == SIGN_EXTEND)
    op = XEXP (op, 0), code = GET_CODE (op);
  else if (code == REG || code == SUBREG)
    {
      int regnum;

      if (code == REG)
	regnum = REGNO (op);
      else
	regnum = true_regnum (op);

      fprintf (file, "%s", reg_names[regnum]);
    }
  else if (code == HIGH)
    output_addr_const (file, XEXP (op, 0));
  else if (code == MEM)
    output_address (XEXP (op, 0));
  else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
    fprintf (file, "%s", reg_names[0]);
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
	output_operand_lossage ("only 0.0 can be loaded as an immediate");
      else
	fprintf (file, "0");
    }
  else if (code == EQ)
    fprintf (file, "e ");
  else if (code == NE)
    fprintf (file, "ne ");
  else if (code == GT)
    fprintf (file, "g ");
  else if (code == GTU)
    fprintf (file, "gu ");
  else if (code == LT)
    fprintf (file, "l ");
  else if (code == LTU)
    fprintf (file, "lu ");
  else if (code == GE)
    fprintf (file, "ge ");
  else if (code == GEU)
    fprintf (file, "geu");
  else if (code == LE)
    fprintf (file, "le ");
  else if (code == LEU)
    fprintf (file, "leu");
  else
    output_addr_const (file, op);
}

/* A C compound statement to output to stdio stream STREAM the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  ADDR is an RTL expression.

   On some machines, the syntax for a symbolic address depends on
   the section that the address refers to.  On these machines,
   define the macro `ENCODE_SECTION_INFO' to store the information
   into the `symbol_ref', and then check for it here.  */

void
lm32_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
      break;

    case MEM:
      output_address (XEXP (addr, 0));
      break;

    case PLUS:
      {
	rtx arg0 = XEXP (addr, 0);
	rtx arg1 = XEXP (addr, 1);

	if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
	  {
	    if (GET_CODE (arg1) == CONST_INT)
	      fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
		       INTVAL (arg1));
	    else
	      {
		fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
		output_addr_const (file, arg1);
		fprintf (file, ")");
	      }
	  }
	else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
	  output_addr_const (file, addr);
	else
	  fatal_insn ("bad operand", addr);
      }
      break;

    case SYMBOL_REF:
      if (SYMBOL_REF_SMALL_P (addr))
	{
	  fprintf (file, "gp(");
	  output_addr_const (file, addr);
	  fprintf (file, ")");
	}
      else
	fatal_insn ("can't use non gp relative absolute address", addr);
      break;

    default:
      fatal_insn ("invalid addressing mode", addr);
      break;
    }
}

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

static rtx
lm32_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
		   const_tree type, bool named)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (0);

  if (targetm.calls.must_pass_in_stack (mode, type))
    return NULL_RTX;

  if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
    return NULL_RTX;

  return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
}
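
/* Advance the argument cursor CUM past an argument of the given MODE
   and TYPE.  */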
static void
lm32_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
			   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *get_cumulative_args (cum) += LM32_NUM_REGS2 (mode, type);
}
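
/* Return the offset, in bytes, between eliminable register FROM and its
   replacement register TO.  */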
HOST_WIDE_INT
lm32_compute_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  switch (from)
    {
    case ARG_POINTER_REGNUM:
      switch (to)
	{
	case FRAME_POINTER_REGNUM:
	  offset = 0;
	  break;
	case STACK_POINTER_REGNUM:
	  offset =
	    lm32_compute_frame_size (get_frame_size ()) -
	    current_frame_info.pretend_size;
	  break;
	default:
	  gcc_unreachable ();
	}
      break;
    default:
      gcc_unreachable ();
    }

  return offset;
}
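
/* Implement TARGET_SETUP_INCOMING_VARARGS: spill any argument registers
   that may hold anonymous arguments to the stack, so that the variable
   argument machinery can find them after the named arguments.  */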
static void
lm32_setup_incoming_varargs (cumulative_args_t cum_v, enum machine_mode mode,
			     tree type, int *pretend_size, int no_rtl)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int first_anon_arg;
  tree fntype;

  fntype = TREE_TYPE (current_function_decl);

  if (stdarg_p (fntype))
    first_anon_arg = *cum + LM32_FIRST_ARG_REG;
  else
    {
      /* This is the common case.  We have been passed details set up
	 for the last named argument; we want to skip over the
	 registers, if any, used in passing this named parameter in
	 order to determine which is the first register used to pass
	 anonymous arguments.  */
      int size;

      if (mode == BLKmode)
	size = int_size_in_bytes (type);
      else
	size = GET_MODE_SIZE (mode);

      first_anon_arg =
	*cum + LM32_FIRST_ARG_REG +
	((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
    }

  if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
    {
      int first_reg_offset = first_anon_arg;
      int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
      rtx regblock;

      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (Pmode, arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)));
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = size * UNITS_PER_WORD;
    }
}

/* Override command line options.  */
static void
lm32_option_override (void)
{
  /* We must have sign-extend enabled if barrel-shift isn't.  */
  if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
    target_flags |= MASK_SIGN_EXTEND_ENABLED;
}

/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack
   was created.  */

int
lm32_can_use_return (void)
{
  if (!reload_completed)
    return 0;

  if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
    return 0;

  if (lm32_compute_frame_size (get_frame_size ()) != 0)
    return 0;

  return 1;
}

/* Support function to determine the return address of the function
   'count' frames back up the stack.  */

rtx
lm32_return_addr_rtx (int count, rtx frame)
{
  rtx r;

  if (count == 0)
    {
      if (!df_regs_ever_live_p (RA_REGNUM))
	r = gen_rtx_REG (Pmode, RA_REGNUM);
      else
	{
	  r = gen_rtx_MEM (Pmode,
			   gen_rtx_PLUS (Pmode, frame,
					 GEN_INT (-2 * UNITS_PER_WORD)));
	  set_mem_alias_set (r, get_frame_alias_set ());
	}
    }
  else if (flag_omit_frame_pointer)
    r = NULL_RTX;
  else
    {
      r = gen_rtx_MEM (Pmode,
		       gen_rtx_PLUS (Pmode, frame,
				     GEN_INT (-2 * UNITS_PER_WORD)));
      set_mem_alias_set (r, get_frame_alias_set ());
    }
  return r;
}

/* Return true if EXP should be placed in the small data section.  */

static bool
lm32_in_small_data_p (const_tree exp)
{
  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (exp) == STRING_CST)
    return false;

  /* Functions are never in the small data area.  Duh.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    return false;

  if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
    {
      const char *section = DECL_SECTION_NAME (exp);
      if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
	return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));

      /* If this is an incomplete type with size 0, then we can't put it
	 in sdata because it might be too big when completed.  */
      if (size > 0 && size <= g_switch_value)
	return true;
    }

  return false;
}

/* Emit straight-line code to move LENGTH bytes from SRC to DEST.
   Assume that the areas do not overlap.  */

static void
lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
			HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT offset, delta;
  unsigned HOST_WIDE_INT bits;
  int i;
  enum machine_mode mode;
  rtx *regs;

  /* Work out how many bits to move at a time.  */
  switch (alignment)
    {
    case 1:
      bits = 8;
      break;
    case 2:
      bits = 16;
      break;
    default:
      bits = 32;
      break;
    }

  mode = mode_for_size (bits, MODE_INT, 0);
  delta = bits / BITS_PER_UNIT;

  /* Allocate a buffer for the temporary registers.  */
  regs = XALLOCAVEC (rtx, length / delta);

  /* Load as many BITS-sized chunks as possible.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    {
      regs[i] = gen_reg_rtx (mode);
      emit_move_insn (regs[i], adjust_address (src, mode, offset));
    }

  /* Copy the chunks to the destination.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    emit_move_insn (adjust_address (dest, mode, offset), regs[i]);

  /* Mop up any left-over bytes.  */
  if (offset < length)
    {
      src = adjust_address (src, BLKmode, offset);
      dest = adjust_address (dest, BLKmode, offset);
      move_by_pieces (dest, src, length - offset,
		      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
    }
}

/* Expand string/block move operations.

   operands[0] is the pointer to the destination.
   operands[1] is the pointer to the source.
   operands[2] is the number of bytes to move.
   operands[3] is the alignment.  */

int
lm32_expand_block_move (rtx * operands)
{
  if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
    {
      lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
			      INTVAL (operands[3]));
      return 1;
    }
  return 0;
}

/* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
   isn't protected by a PIC unspec.  */
int
nonpic_symbol_mentioned_p (rtx x)
{
  const char *fmt;
  int i;

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
      || GET_CODE (x) == PC)
    return 1;

  /* We don't want to look into the possible MEM location of a
     CONST_DOUBLE, since we're not going to use it, in general.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return 0;

  if (GET_CODE (x) == UNSPEC)
    return 0;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
lm32_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
		int *total, bool speed)
{
  enum machine_mode mode = GET_MODE (x);
  bool small_mode;

  const int arithmetic_latency = 1;
  const int shift_latency = 1;
  const int compare_latency = 2;
  const int multiply_latency = 3;
  const int load_latency = 3;
  const int libcall_size_cost = 5;

  /* Determine if we can handle the given mode size in a single instruction.  */
  small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);

  switch (code)
    {

    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case NOT:
    case NEG:
      if (!speed)
	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
      else
	*total =
	  COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
      break;

    case COMPARE:
      if (small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (compare_latency);
	}
      else
	{
	  /* FIXME.  Guessing here.  */
	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
	}
      break;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (shift_latency);
	}
      else if (TARGET_BARREL_SHIFT_ENABLED)
	{
	  /* FIXME: Guessing here.  */
	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
	}
      else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
	}
      else
	{
	  /* Libcall.  */
	  if (!speed)
	    *total = COSTS_N_INSNS (libcall_size_cost);
	  else
	    *total = COSTS_N_INSNS (100);
	}
      break;

    case MULT:
      if (TARGET_MULTIPLY_ENABLED && small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (multiply_latency);
	}
      else
	{
	  /* Libcall.  */
	  if (!speed)
	    *total = COSTS_N_INSNS (libcall_size_cost);
	  else
	    *total = COSTS_N_INSNS (100);
	}
      break;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (TARGET_DIVIDE_ENABLED && small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    {
	      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
		{
		  int cycles = 0;
		  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

		  while (i)
		    {
		      i >>= 2;
		      cycles++;
		    }
		  if (IN_RANGE (i, 0, 65536))
		    *total = COSTS_N_INSNS (1 + 1 + cycles);
		  else
		    *total = COSTS_N_INSNS (2 + 1 + cycles);
		  return true;
		}
	      else if (GET_CODE (XEXP (x, 1)) == REG)
		{
		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
		  return true;
		}
	      else
		{
		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
		  return false;
		}
	    }
	}
      else
	{
	  /* Libcall.  */
	  if (!speed)
	    *total = COSTS_N_INSNS (libcall_size_cost);
	  else
	    *total = COSTS_N_INSNS (100);
	}
      break;

    case HIGH:
    case LO_SUM:
      if (!speed)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (arithmetic_latency);
      break;

    case ZERO_EXTEND:
      if (MEM_P (XEXP (x, 0)))
	*total = COSTS_N_INSNS (0);
      else if (small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (arithmetic_latency);
	}
      else
	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
      break;

    case CONST_INT:
      {
	switch (outer_code)
	  {
	  case HIGH:
	  case LO_SUM:
	    *total = COSTS_N_INSNS (0);
	    return true;

	  case AND:
	  case XOR:
	  case IOR:
	  case ASHIFT:
	  case ASHIFTRT:
	  case LSHIFTRT:
	  case ROTATE:
	  case ROTATERT:
	    if (satisfies_constraint_L (x))
	      *total = COSTS_N_INSNS (0);
	    else
	      *total = COSTS_N_INSNS (2);
	    return true;

	  case SET:
	  case PLUS:
	  case MINUS:
	  case COMPARE:
	    if (satisfies_constraint_K (x))
	      *total = COSTS_N_INSNS (0);
	    else
	      *total = COSTS_N_INSNS (2);
	    return true;

	  case MULT:
	    if (TARGET_MULTIPLY_ENABLED)
	      {
		if (satisfies_constraint_K (x))
		  *total = COSTS_N_INSNS (0);
		else
		  *total = COSTS_N_INSNS (2);
		return true;
	      }
	    /* Fall through.  */

	  default:
	    if (satisfies_constraint_K (x))
	      *total = COSTS_N_INSNS (1);
	    else
	      *total = COSTS_N_INSNS (2);
	    return true;
	  }
      }

    case SYMBOL_REF:
    case CONST:
      switch (outer_code)
	{
	case HIGH:
	case LO_SUM:
	  *total = COSTS_N_INSNS (0);
	  return true;

	case MEM:
	case SET:
	  if (g_switch_value)
	    {
	      *total = COSTS_N_INSNS (0);
	      return true;
	    }
	  break;
	}
      /* Fall through.  */

    case LABEL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case SET:
      *total = COSTS_N_INSNS (1);
      break;

    case MEM:
      if (!speed)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (load_latency);
      break;

    }

  return false;
}

/* Implement TARGET_CAN_ELIMINATE.  */

bool
lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
}

/* Implement TARGET_LEGITIMATE_ADDRESS_P.  */

static bool
lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
{
  /* (rM) */
  if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
    return true;
  if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
    return true;

  /* (rM + literal) */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
	  || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && satisfies_constraint_K (XEXP ((x), 1)))
    return true;

  /* gp(sym) */
  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
    return true;

  return false;
}

/* Check a move is not memory to memory.  */

bool
lm32_move_ok (enum machine_mode mode, rtx operands[2])
{
  if (memory_operand (operands[0], mode))
    return register_or_zero_operand (operands[1], mode);
  return true;
}