/* Subroutines used for code generation on the Lattice Mico32 architecture.
   Contributed by Jon Beniston <jon@beniston.com>

   Copyright (C) 2009-2017 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "attribs.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "output.h"
#include "calls.h"
#include "alias.h"
#include "explow.h"
#include "expr.h"
#include "tm-constrs.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"
struct lm32_frame_info
{
  HOST_WIDE_INT total_size;	/* number of bytes of entire frame.  */
  HOST_WIDE_INT callee_size;	/* number of bytes to save callee saves.  */
  HOST_WIDE_INT pretend_size;	/* number of bytes we pretend caller did.  */
  HOST_WIDE_INT args_size;	/* number of bytes for outgoing arguments.  */
  HOST_WIDE_INT locals_size;	/* number of bytes for local variables.  */
  unsigned int reg_save_mask;	/* mask of saved registers.  */
};
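
/* Sketch of the frame layout implied by lm32_compute_frame_size and
   lm32_expand_prologue below:

	higher addresses
	+-------------------------+  <- incoming stack pointer
	| pretend args (varargs)  |  pretend_size
	+-------------------------+  <- frame pointer, when needed
	| local variables         |  locals_size
	+-------------------------+
	| callee-saved registers  |  callee_size
	+-------------------------+
	| outgoing arguments      |  args_size
	+-------------------------+  <- stack pointer after the prologue
	lower addresses

   total_size is the sum of the four components, rounded up to a
   multiple of 4.  */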
/* Prototypes for static functions.  */
static rtx emit_add (rtx dest, rtx src0, rtx src1);
static void expand_save_restore (struct lm32_frame_info *info, int op);
static void stack_adjust (HOST_WIDE_INT amount);
static bool lm32_in_small_data_p (const_tree);
static void lm32_setup_incoming_varargs (cumulative_args_t cum,
					 machine_mode mode, tree type,
					 int *pretend_size, int no_rtl);
static bool lm32_rtx_costs (rtx x, machine_mode mode, int outer_code, int opno,
			    int *total, bool speed);
static bool lm32_can_eliminate (const int, const int);
static bool
lm32_legitimate_address_p (machine_mode mode, rtx x, bool strict);
static HOST_WIDE_INT lm32_compute_frame_size (int size);
static void lm32_option_override (void);
static rtx lm32_function_arg (cumulative_args_t cum,
			      machine_mode mode, const_tree type,
			      bool named);
static void lm32_function_arg_advance (cumulative_args_t cum,
				       machine_mode mode,
				       const_tree type, bool named);
static bool lm32_hard_regno_mode_ok (unsigned int, machine_mode);
static bool lm32_modes_tieable_p (machine_mode, machine_mode);
static HOST_WIDE_INT lm32_starting_frame_offset (void);
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK lm32_hard_regno_mode_ok
#undef TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P lm32_modes_tieable_p

#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings

#undef TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET lm32_starting_frame_offset

struct gcc_target targetm = TARGET_INITIALIZER;
/* Current frame information calculated by lm32_compute_frame_size.  */
static struct lm32_frame_info current_frame_info;
/* Return non-zero if the given return type should be returned in memory.  */

int
lm32_return_in_memory (tree type)
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    {
      /* All simple types are returned in registers.  */
      return 0;
    }

  size = int_size_in_bytes (type);
  if (size >= 0 && size <= UNITS_PER_WORD)
    {
      /* If it can fit in one register.  */
      return 0;
    }

  return 1;
}
/* Generate and emit a word sized add instruction.  */

static rtx
emit_add (rtx dest, rtx src0, rtx src1)
{
  rtx insn;
  insn = emit_insn (gen_addsi3 (dest, src0, src1));
  return insn;
}
/* Generate the code to compare (and possibly branch) two integer values.
   TEST_CODE is the comparison code we are trying to emulate
     (or implement directly).
   RESULT is where to store the result of the comparison,
     or null to emit a branch.
   CMP0 and CMP1 are the two comparison operands.
   DESTINATION is the destination of the branch, or null to only compare.  */

static void
gen_int_relational (enum rtx_code code,
		    rtx result,
		    rtx cmp0,
		    rtx cmp1,
		    rtx destination)
{
  machine_mode mode;
  int branch_p;

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Is this a branch or a compare?  */
  branch_p = (destination != 0);

  /* Instruction set doesn't support LE or LT, so swap operands and use
     GE, GT.  */
  switch (code)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      {
	rtx temp;

	code = swap_condition (code);
	temp = cmp0;
	cmp0 = cmp1;
	cmp1 = temp;
	break;
      }
    default:
      break;
    }

  if (branch_p)
    {
      rtx insn, cond, label;

      /* Operands must be in registers.  */
      if (!register_operand (cmp0, mode))
	cmp0 = force_reg (mode, cmp0);
      if (!register_operand (cmp1, mode))
	cmp1 = force_reg (mode, cmp1);

      /* Generate conditional branch instruction.  */
      cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
      label = gen_rtx_LABEL_REF (VOIDmode, destination);
      insn = gen_rtx_SET (pc_rtx, gen_rtx_IF_THEN_ELSE (VOIDmode,
							cond, label, pc_rtx));
      emit_jump_insn (insn);
    }
  else
    {
      /* We can't have const_ints in cmp0, other than 0.  */
      if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
	cmp0 = force_reg (mode, cmp0);

      /* If the comparison is against an int not in legal range
	 move it into a register.  */
      if (GET_CODE (cmp1) == CONST_INT)
	{
	  switch (code)
	    {
	    case EQ:
	    case NE:
	    case LE:
	    case LT:
	    case GE:
	    case GT:
	      if (!satisfies_constraint_K (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    case LEU:
	    case LTU:
	    case GEU:
	    case GTU:
	      if (!satisfies_constraint_L (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* Generate compare instruction.  */
      emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
    }
}
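
/* Illustration: the instruction set only provides GE/GT style compares, so
   a request such as (lt:SI rX rY) is rewritten above, via swap_condition,
   into (gt:SI rY rX).  With a null DESTINATION the result is a single
   SCC-style move, e.g. (set result (gt:SI rY rX)); otherwise a conditional
   branch on that condition is emitted.  */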
/* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
   and OPERANDS[3].  Store the result in OPERANDS[0].  */

void
lm32_expand_scc (rtx operands[])
{
  rtx target = operands[0];
  enum rtx_code code = GET_CODE (operands[1]);
  rtx op0 = operands[2];
  rtx op1 = operands[3];

  gen_int_relational (code, target, op0, op1, NULL_RTX);
}
/* Compare OPERANDS[1] with OPERANDS[2] using comparison code
   CODE and jump to OPERANDS[3] if the condition holds.  */

void
lm32_expand_conditional_branch (rtx operands[])
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx destination = operands[3];

  gen_int_relational (code, NULL_RTX, op0, op1, destination);
}
/* Generate and emit RTL to save or restore callee save registers.  */
static void
expand_save_restore (struct lm32_frame_info *info, int op)
{
  unsigned int reg_save_mask = info->reg_save_mask;
  int regno;
  HOST_WIDE_INT offset;
  rtx insn;

  /* Callee saves are below locals and above outgoing arguments.  */
  offset = info->args_size + info->callee_size;
  for (regno = 0; regno <= 31; regno++)
    {
      if ((reg_save_mask & (1 << regno)) != 0)
	{
	  rtx offset_rtx;
	  rtx mem;

	  offset_rtx = GEN_INT (offset);
	  if (satisfies_constraint_K (offset_rtx))
	    {
	      mem = gen_rtx_MEM (word_mode,
				 gen_rtx_PLUS (Pmode,
					       stack_pointer_rtx,
					       offset_rtx));
	    }
	  else
	    {
	      /* r10 is caller saved so it can be used as a temp reg.  */
	      rtx r10;

	      r10 = gen_rtx_REG (word_mode, 10);
	      insn = emit_move_insn (r10, offset_rtx);
	      if (op == 0)
		RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_add (r10, r10, stack_pointer_rtx);
	      if (op == 0)
		RTX_FRAME_RELATED_P (insn) = 1;
	      mem = gen_rtx_MEM (word_mode, r10);
	    }

	  if (op == 0)
	    insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
	  else
	    insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);

	  /* Only prologue instructions which set the sp or fp, or save a
	     register, should be marked as frame related.  */
	  if (op == 0)
	    RTX_FRAME_RELATED_P (insn) = 1;
	  offset -= UNITS_PER_WORD;
	}
    }
}
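
/* Adjust the stack pointer by AMOUNT bytes; AMOUNT is negative when
   allocating a frame.  Amounts that do not fit a single add immediate are
   first loaded into r10, which is caller saved and therefore free for use
   as a temporary here.  Allocating adjustments are marked frame related so
   that unwind information stays correct.  */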
static void
stack_adjust (HOST_WIDE_INT amount)
{
  rtx insn;

  if (!IN_RANGE (amount, -32776, 32768))
    {
      /* r10 is caller saved so it can be used as a temp reg.  */
      rtx r10;
      r10 = gen_rtx_REG (word_mode, 10);
      insn = emit_move_insn (r10, GEN_INT (amount));
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      insn = emit_add (stack_pointer_rtx,
		       stack_pointer_rtx, GEN_INT (amount));
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
    }
}
/* Create and emit instructions for a function's prologue.  */
void
lm32_expand_prologue (void)
{
  rtx insn;

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Add space on stack for the new frame.  */
      stack_adjust (-current_frame_info.total_size);

      /* Save callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
	expand_save_restore (&current_frame_info, 0);

      /* Set up frame pointer if it's needed.  */
      if (frame_pointer_needed == 1)
	{
	  /* Move sp to fp.  */
	  insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
	  RTX_FRAME_RELATED_P (insn) = 1;

	  /* Add offset - don't use total_size, as that includes
	     pretend_size, which isn't part of this frame.  */
	  insn = emit_add (frame_pointer_rtx,
			   frame_pointer_rtx,
			   GEN_INT (current_frame_info.args_size +
				    current_frame_info.callee_size +
				    current_frame_info.locals_size));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Prevent prologue from being scheduled into function body.  */
      emit_insn (gen_blockage ());
    }
}
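
/* Worked example with hypothetical numbers: for a function with 16 bytes of
   locals, 8 bytes of outgoing arguments, no pretend args and two callee
   saves, lm32_compute_frame_size gives total_size == 32, so the prologue
   above emits sp := sp - 32, stores the two saved registers at sp+16 and
   sp+12, and, when a frame pointer is needed, sets fp := sp + 32.  */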
/* Create and emit instructions for a function's epilogue.  */
void
lm32_expand_epilogue (void)
{
  rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Prevent stack code from being reordered.  */
      emit_insn (gen_blockage ());

      /* Restore callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
	expand_save_restore (&current_frame_info, 1);

      /* Deallocate stack.  */
      stack_adjust (current_frame_info.total_size);

      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
  else
    {
      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
}
/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  */
static HOST_WIDE_INT
lm32_compute_frame_size (int size)
{
  int regno;
  HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
  unsigned int reg_save_mask;

  locals_size = size;
  args_size = crtl->outgoing_args_size;
  pretend_size = crtl->args.pretend_args_size;
  callee_size = 0;
  reg_save_mask = 0;

  /* Build mask that actually determines which registers we save
     and calculate size required to store them in the stack.  */
  for (regno = 1; regno < SP_REGNUM; regno++)
    {
      if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
	{
	  reg_save_mask |= 1 << regno;
	  callee_size += UNITS_PER_WORD;
	}
    }
  if (df_regs_ever_live_p (RA_REGNUM) || ! crtl->is_leaf
      || !optimize)
    {
      reg_save_mask |= 1 << RA_REGNUM;
      callee_size += UNITS_PER_WORD;
    }
  if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
    {
      reg_save_mask |= 1 << FP_REGNUM;
      callee_size += UNITS_PER_WORD;
    }

  /* Compute total frame size.  */
  total_size = pretend_size + args_size + locals_size + callee_size;

  /* Align frame to appropriate boundary.  */
  total_size = (total_size + 3) & ~3;

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.callee_size = callee_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.locals_size = locals_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_save_mask = reg_save_mask;

  return total_size;
}
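
/* Print operand OP to FILE in assembler syntax.  Registers and subregs
   print their register name, a MEM prints its address, and a comparison
   code prints the condition suffix used by the branch/set patterns ("e",
   "ne", "g", ...).  The 'z' letter prints r0 when OP is the constant zero,
   so such operands can use the zero register instead of an immediate.  */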
void
lm32_print_operand (FILE * file, rtx op, int letter)
{
  enum rtx_code code;

  code = GET_CODE (op);

  if (code == SIGN_EXTEND)
    op = XEXP (op, 0), code = GET_CODE (op);
  else if (code == REG || code == SUBREG)
    {
      int regnum;

      if (code == REG)
	regnum = REGNO (op);
      else
	regnum = true_regnum (op);

      fprintf (file, "%s", reg_names[regnum]);
    }
  else if (code == HIGH)
    output_addr_const (file, XEXP (op, 0));
  else if (code == MEM)
    output_address (GET_MODE (op), XEXP (op, 0));
  else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
    fprintf (file, "%s", reg_names[0]);
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
	output_operand_lossage ("only 0.0 can be loaded as an immediate");
      else
	fprintf (file, "0");
    }
  else if (code == EQ)
    fprintf (file, "e ");
  else if (code == NE)
    fprintf (file, "ne ");
  else if (code == GT)
    fprintf (file, "g ");
  else if (code == GTU)
    fprintf (file, "gu ");
  else if (code == LT)
    fprintf (file, "l ");
  else if (code == LTU)
    fprintf (file, "lu ");
  else if (code == GE)
    fprintf (file, "ge ");
  else if (code == GEU)
    fprintf (file, "geu");
  else if (code == LE)
    fprintf (file, "le ");
  else if (code == LEU)
    fprintf (file, "leu");
  else
    output_addr_const (file, op);
}
/* A C compound statement to output to stdio stream STREAM the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  ADDR is an RTL expression.

   On some machines, the syntax for a symbolic address depends on
   the section that the address refers to.  On these machines,
   define the macro `ENCODE_SECTION_INFO' to store the information
   into the `symbol_ref', and then check for it here.  */

void
lm32_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
      break;

    case MEM:
      output_address (VOIDmode, XEXP (addr, 0));
      break;

    case PLUS:
      {
	rtx arg0 = XEXP (addr, 0);
	rtx arg1 = XEXP (addr, 1);

	if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
	  {
	    if (GET_CODE (arg1) == CONST_INT)
	      fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
		       INTVAL (arg1));
	    else
	      {
		fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
		output_addr_const (file, arg1);
		fprintf (file, ")");
	      }
	  }
	else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
	  output_addr_const (file, addr);
	else
	  fatal_insn ("bad operand", addr);
      }
      break;

    case SYMBOL_REF:
      if (SYMBOL_REF_SMALL_P (addr))
	{
	  fprintf (file, "gp(");
	  output_addr_const (file, addr);
	  fprintf (file, ")");
	}
      else
	fatal_insn ("can't use non gp relative absolute address", addr);
      break;

    default:
      fatal_insn ("invalid addressing mode", addr);
      break;
    }
}
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

static rtx
lm32_function_arg (cumulative_args_t cum_v, machine_mode mode,
		   const_tree type, bool named)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (0);

  if (targetm.calls.must_pass_in_stack (mode, type))
    return NULL_RTX;

  if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
    return NULL_RTX;

  return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
}
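
/* Example, assuming LM32_FIRST_ARG_REG is 1 and LM32_NUM_ARG_REGS is 8 (see
   lm32.h): the first named SImode argument arrives with *cum == 0 and is
   passed in (reg:SI 1), the next in (reg:SI 2), and so on.  Once the
   argument registers are exhausted, or for unnamed arguments to a varargs
   call, NULL_RTX is returned and the argument goes on the stack.  */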
static void
lm32_function_arg_advance (cumulative_args_t cum, machine_mode mode,
			   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *get_cumulative_args (cum) += LM32_NUM_REGS2 (mode, type);
}
HOST_WIDE_INT
lm32_compute_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  switch (from)
    {
    case ARG_POINTER_REGNUM:
      switch (to)
	{
	case FRAME_POINTER_REGNUM:
	  offset = 0;
	  break;
	case STACK_POINTER_REGNUM:
	  offset =
	    lm32_compute_frame_size (get_frame_size ()) -
	    current_frame_info.pretend_size;
	  break;
	default:
	  gcc_unreachable ();
	}
      break;
    default:
      gcc_unreachable ();
    }

  return offset;
}
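
/* Implement TARGET_SETUP_INCOMING_VARARGS.  Work out which argument
   registers remain after the named parameters and, unless NO_RTL, dump
   them into the block at the incoming argument pointer so that va_arg can
   walk them as if the caller had passed them on the stack.  *PRETEND_SIZE
   reports the size of that block in bytes.  */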
static void
lm32_setup_incoming_varargs (cumulative_args_t cum_v, machine_mode mode,
			     tree type, int *pretend_size, int no_rtl)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int first_anon_arg;
  tree fntype;

  fntype = TREE_TYPE (current_function_decl);

  if (stdarg_p (fntype))
    first_anon_arg = *cum + LM32_FIRST_ARG_REG;
  else
    {
      /* This is the common case: we have been passed details set up
	 for the last named argument; we want to skip over the
	 registers, if any, used in passing this named parameter in
	 order to determine which is the first register used to pass
	 anonymous arguments.  */
      int size;

      if (mode == BLKmode)
	size = int_size_in_bytes (type);
      else
	size = GET_MODE_SIZE (mode);

      first_anon_arg =
	*cum + LM32_FIRST_ARG_REG +
	((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
    }

  if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
    {
      int first_reg_offset = first_anon_arg;
      int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
      rtx regblock;

      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (Pmode, arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)));
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = size * UNITS_PER_WORD;
    }
}
/* Override command line options.  */
static void
lm32_option_override (void)
{
  /* We must have sign-extend enabled if barrel-shift isn't.  */
  if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
    target_flags |= MASK_SIGN_EXTEND_ENABLED;
}
/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack
   was created.  */
int
lm32_can_use_return (void)
{
  if (!reload_completed)
    return 0;

  if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
    return 0;

  if (lm32_compute_frame_size (get_frame_size ()) != 0)
    return 0;

  return 1;
}
/* Support function to determine the return address of the function
   'count' frames back up the stack.  */
rtx
lm32_return_addr_rtx (int count, rtx frame)
{
  rtx r;
  if (count == 0)
    {
      if (!df_regs_ever_live_p (RA_REGNUM))
	r = gen_rtx_REG (Pmode, RA_REGNUM);
      else
	{
	  r = gen_rtx_MEM (Pmode,
			   gen_rtx_PLUS (Pmode, frame,
					 GEN_INT (-2 * UNITS_PER_WORD)));
	  set_mem_alias_set (r, get_frame_alias_set ());
	}
    }
  else if (flag_omit_frame_pointer)
    r = NULL_RTX;
  else
    {
      r = gen_rtx_MEM (Pmode,
		       gen_rtx_PLUS (Pmode, frame,
				     GEN_INT (-2 * UNITS_PER_WORD)));
      set_mem_alias_set (r, get_frame_alias_set ());
    }
  return r;
}
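
/* Note: for COUNT == 0 the return address is normally still in RA_REGNUM;
   once that register has been clobbered, or for outer frames, it is read
   from the stack slot two words below FRAME.  Without a frame pointer that
   slot cannot be located, so NULL_RTX is returned to signal that the
   address is unavailable.  */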
/* Return true if EXP should be placed in the small data section.  */

static bool
lm32_in_small_data_p (const_tree exp)
{
  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (exp) == STRING_CST)
    return false;

  /* Functions are never in the small data area.  Duh.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    return false;

  if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
    {
      const char *section = DECL_SECTION_NAME (exp);
      if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
	return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));

      /* If this is an incomplete type with size 0, then we can't put it
	 in sdata because it might be too big when completed.  */
      if (size > 0 && size <= g_switch_value)
	return true;
    }

  return false;
}
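
/* Example: with -G 8 (g_switch_value == 8), an 8-byte global such as
   "int pair[2];" counts as small data and is presumably placed in
   .sdata/.sbss, where it can be addressed gp-relative (see the gp(sym)
   case in lm32_print_operand_address); a 12-byte object is not.  */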
/* Emit straight-line code to move LENGTH bytes from SRC to DEST.
   Assume that the areas do not overlap.  */

static void
lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
			HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT offset, delta;
  unsigned HOST_WIDE_INT bits;
  int i;
  machine_mode mode;
  rtx *regs;

  /* Work out how many bits to move at a time.  */
  switch (alignment)
    {
    case 1:
      bits = 8;
      break;
    case 2:
      bits = 16;
      break;
    default:
      bits = 32;
      break;
    }

  mode = int_mode_for_size (bits, 0).require ();
  delta = bits / BITS_PER_UNIT;

  /* Allocate a buffer for the temporary registers.  */
  regs = XALLOCAVEC (rtx, length / delta);

  /* Load as many BITS-sized chunks as possible.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    {
      regs[i] = gen_reg_rtx (mode);
      emit_move_insn (regs[i], adjust_address (src, mode, offset));
    }

  /* Copy the chunks to the destination.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    emit_move_insn (adjust_address (dest, mode, offset), regs[i]);

  /* Mop up any left-over bytes.  */
  if (offset < length)
    {
      src = adjust_address (src, BLKmode, offset);
      dest = adjust_address (dest, BLKmode, offset);
      move_by_pieces (dest, src, length - offset,
		      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
    }
}
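
/* Illustration: a 9-byte copy with ALIGNMENT == 4 uses SImode chunks
   (delta == 4): two loads into fresh pseudos, two stores, and the single
   trailing byte is handled by move_by_pieces.  */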
/* Expand string/block move operations.

   operands[0] is the pointer to the destination.
   operands[1] is the pointer to the source.
   operands[2] is the number of bytes to move.
   operands[3] is the alignment.  */

int
lm32_expand_block_move (rtx * operands)
{
  if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
    {
      lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
			      INTVAL (operands[3]));
      return 1;
    }
  return 0;
}
/* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
   isn't protected by a PIC unspec.  */
int
nonpic_symbol_mentioned_p (rtx x)
{
  const char *fmt;
  int i;

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
      || GET_CODE (x) == PC)
    return 1;

  /* We don't want to look into the possible MEM location of a
     CONST_DOUBLE, since we're not going to use it, in general.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return 0;

  if (GET_CODE (x) == UNSPEC)
    return 0;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
lm32_rtx_costs (rtx x, machine_mode mode, int outer_code,
		int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
  int code = GET_CODE (x);
  bool small_mode;

  const int arithmetic_latency = 1;
  const int shift_latency = 1;
  const int compare_latency = 2;
  const int multiply_latency = 3;
  const int load_latency = 3;
  const int libcall_size_cost = 5;

  /* Determine if we can handle the given mode size in a single instruction.  */
  small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);

  switch (code)
    {
    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case NOT:
    case NEG:
      if (!speed)
	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
      else
	*total =
	  COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
      break;

    case COMPARE:
      if (small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (compare_latency);
	}
      else
	{
	  /* FIXME.  Guessing here.  */
	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
	}
      break;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (shift_latency);
	}
      else if (TARGET_BARREL_SHIFT_ENABLED)
	{
	  /* FIXME: Guessing here.  */
	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
	}
      else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
	}
      else
	{
	  /* Libcall.  */
	  if (!speed)
	    *total = COSTS_N_INSNS (libcall_size_cost);
	  else
	    *total = COSTS_N_INSNS (100);
	}
      break;

    case MULT:
      if (TARGET_MULTIPLY_ENABLED && small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (multiply_latency);
	}
      else
	{
	  /* Libcall.  */
	  if (!speed)
	    *total = COSTS_N_INSNS (libcall_size_cost);
	  else
	    *total = COSTS_N_INSNS (100);
	}
      break;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (TARGET_DIVIDE_ENABLED && small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    {
	      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
		{
		  int cycles = 0;
		  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

		  while (i)
		    {
		      i >>= 2;
		      cycles++;
		    }
		  if (IN_RANGE (i, 0, 65536))
		    *total = COSTS_N_INSNS (1 + 1 + cycles);
		  else
		    *total = COSTS_N_INSNS (2 + 1 + cycles);
		  return true;
		}
	      else if (GET_CODE (XEXP (x, 1)) == REG)
		{
		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
		  return true;
		}
	      else
		{
		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
		  return false;
		}
	    }
	}
      else
	{
	  /* Libcall.  */
	  if (!speed)
	    *total = COSTS_N_INSNS (libcall_size_cost);
	  else
	    *total = COSTS_N_INSNS (100);
	}
      break;

    case HIGH:
    case LO_SUM:
      if (!speed)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (arithmetic_latency);
      break;

    case ZERO_EXTEND:
      if (MEM_P (XEXP (x, 0)))
	*total = COSTS_N_INSNS (0);
      else if (small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (arithmetic_latency);
	}
      else
	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
      break;

    case CONST_INT:
      {
	switch (outer_code)
	  {
	  case HIGH:
	  case LO_SUM:
	    *total = COSTS_N_INSNS (0);
	    return true;

	  case AND:
	  case XOR:
	  case IOR:
	  case ASHIFT:
	  case ASHIFTRT:
	  case LSHIFTRT:
	  case ROTATE:
	  case ROTATERT:
	    if (satisfies_constraint_L (x))
	      *total = COSTS_N_INSNS (0);
	    else
	      *total = COSTS_N_INSNS (2);
	    return true;

	  case SET:
	  case PLUS:
	  case MINUS:
	  case COMPARE:
	    if (satisfies_constraint_K (x))
	      *total = COSTS_N_INSNS (0);
	    else
	      *total = COSTS_N_INSNS (2);
	    return true;

	  case MULT:
	    if (TARGET_MULTIPLY_ENABLED)
	      {
		if (satisfies_constraint_K (x))
		  *total = COSTS_N_INSNS (0);
		else
		  *total = COSTS_N_INSNS (2);
		return true;
	      }
	    /* Fall through.  */

	  default:
	    if (satisfies_constraint_K (x))
	      *total = COSTS_N_INSNS (1);
	    else
	      *total = COSTS_N_INSNS (2);
	    return true;
	  }
      }
      break;

    case SYMBOL_REF:
    case CONST:
      switch (outer_code)
	{
	case HIGH:
	case LO_SUM:
	  *total = COSTS_N_INSNS (0);
	  return true;

	case MEM:
	case SET:
	  if (g_switch_value)
	    {
	      *total = COSTS_N_INSNS (0);
	      return true;
	    }
	  break;
	}
      /* Fall through.  */

    case LABEL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case SET:
      *total = COSTS_N_INSNS (1);
      break;

    case MEM:
      if (!speed)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (load_latency);
      break;
    }

  return false;
}
/* Implement TARGET_CAN_ELIMINATE.  */

bool
lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
}
/* Implement TARGET_LEGITIMATE_ADDRESS_P.  */

static bool
lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
{
  /* (rM) */
  if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
    return true;
  if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
    return true;

  /* (rM)+literal */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
	  || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && satisfies_constraint_K (XEXP ((x), 1)))
    return true;

  /* gp(sym) */
  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
    return true;

  return false;
}
/* Check that a move is not memory to memory.  */

bool
lm32_move_ok (machine_mode mode, rtx operands[2])
{
  if (memory_operand (operands[0], mode))
    return register_or_zero_operand (operands[1], mode);
  return true;
}
/* Implement TARGET_HARD_REGNO_MODE_OK.  */

static bool
lm32_hard_regno_mode_ok (unsigned int regno, machine_mode)
{
  return G_REG_P (regno);
}
/* Implement TARGET_MODES_TIEABLE_P.  */

static bool
lm32_modes_tieable_p (machine_mode mode1, machine_mode mode2)
{
  return (GET_MODE_CLASS (mode1) == MODE_INT
	  && GET_MODE_CLASS (mode2) == MODE_INT
	  && GET_MODE_SIZE (mode1) <= UNITS_PER_WORD
	  && GET_MODE_SIZE (mode2) <= UNITS_PER_WORD);
}
/* Implement TARGET_STARTING_FRAME_OFFSET.  */

static HOST_WIDE_INT
lm32_starting_frame_offset (void)
{
  return UNITS_PER_WORD;
}