include "expr.h".
[official-gcc.git] / gcc / config / arm / arm.c
blob732cc937ec05ccb018dbd76441858bb7e4923a1d
1 /* Output routines for GCC for ARM/RISCiX.
2 Copyright (C) 1991, 1993, 1994 Free Software Foundation, Inc.
3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 and Martin Simmons (@harleqn.co.uk).
5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
7 This file is part of GNU CC.
9 GNU CC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
12 any later version.
14 GNU CC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GNU CC; see the file COPYING. If not, write to
21 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
23 #include <stdio.h>
24 #include <string.h>
25 #include "assert.h"
26 #include "config.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "reload.h"
38 #include "tree.h"
39 #include "expr.h"
41 /* The maximum number of insns skipped which will be conditionalised if
42 possible. */
43 #define MAX_INSNS_SKIPPED 5
45 /* Some function declarations. */
46 extern FILE *asm_out_file;
47 extern char *output_multi_immediate ();
48 extern void arm_increase_location ();
50 HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
51 static int get_prologue_size PROTO ((void));
53 /* Define the information needed to generate branch insns. This is
54 stored from the compare operation. */
56 rtx arm_compare_op0, arm_compare_op1;
57 int arm_compare_fp;
59 /* What type of cpu are we compiling for? */
60 enum processor_type arm_cpu;
 62 /* What type of floating point are we compiling for? */
63 enum floating_point_type arm_fpu;
65 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
66 must report the mode of the memory reference from PRINT_OPERAND to
67 PRINT_OPERAND_ADDRESS. */
68 enum machine_mode output_memory_reference_mode;
 70 /* Nonzero if the prologue must set up `fp'. */
71 int current_function_anonymous_args;
73 /* Location counter of .text segment. */
74 int arm_text_location = 0;
76 /* Set to one if we think that lr is only saved because of subroutine calls,
 77 but all of these can be `put after' return insns. */
78 int lr_save_eliminated;
80 /* A hash table is used to store text segment labels and their associated
81 offset from the start of the text segment. */
 82 struct label_offset
 83 {
 84 char *name;
 85 int offset;
 86 struct label_offset *cdr;
 87 };
89 #define LABEL_HASH_SIZE 257
91 static struct label_offset *offset_table[LABEL_HASH_SIZE];
93 /* Set to 1 when a return insn is output, this means that the epilogue
94 is not needed. */
96 static int return_used_this_function;
98 /* For an explanation of these variables, see final_prescan_insn below. */
99 int arm_ccfsm_state;
100 int arm_current_cc;
101 rtx arm_target_insn;
102 int arm_target_label;
104 /* The condition codes of the ARM, and the inverse function. */
 105 char *arm_condition_codes[] =
 106 {
 107 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
 108 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
 109 };
111 #define ARM_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
113 /* Return 1 if it is possible to return using a single instruction */
 115 int
 116 use_return_insn ()
 117 {
 118 int regno;
 120 if (!reload_completed || current_function_pretend_args_size
121 || current_function_anonymous_args
122 || (get_frame_size () && !(TARGET_APCS || frame_pointer_needed)))
123 return 0;
125 /* Can't be done if any of the FPU regs are pushed, since this also
126 requires an insn */
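/* (Hard regs 20-23 are assumed here to be the call-saved FPA registers
   f4-f7 in this port's register numbering; see the register map in
   arm.h.) */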
127 for (regno = 20; regno < 24; regno++)
128 if (regs_ever_live[regno])
129 return 0;
 131 return 1;
 132 }
134 /* Return TRUE if int I is a valid immediate ARM constant. */
 136 int
 137 const_ok_for_arm (i)
 138 HOST_WIDE_INT i;
 139 {
 140 unsigned HOST_WIDE_INT mask = ~0xFF;
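/* A valid ARM data-processing immediate is an 8-bit value rotated right
   by an even amount, so the loop below tries the ~0xFF mask in each of
   the 16 even rotations.  For example, 0xff000000 (0xff rotated right
   by 8) is representable, while 0x00000101 is not. */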
142 /* Fast return for 0 and powers of 2 */
143 if ((i & (i - 1)) == 0)
 144 return TRUE;
 146 do
 147 {
 148 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
149 return TRUE;
150 mask =
151 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
152 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
153 } while (mask != ~0xFF);
 155 return FALSE;
 156 }
158 /* Return true if I is a valid constant for the operation CODE. */
160 const_ok_for_op (i, code, mode)
161 HOST_WIDE_INT i;
162 enum rtx_code code;
163 enum machine_mode mode;
165 if (const_ok_for_arm (i))
166 return 1;
168 switch (code)
170 case PLUS:
171 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
173 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
174 case XOR:
175 case IOR:
176 return 0;
178 case AND:
179 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
181 default:
182 abort ();
186 /* Emit a sequence of insns to handle a large constant.
187 CODE is the code of the operation required, it can be any of SET, PLUS,
188 IOR, AND, XOR, MINUS;
189 MODE is the mode in which the operation is being performed;
190 VAL is the integer to operate on;
191 SOURCE is the other operand (a register, or a null-pointer for SET);
192 SUBTARGETS means it is safe to create scratch registers if that will
193 either produce a simpler sequence, or we will want to cse the values. */
196 arm_split_constant (code, mode, val, target, source, subtargets)
197 enum rtx_code code;
198 enum machine_mode mode;
199 HOST_WIDE_INT val;
200 rtx target;
201 rtx source;
202 int subtargets;
204 int can_add = 0;
205 int can_invert = 0;
206 int can_negate = 0;
207 int can_negate_initial = 0;
208 int can_shift = 0;
209 int i;
210 int num_bits_set = 0;
211 int set_sign_bit_copies = 0;
212 int clear_sign_bit_copies = 0;
213 int clear_zero_bit_copies = 0;
214 int set_zero_bit_copies = 0;
215 int insns = 0;
216 rtx new_src;
217 unsigned HOST_WIDE_INT temp1, temp2;
218 unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
 220 /* Find out which operations are safe for a given CODE. Also do a quick
221 check for degenerate cases; these can occur when DImode operations
222 are split. */
223 switch (code)
225 case SET:
226 can_invert = 1;
227 can_shift = 1;
228 can_negate = 1;
229 break;
231 case PLUS:
232 can_negate = 1;
233 can_negate_initial = 1;
234 break;
236 case IOR:
237 if (remainder == 0xffffffff)
239 emit_insn (gen_rtx (SET, VOIDmode, target,
240 GEN_INT (ARM_SIGN_EXTEND (val))));
241 return 1;
243 if (remainder == 0)
245 if (reload_completed && rtx_equal_p (target, source))
246 return 0;
247 emit_insn (gen_rtx (SET, VOIDmode, target, source));
248 return 1;
250 break;
252 case AND:
253 if (remainder == 0)
255 emit_insn (gen_rtx (SET, VOIDmode, target, const0_rtx));
256 return 1;
258 if (remainder == 0xffffffff)
260 if (reload_completed && rtx_equal_p (target, source))
261 return 0;
262 emit_insn (gen_rtx (SET, VOIDmode, target, source));
263 return 1;
265 can_invert = 1;
266 break;
268 case XOR:
269 if (remainder == 0)
271 if (reload_completed && rtx_equal_p (target, source))
272 return 0;
273 emit_insn (gen_rtx (SET, VOIDmode, target, source));
274 return 1;
276 if (remainder == 0xffffffff)
278 emit_insn (gen_rtx (SET, VOIDmode, target,
279 gen_rtx (NOT, mode, source)));
280 return 1;
283 /* We don't know how to handle this yet below. */
284 abort ();
286 case MINUS:
287 /* We treat MINUS as (val - source), since (source - val) is always
288 passed as (source + (-val)). */
289 if (remainder == 0)
291 emit_insn (gen_rtx (SET, VOIDmode, target,
292 gen_rtx (NEG, mode, source)));
293 return 1;
295 if (const_ok_for_arm (val))
297 emit_insn (gen_rtx (SET, VOIDmode, target,
298 gen_rtx (MINUS, mode, GEN_INT (val), source)));
299 return 1;
301 can_negate = 1;
303 break;
305 default:
306 abort ();
309 /* If we can do it in one insn get out quickly */
310 if (const_ok_for_arm (val)
311 || (can_negate_initial && const_ok_for_arm (-val))
312 || (can_invert && const_ok_for_arm (~val)))
314 emit_insn (gen_rtx (SET, VOIDmode, target,
315 (source ? gen_rtx (code, mode, source,
316 GEN_INT (val)) : GEN_INT (val))));
317 return 1;
321 /* Calculate a few attributes that may be useful for specific
322 optimizations. */
324 for (i = 31; i >= 0; i--)
326 if ((remainder & (1 << i)) == 0)
327 clear_sign_bit_copies++;
328 else
329 break;
332 for (i = 31; i >= 0; i--)
334 if ((remainder & (1 << i)) != 0)
335 set_sign_bit_copies++;
336 else
337 break;
340 for (i = 0; i <= 31; i++)
342 if ((remainder & (1 << i)) == 0)
343 clear_zero_bit_copies++;
344 else
345 break;
348 for (i = 0; i <= 31; i++)
350 if ((remainder & (1 << i)) != 0)
351 set_zero_bit_copies++;
352 else
353 break;
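/* As an illustration: for remainder == 0x0000fff0 the loops above give
   clear_sign_bit_copies == 16, set_sign_bit_copies == 0,
   clear_zero_bit_copies == 4 and set_zero_bit_copies == 0. */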
356 switch (code)
358 case SET:
359 /* See if we can do this by sign_extending a constant that is known
 360 to be negative. This is a good way of doing it, since the shift
361 may well merge into a subsequent insn. */
362 if (set_sign_bit_copies > 1)
364 if (const_ok_for_arm
365 (temp1 = ARM_SIGN_EXTEND (remainder
366 << (set_sign_bit_copies - 1))))
368 new_src = subtargets ? gen_reg_rtx (mode) : target;
369 emit_insn (gen_rtx (SET, VOIDmode, new_src, GEN_INT (temp1)));
370 emit_insn (gen_ashrsi3 (target, new_src,
371 GEN_INT (set_sign_bit_copies - 1)));
372 return 2;
 374 /* For an inverted constant, we will need to set the low bits;
 375 these will be shifted out of harm's way. */
376 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
377 if (const_ok_for_arm (~temp1))
379 new_src = subtargets ? gen_reg_rtx (mode) : target;
380 emit_insn (gen_rtx (SET, VOIDmode, new_src, GEN_INT (temp1)));
381 emit_insn (gen_ashrsi3 (target, new_src,
382 GEN_INT (set_sign_bit_copies - 1)));
383 return 2;
387 /* See if we can generate this by setting the bottom (or the top)
388 16 bits, and then shifting these into the other half of the
389 word. We only look for the simplest cases, to do more would cost
390 too much. Be careful, however, not to generate this when the
391 alternative would take fewer insns. */
392 if (val & 0xffff0000)
394 temp1 = remainder & 0xffff0000;
395 temp2 = remainder & 0x0000ffff;
397 /* Overlaps outside this range are best done using other methods. */
398 for (i = 9; i < 24; i++)
400 if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
401 && ! const_ok_for_arm (temp2))
403 insns
404 = arm_split_constant (code, mode, temp2,
405 (new_src
406 = subtargets ? gen_reg_rtx (mode)
407 : target),
408 source, subtargets);
409 source = new_src;
410 emit_insn (gen_rtx (SET, VOIDmode, target,
411 gen_rtx (IOR, mode,
412 gen_rtx (ASHIFT, mode, source,
413 GEN_INT (i)),
414 source)));
415 return insns + 1;
419 /* Don't duplicate cases already considered. */
420 for (i = 17; i < 24; i++)
422 if (((temp1 | (temp1 >> i)) == remainder)
423 && ! const_ok_for_arm (temp1))
425 insns
426 = arm_split_constant (code, mode, temp1,
427 (new_src
428 = subtargets ? gen_reg_rtx (mode)
429 : target),
430 source, subtargets);
431 source = new_src;
432 emit_insn (gen_rtx (SET, VOIDmode, target,
433 gen_rtx (IOR, mode,
434 gen_rtx (LSHIFTRT, mode, source,
435 GEN_INT (i)),
436 source)));
437 return insns + 1;
441 break;
443 case IOR:
444 case XOR:
445 /* If we have IOR or XOR, and the inverse of the constant can be loaded
446 in a single instruction, and we can find a temporary to put it in,
447 then this can be done in two instructions instead of 3-4. */
448 if (subtargets
449 || (reload_completed && ! reg_mentioned_p (target, source)))
451 if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
453 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
455 emit_insn (gen_rtx (SET, VOIDmode, sub,
456 GEN_INT (ARM_SIGN_EXTEND (~ val))));
457 emit_insn (gen_rtx (SET, VOIDmode, target,
458 gen_rtx (code, mode, source, sub)));
459 return 2;
463 if (code == XOR)
464 break;
466 if (set_sign_bit_copies > 8
467 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
469 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
470 rtx shift = GEN_INT (set_sign_bit_copies);
472 emit_insn (gen_rtx (SET, VOIDmode, sub,
473 gen_rtx (NOT, mode,
474 gen_rtx (ASHIFT, mode, source,
475 shift))));
476 emit_insn (gen_rtx (SET, VOIDmode, target,
477 gen_rtx (NOT, mode,
478 gen_rtx (LSHIFTRT, mode, sub,
479 shift))));
480 return 2;
483 if (set_zero_bit_copies > 8
484 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
486 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
487 rtx shift = GEN_INT (set_zero_bit_copies);
489 emit_insn (gen_rtx (SET, VOIDmode, sub,
490 gen_rtx (NOT, mode,
491 gen_rtx (LSHIFTRT, mode, source,
492 shift))));
493 emit_insn (gen_rtx (SET, VOIDmode, target,
494 gen_rtx (NOT, mode,
495 gen_rtx (ASHIFT, mode, sub,
496 shift))));
497 return 2;
500 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
502 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
503 emit_insn (gen_rtx (SET, VOIDmode, sub,
504 gen_rtx (NOT, mode, source)));
505 source = sub;
506 if (subtargets)
507 sub = gen_reg_rtx (mode);
508 emit_insn (gen_rtx (SET, VOIDmode, sub,
509 gen_rtx (AND, mode, source, GEN_INT (temp1))));
510 emit_insn (gen_rtx (SET, VOIDmode, target,
511 gen_rtx (NOT, mode, sub)));
512 return 3;
514 break;
516 case AND:
 517 /* See if two shifts will do 2 or more insns' worth of work. */
518 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
520 HOST_WIDE_INT shift_mask = ((0xffffffff
521 << (32 - clear_sign_bit_copies))
522 & 0xffffffff);
523 rtx new_source;
524 rtx shift = GEN_INT (clear_sign_bit_copies);
526 if ((remainder | shift_mask) != 0xffffffff)
528 new_source = subtargets ? gen_reg_rtx (mode) : target;
529 insns = arm_split_constant (AND, mode, remainder | shift_mask,
530 new_source, source, subtargets);
531 source = new_source;
534 new_source = subtargets ? gen_reg_rtx (mode) : target;
535 emit_insn (gen_ashlsi3 (new_source, source, shift));
536 emit_insn (gen_lshrsi3 (target, new_source, shift));
537 return insns + 2;
540 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
542 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
543 rtx new_source;
544 rtx shift = GEN_INT (clear_zero_bit_copies);
546 if ((remainder | shift_mask) != 0xffffffff)
548 new_source = subtargets ? gen_reg_rtx (mode) : target;
549 insns = arm_split_constant (AND, mode, remainder | shift_mask,
550 new_source, source, subtargets);
551 source = new_source;
554 new_source = subtargets ? gen_reg_rtx (mode) : target;
555 emit_insn (gen_lshrsi3 (new_source, source, shift));
556 emit_insn (gen_ashlsi3 (target, new_source, shift));
557 return insns + 2;
560 break;
562 default:
563 break;
566 for (i = 0; i < 32; i++)
567 if (remainder & (1 << i))
568 num_bits_set++;
570 if (code == AND || (can_invert && num_bits_set > 16))
571 remainder = (~remainder) & 0xffffffff;
572 else if (code == PLUS && num_bits_set > 16)
573 remainder = (-remainder) & 0xffffffff;
574 else
576 can_invert = 0;
577 can_negate = 0;
580 /* Now try and find a way of doing the job in either two or three
581 instructions.
 582 We start by looking for the largest block of zeros that is aligned on
 583 a 2-bit boundary; we then fill up the temps, wrapping around to the
584 top of the word when we drop off the bottom.
585 In the worst case this code should produce no more than four insns. */
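/* For example, 0x89abcdef can always be built from four 8-bit chunks:
   0x89000000 + 0x00ab0000 + 0x0000cd00 + 0x000000ef, each of which is
   a valid rotated immediate. */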
587 int best_start = 0;
588 int best_consecutive_zeros = 0;
590 for (i = 0; i < 32; i += 2)
592 int consecutive_zeros = 0;
594 if (! (remainder & (3 << i)))
596 while ((i < 32) && ! (remainder & (3 << i)))
598 consecutive_zeros += 2;
599 i += 2;
601 if (consecutive_zeros > best_consecutive_zeros)
603 best_consecutive_zeros = consecutive_zeros;
604 best_start = i - consecutive_zeros;
606 i -= 2;
610 /* Now start emitting the insns, starting with the one with the highest
611 bit set: we do this so that the smallest number will be emitted last;
612 this is more likely to be combinable with addressing insns. */
613 i = best_start;
616 int end;
618 if (i <= 0)
619 i += 32;
620 if (remainder & (3 << (i - 2)))
622 end = i - 8;
623 if (end < 0)
624 end += 32;
625 temp1 = remainder & ((0x0ff << end)
626 | ((i < end) ? (0xff >> (32 - end)) : 0));
627 remainder &= ~temp1;
629 if (code == SET)
631 emit_insn (gen_rtx (SET, VOIDmode,
632 new_src = (subtargets ? gen_reg_rtx (mode)
633 : target),
634 GEN_INT (can_invert ? ~temp1 : temp1)));
635 can_invert = 0;
636 code = PLUS;
638 else if (code == MINUS)
640 emit_insn (gen_rtx (SET, VOIDmode,
641 new_src = (subtargets ? gen_reg_rtx (mode)
642 : target),
643 gen_rtx (code, mode, GEN_INT (temp1),
644 source)));
645 code = PLUS;
647 else
649 emit_insn (gen_rtx (SET, VOIDmode,
650 new_src = remainder ? (subtargets
651 ? gen_reg_rtx (mode)
652 : target) : target,
653 gen_rtx (code, mode, source,
654 GEN_INT (can_invert ? ~temp1
655 : (can_negate
656 ? -temp1 : temp1)))));
659 insns++;
660 source = new_src;
661 i -= 6;
663 i -= 2;
664 } while (remainder);
666 return insns;
669 #define REG_OR_SUBREG_REG(X) \
670 (GET_CODE (X) == REG \
671 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
673 #define REG_OR_SUBREG_RTX(X) \
674 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
676 #define ARM_FRAME_RTX(X) \
677 ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx \
678 || (X) == arg_pointer_rtx)
681 arm_rtx_costs (x, code, outer_code)
682 rtx x;
683 enum rtx_code code, outer_code;
685 enum machine_mode mode = GET_MODE (x);
686 enum rtx_code subcode;
687 int extra_cost;
689 switch (code)
691 case MEM:
692 /* Memory costs quite a lot for the first word, but subsequent words
693 load at the equivalent of a single insn each. */
694 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
695 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
697 case DIV:
698 case MOD:
699 return 100;
701 case ROTATE:
702 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
703 return 4;
704 /* Fall through */
705 case ROTATERT:
706 if (mode != SImode)
707 return 8;
708 /* Fall through */
709 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
710 if (mode == DImode)
711 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
712 + ((GET_CODE (XEXP (x, 0)) == REG
713 || (GET_CODE (XEXP (x, 0)) == SUBREG
714 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
715 ? 0 : 8));
716 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
717 || (GET_CODE (XEXP (x, 0)) == SUBREG
718 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
719 ? 0 : 4)
720 + ((GET_CODE (XEXP (x, 1)) == REG
721 || (GET_CODE (XEXP (x, 1)) == SUBREG
722 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
723 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
724 ? 0 : 4));
726 case MINUS:
727 if (mode == DImode)
728 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
729 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
730 || (GET_CODE (XEXP (x, 0)) == CONST_INT
731 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
732 ? 0 : 8));
734 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
735 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
736 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
737 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
738 ? 0 : 8)
739 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
740 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
741 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
742 ? 0 : 8));
744 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
745 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
746 && REG_OR_SUBREG_REG (XEXP (x, 1))))
747 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
748 || subcode == ASHIFTRT || subcode == LSHIFTRT
749 || subcode == ROTATE || subcode == ROTATERT
750 || (subcode == MULT
751 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
752 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
753 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
754 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
755 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
756 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
757 && REG_OR_SUBREG_REG (XEXP (x, 0))))
758 return 1;
759 /* Fall through */
761 case PLUS:
762 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
763 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
764 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
765 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
766 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
767 ? 0 : 8));
769 /* Fall through */
770 case AND: case XOR: case IOR:
771 extra_cost = 0;
773 /* Normally the frame registers will be spilt into reg+const during
774 reload, so it is a bad idea to combine them with other instructions,
775 since then they might not be moved outside of loops. As a compromise
776 we allow integration with ops that have a constant as their second
777 operand. */
778 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
779 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
780 && GET_CODE (XEXP (x, 1)) != CONST_INT)
781 || (REG_OR_SUBREG_REG (XEXP (x, 0))
782 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
783 extra_cost = 4;
785 if (mode == DImode)
786 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
787 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
788 || (GET_CODE (XEXP (x, 1)) == CONST_INT
789 && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
790 ? 0 : 8));
792 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
793 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
794 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
795 || (GET_CODE (XEXP (x, 1)) == CONST_INT
796 && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
797 ? 0 : 4));
799 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
800 return (1 + extra_cost
801 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
802 || subcode == LSHIFTRT || subcode == ASHIFTRT
803 || subcode == ROTATE || subcode == ROTATERT
804 || (subcode == MULT
805 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
806 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
807 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0))
808 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
809 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
810 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
811 ? 0 : 4));
813 return 8;
815 case MULT:
816 if (GET_MODE_CLASS (mode) == MODE_FLOAT
817 || mode == DImode)
818 return 30;
820 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
822 HOST_WIDE_INT i = INTVAL (XEXP (x, 1)) & 0xffffffff;
823 int add_cost = const_ok_for_arm (i) ? 4 : 8;
824 int j;
 826 /* This will need adjusting for ARMs with fast multiplies */
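/* The loop below models an early-terminating 2-bits-per-cycle (Booth)
   multiplier: each 2-bit step costs 2, and stepping stops as soon as
   no set bits remain in the constant. */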
827 for (j = 0; i && j < 32; j += 2)
829 i &= ~(3 << j);
830 add_cost += 2;
833 return add_cost;
836 return (30 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
837 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
839 case NEG:
840 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
841 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
842 /* Fall through */
843 case NOT:
844 if (mode == DImode)
845 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
847 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
849 case IF_THEN_ELSE:
850 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
851 return 14;
852 return 2;
854 case COMPARE:
855 return 1;
857 case ABS:
858 return 4 + (mode == DImode ? 4 : 0);
860 case SIGN_EXTEND:
861 if (GET_MODE (XEXP (x, 0)) == QImode)
862 return (4 + (mode == DImode ? 4 : 0)
863 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
864 /* Fall through */
865 case ZERO_EXTEND:
866 switch (GET_MODE (XEXP (x, 0)))
868 case QImode:
869 return (1 + (mode == DImode ? 4 : 0)
870 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
872 case HImode:
873 return (4 + (mode == DImode ? 4 : 0)
874 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
876 case SImode:
877 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
879 abort ();
881 default:
882 return 99;
886 /* This code has been fixed for cross compilation. */
888 static int fpa_consts_inited = 0;
890 char *strings_fpa[8] = {
891 "0.0",
892 "1.0",
893 "2.0",
894 "3.0",
895 "4.0",
896 "5.0",
897 "0.5",
898 "10.0"
901 static REAL_VALUE_TYPE values_fpa[8];
903 static void
904 init_fpa_table ()
906 int i;
907 REAL_VALUE_TYPE r;
909 for (i = 0; i < 8; i++)
911 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
912 values_fpa[i] = r;
915 fpa_consts_inited = 1;
918 /* Return TRUE if rtx X is a valid immediate FPU constant. */
921 const_double_rtx_ok_for_fpu (x)
922 rtx x;
924 REAL_VALUE_TYPE r;
925 int i;
927 if (!fpa_consts_inited)
928 init_fpa_table ();
930 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
931 if (REAL_VALUE_MINUS_ZERO (r))
932 return 0;
934 for (i = 0; i < 8; i++)
935 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
936 return 1;
938 return 0;
941 /* Return TRUE if rtx X is a valid immediate FPU constant. */
944 neg_const_double_rtx_ok_for_fpu (x)
945 rtx x;
947 REAL_VALUE_TYPE r;
948 int i;
950 if (!fpa_consts_inited)
951 init_fpa_table ();
953 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
954 r = REAL_VALUE_NEGATE (r);
955 if (REAL_VALUE_MINUS_ZERO (r))
956 return 0;
958 for (i = 0; i < 8; i++)
959 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
960 return 1;
962 return 0;
965 /* Predicates for `match_operand' and `match_operator'. */
967 /* s_register_operand is the same as register_operand, but it doesn't accept
968 (SUBREG (MEM)...). */
971 s_register_operand (op, mode)
972 register rtx op;
973 enum machine_mode mode;
975 if (GET_MODE (op) != mode && mode != VOIDmode)
976 return 0;
978 if (GET_CODE (op) == SUBREG)
979 op = SUBREG_REG (op);
981 /* We don't consider registers whose class is NO_REGS
982 to be a register operand. */
983 return (GET_CODE (op) == REG
984 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
985 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
988 /* Only accept reg, subreg(reg), const_int. */
991 reg_or_int_operand (op, mode)
992 register rtx op;
993 enum machine_mode mode;
995 if (GET_CODE (op) == CONST_INT)
996 return 1;
998 if (GET_MODE (op) != mode && mode != VOIDmode)
999 return 0;
1001 if (GET_CODE (op) == SUBREG)
1002 op = SUBREG_REG (op);
1004 /* We don't consider registers whose class is NO_REGS
1005 to be a register operand. */
1006 return (GET_CODE (op) == REG
1007 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1008 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1011 /* Return 1 if OP is an item in memory, given that we are in reload. */
1014 reload_memory_operand (op, mode)
1015 rtx op;
1016 enum machine_mode mode;
1018 int regno = true_regnum (op);
1020 return (! CONSTANT_P (op)
1021 && (regno == -1
1022 || (GET_CODE (op) == REG
1023 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1026 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
1029 arm_rhs_operand (op, mode)
1030 rtx op;
1031 enum machine_mode mode;
1033 return (s_register_operand (op, mode)
1034 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
 1037 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load. */
1041 arm_rhsm_operand (op, mode)
1042 rtx op;
1043 enum machine_mode mode;
1045 return (s_register_operand (op, mode)
1046 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
1047 || memory_operand (op, mode));
1050 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
1051 constant that is valid when negated. */
1054 arm_add_operand (op, mode)
1055 rtx op;
1056 enum machine_mode mode;
1058 return (s_register_operand (op, mode)
1059 || (GET_CODE (op) == CONST_INT
1060 && (const_ok_for_arm (INTVAL (op))
1061 || const_ok_for_arm (-INTVAL (op)))));
1065 arm_not_operand (op, mode)
1066 rtx op;
1067 enum machine_mode mode;
1069 return (s_register_operand (op, mode)
1070 || (GET_CODE (op) == CONST_INT
1071 && (const_ok_for_arm (INTVAL (op))
1072 || const_ok_for_arm (~INTVAL (op)))));
1075 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
1078 fpu_rhs_operand (op, mode)
1079 rtx op;
1080 enum machine_mode mode;
1082 if (s_register_operand (op, mode))
1083 return TRUE;
1084 else if (GET_CODE (op) == CONST_DOUBLE)
1085 return (const_double_rtx_ok_for_fpu (op));
1087 return FALSE;
1091 fpu_add_operand (op, mode)
1092 rtx op;
1093 enum machine_mode mode;
1095 if (s_register_operand (op, mode))
1096 return TRUE;
1097 else if (GET_CODE (op) == CONST_DOUBLE)
1098 return (const_double_rtx_ok_for_fpu (op)
1099 || neg_const_double_rtx_ok_for_fpu (op));
1101 return FALSE;
1104 /* Return nonzero if OP is a constant power of two. */
1107 power_of_two_operand (op, mode)
1108 rtx op;
1109 enum machine_mode mode;
1111 if (GET_CODE (op) == CONST_INT)
1113 HOST_WIDE_INT value = INTVAL(op);
1114 return value != 0 && (value & (value - 1)) == 0;
1116 return FALSE;
1119 /* Return TRUE for a valid operand of a DImode operation.
1120 Either: REG, CONST_DOUBLE or MEM(DImode_address).
1121 Note that this disallows MEM(REG+REG), but allows
1122 MEM(PRE/POST_INC/DEC(REG)). */
1125 di_operand (op, mode)
1126 rtx op;
1127 enum machine_mode mode;
1129 if (s_register_operand (op, mode))
1130 return TRUE;
1132 switch (GET_CODE (op))
1134 case CONST_DOUBLE:
1135 case CONST_INT:
1136 return TRUE;
1138 case MEM:
1139 return memory_address_p (DImode, XEXP (op, 0));
1141 default:
1142 return FALSE;
1146 /* Return TRUE for valid index operands. */
1149 index_operand (op, mode)
1150 rtx op;
1151 enum machine_mode mode;
1153 return (s_register_operand(op, mode)
1154 || (immediate_operand (op, mode)
1155 && INTVAL (op) < 4096 && INTVAL (op) > -4096));
1158 /* Return TRUE for valid shifts by a constant. This also accepts any
1159 power of two on the (somewhat overly relaxed) assumption that the
1160 shift operator in this case was a mult. */
1163 const_shift_operand (op, mode)
1164 rtx op;
1165 enum machine_mode mode;
1167 return (power_of_two_operand (op, mode)
1168 || (immediate_operand (op, mode)
1169 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
1172 /* Return TRUE for arithmetic operators which can be combined with a multiply
1173 (shift). */
1176 shiftable_operator (x, mode)
1177 rtx x;
1178 enum machine_mode mode;
1180 if (GET_MODE (x) != mode)
1181 return FALSE;
1182 else
1184 enum rtx_code code = GET_CODE (x);
1186 return (code == PLUS || code == MINUS
1187 || code == IOR || code == XOR || code == AND);
1191 /* Return TRUE for shift operators. */
1194 shift_operator (x, mode)
1195 rtx x;
1196 enum machine_mode mode;
1198 if (GET_MODE (x) != mode)
1199 return FALSE;
1200 else
1202 enum rtx_code code = GET_CODE (x);
1204 if (code == MULT)
1205 return power_of_two_operand (XEXP (x, 1));
1207 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
1208 || code == ROTATERT);
1212 int equality_operator (x, mode)
1213 rtx x;
1214 enum machine_mode mode;
1216 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
1219 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
1222 minmax_operator (x, mode)
1223 rtx x;
1224 enum machine_mode mode;
1226 enum rtx_code code = GET_CODE (x);
1228 if (GET_MODE (x) != mode)
1229 return FALSE;
1231 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
1234 /* return TRUE if x is EQ or NE */
 1236 /* Return TRUE if this is the condition code register; if we aren't given
 1237 a mode, accept any CCmode-class register */
1240 cc_register (x, mode)
1241 rtx x;
1242 enum machine_mode mode;
1244 if (mode == VOIDmode)
1246 mode = GET_MODE (x);
1247 if (GET_MODE_CLASS (mode) != MODE_CC)
1248 return FALSE;
1251 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
1252 return TRUE;
1254 return FALSE;
1257 enum rtx_code
1258 minmax_code (x)
1259 rtx x;
1261 enum rtx_code code = GET_CODE (x);
1263 if (code == SMAX)
1264 return GE;
1265 else if (code == SMIN)
1266 return LE;
1267 else if (code == UMIN)
1268 return LEU;
1269 else if (code == UMAX)
1270 return GEU;
1272 abort ();
1275 /* Return 1 if memory locations are adjacent */
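/* Two SImode references are "adjacent" when they use the same base
   register and their offsets differ by exactly one word (4 bytes);
   such pairs are candidates for combining into a single ldm/stm. */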
1278 adjacent_mem_locations (a, b)
1279 rtx a, b;
1281 int val0 = 0, val1 = 0;
1282 int reg0, reg1;
1284 if ((GET_CODE (XEXP (a, 0)) == REG
1285 || (GET_CODE (XEXP (a, 0)) == PLUS
1286 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
1287 && (GET_CODE (XEXP (b, 0)) == REG
1288 || (GET_CODE (XEXP (b, 0)) == PLUS
1289 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
1291 if (GET_CODE (XEXP (a, 0)) == PLUS)
1293 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
1294 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
1296 else
1297 reg0 = REGNO (XEXP (a, 0));
1298 if (GET_CODE (XEXP (b, 0)) == PLUS)
1300 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
1301 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
1303 else
1304 reg1 = REGNO (XEXP (b, 0));
1305 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
1307 return 0;
1310 /* Return 1 if OP is a load multiple operation. It is known to be
1311 parallel and the first section will be tested. */
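/* The RTL shape checked for, with N registers loaded and write-back, is
   roughly (a sketch only; the first SET and the CLOBBER are present in
   the write-back form):

   (parallel [(set (reg base) (plus (reg base) (const_int 4*N)))
              (set (reg r)    (mem (reg base)))
              (set (reg r+1)  (mem (plus (reg base) (const_int 4))))
              ...
              (clobber (reg base))]) */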
1314 load_multiple_operation (op, mode)
1315 rtx op;
1316 enum machine_mode mode;
1318 HOST_WIDE_INT count = XVECLEN (op, 0);
1319 int dest_regno;
1320 rtx src_addr;
1321 HOST_WIDE_INT i = 1, base = 0;
1322 rtx elt;
1324 if (count <= 1
1325 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
1326 return 0;
1328 /* Check to see if this might be a write-back */
1329 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
1331 i++;
1332 base = 1;
1334 /* Now check it more carefully */
1335 if (GET_CODE (SET_DEST (elt)) != REG
1336 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
1337 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
1338 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
1339 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
1340 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
1341 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
1342 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
1343 != REGNO (SET_DEST (elt)))
1344 return 0;
1346 count--;
1349 /* Perform a quick check so we don't blow up below. */
1350 if (count <= i
1351 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
1352 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
1353 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
1354 return 0;
1356 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
1357 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
1359 for (; i < count; i++)
1361 rtx elt = XVECEXP (op, 0, i);
1363 if (GET_CODE (elt) != SET
1364 || GET_CODE (SET_DEST (elt)) != REG
1365 || GET_MODE (SET_DEST (elt)) != SImode
1366 || REGNO (SET_DEST (elt)) != dest_regno + i - base
1367 || GET_CODE (SET_SRC (elt)) != MEM
1368 || GET_MODE (SET_SRC (elt)) != SImode
1369 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1370 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1371 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1372 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
1373 return 0;
1376 return 1;
1379 /* Return 1 if OP is a store multiple operation. It is known to be
1380 parallel and the first section will be tested. */
1383 store_multiple_operation (op, mode)
1384 rtx op;
1385 enum machine_mode mode;
1387 HOST_WIDE_INT count = XVECLEN (op, 0);
1388 int src_regno;
1389 rtx dest_addr;
1390 HOST_WIDE_INT i = 1, base = 0;
1391 rtx elt;
1393 if (count <= 1
1394 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
1395 return 0;
1397 /* Check to see if this might be a write-back */
1398 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
1400 i++;
1401 base = 1;
1403 /* Now check it more carefully */
1404 if (GET_CODE (SET_DEST (elt)) != REG
1405 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
1406 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
1407 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
1408 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
1409 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
1410 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
1411 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
1412 != REGNO (SET_DEST (elt)))
1413 return 0;
1415 count--;
1418 /* Perform a quick check so we don't blow up below. */
1419 if (count <= i
1420 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
1421 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
1422 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
1423 return 0;
1425 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
1426 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
1428 for (; i < count; i++)
1430 elt = XVECEXP (op, 0, i);
1432 if (GET_CODE (elt) != SET
1433 || GET_CODE (SET_SRC (elt)) != REG
1434 || GET_MODE (SET_SRC (elt)) != SImode
1435 || REGNO (SET_SRC (elt)) != src_regno + i - base
1436 || GET_CODE (SET_DEST (elt)) != MEM
1437 || GET_MODE (SET_DEST (elt)) != SImode
1438 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
1439 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
1440 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
1441 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
1442 return 0;
1445 return 1;
1449 multi_register_push (op, mode)
1450 rtx op;
1451 enum machine_mode mode;
1453 if (GET_CODE (op) != PARALLEL
1454 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
1455 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
1456 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
1457 return 0;
1459 return 1;
1463 /* Routines for use with attributes */
1466 const_pool_offset (symbol)
1467 rtx symbol;
1469 return get_pool_offset (symbol) - get_pool_size () - get_prologue_size ();
1472 /* Routines for use in generating RTL */
1475 arm_gen_load_multiple (base_regno, count, from, up, write_back)
1476 int base_regno;
1477 int count;
1478 rtx from;
1479 int up;
1480 int write_back;
1482 int i = 0, j;
1483 rtx result;
1484 int sign = up ? 1 : -1;
1486 result = gen_rtx (PARALLEL, VOIDmode,
1487 rtvec_alloc (count + (write_back ? 2 : 0)));
1488 if (write_back)
1490 XVECEXP (result, 0, 0)
1491 = gen_rtx (SET, GET_MODE (from), from,
1492 plus_constant (from, count * 4 * sign));
1493 i = 1;
1494 count++;
1497 for (j = 0; i < count; i++, j++)
1499 XVECEXP (result, 0, i)
1500 = gen_rtx (SET, VOIDmode, gen_rtx (REG, SImode, base_regno + j),
1501 gen_rtx (MEM, SImode,
1502 plus_constant (from, j * 4 * sign)));
1505 if (write_back)
1506 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);
1508 return result;
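/* For example, arm_gen_load_multiple (0, 4, from, TRUE, TRUE) builds the
   PARALLEL for an "ldmia from!, {r0-r3}"-style load of four registers
   with write-back. */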
1512 arm_gen_store_multiple (base_regno, count, to, up, write_back)
1513 int base_regno;
1514 int count;
1515 rtx to;
1516 int up;
1517 int write_back;
1519 int i = 0, j;
1520 rtx result;
1521 int sign = up ? 1 : -1;
1523 result = gen_rtx (PARALLEL, VOIDmode,
1524 rtvec_alloc (count + (write_back ? 2 : 0)));
1525 if (write_back)
1527 XVECEXP (result, 0, 0)
1528 = gen_rtx (SET, GET_MODE (to), to,
1529 plus_constant (to, count * 4 * sign));
1530 i = 1;
1531 count++;
1534 for (j = 0; i < count; i++, j++)
1536 XVECEXP (result, 0, i)
1537 = gen_rtx (SET, VOIDmode,
1538 gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign)),
1539 gen_rtx (REG, SImode, base_regno + j));
1542 if (write_back)
1543 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);
1545 return result;
1549 arm_gen_movstrqi (operands)
1550 rtx *operands;
1552 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
1553 int i, r;
1554 rtx const_sxteen = gen_rtx (CONST_INT, SImode, 16);
1555 rtx src, dst;
1556 rtx st_src, st_dst, end_src, end_dst, fin_src, fin_dst;
1557 rtx part_bytes_reg = NULL;
1558 extern int optimize;
1560 if (GET_CODE (operands[2]) != CONST_INT
1561 || GET_CODE (operands[3]) != CONST_INT
1562 || INTVAL (operands[2]) > 64
1563 || INTVAL (operands[3]) & 3)
1564 return 0;
1566 st_dst = XEXP (operands[0], 0);
1567 st_src = XEXP (operands[1], 0);
1568 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
1569 fin_src = src = copy_to_mode_reg (SImode, st_src);
1571 in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
1572 out_words_to_go = INTVAL (operands[2]) / 4;
1573 last_bytes = INTVAL (operands[2]) & 3;
1575 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
1576 part_bytes_reg = gen_rtx (REG, SImode, (in_words_to_go - 1) & 3);
1578 for (i = 0; in_words_to_go >= 2; i+=4)
1580 emit_insn (arm_gen_load_multiple (0, (in_words_to_go > 4
1581 ? 4 : in_words_to_go),
1582 src, TRUE, TRUE));
1583 if (out_words_to_go)
1585 if (out_words_to_go != 1)
1586 emit_insn (arm_gen_store_multiple (0, (out_words_to_go > 4
1587 ? 4 : out_words_to_go),
1588 dst, TRUE, TRUE));
1589 else
1591 emit_move_insn (gen_rtx (MEM, SImode, dst),
1592 gen_rtx (REG, SImode, 0));
1593 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
1597 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
1598 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
1601 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
1602 if (out_words_to_go)
1604 rtx sreg;
1606 emit_move_insn (sreg = gen_reg_rtx (SImode), gen_rtx (MEM, SImode, src));
1607 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
1608 emit_move_insn (gen_rtx (MEM, SImode, dst), sreg);
1609 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
1610 in_words_to_go--;
1612 if (in_words_to_go) /* Sanity check */
1613 abort ();
1616 if (in_words_to_go)
1618 if (in_words_to_go < 0)
1619 abort ();
1621 part_bytes_reg = copy_to_mode_reg (SImode, gen_rtx (MEM, SImode, src));
1622 emit_insn (gen_addsi3 (src, src, GEN_INT (4)));
1625 if (BYTES_BIG_ENDIAN && last_bytes)
1627 rtx tmp = gen_reg_rtx (SImode);
1629 if (part_bytes_reg == NULL)
1630 abort ();
1632 /* The bytes we want are in the top end of the word */
1633 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
1634 GEN_INT (8 * (4 - last_bytes))));
1635 part_bytes_reg = tmp;
1637 while (last_bytes)
1639 emit_move_insn (gen_rtx (MEM, QImode,
1640 plus_constant (dst, last_bytes - 1)),
1641 gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
1642 if (--last_bytes)
1644 tmp = gen_reg_rtx (SImode);
1645 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
1646 part_bytes_reg = tmp;
1651 else
1653 while (last_bytes)
1655 if (part_bytes_reg == NULL)
1656 abort ();
1658 emit_move_insn (gen_rtx (MEM, QImode, dst),
1659 gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
1660 emit_insn (gen_addsi3 (dst, dst, const1_rtx));
1661 if (--last_bytes)
1663 rtx tmp = gen_reg_rtx (SImode);
1664 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
1665 part_bytes_reg = tmp;
1670 return 1;
1673 /* X and Y are two things to compare using CODE. Emit the compare insn and
1674 return the rtx for register 0 in the proper mode. FP means this is a
1675 floating point compare: I don't think that it is needed on the arm. */
1678 gen_compare_reg (code, x, y, fp)
1679 enum rtx_code code;
1680 rtx x, y;
1682 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1683 rtx cc_reg = gen_rtx (REG, mode, 24);
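/* Hard register 24 is the condition-code register in this port (the
   same number tested in cc_register above). */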
1685 emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
1686 gen_rtx (COMPARE, mode, x, y)));
1688 return cc_reg;
1691 void
1692 arm_reload_out_hi (operands)
1693 rtx *operands;
1695 rtx base = find_replacement (&XEXP (operands[0], 0));
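/* There is no halfword store before ARMv4 (no strh), so an HImode store
   is synthesized from two byte stores, using a shift to extract the
   high byte. */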
1697 if (BYTES_BIG_ENDIAN)
1699 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
1700 gen_rtx (SUBREG, QImode, operands[1], 0)));
1701 emit_insn (gen_lshrsi3 (operands[2],
1702 gen_rtx (SUBREG, SImode, operands[1], 0),
1703 GEN_INT (8)));
1704 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
1705 gen_rtx (SUBREG, QImode, operands[2], 0)));
1707 else
1709 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
1710 gen_rtx (SUBREG, QImode, operands[1], 0)));
1711 emit_insn (gen_lshrsi3 (operands[2],
1712 gen_rtx (SUBREG, SImode, operands[1], 0),
1713 GEN_INT (8)));
1714 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
1715 gen_rtx (SUBREG, QImode, operands[2], 0)));
1719 /* Check to see if a branch is forwards or backwards. Return TRUE if it
1720 is backwards. */
1723 arm_backwards_branch (from, to)
1724 int from, to;
1726 return insn_addresses[to] <= insn_addresses[from];
1729 /* Check to see if a branch is within the distance that can be done using
1730 an arithmetic expression. */
1732 short_branch (from, to)
1733 int from, to;
1735 int delta = insn_addresses[from] + 8 - insn_addresses[to];
1737 return abs (delta) < 980; /* A small margin for safety */
1740 /* Check to see that the insn isn't the target of the conditionalizing
1741 code */
1743 arm_insn_not_targeted (insn)
1744 rtx insn;
1746 return insn != arm_target_insn;
1750 /* Routines to output assembly language. */
1752 /* If the rtx is the correct value then return the string of the number.
1753 In this way we can ensure that valid double constants are generated even
1754 when cross compiling. */
1755 char *
1756 fp_immediate_constant (x)
1757 rtx x;
1759 REAL_VALUE_TYPE r;
1760 int i;
1762 if (!fpa_consts_inited)
1763 init_fpa_table ();
1765 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1766 for (i = 0; i < 8; i++)
1767 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1768 return strings_fpa[i];
1770 abort ();
1773 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
1774 static char *
1775 fp_const_from_val (r)
1776 REAL_VALUE_TYPE *r;
1778 int i;
1780 if (! fpa_consts_inited)
1781 init_fpa_table ();
1783 for (i = 0; i < 8; i++)
1784 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
1785 return strings_fpa[i];
1787 abort ();
1790 /* Output the operands of a LDM/STM instruction to STREAM.
1791 MASK is the ARM register set mask of which only bits 0-15 are important.
 1792 INSTR is the possibly suffixed base register. HAT is nonzero if a hat
1793 must follow the register list. */
1795 void
1796 print_multi_reg (stream, instr, mask, hat)
1797 FILE *stream;
1798 char *instr;
1799 int mask, hat;
1801 int i;
1802 int not_first = FALSE;
1804 fputc ('\t', stream);
1805 fprintf (stream, instr, ARM_REG_PREFIX);
1806 fputs (", {", stream);
1807 for (i = 0; i < 16; i++)
1808 if (mask & (1 << i))
1810 if (not_first)
1811 fprintf (stream, ", ");
1812 fprintf (stream, "%s%s", ARM_REG_PREFIX, reg_names[i]);
1813 not_first = TRUE;
1816 fprintf (stream, "}%s\n", hat ? "^" : "");
1819 /* Output a 'call' insn. */
1821 char *
1822 output_call (operands)
1823 rtx *operands;
1825 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
1827 if (REGNO (operands[0]) == 14)
1829 operands[0] = gen_rtx (REG, SImode, 12);
1830 output_asm_insn ("mov%?\t%0, %|lr", operands);
1832 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
1833 output_asm_insn ("mov%?\t%|pc, %0", operands);
1834 return "";
1837 static int
1838 eliminate_lr2ip (x)
1839 rtx *x;
1841 int something_changed = 0;
1842 rtx x0 = *x;
1843 int code = GET_CODE (x0);
1844 register int i, j;
1845 register char *fmt;
1847 switch (code)
1849 case REG:
1850 if (REGNO (x0) == 14)
1852 *x = gen_rtx (REG, SImode, 12);
1853 return 1;
1855 return 0;
1856 default:
1857 /* Scan through the sub-elements and change any references there */
1858 fmt = GET_RTX_FORMAT (code);
1859 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1860 if (fmt[i] == 'e')
1861 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
1862 else if (fmt[i] == 'E')
1863 for (j = 0; j < XVECLEN (x0, i); j++)
1864 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
1865 return something_changed;
1869 /* Output a 'call' insn that is a reference in memory. */
1871 char *
1872 output_call_mem (operands)
1873 rtx *operands;
1875 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
 1876 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
1878 if (eliminate_lr2ip (&operands[0]))
1879 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
1881 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
1882 output_asm_insn ("ldr%?\t%|pc, %0", operands);
1883 return "";
 1887 /* Output a move from arm registers to an fpu register.
 1888 OPERANDS[0] is an fpu register.
 1889 OPERANDS[1] is the first register of an arm register pair. */
1891 char *
1892 output_mov_long_double_fpu_from_arm (operands)
1893 rtx *operands;
1895 int arm_reg0 = REGNO (operands[1]);
1896 rtx ops[3];
1898 if (arm_reg0 == 12)
1899 abort();
1901 ops[0] = gen_rtx (REG, SImode, arm_reg0);
1902 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1903 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
1905 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
1906 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
1907 return "";
 1910 /* Output a move from an fpu register to arm registers.
 1911 OPERANDS[0] is the first register of an arm register pair.
 1912 OPERANDS[1] is an fpu register. */
1914 char *
1915 output_mov_long_double_arm_from_fpu (operands)
1916 rtx *operands;
1918 int arm_reg0 = REGNO (operands[0]);
1919 rtx ops[3];
1921 if (arm_reg0 == 12)
1922 abort();
1924 ops[0] = gen_rtx (REG, SImode, arm_reg0);
1925 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1926 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
1928 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
1929 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
1930 return "";
1933 /* Output a move from arm registers to arm registers of a long double
1934 OPERANDS[0] is the destination.
1935 OPERANDS[1] is the source. */
1936 char *
1937 output_mov_long_double_arm_from_arm (operands)
1938 rtx *operands;
1940 /* We have to be careful here because the two might overlap */
1941 int dest_start = REGNO (operands[0]);
1942 int src_start = REGNO (operands[1]);
1943 rtx ops[2];
1944 int i;
1946 if (dest_start < src_start)
1948 for (i = 0; i < 3; i++)
1950 ops[0] = gen_rtx (REG, SImode, dest_start + i);
1951 ops[1] = gen_rtx (REG, SImode, src_start + i);
1952 output_asm_insn ("mov%?\t%0, %1", ops);
1955 else
1957 for (i = 2; i >= 0; i--)
1959 ops[0] = gen_rtx (REG, SImode, dest_start + i);
1960 ops[1] = gen_rtx (REG, SImode, src_start + i);
1961 output_asm_insn ("mov%?\t%0, %1", ops);
1965 return "";
 1969 /* Output a move from arm registers to an fpu register.
 1970 OPERANDS[0] is an fpu register.
 1971 OPERANDS[1] is the first register of an arm register pair. */
1973 char *
1974 output_mov_double_fpu_from_arm (operands)
1975 rtx *operands;
1977 int arm_reg0 = REGNO (operands[1]);
1978 rtx ops[2];
1980 if (arm_reg0 == 12)
1981 abort();
1982 ops[0] = gen_rtx (REG, SImode, arm_reg0);
1983 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1984 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
1985 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
1986 return "";
 1989 /* Output a move from an fpu register to arm registers.
 1990 OPERANDS[0] is the first register of an arm register pair.
 1991 OPERANDS[1] is an fpu register. */
1993 char *
1994 output_mov_double_arm_from_fpu (operands)
1995 rtx *operands;
1997 int arm_reg0 = REGNO (operands[0]);
1998 rtx ops[2];
2000 if (arm_reg0 == 12)
2001 abort();
2003 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2004 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
2005 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
2006 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
2007 return "";
2010 /* Output a move between double words.
2011 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
2012 or MEM<-REG and all MEMs must be offsettable addresses. */
2014 char *
2015 output_move_double (operands)
2016 rtx *operands;
2018 enum rtx_code code0 = GET_CODE (operands[0]);
2019 enum rtx_code code1 = GET_CODE (operands[1]);
2020 rtx otherops[2];
2022 if (code0 == REG)
2024 int reg0 = REGNO (operands[0]);
2026 otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
2027 if (code1 == REG)
2029 int reg1 = REGNO (operands[1]);
2030 if (reg1 == 12)
2031 abort();
2033 otherops[1] = gen_rtx (REG, SImode, 1 + reg1);
2035 /* Ensure the second source is not overwritten */
2036 if (reg0 == 1 + reg1)
2038 output_asm_insn("mov%?\t%0, %1", otherops);
2039 output_asm_insn("mov%?\t%0, %1", operands);
2041 else
2043 output_asm_insn("mov%?\t%0, %1", operands);
2044 output_asm_insn("mov%?\t%0, %1", otherops);
2047 else if (code1 == CONST_DOUBLE)
2049 otherops[1] = gen_rtx (CONST_INT, VOIDmode,
2050 CONST_DOUBLE_HIGH (operands[1]));
2051 operands[1] = gen_rtx (CONST_INT, VOIDmode,
2052 CONST_DOUBLE_LOW (operands[1]));
2053 output_mov_immediate (operands, FALSE, "");
2054 output_mov_immediate (otherops, FALSE, "");
2056 else if (code1 == CONST_INT)
2058 otherops[1] = const0_rtx;
2059 /* sign extend the intval into the high-order word */
2060 /* Note: output_mov_immediate may clobber operands[1], so we
2061 put this out first */
2062 if (INTVAL (operands[1]) < 0)
2063 output_asm_insn ("mvn%?\t%0, %1", otherops);
2064 else
2065 output_asm_insn ("mov%?\t%0, %1", otherops);
2066 output_mov_immediate (operands, FALSE, "");
2068 else if (code1 == MEM)
2070 switch (GET_CODE (XEXP (operands[1], 0)))
2072 case REG:
 2073 /* Handle the simple case where the address is [r, #0] more
 2074 efficiently. */
2075 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
2076 break;
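/* A PRE_INC (or PRE_DEC) address here steps the base register by the
   size of the access (8 bytes for a double word) before use, so emit
   the add/sub explicitly and then a plain ldmia from the updated
   base. */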
2077 case PRE_INC:
2078 output_asm_insn ("add%?\t%m1, %m1, #8", operands);
2079 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
2080 break;
2081 case PRE_DEC:
2082 output_asm_insn ("sub%?\t%m1, %m1, #8", operands);
2083 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
2084 break;
2085 case POST_INC:
2086 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
2087 break;
2088 case POST_DEC:
2089 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
2090 output_asm_insn ("sub%?\t%m1, %m1, #8", operands);
2091 break;
2092 default:
2093 otherops[1] = adj_offsettable_operand (operands[1], 4);
2094 /* Take care of overlapping base/data reg. */
2095 if (reg_mentioned_p (operands[0], operands[1]))
2097 output_asm_insn ("ldr%?\t%0, %1", otherops);
2098 output_asm_insn ("ldr%?\t%0, %1", operands);
2100 else
2102 output_asm_insn ("ldr%?\t%0, %1", operands);
2103 output_asm_insn ("ldr%?\t%0, %1", otherops);
2107 else abort(); /* Constraints should prevent this */
2109 else if (code0 == MEM && code1 == REG)
2111 if (REGNO (operands[1]) == 12)
2112 abort();
2113 switch (GET_CODE (XEXP (operands[0], 0)))
2115 case REG:
2116 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
2117 break;
2118 case PRE_INC:
2119 output_asm_insn ("add%?\t%m0, %m0, #8", operands);
2120 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
2121 break;
2122 case PRE_DEC:
2123 output_asm_insn ("sub%?\t%m0, %m0, #8", operands);
2124 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
2125 break;
2126 case POST_INC:
2127 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
2128 break;
2129 case POST_DEC:
2130 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
2131 output_asm_insn ("sub%?\t%m0, %m0, #8", operands);
2132 break;
2133 default:
2134 otherops[0] = adj_offsettable_operand (operands[0], 4);
2135 otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
2136 output_asm_insn ("str%?\t%1, %0", operands);
2137 output_asm_insn ("str%?\t%1, %0", otherops);
2140 else abort(); /* Constraints should prevent this */
2142 return "";
2146 /* Output an arbitrary MOV reg, #n.
2147 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
2149 char *
2150 output_mov_immediate (operands)
2151 rtx *operands;
2153 HOST_WIDE_INT n = INTVAL (operands[1]);
2154 int n_ones = 0;
2155 int i;
2157 /* Try to use one MOV */
2158 if (const_ok_for_arm (n))
2160 output_asm_insn ("mov%?\t%0, %1", operands);
2161 return "";
2164 /* Try to use one MVN */
2165 if (const_ok_for_arm (~n))
2167 operands[1] = GEN_INT (~n);
2168 output_asm_insn ("mvn%?\t%0, %1", operands);
2169 return "";
2172 /* If all else fails, make it out of ORRs or BICs as appropriate. */
2174 for (i=0; i < 32; i++)
2175 if (n & 1 << i)
2176 n_ones++;
2178 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
2179 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
2180 ~n);
2181 else
2182 output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
2185 return "";
2189 /* Output an ADD r, s, #n where n may be too big for one instruction. If
2190 adding zero to one register, output nothing. */
2192 char *
2193 output_add_immediate (operands)
2194 rtx *operands;
2196 HOST_WIDE_INT n = INTVAL (operands[2]);
2198 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
2200 if (n < 0)
2201 output_multi_immediate (operands,
2202 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
2203 -n);
2204 else
2205 output_multi_immediate (operands,
2206 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
2210 return "";
2213 /* Output a multiple immediate operation.
2214 OPERANDS is the vector of operands referred to in the output patterns.
2215 INSTR1 is the output pattern to use for the first constant.
2216 INSTR2 is the output pattern to use for subsequent constants.
2217 IMMED_OP is the index of the constant slot in OPERANDS.
2218 N is the constant value. */
2220 char *
2221 output_multi_immediate (operands, instr1, instr2, immed_op, n)
2222 rtx *operands;
2223 char *instr1, *instr2;
2224 int immed_op;
2225 HOST_WIDE_INT n;
2227 #if HOST_BITS_PER_WIDE_INT > 32
2228 n &= 0xffffffff;
2229 #endif
2231 if (n == 0)
2233 operands[immed_op] = const0_rtx;
2234 output_asm_insn (instr1, operands); /* Quick and easy output */
2236 else
2238 int i;
2239 char *instr = instr1;
2241 /* Note that n is never zero here (which would give no output) */
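/* Scan in steps of 2 bits so every chunk emitted is an 8-bit field
   aligned on a 2-bit boundary, i.e. a valid rotated immediate; the
   i += 6 below, together with the loop's own i += 2, skips past the
   byte just emitted. */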
2242 for (i = 0; i < 32; i += 2)
2244 if (n & (3 << i))
2246 operands[immed_op] = GEN_INT (n & (255 << i));
2247 output_asm_insn (instr, operands);
2248 instr = instr2;
2249 i += 6;
2253 return "";
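/* A minimal standalone sketch (illustrative only, not used by the
   compiler) of the splitting performed by the loop above: the constant
   is peeled into 8-bit chunks starting on even bit positions, which is
   what the ARM immediate encoding can represent.  */
#if 0
static int
split_immediate_example (n, parts)
     unsigned long n;
     unsigned long *parts;
{
  int i, count = 0;

  for (i = 0; i < 32; i += 2)
    if (n & (3UL << i))
      {
	parts[count++] = n & (255UL << i);
	i += 6;			/* Skip past the rest of this chunk.  */
      }

  return count;
}

/* For N == 0x1fe this yields 0xfe and 0x100, so output_mov_immediate
   would emit "mov %0, #254" followed by "orr %0, %0, #256".  */
#endif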
2257 /* Return the appropriate ARM instruction for the operation code.
2258 The returned result should not be overwritten. OP is the rtx of the
2259 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
2260 was shifted. */
2262 char *
2263 arithmetic_instr (op, shift_first_arg)
2264 rtx op;
2265 int shift_first_arg;
2267 switch (GET_CODE (op))
2269 case PLUS:
2270 return "add";
2272 case MINUS:
2273 return shift_first_arg ? "rsb" : "sub";
2275 case IOR:
2276 return "orr";
2278 case XOR:
2279 return "eor";
2281 case AND:
2282 return "and";
2284 default:
2285 abort ();
2290 /* Ensure valid constant shifts and return the appropriate shift mnemonic
2291 for the operation code. The returned result should not be overwritten.
2292 OP is the rtx code of the shift.
2293 On exit, *AMOUNTP will be -1 if the shift is by a register, otherwise it
2294 will be the constant shift amount. */
2296 static char *
2297 shift_op (op, amountp)
2298 rtx op;
2299 HOST_WIDE_INT *amountp;
2301 char *mnem;
2302 enum rtx_code code = GET_CODE (op);
2304 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
2305 *amountp = -1;
2306 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
2307 *amountp = INTVAL (XEXP (op, 1));
2308 else
2309 abort ();
2311 switch (code)
2313 case ASHIFT:
2314 mnem = "asl";
2315 break;
2317 case ASHIFTRT:
2318 mnem = "asr";
2319 break;
2321 case LSHIFTRT:
2322 mnem = "lsr";
2323 break;
2325 case ROTATERT:
2326 mnem = "ror";
2327 break;
2329 case MULT:
2330 /* We never have to worry about the amount being other than a
2331 power of 2, since this case can never be reloaded from a reg. */
2332 if (*amountp != -1)
2333 *amountp = int_log2 (*amountp);
2334 else
2335 abort ();
2336 return "asl";
2338 default:
2339 abort ();
2342 if (*amountp != -1)
2344 /* This is not 100% correct, but follows from the desire to merge
2345 multiplication by a power of 2 with the recognizer for a
2346 shift. >=32 is not a valid shift for "asl", so we must try and
2347 output a shift that produces the correct arithmetical result.
2348 Using lsr #32 is identical except for the fact that the carry bit
2349 is not set correctly if we set the flags; but we never use the
2350 carry bit from such an operation, so we can ignore that. */
2351 if (code == ROTATERT)
2352 *amountp &= 31; /* Rotate is just modulo 32 */
2353 else if (*amountp != (*amountp & 31))
2355 if (code == ASHIFT)
2356 mnem = "lsr";
2357 *amountp = 32;
2360 /* Shifts of 0 are no-ops. */
2361 if (*amountp == 0)
2362 return NULL;
2365 return mnem;
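/* Illustrative results (sketch only) of the mapping and fix-ups above:

	(ashift X (const_int 34))	-> "lsr", *AMOUNTP == 32
	(rotatert X (const_int 33))	-> "ror", *AMOUNTP == 1
	(mult X (const_int 8))		-> "asl", *AMOUNTP == 3

   and a shift by (const_int 0) returns NULL, so no shift is printed.  */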
2369 /* Obtain the shift count from POWER, which must be a power of two. */
2371 HOST_WIDE_INT
2372 int_log2 (power)
2373 HOST_WIDE_INT power;
2375 HOST_WIDE_INT shift = 0;
2377 while (((1 << shift) & power) == 0)
2379 if (shift > 31)
2380 abort ();
2381 shift++;
2384 return shift;
2387 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
2388 /bin/as is horribly restrictive. */
2390 void
2391 output_ascii_pseudo_op (stream, p, len)
2392 FILE *stream;
2393 unsigned char *p;
2394 int len;
2396 int i;
2397 int len_so_far = 1000; /* Force a new .ascii directive for the first char. */
2398 int chars_so_far = 0;
2400 for (i = 0; i < len; i++)
2402 register int c = p[i];
2404 if (len_so_far > 50)
2406 if (chars_so_far)
2407 fputs ("\"\n", stream);
2408 fputs ("\t.ascii\t\"", stream);
2409 len_so_far = 0;
2410 arm_increase_location (chars_so_far);
2411 chars_so_far = 0;
2414 if (c == '\"' || c == '\\')
2416 putc('\\', stream);
2417 len_so_far++;
2420 if (c >= ' ' && c < 0177)
2422 putc (c, stream);
2423 len_so_far++;
2425 else
2427 fprintf (stream, "\\%03o", c);
2428 len_so_far +=4;
2431 chars_so_far++;
2434 fputs ("\"\n", stream);
2435 arm_increase_location (chars_so_far);
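/* For example (illustrative only), a long string comes out as several
   directives, because a fresh one is started once more than 50
   characters of output have been emitted:

	.ascii	"...roughly the first fifty characters..."
	.ascii	"...the remainder..."

   Quotes and backslashes are escaped, and non-printing characters are
   emitted as octal escapes such as \012.  */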
2439 /* Try to determine whether a pattern really clobbers the link register.
2440 This information is useful when peepholing, so that lr need not be pushed
2441 if we combine a call followed by a return.
2442 NOTE: This code does not check for side-effect expressions in a SET_SRC:
2443 such a check should not be needed because these only update an existing
2444 value within a register; the register must still be set elsewhere within
2445 the function. */
2447 static int
2448 pattern_really_clobbers_lr (x)
2449 rtx x;
2451 int i;
2453 switch (GET_CODE (x))
2455 case SET:
2456 switch (GET_CODE (SET_DEST (x)))
2458 case REG:
2459 return REGNO (SET_DEST (x)) == 14;
2461 case SUBREG:
2462 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
2463 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
2465 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
2466 return 0;
2467 abort ();
2469 default:
2470 return 0;
2473 case PARALLEL:
2474 for (i = 0; i < XVECLEN (x, 0); i++)
2475 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
2476 return 1;
2477 return 0;
2479 case CLOBBER:
2480 switch (GET_CODE (XEXP (x, 0)))
2482 case REG:
2483 return REGNO (XEXP (x, 0)) == 14;
2485 case SUBREG:
2486 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
2487 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
2488 abort ();
2490 default:
2491 return 0;
2494 case UNSPEC:
2495 return 1;
2497 default:
2498 return 0;
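/* Examples (illustrative only) of the classification above:

	(clobber (reg:SI 14))		-> 1
	(set (reg:SI 14) (reg:SI 0))	-> 1
	(set (reg:SI 0) (reg:SI 14))	-> 0

   and an UNSPEC is conservatively assumed to clobber lr.  */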
2502 static int
2503 function_really_clobbers_lr (first)
2504 rtx first;
2506 rtx insn, next;
2508 for (insn = first; insn; insn = next_nonnote_insn (insn))
2510 switch (GET_CODE (insn))
2512 case BARRIER:
2513 case NOTE:
2514 case CODE_LABEL:
2515 case JUMP_INSN: /* Jump insns only change the PC (and conds) */
2516 case INLINE_HEADER:
2517 break;
2519 case INSN:
2520 if (pattern_really_clobbers_lr (PATTERN (insn)))
2521 return 1;
2522 break;
2524 case CALL_INSN:
2525 /* We don't yet know how to handle those calls that are not to a
2526 SYMBOL_REF. */
2527 if (GET_CODE (PATTERN (insn)) != PARALLEL)
2528 abort ();
2530 switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
2532 case CALL:
2533 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
2534 != SYMBOL_REF)
2535 return 1;
2536 break;
2538 case SET:
2539 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
2540 0, 0)), 0), 0))
2541 != SYMBOL_REF)
2542 return 1;
2543 break;
2545 default: /* Don't recognize it, be safe */
2546 return 1;
2549 /* A call can be made (by peepholing) not to clobber lr iff it is
2550 followed by a return. There may, however, be a use insn if
2551 we are returning the result of the call.
2552 If we run off the end of the insn chain, then that means the
2553 call was at the end of the function. Unfortunately we don't
2554 have a return insn for the peephole to recognize, so we
2555 must reject this. (Can this be fixed by adding our own insn?) */
2556 if ((next = next_nonnote_insn (insn)) == NULL)
2557 return 1;
2559 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
2560 && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
2561 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
2562 == REGNO (XEXP (PATTERN (next), 0))))
2563 if ((next = next_nonnote_insn (next)) == NULL)
2564 return 1;
2566 if (GET_CODE (next) == JUMP_INSN
2567 && GET_CODE (PATTERN (next)) == RETURN)
2568 break;
2569 return 1;
2571 default:
2572 abort ();
2576 /* We have reached the end of the chain so lr was _not_ clobbered */
2577 return 0;
2580 char *
2581 output_return_instruction (operand, really_return)
2582 rtx operand;
2583 int really_return;
2585 char instr[100];
2586 int reg, live_regs = 0;
2587 int volatile_func = (optimize > 0
2588 && TREE_THIS_VOLATILE (current_function_decl));
2590 return_used_this_function = 1;
2592 if (volatile_func)
2594 rtx ops[2];
2595 /* If this function was declared non-returning, and we have found a tail
2596 call, then we have to trust that the called function won't return. */
2597 if (! really_return)
2598 return "";
2600 /* Otherwise, trap an attempted return by aborting. */
2601 ops[0] = operand;
2602 ops[1] = gen_rtx (SYMBOL_REF, Pmode, "abort");
2603 output_asm_insn ("bl%d0\t%a1", ops);
2604 return "";
2607 if (current_function_calls_alloca && ! really_return)
2608 abort();
2610 for (reg = 0; reg <= 10; reg++)
2611 if (regs_ever_live[reg] && ! call_used_regs[reg])
2612 live_regs++;
2614 if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
2615 live_regs++;
2617 if (frame_pointer_needed)
2618 live_regs += 4;
2620 if (live_regs)
2622 if (lr_save_eliminated || ! regs_ever_live[14])
2623 live_regs++;
2625 if (frame_pointer_needed)
2626 strcpy (instr, "ldm%?%d0ea\t%|fp, {");
2627 else
2628 strcpy (instr, "ldm%?%d0fd\t%|sp!, {");
2630 for (reg = 0; reg <= 10; reg++)
2631 if (regs_ever_live[reg] && ! call_used_regs[reg])
2633 strcat (instr, "%|");
2634 strcat (instr, reg_names[reg]);
2635 if (--live_regs)
2636 strcat (instr, ", ");
2639 if (frame_pointer_needed)
2641 strcat (instr, "%|");
2642 strcat (instr, reg_names[11]);
2643 strcat (instr, ", ");
2644 strcat (instr, "%|");
2645 strcat (instr, reg_names[13]);
2646 strcat (instr, ", ");
2647 strcat (instr, "%|");
2648 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
2650 else
2652 strcat (instr, "%|");
2653 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
2655 strcat (instr, (TARGET_6 || !really_return) ? "}" : "}^");
2656 output_asm_insn (instr, &operand);
2658 else if (really_return)
2660 strcpy (instr,
2661 TARGET_6 ? "mov%?%d0\t%|pc, lr" : "mov%?%d0s\t%|pc, %|lr");
2662 output_asm_insn (instr, &operand);
2665 return "";
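/* Typical sequences emitted above (illustrative only): a function that
   saved r4 and uses a frame pointer returns with something like

	ldmea	fp, {r4, fp, sp, pc}^

   while a frameless function that pushed only lr returns with

	ldmfd	sp!, {pc}^

   The trailing `^' restores the PSR bits; it is omitted on TARGET_6,
   which also uses "mov pc, lr" rather than "movs" for the simple
   case.  */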
2669 arm_volatile_func ()
2671 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
2674 /* Return the size of the prologue. It's not too bad if we slightly
2675 over-estimate. */
2677 static int
2678 get_prologue_size ()
2680 return profile_flag ? 12 : 0;
2683 /* The amount of stack adjustment that happens here, in output_return and in
2684 output_epilogue must be exactly the same as was calculated during reload,
2685 or things will point to the wrong place. The only time we can safely
2686 ignore this constraint is when a function has no arguments on the stack,
2687 no stack frame requirement and no live registers except for `lr'. If we
2688 can guarantee that by making all function calls into tail calls and that
2689 lr is not clobbered in any other way, then there is no need to push lr
2690 onto the stack. */
2692 void
2693 output_func_prologue (f, frame_size)
2694 FILE *f;
2695 int frame_size;
2697 int reg, live_regs_mask = 0;
2698 rtx operands[3];
2699 int volatile_func = (optimize > 0
2700 && TREE_THIS_VOLATILE (current_function_decl));
2702 /* Nonzero if we must stuff some register arguments onto the stack as if
2703 they were passed there. */
2704 int store_arg_regs = 0;
2706 if (arm_ccfsm_state || arm_target_insn)
2707 abort (); /* Sanity check */
2709 return_used_this_function = 0;
2710 lr_save_eliminated = 0;
2712 fprintf (f, "\t%c args = %d, pretend = %d, frame = %d\n",
2713 ARM_COMMENT_CHAR, current_function_args_size,
2714 current_function_pretend_args_size, frame_size);
2715 fprintf (f, "\t%c frame_needed = %d, current_function_anonymous_args = %d\n",
2716 ARM_COMMENT_CHAR, frame_pointer_needed,
2717 current_function_anonymous_args);
2719 if (volatile_func)
2720 fprintf (f, "\t%c Volatile function.\n", ARM_COMMENT_CHAR);
2722 if (current_function_anonymous_args && current_function_pretend_args_size)
2723 store_arg_regs = 1;
2725 for (reg = 0; reg <= 10; reg++)
2726 if (regs_ever_live[reg] && ! call_used_regs[reg])
2727 live_regs_mask |= (1 << reg);
2729 if (frame_pointer_needed)
2730 live_regs_mask |= 0xD800;
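/* 0xD800 covers fp (r11), ip (r12), lr (r14) and pc (r15).  */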
2731 else if (regs_ever_live[14])
2733 if (! current_function_args_size
2734 && ! function_really_clobbers_lr (get_insns ()))
2735 lr_save_eliminated = 1;
2736 else
2737 live_regs_mask |= 0x4000;
2740 if (live_regs_mask)
2742 /* If a DImode load/store multiple is used, and the base register
2743 is r3, then r4 can become ever-live without lr becoming so; in
2744 this case we need to push lr as well, or we will fail to get a
2745 proper return. */
2747 live_regs_mask |= 0x4000;
2748 lr_save_eliminated = 0;
2752 if (lr_save_eliminated)
2753 fprintf (f,"\t%c I don't think this function clobbers lr\n",
2754 ARM_COMMENT_CHAR);
2758 void
2759 output_func_epilogue (f, frame_size)
2760 FILE *f;
2761 int frame_size;
2763 int reg, live_regs_mask = 0, code_size = 0;
2764 /* If we need this, then it will always be at least this much. */
2765 int floats_offset = 24;
2766 rtx operands[3];
2767 int volatile_func = (optimize > 0
2768 && TREE_THIS_VOLATILE (current_function_decl));
2770 if (use_return_insn() && return_used_this_function)
2772 if (frame_size && !(frame_pointer_needed || TARGET_APCS))
2774 abort ();
2776 goto epilogue_done;
2779 /* A volatile function should never return. Call abort. */
2780 if (volatile_func)
2782 rtx op = gen_rtx (SYMBOL_REF, Pmode, "abort");
2783 output_asm_insn ("bl\t%a0", &op);
2784 code_size = 4;
2785 goto epilogue_done;
2788 for (reg = 0; reg <= 10; reg++)
2789 if (regs_ever_live[reg] && ! call_used_regs[reg])
2791 live_regs_mask |= (1 << reg);
2792 floats_offset += 4;
2795 if (frame_pointer_needed)
2797 for (reg = 23; reg > 15; reg--)
2798 if (regs_ever_live[reg] && ! call_used_regs[reg])
2800 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", ARM_REG_PREFIX,
2801 reg_names[reg], ARM_REG_PREFIX, floats_offset);
2802 floats_offset += 12;
2803 code_size += 4;
2806 live_regs_mask |= 0xA800;
2807 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
2808 TARGET_6 ? FALSE : TRUE);
2809 code_size += 4;
2811 else
2813 /* Restore stack pointer if necessary. */
2814 if (frame_size)
2816 operands[0] = operands[1] = stack_pointer_rtx;
2817 operands[2] = gen_rtx (CONST_INT, VOIDmode, frame_size);
2818 output_add_immediate (operands);
2821 for (reg = 16; reg < 24; reg++)
2822 if (regs_ever_live[reg] && ! call_used_regs[reg])
2824 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", ARM_REG_PREFIX,
2825 reg_names[reg], ARM_REG_PREFIX);
2826 code_size += 4;
2828 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
2830 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
2831 TARGET_6 ? FALSE : TRUE);
2832 code_size += 4;
2834 else
2836 if (live_regs_mask || regs_ever_live[14])
2838 live_regs_mask |= 0x4000;
2839 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
2840 code_size += 4;
2842 if (current_function_pretend_args_size)
2844 operands[0] = operands[1] = stack_pointer_rtx;
2845 operands[2] = gen_rtx (CONST_INT, VOIDmode,
2846 current_function_pretend_args_size);
2847 output_add_immediate (operands);
2849 fprintf (f,
2850 TARGET_6 ? "\tmov\t%spc, %slr\n" : "\tmovs\t%spc, %slr\n",
2851 ARM_REG_PREFIX, ARM_REG_PREFIX);
2852 code_size += 4;
2856 epilogue_done:
2858 /* insn_addresses isn't allocated when not optimizing */
2860 if (optimize > 0)
2861 arm_increase_location (code_size
2862 + insn_addresses[INSN_UID (get_last_insn ())]
2863 + get_prologue_size ());
2865 current_function_anonymous_args = 0;
2868 static void
2869 emit_multi_reg_push (mask)
2870 int mask;
2872 int num_regs = 0;
2873 int i, j;
2874 rtx par;
2876 for (i = 0; i < 16; i++)
2877 if (mask & (1 << i))
2878 num_regs++;
2880 if (num_regs == 0 || num_regs > 16)
2881 abort ();
2883 par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num_regs));
2885 for (i = 0; i < 16; i++)
2887 if (mask & (1 << i))
2889 XVECEXP (par, 0, 0)
2890 = gen_rtx (SET, VOIDmode, gen_rtx (MEM, BLKmode,
2891 gen_rtx (PRE_DEC, BLKmode,
2892 stack_pointer_rtx)),
2893 gen_rtx (UNSPEC, BLKmode,
2894 gen_rtvec (1, gen_rtx (REG, SImode, i)),
2895 2));
2896 break;
2900 for (j = 1, i++; j < num_regs; i++)
2902 if (mask & (1 << i))
2904 XVECEXP (par, 0, j)
2905 = gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, i));
2906 j++;
2909 emit_insn (par);
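/* Sketch (illustrative only) of the RTL built above for MASK == 0x4010,
   i.e. r4 and lr:

	(parallel [(set (mem:BLK (pre_dec:BLK (reg:SI 13)))
			(unspec:BLK [(reg:SI 4)] 2))
		   (use (reg:SI 14))])

   which is intended to be matched by a store-multiple pattern and so
   come out as a single stmfd.  */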
2912 void
2913 arm_expand_prologue ()
2915 int reg;
2916 rtx amount = GEN_INT (- get_frame_size ());
2917 rtx push_insn;
2918 int num_regs;
2919 int live_regs_mask = 0;
2920 int store_arg_regs = 0;
2921 int volatile_func = (optimize > 0
2922 && TREE_THIS_VOLATILE (current_function_decl));
2924 if (current_function_anonymous_args && current_function_pretend_args_size)
2925 store_arg_regs = 1;
2927 if (! volatile_func)
2928 for (reg = 0; reg <= 10; reg++)
2929 if (regs_ever_live[reg] && ! call_used_regs[reg])
2930 live_regs_mask |= 1 << reg;
2932 if (! volatile_func && regs_ever_live[14])
2933 live_regs_mask |= 0x4000;
2935 if (frame_pointer_needed)
2937 live_regs_mask |= 0xD800;
2938 emit_insn (gen_movsi (gen_rtx (REG, SImode, 12),
2939 stack_pointer_rtx));
2942 if (current_function_pretend_args_size)
2944 if (store_arg_regs)
2945 emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
2946 & 0xf);
2947 else
2948 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
2949 GEN_INT (-current_function_pretend_args_size)));
2952 if (live_regs_mask)
2954 /* If we have to push any regs, then we must push lr as well, or
2955 we won't get a proper return. */
2956 live_regs_mask |= 0x4000;
2957 emit_multi_reg_push (live_regs_mask);
2960 /* For now the integer regs are still pushed in output_func_epilogue (). */
2962 if (! volatile_func)
2963 for (reg = 23; reg > 15; reg--)
2964 if (regs_ever_live[reg] && ! call_used_regs[reg])
2965 emit_insn (gen_rtx (SET, VOIDmode,
2966 gen_rtx (MEM, XFmode,
2967 gen_rtx (PRE_DEC, XFmode,
2968 stack_pointer_rtx)),
2969 gen_rtx (REG, XFmode, reg)));
2971 if (frame_pointer_needed)
2972 emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx (REG, SImode, 12),
2973 (GEN_INT
2974 (-(4 + current_function_pretend_args_size)))));
2976 if (amount != const0_rtx)
2978 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
2979 emit_insn (gen_rtx (CLOBBER, VOIDmode,
2980 gen_rtx (MEM, BLKmode, stack_pointer_rtx)));
2983 /* If we are profiling, make sure no instructions are scheduled before
2984 the call to mcount. */
2985 if (profile_flag || profile_block_flag)
2986 emit_insn (gen_blockage ());
2990 /* If CODE is 'd', then X is a condition operand and the instruction
2991 should only be executed if the condition is true.
2992 If CODE is 'D', then X is a condition operand and the instruction
2993 should only be executed if the condition is false: however, if the mode
2994 of the comparison is CCFPEmode, then always execute the instruction -- we
2995 do this because in these circumstances !GE does not necessarily imply LT;
2996 in these cases the instruction pattern will take care to make sure that
2997 an instruction containing %d will follow, thereby undoing the effects of
2998 doing this instruction unconditionally.
2999 If CODE is 'N' then X is a floating point operand that must be negated
3000 before output.
3001 If CODE is 'B' then output a bitwise inverted value of X (a const int).
3002 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
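/* For instance (illustrative only), in a template such as
   "stm%?ia\t%m0, %M1" used above, `%?' prints the current condition
   when output is being conditionalised, `%m0' prints the base register
   of the memory operand, and `%M1' applied to a DImode value held in r2
   prints something like "{r2-r3}".  */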
3004 void
3005 arm_print_operand (stream, x, code)
3006 FILE *stream;
3007 rtx x;
3008 int code;
3010 switch (code)
3012 case '@':
3013 fputc (ARM_COMMENT_CHAR, stream);
3014 return;
3016 case '|':
3017 fputs (ARM_REG_PREFIX, stream);
3018 return;
3020 case '?':
3021 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
3022 fputs (arm_condition_codes[arm_current_cc], stream);
3023 return;
3025 case 'N':
3027 REAL_VALUE_TYPE r;
3028 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3029 r = REAL_VALUE_NEGATE (r);
3030 fprintf (stream, "%s", fp_const_from_val (&r));
3032 return;
3034 case 'B':
3035 if (GET_CODE (x) == CONST_INT)
3036 fprintf (stream,
3037 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
3038 "%d",
3039 #else
3040 "%ld",
3041 #endif
3042 ARM_SIGN_EXTEND (~ INTVAL (x)));
3043 else
3045 putc ('~', stream);
3046 output_addr_const (stream, x);
3048 return;
3050 case 'i':
3051 fprintf (stream, "%s", arithmetic_instr (x, 1));
3052 return;
3054 case 'I':
3055 fprintf (stream, "%s", arithmetic_instr (x, 0));
3056 return;
3058 case 'S':
3060 HOST_WIDE_INT val;
3061 char *shift = shift_op (x, &val);
3063 if (shift)
3065 fprintf (stream, ", %s ", shift_op (x, &val));
3066 if (val == -1)
3067 arm_print_operand (stream, XEXP (x, 1), 0);
3068 else
3069 fprintf (stream,
3070 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
3071 "#%d",
3072 #else
3073 "#%ld",
3074 #endif
3075 val);
3078 return;
3080 case 'R':
3081 if (REGNO (x) > 15)
3082 abort ();
3083 fputs (ARM_REG_PREFIX, stream);
3084 fputs (reg_names[REGNO (x) + 1], stream);
3085 return;
3087 case 'm':
3088 fputs (ARM_REG_PREFIX, stream);
3089 if (GET_CODE (XEXP (x, 0)) == REG)
3090 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
3091 else
3092 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
3093 return;
3095 case 'M':
3096 fprintf (stream, "{%s%s-%s%s}", ARM_REG_PREFIX, reg_names[REGNO (x)],
3097 ARM_REG_PREFIX, reg_names[REGNO (x) - 1
3098 + ((GET_MODE_SIZE (GET_MODE (x))
3099 + GET_MODE_SIZE (SImode) - 1)
3100 / GET_MODE_SIZE (SImode))]);
3101 return;
3103 case 'd':
3104 if (x)
3105 fputs (arm_condition_codes[get_arm_condition_code (x)],
3106 stream);
3107 return;
3109 case 'D':
3110 if (x && (flag_fast_math
3111 || GET_CODE (x) == EQ || GET_CODE (x) == NE
3112 || (GET_MODE (XEXP (x, 0)) != CCFPEmode
3113 && (GET_MODE_CLASS (GET_MODE (XEXP (x, 0)))
3114 != MODE_FLOAT))))
3115 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
3116 (get_arm_condition_code (x))],
3117 stream);
3118 return;
3120 default:
3121 if (x == 0)
3122 abort ();
3124 if (GET_CODE (x) == REG)
3126 fputs (ARM_REG_PREFIX, stream);
3127 fputs (reg_names[REGNO (x)], stream);
3129 else if (GET_CODE (x) == MEM)
3131 output_memory_reference_mode = GET_MODE (x);
3132 output_address (XEXP (x, 0));
3134 else if (GET_CODE (x) == CONST_DOUBLE)
3135 fprintf (stream, "#%s", fp_immediate_constant (x));
3136 else if (GET_CODE (x) == NEG)
3137 abort (); /* This should never happen now. */
3138 else
3140 fputc ('#', stream);
3141 output_addr_const (stream, x);
3146 /* Increase the `arm_text_location' by AMOUNT if we're in the text
3147 segment. */
3149 void
3150 arm_increase_location (amount)
3151 int amount;
3153 if (in_text_section ())
3154 arm_text_location += amount;
3158 /* Output a label definition. If this label is within the .text segment, it
3159 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
3160 Maybe GCC keeps names not starting with a `*' alive for a long time, but these
3161 are a minority anyway, so we just make a copy. Do not store the leading `*'
3162 if the name starts with one. */
3164 void
3165 arm_asm_output_label (stream, name)
3166 FILE *stream;
3167 char *name;
3169 char *real_name, *s;
3170 struct label_offset *cur;
3171 int hash = 0;
3173 assemble_name (stream, name);
3174 fputs (":\n", stream);
3175 if (! in_text_section ())
3176 return;
3178 if (name[0] == '*')
3180 real_name = xmalloc (1 + strlen (&name[1]));
3181 strcpy (real_name, &name[1]);
3183 else
3185 real_name = xmalloc (2 + strlen (name));
3186 strcpy (real_name, "_");
3187 strcat (real_name, name);
3189 for (s = real_name; *s; s++)
3190 hash += *s;
3192 hash = hash % LABEL_HASH_SIZE;
3193 cur = (struct label_offset *) xmalloc (sizeof (struct label_offset));
3194 cur->name = real_name;
3195 cur->offset = arm_text_location;
3196 cur->cdr = offset_table[hash];
3197 offset_table[hash] = cur;
3200 /* Load a symbol that is known to be in the text segment into a register.
3201 This should never be called when not optimizing. */
3203 char *
3204 output_load_symbol (insn, operands)
3205 rtx insn;
3206 rtx *operands;
3208 char *s;
3209 char *name = XSTR (operands[1], 0);
3210 struct label_offset *he;
3211 int hash = 0;
3212 int offset;
3213 unsigned int mask, never_mask = 0xffffffff;
3214 int shift, inst;
3215 char buffer[100];
3217 if (optimize == 0 || *name != '*')
3218 abort ();
3220 for (s = &name[1]; *s; s++)
3221 hash += *s;
3223 hash = hash % LABEL_HASH_SIZE;
3224 he = offset_table[hash];
3225 while (he && strcmp (he->name, &name[1]))
3226 he = he->cdr;
3228 if (!he)
3229 abort ();
3231 offset = (arm_text_location + insn_addresses[INSN_UID (insn)]
3232 + get_prologue_size () + 8 - he->offset);
3233 if (offset < 0)
3234 abort ();
3236 /* When generating the instructions, we never mask out the bits that we
3237 think will always be zero; then, if a mistake has occurred somewhere, the
3238 assembler will spot it and generate an error. */
3240 /* If the symbol is word aligned then we might be able to reduce the
3241 number of loads. */
3242 shift = ((offset & 3) == 0) ? 2 : 0;
3244 /* Clear the bits from NEVER_MASK that will be ORed in with the individual
3245 instructions. */
3246 for (; shift < 32; shift += 8)
3248 mask = 0xff << shift;
3249 if ((offset & mask) || ((unsigned) offset) > mask)
3250 never_mask &= ~mask;
3253 inst = 8;
3254 mask = 0xff << (shift - 32);
3256 while (mask && (never_mask & mask) == 0)
3258 if (inst == 8)
3260 strcpy (buffer, "sub%?\t%0, %|pc, #(8 + . -%a1)");
3261 if ((never_mask | mask) != 0xffffffff)
3262 sprintf (buffer + strlen (buffer), " & 0x%x", mask | never_mask);
3264 else
3265 sprintf (buffer, "sub%%?\t%%0, %%0, #(%d + . -%%a1) & 0x%x",
3266 inst, mask | never_mask);
3268 output_asm_insn (buffer, operands);
3269 mask <<= 8;
3270 inst -= 4;
3273 return "";
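/* A typical result (sketch only): for a word-aligned symbol 0x1234
   bytes before the load, the loop above emits two instructions of the
   shape

	sub	%0, pc, #(8 + . -sym) & MASK1
	sub	%0, %0, #(4 + . -sym) & MASK2

   where MASK1 and MASK2 stand in for the computed masks; each
   instruction removes one eight-bit, even-aligned chunk of the offset,
   and the masks keep the bits believed to be zero so that the assembler
   faults any miscalculation instead of silently producing a bad
   address.  */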
3276 /* Output code resembling an .lcomm directive. /bin/as doesn't have this
3277 directive, hence this hack, which works by reserving some `.space' in the
3278 bss segment directly.
3280 XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
3281 define STATIC COMMON space but merely STATIC BSS space. */
3283 void
3284 output_lcomm_directive (stream, name, size, rounded)
3285 FILE *stream;
3286 char *name;
3287 int size, rounded;
3289 fprintf (stream, "\n\t.bss\t%c .lcomm\n", ARM_COMMENT_CHAR);
3290 assemble_name (stream, name);
3291 fprintf (stream, ":\t.space\t%d\n", rounded);
3292 if (in_text_section ())
3293 fputs ("\n\t.text\n", stream);
3294 else
3295 fputs ("\n\t.data\n", stream);
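/* For a 6-byte object rounded up to 8, the output is (illustratively,
   writing `@' for ARM_COMMENT_CHAR):

	.bss	@ .lcomm
   name:	.space	8

   after which we switch back to whichever of .text or .data we came
   from.  */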
3298 /* A finite state machine takes care of noticing whether or not instructions
3299 can be conditionally executed, and thus decrease execution time and code
3300 size by deleting branch instructions. The fsm is controlled by
3301 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
3303 /* The states of the fsm controlling condition codes are:
3304 0: normal, do nothing special
3305 1: make ASM_OUTPUT_OPCODE not output this instruction
3306 2: make ASM_OUTPUT_OPCODE not output this instruction
3307 3: make instructions conditional
3308 4: make instructions conditional
3310 State transitions (state->state by whom under condition):
3311 0 -> 1 final_prescan_insn if the `target' is a label
3312 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
3313 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
3314 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
3315 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
3316 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
3317 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
3318 (the target insn is arm_target_insn).
3320 If the jump clobbers the conditions then we use states 2 and 4.
3322 A similar thing can be done with conditional return insns.
3324 XXX In case the `target' is an unconditional branch, this conditionalising
3325 of the instructions always reduces code size, but not always execution
3326 time. But then, I want to reduce the code size to somewhere near what
3327 /bin/cc produces. */
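/* As an example (illustrative only) of the transformation, a branch
   over a single insn, such as

	cmp	r0, #0
	beq	.L2
	add	r1, r1, #1
   .L2:

   is rewritten by suppressing the branch and conditionalising the body:

	cmp	r0, #0
	addne	r1, r1, #1  */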
3329 /* Returns the index of the ARM condition code string in
3330 `arm_condition_codes'. COMPARISON should be an rtx like
3331 `(eq (...) (...))'. */
3334 get_arm_condition_code (comparison)
3335 rtx comparison;
3337 switch (GET_CODE (comparison))
3339 case NE: return (1);
3340 case EQ: return (0);
3341 case GE: return (10);
3342 case GT: return (12);
3343 case LE: return (13);
3344 case LT: return (11);
3345 case GEU: return (2);
3346 case GTU: return (8);
3347 case LEU: return (9);
3348 case LTU: return (3);
3349 default: abort ();
3351 /*NOTREACHED*/
3352 return (42);
3356 void
3357 final_prescan_insn (insn, opvec, noperands)
3358 rtx insn;
3359 rtx *opvec;
3360 int noperands;
3362 /* BODY will hold the body of INSN. */
3363 register rtx body = PATTERN (insn);
3365 /* This will be 1 if trying to repeat the trick, and things need to be
3366 reversed if it appears to fail. */
3367 int reverse = 0;
3369 /* JUMP_CLOBBERS will be one if the condition codes are clobbered when
3370 the branch is taken, even if the rtl suggests otherwise. It also
3371 means that we have to grub around within the jump expression to find
3372 out what the conditions are when the jump isn't taken. */
3373 int jump_clobbers = 0;
3375 /* If we start with a return insn, we only succeed if we find another one. */
3376 int seeking_return = 0;
3378 /* START_INSN will hold the insn from where we start looking. This is the
3379 first insn after the following code_label if REVERSE is true. */
3380 rtx start_insn = insn;
3382 /* If in state 4, check if the target branch is reached, in order to
3383 change back to state 0. */
3384 if (arm_ccfsm_state == 4)
3386 if (insn == arm_target_insn)
3388 arm_target_insn = NULL;
3389 arm_ccfsm_state = 0;
3391 return;
3394 /* If in state 3, it is possible to repeat the trick, if this insn is an
3395 unconditional branch to a label, and immediately following this branch
3396 is the previous target label which is only used once, and the label this
3397 branch jumps to is not too far off. */
3398 if (arm_ccfsm_state == 3)
3400 if (simplejump_p (insn))
3402 start_insn = next_nonnote_insn (start_insn);
3403 if (GET_CODE (start_insn) == BARRIER)
3405 /* XXX Isn't this always a barrier? */
3406 start_insn = next_nonnote_insn (start_insn);
3408 if (GET_CODE (start_insn) == CODE_LABEL
3409 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
3410 && LABEL_NUSES (start_insn) == 1)
3411 reverse = TRUE;
3412 else
3413 return;
3415 else if (GET_CODE (body) == RETURN)
3417 start_insn = next_nonnote_insn (start_insn);
3418 if (GET_CODE (start_insn) == BARRIER)
3419 start_insn = next_nonnote_insn (start_insn);
3420 if (GET_CODE (start_insn) == CODE_LABEL
3421 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
3422 && LABEL_NUSES (start_insn) == 1)
3424 reverse = TRUE;
3425 seeking_return = 1;
3427 else
3428 return;
3430 else
3431 return;
3434 if (arm_ccfsm_state != 0 && !reverse)
3435 abort ();
3436 if (GET_CODE (insn) != JUMP_INSN)
3437 return;
3439 /* This jump might be in a PARALLEL with a clobber of the condition codes;
3440 the jump should always come first. */
3441 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
3442 body = XVECEXP (body, 0, 0);
3444 #if 0
3445 /* If this is a conditional return then we don't want to know */
3446 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
3447 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
3448 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
3449 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
3450 return;
3451 #endif
3453 if (reverse
3454 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
3455 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
3457 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
3458 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
3459 int then_not_else = TRUE;
3460 rtx this_insn = start_insn, label = 0;
3462 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
3463 jump_clobbers = 1;
3465 /* Register the insn jumped to. */
3466 if (reverse)
3468 if (!seeking_return)
3469 label = XEXP (SET_SRC (body), 0);
3471 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
3472 label = XEXP (XEXP (SET_SRC (body), 1), 0);
3473 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
3475 label = XEXP (XEXP (SET_SRC (body), 2), 0);
3476 then_not_else = FALSE;
3478 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
3479 seeking_return = 1;
3480 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
3482 seeking_return = 1;
3483 then_not_else = FALSE;
3485 else
3486 abort ();
3488 /* See how many insns this branch skips, and what kind of insns. If all
3489 insns are okay, and the label or unconditional branch to the same
3490 label is not too far away, succeed. */
3491 for (insns_skipped = 0;
3492 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
3493 insns_skipped++)
3495 rtx scanbody;
3497 this_insn = next_nonnote_insn (this_insn);
3498 if (!this_insn)
3499 break;
3501 scanbody = PATTERN (this_insn);
3503 switch (GET_CODE (this_insn))
3505 case CODE_LABEL:
3506 /* Succeed if it is the target label, otherwise fail since
3507 control falls in from somewhere else. */
3508 if (this_insn == label)
3510 if (jump_clobbers)
3512 arm_ccfsm_state = 2;
3513 this_insn = next_nonnote_insn (this_insn);
3515 else
3516 arm_ccfsm_state = 1;
3517 succeed = TRUE;
3519 else
3520 fail = TRUE;
3521 break;
3523 case BARRIER:
3524 /* Succeed if the following insn is the target label.
3525 Otherwise fail.
3526 If return insns are used then the last insn in a function
3527 will be a barrier. */
3528 this_insn = next_nonnote_insn (this_insn);
3529 if (this_insn && this_insn == label)
3531 if (jump_clobbers)
3533 arm_ccfsm_state = 2;
3534 this_insn = next_nonnote_insn (this_insn);
3536 else
3537 arm_ccfsm_state = 1;
3538 succeed = TRUE;
3540 else
3541 fail = TRUE;
3542 break;
3544 case CALL_INSN:
3545 /* The ARM 6xx uses full 32-bit addresses, so the cc is not
3546 preserved over calls. */
3547 if (TARGET_6)
3548 fail = TRUE;
3549 break;
3550 case JUMP_INSN:
3551 /* If this is an unconditional branch to the same label, succeed.
3552 If it is to another label, do nothing. If it is conditional,
3553 fail. */
3554 /* XXX Probably, the tests for the SET and the PC are unnecessary. */
3556 if (GET_CODE (scanbody) == SET
3557 && GET_CODE (SET_DEST (scanbody)) == PC)
3559 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
3560 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
3562 arm_ccfsm_state = 2;
3563 succeed = TRUE;
3565 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
3566 fail = TRUE;
3568 else if (GET_CODE (scanbody) == RETURN
3569 && seeking_return)
3571 arm_ccfsm_state = 2;
3572 succeed = TRUE;
3574 else if (GET_CODE (scanbody) == PARALLEL)
3576 switch (get_attr_conds (this_insn))
3578 case CONDS_NOCOND:
3579 break;
3580 default:
3581 fail = TRUE;
3582 break;
3585 break;
3587 case INSN:
3588 /* Instructions using or affecting the condition codes make it
3589 fail. */
3590 if ((GET_CODE (scanbody) == SET
3591 || GET_CODE (scanbody) == PARALLEL)
3592 && get_attr_conds (this_insn) != CONDS_NOCOND)
3593 fail = TRUE;
3594 break;
3596 default:
3597 break;
3600 if (succeed)
3602 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
3603 arm_target_label = CODE_LABEL_NUMBER (label);
3604 else if (seeking_return || arm_ccfsm_state == 2)
3606 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
3608 this_insn = next_nonnote_insn (this_insn);
3609 if (this_insn && (GET_CODE (this_insn) == BARRIER
3610 || GET_CODE (this_insn) == CODE_LABEL))
3611 abort ();
3613 if (!this_insn)
3615 /* Oh dear! We ran off the end; give up. */
3616 recog (PATTERN (insn), insn, NULL_PTR);
3617 arm_ccfsm_state = 0;
3618 arm_target_insn = NULL;
3619 return;
3621 arm_target_insn = this_insn;
3623 else
3624 abort ();
3625 if (jump_clobbers)
3627 if (reverse)
3628 abort ();
3629 arm_current_cc =
3630 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
3631 0), 0), 1));
3632 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
3633 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
3634 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
3635 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
3637 else
3639 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
3640 what it was. */
3641 if (!reverse)
3642 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
3643 0));
3646 if (reverse || then_not_else)
3647 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
3649 /* Restore recog_operand (getting the attributes of other insns can
3650 destroy this array, but final.c assumes that it remains intact
3651 across this call; since the insn has been recognized already we
3652 call recog directly). */
3653 recog (PATTERN (insn), insn, NULL_PTR);
3657 /* EOF */