(arm_reload_in_hi): New function.
[official-gcc.git] / gcc / config / arm / arm.c
blobcff93d2608e3f319c5d382b8917c85f908f51f12
1 /* Output routines for GCC for ARM/RISCiX.
2 Copyright (C) 1991, 1993, 1994 Free Software Foundation, Inc.
3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 and Martin Simmons (@harleqn.co.uk).
5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
7 This file is part of GNU CC.
9 GNU CC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
12 any later version.
14 GNU CC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GNU CC; see the file COPYING. If not, write to
21 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
23 #include <stdio.h>
24 #include <string.h>
25 #include "assert.h"
26 #include "config.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "reload.h"
38 #include "tree.h"
39 #include "expr.h"
41 /* The maximum number of insns skipped which will be conditionalised if
42 possible. */
43 #define MAX_INSNS_SKIPPED 5
45 /* Some function declarations. */
46 extern FILE *asm_out_file;
47 extern char *output_multi_immediate ();
48 extern void arm_increase_location ();
50 HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
51 static int get_prologue_size PROTO ((void));
53 /* Define the information needed to generate branch insns. This is
54 stored from the compare operation. */
56 rtx arm_compare_op0, arm_compare_op1;
57 int arm_compare_fp;
59 /* What type of cpu are we compiling for? */
60 enum processor_type arm_cpu;
62 /* What type of floating point are we compiling for? */
63 enum floating_point_type arm_fpu;
65 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
66 must report the mode of the memory reference from PRINT_OPERAND to
67 PRINT_OPERAND_ADDRESS. */
68 enum machine_mode output_memory_reference_mode;
70 /* Nonzero if the prologue must setup `fp'. */
71 int current_function_anonymous_args;
73 /* Location counter of .text segment. */
74 int arm_text_location = 0;
76 /* Set to one if we think that lr is only saved because of subroutine calls,
77 but all of these can be `put after' return insns */
78 int lr_save_eliminated;
80 /* A hash table is used to store text segment labels and their associated
81 offset from the start of the text segment. */
82 struct label_offset
84 char *name;
85 int offset;
86 struct label_offset *cdr;
89 #define LABEL_HASH_SIZE 257
91 static struct label_offset *offset_table[LABEL_HASH_SIZE];
93 /* Set to 1 when a return insn is output, this means that the epilogue
94 is not needed. */
96 static int return_used_this_function;
98 /* For an explanation of these variables, see final_prescan_insn below. */
99 int arm_ccfsm_state;
100 int arm_current_cc;
101 rtx arm_target_insn;
102 int arm_target_label;
104 /* The condition codes of the ARM, and the inverse function. */
105 char *arm_condition_codes[] =
107 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
108 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
111 #define ARM_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
113 /* Return 1 if it is possible to return using a single instruction */
116 use_return_insn ()
118 int regno;
120 if (!reload_completed ||current_function_pretend_args_size
121 || current_function_anonymous_args
122 || (get_frame_size () && !(TARGET_APCS || frame_pointer_needed)))
123 return 0;
125 /* Can't be done if any of the FPU regs are pushed, since this also
126 requires an insn */
127 for (regno = 20; regno < 24; regno++)
128 if (regs_ever_live[regno])
129 return 0;
131 return 1;
134 /* Return TRUE if int I is a valid immediate ARM constant. */
137 const_ok_for_arm (i)
138 HOST_WIDE_INT i;
140 unsigned HOST_WIDE_INT mask = ~0xFF;
142 /* Fast return for 0 and powers of 2 */
143 if ((i & (i - 1)) == 0)
144 return TRUE;
148 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
149 return TRUE;
150 mask =
151 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
152 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
153 } while (mask != ~0xFF);
155 return FALSE;
158 /* Return true if I is a valid constant for the operation CODE. */
160 const_ok_for_op (i, code, mode)
161 HOST_WIDE_INT i;
162 enum rtx_code code;
163 enum machine_mode mode;
165 if (const_ok_for_arm (i))
166 return 1;
168 switch (code)
170 case PLUS:
171 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
173 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
174 case XOR:
175 case IOR:
176 return 0;
178 case AND:
179 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
181 default:
182 abort ();
186 /* Emit a sequence of insns to handle a large constant.
187 CODE is the code of the operation required, it can be any of SET, PLUS,
188 IOR, AND, XOR, MINUS;
189 MODE is the mode in which the operation is being performed;
190 VAL is the integer to operate on;
191 SOURCE is the other operand (a register, or a null-pointer for SET);
192 SUBTARGETS means it is safe to create scratch registers if that will
193 either produce a simpler sequence, or we will want to cse the values. */
/* NOTE(review): implicit-int function; every return value below is the
   number of insns emitted, so callers appear to use it as a cost.  */
196 arm_split_constant (code, mode, val, target, source, subtargets)
197 enum rtx_code code;
198 enum machine_mode mode;
199 HOST_WIDE_INT val;
200 rtx target;
201 rtx source;
202 int subtargets;
/* NOTE(review): can_add is never set and can_shift is set but never
   read in this version.  */
204 int can_add = 0;
205 int can_invert = 0;
206 int can_negate = 0;
207 int can_negate_initial = 0;
208 int can_shift = 0;
209 int i;
210 int num_bits_set = 0;
211 int set_sign_bit_copies = 0;
212 int clear_sign_bit_copies = 0;
213 int clear_zero_bit_copies = 0;
214 int set_zero_bit_copies = 0;
215 int insns = 0;
216 rtx new_src;
217 unsigned HOST_WIDE_INT temp1, temp2;
218 unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
220 /* find out which operations are safe for a given CODE. Also do a quick
221 check for degenerate cases; these can occur when DImode operations
222 are split. */
223 switch (code)
225 case SET:
226 can_invert = 1;
227 can_shift = 1;
228 can_negate = 1;
229 break;
231 case PLUS:
232 can_negate = 1;
233 can_negate_initial = 1;
234 break;
236 case IOR:
237 if (remainder == 0xffffffff)
239 emit_insn (gen_rtx (SET, VOIDmode, target,
240 GEN_INT (ARM_SIGN_EXTEND (val))));
241 return 1;
243 if (remainder == 0)
245 if (reload_completed && rtx_equal_p (target, source))
246 return 0;
247 emit_insn (gen_rtx (SET, VOIDmode, target, source));
248 return 1;
250 break;
252 case AND:
253 if (remainder == 0)
255 emit_insn (gen_rtx (SET, VOIDmode, target, const0_rtx));
256 return 1;
258 if (remainder == 0xffffffff)
260 if (reload_completed && rtx_equal_p (target, source))
261 return 0;
262 emit_insn (gen_rtx (SET, VOIDmode, target, source));
263 return 1;
265 can_invert = 1;
266 break;
268 case XOR:
269 if (remainder == 0)
271 if (reload_completed && rtx_equal_p (target, source))
272 return 0;
273 emit_insn (gen_rtx (SET, VOIDmode, target, source));
274 return 1;
276 if (remainder == 0xffffffff)
278 emit_insn (gen_rtx (SET, VOIDmode, target,
279 gen_rtx (NOT, mode, source)));
280 return 1;
283 /* We don't know how to handle this yet below. */
284 abort ();
286 case MINUS:
287 /* We treat MINUS as (val - source), since (source - val) is always
288 passed as (source + (-val)). */
289 if (remainder == 0)
291 emit_insn (gen_rtx (SET, VOIDmode, target,
292 gen_rtx (NEG, mode, source)));
293 return 1;
295 if (const_ok_for_arm (val))
297 emit_insn (gen_rtx (SET, VOIDmode, target,
298 gen_rtx (MINUS, mode, GEN_INT (val), source)));
299 return 1;
301 can_negate = 1;
303 break;
305 default:
306 abort ();
309 /* If we can do it in one insn get out quickly */
310 if (const_ok_for_arm (val)
311 || (can_negate_initial && const_ok_for_arm (-val))
312 || (can_invert && const_ok_for_arm (~val)))
314 emit_insn (gen_rtx (SET, VOIDmode, target,
315 (source ? gen_rtx (code, mode, source,
316 GEN_INT (val)) : GEN_INT (val))));
317 return 1;
321 /* Calculate a few attributes that may be useful for specific
322 optimizations. */
/* Count of 0-bits from bit 31 downwards.  */
324 for (i = 31; i >= 0; i--)
326 if ((remainder & (1 << i)) == 0)
327 clear_sign_bit_copies++;
328 else
329 break;
/* Count of 1-bits from bit 31 downwards.  */
332 for (i = 31; i >= 0; i--)
334 if ((remainder & (1 << i)) != 0)
335 set_sign_bit_copies++;
336 else
337 break;
/* Count of 0-bits from bit 0 upwards.  */
340 for (i = 0; i <= 31; i++)
342 if ((remainder & (1 << i)) == 0)
343 clear_zero_bit_copies++;
344 else
345 break;
/* Count of 1-bits from bit 0 upwards.  */
348 for (i = 0; i <= 31; i++)
350 if ((remainder & (1 << i)) != 0)
351 set_zero_bit_copies++;
352 else
353 break;
356 switch (code)
358 case SET:
359 /* See if we can do this by sign_extending a constant that is known
360 to be negative. This is a good, way of doing it, since the shift
361 may well merge into a subsequent insn. */
362 if (set_sign_bit_copies > 1)
364 if (const_ok_for_arm
365 (temp1 = ARM_SIGN_EXTEND (remainder
366 << (set_sign_bit_copies - 1))))
368 new_src = subtargets ? gen_reg_rtx (mode) : target;
369 emit_insn (gen_rtx (SET, VOIDmode, new_src, GEN_INT (temp1)));
370 emit_insn (gen_ashrsi3 (target, new_src,
371 GEN_INT (set_sign_bit_copies - 1)));
372 return 2;
374 /* For an inverted constant, we will need to set the low bits,
375 these will be shifted out of harm's way. */
376 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
377 if (const_ok_for_arm (~temp1))
379 new_src = subtargets ? gen_reg_rtx (mode) : target;
380 emit_insn (gen_rtx (SET, VOIDmode, new_src, GEN_INT (temp1)));
381 emit_insn (gen_ashrsi3 (target, new_src,
382 GEN_INT (set_sign_bit_copies - 1)));
383 return 2;
387 /* See if we can generate this by setting the bottom (or the top)
388 16 bits, and then shifting these into the other half of the
389 word. We only look for the simplest cases, to do more would cost
390 too much. Be careful, however, not to generate this when the
391 alternative would take fewer insns. */
392 if (val & 0xffff0000)
394 temp1 = remainder & 0xffff0000;
395 temp2 = remainder & 0x0000ffff;
397 /* Overlaps outside this range are best done using other methods. */
398 for (i = 9; i < 24; i++)
400 if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
401 && ! const_ok_for_arm (temp2))
403 insns
404 = arm_split_constant (code, mode, temp2,
405 (new_src
406 = subtargets ? gen_reg_rtx (mode)
407 : target),
408 source, subtargets);
409 source = new_src;
410 emit_insn (gen_rtx (SET, VOIDmode, target,
411 gen_rtx (IOR, mode,
412 gen_rtx (ASHIFT, mode, source,
413 GEN_INT (i)),
414 source)));
415 return insns + 1;
419 /* Don't duplicate cases already considered. */
420 for (i = 17; i < 24; i++)
422 if (((temp1 | (temp1 >> i)) == remainder)
423 && ! const_ok_for_arm (temp1))
425 insns
426 = arm_split_constant (code, mode, temp1,
427 (new_src
428 = subtargets ? gen_reg_rtx (mode)
429 : target),
430 source, subtargets);
431 source = new_src;
432 emit_insn (gen_rtx (SET, VOIDmode, target,
433 gen_rtx (IOR, mode,
434 gen_rtx (LSHIFTRT, mode, source,
435 GEN_INT (i)),
436 source)));
437 return insns + 1;
441 break;
443 case IOR:
444 case XOR:
445 /* If we have IOR or XOR, and the inverse of the constant can be loaded
446 in a single instruction, and we can find a temporary to put it in,
447 then this can be done in two instructions instead of 3-4. */
448 if (subtargets
449 || (reload_completed && ! reg_mentioned_p (target, source)))
451 if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
453 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
455 emit_insn (gen_rtx (SET, VOIDmode, sub,
456 GEN_INT (ARM_SIGN_EXTEND (~ val))));
457 emit_insn (gen_rtx (SET, VOIDmode, target,
458 gen_rtx (code, mode, source, sub)));
459 return 2;
463 if (code == XOR)
464 break;
466 if (set_sign_bit_copies > 8
467 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
469 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
470 rtx shift = GEN_INT (set_sign_bit_copies);
472 emit_insn (gen_rtx (SET, VOIDmode, sub,
473 gen_rtx (NOT, mode,
474 gen_rtx (ASHIFT, mode, source,
475 shift))));
476 emit_insn (gen_rtx (SET, VOIDmode, target,
477 gen_rtx (NOT, mode,
478 gen_rtx (LSHIFTRT, mode, sub,
479 shift))));
480 return 2;
483 if (set_zero_bit_copies > 8
484 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
486 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
487 rtx shift = GEN_INT (set_zero_bit_copies);
489 emit_insn (gen_rtx (SET, VOIDmode, sub,
490 gen_rtx (NOT, mode,
491 gen_rtx (LSHIFTRT, mode, source,
492 shift))));
493 emit_insn (gen_rtx (SET, VOIDmode, target,
494 gen_rtx (NOT, mode,
495 gen_rtx (ASHIFT, mode, sub,
496 shift))));
497 return 2;
500 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
502 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
503 emit_insn (gen_rtx (SET, VOIDmode, sub,
504 gen_rtx (NOT, mode, source)));
505 source = sub;
506 if (subtargets)
507 sub = gen_reg_rtx (mode);
508 emit_insn (gen_rtx (SET, VOIDmode, sub,
509 gen_rtx (AND, mode, source, GEN_INT (temp1))));
510 emit_insn (gen_rtx (SET, VOIDmode, target,
511 gen_rtx (NOT, mode, sub)));
512 return 3;
514 break;
516 case AND:
517 /* See if two shifts will do 2 or more insn's worth of work. */
518 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
520 HOST_WIDE_INT shift_mask = ((0xffffffff
521 << (32 - clear_sign_bit_copies))
522 & 0xffffffff);
523 rtx new_source;
524 rtx shift = GEN_INT (clear_sign_bit_copies);
526 if ((remainder | shift_mask) != 0xffffffff)
528 new_source = subtargets ? gen_reg_rtx (mode) : target;
529 insns = arm_split_constant (AND, mode, remainder | shift_mask,
530 new_source, source, subtargets);
531 source = new_source;
534 new_source = subtargets ? gen_reg_rtx (mode) : target;
535 emit_insn (gen_ashlsi3 (new_source, source, shift));
536 emit_insn (gen_lshrsi3 (target, new_source, shift));
537 return insns + 2;
540 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
542 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
543 rtx new_source;
544 rtx shift = GEN_INT (clear_zero_bit_copies);
546 if ((remainder | shift_mask) != 0xffffffff)
548 new_source = subtargets ? gen_reg_rtx (mode) : target;
549 insns = arm_split_constant (AND, mode, remainder | shift_mask,
550 new_source, source, subtargets);
551 source = new_source;
554 new_source = subtargets ? gen_reg_rtx (mode) : target;
555 emit_insn (gen_lshrsi3 (new_source, source, shift));
556 emit_insn (gen_ashlsi3 (target, new_source, shift));
557 return insns + 2;
560 break;
562 default:
563 break;
/* Count the set bits; when more than half are set it is cheaper to
   work with the inverted (AND / can_invert) or negated (PLUS) value,
   as the code below shows.  */
566 for (i = 0; i < 32; i++)
567 if (remainder & (1 << i))
568 num_bits_set++;
570 if (code == AND || (can_invert && num_bits_set > 16))
571 remainder = (~remainder) & 0xffffffff;
572 else if (code == PLUS && num_bits_set > 16)
573 remainder = (-remainder) & 0xffffffff;
574 else
576 can_invert = 0;
577 can_negate = 0;
580 /* Now try and find a way of doing the job in either two or three
581 instructions.
582 We start by looking for the largest block of zeros that are aligned on
583 a 2-bit boundary, we then fill up the temps, wrapping around to the
584 top of the word when we drop off the bottom.
585 In the worst case this code should produce no more than four insns. */
587 int best_start = 0;
588 int best_consecutive_zeros = 0;
590 for (i = 0; i < 32; i += 2)
592 int consecutive_zeros = 0;
594 if (! (remainder & (3 << i)))
596 while ((i < 32) && ! (remainder & (3 << i)))
598 consecutive_zeros += 2;
599 i += 2;
601 if (consecutive_zeros > best_consecutive_zeros)
603 best_consecutive_zeros = consecutive_zeros;
604 best_start = i - consecutive_zeros;
606 i -= 2;
610 /* Now start emitting the insns, starting with the one with the highest
611 bit set: we do this so that the smallest number will be emitted last;
612 this is more likely to be combinable with addressing insns. */
/* Peel off one 8-bit (even-rotation) chunk of the remaining constant
   per emitted insn; the 0x0ff mask at line 625 selects the chunk.  */
613 i = best_start;
616 int end;
618 if (i <= 0)
619 i += 32;
620 if (remainder & (3 << (i - 2)))
622 end = i - 8;
623 if (end < 0)
624 end += 32;
625 temp1 = remainder & ((0x0ff << end)
626 | ((i < end) ? (0xff >> (32 - end)) : 0));
627 remainder &= ~temp1;
629 if (code == SET)
631 emit_insn (gen_rtx (SET, VOIDmode,
632 new_src = (subtargets ? gen_reg_rtx (mode)
633 : target),
634 GEN_INT (can_invert ? ~temp1 : temp1)));
635 can_invert = 0;
636 code = PLUS;
638 else if (code == MINUS)
640 emit_insn (gen_rtx (SET, VOIDmode,
641 new_src = (subtargets ? gen_reg_rtx (mode)
642 : target),
643 gen_rtx (code, mode, GEN_INT (temp1),
644 source)));
645 code = PLUS;
647 else
649 emit_insn (gen_rtx (SET, VOIDmode,
650 new_src = remainder ? (subtargets
651 ? gen_reg_rtx (mode)
652 : target) : target,
653 gen_rtx (code, mode, source,
654 GEN_INT (can_invert ? ~temp1
655 : (can_negate
656 ? -temp1 : temp1)))));
659 insns++;
660 source = new_src;
661 i -= 6;
663 i -= 2;
664 } while (remainder);
666 return insns;
/* True when X is a REG, or a SUBREG of a REG.  */
669 #define REG_OR_SUBREG_REG(X) \
670 (GET_CODE (X) == REG \
671 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
/* The underlying REG of X; X must satisfy REG_OR_SUBREG_REG.  */
673 #define REG_OR_SUBREG_RTX(X) \
674 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
/* True when X is the frame, stack or arg pointer rtx.  */
676 #define ARM_FRAME_RTX(X) \
677 ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx \
678 || (X) == arg_pointer_rtx)
/* Compute a relative cost for rtx X, whose rtx code is CODE.
   OUTER_CODE is accepted but never inspected in this version.
   Implicit-int function (pre-ANSI source).  */
681 arm_rtx_costs (x, code, outer_code)
682 rtx x;
683 enum rtx_code code, outer_code;
685 enum machine_mode mode = GET_MODE (x);
686 enum rtx_code subcode;
687 int extra_cost;
689 switch (code)
691 case MEM:
692 /* Memory costs quite a lot for the first word, but subsequent words
693 load at the equivalent of a single insn each. */
694 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
695 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
697 case DIV:
698 case MOD:
699 return 100;
701 case ROTATE:
702 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
703 return 4;
704 /* Fall through */
705 case ROTATERT:
706 if (mode != SImode)
707 return 8;
708 /* Fall through */
709 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
710 if (mode == DImode)
711 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
712 + ((GET_CODE (XEXP (x, 0)) == REG
713 || (GET_CODE (XEXP (x, 0)) == SUBREG
714 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
715 ? 0 : 8));
716 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
717 || (GET_CODE (XEXP (x, 0)) == SUBREG
718 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
719 ? 0 : 4)
720 + ((GET_CODE (XEXP (x, 1)) == REG
721 || (GET_CODE (XEXP (x, 1)) == SUBREG
722 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
723 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
724 ? 0 : 4));
726 case MINUS:
727 if (mode == DImode)
728 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
729 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
730 || (GET_CODE (XEXP (x, 0)) == CONST_INT
731 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
732 ? 0 : 8));
734 if (GET_MODE_CLASS (mode) == MODE_FLOAT
735 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
736 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
737 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
738 ? 0 : 8)
739 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
740 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
741 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
742 ? 0 : 8));
/* Cheap cases: rsb with encodable immediate, or a subtract whose second
   operand is a shift/power-of-two multiply (free shifter operand).  */
744 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
745 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
746 && REG_OR_SUBREG_REG (XEXP (x, 1))))
747 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
748 || subcode == ASHIFTRT || subcode == LSHIFTRT
749 || subcode == ROTATE || subcode == ROTATERT
750 || (subcode == MULT
751 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
752 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
753 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
754 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
755 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
756 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
757 && REG_OR_SUBREG_REG (XEXP (x, 0))))
758 return 1;
759 /* Fall through */
761 case PLUS:
762 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
763 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
764 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
765 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
766 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
767 ? 0 : 8));
769 /* Fall through */
770 case AND: case XOR: case IOR:
771 extra_cost = 0;
773 /* Normally the frame registers will be spilt into reg+const during
774 reload, so it is a bad idea to combine them with other instructions,
775 since then they might not be moved outside of loops. As a compromise
776 we allow integration with ops that have a constant as their second
777 operand. */
778 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
779 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
780 && GET_CODE (XEXP (x, 1)) != CONST_INT)
781 || (REG_OR_SUBREG_REG (XEXP (x, 0))
782 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
783 extra_cost = 4;
785 if (mode == DImode)
786 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
787 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
788 || (GET_CODE (XEXP (x, 1)) == CONST_INT
789 && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
790 ? 0 : 8));
792 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
793 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
794 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
795 || (GET_CODE (XEXP (x, 1)) == CONST_INT
796 && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
797 ? 0 : 4));
799 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
800 return (1 + extra_cost
801 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
802 || subcode == LSHIFTRT || subcode == ASHIFTRT
803 || subcode == ROTATE || subcode == ROTATERT
804 || (subcode == MULT
805 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
806 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
807 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0))
808 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
809 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
810 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
811 ? 0 : 4));
813 return 8;
815 case MULT:
816 if (GET_MODE_CLASS (mode) == MODE_FLOAT
817 || mode == DImode)
818 return 30;
820 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
822 HOST_WIDE_INT i = INTVAL (XEXP (x, 1)) & 0xffffffff;
823 int add_cost = const_ok_for_arm (i) ? 4 : 8;
824 int j;
/* Cost grows with the number of 2-bit chunks in the multiplier —
   presumably modelling the ARM multiplier's 2-bits-per-cycle early
   termination; confirm against the target's timing docs.  */
826 /* This will need adjusting for ARM's with fast multiplies */
827 for (j = 0; i && j < 32; j += 2)
829 i &= ~(3 << j);
830 add_cost += 2;
833 return add_cost;
836 return (30 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
837 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
839 case NEG:
840 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
841 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
842 /* Fall through */
843 case NOT:
844 if (mode == DImode)
845 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
847 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
849 case IF_THEN_ELSE:
850 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
851 return 14;
852 return 2;
854 case COMPARE:
855 return 1;
857 case ABS:
858 return 4 + (mode == DImode ? 4 : 0);
860 case SIGN_EXTEND:
861 if (GET_MODE (XEXP (x, 0)) == QImode)
862 return (4 + (mode == DImode ? 4 : 0)
863 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
864 /* Fall through */
865 case ZERO_EXTEND:
866 switch (GET_MODE (XEXP (x, 0)))
868 case QImode:
869 return (1 + (mode == DImode ? 4 : 0)
870 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
872 case HImode:
873 return (4 + (mode == DImode ? 4 : 0)
874 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
876 case SImode:
877 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
879 abort ();
881 default:
882 return 99;
886 /* This code has been fixed for cross compilation. */
888 static int fpa_consts_inited = 0;
890 char *strings_fpa[8] = {
891 "0.0",
892 "1.0",
893 "2.0",
894 "3.0",
895 "4.0",
896 "5.0",
897 "0.5",
898 "10.0"
901 static REAL_VALUE_TYPE values_fpa[8];
903 static void
904 init_fpa_table ()
906 int i;
907 REAL_VALUE_TYPE r;
909 for (i = 0; i < 8; i++)
911 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
912 values_fpa[i] = r;
915 fpa_consts_inited = 1;
918 /* Return TRUE if rtx X is a valid immediate FPU constant. */
921 const_double_rtx_ok_for_fpu (x)
922 rtx x;
924 REAL_VALUE_TYPE r;
925 int i;
927 if (!fpa_consts_inited)
928 init_fpa_table ();
930 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
931 if (REAL_VALUE_MINUS_ZERO (r))
932 return 0;
934 for (i = 0; i < 8; i++)
935 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
936 return 1;
938 return 0;
941 /* Return TRUE if rtx X is a valid immediate FPU constant. */
944 neg_const_double_rtx_ok_for_fpu (x)
945 rtx x;
947 REAL_VALUE_TYPE r;
948 int i;
950 if (!fpa_consts_inited)
951 init_fpa_table ();
953 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
954 r = REAL_VALUE_NEGATE (r);
955 if (REAL_VALUE_MINUS_ZERO (r))
956 return 0;
958 for (i = 0; i < 8; i++)
959 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
960 return 1;
962 return 0;
965 /* Predicates for `match_operand' and `match_operator'. */
967 /* s_register_operand is the same as register_operand, but it doesn't accept
968 (SUBREG (MEM)...). */
971 s_register_operand (op, mode)
972 register rtx op;
973 enum machine_mode mode;
975 if (GET_MODE (op) != mode && mode != VOIDmode)
976 return 0;
978 if (GET_CODE (op) == SUBREG)
979 op = SUBREG_REG (op);
981 /* We don't consider registers whose class is NO_REGS
982 to be a register operand. */
983 return (GET_CODE (op) == REG
984 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
985 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
988 /* Only accept reg, subreg(reg), const_int. */
991 reg_or_int_operand (op, mode)
992 register rtx op;
993 enum machine_mode mode;
995 if (GET_CODE (op) == CONST_INT)
996 return 1;
998 if (GET_MODE (op) != mode && mode != VOIDmode)
999 return 0;
1001 if (GET_CODE (op) == SUBREG)
1002 op = SUBREG_REG (op);
1004 /* We don't consider registers whose class is NO_REGS
1005 to be a register operand. */
1006 return (GET_CODE (op) == REG
1007 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1008 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1011 /* Return 1 if OP is an item in memory, given that we are in reload. */
1014 reload_memory_operand (op, mode)
1015 rtx op;
1016 enum machine_mode mode;
1018 int regno = true_regnum (op);
1020 return (! CONSTANT_P (op)
1021 && (regno == -1
1022 || (GET_CODE (op) == REG
1023 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1026 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
1029 arm_rhs_operand (op, mode)
1030 rtx op;
1031 enum machine_mode mode;
1033 return (s_register_operand (op, mode)
1034 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
1037 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
1041 arm_rhsm_operand (op, mode)
1042 rtx op;
1043 enum machine_mode mode;
1045 return (s_register_operand (op, mode)
1046 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
1047 || memory_operand (op, mode));
1050 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
1051 constant that is valid when negated. */
1054 arm_add_operand (op, mode)
1055 rtx op;
1056 enum machine_mode mode;
1058 return (s_register_operand (op, mode)
1059 || (GET_CODE (op) == CONST_INT
1060 && (const_ok_for_arm (INTVAL (op))
1061 || const_ok_for_arm (-INTVAL (op)))));
1065 arm_not_operand (op, mode)
1066 rtx op;
1067 enum machine_mode mode;
1069 return (s_register_operand (op, mode)
1070 || (GET_CODE (op) == CONST_INT
1071 && (const_ok_for_arm (INTVAL (op))
1072 || const_ok_for_arm (~INTVAL (op)))));
1075 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
1078 fpu_rhs_operand (op, mode)
1079 rtx op;
1080 enum machine_mode mode;
1082 if (s_register_operand (op, mode))
1083 return TRUE;
1084 else if (GET_CODE (op) == CONST_DOUBLE)
1085 return (const_double_rtx_ok_for_fpu (op));
1087 return FALSE;
1091 fpu_add_operand (op, mode)
1092 rtx op;
1093 enum machine_mode mode;
1095 if (s_register_operand (op, mode))
1096 return TRUE;
1097 else if (GET_CODE (op) == CONST_DOUBLE)
1098 return (const_double_rtx_ok_for_fpu (op)
1099 || neg_const_double_rtx_ok_for_fpu (op));
1101 return FALSE;
1104 /* Return nonzero if OP is a constant power of two. */
1107 power_of_two_operand (op, mode)
1108 rtx op;
1109 enum machine_mode mode;
1111 if (GET_CODE (op) == CONST_INT)
1113 HOST_WIDE_INT value = INTVAL(op);
1114 return value != 0 && (value & (value - 1)) == 0;
1116 return FALSE;
1119 /* Return TRUE for a valid operand of a DImode operation.
1120 Either: REG, CONST_DOUBLE or MEM(DImode_address).
1121 Note that this disallows MEM(REG+REG), but allows
1122 MEM(PRE/POST_INC/DEC(REG)). */
1125 di_operand (op, mode)
1126 rtx op;
1127 enum machine_mode mode;
1129 if (s_register_operand (op, mode))
1130 return TRUE;
1132 switch (GET_CODE (op))
1134 case CONST_DOUBLE:
1135 case CONST_INT:
1136 return TRUE;
1138 case MEM:
1139 return memory_address_p (DImode, XEXP (op, 0));
1141 default:
1142 return FALSE;
1146 /* Return TRUE for valid index operands. */
1149 index_operand (op, mode)
1150 rtx op;
1151 enum machine_mode mode;
1153 return (s_register_operand(op, mode)
1154 || (immediate_operand (op, mode)
1155 && INTVAL (op) < 4096 && INTVAL (op) > -4096));
1158 /* Return TRUE for valid shifts by a constant. This also accepts any
1159 power of two on the (somewhat overly relaxed) assumption that the
1160 shift operator in this case was a mult. */
1163 const_shift_operand (op, mode)
1164 rtx op;
1165 enum machine_mode mode;
1167 return (power_of_two_operand (op, mode)
1168 || (immediate_operand (op, mode)
1169 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
1172 /* Return TRUE for arithmetic operators which can be combined with a multiply
1173 (shift). */
1176 shiftable_operator (x, mode)
1177 rtx x;
1178 enum machine_mode mode;
1180 if (GET_MODE (x) != mode)
1181 return FALSE;
1182 else
1184 enum rtx_code code = GET_CODE (x);
1186 return (code == PLUS || code == MINUS
1187 || code == IOR || code == XOR || code == AND);
1191 /* Return TRUE for shift operators. */
1194 shift_operator (x, mode)
1195 rtx x;
1196 enum machine_mode mode;
1198 if (GET_MODE (x) != mode)
1199 return FALSE;
1200 else
1202 enum rtx_code code = GET_CODE (x);
1204 if (code == MULT)
1205 return power_of_two_operand (XEXP (x, 1));
1207 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
1208 || code == ROTATERT);
1212 int equality_operator (x, mode)
1213 rtx x;
1214 enum machine_mode mode;
1216 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
1219 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
1222 minmax_operator (x, mode)
1223 rtx x;
1224 enum machine_mode mode;
1226 enum rtx_code code = GET_CODE (x);
1228 if (GET_MODE (x) != mode)
1229 return FALSE;
1231 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
1234 /* return TRUE if x is EQ or NE */
1236 /* Return TRUE if this is the condition code register, if we aren't given
1237 a mode, accept any class CCmode register */
1240 cc_register (x, mode)
1241 rtx x;
1242 enum machine_mode mode;
1244 if (mode == VOIDmode)
1246 mode = GET_MODE (x);
1247 if (GET_MODE_CLASS (mode) != MODE_CC)
1248 return FALSE;
1251 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
1252 return TRUE;
1254 return FALSE;
1257 /* Return TRUE if this is the condition code register, if we aren't given
1258 a mode, accept any mode in class CC_MODE that is reversible */
1261 reversible_cc_register (x, mode)
1262 rtx x;
1263 enum machine_mode mode;
1265 if (mode == VOIDmode)
1267 mode = GET_MODE (x);
1268 if (GET_MODE_CLASS (mode) != MODE_CC
1269 && GET_CODE (x) == REG && REGNO (x) == 24)
1270 abort ();
1271 if (GET_MODE_CLASS (mode) != MODE_CC
1272 || (! flag_fast_math && ! REVERSIBLE_CC_MODE (mode)))
1273 return FALSE;
1276 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
1277 return TRUE;
1279 return FALSE;
1282 enum rtx_code
1283 minmax_code (x)
1284 rtx x;
1286 enum rtx_code code = GET_CODE (x);
1288 if (code == SMAX)
1289 return GE;
1290 else if (code == SMIN)
1291 return LE;
1292 else if (code == UMIN)
1293 return LEU;
1294 else if (code == UMAX)
1295 return GEU;
1297 abort ();
1300 /* Return 1 if memory locations are adjacent */
1303 adjacent_mem_locations (a, b)
1304 rtx a, b;
1306 int val0 = 0, val1 = 0;
1307 int reg0, reg1;
1309 if ((GET_CODE (XEXP (a, 0)) == REG
1310 || (GET_CODE (XEXP (a, 0)) == PLUS
1311 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
1312 && (GET_CODE (XEXP (b, 0)) == REG
1313 || (GET_CODE (XEXP (b, 0)) == PLUS
1314 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
1316 if (GET_CODE (XEXP (a, 0)) == PLUS)
1318 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
1319 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
1321 else
1322 reg0 = REGNO (XEXP (a, 0));
1323 if (GET_CODE (XEXP (b, 0)) == PLUS)
1325 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
1326 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
1328 else
1329 reg1 = REGNO (XEXP (b, 0));
1330 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
1332 return 0;
1335 /* Return 1 if OP is a load multiple operation. It is known to be
1336 parallel and the first section will be tested. */
1339 load_multiple_operation (op, mode)
1340 rtx op;
1341 enum machine_mode mode;
1343 HOST_WIDE_INT count = XVECLEN (op, 0);
1344 int dest_regno;
1345 rtx src_addr;
1346 HOST_WIDE_INT i = 1, base = 0;
1347 rtx elt;
1349 if (count <= 1
1350 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
1351 return 0;
1353 /* Check to see if this might be a write-back */
1354 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
1356 i++;
1357 base = 1;
1359 /* Now check it more carefully */
1360 if (GET_CODE (SET_DEST (elt)) != REG
1361 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
1362 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
1363 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
1364 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
1365 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
1366 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
1367 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
1368 != REGNO (SET_DEST (elt)))
1369 return 0;
1371 count--;
1374 /* Perform a quick check so we don't blow up below. */
1375 if (count <= i
1376 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
1377 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
1378 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
1379 return 0;
1381 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
1382 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
1384 for (; i < count; i++)
1386 rtx elt = XVECEXP (op, 0, i);
1388 if (GET_CODE (elt) != SET
1389 || GET_CODE (SET_DEST (elt)) != REG
1390 || GET_MODE (SET_DEST (elt)) != SImode
1391 || REGNO (SET_DEST (elt)) != dest_regno + i - base
1392 || GET_CODE (SET_SRC (elt)) != MEM
1393 || GET_MODE (SET_SRC (elt)) != SImode
1394 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1395 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1396 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1397 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
1398 return 0;
1401 return 1;
1404 /* Return 1 if OP is a store multiple operation. It is known to be
1405 parallel and the first section will be tested. */
1408 store_multiple_operation (op, mode)
1409 rtx op;
1410 enum machine_mode mode;
1412 HOST_WIDE_INT count = XVECLEN (op, 0);
1413 int src_regno;
1414 rtx dest_addr;
1415 HOST_WIDE_INT i = 1, base = 0;
1416 rtx elt;
1418 if (count <= 1
1419 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
1420 return 0;
1422 /* Check to see if this might be a write-back */
1423 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
1425 i++;
1426 base = 1;
1428 /* Now check it more carefully */
1429 if (GET_CODE (SET_DEST (elt)) != REG
1430 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
1431 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
1432 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
1433 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
1434 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
1435 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
1436 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
1437 != REGNO (SET_DEST (elt)))
1438 return 0;
1440 count--;
1443 /* Perform a quick check so we don't blow up below. */
1444 if (count <= i
1445 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
1446 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
1447 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
1448 return 0;
1450 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
1451 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
1453 for (; i < count; i++)
1455 elt = XVECEXP (op, 0, i);
1457 if (GET_CODE (elt) != SET
1458 || GET_CODE (SET_SRC (elt)) != REG
1459 || GET_MODE (SET_SRC (elt)) != SImode
1460 || REGNO (SET_SRC (elt)) != src_regno + i - base
1461 || GET_CODE (SET_DEST (elt)) != MEM
1462 || GET_MODE (SET_DEST (elt)) != SImode
1463 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
1464 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
1465 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
1466 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
1467 return 0;
1470 return 1;
1474 multi_register_push (op, mode)
1475 rtx op;
1476 enum machine_mode mode;
1478 if (GET_CODE (op) != PARALLEL
1479 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
1480 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
1481 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
1482 return 0;
1484 return 1;
1488 /* Routines for use with attributes */
1491 const_pool_offset (symbol)
1492 rtx symbol;
1494 return get_pool_offset (symbol) - get_pool_size () - get_prologue_size ();
1497 /* Routines for use in generating RTL */
1500 arm_gen_load_multiple (base_regno, count, from, up, write_back)
1501 int base_regno;
1502 int count;
1503 rtx from;
1504 int up;
1505 int write_back;
1507 int i = 0, j;
1508 rtx result;
1509 int sign = up ? 1 : -1;
1511 result = gen_rtx (PARALLEL, VOIDmode,
1512 rtvec_alloc (count + (write_back ? 2 : 0)));
1513 if (write_back)
1515 XVECEXP (result, 0, 0)
1516 = gen_rtx (SET, GET_MODE (from), from,
1517 plus_constant (from, count * 4 * sign));
1518 i = 1;
1519 count++;
1522 for (j = 0; i < count; i++, j++)
1524 XVECEXP (result, 0, i)
1525 = gen_rtx (SET, VOIDmode, gen_rtx (REG, SImode, base_regno + j),
1526 gen_rtx (MEM, SImode,
1527 plus_constant (from, j * 4 * sign)));
1530 if (write_back)
1531 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);
1533 return result;
1537 arm_gen_store_multiple (base_regno, count, to, up, write_back)
1538 int base_regno;
1539 int count;
1540 rtx to;
1541 int up;
1542 int write_back;
1544 int i = 0, j;
1545 rtx result;
1546 int sign = up ? 1 : -1;
1548 result = gen_rtx (PARALLEL, VOIDmode,
1549 rtvec_alloc (count + (write_back ? 2 : 0)));
1550 if (write_back)
1552 XVECEXP (result, 0, 0)
1553 = gen_rtx (SET, GET_MODE (to), to,
1554 plus_constant (to, count * 4 * sign));
1555 i = 1;
1556 count++;
1559 for (j = 0; i < count; i++, j++)
1561 XVECEXP (result, 0, i)
1562 = gen_rtx (SET, VOIDmode,
1563 gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign)),
1564 gen_rtx (REG, SImode, base_regno + j));
1567 if (write_back)
1568 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);
1570 return result;
1574 arm_gen_movstrqi (operands)
1575 rtx *operands;
1577 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
1578 int i, r;
1579 rtx const_sxteen = gen_rtx (CONST_INT, SImode, 16);
1580 rtx src, dst;
1581 rtx st_src, st_dst, end_src, end_dst, fin_src, fin_dst;
1582 rtx part_bytes_reg = NULL;
1583 extern int optimize;
1585 if (GET_CODE (operands[2]) != CONST_INT
1586 || GET_CODE (operands[3]) != CONST_INT
1587 || INTVAL (operands[2]) > 64
1588 || INTVAL (operands[3]) & 3)
1589 return 0;
1591 st_dst = XEXP (operands[0], 0);
1592 st_src = XEXP (operands[1], 0);
1593 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
1594 fin_src = src = copy_to_mode_reg (SImode, st_src);
1596 in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
1597 out_words_to_go = INTVAL (operands[2]) / 4;
1598 last_bytes = INTVAL (operands[2]) & 3;
1600 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
1601 part_bytes_reg = gen_rtx (REG, SImode, (in_words_to_go - 1) & 3);
1603 for (i = 0; in_words_to_go >= 2; i+=4)
1605 emit_insn (arm_gen_load_multiple (0, (in_words_to_go > 4
1606 ? 4 : in_words_to_go),
1607 src, TRUE, TRUE));
1608 if (out_words_to_go)
1610 if (out_words_to_go != 1)
1611 emit_insn (arm_gen_store_multiple (0, (out_words_to_go > 4
1612 ? 4 : out_words_to_go),
1613 dst, TRUE, TRUE));
1614 else
1616 emit_move_insn (gen_rtx (MEM, SImode, dst),
1617 gen_rtx (REG, SImode, 0));
1618 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
1622 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
1623 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
1626 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
1627 if (out_words_to_go)
1629 rtx sreg;
1631 emit_move_insn (sreg = gen_reg_rtx (SImode), gen_rtx (MEM, SImode, src));
1632 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
1633 emit_move_insn (gen_rtx (MEM, SImode, dst), sreg);
1634 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
1635 in_words_to_go--;
1637 if (in_words_to_go) /* Sanity check */
1638 abort ();
1641 if (in_words_to_go)
1643 if (in_words_to_go < 0)
1644 abort ();
1646 part_bytes_reg = copy_to_mode_reg (SImode, gen_rtx (MEM, SImode, src));
1647 emit_insn (gen_addsi3 (src, src, GEN_INT (4)));
1650 if (BYTES_BIG_ENDIAN && last_bytes)
1652 rtx tmp = gen_reg_rtx (SImode);
1654 if (part_bytes_reg == NULL)
1655 abort ();
1657 /* The bytes we want are in the top end of the word */
1658 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
1659 GEN_INT (8 * (4 - last_bytes))));
1660 part_bytes_reg = tmp;
1662 while (last_bytes)
1664 emit_move_insn (gen_rtx (MEM, QImode,
1665 plus_constant (dst, last_bytes - 1)),
1666 gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
1667 if (--last_bytes)
1669 tmp = gen_reg_rtx (SImode);
1670 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
1671 part_bytes_reg = tmp;
1676 else
1678 while (last_bytes)
1680 if (part_bytes_reg == NULL)
1681 abort ();
1683 emit_move_insn (gen_rtx (MEM, QImode, dst),
1684 gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
1685 emit_insn (gen_addsi3 (dst, dst, const1_rtx));
1686 if (--last_bytes)
1688 rtx tmp = gen_reg_rtx (SImode);
1689 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
1690 part_bytes_reg = tmp;
1695 return 1;
1698 /* X and Y are two things to compare using CODE. Emit the compare insn and
1699 return the rtx for register 0 in the proper mode. FP means this is a
1700 floating point compare: I don't think that it is needed on the arm. */
1703 gen_compare_reg (code, x, y, fp)
1704 enum rtx_code code;
1705 rtx x, y;
1707 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1708 rtx cc_reg = gen_rtx (REG, mode, 24);
1710 emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
1711 gen_rtx (COMPARE, mode, x, y)));
1713 return cc_reg;
1716 void
1717 arm_reload_in_hi (operands)
1718 rtx *operands;
1720 rtx base = find_replacement (&XEXP (operands[1], 0));
1722 emit_insn (gen_zero_extendqisi2 (operands[2], gen_rtx (MEM, QImode, base)));
1723 emit_insn (gen_zero_extendqisi2 (gen_rtx (SUBREG, SImode, operands[0], 0),
1724 gen_rtx (MEM, QImode,
1725 plus_constant (base, 1))));
1726 if (BYTES_BIG_ENDIAN)
1727 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
1728 operands[0], 0),
1729 gen_rtx (IOR, SImode,
1730 gen_rtx (ASHIFT, SImode,
1731 gen_rtx (SUBREG, SImode,
1732 operands[0], 0),
1733 GEN_INT (8)),
1734 operands[2])));
1735 else
1736 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
1737 operands[0], 0),
1738 gen_rtx (IOR, SImode,
1739 gen_rtx (ASHIFT, SImode,
1740 operands[2],
1741 GEN_INT (8)),
1742 gen_rtx (SUBREG, SImode, operands[0], 0))));
1745 void
1746 arm_reload_out_hi (operands)
1747 rtx *operands;
1749 rtx base = find_replacement (&XEXP (operands[0], 0));
1751 if (BYTES_BIG_ENDIAN)
1753 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
1754 gen_rtx (SUBREG, QImode, operands[1], 0)));
1755 emit_insn (gen_lshrsi3 (operands[2],
1756 gen_rtx (SUBREG, SImode, operands[1], 0),
1757 GEN_INT (8)));
1758 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
1759 gen_rtx (SUBREG, QImode, operands[2], 0)));
1761 else
1763 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
1764 gen_rtx (SUBREG, QImode, operands[1], 0)));
1765 emit_insn (gen_lshrsi3 (operands[2],
1766 gen_rtx (SUBREG, SImode, operands[1], 0),
1767 GEN_INT (8)));
1768 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
1769 gen_rtx (SUBREG, QImode, operands[2], 0)));
1773 /* Check to see if a branch is forwards or backwards. Return TRUE if it
1774 is backwards. */
1777 arm_backwards_branch (from, to)
1778 int from, to;
1780 return insn_addresses[to] <= insn_addresses[from];
1783 /* Check to see if a branch is within the distance that can be done using
1784 an arithmetic expression. */
1786 short_branch (from, to)
1787 int from, to;
1789 int delta = insn_addresses[from] + 8 - insn_addresses[to];
1791 return abs (delta) < 980; /* A small margin for safety */
1794 /* Check to see that the insn isn't the target of the conditionalizing
1795 code */
1797 arm_insn_not_targeted (insn)
1798 rtx insn;
1800 return insn != arm_target_insn;
1804 /* Routines to output assembly language. */
1806 /* If the rtx is the correct value then return the string of the number.
1807 In this way we can ensure that valid double constants are generated even
1808 when cross compiling. */
1809 char *
1810 fp_immediate_constant (x)
1811 rtx x;
1813 REAL_VALUE_TYPE r;
1814 int i;
1816 if (!fpa_consts_inited)
1817 init_fpa_table ();
1819 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1820 for (i = 0; i < 8; i++)
1821 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1822 return strings_fpa[i];
1824 abort ();
1827 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
1828 static char *
1829 fp_const_from_val (r)
1830 REAL_VALUE_TYPE *r;
1832 int i;
1834 if (! fpa_consts_inited)
1835 init_fpa_table ();
1837 for (i = 0; i < 8; i++)
1838 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
1839 return strings_fpa[i];
1841 abort ();
1844 /* Output the operands of a LDM/STM instruction to STREAM.
1845 MASK is the ARM register set mask of which only bits 0-15 are important.
1846 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
1847 must follow the register list. */
1849 void
1850 print_multi_reg (stream, instr, mask, hat)
1851 FILE *stream;
1852 char *instr;
1853 int mask, hat;
1855 int i;
1856 int not_first = FALSE;
1858 fputc ('\t', stream);
1859 fprintf (stream, instr, ARM_REG_PREFIX);
1860 fputs (", {", stream);
1861 for (i = 0; i < 16; i++)
1862 if (mask & (1 << i))
1864 if (not_first)
1865 fprintf (stream, ", ");
1866 fprintf (stream, "%s%s", ARM_REG_PREFIX, reg_names[i]);
1867 not_first = TRUE;
1870 fprintf (stream, "}%s\n", hat ? "^" : "");
1873 /* Output a 'call' insn. */
1875 char *
1876 output_call (operands)
1877 rtx *operands;
1879 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
1881 if (REGNO (operands[0]) == 14)
1883 operands[0] = gen_rtx (REG, SImode, 12);
1884 output_asm_insn ("mov%?\t%0, %|lr", operands);
1886 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
1887 output_asm_insn ("mov%?\t%|pc, %0", operands);
1888 return "";
1891 static int
1892 eliminate_lr2ip (x)
1893 rtx *x;
1895 int something_changed = 0;
1896 rtx x0 = *x;
1897 int code = GET_CODE (x0);
1898 register int i, j;
1899 register char *fmt;
1901 switch (code)
1903 case REG:
1904 if (REGNO (x0) == 14)
1906 *x = gen_rtx (REG, SImode, 12);
1907 return 1;
1909 return 0;
1910 default:
1911 /* Scan through the sub-elements and change any references there */
1912 fmt = GET_RTX_FORMAT (code);
1913 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1914 if (fmt[i] == 'e')
1915 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
1916 else if (fmt[i] == 'E')
1917 for (j = 0; j < XVECLEN (x0, i); j++)
1918 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
1919 return something_changed;
1923 /* Output a 'call' insn that is a reference in memory. */
1925 char *
1926 output_call_mem (operands)
1927 rtx *operands;
1929 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
1930 /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
1932 if (eliminate_lr2ip (&operands[0]))
1933 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
1935 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
1936 output_asm_insn ("ldr%?\t%|pc, %0", operands);
1937 return "";
1941 /* Output a move from arm registers to an fpu registers.
1942 OPERANDS[0] is an fpu register.
1943 OPERANDS[1] is the first registers of an arm register pair. */
1945 char *
1946 output_mov_long_double_fpu_from_arm (operands)
1947 rtx *operands;
1949 int arm_reg0 = REGNO (operands[1]);
1950 rtx ops[3];
1952 if (arm_reg0 == 12)
1953 abort();
1955 ops[0] = gen_rtx (REG, SImode, arm_reg0);
1956 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1957 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
1959 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
1960 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
1961 return "";
1964 /* Output a move from an fpu register to arm registers.
1965 OPERANDS[0] is the first registers of an arm register pair.
1966 OPERANDS[1] is an fpu register. */
1968 char *
1969 output_mov_long_double_arm_from_fpu (operands)
1970 rtx *operands;
1972 int arm_reg0 = REGNO (operands[0]);
1973 rtx ops[3];
1975 if (arm_reg0 == 12)
1976 abort();
1978 ops[0] = gen_rtx (REG, SImode, arm_reg0);
1979 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1980 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
1982 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
1983 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
1984 return "";
1987 /* Output a move from arm registers to arm registers of a long double
1988 OPERANDS[0] is the destination.
1989 OPERANDS[1] is the source. */
1990 char *
1991 output_mov_long_double_arm_from_arm (operands)
1992 rtx *operands;
1994 /* We have to be careful here because the two might overlap */
1995 int dest_start = REGNO (operands[0]);
1996 int src_start = REGNO (operands[1]);
1997 rtx ops[2];
1998 int i;
2000 if (dest_start < src_start)
2002 for (i = 0; i < 3; i++)
2004 ops[0] = gen_rtx (REG, SImode, dest_start + i);
2005 ops[1] = gen_rtx (REG, SImode, src_start + i);
2006 output_asm_insn ("mov%?\t%0, %1", ops);
2009 else
2011 for (i = 2; i >= 0; i--)
2013 ops[0] = gen_rtx (REG, SImode, dest_start + i);
2014 ops[1] = gen_rtx (REG, SImode, src_start + i);
2015 output_asm_insn ("mov%?\t%0, %1", ops);
2019 return "";
2023 /* Output a move from arm registers to an fpu registers.
2024 OPERANDS[0] is an fpu register.
2025 OPERANDS[1] is the first registers of an arm register pair. */
2027 char *
2028 output_mov_double_fpu_from_arm (operands)
2029 rtx *operands;
2031 int arm_reg0 = REGNO (operands[1]);
2032 rtx ops[2];
2034 if (arm_reg0 == 12)
2035 abort();
2036 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2037 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
2038 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
2039 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
2040 return "";
2043 /* Output a move from an fpu register to arm registers.
2044 OPERANDS[0] is the first registers of an arm register pair.
2045 OPERANDS[1] is an fpu register. */
2047 char *
2048 output_mov_double_arm_from_fpu (operands)
2049 rtx *operands;
2051 int arm_reg0 = REGNO (operands[0]);
2052 rtx ops[2];
2054 if (arm_reg0 == 12)
2055 abort();
2057 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2058 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
2059 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
2060 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
2061 return "";
2064 /* Output a move between double words.
2065 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
2066 or MEM<-REG and all MEMs must be offsettable addresses. */
2068 char *
2069 output_move_double (operands)
2070 rtx *operands;
2072 enum rtx_code code0 = GET_CODE (operands[0]);
2073 enum rtx_code code1 = GET_CODE (operands[1]);
2074 rtx otherops[2];
2076 if (code0 == REG)
2078 int reg0 = REGNO (operands[0]);
2080 otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
2081 if (code1 == REG)
2083 int reg1 = REGNO (operands[1]);
2084 if (reg1 == 12)
2085 abort();
2087 otherops[1] = gen_rtx (REG, SImode, 1 + reg1);
2089 /* Ensure the second source is not overwritten */
2090 if (reg0 == 1 + reg1)
2092 output_asm_insn("mov%?\t%0, %1", otherops);
2093 output_asm_insn("mov%?\t%0, %1", operands);
2095 else
2097 output_asm_insn("mov%?\t%0, %1", operands);
2098 output_asm_insn("mov%?\t%0, %1", otherops);
2101 else if (code1 == CONST_DOUBLE)
2103 otherops[1] = gen_rtx (CONST_INT, VOIDmode,
2104 CONST_DOUBLE_HIGH (operands[1]));
2105 operands[1] = gen_rtx (CONST_INT, VOIDmode,
2106 CONST_DOUBLE_LOW (operands[1]));
2107 output_mov_immediate (operands, FALSE, "");
2108 output_mov_immediate (otherops, FALSE, "");
2110 else if (code1 == CONST_INT)
2112 otherops[1] = const0_rtx;
2113 /* sign extend the intval into the high-order word */
2114 /* Note: output_mov_immediate may clobber operands[1], so we
2115 put this out first */
2116 if (INTVAL (operands[1]) < 0)
2117 output_asm_insn ("mvn%?\t%0, %1", otherops);
2118 else
2119 output_asm_insn ("mov%?\t%0, %1", otherops);
2120 output_mov_immediate (operands, FALSE, "");
2122 else if (code1 == MEM)
2124 switch (GET_CODE (XEXP (operands[1], 0)))
2126 case REG:
2127 /* Handle the simple case where address is [r, #0] more
2128 efficient. */
2129 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
2130 break;
2131 case PRE_INC:
2132 output_asm_insn ("add%?\t%m1, %m1, #8", operands);
2133 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
2134 break;
2135 case PRE_DEC:
2136 output_asm_insn ("sub%?\t%m1, %m1, #8", operands);
2137 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
2138 break;
2139 case POST_INC:
2140 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
2141 break;
2142 case POST_DEC:
2143 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
2144 output_asm_insn ("sub%?\t%m1, %m1, #8", operands);
2145 break;
2146 default:
2147 otherops[1] = adj_offsettable_operand (operands[1], 4);
2148 /* Take care of overlapping base/data reg. */
2149 if (reg_mentioned_p (operands[0], operands[1]))
2151 output_asm_insn ("ldr%?\t%0, %1", otherops);
2152 output_asm_insn ("ldr%?\t%0, %1", operands);
2154 else
2156 output_asm_insn ("ldr%?\t%0, %1", operands);
2157 output_asm_insn ("ldr%?\t%0, %1", otherops);
2161 else abort(); /* Constraints should prevent this */
2163 else if (code0 == MEM && code1 == REG)
2165 if (REGNO (operands[1]) == 12)
2166 abort();
2167 switch (GET_CODE (XEXP (operands[0], 0)))
2169 case REG:
2170 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
2171 break;
2172 case PRE_INC:
2173 output_asm_insn ("add%?\t%m0, %m0, #8", operands);
2174 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
2175 break;
2176 case PRE_DEC:
2177 output_asm_insn ("sub%?\t%m0, %m0, #8", operands);
2178 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
2179 break;
2180 case POST_INC:
2181 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
2182 break;
2183 case POST_DEC:
2184 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
2185 output_asm_insn ("sub%?\t%m0, %m0, #8", operands);
2186 break;
2187 default:
2188 otherops[0] = adj_offsettable_operand (operands[0], 4);
2189 otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
2190 output_asm_insn ("str%?\t%1, %0", operands);
2191 output_asm_insn ("str%?\t%1, %0", otherops);
2194 else abort(); /* Constraints should prevent this */
2196 return "";
2200 /* Output an arbitrary MOV reg, #n.
2201 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
2203 char *
2204 output_mov_immediate (operands)
2205 rtx *operands;
2207 HOST_WIDE_INT n = INTVAL (operands[1]);
2208 int n_ones = 0;
2209 int i;
2211 /* Try to use one MOV */
2212 if (const_ok_for_arm (n))
2214 output_asm_insn ("mov%?\t%0, %1", operands);
2215 return "";
2218 /* Try to use one MVN */
2219 if (const_ok_for_arm (~n))
2221 operands[1] = GEN_INT (~n);
2222 output_asm_insn ("mvn%?\t%0, %1", operands);
2223 return "";
2226 /* If all else fails, make it out of ORRs or BICs as appropriate. */
2228 for (i=0; i < 32; i++)
2229 if (n & 1 << i)
2230 n_ones++;
2232 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
2233 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
2234 ~n);
2235 else
2236 output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
2239 return "";
2243 /* Output an ADD r, s, #n where n may be too big for one instruction. If
2244 adding zero to one register, output nothing. */
2246 char *
2247 output_add_immediate (operands)
2248 rtx *operands;
2250 HOST_WIDE_INT n = INTVAL (operands[2]);
2252 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
2254 if (n < 0)
2255 output_multi_immediate (operands,
2256 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
2257 -n);
2258 else
2259 output_multi_immediate (operands,
2260 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
2264 return "";
2267 /* Output a multiple immediate operation.
2268 OPERANDS is the vector of operands referred to in the output patterns.
2269 INSTR1 is the output pattern to use for the first constant.
2270 INSTR2 is the output pattern to use for subsequent constants.
2271 IMMED_OP is the index of the constant slot in OPERANDS.
2272 N is the constant value. */
2274 char *
2275 output_multi_immediate (operands, instr1, instr2, immed_op, n)
2276 rtx *operands;
2277 char *instr1, *instr2;
2278 int immed_op;
2279 HOST_WIDE_INT n;
2281 #if HOST_BITS_PER_WIDE_INT > 32
2282 n &= 0xffffffff;
2283 #endif
2285 if (n == 0)
2287 operands[immed_op] = const0_rtx;
2288 output_asm_insn (instr1, operands); /* Quick and easy output */
2290 else
2292 int i;
2293 char *instr = instr1;
2295 /* Note that n is never zero here (which would give no output) */
2296 for (i = 0; i < 32; i += 2)
2298 if (n & (3 << i))
2300 operands[immed_op] = GEN_INT (n & (255 << i));
2301 output_asm_insn (instr, operands);
2302 instr = instr2;
2303 i += 6;
2307 return "";
2311 /* Return the appropriate ARM instruction for the operation code.
2312 The returned result should not be overwritten. OP is the rtx of the
2313 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
2314 was shifted. */
2316 char *
2317 arithmetic_instr (op, shift_first_arg)
2318 rtx op;
2319 int shift_first_arg;
2321 switch (GET_CODE (op))
2323 case PLUS:
2324 return "add";
2326 case MINUS:
2327 return shift_first_arg ? "rsb" : "sub";
2329 case IOR:
2330 return "orr";
2332 case XOR:
2333 return "eor";
2335 case AND:
2336 return "and";
2338 default:
2339 abort ();
2344 /* Ensure valid constant shifts and return the appropriate shift mnemonic
2345 for the operation code. The returned result should not be overwritten.
2346 OP is the rtx code of the shift.
2347 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
2348 shift. */
2350 static char *
2351 shift_op (op, amountp)
2352 rtx op;
2353 HOST_WIDE_INT *amountp;
2355 char *mnem;
2356 enum rtx_code code = GET_CODE (op);
2358 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
2359 *amountp = -1;
2360 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
2361 *amountp = INTVAL (XEXP (op, 1));
2362 else
2363 abort ();
2365 switch (code)
2367 case ASHIFT:
2368 mnem = "asl";
2369 break;
2371 case ASHIFTRT:
2372 mnem = "asr";
2373 break;
2375 case LSHIFTRT:
2376 mnem = "lsr";
2377 break;
2379 case ROTATERT:
2380 mnem = "ror";
2381 break;
2383 case MULT:
2384 /* We never have to worry about the amount being other than a
2385 power of 2, since this case can never be reloaded from a reg. */
2386 if (*amountp != -1)
2387 *amountp = int_log2 (*amountp);
2388 else
2389 abort ();
2390 return "asl";
2392 default:
2393 abort ();
2396 if (*amountp != -1)
2398 /* This is not 100% correct, but follows from the desire to merge
2399 multiplication by a power of 2 with the recognizer for a
2400 shift. >=32 is not a valid shift for "asl", so we must try and
2401 output a shift that produces the correct arithmetical result.
2402 Using lsr #32 is idendical except for the fact that the carry bit
2403 is not set correctly if we set the flags; but we never use the
2404 carry bit from such an operation, so we can ignore that. */
2405 if (code == ROTATERT)
2406 *amountp &= 31; /* Rotate is just modulo 32 */
2407 else if (*amountp != (*amountp & 31))
2409 if (code == ASHIFT)
2410 mnem = "lsr";
2411 *amountp = 32;
2414 /* Shifts of 0 are no-ops. */
2415 if (*amountp == 0)
2416 return NULL;
2419 return mnem;
2423 /* Obtain the shift from the POWER of two. */
2425 HOST_WIDE_INT
2426 int_log2 (power)
2427 HOST_WIDE_INT power;
2429 HOST_WIDE_INT shift = 0;
2431 while (((1 << shift) & power) == 0)
2433 if (shift > 31)
2434 abort ();
2435 shift++;
2438 return shift;
/* Output a .ascii pseudo-op, keeping track of lengths.  This is because
   /bin/as is horribly restrictive.  Long strings are split into several
   .ascii directives of at most ~50 emitted characters each.  */
void
output_ascii_pseudo_op (stream, p, len)
     FILE *stream;
     unsigned char *p;
     int len;
{
  int i;
  int len_so_far = 1000;	/* Larger than the limit: forces the
				   first .ascii header.  */
  int chars_so_far = 0;

  for (i = 0; i < len; i++)
    {
      register int c = p[i];

      /* Start a fresh .ascii directive when the current one is full.  */
      if (len_so_far > 50)
	{
	  if (chars_so_far)
	    fputs ("\"\n", stream);
	  fputs ("\t.ascii\t\"", stream);
	  len_so_far = 0;
	  arm_increase_location (chars_so_far);
	  chars_so_far = 0;
	}

      /* Quote and special characters need escaping.  */
      if (c == '\"' || c == '\\')
	{
	  putc ('\\', stream);
	  len_so_far++;
	}

      if (c >= ' ' && c < 0177)
	{
	  putc (c, stream);
	  len_so_far++;
	}
      else
	{
	  fprintf (stream, "\\%03o", c);
	  len_so_far += 4;
	}

      chars_so_far++;
    }

  fputs ("\"\n", stream);
  arm_increase_location (chars_so_far);
}
2493 /* Try to determine whether a pattern really clobbers the link register.
2494 This information is useful when peepholing, so that lr need not be pushed
2495 if we combine a call followed by a return.
2496 NOTE: This code does not check for side-effect expressions in a SET_SRC:
2497 such a check should not be needed because these only update an existing
2498 value within a register; the register must still be set elsewhere within
2499 the function. */
2501 static int
2502 pattern_really_clobbers_lr (x)
2503 rtx x;
2505 int i;
2507 switch (GET_CODE (x))
2509 case SET:
2510 switch (GET_CODE (SET_DEST (x)))
2512 case REG:
2513 return REGNO (SET_DEST (x)) == 14;
2515 case SUBREG:
2516 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
2517 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
2519 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
2520 return 0;
2521 abort ();
2523 default:
2524 return 0;
2527 case PARALLEL:
2528 for (i = 0; i < XVECLEN (x, 0); i++)
2529 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
2530 return 1;
2531 return 0;
2533 case CLOBBER:
2534 switch (GET_CODE (XEXP (x, 0)))
2536 case REG:
2537 return REGNO (XEXP (x, 0)) == 14;
2539 case SUBREG:
2540 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
2541 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
2542 abort ();
2544 default:
2545 return 0;
2548 case UNSPEC:
2549 return 1;
2551 default:
2552 return 0;
/* Scan the insn chain starting at FIRST and return nonzero if the link
   register must be treated as clobbered (and hence saved) for this
   function.  Returns zero only when every call insn is immediately
   followed by a return (possibly with an intervening USE of the call's
   result), so the call can later be peepholed into a tail call.  */

static int
function_really_clobbers_lr (first)
     rtx first;
{
  rtx insn, next;

  for (insn = first; insn; insn = next_nonnote_insn (insn))
    {
      switch (GET_CODE (insn))
	{
	case BARRIER:
	case NOTE:
	case CODE_LABEL:
	case JUMP_INSN:		/* Jump insns only change the PC (and conds) */
	case INLINE_HEADER:
	  break;

	case INSN:
	  if (pattern_really_clobbers_lr (PATTERN (insn)))
	    return 1;
	  break;

	case CALL_INSN:
	  /* Don't yet know how to handle those calls that are not to a
	     SYMBOL_REF */
	  if (GET_CODE (PATTERN (insn)) != PARALLEL)
	    abort ();

	  switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
	    {
	    case CALL:
	      /* A plain call: the address is XEXP of the CALL's MEM.  */
	      if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
		  != SYMBOL_REF)
		return 1;
	      break;

	    case SET:
	      /* A call whose value is used: the CALL sits in SET_SRC.  */
	      if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
							  0, 0)), 0), 0))
		  != SYMBOL_REF)
		return 1;
	      break;

	    default:		/* Don't recognize it, be safe */
	      return 1;
	    }

	  /* A call can be made (by peepholing) not to clobber lr iff it is
	     followed by a return.  There may, however, be a use insn iff
	     we are returning the result of the call.
	     If we run off the end of the insn chain, then that means the
	     call was at the end of the function.  Unfortunately we don't
	     have a return insn for the peephole to recognize, so we
	     must reject this.  (Can this be fixed by adding our own insn?) */
	  if ((next = next_nonnote_insn (insn)) == NULL)
	    return 1;

	  /* Step over a USE of the register holding the call's result.  */
	  if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
	      && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
	      && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
		  == REGNO (XEXP (PATTERN (next), 0))))
	    if ((next = next_nonnote_insn (next)) == NULL)
	      return 1;

	  /* Only a call followed directly by a return is safe.  */
	  if (GET_CODE (next) == JUMP_INSN
	      && GET_CODE (PATTERN (next)) == RETURN)
	    break;
	  return 1;

	default:
	  abort ();
	}
    }

  /* We have reached the end of the chain so lr was _not_ clobbered */
  return 0;
}
/* Output the assembler for a return insn.  OPERAND is the condition
   operand of the return pattern.  REALLY_RETURN is zero when only the
   saved registers should be restored without loading the PC (used for
   tail calls).  The text is emitted directly via output_asm_insn, so
   an empty template string is returned.  */

char *
output_return_instruction (operand, really_return)
     rtx operand;
     int really_return;
{
  char instr[100];
  int reg, live_regs = 0;
  /* A `volatile' (non-returning) function must never actually return.  */
  int volatile_func = (optimize > 0
		       && TREE_THIS_VOLATILE (current_function_decl));

  return_used_this_function = 1;

  if (volatile_func)
    {
      rtx ops[2];
      /* If this function was declared non-returning, and we have found a tail
	 call, then we have to trust that the called function won't return. */
      if (! really_return)
	return "";

      /* Otherwise, trap an attempted return by aborting. */
      ops[0] = operand;
      ops[1] = gen_rtx (SYMBOL_REF, Pmode, "abort");
      output_asm_insn ("bl%d0\t%a1", ops);
      return "";
    }

  if (current_function_calls_alloca && ! really_return)
    abort ();

  /* Count the callee-saved integer registers that were pushed.  */
  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && ! call_used_regs[reg])
      live_regs++;

  /* lr was pushed as well unless its save was eliminated.  */
  if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
    live_regs++;

  /* With a frame pointer the fp/ip/sp/pc group is popped too.
     NOTE(review): LIVE_REGS is only used below to decide comma
     placement in the register list, so a slight over-count of the
     trailing group is harmless.  */
  if (frame_pointer_needed)
    live_regs += 4;

  if (live_regs)
    {
      /* lr (or pc) always ends up in the pop list, even when its save
	 was eliminated.  */
      if (lr_save_eliminated || ! regs_ever_live[14])
	live_regs++;

      if (frame_pointer_needed)
	strcpy (instr, "ldm%?%d0ea\t%|fp, {");
      else
	strcpy (instr, "ldm%?%d0fd\t%|sp!, {");

      for (reg = 0; reg <= 10; reg++)
	if (regs_ever_live[reg] && ! call_used_regs[reg])
	  {
	    strcat (instr, "%|");
	    strcat (instr, reg_names[reg]);
	    if (--live_regs)
	      strcat (instr, ", ");
	  }

      if (frame_pointer_needed)
	{
	  /* Pop fp, sp and then pc (or lr when not really returning).  */
	  strcat (instr, "%|");
	  strcat (instr, reg_names[11]);
	  strcat (instr, ", ");
	  strcat (instr, "%|");
	  strcat (instr, reg_names[13]);
	  strcat (instr, ", ");
	  strcat (instr, "%|");
	  strcat (instr, really_return ? reg_names[15] : reg_names[14]);
	}
      else
	{
	  strcat (instr, "%|");
	  strcat (instr, really_return ? reg_names[15] : reg_names[14]);
	}
      /* `^' restores the PSR bits on a 26-bit return; omitted on
	 TARGET_6 or when not actually returning.  */
      strcat (instr, (TARGET_6 || !really_return) ? "}" : "}^");
      output_asm_insn (instr, &operand);
    }
  else if (really_return)
    {
      /* Nothing was pushed: a simple move of lr into pc suffices.  */
      strcpy (instr,
	      TARGET_6 ? "mov%?%d0\t%|pc, lr" : "mov%?%d0s\t%|pc, %|lr");
      output_asm_insn (instr, &operand);
    }

  return "";
}
2723 arm_volatile_func ()
2725 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
2728 /* Return the size of the prologue. It's not too bad if we slightly
2729 over-estimate. */
2731 static int
2732 get_prologue_size ()
2734 return profile_flag ? 12 : 0;
/* The amount of stack adjustment that happens here, in output_return and in
   output_epilogue must be exactly the same as was calculated during reload,
   or things will point to the wrong place.  The only time we can safely
   ignore this constraint is when a function has no arguments on the stack,
   no stack frame requirement and no live registers except for `lr'.  If we
   can guarantee that by making all function calls into tail calls and that
   lr is not clobbered in any other way, then there is no need to push lr
   onto the stack. */
/* Write assembler commentary for the function prologue to F and decide
   whether the save of lr can be eliminated.  FRAME_SIZE is the size of
   the local frame in bytes.  The actual register saves are emitted as
   RTL by arm_expand_prologue.  */

void
output_func_prologue (f, frame_size)
     FILE *f;
     int frame_size;
{
  int reg, live_regs_mask = 0;
  /* NOTE(review): OPERANDS and STORE_ARG_REGS appear unused in this
     function; compare arm_expand_prologue, which computes the same
     values and does use them.  */
  rtx operands[3];
  int volatile_func = (optimize > 0
		       && TREE_THIS_VOLATILE (current_function_decl));

  /* Nonzero if we must stuff some register arguments onto the stack as if
     they were passed there.  */
  int store_arg_regs = 0;

  if (arm_ccfsm_state || arm_target_insn)
    abort ();					/* Sanity check */

  return_used_this_function = 0;
  lr_save_eliminated = 0;

  fprintf (f, "\t%c args = %d, pretend = %d, frame = %d\n",
	   ARM_COMMENT_CHAR, current_function_args_size,
	   current_function_pretend_args_size, frame_size);
  fprintf (f, "\t%c frame_needed = %d, current_function_anonymous_args = %d\n",
	   ARM_COMMENT_CHAR, frame_pointer_needed,
	   current_function_anonymous_args);
  if (volatile_func)
    fprintf (f, "\t%c Volatile function.\n", ARM_COMMENT_CHAR);

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  /* Collect the callee-saved integer registers that are live.  */
  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && ! call_used_regs[reg])
      live_regs_mask |= (1 << reg);

  if (frame_pointer_needed)
    live_regs_mask |= 0xD800;	/* fp (r11), ip (r12), lr (r14), pc (r15) */
  else if (regs_ever_live[14])
    {
      /* lr need not be saved if no arguments live on the stack and no
	 insn really clobbers it (see comment above).  */
      if (! current_function_args_size
	  && ! function_really_clobbers_lr (get_insns ()))
	lr_save_eliminated = 1;
      else
	live_regs_mask |= 0x4000;
    }

  if (live_regs_mask)
    {
      /* if a di mode load/store multiple is used, and the base register
	 is r3, then r4 can become an ever live register without lr
	 doing so, in this case we need to push lr as well, or we
	 will fail to get a proper return. */
      live_regs_mask |= 0x4000;
      lr_save_eliminated = 0;
    }

  if (lr_save_eliminated)
    fprintf (f,"\t%c I don't think this function clobbers lr\n",
	     ARM_COMMENT_CHAR);
}
2812 void
2813 output_func_epilogue (f, frame_size)
2814 FILE *f;
2815 int frame_size;
2817 int reg, live_regs_mask = 0, code_size = 0;
2818 /* If we need this then it will always be at lesat this much */
2819 int floats_offset = 24;
2820 rtx operands[3];
2821 int volatile_func = (optimize > 0
2822 && TREE_THIS_VOLATILE (current_function_decl));
2824 if (use_return_insn() && return_used_this_function)
2826 if (frame_size && !(frame_pointer_needed || TARGET_APCS))
2828 abort ();
2830 goto epilogue_done;
2833 /* A volatile function should never return. Call abort. */
2834 if (volatile_func)
2836 rtx op = gen_rtx (SYMBOL_REF, Pmode, "abort");
2837 output_asm_insn ("bl\t%a0", &op);
2838 code_size = 4;
2839 goto epilogue_done;
2842 for (reg = 0; reg <= 10; reg++)
2843 if (regs_ever_live[reg] && ! call_used_regs[reg])
2845 live_regs_mask |= (1 << reg);
2846 floats_offset += 4;
2849 if (frame_pointer_needed)
2851 for (reg = 23; reg > 15; reg--)
2852 if (regs_ever_live[reg] && ! call_used_regs[reg])
2854 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", ARM_REG_PREFIX,
2855 reg_names[reg], ARM_REG_PREFIX, floats_offset);
2856 floats_offset += 12;
2857 code_size += 4;
2860 live_regs_mask |= 0xA800;
2861 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
2862 TARGET_6 ? FALSE : TRUE);
2863 code_size += 4;
2865 else
2867 /* Restore stack pointer if necessary. */
2868 if (frame_size)
2870 operands[0] = operands[1] = stack_pointer_rtx;
2871 operands[2] = gen_rtx (CONST_INT, VOIDmode, frame_size);
2872 output_add_immediate (operands);
2875 for (reg = 16; reg < 24; reg++)
2876 if (regs_ever_live[reg] && ! call_used_regs[reg])
2878 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", ARM_REG_PREFIX,
2879 reg_names[reg], ARM_REG_PREFIX);
2880 code_size += 4;
2882 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
2884 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
2885 TARGET_6 ? FALSE : TRUE);
2886 code_size += 4;
2888 else
2890 if (live_regs_mask || regs_ever_live[14])
2892 live_regs_mask |= 0x4000;
2893 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
2894 code_size += 4;
2896 if (current_function_pretend_args_size)
2898 operands[0] = operands[1] = stack_pointer_rtx;
2899 operands[2] = gen_rtx (CONST_INT, VOIDmode,
2900 current_function_pretend_args_size);
2901 output_add_immediate (operands);
2903 fprintf (f,
2904 TARGET_6 ? "\tmov\t%spc, %slr\n" : "\tmovs\t%spc, %slr\n",
2905 ARM_REG_PREFIX, ARM_REG_PREFIX, f);
2906 code_size += 4;
2910 epilogue_done:
2912 /* insn_addresses isn't allocated when not optimizing */
2914 if (optimize > 0)
2915 arm_increase_location (code_size
2916 + insn_addresses[INSN_UID (get_last_insn ())]
2917 + get_prologue_size ());
2919 current_function_anonymous_args = 0;
/* Emit RTL for a multi-register push (stmfd) of every register whose
   bit is set in MASK.  The pattern is a PARALLEL whose first element
   is a SET of the pre-decremented stack (wrapped in an UNSPEC so it is
   matched as a push) and whose remaining elements are USEs of the
   other registers, in ascending register order.  */

static void
emit_multi_reg_push (mask)
     int mask;
{
  int num_regs = 0;
  int i, j;
  rtx par;

  /* Count the registers to be pushed.  */
  for (i = 0; i < 16; i++)
    if (mask & (1 << i))
      num_regs++;

  if (num_regs == 0 || num_regs > 16)
    abort ();

  par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num_regs));

  /* The lowest-numbered register becomes the SET element...  */
  for (i = 0; i < 16; i++)
    {
      if (mask & (1 << i))
	{
	  XVECEXP (par, 0, 0)
	    = gen_rtx (SET, VOIDmode, gen_rtx (MEM, BLKmode,
					       gen_rtx (PRE_DEC, BLKmode,
							stack_pointer_rtx)),
		       gen_rtx (UNSPEC, BLKmode,
				gen_rtvec (1, gen_rtx (REG, SImode, i)),
				2));
	  break;
	}
    }

  /* ...and the remaining registers become USE elements.  */
  for (j = 1, i++; j < num_regs; i++)
    {
      if (mask & (1 << i))
	{
	  XVECEXP (par, 0, j)
	    = gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, i));
	  j++;
	}
    }

  emit_insn (par);
}
/* Generate RTL for the prologue of the current function: push the
   anonymous-argument registers and callee-saved registers, save the
   float registers, set up the frame pointer and allocate the local
   frame.  */

void
arm_expand_prologue ()
{
  int reg;
  rtx amount = GEN_INT (- get_frame_size ());
  /* NOTE(review): PUSH_INSN and NUM_REGS appear to be unused here.  */
  rtx push_insn;
  int num_regs;
  int live_regs_mask = 0;
  int store_arg_regs = 0;
  int volatile_func = (optimize > 0
		       && TREE_THIS_VOLATILE (current_function_decl));

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  /* A volatile (non-returning) function need not preserve any
     registers.  */
  if (! volatile_func)
    for (reg = 0; reg <= 10; reg++)
      if (regs_ever_live[reg] && ! call_used_regs[reg])
	live_regs_mask |= 1 << reg;

  if (! volatile_func && regs_ever_live[14])
    live_regs_mask |= 0x4000;

  if (frame_pointer_needed)
    {
      live_regs_mask |= 0xD800;	/* fp, ip, lr and pc */
      /* Preserve the incoming sp in ip (r12) so the frame pointer can
	 be established after the pushes below.  */
      emit_insn (gen_movsi (gen_rtx (REG, SImode, 12),
			    stack_pointer_rtx));
    }

  if (current_function_pretend_args_size)
    {
      if (store_arg_regs)
	/* Push the tail of r0-r3 that holds anonymous arguments.  */
	emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
			     & 0xf);
      else
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (-current_function_pretend_args_size)));
    }

  if (live_regs_mask)
    {
      /* If we have to push any regs, then we must push lr as well, or
	 we won't get a proper return.  */
      live_regs_mask |= 0x4000;
      emit_multi_reg_push (live_regs_mask);
    }

  /* For now the integer regs are still pushed in output_func_epilogue ().  */

  /* Save the live float registers (r16-r23) below the integer saves.  */
  if (! volatile_func)
    for (reg = 23; reg > 15; reg--)
      if (regs_ever_live[reg] && ! call_used_regs[reg])
	emit_insn (gen_rtx (SET, VOIDmode,
			    gen_rtx (MEM, XFmode,
				     gen_rtx (PRE_DEC, XFmode,
					      stack_pointer_rtx)),
			    gen_rtx (REG, XFmode, reg)));

  if (frame_pointer_needed)
    emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx (REG, SImode, 12),
			   (GEN_INT
			    (-(4 + current_function_pretend_args_size)))));

  if (amount != const0_rtx)
    {
      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
      /* Clobber the frame memory so the scheduler cannot move frame
	 accesses above the allocation.  */
      emit_insn (gen_rtx (CLOBBER, VOIDmode,
			  gen_rtx (MEM, BLKmode, stack_pointer_rtx)));
    }

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  */
  if (profile_flag || profile_block_flag)
    emit_insn (gen_blockage ());
}
/* If CODE is 'd', then the X is a condition operand and the instruction
   should only be executed if the condition is true.
   If CODE is 'D', then the X is a condition operand and the instruction
   should only be executed if the condition is false: however, if the mode
   of the comparison is CCFPEmode, then always execute the instruction -- we
   do this because in these circumstances !GE does not necessarily imply LT;
   in these cases the instruction pattern will take care to make sure that
   an instruction containing %d will follow, thereby undoing the effects of
   doing this instruction unconditionally.
   If CODE is 'N' then X is a floating point operand that must be negated
   before output.
   If CODE is 'B' then output a bitwise inverted value of X (a const int).
   If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
/* Print operand X to STREAM under the control of CODE (one of the
   punctuation/letter codes documented in the comment above, or 0 for
   the default formatting of registers, memory and constants).  */

void
arm_print_operand (stream, x, code)
     FILE *stream;
     rtx x;
     int code;
{
  switch (code)
    {
    case '@':
      /* Assembler comment character.  */
      fputc (ARM_COMMENT_CHAR, stream);
      return;

    case '|':
      /* Register-name prefix, if any.  */
      fputs (ARM_REG_PREFIX, stream);
      return;

    case '?':
      /* Current condition code when inside a conditionalised block
	 (ccfsm states 3 and 4); nothing otherwise.  */
      if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
	fputs (arm_condition_codes[arm_current_cc], stream);
      return;

    case 'N':
      /* A floating point constant, negated before output.  */
      {
	REAL_VALUE_TYPE r;
	REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	r = REAL_VALUE_NEGATE (r);
	fprintf (stream, "%s", fp_const_from_val (&r));
      }
      return;

    case 'B':
      /* Bitwise inverse of X.  */
      if (GET_CODE (x) == CONST_INT)
	fprintf (stream,
#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
		 "%d",
#else
		 "%ld",
#endif
		 ARM_SIGN_EXTEND (~ INTVAL (x)));
      else
	{
	  putc ('~', stream);
	  output_addr_const (stream, x);
	}
      return;

    case 'i':
      fprintf (stream, "%s", arithmetic_instr (x, 1));
      return;

    case 'I':
      fprintf (stream, "%s", arithmetic_instr (x, 0));
      return;

    case 'S':
      /* The shift part of a shifted operand, if X contains one.  */
      {
	HOST_WIDE_INT val;
	char *shift = shift_op (x, &val);

	if (shift)
	  {
	    fprintf (stream, ", %s ", shift_op (x, &val));
	    if (val == -1)
	      /* Shift amount is a register operand.  */
	      arm_print_operand (stream, XEXP (x, 1), 0);
	    else
	      fprintf (stream,
#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
		       "#%d",
#else
		       "#%ld",
#endif
		       val);
	  }
      }
      return;

    case 'R':
      /* The register one above X (e.g. the high word of a DImode
	 value held in a register pair).  */
      if (REGNO (x) > 15)
	abort ();
      fputs (ARM_REG_PREFIX, stream);
      fputs (reg_names[REGNO (x) + 1], stream);
      return;

    case 'm':
      /* The base register of memory operand X.  */
      fputs (ARM_REG_PREFIX, stream);
      if (GET_CODE (XEXP (x, 0)) == REG)
	fputs (reg_names[REGNO (XEXP (x, 0))], stream);
      else
	fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
      return;

    case 'M':
      /* An ldm/stm style register range spanning the mode of X.  */
      fprintf (stream, "{%s%s-%s%s}", ARM_REG_PREFIX, reg_names[REGNO (x)],
	       ARM_REG_PREFIX, reg_names[REGNO (x) - 1
					 + ((GET_MODE_SIZE (GET_MODE (x))
					     + GET_MODE_SIZE (SImode) - 1)
					    / GET_MODE_SIZE (SImode))]);
      return;

    case 'd':
      /* Condition for conditional execution; see comment above.  */
      if (x)
	fputs (arm_condition_codes[get_arm_condition_code (x)],
	       stream);
      return;

    case 'D':
      /* Inverted condition; suppressed for CCFPEmode / FP comparisons
	 (other than EQ/NE) unless -ffast-math -- see comment above.  */
      if (x && (flag_fast_math
		|| GET_CODE (x) == EQ || GET_CODE (x) == NE
		|| (GET_MODE (XEXP (x, 0)) != CCFPEmode
		    && (GET_MODE_CLASS (GET_MODE (XEXP (x, 0)))
			!= MODE_FLOAT))))
	fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
				   (get_arm_condition_code (x))],
	       stream);
      return;

    default:
      if (x == 0)
	abort ();

      if (GET_CODE (x) == REG)
	{
	  fputs (ARM_REG_PREFIX, stream);
	  fputs (reg_names[REGNO (x)], stream);
	}
      else if (GET_CODE (x) == MEM)
	{
	  output_memory_reference_mode = GET_MODE (x);
	  output_address (XEXP (x, 0));
	}
      else if (GET_CODE (x) == CONST_DOUBLE)
	fprintf (stream, "#%s", fp_immediate_constant (x));
      else if (GET_CODE (x) == NEG)
	abort (); /* This should never happen now. */
      else
	{
	  fputc ('#', stream);
	  output_addr_const (stream, x);
	}
    }
}
3200 /* Increase the `arm_text_location' by AMOUNT if we're in the text
3201 segment. */
3203 void
3204 arm_increase_location (amount)
3205 int amount;
3207 if (in_text_section ())
3208 arm_text_location += amount;
3212 /* Output a label definition. If this label is within the .text segment, it
3213 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
3214 Maybe GCC remembers names not starting with a `*' for a long time, but this
3215 is a minority anyway, so we just make a copy. Do not store the leading `*'
3216 if the name starts with one. */
3218 void
3219 arm_asm_output_label (stream, name)
3220 FILE *stream;
3221 char *name;
3223 char *real_name, *s;
3224 struct label_offset *cur;
3225 int hash = 0;
3227 assemble_name (stream, name);
3228 fputs (":\n", stream);
3229 if (! in_text_section ())
3230 return;
3232 if (name[0] == '*')
3234 real_name = xmalloc (1 + strlen (&name[1]));
3235 strcpy (real_name, &name[1]);
3237 else
3239 real_name = xmalloc (2 + strlen (name));
3240 strcpy (real_name, "_");
3241 strcat (real_name, name);
3243 for (s = real_name; *s; s++)
3244 hash += *s;
3246 hash = hash % LABEL_HASH_SIZE;
3247 cur = (struct label_offset *) xmalloc (sizeof (struct label_offset));
3248 cur->name = real_name;
3249 cur->offset = arm_text_location;
3250 cur->cdr = offset_table[hash];
3251 offset_table[hash] = cur;
3254 /* Load a symbol that is known to be in the text segment into a register.
3255 This should never be called when not optimizing. */
/* Output code to load the address of text-segment symbol OPERANDS[1]
   into register OPERANDS[0] for INSN, as a sequence of `sub rd, pc'
   instructions, one per byte of the pc-relative offset that can be
   non-zero.  Only valid when optimizing (insn addresses are needed).  */

char *
output_load_symbol (insn, operands)
     rtx insn;
     rtx *operands;
{
  char *s;
  char *name = XSTR (operands[1], 0);
  struct label_offset *he;
  int hash = 0;
  int offset;
  unsigned int mask, never_mask = 0xffffffff;
  int shift, inst;
  char buffer[100];

  if (optimize == 0 || *name != '*')
    abort ();

  /* Hash the name (sans `*'); this must match arm_asm_output_label.  */
  for (s = &name[1]; *s; s++)
    hash += *s;

  hash = hash % LABEL_HASH_SIZE;
  he = offset_table[hash];
  while (he && strcmp (he->name, &name[1]))
    he = he->cdr;

  if (!he)
    abort ();

  /* Distance from this insn (plus 8 bytes of pipeline prefetch) back
     to the label.  */
  offset = (arm_text_location + insn_addresses[INSN_UID (insn)]
	    + get_prologue_size () + 8 - he->offset);
  if (offset < 0)
    abort ();

  /* When generating the instructions, we never mask out the bits that we
     think will be always zero, then if a mistake has occurred somewhere, the
     assembler will spot it and generate an error. */

  /* If the symbol is word aligned then we might be able to reduce the
     number of loads. */
  shift = ((offset & 3) == 0) ? 2 : 0;

  /* Clear the bits from NEVER_MASK that will be orred in with the individual
     instructions. */
  for (; shift < 32; shift += 8)
    {
      mask = 0xff << shift;
      if ((offset & mask) || ((unsigned) offset) > mask)
	never_mask &= ~mask;
    }

  inst = 8;
  /* After the loop SHIFT is 32 or 34, so this starts MASK at the
     lowest byte (possibly word-aligned) of the offset.  */
  mask = 0xff << (shift - 32);

  /* Emit one `sub' per byte of offset that can be non-zero; the first
     subtracts from pc, the rest refine the partial result.  */
  while (mask && (never_mask & mask) == 0)
    {
      if (inst == 8)
	{
	  strcpy (buffer, "sub%?\t%0, %|pc, #(8 + . -%a1)");
	  if ((never_mask | mask) != 0xffffffff)
	    sprintf (buffer + strlen (buffer), " & 0x%x", mask | never_mask);
	}
      else
	sprintf (buffer, "sub%%?\t%%0, %%0, #(%d + . -%%a1) & 0x%x",
		 inst, mask | never_mask);

      output_asm_insn (buffer, operands);
      mask <<= 8;
      inst -= 4;
    }

  return "";
}
3330 /* Output code resembling an .lcomm directive. /bin/as doesn't have this
3331 directive hence this hack, which works by reserving some `.space' in the
3332 bss segment directly.
3334 XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
3335 define STATIC COMMON space but merely STATIC BSS space. */
3337 void
3338 output_lcomm_directive (stream, name, size, rounded)
3339 FILE *stream;
3340 char *name;
3341 int size, rounded;
3343 fprintf (stream, "\n\t.bss\t%c .lcomm\n", ARM_COMMENT_CHAR);
3344 assemble_name (stream, name);
3345 fprintf (stream, ":\t.space\t%d\n", rounded);
3346 if (in_text_section ())
3347 fputs ("\n\t.text\n", stream);
3348 else
3349 fputs ("\n\t.data\n", stream);
3352 /* A finite state machine takes care of noticing whether or not instructions
3353 can be conditionally executed, and thus decrease execution time and code
3354 size by deleting branch instructions. The fsm is controlled by
3355 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
3357 /* The state of the fsm controlling condition codes are:
3358 0: normal, do nothing special
3359 1: make ASM_OUTPUT_OPCODE not output this instruction
3360 2: make ASM_OUTPUT_OPCODE not output this instruction
3361 3: make instructions conditional
3362 4: make instructions conditional
3364 State transitions (state->state by whom under condition):
3365 0 -> 1 final_prescan_insn if the `target' is a label
3366 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
3367 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
3368 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
3369 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
3370 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
3371 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
3372 (the target insn is arm_target_insn).
3374 If the jump clobbers the conditions then we use states 2 and 4.
3376 A similar thing can be done with conditional return insns.
3378 XXX In case the `target' is an unconditional branch, this conditionalising
3379 of the instructions always reduces code size, but not always execution
3380 time. But then, I want to reduce the code size to somewhere near what
3381 /bin/cc produces. */
3383 /* Returns the index of the ARM condition code string in
3384 `arm_condition_codes'. COMPARISON should be an rtx like
3385 `(eq (...) (...))'. */
3388 get_arm_condition_code (comparison)
3389 rtx comparison;
3391 switch (GET_CODE (comparison))
3393 case NE: return (1);
3394 case EQ: return (0);
3395 case GE: return (10);
3396 case GT: return (12);
3397 case LE: return (13);
3398 case LT: return (11);
3399 case GEU: return (2);
3400 case GTU: return (8);
3401 case LEU: return (9);
3402 case LTU: return (3);
3403 default: abort ();
3405 /*NOTREACHED*/
3406 return (42);
/* Drive the conditional-execution state machine described in the
   commentary above.  Called by final for each insn before output;
   OPVEC and NOPERANDS are unused here.  May set arm_ccfsm_state,
   arm_current_cc, arm_target_label and arm_target_insn to make the
   insns skipped by a short forward branch execute conditionally.  */

void
final_prescan_insn (insn, opvec, noperands)
     rtx insn;
     rtx *opvec;
     int noperands;
{
  /* BODY will hold the body of INSN. */
  register rtx body = PATTERN (insn);

  /* This will be 1 if trying to repeat the trick, and things need to be
     reversed if it appears to fail. */
  int reverse = 0;

  /* Nonzero implies that the conditions if a branch is taken are
     clobbered, even if the rtl suggests otherwise.  It also means that
     we have to grub around within the jump expression to find out what
     the conditions are when the jump isn't taken. */
  int jump_clobbers = 0;

  /* If we start with a return insn, we only succeed if we find another one. */
  int seeking_return = 0;

  /* START_INSN will hold the insn from where we start looking.  This is the
     first insn after the following code_label if REVERSE is true. */
  rtx start_insn = insn;

  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0. */
  if (arm_ccfsm_state == 4)
    {
      if (insn == arm_target_insn)
	{
	  arm_target_insn = NULL;
	  arm_ccfsm_state = 0;
	}
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off. */
  if (arm_ccfsm_state == 3)
    {
      if (simplejump_p (insn))
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    {
	      /* XXX Isn't this always a barrier?  */
	      start_insn = next_nonnote_insn (start_insn);
	    }
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    reverse = TRUE;
	  else
	    return;
	}
      else if (GET_CODE (body) == RETURN)
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    {
	      reverse = TRUE;
	      seeking_return = 1;
	    }
	  else
	    return;
	}
      else
	return;
    }

  if (arm_ccfsm_state != 0 && !reverse)
    abort ();
  if (GET_CODE (insn) != JUMP_INSN)
    return;

  /* This jump might be paralleled with a clobber of the condition codes;
     the jump should always come first */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

#if 0
  /* If this is a conditional return then we don't want to know */
  if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
      && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
      && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
	  || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
    return;
#endif

  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped = 0, fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
      int then_not_else = TRUE;
      rtx this_insn = start_insn, label = 0;

      if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
	{
	  /* The code below is wrong for these, and I haven't time to
	     fix it now.  So we just do the safe thing and return.  This
	     whole function needs re-writing anyway. */
	  jump_clobbers = 1;
	  return;
	}

      /* Register the insn jumped to. */
      if (reverse)
	{
	  if (!seeking_return)
	    label = XEXP (SET_SRC (body), 0);
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
	label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
	{
	  label = XEXP (XEXP (SET_SRC (body), 2), 0);
	  then_not_else = FALSE;
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
	seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
	{
	  seeking_return = 1;
	  then_not_else = FALSE;
	}
      else
	abort ();

      /* See how many insns this branch skips, and what kind of insns.  If all
	 insns are okay, and the label or unconditional branch to the same
	 label is not too far away, succeed.  */
      for (insns_skipped = 0;
	   !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
	   insns_skipped++)
	{
	  rtx scanbody;

	  this_insn = next_nonnote_insn (this_insn);
	  if (!this_insn)
	    break;

	  scanbody = PATTERN (this_insn);

	  switch (GET_CODE (this_insn))
	    {
	    case CODE_LABEL:
	      /* Succeed if it is the target label, otherwise fail since
		 control falls in from somewhere else. */
	      if (this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case BARRIER:
	      /* Succeed if the following insn is the target label.
		 Otherwise fail.
		 If return insns are used then the last insn in a function
		 will be a barrier. */
	      this_insn = next_nonnote_insn (this_insn);
	      if (this_insn && this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case CALL_INSN:
	      /* The arm 6xx uses full 32 bit addresses so the cc is not
		 preserved over calls */
	      if (TARGET_6)
		fail = TRUE;
	      break;
	    case JUMP_INSN:
	      /* If this is an unconditional branch to the same label, succeed.
		 If it is to another label, do nothing.  If it is conditional,
		 fail. */
	      /* XXX Probably, the test for the SET and the PC are unnecessary. */

	      if (GET_CODE (scanbody) == SET
		  && GET_CODE (SET_DEST (scanbody)) == PC)
		{
		  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
		      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
		    {
		      arm_ccfsm_state = 2;
		      succeed = TRUE;
		    }
		  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
		    fail = TRUE;
		}
	      else if (GET_CODE (scanbody) == RETURN
		       && seeking_return)
		{
		  arm_ccfsm_state = 2;
		  succeed = TRUE;
		}
	      else if (GET_CODE (scanbody) == PARALLEL)
		{
		  switch (get_attr_conds (this_insn))
		    {
		    case CONDS_NOCOND:
		      break;
		    default:
		      fail = TRUE;
		      break;
		    }
		}
	      break;

	    case INSN:
	      /* Instructions using or affecting the condition codes make it
		 fail. */
	      if ((GET_CODE (scanbody) == SET
		   || GET_CODE (scanbody) == PARALLEL)
		  && get_attr_conds (this_insn) != CONDS_NOCOND)
		fail = TRUE;
	      break;

	    default:
	      break;
	    }
	}
      if (succeed)
	{
	  if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
	    arm_target_label = CODE_LABEL_NUMBER (label);
	  else if (seeking_return || arm_ccfsm_state == 2)
	    {
	      /* Skip any USE insns preceding the terminating barrier or
		 branch; the final insn found becomes the target insn.  */
	      while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
		{
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && (GET_CODE (this_insn) == BARRIER
				    || GET_CODE (this_insn) == CODE_LABEL))
		    abort ();
		}
	      if (!this_insn)
		{
		  /* Oh, dear! we ran off the end.. give up */
		  recog (PATTERN (insn), insn, NULL_PTR);
		  arm_ccfsm_state = 0;
		  arm_target_insn = NULL;
		  return;
		}
	      arm_target_insn = this_insn;
	    }
	  else
	    abort ();
	  if (jump_clobbers)
	    {
	      if (reverse)
		abort ();
	      arm_current_cc =
		get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
							  0), 0), 1));
	      if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	      if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	    }
	  else
	    {
	      /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
		 what it was. */
	      if (!reverse)
		arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
							       0));
	    }

	  if (reverse || then_not_else)
	    arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	}
      /* restore recog_operand (getting the attributes of other insns can
	 destroy this array, but final.c assumes that it remains intact
	 across this call; since the insn has been recognized already we
	 call recog direct). */
      recog (PATTERN (insn), insn, NULL_PTR);
    }
}
3717 /* EOF */