1 /* -*- Mode: Asm -*- */
2 /* Copyright (C) 1998, 1999, 2000, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov <denisc@overta.ru>
6 This file is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 2, or (at your option) any
11 In addition to the permissions in the GNU General Public License, the
12 Free Software Foundation gives you unlimited permission to link the
13 compiled version of this file into combinations with other programs,
14 and to distribute those combinations without any restriction coming
15 from the use of this file. (The General Public License restrictions
16 do apply in other respects; for example, they cover modification of
17 the file, and distribution when not linked into a combine
20 This file is distributed in the hope that it will be useful, but
21 WITHOUT ANY WARRANTY; without even the implied warranty of
22 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 General Public License for more details.
25 You should have received a copy of the GNU General Public License
26 along with this program; see the file COPYING. If not, write to
27 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
28 Boston, MA 02110-1301, USA. */
; ---------------------------------------------------------------------
; AVR libgcc support routines -- register conventions per the avr-gcc
; ABI: r0 is the scratch register, r1 is kept at zero by compiled code
; (see "clr r1 ; __zero_reg__ clobbered" in __mulsi3 below).
; NOTE(review): this excerpt is gappy (original line numbers embedded
; at the start of each line show missing lines); do not assume any
; routine below is complete as shown.
; ---------------------------------------------------------------------
30 #define __zero_reg__ r1
31 #define __tmp_reg__ r0
36 /* Most of the functions here are called directly from avr.md
37 patterns, instead of using the standard libcall mechanisms.
38 This can make better code because GCC knows exactly which
39 of the call-used registers (not all of them) are clobbered. */
; Each routine lives in this named text subsection so the linker can
; discard unreferenced ones.
41 .section .text.libgcc, "ax", @progbits
; mov_l / mov_h: copy the low/high half of a 16-bit register pair.
; Guarded on __AVR_HAVE_MOVW__ -- presumably a single movw vs. two
; mov's; macro bodies and .endm lines are not visible in this excerpt.
43 .macro mov_l r_dest, r_src
44 #if defined (__AVR_HAVE_MOVW__)
51 .macro mov_h r_dest, r_src
52 #if defined (__AVR_HAVE_MOVW__)
59 /* Note: mulqi3, mulhi3 are open-coded on the enhanced core. */
60 #if !defined (__AVR_HAVE_MUL__)
61 /*******************************************************
63 *******************************************************/
64 #if defined (L_mulqi3)
; __mulqi3: 8 x 8 => 8-bit multiply by shift-and-add, for devices
; without a hardware MUL instruction.
; In:  r24 = multiplier, r22 = multiplicand.  Out: r24 = product.
; Clobbers r0 (__tmp_reg__, used as the accumulator) and r22.
; NOTE(review): the __mulqi3_loop/__mulqi3_exit labels and several
; instructions are missing from this excerpt -- verify against the
; full file before editing.
66 #define r_arg2 r22 /* multiplicand */
67 #define r_arg1 r24 /* multiplier */
68 #define r_res __tmp_reg__ /* result */
73 clr r_res ; clear result
77 add r_arg2,r_arg2 ; shift multiplicand
78 breq __mulqi3_exit ; while multiplicand != 0
80 brne __mulqi3_loop ; exit if multiplier = 0
82 mov r_arg1,r_res ; result to return register
90 #endif /* defined (L_mulqi3) */
92 #if defined (L_mulqihi3)
; __mulqihi3 -- presumably the signed 8 x 8 => 16-bit widening
; multiply (standard libgcc name); body not visible in this excerpt.
104 #endif /* defined (L_mulqihi3) */
106 #if defined (L_umulqihi3)
; __umulqihi3 -- presumably the unsigned 8 x 8 => 16-bit widening
; multiply; body not visible in this excerpt.
114 #endif /* defined (L_umulqihi3) */
116 /*******************************************************
117 Multiplication 16 x 16
118 *******************************************************/
119 #if defined (L_mulhi3)
; __mulhi3: 16 x 16 => 16-bit multiply by shift-and-add, for devices
; without hardware MUL (this whole region is inside
; !defined(__AVR_HAVE_MUL__)).
; In:  r25:r24 = multiplier, r23:r22 = multiplicand.
; Out: r25:r24 = product.  Clobbers r0, r21, r22, r23.
; NOTE(review): loop/exit labels and some carry-propagation
; instructions are missing from this excerpt.
120 #define r_arg1L r24 /* multiplier Low */
121 #define r_arg1H r25 /* multiplier High */
122 #define r_arg2L r22 /* multiplicand Low */
123 #define r_arg2H r23 /* multiplicand High */
124 #define r_resL __tmp_reg__ /* result Low */
125 #define r_resH r21 /* result High */
130 clr r_resH ; clear result
131 clr r_resL ; clear result
135 add r_resL,r_arg2L ; result + multiplicand
138 add r_arg2L,r_arg2L ; shift multiplicand
; 16-bit test for multiplicand == 0 (cp + cpc chains the compare
; through both bytes).
141 cp r_arg2L,__zero_reg__
142 cpc r_arg2H,__zero_reg__
143 breq __mulhi3_exit ; while multiplicand != 0
145 lsr r_arg1H ; gets LSB of multiplier
148 brne __mulhi3_loop ; exit if multiplier = 0
150 mov r_arg1H,r_resH ; result to return register
162 #endif /* defined (L_mulhi3) */
163 #endif /* !defined (__AVR_HAVE_MUL__) */
165 #if defined (L_mulhisi3)
; __mulhisi3 -- presumably the signed 16 x 16 => 32-bit widening
; multiply; body not visible in this excerpt.
181 #endif /* defined (L_mulhisi3) */
183 #if defined (L_umulhisi3)
; __umulhisi3 -- presumably the unsigned 16 x 16 => 32-bit widening
; multiply; body not visible in this excerpt.
195 #endif /* defined (L_umulhisi3) */
197 #if defined (L_mulsi3)
198 /*******************************************************
199 Multiplication 32 x 32
200 *******************************************************/
; __mulsi3: 32 x 32 => 32-bit multiply.
; In:  r25..r22 = multiplier, r21..r18 = multiplicand.
; Out: r25..r22 = product (low 32 bits).
; Two implementations: a hardware-MUL partial-product sum (first
; branch) and a shift-and-add loop (second part -- the separating
; #else is not visible in this excerpt).
; NOTE(review): intermediate defines (r_arg1H/HL, r_arg2H/HL,
; r_resH/HL) and several add/adc lines are missing from this view.
201 #define r_arg1L r22 /* multiplier Low */
204 #define r_arg1HH r25 /* multiplier High */
207 #define r_arg2L r18 /* multiplicand Low */
210 #define r_arg2HH r21 /* multiplicand High */
212 #define r_resL r26 /* result Low */
215 #define r_resHH r31 /* result High */
221 #if defined (__AVR_HAVE_MUL__)
; Hardware path: sum the partial products that contribute to the
; low 32 bits (higher cross terms are dropped).
226 mul r_arg1HL, r_arg2L
229 mul r_arg1L, r_arg2HL
232 mul r_arg1HH, r_arg2L
234 mul r_arg1HL, r_arg2H
236 mul r_arg1H, r_arg2HL
238 mul r_arg1L, r_arg2HH
; r_arg1HH is dead at this point, so reuse it as a zero source for
; carry propagation (mul itself clobbers r1/__zero_reg__).
240 clr r_arg1HH ; use instead of __zero_reg__ to add carry
244 adc r_resHH, r_arg1HH ; add carry
248 adc r_resHH, r_arg1HH ; add carry
250 movw r_arg1HL, r_resHL
; Restore the ABI invariant that r1 holds zero -- mul writes its
; 16-bit product into r1:r0.
251 clr r1 ; __zero_reg__ clobbered by "mul"
; Software path: classic shift-and-add over 32 bits.
254 clr r_resHH ; clear result
255 clr r_resHL ; clear result
256 clr r_resH ; clear result
257 clr r_resL ; clear result
261 add r_resL,r_arg2L ; result + multiplicand
266 add r_arg2L,r_arg2L ; shift multiplicand
268 adc r_arg2HL,r_arg2HL
269 adc r_arg2HH,r_arg2HH
271 lsr r_arg1HH ; gets LSB of multiplier
278 brne __mulsi3_loop ; exit if multiplier = 0
280 mov_h r_arg1HH,r_resHH ; result to return register
281 mov_l r_arg1HL,r_resHL
285 #endif /* defined (__AVR_HAVE_MUL__) */
303 #endif /* defined (L_mulsi3) */
305 /*******************************************************
306 Division 8 / 8 => (result + remainder)
307 *******************************************************/
; Shared register assignments for the 8-bit div/mod routines below.
308 #define r_rem r25 /* remainder */
309 #define r_arg1 r24 /* dividend, quotient */
310 #define r_arg2 r22 /* divisor */
311 #define r_cnt r23 /* loop count */
313 #if defined (L_udivmodqi4)
; __udivmodqi4: unsigned 8/8 restoring-style division.
; In:  r24 = dividend, r22 = divisor.
; Out: r24 = quotient, r25 = remainder.  Clobbers r23.
; The quotient is built inverted (brcs skips the subtract AND leaves
; the borrow bit to be rolled into r_arg1), hence the final com.
; NOTE(review): the __udivmodqi4_loop/_ep labels are not visible in
; this excerpt.
317 sub r_rem,r_rem ; clear remainder and carry
; 9 iterations: 8 quotient bits plus one priming shift.
318 ldi r_cnt,9 ; init loop counter
319 rjmp __udivmodqi4_ep ; jump to entry point
321 rol r_rem ; shift dividend into remainder
322 cp r_rem,r_arg2 ; compare remainder & divisor
323 brcs __udivmodqi4_ep ; remainder < divisor (C set: skip subtract)
324 sub r_rem,r_arg2 ; remainder -= divisor
326 rol r_arg1 ; shift dividend (with CARRY)
327 dec r_cnt ; decrement loop counter
328 brne __udivmodqi4_loop
329 com r_arg1 ; complement result
330 ; because C flag was complemented in loop
333 #endif /* defined (L_udivmodqi4) */
335 #if defined (L_divmodqi4)
; __divmodqi4: signed 8/8 division, implemented as sign-fixup around
; __udivmodqi4.  In: r24 = dividend, r22 = divisor.
; Out: r24 = quotient, r25 = remainder.
; T flag caches the dividend sign (remainder takes the dividend's
; sign); r0 bit 7 caches the quotient sign (dividend XOR divisor).
; NOTE(review): the conditional branches around each neg are missing
; from this excerpt -- each neg runs only for negative operands.
339 bst r_arg1,7 ; store sign of dividend
340 mov __tmp_reg__,r_arg1
341 eor __tmp_reg__,r_arg2; r0.7 is sign of result
343 neg r_arg1 ; dividend negative : negate
345 neg r_arg2 ; divisor negative : negate
346 rcall __udivmodqi4 ; do the unsigned div/mod
348 neg r_rem ; correct remainder sign
351 neg r_arg1 ; correct result sign
355 #endif /* defined (L_divmodqi4) */
363 /*******************************************************
364 Division 16 / 16 => (result + remainder)
365 *******************************************************/
; Shared register assignments for the 16-bit div/mod routines.
366 #define r_remL r26 /* remainder Low */
367 #define r_remH r27 /* remainder High */
369 /* return: remainder */
370 #define r_arg1L r24 /* dividend Low */
371 #define r_arg1H r25 /* dividend High */
373 /* return: quotient */
374 #define r_arg2L r22 /* divisor Low */
375 #define r_arg2H r23 /* divisor High */
377 #define r_cnt r21 /* loop count */
379 #if defined (L_udivmodhi4)
; __udivmodhi4: unsigned 16/16 restoring-style division, same scheme
; as __udivmodqi4 widened to two bytes.
; In:  r25:r24 = dividend, r23:r22 = divisor.
; Out: r23:r22 = quotient, r25:r24 = remainder (div()-style layout).
; Clobbers r21, r26, r27.
; NOTE(review): high-byte rol/cpc/sbc companions and the loop/ep
; labels are missing from this excerpt.
384 sub r_remH,r_remH ; clear remainder and carry
; 17 iterations: 16 quotient bits plus one priming shift.
385 ldi r_cnt,17 ; init loop counter
386 rjmp __udivmodhi4_ep ; jump to entry point
388 rol r_remL ; shift dividend into remainder
390 cp r_remL,r_arg2L ; compare remainder & divisor
392 brcs __udivmodhi4_ep ; remainder < divisor
393 sub r_remL,r_arg2L ; remainder -= divisor
396 rol r_arg1L ; shift dividend (with CARRY)
398 dec r_cnt ; decrement loop counter
399 brne __udivmodhi4_loop
402 ; div/mod results to return registers, as for the div() function
403 mov_l r_arg2L, r_arg1L ; quotient
404 mov_h r_arg2H, r_arg1H
405 mov_l r_arg1L, r_remL ; remainder
406 mov_h r_arg1H, r_remH
409 #endif /* defined (L_udivmodhi4) */
411 #if defined (L_divmodhi4)
; __divmodhi4: signed 16/16 division via sign-fixup around
; __udivmodhi4.  In: r25:r24 = dividend, r23:r22 = divisor.
; Out: r23:r22 = quotient, r25:r24 = remainder.
; T flag = dividend sign; r0 bit 7 = quotient sign.  The _neg1/_neg2
; helpers negate the 16-bit dividend/divisor pairs (their bodies
; follow below; some lines are missing from this excerpt).
417 bst r_arg1H,7 ; store sign of dividend
418 mov __tmp_reg__,r_arg1H
419 eor __tmp_reg__,r_arg2H ; r0.7 is sign of result
420 rcall __divmodhi4_neg1 ; dividend negative : negate
422 rcall __divmodhi4_neg2 ; divisor negative : negate
423 rcall __udivmodhi4 ; do the unsigned div/mod
424 rcall __divmodhi4_neg1 ; correct remainder sign
; _neg2: negate quotient only when the result sign bit (r0.7) says so.
426 brpl __divmodhi4_exit
429 neg r_arg2L ; correct divisor/result sign
; _neg1: negate dividend/remainder only when T (dividend sign) is set.
434 brtc __divmodhi4_exit
436 neg r_arg1L ; correct dividend/remainder sign
440 #endif /* defined (L_divmodhi4) */
453 /*******************************************************
454 Division 32 / 32 => (result + remainder)
455 *******************************************************/
; Shared register assignments for the 32-bit div/mod routines.
; NOTE(review): the intermediate r_remH/HL, r_arg1H/HL, r_arg2H/HL
; defines are missing from this excerpt.
456 #define r_remHH r31 /* remainder High */
459 #define r_remL r26 /* remainder Low */
461 /* return: remainder */
462 #define r_arg1HH r25 /* dividend High */
465 #define r_arg1L r22 /* dividend Low */
467 /* return: quotient */
468 #define r_arg2HH r21 /* divisor High */
471 #define r_arg2L r18 /* divisor Low */
; Using __zero_reg__ as the counter is safe: it is guaranteed to be
; zero again when the loop falls through (dec ... brne ends at 0).
473 #define r_cnt __zero_reg__ /* loop count (0 after the loop!) */
475 #if defined (L_udivmodsi4)
; __udivmodsi4: unsigned 32/32 restoring-style division.
; In:  r25..r22 = dividend, r21..r18 = divisor.
; Out: r21..r18 = quotient, r25..r22 = remainder (ldiv()-style).
; Clobbers r1 (restored to 0 by loop exit), r26..r31.
; The counter is parked in r_remL/r_remHL before the remainder
; registers are cleared; the entry point moves it into r_cnt.
; NOTE(review): loop/ep labels and the remaining rol/cpc/sbc
; companions are missing from this excerpt.
; 33 iterations: 32 quotient bits plus one priming shift.
479 ldi r_remL, 33 ; init loop counter
482 sub r_remH,r_remH ; clear remainder and carry
483 mov_l r_remHL, r_remL
484 mov_h r_remHH, r_remH
485 rjmp __udivmodsi4_ep ; jump to entry point
487 rol r_remL ; shift dividend into remainder
491 cp r_remL,r_arg2L ; compare remainder & divisor
495 brcs __udivmodsi4_ep ; remainder < divisor (C set: skip subtract)
496 sub r_remL,r_arg2L ; remainder -= divisor
501 rol r_arg1L ; shift dividend (with CARRY)
505 dec r_cnt ; decrement loop counter
506 brne __udivmodsi4_loop
507 ; __zero_reg__ now restored (r_cnt == 0)
512 ; div/mod results to return registers, as for the ldiv() function
513 mov_l r_arg2L, r_arg1L ; quotient
514 mov_h r_arg2H, r_arg1H
515 mov_l r_arg2HL, r_arg1HL
516 mov_h r_arg2HH, r_arg1HH
517 mov_l r_arg1L, r_remL ; remainder
518 mov_h r_arg1H, r_remH
519 mov_l r_arg1HL, r_remHL
520 mov_h r_arg1HH, r_remHH
523 #endif /* defined (L_udivmodsi4) */
525 #if defined (L_divmodsi4)
; __divmodsi4: signed 32/32 division via sign-fixup around
; __udivmodsi4, same scheme as __divmodhi4 widened to four bytes.
; In:  r25..r22 = dividend, r21..r18 = divisor.
; Out: r21..r18 = quotient, r25..r22 = remainder.
; T flag = dividend sign; r0 bit 7 = quotient sign.
; NOTE(review): _neg1/_neg2 bodies are mostly missing from this
; excerpt; only their first neg and guard branches are visible.
529 bst r_arg1HH,7 ; store sign of dividend
530 mov __tmp_reg__,r_arg1HH
531 eor __tmp_reg__,r_arg2HH ; r0.7 is sign of result
532 rcall __divmodsi4_neg1 ; dividend negative : negate
534 rcall __divmodsi4_neg2 ; divisor negative : negate
535 rcall __udivmodsi4 ; do the unsigned div/mod
536 rcall __divmodsi4_neg1 ; correct remainder sign
538 brcc __divmodsi4_exit
543 neg r_arg2L ; correct divisor/quotient sign
550 brtc __divmodsi4_exit
554 neg r_arg1L ; correct dividend/remainder sign
560 #endif /* defined (L_divmodsi4) */
562 /**********************************
563 * This is a prologue subroutine
564 **********************************/
565 #if defined (L_prologue)
; __prologue_saves__: shared function-prologue tail called by
; compiler-generated code to save the callee-saved registers and set
; up the frame.  Only the interrupt-safe SP update is visible here:
; SREG is saved to r0 so interrupts can be disabled around the
; two-byte stack-pointer write, then restored.
; NOTE(review): the register-push sequence, SP manipulation and the
; final ijmp back to the caller are missing from this excerpt.
567 .global __prologue_saves__
568 .func __prologue_saves__
592 in __tmp_reg__,__SREG__
595 out __SREG__,__tmp_reg__
599 #endif /* defined (L_prologue) */
602 * This is an epilogue subroutine
604 #if defined (L_epilogue)
; __epilogue_restores__: shared function-epilogue counterpart to
; __prologue_saves__ -- restores callee-saved registers and the stack
; pointer.  As in the prologue, SREG is preserved in r0 around the
; interrupt-sensitive two-byte SP write.
; NOTE(review): the register-pop sequence and the final ret are
; missing from this excerpt; the comment-block opener above line 602
; is also outside this view.
606 .global __epilogue_restores__
607 .func __epilogue_restores__
608 __epilogue_restores__:
629 in __tmp_reg__,__SREG__
632 out __SREG__,__tmp_reg__
638 #endif /* defined (L_epilogue) */
; Program-termination support (L_exit / L_cleanup fragments).
; .fini9 runs first at exit, .fini0 last; user/library shutdown code
; placed in .fini8 .. .fini1 executes between them via the linker
; script.  NOTE(review): the actual _exit/exit symbols and the final
; endless loop / cli are missing from this excerpt.
641 .section .fini9,"ax",@progbits
648 /* Code from .fini8 ... .fini1 sections inserted by ld script. */
650 .section .fini0,"ax",@progbits
655 #endif /* defined (L_exit) */
663 #endif /* defined (L_cleanup) */
; Jump-table dispatch helpers (L_tablejump).  Presumably __tablejump__
; reads a word from a flash-resident table via Z and jumps to it, with
; the LPM-with-postincrement variant selected by __AVR_HAVE_LPMX__;
; the bodies are not visible in this excerpt -- confirm in full file.
666 .global __tablejump2__
671 .global __tablejump__
673 #if defined (__AVR_HAVE_LPMX__)
687 #endif /* defined (L_tablejump) */
689 /* __do_copy_data is only necessary if there is anything in .data section.
690 Does not use RAMPZ - crt*.o provides a replacement for >64K devices. */
; Startup code (.init4): copy the .data image from its load address in
; flash (__data_load_start, Z = r31:r30) to its run address in RAM
; (__data_start, X = r27:r26), stopping at __data_end.  r17 holds
; hi8(__data_end) for the 16-bit end-of-loop compare (the matching
; cpc r27,r17 is not visible in this excerpt).
; NOTE(review): the loop body (lpm/st) and labels are missing here;
; the __AVR_HAVE_LPMX__ guard selects the lpm Z+ variant.
693 .section .init4,"ax",@progbits
694 .global __do_copy_data
696 ldi r17, hi8(__data_end)
697 ldi r26, lo8(__data_start)
698 ldi r27, hi8(__data_start)
699 ldi r30, lo8(__data_load_start)
700 ldi r31, hi8(__data_load_start)
701 rjmp .do_copy_data_start
703 #if defined (__AVR_HAVE_LPMX__)
711 cpi r26, lo8(__data_end)
713 brne .do_copy_data_loop
714 #endif /* L_copy_data */
716 /* __do_clear_bss is only necessary if there is anything in .bss section. */
; Startup code (.init4): zero-fill .bss from __bss_start (X = r27:r26)
; to __bss_end.  r17 holds hi8(__bss_end) for the 16-bit end compare.
; NOTE(review): the loop body (st X+, __zero_reg__ or similar), labels
; and the cpc companion to the cpi below are missing from this excerpt.
719 .section .init4,"ax",@progbits
720 .global __do_clear_bss
722 ldi r17, hi8(__bss_end)
723 ldi r26, lo8(__bss_start)
724 ldi r27, hi8(__bss_start)
725 rjmp .do_clear_bss_start
729 cpi r26, lo8(__bss_end)
731 brne .do_clear_bss_loop
732 #endif /* L_clear_bss */
734 /* __do_global_ctors and __do_global_dtors are only necessary
735 if there are any constructors/destructors. */
; The __AVR_MEGA__ guard presumably selects a >64K-flash variant;
; the alternative branch is not visible in this excerpt.
737 #if defined (__AVR_MEGA__)
; Startup code (.init6): walk the constructor table backwards from
; __ctors_end down to __ctors_start (Y = r29:r28 is the table
; pointer; r17 holds hi8(__ctors_start) for the 16-bit end compare),
; calling each constructor.  The call itself (table fetch + icall /
; __tablejump__) is missing from this excerpt.
744 .section .init6,"ax",@progbits
745 .global __do_global_ctors
747 ldi r17, hi8(__ctors_start)
748 ldi r28, lo8(__ctors_end)
749 ldi r29, hi8(__ctors_end)
750 rjmp .do_global_ctors_start
751 .do_global_ctors_loop:
756 .do_global_ctors_start:
757 cpi r28, lo8(__ctors_start)
759 brne .do_global_ctors_loop
; Shutdown code (.fini6): mirror image of __do_global_ctors -- walk
; the destructor table from __dtors_start up to __dtors_end
; (Y = r29:r28; r17 holds hi8(__dtors_end) for the end compare),
; calling each destructor.  The call sequence inside the loop is
; missing from this excerpt.
763 .section .fini6,"ax",@progbits
764 .global __do_global_dtors
766 ldi r17, hi8(__dtors_end)
767 ldi r28, lo8(__dtors_start)
768 ldi r29, hi8(__dtors_start)
769 rjmp .do_global_dtors_start
770 .do_global_dtors_loop:
775 .do_global_dtors_start:
776 cpi r28, lo8(__dtors_end)
778 brne .do_global_dtors_loop