1 /* -*- Mode: Asm -*- */
2 /* Copyright (C) 1998, 1999, 2000, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov <denisc@overta.ru>
6 This file is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 2, or (at your option) any
11 In addition to the permissions in the GNU General Public License, the
12 Free Software Foundation gives you unlimited permission to link the
13 compiled version of this file into combinations with other programs,
14 and to distribute those combinations without any restriction coming
15 from the use of this file. (The General Public License restrictions
16 do apply in other respects; for example, they cover modification of
17 the file, and distribution when not linked into a combine executable.)
20 This file is distributed in the hope that it will be useful, but
21 WITHOUT ANY WARRANTY; without even the implied warranty of
22 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 General Public License for more details.
25 You should have received a copy of the GNU General Public License
26 along with this program; see the file COPYING. If not, write to
27 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
28 Boston, MA 02110-1301, USA. */
/* NOTE(review): this listing appears to be an elided, line-numbered extract
   of GCC's AVR libgcc assembly support routines; many original source lines
   are absent.  The fragments below are documented as-is, and any statement
   that depends on a missing line is marked as an assumption.  */
/* GCC/AVR register convention: r1 is the fixed zero register, r0 the fixed
   scratch (temporary) register.  */
30 #define __zero_reg__ r1
31 #define __tmp_reg__ r0
/* 0x3B is the I/O address of RAMPZ -- presumably used with ELPM on devices
   with more than 64 KiB of flash; no use is visible in this extract
   (TODO confirm).  */
35 #define __RAMPZ__ 0x3B
37 /* Most of the functions here are called directly from avr.md
38 patterns, instead of using the standard libcall mechanisms.
39 This can make better code because GCC knows exactly which
40 of the call-used registers (not all of them) are clobbered. */
/* All routines below live in a dedicated allocatable/executable text
   subsection.  */
42 .section .text.libgcc, "ax", @progbits
/* mov_l / mov_h: copy the low / high register of a 16-bit register pair.
   The __AVR_HAVE_MOVW__ variant presumably performs the whole pair copy with
   a single MOVW (in mov_l) and a no-op in mov_h -- TODO confirm: the macro
   bodies, #else branches and .endm lines are missing from this extract.  */
44 .macro mov_l r_dest, r_src
45 #if defined (__AVR_HAVE_MOVW__)
52 .macro mov_h r_dest, r_src
53 #if defined (__AVR_HAVE_MOVW__)
60 /* Note: mulqi3, mulhi3 are open-coded on the enhanced core. */
61 #if !defined (__AVR_HAVE_MUL__)
62 /*******************************************************
   Multiplication 8 x 8  (software shift-and-add; built only
   when the core has no hardware MUL instruction)
64 *******************************************************/
65 #if defined (L_mulqi3)
/* __mulqi3: 8-bit multiply, result returned in r24.  The product is
   accumulated in __tmp_reg__ and copied back to the return register
   at the end.  */
67 #define r_arg2 r22 /* multiplicand */
68 #define r_arg1 r24 /* multiplier */
69 #define r_res __tmp_reg__ /* result */
; NOTE(review): the labels (__mulqi3, __mulqi3_loop, __mulqi3_exit) and the
; conditional-add (sbrc/add) and multiplier shift (lsr) instructions of the
; loop are missing from this extract -- only the skeleton below is visible.
74 clr r_res ; clear result
78 add r_arg2,r_arg2 ; shift multiplicand
79 breq __mulqi3_exit ; while multiplicand != 0
81 brne __mulqi3_loop ; exit if multiplier = 0
83 mov r_arg1,r_res ; result to return register
91 #endif /* defined (L_mulqi3) */
/* 8x8 -> 16 bit signed (mulqihi3) and unsigned (umulqihi3) multiply
   wrappers; their bodies are entirely elided in this extract.  */
93 #if defined (L_mulqihi3)
105 #endif /* defined (L_mulqihi3) */
107 #if defined (L_umulqihi3)
115 #endif /* defined (L_umulqihi3) */
117 /*******************************************************
118 Multiplication 16 x 16
119 *******************************************************/
120 #if defined (L_mulhi3)
/* __mulhi3: 16-bit software shift-and-add multiply (no-MUL cores only).
   Arguments arrive per the avr-gcc convention in r25:r24 and r23:r22;
   the low result byte is accumulated in __tmp_reg__, the high byte in r21,
   and both are copied to r25:r24 on exit.  */
121 #define r_arg1L r24 /* multiplier Low */
122 #define r_arg1H r25 /* multiplier High */
123 #define r_arg2L r22 /* multiplicand Low */
124 #define r_arg2H r23 /* multiplicand High */
125 #define r_resL __tmp_reg__ /* result Low */
126 #define r_resH r21 /* result High */
; NOTE(review): the entry/loop/exit labels, the conditional-add test (sbrc),
; the high-byte adds/shifts (adc), the ror of the multiplier low byte and the
; low-byte result move are missing from this extract.
131 clr r_resH ; clear result
132 clr r_resL ; clear result
136 add r_resL,r_arg2L ; result + multiplicand
139 add r_arg2L,r_arg2L ; shift multiplicand
; 16-bit test of the (shifted) multiplicand against zero:
142 cp r_arg2L,__zero_reg__
143 cpc r_arg2H,__zero_reg__
144 breq __mulhi3_exit ; while multiplicand != 0
146 lsr r_arg1H ; gets LSB of multiplier
149 brne __mulhi3_loop ; exit if multiplier = 0
151 mov r_arg1H,r_resH ; result to return register
163 #endif /* defined (L_mulhi3) */
164 #endif /* !defined (__AVR_HAVE_MUL__) */
/* 16x16 -> 32 bit signed (mulhisi3) and unsigned (umulhisi3) multiply
   wrappers; their bodies are entirely elided in this extract.  */
166 #if defined (L_mulhisi3)
182 #endif /* defined (L_mulhisi3) */
184 #if defined (L_umulhisi3)
196 #endif /* defined (L_umulhisi3) */
198 #if defined (L_mulsi3)
199 /*******************************************************
200 Multiplication 32 x 32
201 *******************************************************/
/* __mulsi3: 32-bit multiply, low 32 bits of the product returned in the
   argument registers.  Two implementations: a hardware-MUL partial-product
   version and a shift-and-add fallback.  NOTE(review): the defines for the
   middle bytes (r_arg1H/HL, r_arg2H/HL, r_resH/HL) are elided here, as are
   many instructions of both paths.  */
202 #define r_arg1L r22 /* multiplier Low */
205 #define r_arg1HH r25 /* multiplier High */
208 #define r_arg2L r18 /* multiplicand Low */
211 #define r_arg2HH r21 /* multiplicand High */
213 #define r_resL r26 /* result Low */
216 #define r_resHH r31 /* result High */
222 #if defined (__AVR_HAVE_MUL__)
; Partial products contributing to bytes 1..3 of the result (MUL writes each
; 16-bit product into r1:r0; the accumulating movw/add/adc lines between the
; muls are elided in this extract).
227 mul r_arg1HL, r_arg2L
230 mul r_arg1L, r_arg2HL
233 mul r_arg1HH, r_arg2L
235 mul r_arg1HL, r_arg2H
237 mul r_arg1H, r_arg2HL
239 mul r_arg1L, r_arg2HH
241 clr r_arg1HH ; use instead of __zero_reg__ to add carry
245 adc r_resHH, r_arg1HH ; add carry
249 adc r_resHH, r_arg1HH ; add carry
251 movw r_arg1HL, r_resHL
252 clr r1 ; __zero_reg__ clobbered by "mul"
; ---- no-MUL fallback: classic shift-and-add over 32 bits ----
255 clr r_resHH ; clear result
256 clr r_resHL ; clear result
257 clr r_resH ; clear result
258 clr r_resL ; clear result
262 add r_resL,r_arg2L ; result + multiplicand
267 add r_arg2L,r_arg2L ; shift multiplicand
269 adc r_arg2HL,r_arg2HL
270 adc r_arg2HH,r_arg2HH
272 lsr r_arg1HH ; gets LSB of multiplier
279 brne __mulsi3_loop ; exit if multiplier = 0
281 mov_h r_arg1HH,r_resHH ; result to return register
282 mov_l r_arg1HL,r_resHL
286 #endif /* defined (__AVR_HAVE_MUL__) */
304 #endif /* defined (L_mulsi3) */
306 /*******************************************************
307 Division 8 / 8 => (result + remainder)
308 *******************************************************/
309 #define r_rem r25 /* remainder */
310 #define r_arg1 r24 /* dividend, quotient */
311 #define r_arg2 r22 /* divisor */
312 #define r_cnt r23 /* loop count */
314 #if defined (L_udivmodqi4)
/* __udivmodqi4: unsigned 8/8 division; quotient in r24, remainder in r25.
   Shift-subtract loop over 9 iterations; the quotient bits are accumulated
   complemented (via the carry trick) and fixed up with COM at the end.
   NOTE(review): the entry/loop labels and the rjmp closing the loop are
   missing from this extract.  */
318 sub r_rem,r_rem ; clear remainder and carry
319 ldi r_cnt,9 ; init loop counter
320 rjmp __udivmodqi4_ep ; jump to entry point
322 rol r_rem ; shift dividend into remainder
323 cp r_rem,r_arg2 ; compare remainder & divisor
324 brcs __udivmodqi4_ep ; remainder <= divisor
325 sub r_rem,r_arg2 ; restore remainder
327 rol r_arg1 ; shift dividend (with CARRY)
328 dec r_cnt ; decrement loop counter
329 brne __udivmodqi4_loop
330 com r_arg1 ; complement result
331 ; because C flag was complemented in loop
334 #endif /* defined (L_udivmodqi4) */
336 #if defined (L_divmodqi4)
/* __divmodqi4: signed 8/8 division implemented on top of __udivmodqi4.
   T flag holds the dividend's sign (controls the remainder fix-up) and
   __tmp_reg__ bit 7 the quotient's sign.  NOTE(review): the sign-test
   branches (sbrc/brtc etc.) between these instructions are elided.  */
340 bst r_arg1,7 ; store sign of dividend
341 mov __tmp_reg__,r_arg1
342 eor __tmp_reg__,r_arg2; r0.7 is sign of result
344 neg r_arg1 ; dividend negative : negate
346 neg r_arg2 ; divisor negative : negate
347 rcall __udivmodqi4 ; do the unsigned div/mod
349 neg r_rem ; correct remainder sign
352 neg r_arg1 ; correct result sign
356 #endif /* defined (L_divmodqi4) */
364 /*******************************************************
365 Division 16 / 16 => (result + remainder)
366 *******************************************************/
367 #define r_remL r26 /* remainder Low */
368 #define r_remH r27 /* remainder High */
370 /* return: remainder */
371 #define r_arg1L r24 /* dividend Low */
372 #define r_arg1H r25 /* dividend High */
374 /* return: quotient */
375 #define r_arg2L r22 /* divisor Low */
376 #define r_arg2H r23 /* divisor High */
378 #define r_cnt r21 /* loop count */
380 #if defined (L_udivmodhi4)
/* __udivmodhi4: unsigned 16/16 division; quotient in r23:r22, remainder in
   r25:r24 (copied at the end, matching div()'s return layout).  17-iteration
   shift-subtract loop.  NOTE(review): the high-byte halves of the shift,
   compare and subtract (rol/cpc/sbc on the H registers), the labels, and the
   final COM fix-up of the quotient are missing from this extract.  */
385 sub r_remH,r_remH ; clear remainder and carry
386 ldi r_cnt,17 ; init loop counter
387 rjmp __udivmodhi4_ep ; jump to entry point
389 rol r_remL ; shift dividend into remainder
391 cp r_remL,r_arg2L ; compare remainder & divisor
393 brcs __udivmodhi4_ep ; remainder < divisor
394 sub r_remL,r_arg2L ; restore remainder
397 rol r_arg1L ; shift dividend (with CARRY)
399 dec r_cnt ; decrement loop counter
400 brne __udivmodhi4_loop
403 ; div/mod results to return registers, as for the div() function
404 mov_l r_arg2L, r_arg1L ; quotient
405 mov_h r_arg2H, r_arg1H
406 mov_l r_arg1L, r_remL ; remainder
407 mov_h r_arg1H, r_remH
410 #endif /* defined (L_udivmodhi4) */
412 #if defined (L_divmodhi4)
/* __divmodhi4: signed 16/16 division built on __udivmodhi4, with local
   negate helpers (__divmodhi4_neg1 negates the dividend/remainder pair,
   __divmodhi4_neg2 the divisor/quotient pair).  T flag = dividend sign,
   __tmp_reg__ bit 7 = quotient sign.  NOTE(review): the helper labels, the
   guarding branches and the high-byte halves of the negations are missing
   from this extract.  */
418 bst r_arg1H,7 ; store sign of dividend
419 mov __tmp_reg__,r_arg1H
420 eor __tmp_reg__,r_arg2H ; r0.7 is sign of result
421 rcall __divmodhi4_neg1 ; dividend negative : negate
423 rcall __divmodhi4_neg2 ; divisor negative : negate
424 rcall __udivmodhi4 ; do the unsigned div/mod
425 rcall __divmodhi4_neg1 ; correct remainder sign
427 brpl __divmodhi4_exit
430 neg r_arg2L ; correct divisor/result sign
435 brtc __divmodhi4_exit
437 neg r_arg1L ; correct dividend/remainder sign
441 #endif /* defined (L_divmodhi4) */
454 /*******************************************************
455 Division 32 / 32 => (result + remainder)
456 *******************************************************/
/* NOTE(review): the defines for the middle remainder/argument bytes
   (r_remH/HL, r_arg1H/HL, r_arg2H/HL) are elided in this extract.  */
457 #define r_remHH r31 /* remainder High */
460 #define r_remL r26 /* remainder Low */
462 /* return: remainder */
463 #define r_arg1HH r25 /* dividend High */
466 #define r_arg1L r22 /* dividend Low */
468 /* return: quotient */
469 #define r_arg2HH r21 /* divisor High */
472 #define r_arg2L r18 /* divisor Low */
474 #define r_cnt __zero_reg__ /* loop count (0 after the loop!) */
476 #if defined (L_udivmodsi4)
/* __udivmodsi4: unsigned 32/32 division, 33-iteration shift-subtract loop.
   The loop counter deliberately lives in __zero_reg__, which is zero again
   when the loop falls through.  Results are copied to the ldiv() register
   layout at the end.  NOTE(review): labels, the upper-byte shift/compare/
   subtract instructions and the counter transfer into __zero_reg__ are
   missing from this extract.  */
480 ldi r_remL, 33 ; init loop counter
483 sub r_remH,r_remH ; clear remainder and carry
484 mov_l r_remHL, r_remL
485 mov_h r_remHH, r_remH
486 rjmp __udivmodsi4_ep ; jump to entry point
488 rol r_remL ; shift dividend into remainder
492 cp r_remL,r_arg2L ; compare remainder & divisor
496 brcs __udivmodsi4_ep ; remainder <= divisor
497 sub r_remL,r_arg2L ; restore remainder
502 rol r_arg1L ; shift dividend (with CARRY)
506 dec r_cnt ; decrement loop counter
507 brne __udivmodsi4_loop
508 ; __zero_reg__ now restored (r_cnt == 0)
513 ; div/mod results to return registers, as for the ldiv() function
514 mov_l r_arg2L, r_arg1L ; quotient
515 mov_h r_arg2H, r_arg1H
516 mov_l r_arg2HL, r_arg1HL
517 mov_h r_arg2HH, r_arg1HH
518 mov_l r_arg1L, r_remL ; remainder
519 mov_h r_arg1H, r_remH
520 mov_l r_arg1HL, r_remHL
521 mov_h r_arg1HH, r_remHH
524 #endif /* defined (L_udivmodsi4) */
526 #if defined (L_divmodsi4)
/* __divmodsi4: signed 32/32 division built on __udivmodsi4, mirroring the
   16-bit signed wrapper: T flag = dividend sign, __tmp_reg__ bit 7 =
   quotient sign, with local negate helpers for each register quartet.
   NOTE(review): the helper labels, guard branches and the upper-byte
   negation instructions are missing from this extract.  */
530 bst r_arg1HH,7 ; store sign of dividend
531 mov __tmp_reg__,r_arg1HH
532 eor __tmp_reg__,r_arg2HH ; r0.7 is sign of result
533 rcall __divmodsi4_neg1 ; dividend negative : negate
535 rcall __divmodsi4_neg2 ; divisor negative : negate
536 rcall __udivmodsi4 ; do the unsigned div/mod
537 rcall __divmodsi4_neg1 ; correct remainder sign
539 brcc __divmodsi4_exit
544 neg r_arg2L ; correct divisor/quotient sign
551 brtc __divmodsi4_exit
555 neg r_arg1L ; correct dividend/remainder sign
561 #endif /* defined (L_divmodsi4) */
563 /**********************************
564 * This is a prologue subroutine
565 **********************************/
566 #if defined (L_prologue)
568 .global __prologue_saves__
569 .func __prologue_saves__
/* __prologue_saves__: shared function-prologue helper (pushes the
   call-saved registers and adjusts the stack/frame pointer -- TODO confirm:
   the push sequence and SP update are elided here).  Only the interrupt-safe
   SREG save/restore bracket around the SP write is visible.  */
593 in __tmp_reg__,__SREG__
596 out __SREG__,__tmp_reg__
598 #if defined (__AVR_HAVE_EIJMP_EICALL__)
605 #endif /* defined (L_prologue) */
/* (opening of this comment block is elided in the extract) */
608 * This is an epilogue subroutine
610 #if defined (L_epilogue)
612 .global __epilogue_restores__
613 .func __epilogue_restores__
614 __epilogue_restores__:
/* __epilogue_restores__: shared function-epilogue helper (pops the
   call-saved registers and restores SP -- TODO confirm: that sequence is
   elided; only the SREG save/restore around the SP write is visible).  */
635 in __tmp_reg__,__SREG__
638 out __SREG__,__tmp_reg__
644 #endif /* defined (L_epilogue) */
/* Fragments of the _exit/_cleanup stubs (placed in the .fini sections run
   at program termination) and of the __tablejump__/__tablejump2__ helpers
   used for switch tables / ctor-dtor dispatch.  NOTE(review): nearly all
   instructions of these routines are elided in this extract.  */
647 .section .fini9,"ax",@progbits
654 /* Code from .fini8 ... .fini1 sections inserted by ld script. */
656 .section .fini0,"ax",@progbits
661 #endif /* defined (L_exit) */
669 #endif /* defined (L_cleanup) */
672 .global __tablejump2__
677 .global __tablejump__
679 #if defined (__AVR_HAVE_LPMX__)
684 #if defined (__AVR_HAVE_EIJMP_EICALL__)
696 #if defined (__AVR_HAVE_EIJMP_EICALL__)
702 #endif /* defined (L_tablejump) */
/* __do_copy_data (startup, .init4): copy initialized .data from flash to
   RAM.  X (r27:r26) = RAM destination, Z (r31:r30) = flash source,
   r16 = high (RAMPZ) byte of the load address on ELPM devices, r17 =
   hi8(__data_end) for the 16-bit end-of-region compare.  Three variants by
   flash-read capability: ELPMX, plain ELPM, and LPM(X).  NOTE(review): the
   loop bodies (the elpm/lpm + st X+ copy instructions and the cpc r27,r17
   half of the end compare) are elided throughout this extract.  */
705 .section .init4,"ax",@progbits
706 .global __do_copy_data
708 #if defined(__AVR_HAVE_ELPMX__)
709 ldi r17, hi8(__data_end)
710 ldi r26, lo8(__data_start)
711 ldi r27, hi8(__data_start)
712 ldi r30, lo8(__data_load_start)
713 ldi r31, hi8(__data_load_start)
714 ldi r16, hh8(__data_load_start)
716 rjmp .L__do_copy_data_start
717 .L__do_copy_data_loop:
720 .L__do_copy_data_start:
721 cpi r26, lo8(__data_end)
723 brne .L__do_copy_data_loop
724 #elif !defined(__AVR_HAVE_ELPMX__) && defined(__AVR_HAVE_ELPM__)
725 ldi r17, hi8(__data_end)
726 ldi r26, lo8(__data_start)
727 ldi r27, hi8(__data_start)
728 ldi r30, lo8(__data_load_start)
729 ldi r31, hi8(__data_load_start)
; Pre-biased so the first carry out of Z increments the RAMPZ byte:
730 ldi r16, hh8(__data_load_start - 0x10000)
731 .L__do_copy_data_carry:
734 rjmp .L__do_copy_data_start
735 .L__do_copy_data_loop:
739 brcs .L__do_copy_data_carry
740 .L__do_copy_data_start:
741 cpi r26, lo8(__data_end)
743 brne .L__do_copy_data_loop
744 #elif !defined(__AVR_HAVE_ELPMX__) && !defined(__AVR_HAVE_ELPM__)
745 ldi r17, hi8(__data_end)
746 ldi r26, lo8(__data_start)
747 ldi r27, hi8(__data_start)
748 ldi r30, lo8(__data_load_start)
749 ldi r31, hi8(__data_load_start)
750 rjmp .L__do_copy_data_start
751 .L__do_copy_data_loop:
752 #if defined (__AVR_HAVE_LPMX__)
759 .L__do_copy_data_start:
760 cpi r26, lo8(__data_end)
762 brne .L__do_copy_data_loop
763 #endif /* !defined(__AVR_HAVE_ELPMX__) && !defined(__AVR_HAVE_ELPM__) */
764 #endif /* L_copy_data */
766 /* __do_clear_bss is only necessary if there is anything in .bss section. */
/* __do_clear_bss (startup, .init4): zero the .bss region.  X (r27:r26)
   walks from __bss_start toward __bss_end; r17 = hi8(__bss_end) for the
   16-bit end compare.  NOTE(review): the loop body (the st X+,__zero_reg__
   store, the .do_clear_bss_loop label and the cpc r27,r17 half of the
   compare) is elided in this extract.  */
769 .section .init4,"ax",@progbits
770 .global __do_clear_bss
772 ldi r17, hi8(__bss_end)
773 ldi r26, lo8(__bss_start)
774 ldi r27, hi8(__bss_start)
775 rjmp .do_clear_bss_start
779 cpi r26, lo8(__bss_end)
781 brne .do_clear_bss_loop
782 #endif /* L_clear_bss */
784 /* __do_global_ctors and __do_global_dtors are only necessary
785 if there are any constructors/destructors. */
787 #if defined (__AVR_HAVE_JMP_CALL__)
/* __do_global_ctors (startup, .init6): call every constructor in the
   .ctors table.  Y (r29:r28) starts at __ctors_end and is compared against
   __ctors_start, i.e. the table is presumably walked downward (the
   decrement and the indirect-call dispatch are elided in this extract,
   along with the cpc r29,r17 half of the end compare).  */
794 .section .init6,"ax",@progbits
795 .global __do_global_ctors
797 ldi r17, hi8(__ctors_start)
798 ldi r28, lo8(__ctors_end)
799 ldi r29, hi8(__ctors_end)
800 rjmp .do_global_ctors_start
801 .do_global_ctors_loop:
806 .do_global_ctors_start:
807 cpi r28, lo8(__ctors_start)
809 brne .do_global_ctors_loop
/* __do_global_dtors (shutdown, .fini6): call every destructor in the
   .dtors table.  Mirror image of the ctor walk: Y (r29:r28) starts at
   __dtors_start and is compared against __dtors_end, i.e. presumably walked
   upward (the increment and the indirect-call dispatch are elided in this
   extract, along with the cpc r29,r17 half of the end compare).  */
813 .section .fini6,"ax",@progbits
814 .global __do_global_dtors
816 ldi r17, hi8(__dtors_end)
817 ldi r28, lo8(__dtors_start)
818 ldi r29, hi8(__dtors_start)
819 rjmp .do_global_dtors_start
820 .do_global_dtors_loop:
825 .do_global_dtors_start:
826 cpi r28, lo8(__dtors_end)
828 brne .do_global_dtors_loop