*/
#define rem t8
+ R10KCBARRIER(0(ra))
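/*
 * For reference, a minimal sketch of what R10KCBARRIER is expected to
 * expand to, assuming the CONFIG_SGI_IP28 definitions in asm.h and
 * cacheops.h that this series relies on; everywhere else it expands
 * to nothing:
 */
#ifdef CONFIG_SGI_IP28
/* inhibit speculative stores to volatile (e.g. DMA) memory */
#define R10KCBARRIER(addr)	cache	Cache_Barrier, addr;
#else
#define R10KCBARRIER(addr)
#endif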
/*
* The "issue break"s below are very approximate.
* Issue delays for dcache fills will perturb the schedule, as will
PREF( 1, 3*32(dst) )
.align 4
1:
+ R10KCBARRIER(0(ra))
EXC( LOAD t0, UNIT(0)(src), l_exc)
EXC( LOAD t1, UNIT(1)(src), l_exc_copy)
EXC( LOAD t2, UNIT(2)(src), l_exc_copy)
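/*
 * EXC() wraps each user access with an __ex_table entry so that a faulting
 * load or store branches to the named handler; roughly, as defined near the
 * top of memcpy.S:
 */
#define EXC(inst_reg, addr, handler)		\
9:	inst_reg, addr;				\
	.section __ex_table, "a";		\
	PTR	9b, handler;			\
	.previous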
EXC( LOAD t3, UNIT(3)(src), l_exc_copy)
SUB len, len, 4*NBYTES
ADD src, src, 4*NBYTES
+ R10KCBARRIER(0(ra))
EXC( STORE t0, UNIT(0)(dst), s_exc_p4u)
EXC( STORE t1, UNIT(1)(dst), s_exc_p3u)
EXC( STORE t2, UNIT(2)(dst), s_exc_p2u)
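/*
 * The s_exc_pNu labels are the store fixups: on a faulting store they add
 * the n*NBYTES that were not written back to len and return, so the caller
 * gets an upper bound on the uncopied bytes; roughly, per the SEXC() macro:
 *
 *	s_exc_p ## n ## u:
 *		ADD	len, len, n*NBYTES
 *		jr	ra
 */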
beq rem, len, copy_bytes
nop
1:
+ R10KCBARRIER(0(ra))
EXC( LOAD t0, 0(src), l_exc)
ADD src, src, NBYTES
SUB len, len, NBYTES
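/*
 * The loop above is the one-unit cleanup path: it moves a full NBYTES per
 * pass until only rem (= len % NBYTES) bytes are left for copy_bytes.
 */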
EXC( LDREST t3, REST(0)(src), l_exc_copy)
SUB t2, t2, t1 # t2 = number of bytes copied
xor match, t0, t1
+ R10KCBARRIER(0(ra))
EXC( STFIRST t3, FIRST(0)(dst), s_exc)
beq len, t2, done
SUB len, len, t2
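/*
 * The sequence above is the dst-alignment head copy: LDFIRST/LDREST gather
 * the first NBYTES from src into t3, STFIRST stores just the t2 = NBYTES - t1
 * bytes needed to bring dst up to a unit boundary, and match is zero when
 * src and dst share the same alignment.
 */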
* It's OK to load FIRST(N+1) before REST(N) because the two addresses
* are to the same unit (unless src is aligned, but it's not).
*/
+ R10KCBARRIER(0(ra))
EXC( LDFIRST t0, FIRST(0)(src), l_exc)
EXC( LDFIRST t1, FIRST(1)(src), l_exc_copy)
SUB len, len, 4*NBYTES
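/*
 * LDFIRST/LDREST (and STFIRST/STREST) are the unaligned access pair,
 * selected by endianness; a sketch of the defines, assuming LOADL/LOADR
 * map to lwl/lwr (ldl/ldr on the 64-bit kernel):
 */
#ifdef CONFIG_CPU_LITTLE_ENDIAN
#define LDFIRST	LOADR
#define LDREST	LOADL
#define STFIRST	STORER
#define STREST	STOREL
#else
#define LDFIRST	LOADL
#define LDREST	LOADR
#define STFIRST	STOREL
#define STREST	STORER
#endif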
beq rem, len, copy_bytes
nop
1:
+ R10KCBARRIER(0(ra))
EXC( LDFIRST t0, FIRST(0)(src), l_exc)
EXC( LDREST t0, REST(0)(src), l_exc_copy)
ADD src, src, NBYTES
nop
copy_bytes:
/* 0 < len < NBYTES */
+ R10KCBARRIER(0(ra))
#define COPY_BYTE(N) \
EXC( lb t0, N(src), l_exc); \
SUB len, len, 1; \
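/*
 * (In memcpy.S the macro body continues, roughly, with "beqz len, done"
 *  and the matching EXC()-protected "sb t0, N(dst)" using the s_exc_p1
 *  store fixup.)
 */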
ADD a1, a2 # src = src + len
r_end_bytes:
+ R10KCBARRIER(0(ra))
lb t0, -1(a1)
SUB a2, a2, 0x1
sb t0, -1(a0)
move a2, zero
r_end_bytes_up:
+ R10KCBARRIER(0(ra))
lb t0, (a1)
SUB a2, a2, 0x1
sb t0, (a0)
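/*
 * These are the two byte-copy tails of __rmemcpy (the helper memmove uses):
 * r_end_bytes walks backwards from the end of both buffers, taken when
 * src < dst so overlapping regions stay safe, while r_end_bytes_up walks
 * forwards; both count a2 down to zero one byte per pass.
 */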
.set at
#endif
+ R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
EX(LONG_S_L, a1, (a0), first_fixup) /* make word/dword aligned */
#endif
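/*
 * LONG_S_L is the "store left" form (swl, or sdl on the 64-bit kernel): it
 * writes only the leading bytes up to the next word/dword boundary so the
 * main loop can use full LONG_S stores; the little-endian build uses
 * LONG_S_R here instead.
 */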
PTR_ADDU t1, a0 /* end address */
.set reorder
1: PTR_ADDIU a0, 64
+ R10KCBARRIER(0(ra))
f_fill64 a0, -64, a1, fwd_fixup
bne t1, a0, 1b
.set noreorder
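/*
 * f_fill64 emits the stores for one 64-byte block, 64/LONGSIZE LONG_S
 * instructions each wrapped in EX() so a fault lands in the given fixup;
 * a minimal sketch of the macro:
 */
	.macro	f_fill64 dst, offset, val, fixup
	EX(LONG_S, \val, (\offset + 0 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 1 * LONGSIZE)(\dst), \fixup)
	/* ... repeated until all 64 bytes of the block are covered ... */
	.endm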
memset_partial:
+ R10KCBARRIER(0(ra))
PTR_LA t1, 2f /* where to start */
#if LONGSIZE == 4
PTR_SUBU t1, t0
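/*
 * Roughly: memset_partial jumps into the middle of a second f_fill64 block
 * that sits just past the "jr t1".  t0 holds the leftover whole-longword
 * bytes, and pulling t1 back from the label after that block by one store
 * instruction per longword (t0 bytes here, t0/2 when LONGSIZE == 8, since
 * each 4-byte instruction then stores 8 bytes) makes the jump land on
 * exactly the stores still needed.
 */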
beqz a2, 1f
PTR_ADDU a0, a2 /* What's left */
+ R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
EX(LONG_S_R, a1, -1(a0), last_fixup)
#endif
PTR_ADDU t1, a0, a2
1: PTR_ADDIU a0, 1 /* fill bytewise */
+ R10KCBARRIER(0(ra))
bne t1, a0, 1b
sb a1, -1(a0)
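/*
 * Bytewise tail for very small regions (len < LONGSIZE): t1 holds the end
 * address, and the sb sits in the branch delay slot, storing to a0 - 1
 * right after the pointer increment.
 */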