: "+a"(src), "+c"(dst), "+d"(src2), "+g"(h)\
: "D"((x86_reg)src2Stride), "S"((x86_reg)dstStride),\
"m"(ff_pw_5), "m"(ff_pw_16)\
- : "memory"\
- XMM_CLOBBERS(, "%xmm0" , "%xmm1" , "%xmm2" , "%xmm3" , \
- "%xmm4" , "%xmm5" , "%xmm6" , "%xmm7" , \
- "%xmm8" , "%xmm9" , "%xmm10", "%xmm11", \
- "%xmm12", "%xmm13", "%xmm14", "%xmm15") \
+ : XMM_CLOBBERS("%xmm0" , "%xmm1" , "%xmm2" , "%xmm3" , \
+ "%xmm4" , "%xmm5" , "%xmm6" , "%xmm7" , \
+ "%xmm8" , "%xmm9" , "%xmm10", "%xmm11", \
+ "%xmm12", "%xmm13", "%xmm14", "%xmm15",)\
+ "memory"\
);\
}
#else // ARCH_X86_64
"jg 1b \n\t"\
: "+a"(src), "+c"(dst), "+d"(src2), "+g"(h)\
: "D"((x86_reg)src2Stride), "S"((x86_reg)dstStride)\
- : "memory"\
- XMM_CLOBBERS(, "%xmm0", "%xmm1", "%xmm2", "%xmm3", \
- "%xmm4", "%xmm5", "%xmm6", "%xmm7") \
+ : XMM_CLOBBERS("%xmm0", "%xmm1", "%xmm2", "%xmm3", \
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",)\
+ "memory"\
);\
}\
QPEL_H264_H16_XMM(OPNAME, OP, MMX)\
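
The change being applied throughout this patch: XMM_CLOBBERS, defined in libavutil/x86/asm.h, passes its arguments through only when HAVE_XMM_CLOBBERS is set and expands to nothing otherwise, so the comma separating it from the rest of the clobber list has to live inside the macro. The old form appended the conditional XMM registers after "memory" via a hidden leading comma; the new form lists the registers first with a hidden trailing comma, so the unconditional "memory" clobber always closes the list. A minimal compilable sketch of the idiom (the macro definition is paraphrased here, not taken from the patch; assumes an SSE-capable x86 target):

    /* Normally set by configure; hardcoded for this sketch. */
    #define HAVE_XMM_CLOBBERS 1

    #if HAVE_XMM_CLOBBERS
    #   define XMM_CLOBBERS(...) __VA_ARGS__
    #else
    #   define XMM_CLOBBERS(...)
    #endif

    static int guard;

    void clobber_demo(void)
    {
        /* With HAVE_XMM_CLOBBERS=1 the clobber list expands to
         *     "%xmm0", "%xmm1", "memory"
         * and with HAVE_XMM_CLOBBERS=0 the registers and the
         * trailing comma vanish together, leaving just
         *     "memory"
         * -- a well-formed list either way. */
        __asm__ volatile ("" : "+m"(guard) :
                          : XMM_CLOBBERS("%xmm0", "%xmm1",)
                            "memory");
    }

The same reasoning applies to every hunk below; only the register set and the surrounding constraint lists differ.
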
" jnz 1b \n\t"\
: "+a"(src), "+c"(dst), "+g"(h)\
: "D"((x86_reg)srcStride), "S"((x86_reg)dstStride)\
- : "memory"\
- XMM_CLOBBERS(, "%xmm0", "%xmm1", "%xmm2", "%xmm3", \
- "%xmm4", "%xmm5", "%xmm6", "%xmm7") \
+ : XMM_CLOBBERS("%xmm0", "%xmm1", "%xmm2", "%xmm3", \
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",)\
+ "memory"\
);\
}\
static void OPNAME ## h264_qpel16_h_lowpass_ ## MMX(uint8_t *dst, uint8_t *src, int dstStride, int srcStride){\
\
: "+a"(src), "+c"(dst)\
: "S"((x86_reg)srcStride), "D"((x86_reg)dstStride), "g"(h)\
- : "memory"\
- XMM_CLOBBERS(, "%xmm0", "%xmm1", "%xmm2", "%xmm3", \
- "%xmm4", "%xmm5", "%xmm6", "%xmm7") \
+ : XMM_CLOBBERS("%xmm0", "%xmm1", "%xmm2", "%xmm3", \
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",)\
+ "memory"\
);\
}\
static void OPNAME ## h264_qpel8_v_lowpass_ ## MMX(uint8_t *dst, uint8_t *src, int dstStride, int srcStride){\
"2: \n\t"
: "+a"(src)
: "c"(tmp), "S"((x86_reg)srcStride), "g"(size)
- : "memory"
- XMM_CLOBBERS(, "%xmm0", "%xmm1", "%xmm2", "%xmm3",
- "%xmm4", "%xmm5", "%xmm6", "%xmm7")
+ : XMM_CLOBBERS("%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",)
+ "memory"
);
tmp += 8;
src += 8 - (size+5)*srcStride;
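
Unlike the template hunks above and below, this one carries no trailing backslashes: it sits in a plain function rather than inside one of the #define'd OPNAME ## ... templates, so its lines need no continuation characters. The clobber-list change itself is identical.
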
" jnz 1b \n\t"\
: "+a"(tmp), "+c"(dst), "+g"(h)\
: "S"((x86_reg)dstStride)\
- : "memory"\
- XMM_CLOBBERS(, "%xmm0", "%xmm1", "%xmm2", "%xmm3", \
- "%xmm4", "%xmm5", "%xmm6", "%xmm7") \
+ : XMM_CLOBBERS("%xmm0", "%xmm1", "%xmm2", "%xmm3", \
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",)\
+ "memory"\
);\
}else{\
__asm__ volatile(\
" jnz 1b \n\t"\
: "+a"(tmp), "+c"(dst), "+g"(h)\
: "S"((x86_reg)dstStride)\
- : "memory"\
- XMM_CLOBBERS(, "%xmm0", "%xmm1", "%xmm2", "%xmm3", \
- "%xmm4", "%xmm5", "%xmm6", "%xmm7") \
+ : XMM_CLOBBERS("%xmm0", "%xmm1", "%xmm2", "%xmm3", \
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",)\
+ "memory"\
);\
}\
}
"6: \n\t"
: "+r"(block)
:
- : "%eax", "%ecx", "%edx", "%esi", "memory"
- XMM_CLOBBERS(, "%xmm0" , "%xmm1" , "%xmm2" , "%xmm3" ,
- "%xmm4" , "%xmm5" , "%xmm6" , "%xmm7")
+ : XMM_CLOBBERS("%xmm0" , "%xmm1" , "%xmm2" , "%xmm3" ,
+ "%xmm4" , "%xmm5" , "%xmm6" , "%xmm7" ,)
#if ARCH_X86_64
- XMM_CLOBBERS(, "%xmm8" , "%xmm9" , "%xmm10", "%xmm11",
- "%xmm12", "%xmm13", "%xmm14")
+ XMM_CLOBBERS("%xmm8" , "%xmm9" , "%xmm10", "%xmm11",
+ "%xmm12", "%xmm13", "%xmm14",)
#endif
+ "%eax", "%ecx", "%edx", "%esi", "memory"
);
}
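
The last hunk is the most involved case: two conditional XMM_CLOBBERS invocations are chained in front of unconditional GPR and memory clobbers, with the second invocation itself guarded by ARCH_X86_64. With the trailing-comma convention every configuration still yields a well-formed clobber list; spelled out (my expansion, not part of the patch):

    /* HAVE_XMM_CLOBBERS=1, ARCH_X86_64=1:
     *   "%xmm0", ..., "%xmm7", "%xmm8", ..., "%xmm14",
     *   "%eax", "%ecx", "%edx", "%esi", "memory"
     *
     * HAVE_XMM_CLOBBERS=1, ARCH_X86_64=0:
     *   "%xmm0", ..., "%xmm7",
     *   "%eax", "%ecx", "%edx", "%esi", "memory"
     *
     * HAVE_XMM_CLOBBERS=0 (either arch):
     *   "%eax", "%ecx", "%edx", "%esi", "memory"
     */
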