__asm__ volatile ("nop");
}
#endif
+#if defined(__x86_64__)
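+/* Shift the 128-bit value __h:__l left by __d bits (count taken modulo 64,
+   per the & 63 mask below) and return the high 64 bits of the result,
+   matching MSVC's __shiftleft128. */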
+static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
+__shiftleft128(unsigned __int64 __l, unsigned __int64 __h, unsigned char __d) {
+  unsigned __int128 __val = ((unsigned __int128)__h << 64) | __l;
+  unsigned __int128 __res = __val << (__d & 63);
+  return (unsigned __int64)(__res >> 64);
+}
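+/* Shift the 128-bit value __h:__l right by __d bits (count taken modulo 64)
+   and return the low 64 bits of the result, matching MSVC's
+   __shiftright128. */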
+static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
+__shiftright128(unsigned __int64 __l, unsigned __int64 __h, unsigned char __d) {
+  unsigned __int128 __val = ((unsigned __int128)__h << 64) | __l;
+  unsigned __int128 __res = __val >> (__d & 63);
+  return (unsigned __int64)__res;
+}
+#endif
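+/* Illustrative use (a sketch, not part of the MSVC API surface): on an
+   x86_64 target the pair recovers both halves of a wide shift, e.g.
+
+     unsigned __int64 hi = __shiftleft128(0x8000000000000000ULL, 0, 1);
+     // hi == 1: bit 63 of the low word shifts into the high word.
+     unsigned __int64 lo = __shiftright128(0, 1, 4);
+     // lo == 0x1000000000000000: bit 0 of the high word shifts down. */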
/*----------------------------------------------------------------------------*\
|* Privileged intrinsics