-#define GST_READ_UINT64_LE(data) (_GST_GET (data, 7, 64, 56) | \
- _GST_GET (data, 6, 64, 48) | \
- _GST_GET (data, 5, 64, 40) | \
- _GST_GET (data, 4, 64, 32) | \
- _GST_GET (data, 3, 64, 24) | \
- _GST_GET (data, 2, 64, 16) | \
- _GST_GET (data, 1, 64, 8) | \
- _GST_GET (data, 0, 64, 0))
+/* Fast path: the CPU tolerates unaligned loads, so read the 64-bit value
+ * with a single access, byte-swapping when the host byte order differs from
+ * the requested order.  _GST_FAST_READ / _GST_FAST_READ_SWAP and
+ * GST_HAVE_UNALIGNED_ACCESS are presumably defined earlier in this header
+ * -- not visible in this hunk; verify against the full file. */
+#if GST_HAVE_UNALIGNED_ACCESS
+# if (G_BYTE_ORDER == G_BIG_ENDIAN)
+# define GST_READ_UINT64_BE(data) _GST_FAST_READ (64, data)
+# define GST_READ_UINT64_LE(data) _GST_FAST_READ_SWAP (64, data)
+# else
+# define GST_READ_UINT64_BE(data) _GST_FAST_READ_SWAP (64, data)
+# define GST_READ_UINT64_LE(data) _GST_FAST_READ (64, data)
+# endif
+#else
+/* Slow path: assemble the value one byte at a time via _GST_GET, which
+ * works for any alignment and any host byte order.  Note these underscored
+ * helper macros evaluate `data' eight times each -- they are wrapped in the
+ * static inline functions below so the public macros stay single-evaluation
+ * safe. */
+#define _GST_READ_UINT64_BE(data) (_GST_GET (data, 0, 64, 56) | \
+ _GST_GET (data, 1, 64, 48) | \
+ _GST_GET (data, 2, 64, 40) | \
+ _GST_GET (data, 3, 64, 32) | \
+ _GST_GET (data, 4, 64, 24) | \
+ _GST_GET (data, 5, 64, 16) | \
+ _GST_GET (data, 6, 64, 8) | \
+ _GST_GET (data, 7, 64, 0))
+
+#define _GST_READ_UINT64_LE(data) (_GST_GET (data, 7, 64, 56) | \
+ _GST_GET (data, 6, 64, 48) | \
+ _GST_GET (data, 5, 64, 40) | \
+ _GST_GET (data, 4, 64, 32) | \
+ _GST_GET (data, 3, 64, 24) | \
+ _GST_GET (data, 2, 64, 16) | \
+ _GST_GET (data, 1, 64, 8) | \
+ _GST_GET (data, 0, 64, 0))
+
+/* Inline wrappers: the cast forces `data' through a const guint8 pointer,
+ * and the function call evaluates the argument exactly once, unlike the raw
+ * _GST_READ_UINT64_* macros above.  The macro may appear before its inline
+ * function here because it only expands at call sites, by which point the
+ * function is in scope.
+ * NOTE(review): the `__gst_' double-underscore prefix is reserved for the
+ * implementation by C11 7.1.3, but it matches this project's existing
+ * naming convention. */
+#define GST_READ_UINT64_BE(data) __gst_slow_read64_be((const guint8 *)(data))
+static inline guint64 __gst_slow_read64_be (const guint8 * data) {
+ return _GST_READ_UINT64_BE (data);
+}
+#define GST_READ_UINT64_LE(data) __gst_slow_read64_le((const guint8 *)(data))
+static inline guint64 __gst_slow_read64_le (const guint8 * data) {
+ return _GST_READ_UINT64_LE (data);
+}
+#endif