generic-y += types.h
generic-y += ucontext.h
generic-y += user.h
+generic-y += word-at-a-time.h
generic-y += local64.h
generic-y += irq_regs.h
generic-y += local.h
+generic-y += word-at-a-time.h
* bit count instruction, that might be better than the multiply
* and shift, for example.
*/
+struct word_at_a_time {
+ const unsigned long one_bits, high_bits;
+};
+
+#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }
#ifdef CONFIG_64BIT
#endif
-/* Return the high bit set in the first byte that is a zero */
-static inline unsigned long has_zero(unsigned long a)
+/* Return nonzero if the word has a zero byte */
+static inline unsigned long has_zero(unsigned long a, unsigned long *bits, const struct word_at_a_time *c)
+{
+ unsigned long mask = ((a - c->one_bits) & ~a) & c->high_bits;
+ *bits = mask;
+ return mask;
+}
+
+static inline unsigned long prep_zero_mask(unsigned long a, unsigned long bits, const struct word_at_a_time *c)
+{
+ return bits;
+}
+
+static inline unsigned long create_zero_mask(unsigned long bits)
+{
+ bits = (bits - 1) & ~bits;
+ return bits >> 7;
+}
+
+/* The mask we created is directly usable as a bytemask */
+#define zero_bytemask(mask) (mask)
+
+static inline unsigned long find_zero(unsigned long mask)
{
- return ((a - REPEAT_BYTE(0x01)) & ~a) & REPEAT_BYTE(0x80);
+ return count_masked_bytes(mask);
}
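
Worked example (not part of the patch): the little-endian pipeline above, from has_zero() through find_zero(), with the constants and the multiply-and-shift count_masked_bytes() step written out locally so it compiles as a stand-alone program. The constant values and the 0x0001020304050608 multiplier are assumptions matching a 64-bit little-endian build; the kernel gets them from REPEAT_BYTE() and the arch header.

#include <stdio.h>
#include <stdint.h>

int main(void)
{
	const uint64_t one_bits  = 0x0101010101010101ull;	/* REPEAT_BYTE(0x01) */
	const uint64_t high_bits = 0x8080808080808080ull;	/* REPEAT_BYTE(0x80) */

	/* The bytes "ab\0cdefg" as a little-endian word: the NUL is byte 2. */
	uint64_t a = 0x6766656463006261ull;

	/* has_zero(): a byte of the result has its 0x80 bit set iff the
	 * corresponding byte of a is zero. */
	uint64_t bits = (a - one_bits) & ~a & high_bits;
	printf("has_zero bits: %#018llx\n", (unsigned long long)bits);	/* 0x0000000000800000 */

	/* create_zero_mask(): keep everything below the first marked bit,
	 * then shift so each byte before the NUL becomes 0xff. */
	uint64_t mask = ((bits - 1) & ~bits) >> 7;
	printf("bytemask:      %#018llx\n", (unsigned long long)mask);	/* 0x000000000000ffff */

	/* find_zero(): the 64-bit multiply-and-shift count_masked_bytes()
	 * turns the bytemask into a byte index. */
	printf("find_zero:     %llu\n",
	       (unsigned long long)((mask * 0x0001020304050608ull) >> 56));	/* 2 */
	return 0;
}
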
/*
*/
static inline unsigned long hash_name(const char *name, unsigned int *hashp)
{
- unsigned long a, mask, hash, len;
+ unsigned long a, b, adata, bdata, mask, hash, len;
+ const struct word_at_a_time constants = WORD_AT_A_TIME_CONSTANTS;
hash = a = 0;
len = -sizeof(unsigned long);
hash = (hash + a) * 9;
len += sizeof(unsigned long);
a = load_unaligned_zeropad(name+len);
- /* Do we have any NUL or '/' bytes in this word? */
- mask = has_zero(a) | has_zero(a ^ REPEAT_BYTE('/'));
- } while (!mask);
-
- /* The mask *below* the first high bit set */
- mask = (mask - 1) & ~mask;
- mask >>= 7;
- hash += a & mask;
+ b = a ^ REPEAT_BYTE('/');
+ } while (!(has_zero(a, &adata, &constants) | has_zero(b, &bdata, &constants)));
+
+ adata = prep_zero_mask(a, adata, &constants);
+ bdata = prep_zero_mask(b, bdata, &constants);
+
+ mask = create_zero_mask(adata | bdata);
+
+ hash += a & zero_bytemask(mask);
*hashp = fold_hash(hash);
- return len + count_masked_bytes(mask);
+ return len + find_zero(mask);
}
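
The rewritten hash_name() leans on one extra trick worth spelling out: XORing the word with REPEAT_BYTE('/') turns every '/' byte into a zero byte, so the same has_zero() test covers both terminators. A stand-alone sketch (helpers and constants local to the example, 64-bit little-endian host assumed; the kernel's load_unaligned_zeropad() is replaced here by a plain memcpy):

#include <stdio.h>
#include <stdint.h>
#include <string.h>

#define REPEAT_BYTE64(x) ((uint64_t)(x) * 0x0101010101010101ull)

/* Local copy of the little-endian zero-byte test. */
static uint64_t has_zero64(uint64_t a)
{
	return (a - REPEAT_BYTE64(0x01)) & ~a & REPEAT_BYTE64(0x80);
}

int main(void)
{
	uint64_t a;
	memcpy(&a, "ab/cdefg", sizeof(a));	/* one word of a path name */

	uint64_t b = a ^ REPEAT_BYTE64('/');	/* '/' bytes become zero bytes */

	printf("NUL mask: %#llx\n", (unsigned long long)has_zero64(a));	/* 0: no NUL here */
	printf("'/' mask: %#llx\n", (unsigned long long)has_zero64(b));	/* 0x800000: byte 2 */
	return 0;
}
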
#else
--- /dev/null
+++ b/include/asm-generic/word-at-a-time.h
+#ifndef _ASM_WORD_AT_A_TIME_H
+#define _ASM_WORD_AT_A_TIME_H
+
+/*
+ * This says "generic", but it's actually big-endian only.
+ * Little-endian can use more efficient versions of these
+ * interfaces, see for example
+ * arch/x86/include/asm/word-at-a-time.h
+ * for those.
+ */
+
+#include <linux/kernel.h>
+
+struct word_at_a_time {
+ const unsigned long high_bits, low_bits;
+};
+
+#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0xfe) + 1, REPEAT_BYTE(0x7f) }
+
+/* Bit set in the bytes that have a zero */
+static inline long prep_zero_mask(unsigned long val, unsigned long rhs, const struct word_at_a_time *c)
+{
+ unsigned long mask = (val & c->low_bits) + c->low_bits;
+ return ~(mask | rhs);
+}
+
+#define create_zero_mask(mask) (mask)
+
+static inline long find_zero(unsigned long mask)
+{
+ long byte = 0;
+#ifdef CONFIG_64BIT
+ if (mask >> 32)
+ mask >>= 32;
+ else
+ byte = 4;
+#endif
+ if (mask >> 16)
+ mask >>= 16;
+ else
+ byte += 2;
+ return (mask >> 8) ? byte : byte + 1;
+}
+
+static inline bool has_zero(unsigned long val, unsigned long *data, const struct word_at_a_time *c)
+{
+ unsigned long rhs = val | c->low_bits;
+ *data = rhs;
+ return (val + c->high_bits) & ~rhs;
+}
+
+#endif /* _ASM_WORD_AT_A_TIME_H */
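
Worked example of the big-endian flavour (again not part of the patch): the helpers re-declared locally so the program runs on any host, with the word written as a big-endian load of "ab\0cdefg" would produce it. The constant values are the 64-bit expansions of the WORD_AT_A_TIME_CONSTANTS above.

#include <stdio.h>
#include <stdint.h>
#include <stdbool.h>

#define HIGH_BITS 0xfefefefefefefeffull	/* REPEAT_BYTE(0xfe) + 1 */
#define LOW_BITS  0x7f7f7f7f7f7f7f7full	/* REPEAT_BYTE(0x7f) */

static bool has_zero_be(uint64_t val, uint64_t *data)
{
	uint64_t rhs = val | LOW_BITS;
	*data = rhs;
	return (val + HIGH_BITS) & ~rhs;
}

/* Produces 0x80 in every byte position that held a zero. */
static uint64_t prep_zero_mask_be(uint64_t val, uint64_t rhs)
{
	return ~(((val & LOW_BITS) + LOW_BITS) | rhs);
}

/* Big-endian byte numbering: byte 0 is the most significant byte. */
static long find_zero_be(uint64_t mask)
{
	long byte = 0;

	if (mask >> 32)
		mask >>= 32;
	else
		byte = 4;
	if (mask >> 16)
		mask >>= 16;
	else
		byte += 2;
	return (mask >> 8) ? byte : byte + 1;
}

int main(void)
{
	/* "ab\0cdefg" as a big-endian load: 'a' is the most significant byte. */
	uint64_t val = 0x6162006364656667ull, data;

	if (has_zero_be(val, &data)) {
		/* create_zero_mask() is a no-op in the big-endian version. */
		uint64_t mask = prep_zero_mask_be(val, data);
		printf("first zero at byte %ld\n", find_zero_be(mask));	/* prints 2 */
	}
	return 0;
}
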
#include <linux/errno.h>
#include <asm/byteorder.h>
-
-static inline long find_zero(unsigned long mask)
-{
- long byte = 0;
-
-#ifdef __BIG_ENDIAN
-#ifdef CONFIG_64BIT
- if (mask >> 32)
- mask >>= 32;
- else
- byte = 4;
-#endif
- if (mask >> 16)
- mask >>= 16;
- else
- byte += 2;
- return (mask >> 8) ? byte : byte + 1;
-#else
-#ifdef CONFIG_64BIT
- if (!((unsigned int) mask)) {
- mask >>= 32;
- byte = 4;
- }
-#endif
- if (!(mask & 0xffff)) {
- mask >>= 16;
- byte += 2;
- }
- return (mask & 0xff) ? byte : byte + 1;
-#endif
-}
+#include <asm/word-at-a-time.h>
#ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
#define IS_UNALIGNED(src, dst) 0
*/
static inline long do_strncpy_from_user(char *dst, const char __user *src, long count, unsigned long max)
{
- const unsigned long high_bits = REPEAT_BYTE(0xfe) + 1;
- const unsigned long low_bits = REPEAT_BYTE(0x7f);
+ const struct word_at_a_time constants = WORD_AT_A_TIME_CONSTANTS;
long res = 0;
/*
goto byte_at_a_time;
while (max >= sizeof(unsigned long)) {
- unsigned long c, v, rhs;
+ unsigned long c, data;
/* Fall back to byte-at-a-time if we get a page fault */
if (unlikely(__get_user(c,(unsigned long __user *)(src+res))))
break;
- rhs = c | low_bits;
- v = (c + high_bits) & ~rhs;
*(unsigned long *)(dst+res) = c;
- if (v) {
- v = (c & low_bits) + low_bits;
- v = ~(v | rhs);
- return res + find_zero(v);
+ if (has_zero(c, &data, &constants)) {
+ data = prep_zero_mask(c, data, &constants);
+ data = create_zero_mask(data);
+ return res + find_zero(data);
}
res += sizeof(unsigned long);
max -= sizeof(unsigned long);
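
To show how the pieces fit together, here is a user-space sketch of the fast path the hunk above converts do_strncpy_from_user() to, minus the __get_user() fault handling and the byte-at-a-time fallback. The helpers are reimplemented inline for a 64-bit little-endian host, and both buffers are assumed to be padded to a word multiple so the whole-word accesses stay in bounds; none of this is the kernel's actual code.

#include <stdio.h>
#include <stdint.h>
#include <string.h>

#define ONES  0x0101010101010101ull
#define HIGHS 0x8080808080808080ull

static long copy_string_word_at_a_time(char *dst, const char *src, long max)
{
	long res = 0;

	while (max >= (long)sizeof(uint64_t)) {
		uint64_t c;

		memcpy(&c, src + res, sizeof(c));	/* stand-in for __get_user() */
		memcpy(dst + res, &c, sizeof(c));	/* copy the whole word */

		uint64_t bits = (c - ONES) & ~c & HIGHS;	/* has_zero() */
		if (bits) {
			uint64_t mask = ((bits - 1) & ~bits) >> 7;	/* create_zero_mask() */
			return res + (long)((mask * 0x0001020304050608ull) >> 56);	/* find_zero() */
		}
		res += sizeof(uint64_t);
		max -= sizeof(uint64_t);
	}
	return res;	/* ran out of room before the NUL; the caller would fall back */
}

int main(void)
{
	char src[16] = "hello, world";	/* padded to a word multiple */
	char dst[16] = { 0 };

	long len = copy_string_word_at_a_time(dst, src, sizeof(dst));
	printf("len=%ld dst=\"%s\"\n", len, dst);	/* len=12 */
	return 0;
}
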