/*
 * Poly1305 authenticator algorithm, RFC7539
 *
 * Copyright (C) 2015 Martin Willi
 *
 * Based on public domain code by Andrew Moon and Daniel J. Bernstein.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/poly1305.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <asm/unaligned.h>
22 static inline u64 mlt(u64 a, u64 b)
27 static inline u32 sr(u64 v, u_char n)
32 static inline u32 and(u32 v, u32 mask)
37 int crypto_poly1305_init(struct shash_desc *desc)
39 struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
41 poly1305_core_init(&dctx->h);
48 EXPORT_SYMBOL_GPL(crypto_poly1305_init);
50 void poly1305_core_setkey(struct poly1305_key *key, const u8 *raw_key)
52 /* r &= 0xffffffc0ffffffc0ffffffc0fffffff */
53 key->r[0] = (get_unaligned_le32(raw_key + 0) >> 0) & 0x3ffffff;
54 key->r[1] = (get_unaligned_le32(raw_key + 3) >> 2) & 0x3ffff03;
55 key->r[2] = (get_unaligned_le32(raw_key + 6) >> 4) & 0x3ffc0ff;
56 key->r[3] = (get_unaligned_le32(raw_key + 9) >> 6) & 0x3f03fff;
57 key->r[4] = (get_unaligned_le32(raw_key + 12) >> 8) & 0x00fffff;
59 EXPORT_SYMBOL_GPL(poly1305_core_setkey);
62 * Poly1305 requires a unique key for each tag, which implies that we can't set
63 * it on the tfm that gets accessed by multiple users simultaneously. Instead we
64 * expect the key as the first 32 bytes in the update() call.
66 unsigned int crypto_poly1305_setdesckey(struct poly1305_desc_ctx *dctx,
67 const u8 *src, unsigned int srclen)
70 if (!dctx->rset && srclen >= POLY1305_BLOCK_SIZE) {
71 poly1305_core_setkey(&dctx->r, src);
72 src += POLY1305_BLOCK_SIZE;
73 srclen -= POLY1305_BLOCK_SIZE;
76 if (srclen >= POLY1305_BLOCK_SIZE) {
77 dctx->s[0] = get_unaligned_le32(src + 0);
78 dctx->s[1] = get_unaligned_le32(src + 4);
79 dctx->s[2] = get_unaligned_le32(src + 8);
80 dctx->s[3] = get_unaligned_le32(src + 12);
81 src += POLY1305_BLOCK_SIZE;
82 srclen -= POLY1305_BLOCK_SIZE;
88 EXPORT_SYMBOL_GPL(crypto_poly1305_setdesckey);
90 static void poly1305_blocks_internal(struct poly1305_state *state,
91 const struct poly1305_key *key,
92 const void *src, unsigned int nblocks,
95 u32 r0, r1, r2, r3, r4;
97 u32 h0, h1, h2, h3, h4;
98 u64 d0, d1, d2, d3, d4;
122 h0 += (get_unaligned_le32(src + 0) >> 0) & 0x3ffffff;
123 h1 += (get_unaligned_le32(src + 3) >> 2) & 0x3ffffff;
124 h2 += (get_unaligned_le32(src + 6) >> 4) & 0x3ffffff;
125 h3 += (get_unaligned_le32(src + 9) >> 6) & 0x3ffffff;
126 h4 += (get_unaligned_le32(src + 12) >> 8) | hibit;
129 d0 = mlt(h0, r0) + mlt(h1, s4) + mlt(h2, s3) +
130 mlt(h3, s2) + mlt(h4, s1);
131 d1 = mlt(h0, r1) + mlt(h1, r0) + mlt(h2, s4) +
132 mlt(h3, s3) + mlt(h4, s2);
133 d2 = mlt(h0, r2) + mlt(h1, r1) + mlt(h2, r0) +
134 mlt(h3, s4) + mlt(h4, s3);
135 d3 = mlt(h0, r3) + mlt(h1, r2) + mlt(h2, r1) +
136 mlt(h3, r0) + mlt(h4, s4);
137 d4 = mlt(h0, r4) + mlt(h1, r3) + mlt(h2, r2) +
138 mlt(h3, r1) + mlt(h4, r0);
140 /* (partial) h %= p */
141 d1 += sr(d0, 26); h0 = and(d0, 0x3ffffff);
142 d2 += sr(d1, 26); h1 = and(d1, 0x3ffffff);
143 d3 += sr(d2, 26); h2 = and(d2, 0x3ffffff);
144 d4 += sr(d3, 26); h3 = and(d3, 0x3ffffff);
145 h0 += sr(d4, 26) * 5; h4 = and(d4, 0x3ffffff);
146 h1 += h0 >> 26; h0 = h0 & 0x3ffffff;
148 src += POLY1305_BLOCK_SIZE;
/*
 * Library entry point for absorbing full blocks; always sets the 2^128
 * pad bit (1 << 24 in the top limb), as required for non-final blocks.
 */
void poly1305_core_blocks(struct poly1305_state *state,
			  const struct poly1305_key *key,
			  const void *src, unsigned int nblocks)
{
	poly1305_blocks_internal(state, key, src, nblocks, 1 << 24);
}
EXPORT_SYMBOL_GPL(poly1305_core_blocks);
166 static void poly1305_blocks(struct poly1305_desc_ctx *dctx,
167 const u8 *src, unsigned int srclen, u32 hibit)
169 unsigned int datalen;
171 if (unlikely(!dctx->sset)) {
172 datalen = crypto_poly1305_setdesckey(dctx, src, srclen);
173 src += srclen - datalen;
177 poly1305_blocks_internal(&dctx->h, &dctx->r,
178 src, srclen / POLY1305_BLOCK_SIZE, hibit);
181 int crypto_poly1305_update(struct shash_desc *desc,
182 const u8 *src, unsigned int srclen)
184 struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
187 if (unlikely(dctx->buflen)) {
188 bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen);
189 memcpy(dctx->buf + dctx->buflen, src, bytes);
192 dctx->buflen += bytes;
194 if (dctx->buflen == POLY1305_BLOCK_SIZE) {
195 poly1305_blocks(dctx, dctx->buf,
196 POLY1305_BLOCK_SIZE, 1 << 24);
201 if (likely(srclen >= POLY1305_BLOCK_SIZE)) {
202 poly1305_blocks(dctx, src, srclen, 1 << 24);
203 src += srclen - (srclen % POLY1305_BLOCK_SIZE);
204 srclen %= POLY1305_BLOCK_SIZE;
207 if (unlikely(srclen)) {
208 dctx->buflen = srclen;
209 memcpy(dctx->buf, src, srclen);
214 EXPORT_SYMBOL_GPL(crypto_poly1305_update);
216 void poly1305_core_emit(const struct poly1305_state *state, void *dst)
218 u32 h0, h1, h2, h3, h4;
219 u32 g0, g1, g2, g3, g4;
229 h2 += (h1 >> 26); h1 = h1 & 0x3ffffff;
230 h3 += (h2 >> 26); h2 = h2 & 0x3ffffff;
231 h4 += (h3 >> 26); h3 = h3 & 0x3ffffff;
232 h0 += (h4 >> 26) * 5; h4 = h4 & 0x3ffffff;
233 h1 += (h0 >> 26); h0 = h0 & 0x3ffffff;
237 g1 = h1 + (g0 >> 26); g0 &= 0x3ffffff;
238 g2 = h2 + (g1 >> 26); g1 &= 0x3ffffff;
239 g3 = h3 + (g2 >> 26); g2 &= 0x3ffffff;
240 g4 = h4 + (g3 >> 26) - (1 << 26); g3 &= 0x3ffffff;
242 /* select h if h < p, or h + -p if h >= p */
243 mask = (g4 >> ((sizeof(u32) * 8) - 1)) - 1;
250 h0 = (h0 & mask) | g0;
251 h1 = (h1 & mask) | g1;
252 h2 = (h2 & mask) | g2;
253 h3 = (h3 & mask) | g3;
254 h4 = (h4 & mask) | g4;
256 /* h = h % (2^128) */
257 put_unaligned_le32((h0 >> 0) | (h1 << 26), dst + 0);
258 put_unaligned_le32((h1 >> 6) | (h2 << 20), dst + 4);
259 put_unaligned_le32((h2 >> 12) | (h3 << 14), dst + 8);
260 put_unaligned_le32((h3 >> 18) | (h4 << 8), dst + 12);
262 EXPORT_SYMBOL_GPL(poly1305_core_emit);
264 int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
266 struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
270 if (unlikely(!dctx->sset))
273 if (unlikely(dctx->buflen)) {
274 dctx->buf[dctx->buflen++] = 1;
275 memset(dctx->buf + dctx->buflen, 0,
276 POLY1305_BLOCK_SIZE - dctx->buflen);
277 poly1305_blocks(dctx, dctx->buf, POLY1305_BLOCK_SIZE, 0);
280 poly1305_core_emit(&dctx->h, digest);
282 /* mac = (h + s) % (2^128) */
283 f = (f >> 32) + le32_to_cpu(digest[0]) + dctx->s[0];
284 put_unaligned_le32(f, dst + 0);
285 f = (f >> 32) + le32_to_cpu(digest[1]) + dctx->s[1];
286 put_unaligned_le32(f, dst + 4);
287 f = (f >> 32) + le32_to_cpu(digest[2]) + dctx->s[2];
288 put_unaligned_le32(f, dst + 8);
289 f = (f >> 32) + le32_to_cpu(digest[3]) + dctx->s[3];
290 put_unaligned_le32(f, dst + 12);
294 EXPORT_SYMBOL_GPL(crypto_poly1305_final);
296 static struct shash_alg poly1305_alg = {
297 .digestsize = POLY1305_DIGEST_SIZE,
298 .init = crypto_poly1305_init,
299 .update = crypto_poly1305_update,
300 .final = crypto_poly1305_final,
301 .descsize = sizeof(struct poly1305_desc_ctx),
303 .cra_name = "poly1305",
304 .cra_driver_name = "poly1305-generic",
306 .cra_blocksize = POLY1305_BLOCK_SIZE,
307 .cra_module = THIS_MODULE,
311 static int __init poly1305_mod_init(void)
313 return crypto_register_shash(&poly1305_alg);
316 static void __exit poly1305_mod_exit(void)
318 crypto_unregister_shash(&poly1305_alg);
321 subsys_initcall(poly1305_mod_init);
322 module_exit(poly1305_mod_exit);
324 MODULE_LICENSE("GPL");
325 MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
326 MODULE_DESCRIPTION("Poly1305 authenticator");
327 MODULE_ALIAS_CRYPTO("poly1305");
328 MODULE_ALIAS_CRYPTO("poly1305-generic");