// SPDX-License-Identifier: GPL-2.0-only
/*
 * Bit sliced AES using NEON instructions
 *
 * Copyright (C) 2016 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <linux/module.h>

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

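/*
 * The aesbs_* routines below are implemented in assembly
 * (aes-neonbs-core.S) and operate on up to eight AES blocks at a
 * time, using a bit-sliced representation of the cipher state.
 */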
asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);

asmlinkage void aesbs_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks);
asmlinkage void aesbs_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks);

asmlinkage void aesbs_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[], u8 final[]);

asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]);
asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]);

/* borrowed from aes-neon-blk.ko */
asmlinkage void neon_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                     int rounds, int blocks);
asmlinkage void neon_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                     int rounds, int blocks, u8 iv[]);
asmlinkage void neon_aes_xts_encrypt(u8 out[], u8 const in[],
                                     u32 const rk1[], int rounds, int bytes,
                                     u32 const rk2[], u8 iv[], int first);
asmlinkage void neon_aes_xts_decrypt(u8 out[], u8 const in[],
                                     u32 const rk1[], int rounds, int bytes,
                                     u32 const rk2[], u8 iv[], int first);

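/*
 * rk holds the key schedule in the bit-sliced layout produced by
 * aesbs_convert_key(): 13 * (8 * AES_BLOCK_SIZE) + 32 = 1696 bytes,
 * sized for the largest (AES-256, 14 round) key schedule.
 */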
struct aesbs_ctx {
        u8      rk[13 * (8 * AES_BLOCK_SIZE) + 32];
        int     rounds;
} __aligned(AES_BLOCK_SIZE);

struct aesbs_cbc_ctx {
        struct aesbs_ctx        key;
        u32                     enc[AES_MAX_KEYLENGTH_U32];
};

struct aesbs_ctr_ctx {
        struct aesbs_ctx        key;            /* must be first member */
        struct crypto_aes_ctx   fallback;
};

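/*
 * twkey holds the expanded tweak key (the second half of the XTS key),
 * and cts holds a regular key schedule for the plain NEON helpers that
 * handle the ciphertext stealing tail.
 */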
struct aesbs_xts_ctx {
        struct aesbs_ctx        key;
        u32                     twkey[AES_MAX_KEYLENGTH_U32];
        struct crypto_aes_ctx   cts;
};

static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                        unsigned int key_len)
{
        struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_aes_ctx rk;
        int err;

        err = aes_expandkey(&rk, in_key, key_len);
        if (err)
                return err;

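        /* 6 + key_len / 4 yields 10/12/14 rounds for 128/192/256-bit keys */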
        ctx->rounds = 6 + key_len / 4;

        kernel_neon_begin();
        aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);
        kernel_neon_end();

        return 0;
}

static int __ecb_crypt(struct skcipher_request *req,
                       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks))
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

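                /*
                 * On all but the last step, round down to a whole
                 * number of eight-block walk strides so the bit-sliced
                 * code always runs at full width.
                 */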
                if (walk.nbytes < walk.total)
                        blocks = round_down(blocks,
                                            walk.stride / AES_BLOCK_SIZE);

                kernel_neon_begin();
                fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
                   ctx->rounds, blocks);
                kernel_neon_end();
                err = skcipher_walk_done(&walk,
                                         walk.nbytes - blocks * AES_BLOCK_SIZE);
        }

        return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
        return __ecb_crypt(req, aesbs_ecb_encrypt);
}

static int ecb_decrypt(struct skcipher_request *req)
{
        return __ecb_crypt(req, aesbs_ecb_decrypt);
}

static int aesbs_cbc_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int key_len)
{
        struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_aes_ctx rk;
        int err;

        err = aes_expandkey(&rk, in_key, key_len);
        if (err)
                return err;

        ctx->key.rounds = 6 + key_len / 4;

        memcpy(ctx->enc, rk.key_enc, sizeof(ctx->enc));

        kernel_neon_begin();
        aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
        kernel_neon_end();
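        /* wipe the expanded key schedule off the stack */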
        memzero_explicit(&rk, sizeof(rk));

        return 0;
}

static int cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

                /*
                 * CBC encryption is inherently serial: each block
                 * depends on the previous ciphertext block, so the
                 * eight-way bit-sliced code cannot help. Fall back to
                 * the non-bitsliced NEON implementation instead.
                 */
                kernel_neon_begin();
                neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                     ctx->enc, ctx->key.rounds, blocks,
                                     walk.iv);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

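/*
 * Unlike encryption, CBC decryption is parallelizable: each plaintext
 * block needs only the preceding ciphertext block as its XOR mask, so
 * the bit-sliced code can process eight blocks at once.
 */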
static int cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

                if (walk.nbytes < walk.total)
                        blocks = round_down(blocks,
                                            walk.stride / AES_BLOCK_SIZE);

                kernel_neon_begin();
                aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                  ctx->key.rk, ctx->key.rounds, blocks,
                                  walk.iv);
                kernel_neon_end();
                err = skcipher_walk_done(&walk,
                                         walk.nbytes - blocks * AES_BLOCK_SIZE);
        }

        return err;
}

static int aesbs_ctr_setkey_sync(struct crypto_skcipher *tfm, const u8 *in_key,
                                 unsigned int key_len)
{
        struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err;

        err = aes_expandkey(&ctx->fallback, in_key, key_len);
        if (err)
                return err;

        ctx->key.rounds = 6 + key_len / 4;

        kernel_neon_begin();
        aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);
        kernel_neon_end();

        return 0;
}

static int ctr_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        u8 buf[AES_BLOCK_SIZE];
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
                u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL;

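                /*
                 * 'final' is only passed to the asm on the last walk
                 * step: it receives the keystream block for a trailing
                 * partial block, which is XORed into the tail below.
                 */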
                if (walk.nbytes < walk.total) {
                        blocks = round_down(blocks,
                                            walk.stride / AES_BLOCK_SIZE);
                        final = NULL;
                }

                kernel_neon_begin();
                aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                  ctx->rk, ctx->rounds, blocks, walk.iv, final);
                kernel_neon_end();

                if (final) {
                        u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
                        u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;

                        crypto_xor_cpy(dst, src, final,
                                       walk.total % AES_BLOCK_SIZE);

                        err = skcipher_walk_done(&walk, 0);
                        break;
                }
                err = skcipher_walk_done(&walk,
                                         walk.nbytes - blocks * AES_BLOCK_SIZE);
        }
        return err;
}

static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int key_len)
{
        struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_aes_ctx rk;
        int err;

        err = xts_verify_key(tfm, in_key, key_len);
        if (err)
                return err;

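        /* the XTS key consists of two equal halves: data key, then tweak key */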
        key_len /= 2;
        err = aes_expandkey(&ctx->cts, in_key, key_len);
        if (err)
                return err;

        err = aes_expandkey(&rk, in_key + key_len, key_len);
        if (err)
                return err;

        memcpy(ctx->twkey, rk.key_enc, sizeof(ctx->twkey));

        return aesbs_setkey(tfm, in_key, key_len);
}

static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
{
        struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        unsigned long flags;

        /*
         * Temporarily disable interrupts to avoid races where
         * cachelines are evicted when the CPU is interrupted
         * to do something else.
         */
        local_irq_save(flags);
        aes_encrypt(&ctx->fallback, dst, src);
        local_irq_restore(flags);
}

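/*
 * Synchronous CTR entry point: if NEON is not usable in the current
 * context, fall back to the scalar library cipher, one block at a time.
 */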
static int ctr_encrypt_sync(struct skcipher_request *req)
{
        if (!crypto_simd_usable())
                return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);

        return ctr_encrypt(req);
}

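/*
 * XTS: encrypt the IV with the tweak key once, run the bulk of the
 * data through the eight-way bit-sliced code, and hand short runs
 * (six blocks or fewer) and the ciphertext stealing tail to the
 * plain NEON helpers, which are faster at those sizes.
 */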
static int __xts_crypt(struct skcipher_request *req, bool encrypt,
                       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]))
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int tail = req->cryptlen % (8 * AES_BLOCK_SIZE);
        struct scatterlist sg_src[2], sg_dst[2];
        struct skcipher_request subreq;
        struct scatterlist *src, *dst;
        struct skcipher_walk walk;
        int nbytes, err;
        int first = 1;
        u8 *out, *in;

        if (req->cryptlen < AES_BLOCK_SIZE)
                return -EINVAL;

        /* ensure that the cts tail is covered by a single step */
        if (unlikely(tail > 0 && tail < AES_BLOCK_SIZE)) {
                int xts_blocks = DIV_ROUND_UP(req->cryptlen,
                                              AES_BLOCK_SIZE) - 2;

                skcipher_request_set_tfm(&subreq, tfm);
                skcipher_request_set_callback(&subreq,
                                              skcipher_request_flags(req),
                                              NULL, NULL);
                skcipher_request_set_crypt(&subreq, req->src, req->dst,
                                           xts_blocks * AES_BLOCK_SIZE,
                                           req->iv);
                req = &subreq;
        } else {
                tail = 0;
        }

        err = skcipher_walk_virt(&walk, req, false);
        if (err)
                return err;

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

                if (walk.nbytes < walk.total || walk.nbytes % AES_BLOCK_SIZE)
                        blocks = round_down(blocks,
                                            walk.stride / AES_BLOCK_SIZE);

                out = walk.dst.virt.addr;
                in = walk.src.virt.addr;
                nbytes = walk.nbytes;

                kernel_neon_begin();
                if (likely(blocks > 6)) { /* plain NEON is faster otherwise */
                        if (first)
                                neon_aes_ecb_encrypt(walk.iv, walk.iv,
                                                     ctx->twkey,
                                                     ctx->key.rounds, 1);
                        first = 0;

                        fn(out, in, ctx->key.rk, ctx->key.rounds, blocks,
                           walk.iv);

                        out += blocks * AES_BLOCK_SIZE;
                        in += blocks * AES_BLOCK_SIZE;
                        nbytes -= blocks * AES_BLOCK_SIZE;
                }

                if (walk.nbytes == walk.total && nbytes > 0)
                        goto xts_tail;

                kernel_neon_end();
                err = skcipher_walk_done(&walk, nbytes);
        }

        if (err || likely(!tail))
                return err;

        /* handle ciphertext stealing */
        dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
        if (req->dst != req->src)
                dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

        skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
                                   req->iv);

        err = skcipher_walk_virt(&walk, req, false);
        if (err)
                return err;

        out = walk.dst.virt.addr;
        in = walk.src.virt.addr;
        nbytes = walk.nbytes;

        kernel_neon_begin();
xts_tail:
        if (encrypt)
                neon_aes_xts_encrypt(out, in, ctx->cts.key_enc, ctx->key.rounds,
                                     nbytes, ctx->twkey, walk.iv, first ?: 2);
        else
                neon_aes_xts_decrypt(out, in, ctx->cts.key_dec, ctx->key.rounds,
                                     nbytes, ctx->twkey, walk.iv, first ?: 2);
        kernel_neon_end();

        return skcipher_walk_done(&walk, 0);
}

static int xts_encrypt(struct skcipher_request *req)
{
        return __xts_crypt(req, true, aesbs_xts_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
        return __xts_crypt(req, false, aesbs_xts_decrypt);
}

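/*
 * The "__" prefixed algorithms are internal (CRYPTO_ALG_INTERNAL): they
 * may only run where the NEON unit is usable, and are exposed to the
 * rest of the kernel through the simd skcipher wrappers registered in
 * aes_init() below, which defer to an async worker when NEON cannot be
 * used directly.
 */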
static struct skcipher_alg aes_algs[] = { {
        .base.cra_name          = "__ecb(aes)",
        .base.cra_driver_name   = "__ecb-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct aesbs_ctx),
        .base.cra_module        = THIS_MODULE,
        .base.cra_flags         = CRYPTO_ALG_INTERNAL,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .setkey                 = aesbs_setkey,
        .encrypt                = ecb_encrypt,
        .decrypt                = ecb_decrypt,
}, {
        .base.cra_name          = "__cbc(aes)",
        .base.cra_driver_name   = "__cbc-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct aesbs_cbc_ctx),
        .base.cra_module        = THIS_MODULE,
        .base.cra_flags         = CRYPTO_ALG_INTERNAL,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_cbc_setkey,
        .encrypt                = cbc_encrypt,
        .decrypt                = cbc_decrypt,
}, {
        .base.cra_name          = "__ctr(aes)",
        .base.cra_driver_name   = "__ctr-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = 1,
        .base.cra_ctxsize       = sizeof(struct aesbs_ctx),
        .base.cra_module        = THIS_MODULE,
        .base.cra_flags         = CRYPTO_ALG_INTERNAL,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .chunksize              = AES_BLOCK_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_setkey,
        .encrypt                = ctr_encrypt,
        .decrypt                = ctr_encrypt,
}, {
        .base.cra_name          = "ctr(aes)",
        .base.cra_driver_name   = "ctr-aes-neonbs",
        .base.cra_priority      = 250 - 1,
        .base.cra_blocksize     = 1,
        .base.cra_ctxsize       = sizeof(struct aesbs_ctr_ctx),
        .base.cra_module        = THIS_MODULE,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .chunksize              = AES_BLOCK_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_ctr_setkey_sync,
        .encrypt                = ctr_encrypt_sync,
        .decrypt                = ctr_encrypt_sync,
}, {
        .base.cra_name          = "__xts(aes)",
        .base.cra_driver_name   = "__xts-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct aesbs_xts_ctx),
        .base.cra_module        = THIS_MODULE,
        .base.cra_flags         = CRYPTO_ALG_INTERNAL,

        .min_keysize            = 2 * AES_MIN_KEY_SIZE,
        .max_keysize            = 2 * AES_MAX_KEY_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_xts_setkey,
        .encrypt                = xts_encrypt,
        .decrypt                = xts_decrypt,
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
                if (aes_simd_algs[i])
                        simd_skcipher_free(aes_simd_algs[i]);

        crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
        struct simd_skcipher_alg *simd;
        const char *basename;
        const char *algname;
        const char *drvname;
        int err;
        int i;

        if (!cpu_have_named_feature(ASIMD))
                return -ENODEV;

        err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
        if (err)
                return err;

        for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
                if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
                        continue;

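                /* strip the "__" prefix to form the public algorithm names */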
                algname = aes_algs[i].base.cra_name + 2;
                drvname = aes_algs[i].base.cra_driver_name + 2;
                basename = aes_algs[i].base.cra_driver_name;
                simd = simd_skcipher_create_compat(algname, drvname, basename);
                err = PTR_ERR(simd);
                if (IS_ERR(simd))
                        goto unregister_simds;

                aes_simd_algs[i] = simd;
        }
        return 0;

unregister_simds:
        aes_exit();
        return err;
}

module_init(aes_init);
module_exit(aes_exit);