1 // SPDX-License-Identifier: GPL-2.0+
3 * caam - Freescale FSL CAAM support for crypto API
5 * Copyright 2008-2011 Freescale Semiconductor, Inc.
6 * Copyright 2016-2019, 2023 NXP
8 * Based on talitos crypto API driver.
10 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
* ---------------                     ---------------
* | JobDesc #1  |-------------------->|  ShareDesc  |
* | *(packet 1) |                     |   (PDB)     |
* ---------------      |------------->|  (hashKey)  |
*       .              |              | (cipherKey) |
*       .              |    |-------->| (operation) |
* ---------------      |    |         ---------------
* | JobDesc #2  |------|    |
* | *(packet 2) |           |
* ---------------           |
*       .                   |
*       .                   |
* ---------------           |
* | JobDesc #3  |------------
* | *(packet 3) |
* ---------------
29 * The SharedDesc never changes for a connection unless rekeyed, but
30 * each packet will likely be in a different place. So all we need
31 * to know to process the packet is where the input is, where the
32 * output goes, and what context we want to process with. Context is
33 * in the SharedDesc, packet references in the JobDesc.
35 * So, a job desc looks like:
* ---------------------
* | Header            |
* | ShareDesc Pointer |
* | SEQ_OUT_PTR       |
* | (output buffer)   |
* | (output length)   |
* | SEQ_IN_PTR        |
* | (input buffer)    |
* | (input length)    |
* ---------------------
*/
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"
59 #include <asm/unaligned.h>
60 #include <crypto/internal/engine.h>
61 #include <crypto/xts.h>
62 #include <linux/dma-mapping.h>
63 #include <linux/device.h>
64 #include <linux/err.h>
65 #include <linux/kernel.h>
66 #include <linux/slab.h>
67 #include <linux/string.h>
72 #define CAAM_CRA_PRIORITY 3000
/* max key is sum of AES_MAX_KEY_SIZE, RFC3686 nonce size and max split key size */
74 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
75 CTR_RFC3686_NONCE_SIZE + \
76 SHA512_DIGEST_SIZE * 2)
78 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + \
				 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + \
				 CAAM_CMD_SZ * 5)
84 #define CHACHAPOLY_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)
86 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
87 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
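/*
 * DESC_MAX_USED_LEN is the largest shared descriptor length, in 32-bit
 * command words, that still leaves enough room in the 64-word descriptor
 * buffer for the job descriptor header, pointers and I/O commands.
 */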
89 struct caam_alg_entry {
97 struct caam_aead_alg {
99 struct caam_alg_entry caam;
103 struct caam_skcipher_alg {
104 struct skcipher_alg skcipher;
105 struct caam_alg_entry caam;
110 * per-session context
113 struct crypto_engine_ctx enginectx;
114 u32 sh_desc_enc[DESC_MAX_USED_LEN];
115 u32 sh_desc_dec[DESC_MAX_USED_LEN];
116 u8 key[CAAM_MAX_KEY_SIZE];
117 dma_addr_t sh_desc_enc_dma;
118 dma_addr_t sh_desc_dec_dma;
120 enum dma_data_direction dir;
121 struct device *jrdev;
122 struct alginfo adata;
123 struct alginfo cdata;
124 unsigned int authsize;
125 bool xts_key_fallback;
126 struct crypto_skcipher *fallback;
129 struct caam_skcipher_req_ctx {
130 struct skcipher_edesc *edesc;
131 struct skcipher_request fallback_req;
134 struct caam_aead_req_ctx {
135 struct aead_edesc *edesc;
138 static int aead_null_set_sh_desc(struct crypto_aead *aead)
140 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
141 struct device *jrdev = ctx->jrdev;
142 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
144 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
145 ctx->adata.keylen_pad;
148 * Job Descriptor and Shared Descriptors
149 * must all fit into the 64-word Descriptor h/w Buffer
151 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
152 ctx->adata.key_inline = true;
153 ctx->adata.key_virt = ctx->key;
155 ctx->adata.key_inline = false;
156 ctx->adata.key_dma = ctx->key_dma;
159 /* aead_encrypt shared descriptor */
160 desc = ctx->sh_desc_enc;
161 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
163 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
164 desc_bytes(desc), ctx->dir);
167 * Job Descriptor and Shared Descriptors
168 * must all fit into the 64-word Descriptor h/w Buffer
170 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
171 ctx->adata.key_inline = true;
172 ctx->adata.key_virt = ctx->key;
174 ctx->adata.key_inline = false;
175 ctx->adata.key_dma = ctx->key_dma;
178 /* aead_decrypt shared descriptor */
179 desc = ctx->sh_desc_dec;
180 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
182 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
183 desc_bytes(desc), ctx->dir);
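/*
 * Build the encrypt and decrypt shared descriptors for authenc-style
 * AEAD algorithms. Keys are inlined into the descriptor when they fit
 * (see desc_inline_query() below), otherwise they are referenced by
 * DMA address. For givencrypt algorithms the encrypt descriptor is
 * rebuilt as a givencap variant at the end of the function.
 */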
188 static int aead_set_sh_desc(struct crypto_aead *aead)
190 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
191 struct caam_aead_alg, aead);
192 unsigned int ivsize = crypto_aead_ivsize(aead);
193 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
194 struct device *jrdev = ctx->jrdev;
195 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
197 u32 *desc, *nonce = NULL;
199 unsigned int data_len[2];
200 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
201 OP_ALG_AAI_CTR_MOD128);
202 const bool is_rfc3686 = alg->caam.rfc3686;
207 /* NULL encryption / decryption */
208 if (!ctx->cdata.keylen)
209 return aead_null_set_sh_desc(aead);
212 * AES-CTR needs to load IV in CONTEXT1 reg
213 * at an offset of 128bits (16bytes)
214 * CONTEXT1[255:128] = IV
221 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
224 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
225 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
226 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
230 * In case |user key| > |derived key|, using DKP<imm,imm>
231 * would result in invalid opcodes (last bytes of user key) in
232 * the resulting descriptor. Use DKP<ptr,imm> instead => both
233 * virtual and dma key addresses are needed.
235 ctx->adata.key_virt = ctx->key;
236 ctx->adata.key_dma = ctx->key_dma;
238 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
239 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
241 data_len[0] = ctx->adata.keylen_pad;
242 data_len[1] = ctx->cdata.keylen;
248 * Job Descriptor and Shared Descriptors
249 * must all fit into the 64-word Descriptor h/w Buffer
251 if (desc_inline_query(DESC_AEAD_ENC_LEN +
252 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
253 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
254 ARRAY_SIZE(data_len)) < 0)
257 ctx->adata.key_inline = !!(inl_mask & 1);
258 ctx->cdata.key_inline = !!(inl_mask & 2);
260 /* aead_encrypt shared descriptor */
261 desc = ctx->sh_desc_enc;
262 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
263 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
264 false, ctrlpriv->era);
265 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
266 desc_bytes(desc), ctx->dir);
270 * Job Descriptor and Shared Descriptors
271 * must all fit into the 64-word Descriptor h/w Buffer
273 if (desc_inline_query(DESC_AEAD_DEC_LEN +
274 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
275 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
276 ARRAY_SIZE(data_len)) < 0)
279 ctx->adata.key_inline = !!(inl_mask & 1);
280 ctx->cdata.key_inline = !!(inl_mask & 2);
282 /* aead_decrypt shared descriptor */
283 desc = ctx->sh_desc_dec;
284 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
285 ctx->authsize, alg->caam.geniv, is_rfc3686,
286 nonce, ctx1_iv_off, false, ctrlpriv->era);
287 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
288 desc_bytes(desc), ctx->dir);
290 if (!alg->caam.geniv)
294 * Job Descriptor and Shared Descriptors
295 * must all fit into the 64-word Descriptor h/w Buffer
297 if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
298 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
299 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
300 ARRAY_SIZE(data_len)) < 0)
303 ctx->adata.key_inline = !!(inl_mask & 1);
304 ctx->cdata.key_inline = !!(inl_mask & 2);
306 /* aead_givencrypt shared descriptor */
307 desc = ctx->sh_desc_enc;
308 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
309 ctx->authsize, is_rfc3686, nonce,
310 ctx1_iv_off, false, ctrlpriv->era);
311 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
312 desc_bytes(desc), ctx->dir);
318 static int aead_setauthsize(struct crypto_aead *authenc,
319 unsigned int authsize)
321 struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
323 ctx->authsize = authsize;
324 aead_set_sh_desc(authenc);
329 static int gcm_set_sh_desc(struct crypto_aead *aead)
331 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
332 struct device *jrdev = ctx->jrdev;
333 unsigned int ivsize = crypto_aead_ivsize(aead);
335 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
338 if (!ctx->cdata.keylen || !ctx->authsize)
342 * AES GCM encrypt shared descriptor
343 * Job Descriptor and Shared Descriptor
344 * must fit into the 64-word Descriptor h/w Buffer
346 if (rem_bytes >= DESC_GCM_ENC_LEN) {
347 ctx->cdata.key_inline = true;
348 ctx->cdata.key_virt = ctx->key;
350 ctx->cdata.key_inline = false;
351 ctx->cdata.key_dma = ctx->key_dma;
354 desc = ctx->sh_desc_enc;
355 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
356 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
357 desc_bytes(desc), ctx->dir);
360 * Job Descriptor and Shared Descriptors
361 * must all fit into the 64-word Descriptor h/w Buffer
363 if (rem_bytes >= DESC_GCM_DEC_LEN) {
364 ctx->cdata.key_inline = true;
365 ctx->cdata.key_virt = ctx->key;
367 ctx->cdata.key_inline = false;
368 ctx->cdata.key_dma = ctx->key_dma;
371 desc = ctx->sh_desc_dec;
372 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
373 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
374 desc_bytes(desc), ctx->dir);
379 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
381 struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
384 err = crypto_gcm_check_authsize(authsize);
388 ctx->authsize = authsize;
389 gcm_set_sh_desc(authenc);
394 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
396 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
397 struct device *jrdev = ctx->jrdev;
398 unsigned int ivsize = crypto_aead_ivsize(aead);
400 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
403 if (!ctx->cdata.keylen || !ctx->authsize)
407 * RFC4106 encrypt shared descriptor
408 * Job Descriptor and Shared Descriptor
409 * must fit into the 64-word Descriptor h/w Buffer
411 if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
412 ctx->cdata.key_inline = true;
413 ctx->cdata.key_virt = ctx->key;
415 ctx->cdata.key_inline = false;
416 ctx->cdata.key_dma = ctx->key_dma;
419 desc = ctx->sh_desc_enc;
420 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
422 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
423 desc_bytes(desc), ctx->dir);
426 * Job Descriptor and Shared Descriptors
427 * must all fit into the 64-word Descriptor h/w Buffer
429 if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
430 ctx->cdata.key_inline = true;
431 ctx->cdata.key_virt = ctx->key;
433 ctx->cdata.key_inline = false;
434 ctx->cdata.key_dma = ctx->key_dma;
437 desc = ctx->sh_desc_dec;
438 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
440 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
441 desc_bytes(desc), ctx->dir);
446 static int rfc4106_setauthsize(struct crypto_aead *authenc,
447 unsigned int authsize)
449 struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
452 err = crypto_rfc4106_check_authsize(authsize);
456 ctx->authsize = authsize;
457 rfc4106_set_sh_desc(authenc);
462 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
464 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
465 struct device *jrdev = ctx->jrdev;
466 unsigned int ivsize = crypto_aead_ivsize(aead);
468 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
471 if (!ctx->cdata.keylen || !ctx->authsize)
475 * RFC4543 encrypt shared descriptor
476 * Job Descriptor and Shared Descriptor
477 * must fit into the 64-word Descriptor h/w Buffer
479 if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
480 ctx->cdata.key_inline = true;
481 ctx->cdata.key_virt = ctx->key;
483 ctx->cdata.key_inline = false;
484 ctx->cdata.key_dma = ctx->key_dma;
487 desc = ctx->sh_desc_enc;
488 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
490 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
491 desc_bytes(desc), ctx->dir);
494 * Job Descriptor and Shared Descriptors
495 * must all fit into the 64-word Descriptor h/w Buffer
497 if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
498 ctx->cdata.key_inline = true;
499 ctx->cdata.key_virt = ctx->key;
501 ctx->cdata.key_inline = false;
502 ctx->cdata.key_dma = ctx->key_dma;
505 desc = ctx->sh_desc_dec;
506 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
508 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
509 desc_bytes(desc), ctx->dir);
514 static int rfc4543_setauthsize(struct crypto_aead *authenc,
515 unsigned int authsize)
517 struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
522 ctx->authsize = authsize;
523 rfc4543_set_sh_desc(authenc);
528 static int chachapoly_set_sh_desc(struct crypto_aead *aead)
530 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
531 struct device *jrdev = ctx->jrdev;
532 unsigned int ivsize = crypto_aead_ivsize(aead);
535 if (!ctx->cdata.keylen || !ctx->authsize)
538 desc = ctx->sh_desc_enc;
539 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
540 ctx->authsize, true, false);
541 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
542 desc_bytes(desc), ctx->dir);
544 desc = ctx->sh_desc_dec;
545 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
546 ctx->authsize, false, false);
547 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
548 desc_bytes(desc), ctx->dir);
553 static int chachapoly_setauthsize(struct crypto_aead *aead,
554 unsigned int authsize)
556 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
558 if (authsize != POLY1305_DIGEST_SIZE)
561 ctx->authsize = authsize;
562 return chachapoly_set_sh_desc(aead);
565 static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
568 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
569 unsigned int ivsize = crypto_aead_ivsize(aead);
570 unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;
572 if (keylen != CHACHA_KEY_SIZE + saltlen)
575 ctx->cdata.key_virt = key;
576 ctx->cdata.keylen = keylen - saltlen;
578 return chachapoly_set_sh_desc(aead);
581 static int aead_setkey(struct crypto_aead *aead,
582 const u8 *key, unsigned int keylen)
584 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
585 struct device *jrdev = ctx->jrdev;
586 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
587 struct crypto_authenc_keys keys;
590 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
593 dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
594 keys.authkeylen + keys.enckeylen, keys.enckeylen,
596 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
597 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
/*
 * If DKP is supported, use it in the shared descriptor to generate
 * the split key.
 */
603 if (ctrlpriv->era >= 6) {
604 ctx->adata.keylen = keys.authkeylen;
605 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
608 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
611 memcpy(ctx->key, keys.authkey, keys.authkeylen);
612 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
614 dma_sync_single_for_device(jrdev, ctx->key_dma,
615 ctx->adata.keylen_pad +
616 keys.enckeylen, ctx->dir);
620 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
621 keys.authkeylen, CAAM_MAX_KEY_SIZE -
/* append the encryption key after the auth split key */
628 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
629 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
630 keys.enckeylen, ctx->dir);
632 print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ",
633 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
634 ctx->adata.keylen_pad + keys.enckeylen, 1);
637 ctx->cdata.keylen = keys.enckeylen;
638 memzero_explicit(&keys, sizeof(keys));
639 return aead_set_sh_desc(aead);
641 memzero_explicit(&keys, sizeof(keys));
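/*
 * des3_aead_setkey() rejects weak/degenerate 3DES keys via
 * verify_aead_des3_key() before handing the full authenc key blob
 * to aead_setkey().
 */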
645 static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
648 struct crypto_authenc_keys keys;
651 err = crypto_authenc_extractkeys(&keys, key, keylen);
655 err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?:
656 aead_setkey(aead, key, keylen);
658 memzero_explicit(&keys, sizeof(keys));
662 static int gcm_setkey(struct crypto_aead *aead,
663 const u8 *key, unsigned int keylen)
665 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
666 struct device *jrdev = ctx->jrdev;
669 err = aes_check_keylen(keylen);
673 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
674 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
676 memcpy(ctx->key, key, keylen);
677 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
678 ctx->cdata.keylen = keylen;
680 return gcm_set_sh_desc(aead);
683 static int rfc4106_setkey(struct crypto_aead *aead,
684 const u8 *key, unsigned int keylen)
686 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
687 struct device *jrdev = ctx->jrdev;
690 err = aes_check_keylen(keylen - 4);
694 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
695 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
697 memcpy(ctx->key, key, keylen);
700 * The last four bytes of the key material are used as the salt value
701 * in the nonce. Update the AES key length.
703 ctx->cdata.keylen = keylen - 4;
704 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
706 return rfc4106_set_sh_desc(aead);
709 static int rfc4543_setkey(struct crypto_aead *aead,
710 const u8 *key, unsigned int keylen)
712 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
713 struct device *jrdev = ctx->jrdev;
716 err = aes_check_keylen(keylen - 4);
720 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
721 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
723 memcpy(ctx->key, key, keylen);
726 * The last four bytes of the key material are used as the salt value
727 * in the nonce. Update the AES key length.
729 ctx->cdata.keylen = keylen - 4;
730 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
732 return rfc4543_set_sh_desc(aead);
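/*
 * Common skcipher setkey: record the cipher key (always inlined, since
 * there is no split key) and rebuild both the encrypt and decrypt
 * shared descriptors. ctx1_iv_off selects where the IV is loaded in
 * CONTEXT1 (non-zero for CTR and RFC3686).
 */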
735 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
736 unsigned int keylen, const u32 ctx1_iv_off)
738 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
739 struct caam_skcipher_alg *alg =
740 container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
742 struct device *jrdev = ctx->jrdev;
743 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
745 const bool is_rfc3686 = alg->caam.rfc3686;
747 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
748 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
750 ctx->cdata.keylen = keylen;
751 ctx->cdata.key_virt = key;
752 ctx->cdata.key_inline = true;
754 /* skcipher_encrypt shared descriptor */
755 desc = ctx->sh_desc_enc;
756 cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
758 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
759 desc_bytes(desc), ctx->dir);
761 /* skcipher_decrypt shared descriptor */
762 desc = ctx->sh_desc_dec;
763 cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
765 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
766 desc_bytes(desc), ctx->dir);
771 static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
772 const u8 *key, unsigned int keylen)
776 err = aes_check_keylen(keylen);
780 return skcipher_setkey(skcipher, key, keylen, 0);
783 static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
784 const u8 *key, unsigned int keylen)
791 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
792 * | *key = {KEY, NONCE}
794 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
795 keylen -= CTR_RFC3686_NONCE_SIZE;
797 err = aes_check_keylen(keylen);
801 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
804 static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
805 const u8 *key, unsigned int keylen)
811 * AES-CTR needs to load IV in CONTEXT1 reg
812 * at an offset of 128bits (16bytes)
813 * CONTEXT1[255:128] = IV
817 err = aes_check_keylen(keylen);
821 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
824 static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
825 const u8 *key, unsigned int keylen)
827 return verify_skcipher_des_key(skcipher, key) ?:
828 skcipher_setkey(skcipher, key, keylen, 0);
831 static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
832 const u8 *key, unsigned int keylen)
834 return verify_skcipher_des3_key(skcipher, key) ?:
835 skcipher_setkey(skcipher, key, keylen, 0);
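/*
 * XTS setkey: validate the key with xts_verify_key(), flag key sizes
 * other than 2 * 128/256 bits for the software fallback, program the
 * fallback tfm when it may be used (older era or fallback key size),
 * then rebuild the XTS shared descriptors.
 */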
838 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
841 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
842 struct device *jrdev = ctx->jrdev;
843 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
847 err = xts_verify_key(skcipher, key, keylen);
849 dev_dbg(jrdev, "key size mismatch\n");
853 if (keylen != 2 * AES_KEYSIZE_128 && keylen != 2 * AES_KEYSIZE_256)
854 ctx->xts_key_fallback = true;
856 if (ctrlpriv->era <= 8 || ctx->xts_key_fallback) {
857 err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
862 ctx->cdata.keylen = keylen;
863 ctx->cdata.key_virt = key;
864 ctx->cdata.key_inline = true;
866 /* xts_skcipher_encrypt shared descriptor */
867 desc = ctx->sh_desc_enc;
868 cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
869 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
870 desc_bytes(desc), ctx->dir);
872 /* xts_skcipher_decrypt shared descriptor */
873 desc = ctx->sh_desc_dec;
874 cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
875 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
876 desc_bytes(desc), ctx->dir);
882 * aead_edesc - s/w-extended aead descriptor
883 * @src_nents: number of segments in input s/w scatterlist
884 * @dst_nents: number of segments in output s/w scatterlist
885 * @mapped_src_nents: number of segments in input h/w link table
886 * @mapped_dst_nents: number of segments in output h/w link table
887 * @sec4_sg_bytes: length of dma mapped sec4_sg space
888 * @bklog: stored to determine if the request needs backlog
889 * @sec4_sg_dma: bus physical mapped address of h/w link table
890 * @sec4_sg: pointer to h/w link table
891 * @hw_desc: the h/w job descriptor followed by any referenced link tables
896 int mapped_src_nents;
897 int mapped_dst_nents;
900 dma_addr_t sec4_sg_dma;
901 struct sec4_sg_entry *sec4_sg;
906 * skcipher_edesc - s/w-extended skcipher descriptor
907 * @src_nents: number of segments in input s/w scatterlist
908 * @dst_nents: number of segments in output s/w scatterlist
909 * @mapped_src_nents: number of segments in input h/w link table
910 * @mapped_dst_nents: number of segments in output h/w link table
911 * @iv_dma: dma address of iv for checking continuity and link table
912 * @sec4_sg_bytes: length of dma mapped sec4_sg space
913 * @bklog: stored to determine if the request needs backlog
914 * @sec4_sg_dma: bus physical mapped address of h/w link table
915 * @sec4_sg: pointer to h/w link table
916 * @hw_desc: the h/w job descriptor followed by any referenced link tables
919 struct skcipher_edesc {
922 int mapped_src_nents;
923 int mapped_dst_nents;
927 dma_addr_t sec4_sg_dma;
928 struct sec4_sg_entry *sec4_sg;
932 static void caam_unmap(struct device *dev, struct scatterlist *src,
933 struct scatterlist *dst, int src_nents,
935 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
940 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
942 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
944 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
948 dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
950 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
954 static void aead_unmap(struct device *dev,
955 struct aead_edesc *edesc,
956 struct aead_request *req)
958 caam_unmap(dev, req->src, req->dst,
959 edesc->src_nents, edesc->dst_nents, 0, 0,
960 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
963 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
964 struct skcipher_request *req)
966 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
967 int ivsize = crypto_skcipher_ivsize(skcipher);
969 caam_unmap(dev, req->src, req->dst,
970 edesc->src_nents, edesc->dst_nents,
971 edesc->iv_dma, ivsize,
972 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
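/*
 * aead_crypt_done - job ring completion callback for AEAD requests.
 * Translates the CAAM status word into an errno, unmaps the extended
 * descriptor and completes the request either directly or through the
 * crypto engine, depending on whether it was backlogged.
 */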
975 static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err,
978 struct aead_request *req = context;
979 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
980 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
981 struct aead_edesc *edesc;
985 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
988 has_bklog = edesc->bklog;
991 ecode = caam_jr_strstatus(jrdev, err);
993 aead_unmap(jrdev, edesc, req);
* If the request carries no backlog flag, its completion is done
* directly by CAAM, not by the crypto engine.
1002 aead_request_complete(req, ecode);
1004 crypto_finalize_aead_request(jrp->engine, req, ecode);
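/*
 * The IV is stored in the same kmalloc'ed buffer as the extended
 * descriptor, right after the S/G table, aligned up to the DMA cache
 * alignment so it can be DMA mapped on its own.
 */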
1007 static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc)
1010 return PTR_ALIGN((u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
1011 dma_get_cache_alignment());
1014 static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
1017 struct skcipher_request *req = context;
1018 struct skcipher_edesc *edesc;
1019 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1020 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1021 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
1022 int ivsize = crypto_skcipher_ivsize(skcipher);
1026 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1028 edesc = rctx->edesc;
1029 has_bklog = edesc->bklog;
1031 ecode = caam_jr_strstatus(jrdev, err);
1033 skcipher_unmap(jrdev, edesc, req);
1036 * The crypto API expects us to set the IV (req->iv) to the last
1037 * ciphertext block (CBC mode) or last counter (CTR mode).
1038 * This is used e.g. by the CTS mode.
1040 if (ivsize && !ecode) {
1041 memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize);
1043 print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ",
1044 DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
1048 caam_dump_sg("dst @" __stringify(__LINE__)": ",
1049 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
1050 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
* If the request carries no backlog flag, its completion is done
* directly by CAAM, not by the crypto engine.
1059 skcipher_request_complete(req, ecode);
1061 crypto_finalize_skcipher_request(jrp->engine, req, ecode);
1065 * Fill in aead job descriptor
1067 static void init_aead_job(struct aead_request *req,
1068 struct aead_edesc *edesc,
1069 bool all_contig, bool encrypt)
1071 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1072 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1073 int authsize = ctx->authsize;
1074 u32 *desc = edesc->hw_desc;
1075 u32 out_options, in_options;
1076 dma_addr_t dst_dma, src_dma;
1077 int len, sec4_sg_index = 0;
1081 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1082 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1084 len = desc_len(sh_desc);
1085 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1088 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
1092 src_dma = edesc->sec4_sg_dma;
1093 sec4_sg_index += edesc->mapped_src_nents;
1094 in_options = LDST_SGF;
1097 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
1101 out_options = in_options;
1103 if (unlikely(req->src != req->dst)) {
1104 if (!edesc->mapped_dst_nents) {
1107 } else if (edesc->mapped_dst_nents == 1) {
1108 dst_dma = sg_dma_address(req->dst);
1111 dst_dma = edesc->sec4_sg_dma +
1113 sizeof(struct sec4_sg_entry);
1114 out_options = LDST_SGF;
1119 append_seq_out_ptr(desc, dst_dma,
1120 req->assoclen + req->cryptlen + authsize,
1123 append_seq_out_ptr(desc, dst_dma,
1124 req->assoclen + req->cryptlen - authsize,
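/*
 * init_gcm_job() builds on init_aead_job(): it stores assoclen in REG3,
 * loads the IV as immediate data and, for the non-generic (RFC4106)
 * case, appends the 4-byte salt kept at the end of the key.
 */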
1128 static void init_gcm_job(struct aead_request *req,
1129 struct aead_edesc *edesc,
1130 bool all_contig, bool encrypt)
1132 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1133 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1134 unsigned int ivsize = crypto_aead_ivsize(aead);
1135 u32 *desc = edesc->hw_desc;
1136 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
1139 init_aead_job(req, edesc, all_contig, encrypt);
1140 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1142 /* BUG This should not be specific to generic GCM. */
1144 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1145 last = FIFOLD_TYPE_LAST1;
1148 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
1149 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
1152 append_data(desc, ctx->key + ctx->cdata.keylen, 4);
1154 append_data(desc, req->iv, ivsize);
1155 /* End of blank commands */
1158 static void init_chachapoly_job(struct aead_request *req,
1159 struct aead_edesc *edesc, bool all_contig,
1162 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1163 unsigned int ivsize = crypto_aead_ivsize(aead);
1164 unsigned int assoclen = req->assoclen;
1165 u32 *desc = edesc->hw_desc;
1168 init_aead_job(req, edesc, all_contig, encrypt);
1170 if (ivsize != CHACHAPOLY_IV_SIZE) {
1171 /* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
* The associated data already comes with the IV, but we need
* to skip it when we authenticate or encrypt...
1181 append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);
* For IPsec, load the IV at a further offset in the same register.
* For RFC7539, simply load the 12-byte nonce in a single operation.
1187 append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
1188 LDST_SRCDST_BYTE_CONTEXT |
1189 ctx_iv_off << LDST_OFFSET_SHIFT);
1192 static void init_authenc_job(struct aead_request *req,
1193 struct aead_edesc *edesc,
1194 bool all_contig, bool encrypt)
1196 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1197 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1198 struct caam_aead_alg, aead);
1199 unsigned int ivsize = crypto_aead_ivsize(aead);
1200 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1201 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
1202 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
1203 OP_ALG_AAI_CTR_MOD128);
1204 const bool is_rfc3686 = alg->caam.rfc3686;
1205 u32 *desc = edesc->hw_desc;
1209 * AES-CTR needs to load IV in CONTEXT1 reg
1210 * at an offset of 128bits (16bytes)
1211 * CONTEXT1[255:128] = IV
1218 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1221 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
1223 init_aead_job(req, edesc, all_contig, encrypt);
1226 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
1227 * having DPOVRD as destination.
1229 if (ctrlpriv->era < 3)
1230 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1232 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
1234 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
1235 append_load_as_imm(desc, req->iv, ivsize,
1237 LDST_SRCDST_BYTE_CONTEXT |
1238 (ivoffset << LDST_OFFSET_SHIFT));
1242 * Fill in skcipher job descriptor
1244 static void init_skcipher_job(struct skcipher_request *req,
1245 struct skcipher_edesc *edesc,
1248 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1249 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
1250 struct device *jrdev = ctx->jrdev;
1251 int ivsize = crypto_skcipher_ivsize(skcipher);
1252 u32 *desc = edesc->hw_desc;
1254 u32 in_options = 0, out_options = 0;
1255 dma_addr_t src_dma, dst_dma, ptr;
1256 int len, sec4_sg_index = 0;
1258 print_hex_dump_debug("presciv@"__stringify(__LINE__)": ",
1259 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
dev_dbg(jrdev, "asked=%d, cryptlen=%d\n",
1261 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
1263 caam_dump_sg("src @" __stringify(__LINE__)": ",
1264 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1265 edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
1267 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1268 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1270 len = desc_len(sh_desc);
1271 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1273 if (ivsize || edesc->mapped_src_nents > 1) {
1274 src_dma = edesc->sec4_sg_dma;
1275 sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
1276 in_options = LDST_SGF;
1278 src_dma = sg_dma_address(req->src);
1281 append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);
1283 if (likely(req->src == req->dst)) {
1284 dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
1285 out_options = in_options;
1286 } else if (!ivsize && edesc->mapped_dst_nents == 1) {
1287 dst_dma = sg_dma_address(req->dst);
1289 dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
1290 sizeof(struct sec4_sg_entry);
1291 out_options = LDST_SGF;
1294 append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
1298 * allocate and map the aead extended descriptor
1300 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1301 int desc_bytes, bool *all_contig_ptr,
1304 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1305 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1306 struct device *jrdev = ctx->jrdev;
1307 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1308 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1309 GFP_KERNEL : GFP_ATOMIC;
1310 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1311 int src_len, dst_len = 0;
1312 struct aead_edesc *edesc;
1313 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
1314 unsigned int authsize = ctx->authsize;
1316 if (unlikely(req->dst != req->src)) {
1317 src_len = req->assoclen + req->cryptlen;
1318 dst_len = src_len + (encrypt ? authsize : (-authsize));
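/*
 * For encryption the destination must also hold the authsize-byte ICV
 * appended to the ciphertext; for decryption the ICV is consumed, so
 * the destination is authsize bytes shorter than the source.
 */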
1320 src_nents = sg_nents_for_len(req->src, src_len);
1321 if (unlikely(src_nents < 0)) {
1322 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1324 return ERR_PTR(src_nents);
1327 dst_nents = sg_nents_for_len(req->dst, dst_len);
1328 if (unlikely(dst_nents < 0)) {
1329 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1331 return ERR_PTR(dst_nents);
1334 src_len = req->assoclen + req->cryptlen +
1335 (encrypt ? authsize : 0);
1337 src_nents = sg_nents_for_len(req->src, src_len);
1338 if (unlikely(src_nents < 0)) {
1339 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1341 return ERR_PTR(src_nents);
1345 if (likely(req->src == req->dst)) {
mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
			      DMA_BIDIRECTIONAL);
1348 if (unlikely(!mapped_src_nents)) {
1349 dev_err(jrdev, "unable to map source\n");
1350 return ERR_PTR(-ENOMEM);
1353 /* Cover also the case of null (zero length) input data */
1355 mapped_src_nents = dma_map_sg(jrdev, req->src,
1356 src_nents, DMA_TO_DEVICE);
1357 if (unlikely(!mapped_src_nents)) {
1358 dev_err(jrdev, "unable to map source\n");
1359 return ERR_PTR(-ENOMEM);
1362 mapped_src_nents = 0;
1365 /* Cover also the case of null (zero length) output data */
mapped_dst_nents = dma_map_sg(jrdev, req->dst,
			      dst_nents, DMA_FROM_DEVICE);
1370 if (unlikely(!mapped_dst_nents)) {
1371 dev_err(jrdev, "unable to map destination\n");
1372 dma_unmap_sg(jrdev, req->src, src_nents,
1374 return ERR_PTR(-ENOMEM);
1377 mapped_dst_nents = 0;
1382 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1383 * the end of the table by allocating more S/G entries.
1385 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
1386 if (mapped_dst_nents > 1)
1387 sec4_sg_len += pad_sg_nents(mapped_dst_nents);
1389 sec4_sg_len = pad_sg_nents(sec4_sg_len);
1391 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1393 /* allocate space for base edesc and hw desc commands, link tables */
1394 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, flags);
1396 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1398 return ERR_PTR(-ENOMEM);
1401 edesc->src_nents = src_nents;
1402 edesc->dst_nents = dst_nents;
1403 edesc->mapped_src_nents = mapped_src_nents;
1404 edesc->mapped_dst_nents = mapped_dst_nents;
1405 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1408 rctx->edesc = edesc;
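/*
 * all_contig means the mapped source is a single DMA segment, so the
 * input can be passed directly without an input S/G table.
 */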
1410 *all_contig_ptr = !(mapped_src_nents > 1);
1413 if (mapped_src_nents > 1) {
1414 sg_to_sec4_sg_last(req->src, src_len,
1415 edesc->sec4_sg + sec4_sg_index, 0);
1416 sec4_sg_index += mapped_src_nents;
1418 if (mapped_dst_nents > 1) {
1419 sg_to_sec4_sg_last(req->dst, dst_len,
1420 edesc->sec4_sg + sec4_sg_index, 0);
1426 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1427 sec4_sg_bytes, DMA_TO_DEVICE);
1428 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1429 dev_err(jrdev, "unable to map S/G table\n");
1430 aead_unmap(jrdev, edesc, req);
1432 return ERR_PTR(-ENOMEM);
1435 edesc->sec4_sg_bytes = sec4_sg_bytes;
1440 static int aead_enqueue_req(struct device *jrdev, struct aead_request *req)
1442 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1443 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1444 struct aead_edesc *edesc = rctx->edesc;
1445 u32 *desc = edesc->hw_desc;
* Only backlog requests are sent to the crypto engine, since the others
* can be handled by CAAM, if free, especially since the JR has up to 1024
* entries (more than the 10 entries of the crypto engine).
1453 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1454 ret = crypto_transfer_aead_request_to_engine(jrpriv->engine,
1457 ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req);
1459 if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1460 aead_unmap(jrdev, edesc, req);
1467 static inline int chachapoly_crypt(struct aead_request *req, bool encrypt)
1469 struct aead_edesc *edesc;
1470 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1471 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1472 struct device *jrdev = ctx->jrdev;
1476 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1479 return PTR_ERR(edesc);
1481 desc = edesc->hw_desc;
1483 init_chachapoly_job(req, edesc, all_contig, encrypt);
1484 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1485 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1488 return aead_enqueue_req(jrdev, req);
1491 static int chachapoly_encrypt(struct aead_request *req)
1493 return chachapoly_crypt(req, true);
1496 static int chachapoly_decrypt(struct aead_request *req)
1498 return chachapoly_crypt(req, false);
1501 static inline int aead_crypt(struct aead_request *req, bool encrypt)
1503 struct aead_edesc *edesc;
1504 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1505 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1506 struct device *jrdev = ctx->jrdev;
1509 /* allocate extended descriptor */
1510 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1511 &all_contig, encrypt);
1513 return PTR_ERR(edesc);
1515 /* Create and submit job descriptor */
1516 init_authenc_job(req, edesc, all_contig, encrypt);
1518 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1519 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1520 desc_bytes(edesc->hw_desc), 1);
1522 return aead_enqueue_req(jrdev, req);
1525 static int aead_encrypt(struct aead_request *req)
1527 return aead_crypt(req, true);
1530 static int aead_decrypt(struct aead_request *req)
1532 return aead_crypt(req, false);
1535 static int aead_do_one_req(struct crypto_engine *engine, void *areq)
1537 struct aead_request *req = aead_request_cast(areq);
1538 struct caam_ctx *ctx = crypto_aead_ctx_dma(crypto_aead_reqtfm(req));
1539 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1540 u32 *desc = rctx->edesc->hw_desc;
1543 rctx->edesc->bklog = true;
1545 ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);
1547 if (ret == -ENOSPC && engine->retry_support)
1550 if (ret != -EINPROGRESS) {
1551 aead_unmap(ctx->jrdev, rctx->edesc, req);
1560 static inline int gcm_crypt(struct aead_request *req, bool encrypt)
1562 struct aead_edesc *edesc;
1563 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1564 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1565 struct device *jrdev = ctx->jrdev;
1568 /* allocate extended descriptor */
1569 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig,
1572 return PTR_ERR(edesc);
1574 /* Create and submit job descriptor */
1575 init_gcm_job(req, edesc, all_contig, encrypt);
1577 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1578 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1579 desc_bytes(edesc->hw_desc), 1);
1581 return aead_enqueue_req(jrdev, req);
1584 static int gcm_encrypt(struct aead_request *req)
1586 return gcm_crypt(req, true);
1589 static int gcm_decrypt(struct aead_request *req)
1591 return gcm_crypt(req, false);
1594 static int ipsec_gcm_encrypt(struct aead_request *req)
1596 return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req);
1599 static int ipsec_gcm_decrypt(struct aead_request *req)
1601 return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req);
1605 * allocate and map the skcipher extended descriptor for skcipher
1607 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1610 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1611 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
1612 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1613 struct device *jrdev = ctx->jrdev;
1614 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1615 GFP_KERNEL : GFP_ATOMIC;
1616 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1617 struct skcipher_edesc *edesc;
1618 dma_addr_t iv_dma = 0;
1620 int ivsize = crypto_skcipher_ivsize(skcipher);
1621 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1622 unsigned int aligned_size;
1624 src_nents = sg_nents_for_len(req->src, req->cryptlen);
1625 if (unlikely(src_nents < 0)) {
1626 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1628 return ERR_PTR(src_nents);
1631 if (req->dst != req->src) {
1632 dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
1633 if (unlikely(dst_nents < 0)) {
1634 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1636 return ERR_PTR(dst_nents);
1640 if (likely(req->src == req->dst)) {
mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
			      DMA_BIDIRECTIONAL);
1643 if (unlikely(!mapped_src_nents)) {
1644 dev_err(jrdev, "unable to map source\n");
1645 return ERR_PTR(-ENOMEM);
mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
			      DMA_TO_DEVICE);
1650 if (unlikely(!mapped_src_nents)) {
1651 dev_err(jrdev, "unable to map source\n");
1652 return ERR_PTR(-ENOMEM);
mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
			      DMA_FROM_DEVICE);
1656 if (unlikely(!mapped_dst_nents)) {
1657 dev_err(jrdev, "unable to map destination\n");
1658 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1659 return ERR_PTR(-ENOMEM);
1663 if (!ivsize && mapped_src_nents == 1)
1664 sec4_sg_ents = 0; // no need for an input hw s/g table
1666 sec4_sg_ents = mapped_src_nents + !!ivsize;
1667 dst_sg_idx = sec4_sg_ents;
1670 * Input, output HW S/G tables: [IV, src][dst, IV]
1671 * IV entries point to the same buffer
1672 * If src == dst, S/G entries are reused (S/G tables overlap)
1674 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1675 * the end of the table by allocating more S/G entries. Logic:
* if (output S/G)
*	pad output S/G, if needed
* else if (input S/G) ...
*	pad input S/G, if needed
1681 if (ivsize || mapped_dst_nents > 1) {
1682 if (req->src == req->dst)
1683 sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
1685 sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
1688 sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
1691 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1694 * allocate space for base edesc and hw desc commands, link tables, IV
1696 aligned_size = sizeof(*edesc) + desc_bytes + sec4_sg_bytes;
1697 aligned_size = ALIGN(aligned_size, dma_get_cache_alignment());
1698 aligned_size += ~(ARCH_KMALLOC_MINALIGN - 1) &
1699 (dma_get_cache_alignment() - 1);
1700 aligned_size += ALIGN(ivsize, dma_get_cache_alignment());
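/*
 * kmalloc() only guarantees ARCH_KMALLOC_MINALIGN alignment, so add the
 * worst-case padding needed to align the IV, which lives after the S/G
 * table, up to the DMA cache alignment (see skcipher_edesc_iv()).
 */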
1701 edesc = kzalloc(aligned_size, flags);
1703 dev_err(jrdev, "could not allocate extended descriptor\n");
1704 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1706 return ERR_PTR(-ENOMEM);
1709 edesc->src_nents = src_nents;
1710 edesc->dst_nents = dst_nents;
1711 edesc->mapped_src_nents = mapped_src_nents;
1712 edesc->mapped_dst_nents = mapped_dst_nents;
1713 edesc->sec4_sg_bytes = sec4_sg_bytes;
1714 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1716 rctx->edesc = edesc;
1718 /* Make sure IV is located in a DMAable area */
1720 iv = skcipher_edesc_iv(edesc);
1721 memcpy(iv, req->iv, ivsize);
1723 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
1724 if (dma_mapping_error(jrdev, iv_dma)) {
1725 dev_err(jrdev, "unable to map IV\n");
1726 caam_unmap(jrdev, req->src, req->dst, src_nents,
1727 dst_nents, 0, 0, 0, 0);
1729 return ERR_PTR(-ENOMEM);
1732 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1735 sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
1738 if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
1739 sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
1743 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
1744 mapped_dst_nents, iv_dma, ivsize, 0);
1746 if (ivsize || mapped_dst_nents > 1)
1747 sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
1748 mapped_dst_nents - 1 + !!ivsize);
1750 if (sec4_sg_bytes) {
1751 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1754 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1755 dev_err(jrdev, "unable to map S/G table\n");
1756 caam_unmap(jrdev, req->src, req->dst, src_nents,
1757 dst_nents, iv_dma, ivsize, 0, 0);
1759 return ERR_PTR(-ENOMEM);
1763 edesc->iv_dma = iv_dma;
1765 print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
1766 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1772 static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
1774 struct skcipher_request *req = skcipher_request_cast(areq);
1775 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(crypto_skcipher_reqtfm(req));
1776 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1777 u32 *desc = rctx->edesc->hw_desc;
1780 rctx->edesc->bklog = true;
1782 ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);
1784 if (ret == -ENOSPC && engine->retry_support)
1787 if (ret != -EINPROGRESS) {
1788 skcipher_unmap(ctx->jrdev, rctx->edesc, req);
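/*
 * xts_skcipher_ivsize() returns true when the upper 64 bits of the XTS
 * IV (sector index) are non-zero; on era <= 8 parts such requests are
 * handed to the software fallback (see skcipher_crypt()).
 */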
1797 static inline bool xts_skcipher_ivsize(struct skcipher_request *req)
1799 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1800 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
1802 return !!get_unaligned((u64 *)(req->iv + (ivsize / 2)));
1805 static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
1807 struct skcipher_edesc *edesc;
1808 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1809 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
1810 struct device *jrdev = ctx->jrdev;
1811 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1812 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
* XTS is expected to return an error even for input length = 0.
* Note that the case of input length < block size is caught during
* HW offloading and returns an error.
1821 if (!req->cryptlen && !ctx->fallback)
1824 if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) ||
1825 ctx->xts_key_fallback)) {
1826 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1828 skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
1829 skcipher_request_set_callback(&rctx->fallback_req,
1833 skcipher_request_set_crypt(&rctx->fallback_req, req->src,
1834 req->dst, req->cryptlen, req->iv);
1836 return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
1837 crypto_skcipher_decrypt(&rctx->fallback_req);
1840 /* allocate extended descriptor */
1841 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1843 return PTR_ERR(edesc);
1845 /* Create and submit job descriptor*/
1846 init_skcipher_job(req, edesc, encrypt);
1848 print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
1849 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1850 desc_bytes(edesc->hw_desc), 1);
1852 desc = edesc->hw_desc;
* Only backlog requests are sent to the crypto engine, since the others
* can be handled by CAAM, if free, especially since the JR has up to 1024
* entries (more than the 10 entries of the crypto engine).
1858 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1859 ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine,
1862 ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);
1864 if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1865 skcipher_unmap(jrdev, edesc, req);
1872 static int skcipher_encrypt(struct skcipher_request *req)
1874 return skcipher_crypt(req, true);
1877 static int skcipher_decrypt(struct skcipher_request *req)
1879 return skcipher_crypt(req, false);
1882 static struct caam_skcipher_alg driver_algs[] = {
1886 .cra_name = "cbc(aes)",
1887 .cra_driver_name = "cbc-aes-caam",
1888 .cra_blocksize = AES_BLOCK_SIZE,
1890 .setkey = aes_skcipher_setkey,
1891 .encrypt = skcipher_encrypt,
1892 .decrypt = skcipher_decrypt,
1893 .min_keysize = AES_MIN_KEY_SIZE,
1894 .max_keysize = AES_MAX_KEY_SIZE,
1895 .ivsize = AES_BLOCK_SIZE,
1897 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1902 .cra_name = "cbc(des3_ede)",
1903 .cra_driver_name = "cbc-3des-caam",
1904 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1906 .setkey = des3_skcipher_setkey,
1907 .encrypt = skcipher_encrypt,
1908 .decrypt = skcipher_decrypt,
1909 .min_keysize = DES3_EDE_KEY_SIZE,
1910 .max_keysize = DES3_EDE_KEY_SIZE,
1911 .ivsize = DES3_EDE_BLOCK_SIZE,
1913 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1918 .cra_name = "cbc(des)",
1919 .cra_driver_name = "cbc-des-caam",
1920 .cra_blocksize = DES_BLOCK_SIZE,
1922 .setkey = des_skcipher_setkey,
1923 .encrypt = skcipher_encrypt,
1924 .decrypt = skcipher_decrypt,
1925 .min_keysize = DES_KEY_SIZE,
1926 .max_keysize = DES_KEY_SIZE,
1927 .ivsize = DES_BLOCK_SIZE,
1929 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
1934 .cra_name = "ctr(aes)",
1935 .cra_driver_name = "ctr-aes-caam",
1938 .setkey = ctr_skcipher_setkey,
1939 .encrypt = skcipher_encrypt,
1940 .decrypt = skcipher_decrypt,
1941 .min_keysize = AES_MIN_KEY_SIZE,
1942 .max_keysize = AES_MAX_KEY_SIZE,
1943 .ivsize = AES_BLOCK_SIZE,
1944 .chunksize = AES_BLOCK_SIZE,
1946 .caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1947 OP_ALG_AAI_CTR_MOD128,
1952 .cra_name = "rfc3686(ctr(aes))",
1953 .cra_driver_name = "rfc3686-ctr-aes-caam",
1956 .setkey = rfc3686_skcipher_setkey,
1957 .encrypt = skcipher_encrypt,
1958 .decrypt = skcipher_decrypt,
1959 .min_keysize = AES_MIN_KEY_SIZE +
1960 CTR_RFC3686_NONCE_SIZE,
1961 .max_keysize = AES_MAX_KEY_SIZE +
1962 CTR_RFC3686_NONCE_SIZE,
1963 .ivsize = CTR_RFC3686_IV_SIZE,
1964 .chunksize = AES_BLOCK_SIZE,
1967 .class1_alg_type = OP_ALG_ALGSEL_AES |
1968 OP_ALG_AAI_CTR_MOD128,
1975 .cra_name = "xts(aes)",
1976 .cra_driver_name = "xts-aes-caam",
1977 .cra_flags = CRYPTO_ALG_NEED_FALLBACK,
1978 .cra_blocksize = AES_BLOCK_SIZE,
1980 .setkey = xts_skcipher_setkey,
1981 .encrypt = skcipher_encrypt,
1982 .decrypt = skcipher_decrypt,
1983 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1984 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1985 .ivsize = AES_BLOCK_SIZE,
1987 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
1992 .cra_name = "ecb(des)",
1993 .cra_driver_name = "ecb-des-caam",
1994 .cra_blocksize = DES_BLOCK_SIZE,
1996 .setkey = des_skcipher_setkey,
1997 .encrypt = skcipher_encrypt,
1998 .decrypt = skcipher_decrypt,
1999 .min_keysize = DES_KEY_SIZE,
2000 .max_keysize = DES_KEY_SIZE,
2002 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
2007 .cra_name = "ecb(aes)",
2008 .cra_driver_name = "ecb-aes-caam",
2009 .cra_blocksize = AES_BLOCK_SIZE,
2011 .setkey = aes_skcipher_setkey,
2012 .encrypt = skcipher_encrypt,
2013 .decrypt = skcipher_decrypt,
2014 .min_keysize = AES_MIN_KEY_SIZE,
2015 .max_keysize = AES_MAX_KEY_SIZE,
2017 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
2022 .cra_name = "ecb(des3_ede)",
2023 .cra_driver_name = "ecb-des3-caam",
2024 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2026 .setkey = des3_skcipher_setkey,
2027 .encrypt = skcipher_encrypt,
2028 .decrypt = skcipher_decrypt,
2029 .min_keysize = DES3_EDE_KEY_SIZE,
2030 .max_keysize = DES3_EDE_KEY_SIZE,
2032 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
2036 static struct caam_aead_alg driver_aeads[] = {
2040 .cra_name = "rfc4106(gcm(aes))",
2041 .cra_driver_name = "rfc4106-gcm-aes-caam",
2044 .setkey = rfc4106_setkey,
2045 .setauthsize = rfc4106_setauthsize,
2046 .encrypt = ipsec_gcm_encrypt,
2047 .decrypt = ipsec_gcm_decrypt,
2048 .ivsize = GCM_RFC4106_IV_SIZE,
2049 .maxauthsize = AES_BLOCK_SIZE,
2052 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2059 .cra_name = "rfc4543(gcm(aes))",
2060 .cra_driver_name = "rfc4543-gcm-aes-caam",
2063 .setkey = rfc4543_setkey,
2064 .setauthsize = rfc4543_setauthsize,
2065 .encrypt = ipsec_gcm_encrypt,
2066 .decrypt = ipsec_gcm_decrypt,
2067 .ivsize = GCM_RFC4543_IV_SIZE,
2068 .maxauthsize = AES_BLOCK_SIZE,
2071 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2075 /* Galois Counter Mode */
2079 .cra_name = "gcm(aes)",
2080 .cra_driver_name = "gcm-aes-caam",
2083 .setkey = gcm_setkey,
2084 .setauthsize = gcm_setauthsize,
2085 .encrypt = gcm_encrypt,
2086 .decrypt = gcm_decrypt,
2087 .ivsize = GCM_AES_IV_SIZE,
2088 .maxauthsize = AES_BLOCK_SIZE,
2091 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2095 /* single-pass ipsec_esp descriptor */
2099 .cra_name = "authenc(hmac(md5),"
2100 "ecb(cipher_null))",
2101 .cra_driver_name = "authenc-hmac-md5-"
2102 "ecb-cipher_null-caam",
2103 .cra_blocksize = NULL_BLOCK_SIZE,
2105 .setkey = aead_setkey,
2106 .setauthsize = aead_setauthsize,
2107 .encrypt = aead_encrypt,
2108 .decrypt = aead_decrypt,
2109 .ivsize = NULL_IV_SIZE,
2110 .maxauthsize = MD5_DIGEST_SIZE,
2113 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2114 OP_ALG_AAI_HMAC_PRECOMP,
2120 .cra_name = "authenc(hmac(sha1),"
2121 "ecb(cipher_null))",
2122 .cra_driver_name = "authenc-hmac-sha1-"
2123 "ecb-cipher_null-caam",
2124 .cra_blocksize = NULL_BLOCK_SIZE,
2126 .setkey = aead_setkey,
2127 .setauthsize = aead_setauthsize,
2128 .encrypt = aead_encrypt,
2129 .decrypt = aead_decrypt,
2130 .ivsize = NULL_IV_SIZE,
2131 .maxauthsize = SHA1_DIGEST_SIZE,
2134 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2135 OP_ALG_AAI_HMAC_PRECOMP,
2141 .cra_name = "authenc(hmac(sha224),"
2142 "ecb(cipher_null))",
2143 .cra_driver_name = "authenc-hmac-sha224-"
2144 "ecb-cipher_null-caam",
2145 .cra_blocksize = NULL_BLOCK_SIZE,
2147 .setkey = aead_setkey,
2148 .setauthsize = aead_setauthsize,
2149 .encrypt = aead_encrypt,
2150 .decrypt = aead_decrypt,
2151 .ivsize = NULL_IV_SIZE,
2152 .maxauthsize = SHA224_DIGEST_SIZE,
2155 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2156 OP_ALG_AAI_HMAC_PRECOMP,
2162 .cra_name = "authenc(hmac(sha256),"
2163 "ecb(cipher_null))",
2164 .cra_driver_name = "authenc-hmac-sha256-"
2165 "ecb-cipher_null-caam",
2166 .cra_blocksize = NULL_BLOCK_SIZE,
2168 .setkey = aead_setkey,
2169 .setauthsize = aead_setauthsize,
2170 .encrypt = aead_encrypt,
2171 .decrypt = aead_decrypt,
2172 .ivsize = NULL_IV_SIZE,
2173 .maxauthsize = SHA256_DIGEST_SIZE,
2176 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2177 OP_ALG_AAI_HMAC_PRECOMP,
2183 .cra_name = "authenc(hmac(sha384),"
2184 "ecb(cipher_null))",
2185 .cra_driver_name = "authenc-hmac-sha384-"
2186 "ecb-cipher_null-caam",
2187 .cra_blocksize = NULL_BLOCK_SIZE,
2189 .setkey = aead_setkey,
2190 .setauthsize = aead_setauthsize,
2191 .encrypt = aead_encrypt,
2192 .decrypt = aead_decrypt,
2193 .ivsize = NULL_IV_SIZE,
2194 .maxauthsize = SHA384_DIGEST_SIZE,
2197 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2198 OP_ALG_AAI_HMAC_PRECOMP,
2204 .cra_name = "authenc(hmac(sha512),"
2205 "ecb(cipher_null))",
2206 .cra_driver_name = "authenc-hmac-sha512-"
2207 "ecb-cipher_null-caam",
2208 .cra_blocksize = NULL_BLOCK_SIZE,
2210 .setkey = aead_setkey,
2211 .setauthsize = aead_setauthsize,
2212 .encrypt = aead_encrypt,
2213 .decrypt = aead_decrypt,
2214 .ivsize = NULL_IV_SIZE,
2215 .maxauthsize = SHA512_DIGEST_SIZE,
2218 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2219 OP_ALG_AAI_HMAC_PRECOMP,
2225 .cra_name = "authenc(hmac(md5),cbc(aes))",
2226 .cra_driver_name = "authenc-hmac-md5-"
2228 .cra_blocksize = AES_BLOCK_SIZE,
2230 .setkey = aead_setkey,
2231 .setauthsize = aead_setauthsize,
2232 .encrypt = aead_encrypt,
2233 .decrypt = aead_decrypt,
2234 .ivsize = AES_BLOCK_SIZE,
2235 .maxauthsize = MD5_DIGEST_SIZE,
2238 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2239 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2240 OP_ALG_AAI_HMAC_PRECOMP,
2246 .cra_name = "echainiv(authenc(hmac(md5),"
2248 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2250 .cra_blocksize = AES_BLOCK_SIZE,
2252 .setkey = aead_setkey,
2253 .setauthsize = aead_setauthsize,
2254 .encrypt = aead_encrypt,
2255 .decrypt = aead_decrypt,
2256 .ivsize = AES_BLOCK_SIZE,
2257 .maxauthsize = MD5_DIGEST_SIZE,
2260 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2261 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2262 OP_ALG_AAI_HMAC_PRECOMP,
2269 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2270 .cra_driver_name = "authenc-hmac-sha1-"
2272 .cra_blocksize = AES_BLOCK_SIZE,
2274 .setkey = aead_setkey,
2275 .setauthsize = aead_setauthsize,
2276 .encrypt = aead_encrypt,
2277 .decrypt = aead_decrypt,
2278 .ivsize = AES_BLOCK_SIZE,
2279 .maxauthsize = SHA1_DIGEST_SIZE,
2282 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2283 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2284 OP_ALG_AAI_HMAC_PRECOMP,
2290 .cra_name = "echainiv(authenc(hmac(sha1),"
2292 .cra_driver_name = "echainiv-authenc-"
2293 "hmac-sha1-cbc-aes-caam",
2294 .cra_blocksize = AES_BLOCK_SIZE,
2296 .setkey = aead_setkey,
2297 .setauthsize = aead_setauthsize,
2298 .encrypt = aead_encrypt,
2299 .decrypt = aead_decrypt,
2300 .ivsize = AES_BLOCK_SIZE,
2301 .maxauthsize = SHA1_DIGEST_SIZE,
2304 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2305 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2306 OP_ALG_AAI_HMAC_PRECOMP,
2313 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2314 .cra_driver_name = "authenc-hmac-sha224-"
2316 .cra_blocksize = AES_BLOCK_SIZE,
2318 .setkey = aead_setkey,
2319 .setauthsize = aead_setauthsize,
2320 .encrypt = aead_encrypt,
2321 .decrypt = aead_decrypt,
2322 .ivsize = AES_BLOCK_SIZE,
2323 .maxauthsize = SHA224_DIGEST_SIZE,
2326 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2327 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2328 OP_ALG_AAI_HMAC_PRECOMP,
2334 .cra_name = "echainiv(authenc(hmac(sha224),"
2336 .cra_driver_name = "echainiv-authenc-"
2337 "hmac-sha224-cbc-aes-caam",
2338 .cra_blocksize = AES_BLOCK_SIZE,
2340 .setkey = aead_setkey,
2341 .setauthsize = aead_setauthsize,
2342 .encrypt = aead_encrypt,
2343 .decrypt = aead_decrypt,
2344 .ivsize = AES_BLOCK_SIZE,
2345 .maxauthsize = SHA224_DIGEST_SIZE,
2348 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2349 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2350 OP_ALG_AAI_HMAC_PRECOMP,
2357 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2358 .cra_driver_name = "authenc-hmac-sha256-"
2360 .cra_blocksize = AES_BLOCK_SIZE,
2362 .setkey = aead_setkey,
2363 .setauthsize = aead_setauthsize,
2364 .encrypt = aead_encrypt,
2365 .decrypt = aead_decrypt,
2366 .ivsize = AES_BLOCK_SIZE,
2367 .maxauthsize = SHA256_DIGEST_SIZE,
2370 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2371 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2372 OP_ALG_AAI_HMAC_PRECOMP,
2378 .cra_name = "echainiv(authenc(hmac(sha256),"
2380 .cra_driver_name = "echainiv-authenc-"
2381 "hmac-sha256-cbc-aes-caam",
2382 .cra_blocksize = AES_BLOCK_SIZE,
2384 .setkey = aead_setkey,
2385 .setauthsize = aead_setauthsize,
2386 .encrypt = aead_encrypt,
2387 .decrypt = aead_decrypt,
2388 .ivsize = AES_BLOCK_SIZE,
2389 .maxauthsize = SHA256_DIGEST_SIZE,
2392 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2393 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2394 OP_ALG_AAI_HMAC_PRECOMP,
2401 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2402 .cra_driver_name = "authenc-hmac-sha384-"
2404 .cra_blocksize = AES_BLOCK_SIZE,
2406 .setkey = aead_setkey,
2407 .setauthsize = aead_setauthsize,
2408 .encrypt = aead_encrypt,
2409 .decrypt = aead_decrypt,
2410 .ivsize = AES_BLOCK_SIZE,
2411 .maxauthsize = SHA384_DIGEST_SIZE,
2414 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2415 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2416 OP_ALG_AAI_HMAC_PRECOMP,
2422 .cra_name = "echainiv(authenc(hmac(sha384),"
2424 .cra_driver_name = "echainiv-authenc-"
2425 "hmac-sha384-cbc-aes-caam",
2426 .cra_blocksize = AES_BLOCK_SIZE,
2428 .setkey = aead_setkey,
2429 .setauthsize = aead_setauthsize,
2430 .encrypt = aead_encrypt,
2431 .decrypt = aead_decrypt,
2432 .ivsize = AES_BLOCK_SIZE,
2433 .maxauthsize = SHA384_DIGEST_SIZE,
2436 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2437 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2438 OP_ALG_AAI_HMAC_PRECOMP,
2445 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2446 .cra_driver_name = "authenc-hmac-sha512-"
2448 .cra_blocksize = AES_BLOCK_SIZE,
2450 .setkey = aead_setkey,
2451 .setauthsize = aead_setauthsize,
2452 .encrypt = aead_encrypt,
2453 .decrypt = aead_decrypt,
2454 .ivsize = AES_BLOCK_SIZE,
2455 .maxauthsize = SHA512_DIGEST_SIZE,
2458 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2459 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2460 OP_ALG_AAI_HMAC_PRECOMP,
2466 .cra_name = "echainiv(authenc(hmac(sha512),"
2468 .cra_driver_name = "echainiv-authenc-"
2469 "hmac-sha512-cbc-aes-caam",
2470 .cra_blocksize = AES_BLOCK_SIZE,
2472 .setkey = aead_setkey,
2473 .setauthsize = aead_setauthsize,
2474 .encrypt = aead_encrypt,
2475 .decrypt = aead_decrypt,
2476 .ivsize = AES_BLOCK_SIZE,
2477 .maxauthsize = SHA512_DIGEST_SIZE,
2480 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2481 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2482 OP_ALG_AAI_HMAC_PRECOMP,
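/*
 * cbc(des3_ede) variants: these use des3_aead_setkey so the cipher part
 * of the authenc key is run through the kernel's 3DES key checks before
 * the shared descriptors are built.
 */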
2489 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2490 .cra_driver_name = "authenc-hmac-md5-"
2491 "cbc-des3_ede-caam",
2492 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2494 .setkey = des3_aead_setkey,
2495 .setauthsize = aead_setauthsize,
2496 .encrypt = aead_encrypt,
2497 .decrypt = aead_decrypt,
2498 .ivsize = DES3_EDE_BLOCK_SIZE,
2499 .maxauthsize = MD5_DIGEST_SIZE,
2502 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2503 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2504 OP_ALG_AAI_HMAC_PRECOMP,
2510 .cra_name = "echainiv(authenc(hmac(md5),"
2512 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2513 "cbc-des3_ede-caam",
2514 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2516 .setkey = des3_aead_setkey,
2517 .setauthsize = aead_setauthsize,
2518 .encrypt = aead_encrypt,
2519 .decrypt = aead_decrypt,
2520 .ivsize = DES3_EDE_BLOCK_SIZE,
2521 .maxauthsize = MD5_DIGEST_SIZE,
2524 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2525 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2526 OP_ALG_AAI_HMAC_PRECOMP,
2533 .cra_name = "authenc(hmac(sha1),"
2535 .cra_driver_name = "authenc-hmac-sha1-"
2536 "cbc-des3_ede-caam",
2537 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2539 .setkey = des3_aead_setkey,
2540 .setauthsize = aead_setauthsize,
2541 .encrypt = aead_encrypt,
2542 .decrypt = aead_decrypt,
2543 .ivsize = DES3_EDE_BLOCK_SIZE,
2544 .maxauthsize = SHA1_DIGEST_SIZE,
2547 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2548 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2549 OP_ALG_AAI_HMAC_PRECOMP,
2555 .cra_name = "echainiv(authenc(hmac(sha1),"
2557 .cra_driver_name = "echainiv-authenc-"
2559 "cbc-des3_ede-caam",
2560 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2562 .setkey = des3_aead_setkey,
2563 .setauthsize = aead_setauthsize,
2564 .encrypt = aead_encrypt,
2565 .decrypt = aead_decrypt,
2566 .ivsize = DES3_EDE_BLOCK_SIZE,
2567 .maxauthsize = SHA1_DIGEST_SIZE,
2570 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2571 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2572 OP_ALG_AAI_HMAC_PRECOMP,
2579 .cra_name = "authenc(hmac(sha224),"
2581 .cra_driver_name = "authenc-hmac-sha224-"
2582 "cbc-des3_ede-caam",
2583 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2585 .setkey = des3_aead_setkey,
2586 .setauthsize = aead_setauthsize,
2587 .encrypt = aead_encrypt,
2588 .decrypt = aead_decrypt,
2589 .ivsize = DES3_EDE_BLOCK_SIZE,
2590 .maxauthsize = SHA224_DIGEST_SIZE,
2593 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2594 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2595 OP_ALG_AAI_HMAC_PRECOMP,
2601 .cra_name = "echainiv(authenc(hmac(sha224),"
2603 .cra_driver_name = "echainiv-authenc-"
2605 "cbc-des3_ede-caam",
2606 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2608 .setkey = des3_aead_setkey,
2609 .setauthsize = aead_setauthsize,
2610 .encrypt = aead_encrypt,
2611 .decrypt = aead_decrypt,
2612 .ivsize = DES3_EDE_BLOCK_SIZE,
2613 .maxauthsize = SHA224_DIGEST_SIZE,
2616 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2617 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2618 OP_ALG_AAI_HMAC_PRECOMP,
2625 .cra_name = "authenc(hmac(sha256),"
2627 .cra_driver_name = "authenc-hmac-sha256-"
2628 "cbc-des3_ede-caam",
2629 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2631 .setkey = des3_aead_setkey,
2632 .setauthsize = aead_setauthsize,
2633 .encrypt = aead_encrypt,
2634 .decrypt = aead_decrypt,
2635 .ivsize = DES3_EDE_BLOCK_SIZE,
2636 .maxauthsize = SHA256_DIGEST_SIZE,
2639 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2640 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2641 OP_ALG_AAI_HMAC_PRECOMP,
2647 .cra_name = "echainiv(authenc(hmac(sha256),"
2649 .cra_driver_name = "echainiv-authenc-"
2651 "cbc-des3_ede-caam",
2652 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2654 .setkey = des3_aead_setkey,
2655 .setauthsize = aead_setauthsize,
2656 .encrypt = aead_encrypt,
2657 .decrypt = aead_decrypt,
2658 .ivsize = DES3_EDE_BLOCK_SIZE,
2659 .maxauthsize = SHA256_DIGEST_SIZE,
2662 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2663 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2664 OP_ALG_AAI_HMAC_PRECOMP,
2671 .cra_name = "authenc(hmac(sha384),"
2673 .cra_driver_name = "authenc-hmac-sha384-"
2674 "cbc-des3_ede-caam",
2675 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2677 .setkey = des3_aead_setkey,
2678 .setauthsize = aead_setauthsize,
2679 .encrypt = aead_encrypt,
2680 .decrypt = aead_decrypt,
2681 .ivsize = DES3_EDE_BLOCK_SIZE,
2682 .maxauthsize = SHA384_DIGEST_SIZE,
2685 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2686 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2687 OP_ALG_AAI_HMAC_PRECOMP,
2693 .cra_name = "echainiv(authenc(hmac(sha384),"
2695 .cra_driver_name = "echainiv-authenc-"
2697 "cbc-des3_ede-caam",
2698 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2700 .setkey = des3_aead_setkey,
2701 .setauthsize = aead_setauthsize,
2702 .encrypt = aead_encrypt,
2703 .decrypt = aead_decrypt,
2704 .ivsize = DES3_EDE_BLOCK_SIZE,
2705 .maxauthsize = SHA384_DIGEST_SIZE,
2708 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2709 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2710 OP_ALG_AAI_HMAC_PRECOMP,
2717 .cra_name = "authenc(hmac(sha512),"
2719 .cra_driver_name = "authenc-hmac-sha512-"
2720 "cbc-des3_ede-caam",
2721 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2723 .setkey = des3_aead_setkey,
2724 .setauthsize = aead_setauthsize,
2725 .encrypt = aead_encrypt,
2726 .decrypt = aead_decrypt,
2727 .ivsize = DES3_EDE_BLOCK_SIZE,
2728 .maxauthsize = SHA512_DIGEST_SIZE,
2731 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2732 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2733 OP_ALG_AAI_HMAC_PRECOMP,
2739 .cra_name = "echainiv(authenc(hmac(sha512),"
2741 .cra_driver_name = "echainiv-authenc-"
2743 "cbc-des3_ede-caam",
2744 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2746 .setkey = des3_aead_setkey,
2747 .setauthsize = aead_setauthsize,
2748 .encrypt = aead_encrypt,
2749 .decrypt = aead_decrypt,
2750 .ivsize = DES3_EDE_BLOCK_SIZE,
2751 .maxauthsize = SHA512_DIGEST_SIZE,
2754 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2755 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2756 OP_ALG_AAI_HMAC_PRECOMP,
2763 .cra_name = "authenc(hmac(md5),cbc(des))",
2764 .cra_driver_name = "authenc-hmac-md5-"
2766 .cra_blocksize = DES_BLOCK_SIZE,
2768 .setkey = aead_setkey,
2769 .setauthsize = aead_setauthsize,
2770 .encrypt = aead_encrypt,
2771 .decrypt = aead_decrypt,
2772 .ivsize = DES_BLOCK_SIZE,
2773 .maxauthsize = MD5_DIGEST_SIZE,
2776 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2777 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2778 OP_ALG_AAI_HMAC_PRECOMP,
2784 .cra_name = "echainiv(authenc(hmac(md5),"
2786 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2788 .cra_blocksize = DES_BLOCK_SIZE,
2790 .setkey = aead_setkey,
2791 .setauthsize = aead_setauthsize,
2792 .encrypt = aead_encrypt,
2793 .decrypt = aead_decrypt,
2794 .ivsize = DES_BLOCK_SIZE,
2795 .maxauthsize = MD5_DIGEST_SIZE,
2798 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2799 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2800 OP_ALG_AAI_HMAC_PRECOMP,
2807 .cra_name = "authenc(hmac(sha1),cbc(des))",
2808 .cra_driver_name = "authenc-hmac-sha1-"
2810 .cra_blocksize = DES_BLOCK_SIZE,
2812 .setkey = aead_setkey,
2813 .setauthsize = aead_setauthsize,
2814 .encrypt = aead_encrypt,
2815 .decrypt = aead_decrypt,
2816 .ivsize = DES_BLOCK_SIZE,
2817 .maxauthsize = SHA1_DIGEST_SIZE,
2820 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2821 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2822 OP_ALG_AAI_HMAC_PRECOMP,
2828 .cra_name = "echainiv(authenc(hmac(sha1),"
2830 .cra_driver_name = "echainiv-authenc-"
2831 "hmac-sha1-cbc-des-caam",
2832 .cra_blocksize = DES_BLOCK_SIZE,
2834 .setkey = aead_setkey,
2835 .setauthsize = aead_setauthsize,
2836 .encrypt = aead_encrypt,
2837 .decrypt = aead_decrypt,
2838 .ivsize = DES_BLOCK_SIZE,
2839 .maxauthsize = SHA1_DIGEST_SIZE,
2842 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2843 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2844 OP_ALG_AAI_HMAC_PRECOMP,
2851 .cra_name = "authenc(hmac(sha224),cbc(des))",
2852 .cra_driver_name = "authenc-hmac-sha224-"
2854 .cra_blocksize = DES_BLOCK_SIZE,
2856 .setkey = aead_setkey,
2857 .setauthsize = aead_setauthsize,
2858 .encrypt = aead_encrypt,
2859 .decrypt = aead_decrypt,
2860 .ivsize = DES_BLOCK_SIZE,
2861 .maxauthsize = SHA224_DIGEST_SIZE,
2864 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2865 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2866 OP_ALG_AAI_HMAC_PRECOMP,
2872 .cra_name = "echainiv(authenc(hmac(sha224),"
2874 .cra_driver_name = "echainiv-authenc-"
2875 "hmac-sha224-cbc-des-caam",
2876 .cra_blocksize = DES_BLOCK_SIZE,
2878 .setkey = aead_setkey,
2879 .setauthsize = aead_setauthsize,
2880 .encrypt = aead_encrypt,
2881 .decrypt = aead_decrypt,
2882 .ivsize = DES_BLOCK_SIZE,
2883 .maxauthsize = SHA224_DIGEST_SIZE,
2886 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2887 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2888 OP_ALG_AAI_HMAC_PRECOMP,
2895 .cra_name = "authenc(hmac(sha256),cbc(des))",
2896 .cra_driver_name = "authenc-hmac-sha256-"
2898 .cra_blocksize = DES_BLOCK_SIZE,
2900 .setkey = aead_setkey,
2901 .setauthsize = aead_setauthsize,
2902 .encrypt = aead_encrypt,
2903 .decrypt = aead_decrypt,
2904 .ivsize = DES_BLOCK_SIZE,
2905 .maxauthsize = SHA256_DIGEST_SIZE,
2908 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2909 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2910 OP_ALG_AAI_HMAC_PRECOMP,
2916 .cra_name = "echainiv(authenc(hmac(sha256),"
2918 .cra_driver_name = "echainiv-authenc-"
2919 "hmac-sha256-cbc-des-caam",
2920 .cra_blocksize = DES_BLOCK_SIZE,
2922 .setkey = aead_setkey,
2923 .setauthsize = aead_setauthsize,
2924 .encrypt = aead_encrypt,
2925 .decrypt = aead_decrypt,
2926 .ivsize = DES_BLOCK_SIZE,
2927 .maxauthsize = SHA256_DIGEST_SIZE,
2930 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2931 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2932 OP_ALG_AAI_HMAC_PRECOMP,
2939 .cra_name = "authenc(hmac(sha384),cbc(des))",
2940 .cra_driver_name = "authenc-hmac-sha384-"
2942 .cra_blocksize = DES_BLOCK_SIZE,
2944 .setkey = aead_setkey,
2945 .setauthsize = aead_setauthsize,
2946 .encrypt = aead_encrypt,
2947 .decrypt = aead_decrypt,
2948 .ivsize = DES_BLOCK_SIZE,
2949 .maxauthsize = SHA384_DIGEST_SIZE,
2952 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2953 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2954 OP_ALG_AAI_HMAC_PRECOMP,
2960 .cra_name = "echainiv(authenc(hmac(sha384),"
2962 .cra_driver_name = "echainiv-authenc-"
2963 "hmac-sha384-cbc-des-caam",
2964 .cra_blocksize = DES_BLOCK_SIZE,
2966 .setkey = aead_setkey,
2967 .setauthsize = aead_setauthsize,
2968 .encrypt = aead_encrypt,
2969 .decrypt = aead_decrypt,
2970 .ivsize = DES_BLOCK_SIZE,
2971 .maxauthsize = SHA384_DIGEST_SIZE,
2974 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2975 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2976 OP_ALG_AAI_HMAC_PRECOMP,
2983 .cra_name = "authenc(hmac(sha512),cbc(des))",
2984 .cra_driver_name = "authenc-hmac-sha512-"
2986 .cra_blocksize = DES_BLOCK_SIZE,
2988 .setkey = aead_setkey,
2989 .setauthsize = aead_setauthsize,
2990 .encrypt = aead_encrypt,
2991 .decrypt = aead_decrypt,
2992 .ivsize = DES_BLOCK_SIZE,
2993 .maxauthsize = SHA512_DIGEST_SIZE,
2996 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2997 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2998 OP_ALG_AAI_HMAC_PRECOMP,
3004 .cra_name = "echainiv(authenc(hmac(sha512),"
3006 .cra_driver_name = "echainiv-authenc-"
3007 "hmac-sha512-cbc-des-caam",
3008 .cra_blocksize = DES_BLOCK_SIZE,
3010 .setkey = aead_setkey,
3011 .setauthsize = aead_setauthsize,
3012 .encrypt = aead_encrypt,
3013 .decrypt = aead_decrypt,
3014 .ivsize = DES_BLOCK_SIZE,
3015 .maxauthsize = SHA512_DIGEST_SIZE,
3018 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3019 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3020 OP_ALG_AAI_HMAC_PRECOMP,
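/*
 * rfc3686(ctr(aes)) variants: the RFC 3686 nonce travels at the end of
 * the cipher key, and the seqiv() variants let the geniv layer supply the
 * per-request IV.
 */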
3027 .cra_name = "authenc(hmac(md5),"
3028 "rfc3686(ctr(aes)))",
3029 .cra_driver_name = "authenc-hmac-md5-"
3030 "rfc3686-ctr-aes-caam",
3033 .setkey = aead_setkey,
3034 .setauthsize = aead_setauthsize,
3035 .encrypt = aead_encrypt,
3036 .decrypt = aead_decrypt,
3037 .ivsize = CTR_RFC3686_IV_SIZE,
3038 .maxauthsize = MD5_DIGEST_SIZE,
3041 .class1_alg_type = OP_ALG_ALGSEL_AES |
3042 OP_ALG_AAI_CTR_MOD128,
3043 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3044 OP_ALG_AAI_HMAC_PRECOMP,
3051 .cra_name = "seqiv(authenc("
3052 "hmac(md5),rfc3686(ctr(aes))))",
3053 .cra_driver_name = "seqiv-authenc-hmac-md5-"
3054 "rfc3686-ctr-aes-caam",
3057 .setkey = aead_setkey,
3058 .setauthsize = aead_setauthsize,
3059 .encrypt = aead_encrypt,
3060 .decrypt = aead_decrypt,
3061 .ivsize = CTR_RFC3686_IV_SIZE,
3062 .maxauthsize = MD5_DIGEST_SIZE,
3065 .class1_alg_type = OP_ALG_ALGSEL_AES |
3066 OP_ALG_AAI_CTR_MOD128,
3067 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3068 OP_ALG_AAI_HMAC_PRECOMP,
3076 .cra_name = "authenc(hmac(sha1),"
3077 "rfc3686(ctr(aes)))",
3078 .cra_driver_name = "authenc-hmac-sha1-"
3079 "rfc3686-ctr-aes-caam",
3082 .setkey = aead_setkey,
3083 .setauthsize = aead_setauthsize,
3084 .encrypt = aead_encrypt,
3085 .decrypt = aead_decrypt,
3086 .ivsize = CTR_RFC3686_IV_SIZE,
3087 .maxauthsize = SHA1_DIGEST_SIZE,
3090 .class1_alg_type = OP_ALG_ALGSEL_AES |
3091 OP_ALG_AAI_CTR_MOD128,
3092 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3093 OP_ALG_AAI_HMAC_PRECOMP,
3100 .cra_name = "seqiv(authenc("
3101 "hmac(sha1),rfc3686(ctr(aes))))",
3102 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
3103 "rfc3686-ctr-aes-caam",
3106 .setkey = aead_setkey,
3107 .setauthsize = aead_setauthsize,
3108 .encrypt = aead_encrypt,
3109 .decrypt = aead_decrypt,
3110 .ivsize = CTR_RFC3686_IV_SIZE,
3111 .maxauthsize = SHA1_DIGEST_SIZE,
3114 .class1_alg_type = OP_ALG_ALGSEL_AES |
3115 OP_ALG_AAI_CTR_MOD128,
3116 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3117 OP_ALG_AAI_HMAC_PRECOMP,
3125 .cra_name = "authenc(hmac(sha224),"
3126 "rfc3686(ctr(aes)))",
3127 .cra_driver_name = "authenc-hmac-sha224-"
3128 "rfc3686-ctr-aes-caam",
3131 .setkey = aead_setkey,
3132 .setauthsize = aead_setauthsize,
3133 .encrypt = aead_encrypt,
3134 .decrypt = aead_decrypt,
3135 .ivsize = CTR_RFC3686_IV_SIZE,
3136 .maxauthsize = SHA224_DIGEST_SIZE,
3139 .class1_alg_type = OP_ALG_ALGSEL_AES |
3140 OP_ALG_AAI_CTR_MOD128,
3141 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3142 OP_ALG_AAI_HMAC_PRECOMP,
3149 .cra_name = "seqiv(authenc("
3150 "hmac(sha224),rfc3686(ctr(aes))))",
3151 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3152 "rfc3686-ctr-aes-caam",
3155 .setkey = aead_setkey,
3156 .setauthsize = aead_setauthsize,
3157 .encrypt = aead_encrypt,
3158 .decrypt = aead_decrypt,
3159 .ivsize = CTR_RFC3686_IV_SIZE,
3160 .maxauthsize = SHA224_DIGEST_SIZE,
3163 .class1_alg_type = OP_ALG_ALGSEL_AES |
3164 OP_ALG_AAI_CTR_MOD128,
3165 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3166 OP_ALG_AAI_HMAC_PRECOMP,
3174 .cra_name = "authenc(hmac(sha256),"
3175 "rfc3686(ctr(aes)))",
3176 .cra_driver_name = "authenc-hmac-sha256-"
3177 "rfc3686-ctr-aes-caam",
3180 .setkey = aead_setkey,
3181 .setauthsize = aead_setauthsize,
3182 .encrypt = aead_encrypt,
3183 .decrypt = aead_decrypt,
3184 .ivsize = CTR_RFC3686_IV_SIZE,
3185 .maxauthsize = SHA256_DIGEST_SIZE,
3188 .class1_alg_type = OP_ALG_ALGSEL_AES |
3189 OP_ALG_AAI_CTR_MOD128,
3190 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3191 OP_ALG_AAI_HMAC_PRECOMP,
3198 .cra_name = "seqiv(authenc(hmac(sha256),"
3199 "rfc3686(ctr(aes))))",
3200 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3201 "rfc3686-ctr-aes-caam",
3204 .setkey = aead_setkey,
3205 .setauthsize = aead_setauthsize,
3206 .encrypt = aead_encrypt,
3207 .decrypt = aead_decrypt,
3208 .ivsize = CTR_RFC3686_IV_SIZE,
3209 .maxauthsize = SHA256_DIGEST_SIZE,
3212 .class1_alg_type = OP_ALG_ALGSEL_AES |
3213 OP_ALG_AAI_CTR_MOD128,
3214 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3215 OP_ALG_AAI_HMAC_PRECOMP,
3223 .cra_name = "authenc(hmac(sha384),"
3224 "rfc3686(ctr(aes)))",
3225 .cra_driver_name = "authenc-hmac-sha384-"
3226 "rfc3686-ctr-aes-caam",
3229 .setkey = aead_setkey,
3230 .setauthsize = aead_setauthsize,
3231 .encrypt = aead_encrypt,
3232 .decrypt = aead_decrypt,
3233 .ivsize = CTR_RFC3686_IV_SIZE,
3234 .maxauthsize = SHA384_DIGEST_SIZE,
3237 .class1_alg_type = OP_ALG_ALGSEL_AES |
3238 OP_ALG_AAI_CTR_MOD128,
3239 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3240 OP_ALG_AAI_HMAC_PRECOMP,
3247 .cra_name = "seqiv(authenc(hmac(sha384),"
3248 "rfc3686(ctr(aes))))",
3249 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3250 "rfc3686-ctr-aes-caam",
3253 .setkey = aead_setkey,
3254 .setauthsize = aead_setauthsize,
3255 .encrypt = aead_encrypt,
3256 .decrypt = aead_decrypt,
3257 .ivsize = CTR_RFC3686_IV_SIZE,
3258 .maxauthsize = SHA384_DIGEST_SIZE,
3261 .class1_alg_type = OP_ALG_ALGSEL_AES |
3262 OP_ALG_AAI_CTR_MOD128,
3263 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3264 OP_ALG_AAI_HMAC_PRECOMP,
3272 .cra_name = "authenc(hmac(sha512),"
3273 "rfc3686(ctr(aes)))",
3274 .cra_driver_name = "authenc-hmac-sha512-"
3275 "rfc3686-ctr-aes-caam",
3278 .setkey = aead_setkey,
3279 .setauthsize = aead_setauthsize,
3280 .encrypt = aead_encrypt,
3281 .decrypt = aead_decrypt,
3282 .ivsize = CTR_RFC3686_IV_SIZE,
3283 .maxauthsize = SHA512_DIGEST_SIZE,
3286 .class1_alg_type = OP_ALG_ALGSEL_AES |
3287 OP_ALG_AAI_CTR_MOD128,
3288 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3289 OP_ALG_AAI_HMAC_PRECOMP,
3296 .cra_name = "seqiv(authenc(hmac(sha512),"
3297 "rfc3686(ctr(aes))))",
3298 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3299 "rfc3686-ctr-aes-caam",
3302 .setkey = aead_setkey,
3303 .setauthsize = aead_setauthsize,
3304 .encrypt = aead_encrypt,
3305 .decrypt = aead_decrypt,
3306 .ivsize = CTR_RFC3686_IV_SIZE,
3307 .maxauthsize = SHA512_DIGEST_SIZE,
3310 .class1_alg_type = OP_ALG_ALGSEL_AES |
3311 OP_ALG_AAI_CTR_MOD128,
3312 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3313 OP_ALG_AAI_HMAC_PRECOMP,
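/*
 * ChaCha20-Poly1305 (RFC 7539 and RFC 7539-ESP) entries. These are only
 * registered when the CCHA and PTHA accelerators are present; see the
 * ccha_inst/ptha_inst checks in caam_algapi_init().
 */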
3321 .cra_name = "rfc7539(chacha20,poly1305)",
3322 .cra_driver_name = "rfc7539-chacha20-poly1305-"
3326 .setkey = chachapoly_setkey,
3327 .setauthsize = chachapoly_setauthsize,
3328 .encrypt = chachapoly_encrypt,
3329 .decrypt = chachapoly_decrypt,
3330 .ivsize = CHACHAPOLY_IV_SIZE,
3331 .maxauthsize = POLY1305_DIGEST_SIZE,
3334 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3335 OP_ALG_AAI_AEAD,
3336 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3337 OP_ALG_AAI_AEAD,
3344 .cra_name = "rfc7539esp(chacha20,poly1305)",
3345 .cra_driver_name = "rfc7539esp-chacha20-"
3349 .setkey = chachapoly_setkey,
3350 .setauthsize = chachapoly_setauthsize,
3351 .encrypt = chachapoly_encrypt,
3352 .decrypt = chachapoly_decrypt,
3354 .maxauthsize = POLY1305_DIGEST_SIZE,
3357 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3358 OP_ALG_AAI_AEAD,
3359 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3360 OP_ALG_AAI_AEAD,
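/*
 * Illustrative sketch (not part of the driver): how a kernel caller might
 * exercise one of the AEADs registered above through the generic crypto
 * API. Variable names and buffer sizes are made up for the example, the
 * key must already be in the authenc() key format, and error handling is
 * omitted.
 *
 *	struct crypto_aead *tfm;
 *	struct aead_request *req;
 *	struct scatterlist sg;
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	tfm = crypto_alloc_aead("authenc(hmac(sha256),cbc(aes))", 0, 0);
 *	crypto_aead_setkey(tfm, key, keylen);
 *	crypto_aead_setauthsize(tfm, SHA256_DIGEST_SIZE);
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, assoclen + ptlen + SHA256_DIGEST_SIZE);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  crypto_req_done, &wait);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *	crypto_wait_req(crypto_aead_encrypt(req), &wait);
 *
 *	aead_request_free(req);
 *	crypto_free_aead(tfm);
 */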
3366 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
3367 bool uses_dkp)
3369 dma_addr_t dma_addr;
3370 struct caam_drv_private *priv;
3371 const size_t sh_desc_enc_offset = offsetof(struct caam_ctx,
3372 sh_desc_enc);
3374 ctx->jrdev = caam_jr_alloc();
3375 if (IS_ERR(ctx->jrdev)) {
3376 pr_err("Job Ring Device allocation for transform failed\n");
3377 return PTR_ERR(ctx->jrdev);
3380 priv = dev_get_drvdata(ctx->jrdev->parent);
3381 if (priv->era >= 6 && uses_dkp)
3382 ctx->dir = DMA_BIDIRECTIONAL;
3383 else
3384 ctx->dir = DMA_TO_DEVICE;
3386 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3387 offsetof(struct caam_ctx,
3388 sh_desc_enc_dma) -
3389 sh_desc_enc_offset,
3390 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3391 if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3392 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3393 caam_jr_free(ctx->jrdev);
3397 ctx->sh_desc_enc_dma = dma_addr;
3398 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3399 sh_desc_dec) -
3400 sh_desc_enc_offset;
3401 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) -
3402 sh_desc_enc_offset;
3404 /* copy descriptor header template value */
3405 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3406 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
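/*
 * caam_init_common() maps sh_desc_enc, sh_desc_dec and the key as one
 * contiguous DMA region:
 *
 *	[ sh_desc_enc | sh_desc_dec | key ]  <- single dma_map_single_attrs()
 *
 * and the per-field bus addresses stored above are offsets into that
 * mapping. caam_cra_init() below adds the skcipher-specific setup,
 * including an optional software fallback for XTS; the fallback is
 * requested with CRYPTO_ALG_NEED_FALLBACK in the mask, so an
 * implementation that itself needs a fallback (such as this driver's own
 * xts(aes)) is never picked.
 */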
3411 static int caam_cra_init(struct crypto_skcipher *tfm)
3413 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3414 struct caam_skcipher_alg *caam_alg =
3415 container_of(alg, typeof(*caam_alg), skcipher);
3416 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);
3417 u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3420 ctx->enginectx.op.do_one_request = skcipher_do_one_req;
3422 if (alg_aai == OP_ALG_AAI_XTS) {
3423 const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
3424 struct crypto_skcipher *fallback;
3426 fallback = crypto_alloc_skcipher(tfm_name, 0,
3427 CRYPTO_ALG_NEED_FALLBACK);
3428 if (IS_ERR(fallback)) {
3429 pr_err("Failed to allocate %s fallback: %ld\n",
3430 tfm_name, PTR_ERR(fallback));
3431 return PTR_ERR(fallback);
3434 ctx->fallback = fallback;
3435 crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx) +
3436 crypto_skcipher_reqsize(fallback));
3438 crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
3441 ret = caam_init_common(ctx, &caam_alg->caam, false);
3442 if (ret && ctx->fallback)
3443 crypto_free_skcipher(ctx->fallback);
3448 static int caam_aead_init(struct crypto_aead *tfm)
3450 struct aead_alg *alg = crypto_aead_alg(tfm);
3451 struct caam_aead_alg *caam_alg =
3452 container_of(alg, struct caam_aead_alg, aead);
3453 struct caam_ctx *ctx = crypto_aead_ctx_dma(tfm);
3455 crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));
3457 ctx->enginectx.op.do_one_request = aead_do_one_req;
3459 return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
3462 static void caam_exit_common(struct caam_ctx *ctx)
3464 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3465 offsetof(struct caam_ctx, sh_desc_enc_dma) -
3466 offsetof(struct caam_ctx, sh_desc_enc),
3467 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3468 caam_jr_free(ctx->jrdev);
3471 static void caam_cra_exit(struct crypto_skcipher *tfm)
3473 struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);
3476 crypto_free_skcipher(ctx->fallback);
3477 caam_exit_common(ctx);
3480 static void caam_aead_exit(struct crypto_aead *tfm)
3482 caam_exit_common(crypto_aead_ctx_dma(tfm));
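/*
 * caam_algapi_exit() only unregisters transforms whose ->registered flag
 * was set during caam_algapi_init(), so a partially successful
 * registration pass is unwound safely.
 */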
3485 void caam_algapi_exit(void)
3489 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3490 struct caam_aead_alg *t_alg = driver_aeads + i;
3492 if (t_alg->registered)
3493 crypto_unregister_aead(&t_alg->aead);
3496 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3497 struct caam_skcipher_alg *t_alg = driver_algs + i;
3499 if (t_alg->registered)
3500 crypto_unregister_skcipher(&t_alg->skcipher);
3504 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
3506 struct skcipher_alg *alg = &t_alg->skcipher;
3508 alg->base.cra_module = THIS_MODULE;
3509 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3510 alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding();
3511 alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
3512 CRYPTO_ALG_KERN_DRIVER_ONLY);
3514 alg->init = caam_cra_init;
3515 alg->exit = caam_cra_exit;
3518 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3520 struct aead_alg *alg = &t_alg->aead;
3522 alg->base.cra_module = THIS_MODULE;
3523 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3524 alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding();
3525 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
3526 CRYPTO_ALG_KERN_DRIVER_ONLY;
3528 alg->init = caam_aead_init;
3529 alg->exit = caam_aead_exit;
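/*
 * caam_algapi_init() walks driver_algs[] and driver_aeads[], skips any
 * template whose required accelerator block (DES, AES, MDHA, CCHA, PTHA)
 * is absent or too limited on this SEC instance, and registers the rest
 * with the crypto API.
 */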
3532 int caam_algapi_init(struct device *ctrldev)
3534 struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
3536 u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
3537 unsigned int md_limit = SHA512_DIGEST_SIZE;
3538 bool registered = false, gcm_support;
3541 * Register crypto algorithms the device supports.
3542 * First, detect presence and attributes of DES, AES, and MD blocks.
3544 if (priv->era < 10) {
3545 struct caam_perfmon __iomem *perfmon = &priv->jr[0]->perfmon;
3546 u32 cha_vid, cha_inst, aes_rn;
3548 cha_vid = rd_reg32(&perfmon->cha_id_ls);
3549 aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
3550 md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3552 cha_inst = rd_reg32(&perfmon->cha_num_ls);
3553 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
3554 CHA_ID_LS_DES_SHIFT;
3555 aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
3556 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3560 aes_rn = rd_reg32(&perfmon->cha_rev_ls) & CHA_ID_LS_AES_MASK;
3561 gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
3563 struct version_regs __iomem *vreg = &priv->jr[0]->vreg;
3566 aesa = rd_reg32(&vreg->aesa);
3567 mdha = rd_reg32(&vreg->mdha);
3569 aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3570 md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3572 des_inst = rd_reg32(&vreg->desa) & CHA_VER_NUM_MASK;
3573 aes_inst = aesa & CHA_VER_NUM_MASK;
3574 md_inst = mdha & CHA_VER_NUM_MASK;
3575 ccha_inst = rd_reg32(&vreg->ccha) & CHA_VER_NUM_MASK;
3576 ptha_inst = rd_reg32(&vreg->ptha) & CHA_VER_NUM_MASK;
3578 gcm_support = aesa & CHA_VER_MISC_AES_GCM;
3581 /* If MD is present, limit digest size based on LP256 */
3582 if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
3583 md_limit = SHA256_DIGEST_SIZE;
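/*
 * Era < 10 parts report accelerator presence/version through the packed
 * CHA ID/NUM words in the performance monitor; era 10+ parts have one
 * version register per accelerator, which also carries the AES-GCM
 * capability bit used above.
 */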
3585 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3586 struct caam_skcipher_alg *t_alg = driver_algs + i;
3587 u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;
3589 /* Skip DES algorithms if not supported by device */
3591 ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3592 (alg_sel == OP_ALG_ALGSEL_DES)))
3595 /* Skip AES algorithms if not supported by device */
3596 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3600 * Check support for AES modes not available
3601 * on LP devices.
3603 if (aes_vid == CHA_VER_VID_AES_LP &&
3604 (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
3608 caam_skcipher_alg_init(t_alg);
3610 err = crypto_register_skcipher(&t_alg->skcipher);
3612 pr_warn("%s alg registration failed\n",
3613 t_alg->skcipher.base.cra_driver_name);
3617 t_alg->registered = true;
3621 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3622 struct caam_aead_alg *t_alg = driver_aeads + i;
3623 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3625 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3627 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3629 /* Skip DES algorithms if not supported by device */
3631 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3632 (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3635 /* Skip AES algorithms if not supported by device */
3636 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3639 /* Skip CHACHA20 algorithms if not supported by device */
3640 if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
3643 /* Skip POLY1305 algorithms if not supported by device */
3644 if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
3647 /* Skip GCM algorithms if not supported by device */
3648 if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
3649 alg_aai == OP_ALG_AAI_GCM && !gcm_support)
3653 * Skip algorithms requiring message digests
3654 * if MD or MD size is not supported by device.
3656 if (is_mdha(c2_alg_sel) &&
3657 (!md_inst || t_alg->aead.maxauthsize > md_limit))
3660 caam_aead_alg_init(t_alg);
3662 err = crypto_register_aead(&t_alg->aead);
3664 pr_warn("%s alg registration failed\n",
3665 t_alg->aead.base.cra_driver_name);
3669 t_alg->registered = true;
3674 pr_info("caam algorithms registered in /proc/crypto\n");
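/*
 * With cra_priority set to CAAM_CRA_PRIORITY these implementations are
 * normally preferred over the generic software ones when a user requests
 * an algorithm by cra_name; the registered names and drivers can be
 * inspected in /proc/crypto.
 */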