/*
 * This file is part of the Chelsio T6 Crypto driver for Linux.
 *
 * Copyright (c) 2003-2016 Chelsio Communications, Inc. All rights reserved.
 *
 * This software is available to you under a choice of one of two
 * licenses.  You may choose to be licensed under the terms of the GNU
 * General Public License (GPL) Version 2, available from the file
 * COPYING in the main directory of this source tree, or the
 * OpenIB.org BSD license below:
 *
 *     Redistribution and use in source and binary forms, with or
 *     without modification, are permitted provided that the following
 *     conditions are met:
 *
 *      - Redistributions of source code must retain the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer.
 *
 *      - Redistributions in binary form must reproduce the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer in the documentation and/or other materials
 *        provided with the distribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * Written and Maintained by:
 *      Manoj Malviya (manojmalviya@chelsio.com)
 *      Atul Gupta (atul.gupta@chelsio.com)
 *      Jitendra Lulla (jlulla@chelsio.com)
 *      Yeshaswi M R Gowda (yeshaswi@chelsio.com)
 *      Harsh Jain (harsh@chelsio.com)
 */

#define pr_fmt(fmt) "chcr:" fmt

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <linux/skbuff.h>
#include <linux/rtnetlink.h>
#include <linux/highmem.h>
#include <linux/scatterlist.h>

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/hash.h>
#include <crypto/sha.h>
#include <crypto/authenc.h>
#include <crypto/internal/aead.h>
#include <crypto/null.h>
#include <crypto/internal/skcipher.h>
#include <crypto/aead.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "t4fw_api.h"
#include "t4_msg.h"
#include "chcr_core.h"
#include "chcr_algo.h"
#include "chcr_crypto.h"

static inline struct chcr_aead_ctx *AEAD_CTX(struct chcr_context *ctx)
{
        return ctx->crypto_ctx->aeadctx;
}

static inline struct ablk_ctx *ABLK_CTX(struct chcr_context *ctx)
{
        return ctx->crypto_ctx->ablkctx;
}

static inline struct hmac_ctx *HMAC_CTX(struct chcr_context *ctx)
{
        return ctx->crypto_ctx->hmacctx;
}

static inline struct chcr_gcm_ctx *GCM_CTX(struct chcr_aead_ctx *gctx)
{
        return gctx->ctx->gcm;
}

static inline struct chcr_authenc_ctx *AUTHENC_CTX(struct chcr_aead_ctx *gctx)
{
        return gctx->ctx->authenc;
}

static inline struct uld_ctx *ULD_CTX(struct chcr_context *ctx)
{
        return ctx->dev->u_ctx;
}

static inline int is_ofld_imm(const struct sk_buff *skb)
{
        return (skb->len <= CRYPTO_MAX_IMM_TX_PKT_LEN);
}

/*
 *      sgl_len - calculates the size of an SGL of the given capacity
 *      @n: the number of SGL entries
 *      Calculates the number of flits needed for a scatter/gather list that
 *      can hold the given number of entries.
 */
static inline unsigned int sgl_len(unsigned int n)
{
        n--;
        return (3 * n) / 2 + (n & 1) + 2;
}

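/*
 *      chcr_verify_tag - verify an AEAD authentication tag in software
 *      @req: AEAD request
 *      @input: CPL_FW6_PLD response, with the hardware-computed tag
 *              appended immediately after it
 *      @err: set to -EBADMSG on mismatch, 0 otherwise
 *
 *      The expected tag is taken from the CPL data words for GCM/RFC4106,
 *      or copied out of the tail of req->src for the other modes.
 */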
static void chcr_verify_tag(struct aead_request *req, u8 *input, int *err)
{
        u8 temp[SHA512_DIGEST_SIZE];
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        int authsize = crypto_aead_authsize(tfm);
        struct cpl_fw6_pld *fw6_pld;
        int cmp = 0;

        fw6_pld = (struct cpl_fw6_pld *)input;
        if ((get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) ||
            (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_GCM)) {
                cmp = memcmp(&fw6_pld->data[2], (fw6_pld + 1), authsize);
        } else {
                sg_pcopy_to_buffer(req->src, sg_nents(req->src), temp,
                                authsize, req->assoclen +
                                req->cryptlen - authsize);
                cmp = memcmp(temp, (fw6_pld + 1), authsize);
        }
        if (cmp)
                *err = -EBADMSG;
        else
                *err = 0;
}

/*
 *      chcr_handle_resp - handle a completion from the hardware: unmap the
 *      DMA buffers associated with the request, copy results back to the
 *      caller and, for AEAD decryption, verify the authentication tag.
 *      @req: crypto request
 */
int chcr_handle_resp(struct crypto_async_request *req, unsigned char *input,
                         int err)
{
        struct crypto_tfm *tfm = req->tfm;
        struct chcr_context *ctx = crypto_tfm_ctx(tfm);
        struct uld_ctx *u_ctx = ULD_CTX(ctx);
        struct chcr_req_ctx ctx_req;
        struct cpl_fw6_pld *fw6_pld;
        unsigned int digestsize, updated_digestsize;

        switch (tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_AEAD:
                ctx_req.req.aead_req = (struct aead_request *)req;
                ctx_req.ctx.reqctx = aead_request_ctx(ctx_req.req.aead_req);
                dma_unmap_sg(&u_ctx->lldi.pdev->dev, ctx_req.req.aead_req->dst,
                             ctx_req.ctx.reqctx->dst_nents, DMA_FROM_DEVICE);
                if (ctx_req.ctx.reqctx->skb) {
                        kfree_skb(ctx_req.ctx.reqctx->skb);
                        ctx_req.ctx.reqctx->skb = NULL;
                }
                if (ctx_req.ctx.reqctx->verify == VERIFY_SW) {
                        chcr_verify_tag(ctx_req.req.aead_req, input,
                                        &err);
                        ctx_req.ctx.reqctx->verify = VERIFY_HW;
                }
                break;

        case CRYPTO_ALG_TYPE_BLKCIPHER:
                ctx_req.req.ablk_req = (struct ablkcipher_request *)req;
                ctx_req.ctx.ablk_ctx =
                        ablkcipher_request_ctx(ctx_req.req.ablk_req);
                if (!err) {
                        fw6_pld = (struct cpl_fw6_pld *)input;
                        memcpy(ctx_req.req.ablk_req->info, &fw6_pld->data[2],
                               AES_BLOCK_SIZE);
                }
                dma_unmap_sg(&u_ctx->lldi.pdev->dev, ctx_req.req.ablk_req->dst,
                             ctx_req.ctx.ablk_ctx->dst_nents, DMA_FROM_DEVICE);
                if (ctx_req.ctx.ablk_ctx->skb) {
                        kfree_skb(ctx_req.ctx.ablk_ctx->skb);
                        ctx_req.ctx.ablk_ctx->skb = NULL;
                }
                break;

        case CRYPTO_ALG_TYPE_AHASH:
                ctx_req.req.ahash_req = (struct ahash_request *)req;
                ctx_req.ctx.ahash_ctx =
                        ahash_request_ctx(ctx_req.req.ahash_req);
                digestsize =
                        crypto_ahash_digestsize(crypto_ahash_reqtfm(
                                                        ctx_req.req.ahash_req));
                updated_digestsize = digestsize;
                if (digestsize == SHA224_DIGEST_SIZE)
                        updated_digestsize = SHA256_DIGEST_SIZE;
                else if (digestsize == SHA384_DIGEST_SIZE)
                        updated_digestsize = SHA512_DIGEST_SIZE;
                if (ctx_req.ctx.ahash_ctx->skb) {
                        kfree_skb(ctx_req.ctx.ahash_ctx->skb);
                        ctx_req.ctx.ahash_ctx->skb = NULL;
                }
                if (ctx_req.ctx.ahash_ctx->result == 1) {
                        ctx_req.ctx.ahash_ctx->result = 0;
                        memcpy(ctx_req.req.ahash_req->result, input +
                               sizeof(struct cpl_fw6_pld),
                               digestsize);
                } else {
                        memcpy(ctx_req.ctx.ahash_ctx->partial_hash, input +
                               sizeof(struct cpl_fw6_pld),
                               updated_digestsize);
                }
                break;
        }
        return err;
}

/*
 *      calc_tx_flits_ofld - calculate # of flits for an offload packet
 *      @skb: the packet
 *      Returns the number of flits needed for the given offload packet.
 *      These packets are already fully constructed and no additional headers
 *      will be added.
 */
static inline unsigned int calc_tx_flits_ofld(const struct sk_buff *skb)
{
        unsigned int flits, cnt;

        if (is_ofld_imm(skb))
                return DIV_ROUND_UP(skb->len, 8);

        flits = skb_transport_offset(skb) / 8;   /* headers */
        cnt = skb_shinfo(skb)->nr_frags;
        if (skb_tail_pointer(skb) != skb_transport_header(skb))
                cnt++;
        return flits + sgl_len(cnt);
}

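/*
 *      get_aes_decrypt_key - derive the reversed round key for decryption
 *      @dec_key: output buffer for the reversed round-key words
 *      @key: raw AES key
 *      @keylength: key length in bits
 *
 *      Runs the standard AES key expansion in software and writes out the
 *      last Nk expanded words in reverse order, the layout used for the
 *      hardware decryption key context (the "rrkey").
 */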
static inline void get_aes_decrypt_key(unsigned char *dec_key,
                                       const unsigned char *key,
                                       unsigned int keylength)
{
        u32 temp;
        u32 w_ring[MAX_NK];
        int i, j, k;
        u8  nr, nk;

        switch (keylength) {
        case AES_KEYLENGTH_128BIT:
                nk = KEYLENGTH_4BYTES;
                nr = NUMBER_OF_ROUNDS_10;
                break;
        case AES_KEYLENGTH_192BIT:
                nk = KEYLENGTH_6BYTES;
                nr = NUMBER_OF_ROUNDS_12;
                break;
        case AES_KEYLENGTH_256BIT:
                nk = KEYLENGTH_8BYTES;
                nr = NUMBER_OF_ROUNDS_14;
                break;
        default:
                return;
        }
        for (i = 0; i < nk; i++)
                w_ring[i] = be32_to_cpu(*(u32 *)&key[4 * i]);

        i = 0;
        temp = w_ring[nk - 1];
        while (i + nk < (nr + 1) * 4) {
                if (!(i % nk)) {
                        /* RotWord(temp) */
                        temp = (temp << 8) | (temp >> 24);
                        temp = aes_ks_subword(temp);
                        temp ^= round_constant[i / nk];
                } else if (nk == 8 && (i % 4 == 0)) {
                        temp = aes_ks_subword(temp);
                }
                w_ring[i % nk] ^= temp;
                temp = w_ring[i % nk];
                i++;
        }
        i--;
        for (k = 0, j = i % nk; k < nk; k++) {
                *((u32 *)dec_key + k) = htonl(w_ring[j]);
                j--;
                if (j < 0)
                        j += nk;
        }
}

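/*
 * Allocate a software shash transform matching the given digest size,
 * used to precompute the HMAC ipad/opad partial hashes. Returns NULL
 * for unsupported digest sizes.
 */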
static struct crypto_shash *chcr_alloc_shash(unsigned int ds)
{
        struct crypto_shash *base_hash = NULL;

        switch (ds) {
        case SHA1_DIGEST_SIZE:
                base_hash = crypto_alloc_shash("sha1", 0, 0);
                break;
        case SHA224_DIGEST_SIZE:
                base_hash = crypto_alloc_shash("sha224", 0, 0);
                break;
        case SHA256_DIGEST_SIZE:
                base_hash = crypto_alloc_shash("sha256", 0, 0);
                break;
        case SHA384_DIGEST_SIZE:
                base_hash = crypto_alloc_shash("sha384", 0, 0);
                break;
        case SHA512_DIGEST_SIZE:
                base_hash = crypto_alloc_shash("sha512", 0, 0);
                break;
        }

        return base_hash;
}

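/*
 * Hash a single block (the HMAC ipad or opad) with the software transform
 * and export the raw internal state into result_hash, so the hardware can
 * resume the hash from that midpoint.
 */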
static int chcr_compute_partial_hash(struct shash_desc *desc,
                                     char *iopad, char *result_hash,
                                     int digest_size)
{
        struct sha1_state sha1_st;
        struct sha256_state sha256_st;
        struct sha512_state sha512_st;
        int error;

        if (digest_size == SHA1_DIGEST_SIZE) {
                error = crypto_shash_init(desc) ?:
                        crypto_shash_update(desc, iopad, SHA1_BLOCK_SIZE) ?:
                        crypto_shash_export(desc, (void *)&sha1_st);
                memcpy(result_hash, sha1_st.state, SHA1_DIGEST_SIZE);
        } else if (digest_size == SHA224_DIGEST_SIZE) {
                error = crypto_shash_init(desc) ?:
                        crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
                        crypto_shash_export(desc, (void *)&sha256_st);
                memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);

        } else if (digest_size == SHA256_DIGEST_SIZE) {
                error = crypto_shash_init(desc) ?:
                        crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
                        crypto_shash_export(desc, (void *)&sha256_st);
                memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);

        } else if (digest_size == SHA384_DIGEST_SIZE) {
                error = crypto_shash_init(desc) ?:
                        crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
                        crypto_shash_export(desc, (void *)&sha512_st);
                memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);

        } else if (digest_size == SHA512_DIGEST_SIZE) {
                error = crypto_shash_init(desc) ?:
                        crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
                        crypto_shash_export(desc, (void *)&sha512_st);
                memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);
        } else {
                error = -EINVAL;
                pr_err("Unknown digest size %d\n", digest_size);
        }
        return error;
}

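/*
 * Convert the exported hash-state words from host order to the big-endian
 * layout used in the hardware key context.
 */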
static void chcr_change_order(char *buf, int ds)
{
        int i;

        if (ds == SHA512_DIGEST_SIZE) {
                for (i = 0; i < (ds / sizeof(u64)); i++)
                        *((__be64 *)buf + i) =
                                cpu_to_be64(*((u64 *)buf + i));
        } else {
                for (i = 0; i < (ds / sizeof(u32)); i++)
                        *((__be32 *)buf + i) =
                                cpu_to_be32(*((u32 *)buf + i));
        }
}

static inline int is_hmac(struct crypto_tfm *tfm)
{
        struct crypto_alg *alg = tfm->__crt_alg;
        struct chcr_alg_template *chcr_crypto_alg =
                container_of(__crypto_ahash_alg(alg), struct chcr_alg_template,
                             alg.hash);
        if (chcr_crypto_alg->type == CRYPTO_ALG_TYPE_HMAC)
                return 1;
        return 0;
}

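/*
 * write_phys_cpl - populate a CPL_RX_PHYS_DSGL destination gather list.
 * Each phys_sge_pairs entry carries up to eight address/length pairs;
 * any shortfall between the DMA-mapped SG list and the expected output
 * size is folded into the length of the last entry.
 */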
static void write_phys_cpl(struct cpl_rx_phys_dsgl *phys_cpl,
                           struct scatterlist *sg,
                           struct phys_sge_parm *sg_param)
{
        struct phys_sge_pairs *to;
        int out_buf_size = sg_param->obsize;
        unsigned int nents = sg_param->nents, i, j = 0;

        phys_cpl->op_to_tid = htonl(CPL_RX_PHYS_DSGL_OPCODE_V(CPL_RX_PHYS_DSGL)
                                    | CPL_RX_PHYS_DSGL_ISRDMA_V(0));
        phys_cpl->pcirlxorder_to_noofsgentr =
                htonl(CPL_RX_PHYS_DSGL_PCIRLXORDER_V(0) |
                      CPL_RX_PHYS_DSGL_PCINOSNOOP_V(0) |
                      CPL_RX_PHYS_DSGL_PCITPHNTENB_V(0) |
                      CPL_RX_PHYS_DSGL_PCITPHNT_V(0) |
                      CPL_RX_PHYS_DSGL_DCAID_V(0) |
                      CPL_RX_PHYS_DSGL_NOOFSGENTR_V(nents));
        phys_cpl->rss_hdr_int.opcode = CPL_RX_PHYS_ADDR;
        phys_cpl->rss_hdr_int.qid = htons(sg_param->qid);
        phys_cpl->rss_hdr_int.hash_val = 0;
        to = (struct phys_sge_pairs *)((unsigned char *)phys_cpl +
                                       sizeof(struct cpl_rx_phys_dsgl));

        for (i = 0; nents; to++) {
                for (j = 0; j < 8 && nents; j++, nents--) {
                        out_buf_size -= sg_dma_len(sg);
                        to->len[j] = htons(sg_dma_len(sg));
                        to->addr[j] = cpu_to_be64(sg_dma_address(sg));
                        sg = sg_next(sg);
                }
        }
        if (out_buf_size) {
                j--;
                to--;
                to->len[j] = htons(ntohs(to->len[j]) + (out_buf_size));
        }
}

static inline int map_writesg_phys_cpl(struct device *dev,
                                        struct cpl_rx_phys_dsgl *phys_cpl,
                                        struct scatterlist *sg,
                                        struct phys_sge_parm *sg_param)
{
        if (!sg || !sg_param->nents)
                return 0;

        sg_param->nents = dma_map_sg(dev, sg, sg_param->nents, DMA_FROM_DEVICE);
        if (sg_param->nents == 0) {
                pr_err("CHCR : DMA mapping failed\n");
                return -EINVAL;
        }
        write_phys_cpl(phys_cpl, sg, sg_param);
        return 0;
}

static inline int get_aead_subtype(struct crypto_aead *aead)
{
        struct aead_alg *alg = crypto_aead_alg(aead);
        struct chcr_alg_template *chcr_crypto_alg =
                container_of(alg, struct chcr_alg_template, alg.aead);
        return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}

static inline int get_cryptoalg_subtype(struct crypto_tfm *tfm)
{
        struct crypto_alg *alg = tfm->__crt_alg;
        struct chcr_alg_template *chcr_crypto_alg =
                container_of(alg, struct chcr_alg_template, alg.crypto);

        return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}

static inline void write_buffer_to_skb(struct sk_buff *skb,
                                        unsigned int *frags,
                                        char *bfr,
                                        u8 bfr_len)
{
        skb->len += bfr_len;
        skb->data_len += bfr_len;
        skb->truesize += bfr_len;
        get_page(virt_to_page(bfr));
        skb_fill_page_desc(skb, *frags, virt_to_page(bfr),
                           offset_in_page(bfr), bfr_len);
        (*frags)++;
}

static inline void
write_sg_to_skb(struct sk_buff *skb, unsigned int *frags,
                        struct scatterlist *sg, unsigned int count)
{
        struct page *spage;
        unsigned int page_len;

        skb->len += count;
        skb->data_len += count;
        skb->truesize += count;

        while (count > 0) {
                if (!sg || (!(sg->length)))
                        break;
                spage = sg_page(sg);
                get_page(spage);
                page_len = min(sg->length, count);
                skb_fill_page_desc(skb, *frags, spage, sg->offset, page_len);
                (*frags)++;
                count -= page_len;
                sg = sg_next(sg);
        }
}

static int generate_copy_rrkey(struct ablk_ctx *ablkctx,
                               struct _key_ctx *key_ctx)
{
        if (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) {
                memcpy(key_ctx->key, ablkctx->rrkey, ablkctx->enckey_len);
        } else {
                memcpy(key_ctx->key,
                       ablkctx->key + (ablkctx->enckey_len >> 1),
                       ablkctx->enckey_len >> 1);
                memcpy(key_ctx->key + (ablkctx->enckey_len >> 1),
                       ablkctx->rrkey, ablkctx->enckey_len >> 1);
        }
        return 0;
}

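/*
 * create_wreq - fill the FW_CRYPTO_LOOKASIDE work request header and the
 * ULPTX/SC_IMM sub-headers common to the cipher, hash and AEAD paths.
 * Small requests are carried as immediate data; larger ones reference the
 * skb fragments, and the IV location is flagged accordingly.
 */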
static inline void create_wreq(struct chcr_context *ctx,
                               struct chcr_wr *chcr_req,
                               void *req, struct sk_buff *skb,
                               int kctx_len, int hash_sz,
                               int is_iv,
                               unsigned int sc_len)
{
        struct uld_ctx *u_ctx = ULD_CTX(ctx);
        int iv_loc = IV_DSGL;
        int qid = u_ctx->lldi.rxq_ids[ctx->tx_channel_id];
        unsigned int immdatalen = 0, nr_frags = 0;

        if (is_ofld_imm(skb)) {
                immdatalen = skb->data_len;
                iv_loc = IV_IMMEDIATE;
        } else {
                nr_frags = skb_shinfo(skb)->nr_frags;
        }

        chcr_req->wreq.op_to_cctx_size = FILL_WR_OP_CCTX_SIZE(immdatalen,
                                ((sizeof(chcr_req->key_ctx) + kctx_len) >> 4));
        chcr_req->wreq.pld_size_hash_size =
                htonl(FW_CRYPTO_LOOKASIDE_WR_PLD_SIZE_V(sgl_lengths[nr_frags]) |
                      FW_CRYPTO_LOOKASIDE_WR_HASH_SIZE_V(hash_sz));
        chcr_req->wreq.len16_pkd =
                htonl(FW_CRYPTO_LOOKASIDE_WR_LEN16_V(DIV_ROUND_UP(
                                    (calc_tx_flits_ofld(skb) * 8), 16)));
        chcr_req->wreq.cookie = cpu_to_be64((uintptr_t)req);
        chcr_req->wreq.rx_chid_to_rx_q_id =
                FILL_WR_RX_Q_ID(ctx->dev->tx_channel_id, qid,
                                is_iv ? iv_loc : IV_NOP);

        chcr_req->ulptx.cmd_dest = FILL_ULPTX_CMD_DEST(ctx->dev->tx_channel_id);
        chcr_req->ulptx.len = htonl((DIV_ROUND_UP((calc_tx_flits_ofld(skb) * 8),
                                        16) - ((sizeof(chcr_req->wreq)) >> 4)));

        chcr_req->sc_imm.cmd_more = FILL_CMD_MORE(immdatalen);
        chcr_req->sc_imm.len = cpu_to_be32(sizeof(struct cpl_tx_sec_pdu) +
                                   sizeof(chcr_req->key_ctx) +
                                   kctx_len + sc_len + immdatalen);
}

/**
 *      create_cipher_wr - form the WR for cipher operations
 *      @req: cipher request
 *      @qid: ingress qid where the response to this WR should be received
 *      @op_type: encryption or decryption
 */
static struct sk_buff
*create_cipher_wr(struct ablkcipher_request *req,
                  unsigned short qid,
                  unsigned short op_type)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
        struct uld_ctx *u_ctx = ULD_CTX(ctx);
        struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
        struct sk_buff *skb = NULL;
        struct chcr_wr *chcr_req;
        struct cpl_rx_phys_dsgl *phys_cpl;
        struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
        struct phys_sge_parm sg_param;
        unsigned int frags = 0, transhdr_len, phys_dsgl;
        unsigned int ivsize = crypto_ablkcipher_ivsize(tfm), kctx_len;
        gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
                        GFP_ATOMIC;

        if (!req->info)
                return ERR_PTR(-EINVAL);
        reqctx->dst_nents = sg_nents_for_len(req->dst, req->nbytes);
        if (reqctx->dst_nents <= 0) {
                pr_err("AES:Invalid Destination sg lists\n");
                return ERR_PTR(-EINVAL);
        }
        if ((ablkctx->enckey_len == 0) || (ivsize > AES_BLOCK_SIZE) ||
            (req->nbytes <= 0) || (req->nbytes % AES_BLOCK_SIZE)) {
                pr_err("AES: Invalid value of Key Len %d nbytes %d IV Len %d\n",
                       ablkctx->enckey_len, req->nbytes, ivsize);
                return ERR_PTR(-EINVAL);
        }

        phys_dsgl = get_space_for_phys_dsgl(reqctx->dst_nents);

        kctx_len = (DIV_ROUND_UP(ablkctx->enckey_len, 16) * 16);
        transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, phys_dsgl);
        skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
        if (!skb)
                return ERR_PTR(-ENOMEM);
        skb_reserve(skb, sizeof(struct sge_opaque_hdr));
        chcr_req = (struct chcr_wr *)__skb_put(skb, transhdr_len);
        memset(chcr_req, 0, transhdr_len);
        chcr_req->sec_cpl.op_ivinsrtofst =
                FILL_SEC_CPL_OP_IVINSR(ctx->dev->tx_channel_id, 2, 1);

        chcr_req->sec_cpl.pldlen = htonl(ivsize + req->nbytes);
        chcr_req->sec_cpl.aadstart_cipherstop_hi =
                        FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, ivsize + 1, 0);

        chcr_req->sec_cpl.cipherstop_lo_authinsert =
                        FILL_SEC_CPL_AUTHINSERT(0, 0, 0, 0);
        chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type, 0,
                                                         ablkctx->ciph_mode,
                                                         0, 0, ivsize >> 1);
        chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 0,
                                                          0, 1, phys_dsgl);

        chcr_req->key_ctx.ctx_hdr = ablkctx->key_ctx_hdr;
        if (op_type == CHCR_DECRYPT_OP) {
                generate_copy_rrkey(ablkctx, &chcr_req->key_ctx);
        } else {
                if (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) {
                        memcpy(chcr_req->key_ctx.key, ablkctx->key,
                               ablkctx->enckey_len);
                } else {
                        memcpy(chcr_req->key_ctx.key, ablkctx->key +
                               (ablkctx->enckey_len >> 1),
                               ablkctx->enckey_len >> 1);
                        memcpy(chcr_req->key_ctx.key +
                               (ablkctx->enckey_len >> 1),
                               ablkctx->key,
                               ablkctx->enckey_len >> 1);
                }
        }
        phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
        sg_param.nents = reqctx->dst_nents;
        sg_param.obsize = req->nbytes;
        sg_param.qid = qid;
        sg_param.align = 1;
        if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, req->dst,
                                 &sg_param))
                goto map_fail1;

        skb_set_transport_header(skb, transhdr_len);
        memcpy(reqctx->iv, req->info, ivsize);
        write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
        write_sg_to_skb(skb, &frags, req->src, req->nbytes);
        create_wreq(ctx, chcr_req, req, skb, kctx_len, 0, 1,
                        sizeof(struct cpl_rx_phys_dsgl) + phys_dsgl);
        reqctx->skb = skb;
        skb_get(skb);
        return skb;
map_fail1:
        kfree_skb(skb);
        return ERR_PTR(-ENOMEM);
}

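/*
 * chcr_aes_cbc_setkey - store the AES-CBC key, precompute its reversed
 * round-key copy for decryption, and build the key-context header.
 */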
static int chcr_aes_cbc_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                               unsigned int keylen)
{
        struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
        struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
        unsigned int ck_size, context_size;
        u16 alignment = 0;

        if (keylen == AES_KEYSIZE_128) {
                ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
        } else if (keylen == AES_KEYSIZE_192) {
                alignment = 8;
                ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
        } else if (keylen == AES_KEYSIZE_256) {
                ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
        } else {
                goto badkey_err;
        }
        memcpy(ablkctx->key, key, keylen);
        ablkctx->enckey_len = keylen;
        get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, keylen << 3);
        context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
                        keylen + alignment) >> 4;

        ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
                                                0, 0, context_size);
        ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CBC;
        return 0;
badkey_err:
        crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
        ablkctx->enckey_len = 0;
        return -EINVAL;
}

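/*
 * Peek at the crypto ULD TX queue for the given index and report whether
 * it is currently full, without queueing anything.
 */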
static int cxgb4_is_crypto_q_full(struct net_device *dev, unsigned int idx)
{
        struct adapter *adap = netdev2adap(dev);
        struct sge_uld_txq_info *txq_info =
                adap->sge.uld_txq_info[CXGB4_TX_CRYPTO];
        struct sge_uld_txq *txq;
        int ret = 0;

        local_bh_disable();
        txq = &txq_info->uldtxq[idx];
        spin_lock(&txq->sendq.lock);
        if (txq->full)
                ret = -1;
        spin_unlock(&txq->sendq.lock);
        local_bh_enable();
        return ret;
}

static int chcr_aes_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
        struct uld_ctx *u_ctx = ULD_CTX(ctx);
        struct sk_buff *skb;

        if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
                                            ctx->tx_channel_id))) {
                if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
                        return -EBUSY;
        }

        skb = create_cipher_wr(req, u_ctx->lldi.rxq_ids[ctx->tx_channel_id],
                               CHCR_ENCRYPT_OP);
        if (IS_ERR(skb)) {
                pr_err("chcr : %s : Failed to form WR. No memory\n", __func__);
                return PTR_ERR(skb);
        }
        skb->dev = u_ctx->lldi.ports[0];
        set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_channel_id);
        chcr_send_wr(skb);
        return -EINPROGRESS;
}

static int chcr_aes_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
        struct uld_ctx *u_ctx = ULD_CTX(ctx);
        struct sk_buff *skb;

        if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
                                            ctx->tx_channel_id))) {
                if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
                        return -EBUSY;
        }

        skb = create_cipher_wr(req, u_ctx->lldi.rxq_ids[0],
                               CHCR_DECRYPT_OP);
        if (IS_ERR(skb)) {
                pr_err("chcr : %s : Failed to form WR. No memory\n", __func__);
                return PTR_ERR(skb);
        }
        skb->dev = u_ctx->lldi.ports[0];
        set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_channel_id);
        chcr_send_wr(skb);
        return -EINPROGRESS;
}

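/*
 * chcr_device_init - lazily bind the tfm context to a chcr device and
 * derive the rx queue index used for its responses; the device's
 * tx_channel_id is toggled so successive contexts alternate channels.
 */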
static int chcr_device_init(struct chcr_context *ctx)
{
        struct uld_ctx *u_ctx;
        unsigned int id;
        int err = 0, rxq_perchan, rxq_idx;

        id = smp_processor_id();
        if (!ctx->dev) {
                err = assign_chcr_device(&ctx->dev);
                if (err) {
                        pr_err("chcr device assignment fails\n");
                        goto out;
                }
                u_ctx = ULD_CTX(ctx);
                rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
                rxq_idx = ctx->dev->tx_channel_id * rxq_perchan;
                rxq_idx += id % rxq_perchan;
                spin_lock(&ctx->dev->lock_chcr_dev);
                ctx->tx_channel_id = rxq_idx;
                ctx->dev->tx_channel_id = !ctx->dev->tx_channel_id;
                spin_unlock(&ctx->dev->lock_chcr_dev);
        }
out:
        return err;
}

static int chcr_cra_init(struct crypto_tfm *tfm)
{
        tfm->crt_ablkcipher.reqsize = sizeof(struct chcr_blkcipher_req_ctx);
        return chcr_device_init(crypto_tfm_ctx(tfm));
}

static int get_alg_config(struct algo_param *params,
                          unsigned int auth_size)
{
        switch (auth_size) {
        case SHA1_DIGEST_SIZE:
                params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_160;
                params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA1;
                params->result_size = SHA1_DIGEST_SIZE;
                break;
        case SHA224_DIGEST_SIZE:
                params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
                params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA224;
                params->result_size = SHA256_DIGEST_SIZE;
                break;
        case SHA256_DIGEST_SIZE:
                params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
                params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA256;
                params->result_size = SHA256_DIGEST_SIZE;
                break;
        case SHA384_DIGEST_SIZE:
                params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
                params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_384;
                params->result_size = SHA512_DIGEST_SIZE;
                break;
        case SHA512_DIGEST_SIZE:
                params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
                params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_512;
                params->result_size = SHA512_DIGEST_SIZE;
                break;
        default:
                pr_err("chcr : ERROR, unsupported digest size\n");
                return -EINVAL;
        }
        return 0;
}

static inline void chcr_free_shash(struct crypto_shash *base_hash)
{
        crypto_free_shash(base_hash);
}

/**
 *      create_hash_wr - Create hash work request
 *      @req: hash request
 *      @param: hash work request parameters
 */
static struct sk_buff *create_hash_wr(struct ahash_request *req,
                                      struct hash_wr_param *param)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
        struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
        struct sk_buff *skb = NULL;
        struct chcr_wr *chcr_req;
        unsigned int frags = 0, transhdr_len, iopad_alignment = 0;
        unsigned int digestsize = crypto_ahash_digestsize(tfm);
        unsigned int kctx_len = 0;
        u8 hash_size_in_response = 0;
        gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
                GFP_ATOMIC;

        iopad_alignment = KEYCTX_ALIGN_PAD(digestsize);
        kctx_len = param->alg_prm.result_size + iopad_alignment;
        if (param->opad_needed)
                kctx_len += param->alg_prm.result_size + iopad_alignment;

        if (req_ctx->result)
                hash_size_in_response = digestsize;
        else
                hash_size_in_response = param->alg_prm.result_size;
        transhdr_len = HASH_TRANSHDR_SIZE(kctx_len);
        skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
        if (!skb)
                return skb;

        skb_reserve(skb, sizeof(struct sge_opaque_hdr));
        chcr_req = (struct chcr_wr *)__skb_put(skb, transhdr_len);
        memset(chcr_req, 0, transhdr_len);

        chcr_req->sec_cpl.op_ivinsrtofst =
                FILL_SEC_CPL_OP_IVINSR(ctx->dev->tx_channel_id, 2, 0);
        chcr_req->sec_cpl.pldlen = htonl(param->bfr_len + param->sg_len);

        chcr_req->sec_cpl.aadstart_cipherstop_hi =
                FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, 0, 0);
        chcr_req->sec_cpl.cipherstop_lo_authinsert =
                FILL_SEC_CPL_AUTHINSERT(0, 1, 0, 0);
        chcr_req->sec_cpl.seqno_numivs =
                FILL_SEC_CPL_SCMD0_SEQNO(0, 0, 0, param->alg_prm.auth_mode,
                                         param->opad_needed, 0);

        chcr_req->sec_cpl.ivgen_hdrlen =
                FILL_SEC_CPL_IVGEN_HDRLEN(param->last, param->more, 0, 1, 0, 0);

        memcpy(chcr_req->key_ctx.key, req_ctx->partial_hash,
               param->alg_prm.result_size);

        if (param->opad_needed)
                memcpy(chcr_req->key_ctx.key +
                       ((param->alg_prm.result_size <= 32) ? 32 :
                        CHCR_HASH_MAX_DIGEST_SIZE),
                       hmacctx->opad, param->alg_prm.result_size);

        chcr_req->key_ctx.ctx_hdr = FILL_KEY_CTX_HDR(CHCR_KEYCTX_NO_KEY,
                                            param->alg_prm.mk_size, 0,
                                            param->opad_needed,
                                            ((kctx_len +
                                             sizeof(chcr_req->key_ctx)) >> 4));
        chcr_req->sec_cpl.scmd1 = cpu_to_be64((u64)param->scmd1);

        skb_set_transport_header(skb, transhdr_len);
        if (param->bfr_len != 0)
                write_buffer_to_skb(skb, &frags, req_ctx->reqbfr,
                                    param->bfr_len);
        if (param->sg_len != 0)
                write_sg_to_skb(skb, &frags, req->src, param->sg_len);

        create_wreq(ctx, chcr_req, req, skb, kctx_len, hash_size_in_response, 0,
                        DUMMY_BYTES);
        req_ctx->skb = skb;
        skb_get(skb);
        return skb;
}

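/*
 * chcr_ahash_update - queue full blocks to the hardware and stash any
 * trailing partial block in the request buffer. Input smaller than one
 * block is only buffered and no work request is issued.
 */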
static int chcr_ahash_update(struct ahash_request *req)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
        struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
        struct uld_ctx *u_ctx = NULL;
        struct sk_buff *skb;
        u8 remainder = 0, bs;
        unsigned int nbytes = req->nbytes;
        struct hash_wr_param params;

        bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

        u_ctx = ULD_CTX(ctx);
        if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
                                            ctx->tx_channel_id))) {
                if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
                        return -EBUSY;
        }

        if (nbytes + req_ctx->reqlen >= bs) {
                remainder = (nbytes + req_ctx->reqlen) % bs;
                nbytes = nbytes + req_ctx->reqlen - remainder;
        } else {
                sg_pcopy_to_buffer(req->src, sg_nents(req->src), req_ctx->reqbfr
                                   + req_ctx->reqlen, nbytes, 0);
                req_ctx->reqlen += nbytes;
                return 0;
        }

        params.opad_needed = 0;
        params.more = 1;
        params.last = 0;
        params.sg_len = nbytes - req_ctx->reqlen;
        params.bfr_len = req_ctx->reqlen;
        params.scmd1 = 0;
        get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
        req_ctx->result = 0;
        req_ctx->data_len += params.sg_len + params.bfr_len;
        skb = create_hash_wr(req, &params);
        if (!skb)
                return -ENOMEM;

        if (remainder) {
                u8 *temp;
                /* Swap buffers */
                temp = req_ctx->reqbfr;
                req_ctx->reqbfr = req_ctx->skbfr;
                req_ctx->skbfr = temp;
                sg_pcopy_to_buffer(req->src, sg_nents(req->src),
                                   req_ctx->reqbfr, remainder, req->nbytes -
                                   remainder);
        }
        req_ctx->reqlen = remainder;
        skb->dev = u_ctx->lldi.ports[0];
        set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_channel_id);
        chcr_send_wr(skb);

        return -EINPROGRESS;
}

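/*
 * Build a final padding-only block: a 0x80 terminator followed by zeros,
 * with the total message length in bits in the last eight bytes, per the
 * SHA padding rules (offset 56 for 64-byte blocks, 120 for 128-byte ones).
 */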
static void create_last_hash_block(char *bfr_ptr, unsigned int bs, u64 scmd1)
{
        memset(bfr_ptr, 0, bs);
        *bfr_ptr = 0x80;
        if (bs == 64)
                *(__be64 *)(bfr_ptr + 56) = cpu_to_be64(scmd1 << 3);
        else
                *(__be64 *)(bfr_ptr + 120) = cpu_to_be64(scmd1 << 3);
}

static int chcr_ahash_final(struct ahash_request *req)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
        struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
        struct hash_wr_param params;
        struct sk_buff *skb;
        struct uld_ctx *u_ctx = NULL;
        u8 bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

        u_ctx = ULD_CTX(ctx);
        if (is_hmac(crypto_ahash_tfm(rtfm)))
                params.opad_needed = 1;
        else
                params.opad_needed = 0;
        params.sg_len = 0;
        get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
        req_ctx->result = 1;
        params.bfr_len = req_ctx->reqlen;
        req_ctx->data_len += params.bfr_len + params.sg_len;
        if (req_ctx->reqlen == 0) {
                create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
                params.last = 0;
                params.more = 1;
                params.scmd1 = 0;
                params.bfr_len = bs;

        } else {
                params.scmd1 = req_ctx->data_len;
                params.last = 1;
                params.more = 0;
        }
        skb = create_hash_wr(req, &params);
        if (!skb)
                return -ENOMEM;

        skb->dev = u_ctx->lldi.ports[0];
        set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_channel_id);
        chcr_send_wr(skb);
        return -EINPROGRESS;
}

static int chcr_ahash_finup(struct ahash_request *req)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
        struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
        struct uld_ctx *u_ctx = NULL;
        struct sk_buff *skb;
        struct hash_wr_param params;
        u8  bs;

        bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
        u_ctx = ULD_CTX(ctx);

        if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
                                            ctx->tx_channel_id))) {
                if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
                        return -EBUSY;
        }

        if (is_hmac(crypto_ahash_tfm(rtfm)))
                params.opad_needed = 1;
        else
                params.opad_needed = 0;

        params.sg_len = req->nbytes;
        params.bfr_len = req_ctx->reqlen;
        get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
        req_ctx->data_len += params.bfr_len + params.sg_len;
        req_ctx->result = 1;
        if ((req_ctx->reqlen + req->nbytes) == 0) {
                create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
                params.last = 0;
                params.more = 1;
                params.scmd1 = 0;
                params.bfr_len = bs;
        } else {
                params.scmd1 = req_ctx->data_len;
                params.last = 1;
                params.more = 0;
        }

        skb = create_hash_wr(req, &params);
        if (!skb)
                return -ENOMEM;

        skb->dev = u_ctx->lldi.ports[0];
        set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_channel_id);
        chcr_send_wr(skb);

        return -EINPROGRESS;
}

static int chcr_ahash_digest(struct ahash_request *req)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
        struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
        struct uld_ctx *u_ctx = NULL;
        struct sk_buff *skb;
        struct hash_wr_param params;
        u8  bs;

        rtfm->init(req);
        bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

        u_ctx = ULD_CTX(ctx);
        if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
                                            ctx->tx_channel_id))) {
                if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
                        return -EBUSY;
        }

        if (is_hmac(crypto_ahash_tfm(rtfm)))
                params.opad_needed = 1;
        else
                params.opad_needed = 0;

        params.last = 0;
        params.more = 0;
        params.sg_len = req->nbytes;
        params.bfr_len = 0;
        params.scmd1 = 0;
        get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
        req_ctx->result = 1;
        req_ctx->data_len += params.bfr_len + params.sg_len;

        if (req->nbytes == 0) {
                create_last_hash_block(req_ctx->reqbfr, bs, 0);
                params.more = 1;
                params.bfr_len = bs;
        }

        skb = create_hash_wr(req, &params);
        if (!skb)
                return -ENOMEM;

        skb->dev = u_ctx->lldi.ports[0];
        set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_channel_id);
        chcr_send_wr(skb);
        return -EINPROGRESS;
}

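/*
 * Export/import snapshot and restore the driver's request context (the
 * buffered partial block, running data length and partial hash) so a hash
 * operation can be suspended and later resumed.
 */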
static int chcr_ahash_export(struct ahash_request *areq, void *out)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
        struct chcr_ahash_req_ctx *state = out;

        state->reqlen = req_ctx->reqlen;
        state->data_len = req_ctx->data_len;
        memcpy(state->bfr1, req_ctx->reqbfr, req_ctx->reqlen);
        memcpy(state->partial_hash, req_ctx->partial_hash,
               CHCR_HASH_MAX_DIGEST_SIZE);
        return 0;
}

static int chcr_ahash_import(struct ahash_request *areq, const void *in)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
        struct chcr_ahash_req_ctx *state = (struct chcr_ahash_req_ctx *)in;

        req_ctx->reqlen = state->reqlen;
        req_ctx->data_len = state->data_len;
        req_ctx->reqbfr = req_ctx->bfr1;
        req_ctx->skbfr = req_ctx->bfr2;
        memcpy(req_ctx->bfr1, state->bfr1, CHCR_HASH_MAX_BLOCK_SIZE_128);
        memcpy(req_ctx->partial_hash, state->partial_hash,
               CHCR_HASH_MAX_DIGEST_SIZE);
        return 0;
}

static int chcr_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
                             unsigned int keylen)
{
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
        struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
        unsigned int digestsize = crypto_ahash_digestsize(tfm);
        unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
        unsigned int i, err = 0, updated_digestsize;

        SHASH_DESC_ON_STACK(shash, hmacctx->base_hash);

        /* Use the key to calculate the ipad and opad. The ipad is sent with
         * the first request's data and the opad with the final hash result;
         * they are kept in hmacctx->ipad and hmacctx->opad respectively.
         */
        shash->tfm = hmacctx->base_hash;
        shash->flags = crypto_shash_get_flags(hmacctx->base_hash);
        if (keylen > bs) {
                err = crypto_shash_digest(shash, key, keylen,
                                          hmacctx->ipad);
                if (err)
                        goto out;
                keylen = digestsize;
        } else {
                memcpy(hmacctx->ipad, key, keylen);
        }
        memset(hmacctx->ipad + keylen, 0, bs - keylen);
        memcpy(hmacctx->opad, hmacctx->ipad, bs);

        for (i = 0; i < bs / sizeof(int); i++) {
                *((unsigned int *)(&hmacctx->ipad) + i) ^= IPAD_DATA;
                *((unsigned int *)(&hmacctx->opad) + i) ^= OPAD_DATA;
        }

        updated_digestsize = digestsize;
        if (digestsize == SHA224_DIGEST_SIZE)
                updated_digestsize = SHA256_DIGEST_SIZE;
        else if (digestsize == SHA384_DIGEST_SIZE)
                updated_digestsize = SHA512_DIGEST_SIZE;
        err = chcr_compute_partial_hash(shash, hmacctx->ipad,
                                        hmacctx->ipad, digestsize);
        if (err)
                goto out;
        chcr_change_order(hmacctx->ipad, updated_digestsize);

        err = chcr_compute_partial_hash(shash, hmacctx->opad,
                                        hmacctx->opad, digestsize);
        if (err)
                goto out;
        chcr_change_order(hmacctx->opad, updated_digestsize);
out:
        return err;
}

static int chcr_aes_xts_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                               unsigned int key_len)
{
        struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
        struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
        unsigned short context_size = 0;

        if ((key_len != (AES_KEYSIZE_128 << 1)) &&
            (key_len != (AES_KEYSIZE_256 << 1))) {
                crypto_tfm_set_flags((struct crypto_tfm *)tfm,
                                     CRYPTO_TFM_RES_BAD_KEY_LEN);
                ablkctx->enckey_len = 0;
                return -EINVAL;
        }

        memcpy(ablkctx->key, key, key_len);
        ablkctx->enckey_len = key_len;
        get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, key_len << 2);
        context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD + key_len) >> 4;
        ablkctx->key_ctx_hdr =
                FILL_KEY_CTX_HDR((key_len == AES_KEYSIZE_256) ?
                                 CHCR_KEYCTX_CIPHER_KEY_SIZE_128 :
                                 CHCR_KEYCTX_CIPHER_KEY_SIZE_256,
                                 CHCR_KEYCTX_NO_KEY, 1,
                                 0, context_size);
        ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_XTS;
        return 0;
}

static int chcr_sha_init(struct ahash_request *areq)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        int digestsize = crypto_ahash_digestsize(tfm);

        req_ctx->data_len = 0;
        req_ctx->reqlen = 0;
        req_ctx->reqbfr = req_ctx->bfr1;
        req_ctx->skbfr = req_ctx->bfr2;
        req_ctx->skb = NULL;
        req_ctx->result = 0;
        copy_hash_init_values(req_ctx->partial_hash, digestsize);
        return 0;
}

static int chcr_sha_cra_init(struct crypto_tfm *tfm)
{
        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct chcr_ahash_req_ctx));
        return chcr_device_init(crypto_tfm_ctx(tfm));
}

static int chcr_hmac_init(struct ahash_request *areq)
{
        struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
        struct crypto_ahash *rtfm = crypto_ahash_reqtfm(areq);
        struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
        struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
        unsigned int digestsize = crypto_ahash_digestsize(rtfm);
        unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

        chcr_sha_init(areq);
        req_ctx->data_len = bs;
        if (is_hmac(crypto_ahash_tfm(rtfm))) {
                if (digestsize == SHA224_DIGEST_SIZE)
                        memcpy(req_ctx->partial_hash, hmacctx->ipad,
                               SHA256_DIGEST_SIZE);
                else if (digestsize == SHA384_DIGEST_SIZE)
                        memcpy(req_ctx->partial_hash, hmacctx->ipad,
                               SHA512_DIGEST_SIZE);
                else
                        memcpy(req_ctx->partial_hash, hmacctx->ipad,
                               digestsize);
        }
        return 0;
}

static int chcr_hmac_cra_init(struct crypto_tfm *tfm)
{
        struct chcr_context *ctx = crypto_tfm_ctx(tfm);
        struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
        unsigned int digestsize =
                crypto_ahash_digestsize(__crypto_ahash_cast(tfm));

        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct chcr_ahash_req_ctx));
        hmacctx->base_hash = chcr_alloc_shash(digestsize);
        if (IS_ERR(hmacctx->base_hash))
                return PTR_ERR(hmacctx->base_hash);
        return chcr_device_init(crypto_tfm_ctx(tfm));
}

static void chcr_hmac_cra_exit(struct crypto_tfm *tfm)
{
        struct chcr_context *ctx = crypto_tfm_ctx(tfm);
        struct hmac_ctx *hmacctx = HMAC_CTX(ctx);

        if (hmacctx->base_hash) {
                chcr_free_shash(hmacctx->base_hash);
                hmacctx->base_hash = NULL;
        }
}

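/*
 * chcr_copy_assoc - copy the associated data from req->src to req->dst
 * using the preallocated null cipher, for the out-of-place AEAD path.
 */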
static int chcr_copy_assoc(struct aead_request *req,
                                struct chcr_aead_ctx *ctx)
{
        SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);

        skcipher_request_set_tfm(skreq, ctx->null);
        skcipher_request_set_callback(skreq, aead_request_flags(req),
                        NULL, NULL);
        skcipher_request_set_crypt(skreq, req->src, req->dst, req->assoclen,
                        NULL);

        return crypto_skcipher_encrypt(skreq);
}

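/*
 * Map the requested ICV length to the hardware HMAC truncation control
 * value; anything unrecognised falls back to no truncation.
 */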
static unsigned char get_hmac(unsigned int authsize)
{
        switch (authsize) {
        case ICV_8:
                return CHCR_SCMD_HMAC_CTRL_PL1;
        case ICV_10:
                return CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
        case ICV_12:
                return CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
        }
        return CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
}

1350 static struct sk_buff *create_authenc_wr(struct aead_request *req,
1351                                          unsigned short qid,
1352                                          int size,
1353                                          unsigned short op_type)
1354 {
1355         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1356         struct chcr_context *ctx = crypto_aead_ctx(tfm);
1357         struct uld_ctx *u_ctx = ULD_CTX(ctx);
1358         struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
1359         struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
1360         struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
1361         struct sk_buff *skb = NULL;
1362         struct chcr_wr *chcr_req;
1363         struct cpl_rx_phys_dsgl *phys_cpl;
1364         struct phys_sge_parm sg_param;
1365         struct scatterlist *src, *dst;
1366         struct scatterlist src_sg[2], dst_sg[2];
1367         unsigned int frags = 0, transhdr_len;
1368         unsigned int ivsize = crypto_aead_ivsize(tfm), dst_size = 0;
1369         unsigned int   kctx_len = 0;
1370         unsigned short stop_offset = 0;
1371         unsigned int  assoclen = req->assoclen;
1372         unsigned int  authsize = crypto_aead_authsize(tfm);
1373         int err = 0;
1374         int null = 0;
1375         gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
1376                 GFP_ATOMIC;
1377
1378         if (aeadctx->enckey_len == 0 || (req->cryptlen == 0))
1379                 goto err;
1380
1381         if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
1382                 goto err;
1383
1384         if (sg_nents_for_len(req->src, req->assoclen + req->cryptlen) < 0)
1385                 goto err;
1386         src = scatterwalk_ffwd(src_sg, req->src, req->assoclen);
1387         dst = src;
1388         if (req->src != req->dst) {
1389                 err = chcr_copy_assoc(req, aeadctx);
1390                 if (err)
1391                         return ERR_PTR(err);
1392                 dst = scatterwalk_ffwd(dst_sg, req->dst, req->assoclen);
1393         }
1394         if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_NULL) {
1395                 null = 1;
1396                 assoclen = 0;
1397         }
1398         reqctx->dst_nents = sg_nents_for_len(dst, req->cryptlen +
1399                                              (op_type ? -authsize : authsize));
1400         if (reqctx->dst_nents <= 0) {
1401                 pr_err("AUTHENC: Invalid destination sg entries\n");
1402                 goto err;
1403         }
1404         dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
1405         kctx_len = (ntohl(KEY_CONTEXT_CTX_LEN_V(aeadctx->key_ctx_hdr)) << 4)
1406                 - sizeof(chcr_req->key_ctx);
1407         transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
1408         skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
1409         if (!skb)
1410                 goto err;
1411
1412         /* LLD is going to write the sge hdr. */
1413         skb_reserve(skb, sizeof(struct sge_opaque_hdr));
1414
1415         /* Write WR */
1416         chcr_req = (struct chcr_wr *) __skb_put(skb, transhdr_len);
1417         memset(chcr_req, 0, transhdr_len);
1418
1419         stop_offset = (op_type == CHCR_ENCRYPT_OP) ? 0 : authsize;
1420
1421         /*
1422          * Input order is AAD, IV and payload, where the IV must be
1423          * included as part of the auth data. All other fields are
1424          * filled according to the hardware spec.
1425          */
1426         chcr_req->sec_cpl.op_ivinsrtofst =
1427                 FILL_SEC_CPL_OP_IVINSR(ctx->dev->tx_channel_id, 2,
1428                                        (ivsize ? (assoclen + 1) : 0));
1429         chcr_req->sec_cpl.pldlen = htonl(assoclen + ivsize + req->cryptlen);
1430         chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
1431                                         assoclen ? 1 : 0, assoclen,
1432                                         assoclen + ivsize + 1,
1433                                         (stop_offset & 0x1F0) >> 4);
1434         chcr_req->sec_cpl.cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(
1435                                         stop_offset & 0xF,
1436                                         null ? 0 : assoclen + ivsize + 1,
1437                                         stop_offset, stop_offset);
1438         chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type,
1439                                         (op_type == CHCR_ENCRYPT_OP) ? 1 : 0,
1440                                         CHCR_SCMD_CIPHER_MODE_AES_CBC,
1441                                         actx->auth_mode, aeadctx->hmac_ctrl,
1442                                         ivsize >> 1);
1443         chcr_req->sec_cpl.ivgen_hdrlen =  FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
1444                                          0, 1, dst_size);
1445
1446         chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
1447         if (op_type == CHCR_ENCRYPT_OP)
1448                 memcpy(chcr_req->key_ctx.key, aeadctx->key,
1449                        aeadctx->enckey_len);
1450         else
1451                 memcpy(chcr_req->key_ctx.key, actx->dec_rrkey,
1452                        aeadctx->enckey_len);
1453
1454         memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) <<
1455                                         4), actx->h_iopad, kctx_len -
1456                                 (DIV_ROUND_UP(aeadctx->enckey_len, 16) << 4));
1457
1458         phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
1459         sg_param.nents = reqctx->dst_nents;
1460         sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
1461         sg_param.qid = qid;
1462         sg_param.align = 0;
1463         if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, dst,
1464                                   &sg_param))
1465                 goto dstmap_fail;
1466
1467         skb_set_transport_header(skb, transhdr_len);
1468
1469         if (assoclen) {
1470                 /* AAD buffer in */
1471                 write_sg_to_skb(skb, &frags, req->src, assoclen);
1473         }
1474         write_buffer_to_skb(skb, &frags, req->iv, ivsize);
1475         write_sg_to_skb(skb, &frags, src, req->cryptlen);
1476         create_wreq(ctx, chcr_req, req, skb, kctx_len, size, 1,
1477                    sizeof(struct cpl_rx_phys_dsgl) + dst_size);
1478         reqctx->skb = skb;
1479         skb_get(skb);
1480
1481         return skb;
1482 dstmap_fail:
1483         /* ivmap_fail: */
1484         kfree_skb(skb);
1485 err:
1486         return ERR_PTR(-EINVAL);
1487 }
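
/*
 * Key-context layout assembled above (an illustrative sketch): the cipher
 * key is padded to a 16-byte multiple, followed by the precomputed
 * ipad/opad partial hashes, e.g. for AES-128 with SHA-256:
 *
 *	[ ctx_hdr | cipher key (16) | H(K ^ ipad) (32) | H(K ^ opad) (32) ]
 *
 * kctx_len recovers this size from key_ctx_hdr (stored in 16-byte units)
 * minus the fixed _key_ctx header.
 */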
1488
1489 static void aes_gcm_empty_pld_pad(struct scatterlist *sg,
1490                                   unsigned short offset)
1491 {
1492         struct page *spage;
1493         unsigned char *addr;
1494
1495         spage = sg_page(sg);
1496         get_page(spage); /* so that it is not freed by the NIC */
1497 #ifdef KMAP_ATOMIC_ARGS
1498         addr = kmap_atomic(spage, KM_SOFTIRQ0);
1499 #else
1500         addr = kmap_atomic(spage);
1501 #endif
1502         memset(addr + sg->offset, 0, offset + 1);
1503
1504         kunmap_atomic(addr);
1505 }
1506
1507 static int set_msg_len(u8 *block, unsigned int msglen, int csize)
1508 {
1509         __be32 data;
1510
1511         memset(block, 0, csize);
1512         block += csize;
1513
1514         if (csize >= 4)
1515                 csize = 4;
1516         else if (msglen > (unsigned int)(1 << (8 * csize)))
1517                 return -EOVERFLOW;
1518
1519         data = cpu_to_be32(msglen);
1520         memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
1521
1522         return 0;
1523 }
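
/*
 * Worked example: with csize = 3 (L = 3) and msglen = 0x010203, the
 * field becomes { 0x01, 0x02, 0x03 } - the message length in big-endian
 * order, right-aligned in csize bytes. A length that does not fit in
 * csize bytes yields -EOVERFLOW.
 */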
1524
1525 static int generate_b0(struct aead_request *req,
1526                        struct chcr_aead_ctx *aeadctx,
1527                        unsigned short op_type)
1528 {
1529         unsigned int l, lp, m;
1531         struct crypto_aead *aead = crypto_aead_reqtfm(req);
1532         struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
1533         u8 *b0 = reqctx->scratch_pad;
1534
1535         m = crypto_aead_authsize(aead);
1536
1537         memcpy(b0, reqctx->iv, 16);
1538
1539         lp = b0[0];
1540         l = lp + 1;
1541
1542         /* set m, bits 3-5 */
1543         *b0 |= (8 * ((m - 2) / 2));
1544
1545         /* set adata, bit 6, if associated data is used */
1546         if (req->assoclen)
1547                 *b0 |= 64;
1548         return set_msg_len(b0 + 16 - l,
1549                            (op_type == CHCR_DECRYPT_OP) ?
1550                            req->cryptlen - m : req->cryptlen, l);
1551 }
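
/*
 * Worked example of the B0 flags byte built above (RFC 3610): with a
 * 4-byte length field (iv[0] = L' = 3), an 8-byte tag (m = 8) and AAD
 * present:
 *
 *	flags = 64*Adata + 8*M' + L' = 64 + 8 * ((8 - 2) / 2) + 3 = 0x5b
 */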
1552
1553 static inline int crypto_ccm_check_iv(const u8 *iv)
1554 {
1555         /* 2 <= L <= 8, so 1 <= L' <= 7. */
1556         if (iv[0] < 1 || iv[0] > 7)
1557                 return -EINVAL;
1558
1559         return 0;
1560 }
1561
1562 static int ccm_format_packet(struct aead_request *req,
1563                              struct chcr_aead_ctx *aeadctx,
1564                              unsigned int sub_type,
1565                              unsigned short op_type)
1566 {
1567         struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
1568         int rc = 0;
1569
1570         if (req->assoclen > T5_MAX_AAD_SIZE) {
1571                 pr_err("CCM: Unsupported AAD length, must not exceed %d\n",
1572                        T5_MAX_AAD_SIZE);
1573                 return -EINVAL;
1574         }
1575         if (sub_type == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
1576                 reqctx->iv[0] = 3;
1577                 memcpy(reqctx->iv + 1, &aeadctx->salt[0], 3);
1578                 memcpy(reqctx->iv + 4, req->iv, 8);
1579                 memset(reqctx->iv + 12, 0, 4);
1580                 *((unsigned short *)(reqctx->scratch_pad + 16)) =
1581                         htons(req->assoclen - 8);
1582         } else {
1583                 memcpy(reqctx->iv, req->iv, 16);
1584                 *((unsigned short *)(reqctx->scratch_pad + 16)) =
1585                         htons(req->assoclen);
1586         }
1587         rc = generate_b0(req, aeadctx, op_type);
1588         /* zero the ctr value */
1589         memset(reqctx->iv + 15 - reqctx->iv[0], 0, reqctx->iv[0] + 1);
1590         return rc;
1591 }
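
/*
 * RFC 4309 nonce layout built above (sketch):
 *
 *	iv[0]      = 3		(L' for a 4-byte length field)
 *	iv[1..3]   = salt	(from setkey)
 *	iv[4..11]  = req->iv	(per-packet IV)
 *	iv[12..15] = 0		(counter, zeroed before use)
 *
 * The 16-bit big-endian value at scratch_pad[16] becomes the AAD length
 * field that precedes the associated data on the wire.
 */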
1592
1593 static void fill_sec_cpl_for_aead(struct cpl_tx_sec_pdu *sec_cpl,
1594                                   unsigned int dst_size,
1595                                   struct aead_request *req,
1596                                   unsigned short op_type,
1597                                   struct chcr_context *chcrctx)
1598 {
1599         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1600         unsigned int ivsize = AES_BLOCK_SIZE;
1601         unsigned int cipher_mode = CHCR_SCMD_CIPHER_MODE_AES_CCM;
1602         unsigned int mac_mode = CHCR_SCMD_AUTH_MODE_CBCMAC;
1603         unsigned int c_id = chcrctx->dev->tx_channel_id;
1604         unsigned int ccm_xtra;
1605         unsigned char tag_offset = 0, auth_offset = 0;
1606         unsigned char hmac_ctrl = get_hmac(crypto_aead_authsize(tfm));
1607         unsigned int assoclen;
1608
1609         if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
1610                 assoclen = req->assoclen - 8;
1611         else
1612                 assoclen = req->assoclen;
1613         ccm_xtra = CCM_B0_SIZE +
1614                 ((assoclen) ? CCM_AAD_FIELD_SIZE : 0);
1615
1616         auth_offset = req->cryptlen ?
1617                 (assoclen + ivsize + 1 + ccm_xtra) : 0;
1618         if (op_type == CHCR_DECRYPT_OP) {
1619                 if (crypto_aead_authsize(tfm) != req->cryptlen)
1620                         tag_offset = crypto_aead_authsize(tfm);
1621                 else
1622                         auth_offset = 0;
1623         }
1624
1626         sec_cpl->op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(c_id,
1627                                          2, (ivsize ?  (assoclen + 1) :  0) +
1628                                          ccm_xtra);
1629         sec_cpl->pldlen =
1630                 htonl(assoclen + ivsize + req->cryptlen + ccm_xtra);
1631         /* For CCM, B0 is always present, so the AAD always starts at 1 */
1632         sec_cpl->aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
1633                                         1, assoclen + ccm_xtra, assoclen
1634                                         + ivsize + 1 + ccm_xtra, 0);
1635
1636         sec_cpl->cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(0,
1637                                         auth_offset, tag_offset,
1638                                         (op_type == CHCR_ENCRYPT_OP) ? 0 :
1639                                         crypto_aead_authsize(tfm));
1640         sec_cpl->seqno_numivs =  FILL_SEC_CPL_SCMD0_SEQNO(op_type,
1641                                         (op_type == CHCR_ENCRYPT_OP) ? 0 : 1,
1642                                         cipher_mode, mac_mode, hmac_ctrl,
1643                                         ivsize >> 1);
1644
1645         sec_cpl->ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1, 0,
1646                                         1, dst_size);
1647 }
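
/*
 * The offsets above describe the CCM input as the hardware sees it
 * (illustrative): B0, and the 2-byte AAD length field when AAD is
 * present, are prepended as ccm_xtra, so the AAD starts at offset 1 and
 * the ciphertext at assoclen + ivsize + 1 + ccm_xtra.
 */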
1648
1649 static int aead_ccm_validate_input(unsigned short op_type,
1650                                    struct aead_request *req,
1651                                    struct chcr_aead_ctx *aeadctx,
1652                                    unsigned int sub_type)
1653 {
1654         if (sub_type != CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
1655                 if (crypto_ccm_check_iv(req->iv)) {
1656                         pr_err("CCM: IV check fails\n");
1657                         return -EINVAL;
1658                 }
1659         } else {
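                /*
                 * ESP AAD is SPI + 32-bit sequence number (8 bytes) or
                 * SPI + 64-bit ESN (12 bytes); rfc4309 appends the
                 * 8-byte IV to it, hence 16 or 20 bytes in total.
                 */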
1660                 if (req->assoclen != 16 && req->assoclen != 20) {
1661                         pr_err("RFC4309: Invalid AAD length %u\n",
1662                                req->assoclen);
1663                         return -EINVAL;
1664                 }
1665         }
1666         if (aeadctx->enckey_len == 0) {
1667                 pr_err("CCM: Encryption key not set\n");
1668                 return -EINVAL;
1669         }
1670         return 0;
1671 }
1672
1673 static unsigned int fill_aead_req_fields(struct sk_buff *skb,
1674                                          struct aead_request *req,
1675                                          struct scatterlist *src,
1676                                          unsigned int ivsize,
1677                                          struct chcr_aead_ctx *aeadctx)
1678 {
1679         unsigned int frags = 0;
1680         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1681         struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
1682         /* B0 and AAD length (if available) */
1684         write_buffer_to_skb(skb, &frags, reqctx->scratch_pad, CCM_B0_SIZE +
1685                                 (req->assoclen ?  CCM_AAD_FIELD_SIZE : 0));
1686         if (req->assoclen) {
1687                 if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
1688                         write_sg_to_skb(skb, &frags, req->src,
1689                                         req->assoclen - 8);
1690                 else
1691                         write_sg_to_skb(skb, &frags, req->src, req->assoclen);
1692         }
1693         write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
1694         if (req->cryptlen)
1695                 write_sg_to_skb(skb, &frags, src, req->cryptlen);
1696
1697         return frags;
1698 }
1699
1700 static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
1701                                           unsigned short qid,
1702                                           int size,
1703                                           unsigned short op_type)
1704 {
1705         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1706         struct chcr_context *ctx = crypto_aead_ctx(tfm);
1707         struct uld_ctx *u_ctx = ULD_CTX(ctx);
1708         struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
1709         struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
1710         struct sk_buff *skb = NULL;
1711         struct chcr_wr *chcr_req;
1712         struct cpl_rx_phys_dsgl *phys_cpl;
1713         struct phys_sge_parm sg_param;
1714         struct scatterlist *src, *dst;
1715         struct scatterlist src_sg[2], dst_sg[2];
1716         unsigned int frags = 0, transhdr_len, ivsize = AES_BLOCK_SIZE;
1717         unsigned int dst_size = 0, kctx_len;
1718         unsigned int sub_type;
1719         unsigned int authsize = crypto_aead_authsize(tfm);
1720         int err = 0;
1721         gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
1722                 GFP_ATOMIC;
1723
1725         if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
1726                 goto err;
1727
1728         if (sg_nents_for_len(req->src, req->assoclen + req->cryptlen) < 0)
1729                 goto err;
1730         sub_type = get_aead_subtype(tfm);
1731         src = scatterwalk_ffwd(src_sg, req->src, req->assoclen);
1732         dst = src;
1733         if (req->src != req->dst) {
1734                 err = chcr_copy_assoc(req, aeadctx);
1735                 if (err) {
1736                         pr_err("AAD copy to destination buffer failed\n");
1737                         return ERR_PTR(err);
1738                 }
1739                 dst = scatterwalk_ffwd(dst_sg, req->dst, req->assoclen);
1740         }
1741         reqctx->dst_nents = sg_nents_for_len(dst, req->cryptlen +
1742                                              (op_type ? -authsize : authsize));
1743         if (reqctx->dst_nents <= 0) {
1744                 pr_err("CCM: Invalid destination sg entries\n");
1745                 goto err;
1746         }
1747
1749         if (aead_ccm_validate_input(op_type, req, aeadctx, sub_type))
1750                 goto err;
1751
1752         dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
1753         kctx_len = ((DIV_ROUND_UP(aeadctx->enckey_len, 16)) << 4) * 2;
1754         transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
1755         skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)),  flags);
1756
1757         if (!skb)
1758                 goto err;
1759
1760         skb_reserve(skb, sizeof(struct sge_opaque_hdr));
1761
1762         chcr_req = (struct chcr_wr *) __skb_put(skb, transhdr_len);
1763         memset(chcr_req, 0, transhdr_len);
1764
1765         fill_sec_cpl_for_aead(&chcr_req->sec_cpl, dst_size, req, op_type, ctx);
1766
1767         chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
1768         memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
1769         memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) *
1770                                         16), aeadctx->key, aeadctx->enckey_len);
1771
1772         phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
1773         if (ccm_format_packet(req, aeadctx, sub_type, op_type))
1774                 goto dstmap_fail;
1775
1776         sg_param.nents = reqctx->dst_nents;
1777         sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
1778         sg_param.qid = qid;
1779         sg_param.align = 0;
1780         if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, dst,
1781                                   &sg_param))
1782                 goto dstmap_fail;
1783
1784         skb_set_transport_header(skb, transhdr_len);
1785         frags = fill_aead_req_fields(skb, req, src, ivsize, aeadctx);
1786         create_wreq(ctx, chcr_req, req, skb, kctx_len, 0, 1,
1787                     sizeof(struct cpl_rx_phys_dsgl) + dst_size);
1788         reqctx->skb = skb;
1789         skb_get(skb);
1790         return skb;
1791 dstmap_fail:
1792         kfree_skb(skb);
1794 err:
1795         return ERR_PTR(-EINVAL);
1796 }
1797
1798 static struct sk_buff *create_gcm_wr(struct aead_request *req,
1799                                      unsigned short qid,
1800                                      int size,
1801                                      unsigned short op_type)
1802 {
1803         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1804         struct chcr_context *ctx = crypto_aead_ctx(tfm);
1805         struct uld_ctx *u_ctx = ULD_CTX(ctx);
1806         struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
1807         struct chcr_aead_reqctx  *reqctx = aead_request_ctx(req);
1808         struct sk_buff *skb = NULL;
1809         struct chcr_wr *chcr_req;
1810         struct cpl_rx_phys_dsgl *phys_cpl;
1811         struct phys_sge_parm sg_param;
1812         struct scatterlist *src, *dst;
1813         struct scatterlist src_sg[2], dst_sg[2];
1814         unsigned int frags = 0, transhdr_len;
1815         unsigned int ivsize = AES_BLOCK_SIZE;
1816         unsigned int dst_size = 0, kctx_len;
1817         unsigned char tag_offset = 0;
1818         unsigned int crypt_len = 0, assoclen = req->assoclen;
1819         unsigned int authsize = crypto_aead_authsize(tfm);
1820         unsigned char hmac_ctrl = get_hmac(authsize);
1821         int err = 0;
1822         gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
1823                 GFP_ATOMIC;
1824
1825         /* validate key size */
1826         if (aeadctx->enckey_len == 0)
1827                 goto err;
1828
1829         if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
1830                 goto err;
1831
1832         if (sg_nents_for_len(req->src, req->assoclen + req->cryptlen) < 0)
1833                 goto err;
1834
1835         src = scatterwalk_ffwd(src_sg, req->src, req->assoclen);
1836         dst = src;
1837         if (req->src != req->dst) {
1838                 err = chcr_copy_assoc(req, aeadctx);
1839                 if (err)
1840                         return  ERR_PTR(err);
1841                 dst = scatterwalk_ffwd(dst_sg, req->dst, req->assoclen);
1842         }
1843
1844         if (!req->cryptlen)
1845                 /* The hardware does not support a null payload, so
1846                  * software pads it with one cipher block.
1847                  */
1848                 crypt_len = AES_BLOCK_SIZE;
1849         else
1850                 crypt_len = req->cryptlen;
1851         reqctx->dst_nents = sg_nents_for_len(dst, req->cryptlen +
1852                                              (op_type ? -authsize : authsize));
1853         if (reqctx->dst_nents <= 0) {
1854                 pr_err("GCM: Invalid destination sg entries\n");
1855                 goto err;
1856         }
1857
1859         dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
1860         kctx_len = ((DIV_ROUND_UP(aeadctx->enckey_len, 16)) << 4) +
1861                 AEAD_H_SIZE;
1862         transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
1863         skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
1864         if (!skb)
1865                 goto err;
1866
1867         /* NIC driver is going to write the sge hdr. */
1868         skb_reserve(skb, sizeof(struct sge_opaque_hdr));
1869
1870         chcr_req = (struct chcr_wr *)__skb_put(skb, transhdr_len);
1871         memset(chcr_req, 0, transhdr_len);
1872
1873         if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106)
1874                 assoclen = req->assoclen - 8;
1875
1876         tag_offset = (op_type == CHCR_ENCRYPT_OP) ? 0 : authsize;
1877         chcr_req->sec_cpl.op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(
1878                                         ctx->dev->tx_channel_id, 2, (ivsize ?
1879                                         (assoclen + 1) : 0));
1880         chcr_req->sec_cpl.pldlen = htonl(assoclen + ivsize + crypt_len);
1881         chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
1882                                         assoclen ? 1 : 0, assoclen,
1883                                         assoclen + ivsize + 1, 0);
1884         if (req->cryptlen) {
1885                 chcr_req->sec_cpl.cipherstop_lo_authinsert =
1886                         FILL_SEC_CPL_AUTHINSERT(0, assoclen + ivsize + 1,
1887                                                 tag_offset, tag_offset);
1888                 chcr_req->sec_cpl.seqno_numivs =
1889                         FILL_SEC_CPL_SCMD0_SEQNO(op_type, (op_type ==
1890                                         CHCR_ENCRYPT_OP) ? 1 : 0,
1891                                         CHCR_SCMD_CIPHER_MODE_AES_GCM,
1892                                         CHCR_SCMD_AUTH_MODE_GHASH, hmac_ctrl,
1893                                         ivsize >> 1);
1894         } else {
1895                 chcr_req->sec_cpl.cipherstop_lo_authinsert =
1896                         FILL_SEC_CPL_AUTHINSERT(0, 0, 0, 0);
1897                 chcr_req->sec_cpl.seqno_numivs =
1898                         FILL_SEC_CPL_SCMD0_SEQNO(op_type,
1899                                         (op_type ==  CHCR_ENCRYPT_OP) ?
1900                                         1 : 0, CHCR_SCMD_CIPHER_MODE_AES_CBC,
1901                                         0, 0, ivsize >> 1);
1902         }
1903         chcr_req->sec_cpl.ivgen_hdrlen =  FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
1904                                         0, 1, dst_size);
1905         chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
1906         memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
1907         memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) *
1908                                 16), GCM_CTX(aeadctx)->ghash_h, AEAD_H_SIZE);
1909
1910         /* Prepare the 16-byte IV: SALT | IV | 0x00000001 (counter) */
1912         if (get_aead_subtype(tfm) ==
1913             CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) {
1914                 memcpy(reqctx->iv, aeadctx->salt, 4);
1915                 memcpy(reqctx->iv + 4, req->iv, 8);
1916         } else {
1917                 memcpy(reqctx->iv, req->iv, 12);
1918         }
1919         *((unsigned int *)(reqctx->iv + 12)) = htonl(0x01);
1920
1921         phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
1922         sg_param.nents = reqctx->dst_nents;
1923         sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
1924         sg_param.qid = qid;
1925         sg_param.align = 0;
1926         if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, dst,
1927                                   &sg_param))
1928                 goto dstmap_fail;
1929
1930         skb_set_transport_header(skb, transhdr_len);
1931
1932         write_sg_to_skb(skb, &frags, req->src, assoclen);
1933
1934         write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
1935
1936         if (req->cryptlen) {
1937                 write_sg_to_skb(skb, &frags, src, req->cryptlen);
1938         } else {
1939                 aes_gcm_empty_pld_pad(req->dst, authsize - 1);
1940                 write_sg_to_skb(skb, &frags, dst, crypt_len);
1941         }
1942
1943         create_wreq(ctx, chcr_req, req, skb, kctx_len, size, 1,
1944                         sizeof(struct cpl_rx_phys_dsgl) + dst_size);
1945         reqctx->skb = skb;
1946         skb_get(skb);
1947         return skb;
1948
1949 dstmap_fail:
1950         /* ivmap_fail: */
1951         kfree_skb(skb);
1952 err:
1953         return ERR_PTR(-EINVAL);
1954 }
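
/*
 * RFC 4106 example of the J0 block prepared above (sketch): with the
 * 4-byte salt from setkey and the 8-byte per-packet IV,
 *
 *	J0 = salt[0..3] | iv[0..7] | 00 00 00 01
 *
 * For plain gcm(aes) the caller supplies the full 12-byte IV instead.
 */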
1956
1959 static int chcr_aead_cra_init(struct crypto_aead *tfm)
1960 {
1961         struct chcr_context *ctx = crypto_aead_ctx(tfm);
1962         struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
1963
1964         crypto_aead_set_reqsize(tfm, sizeof(struct chcr_aead_reqctx));
1965         aeadctx->null = crypto_get_default_null_skcipher();
1966         if (IS_ERR(aeadctx->null))
1967                 return PTR_ERR(aeadctx->null);
1968         return chcr_device_init(ctx);
1969 }
1970
1971 static void chcr_aead_cra_exit(struct crypto_aead *tfm)
1972 {
1973         crypto_put_default_null_skcipher();
1974 }
1975
1976 static int chcr_authenc_null_setauthsize(struct crypto_aead *tfm,
1977                                         unsigned int authsize)
1978 {
1979         struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
1980
1981         aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NOP;
1982         aeadctx->mayverify = VERIFY_HW;
1983         return 0;
1984 }

1985 static int chcr_authenc_setauthsize(struct crypto_aead *tfm,
1986                                     unsigned int authsize)
1987 {
1988         struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
1989         u32 maxauth = crypto_aead_maxauthsize(tfm);
1990
1991         /* SHA1 authsize in IPsec is 12 instead of 10, i.e. maxauthsize / 2
1992          * does not hold for SHA1, so the authsize == 12 check must come
1993          * before authsize == (maxauth >> 1).
1994          */
1995         if (authsize == ICV_4) {
1996                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
1997                 aeadctx->mayverify = VERIFY_HW;
1998         } else if (authsize == ICV_6) {
1999                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
2000                 aeadctx->mayverify = VERIFY_HW;
2001         } else if (authsize == ICV_10) {
2002                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
2003                 aeadctx->mayverify = VERIFY_HW;
2004         } else if (authsize == ICV_12) {
2005                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
2006                 aeadctx->mayverify = VERIFY_HW;
2007         } else if (authsize == ICV_14) {
2008                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
2009                 aeadctx->mayverify = VERIFY_HW;
2010         } else if (authsize == (maxauth >> 1)) {
2011                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
2012                 aeadctx->mayverify = VERIFY_HW;
2013         } else if (authsize == maxauth) {
2014                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
2015                 aeadctx->mayverify = VERIFY_HW;
2016         } else {
2017                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
2018                 aeadctx->mayverify = VERIFY_SW;
2019         }
2020         return 0;
2021 }
2022
2024 static int chcr_gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
2025 {
2026         struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
2027
2028         switch (authsize) {
2029         case ICV_4:
2030                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
2031                 aeadctx->mayverify = VERIFY_HW;
2032                 break;
2033         case ICV_8:
2034                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
2035                 aeadctx->mayverify = VERIFY_HW;
2036                 break;
2037         case ICV_12:
2038                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
2039                 aeadctx->mayverify = VERIFY_HW;
2040                 break;
2041         case ICV_14:
2042                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
2043                 aeadctx->mayverify = VERIFY_HW;
2044                 break;
2045         case ICV_16:
2046                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
2047                 aeadctx->mayverify = VERIFY_HW;
2048                 break;
2049         case ICV_13:
2050         case ICV_15:
2051                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
2052                 aeadctx->mayverify = VERIFY_SW;
2053                 break;
2054         default:
2055                 crypto_tfm_set_flags((struct crypto_tfm *)tfm,
2056                                      CRYPTO_TFM_RES_BAD_KEY_LEN);
2058                 return -EINVAL;
2059         }
2060         return 0;
2061 }
2062
2063 static int chcr_4106_4309_setauthsize(struct crypto_aead *tfm,
2064                                           unsigned int authsize)
2065 {
2066         struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
2067
2068         switch (authsize) {
2069         case ICV_8:
2070                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
2071                 aeadctx->mayverify = VERIFY_HW;
2072                 break;
2073         case ICV_12:
2074                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
2075                 aeadctx->mayverify = VERIFY_HW;
2076                 break;
2077         case ICV_16:
2078                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
2079                 aeadctx->mayverify = VERIFY_HW;
2080                 break;
2081         default:
2082                 crypto_tfm_set_flags((struct crypto_tfm *)tfm,
2083                                      CRYPTO_TFM_RES_BAD_KEY_LEN);
2084                 return -EINVAL;
2085         }
2086         return 0;
2087 }
2088
2089 static int chcr_ccm_setauthsize(struct crypto_aead *tfm,
2090                                 unsigned int authsize)
2091 {
2092         struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
2093
2094         switch (authsize) {
2095         case ICV_4:
2096                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
2097                 aeadctx->mayverify = VERIFY_HW;
2098                 break;
2099         case ICV_6:
2100                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
2101                 aeadctx->mayverify = VERIFY_HW;
2102                 break;
2103         case ICV_8:
2104                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
2105                 aeadctx->mayverify = VERIFY_HW;
2106                 break;
2107         case ICV_10:
2108                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
2109                 aeadctx->mayverify = VERIFY_HW;
2110                 break;
2111         case ICV_12:
2112                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
2113                 aeadctx->mayverify = VERIFY_HW;
2114                 break;
2115         case ICV_14:
2116                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
2117                 aeadctx->mayverify = VERIFY_HW;
2118                 break;
2119         case ICV_16:
2120                 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
2121                 aeadctx->mayverify = VERIFY_HW;
2122                 break;
2123         default:
2124                 crypto_tfm_set_flags((struct crypto_tfm *)tfm,
2125                                      CRYPTO_TFM_RES_BAD_KEY_LEN);
2126                 return -EINVAL;
2127         }
2128         return 0;
2129 }
2130
2131 static int chcr_aead_ccm_setkey(struct crypto_aead *aead,
2132                                 const u8 *key,
2133                                 unsigned int keylen)
2134 {
2135         struct chcr_context *ctx = crypto_aead_ctx(aead);
2136         struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
2137         unsigned char ck_size, mk_size;
2138         int key_ctx_size = 0;
2139
2140         /* Validate the key length before copying it into the context. */
2141         if (keylen == AES_KEYSIZE_128) {
2142                 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
2143                 mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_128;
2144         } else if (keylen == AES_KEYSIZE_192) {
2145                 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
2146                 mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_192;
2147         } else if (keylen == AES_KEYSIZE_256) {
2148                 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
2149                 mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
2150         } else {
2151                 crypto_tfm_set_flags((struct crypto_tfm *)aead,
2152                                      CRYPTO_TFM_RES_BAD_KEY_LEN);
2153                 aeadctx->enckey_len = 0;
2154                 return -EINVAL;
2155         }
2156         memcpy(aeadctx->key, key, keylen);
2157         aeadctx->enckey_len = keylen;
2158         key_ctx_size = sizeof(struct _key_ctx) +
2159                 ((DIV_ROUND_UP(keylen, 16)) << 4) * 2;
2160         aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, mk_size, 0, 0,
2161                                                 key_ctx_size >> 4);
2161         return 0;
2162 }
2163
2164 static int chcr_aead_rfc4309_setkey(struct crypto_aead *aead, const u8 *key,
2165                                     unsigned int keylen)
2166 {
2167         struct chcr_context *ctx = crypto_aead_ctx(aead);
2168         struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
2169
2170         if (keylen < 3) {
2171                 crypto_tfm_set_flags((struct crypto_tfm *)aead,
2172                                      CRYPTO_TFM_RES_BAD_KEY_LEN);
2173                 aeadctx->enckey_len = 0;
2174                 return -EINVAL;
2175         }
2176         keylen -= 3;
2177         memcpy(aeadctx->salt, key + keylen, 3);
2178         return chcr_aead_ccm_setkey(aead, key, keylen);
2179 }
2180
2181 static int chcr_gcm_setkey(struct crypto_aead *aead, const u8 *key,
2182                            unsigned int keylen)
2183 {
2184         struct chcr_context *ctx = crypto_aead_ctx(aead);
2185         struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
2186         struct chcr_gcm_ctx *gctx = GCM_CTX(aeadctx);
2187         struct blkcipher_desc h_desc;
2188         struct scatterlist src[1];
2189         unsigned int ck_size;
2190         int ret = 0, key_ctx_size = 0;
2191
2192         if (get_aead_subtype(aead) ==
2193             CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) {
2194                 keylen -= 4;  /* nonce/salt is present in the last 4 bytes */
2195                 memcpy(aeadctx->salt, key + keylen, 4);
2196         }
2197         if (keylen == AES_KEYSIZE_128) {
2198                 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
2199         } else if (keylen == AES_KEYSIZE_192) {
2200                 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
2201         } else if (keylen == AES_KEYSIZE_256) {
2202                 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
2203         } else {
2204                 crypto_tfm_set_flags((struct crypto_tfm *)aead,
2205                                      CRYPTO_TFM_RES_BAD_KEY_LEN);
2206                 aeadctx->enckey_len = 0;
2207                 pr_err("GCM: Invalid key length %d\n", keylen);
2208                 ret = -EINVAL;
2209                 goto out;
2210         }
2211
2212         memcpy(aeadctx->key, key, keylen);
2213         aeadctx->enckey_len = keylen;
2214         key_ctx_size = sizeof(struct _key_ctx) +
2215                 ((DIV_ROUND_UP(keylen, 16)) << 4) +
2216                 AEAD_H_SIZE;
2217         aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size,
2218                                                 CHCR_KEYCTX_MAC_KEY_SIZE_128,
2219                                                 0, 0,
2220                                                 key_ctx_size >> 4);
2221         /* Calculate H = CIPH(K, 0 repeated 16 times) using the sync AES
2222          * blkcipher; the result goes into the key context.
2223          */
2224         h_desc.tfm = crypto_alloc_blkcipher("cbc(aes-generic)", 0, 0);
2225         if (IS_ERR(h_desc.tfm)) {
2226                 aeadctx->enckey_len = 0;
2227                 ret = -ENOMEM;
2228                 goto out;
2229         }
2230         h_desc.flags = 0;
2231         ret = crypto_blkcipher_setkey(h_desc.tfm, key, keylen);
2232         if (ret) {
2233                 aeadctx->enckey_len = 0;
2234                 goto out1;
2235         }
2236         memset(gctx->ghash_h, 0, AEAD_H_SIZE);
2237         sg_init_one(&src[0], gctx->ghash_h, AEAD_H_SIZE);
2238         ret = crypto_blkcipher_encrypt(&h_desc, &src[0], &src[0], AEAD_H_SIZE);
2239
2240 out1:
2241         crypto_free_blkcipher(h_desc.tfm);
2242 out:
2243         return ret;
2244 }
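
/*
 * Illustrative alternative for the H computation above (a sketch, not
 * what this driver uses): H = E_K(0^128) is a single AES block, so a
 * plain single-block cipher would do as well, assuming "aes-generic"
 * is available:
 *
 *	struct crypto_cipher *cipher = crypto_alloc_cipher("aes-generic",
 *							   0, 0);
 *
 *	if (!IS_ERR(cipher)) {
 *		crypto_cipher_setkey(cipher, key, keylen);
 *		memset(gctx->ghash_h, 0, AEAD_H_SIZE);
 *		crypto_cipher_encrypt_one(cipher, gctx->ghash_h,
 *					  gctx->ghash_h);
 *		crypto_free_cipher(cipher);
 *	}
 *
 * The CBC construction above is equivalent because the zero IV XORed
 * into a zero block leaves the block unchanged.
 */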
2245
2246 static int chcr_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
2247                                    unsigned int keylen)
2248 {
2249         struct chcr_context *ctx = crypto_aead_ctx(authenc);
2250         struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
2251         struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
2252         /* contains both the auth and cipher keys */
2253         struct crypto_authenc_keys keys;
2254         unsigned int bs;
2255         unsigned int max_authsize = crypto_aead_alg(authenc)->maxauthsize;
2256         int err = 0, i, key_ctx_len = 0;
2257         unsigned char ck_size = 0;
2258         unsigned char pad[CHCR_HASH_MAX_BLOCK_SIZE_128] = { 0 };
2259         struct crypto_shash *base_hash = NULL;
2260         struct algo_param param;
2261         int align;
2262         u8 *o_ptr = NULL;
2263
2264         if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
2265                 crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
2266                 goto out;
2267         }
2268
2269         if (get_alg_config(&param, max_authsize)) {
2270                 pr_err("chcr : Unsupported digest size\n");
2271                 goto out;
2272         }
2273         if (keys.enckeylen == AES_KEYSIZE_128) {
2274                 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
2275         } else if (keys.enckeylen == AES_KEYSIZE_192) {
2276                 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
2277         } else if (keys.enckeylen == AES_KEYSIZE_256) {
2278                 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
2279         } else {
2280                 pr_err("chcr : Unsupported cipher key\n");
2281                 goto out;
2282         }
2283
2284         /* Copy only the encryption key. The auth key is consumed here to
2285          * generate h(ipad) and h(opad), so it is not stored in the
2286          * context.
2287          */
2288         memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
2289         aeadctx->enckey_len = keys.enckeylen;
2290         get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
2291                             aeadctx->enckey_len << 3);
2292
2293         base_hash  = chcr_alloc_shash(max_authsize);
2294         if (IS_ERR(base_hash)) {
2295                 pr_err("chcr : Base driver cannot be loaded\n");
2296                 goto out;
2297         }
2298         {
2299                 SHASH_DESC_ON_STACK(shash, base_hash);
2300                 shash->tfm = base_hash;
2301                 shash->flags = crypto_shash_get_flags(base_hash);
2302                 bs = crypto_shash_blocksize(base_hash);
2303                 align = KEYCTX_ALIGN_PAD(max_authsize);
2304                 o_ptr =  actx->h_iopad + param.result_size + align;
2305
2306                 if (keys.authkeylen > bs) {
2307                         err = crypto_shash_digest(shash, keys.authkey,
2308                                                   keys.authkeylen,
2309                                                   o_ptr);
2310                         if (err) {
2311                                 pr_err("chcr : Hashing of the auth key failed\n");
2312                                 goto out;
2313                         }
2314                         keys.authkeylen = max_authsize;
2315                 } else
2316                         memcpy(o_ptr, keys.authkey, keys.authkeylen);
2317
2318                 /* Compute the ipad digest */
2319                 memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
2320                 memcpy(pad, o_ptr, keys.authkeylen);
2321                 for (i = 0; i < bs >> 2; i++)
2322                         *((unsigned int *)pad + i) ^= IPAD_DATA;
2323
2324                 if (chcr_compute_partial_hash(shash, pad, actx->h_iopad,
2325                                               max_authsize))
2326                         goto out;
2327                 /* Compute the opad digest */
2328                 memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
2329                 memcpy(pad, o_ptr, keys.authkeylen);
2330                 for (i = 0; i < bs >> 2; i++)
2331                         *((unsigned int *)pad + i) ^= OPAD_DATA;
2332
2333                 if (chcr_compute_partial_hash(shash, pad, o_ptr, max_authsize))
2334                         goto out;
2335
2336                 /* convert the ipad and opad digest to network order */
2337                 chcr_change_order(actx->h_iopad, param.result_size);
2338                 chcr_change_order(o_ptr, param.result_size);
2339                 key_ctx_len = sizeof(struct _key_ctx) +
2340                         ((DIV_ROUND_UP(keys.enckeylen, 16)) << 4) +
2341                         (param.result_size + align) * 2;
2342                 aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, param.mk_size,
2343                                                 0, 1, key_ctx_len >> 4);
2344                 actx->auth_mode = param.auth_mode;
2345                 chcr_free_shash(base_hash);
2346
2347                 return 0;
2348         }
2349 out:
2350         aeadctx->enckey_len = 0;
2351         if (base_hash)
2352                 chcr_free_shash(base_hash);
2353         return -EINVAL;
2354 }
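
/*
 * The precomputation above follows RFC 2104: the auth key is padded
 * (or first digested, when longer than a block) to the block size,
 * XORed with the repeated 0x36/0x5c patterns, and only the partial
 * hashes H(K' ^ ipad) and H(K' ^ opad) are kept in the key context -
 * the raw auth key itself is never handed to the hardware.
 */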
2355
2356 static int chcr_aead_digest_null_setkey(struct crypto_aead *authenc,
2357                                         const u8 *key, unsigned int keylen)
2358 {
2359         struct chcr_context *ctx = crypto_aead_ctx(authenc);
2360         struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
2361         struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
2362         struct crypto_authenc_keys keys;
2363
2364         /* contains both the auth and cipher keys */
2365         int key_ctx_len = 0;
2366         unsigned char ck_size = 0;
2367
2368         if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
2369                 crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
2370                 goto out;
2371         }
2372         if (keys.enckeylen == AES_KEYSIZE_128) {
2373                 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
2374         } else if (keys.enckeylen == AES_KEYSIZE_192) {
2375                 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
2376         } else if (keys.enckeylen == AES_KEYSIZE_256) {
2377                 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
2378         } else {
2379                 pr_err("chcr : Unsupported cipher key\n");
2380                 goto out;
2381         }
2382         memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
2383         aeadctx->enckey_len = keys.enckeylen;
2384         get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
2385                                     aeadctx->enckey_len << 3);
2386         key_ctx_len =  sizeof(struct _key_ctx)
2387                 + ((DIV_ROUND_UP(keys.enckeylen, 16)) << 4);
2388
2389         aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY, 0,
2390                                                 0, key_ctx_len >> 4);
2391         actx->auth_mode = CHCR_SCMD_AUTH_MODE_NOP;
2392         return 0;
2393 out:
2394         aeadctx->enckey_len = 0;
2395         return -EINVAL;
2396 }

2397 static int chcr_aead_encrypt(struct aead_request *req)
2398 {
2399         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2400         struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
2401
2402         reqctx->verify = VERIFY_HW;
2403
2404         switch (get_aead_subtype(tfm)) {
2405         case CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC:
2406         case CRYPTO_ALG_SUB_TYPE_AEAD_NULL:
2407                 return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
2408                                     create_authenc_wr);
2409         case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
2410         case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
2411                 return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
2412                                     create_aead_ccm_wr);
2413         default:
2414                 return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
2415                                     create_gcm_wr);
2416         }
2417 }
2418
2419 static int chcr_aead_decrypt(struct aead_request *req)
2420 {
2421         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2422         struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
2423         struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
2424         int size;
2425
2426         if (aeadctx->mayverify == VERIFY_SW) {
2427                 size = crypto_aead_maxauthsize(tfm);
2428                 reqctx->verify = VERIFY_SW;
2429         } else {
2430                 size = 0;
2431                 reqctx->verify = VERIFY_HW;
2432         }
2433
2434         switch (get_aead_subtype(tfm)) {
2435         case CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC:
2436         case CRYPTO_ALG_SUB_TYPE_AEAD_NULL:
2437                 return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
2438                                     create_authenc_wr);
2439         case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
2440         case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
2441                 return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
2442                                     create_aead_ccm_wr);
2443         default:
2444                 return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
2445                                     create_gcm_wr);
2446         }
2447 }
2448
2449 static int chcr_aead_op(struct aead_request *req,
2450                           unsigned short op_type,
2451                           int size,
2452                           create_wr_t create_wr_fn)
2453 {
2454         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2455         struct chcr_context *ctx = crypto_aead_ctx(tfm);
2456         struct uld_ctx *u_ctx = ULD_CTX(ctx);
2457         struct sk_buff *skb;
2458
2459         if (!ctx->dev) {
2460                 pr_err("chcr : %s : No crypto device.\n", __func__);
2461                 return -ENXIO;
2462         }
2463         if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
2464                                    ctx->tx_channel_id)) {
2465                 if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
2466                         return -EBUSY;
2467         }
2468
2469         /* Form a WR from req */
2470         skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[ctx->tx_channel_id], size,
2471                            op_type);
2472
2473         if (IS_ERR_OR_NULL(skb)) {
2474                 pr_err("chcr : %s : failed to form WR. No memory\n", __func__);
2475                 return skb ? PTR_ERR(skb) : -ENOMEM;
2476         }
2477
2478         skb->dev = u_ctx->lldi.ports[0];
2479         set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_channel_id);
2480         chcr_send_wr(skb);
2481         return -EINPROGRESS;
2482 }
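
/*
 * Caller-side usage sketch (illustrative only; "done_cb" and "priv" are
 * placeholders): once registered, these algorithms are driven through
 * the generic AEAD API:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_aead_setkey(tfm, key, keylen);
 *	crypto_aead_setauthsize(tfm, 16);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  done_cb, priv);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src, dst, cryptlen, iv);
 *	err = crypto_aead_encrypt(req);	(-EINPROGRESS when queued)
 */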
2483 static struct chcr_alg_template driver_algs[] = {
2484         /* AES-CBC */
2485         {
2486                 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2487                 .is_registered = 0,
2488                 .alg.crypto = {
2489                         .cra_name               = "cbc(aes)",
2490                         .cra_driver_name        = "cbc-aes-chcr",
2491                         .cra_priority           = CHCR_CRA_PRIORITY,
2492                         .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
2493                                 CRYPTO_ALG_ASYNC,
2494                         .cra_blocksize          = AES_BLOCK_SIZE,
2495                         .cra_ctxsize            = sizeof(struct chcr_context)
2496                                 + sizeof(struct ablk_ctx),
2497                         .cra_alignmask          = 0,
2498                         .cra_type               = &crypto_ablkcipher_type,
2499                         .cra_module             = THIS_MODULE,
2500                         .cra_init               = chcr_cra_init,
2501                         .cra_exit               = NULL,
2502                         .cra_u.ablkcipher       = {
2503                                 .min_keysize    = AES_MIN_KEY_SIZE,
2504                                 .max_keysize    = AES_MAX_KEY_SIZE,
2505                                 .ivsize         = AES_BLOCK_SIZE,
2506                                 .setkey                 = chcr_aes_cbc_setkey,
2507                                 .encrypt                = chcr_aes_encrypt,
2508                                 .decrypt                = chcr_aes_decrypt,
2509                         }
2510                 }
2511         },
2512         {
2513                 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2514                 .is_registered = 0,
2515                 .alg.crypto =   {
2516                         .cra_name               = "xts(aes)",
2517                         .cra_driver_name        = "xts-aes-chcr",
2518                         .cra_priority           = CHCR_CRA_PRIORITY,
2519                         .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
2520                                 CRYPTO_ALG_ASYNC,
2521                         .cra_blocksize          = AES_BLOCK_SIZE,
2522                         .cra_ctxsize            = sizeof(struct chcr_context) +
2523                                 sizeof(struct ablk_ctx),
2524                         .cra_alignmask          = 0,
2525                         .cra_type               = &crypto_ablkcipher_type,
2526                         .cra_module             = THIS_MODULE,
2527                         .cra_init               = chcr_cra_init,
2528                         .cra_exit               = NULL,
2529                         .cra_u = {
2530                                 .ablkcipher = {
2531                                         .min_keysize    = 2 * AES_MIN_KEY_SIZE,
2532                                         .max_keysize    = 2 * AES_MAX_KEY_SIZE,
2533                                         .ivsize         = AES_BLOCK_SIZE,
2534                                         .setkey         = chcr_aes_xts_setkey,
2535                                         .encrypt        = chcr_aes_encrypt,
2536                                         .decrypt        = chcr_aes_decrypt,
2537                                 }
2538                         }
2539                 }
2540         },
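        /*
         * The hash templates below are deliberately sparse; the common
         * ahash callbacks and crypto_alg fields are expected to be
         * filled in by the registration path before the algorithms are
         * registered with the crypto API.
         */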
	/* SHA */
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-chcr",
				.cra_blocksize = SHA1_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-chcr",
				.cra_blocksize = SHA256_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-chcr",
				.cra_blocksize = SHA224_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "sha384-chcr",
				.cra_blocksize = SHA384_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "sha512-chcr",
				.cra_blocksize = SHA512_BLOCK_SIZE,
			}
		}
	},
	/* HMAC */
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "hmac-sha1-chcr",
				.cra_blocksize = SHA1_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "hmac-sha224-chcr",
				.cra_blocksize = SHA224_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "hmac-sha256-chcr",
				.cra_blocksize = SHA256_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "hmac-sha384-chcr",
				.cra_blocksize = SHA384_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "hmac-sha512-chcr",
				.cra_blocksize = SHA512_BLOCK_SIZE,
			}
		}
	},
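	/*
	 * The SHA and HMAC entries above carry only the per-algorithm
	 * constants; the common callbacks, flags and context sizes are
	 * filled in at registration time by chcr_register_alg() below.
	 */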
	/* AEAD */
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_GCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-chcr",
				.cra_blocksize	= 1,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = 12,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize = chcr_gcm_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-chcr",
				.cra_blocksize	 = 1,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = 8,
			.maxauthsize	= GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize	= chcr_4106_4309_setauthsize,
		}
	},
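	/*
	 * ivsize above: plain gcm(aes) takes the full 12-byte IV from the
	 * request, while rfc4106 (and rfc4309 below) carry only the 8-byte
	 * per-packet IV; the remaining 4-byte salt is taken from the tail
	 * of the key, as IPsec ESP requires.
	 */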
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_CCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "ccm(aes)",
				.cra_driver_name = "ccm-aes-chcr",
				.cra_blocksize	 = 1,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize	= GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_ccm_setkey,
			.setauthsize	= chcr_ccm_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4309(ccm(aes))",
				.cra_driver_name = "rfc4309-ccm-aes-chcr",
				.cra_blocksize	 = 1,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx),
			},
			.ivsize = 8,
			.maxauthsize	= GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_rfc4309_setkey,
			.setauthsize = chcr_4106_4309_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha1-cbc-aes-chcr",
				.cra_blocksize	 = AES_BLOCK_SIZE,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha256-cbc-aes-chcr",
				.cra_blocksize	 = AES_BLOCK_SIZE,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize	= SHA256_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha224-cbc-aes-chcr",
				.cra_blocksize	 = AES_BLOCK_SIZE,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha384-cbc-aes-chcr",
				.cra_blocksize	 = AES_BLOCK_SIZE,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha512-cbc-aes-chcr",
				.cra_blocksize	 = AES_BLOCK_SIZE,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_NULL,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(digest_null,cbc(aes))",
				.cra_driver_name =
					"authenc-digest_null-cbc-aes-chcr",
				.cra_blocksize	 = AES_BLOCK_SIZE,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize	 = AES_BLOCK_SIZE,
			.maxauthsize = 0,
			.setkey	 = chcr_aead_digest_null_setkey,
			.setauthsize = chcr_authenc_null_setauthsize,
		}
	},
};

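/*
 * Illustrative sketch only (not part of the driver): once the table above
 * has been registered, kernel users reach these implementations through
 * the generic crypto API by cra_name, and the elevated CHCR_CRA_PRIORITY
 * makes the core prefer them over the software implementations, e.g.:
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
 *
 *	if (!IS_ERR(tfm))	// resolves to "sha256-chcr" while loaded
 *		crypto_free_ahash(tfm);
 */
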
/*
 *	chcr_unregister_alg - Deregister crypto algorithms from the
 *	kernel framework.  Clears is_registered unconditionally, so it is
 *	also safe to call from the registration error-unwind path.
 */
static int chcr_unregister_alg(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_ABLKCIPHER:
			if (driver_algs[i].is_registered)
				crypto_unregister_alg(
						&driver_algs[i].alg.crypto);
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			if (driver_algs[i].is_registered)
				crypto_unregister_aead(
						&driver_algs[i].alg.aead);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			if (driver_algs[i].is_registered)
				crypto_unregister_ahash(
						&driver_algs[i].alg.hash);
			break;
		}
		driver_algs[i].is_registered = 0;
	}
	return 0;
}

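/*
 * Transform context sizes handed to the crypto core: plain hashes need
 * only the base chcr_context, while HMAC transforms additionally carry
 * their key-derived state in struct hmac_ctx.
 */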
#define SZ_AHASH_CTX sizeof(struct chcr_context)
#define SZ_AHASH_H_CTX (sizeof(struct chcr_context) + sizeof(struct hmac_ctx))
#define SZ_AHASH_REQ_CTX sizeof(struct chcr_ahash_req_ctx)
#define AHASH_CRA_FLAGS (CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC)

/*
 *	chcr_register_alg - Register crypto algorithms with the kernel
 *	framework.
 */
static int chcr_register_alg(void)
{
	struct ahash_alg *a_hash;
	int err = 0, i;
	char *name = NULL;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		if (driver_algs[i].is_registered)
			continue;
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_ABLKCIPHER:
			err = crypto_register_alg(&driver_algs[i].alg.crypto);
			name = driver_algs[i].alg.crypto.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			driver_algs[i].alg.aead.base.cra_priority =
				CHCR_CRA_PRIORITY;
			driver_algs[i].alg.aead.base.cra_flags =
				CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
			driver_algs[i].alg.aead.encrypt = chcr_aead_encrypt;
			driver_algs[i].alg.aead.decrypt = chcr_aead_decrypt;
			driver_algs[i].alg.aead.init = chcr_aead_cra_init;
			driver_algs[i].alg.aead.exit = chcr_aead_cra_exit;
			driver_algs[i].alg.aead.base.cra_module = THIS_MODULE;
			err = crypto_register_aead(&driver_algs[i].alg.aead);
			name = driver_algs[i].alg.aead.base.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			a_hash = &driver_algs[i].alg.hash;
			a_hash->update = chcr_ahash_update;
			a_hash->final = chcr_ahash_final;
			a_hash->finup = chcr_ahash_finup;
			a_hash->digest = chcr_ahash_digest;
			a_hash->export = chcr_ahash_export;
			a_hash->import = chcr_ahash_import;
			a_hash->halg.statesize = SZ_AHASH_REQ_CTX;
			a_hash->halg.base.cra_priority = CHCR_CRA_PRIORITY;
			a_hash->halg.base.cra_module = THIS_MODULE;
			a_hash->halg.base.cra_flags = AHASH_CRA_FLAGS;
			a_hash->halg.base.cra_alignmask = 0;
			a_hash->halg.base.cra_exit = NULL;
			a_hash->halg.base.cra_type = &crypto_ahash_type;

			if (driver_algs[i].type == CRYPTO_ALG_TYPE_HMAC) {
				a_hash->halg.base.cra_init = chcr_hmac_cra_init;
				a_hash->halg.base.cra_exit = chcr_hmac_cra_exit;
				a_hash->init = chcr_hmac_init;
				a_hash->setkey = chcr_ahash_setkey;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_H_CTX;
			} else {
				a_hash->init = chcr_sha_init;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_CTX;
				a_hash->halg.base.cra_init = chcr_sha_cra_init;
			}
			err = crypto_register_ahash(&driver_algs[i].alg.hash);
			name = a_hash->halg.base.cra_driver_name;
			break;
		}
		if (err) {
			pr_err("%s : Algorithm registration failed\n", name);
			goto register_err;
		} else {
			driver_algs[i].is_registered = 1;
		}
	}
	return 0;

register_err:
	chcr_unregister_alg();
	return err;
}

/*
 *	start_crypto - Register the crypto algorithms.
 *	This should be called once, when the first device comes up. After
 *	this the kernel will start calling the driver APIs for crypto
 *	operations.
 */
int start_crypto(void)
{
	return chcr_register_alg();
}

/*
 *	stop_crypto - Deregister all the crypto algorithms from the kernel.
 *	This should be called once, when the last device goes down. After
 *	this the kernel will not call the driver APIs for crypto
 *	operations.
 */
int stop_crypto(void)
{
	chcr_unregister_alg();
	return 0;
}
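
/*
 * Illustrative sketch only (the real hooks live in chcr_core.c; the
 * dev_count name below is hypothetical): the device add/remove callbacks
 * are expected to pair these entry points as
 *
 *	if (atomic_inc_return(&dev_count) == 1)	// first device up
 *		start_crypto();
 *	...
 *	if (atomic_dec_and_test(&dev_count))	// last device down
 *		stop_crypto();
 */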