crypto: stm32 - Use new crypto_engine_op interface
drivers/crypto/stm32/stm32-hash.c
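With the new crypto_engine_op interface, the do_one_request handler is declared as part of the algorithm itself rather than being stored in the transform context: each hash is described by a struct ahash_engine_alg and registered with crypto_engine_register_ahash() / crypto_engine_register_ahashes(). A minimal sketch of the pattern this driver follows (my_do_one_request and my_alg are illustrative names, not symbols from this file):

#include <crypto/engine.h>
#include <crypto/internal/hash.h>

static int my_do_one_request(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = container_of(areq, struct ahash_request, base);

	/* program the hardware for req, then report the result to the engine */
	crypto_finalize_hash_request(engine, req, 0);
	return 0;
}

static struct ahash_engine_alg my_alg = {
	.base.halg.base.cra_name = "sha256",
	/* ... remaining ahash callbacks and crypto_alg fields ... */
	.op.do_one_request = my_do_one_request,
};

/* at probe time: crypto_engine_register_ahash(&my_alg); */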
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * This file is part of STM32 Crypto driver for Linux.
4  *
5  * Copyright (C) 2017, STMicroelectronics - All Rights Reserved
6  * Author(s): Lionel DEBIEVE <lionel.debieve@st.com> for STMicroelectronics.
7  */
8
9 #include <crypto/engine.h>
10 #include <crypto/internal/hash.h>
11 #include <crypto/md5.h>
12 #include <crypto/scatterwalk.h>
13 #include <crypto/sha1.h>
14 #include <crypto/sha2.h>
15 #include <crypto/sha3.h>
16 #include <linux/clk.h>
17 #include <linux/delay.h>
18 #include <linux/dma-mapping.h>
19 #include <linux/dmaengine.h>
20 #include <linux/interrupt.h>
21 #include <linux/iopoll.h>
22 #include <linux/kernel.h>
23 #include <linux/module.h>
24 #include <linux/of_device.h>
25 #include <linux/platform_device.h>
26 #include <linux/pm_runtime.h>
27 #include <linux/reset.h>
28 #include <linux/string.h>
29
30 #define HASH_CR                         0x00
31 #define HASH_DIN                        0x04
32 #define HASH_STR                        0x08
33 #define HASH_UX500_HREG(x)              (0x0c + ((x) * 0x04))
34 #define HASH_IMR                        0x20
35 #define HASH_SR                         0x24
36 #define HASH_CSR(x)                     (0x0F8 + ((x) * 0x04))
37 #define HASH_HREG(x)                    (0x310 + ((x) * 0x04))
38 #define HASH_HWCFGR                     0x3F0
39 #define HASH_VER                        0x3F4
40 #define HASH_ID                         0x3F8
41
42 /* Control Register */
43 #define HASH_CR_INIT                    BIT(2)
44 #define HASH_CR_DMAE                    BIT(3)
45 #define HASH_CR_DATATYPE_POS            4
46 #define HASH_CR_MODE                    BIT(6)
47 #define HASH_CR_ALGO_POS                7
48 #define HASH_CR_MDMAT                   BIT(13)
49 #define HASH_CR_DMAA                    BIT(14)
50 #define HASH_CR_LKEY                    BIT(16)
51
52 /* Interrupt */
53 #define HASH_DINIE                      BIT(0)
54 #define HASH_DCIE                       BIT(1)
55
56 /* Interrupt Mask */
57 #define HASH_MASK_CALC_COMPLETION       BIT(0)
58 #define HASH_MASK_DATA_INPUT            BIT(1)
59
60 /* Status Flags */
61 #define HASH_SR_DATA_INPUT_READY        BIT(0)
62 #define HASH_SR_OUTPUT_READY            BIT(1)
63 #define HASH_SR_DMA_ACTIVE              BIT(2)
64 #define HASH_SR_BUSY                    BIT(3)
65
66 /* STR Register */
67 #define HASH_STR_NBLW_MASK              GENMASK(4, 0)
68 #define HASH_STR_DCAL                   BIT(8)
69
70 /* HWCFGR Register */
71 #define HASH_HWCFG_DMA_MASK             GENMASK(3, 0)
72
73 /* Context swap register */
74 #define HASH_CSR_NB_SHA256_HMAC         54
75 #define HASH_CSR_NB_SHA256              38
76 #define HASH_CSR_NB_SHA512_HMAC         103
77 #define HASH_CSR_NB_SHA512              91
78 #define HASH_CSR_NB_SHA3_HMAC           88
79 #define HASH_CSR_NB_SHA3                72
80 #define HASH_CSR_NB_MAX                 HASH_CSR_NB_SHA512_HMAC
81
82 #define HASH_FLAGS_INIT                 BIT(0)
83 #define HASH_FLAGS_OUTPUT_READY         BIT(1)
84 #define HASH_FLAGS_CPU                  BIT(2)
85 #define HASH_FLAGS_DMA_ACTIVE           BIT(3)
86 #define HASH_FLAGS_HMAC_INIT            BIT(4)
87 #define HASH_FLAGS_HMAC_FINAL           BIT(5)
88 #define HASH_FLAGS_HMAC_KEY             BIT(6)
89 #define HASH_FLAGS_SHA3_MODE            BIT(7)
90 #define HASH_FLAGS_FINAL                BIT(15)
91 #define HASH_FLAGS_FINUP                BIT(16)
92 #define HASH_FLAGS_ALGO_MASK            GENMASK(20, 17)
93 #define HASH_FLAGS_ALGO_SHIFT           17
94 #define HASH_FLAGS_ERRORS               BIT(21)
95 #define HASH_FLAGS_EMPTY                BIT(22)
96 #define HASH_FLAGS_HMAC                 BIT(23)
97
98 #define HASH_OP_UPDATE                  1
99 #define HASH_OP_FINAL                   2
100
101 #define HASH_BURST_LEVEL                4
102
103 enum stm32_hash_data_format {
104         HASH_DATA_32_BITS               = 0x0,
105         HASH_DATA_16_BITS               = 0x1,
106         HASH_DATA_8_BITS                = 0x2,
107         HASH_DATA_1_BIT                 = 0x3
108 };
109
110 #define HASH_BUFLEN                     (SHA3_224_BLOCK_SIZE + 4)
111 #define HASH_MAX_KEY_SIZE               (SHA512_BLOCK_SIZE * 8)
112
113 enum stm32_hash_algo {
114         HASH_SHA1                       = 0,
115         HASH_MD5                        = 1,
116         HASH_SHA224                     = 2,
117         HASH_SHA256                     = 3,
118         HASH_SHA3_224                   = 4,
119         HASH_SHA3_256                   = 5,
120         HASH_SHA3_384                   = 6,
121         HASH_SHA3_512                   = 7,
122         HASH_SHA384                     = 12,
123         HASH_SHA512                     = 15,
124 };
125
126 enum ux500_hash_algo {
127         HASH_SHA256_UX500               = 0,
128         HASH_SHA1_UX500                 = 1,
129 };
130
131 #define HASH_AUTOSUSPEND_DELAY          50
132
133 struct stm32_hash_ctx {
134         struct stm32_hash_dev   *hdev;
135         struct crypto_shash     *xtfm;
136         unsigned long           flags;
137
138         u8                      key[HASH_MAX_KEY_SIZE];
139         int                     keylen;
140 };
141
142 struct stm32_hash_state {
143         u32                     flags;
144
145         u16                     bufcnt;
146         u16                     blocklen;
147
148         u8 buffer[HASH_BUFLEN] __aligned(4);
149
150         /* hash state */
151         u32                     hw_context[3 + HASH_CSR_NB_MAX];
152 };
153
154 struct stm32_hash_request_ctx {
155         struct stm32_hash_dev   *hdev;
156         unsigned long           op;
157
158         u8 digest[SHA512_DIGEST_SIZE] __aligned(sizeof(u32));
159         size_t                  digcnt;
160
161         /* DMA */
162         struct scatterlist      *sg;
163         unsigned int            offset;
164         unsigned int            total;
165         struct scatterlist      sg_key;
166
167         dma_addr_t              dma_addr;
168         size_t                  dma_ct;
169         int                     nents;
170
171         u8                      data_type;
172
173         struct stm32_hash_state state;
174 };
175
176 struct stm32_hash_algs_info {
177         struct ahash_engine_alg *algs_list;
178         size_t                  size;
179 };
180
181 struct stm32_hash_pdata {
182         const int                               alg_shift;
183         const struct stm32_hash_algs_info       *algs_info;
184         size_t                                  algs_info_size;
185         bool                                    has_sr;
186         bool                                    has_mdmat;
187         bool                                    broken_emptymsg;
188         bool                                    ux500;
189 };
190
191 struct stm32_hash_dev {
192         struct list_head        list;
193         struct device           *dev;
194         struct clk              *clk;
195         struct reset_control    *rst;
196         void __iomem            *io_base;
197         phys_addr_t             phys_base;
198         u32                     dma_mode;
199         bool                    polled;
200
201         struct ahash_request    *req;
202         struct crypto_engine    *engine;
203
204         unsigned long           flags;
205
206         struct dma_chan         *dma_lch;
207         struct completion       dma_completion;
208
209         const struct stm32_hash_pdata   *pdata;
210 };
211
212 struct stm32_hash_drv {
213         struct list_head        dev_list;
 214         spinlock_t              lock; /* protects dev_list access */
215 };
216
217 static struct stm32_hash_drv stm32_hash = {
218         .dev_list = LIST_HEAD_INIT(stm32_hash.dev_list),
219         .lock = __SPIN_LOCK_UNLOCKED(stm32_hash.lock),
220 };
221
222 static void stm32_hash_dma_callback(void *param);
223
224 static inline u32 stm32_hash_read(struct stm32_hash_dev *hdev, u32 offset)
225 {
226         return readl_relaxed(hdev->io_base + offset);
227 }
228
229 static inline void stm32_hash_write(struct stm32_hash_dev *hdev,
230                                     u32 offset, u32 value)
231 {
232         writel_relaxed(value, hdev->io_base + offset);
233 }
234
235 static inline int stm32_hash_wait_busy(struct stm32_hash_dev *hdev)
236 {
237         u32 status;
238
 239         /* The Ux500 lacks the special status register; poll the DCAL bit instead */
240         if (!hdev->pdata->has_sr)
241                 return readl_relaxed_poll_timeout(hdev->io_base + HASH_STR, status,
242                                                   !(status & HASH_STR_DCAL), 10, 10000);
243
244         return readl_relaxed_poll_timeout(hdev->io_base + HASH_SR, status,
245                                    !(status & HASH_SR_BUSY), 10, 10000);
246 }
247
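/*
 * Program HASH_STR.NBLW with the number of valid bits in the last word
 * written to HASH_DIN (8 bits per trailing byte of @length).
 */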
248 static void stm32_hash_set_nblw(struct stm32_hash_dev *hdev, int length)
249 {
250         u32 reg;
251
252         reg = stm32_hash_read(hdev, HASH_STR);
253         reg &= ~(HASH_STR_NBLW_MASK);
254         reg |= (8U * ((length) % 4U));
255         stm32_hash_write(hdev, HASH_STR, reg);
256 }
257
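/*
 * Push the HMAC key into HASH_DIN word by word and set DCAL to start the
 * key processing. Returns -EINPROGRESS while the key is being digested,
 * or 0 if no key is set.
 */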
258 static int stm32_hash_write_key(struct stm32_hash_dev *hdev)
259 {
260         struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
261         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
262         u32 reg;
263         int keylen = ctx->keylen;
264         void *key = ctx->key;
265
266         if (keylen) {
267                 stm32_hash_set_nblw(hdev, keylen);
268
269                 while (keylen > 0) {
270                         stm32_hash_write(hdev, HASH_DIN, *(u32 *)key);
271                         keylen -= 4;
272                         key += 4;
273                 }
274
275                 reg = stm32_hash_read(hdev, HASH_STR);
276                 reg |= HASH_STR_DCAL;
277                 stm32_hash_write(hdev, HASH_STR, reg);
278
279                 return -EINPROGRESS;
280         }
281
282         return 0;
283 }
284
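/*
 * Program HASH_CR for the first block of a request: select the algorithm
 * and data type, enable HMAC mode (with long-key handling) when needed,
 * unmask the completion interrupt unless polling, and initialize the hash
 * core with HASH_CR_INIT.
 */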
285 static void stm32_hash_write_ctrl(struct stm32_hash_dev *hdev)
286 {
287         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
288         struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
289         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
290         struct stm32_hash_state *state = &rctx->state;
291         u32 alg = (state->flags & HASH_FLAGS_ALGO_MASK) >> HASH_FLAGS_ALGO_SHIFT;
292
293         u32 reg = HASH_CR_INIT;
294
295         if (!(hdev->flags & HASH_FLAGS_INIT)) {
296                 if (hdev->pdata->ux500) {
297                         reg |= ((alg & BIT(0)) << HASH_CR_ALGO_POS);
298                 } else {
299                         if (hdev->pdata->alg_shift == HASH_CR_ALGO_POS)
300                                 reg |= ((alg & BIT(1)) << 17) |
301                                        ((alg & BIT(0)) << HASH_CR_ALGO_POS);
302                         else
303                                 reg |= alg << hdev->pdata->alg_shift;
304                 }
305
306                 reg |= (rctx->data_type << HASH_CR_DATATYPE_POS);
307
308                 if (state->flags & HASH_FLAGS_HMAC) {
309                         hdev->flags |= HASH_FLAGS_HMAC;
310                         reg |= HASH_CR_MODE;
311                         if (ctx->keylen > crypto_ahash_blocksize(tfm))
312                                 reg |= HASH_CR_LKEY;
313                 }
314
315                 if (!hdev->polled)
316                         stm32_hash_write(hdev, HASH_IMR, HASH_DCIE);
317
318                 stm32_hash_write(hdev, HASH_CR, reg);
319
320                 hdev->flags |= HASH_FLAGS_INIT;
321
 322                 /*
 323                  * After the first block + 1 words have been filled up, only
 324                  * one more block is needed to start a partial computation.
 325                  */
326                 rctx->state.blocklen -= sizeof(u32);
327
328                 dev_dbg(hdev->dev, "Write Control %x\n", reg);
329         }
330 }
331
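/*
 * Copy bytes from the request scatterlist into the internal buffer until
 * either a full block has been gathered or the request data is exhausted.
 */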
332 static void stm32_hash_append_sg(struct stm32_hash_request_ctx *rctx)
333 {
334         struct stm32_hash_state *state = &rctx->state;
335         size_t count;
336
337         while ((state->bufcnt < state->blocklen) && rctx->total) {
338                 count = min(rctx->sg->length - rctx->offset, rctx->total);
339                 count = min_t(size_t, count, state->blocklen - state->bufcnt);
340
341                 if (count <= 0) {
342                         if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) {
343                                 rctx->sg = sg_next(rctx->sg);
344                                 continue;
345                         } else {
346                                 break;
347                         }
348                 }
349
350                 scatterwalk_map_and_copy(state->buffer + state->bufcnt,
351                                          rctx->sg, rctx->offset, count, 0);
352
353                 state->bufcnt += count;
354                 rctx->offset += count;
355                 rctx->total -= count;
356
357                 if (rctx->offset == rctx->sg->length) {
358                         rctx->sg = sg_next(rctx->sg);
359                         if (rctx->sg)
360                                 rctx->offset = 0;
361                         else
362                                 rctx->total = 0;
363                 }
364         }
365 }
366
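/*
 * Feed @length bytes from @buf into HASH_DIN using CPU writes. When @final
 * is set, program NBLW and set DCAL to launch the last computation, writing
 * the HMAC key before the data and after the final block when required.
 */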
367 static int stm32_hash_xmit_cpu(struct stm32_hash_dev *hdev,
368                                const u8 *buf, size_t length, int final)
369 {
370         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
371         struct stm32_hash_state *state = &rctx->state;
372         unsigned int count, len32;
373         const u32 *buffer = (const u32 *)buf;
374         u32 reg;
375
376         if (final) {
377                 hdev->flags |= HASH_FLAGS_FINAL;
378
379                 /* Do not process empty messages if hw is buggy. */
380                 if (!(hdev->flags & HASH_FLAGS_INIT) && !length &&
381                     hdev->pdata->broken_emptymsg) {
382                         state->flags |= HASH_FLAGS_EMPTY;
383                         return 0;
384                 }
385         }
386
387         len32 = DIV_ROUND_UP(length, sizeof(u32));
388
389         dev_dbg(hdev->dev, "%s: length: %zd, final: %x len32 %i\n",
390                 __func__, length, final, len32);
391
392         hdev->flags |= HASH_FLAGS_CPU;
393
394         stm32_hash_write_ctrl(hdev);
395
396         if (stm32_hash_wait_busy(hdev))
397                 return -ETIMEDOUT;
398
399         if ((hdev->flags & HASH_FLAGS_HMAC) &&
400             (!(hdev->flags & HASH_FLAGS_HMAC_KEY))) {
401                 hdev->flags |= HASH_FLAGS_HMAC_KEY;
402                 stm32_hash_write_key(hdev);
403                 if (stm32_hash_wait_busy(hdev))
404                         return -ETIMEDOUT;
405         }
406
407         for (count = 0; count < len32; count++)
408                 stm32_hash_write(hdev, HASH_DIN, buffer[count]);
409
410         if (final) {
411                 if (stm32_hash_wait_busy(hdev))
412                         return -ETIMEDOUT;
413
414                 stm32_hash_set_nblw(hdev, length);
415                 reg = stm32_hash_read(hdev, HASH_STR);
416                 reg |= HASH_STR_DCAL;
417                 stm32_hash_write(hdev, HASH_STR, reg);
418                 if (hdev->flags & HASH_FLAGS_HMAC) {
419                         if (stm32_hash_wait_busy(hdev))
420                                 return -ETIMEDOUT;
421                         stm32_hash_write_key(hdev);
422                 }
423                 return -EINPROGRESS;
424         }
425
426         return 0;
427 }
428
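/*
 * Return the number of context swap registers (HASH_CSR) to save and
 * restore for the algorithm selected in the request state.
 */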
429 static int hash_swap_reg(struct stm32_hash_request_ctx *rctx)
430 {
431         struct stm32_hash_state *state = &rctx->state;
432
433         switch ((state->flags & HASH_FLAGS_ALGO_MASK) >>
434                 HASH_FLAGS_ALGO_SHIFT) {
435         case HASH_MD5:
436         case HASH_SHA1:
437         case HASH_SHA224:
438         case HASH_SHA256:
439                 if (state->flags & HASH_FLAGS_HMAC)
440                         return HASH_CSR_NB_SHA256_HMAC;
441                 else
442                         return HASH_CSR_NB_SHA256;
443                 break;
444
445         case HASH_SHA384:
446         case HASH_SHA512:
447                 if (state->flags & HASH_FLAGS_HMAC)
448                         return HASH_CSR_NB_SHA512_HMAC;
449                 else
450                         return HASH_CSR_NB_SHA512;
451                 break;
452
453         case HASH_SHA3_224:
454         case HASH_SHA3_256:
455         case HASH_SHA3_384:
456         case HASH_SHA3_512:
457                 if (state->flags & HASH_FLAGS_HMAC)
458                         return HASH_CSR_NB_SHA3_HMAC;
459                 else
460                         return HASH_CSR_NB_SHA3;
461                 break;
462
463         default:
464                 return -EINVAL;
465         }
466 }
467
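/*
 * CPU-mode update: push full blocks to the hardware and buffer the tail;
 * unless this is the final step, save IMR/STR/CR and the CSR context so
 * the request can be resumed later.
 */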
468 static int stm32_hash_update_cpu(struct stm32_hash_dev *hdev)
469 {
470         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
471         struct stm32_hash_state *state = &rctx->state;
472         u32 *preg = state->hw_context;
473         int bufcnt, err = 0, final;
474         int i, swap_reg;
475
476         dev_dbg(hdev->dev, "%s flags %x\n", __func__, state->flags);
477
478         final = state->flags & HASH_FLAGS_FINAL;
479
480         while ((rctx->total >= state->blocklen) ||
481                (state->bufcnt + rctx->total >= state->blocklen)) {
482                 stm32_hash_append_sg(rctx);
483                 bufcnt = state->bufcnt;
484                 state->bufcnt = 0;
485                 err = stm32_hash_xmit_cpu(hdev, state->buffer, bufcnt, 0);
486                 if (err)
487                         return err;
488         }
489
490         stm32_hash_append_sg(rctx);
491
492         if (final) {
493                 bufcnt = state->bufcnt;
494                 state->bufcnt = 0;
495                 return stm32_hash_xmit_cpu(hdev, state->buffer, bufcnt, 1);
496         }
497
498         if (!(hdev->flags & HASH_FLAGS_INIT))
499                 return 0;
500
501         if (stm32_hash_wait_busy(hdev))
502                 return -ETIMEDOUT;
503
504         swap_reg = hash_swap_reg(rctx);
505
506         if (!hdev->pdata->ux500)
507                 *preg++ = stm32_hash_read(hdev, HASH_IMR);
508         *preg++ = stm32_hash_read(hdev, HASH_STR);
509         *preg++ = stm32_hash_read(hdev, HASH_CR);
510         for (i = 0; i < swap_reg; i++)
511                 *preg++ = stm32_hash_read(hdev, HASH_CSR(i));
512
513         state->flags |= HASH_FLAGS_INIT;
514
515         return err;
516 }
517
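/*
 * Submit one scatterlist entry to the DMA channel, enable DMA transfers in
 * HASH_CR and wait for completion (100 ms timeout). Returns -EINPROGRESS
 * on success.
 */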
518 static int stm32_hash_xmit_dma(struct stm32_hash_dev *hdev,
519                                struct scatterlist *sg, int length, int mdma)
520 {
521         struct dma_async_tx_descriptor *in_desc;
522         dma_cookie_t cookie;
523         u32 reg;
524         int err;
525
526         in_desc = dmaengine_prep_slave_sg(hdev->dma_lch, sg, 1,
527                                           DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT |
528                                           DMA_CTRL_ACK);
529         if (!in_desc) {
530                 dev_err(hdev->dev, "dmaengine_prep_slave error\n");
531                 return -ENOMEM;
532         }
533
534         reinit_completion(&hdev->dma_completion);
535         in_desc->callback = stm32_hash_dma_callback;
536         in_desc->callback_param = hdev;
537
538         hdev->flags |= HASH_FLAGS_FINAL;
539         hdev->flags |= HASH_FLAGS_DMA_ACTIVE;
540
541         reg = stm32_hash_read(hdev, HASH_CR);
542
543         if (hdev->pdata->has_mdmat) {
544                 if (mdma)
545                         reg |= HASH_CR_MDMAT;
546                 else
547                         reg &= ~HASH_CR_MDMAT;
548         }
549         reg |= HASH_CR_DMAE;
550
551         stm32_hash_write(hdev, HASH_CR, reg);
552
553         stm32_hash_set_nblw(hdev, length);
554
555         cookie = dmaengine_submit(in_desc);
556         err = dma_submit_error(cookie);
557         if (err)
558                 return -ENOMEM;
559
560         dma_async_issue_pending(hdev->dma_lch);
561
562         if (!wait_for_completion_timeout(&hdev->dma_completion,
563                                          msecs_to_jiffies(100)))
564                 err = -ETIMEDOUT;
565
566         if (dma_async_is_tx_complete(hdev->dma_lch, cookie,
567                                      NULL, NULL) != DMA_COMPLETE)
568                 err = -ETIMEDOUT;
569
570         if (err) {
571                 dev_err(hdev->dev, "DMA Error %i\n", err);
572                 dmaengine_terminate_all(hdev->dma_lch);
573                 return err;
574         }
575
576         return -EINPROGRESS;
577 }
578
579 static void stm32_hash_dma_callback(void *param)
580 {
581         struct stm32_hash_dev *hdev = param;
582
583         complete(&hdev->dma_completion);
584 }
585
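/*
 * Send the HMAC key to the hardware: keys shorter than a block (or when
 * dma_mode == 1) are written by the CPU, otherwise the key is mapped and
 * transferred through the DMA channel.
 */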
586 static int stm32_hash_hmac_dma_send(struct stm32_hash_dev *hdev)
587 {
588         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
589         struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
590         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
591         int err;
592
593         if (ctx->keylen < rctx->state.blocklen || hdev->dma_mode == 1) {
594                 err = stm32_hash_write_key(hdev);
595                 if (stm32_hash_wait_busy(hdev))
596                         return -ETIMEDOUT;
597         } else {
598                 if (!(hdev->flags & HASH_FLAGS_HMAC_KEY))
599                         sg_init_one(&rctx->sg_key, ctx->key,
600                                     ALIGN(ctx->keylen, sizeof(u32)));
601
602                 rctx->dma_ct = dma_map_sg(hdev->dev, &rctx->sg_key, 1,
603                                           DMA_TO_DEVICE);
604                 if (rctx->dma_ct == 0) {
605                         dev_err(hdev->dev, "dma_map_sg error\n");
606                         return -ENOMEM;
607                 }
608
609                 err = stm32_hash_xmit_dma(hdev, &rctx->sg_key, ctx->keylen, 0);
610
611                 dma_unmap_sg(hdev->dev, &rctx->sg_key, 1, DMA_TO_DEVICE);
612         }
613
614         return err;
615 }
616
617 static int stm32_hash_dma_init(struct stm32_hash_dev *hdev)
618 {
619         struct dma_slave_config dma_conf;
620         struct dma_chan *chan;
621         int err;
622
623         memset(&dma_conf, 0, sizeof(dma_conf));
624
625         dma_conf.direction = DMA_MEM_TO_DEV;
626         dma_conf.dst_addr = hdev->phys_base + HASH_DIN;
627         dma_conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
628         dma_conf.src_maxburst = HASH_BURST_LEVEL;
629         dma_conf.dst_maxburst = HASH_BURST_LEVEL;
630         dma_conf.device_fc = false;
631
632         chan = dma_request_chan(hdev->dev, "in");
633         if (IS_ERR(chan))
634                 return PTR_ERR(chan);
635
636         hdev->dma_lch = chan;
637
638         err = dmaengine_slave_config(hdev->dma_lch, &dma_conf);
639         if (err) {
640                 dma_release_channel(hdev->dma_lch);
641                 hdev->dma_lch = NULL;
642                 dev_err(hdev->dev, "Couldn't configure DMA slave.\n");
643                 return err;
644         }
645
646         init_completion(&hdev->dma_completion);
647
648         return 0;
649 }
650
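/*
 * Walk the request scatterlist and push the data to the hash core through
 * DMA; in dma_mode 1 the unaligned tail is padded and written by the CPU,
 * and the HMAC key phases are handled around the data.
 */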
651 static int stm32_hash_dma_send(struct stm32_hash_dev *hdev)
652 {
653         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
654         u32 *buffer = (void *)rctx->state.buffer;
655         struct scatterlist sg[1], *tsg;
656         int err = 0, reg, ncp = 0;
657         unsigned int i, len = 0, bufcnt = 0;
658         bool is_last = false;
659
660         rctx->sg = hdev->req->src;
661         rctx->total = hdev->req->nbytes;
662
663         rctx->nents = sg_nents(rctx->sg);
664         if (rctx->nents < 0)
665                 return -EINVAL;
666
667         stm32_hash_write_ctrl(hdev);
668
669         if (hdev->flags & HASH_FLAGS_HMAC) {
670                 err = stm32_hash_hmac_dma_send(hdev);
671                 if (err != -EINPROGRESS)
672                         return err;
673         }
674
675         for_each_sg(rctx->sg, tsg, rctx->nents, i) {
676                 sg[0] = *tsg;
677                 len = sg->length;
678
679                 if (sg_is_last(sg) || (bufcnt + sg[0].length) >= rctx->total) {
680                         sg->length = rctx->total - bufcnt;
681                         is_last = true;
682                         if (hdev->dma_mode == 1) {
683                                 len = (ALIGN(sg->length, 16) - 16);
684
685                                 ncp = sg_pcopy_to_buffer(
686                                         rctx->sg, rctx->nents,
687                                         rctx->state.buffer, sg->length - len,
688                                         rctx->total - sg->length + len);
689
690                                 sg->length = len;
691                         } else {
692                                 if (!(IS_ALIGNED(sg->length, sizeof(u32)))) {
693                                         len = sg->length;
694                                         sg->length = ALIGN(sg->length,
695                                                            sizeof(u32));
696                                 }
697                         }
698                 }
699
700                 rctx->dma_ct = dma_map_sg(hdev->dev, sg, 1,
701                                           DMA_TO_DEVICE);
702                 if (rctx->dma_ct == 0) {
703                         dev_err(hdev->dev, "dma_map_sg error\n");
704                         return -ENOMEM;
705                 }
706
707                 err = stm32_hash_xmit_dma(hdev, sg, len, !is_last);
708
709                 bufcnt += sg[0].length;
710                 dma_unmap_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);
711
712                 if (err == -ENOMEM)
713                         return err;
714                 if (is_last)
715                         break;
716         }
717
718         if (hdev->dma_mode == 1) {
719                 if (stm32_hash_wait_busy(hdev))
720                         return -ETIMEDOUT;
721                 reg = stm32_hash_read(hdev, HASH_CR);
722                 reg &= ~HASH_CR_DMAE;
723                 reg |= HASH_CR_DMAA;
724                 stm32_hash_write(hdev, HASH_CR, reg);
725
726                 if (ncp) {
727                         memset(buffer + ncp, 0,
728                                DIV_ROUND_UP(ncp, sizeof(u32)) - ncp);
729                         writesl(hdev->io_base + HASH_DIN, buffer,
730                                 DIV_ROUND_UP(ncp, sizeof(u32)));
731                 }
732                 stm32_hash_set_nblw(hdev, ncp);
733                 reg = stm32_hash_read(hdev, HASH_STR);
734                 reg |= HASH_STR_DCAL;
735                 stm32_hash_write(hdev, HASH_STR, reg);
736                 err = -EINPROGRESS;
737         }
738
739         if (hdev->flags & HASH_FLAGS_HMAC) {
740                 if (stm32_hash_wait_busy(hdev))
741                         return -ETIMEDOUT;
742                 err = stm32_hash_hmac_dma_send(hdev);
743         }
744
745         return err;
746 }
747
748 static struct stm32_hash_dev *stm32_hash_find_dev(struct stm32_hash_ctx *ctx)
749 {
750         struct stm32_hash_dev *hdev = NULL, *tmp;
751
752         spin_lock_bh(&stm32_hash.lock);
753         if (!ctx->hdev) {
754                 list_for_each_entry(tmp, &stm32_hash.dev_list, list) {
755                         hdev = tmp;
756                         break;
757                 }
758                 ctx->hdev = hdev;
759         } else {
760                 hdev = ctx->hdev;
761         }
762
763         spin_unlock_bh(&stm32_hash.lock);
764
765         return hdev;
766 }
767
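/*
 * Return true if the request is large enough and its scatterlist is
 * suitably aligned to be processed through the DMA path.
 */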
768 static bool stm32_hash_dma_aligned_data(struct ahash_request *req)
769 {
770         struct scatterlist *sg;
771         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
772         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
773         struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
774         int i;
775
776         if (!hdev->dma_lch || req->nbytes <= rctx->state.blocklen)
777                 return false;
778
779         if (sg_nents(req->src) > 1) {
780                 if (hdev->dma_mode == 1)
781                         return false;
782                 for_each_sg(req->src, sg, sg_nents(req->src), i) {
783                         if ((!IS_ALIGNED(sg->length, sizeof(u32))) &&
784                             (!sg_is_last(sg)))
785                                 return false;
786                 }
787         }
788
789         if (req->src->offset % 4)
790                 return false;
791
792         return true;
793 }
794
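/*
 * Initialize the request state: select the hardware algorithm from the
 * digest size (and the SHA-3/HMAC mode flags) and reset the buffer
 * counters and data type.
 */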
795 static int stm32_hash_init(struct ahash_request *req)
796 {
797         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
798         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
799         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
800         struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
801         struct stm32_hash_state *state = &rctx->state;
802         bool sha3_mode = ctx->flags & HASH_FLAGS_SHA3_MODE;
803
804         rctx->hdev = hdev;
805
806         state->flags = HASH_FLAGS_CPU;
807
808         if (sha3_mode)
809                 state->flags |= HASH_FLAGS_SHA3_MODE;
810
811         rctx->digcnt = crypto_ahash_digestsize(tfm);
812         switch (rctx->digcnt) {
813         case MD5_DIGEST_SIZE:
814                 state->flags |= HASH_MD5 << HASH_FLAGS_ALGO_SHIFT;
815                 break;
816         case SHA1_DIGEST_SIZE:
817                 if (hdev->pdata->ux500)
818                         state->flags |= HASH_SHA1_UX500 << HASH_FLAGS_ALGO_SHIFT;
819                 else
820                         state->flags |= HASH_SHA1 << HASH_FLAGS_ALGO_SHIFT;
821                 break;
822         case SHA224_DIGEST_SIZE:
823                 if (sha3_mode)
824                         state->flags |= HASH_SHA3_224 << HASH_FLAGS_ALGO_SHIFT;
825                 else
826                         state->flags |= HASH_SHA224 << HASH_FLAGS_ALGO_SHIFT;
827                 break;
828         case SHA256_DIGEST_SIZE:
829                 if (sha3_mode) {
830                         state->flags |= HASH_SHA3_256 << HASH_FLAGS_ALGO_SHIFT;
831                 } else {
832                         if (hdev->pdata->ux500)
833                                 state->flags |= HASH_SHA256_UX500 << HASH_FLAGS_ALGO_SHIFT;
834                         else
835                                 state->flags |= HASH_SHA256 << HASH_FLAGS_ALGO_SHIFT;
836                 }
837                 break;
838         case SHA384_DIGEST_SIZE:
839                 if (sha3_mode)
840                         state->flags |= HASH_SHA3_384 << HASH_FLAGS_ALGO_SHIFT;
841                 else
842                         state->flags |= HASH_SHA384 << HASH_FLAGS_ALGO_SHIFT;
843                 break;
844         case SHA512_DIGEST_SIZE:
845                 if (sha3_mode)
846                         state->flags |= HASH_SHA3_512 << HASH_FLAGS_ALGO_SHIFT;
847                 else
848                         state->flags |= HASH_SHA512 << HASH_FLAGS_ALGO_SHIFT;
849                 break;
850         default:
851                 return -EINVAL;
852         }
853
854         rctx->state.bufcnt = 0;
855         rctx->state.blocklen = crypto_ahash_blocksize(tfm) + sizeof(u32);
856         if (rctx->state.blocklen > HASH_BUFLEN) {
 857                 dev_err(hdev->dev, "Error, block too large\n");
858                 return -EINVAL;
859         }
860         rctx->total = 0;
861         rctx->offset = 0;
862         rctx->data_type = HASH_DATA_8_BITS;
863
864         if (ctx->flags & HASH_FLAGS_HMAC)
865                 state->flags |= HASH_FLAGS_HMAC;
866
867         dev_dbg(hdev->dev, "%s Flags %x\n", __func__, state->flags);
868
869         return 0;
870 }
871
872 static int stm32_hash_update_req(struct stm32_hash_dev *hdev)
873 {
874         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
875         struct stm32_hash_state *state = &rctx->state;
876
877         if (!(state->flags & HASH_FLAGS_CPU))
878                 return stm32_hash_dma_send(hdev);
879
880         return stm32_hash_update_cpu(hdev);
881 }
882
883 static int stm32_hash_final_req(struct stm32_hash_dev *hdev)
884 {
885         struct ahash_request *req = hdev->req;
886         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
887         struct stm32_hash_state *state = &rctx->state;
888         int buflen = state->bufcnt;
889
890         if (state->flags & HASH_FLAGS_FINUP)
891                 return stm32_hash_update_req(hdev);
892
893         state->bufcnt = 0;
894
895         return stm32_hash_xmit_cpu(hdev, state->buffer, buflen, 1);
896 }
897
898 static void stm32_hash_emptymsg_fallback(struct ahash_request *req)
899 {
900         struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
901         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(ahash);
902         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
903         struct stm32_hash_dev *hdev = rctx->hdev;
904         int ret;
905
906         dev_dbg(hdev->dev, "use fallback message size 0 key size %d\n",
907                 ctx->keylen);
908
909         if (!ctx->xtfm) {
910                 dev_err(hdev->dev, "no fallback engine\n");
911                 return;
912         }
913
914         if (ctx->keylen) {
915                 ret = crypto_shash_setkey(ctx->xtfm, ctx->key, ctx->keylen);
916                 if (ret) {
917                         dev_err(hdev->dev, "failed to set key ret=%d\n", ret);
918                         return;
919                 }
920         }
921
922         ret = crypto_shash_tfm_digest(ctx->xtfm, NULL, 0, rctx->digest);
923         if (ret)
924                 dev_err(hdev->dev, "shash digest error\n");
925 }
926
927 static void stm32_hash_copy_hash(struct ahash_request *req)
928 {
929         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
930         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
931         struct stm32_hash_state *state = &rctx->state;
932         struct stm32_hash_dev *hdev = rctx->hdev;
933         __be32 *hash = (void *)rctx->digest;
934         unsigned int i, hashsize;
935
936         if (hdev->pdata->broken_emptymsg && (state->flags & HASH_FLAGS_EMPTY))
937                 return stm32_hash_emptymsg_fallback(req);
938
939         hashsize = crypto_ahash_digestsize(tfm);
940
941         for (i = 0; i < hashsize / sizeof(u32); i++) {
942                 if (hdev->pdata->ux500)
943                         hash[i] = cpu_to_be32(stm32_hash_read(hdev,
944                                               HASH_UX500_HREG(i)));
945                 else
946                         hash[i] = cpu_to_be32(stm32_hash_read(hdev,
947                                               HASH_HREG(i)));
948         }
949 }
950
951 static int stm32_hash_finish(struct ahash_request *req)
952 {
953         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
954         u32 reg;
955
956         reg = stm32_hash_read(rctx->hdev, HASH_SR);
957         reg &= ~HASH_SR_OUTPUT_READY;
958         stm32_hash_write(rctx->hdev, HASH_SR, reg);
959
960         if (!req->result)
961                 return -EINVAL;
962
963         memcpy(req->result, rctx->digest, rctx->digcnt);
964
965         return 0;
966 }
967
968 static void stm32_hash_finish_req(struct ahash_request *req, int err)
969 {
970         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
971         struct stm32_hash_dev *hdev = rctx->hdev;
972
973         if (!err && (HASH_FLAGS_FINAL & hdev->flags)) {
974                 stm32_hash_copy_hash(req);
975                 err = stm32_hash_finish(req);
976         }
977
978         pm_runtime_mark_last_busy(hdev->dev);
979         pm_runtime_put_autosuspend(hdev->dev);
980
981         crypto_finalize_hash_request(hdev->engine, req, err);
982 }
983
984 static int stm32_hash_handle_queue(struct stm32_hash_dev *hdev,
985                                    struct ahash_request *req)
986 {
987         return crypto_transfer_hash_request_to_engine(hdev->engine, req);
988 }
989
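/*
 * crypto_engine_op .do_one_request callback: restore a previously saved
 * hardware context if the request was already started, run the update or
 * final step, and poll for completion when no interrupt is available.
 */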
990 static int stm32_hash_one_request(struct crypto_engine *engine, void *areq)
991 {
992         struct ahash_request *req = container_of(areq, struct ahash_request,
993                                                  base);
994         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
995         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
996         struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
997         struct stm32_hash_state *state = &rctx->state;
998         int swap_reg;
999         int err = 0;
1000
1001         if (!hdev)
1002                 return -ENODEV;
1003
1004         dev_dbg(hdev->dev, "processing new req, op: %lu, nbytes %d\n",
1005                 rctx->op, req->nbytes);
1006
1007         pm_runtime_get_sync(hdev->dev);
1008
1009         hdev->req = req;
1010         hdev->flags = 0;
1011         swap_reg = hash_swap_reg(rctx);
1012
1013         if (state->flags & HASH_FLAGS_INIT) {
1014                 u32 *preg = rctx->state.hw_context;
1015                 u32 reg;
1016                 int i;
1017
1018                 if (!hdev->pdata->ux500)
1019                         stm32_hash_write(hdev, HASH_IMR, *preg++);
1020                 stm32_hash_write(hdev, HASH_STR, *preg++);
1021                 stm32_hash_write(hdev, HASH_CR, *preg);
1022                 reg = *preg++ | HASH_CR_INIT;
1023                 stm32_hash_write(hdev, HASH_CR, reg);
1024
1025                 for (i = 0; i < swap_reg; i++)
1026                         stm32_hash_write(hdev, HASH_CSR(i), *preg++);
1027
1028                 hdev->flags |= HASH_FLAGS_INIT;
1029
1030                 if (state->flags & HASH_FLAGS_HMAC)
1031                         hdev->flags |= HASH_FLAGS_HMAC |
1032                                        HASH_FLAGS_HMAC_KEY;
1033         }
1034
1035         if (rctx->op == HASH_OP_UPDATE)
1036                 err = stm32_hash_update_req(hdev);
1037         else if (rctx->op == HASH_OP_FINAL)
1038                 err = stm32_hash_final_req(hdev);
1039
1040         /* If we have an IRQ, wait for that, else poll for completion */
1041         if (err == -EINPROGRESS && hdev->polled) {
1042                 if (stm32_hash_wait_busy(hdev))
1043                         err = -ETIMEDOUT;
1044                 else {
1045                         hdev->flags |= HASH_FLAGS_OUTPUT_READY;
1046                         err = 0;
1047                 }
1048         }
1049
 1050         /* done task will not finish it, so do it here */
 1051         if (err != -EINPROGRESS)
 1052                 stm32_hash_finish_req(req, err);
1053
1054         return 0;
1055 }
1056
1057 static int stm32_hash_enqueue(struct ahash_request *req, unsigned int op)
1058 {
1059         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1060         struct stm32_hash_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1061         struct stm32_hash_dev *hdev = ctx->hdev;
1062
1063         rctx->op = op;
1064
1065         return stm32_hash_handle_queue(hdev, req);
1066 }
1067
1068 static int stm32_hash_update(struct ahash_request *req)
1069 {
1070         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1071         struct stm32_hash_state *state = &rctx->state;
1072
1073         if (!req->nbytes || !(state->flags & HASH_FLAGS_CPU))
1074                 return 0;
1075
1076         rctx->total = req->nbytes;
1077         rctx->sg = req->src;
1078         rctx->offset = 0;
1079
1080         if ((state->bufcnt + rctx->total < state->blocklen)) {
1081                 stm32_hash_append_sg(rctx);
1082                 return 0;
1083         }
1084
1085         return stm32_hash_enqueue(req, HASH_OP_UPDATE);
1086 }
1087
1088 static int stm32_hash_final(struct ahash_request *req)
1089 {
1090         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1091         struct stm32_hash_state *state = &rctx->state;
1092
1093         state->flags |= HASH_FLAGS_FINAL;
1094
1095         return stm32_hash_enqueue(req, HASH_OP_FINAL);
1096 }
1097
1098 static int stm32_hash_finup(struct ahash_request *req)
1099 {
1100         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1101         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
1102         struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
1103         struct stm32_hash_state *state = &rctx->state;
1104
1105         if (!req->nbytes)
1106                 goto out;
1107
1108         state->flags |= HASH_FLAGS_FINUP;
1109         rctx->total = req->nbytes;
1110         rctx->sg = req->src;
1111         rctx->offset = 0;
1112
1113         if (hdev->dma_lch && stm32_hash_dma_aligned_data(req))
1114                 state->flags &= ~HASH_FLAGS_CPU;
1115
1116 out:
1117         return stm32_hash_final(req);
1118 }
1119
1120 static int stm32_hash_digest(struct ahash_request *req)
1121 {
1122         return stm32_hash_init(req) ?: stm32_hash_finup(req);
1123 }
1124
1125 static int stm32_hash_export(struct ahash_request *req, void *out)
1126 {
1127         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1128
1129         memcpy(out, &rctx->state, sizeof(rctx->state));
1130
1131         return 0;
1132 }
1133
1134 static int stm32_hash_import(struct ahash_request *req, const void *in)
1135 {
1136         struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1137
1138         stm32_hash_init(req);
1139         memcpy(&rctx->state, in, sizeof(rctx->state));
1140
1141         return 0;
1142 }
1143
1144 static int stm32_hash_setkey(struct crypto_ahash *tfm,
1145                              const u8 *key, unsigned int keylen)
1146 {
1147         struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
1148
1149         if (keylen <= HASH_MAX_KEY_SIZE) {
1150                 memcpy(ctx->key, key, keylen);
1151                 ctx->keylen = keylen;
1152         } else {
1153                 return -ENOMEM;
1154         }
1155
1156         return 0;
1157 }
1158
1159 static int stm32_hash_init_fallback(struct crypto_tfm *tfm)
1160 {
1161         struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm);
1162         struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
1163         const char *name = crypto_tfm_alg_name(tfm);
1164         struct crypto_shash *xtfm;
1165
1166         /* The fallback is only needed on Ux500 */
1167         if (!hdev->pdata->ux500)
1168                 return 0;
1169
1170         xtfm = crypto_alloc_shash(name, 0, CRYPTO_ALG_NEED_FALLBACK);
1171         if (IS_ERR(xtfm)) {
1172                 dev_err(hdev->dev, "failed to allocate %s fallback\n",
1173                         name);
1174                 return PTR_ERR(xtfm);
1175         }
1176         dev_info(hdev->dev, "allocated %s fallback\n", name);
1177         ctx->xtfm = xtfm;
1178
1179         return 0;
1180 }
1181
1182 static int stm32_hash_cra_init_algs(struct crypto_tfm *tfm, u32 algs_flags)
1183 {
1184         struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm);
1185
1186         crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
1187                                  sizeof(struct stm32_hash_request_ctx));
1188
1189         ctx->keylen = 0;
1190
1191         if (algs_flags)
1192                 ctx->flags |= algs_flags;
1193
1194         return stm32_hash_init_fallback(tfm);
1195 }
1196
1197 static int stm32_hash_cra_init(struct crypto_tfm *tfm)
1198 {
1199         return stm32_hash_cra_init_algs(tfm, 0);
1200 }
1201
1202 static int stm32_hash_cra_hmac_init(struct crypto_tfm *tfm)
1203 {
1204         return stm32_hash_cra_init_algs(tfm, HASH_FLAGS_HMAC);
1205 }
1206
1207 static int stm32_hash_cra_sha3_init(struct crypto_tfm *tfm)
1208 {
1209         return stm32_hash_cra_init_algs(tfm, HASH_FLAGS_SHA3_MODE);
1210 }
1211
1212 static int stm32_hash_cra_sha3_hmac_init(struct crypto_tfm *tfm)
1213 {
1214         return stm32_hash_cra_init_algs(tfm, HASH_FLAGS_SHA3_MODE |
1215                                         HASH_FLAGS_HMAC);
1216 }
1217
1218
1219 static void stm32_hash_cra_exit(struct crypto_tfm *tfm)
1220 {
1221         struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm);
1222
1223         if (ctx->xtfm)
1224                 crypto_free_shash(ctx->xtfm);
1225 }
1226
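/*
 * Threaded IRQ handler: complete the current request once the digest is
 * ready (CPU mode) or the DMA transfer has finished.
 */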
1227 static irqreturn_t stm32_hash_irq_thread(int irq, void *dev_id)
1228 {
1229         struct stm32_hash_dev *hdev = dev_id;
1230
1231         if (HASH_FLAGS_CPU & hdev->flags) {
1232                 if (HASH_FLAGS_OUTPUT_READY & hdev->flags) {
1233                         hdev->flags &= ~HASH_FLAGS_OUTPUT_READY;
1234                         goto finish;
1235                 }
1236         } else if (HASH_FLAGS_DMA_ACTIVE & hdev->flags) {
1237                 hdev->flags &= ~HASH_FLAGS_DMA_ACTIVE;
 1238                 goto finish;
1239         }
1240
1241         return IRQ_HANDLED;
1242
1243 finish:
1244         /* Finish current request */
1245         stm32_hash_finish_req(hdev->req, 0);
1246
1247         return IRQ_HANDLED;
1248 }
1249
1250 static irqreturn_t stm32_hash_irq_handler(int irq, void *dev_id)
1251 {
1252         struct stm32_hash_dev *hdev = dev_id;
1253         u32 reg;
1254
1255         reg = stm32_hash_read(hdev, HASH_SR);
1256         if (reg & HASH_SR_OUTPUT_READY) {
1257                 hdev->flags |= HASH_FLAGS_OUTPUT_READY;
 1258                 /* Disable interrupt */
1259                 stm32_hash_write(hdev, HASH_IMR, 0);
1260                 return IRQ_WAKE_THREAD;
1261         }
1262
1263         return IRQ_NONE;
1264 }
1265
1266 static struct ahash_engine_alg algs_md5[] = {
1267         {
1268                 .base.init = stm32_hash_init,
1269                 .base.update = stm32_hash_update,
1270                 .base.final = stm32_hash_final,
1271                 .base.finup = stm32_hash_finup,
1272                 .base.digest = stm32_hash_digest,
1273                 .base.export = stm32_hash_export,
1274                 .base.import = stm32_hash_import,
1275                 .base.halg = {
1276                         .digestsize = MD5_DIGEST_SIZE,
1277                         .statesize = sizeof(struct stm32_hash_state),
1278                         .base = {
1279                                 .cra_name = "md5",
1280                                 .cra_driver_name = "stm32-md5",
1281                                 .cra_priority = 200,
1282                                 .cra_flags = CRYPTO_ALG_ASYNC |
1283                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1284                                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1285                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1286                                 .cra_alignmask = 3,
1287                                 .cra_init = stm32_hash_cra_init,
1288                                 .cra_exit = stm32_hash_cra_exit,
1289                                 .cra_module = THIS_MODULE,
1290                         }
1291                 },
1292                 .op = {
1293                         .do_one_request = stm32_hash_one_request,
1294                 },
1295         },
1296         {
1297                 .base.init = stm32_hash_init,
1298                 .base.update = stm32_hash_update,
1299                 .base.final = stm32_hash_final,
1300                 .base.finup = stm32_hash_finup,
1301                 .base.digest = stm32_hash_digest,
1302                 .base.export = stm32_hash_export,
1303                 .base.import = stm32_hash_import,
1304                 .base.setkey = stm32_hash_setkey,
1305                 .base.halg = {
1306                         .digestsize = MD5_DIGEST_SIZE,
1307                         .statesize = sizeof(struct stm32_hash_state),
1308                         .base = {
1309                                 .cra_name = "hmac(md5)",
1310                                 .cra_driver_name = "stm32-hmac-md5",
1311                                 .cra_priority = 200,
1312                                 .cra_flags = CRYPTO_ALG_ASYNC |
1313                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1314                                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1315                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1316                                 .cra_alignmask = 3,
1317                                 .cra_init = stm32_hash_cra_hmac_init,
1318                                 .cra_exit = stm32_hash_cra_exit,
1319                                 .cra_module = THIS_MODULE,
1320                         }
1321                 },
1322                 .op = {
1323                         .do_one_request = stm32_hash_one_request,
1324                 },
1325         }
1326 };
1327
1328 static struct ahash_engine_alg algs_sha1[] = {
1329         {
1330                 .base.init = stm32_hash_init,
1331                 .base.update = stm32_hash_update,
1332                 .base.final = stm32_hash_final,
1333                 .base.finup = stm32_hash_finup,
1334                 .base.digest = stm32_hash_digest,
1335                 .base.export = stm32_hash_export,
1336                 .base.import = stm32_hash_import,
1337                 .base.halg = {
1338                         .digestsize = SHA1_DIGEST_SIZE,
1339                         .statesize = sizeof(struct stm32_hash_state),
1340                         .base = {
1341                                 .cra_name = "sha1",
1342                                 .cra_driver_name = "stm32-sha1",
1343                                 .cra_priority = 200,
1344                                 .cra_flags = CRYPTO_ALG_ASYNC |
1345                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1346                                 .cra_blocksize = SHA1_BLOCK_SIZE,
1347                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1348                                 .cra_alignmask = 3,
1349                                 .cra_init = stm32_hash_cra_init,
1350                                 .cra_exit = stm32_hash_cra_exit,
1351                                 .cra_module = THIS_MODULE,
1352                         }
1353                 },
1354                 .op = {
1355                         .do_one_request = stm32_hash_one_request,
1356                 },
1357         },
1358         {
1359                 .base.init = stm32_hash_init,
1360                 .base.update = stm32_hash_update,
1361                 .base.final = stm32_hash_final,
1362                 .base.finup = stm32_hash_finup,
1363                 .base.digest = stm32_hash_digest,
1364                 .base.export = stm32_hash_export,
1365                 .base.import = stm32_hash_import,
1366                 .base.setkey = stm32_hash_setkey,
1367                 .base.halg = {
1368                         .digestsize = SHA1_DIGEST_SIZE,
1369                         .statesize = sizeof(struct stm32_hash_state),
1370                         .base = {
1371                                 .cra_name = "hmac(sha1)",
1372                                 .cra_driver_name = "stm32-hmac-sha1",
1373                                 .cra_priority = 200,
1374                                 .cra_flags = CRYPTO_ALG_ASYNC |
1375                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1376                                 .cra_blocksize = SHA1_BLOCK_SIZE,
1377                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1378                                 .cra_alignmask = 3,
1379                                 .cra_init = stm32_hash_cra_hmac_init,
1380                                 .cra_exit = stm32_hash_cra_exit,
1381                                 .cra_module = THIS_MODULE,
1382                         }
1383                 },
1384                 .op = {
1385                         .do_one_request = stm32_hash_one_request,
1386                 },
1387         },
1388 };
1389
1390 static struct ahash_engine_alg algs_sha224[] = {
1391         {
1392                 .base.init = stm32_hash_init,
1393                 .base.update = stm32_hash_update,
1394                 .base.final = stm32_hash_final,
1395                 .base.finup = stm32_hash_finup,
1396                 .base.digest = stm32_hash_digest,
1397                 .base.export = stm32_hash_export,
1398                 .base.import = stm32_hash_import,
1399                 .base.halg = {
1400                         .digestsize = SHA224_DIGEST_SIZE,
1401                         .statesize = sizeof(struct stm32_hash_state),
1402                         .base = {
1403                                 .cra_name = "sha224",
1404                                 .cra_driver_name = "stm32-sha224",
1405                                 .cra_priority = 200,
1406                                 .cra_flags = CRYPTO_ALG_ASYNC |
1407                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1408                                 .cra_blocksize = SHA224_BLOCK_SIZE,
1409                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1410                                 .cra_alignmask = 3,
1411                                 .cra_init = stm32_hash_cra_init,
1412                                 .cra_exit = stm32_hash_cra_exit,
1413                                 .cra_module = THIS_MODULE,
1414                         }
1415                 },
1416                 .op = {
1417                         .do_one_request = stm32_hash_one_request,
1418                 },
1419         },
1420         {
1421                 .base.init = stm32_hash_init,
1422                 .base.update = stm32_hash_update,
1423                 .base.final = stm32_hash_final,
1424                 .base.finup = stm32_hash_finup,
1425                 .base.digest = stm32_hash_digest,
1426                 .base.setkey = stm32_hash_setkey,
1427                 .base.export = stm32_hash_export,
1428                 .base.import = stm32_hash_import,
1429                 .base.halg = {
1430                         .digestsize = SHA224_DIGEST_SIZE,
1431                         .statesize = sizeof(struct stm32_hash_state),
1432                         .base = {
1433                                 .cra_name = "hmac(sha224)",
1434                                 .cra_driver_name = "stm32-hmac-sha224",
1435                                 .cra_priority = 200,
1436                                 .cra_flags = CRYPTO_ALG_ASYNC |
1437                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1438                                 .cra_blocksize = SHA224_BLOCK_SIZE,
1439                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1440                                 .cra_alignmask = 3,
1441                                 .cra_init = stm32_hash_cra_hmac_init,
1442                                 .cra_exit = stm32_hash_cra_exit,
1443                                 .cra_module = THIS_MODULE,
1444                         }
1445                 },
1446                 .op = {
1447                         .do_one_request = stm32_hash_one_request,
1448                 },
1449         },
1450 };
1451
1452 static struct ahash_engine_alg algs_sha256[] = {
1453         {
1454                 .base.init = stm32_hash_init,
1455                 .base.update = stm32_hash_update,
1456                 .base.final = stm32_hash_final,
1457                 .base.finup = stm32_hash_finup,
1458                 .base.digest = stm32_hash_digest,
1459                 .base.export = stm32_hash_export,
1460                 .base.import = stm32_hash_import,
1461                 .base.halg = {
1462                         .digestsize = SHA256_DIGEST_SIZE,
1463                         .statesize = sizeof(struct stm32_hash_state),
1464                         .base = {
1465                                 .cra_name = "sha256",
1466                                 .cra_driver_name = "stm32-sha256",
1467                                 .cra_priority = 200,
1468                                 .cra_flags = CRYPTO_ALG_ASYNC |
1469                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1470                                 .cra_blocksize = SHA256_BLOCK_SIZE,
1471                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1472                                 .cra_alignmask = 3,
1473                                 .cra_init = stm32_hash_cra_init,
1474                                 .cra_exit = stm32_hash_cra_exit,
1475                                 .cra_module = THIS_MODULE,
1476                         }
1477                 },
1478                 .op = {
1479                         .do_one_request = stm32_hash_one_request,
1480                 },
1481         },
1482         {
1483                 .base.init = stm32_hash_init,
1484                 .base.update = stm32_hash_update,
1485                 .base.final = stm32_hash_final,
1486                 .base.finup = stm32_hash_finup,
1487                 .base.digest = stm32_hash_digest,
1488                 .base.export = stm32_hash_export,
1489                 .base.import = stm32_hash_import,
1490                 .base.setkey = stm32_hash_setkey,
1491                 .base.halg = {
1492                         .digestsize = SHA256_DIGEST_SIZE,
1493                         .statesize = sizeof(struct stm32_hash_state),
1494                         .base = {
1495                                 .cra_name = "hmac(sha256)",
1496                                 .cra_driver_name = "stm32-hmac-sha256",
1497                                 .cra_priority = 200,
1498                                 .cra_flags = CRYPTO_ALG_ASYNC |
1499                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1500                                 .cra_blocksize = SHA256_BLOCK_SIZE,
1501                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1502                                 .cra_alignmask = 3,
1503                                 .cra_init = stm32_hash_cra_hmac_init,
1504                                 .cra_exit = stm32_hash_cra_exit,
1505                                 .cra_module = THIS_MODULE,
1506                         }
1507                 },
1508                 .op = {
1509                         .do_one_request = stm32_hash_one_request,
1510                 },
1511         },
1512 };
1513
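/* SHA-384/SHA-512 and their HMAC variants, only wired up on stm32mp13. */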
1514 static struct ahash_engine_alg algs_sha384_sha512[] = {
1515         {
1516                 .base.init = stm32_hash_init,
1517                 .base.update = stm32_hash_update,
1518                 .base.final = stm32_hash_final,
1519                 .base.finup = stm32_hash_finup,
1520                 .base.digest = stm32_hash_digest,
1521                 .base.export = stm32_hash_export,
1522                 .base.import = stm32_hash_import,
1523                 .base.halg = {
1524                         .digestsize = SHA384_DIGEST_SIZE,
1525                         .statesize = sizeof(struct stm32_hash_state),
1526                         .base = {
1527                                 .cra_name = "sha384",
1528                                 .cra_driver_name = "stm32-sha384",
1529                                 .cra_priority = 200,
1530                                 .cra_flags = CRYPTO_ALG_ASYNC |
1531                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1532                                 .cra_blocksize = SHA384_BLOCK_SIZE,
1533                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1534                                 .cra_alignmask = 3,
1535                                 .cra_init = stm32_hash_cra_init,
1536                                 .cra_exit = stm32_hash_cra_exit,
1537                                 .cra_module = THIS_MODULE,
1538                         }
1539                 },
1540                 .op = {
1541                         .do_one_request = stm32_hash_one_request,
1542                 },
1543         },
1544         {
1545                 .base.init = stm32_hash_init,
1546                 .base.update = stm32_hash_update,
1547                 .base.final = stm32_hash_final,
1548                 .base.finup = stm32_hash_finup,
1549                 .base.digest = stm32_hash_digest,
1550                 .base.setkey = stm32_hash_setkey,
1551                 .base.export = stm32_hash_export,
1552                 .base.import = stm32_hash_import,
1553                 .base.halg = {
1554                         .digestsize = SHA384_DIGEST_SIZE,
1555                         .statesize = sizeof(struct stm32_hash_state),
1556                         .base = {
1557                                 .cra_name = "hmac(sha384)",
1558                                 .cra_driver_name = "stm32-hmac-sha384",
1559                                 .cra_priority = 200,
1560                                 .cra_flags = CRYPTO_ALG_ASYNC |
1561                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1562                                 .cra_blocksize = SHA384_BLOCK_SIZE,
1563                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1564                                 .cra_alignmask = 3,
1565                                 .cra_init = stm32_hash_cra_hmac_init,
1566                                 .cra_exit = stm32_hash_cra_exit,
1567                                 .cra_module = THIS_MODULE,
1568                         }
1569                 },
1570                 .op = {
1571                         .do_one_request = stm32_hash_one_request,
1572                 },
1573         },
1574         {
1575                 .base.init = stm32_hash_init,
1576                 .base.update = stm32_hash_update,
1577                 .base.final = stm32_hash_final,
1578                 .base.finup = stm32_hash_finup,
1579                 .base.digest = stm32_hash_digest,
1580                 .base.export = stm32_hash_export,
1581                 .base.import = stm32_hash_import,
1582                 .base.halg = {
1583                         .digestsize = SHA512_DIGEST_SIZE,
1584                         .statesize = sizeof(struct stm32_hash_state),
1585                         .base = {
1586                                 .cra_name = "sha512",
1587                                 .cra_driver_name = "stm32-sha512",
1588                                 .cra_priority = 200,
1589                                 .cra_flags = CRYPTO_ALG_ASYNC |
1590                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1591                                 .cra_blocksize = SHA512_BLOCK_SIZE,
1592                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1593                                 .cra_alignmask = 3,
1594                                 .cra_init = stm32_hash_cra_init,
1595                                 .cra_exit = stm32_hash_cra_exit,
1596                                 .cra_module = THIS_MODULE,
1597                         }
1598                 },
1599                 .op = {
1600                         .do_one_request = stm32_hash_one_request,
1601                 },
1602         },
1603         {
1604                 .base.init = stm32_hash_init,
1605                 .base.update = stm32_hash_update,
1606                 .base.final = stm32_hash_final,
1607                 .base.finup = stm32_hash_finup,
1608                 .base.digest = stm32_hash_digest,
1609                 .base.export = stm32_hash_export,
1610                 .base.import = stm32_hash_import,
1611                 .base.setkey = stm32_hash_setkey,
1612                 .base.halg = {
1613                         .digestsize = SHA512_DIGEST_SIZE,
1614                         .statesize = sizeof(struct stm32_hash_state),
1615                         .base = {
1616                                 .cra_name = "hmac(sha512)",
1617                                 .cra_driver_name = "stm32-hmac-sha512",
1618                                 .cra_priority = 200,
1619                                 .cra_flags = CRYPTO_ALG_ASYNC |
1620                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1621                                 .cra_blocksize = SHA512_BLOCK_SIZE,
1622                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1623                                 .cra_alignmask = 3,
1624                                 .cra_init = stm32_hash_cra_hmac_init,
1625                                 .cra_exit = stm32_hash_cra_exit,
1626                                 .cra_module = THIS_MODULE,
1627                         }
1628                 },
1629                 .op = {
1630                         .do_one_request = stm32_hash_one_request,
1631                 },
1632         },
1633 };
1634
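/* SHA3-224/256/384/512 and their HMAC variants, likewise stm32mp13 only. */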
1635 static struct ahash_engine_alg algs_sha3[] = {
1636         {
1637                 .base.init = stm32_hash_init,
1638                 .base.update = stm32_hash_update,
1639                 .base.final = stm32_hash_final,
1640                 .base.finup = stm32_hash_finup,
1641                 .base.digest = stm32_hash_digest,
1642                 .base.export = stm32_hash_export,
1643                 .base.import = stm32_hash_import,
1644                 .base.halg = {
1645                         .digestsize = SHA3_224_DIGEST_SIZE,
1646                         .statesize = sizeof(struct stm32_hash_state),
1647                         .base = {
1648                                 .cra_name = "sha3-224",
1649                                 .cra_driver_name = "stm32-sha3-224",
1650                                 .cra_priority = 200,
1651                                 .cra_flags = CRYPTO_ALG_ASYNC |
1652                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1653                                 .cra_blocksize = SHA3_224_BLOCK_SIZE,
1654                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1655                                 .cra_alignmask = 3,
1656                                 .cra_init = stm32_hash_cra_sha3_init,
1657                                 .cra_exit = stm32_hash_cra_exit,
1658                                 .cra_module = THIS_MODULE,
1659                         }
1660                 },
1661                 .op = {
1662                         .do_one_request = stm32_hash_one_request,
1663                 },
1664         },
1665         {
1666                 .base.init = stm32_hash_init,
1667                 .base.update = stm32_hash_update,
1668                 .base.final = stm32_hash_final,
1669                 .base.finup = stm32_hash_finup,
1670                 .base.digest = stm32_hash_digest,
1671                 .base.export = stm32_hash_export,
1672                 .base.import = stm32_hash_import,
1673                 .base.setkey = stm32_hash_setkey,
1674                 .base.halg = {
1675                         .digestsize = SHA3_224_DIGEST_SIZE,
1676                         .statesize = sizeof(struct stm32_hash_state),
1677                         .base = {
1678                                 .cra_name = "hmac(sha3-224)",
1679                                 .cra_driver_name = "stm32-hmac-sha3-224",
1680                                 .cra_priority = 200,
1681                                 .cra_flags = CRYPTO_ALG_ASYNC |
1682                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1683                                 .cra_blocksize = SHA3_224_BLOCK_SIZE,
1684                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1685                                 .cra_alignmask = 3,
1686                                 .cra_init = stm32_hash_cra_sha3_hmac_init,
1687                                 .cra_exit = stm32_hash_cra_exit,
1688                                 .cra_module = THIS_MODULE,
1689                         }
1690                 },
1691                 .op = {
1692                         .do_one_request = stm32_hash_one_request,
1693                 },
1694         },
1695         {
1696                 .base.init = stm32_hash_init,
1697                 .base.update = stm32_hash_update,
1698                 .base.final = stm32_hash_final,
1699                 .base.finup = stm32_hash_finup,
1700                 .base.digest = stm32_hash_digest,
1701                 .base.export = stm32_hash_export,
1702                 .base.import = stm32_hash_import,
1703                 .base.halg = {
1704                         .digestsize = SHA3_256_DIGEST_SIZE,
1705                         .statesize = sizeof(struct stm32_hash_state),
1706                         .base = {
1707                                 .cra_name = "sha3-256",
1708                                 .cra_driver_name = "stm32-sha3-256",
1709                                 .cra_priority = 200,
1710                                 .cra_flags = CRYPTO_ALG_ASYNC |
1711                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1712                                 .cra_blocksize = SHA3_256_BLOCK_SIZE,
1713                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1714                                 .cra_alignmask = 3,
1715                                 .cra_init = stm32_hash_cra_sha3_init,
1716                                 .cra_exit = stm32_hash_cra_exit,
1717                                 .cra_module = THIS_MODULE,
1718                         }
1719                 },
1720                 .op = {
1721                         .do_one_request = stm32_hash_one_request,
1722                 },
1723         },
1724         {
1725                 .base.init = stm32_hash_init,
1726                 .base.update = stm32_hash_update,
1727                 .base.final = stm32_hash_final,
1728                 .base.finup = stm32_hash_finup,
1729                 .base.digest = stm32_hash_digest,
1730                 .base.export = stm32_hash_export,
1731                 .base.import = stm32_hash_import,
1732                 .base.setkey = stm32_hash_setkey,
1733                 .base.halg = {
1734                         .digestsize = SHA3_256_DIGEST_SIZE,
1735                         .statesize = sizeof(struct stm32_hash_state),
1736                         .base = {
1737                                 .cra_name = "hmac(sha3-256)",
1738                                 .cra_driver_name = "stm32-hmac-sha3-256",
1739                                 .cra_priority = 200,
1740                                 .cra_flags = CRYPTO_ALG_ASYNC |
1741                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1742                                 .cra_blocksize = SHA3_256_BLOCK_SIZE,
1743                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1744                                 .cra_alignmask = 3,
1745                                 .cra_init = stm32_hash_cra_sha3_hmac_init,
1746                                 .cra_exit = stm32_hash_cra_exit,
1747                                 .cra_module = THIS_MODULE,
1748                         }
1749                 },
1750                 .op = {
1751                         .do_one_request = stm32_hash_one_request,
1752                 },
1753         },
1754         {
1755                 .base.init = stm32_hash_init,
1756                 .base.update = stm32_hash_update,
1757                 .base.final = stm32_hash_final,
1758                 .base.finup = stm32_hash_finup,
1759                 .base.digest = stm32_hash_digest,
1760                 .base.export = stm32_hash_export,
1761                 .base.import = stm32_hash_import,
1762                 .base.halg = {
1763                         .digestsize = SHA3_384_DIGEST_SIZE,
1764                         .statesize = sizeof(struct stm32_hash_state),
1765                         .base = {
1766                                 .cra_name = "sha3-384",
1767                                 .cra_driver_name = "stm32-sha3-384",
1768                                 .cra_priority = 200,
1769                                 .cra_flags = CRYPTO_ALG_ASYNC |
1770                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1771                                 .cra_blocksize = SHA3_384_BLOCK_SIZE,
1772                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1773                                 .cra_alignmask = 3,
1774                                 .cra_init = stm32_hash_cra_sha3_init,
1775                                 .cra_exit = stm32_hash_cra_exit,
1776                                 .cra_module = THIS_MODULE,
1777                         }
1778                 },
1779                 .op = {
1780                         .do_one_request = stm32_hash_one_request,
1781                 },
1782         },
1783         {
1784                 .base.init = stm32_hash_init,
1785                 .base.update = stm32_hash_update,
1786                 .base.final = stm32_hash_final,
1787                 .base.finup = stm32_hash_finup,
1788                 .base.digest = stm32_hash_digest,
1789                 .base.export = stm32_hash_export,
1790                 .base.import = stm32_hash_import,
1791                 .base.setkey = stm32_hash_setkey,
1792                 .base.halg = {
1793                         .digestsize = SHA3_384_DIGEST_SIZE,
1794                         .statesize = sizeof(struct stm32_hash_state),
1795                         .base = {
1796                                 .cra_name = "hmac(sha3-384)",
1797                                 .cra_driver_name = "stm32-hmac-sha3-384",
1798                                 .cra_priority = 200,
1799                                 .cra_flags = CRYPTO_ALG_ASYNC |
1800                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1801                                 .cra_blocksize = SHA3_384_BLOCK_SIZE,
1802                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1803                                 .cra_alignmask = 3,
1804                                 .cra_init = stm32_hash_cra_sha3_hmac_init,
1805                                 .cra_exit = stm32_hash_cra_exit,
1806                                 .cra_module = THIS_MODULE,
1807                         }
1808                 },
1809                 .op = {
1810                         .do_one_request = stm32_hash_one_request,
1811                 },
1812         },
1813         {
1814                 .base.init = stm32_hash_init,
1815                 .base.update = stm32_hash_update,
1816                 .base.final = stm32_hash_final,
1817                 .base.finup = stm32_hash_finup,
1818                 .base.digest = stm32_hash_digest,
1819                 .base.export = stm32_hash_export,
1820                 .base.import = stm32_hash_import,
1821                 .base.halg = {
1822                         .digestsize = SHA3_512_DIGEST_SIZE,
1823                         .statesize = sizeof(struct stm32_hash_state),
1824                         .base = {
1825                                 .cra_name = "sha3-512",
1826                                 .cra_driver_name = "stm32-sha3-512",
1827                                 .cra_priority = 200,
1828                                 .cra_flags = CRYPTO_ALG_ASYNC |
1829                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1830                                 .cra_blocksize = SHA3_512_BLOCK_SIZE,
1831                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1832                                 .cra_alignmask = 3,
1833                                 .cra_init = stm32_hash_cra_sha3_init,
1834                                 .cra_exit = stm32_hash_cra_exit,
1835                                 .cra_module = THIS_MODULE,
1836                         }
1837                 },
1838                 .op = {
1839                         .do_one_request = stm32_hash_one_request,
1840                 },
1841         },
1842         {
1843                 .base.init = stm32_hash_init,
1844                 .base.update = stm32_hash_update,
1845                 .base.final = stm32_hash_final,
1846                 .base.finup = stm32_hash_finup,
1847                 .base.digest = stm32_hash_digest,
1848                 .base.export = stm32_hash_export,
1849                 .base.import = stm32_hash_import,
1850                 .base.setkey = stm32_hash_setkey,
1851                 .base.halg = {
1852                         .digestsize = SHA3_512_DIGEST_SIZE,
1853                         .statesize = sizeof(struct stm32_hash_state),
1854                         .base = {
1855                                 .cra_name = "hmac(sha3-512)",
1856                                 .cra_driver_name = "stm32-hmac-sha3-512",
1857                                 .cra_priority = 200,
1858                                 .cra_flags = CRYPTO_ALG_ASYNC |
1859                                         CRYPTO_ALG_KERN_DRIVER_ONLY,
1860                                 .cra_blocksize = SHA3_512_BLOCK_SIZE,
1861                                 .cra_ctxsize = sizeof(struct stm32_hash_ctx),
1862                                 .cra_alignmask = 3,
1863                                 .cra_init = stm32_hash_cra_sha3_hmac_init,
1864                                 .cra_exit = stm32_hash_cra_exit,
1865                                 .cra_module = THIS_MODULE,
1866                         }
1867                 },
1868                 .op = {
1869                         .do_one_request = stm32_hash_one_request,
1870                 },
1871         }
1872 };
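/*
 * Illustrative use only, not part of this driver: once the tables above are
 * registered, a kernel user reaches the accelerator through the generic
 * ahash API and the crypto engine takes care of the queueing. A minimal
 * sketch (sg, digest and data_len are caller-provided, error handling
 * trimmed):
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha3-256", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, sg, digest, data_len);
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 */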
1873
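/*
 * Register every algorithm listed for this variant with the crypto engine.
 * If one registration fails, everything registered so far is unregistered
 * again and the error is propagated to the caller.
 */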
1874 static int stm32_hash_register_algs(struct stm32_hash_dev *hdev)
1875 {
1876         unsigned int i, j;
1877         int err;
1878
1879         for (i = 0; i < hdev->pdata->algs_info_size; i++) {
1880                 for (j = 0; j < hdev->pdata->algs_info[i].size; j++) {
1881                         err = crypto_engine_register_ahash(
1882                                 &hdev->pdata->algs_info[i].algs_list[j]);
1883                         if (err)
1884                                 goto err_algs;
1885                 }
1886         }
1887
1888         return 0;
err_algs:
        dev_err(hdev->dev, "Registration of algo %u:%u failed\n", i, j);
        /* Unwind only what was actually registered, newest first. */
        while (j--)
                crypto_engine_unregister_ahash(
                        &hdev->pdata->algs_info[i].algs_list[j]);
        while (i--)
                for (j = hdev->pdata->algs_info[i].size; j--; )
                        crypto_engine_unregister_ahash(
                                &hdev->pdata->algs_info[i].algs_list[j]);
1896
1897         return err;
1898 }
1899
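/* Mirror of the registration above: drop every algorithm of this variant. */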
1900 static int stm32_hash_unregister_algs(struct stm32_hash_dev *hdev)
1901 {
1902         unsigned int i, j;
1903
1904         for (i = 0; i < hdev->pdata->algs_info_size; i++) {
1905                 for (j = 0; j < hdev->pdata->algs_info[i].size; j++)
1906                         crypto_engine_unregister_ahash(
1907                                 &hdev->pdata->algs_info[i].algs_list[j]);
1908         }
1909
1910         return 0;
1911 }
1912
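/*
 * Per-variant capability tables: each array lists the ahash algorithms a
 * given IP revision exposes, and the pdata structures below tie them to a
 * device-tree compatible.
 */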
1913 static struct stm32_hash_algs_info stm32_hash_algs_info_ux500[] = {
1914         {
1915                 .algs_list      = algs_sha1,
1916                 .size           = ARRAY_SIZE(algs_sha1),
1917         },
1918         {
1919                 .algs_list      = algs_sha256,
1920                 .size           = ARRAY_SIZE(algs_sha256),
1921         },
1922 };
1923
1924 static const struct stm32_hash_pdata stm32_hash_pdata_ux500 = {
1925         .alg_shift      = 7,
1926         .algs_info      = stm32_hash_algs_info_ux500,
1927         .algs_info_size = ARRAY_SIZE(stm32_hash_algs_info_ux500),
1928         .broken_emptymsg = true,
1929         .ux500          = true,
1930 };
1931
1932 static struct stm32_hash_algs_info stm32_hash_algs_info_stm32f4[] = {
1933         {
1934                 .algs_list      = algs_md5,
1935                 .size           = ARRAY_SIZE(algs_md5),
1936         },
1937         {
1938                 .algs_list      = algs_sha1,
1939                 .size           = ARRAY_SIZE(algs_sha1),
1940         },
1941 };
1942
1943 static const struct stm32_hash_pdata stm32_hash_pdata_stm32f4 = {
1944         .alg_shift      = 7,
1945         .algs_info      = stm32_hash_algs_info_stm32f4,
1946         .algs_info_size = ARRAY_SIZE(stm32_hash_algs_info_stm32f4),
1947         .has_sr         = true,
1948         .has_mdmat      = true,
1949 };
1950
1951 static struct stm32_hash_algs_info stm32_hash_algs_info_stm32f7[] = {
1952         {
1953                 .algs_list      = algs_md5,
1954                 .size           = ARRAY_SIZE(algs_md5),
1955         },
1956         {
1957                 .algs_list      = algs_sha1,
1958                 .size           = ARRAY_SIZE(algs_sha1),
1959         },
1960         {
1961                 .algs_list      = algs_sha224,
1962                 .size           = ARRAY_SIZE(algs_sha224),
1963         },
1964         {
1965                 .algs_list      = algs_sha256,
1966                 .size           = ARRAY_SIZE(algs_sha256),
1967         },
1968 };
1969
1970 static const struct stm32_hash_pdata stm32_hash_pdata_stm32f7 = {
1971         .alg_shift      = 7,
1972         .algs_info      = stm32_hash_algs_info_stm32f7,
1973         .algs_info_size = ARRAY_SIZE(stm32_hash_algs_info_stm32f7),
1974         .has_sr         = true,
1975         .has_mdmat      = true,
1976 };
1977
1978 static struct stm32_hash_algs_info stm32_hash_algs_info_stm32mp13[] = {
1979         {
1980                 .algs_list      = algs_sha1,
1981                 .size           = ARRAY_SIZE(algs_sha1),
1982         },
1983         {
1984                 .algs_list      = algs_sha224,
1985                 .size           = ARRAY_SIZE(algs_sha224),
1986         },
1987         {
1988                 .algs_list      = algs_sha256,
1989                 .size           = ARRAY_SIZE(algs_sha256),
1990         },
1991         {
1992                 .algs_list      = algs_sha384_sha512,
1993                 .size           = ARRAY_SIZE(algs_sha384_sha512),
1994         },
1995         {
1996                 .algs_list      = algs_sha3,
1997                 .size           = ARRAY_SIZE(algs_sha3),
1998         },
1999 };
2000
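/*
 * On stm32mp13 the algorithm selector field of HASH_CR is shifted to bit 17
 * (alg_shift) instead of bit 7, and the SHA-2 384/512 and SHA-3 families are
 * available in addition to SHA-1/SHA-2 256.
 */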
2001 static const struct stm32_hash_pdata stm32_hash_pdata_stm32mp13 = {
2002         .alg_shift      = 17,
2003         .algs_info      = stm32_hash_algs_info_stm32mp13,
2004         .algs_info_size = ARRAY_SIZE(stm32_hash_algs_info_stm32mp13),
2005         .has_sr         = true,
2006         .has_mdmat      = true,
2007 };
2008
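/* The match data selects one of the per-variant descriptions above. */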
2009 static const struct of_device_id stm32_hash_of_match[] = {
2010         { .compatible = "stericsson,ux500-hash", .data = &stm32_hash_pdata_ux500 },
2011         { .compatible = "st,stm32f456-hash", .data = &stm32_hash_pdata_stm32f4 },
2012         { .compatible = "st,stm32f756-hash", .data = &stm32_hash_pdata_stm32f7 },
2013         { .compatible = "st,stm32mp13-hash", .data = &stm32_hash_pdata_stm32mp13 },
2014         {},
2015 };
2016
2017 MODULE_DEVICE_TABLE(of, stm32_hash_of_match);
2018
2019 static int stm32_hash_get_of_match(struct stm32_hash_dev *hdev,
2020                                    struct device *dev)
2021 {
2022         hdev->pdata = of_device_get_match_data(dev);
2023         if (!hdev->pdata) {
2024                 dev_err(dev, "no compatible OF match\n");
2025                 return -EINVAL;
2026         }
2027
2028         return 0;
2029 }
2030
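/*
 * Probe: map the registers, pick the per-variant data, grab the optional IRQ
 * and the clock, reset the block, try to set up DMA, then hand request
 * handling to a crypto engine and register the algorithms.
 */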
2031 static int stm32_hash_probe(struct platform_device *pdev)
2032 {
2033         struct stm32_hash_dev *hdev;
2034         struct device *dev = &pdev->dev;
2035         struct resource *res;
2036         int ret, irq;
2037
2038         hdev = devm_kzalloc(dev, sizeof(*hdev), GFP_KERNEL);
2039         if (!hdev)
2040                 return -ENOMEM;
2041
2042         hdev->io_base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
2043         if (IS_ERR(hdev->io_base))
2044                 return PTR_ERR(hdev->io_base);
2045
2046         hdev->phys_base = res->start;
2047
2048         ret = stm32_hash_get_of_match(hdev, dev);
2049         if (ret)
2050                 return ret;
2051
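	/* The IRQ is optional: without one the driver polls the status flags. */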
2052         irq = platform_get_irq_optional(pdev, 0);
2053         if (irq < 0 && irq != -ENXIO)
2054                 return irq;
2055
2056         if (irq > 0) {
2057                 ret = devm_request_threaded_irq(dev, irq,
2058                                                 stm32_hash_irq_handler,
2059                                                 stm32_hash_irq_thread,
2060                                                 IRQF_ONESHOT,
2061                                                 dev_name(dev), hdev);
2062                 if (ret) {
2063                         dev_err(dev, "Cannot grab IRQ\n");
2064                         return ret;
2065                 }
2066         } else {
2067                 dev_info(dev, "No IRQ, using polling mode\n");
2068                 hdev->polled = true;
2069         }
2070
2071         hdev->clk = devm_clk_get(&pdev->dev, NULL);
2072         if (IS_ERR(hdev->clk))
2073                 return dev_err_probe(dev, PTR_ERR(hdev->clk),
2074                                      "failed to get clock for hash\n");
2075
2076         ret = clk_prepare_enable(hdev->clk);
2077         if (ret) {
2078                 dev_err(dev, "failed to enable hash clock (%d)\n", ret);
2079                 return ret;
2080         }
2081
2082         pm_runtime_set_autosuspend_delay(dev, HASH_AUTOSUSPEND_DELAY);
2083         pm_runtime_use_autosuspend(dev);
2084
2085         pm_runtime_get_noresume(dev);
2086         pm_runtime_set_active(dev);
2087         pm_runtime_enable(dev);
2088
2089         hdev->rst = devm_reset_control_get(&pdev->dev, NULL);
2090         if (IS_ERR(hdev->rst)) {
2091                 if (PTR_ERR(hdev->rst) == -EPROBE_DEFER) {
2092                         ret = -EPROBE_DEFER;
2093                         goto err_reset;
2094                 }
2095         } else {
2096                 reset_control_assert(hdev->rst);
2097                 udelay(2);
2098                 reset_control_deassert(hdev->rst);
2099         }
2100
2101         hdev->dev = dev;
2102
2103         platform_set_drvdata(pdev, hdev);
2104
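	/* A missing DMA channel (-ENOENT/-ENODEV) is not fatal, CPU mode is used. */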
2105         ret = stm32_hash_dma_init(hdev);
2106         switch (ret) {
2107         case 0:
2108                 break;
2109         case -ENOENT:
2110         case -ENODEV:
2111                 dev_info(dev, "DMA mode not available\n");
2112                 break;
2113         default:
2114                 dev_err(dev, "DMA init error %d\n", ret);
2115                 goto err_dma;
2116         }
2117
2118         spin_lock(&stm32_hash.lock);
2119         list_add_tail(&hdev->list, &stm32_hash.dev_list);
2120         spin_unlock(&stm32_hash.lock);
2121
2122         /* Initialize crypto engine */
2123         hdev->engine = crypto_engine_alloc_init(dev, 1);
2124         if (!hdev->engine) {
2125                 ret = -ENOMEM;
2126                 goto err_engine;
2127         }
2128
2129         ret = crypto_engine_start(hdev->engine);
2130         if (ret)
2131                 goto err_engine_start;
2132
2133         if (hdev->pdata->ux500)
2134                 /* FIXME: implement DMA mode for Ux500 */
2135                 hdev->dma_mode = 0;
2136         else
2137                 hdev->dma_mode = stm32_hash_read(hdev, HASH_HWCFGR) & HASH_HWCFG_DMA_MASK;
2138
2139         /* Register algos */
2140         ret = stm32_hash_register_algs(hdev);
2141         if (ret)
2142                 goto err_algs;
2143
2144         dev_info(dev, "HASH init done, HW version %x, DMA mode %u\n",
2145                  stm32_hash_read(hdev, HASH_VER), hdev->dma_mode);
2146
2147         pm_runtime_put_sync(dev);
2148
2149         return 0;
2150
2151 err_algs:
2152 err_engine_start:
2153         crypto_engine_exit(hdev->engine);
2154 err_engine:
2155         spin_lock(&stm32_hash.lock);
2156         list_del(&hdev->list);
2157         spin_unlock(&stm32_hash.lock);
2158 err_dma:
2159         if (hdev->dma_lch)
2160                 dma_release_channel(hdev->dma_lch);
2161 err_reset:
2162         pm_runtime_disable(dev);
2163         pm_runtime_put_noidle(dev);
2164
2165         clk_disable_unprepare(hdev->clk);
2166
2167         return ret;
2168 }
2169
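/*
 * Remove: tear everything down in reverse probe order, then disable runtime
 * PM and release the clock (the clock is only dropped when the initial
 * pm_runtime_get_sync() succeeded).
 */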
2170 static void stm32_hash_remove(struct platform_device *pdev)
2171 {
2172         struct stm32_hash_dev *hdev = platform_get_drvdata(pdev);
2173         int ret;
2174
2175         ret = pm_runtime_get_sync(hdev->dev);
2176
2177         stm32_hash_unregister_algs(hdev);
2178
2179         crypto_engine_exit(hdev->engine);
2180
2181         spin_lock(&stm32_hash.lock);
2182         list_del(&hdev->list);
2183         spin_unlock(&stm32_hash.lock);
2184
2185         if (hdev->dma_lch)
2186                 dma_release_channel(hdev->dma_lch);
2187
2188         pm_runtime_disable(hdev->dev);
2189         pm_runtime_put_noidle(hdev->dev);
2190
2191         if (ret >= 0)
2192                 clk_disable_unprepare(hdev->clk);
2193 }
2194
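/* Runtime PM only gates the HASH clock while the device is idle. */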
2195 #ifdef CONFIG_PM
2196 static int stm32_hash_runtime_suspend(struct device *dev)
2197 {
2198         struct stm32_hash_dev *hdev = dev_get_drvdata(dev);
2199
2200         clk_disable_unprepare(hdev->clk);
2201
2202         return 0;
2203 }
2204
2205 static int stm32_hash_runtime_resume(struct device *dev)
2206 {
2207         struct stm32_hash_dev *hdev = dev_get_drvdata(dev);
2208         int ret;
2209
2210         ret = clk_prepare_enable(hdev->clk);
2211         if (ret) {
2212                 dev_err(hdev->dev, "Failed to prepare and enable the clock\n");
2213                 return ret;
2214         }
2215
2216         return 0;
2217 }
2218 #endif
2219
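/* System sleep reuses the runtime PM callbacks via pm_runtime_force_*(). */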
2220 static const struct dev_pm_ops stm32_hash_pm_ops = {
2221         SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
2222                                 pm_runtime_force_resume)
2223         SET_RUNTIME_PM_OPS(stm32_hash_runtime_suspend,
2224                            stm32_hash_runtime_resume, NULL)
2225 };
2226
2227 static struct platform_driver stm32_hash_driver = {
2228         .probe          = stm32_hash_probe,
2229         .remove_new     = stm32_hash_remove,
2230         .driver         = {
2231                 .name   = "stm32-hash",
2232                 .pm = &stm32_hash_pm_ops,
2233                 .of_match_table = stm32_hash_of_match,
2234         }
2235 };
2236
2237 module_platform_driver(stm32_hash_driver);
2238
2239 MODULE_DESCRIPTION("STM32 SHA1/SHA2/SHA3 & MD5 (HMAC) hw accelerator driver");
2240 MODULE_AUTHOR("Lionel Debieve <lionel.debieve@st.com>");
2241 MODULE_LICENSE("GPL v2");