drivers/crypto/talitos.c
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * talitos - Freescale Integrated Security Engine (SEC) device driver
4  *
5  * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
6  *
7  * Scatterlist Crypto API glue code copied from files with the following:
8  * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
9  *
10  * Crypto algorithm registration code copied from hifn driver:
11  * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
12  * All rights reserved.
13  */
14
15 #include <linux/kernel.h>
16 #include <linux/module.h>
17 #include <linux/mod_devicetable.h>
18 #include <linux/device.h>
19 #include <linux/interrupt.h>
20 #include <linux/crypto.h>
21 #include <linux/hw_random.h>
22 #include <linux/of_address.h>
23 #include <linux/of_irq.h>
24 #include <linux/of_platform.h>
25 #include <linux/dma-mapping.h>
26 #include <linux/io.h>
27 #include <linux/spinlock.h>
28 #include <linux/rtnetlink.h>
29 #include <linux/slab.h>
30
31 #include <crypto/algapi.h>
32 #include <crypto/aes.h>
33 #include <crypto/internal/des.h>
34 #include <crypto/sha1.h>
35 #include <crypto/sha2.h>
36 #include <crypto/md5.h>
37 #include <crypto/internal/aead.h>
38 #include <crypto/authenc.h>
39 #include <crypto/internal/skcipher.h>
40 #include <crypto/hash.h>
41 #include <crypto/internal/hash.h>
42 #include <crypto/scatterwalk.h>
43
44 #include "talitos.h"
45
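/*
 * talitos_ptr helpers: SEC1 descriptors keep a 32-bit bus address in ->ptr
 * with the length in ->len1; SEC2/3 descriptors use ->len plus ->eptr for
 * the upper bits of the 36-bit bus address, and ->j_extent for extent
 * lengths and link-table JUMP/RET tags.
 */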
46 static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
47                            unsigned int len, bool is_sec1)
48 {
49         ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
50         if (is_sec1) {
51                 ptr->len1 = cpu_to_be16(len);
52         } else {
53                 ptr->len = cpu_to_be16(len);
54                 ptr->eptr = upper_32_bits(dma_addr);
55         }
56 }
57
58 static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
59                              struct talitos_ptr *src_ptr, bool is_sec1)
60 {
61         dst_ptr->ptr = src_ptr->ptr;
62         if (is_sec1) {
63                 dst_ptr->len1 = src_ptr->len1;
64         } else {
65                 dst_ptr->len = src_ptr->len;
66                 dst_ptr->eptr = src_ptr->eptr;
67         }
68 }
69
70 static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
71                                            bool is_sec1)
72 {
73         if (is_sec1)
74                 return be16_to_cpu(ptr->len1);
75         else
76                 return be16_to_cpu(ptr->len);
77 }
78
79 static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
80                                    bool is_sec1)
81 {
82         if (!is_sec1)
83                 ptr->j_extent = val;
84 }
85
86 static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
87 {
88         if (!is_sec1)
89                 ptr->j_extent |= val;
90 }
91
92 /*
93  * map virtual single (contiguous) pointer to h/w descriptor pointer
94  */
95 static void __map_single_talitos_ptr(struct device *dev,
96                                      struct talitos_ptr *ptr,
97                                      unsigned int len, void *data,
98                                      enum dma_data_direction dir,
99                                      unsigned long attrs)
100 {
101         dma_addr_t dma_addr = dma_map_single_attrs(dev, data, len, dir, attrs);
102         struct talitos_private *priv = dev_get_drvdata(dev);
103         bool is_sec1 = has_ftr_sec1(priv);
104
105         to_talitos_ptr(ptr, dma_addr, len, is_sec1);
106 }
107
108 static void map_single_talitos_ptr(struct device *dev,
109                                    struct talitos_ptr *ptr,
110                                    unsigned int len, void *data,
111                                    enum dma_data_direction dir)
112 {
113         __map_single_talitos_ptr(dev, ptr, len, data, dir, 0);
114 }
115
116 static void map_single_talitos_ptr_nosync(struct device *dev,
117                                           struct talitos_ptr *ptr,
118                                           unsigned int len, void *data,
119                                           enum dma_data_direction dir)
120 {
121         __map_single_talitos_ptr(dev, ptr, len, data, dir,
122                                  DMA_ATTR_SKIP_CPU_SYNC);
123 }
124
125 /*
126  * unmap bus single (contiguous) h/w descriptor pointer
127  */
128 static void unmap_single_talitos_ptr(struct device *dev,
129                                      struct talitos_ptr *ptr,
130                                      enum dma_data_direction dir)
131 {
132         struct talitos_private *priv = dev_get_drvdata(dev);
133         bool is_sec1 = has_ftr_sec1(priv);
134
135         dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
136                          from_talitos_ptr_len(ptr, is_sec1), dir);
137 }
138
139 static int reset_channel(struct device *dev, int ch)
140 {
141         struct talitos_private *priv = dev_get_drvdata(dev);
142         unsigned int timeout = TALITOS_TIMEOUT;
143         bool is_sec1 = has_ftr_sec1(priv);
144
145         if (is_sec1) {
146                 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
147                           TALITOS1_CCCR_LO_RESET);
148
149                 while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
150                         TALITOS1_CCCR_LO_RESET) && --timeout)
151                         cpu_relax();
152         } else {
153                 setbits32(priv->chan[ch].reg + TALITOS_CCCR,
154                           TALITOS2_CCCR_RESET);
155
156                 while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
157                         TALITOS2_CCCR_RESET) && --timeout)
158                         cpu_relax();
159         }
160
161         if (timeout == 0) {
162                 dev_err(dev, "failed to reset channel %d\n", ch);
163                 return -EIO;
164         }
165
166         /* set 36-bit addressing, done writeback enable and done IRQ enable */
167         setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
168                   TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
169         /* enable chaining descriptors */
170         if (is_sec1)
171                 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
172                           TALITOS_CCCR_LO_NE);
173
174         /* and ICCR writeback, if available */
175         if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
176                 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
177                           TALITOS_CCCR_LO_IWSE);
178
179         return 0;
180 }
181
182 static int reset_device(struct device *dev)
183 {
184         struct talitos_private *priv = dev_get_drvdata(dev);
185         unsigned int timeout = TALITOS_TIMEOUT;
186         bool is_sec1 = has_ftr_sec1(priv);
187         u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;
188
189         setbits32(priv->reg + TALITOS_MCR, mcr);
190
191         while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
192                && --timeout)
193                 cpu_relax();
194
195         if (priv->irq[1]) {
196                 mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
197                 setbits32(priv->reg + TALITOS_MCR, mcr);
198         }
199
200         if (timeout == 0) {
201                 dev_err(dev, "failed to reset device\n");
202                 return -EIO;
203         }
204
205         return 0;
206 }
207
208 /*
209  * Reset and initialize the device
210  */
211 static int init_device(struct device *dev)
212 {
213         struct talitos_private *priv = dev_get_drvdata(dev);
214         int ch, err;
215         bool is_sec1 = has_ftr_sec1(priv);
216
217         /*
218          * Master reset
219          * errata documentation: warning: certain SEC interrupts
220          * are not fully cleared by writing the MCR:SWR bit,
221          * set bit twice to completely reset
222          */
223         err = reset_device(dev);
224         if (err)
225                 return err;
226
227         err = reset_device(dev);
228         if (err)
229                 return err;
230
231         /* reset channels */
232         for (ch = 0; ch < priv->num_channels; ch++) {
233                 err = reset_channel(dev, ch);
234                 if (err)
235                         return err;
236         }
237
238         /* enable channel done and error interrupts */
239         if (is_sec1) {
240                 clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
241                 clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
242                 /* disable parity error check in DEU (erroneous? test vect.) */
243                 setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
244         } else {
245                 setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
246                 setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
247         }
248
249         /* disable integrity check error interrupts (use writeback instead) */
250         if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
251                 setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
252                           TALITOS_MDEUICR_LO_ICE);
253
254         return 0;
255 }
256
257 /**
258  * talitos_submit - submits a descriptor to the device for processing
259  * @dev:        the SEC device to be used
260  * @ch:         the SEC device channel to be used
261  * @desc:       the descriptor to be processed by the device
262  * @callback:   whom to call when processing is complete
263  * @context:    a handle for use by caller (optional)
264  *
265  * desc must contain valid dma-mapped (bus physical) address pointers.
266  * callback must check err and feedback in descriptor header
267  * for device processing status.
268  */
269 static int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
270                           void (*callback)(struct device *dev,
271                                            struct talitos_desc *desc,
272                                            void *context, int error),
273                           void *context)
274 {
275         struct talitos_private *priv = dev_get_drvdata(dev);
276         struct talitos_request *request;
277         unsigned long flags;
278         int head;
279         bool is_sec1 = has_ftr_sec1(priv);
280
281         spin_lock_irqsave(&priv->chan[ch].head_lock, flags);
282
283         if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
284                 /* h/w fifo is full */
285                 spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
286                 return -EAGAIN;
287         }
288
289         head = priv->chan[ch].head;
290         request = &priv->chan[ch].fifo[head];
291
292         /* map descriptor and save caller data */
293         if (is_sec1) {
294                 desc->hdr1 = desc->hdr;
295                 request->dma_desc = dma_map_single(dev, &desc->hdr1,
296                                                    TALITOS_DESC_SIZE,
297                                                    DMA_BIDIRECTIONAL);
298         } else {
299                 request->dma_desc = dma_map_single(dev, desc,
300                                                    TALITOS_DESC_SIZE,
301                                                    DMA_BIDIRECTIONAL);
302         }
303         request->callback = callback;
304         request->context = context;
305
306         /* increment fifo head */
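        /* the fifo length is a power of two, so the AND below wraps head back to zero */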
307         priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);
308
309         smp_wmb();
310         request->desc = desc;
311
312         /* GO! */
313         wmb();
314         out_be32(priv->chan[ch].reg + TALITOS_FF,
315                  upper_32_bits(request->dma_desc));
316         out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
317                  lower_32_bits(request->dma_desc));
318
319         spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
320
321         return -EINPROGRESS;
322 }
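/*
 * Typical use (see ipsec_esp() and the other request builders below):
 *
 *	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
 *	if (ret != -EINPROGRESS)
 *		unmap and free the extended descriptor
 *
 * -EINPROGRESS means the descriptor is queued and the callback will be
 * invoked from flush_channel(); -EAGAIN means the channel fifo was full.
 */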
323
324 static __be32 get_request_hdr(struct talitos_request *request, bool is_sec1)
325 {
326         struct talitos_edesc *edesc;
327
328         if (!is_sec1)
329                 return request->desc->hdr;
330
331         if (!request->desc->next_desc)
332                 return request->desc->hdr1;
333
334         edesc = container_of(request->desc, struct talitos_edesc, desc);
335
336         return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;
337 }
338
339 /*
340  * process what was done, notify callback of error if not
341  */
342 static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
343 {
344         struct talitos_private *priv = dev_get_drvdata(dev);
345         struct talitos_request *request, saved_req;
346         unsigned long flags;
347         int tail, status;
348         bool is_sec1 = has_ftr_sec1(priv);
349
350         spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
351
352         tail = priv->chan[ch].tail;
353         while (priv->chan[ch].fifo[tail].desc) {
354                 __be32 hdr;
355
356                 request = &priv->chan[ch].fifo[tail];
357
358                 /* descriptors with their done bits set don't get the error */
359                 rmb();
360                 hdr = get_request_hdr(request, is_sec1);
361
362                 if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
363                         status = 0;
364                 else
365                         if (!error)
366                                 break;
367                         else
368                                 status = error;
369
370                 dma_unmap_single(dev, request->dma_desc,
371                                  TALITOS_DESC_SIZE,
372                                  DMA_BIDIRECTIONAL);
373
374                 /* copy entries so we can call callback outside lock */
375                 saved_req.desc = request->desc;
376                 saved_req.callback = request->callback;
377                 saved_req.context = request->context;
378
379                 /* release request entry in fifo */
380                 smp_wmb();
381                 request->desc = NULL;
382
383                 /* increment fifo tail */
384                 priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);
385
386                 spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
387
388                 atomic_dec(&priv->chan[ch].submit_count);
389
390                 saved_req.callback(dev, saved_req.desc, saved_req.context,
391                                    status);
392                 /* channel may resume processing in single desc error case */
393                 if (error && !reset_ch && status == error)
394                         return;
395                 spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
396                 tail = priv->chan[ch].tail;
397         }
398
399         spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
400 }
401
402 /*
403  * process completed requests for channels that have done status
404  */
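/*
 * The SEC1 masks below select channels 0..3 at ISR bits 28, 30, 16 and 18
 * (0x10000000, 0x40000000, 0x00010000, 0x00040000); SEC2+ uses the even
 * bits 0, 2, 4 and 6 instead.
 */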
405 #define DEF_TALITOS1_DONE(name, ch_done_mask)                           \
406 static void talitos1_done_##name(unsigned long data)                    \
407 {                                                                       \
408         struct device *dev = (struct device *)data;                     \
409         struct talitos_private *priv = dev_get_drvdata(dev);            \
410         unsigned long flags;                                            \
411                                                                         \
412         if (ch_done_mask & 0x10000000)                                  \
413                 flush_channel(dev, 0, 0, 0);                    \
414         if (ch_done_mask & 0x40000000)                                  \
415                 flush_channel(dev, 1, 0, 0);                    \
416         if (ch_done_mask & 0x00010000)                                  \
417                 flush_channel(dev, 2, 0, 0);                    \
418         if (ch_done_mask & 0x00040000)                                  \
419                 flush_channel(dev, 3, 0, 0);                    \
420                                                                         \
421         /* At this point, all completed channels have been processed */ \
422         /* Unmask done interrupts for channels completed later on. */   \
423         spin_lock_irqsave(&priv->reg_lock, flags);                      \
424         clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);               \
425         clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);    \
426         spin_unlock_irqrestore(&priv->reg_lock, flags);                 \
427 }
428
429 DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
430 DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)
431
432 #define DEF_TALITOS2_DONE(name, ch_done_mask)                           \
433 static void talitos2_done_##name(unsigned long data)                    \
434 {                                                                       \
435         struct device *dev = (struct device *)data;                     \
436         struct talitos_private *priv = dev_get_drvdata(dev);            \
437         unsigned long flags;                                            \
438                                                                         \
439         if (ch_done_mask & 1)                                           \
440                 flush_channel(dev, 0, 0, 0);                            \
441         if (ch_done_mask & (1 << 2))                                    \
442                 flush_channel(dev, 1, 0, 0);                            \
443         if (ch_done_mask & (1 << 4))                                    \
444                 flush_channel(dev, 2, 0, 0);                            \
445         if (ch_done_mask & (1 << 6))                                    \
446                 flush_channel(dev, 3, 0, 0);                            \
447                                                                         \
448         /* At this point, all completed channels have been processed */ \
449         /* Unmask done interrupts for channels completed later on. */   \
450         spin_lock_irqsave(&priv->reg_lock, flags);                      \
451         setbits32(priv->reg + TALITOS_IMR, ch_done_mask);               \
452         setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);    \
453         spin_unlock_irqrestore(&priv->reg_lock, flags);                 \
454 }
455
456 DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
457 DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
458 DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
459 DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)
460
461 /*
462  * locate current (offending) descriptor
463  */
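/*
 * A request may chain a second descriptor via ->next_desc, stored after the
 * DMA area in the edesc buffer, so CDPR may point either at the fifo entry's
 * own descriptor or at the chained one; both cases are handled below.
 */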
464 static __be32 current_desc_hdr(struct device *dev, int ch)
465 {
466         struct talitos_private *priv = dev_get_drvdata(dev);
467         int tail, iter;
468         dma_addr_t cur_desc;
469
470         cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
471         cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);
472
473         if (!cur_desc) {
474                 dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
475                 return 0;
476         }
477
478         tail = priv->chan[ch].tail;
479
480         iter = tail;
481         while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
482                priv->chan[ch].fifo[iter].desc->next_desc != cpu_to_be32(cur_desc)) {
483                 iter = (iter + 1) & (priv->fifo_len - 1);
484                 if (iter == tail) {
485                         dev_err(dev, "couldn't locate current descriptor\n");
486                         return 0;
487                 }
488         }
489
490         if (priv->chan[ch].fifo[iter].desc->next_desc == cpu_to_be32(cur_desc)) {
491                 struct talitos_edesc *edesc;
492
493                 edesc = container_of(priv->chan[ch].fifo[iter].desc,
494                                      struct talitos_edesc, desc);
495                 return ((struct talitos_desc *)
496                         (edesc->buf + edesc->dma_len))->hdr;
497         }
498
499         return priv->chan[ch].fifo[iter].desc->hdr;
500 }
501
502 /*
503  * user diagnostics; report root cause of error based on execution unit status
504  */
505 static void report_eu_error(struct device *dev, int ch, __be32 desc_hdr)
506 {
507         struct talitos_private *priv = dev_get_drvdata(dev);
508         int i;
509
510         if (!desc_hdr)
511                 desc_hdr = cpu_to_be32(in_be32(priv->chan[ch].reg + TALITOS_DESCBUF));
512
513         switch (desc_hdr & DESC_HDR_SEL0_MASK) {
514         case DESC_HDR_SEL0_AFEU:
515                 dev_err(dev, "AFEUISR 0x%08x_%08x\n",
516                         in_be32(priv->reg_afeu + TALITOS_EUISR),
517                         in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
518                 break;
519         case DESC_HDR_SEL0_DEU:
520                 dev_err(dev, "DEUISR 0x%08x_%08x\n",
521                         in_be32(priv->reg_deu + TALITOS_EUISR),
522                         in_be32(priv->reg_deu + TALITOS_EUISR_LO));
523                 break;
524         case DESC_HDR_SEL0_MDEUA:
525         case DESC_HDR_SEL0_MDEUB:
526                 dev_err(dev, "MDEUISR 0x%08x_%08x\n",
527                         in_be32(priv->reg_mdeu + TALITOS_EUISR),
528                         in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
529                 break;
530         case DESC_HDR_SEL0_RNG:
531                 dev_err(dev, "RNGUISR 0x%08x_%08x\n",
532                         in_be32(priv->reg_rngu + TALITOS_ISR),
533                         in_be32(priv->reg_rngu + TALITOS_ISR_LO));
534                 break;
535         case DESC_HDR_SEL0_PKEU:
536                 dev_err(dev, "PKEUISR 0x%08x_%08x\n",
537                         in_be32(priv->reg_pkeu + TALITOS_EUISR),
538                         in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
539                 break;
540         case DESC_HDR_SEL0_AESU:
541                 dev_err(dev, "AESUISR 0x%08x_%08x\n",
542                         in_be32(priv->reg_aesu + TALITOS_EUISR),
543                         in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
544                 break;
545         case DESC_HDR_SEL0_CRCU:
546                 dev_err(dev, "CRCUISR 0x%08x_%08x\n",
547                         in_be32(priv->reg_crcu + TALITOS_EUISR),
548                         in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
549                 break;
550         case DESC_HDR_SEL0_KEU:
551                 dev_err(dev, "KEUISR 0x%08x_%08x\n",
552                         in_be32(priv->reg_pkeu + TALITOS_EUISR),
553                         in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
554                 break;
555         }
556
557         switch (desc_hdr & DESC_HDR_SEL1_MASK) {
558         case DESC_HDR_SEL1_MDEUA:
559         case DESC_HDR_SEL1_MDEUB:
560                 dev_err(dev, "MDEUISR 0x%08x_%08x\n",
561                         in_be32(priv->reg_mdeu + TALITOS_EUISR),
562                         in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
563                 break;
564         case DESC_HDR_SEL1_CRCU:
565                 dev_err(dev, "CRCUISR 0x%08x_%08x\n",
566                         in_be32(priv->reg_crcu + TALITOS_EUISR),
567                         in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
568                 break;
569         }
570
571         for (i = 0; i < 8; i++)
572                 dev_err(dev, "DESCBUF 0x%08x_%08x\n",
573                         in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
574                         in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
575 }
576
577 /*
578  * recover from error interrupts
579  */
580 static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
581 {
582         struct talitos_private *priv = dev_get_drvdata(dev);
583         unsigned int timeout = TALITOS_TIMEOUT;
584         int ch, error, reset_dev = 0;
585         u32 v_lo;
586         bool is_sec1 = has_ftr_sec1(priv);
587         int reset_ch = is_sec1 ? 1 : 0; /* only SEC2 supports continuation */
588
589         for (ch = 0; ch < priv->num_channels; ch++) {
590                 /* skip channels without errors */
591                 if (is_sec1) {
592                         /* bits 29, 31, 17, 19 */
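                        /* 29 + (ch & 1) * 2 - (ch & 2) * 6: ch 0->29, 1->31, 2->17, 3->19 */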
593                         if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
594                                 continue;
595                 } else {
596                         if (!(isr & (1 << (ch * 2 + 1))))
597                                 continue;
598                 }
599
600                 error = -EINVAL;
601
602                 v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);
603
604                 if (v_lo & TALITOS_CCPSR_LO_DOF) {
605                         dev_err(dev, "double fetch fifo overflow error\n");
606                         error = -EAGAIN;
607                         reset_ch = 1;
608                 }
609                 if (v_lo & TALITOS_CCPSR_LO_SOF) {
610                         /* h/w dropped descriptor */
611                         dev_err(dev, "single fetch fifo overflow error\n");
612                         error = -EAGAIN;
613                 }
614                 if (v_lo & TALITOS_CCPSR_LO_MDTE)
615                         dev_err(dev, "master data transfer error\n");
616                 if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
617                         dev_err(dev, is_sec1 ? "pointer not complete error\n"
618                                              : "s/g data length zero error\n");
619                 if (v_lo & TALITOS_CCPSR_LO_FPZ)
620                         dev_err(dev, is_sec1 ? "parity error\n"
621                                              : "fetch pointer zero error\n");
622                 if (v_lo & TALITOS_CCPSR_LO_IDH)
623                         dev_err(dev, "illegal descriptor header error\n");
624                 if (v_lo & TALITOS_CCPSR_LO_IEU)
625                         dev_err(dev, is_sec1 ? "static assignment error\n"
626                                              : "invalid exec unit error\n");
627                 if (v_lo & TALITOS_CCPSR_LO_EU)
628                         report_eu_error(dev, ch, current_desc_hdr(dev, ch));
629                 if (!is_sec1) {
630                         if (v_lo & TALITOS_CCPSR_LO_GB)
631                                 dev_err(dev, "gather boundary error\n");
632                         if (v_lo & TALITOS_CCPSR_LO_GRL)
633                                 dev_err(dev, "gather return/length error\n");
634                         if (v_lo & TALITOS_CCPSR_LO_SB)
635                                 dev_err(dev, "scatter boundary error\n");
636                         if (v_lo & TALITOS_CCPSR_LO_SRL)
637                                 dev_err(dev, "scatter return/length error\n");
638                 }
639
640                 flush_channel(dev, ch, error, reset_ch);
641
642                 if (reset_ch) {
643                         reset_channel(dev, ch);
644                 } else {
645                         setbits32(priv->chan[ch].reg + TALITOS_CCCR,
646                                   TALITOS2_CCCR_CONT);
647                         setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
648                         while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
649                                TALITOS2_CCCR_CONT) && --timeout)
650                                 cpu_relax();
651                         if (timeout == 0) {
652                                 dev_err(dev, "failed to restart channel %d\n",
653                                         ch);
654                                 reset_dev = 1;
655                         }
656                 }
657         }
658         if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
659             (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
660                 if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
661                         dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
662                                 isr, isr_lo);
663                 else
664                         dev_err(dev, "done overflow, internal time out, or "
665                                 "rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);
666
667                 /* purge request queues */
668                 for (ch = 0; ch < priv->num_channels; ch++)
669                         flush_channel(dev, ch, -EIO, 1);
670
671                 /* reset and reinitialize the device */
672                 init_device(dev);
673         }
674 }
675
676 #define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)          \
677 static irqreturn_t talitos1_interrupt_##name(int irq, void *data)              \
678 {                                                                              \
679         struct device *dev = data;                                             \
680         struct talitos_private *priv = dev_get_drvdata(dev);                   \
681         u32 isr, isr_lo;                                                       \
682         unsigned long flags;                                                   \
683                                                                                \
684         spin_lock_irqsave(&priv->reg_lock, flags);                             \
685         isr = in_be32(priv->reg + TALITOS_ISR);                                \
686         isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);                          \
687         /* Acknowledge interrupt */                                            \
688         out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
689         out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);                          \
690                                                                                \
691         if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) {    \
692                 spin_unlock_irqrestore(&priv->reg_lock, flags);                \
693                 talitos_error(dev, isr & ch_err_mask, isr_lo);                 \
694         }                                                                      \
695         else {                                                                 \
696                 if (likely(isr & ch_done_mask)) {                              \
697                         /* mask further done interrupts. */                    \
698                         setbits32(priv->reg + TALITOS_IMR, ch_done_mask);      \
699                         /* done_task will unmask done interrupts at exit */    \
700                         tasklet_schedule(&priv->done_task[tlet]);              \
701                 }                                                              \
702                 spin_unlock_irqrestore(&priv->reg_lock, flags);                \
703         }                                                                      \
704                                                                                \
705         return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED :  \
706                                                                 IRQ_NONE;      \
707 }
708
709 DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)
710
711 #define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)          \
712 static irqreturn_t talitos2_interrupt_##name(int irq, void *data)              \
713 {                                                                              \
714         struct device *dev = data;                                             \
715         struct talitos_private *priv = dev_get_drvdata(dev);                   \
716         u32 isr, isr_lo;                                                       \
717         unsigned long flags;                                                   \
718                                                                                \
719         spin_lock_irqsave(&priv->reg_lock, flags);                             \
720         isr = in_be32(priv->reg + TALITOS_ISR);                                \
721         isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);                          \
722         /* Acknowledge interrupt */                                            \
723         out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
724         out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);                          \
725                                                                                \
726         if (unlikely(isr & ch_err_mask || isr_lo)) {                           \
727                 spin_unlock_irqrestore(&priv->reg_lock, flags);                \
728                 talitos_error(dev, isr & ch_err_mask, isr_lo);                 \
729         }                                                                      \
730         else {                                                                 \
731                 if (likely(isr & ch_done_mask)) {                              \
732                         /* mask further done interrupts. */                    \
733                         clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);      \
734                         /* done_task will unmask done interrupts at exit */    \
735                         tasklet_schedule(&priv->done_task[tlet]);              \
736                 }                                                              \
737                 spin_unlock_irqrestore(&priv->reg_lock, flags);                \
738         }                                                                      \
739                                                                                \
740         return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED :  \
741                                                                 IRQ_NONE;      \
742 }
743
744 DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
745 DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
746                        0)
747 DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
748                        1)
749
750 /*
751  * hwrng
752  */
753 static int talitos_rng_data_present(struct hwrng *rng, int wait)
754 {
755         struct device *dev = (struct device *)rng->priv;
756         struct talitos_private *priv = dev_get_drvdata(dev);
757         u32 ofl;
758         int i;
759
760         for (i = 0; i < 20; i++) {
761                 ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
762                       TALITOS_RNGUSR_LO_OFL;
763                 if (ofl || !wait)
764                         break;
765                 udelay(10);
766         }
767
768         return !!ofl;
769 }
770
771 static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
772 {
773         struct device *dev = (struct device *)rng->priv;
774         struct talitos_private *priv = dev_get_drvdata(dev);
775
776         /* rng fifo requires 64-bit accesses */
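        /* both words are read; only the second (low) one is returned to the caller */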
777         *data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
778         *data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);
779
780         return sizeof(u32);
781 }
782
783 static int talitos_rng_init(struct hwrng *rng)
784 {
785         struct device *dev = (struct device *)rng->priv;
786         struct talitos_private *priv = dev_get_drvdata(dev);
787         unsigned int timeout = TALITOS_TIMEOUT;
788
789         setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
790         while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
791                  & TALITOS_RNGUSR_LO_RD)
792                && --timeout)
793                 cpu_relax();
794         if (timeout == 0) {
795                 dev_err(dev, "failed to reset rng hw\n");
796                 return -ENODEV;
797         }
798
799         /* start generating */
800         setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);
801
802         return 0;
803 }
804
805 static int talitos_register_rng(struct device *dev)
806 {
807         struct talitos_private *priv = dev_get_drvdata(dev);
808         int err;
809
810         priv->rng.name          = dev_driver_string(dev);
811         priv->rng.init          = talitos_rng_init;
812         priv->rng.data_present  = talitos_rng_data_present;
813         priv->rng.data_read     = talitos_rng_data_read;
814         priv->rng.priv          = (unsigned long)dev;
815
816         err = hwrng_register(&priv->rng);
817         if (!err)
818                 priv->rng_registered = true;
819
820         return err;
821 }
822
823 static void talitos_unregister_rng(struct device *dev)
824 {
825         struct talitos_private *priv = dev_get_drvdata(dev);
826
827         if (!priv->rng_registered)
828                 return;
829
830         hwrng_unregister(&priv->rng);
831         priv->rng_registered = false;
832 }
833
834 /*
835  * crypto alg
836  */
837 #define TALITOS_CRA_PRIORITY            3000
838 /*
839  * Defines a priority for doing AEAD with descriptors type
840  * HMAC_SNOOP_NO_AFEA (HSNA) instead of type IPSEC_ESP
841  */
842 #define TALITOS_CRA_PRIORITY_AEAD_HSNA  (TALITOS_CRA_PRIORITY - 1)
843 #ifdef CONFIG_CRYPTO_DEV_TALITOS2
844 #define TALITOS_MAX_KEY_SIZE            (AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
845 #else
846 #define TALITOS_MAX_KEY_SIZE            (AES_MAX_KEY_SIZE + SHA256_BLOCK_SIZE)
847 #endif
848 #define TALITOS_MAX_IV_LENGTH           16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
849
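/*
 * For AEAD transforms, ->key holds the authentication key immediately
 * followed by the cipher key (see aead_setkey()); ->dma_key maps that
 * concatenated buffer so descriptors can reference either part.
 */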
850 struct talitos_ctx {
851         struct device *dev;
852         int ch;
853         __be32 desc_hdr_template;
854         u8 key[TALITOS_MAX_KEY_SIZE];
855         u8 iv[TALITOS_MAX_IV_LENGTH];
856         dma_addr_t dma_key;
857         unsigned int keylen;
858         unsigned int enckeylen;
859         unsigned int authkeylen;
860 };
861
862 #define HASH_MAX_BLOCK_SIZE             SHA512_BLOCK_SIZE
863 #define TALITOS_MDEU_MAX_CONTEXT_SIZE   TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512
864
865 struct talitos_ahash_req_ctx {
866         u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
867         unsigned int hw_context_size;
868         u8 buf[2][HASH_MAX_BLOCK_SIZE];
869         int buf_idx;
870         unsigned int swinit;
871         unsigned int first;
872         unsigned int last;
873         unsigned int to_hash_later;
874         unsigned int nbuf;
875         struct scatterlist bufsl[2];
876         struct scatterlist *psrc;
877 };
878
879 struct talitos_export_state {
880         u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
881         u8 buf[HASH_MAX_BLOCK_SIZE];
882         unsigned int swinit;
883         unsigned int first;
884         unsigned int last;
885         unsigned int to_hash_later;
886         unsigned int nbuf;
887 };
888
889 static int aead_setkey(struct crypto_aead *authenc,
890                        const u8 *key, unsigned int keylen)
891 {
892         struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
893         struct device *dev = ctx->dev;
894         struct crypto_authenc_keys keys;
895
896         if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
897                 goto badkey;
898
899         if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
900                 goto badkey;
901
902         if (ctx->keylen)
903                 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
904
905         memcpy(ctx->key, keys.authkey, keys.authkeylen);
906         memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);
907
908         ctx->keylen = keys.authkeylen + keys.enckeylen;
909         ctx->enckeylen = keys.enckeylen;
910         ctx->authkeylen = keys.authkeylen;
911         ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
912                                       DMA_TO_DEVICE);
913
914         memzero_explicit(&keys, sizeof(keys));
915         return 0;
916
917 badkey:
918         memzero_explicit(&keys, sizeof(keys));
919         return -EINVAL;
920 }
921
922 static int aead_des3_setkey(struct crypto_aead *authenc,
923                             const u8 *key, unsigned int keylen)
924 {
925         struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
926         struct device *dev = ctx->dev;
927         struct crypto_authenc_keys keys;
928         int err;
929
930         err = crypto_authenc_extractkeys(&keys, key, keylen);
931         if (unlikely(err))
932                 goto out;
933
934         err = -EINVAL;
935         if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
936                 goto out;
937
938         err = verify_aead_des3_key(authenc, keys.enckey, keys.enckeylen);
939         if (err)
940                 goto out;
941
942         if (ctx->keylen)
943                 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
944
945         memcpy(ctx->key, keys.authkey, keys.authkeylen);
946         memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);
947
948         ctx->keylen = keys.authkeylen + keys.enckeylen;
949         ctx->enckeylen = keys.enckeylen;
950         ctx->authkeylen = keys.authkeylen;
951         ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
952                                       DMA_TO_DEVICE);
953
954 out:
955         memzero_explicit(&keys, sizeof(keys));
956         return err;
957 }
958
959 static void talitos_sg_unmap(struct device *dev,
960                              struct talitos_edesc *edesc,
961                              struct scatterlist *src,
962                              struct scatterlist *dst,
963                              unsigned int len, unsigned int offset)
964 {
965         struct talitos_private *priv = dev_get_drvdata(dev);
966         bool is_sec1 = has_ftr_sec1(priv);
967         unsigned int src_nents = edesc->src_nents ? : 1;
968         unsigned int dst_nents = edesc->dst_nents ? : 1;
969
970         if (is_sec1 && dst && dst_nents > 1) {
971                 dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
972                                            len, DMA_FROM_DEVICE);
973                 sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
974                                      offset);
975         }
976         if (src != dst) {
977                 if (src_nents == 1 || !is_sec1)
978                         dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
979
980                 if (dst && (dst_nents == 1 || !is_sec1))
981                         dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
982         } else if (src_nents == 1 || !is_sec1) {
983                 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
984         }
985 }
986
987 static void ipsec_esp_unmap(struct device *dev,
988                             struct talitos_edesc *edesc,
989                             struct aead_request *areq, bool encrypt)
990 {
991         struct crypto_aead *aead = crypto_aead_reqtfm(areq);
992         struct talitos_ctx *ctx = crypto_aead_ctx(aead);
993         unsigned int ivsize = crypto_aead_ivsize(aead);
994         unsigned int authsize = crypto_aead_authsize(aead);
995         unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
996         bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
997         struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];
998
999         if (is_ipsec_esp)
1000                 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
1001                                          DMA_FROM_DEVICE);
1002         unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);
1003
1004         talitos_sg_unmap(dev, edesc, areq->src, areq->dst,
1005                          cryptlen + authsize, areq->assoclen);
1006
1007         if (edesc->dma_len)
1008                 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1009                                  DMA_BIDIRECTIONAL);
1010
1011         if (!is_ipsec_esp) {
1012                 unsigned int dst_nents = edesc->dst_nents ? : 1;
1013
1014                 sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
1015                                    areq->assoclen + cryptlen - ivsize);
1016         }
1017 }
1018
1019 /*
1020  * ipsec_esp descriptor callbacks
1021  */
1022 static void ipsec_esp_encrypt_done(struct device *dev,
1023                                    struct talitos_desc *desc, void *context,
1024                                    int err)
1025 {
1026         struct aead_request *areq = context;
1027         struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
1028         unsigned int ivsize = crypto_aead_ivsize(authenc);
1029         struct talitos_edesc *edesc;
1030
1031         edesc = container_of(desc, struct talitos_edesc, desc);
1032
1033         ipsec_esp_unmap(dev, edesc, areq, true);
1034
1035         dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);
1036
1037         kfree(edesc);
1038
1039         aead_request_complete(areq, err);
1040 }
1041
1042 static void ipsec_esp_decrypt_swauth_done(struct device *dev,
1043                                           struct talitos_desc *desc,
1044                                           void *context, int err)
1045 {
1046         struct aead_request *req = context;
1047         struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1048         unsigned int authsize = crypto_aead_authsize(authenc);
1049         struct talitos_edesc *edesc;
1050         char *oicv, *icv;
1051
1052         edesc = container_of(desc, struct talitos_edesc, desc);
1053
1054         ipsec_esp_unmap(dev, edesc, req, false);
1055
1056         if (!err) {
1057                 /* auth check */
1058                 oicv = edesc->buf + edesc->dma_len;
1059                 icv = oicv - authsize;
1060
1061                 err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
1062         }
1063
1064         kfree(edesc);
1065
1066         aead_request_complete(req, err);
1067 }
1068
1069 static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
1070                                           struct talitos_desc *desc,
1071                                           void *context, int err)
1072 {
1073         struct aead_request *req = context;
1074         struct talitos_edesc *edesc;
1075
1076         edesc = container_of(desc, struct talitos_edesc, desc);
1077
1078         ipsec_esp_unmap(dev, edesc, req, false);
1079
1080         /* check ICV auth status */
1081         if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
1082                      DESC_HDR_LO_ICCR1_PASS))
1083                 err = -EBADMSG;
1084
1085         kfree(edesc);
1086
1087         aead_request_complete(req, err);
1088 }
1089
1090 /*
1091  * convert scatterlist to SEC h/w link table format
1092  * stop at cryptlen bytes
1093  */
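/*
 * Each entry written here is a struct talitos_ptr; a segment that spans the
 * data/extent boundary is split in two, and the final entry is tagged
 * DESC_PTR_LNKTBL_RET to mark the end of the table.
 */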
1094 static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
1095                                  unsigned int offset, int datalen, int elen,
1096                                  struct talitos_ptr *link_tbl_ptr, int align)
1097 {
1098         int n_sg = elen ? sg_count + 1 : sg_count;
1099         int count = 0;
1100         int cryptlen = datalen + elen;
1101         int padding = ALIGN(cryptlen, align) - cryptlen;
1102
1103         while (cryptlen && sg && n_sg--) {
1104                 unsigned int len = sg_dma_len(sg);
1105
1106                 if (offset >= len) {
1107                         offset -= len;
1108                         goto next;
1109                 }
1110
1111                 len -= offset;
1112
1113                 if (len > cryptlen)
1114                         len = cryptlen;
1115
1116                 if (datalen > 0 && len > datalen) {
1117                         to_talitos_ptr(link_tbl_ptr + count,
1118                                        sg_dma_address(sg) + offset, datalen, 0);
1119                         to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
1120                         count++;
1121                         len -= datalen;
1122                         offset += datalen;
1123                 }
1124                 to_talitos_ptr(link_tbl_ptr + count,
1125                                sg_dma_address(sg) + offset, sg_next(sg) ? len : len + padding, 0);
1126                 to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
1127                 count++;
1128                 cryptlen -= len;
1129                 datalen -= len;
1130                 offset = 0;
1131
1132 next:
1133                 sg = sg_next(sg);
1134         }
1135
1136         /* tag end of link table */
1137         if (count > 0)
1138                 to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
1139                                        DESC_PTR_LNKTBL_RET, 0);
1140
1141         return count;
1142 }
1143
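/*
 * Map src into the descriptor pointer ptr using one of three strategies:
 * a single segment is referenced directly; on SEC1 multi-segment data is
 * taken from the bounce buffer at edesc->dma_link_tbl; otherwise a link
 * table is built via sg_to_link_tbl_offset() and the pointer is tagged
 * DESC_PTR_LNKTBL_JUMP.
 */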
1144 static int talitos_sg_map_ext(struct device *dev, struct scatterlist *src,
1145                               unsigned int len, struct talitos_edesc *edesc,
1146                               struct talitos_ptr *ptr, int sg_count,
1147                               unsigned int offset, int tbl_off, int elen,
1148                               bool force, int align)
1149 {
1150         struct talitos_private *priv = dev_get_drvdata(dev);
1151         bool is_sec1 = has_ftr_sec1(priv);
1152         int aligned_len = ALIGN(len, align);
1153
1154         if (!src) {
1155                 to_talitos_ptr(ptr, 0, 0, is_sec1);
1156                 return 1;
1157         }
1158         to_talitos_ptr_ext_set(ptr, elen, is_sec1);
1159         if (sg_count == 1 && !force) {
1160                 to_talitos_ptr(ptr, sg_dma_address(src) + offset, aligned_len, is_sec1);
1161                 return sg_count;
1162         }
1163         if (is_sec1) {
1164                 to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, aligned_len, is_sec1);
1165                 return sg_count;
1166         }
1167         sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len, elen,
1168                                          &edesc->link_tbl[tbl_off], align);
1169         if (sg_count == 1 && !force) {
1170                 /* Only one segment now, so no link tbl needed */
1171                 copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
1172                 return sg_count;
1173         }
1174         to_talitos_ptr(ptr, edesc->dma_link_tbl +
1175                             tbl_off * sizeof(struct talitos_ptr), aligned_len, is_sec1);
1176         to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);
1177
1178         return sg_count;
1179 }
1180
1181 static int talitos_sg_map(struct device *dev, struct scatterlist *src,
1182                           unsigned int len, struct talitos_edesc *edesc,
1183                           struct talitos_ptr *ptr, int sg_count,
1184                           unsigned int offset, int tbl_off)
1185 {
1186         return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
1187                                   tbl_off, 0, false, 1);
1188 }
1189
1190 /*
1191  * fill in and submit ipsec_esp descriptor
1192  */
1193 static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
1194                      bool encrypt,
1195                      void (*callback)(struct device *dev,
1196                                       struct talitos_desc *desc,
1197                                       void *context, int error))
1198 {
1199         struct crypto_aead *aead = crypto_aead_reqtfm(areq);
1200         unsigned int authsize = crypto_aead_authsize(aead);
1201         struct talitos_ctx *ctx = crypto_aead_ctx(aead);
1202         struct device *dev = ctx->dev;
1203         struct talitos_desc *desc = &edesc->desc;
1204         unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
1205         unsigned int ivsize = crypto_aead_ivsize(aead);
1206         int tbl_off = 0;
1207         int sg_count, ret;
1208         int elen = 0;
1209         bool sync_needed = false;
1210         struct talitos_private *priv = dev_get_drvdata(dev);
1211         bool is_sec1 = has_ftr_sec1(priv);
1212         bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
1213         struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
1214         struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];
1215         dma_addr_t dma_icv = edesc->dma_link_tbl + edesc->dma_len - authsize;
1216
1217         /* hmac key */
1218         to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);
1219
1220         sg_count = edesc->src_nents ?: 1;
1221         if (is_sec1 && sg_count > 1)
1222                 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1223                                   areq->assoclen + cryptlen);
1224         else
1225                 sg_count = dma_map_sg(dev, areq->src, sg_count,
1226                                       (areq->src == areq->dst) ?
1227                                       DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
1228
1229         /* hmac data */
1230         ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
1231                              &desc->ptr[1], sg_count, 0, tbl_off);
1232
1233         if (ret > 1) {
1234                 tbl_off += ret;
1235                 sync_needed = true;
1236         }
1237
1238         /* cipher iv */
1239         to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);
1240
1241         /* cipher key */
1242         to_talitos_ptr(ckey_ptr, ctx->dma_key  + ctx->authkeylen,
1243                        ctx->enckeylen, is_sec1);
1244
1245         /*
1246          * cipher in
1247          * map and adjust cipher len to aead request cryptlen.
1248          * extent is bytes of HMAC postpended to ciphertext,
1249          * typically 12 for ipsec
1250          */
1251         if (is_ipsec_esp && (desc->hdr & DESC_HDR_MODE1_MDEU_CICV))
1252                 elen = authsize;
1253
1254         ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
1255                                  sg_count, areq->assoclen, tbl_off, elen,
1256                                  false, 1);
1257
1258         if (ret > 1) {
1259                 tbl_off += ret;
1260                 sync_needed = true;
1261         }
1262
1263         /* cipher out */
1264         if (areq->src != areq->dst) {
1265                 sg_count = edesc->dst_nents ? : 1;
1266                 if (!is_sec1 || sg_count == 1)
1267                         dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1268         }
1269
1270         if (is_ipsec_esp && encrypt)
1271                 elen = authsize;
1272         else
1273                 elen = 0;
1274         ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
1275                                  sg_count, areq->assoclen, tbl_off, elen,
1276                                  is_ipsec_esp && !encrypt, 1);
1277         tbl_off += ret;
1278
1279         if (!encrypt && is_ipsec_esp) {
1280                 struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];
1281
1282                 /* Add an entry to the link table for ICV data */
1283                 to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
1284                 to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RET, is_sec1);
1285
1286                 /* icv data follows link tables */
1287                 to_talitos_ptr(tbl_ptr, dma_icv, authsize, is_sec1);
1288                 to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);
1289                 sync_needed = true;
1290         } else if (!encrypt) {
1291                 to_talitos_ptr(&desc->ptr[6], dma_icv, authsize, is_sec1);
1292                 sync_needed = true;
1293         } else if (!is_ipsec_esp) {
1294                 talitos_sg_map(dev, areq->dst, authsize, edesc, &desc->ptr[6],
1295                                sg_count, areq->assoclen + cryptlen, tbl_off);
1296         }
1297
1298         /* iv out */
1299         if (is_ipsec_esp)
1300                 map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
1301                                        DMA_FROM_DEVICE);
1302
1303         if (sync_needed)
1304                 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1305                                            edesc->dma_len,
1306                                            DMA_BIDIRECTIONAL);
1307
1308         ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
1309         if (ret != -EINPROGRESS) {
1310                 ipsec_esp_unmap(dev, edesc, areq, encrypt);
1311                 kfree(edesc);
1312         }
1313         return ret;
1314 }
1315
1316 /*
1317  * allocate and map the extended descriptor
1318  */
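/*
 * A single allocation packs everything the request needs behind the
 * base structure (each part is only present when its condition holds):
 *   struct talitos_edesc
 *   + dma_len bytes of link table (SEC2+) or bounce buffer (SEC1),
 *     mapped below as dma_link_tbl
 *   + authsize bytes for the stashed incoming ICV (icv_stashing)
 *   + a second struct talitos_desc (SEC1 ahash, i.e. !dst)
 *   + ivsize bytes holding a copy of the IV, mapped as iv_dma
 */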
1319 static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
1320                                                  struct scatterlist *src,
1321                                                  struct scatterlist *dst,
1322                                                  u8 *iv,
1323                                                  unsigned int assoclen,
1324                                                  unsigned int cryptlen,
1325                                                  unsigned int authsize,
1326                                                  unsigned int ivsize,
1327                                                  int icv_stashing,
1328                                                  u32 cryptoflags,
1329                                                  bool encrypt)
1330 {
1331         struct talitos_edesc *edesc;
1332         int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
1333         dma_addr_t iv_dma = 0;
1334         gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
1335                       GFP_ATOMIC;
1336         struct talitos_private *priv = dev_get_drvdata(dev);
1337         bool is_sec1 = has_ftr_sec1(priv);
1338         int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;
1339
1340         if (cryptlen + authsize > max_len) {
1341                 dev_err(dev, "length exceeds h/w max limit\n");
1342                 return ERR_PTR(-EINVAL);
1343         }
1344
1345         if (!dst || dst == src) {
1346                 src_len = assoclen + cryptlen + authsize;
1347                 src_nents = sg_nents_for_len(src, src_len);
1348                 if (src_nents < 0) {
1349                         dev_err(dev, "Invalid number of src SG.\n");
1350                         return ERR_PTR(-EINVAL);
1351                 }
1352                 src_nents = (src_nents == 1) ? 0 : src_nents;
1353                 dst_nents = dst ? src_nents : 0;
1354                 dst_len = 0;
1355         } else { /* dst && dst != src */
1356                 src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
1357                 src_nents = sg_nents_for_len(src, src_len);
1358                 if (src_nents < 0) {
1359                         dev_err(dev, "Invalid number of src SG.\n");
1360                         return ERR_PTR(-EINVAL);
1361                 }
1362                 src_nents = (src_nents == 1) ? 0 : src_nents;
1363                 dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
1364                 dst_nents = sg_nents_for_len(dst, dst_len);
1365                 if (dst_nents < 0) {
1366                         dev_err(dev, "Invalid number of dst SG.\n");
1367                         return ERR_PTR(-EINVAL);
1368                 }
1369                 dst_nents = (dst_nents == 1) ? 0 : dst_nents;
1370         }
1371
1372         /*
1373          * allocate space for base edesc plus the link tables,
1374          * allowing for two separate entries for AD and generated ICV (+ 2),
1375          * and space for two sets of ICVs (stashed and generated)
1376          */
1377         alloc_len = sizeof(struct talitos_edesc);
1378         if (src_nents || dst_nents || !encrypt) {
1379                 if (is_sec1)
1380                         dma_len = (src_nents ? src_len : 0) +
1381                                   (dst_nents ? dst_len : 0) + authsize;
1382                 else
1383                         dma_len = (src_nents + dst_nents + 2) *
1384                                   sizeof(struct talitos_ptr) + authsize;
1385                 alloc_len += dma_len;
1386         } else {
1387                 dma_len = 0;
1388         }
1389         alloc_len += icv_stashing ? authsize : 0;
1390
1391         /* if it's an ahash, add space for a second desc next to the first one */
1392         if (is_sec1 && !dst)
1393                 alloc_len += sizeof(struct talitos_desc);
1394         alloc_len += ivsize;
1395
1396         edesc = kmalloc(alloc_len, GFP_DMA | flags);
1397         if (!edesc)
1398                 return ERR_PTR(-ENOMEM);
1399         if (ivsize) {
1400                 iv = memcpy(((u8 *)edesc) + alloc_len - ivsize, iv, ivsize);
1401                 iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
1402         }
1403         memset(&edesc->desc, 0, sizeof(edesc->desc));
1404
1405         edesc->src_nents = src_nents;
1406         edesc->dst_nents = dst_nents;
1407         edesc->iv_dma = iv_dma;
1408         edesc->dma_len = dma_len;
1409         if (dma_len)
1410                 edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0],
1411                                                      edesc->dma_len,
1412                                                      DMA_BIDIRECTIONAL);
1413
1414         return edesc;
1415 }
1416
1417 static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
1418                                               int icv_stashing, bool encrypt)
1419 {
1420         struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
1421         unsigned int authsize = crypto_aead_authsize(authenc);
1422         struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1423         unsigned int ivsize = crypto_aead_ivsize(authenc);
1424         unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
1425
1426         return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
1427                                    iv, areq->assoclen, cryptlen,
1428                                    authsize, ivsize, icv_stashing,
1429                                    areq->base.flags, encrypt);
1430 }
1431
1432 static int aead_encrypt(struct aead_request *req)
1433 {
1434         struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1435         struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1436         struct talitos_edesc *edesc;
1437
1438         /* allocate extended descriptor */
1439         edesc = aead_edesc_alloc(req, req->iv, 0, true);
1440         if (IS_ERR(edesc))
1441                 return PTR_ERR(edesc);
1442
1443         /* set encrypt */
1444         edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1445
1446         return ipsec_esp(edesc, req, true, ipsec_esp_encrypt_done);
1447 }
1448
1449 static int aead_decrypt(struct aead_request *req)
1450 {
1451         struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1452         unsigned int authsize = crypto_aead_authsize(authenc);
1453         struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1454         struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1455         struct talitos_edesc *edesc;
1456         void *icvdata;
1457
1458         /* allocate extended descriptor */
1459         edesc = aead_edesc_alloc(req, req->iv, 1, false);
1460         if (IS_ERR(edesc))
1461                 return PTR_ERR(edesc);
1462
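        /*
         * Use the hardware ICV check when the descriptor type is IPsec
         * ESP, the SEC supports it, and either no link table is needed
         * or the SEC includes the extent in the source link table
         * length.  Otherwise stash the incoming ICV and compare it in
         * software from the completion callback.
         */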
1463         if ((edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP) &&
1464             (priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
1465             ((!edesc->src_nents && !edesc->dst_nents) ||
1466              priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {
1467
1468                 /* decrypt and check the ICV */
1469                 edesc->desc.hdr = ctx->desc_hdr_template |
1470                                   DESC_HDR_DIR_INBOUND |
1471                                   DESC_HDR_MODE1_MDEU_CICV;
1472
1473                 /* reset integrity check result bits */
1474
1475                 return ipsec_esp(edesc, req, false,
1476                                  ipsec_esp_decrypt_hwauth_done);
1477         }
1478
1479         /* Have to check the ICV with software */
1480         edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1481
1482         /* stash incoming ICV for later comparison with the ICV generated by the h/w */
1483         icvdata = edesc->buf + edesc->dma_len;
1484
1485         sg_pcopy_to_buffer(req->src, edesc->src_nents ? : 1, icvdata, authsize,
1486                            req->assoclen + req->cryptlen - authsize);
1487
1488         return ipsec_esp(edesc, req, false, ipsec_esp_decrypt_swauth_done);
1489 }
1490
1491 static int skcipher_setkey(struct crypto_skcipher *cipher,
1492                              const u8 *key, unsigned int keylen)
1493 {
1494         struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1495         struct device *dev = ctx->dev;
1496
1497         if (ctx->keylen)
1498                 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
1499
1500         memcpy(&ctx->key, key, keylen);
1501         ctx->keylen = keylen;
1502
1503         ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);
1504
1505         return 0;
1506 }
1507
1508 static int skcipher_des_setkey(struct crypto_skcipher *cipher,
1509                                  const u8 *key, unsigned int keylen)
1510 {
1511         return verify_skcipher_des_key(cipher, key) ?:
1512                skcipher_setkey(cipher, key, keylen);
1513 }
1514
1515 static int skcipher_des3_setkey(struct crypto_skcipher *cipher,
1516                                   const u8 *key, unsigned int keylen)
1517 {
1518         return verify_skcipher_des3_key(cipher, key) ?:
1519                skcipher_setkey(cipher, key, keylen);
1520 }
1521
1522 static int skcipher_aes_setkey(struct crypto_skcipher *cipher,
1523                                   const u8 *key, unsigned int keylen)
1524 {
1525         if (keylen == AES_KEYSIZE_128 || keylen == AES_KEYSIZE_192 ||
1526             keylen == AES_KEYSIZE_256)
1527                 return skcipher_setkey(cipher, key, keylen);
1528
1529         return -EINVAL;
1530 }
1531
1532 static void common_nonsnoop_unmap(struct device *dev,
1533                                   struct talitos_edesc *edesc,
1534                                   struct skcipher_request *areq)
1535 {
1536         unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
1537
1538         talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->cryptlen, 0);
1539         unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);
1540
1541         if (edesc->dma_len)
1542                 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1543                                  DMA_BIDIRECTIONAL);
1544 }
1545
1546 static void skcipher_done(struct device *dev,
1547                             struct talitos_desc *desc, void *context,
1548                             int err)
1549 {
1550         struct skcipher_request *areq = context;
1551         struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1552         struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1553         unsigned int ivsize = crypto_skcipher_ivsize(cipher);
1554         struct talitos_edesc *edesc;
1555
1556         edesc = container_of(desc, struct talitos_edesc, desc);
1557
1558         common_nonsnoop_unmap(dev, edesc, areq);
1559         memcpy(areq->iv, ctx->iv, ivsize);
1560
1561         kfree(edesc);
1562
1563         areq->base.complete(&areq->base, err);
1564 }
1565
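/*
 * Fill in and submit the skcipher descriptor.  Pointer usage:
 *   ptr[0]: unused
 *   ptr[1]: cipher IV
 *   ptr[2]: cipher key
 *   ptr[3]: cipher input (src, via link table or SEC1 bounce buffer)
 *   ptr[4]: cipher output (dst)
 *   ptr[5]: IV out, copied back to the request by skcipher_done()
 *   ptr[6]: unused
 */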
1566 static int common_nonsnoop(struct talitos_edesc *edesc,
1567                            struct skcipher_request *areq,
1568                            void (*callback) (struct device *dev,
1569                                              struct talitos_desc *desc,
1570                                              void *context, int error))
1571 {
1572         struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1573         struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1574         struct device *dev = ctx->dev;
1575         struct talitos_desc *desc = &edesc->desc;
1576         unsigned int cryptlen = areq->cryptlen;
1577         unsigned int ivsize = crypto_skcipher_ivsize(cipher);
1578         int sg_count, ret;
1579         bool sync_needed = false;
1580         struct talitos_private *priv = dev_get_drvdata(dev);
1581         bool is_sec1 = has_ftr_sec1(priv);
1582         bool is_ctr = (desc->hdr & DESC_HDR_SEL0_MASK) == DESC_HDR_SEL0_AESU &&
1583                       (desc->hdr & DESC_HDR_MODE0_AESU_MASK) == DESC_HDR_MODE0_AESU_CTR;
1584
1585         /* first DWORD empty */
1586
1587         /* cipher iv */
1588         to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);
1589
1590         /* cipher key */
1591         to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);
1592
1593         sg_count = edesc->src_nents ?: 1;
1594         if (is_sec1 && sg_count > 1)
1595                 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1596                                   cryptlen);
1597         else
1598                 sg_count = dma_map_sg(dev, areq->src, sg_count,
1599                                       (areq->src == areq->dst) ?
1600                                       DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
1601         /*
1602          * cipher in
1603          */
1604         sg_count = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[3],
1605                                       sg_count, 0, 0, 0, false, is_ctr ? 16 : 1);
1606         if (sg_count > 1)
1607                 sync_needed = true;
1608
1609         /* cipher out */
1610         if (areq->src != areq->dst) {
1611                 sg_count = edesc->dst_nents ? : 1;
1612                 if (!is_sec1 || sg_count == 1)
1613                         dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1614         }
1615
1616         ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
1617                              sg_count, 0, (edesc->src_nents + 1));
1618         if (ret > 1)
1619                 sync_needed = true;
1620
1621         /* iv out */
1622         map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
1623                                DMA_FROM_DEVICE);
1624
1625         /* last DWORD empty */
1626
1627         if (sync_needed)
1628                 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1629                                            edesc->dma_len, DMA_BIDIRECTIONAL);
1630
1631         ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
1632         if (ret != -EINPROGRESS) {
1633                 common_nonsnoop_unmap(dev, edesc, areq);
1634                 kfree(edesc);
1635         }
1636         return ret;
1637 }
1638
1639 static struct talitos_edesc *skcipher_edesc_alloc(struct skcipher_request *
1640                                                     areq, bool encrypt)
1641 {
1642         struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1643         struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1644         unsigned int ivsize = crypto_skcipher_ivsize(cipher);
1645
1646         return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
1647                                    areq->iv, 0, areq->cryptlen, 0, ivsize, 0,
1648                                    areq->base.flags, encrypt);
1649 }
1650
1651 static int skcipher_encrypt(struct skcipher_request *areq)
1652 {
1653         struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1654         struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1655         struct talitos_edesc *edesc;
1656         unsigned int blocksize =
1657                         crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher));
1658
1659         if (!areq->cryptlen)
1660                 return 0;
1661
1662         if (areq->cryptlen % blocksize)
1663                 return -EINVAL;
1664
1665         /* allocate extended descriptor */
1666         edesc = skcipher_edesc_alloc(areq, true);
1667         if (IS_ERR(edesc))
1668                 return PTR_ERR(edesc);
1669
1670         /* set encrypt */
1671         edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1672
1673         return common_nonsnoop(edesc, areq, skcipher_done);
1674 }
1675
1676 static int skcipher_decrypt(struct skcipher_request *areq)
1677 {
1678         struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1679         struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1680         struct talitos_edesc *edesc;
1681         unsigned int blocksize =
1682                         crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher));
1683
1684         if (!areq->cryptlen)
1685                 return 0;
1686
1687         if (areq->cryptlen % blocksize)
1688                 return -EINVAL;
1689
1690         /* allocate extended descriptor */
1691         edesc = skcipher_edesc_alloc(areq, false);
1692         if (IS_ERR(edesc))
1693                 return PTR_ERR(edesc);
1694
1695         edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1696
1697         return common_nonsnoop(edesc, areq, skcipher_done);
1698 }
1699
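/*
 * Undo the DMA mappings set up by common_nonsnoop_hash(), including
 * those of the chained second descriptor used on SEC1, and copy the
 * final digest back to the request when this was the last operation.
 */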
1700 static void common_nonsnoop_hash_unmap(struct device *dev,
1701                                        struct talitos_edesc *edesc,
1702                                        struct ahash_request *areq)
1703 {
1704         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1705         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1706         struct talitos_private *priv = dev_get_drvdata(dev);
1707         bool is_sec1 = has_ftr_sec1(priv);
1708         struct talitos_desc *desc = &edesc->desc;
1709         struct talitos_desc *desc2 = (struct talitos_desc *)
1710                                      (edesc->buf + edesc->dma_len);
1711
1712         unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
1713         if (desc->next_desc &&
1714             desc->ptr[5].ptr != desc2->ptr[5].ptr)
1715                 unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);
1716         if (req_ctx->last)
1717                 memcpy(areq->result, req_ctx->hw_context,
1718                        crypto_ahash_digestsize(tfm));
1719
1720         if (req_ctx->psrc)
1721                 talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);
1722
1723         /* When using hashctx-in, must unmap it. */
1724         if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
1725                 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
1726                                          DMA_TO_DEVICE);
1727         else if (desc->next_desc)
1728                 unmap_single_talitos_ptr(dev, &desc2->ptr[1],
1729                                          DMA_TO_DEVICE);
1730
1731         if (is_sec1 && req_ctx->nbuf)
1732                 unmap_single_talitos_ptr(dev, &desc->ptr[3],
1733                                          DMA_TO_DEVICE);
1734
1735         if (edesc->dma_len)
1736                 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1737                                  DMA_BIDIRECTIONAL);
1738
1739         if (edesc->desc.next_desc)
1740                 dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
1741                                  TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
1742 }
1743
1744 static void ahash_done(struct device *dev,
1745                        struct talitos_desc *desc, void *context,
1746                        int err)
1747 {
1748         struct ahash_request *areq = context;
1749         struct talitos_edesc *edesc =
1750                  container_of(desc, struct talitos_edesc, desc);
1751         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1752
1753         if (!req_ctx->last && req_ctx->to_hash_later) {
1754                 /* Position any partial block for next update/final/finup */
1755                 req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
1756                 req_ctx->nbuf = req_ctx->to_hash_later;
1757         }
1758         common_nonsnoop_hash_unmap(dev, edesc, areq);
1759
1760         kfree(edesc);
1761
1762         areq->base.complete(&areq->base, err);
1763 }
1764
1765 /*
1766  * SEC1 doesn't like hashing a 0-sized message, so we do the padding
1767  * ourselves and submit a padded block
1768  */
1769 static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
1770                                struct talitos_edesc *edesc,
1771                                struct talitos_ptr *ptr)
1772 {
1773         static u8 padded_hash[64] = {
1774                 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1775                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1776                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1777                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1778         };
1779
1780         pr_err_once("Bug in SEC1, padding ourself\n");
1781         edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1782         map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
1783                                (char *)padded_hash, DMA_TO_DEVICE);
1784 }
1785
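/*
 * Fill in and submit the hash descriptor.  Pointer usage:
 *   ptr[0]: unused
 *   ptr[1]: hash context in (when resuming, or after a software init)
 *   ptr[2]: HMAC key, when one is set
 *   ptr[3]: data in (on SEC1, previously buffered bytes go here)
 *   ptr[4]: unused
 *   ptr[5]: digest out (last) or hash context out (intermediate)
 *   ptr[6]: unused
 *
 * On SEC1, when both buffered bytes and new data are present, a second
 * descriptor is built after the link table and chained through
 * desc->next_desc so both parts are hashed in a single submission.
 */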
1786 static int common_nonsnoop_hash(struct talitos_edesc *edesc,
1787                                 struct ahash_request *areq, unsigned int length,
1788                                 void (*callback) (struct device *dev,
1789                                                   struct talitos_desc *desc,
1790                                                   void *context, int error))
1791 {
1792         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1793         struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1794         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1795         struct device *dev = ctx->dev;
1796         struct talitos_desc *desc = &edesc->desc;
1797         int ret;
1798         bool sync_needed = false;
1799         struct talitos_private *priv = dev_get_drvdata(dev);
1800         bool is_sec1 = has_ftr_sec1(priv);
1801         int sg_count;
1802
1803         /* first DWORD empty */
1804
1805         /* hash context in */
1806         if (!req_ctx->first || req_ctx->swinit) {
1807                 map_single_talitos_ptr_nosync(dev, &desc->ptr[1],
1808                                               req_ctx->hw_context_size,
1809                                               req_ctx->hw_context,
1810                                               DMA_TO_DEVICE);
1811                 req_ctx->swinit = 0;
1812         }
1813         /* Indicate next op is not the first. */
1814         req_ctx->first = 0;
1815
1816         /* HMAC key */
1817         if (ctx->keylen)
1818                 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
1819                                is_sec1);
1820
1821         if (is_sec1 && req_ctx->nbuf)
1822                 length -= req_ctx->nbuf;
1823
1824         sg_count = edesc->src_nents ?: 1;
1825         if (is_sec1 && sg_count > 1)
1826                 sg_copy_to_buffer(req_ctx->psrc, sg_count, edesc->buf, length);
1827         else if (length)
1828                 sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
1829                                       DMA_TO_DEVICE);
1830         /*
1831          * data in
1832          */
1833         if (is_sec1 && req_ctx->nbuf) {
1834                 map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
1835                                        req_ctx->buf[req_ctx->buf_idx],
1836                                        DMA_TO_DEVICE);
1837         } else {
1838                 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1839                                           &desc->ptr[3], sg_count, 0, 0);
1840                 if (sg_count > 1)
1841                         sync_needed = true;
1842         }
1843
1844         /* fifth DWORD empty */
1845
1846         /* hash/HMAC out -or- hash context out */
1847         if (req_ctx->last)
1848                 map_single_talitos_ptr(dev, &desc->ptr[5],
1849                                        crypto_ahash_digestsize(tfm),
1850                                        req_ctx->hw_context, DMA_FROM_DEVICE);
1851         else
1852                 map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1853                                               req_ctx->hw_context_size,
1854                                               req_ctx->hw_context,
1855                                               DMA_FROM_DEVICE);
1856
1857         /* last DWORD empty */
1858
1859         if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
1860                 talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);
1861
1862         if (is_sec1 && req_ctx->nbuf && length) {
1863                 struct talitos_desc *desc2 = (struct talitos_desc *)
1864                                              (edesc->buf + edesc->dma_len);
1865                 dma_addr_t next_desc;
1866
1867                 memset(desc2, 0, sizeof(*desc2));
1868                 desc2->hdr = desc->hdr;
1869                 desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
1870                 desc2->hdr1 = desc2->hdr;
1871                 desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1872                 desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
1873                 desc->hdr &= ~DESC_HDR_DONE_NOTIFY;
1874
1875                 if (desc->ptr[1].ptr)
1876                         copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
1877                                          is_sec1);
1878                 else
1879                         map_single_talitos_ptr_nosync(dev, &desc2->ptr[1],
1880                                                       req_ctx->hw_context_size,
1881                                                       req_ctx->hw_context,
1882                                                       DMA_TO_DEVICE);
1883                 copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
1884                 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1885                                           &desc2->ptr[3], sg_count, 0, 0);
1886                 if (sg_count > 1)
1887                         sync_needed = true;
1888                 copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
1889                 if (req_ctx->last)
1890                         map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1891                                                       req_ctx->hw_context_size,
1892                                                       req_ctx->hw_context,
1893                                                       DMA_FROM_DEVICE);
1894
1895                 next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
1896                                            DMA_BIDIRECTIONAL);
1897                 desc->next_desc = cpu_to_be32(next_desc);
1898         }
1899
1900         if (sync_needed)
1901                 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1902                                            edesc->dma_len, DMA_BIDIRECTIONAL);
1903
1904         ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
1905         if (ret != -EINPROGRESS) {
1906                 common_nonsnoop_hash_unmap(dev, edesc, areq);
1907                 kfree(edesc);
1908         }
1909         return ret;
1910 }
1911
1912 static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
1913                                                unsigned int nbytes)
1914 {
1915         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1916         struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1917         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1918         struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1919         bool is_sec1 = has_ftr_sec1(priv);
1920
1921         if (is_sec1)
1922                 nbytes -= req_ctx->nbuf;
1923
1924         return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
1925                                    nbytes, 0, 0, 0, areq->base.flags, false);
1926 }
1927
1928 static int ahash_init(struct ahash_request *areq)
1929 {
1930         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1931         struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1932         struct device *dev = ctx->dev;
1933         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1934         unsigned int size;
1935         dma_addr_t dma;
1936
1937         /* Initialize the context */
1938         req_ctx->buf_idx = 0;
1939         req_ctx->nbuf = 0;
1940         req_ctx->first = 1; /* first indicates h/w must init its context */
1941         req_ctx->swinit = 0; /* assume h/w init of context */
1942         size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
1943                         ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
1944                         : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
1945         req_ctx->hw_context_size = size;
1946
1947         dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
1948                              DMA_TO_DEVICE);
1949         dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
1950
1951         return 0;
1952 }
1953
1954 /*
1955  * on h/w without explicit sha224 support, we initialize h/w context
1956  * manually with sha224 constants, and tell it to run sha256.
1957  */
1958 static int ahash_init_sha224_swinit(struct ahash_request *areq)
1959 {
1960         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1961
1962         req_ctx->hw_context[0] = SHA224_H0;
1963         req_ctx->hw_context[1] = SHA224_H1;
1964         req_ctx->hw_context[2] = SHA224_H2;
1965         req_ctx->hw_context[3] = SHA224_H3;
1966         req_ctx->hw_context[4] = SHA224_H4;
1967         req_ctx->hw_context[5] = SHA224_H5;
1968         req_ctx->hw_context[6] = SHA224_H6;
1969         req_ctx->hw_context[7] = SHA224_H7;
1970
1971         /* init 64-bit count */
1972         req_ctx->hw_context[8] = 0;
1973         req_ctx->hw_context[9] = 0;
1974
1975         ahash_init(areq);
1976         req_ctx->swinit = 1; /* prevent h/w from initializing context with sha256 values */
1977
1978         return 0;
1979 }
1980
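/*
 * Core of update/final/finup: buffer input until more than one block
 * is available, hash the full blocks now, and keep the trailing
 * partial block (or one full block when the length is block-aligned)
 * buffered for a later call, unless this is the last request.
 */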
1981 static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
1982 {
1983         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1984         struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1985         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1986         struct talitos_edesc *edesc;
1987         unsigned int blocksize =
1988                         crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
1989         unsigned int nbytes_to_hash;
1990         unsigned int to_hash_later;
1991         unsigned int nsg;
1992         int nents;
1993         struct device *dev = ctx->dev;
1994         struct talitos_private *priv = dev_get_drvdata(dev);
1995         bool is_sec1 = has_ftr_sec1(priv);
1996         u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];
1997
1998         if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
1999                 /* Buffer up to one whole block */
2000                 nents = sg_nents_for_len(areq->src, nbytes);
2001                 if (nents < 0) {
2002                         dev_err(ctx->dev, "Invalid number of src SG.\n");
2003                         return nents;
2004                 }
2005                 sg_copy_to_buffer(areq->src, nents,
2006                                   ctx_buf + req_ctx->nbuf, nbytes);
2007                 req_ctx->nbuf += nbytes;
2008                 return 0;
2009         }
2010
2011         /* At least (blocksize + 1) bytes are available to hash */
2012         nbytes_to_hash = nbytes + req_ctx->nbuf;
2013         to_hash_later = nbytes_to_hash & (blocksize - 1);
2014
2015         if (req_ctx->last)
2016                 to_hash_later = 0;
2017         else if (to_hash_later)
2018                 /* There is a partial block. Hash the full block(s) now */
2019                 nbytes_to_hash -= to_hash_later;
2020         else {
2021                 /* Keep one block buffered */
2022                 nbytes_to_hash -= blocksize;
2023                 to_hash_later = blocksize;
2024         }
2025
2026         /* Chain in any previously buffered data */
2027         if (!is_sec1 && req_ctx->nbuf) {
2028                 nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
2029                 sg_init_table(req_ctx->bufsl, nsg);
2030                 sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
2031                 if (nsg > 1)
2032                         sg_chain(req_ctx->bufsl, 2, areq->src);
2033                 req_ctx->psrc = req_ctx->bufsl;
2034         } else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
2035                 int offset;
2036
2037                 if (nbytes_to_hash > blocksize)
2038                         offset = blocksize - req_ctx->nbuf;
2039                 else
2040                         offset = nbytes_to_hash - req_ctx->nbuf;
2041                 nents = sg_nents_for_len(areq->src, offset);
2042                 if (nents < 0) {
2043                         dev_err(ctx->dev, "Invalid number of src SG.\n");
2044                         return nents;
2045                 }
2046                 sg_copy_to_buffer(areq->src, nents,
2047                                   ctx_buf + req_ctx->nbuf, offset);
2048                 req_ctx->nbuf += offset;
2049                 req_ctx->psrc = scatterwalk_ffwd(req_ctx->bufsl, areq->src,
2050                                                  offset);
2051         } else
2052                 req_ctx->psrc = areq->src;
2053
2054         if (to_hash_later) {
2055                 nents = sg_nents_for_len(areq->src, nbytes);
2056                 if (nents < 0) {
2057                         dev_err(ctx->dev, "Invalid number of src SG.\n");
2058                         return nents;
2059                 }
2060                 sg_pcopy_to_buffer(areq->src, nents,
2061                                    req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
2062                                    to_hash_later,
2063                                    nbytes - to_hash_later);
2064         }
2065         req_ctx->to_hash_later = to_hash_later;
2066
2067         /* Allocate extended descriptor */
2068         edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
2069         if (IS_ERR(edesc))
2070                 return PTR_ERR(edesc);
2071
2072         edesc->desc.hdr = ctx->desc_hdr_template;
2073
2074         /* On last one, request SEC to pad; otherwise continue */
2075         if (req_ctx->last)
2076                 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
2077         else
2078                 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;
2079
2080         /* request SEC to INIT hash. */
2081         if (req_ctx->first && !req_ctx->swinit)
2082                 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;
2083
2084         /* When the tfm context has a keylen, it's an HMAC.
2085          * A first or last (i.e. not middle) descriptor must request HMAC.
2086          */
2087         if (ctx->keylen && (req_ctx->first || req_ctx->last))
2088                 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;
2089
2090         return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, ahash_done);
2091 }
2092
2093 static int ahash_update(struct ahash_request *areq)
2094 {
2095         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2096
2097         req_ctx->last = 0;
2098
2099         return ahash_process_req(areq, areq->nbytes);
2100 }
2101
2102 static int ahash_final(struct ahash_request *areq)
2103 {
2104         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2105
2106         req_ctx->last = 1;
2107
2108         return ahash_process_req(areq, 0);
2109 }
2110
2111 static int ahash_finup(struct ahash_request *areq)
2112 {
2113         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2114
2115         req_ctx->last = 1;
2116
2117         return ahash_process_req(areq, areq->nbytes);
2118 }
2119
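/*
 * digest = init + finup in a single call: reset the request state,
 * then hash all of ->src as the final operation.
 */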
2120 static int ahash_digest(struct ahash_request *areq)
2121 {
2122         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2123         struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
2124
2125         ahash->init(areq);
2126         req_ctx->last = 1;
2127
2128         return ahash_process_req(areq, areq->nbytes);
2129 }
2130
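/*
 * export/import serialize the intermediate state: the hardware hash
 * context (synced back from the device by a map/unmap cycle) plus any
 * buffered partial block, so a request can be suspended and resumed.
 */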
2131 static int ahash_export(struct ahash_request *areq, void *out)
2132 {
2133         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2134         struct talitos_export_state *export = out;
2135         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2136         struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2137         struct device *dev = ctx->dev;
2138         dma_addr_t dma;
2139
2140         dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2141                              DMA_FROM_DEVICE);
2142         dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_FROM_DEVICE);
2143
2144         memcpy(export->hw_context, req_ctx->hw_context,
2145                req_ctx->hw_context_size);
2146         memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
2147         export->swinit = req_ctx->swinit;
2148         export->first = req_ctx->first;
2149         export->last = req_ctx->last;
2150         export->to_hash_later = req_ctx->to_hash_later;
2151         export->nbuf = req_ctx->nbuf;
2152
2153         return 0;
2154 }
2155
2156 static int ahash_import(struct ahash_request *areq, const void *in)
2157 {
2158         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2159         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2160         struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2161         struct device *dev = ctx->dev;
2162         const struct talitos_export_state *export = in;
2163         unsigned int size;
2164         dma_addr_t dma;
2165
2166         memset(req_ctx, 0, sizeof(*req_ctx));
2167         size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
2168                         ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2169                         : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
2170         req_ctx->hw_context_size = size;
2171         memcpy(req_ctx->hw_context, export->hw_context, size);
2172         memcpy(req_ctx->buf[0], export->buf, export->nbuf);
2173         req_ctx->swinit = export->swinit;
2174         req_ctx->first = export->first;
2175         req_ctx->last = export->last;
2176         req_ctx->to_hash_later = export->to_hash_later;
2177         req_ctx->nbuf = export->nbuf;
2178
2179         dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2180                              DMA_TO_DEVICE);
2181         dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
2182
2183         return 0;
2184 }
2185
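/*
 * Digest an over-long HMAC key down to the digest size using this same
 * tfm, waiting synchronously for completion via crypto_wait_req().
 */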
2186 static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
2187                    u8 *hash)
2188 {
2189         struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2190
2191         struct scatterlist sg[1];
2192         struct ahash_request *req;
2193         struct crypto_wait wait;
2194         int ret;
2195
2196         crypto_init_wait(&wait);
2197
2198         req = ahash_request_alloc(tfm, GFP_KERNEL);
2199         if (!req)
2200                 return -ENOMEM;
2201
2202         /* Keep tfm keylen == 0 during hash of the long key */
2203         ctx->keylen = 0;
2204         ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2205                                    crypto_req_done, &wait);
2206
2207         sg_init_one(&sg[0], key, keylen);
2208
2209         ahash_request_set_crypt(req, sg, hash, keylen);
2210         ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
2211
2212         ahash_request_free(req);
2213
2214         return ret;
2215 }
2216
2217 static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2218                         unsigned int keylen)
2219 {
2220         struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2221         struct device *dev = ctx->dev;
2222         unsigned int blocksize =
2223                         crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2224         unsigned int digestsize = crypto_ahash_digestsize(tfm);
2225         unsigned int keysize = keylen;
2226         u8 hash[SHA512_DIGEST_SIZE];
2227         int ret;
2228
2229         if (keylen <= blocksize)
2230                 memcpy(ctx->key, key, keysize);
2231         else {
2232                 /* Must get the hash of the long key */
2233                 ret = keyhash(tfm, key, keylen, hash);
2234
2235                 if (ret)
2236                         return -EINVAL;
2237
2238                 keysize = digestsize;
2239                 memcpy(ctx->key, hash, digestsize);
2240         }
2241
2242         if (ctx->keylen)
2243                 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
2244
2245         ctx->keylen = keysize;
2246         ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
2247
2248         return 0;
2249 }
2250
2251
2252 struct talitos_alg_template {
2253         u32 type;
2254         u32 priority;
2255         union {
2256                 struct skcipher_alg skcipher;
2257                 struct ahash_alg hash;
2258                 struct aead_alg aead;
2259         } alg;
2260         __be32 desc_hdr_template;
2261 };
2262
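/*
 * Algorithm templates registered with the crypto API.  Each entry
 * pairs a generic algorithm definition with the desc_hdr_template that
 * selects the execution units and modes; the "-hsna" entries provide
 * the same algorithms through the HMAC_SNOOP_NO_AFEU descriptor type
 * at TALITOS_CRA_PRIORITY_AEAD_HSNA.
 *
 * Illustrative sketch only (not part of this driver; error handling
 * and authenc key formatting omitted): once registered, the AEADs are
 * reached through the generic crypto API, e.g.
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("authenc(hmac(sha1),cbc(aes))", 0, 0);
 *	crypto_aead_setkey(tfm, key, keylen);
 *	crypto_aead_setauthsize(tfm, SHA1_DIGEST_SIZE);
 *	...
 *	crypto_free_aead(tfm);
 */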
2263 static struct talitos_alg_template driver_algs[] = {
2264         /* AEAD algorithms.  These use a single-pass ipsec_esp descriptor */
2265         {       .type = CRYPTO_ALG_TYPE_AEAD,
2266                 .alg.aead = {
2267                         .base = {
2268                                 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2269                                 .cra_driver_name = "authenc-hmac-sha1-"
2270                                                    "cbc-aes-talitos",
2271                                 .cra_blocksize = AES_BLOCK_SIZE,
2272                                 .cra_flags = CRYPTO_ALG_ASYNC |
2273                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2274                         },
2275                         .ivsize = AES_BLOCK_SIZE,
2276                         .maxauthsize = SHA1_DIGEST_SIZE,
2277                 },
2278                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2279                                      DESC_HDR_SEL0_AESU |
2280                                      DESC_HDR_MODE0_AESU_CBC |
2281                                      DESC_HDR_SEL1_MDEUA |
2282                                      DESC_HDR_MODE1_MDEU_INIT |
2283                                      DESC_HDR_MODE1_MDEU_PAD |
2284                                      DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2285         },
2286         {       .type = CRYPTO_ALG_TYPE_AEAD,
2287                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2288                 .alg.aead = {
2289                         .base = {
2290                                 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2291                                 .cra_driver_name = "authenc-hmac-sha1-"
2292                                                    "cbc-aes-talitos-hsna",
2293                                 .cra_blocksize = AES_BLOCK_SIZE,
2294                                 .cra_flags = CRYPTO_ALG_ASYNC |
2295                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2296                         },
2297                         .ivsize = AES_BLOCK_SIZE,
2298                         .maxauthsize = SHA1_DIGEST_SIZE,
2299                 },
2300                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2301                                      DESC_HDR_SEL0_AESU |
2302                                      DESC_HDR_MODE0_AESU_CBC |
2303                                      DESC_HDR_SEL1_MDEUA |
2304                                      DESC_HDR_MODE1_MDEU_INIT |
2305                                      DESC_HDR_MODE1_MDEU_PAD |
2306                                      DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2307         },
2308         {       .type = CRYPTO_ALG_TYPE_AEAD,
2309                 .alg.aead = {
2310                         .base = {
2311                                 .cra_name = "authenc(hmac(sha1),"
2312                                             "cbc(des3_ede))",
2313                                 .cra_driver_name = "authenc-hmac-sha1-"
2314                                                    "cbc-3des-talitos",
2315                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2316                                 .cra_flags = CRYPTO_ALG_ASYNC |
2317                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2318                         },
2319                         .ivsize = DES3_EDE_BLOCK_SIZE,
2320                         .maxauthsize = SHA1_DIGEST_SIZE,
2321                         .setkey = aead_des3_setkey,
2322                 },
2323                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2324                                      DESC_HDR_SEL0_DEU |
2325                                      DESC_HDR_MODE0_DEU_CBC |
2326                                      DESC_HDR_MODE0_DEU_3DES |
2327                                      DESC_HDR_SEL1_MDEUA |
2328                                      DESC_HDR_MODE1_MDEU_INIT |
2329                                      DESC_HDR_MODE1_MDEU_PAD |
2330                                      DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2331         },
2332         {       .type = CRYPTO_ALG_TYPE_AEAD,
2333                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2334                 .alg.aead = {
2335                         .base = {
2336                                 .cra_name = "authenc(hmac(sha1),"
2337                                             "cbc(des3_ede))",
2338                                 .cra_driver_name = "authenc-hmac-sha1-"
2339                                                    "cbc-3des-talitos-hsna",
2340                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2341                                 .cra_flags = CRYPTO_ALG_ASYNC |
2342                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2343                         },
2344                         .ivsize = DES3_EDE_BLOCK_SIZE,
2345                         .maxauthsize = SHA1_DIGEST_SIZE,
2346                         .setkey = aead_des3_setkey,
2347                 },
2348                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2349                                      DESC_HDR_SEL0_DEU |
2350                                      DESC_HDR_MODE0_DEU_CBC |
2351                                      DESC_HDR_MODE0_DEU_3DES |
2352                                      DESC_HDR_SEL1_MDEUA |
2353                                      DESC_HDR_MODE1_MDEU_INIT |
2354                                      DESC_HDR_MODE1_MDEU_PAD |
2355                                      DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2356         },
2357         {       .type = CRYPTO_ALG_TYPE_AEAD,
2358                 .alg.aead = {
2359                         .base = {
2360                                 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2361                                 .cra_driver_name = "authenc-hmac-sha224-"
2362                                                    "cbc-aes-talitos",
2363                                 .cra_blocksize = AES_BLOCK_SIZE,
2364                                 .cra_flags = CRYPTO_ALG_ASYNC |
2365                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2366                         },
2367                         .ivsize = AES_BLOCK_SIZE,
2368                         .maxauthsize = SHA224_DIGEST_SIZE,
2369                 },
2370                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2371                                      DESC_HDR_SEL0_AESU |
2372                                      DESC_HDR_MODE0_AESU_CBC |
2373                                      DESC_HDR_SEL1_MDEUA |
2374                                      DESC_HDR_MODE1_MDEU_INIT |
2375                                      DESC_HDR_MODE1_MDEU_PAD |
2376                                      DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2377         },
2378         {       .type = CRYPTO_ALG_TYPE_AEAD,
2379                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2380                 .alg.aead = {
2381                         .base = {
2382                                 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2383                                 .cra_driver_name = "authenc-hmac-sha224-"
2384                                                    "cbc-aes-talitos-hsna",
2385                                 .cra_blocksize = AES_BLOCK_SIZE,
2386                                 .cra_flags = CRYPTO_ALG_ASYNC |
2387                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2388                         },
2389                         .ivsize = AES_BLOCK_SIZE,
2390                         .maxauthsize = SHA224_DIGEST_SIZE,
2391                 },
2392                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2393                                      DESC_HDR_SEL0_AESU |
2394                                      DESC_HDR_MODE0_AESU_CBC |
2395                                      DESC_HDR_SEL1_MDEUA |
2396                                      DESC_HDR_MODE1_MDEU_INIT |
2397                                      DESC_HDR_MODE1_MDEU_PAD |
2398                                      DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2399         },
2400         {       .type = CRYPTO_ALG_TYPE_AEAD,
2401                 .alg.aead = {
2402                         .base = {
2403                                 .cra_name = "authenc(hmac(sha224),"
2404                                             "cbc(des3_ede))",
2405                                 .cra_driver_name = "authenc-hmac-sha224-"
2406                                                    "cbc-3des-talitos",
2407                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2408                                 .cra_flags = CRYPTO_ALG_ASYNC |
2409                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2410                         },
2411                         .ivsize = DES3_EDE_BLOCK_SIZE,
2412                         .maxauthsize = SHA224_DIGEST_SIZE,
2413                         .setkey = aead_des3_setkey,
2414                 },
2415                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2416                                      DESC_HDR_SEL0_DEU |
2417                                      DESC_HDR_MODE0_DEU_CBC |
2418                                      DESC_HDR_MODE0_DEU_3DES |
2419                                      DESC_HDR_SEL1_MDEUA |
2420                                      DESC_HDR_MODE1_MDEU_INIT |
2421                                      DESC_HDR_MODE1_MDEU_PAD |
2422                                      DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2423         },
2424         {       .type = CRYPTO_ALG_TYPE_AEAD,
2425                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2426                 .alg.aead = {
2427                         .base = {
2428                                 .cra_name = "authenc(hmac(sha224),"
2429                                             "cbc(des3_ede))",
2430                                 .cra_driver_name = "authenc-hmac-sha224-"
2431                                                    "cbc-3des-talitos-hsna",
2432                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2433                                 .cra_flags = CRYPTO_ALG_ASYNC |
2434                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2435                         },
2436                         .ivsize = DES3_EDE_BLOCK_SIZE,
2437                         .maxauthsize = SHA224_DIGEST_SIZE,
2438                         .setkey = aead_des3_setkey,
2439                 },
2440                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2441                                      DESC_HDR_SEL0_DEU |
2442                                      DESC_HDR_MODE0_DEU_CBC |
2443                                      DESC_HDR_MODE0_DEU_3DES |
2444                                      DESC_HDR_SEL1_MDEUA |
2445                                      DESC_HDR_MODE1_MDEU_INIT |
2446                                      DESC_HDR_MODE1_MDEU_PAD |
2447                                      DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2448         },
2449         {       .type = CRYPTO_ALG_TYPE_AEAD,
2450                 .alg.aead = {
2451                         .base = {
2452                                 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2453                                 .cra_driver_name = "authenc-hmac-sha256-"
2454                                                    "cbc-aes-talitos",
2455                                 .cra_blocksize = AES_BLOCK_SIZE,
2456                                 .cra_flags = CRYPTO_ALG_ASYNC |
2457                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2458                         },
2459                         .ivsize = AES_BLOCK_SIZE,
2460                         .maxauthsize = SHA256_DIGEST_SIZE,
2461                 },
2462                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2463                                      DESC_HDR_SEL0_AESU |
2464                                      DESC_HDR_MODE0_AESU_CBC |
2465                                      DESC_HDR_SEL1_MDEUA |
2466                                      DESC_HDR_MODE1_MDEU_INIT |
2467                                      DESC_HDR_MODE1_MDEU_PAD |
2468                                      DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2469         },
2470         {       .type = CRYPTO_ALG_TYPE_AEAD,
2471                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2472                 .alg.aead = {
2473                         .base = {
2474                                 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2475                                 .cra_driver_name = "authenc-hmac-sha256-"
2476                                                    "cbc-aes-talitos-hsna",
2477                                 .cra_blocksize = AES_BLOCK_SIZE,
2478                                 .cra_flags = CRYPTO_ALG_ASYNC |
2479                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2480                         },
2481                         .ivsize = AES_BLOCK_SIZE,
2482                         .maxauthsize = SHA256_DIGEST_SIZE,
2483                 },
2484                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2485                                      DESC_HDR_SEL0_AESU |
2486                                      DESC_HDR_MODE0_AESU_CBC |
2487                                      DESC_HDR_SEL1_MDEUA |
2488                                      DESC_HDR_MODE1_MDEU_INIT |
2489                                      DESC_HDR_MODE1_MDEU_PAD |
2490                                      DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2491         },
2492         {       .type = CRYPTO_ALG_TYPE_AEAD,
2493                 .alg.aead = {
2494                         .base = {
2495                                 .cra_name = "authenc(hmac(sha256),"
2496                                             "cbc(des3_ede))",
2497                                 .cra_driver_name = "authenc-hmac-sha256-"
2498                                                    "cbc-3des-talitos",
2499                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2500                                 .cra_flags = CRYPTO_ALG_ASYNC |
2501                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2502                         },
2503                         .ivsize = DES3_EDE_BLOCK_SIZE,
2504                         .maxauthsize = SHA256_DIGEST_SIZE,
2505                         .setkey = aead_des3_setkey,
2506                 },
2507                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2508                                      DESC_HDR_SEL0_DEU |
2509                                      DESC_HDR_MODE0_DEU_CBC |
2510                                      DESC_HDR_MODE0_DEU_3DES |
2511                                      DESC_HDR_SEL1_MDEUA |
2512                                      DESC_HDR_MODE1_MDEU_INIT |
2513                                      DESC_HDR_MODE1_MDEU_PAD |
2514                                      DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2515         },
2516         {       .type = CRYPTO_ALG_TYPE_AEAD,
2517                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2518                 .alg.aead = {
2519                         .base = {
2520                                 .cra_name = "authenc(hmac(sha256),"
2521                                             "cbc(des3_ede))",
2522                                 .cra_driver_name = "authenc-hmac-sha256-"
2523                                                    "cbc-3des-talitos-hsna",
2524                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2525                                 .cra_flags = CRYPTO_ALG_ASYNC |
2526                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2527                         },
2528                         .ivsize = DES3_EDE_BLOCK_SIZE,
2529                         .maxauthsize = SHA256_DIGEST_SIZE,
2530                         .setkey = aead_des3_setkey,
2531                 },
2532                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2533                                      DESC_HDR_SEL0_DEU |
2534                                      DESC_HDR_MODE0_DEU_CBC |
2535                                      DESC_HDR_MODE0_DEU_3DES |
2536                                      DESC_HDR_SEL1_MDEUA |
2537                                      DESC_HDR_MODE1_MDEU_INIT |
2538                                      DESC_HDR_MODE1_MDEU_PAD |
2539                                      DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2540         },
2541         {       .type = CRYPTO_ALG_TYPE_AEAD,
2542                 .alg.aead = {
2543                         .base = {
2544                                 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2545                                 .cra_driver_name = "authenc-hmac-sha384-"
2546                                                    "cbc-aes-talitos",
2547                                 .cra_blocksize = AES_BLOCK_SIZE,
2548                                 .cra_flags = CRYPTO_ALG_ASYNC |
2549                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2550                         },
2551                         .ivsize = AES_BLOCK_SIZE,
2552                         .maxauthsize = SHA384_DIGEST_SIZE,
2553                 },
2554                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2555                                      DESC_HDR_SEL0_AESU |
2556                                      DESC_HDR_MODE0_AESU_CBC |
2557                                      DESC_HDR_SEL1_MDEUB |
2558                                      DESC_HDR_MODE1_MDEU_INIT |
2559                                      DESC_HDR_MODE1_MDEU_PAD |
2560                                      DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2561         },
2562         {       .type = CRYPTO_ALG_TYPE_AEAD,
2563                 .alg.aead = {
2564                         .base = {
2565                                 .cra_name = "authenc(hmac(sha384),"
2566                                             "cbc(des3_ede))",
2567                                 .cra_driver_name = "authenc-hmac-sha384-"
2568                                                    "cbc-3des-talitos",
2569                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2570                                 .cra_flags = CRYPTO_ALG_ASYNC |
2571                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2572                         },
2573                         .ivsize = DES3_EDE_BLOCK_SIZE,
2574                         .maxauthsize = SHA384_DIGEST_SIZE,
2575                         .setkey = aead_des3_setkey,
2576                 },
2577                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2578                                      DESC_HDR_SEL0_DEU |
2579                                      DESC_HDR_MODE0_DEU_CBC |
2580                                      DESC_HDR_MODE0_DEU_3DES |
2581                                      DESC_HDR_SEL1_MDEUB |
2582                                      DESC_HDR_MODE1_MDEU_INIT |
2583                                      DESC_HDR_MODE1_MDEU_PAD |
2584                                      DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2585         },
2586         {       .type = CRYPTO_ALG_TYPE_AEAD,
2587                 .alg.aead = {
2588                         .base = {
2589                                 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2590                                 .cra_driver_name = "authenc-hmac-sha512-"
2591                                                    "cbc-aes-talitos",
2592                                 .cra_blocksize = AES_BLOCK_SIZE,
2593                                 .cra_flags = CRYPTO_ALG_ASYNC |
2594                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2595                         },
2596                         .ivsize = AES_BLOCK_SIZE,
2597                         .maxauthsize = SHA512_DIGEST_SIZE,
2598                 },
2599                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2600                                      DESC_HDR_SEL0_AESU |
2601                                      DESC_HDR_MODE0_AESU_CBC |
2602                                      DESC_HDR_SEL1_MDEUB |
2603                                      DESC_HDR_MODE1_MDEU_INIT |
2604                                      DESC_HDR_MODE1_MDEU_PAD |
2605                                      DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2606         },
2607         {       .type = CRYPTO_ALG_TYPE_AEAD,
2608                 .alg.aead = {
2609                         .base = {
2610                                 .cra_name = "authenc(hmac(sha512),"
2611                                             "cbc(des3_ede))",
2612                                 .cra_driver_name = "authenc-hmac-sha512-"
2613                                                    "cbc-3des-talitos",
2614                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2615                                 .cra_flags = CRYPTO_ALG_ASYNC |
2616                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2617                         },
2618                         .ivsize = DES3_EDE_BLOCK_SIZE,
2619                         .maxauthsize = SHA512_DIGEST_SIZE,
2620                         .setkey = aead_des3_setkey,
2621                 },
2622                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2623                                      DESC_HDR_SEL0_DEU |
2624                                      DESC_HDR_MODE0_DEU_CBC |
2625                                      DESC_HDR_MODE0_DEU_3DES |
2626                                      DESC_HDR_SEL1_MDEUB |
2627                                      DESC_HDR_MODE1_MDEU_INIT |
2628                                      DESC_HDR_MODE1_MDEU_PAD |
2629                                      DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2630         },
2631         {       .type = CRYPTO_ALG_TYPE_AEAD,
2632                 .alg.aead = {
2633                         .base = {
2634                                 .cra_name = "authenc(hmac(md5),cbc(aes))",
2635                                 .cra_driver_name = "authenc-hmac-md5-"
2636                                                    "cbc-aes-talitos",
2637                                 .cra_blocksize = AES_BLOCK_SIZE,
2638                                 .cra_flags = CRYPTO_ALG_ASYNC |
2639                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2640                         },
2641                         .ivsize = AES_BLOCK_SIZE,
2642                         .maxauthsize = MD5_DIGEST_SIZE,
2643                 },
2644                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2645                                      DESC_HDR_SEL0_AESU |
2646                                      DESC_HDR_MODE0_AESU_CBC |
2647                                      DESC_HDR_SEL1_MDEUA |
2648                                      DESC_HDR_MODE1_MDEU_INIT |
2649                                      DESC_HDR_MODE1_MDEU_PAD |
2650                                      DESC_HDR_MODE1_MDEU_MD5_HMAC,
2651         },
2652         {       .type = CRYPTO_ALG_TYPE_AEAD,
2653                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2654                 .alg.aead = {
2655                         .base = {
2656                                 .cra_name = "authenc(hmac(md5),cbc(aes))",
2657                                 .cra_driver_name = "authenc-hmac-md5-"
2658                                                    "cbc-aes-talitos-hsna",
2659                                 .cra_blocksize = AES_BLOCK_SIZE,
2660                                 .cra_flags = CRYPTO_ALG_ASYNC |
2661                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2662                         },
2663                         .ivsize = AES_BLOCK_SIZE,
2664                         .maxauthsize = MD5_DIGEST_SIZE,
2665                 },
2666                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2667                                      DESC_HDR_SEL0_AESU |
2668                                      DESC_HDR_MODE0_AESU_CBC |
2669                                      DESC_HDR_SEL1_MDEUA |
2670                                      DESC_HDR_MODE1_MDEU_INIT |
2671                                      DESC_HDR_MODE1_MDEU_PAD |
2672                                      DESC_HDR_MODE1_MDEU_MD5_HMAC,
2673         },
2674         {       .type = CRYPTO_ALG_TYPE_AEAD,
2675                 .alg.aead = {
2676                         .base = {
2677                                 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2678                                 .cra_driver_name = "authenc-hmac-md5-"
2679                                                    "cbc-3des-talitos",
2680                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2681                                 .cra_flags = CRYPTO_ALG_ASYNC |
2682                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2683                         },
2684                         .ivsize = DES3_EDE_BLOCK_SIZE,
2685                         .maxauthsize = MD5_DIGEST_SIZE,
2686                         .setkey = aead_des3_setkey,
2687                 },
2688                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2689                                      DESC_HDR_SEL0_DEU |
2690                                      DESC_HDR_MODE0_DEU_CBC |
2691                                      DESC_HDR_MODE0_DEU_3DES |
2692                                      DESC_HDR_SEL1_MDEUA |
2693                                      DESC_HDR_MODE1_MDEU_INIT |
2694                                      DESC_HDR_MODE1_MDEU_PAD |
2695                                      DESC_HDR_MODE1_MDEU_MD5_HMAC,
2696         },
2697         {       .type = CRYPTO_ALG_TYPE_AEAD,
2698                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2699                 .alg.aead = {
2700                         .base = {
2701                                 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2702                                 .cra_driver_name = "authenc-hmac-md5-"
2703                                                    "cbc-3des-talitos-hsna",
2704                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2705                                 .cra_flags = CRYPTO_ALG_ASYNC |
2706                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2707                         },
2708                         .ivsize = DES3_EDE_BLOCK_SIZE,
2709                         .maxauthsize = MD5_DIGEST_SIZE,
2710                         .setkey = aead_des3_setkey,
2711                 },
2712                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2713                                      DESC_HDR_SEL0_DEU |
2714                                      DESC_HDR_MODE0_DEU_CBC |
2715                                      DESC_HDR_MODE0_DEU_3DES |
2716                                      DESC_HDR_SEL1_MDEUA |
2717                                      DESC_HDR_MODE1_MDEU_INIT |
2718                                      DESC_HDR_MODE1_MDEU_PAD |
2719                                      DESC_HDR_MODE1_MDEU_MD5_HMAC,
2720         },
2721         /* SKCIPHER algorithms. */
2722         {       .type = CRYPTO_ALG_TYPE_SKCIPHER,
2723                 .alg.skcipher = {
2724                         .base.cra_name = "ecb(aes)",
2725                         .base.cra_driver_name = "ecb-aes-talitos",
2726                         .base.cra_blocksize = AES_BLOCK_SIZE,
2727                         .base.cra_flags = CRYPTO_ALG_ASYNC |
2728                                           CRYPTO_ALG_ALLOCATES_MEMORY,
2729                         .min_keysize = AES_MIN_KEY_SIZE,
2730                         .max_keysize = AES_MAX_KEY_SIZE,
2731                         .setkey = skcipher_aes_setkey,
2732                 },
2733                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2734                                      DESC_HDR_SEL0_AESU,
2735         },
2736         {       .type = CRYPTO_ALG_TYPE_SKCIPHER,
2737                 .alg.skcipher = {
2738                         .base.cra_name = "cbc(aes)",
2739                         .base.cra_driver_name = "cbc-aes-talitos",
2740                         .base.cra_blocksize = AES_BLOCK_SIZE,
2741                         .base.cra_flags = CRYPTO_ALG_ASYNC |
2742                                           CRYPTO_ALG_ALLOCATES_MEMORY,
2743                         .min_keysize = AES_MIN_KEY_SIZE,
2744                         .max_keysize = AES_MAX_KEY_SIZE,
2745                         .ivsize = AES_BLOCK_SIZE,
2746                         .setkey = skcipher_aes_setkey,
2747                 },
2748                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2749                                      DESC_HDR_SEL0_AESU |
2750                                      DESC_HDR_MODE0_AESU_CBC,
2751         },
2752         {       .type = CRYPTO_ALG_TYPE_SKCIPHER,
2753                 .alg.skcipher = {
2754                         .base.cra_name = "ctr(aes)",
2755                         .base.cra_driver_name = "ctr-aes-talitos",
2756                         .base.cra_blocksize = 1,
2757                         .base.cra_flags = CRYPTO_ALG_ASYNC |
2758                                           CRYPTO_ALG_ALLOCATES_MEMORY,
2759                         .min_keysize = AES_MIN_KEY_SIZE,
2760                         .max_keysize = AES_MAX_KEY_SIZE,
2761                         .ivsize = AES_BLOCK_SIZE,
2762                         .setkey = skcipher_aes_setkey,
2763                 },
2764                 .desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
2765                                      DESC_HDR_SEL0_AESU |
2766                                      DESC_HDR_MODE0_AESU_CTR,
2767         },
2768         {       .type = CRYPTO_ALG_TYPE_SKCIPHER,
2769                 .alg.skcipher = {
2770                         .base.cra_name = "ctr(aes)",
2771                         .base.cra_driver_name = "ctr-aes-talitos",
2772                         .base.cra_blocksize = 1,
2773                         .base.cra_flags = CRYPTO_ALG_ASYNC |
2774                                           CRYPTO_ALG_ALLOCATES_MEMORY,
2775                         .min_keysize = AES_MIN_KEY_SIZE,
2776                         .max_keysize = AES_MAX_KEY_SIZE,
2777                         .ivsize = AES_BLOCK_SIZE,
2778                         .setkey = skcipher_aes_setkey,
2779                 },
2780                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2781                                      DESC_HDR_SEL0_AESU |
2782                                      DESC_HDR_MODE0_AESU_CTR,
2783         },
2784         {       .type = CRYPTO_ALG_TYPE_SKCIPHER,
2785                 .alg.skcipher = {
2786                         .base.cra_name = "ecb(des)",
2787                         .base.cra_driver_name = "ecb-des-talitos",
2788                         .base.cra_blocksize = DES_BLOCK_SIZE,
2789                         .base.cra_flags = CRYPTO_ALG_ASYNC |
2790                                           CRYPTO_ALG_ALLOCATES_MEMORY,
2791                         .min_keysize = DES_KEY_SIZE,
2792                         .max_keysize = DES_KEY_SIZE,
2793                         .setkey = skcipher_des_setkey,
2794                 },
2795                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2796                                      DESC_HDR_SEL0_DEU,
2797         },
2798         {       .type = CRYPTO_ALG_TYPE_SKCIPHER,
2799                 .alg.skcipher = {
2800                         .base.cra_name = "cbc(des)",
2801                         .base.cra_driver_name = "cbc-des-talitos",
2802                         .base.cra_blocksize = DES_BLOCK_SIZE,
2803                         .base.cra_flags = CRYPTO_ALG_ASYNC |
2804                                           CRYPTO_ALG_ALLOCATES_MEMORY,
2805                         .min_keysize = DES_KEY_SIZE,
2806                         .max_keysize = DES_KEY_SIZE,
2807                         .ivsize = DES_BLOCK_SIZE,
2808                         .setkey = skcipher_des_setkey,
2809                 },
2810                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2811                                      DESC_HDR_SEL0_DEU |
2812                                      DESC_HDR_MODE0_DEU_CBC,
2813         },
2814         {       .type = CRYPTO_ALG_TYPE_SKCIPHER,
2815                 .alg.skcipher = {
2816                         .base.cra_name = "ecb(des3_ede)",
2817                         .base.cra_driver_name = "ecb-3des-talitos",
2818                         .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2819                         .base.cra_flags = CRYPTO_ALG_ASYNC |
2820                                           CRYPTO_ALG_ALLOCATES_MEMORY,
2821                         .min_keysize = DES3_EDE_KEY_SIZE,
2822                         .max_keysize = DES3_EDE_KEY_SIZE,
2823                         .setkey = skcipher_des3_setkey,
2824                 },
2825                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2826                                      DESC_HDR_SEL0_DEU |
2827                                      DESC_HDR_MODE0_DEU_3DES,
2828         },
2829         {       .type = CRYPTO_ALG_TYPE_SKCIPHER,
2830                 .alg.skcipher = {
2831                         .base.cra_name = "cbc(des3_ede)",
2832                         .base.cra_driver_name = "cbc-3des-talitos",
2833                         .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2834                         .base.cra_flags = CRYPTO_ALG_ASYNC |
2835                                           CRYPTO_ALG_ALLOCATES_MEMORY,
2836                         .min_keysize = DES3_EDE_KEY_SIZE,
2837                         .max_keysize = DES3_EDE_KEY_SIZE,
2838                         .ivsize = DES3_EDE_BLOCK_SIZE,
2839                         .setkey = skcipher_des3_setkey,
2840                 },
2841                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2842                                      DESC_HDR_SEL0_DEU |
2843                                      DESC_HDR_MODE0_DEU_CBC |
2844                                      DESC_HDR_MODE0_DEU_3DES,
2845         },
2846         /* AHASH algorithms. */
2847         {       .type = CRYPTO_ALG_TYPE_AHASH,
2848                 .alg.hash = {
2849                         .halg.digestsize = MD5_DIGEST_SIZE,
2850                         .halg.statesize = sizeof(struct talitos_export_state),
2851                         .halg.base = {
2852                                 .cra_name = "md5",
2853                                 .cra_driver_name = "md5-talitos",
2854                                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
2855                                 .cra_flags = CRYPTO_ALG_ASYNC |
2856                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2857                         }
2858                 },
2859                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2860                                      DESC_HDR_SEL0_MDEUA |
2861                                      DESC_HDR_MODE0_MDEU_MD5,
2862         },
2863         {       .type = CRYPTO_ALG_TYPE_AHASH,
2864                 .alg.hash = {
2865                         .halg.digestsize = SHA1_DIGEST_SIZE,
2866                         .halg.statesize = sizeof(struct talitos_export_state),
2867                         .halg.base = {
2868                                 .cra_name = "sha1",
2869                                 .cra_driver_name = "sha1-talitos",
2870                                 .cra_blocksize = SHA1_BLOCK_SIZE,
2871                                 .cra_flags = CRYPTO_ALG_ASYNC |
2872                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2873                         }
2874                 },
2875                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2876                                      DESC_HDR_SEL0_MDEUA |
2877                                      DESC_HDR_MODE0_MDEU_SHA1,
2878         },
2879         {       .type = CRYPTO_ALG_TYPE_AHASH,
2880                 .alg.hash = {
2881                         .halg.digestsize = SHA224_DIGEST_SIZE,
2882                         .halg.statesize = sizeof(struct talitos_export_state),
2883                         .halg.base = {
2884                                 .cra_name = "sha224",
2885                                 .cra_driver_name = "sha224-talitos",
2886                                 .cra_blocksize = SHA224_BLOCK_SIZE,
2887                                 .cra_flags = CRYPTO_ALG_ASYNC |
2888                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2889                         }
2890                 },
2891                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2892                                      DESC_HDR_SEL0_MDEUA |
2893                                      DESC_HDR_MODE0_MDEU_SHA224,
2894         },
2895         {       .type = CRYPTO_ALG_TYPE_AHASH,
2896                 .alg.hash = {
2897                         .halg.digestsize = SHA256_DIGEST_SIZE,
2898                         .halg.statesize = sizeof(struct talitos_export_state),
2899                         .halg.base = {
2900                                 .cra_name = "sha256",
2901                                 .cra_driver_name = "sha256-talitos",
2902                                 .cra_blocksize = SHA256_BLOCK_SIZE,
2903                                 .cra_flags = CRYPTO_ALG_ASYNC |
2904                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2905                         }
2906                 },
2907                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2908                                      DESC_HDR_SEL0_MDEUA |
2909                                      DESC_HDR_MODE0_MDEU_SHA256,
2910         },
2911         {       .type = CRYPTO_ALG_TYPE_AHASH,
2912                 .alg.hash = {
2913                         .halg.digestsize = SHA384_DIGEST_SIZE,
2914                         .halg.statesize = sizeof(struct talitos_export_state),
2915                         .halg.base = {
2916                                 .cra_name = "sha384",
2917                                 .cra_driver_name = "sha384-talitos",
2918                                 .cra_blocksize = SHA384_BLOCK_SIZE,
2919                                 .cra_flags = CRYPTO_ALG_ASYNC |
2920                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2921                         }
2922                 },
2923                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2924                                      DESC_HDR_SEL0_MDEUB |
2925                                      DESC_HDR_MODE0_MDEUB_SHA384,
2926         },
2927         {       .type = CRYPTO_ALG_TYPE_AHASH,
2928                 .alg.hash = {
2929                         .halg.digestsize = SHA512_DIGEST_SIZE,
2930                         .halg.statesize = sizeof(struct talitos_export_state),
2931                         .halg.base = {
2932                                 .cra_name = "sha512",
2933                                 .cra_driver_name = "sha512-talitos",
2934                                 .cra_blocksize = SHA512_BLOCK_SIZE,
2935                                 .cra_flags = CRYPTO_ALG_ASYNC |
2936                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2937                         }
2938                 },
2939                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2940                                      DESC_HDR_SEL0_MDEUB |
2941                                      DESC_HDR_MODE0_MDEUB_SHA512,
2942         },
2943         {       .type = CRYPTO_ALG_TYPE_AHASH,
2944                 .alg.hash = {
2945                         .halg.digestsize = MD5_DIGEST_SIZE,
2946                         .halg.statesize = sizeof(struct talitos_export_state),
2947                         .halg.base = {
2948                                 .cra_name = "hmac(md5)",
2949                                 .cra_driver_name = "hmac-md5-talitos",
2950                                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
2951                                 .cra_flags = CRYPTO_ALG_ASYNC |
2952                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2953                         }
2954                 },
2955                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2956                                      DESC_HDR_SEL0_MDEUA |
2957                                      DESC_HDR_MODE0_MDEU_MD5,
2958         },
2959         {       .type = CRYPTO_ALG_TYPE_AHASH,
2960                 .alg.hash = {
2961                         .halg.digestsize = SHA1_DIGEST_SIZE,
2962                         .halg.statesize = sizeof(struct talitos_export_state),
2963                         .halg.base = {
2964                                 .cra_name = "hmac(sha1)",
2965                                 .cra_driver_name = "hmac-sha1-talitos",
2966                                 .cra_blocksize = SHA1_BLOCK_SIZE,
2967                                 .cra_flags = CRYPTO_ALG_ASYNC |
2968                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2969                         }
2970                 },
2971                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2972                                      DESC_HDR_SEL0_MDEUA |
2973                                      DESC_HDR_MODE0_MDEU_SHA1,
2974         },
2975         {       .type = CRYPTO_ALG_TYPE_AHASH,
2976                 .alg.hash = {
2977                         .halg.digestsize = SHA224_DIGEST_SIZE,
2978                         .halg.statesize = sizeof(struct talitos_export_state),
2979                         .halg.base = {
2980                                 .cra_name = "hmac(sha224)",
2981                                 .cra_driver_name = "hmac-sha224-talitos",
2982                                 .cra_blocksize = SHA224_BLOCK_SIZE,
2983                                 .cra_flags = CRYPTO_ALG_ASYNC |
2984                                              CRYPTO_ALG_ALLOCATES_MEMORY,
2985                         }
2986                 },
2987                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2988                                      DESC_HDR_SEL0_MDEUA |
2989                                      DESC_HDR_MODE0_MDEU_SHA224,
2990         },
2991         {       .type = CRYPTO_ALG_TYPE_AHASH,
2992                 .alg.hash = {
2993                         .halg.digestsize = SHA256_DIGEST_SIZE,
2994                         .halg.statesize = sizeof(struct talitos_export_state),
2995                         .halg.base = {
2996                                 .cra_name = "hmac(sha256)",
2997                                 .cra_driver_name = "hmac-sha256-talitos",
2998                                 .cra_blocksize = SHA256_BLOCK_SIZE,
2999                                 .cra_flags = CRYPTO_ALG_ASYNC |
3000                                              CRYPTO_ALG_ALLOCATES_MEMORY,
3001                         }
3002                 },
3003                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3004                                      DESC_HDR_SEL0_MDEUA |
3005                                      DESC_HDR_MODE0_MDEU_SHA256,
3006         },
3007         {       .type = CRYPTO_ALG_TYPE_AHASH,
3008                 .alg.hash = {
3009                         .halg.digestsize = SHA384_DIGEST_SIZE,
3010                         .halg.statesize = sizeof(struct talitos_export_state),
3011                         .halg.base = {
3012                                 .cra_name = "hmac(sha384)",
3013                                 .cra_driver_name = "hmac-sha384-talitos",
3014                                 .cra_blocksize = SHA384_BLOCK_SIZE,
3015                                 .cra_flags = CRYPTO_ALG_ASYNC |
3016                                              CRYPTO_ALG_ALLOCATES_MEMORY,
3017                         }
3018                 },
3019                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3020                                      DESC_HDR_SEL0_MDEUB |
3021                                      DESC_HDR_MODE0_MDEUB_SHA384,
3022         },
3023         {       .type = CRYPTO_ALG_TYPE_AHASH,
3024                 .alg.hash = {
3025                         .halg.digestsize = SHA512_DIGEST_SIZE,
3026                         .halg.statesize = sizeof(struct talitos_export_state),
3027                         .halg.base = {
3028                                 .cra_name = "hmac(sha512)",
3029                                 .cra_driver_name = "hmac-sha512-talitos",
3030                                 .cra_blocksize = SHA512_BLOCK_SIZE,
3031                                 .cra_flags = CRYPTO_ALG_ASYNC |
3032                                              CRYPTO_ALG_ALLOCATES_MEMORY,
3033                         }
3034                 },
3035                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3036                                      DESC_HDR_SEL0_MDEUB |
3037                                      DESC_HDR_MODE0_MDEUB_SHA512,
3038         }
3039 };
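
/*
 * Illustrative sketch, not part of the driver: kernel consumers reach the
 * implementations registered above through the generic crypto API and never
 * call into this file directly.  Assuming the "sha256-talitos" entry has been
 * registered, a one-shot digest of a DMA-able (e.g. kmalloc'd) buffer could
 * look roughly like this (demo_sha256() is a hypothetical helper, not
 * something this driver provides; digest must have room for
 * SHA256_DIGEST_SIZE bytes):
 *
 *	#include <crypto/hash.h>
 *	#include <linux/scatterlist.h>
 *
 *	static int demo_sha256(const void *buf, unsigned int len, u8 *digest)
 *	{
 *		struct crypto_ahash *tfm;
 *		struct ahash_request *req;
 *		struct scatterlist sg;
 *		DECLARE_CRYPTO_WAIT(wait);
 *		int err;
 *
 *		tfm = crypto_alloc_ahash("sha256", 0, 0);
 *		if (IS_ERR(tfm))
 *			return PTR_ERR(tfm);
 *
 *		req = ahash_request_alloc(tfm, GFP_KERNEL);
 *		if (!req) {
 *			crypto_free_ahash(tfm);
 *			return -ENOMEM;
 *		}
 *
 *		sg_init_one(&sg, buf, len);
 *		ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *					   crypto_req_done, &wait);
 *		ahash_request_set_crypt(req, &sg, digest, len);
 *
 *		err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *
 *		ahash_request_free(req);
 *		crypto_free_ahash(tfm);
 *		return err;
 *	}
 *
 * Since the algorithms are registered with CRYPTO_ALG_ASYNC, the request
 * normally completes from the done tasklet; crypto_wait_req() handles
 * -EINPROGRESS and -EBUSY for the caller.  The skcipher and aead entries are
 * used analogously through crypto_alloc_skcipher()/crypto_alloc_aead().
 */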
3040
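/*
 * per-algorithm registration record: couples a copy of the template above
 * with the device it was instantiated for
 */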
3041 struct talitos_crypto_alg {
3042         struct list_head entry;
3043         struct device *dev;
3044         struct talitos_alg_template algt;
3045 };
3046
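/*
 * common tfm context setup shared by the aead/skcipher/ahash init hooks
 * below
 */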
3047 static int talitos_init_common(struct talitos_ctx *ctx,
3048                                struct talitos_crypto_alg *talitos_alg)
3049 {
3050         struct talitos_private *priv;
3051
3052         /* update context with ptr to dev */
3053         ctx->dev = talitos_alg->dev;
3054
3055         /* assign SEC channel to tfm in round-robin fashion */
3056         priv = dev_get_drvdata(ctx->dev);
3057         ctx->ch = atomic_inc_return(&priv->last_chan) &
3058                   (priv->num_channels - 1);
3059
3060         /* copy descriptor header template value */
3061         ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
3062
3063         /* select done notification */
3064         ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
3065
3066         return 0;
3067 }
3068
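/*
 * per-type init hooks: recover the enclosing talitos_crypto_alg via
 * container_of() and hand off to talitos_init_common()
 */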
3069 static int talitos_cra_init_aead(struct crypto_aead *tfm)
3070 {
3071         struct aead_alg *alg = crypto_aead_alg(tfm);
3072         struct talitos_crypto_alg *talitos_alg;
3073         struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
3074
3075         talitos_alg = container_of(alg, struct talitos_crypto_alg,
3076                                    algt.alg.aead);
3077
3078         return talitos_init_common(ctx, talitos_alg);
3079 }
3080
3081 static int talitos_cra_init_skcipher(struct crypto_skcipher *tfm)
3082 {
3083         struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3084         struct talitos_crypto_alg *talitos_alg;
3085         struct talitos_ctx *ctx = crypto_skcipher_ctx(tfm);
3086
3087         talitos_alg = container_of(alg, struct talitos_crypto_alg,
3088                                    algt.alg.skcipher);
3089
3090         return talitos_init_common(ctx, talitos_alg);
3091 }
3092
3093 static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3094 {
3095         struct crypto_alg *alg = tfm->__crt_alg;
3096         struct talitos_crypto_alg *talitos_alg;
3097         struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3098
3099         talitos_alg = container_of(__crypto_ahash_alg(alg),
3100                                    struct talitos_crypto_alg,
3101                                    algt.alg.hash);
3102
3103         ctx->keylen = 0;
3104         crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3105                                  sizeof(struct talitos_ahash_req_ctx));
3106
3107         return talitos_init_common(ctx, talitos_alg);
3108 }
3109
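/* undo the DMA mapping of the key installed by the setkey handlers, if any */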
3110 static void talitos_cra_exit(struct crypto_tfm *tfm)
3111 {
3112         struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3113         struct device *dev = ctx->dev;
3114
3115         if (ctx->keylen)
3116                 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3117 }
3118
3119 /*
3120  * given the alg's descriptor header template, determine whether the
3121  * descriptor type and the primary/secondary execution units it requires
3122  * match the hw capabilities described in the device tree node.
3123  */
3124 static int hw_supports(struct device *dev, __be32 desc_hdr_template)
3125 {
3126         struct talitos_private *priv = dev_get_drvdata(dev);
3127         int ret;
3128
3129         ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
3130               (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
3131
3132         if (SECONDARY_EU(desc_hdr_template))
3133                 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
3134                               & priv->exec_units);
3135
3136         return ret;
3137 }
3138
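/*
 * unregister algorithms and the RNG, then release the IRQ lines and kill
 * the done tasklets
 */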
3139 static int talitos_remove(struct platform_device *ofdev)
3140 {
3141         struct device *dev = &ofdev->dev;
3142         struct talitos_private *priv = dev_get_drvdata(dev);
3143         struct talitos_crypto_alg *t_alg, *n;
3144         int i;
3145
3146         list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
3147                 switch (t_alg->algt.type) {
3148                 case CRYPTO_ALG_TYPE_SKCIPHER:
3149                         crypto_unregister_skcipher(&t_alg->algt.alg.skcipher);
3150                         break;
3151                 case CRYPTO_ALG_TYPE_AEAD:
3152                         crypto_unregister_aead(&t_alg->algt.alg.aead);
3153                         break;
3154                 case CRYPTO_ALG_TYPE_AHASH:
3155                         crypto_unregister_ahash(&t_alg->algt.alg.hash);
3156                         break;
3157                 }
3158                 list_del(&t_alg->entry);
3159         }
3160
3161         if (hw_supports(dev, DESC_HDR_SEL0_RNG))
3162                 talitos_unregister_rng(dev);
3163
3164         for (i = 0; i < 2; i++)
3165                 if (priv->irq[i]) {
3166                         free_irq(priv->irq[i], dev);
3167                         irq_dispose_mapping(priv->irq[i]);
3168                 }
3169
3170         tasklet_kill(&priv->done_task[0]);
3171         if (priv->irq[1])
3172                 tasklet_kill(&priv->done_task[1]);
3173
3174         return 0;
3175 }
3176
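/*
 * build a talitos_crypto_alg from a template: wire up the per-type
 * callbacks, reject combinations this hardware cannot run (-ENOTSUPP),
 * and fill in the common cra_* fields
 */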
3177 static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
3178                                                     struct talitos_alg_template
3179                                                            *template)
3180 {
3181         struct talitos_private *priv = dev_get_drvdata(dev);
3182         struct talitos_crypto_alg *t_alg;
3183         struct crypto_alg *alg;
3184
3185         t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
3186                              GFP_KERNEL);
3187         if (!t_alg)
3188                 return ERR_PTR(-ENOMEM);
3189
3190         t_alg->algt = *template;
3191
3192         switch (t_alg->algt.type) {
3193         case CRYPTO_ALG_TYPE_SKCIPHER:
3194                 alg = &t_alg->algt.alg.skcipher.base;
3195                 alg->cra_exit = talitos_cra_exit;
3196                 t_alg->algt.alg.skcipher.init = talitos_cra_init_skcipher;
3197                 t_alg->algt.alg.skcipher.setkey =
3198                         t_alg->algt.alg.skcipher.setkey ?: skcipher_setkey;
3199                 t_alg->algt.alg.skcipher.encrypt = skcipher_encrypt;
3200                 t_alg->algt.alg.skcipher.decrypt = skcipher_decrypt;
3201                 if (!strcmp(alg->cra_name, "ctr(aes)") && !has_ftr_sec1(priv) &&
3202                     DESC_TYPE(t_alg->algt.desc_hdr_template) !=
3203                     DESC_TYPE(DESC_HDR_TYPE_AESU_CTR_NONSNOOP)) {
3204                         devm_kfree(dev, t_alg);
3205                         return ERR_PTR(-ENOTSUPP);
3206                 }
3207                 break;
3208         case CRYPTO_ALG_TYPE_AEAD:
3209                 alg = &t_alg->algt.alg.aead.base;
3210                 alg->cra_exit = talitos_cra_exit;
3211                 t_alg->algt.alg.aead.init = talitos_cra_init_aead;
3212                 t_alg->algt.alg.aead.setkey = t_alg->algt.alg.aead.setkey ?:
3213                                               aead_setkey;
3214                 t_alg->algt.alg.aead.encrypt = aead_encrypt;
3215                 t_alg->algt.alg.aead.decrypt = aead_decrypt;
3216                 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
3217                     !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
3218                         devm_kfree(dev, t_alg);
3219                         return ERR_PTR(-ENOTSUPP);
3220                 }
3221                 break;
3222         case CRYPTO_ALG_TYPE_AHASH:
3223                 alg = &t_alg->algt.alg.hash.halg.base;
3224                 alg->cra_init = talitos_cra_init_ahash;
3225                 alg->cra_exit = talitos_cra_exit;
3226                 t_alg->algt.alg.hash.init = ahash_init;
3227                 t_alg->algt.alg.hash.update = ahash_update;
3228                 t_alg->algt.alg.hash.final = ahash_final;
3229                 t_alg->algt.alg.hash.finup = ahash_finup;
3230                 t_alg->algt.alg.hash.digest = ahash_digest;
3231                 if (!strncmp(alg->cra_name, "hmac", 4))
3232                         t_alg->algt.alg.hash.setkey = ahash_setkey;
3233                 t_alg->algt.alg.hash.import = ahash_import;
3234                 t_alg->algt.alg.hash.export = ahash_export;
3235
3236                 if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
3237                     !strncmp(alg->cra_name, "hmac", 4)) {
3238                         devm_kfree(dev, t_alg);
3239                         return ERR_PTR(-ENOTSUPP);
3240                 }
3241                 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
3242                     (!strcmp(alg->cra_name, "sha224") ||
3243                      !strcmp(alg->cra_name, "hmac(sha224)"))) {
3244                         t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
3245                         t_alg->algt.desc_hdr_template =
3246                                         DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3247                                         DESC_HDR_SEL0_MDEUA |
3248                                         DESC_HDR_MODE0_MDEU_SHA256;
3249                 }
3250                 break;
3251         default:
3252                 dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
3253                 devm_kfree(dev, t_alg);
3254                 return ERR_PTR(-EINVAL);
3255         }
3256
3257         alg->cra_module = THIS_MODULE;
3258         if (t_alg->algt.priority)
3259                 alg->cra_priority = t_alg->algt.priority;
3260         else
3261                 alg->cra_priority = TALITOS_CRA_PRIORITY;
3262         if (has_ftr_sec1(priv))
3263                 alg->cra_alignmask = 3;
3264         else
3265                 alg->cra_alignmask = 0;
3266         alg->cra_ctxsize = sizeof(struct talitos_ctx);
3267         alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;
3268
3269         t_alg->dev = dev;
3270
3271         return t_alg;
3272 }
3273
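/*
 * map and request the interrupt line(s): SEC1 and single-IRQ SEC2+ parts
 * service all channels from irq[0], while dual-IRQ SEC2+ parts split
 * channels 0/2 and 1/3 across irq[0]/irq[1]
 */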
3274 static int talitos_probe_irq(struct platform_device *ofdev)
3275 {
3276         struct device *dev = &ofdev->dev;
3277         struct device_node *np = ofdev->dev.of_node;
3278         struct talitos_private *priv = dev_get_drvdata(dev);
3279         int err;
3280         bool is_sec1 = has_ftr_sec1(priv);
3281
3282         priv->irq[0] = irq_of_parse_and_map(np, 0);
3283         if (!priv->irq[0]) {
3284                 dev_err(dev, "failed to map irq\n");
3285                 return -EINVAL;
3286         }
3287         if (is_sec1) {
3288                 err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
3289                                   dev_driver_string(dev), dev);
3290                 goto primary_out;
3291         }
3292
3293         priv->irq[1] = irq_of_parse_and_map(np, 1);
3294
3295         /* get the primary irq line */
3296         if (!priv->irq[1]) {
3297                 err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
3298                                   dev_driver_string(dev), dev);
3299                 goto primary_out;
3300         }
3301
3302         err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
3303                           dev_driver_string(dev), dev);
3304         if (err)
3305                 goto primary_out;
3306
3307         /* get the secondary irq line */
3308         err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
3309                           dev_driver_string(dev), dev);
3310         if (err) {
3311                 dev_err(dev, "failed to request secondary irq\n");
3312                 irq_dispose_mapping(priv->irq[1]);
3313                 priv->irq[1] = 0;
3314         }
3315
3316         return err;
3317
3318 primary_out:
3319         if (err) {
3320                 dev_err(dev, "failed to request primary irq\n");
3321                 irq_dispose_mapping(priv->irq[0]);
3322                 priv->irq[0] = 0;
3323         }
3324
3325         return err;
3326 }
3327
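/*
 * probe: map the registers, read the SEC capabilities from the device tree,
 * set up IRQs, tasklets and per-channel fifos, then register the RNG and
 * every algorithm the hardware advertises
 */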
3328 static int talitos_probe(struct platform_device *ofdev)
3329 {
3330         struct device *dev = &ofdev->dev;
3331         struct device_node *np = ofdev->dev.of_node;
3332         struct talitos_private *priv;
3333         int i, err;
3334         int stride;
3335         struct resource *res;
3336
3337         priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
3338         if (!priv)
3339                 return -ENOMEM;
3340
3341         INIT_LIST_HEAD(&priv->alg_list);
3342
3343         dev_set_drvdata(dev, priv);
3344
3345         priv->ofdev = ofdev;
3346
3347         spin_lock_init(&priv->reg_lock);
3348
3349         res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
3350         if (!res)
3351                 return -ENXIO;
3352         priv->reg = devm_ioremap(dev, res->start, resource_size(res));
3353         if (!priv->reg) {
3354                 dev_err(dev, "failed to ioremap\n");
3355                 err = -ENOMEM;
3356                 goto err_out;
3357         }
3358
3359         /* get SEC version capabilities from device tree */
3360         of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
3361         of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
3362         of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
3363         of_property_read_u32(np, "fsl,descriptor-types-mask",
3364                              &priv->desc_types);
3365
3366         if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
3367             !priv->exec_units || !priv->desc_types) {
3368                 dev_err(dev, "invalid property data in device tree node\n");
3369                 err = -EINVAL;
3370                 goto err_out;
3371         }
3372
3373         if (of_device_is_compatible(np, "fsl,sec3.0"))
3374                 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
3375
3376         if (of_device_is_compatible(np, "fsl,sec2.1"))
3377                 priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
3378                                   TALITOS_FTR_SHA224_HWINIT |
3379                                   TALITOS_FTR_HMAC_OK;
3380
3381         if (of_device_is_compatible(np, "fsl,sec1.0"))
3382                 priv->features |= TALITOS_FTR_SEC1;
3383
3384         if (of_device_is_compatible(np, "fsl,sec1.2")) {
3385                 priv->reg_deu = priv->reg + TALITOS12_DEU;
3386                 priv->reg_aesu = priv->reg + TALITOS12_AESU;
3387                 priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
3388                 stride = TALITOS1_CH_STRIDE;
3389         } else if (of_device_is_compatible(np, "fsl,sec1.0")) {
3390                 priv->reg_deu = priv->reg + TALITOS10_DEU;
3391                 priv->reg_aesu = priv->reg + TALITOS10_AESU;
3392                 priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
3393                 priv->reg_afeu = priv->reg + TALITOS10_AFEU;
3394                 priv->reg_rngu = priv->reg + TALITOS10_RNGU;
3395                 priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
3396                 stride = TALITOS1_CH_STRIDE;
3397         } else {
3398                 priv->reg_deu = priv->reg + TALITOS2_DEU;
3399                 priv->reg_aesu = priv->reg + TALITOS2_AESU;
3400                 priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
3401                 priv->reg_afeu = priv->reg + TALITOS2_AFEU;
3402                 priv->reg_rngu = priv->reg + TALITOS2_RNGU;
3403                 priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
3404                 priv->reg_keu = priv->reg + TALITOS2_KEU;
3405                 priv->reg_crcu = priv->reg + TALITOS2_CRCU;
3406                 stride = TALITOS2_CH_STRIDE;
3407         }
3408
3409         err = talitos_probe_irq(ofdev);
3410         if (err)
3411                 goto err_out;
3412
3413         if (has_ftr_sec1(priv)) {
3414                 if (priv->num_channels == 1)
3415                         tasklet_init(&priv->done_task[0], talitos1_done_ch0,
3416                                      (unsigned long)dev);
3417                 else
3418                         tasklet_init(&priv->done_task[0], talitos1_done_4ch,
3419                                      (unsigned long)dev);
3420         } else {
3421                 if (priv->irq[1]) {
3422                         tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
3423                                      (unsigned long)dev);
3424                         tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
3425                                      (unsigned long)dev);
3426                 } else if (priv->num_channels == 1) {
3427                         tasklet_init(&priv->done_task[0], talitos2_done_ch0,
3428                                      (unsigned long)dev);
3429                 } else {
3430                         tasklet_init(&priv->done_task[0], talitos2_done_4ch,
3431                                      (unsigned long)dev);
3432                 }
3433         }
3434
3435         priv->chan = devm_kcalloc(dev,
3436                                   priv->num_channels,
3437                                   sizeof(struct talitos_channel),
3438                                   GFP_KERNEL);
3439         if (!priv->chan) {
3440                 dev_err(dev, "failed to allocate channel management space\n");
3441                 err = -ENOMEM;
3442                 goto err_out;
3443         }
3444
3445         priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);
3446
3447         for (i = 0; i < priv->num_channels; i++) {
3448                 priv->chan[i].reg = priv->reg + stride * (i + 1);
3449                 if (!priv->irq[1] || !(i & 1))
3450                         priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;
3451
3452                 spin_lock_init(&priv->chan[i].head_lock);
3453                 spin_lock_init(&priv->chan[i].tail_lock);
3454
3455                 priv->chan[i].fifo = devm_kcalloc(dev,
3456                                                 priv->fifo_len,
3457                                                 sizeof(struct talitos_request),
3458                                                 GFP_KERNEL);
3459                 if (!priv->chan[i].fifo) {
3460                         dev_err(dev, "failed to allocate request fifo %d\n", i);
3461                         err = -ENOMEM;
3462                         goto err_out;
3463                 }
3464
3465                 atomic_set(&priv->chan[i].submit_count,
3466                            -(priv->chfifo_len - 1));
3467         }
3468
3469         err = dma_set_mask(dev, DMA_BIT_MASK(36));
             if (err) {
                     dev_err(dev, "failed to set DMA mask\n");
                     goto err_out;
             }
3470
3471         /* reset and initialize the h/w */
3472         err = init_device(dev);
3473         if (err) {
3474                 dev_err(dev, "failed to initialize device\n");
3475                 goto err_out;
3476         }
3477
3478         /* register the RNG, if available */
3479         if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
3480                 err = talitos_register_rng(dev);
3481                 if (err) {
3482                         dev_err(dev, "failed to register hwrng: %d\n", err);
3483                         goto err_out;
3484                 } else
3485                 } else {
3486                         dev_info(dev, "hwrng\n");
                }
3487
3488         /* register crypto algorithms the device supports */
3489         for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3490                 if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
3491                         struct talitos_crypto_alg *t_alg;
3492                         struct crypto_alg *alg = NULL;
3493
3494                         t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
3495                         if (IS_ERR(t_alg)) {
3496                                 err = PTR_ERR(t_alg);
3497                                 if (err == -ENOTSUPP)
3498                                         continue;
3499                                 goto err_out;
3500                         }
3501
3502                         switch (t_alg->algt.type) {
3503                         case CRYPTO_ALG_TYPE_SKCIPHER:
3504                                 err = crypto_register_skcipher(
3505                                                 &t_alg->algt.alg.skcipher);
3506                                 alg = &t_alg->algt.alg.skcipher.base;
3507                                 break;
3508
3509                         case CRYPTO_ALG_TYPE_AEAD:
3510                                 err = crypto_register_aead(
3511                                         &t_alg->algt.alg.aead);
3512                                 alg = &t_alg->algt.alg.aead.base;
3513                                 break;
3514
3515                         case CRYPTO_ALG_TYPE_AHASH:
3516                                 err = crypto_register_ahash(
3517                                                 &t_alg->algt.alg.hash);
3518                                 alg = &t_alg->algt.alg.hash.halg.base;
3519                                 break;
3520                         }
3521                         if (err) {
3522                                 dev_err(dev, "%s alg registration failed\n",
3523                                         alg->cra_driver_name);
3524                                 devm_kfree(dev, t_alg);
3525                         } else {
3526                                 list_add_tail(&t_alg->entry, &priv->alg_list);
                        }
3527                 }
3528         }
3529         if (!list_empty(&priv->alg_list))
3530                 dev_info(dev, "%s algorithms registered in /proc/crypto\n",
3531                          (char *)of_get_property(np, "compatible", NULL));
3532
3533         return 0;
3534
3535 err_out:
3536         talitos_remove(ofdev);
3537
3538         return err;
3539 }
3540
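/* SEC1 and SEC2+ support are selected independently via Kconfig */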
3541 static const struct of_device_id talitos_match[] = {
3542 #ifdef CONFIG_CRYPTO_DEV_TALITOS1
3543         {
3544                 .compatible = "fsl,sec1.0",
3545         },
3546 #endif
3547 #ifdef CONFIG_CRYPTO_DEV_TALITOS2
3548         {
3549                 .compatible = "fsl,sec2.0",
3550         },
3551 #endif
3552         {},
3553 };
3554 MODULE_DEVICE_TABLE(of, talitos_match);
3555
3556 static struct platform_driver talitos_driver = {
3557         .driver = {
3558                 .name = "talitos",
3559                 .of_match_table = talitos_match,
3560         },
3561         .probe = talitos_probe,
3562         .remove = talitos_remove,
3563 };
3564
3565 module_platform_driver(talitos_driver);
3566
3567 MODULE_LICENSE("GPL");
3568 MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
3569 MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");