// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * talitos - Freescale Integrated Security Engine (SEC) device driver
 *
 * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
 *
 * Scatterlist Crypto API glue code copied from files with the following:
 * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Crypto algorithm registration code copied from hifn driver:
 * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
 * All rights reserved.
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/crypto.h>
#include <linux/hw_random.h>
#include <linux/of_address.h>
#include <linux/of_irq.h>
#include <linux/of_platform.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/spinlock.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>

#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/md5.h>
#include <crypto/internal/aead.h>
#include <crypto/authenc.h>
#include <crypto/internal/skcipher.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>

#include "talitos.h"

static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
			   unsigned int len, bool is_sec1)
{
	ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
	if (is_sec1) {
		ptr->len1 = cpu_to_be16(len);
	} else {
		ptr->len = cpu_to_be16(len);
		ptr->eptr = upper_32_bits(dma_addr);
	}
}

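/*
 * Worked example (illustrative only): on SEC 2.x, a 36-bit bus address
 * such as dma_addr = 0x2_1234_5678 with len = 16 is stored as
 *
 *	ptr->ptr  = cpu_to_be32(0x12345678);	// low 32 bits
 *	ptr->eptr = 0x2;			// upper 4 bits (36-bit EAE)
 *	ptr->len  = cpu_to_be16(16);
 *
 * On SEC 1.x there is no eptr; only the 32-bit pointer and len1 are used.
 */
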
static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
			     struct talitos_ptr *src_ptr, bool is_sec1)
{
	dst_ptr->ptr = src_ptr->ptr;
	if (is_sec1) {
		dst_ptr->len1 = src_ptr->len1;
	} else {
		dst_ptr->len = src_ptr->len;
		dst_ptr->eptr = src_ptr->eptr;
	}
}

static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
					   bool is_sec1)
{
	if (is_sec1)
		return be16_to_cpu(ptr->len1);
	else
		return be16_to_cpu(ptr->len);
}

static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
				   bool is_sec1)
{
	if (!is_sec1)
		ptr->j_extent = val;
}

static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
{
	if (!is_sec1)
		ptr->j_extent |= val;
}

/*
 * map virtual single (contiguous) pointer to h/w descriptor pointer
 */
static void __map_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     unsigned int len, void *data,
				     enum dma_data_direction dir,
				     unsigned long attrs)
{
	dma_addr_t dma_addr = dma_map_single_attrs(dev, data, len, dir, attrs);
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	to_talitos_ptr(ptr, dma_addr, len, is_sec1);
}

static void map_single_talitos_ptr(struct device *dev,
				   struct talitos_ptr *ptr,
				   unsigned int len, void *data,
				   enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir, 0);
}

static void map_single_talitos_ptr_nosync(struct device *dev,
					  struct talitos_ptr *ptr,
					  unsigned int len, void *data,
					  enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir,
				 DMA_ATTR_SKIP_CPU_SYNC);
}

/*
 * unmap bus single (contiguous) h/w descriptor pointer
 */
static void unmap_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     enum dma_data_direction dir)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
			 from_talitos_ptr_len(ptr, is_sec1), dir);
}

static int reset_channel(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);

	if (is_sec1) {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS1_CCCR_LO_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
			TALITOS1_CCCR_LO_RESET) && --timeout)
			cpu_relax();
	} else {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR,
			  TALITOS2_CCCR_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			TALITOS2_CCCR_RESET) && --timeout)
			cpu_relax();
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset channel %d\n", ch);
		return -EIO;
	}

	/* set 36-bit addressing, done writeback enable and done IRQ enable */
	setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
		  TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
	/* enable chaining descriptors */
	if (is_sec1)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_NE);

	/* and ICCR writeback, if available */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_IWSE);

	return 0;
}

static int reset_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);
	u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;

	setbits32(priv->reg + TALITOS_MCR, mcr);

	while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
	       && --timeout)
		cpu_relax();

	if (priv->irq[1]) {
		mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
		setbits32(priv->reg + TALITOS_MCR, mcr);
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset device\n");
		return -EIO;
	}

	return 0;
}

/*
 * Reset and initialize the device
 */
static int init_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int ch, err;
	bool is_sec1 = has_ftr_sec1(priv);

	/*
	 * Master reset
	 * errata documentation: warning: certain SEC interrupts
	 * are not fully cleared by writing the MCR:SWR bit,
	 * set bit twice to completely reset
	 */
	err = reset_device(dev);
	if (err)
		return err;

	err = reset_device(dev);
	if (err)
		return err;

	/* reset channels */
	for (ch = 0; ch < priv->num_channels; ch++) {
		err = reset_channel(dev, ch);
		if (err)
			return err;
	}

	/* enable channel done and error interrupts */
	if (is_sec1) {
		clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
		clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
		/* disable parity error check in DEU (erroneous? test vect.) */
		setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
	} else {
		setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
		setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
	}

	/* disable integrity check error interrupts (use writeback instead) */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
			  TALITOS_MDEUICR_LO_ICE);

	return 0;
}

/**
 * talitos_submit - submits a descriptor to the device for processing
 * @dev:	the SEC device to be used
 * @ch:		the SEC device channel to be used
 * @desc:	the descriptor to be processed by the device
 * @callback:	whom to call when processing is complete
 * @context:	a handle for use by caller (optional)
 *
 * desc must contain valid dma-mapped (bus physical) address pointers.
 * callback must check err and feedback in descriptor header
 * for device processing status.
 */
static int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
			  void (*callback)(struct device *dev,
					   struct talitos_desc *desc,
					   void *context, int error),
			  void *context)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request;
	unsigned long flags;
	int head;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].head_lock, flags);

	if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
		/* h/w fifo is full */
		spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
		return -EAGAIN;
	}

	head = priv->chan[ch].head;
	request = &priv->chan[ch].fifo[head];

	/* map descriptor and save caller data */
	if (is_sec1) {
		desc->hdr1 = desc->hdr;
		request->dma_desc = dma_map_single(dev, &desc->hdr1,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	} else {
		request->dma_desc = dma_map_single(dev, desc,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	}
	request->callback = callback;
	request->context = context;

	/* increment fifo head */
	priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);

	smp_wmb();
	request->desc = desc;

	/* GO! */
	wmb();
	out_be32(priv->chan[ch].reg + TALITOS_FF,
		 upper_32_bits(request->dma_desc));
	out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
		 lower_32_bits(request->dma_desc));

	spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);

	return -EINPROGRESS;
}

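/*
 * Typical calling pattern, as used by the request builders below (a
 * minimal sketch; "my_done" stands in for a real completion callback):
 *
 *	ret = talitos_submit(dev, ctx->ch, desc, my_done, areq);
 *	if (ret != -EINPROGRESS) {
 *		// descriptor was not queued (e.g. -EAGAIN on a full fifo):
 *		// unmap DMA resources and free the extended descriptor
 *	}
 *
 * -EINPROGRESS is the only successful return; completion (or error) is
 * reported later through the callback, invoked from the done tasklet.
 */
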
static __be32 get_request_hdr(struct talitos_request *request, bool is_sec1)
{
	struct talitos_edesc *edesc;

	if (!is_sec1)
		return request->desc->hdr;

	if (!request->desc->next_desc)
		return request->desc->hdr1;

	edesc = container_of(request->desc, struct talitos_edesc, desc);

	return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;
}

/*
 * process what was done, notify callback of error if not
 */
static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request, saved_req;
	unsigned long flags;
	int tail, status;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);

	tail = priv->chan[ch].tail;
	while (priv->chan[ch].fifo[tail].desc) {
		__be32 hdr;

		request = &priv->chan[ch].fifo[tail];

		/* descriptors with their done bits set don't get the error */
		rmb();
		hdr = get_request_hdr(request, is_sec1);

		if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
			status = 0;
		else
			if (!error)
				break;
			else
				status = error;

		dma_unmap_single(dev, request->dma_desc,
				 TALITOS_DESC_SIZE,
				 DMA_BIDIRECTIONAL);

		/* copy entries so we can call callback outside lock */
		saved_req.desc = request->desc;
		saved_req.callback = request->callback;
		saved_req.context = request->context;

		/* release request entry in fifo */
		smp_wmb();
		request->desc = NULL;

		/* increment fifo tail */
		priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);

		spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);

		atomic_dec(&priv->chan[ch].submit_count);

		saved_req.callback(dev, saved_req.desc, saved_req.context,
				   status);
		/* channel may resume processing in single desc error case */
		if (error && !reset_ch && status == error)
			return;
		spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
		tail = priv->chan[ch].tail;
	}

	spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
}

403 * process completed requests for channels that have done status
405 #define DEF_TALITOS1_DONE(name, ch_done_mask) \
406 static void talitos1_done_##name(unsigned long data) \
408 struct device *dev = (struct device *)data; \
409 struct talitos_private *priv = dev_get_drvdata(dev); \
410 unsigned long flags; \
412 if (ch_done_mask & 0x10000000) \
413 flush_channel(dev, 0, 0, 0); \
414 if (ch_done_mask & 0x40000000) \
415 flush_channel(dev, 1, 0, 0); \
416 if (ch_done_mask & 0x00010000) \
417 flush_channel(dev, 2, 0, 0); \
418 if (ch_done_mask & 0x00040000) \
419 flush_channel(dev, 3, 0, 0); \
421 /* At this point, all completed channels have been processed */ \
422 /* Unmask done interrupts for channels completed later on. */ \
423 spin_lock_irqsave(&priv->reg_lock, flags); \
424 clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
425 clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT); \
426 spin_unlock_irqrestore(&priv->reg_lock, flags); \
429 DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
430 DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)
432 #define DEF_TALITOS2_DONE(name, ch_done_mask) \
433 static void talitos2_done_##name(unsigned long data) \
435 struct device *dev = (struct device *)data; \
436 struct talitos_private *priv = dev_get_drvdata(dev); \
437 unsigned long flags; \
439 if (ch_done_mask & 1) \
440 flush_channel(dev, 0, 0, 0); \
441 if (ch_done_mask & (1 << 2)) \
442 flush_channel(dev, 1, 0, 0); \
443 if (ch_done_mask & (1 << 4)) \
444 flush_channel(dev, 2, 0, 0); \
445 if (ch_done_mask & (1 << 6)) \
446 flush_channel(dev, 3, 0, 0); \
448 /* At this point, all completed channels have been processed */ \
449 /* Unmask done interrupts for channels completed later on. */ \
450 spin_lock_irqsave(&priv->reg_lock, flags); \
451 setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
452 setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT); \
453 spin_unlock_irqrestore(&priv->reg_lock, flags); \
456 DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
457 DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
458 DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
459 DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)
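/*
 * For reference, DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE) above
 * expands (conceptually) to a tasklet handler that flushes only channel 0:
 *
 *	static void talitos2_done_ch0(unsigned long data)
 *	{
 *		...
 *		if (TALITOS2_ISR_CH_0_DONE & 1)
 *			flush_channel(dev, 0, 0, 0);
 *		...
 *	}
 *
 * since the constant done mask lets the compiler discard the other tests.
 */
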
/*
 * locate current (offending) descriptor
 */
static __be32 current_desc_hdr(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int tail, iter;
	dma_addr_t cur_desc;

	cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
	cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);

	if (!cur_desc) {
		dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
		return 0;
	}

	tail = priv->chan[ch].tail;

	iter = tail;
	while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
	       priv->chan[ch].fifo[iter].desc->next_desc != cpu_to_be32(cur_desc)) {
		iter = (iter + 1) & (priv->fifo_len - 1);
		if (iter == tail) {
			dev_err(dev, "couldn't locate current descriptor\n");
			return 0;
		}
	}

	if (priv->chan[ch].fifo[iter].desc->next_desc == cpu_to_be32(cur_desc)) {
		struct talitos_edesc *edesc;

		edesc = container_of(priv->chan[ch].fifo[iter].desc,
				     struct talitos_edesc, desc);
		return ((struct talitos_desc *)
			(edesc->buf + edesc->dma_len))->hdr;
	}

	return priv->chan[ch].fifo[iter].desc->hdr;
}

/*
 * user diagnostics; report root cause of error based on execution unit status
 */
static void report_eu_error(struct device *dev, int ch, __be32 desc_hdr)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int i;

	if (!desc_hdr)
		desc_hdr = cpu_to_be32(in_be32(priv->chan[ch].reg + TALITOS_DESCBUF));

	switch (desc_hdr & DESC_HDR_SEL0_MASK) {
	case DESC_HDR_SEL0_AFEU:
		dev_err(dev, "AFEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_afeu + TALITOS_EUISR),
			in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_DEU:
		dev_err(dev, "DEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_deu + TALITOS_EUISR),
			in_be32(priv->reg_deu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_MDEUA:
	case DESC_HDR_SEL0_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_RNG:
		dev_err(dev, "RNGUISR 0x%08x_%08x\n",
			in_be32(priv->reg_rngu + TALITOS_ISR),
			in_be32(priv->reg_rngu + TALITOS_ISR_LO));
		break;
	case DESC_HDR_SEL0_PKEU:
		dev_err(dev, "PKEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_AESU:
		dev_err(dev, "AESUISR 0x%08x_%08x\n",
			in_be32(priv->reg_aesu + TALITOS_EUISR),
			in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_KEU:
		dev_err(dev, "KEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	}

	switch (desc_hdr & DESC_HDR_SEL1_MASK) {
	case DESC_HDR_SEL1_MDEUA:
	case DESC_HDR_SEL1_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL1_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	}

	for (i = 0; i < 8; i++)
		dev_err(dev, "DESCBUF 0x%08x_%08x\n",
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
}

/*
 * recover from error interrupts
 */
static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	int ch, error, reset_dev = 0;
	u32 v_lo;
	bool is_sec1 = has_ftr_sec1(priv);
	int reset_ch = is_sec1 ? 1 : 0; /* only SEC2 supports continuation */

	for (ch = 0; ch < priv->num_channels; ch++) {
		/* skip channels without errors */
		if (is_sec1) {
			/* bits 29, 31, 17, 19 */
			if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
				continue;
		} else {
			if (!(isr & (1 << (ch * 2 + 1))))
				continue;
		}

		error = -EINVAL;

		v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);

		if (v_lo & TALITOS_CCPSR_LO_DOF) {
			dev_err(dev, "double fetch fifo overflow error\n");
			error = -EAGAIN;
			reset_ch = 1;
		}
		if (v_lo & TALITOS_CCPSR_LO_SOF) {
			/* h/w dropped descriptor */
			dev_err(dev, "single fetch fifo overflow error\n");
			error = -EAGAIN;
		}
		if (v_lo & TALITOS_CCPSR_LO_MDTE)
			dev_err(dev, "master data transfer error\n");
		if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
			dev_err(dev, is_sec1 ? "pointer not complete error\n"
					     : "s/g data length zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_FPZ)
			dev_err(dev, is_sec1 ? "parity error\n"
					     : "fetch pointer zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_IDH)
			dev_err(dev, "illegal descriptor header error\n");
		if (v_lo & TALITOS_CCPSR_LO_IEU)
			dev_err(dev, is_sec1 ? "static assignment error\n"
					     : "invalid exec unit error\n");
		if (v_lo & TALITOS_CCPSR_LO_EU)
			report_eu_error(dev, ch, current_desc_hdr(dev, ch));
		if (!is_sec1) {
			if (v_lo & TALITOS_CCPSR_LO_GB)
				dev_err(dev, "gather boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_GRL)
				dev_err(dev, "gather return/length error\n");
			if (v_lo & TALITOS_CCPSR_LO_SB)
				dev_err(dev, "scatter boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_SRL)
				dev_err(dev, "scatter return/length error\n");
		}

		flush_channel(dev, ch, error, reset_ch);

		if (reset_ch) {
			reset_channel(dev, ch);
		} else {
			setbits32(priv->chan[ch].reg + TALITOS_CCCR,
				  TALITOS2_CCCR_CONT);
			setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
			while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			       TALITOS2_CCCR_CONT) && --timeout)
				cpu_relax();
			if (timeout == 0) {
				dev_err(dev, "failed to restart channel %d\n",
					ch);
				reset_dev = 1;
			}
		}
	}
	if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
	    (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
		if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
			dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
				isr, isr_lo);
		else
			dev_err(dev, "done overflow, internal time out, or "
				"rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);

		/* purge request queues */
		for (ch = 0; ch < priv->num_channels; ch++)
			flush_channel(dev, ch, -EIO, 1);

		/* reset and reinitialize the device */
		init_device(dev);
	}
}

#define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	       \
static irqreturn_t talitos1_interrupt_##name(int irq, void *data)	       \
{									       \
	struct device *dev = data;					       \
	struct talitos_private *priv = dev_get_drvdata(dev);		       \
	u32 isr, isr_lo;						       \
	unsigned long flags;						       \
									       \
	spin_lock_irqsave(&priv->reg_lock, flags);			       \
	isr = in_be32(priv->reg + TALITOS_ISR);				       \
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			       \
	/* Acknowledge interrupt */					       \
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			       \
									       \
	if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) {   \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
		talitos_error(dev, isr & ch_err_mask, isr_lo);		       \
	}								       \
	else {								       \
		if (likely(isr & ch_done_mask)) {			       \
			/* mask further done interrupts. */		       \
			setbits32(priv->reg + TALITOS_IMR, ch_done_mask);      \
			/* done_task will unmask done interrupts at exit */    \
			tasklet_schedule(&priv->done_task[tlet]);	       \
		}							       \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
	}								       \
									       \
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED :  \
								IRQ_NONE;      \
}

DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)

#define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	       \
static irqreturn_t talitos2_interrupt_##name(int irq, void *data)	       \
{									       \
	struct device *dev = data;					       \
	struct talitos_private *priv = dev_get_drvdata(dev);		       \
	u32 isr, isr_lo;						       \
	unsigned long flags;						       \
									       \
	spin_lock_irqsave(&priv->reg_lock, flags);			       \
	isr = in_be32(priv->reg + TALITOS_ISR);				       \
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			       \
	/* Acknowledge interrupt */					       \
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			       \
									       \
	if (unlikely(isr & ch_err_mask || isr_lo)) {			       \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
		talitos_error(dev, isr & ch_err_mask, isr_lo);		       \
	}								       \
	else {								       \
		if (likely(isr & ch_done_mask)) {			       \
			/* mask further done interrupts. */		       \
			clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);      \
			/* done_task will unmask done interrupts at exit */    \
			tasklet_schedule(&priv->done_task[tlet]);	       \
		}							       \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
	}								       \
									       \
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED :  \
								IRQ_NONE;      \
}

DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
		       0)
DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
		       1)

/*
 * hwrng
 */
static int talitos_rng_data_present(struct hwrng *rng, int wait)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	u32 ofl;
	int i;

	for (i = 0; i < 20; i++) {
		ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
		      TALITOS_RNGUSR_LO_OFL;
		if (ofl || !wait)
			break;
		udelay(10);
	}

	return !!ofl;
}

static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);

	/* rng fifo requires 64-bit accesses */
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);

	return sizeof(u32);
}

static int talitos_rng_init(struct hwrng *rng)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;

	setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
	while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
		 & TALITOS_RNGUSR_LO_RD)
	       && --timeout)
		cpu_relax();
	if (timeout == 0) {
		dev_err(dev, "failed to reset rng hw\n");
		return -ENODEV;
	}

	/* start generating */
	setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);

	return 0;
}

static int talitos_register_rng(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int err;

	priv->rng.name		= dev_driver_string(dev);
	priv->rng.init		= talitos_rng_init;
	priv->rng.data_present	= talitos_rng_data_present;
	priv->rng.data_read	= talitos_rng_data_read;
	priv->rng.priv		= (unsigned long)dev;

	err = hwrng_register(&priv->rng);
	if (!err)
		priv->rng_registered = true;

	return err;
}

static void talitos_unregister_rng(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);

	if (!priv->rng_registered)
		return;

	hwrng_unregister(&priv->rng);
	priv->rng_registered = false;
}

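/*
 * Once hwrng_register() succeeds, the hwrng core drives these hooks
 * roughly as follows (a sketch of the legacy data_present/data_read
 * interface, not code from this driver):
 *
 *	rng->init(rng);
 *	if (rng->data_present(rng, wait)) {
 *		u32 word;
 *		rng->data_read(rng, &word);	// one 32-bit word per call
 *	}
 *
 * and the entropy is then exposed to userspace through /dev/hwrng.
 */
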
/*
 * crypto alg
 */
#define TALITOS_CRA_PRIORITY		3000
/*
 * Defines a priority for doing AEAD with descriptors type
 * HMAC_SNOOP_NO_AFEA (HSNA) instead of type IPSEC_ESP
 */
#define TALITOS_CRA_PRIORITY_AEAD_HSNA	(TALITOS_CRA_PRIORITY - 1)
#ifdef CONFIG_CRYPTO_DEV_TALITOS2
#define TALITOS_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
#else
#define TALITOS_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + SHA256_BLOCK_SIZE)
#endif
#define TALITOS_MAX_IV_LENGTH		16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */

struct talitos_ctx {
	struct device *dev;
	int ch;
	__be32 desc_hdr_template;
	u8 key[TALITOS_MAX_KEY_SIZE];
	u8 iv[TALITOS_MAX_IV_LENGTH];
	dma_addr_t dma_key;
	unsigned int keylen;
	unsigned int enckeylen;
	unsigned int authkeylen;
};

#define HASH_MAX_BLOCK_SIZE		SHA512_BLOCK_SIZE
#define TALITOS_MDEU_MAX_CONTEXT_SIZE	TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512

struct talitos_ahash_req_ctx {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	unsigned int hw_context_size;
	u8 buf[2][HASH_MAX_BLOCK_SIZE];
	int buf_idx;
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
	struct scatterlist bufsl[2];
	struct scatterlist *psrc;
};

struct talitos_export_state {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	u8 buf[HASH_MAX_BLOCK_SIZE];
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
};

static int aead_setkey(struct crypto_aead *authenc,
		       const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto badkey;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

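/*
 * After a successful setkey, ctx->key holds the two keys back to back,
 * exactly as the descriptor builders below expect them:
 *
 *	ctx->key:  [ authentication key | encryption key ]
 *	            <-- authkeylen -->   <-- enckeylen -->
 *
 * e.g. for authenc(hmac(sha1),cbc(aes)) with a 20-byte HMAC key and a
 * 16-byte AES key, keylen = 36 and the AES key starts at ctx->key[20].
 */
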
static int aead_des3_setkey(struct crypto_aead *authenc,
			    const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto out;

	err = -EINVAL;
	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto out;

	err = verify_aead_des3_key(authenc, keys.enckey, keys.enckeylen);
	if (err)
		goto out;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

out:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

static void talitos_sg_unmap(struct device *dev,
			     struct talitos_edesc *edesc,
			     struct scatterlist *src,
			     struct scatterlist *dst,
			     unsigned int len, unsigned int offset)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	unsigned int src_nents = edesc->src_nents ? : 1;
	unsigned int dst_nents = edesc->dst_nents ? : 1;

	if (is_sec1 && dst && dst_nents > 1) {
		dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
					   len, DMA_FROM_DEVICE);
		sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
				     offset);
	}
	if (src != dst) {
		if (src_nents == 1 || !is_sec1)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);

		if (dst && (dst_nents == 1 || !is_sec1))
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else if (src_nents == 1 || !is_sec1) {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}

static void ipsec_esp_unmap(struct device *dev,
			    struct talitos_edesc *edesc,
			    struct aead_request *areq, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
	bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];

	if (is_ipsec_esp)
		unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
					 DMA_FROM_DEVICE);
	unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst,
			 cryptlen + authsize, areq->assoclen);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	if (!is_ipsec_esp) {
		unsigned int dst_nents = edesc->dst_nents ? : 1;

		sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
				   areq->assoclen + cryptlen - ivsize);
	}
}

/*
 * ipsec_esp descriptor callbacks
 */
static void ipsec_esp_encrypt_done(struct device *dev,
				   struct talitos_desc *desc, void *context,
				   int err)
{
	struct aead_request *areq = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
	unsigned int ivsize = crypto_aead_ivsize(authenc);
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, areq, true);

	dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);

	kfree(edesc);

	aead_request_complete(areq, err);
}

static void ipsec_esp_decrypt_swauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_edesc *edesc;
	char *oicv, *icv;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req, false);

	if (!err) {
		/* auth check */
		oicv = edesc->buf + edesc->dma_len;
		icv = oicv - authsize;

		err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
	}

	kfree(edesc);

	aead_request_complete(req, err);
}

static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req, false);

	/* check ICV auth status */
	if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
		     DESC_HDR_LO_ICCR1_PASS))
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}

/*
 * convert scatterlist to SEC h/w link table format
 * stop at cryptlen bytes
 */
static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
				 unsigned int offset, int datalen, int elen,
				 struct talitos_ptr *link_tbl_ptr, int align)
{
	int n_sg = elen ? sg_count + 1 : sg_count;
	int count = 0;
	int cryptlen = datalen + elen;
	int padding = ALIGN(cryptlen, align) - cryptlen;

	while (cryptlen && sg && n_sg--) {
		unsigned int len = sg_dma_len(sg);

		if (offset >= len) {
			offset -= len;
			goto next;
		}

		len -= offset;

		if (len > cryptlen)
			len = cryptlen;

		if (datalen > 0 && len > datalen) {
			to_talitos_ptr(link_tbl_ptr + count,
				       sg_dma_address(sg) + offset, datalen, 0);
			to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
			count++;
			len -= datalen;
			offset += datalen;
		}
		to_talitos_ptr(link_tbl_ptr + count,
			       sg_dma_address(sg) + offset, sg_next(sg) ? len : len + padding, 0);
		to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
		count++;
		cryptlen -= len;
		datalen -= len;
		offset = 0;

next:
		sg = sg_next(sg);
	}

	/* tag end of link table */
	if (count > 0)
		to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
				       DESC_PTR_LNKTBL_RET, 0);

	return count;
}

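/*
 * Example (illustrative): a 96-byte payload split across two DMA segments
 * of 64 and 32 bytes, with offset = 0, elen = 0 and align = 1, yields
 *
 *	link_tbl_ptr[0]: ptr = dma(seg0), len = 64, ext = 0
 *	link_tbl_ptr[1]: ptr = dma(seg1), len = 32, ext = DESC_PTR_LNKTBL_RET
 *
 * where the RET tag on the last entry tells the SEC to stop walking the
 * table.
 */
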
static int talitos_sg_map_ext(struct device *dev, struct scatterlist *src,
			      unsigned int len, struct talitos_edesc *edesc,
			      struct talitos_ptr *ptr, int sg_count,
			      unsigned int offset, int tbl_off, int elen,
			      bool force, int align)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int aligned_len = ALIGN(len, align);

	if (!src) {
		to_talitos_ptr(ptr, 0, 0, is_sec1);
		return 1;
	}
	to_talitos_ptr_ext_set(ptr, elen, is_sec1);
	if (sg_count == 1 && !force) {
		to_talitos_ptr(ptr, sg_dma_address(src) + offset, aligned_len, is_sec1);
		return sg_count;
	}
	if (is_sec1) {
		to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, aligned_len, is_sec1);
		return sg_count;
	}
	sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len, elen,
					 &edesc->link_tbl[tbl_off], align);
	if (sg_count == 1 && !force) {
		/* Only one segment now, so no link tbl needed*/
		copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
		return sg_count;
	}
	to_talitos_ptr(ptr, edesc->dma_link_tbl +
			    tbl_off * sizeof(struct talitos_ptr), aligned_len, is_sec1);
	to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);

	return sg_count;
}

static int talitos_sg_map(struct device *dev, struct scatterlist *src,
			  unsigned int len, struct talitos_edesc *edesc,
			  struct talitos_ptr *ptr, int sg_count,
			  unsigned int offset, int tbl_off)
{
	return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
				  tbl_off, 0, false, 1);
}

/*
 * fill in and submit ipsec_esp descriptor
 */
static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
		     bool encrypt,
		     void (*callback)(struct device *dev,
				      struct talitos_desc *desc,
				      void *context, int error))
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int tbl_off = 0;
	int sg_count, ret;
	int elen = 0;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
	struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];
	dma_addr_t dma_icv = edesc->dma_link_tbl + edesc->dma_len - authsize;

	/* hmac key */
	to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  areq->assoclen + cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				      DMA_BIDIRECTIONAL : DMA_TO_DEVICE);

	/* hmac data */
	ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
			     &desc->ptr[1], sg_count, 0, tbl_off);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher iv */
	to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);

	/* cipher key */
	to_talitos_ptr(ckey_ptr, ctx->dma_key + ctx->authkeylen,
		       ctx->enckeylen, is_sec1);

	/*
	 * cipher in
	 * map and adjust cipher len to aead request cryptlen.
	 * extent is bytes of HMAC postpended to ciphertext,
	 * typically 12 for ipsec
	 */
	if (is_ipsec_esp && (desc->hdr & DESC_HDR_MODE1_MDEU_CICV))
		elen = authsize;

	ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
				 sg_count, areq->assoclen, tbl_off, elen,
				 false, 1);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	if (is_ipsec_esp && encrypt)
		elen = authsize;
	else
		elen = 0;
	ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
				 sg_count, areq->assoclen, tbl_off, elen,
				 is_ipsec_esp && !encrypt, 1);
	tbl_off += ret;

	if (!encrypt && is_ipsec_esp) {
		struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];

		/* Add an entry to the link table for ICV data */
		to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
		to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RET, is_sec1);

		/* icv data follows link tables */
		to_talitos_ptr(tbl_ptr, dma_icv, authsize, is_sec1);
		to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);
		sync_needed = true;
	} else if (!encrypt) {
		to_talitos_ptr(&desc->ptr[6], dma_icv, authsize, is_sec1);
		sync_needed = true;
	} else if (!is_ipsec_esp) {
		talitos_sg_map(dev, areq->dst, authsize, edesc, &desc->ptr[6],
			       sg_count, areq->assoclen + cryptlen, tbl_off);
	}

	/* iv out */
	if (is_ipsec_esp)
		map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
				       DMA_FROM_DEVICE);

	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len,
					   DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		ipsec_esp_unmap(dev, edesc, areq, encrypt);
		kfree(edesc);
	}
	return ret;
}

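/*
 * Rough summary of how the seven descriptor pointers are used above
 * (civ_ptr/ckey_ptr swap slots 2 and 3 between the two descriptor types):
 *
 *	slot	IPSEC_ESP type		HMAC_SNOOP_NO_AFEU type
 *	ptr[0]	HMAC key		HMAC key
 *	ptr[1]	associated data		associated data
 *	ptr[2]	cipher IV		cipher key
 *	ptr[3]	cipher key		cipher IV
 *	ptr[4]	cipher in		cipher in
 *	ptr[5]	cipher out		cipher out
 *	ptr[6]	IV out			ICV out
 */
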
/*
 * allocate and map the extended descriptor
 */
static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
						 struct scatterlist *src,
						 struct scatterlist *dst,
						 u8 *iv,
						 unsigned int assoclen,
						 unsigned int cryptlen,
						 unsigned int authsize,
						 unsigned int ivsize,
						 int icv_stashing,
						 u32 cryptoflags,
						 bool encrypt)
{
	struct talitos_edesc *edesc;
	int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
	dma_addr_t iv_dma = 0;
	gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		      GFP_ATOMIC;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;

	if (cryptlen + authsize > max_len) {
		dev_err(dev, "length exceeds h/w max limit\n");
		return ERR_PTR(-EINVAL);
	}

	if (!dst || dst == src) {
		src_len = assoclen + cryptlen + authsize;
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_nents = dst ? src_nents : 0;
		dst_len = 0;
	} else { /* dst && dst != src*/
		src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
		dst_nents = sg_nents_for_len(dst, dst_len);
		if (dst_nents < 0) {
			dev_err(dev, "Invalid number of dst SG.\n");
			return ERR_PTR(-EINVAL);
		}
		dst_nents = (dst_nents == 1) ? 0 : dst_nents;
	}

	/*
	 * allocate space for base edesc plus the link tables,
	 * allowing for two separate entries for AD and generated ICV (+ 2),
	 * and space for two sets of ICVs (stashed and generated)
	 */
	alloc_len = sizeof(struct talitos_edesc);
	if (src_nents || dst_nents || !encrypt) {
		if (is_sec1)
			dma_len = (src_nents ? src_len : 0) +
				  (dst_nents ? dst_len : 0) + authsize;
		else
			dma_len = (src_nents + dst_nents + 2) *
				  sizeof(struct talitos_ptr) + authsize;
		alloc_len += dma_len;
	} else {
		dma_len = 0;
	}
	alloc_len += icv_stashing ? authsize : 0;

	/* if its a ahash, add space for a second desc next to the first one */
	if (is_sec1 && !dst)
		alloc_len += sizeof(struct talitos_desc);
	alloc_len += ivsize;

	edesc = kmalloc(alloc_len, GFP_DMA | flags);
	if (!edesc)
		return ERR_PTR(-ENOMEM);
	if (ivsize) {
		iv = memcpy(((u8 *)edesc) + alloc_len - ivsize, iv, ivsize);
		iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
	}
	memset(&edesc->desc, 0, sizeof(edesc->desc));

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->dma_len = dma_len;
	if (dma_len)
		edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0],
						     edesc->dma_len,
						     DMA_BIDIRECTIONAL);

	return edesc;
}

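/*
 * Worked example of the sizing above (SEC 2.x, AEAD decrypt, src != dst,
 * src_nents = 3, dst_nents = 2, authsize = 12, ivsize = 16):
 *
 *	alloc_len = sizeof(struct talitos_edesc)
 *		  + (3 + 2 + 2) * sizeof(struct talitos_ptr) + 12 // dma_len
 *		  + 12	// icv_stashing: room for the stashed ICV
 *		  + 16;	// IV copy at the tail of the buffer
 *
 * The IV is copied to the end of the allocation so it can be DMA-mapped
 * safely even when the caller passed an IV that is not DMA-able.
 */
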
static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
					      int icv_stashing, bool encrypt)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	unsigned int ivsize = crypto_aead_ivsize(authenc);
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);

	return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
				   iv, areq->assoclen, cryptlen,
				   authsize, ivsize, icv_stashing,
				   areq->base.flags, encrypt);
}

static int aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct talitos_edesc *edesc;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, req->iv, 0, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* set encrypt */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;

	return ipsec_esp(edesc, req, true, ipsec_esp_encrypt_done);
}

static int aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
	struct talitos_edesc *edesc;
	void *icvdata;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, req->iv, 1, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	if ((edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP) &&
	    (priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
	    ((!edesc->src_nents && !edesc->dst_nents) ||
	     priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {

		/* decrypt and check the ICV */
		edesc->desc.hdr = ctx->desc_hdr_template |
				  DESC_HDR_DIR_INBOUND |
				  DESC_HDR_MODE1_MDEU_CICV;

		/* reset integrity check result bits */

		return ipsec_esp(edesc, req, false,
				 ipsec_esp_decrypt_hwauth_done);
	}

	/* Have to check the ICV with software */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;

	/* stash incoming ICV for later cmp with ICV generated by the h/w */
	icvdata = edesc->buf + edesc->dma_len;

	sg_pcopy_to_buffer(req->src, edesc->src_nents ? : 1, icvdata, authsize,
			   req->assoclen + req->cryptlen - authsize);

	return ipsec_esp(edesc, req, false, ipsec_esp_decrypt_swauth_done);
}

static int skcipher_setkey(struct crypto_skcipher *cipher,
			   const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct device *dev = ctx->dev;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(&ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);

	return 0;
}

static int skcipher_des_setkey(struct crypto_skcipher *cipher,
			       const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des_key(cipher, key) ?:
		skcipher_setkey(cipher, key, keylen);
}

static int skcipher_des3_setkey(struct crypto_skcipher *cipher,
				const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des3_key(cipher, key) ?:
		skcipher_setkey(cipher, key, keylen);
}

static int skcipher_aes_setkey(struct crypto_skcipher *cipher,
			       const u8 *key, unsigned int keylen)
{
	if (keylen == AES_KEYSIZE_128 || keylen == AES_KEYSIZE_192 ||
	    keylen == AES_KEYSIZE_256)
		return skcipher_setkey(cipher, key, keylen);

	return -EINVAL;
}

static void common_nonsnoop_unmap(struct device *dev,
				  struct talitos_edesc *edesc,
				  struct skcipher_request *areq)
{
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->cryptlen, 0);
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);
}

static void skcipher_done(struct device *dev,
			  struct talitos_desc *desc, void *context,
			  int err)
{
	struct skcipher_request *areq = context;
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
	unsigned int ivsize = crypto_skcipher_ivsize(cipher);
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	common_nonsnoop_unmap(dev, edesc, areq);
	memcpy(areq->iv, ctx->iv, ivsize);

	kfree(edesc);

	areq->base.complete(&areq->base, err);
}

static int common_nonsnoop(struct talitos_edesc *edesc,
			   struct skcipher_request *areq,
			   void (*callback) (struct device *dev,
					     struct talitos_desc *desc,
					     void *context, int error))
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	unsigned int cryptlen = areq->cryptlen;
	unsigned int ivsize = crypto_skcipher_ivsize(cipher);
	int sg_count, ret;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	bool is_ctr = (desc->hdr & DESC_HDR_SEL0_MASK) == DESC_HDR_SEL0_AESU &&
		      (desc->hdr & DESC_HDR_MODE0_AESU_MASK) == DESC_HDR_MODE0_AESU_CTR;

	/* first DWORD empty */

	/* cipher iv */
	to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);

	/* cipher key */
	to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				      DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
	/*
	 * cipher in
	 */
	sg_count = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[3],
				      sg_count, 0, 0, 0, false, is_ctr ? 16 : 1);
	if (sg_count > 1)
		sync_needed = true;

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
			     sg_count, 0, (edesc->src_nents + 1));
	if (ret > 1)
		sync_needed = true;

	/* iv out */
	map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
			       DMA_FROM_DEVICE);

	/* last DWORD empty */

	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len, DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		common_nonsnoop_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}

static struct talitos_edesc *skcipher_edesc_alloc(struct skcipher_request *
						  areq, bool encrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
	unsigned int ivsize = crypto_skcipher_ivsize(cipher);

	return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
				   areq->iv, 0, areq->cryptlen, 0, ivsize, 0,
				   areq->base.flags, encrypt);
}

static int skcipher_encrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct talitos_edesc *edesc;
	unsigned int blocksize =
			crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher));

	if (!areq->cryptlen)
		return 0;

	if (areq->cryptlen % blocksize)
		return -EINVAL;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(areq, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* set encrypt */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;

	return common_nonsnoop(edesc, areq, skcipher_done);
}

static int skcipher_decrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct talitos_edesc *edesc;
	unsigned int blocksize =
			crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher));

	if (!areq->cryptlen)
		return 0;

	if (areq->cryptlen % blocksize)
		return -EINVAL;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(areq, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;

	return common_nonsnoop(edesc, areq, skcipher_done);
}

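/*
 * From the crypto API side these entry points back algorithms such as
 * "cbc(aes)"; callers never invoke them directly (a sketch of standard
 * kernel crypto API usage, not code from this driver):
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, 16);
 *	skcipher_request_set_crypt(req, src, dst, len, iv);
 *	crypto_skcipher_encrypt(req);	// lands in skcipher_encrypt() above
 */
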
static void common_nonsnoop_hash_unmap(struct device *dev,
				       struct talitos_edesc *edesc,
				       struct ahash_request *areq)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	struct talitos_desc *desc = &edesc->desc;
	struct talitos_desc *desc2 = (struct talitos_desc *)
				     (edesc->buf + edesc->dma_len);

	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
	if (desc->next_desc &&
	    desc->ptr[5].ptr != desc2->ptr[5].ptr)
		unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);
	if (req_ctx->last)
		memcpy(areq->result, req_ctx->hw_context,
		       crypto_ahash_digestsize(tfm));

	if (req_ctx->psrc)
		talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);

	/* When using hashctx-in, must unmap it. */
	if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
		unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
					 DMA_TO_DEVICE);
	else if (desc->next_desc)
		unmap_single_talitos_ptr(dev, &desc2->ptr[1],
					 DMA_TO_DEVICE);

	if (is_sec1 && req_ctx->nbuf)
		unmap_single_talitos_ptr(dev, &desc->ptr[3],
					 DMA_TO_DEVICE);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	if (edesc->desc.next_desc)
		dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
				 TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
}

static void ahash_done(struct device *dev,
		       struct talitos_desc *desc, void *context,
		       int err)
{
	struct ahash_request *areq = context;
	struct talitos_edesc *edesc =
		container_of(desc, struct talitos_edesc, desc);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);

	if (!req_ctx->last && req_ctx->to_hash_later) {
		/* Position any partial block for next update/final/finup */
		req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
		req_ctx->nbuf = req_ctx->to_hash_later;
	}
	common_nonsnoop_hash_unmap(dev, edesc, areq);

	kfree(edesc);

	areq->base.complete(&areq->base, err);
}

/*
 * SEC1 doesn't like hashing of 0 sized message, so we do the padding
 * ourself and submit a padded block
 */
static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
				      struct talitos_edesc *edesc,
				      struct talitos_ptr *ptr)
{
	static u8 padded_hash[64] = {
		0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	};

	pr_err_once("Bug in SEC1, padding ourself\n");
	edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
	map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
			       (char *)padded_hash, DMA_TO_DEVICE);
}

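/*
 * padded_hash above is exactly the MD5/SHA-1/SHA-256 padding of an empty
 * message for 64-byte-block algorithms: a 0x80 marker byte followed by
 * zeros, with the trailing 64-bit length field also zero. Hashing it with
 * the engine's PAD mode disabled therefore yields the empty-message digest.
 */
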
static int common_nonsnoop_hash(struct talitos_edesc *edesc,
				struct ahash_request *areq, unsigned int length,
				void (*callback) (struct device *dev,
						  struct talitos_desc *desc,
						  void *context, int error))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	int ret;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int sg_count;

	/* first DWORD empty */

	/* hash context in */
	if (!req_ctx->first || req_ctx->swinit) {
		map_single_talitos_ptr_nosync(dev, &desc->ptr[1],
					      req_ctx->hw_context_size,
					      req_ctx->hw_context,
					      DMA_TO_DEVICE);
		req_ctx->swinit = 0;
	}
	/* Indicate next op is not the first. */
	req_ctx->first = 0;

	/* HMAC key */
	if (ctx->keylen)
		to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
			       is_sec1);

	if (is_sec1 && req_ctx->nbuf)
		length -= req_ctx->nbuf;

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(req_ctx->psrc, sg_count, edesc->buf, length);
	else if (length)
		sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
				      DMA_TO_DEVICE);
	/*
	 * data in
	 */
	if (is_sec1 && req_ctx->nbuf) {
		map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
				       req_ctx->buf[req_ctx->buf_idx],
				       DMA_TO_DEVICE);
	} else {
		sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
					  &desc->ptr[3], sg_count, 0, 0);
		if (sg_count > 1)
			sync_needed = true;
	}

	/* fifth DWORD empty */

	/* hash/HMAC out -or- hash context out */
	if (req_ctx->last)
		map_single_talitos_ptr(dev, &desc->ptr[5],
				       crypto_ahash_digestsize(tfm),
				       req_ctx->hw_context, DMA_FROM_DEVICE);
	else
		map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
					      req_ctx->hw_context_size,
					      req_ctx->hw_context,
					      DMA_FROM_DEVICE);

	/* last DWORD empty */

	if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
		talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);

	if (is_sec1 && req_ctx->nbuf && length) {
		struct talitos_desc *desc2 = (struct talitos_desc *)
					     (edesc->buf + edesc->dma_len);
		dma_addr_t next_desc;

		memset(desc2, 0, sizeof(*desc2));
		desc2->hdr = desc->hdr;
		desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
		desc2->hdr1 = desc2->hdr;
		desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
		desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
		desc->hdr &= ~DESC_HDR_DONE_NOTIFY;

		if (desc->ptr[1].ptr)
			copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
					 is_sec1);
		else
			map_single_talitos_ptr_nosync(dev, &desc2->ptr[1],
						      req_ctx->hw_context_size,
						      req_ctx->hw_context,
						      DMA_TO_DEVICE);
		copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
		sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
					  &desc2->ptr[3], sg_count, 0, 0);
		if (sg_count > 1)
			sync_needed = true;
		copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
		if (req_ctx->last)
			map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
						      req_ctx->hw_context_size,
						      req_ctx->hw_context,
						      DMA_FROM_DEVICE);

		next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
					   DMA_BIDIRECTIONAL);
		desc->next_desc = cpu_to_be32(next_desc);
	}

	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len, DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		common_nonsnoop_hash_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}

static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
					       unsigned int nbytes)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
	bool is_sec1 = has_ftr_sec1(priv);

	if (is_sec1)
		nbytes -= req_ctx->nbuf;

	return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
				   nbytes, 0, 0, 0, areq->base.flags, false);
}

static int ahash_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct device *dev = ctx->dev;
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	unsigned int size;
	dma_addr_t dma;

	/* Initialize the context */
	req_ctx->buf_idx = 0;
	req_ctx->nbuf = 0;
	req_ctx->first = 1; /* first indicates h/w must init its context */
	req_ctx->swinit = 0; /* assume h/w init of context */
	size =	(crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
			? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
			: TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
	req_ctx->hw_context_size = size;

	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
			     DMA_TO_DEVICE);
	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);

	return 0;
}

/*
 * on h/w without explicit sha224 support, we initialize h/w context
 * manually with sha224 constants, and tell it to run sha256.
 */
static int ahash_init_sha224_swinit(struct ahash_request *areq)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);

	req_ctx->hw_context[0] = SHA224_H0;
	req_ctx->hw_context[1] = SHA224_H1;
	req_ctx->hw_context[2] = SHA224_H2;
	req_ctx->hw_context[3] = SHA224_H3;
	req_ctx->hw_context[4] = SHA224_H4;
	req_ctx->hw_context[5] = SHA224_H5;
	req_ctx->hw_context[6] = SHA224_H6;
	req_ctx->hw_context[7] = SHA224_H7;

	/* init 64-bit count */
	req_ctx->hw_context[8] = 0;
	req_ctx->hw_context[9] = 0;

	ahash_init(areq);
	req_ctx->swinit = 1;/* prevent h/w initting context with sha256 values*/

	return 0;
}

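/*
 * This works because SHA-224 is SHA-256 with a different initial state and
 * a truncated output: loading SHA224_H0..SHA224_H7 (from <crypto/sha2.h>)
 * and then running the MDEU in SHA-256 mode yields SHA-224, with the
 * 28-byte truncation handled by the registered ahash digestsize.
 */
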
static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct talitos_edesc *edesc;
	unsigned int blocksize =
			crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
	unsigned int nbytes_to_hash;
	unsigned int to_hash_later;
	unsigned int nsg;
	int nents;
	struct device *dev = ctx->dev;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];

	if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
		/* Buffer up to one whole block */
		nents = sg_nents_for_len(areq->src, nbytes);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_copy_to_buffer(areq->src, nents,
				  ctx_buf + req_ctx->nbuf, nbytes);
		req_ctx->nbuf += nbytes;
		return 0;
	}

	/* At least (blocksize + 1) bytes are available to hash */
	nbytes_to_hash = nbytes + req_ctx->nbuf;
	to_hash_later = nbytes_to_hash & (blocksize - 1);

	if (req_ctx->last)
		to_hash_later = 0;
	else if (to_hash_later)
		/* There is a partial block. Hash the full block(s) now */
		nbytes_to_hash -= to_hash_later;
	else {
		/* Keep one block buffered */
		nbytes_to_hash -= blocksize;
		to_hash_later = blocksize;
	}

	/* Chain in any previously buffered data */
	if (!is_sec1 && req_ctx->nbuf) {
		nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
		sg_init_table(req_ctx->bufsl, nsg);
		sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
		if (nsg > 1)
			sg_chain(req_ctx->bufsl, 2, areq->src);
		req_ctx->psrc = req_ctx->bufsl;
	} else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
		int offset;

		if (nbytes_to_hash > blocksize)
			offset = blocksize - req_ctx->nbuf;
		else
			offset = nbytes_to_hash - req_ctx->nbuf;
		nents = sg_nents_for_len(areq->src, offset);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_copy_to_buffer(areq->src, nents,
				  ctx_buf + req_ctx->nbuf, offset);
		req_ctx->nbuf += offset;
		req_ctx->psrc = scatterwalk_ffwd(req_ctx->bufsl, areq->src,
						 offset);
	} else
		req_ctx->psrc = areq->src;

	if (to_hash_later) {
		nents = sg_nents_for_len(areq->src, nbytes);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_pcopy_to_buffer(areq->src, nents,
				   req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
				   to_hash_later,
				   nbytes - to_hash_later);
	}
	req_ctx->to_hash_later = to_hash_later;

	/* Allocate extended descriptor */
	edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	edesc->desc.hdr = ctx->desc_hdr_template;

	/* On last one, request SEC to pad; otherwise continue */
	if (req_ctx->last)
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
	else
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;

	/* request SEC to INIT hash. */
	if (req_ctx->first && !req_ctx->swinit)
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;

	/* When the tfm context has a keylen, it's an HMAC.
	 * A first or last (ie. not middle) descriptor must request HMAC.
	 */
	if (ctx->keylen && (req_ctx->first || req_ctx->last))
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;

	return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, ahash_done);
}

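/*
 * Example of the buffering arithmetic above (blocksize = 64, not last):
 * with nbuf = 10 bytes carried over and nbytes = 100 new bytes,
 *
 *	nbytes_to_hash = 110;
 *	to_hash_later  = 110 & 63 = 46;
 *	nbytes_to_hash = 110 - 46 = 64;	// hash one full block now
 *
 * so 64 bytes are hashed in this request and 46 bytes are stashed in the
 * other half of buf[][] for the next update/final/finup.
 */
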
2093 static int ahash_update(struct ahash_request *areq)
2095 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2099 return ahash_process_req(areq, areq->nbytes);
2102 static int ahash_final(struct ahash_request *areq)
2104 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2108 return ahash_process_req(areq, 0);
2111 static int ahash_finup(struct ahash_request *areq)
2113 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2115 req_ctx->last = 1;
2117 return ahash_process_req(areq, areq->nbytes);
2120 static int ahash_digest(struct ahash_request *areq)
2122 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2123 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
2125 ahash->init(areq);
2126 req_ctx->last = 1;
2128 return ahash_process_req(areq, areq->nbytes);
2131 static int ahash_export(struct ahash_request *areq, void *out)
2133 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2134 struct talitos_export_state *export = out;
2135 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2136 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2137 struct device *dev = ctx->dev;
2138 dma_addr_t dma;
2140 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2141 DMA_FROM_DEVICE);
2142 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_FROM_DEVICE);
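/*
 * The map/unmap pair transfers nothing; it is a DMA sync so the CPU sees
 * the hw_context most recently written back by the SEC before the
 * memcpy() below. ahash_import() uses the same idiom with DMA_TO_DEVICE
 * to push the restored context back out to memory.
 */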
2144 memcpy(export->hw_context, req_ctx->hw_context,
2145 req_ctx->hw_context_size);
2146 memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
2147 export->swinit = req_ctx->swinit;
2148 export->first = req_ctx->first;
2149 export->last = req_ctx->last;
2150 export->to_hash_later = req_ctx->to_hash_later;
2151 export->nbuf = req_ctx->nbuf;
2153 return 0;
2156 static int ahash_import(struct ahash_request *areq, const void *in)
2158 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2159 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2160 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2161 struct device *dev = ctx->dev;
2162 const struct talitos_export_state *export = in;
2163 unsigned int size;
2164 dma_addr_t dma;
2166 memset(req_ctx, 0, sizeof(*req_ctx));
2167 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
2168 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2169 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
2170 req_ctx->hw_context_size = size;
2171 memcpy(req_ctx->hw_context, export->hw_context, size);
2172 memcpy(req_ctx->buf[0], export->buf, export->nbuf);
2173 req_ctx->swinit = export->swinit;
2174 req_ctx->first = export->first;
2175 req_ctx->last = export->last;
2176 req_ctx->to_hash_later = export->to_hash_later;
2177 req_ctx->nbuf = export->nbuf;
2179 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2180 DMA_TO_DEVICE);
2181 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
2183 return 0;
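/*
 * keyhash() digests an over-long HMAC key with this same tfm, per the
 * RFC 2104 rule that keys longer than the block size are replaced by
 * their digest; ahash_setkey() below stores either the raw key or that
 * digest as the effective key.
 */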
2186 static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
2187 u8 *hash)
2189 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2191 struct scatterlist sg[1];
2192 struct ahash_request *req;
2193 struct crypto_wait wait;
2194 int ret;
2196 crypto_init_wait(&wait);
2198 req = ahash_request_alloc(tfm, GFP_KERNEL);
2199 if (!req)
2200 return -ENOMEM;
2202 /* Keep tfm keylen == 0 during hash of the long key */
2203 ctx->keylen = 0;
2204 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2205 crypto_req_done, &wait);
2207 sg_init_one(&sg[0], key, keylen);
2209 ahash_request_set_crypt(req, sg, hash, keylen);
2210 ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
2212 ahash_request_free(req);
2214 return ret;
2217 static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2218 unsigned int keylen)
2220 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2221 struct device *dev = ctx->dev;
2222 unsigned int blocksize =
2223 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2224 unsigned int digestsize = crypto_ahash_digestsize(tfm);
2225 unsigned int keysize = keylen;
2226 u8 hash[SHA512_DIGEST_SIZE];
2227 int ret;
2229 if (keylen <= blocksize)
2230 memcpy(ctx->key, key, keysize);
2231 else {
2232 /* Must get the hash of the long key */
2233 ret = keyhash(tfm, key, keylen, hash);
2235 if (ret)
2236 return -EINVAL;
2238 keysize = digestsize;
2239 memcpy(ctx->key, hash, digestsize);
2240 }
2242 if (ctx->keylen)
2243 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
2245 ctx->keylen = keysize;
2246 ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
2248 return 0;
2252 struct talitos_alg_template {
2253 u32 type;
2254 u32 priority;
2255 union {
2256 struct skcipher_alg skcipher;
2257 struct ahash_alg hash;
2258 struct aead_alg aead;
2259 } alg;
2260 __be32 desc_hdr_template;
2261 };
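/*
 * One template per advertised algorithm: .type selects the union member,
 * and desc_hdr_template pre-encodes the descriptor type, execution
 * unit(s) and mode bits that hw_supports() later checks against the
 * device tree capabilities.
 */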
2263 static struct talitos_alg_template driver_algs[] = {
2264 /* AEAD algorithms. These use a single-pass ipsec_esp descriptor */
2265 { .type = CRYPTO_ALG_TYPE_AEAD,
2268 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2269 .cra_driver_name = "authenc-hmac-sha1-"
2270 "cbc-aes-talitos",
2271 .cra_blocksize = AES_BLOCK_SIZE,
2272 .cra_flags = CRYPTO_ALG_ASYNC |
2273 CRYPTO_ALG_ALLOCATES_MEMORY,
2275 .ivsize = AES_BLOCK_SIZE,
2276 .maxauthsize = SHA1_DIGEST_SIZE,
2278 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2279 DESC_HDR_SEL0_AESU |
2280 DESC_HDR_MODE0_AESU_CBC |
2281 DESC_HDR_SEL1_MDEUA |
2282 DESC_HDR_MODE1_MDEU_INIT |
2283 DESC_HDR_MODE1_MDEU_PAD |
2284 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2286 { .type = CRYPTO_ALG_TYPE_AEAD,
2287 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2290 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2291 .cra_driver_name = "authenc-hmac-sha1-"
2292 "cbc-aes-talitos-hsna",
2293 .cra_blocksize = AES_BLOCK_SIZE,
2294 .cra_flags = CRYPTO_ALG_ASYNC |
2295 CRYPTO_ALG_ALLOCATES_MEMORY,
2297 .ivsize = AES_BLOCK_SIZE,
2298 .maxauthsize = SHA1_DIGEST_SIZE,
2300 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2301 DESC_HDR_SEL0_AESU |
2302 DESC_HDR_MODE0_AESU_CBC |
2303 DESC_HDR_SEL1_MDEUA |
2304 DESC_HDR_MODE1_MDEU_INIT |
2305 DESC_HDR_MODE1_MDEU_PAD |
2306 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2308 { .type = CRYPTO_ALG_TYPE_AEAD,
2311 .cra_name = "authenc(hmac(sha1),"
2312 "cbc(des3_ede))",
2313 .cra_driver_name = "authenc-hmac-sha1-"
2314 "cbc-3des-talitos",
2315 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2316 .cra_flags = CRYPTO_ALG_ASYNC |
2317 CRYPTO_ALG_ALLOCATES_MEMORY,
2319 .ivsize = DES3_EDE_BLOCK_SIZE,
2320 .maxauthsize = SHA1_DIGEST_SIZE,
2321 .setkey = aead_des3_setkey,
2323 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2324 DESC_HDR_SEL0_DEU |
2325 DESC_HDR_MODE0_DEU_CBC |
2326 DESC_HDR_MODE0_DEU_3DES |
2327 DESC_HDR_SEL1_MDEUA |
2328 DESC_HDR_MODE1_MDEU_INIT |
2329 DESC_HDR_MODE1_MDEU_PAD |
2330 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2332 { .type = CRYPTO_ALG_TYPE_AEAD,
2333 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2336 .cra_name = "authenc(hmac(sha1),"
2337 "cbc(des3_ede))",
2338 .cra_driver_name = "authenc-hmac-sha1-"
2339 "cbc-3des-talitos-hsna",
2340 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2341 .cra_flags = CRYPTO_ALG_ASYNC |
2342 CRYPTO_ALG_ALLOCATES_MEMORY,
2344 .ivsize = DES3_EDE_BLOCK_SIZE,
2345 .maxauthsize = SHA1_DIGEST_SIZE,
2346 .setkey = aead_des3_setkey,
2348 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2349 DESC_HDR_SEL0_DEU |
2350 DESC_HDR_MODE0_DEU_CBC |
2351 DESC_HDR_MODE0_DEU_3DES |
2352 DESC_HDR_SEL1_MDEUA |
2353 DESC_HDR_MODE1_MDEU_INIT |
2354 DESC_HDR_MODE1_MDEU_PAD |
2355 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2357 { .type = CRYPTO_ALG_TYPE_AEAD,
2360 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2361 .cra_driver_name = "authenc-hmac-sha224-"
2362 "cbc-aes-talitos",
2363 .cra_blocksize = AES_BLOCK_SIZE,
2364 .cra_flags = CRYPTO_ALG_ASYNC |
2365 CRYPTO_ALG_ALLOCATES_MEMORY,
2367 .ivsize = AES_BLOCK_SIZE,
2368 .maxauthsize = SHA224_DIGEST_SIZE,
2370 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2371 DESC_HDR_SEL0_AESU |
2372 DESC_HDR_MODE0_AESU_CBC |
2373 DESC_HDR_SEL1_MDEUA |
2374 DESC_HDR_MODE1_MDEU_INIT |
2375 DESC_HDR_MODE1_MDEU_PAD |
2376 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2378 { .type = CRYPTO_ALG_TYPE_AEAD,
2379 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2382 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2383 .cra_driver_name = "authenc-hmac-sha224-"
2384 "cbc-aes-talitos-hsna",
2385 .cra_blocksize = AES_BLOCK_SIZE,
2386 .cra_flags = CRYPTO_ALG_ASYNC |
2387 CRYPTO_ALG_ALLOCATES_MEMORY,
2389 .ivsize = AES_BLOCK_SIZE,
2390 .maxauthsize = SHA224_DIGEST_SIZE,
2392 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2393 DESC_HDR_SEL0_AESU |
2394 DESC_HDR_MODE0_AESU_CBC |
2395 DESC_HDR_SEL1_MDEUA |
2396 DESC_HDR_MODE1_MDEU_INIT |
2397 DESC_HDR_MODE1_MDEU_PAD |
2398 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2400 { .type = CRYPTO_ALG_TYPE_AEAD,
2403 .cra_name = "authenc(hmac(sha224),"
2404 "cbc(des3_ede))",
2405 .cra_driver_name = "authenc-hmac-sha224-"
2406 "cbc-3des-talitos",
2407 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2408 .cra_flags = CRYPTO_ALG_ASYNC |
2409 CRYPTO_ALG_ALLOCATES_MEMORY,
2411 .ivsize = DES3_EDE_BLOCK_SIZE,
2412 .maxauthsize = SHA224_DIGEST_SIZE,
2413 .setkey = aead_des3_setkey,
2415 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2416 DESC_HDR_SEL0_DEU |
2417 DESC_HDR_MODE0_DEU_CBC |
2418 DESC_HDR_MODE0_DEU_3DES |
2419 DESC_HDR_SEL1_MDEUA |
2420 DESC_HDR_MODE1_MDEU_INIT |
2421 DESC_HDR_MODE1_MDEU_PAD |
2422 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2424 { .type = CRYPTO_ALG_TYPE_AEAD,
2425 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2428 .cra_name = "authenc(hmac(sha224),"
2429 "cbc(des3_ede))",
2430 .cra_driver_name = "authenc-hmac-sha224-"
2431 "cbc-3des-talitos-hsna",
2432 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2433 .cra_flags = CRYPTO_ALG_ASYNC |
2434 CRYPTO_ALG_ALLOCATES_MEMORY,
2436 .ivsize = DES3_EDE_BLOCK_SIZE,
2437 .maxauthsize = SHA224_DIGEST_SIZE,
2438 .setkey = aead_des3_setkey,
2440 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2441 DESC_HDR_SEL0_DEU |
2442 DESC_HDR_MODE0_DEU_CBC |
2443 DESC_HDR_MODE0_DEU_3DES |
2444 DESC_HDR_SEL1_MDEUA |
2445 DESC_HDR_MODE1_MDEU_INIT |
2446 DESC_HDR_MODE1_MDEU_PAD |
2447 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2449 { .type = CRYPTO_ALG_TYPE_AEAD,
2452 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2453 .cra_driver_name = "authenc-hmac-sha256-"
2454 "cbc-aes-talitos",
2455 .cra_blocksize = AES_BLOCK_SIZE,
2456 .cra_flags = CRYPTO_ALG_ASYNC |
2457 CRYPTO_ALG_ALLOCATES_MEMORY,
2459 .ivsize = AES_BLOCK_SIZE,
2460 .maxauthsize = SHA256_DIGEST_SIZE,
2462 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2463 DESC_HDR_SEL0_AESU |
2464 DESC_HDR_MODE0_AESU_CBC |
2465 DESC_HDR_SEL1_MDEUA |
2466 DESC_HDR_MODE1_MDEU_INIT |
2467 DESC_HDR_MODE1_MDEU_PAD |
2468 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2470 { .type = CRYPTO_ALG_TYPE_AEAD,
2471 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2474 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2475 .cra_driver_name = "authenc-hmac-sha256-"
2476 "cbc-aes-talitos-hsna",
2477 .cra_blocksize = AES_BLOCK_SIZE,
2478 .cra_flags = CRYPTO_ALG_ASYNC |
2479 CRYPTO_ALG_ALLOCATES_MEMORY,
2481 .ivsize = AES_BLOCK_SIZE,
2482 .maxauthsize = SHA256_DIGEST_SIZE,
2484 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2485 DESC_HDR_SEL0_AESU |
2486 DESC_HDR_MODE0_AESU_CBC |
2487 DESC_HDR_SEL1_MDEUA |
2488 DESC_HDR_MODE1_MDEU_INIT |
2489 DESC_HDR_MODE1_MDEU_PAD |
2490 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2492 { .type = CRYPTO_ALG_TYPE_AEAD,
2495 .cra_name = "authenc(hmac(sha256),"
2496 "cbc(des3_ede))",
2497 .cra_driver_name = "authenc-hmac-sha256-"
2498 "cbc-3des-talitos",
2499 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2500 .cra_flags = CRYPTO_ALG_ASYNC |
2501 CRYPTO_ALG_ALLOCATES_MEMORY,
2503 .ivsize = DES3_EDE_BLOCK_SIZE,
2504 .maxauthsize = SHA256_DIGEST_SIZE,
2505 .setkey = aead_des3_setkey,
2507 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2508 DESC_HDR_SEL0_DEU |
2509 DESC_HDR_MODE0_DEU_CBC |
2510 DESC_HDR_MODE0_DEU_3DES |
2511 DESC_HDR_SEL1_MDEUA |
2512 DESC_HDR_MODE1_MDEU_INIT |
2513 DESC_HDR_MODE1_MDEU_PAD |
2514 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2516 { .type = CRYPTO_ALG_TYPE_AEAD,
2517 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2520 .cra_name = "authenc(hmac(sha256),"
2521 "cbc(des3_ede))",
2522 .cra_driver_name = "authenc-hmac-sha256-"
2523 "cbc-3des-talitos-hsna",
2524 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2525 .cra_flags = CRYPTO_ALG_ASYNC |
2526 CRYPTO_ALG_ALLOCATES_MEMORY,
2528 .ivsize = DES3_EDE_BLOCK_SIZE,
2529 .maxauthsize = SHA256_DIGEST_SIZE,
2530 .setkey = aead_des3_setkey,
2532 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2533 DESC_HDR_SEL0_DEU |
2534 DESC_HDR_MODE0_DEU_CBC |
2535 DESC_HDR_MODE0_DEU_3DES |
2536 DESC_HDR_SEL1_MDEUA |
2537 DESC_HDR_MODE1_MDEU_INIT |
2538 DESC_HDR_MODE1_MDEU_PAD |
2539 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2541 { .type = CRYPTO_ALG_TYPE_AEAD,
2544 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2545 .cra_driver_name = "authenc-hmac-sha384-"
2546 "cbc-aes-talitos",
2547 .cra_blocksize = AES_BLOCK_SIZE,
2548 .cra_flags = CRYPTO_ALG_ASYNC |
2549 CRYPTO_ALG_ALLOCATES_MEMORY,
2551 .ivsize = AES_BLOCK_SIZE,
2552 .maxauthsize = SHA384_DIGEST_SIZE,
2554 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2555 DESC_HDR_SEL0_AESU |
2556 DESC_HDR_MODE0_AESU_CBC |
2557 DESC_HDR_SEL1_MDEUB |
2558 DESC_HDR_MODE1_MDEU_INIT |
2559 DESC_HDR_MODE1_MDEU_PAD |
2560 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2562 { .type = CRYPTO_ALG_TYPE_AEAD,
2565 .cra_name = "authenc(hmac(sha384),"
2566 "cbc(des3_ede))",
2567 .cra_driver_name = "authenc-hmac-sha384-"
2568 "cbc-3des-talitos",
2569 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2570 .cra_flags = CRYPTO_ALG_ASYNC |
2571 CRYPTO_ALG_ALLOCATES_MEMORY,
2573 .ivsize = DES3_EDE_BLOCK_SIZE,
2574 .maxauthsize = SHA384_DIGEST_SIZE,
2575 .setkey = aead_des3_setkey,
2577 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2578 DESC_HDR_SEL0_DEU |
2579 DESC_HDR_MODE0_DEU_CBC |
2580 DESC_HDR_MODE0_DEU_3DES |
2581 DESC_HDR_SEL1_MDEUB |
2582 DESC_HDR_MODE1_MDEU_INIT |
2583 DESC_HDR_MODE1_MDEU_PAD |
2584 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2586 { .type = CRYPTO_ALG_TYPE_AEAD,
2589 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2590 .cra_driver_name = "authenc-hmac-sha512-"
2591 "cbc-aes-talitos",
2592 .cra_blocksize = AES_BLOCK_SIZE,
2593 .cra_flags = CRYPTO_ALG_ASYNC |
2594 CRYPTO_ALG_ALLOCATES_MEMORY,
2596 .ivsize = AES_BLOCK_SIZE,
2597 .maxauthsize = SHA512_DIGEST_SIZE,
2599 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2600 DESC_HDR_SEL0_AESU |
2601 DESC_HDR_MODE0_AESU_CBC |
2602 DESC_HDR_SEL1_MDEUB |
2603 DESC_HDR_MODE1_MDEU_INIT |
2604 DESC_HDR_MODE1_MDEU_PAD |
2605 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2607 { .type = CRYPTO_ALG_TYPE_AEAD,
2610 .cra_name = "authenc(hmac(sha512),"
2611 "cbc(des3_ede))",
2612 .cra_driver_name = "authenc-hmac-sha512-"
2613 "cbc-3des-talitos",
2614 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2615 .cra_flags = CRYPTO_ALG_ASYNC |
2616 CRYPTO_ALG_ALLOCATES_MEMORY,
2618 .ivsize = DES3_EDE_BLOCK_SIZE,
2619 .maxauthsize = SHA512_DIGEST_SIZE,
2620 .setkey = aead_des3_setkey,
2622 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2623 DESC_HDR_SEL0_DEU |
2624 DESC_HDR_MODE0_DEU_CBC |
2625 DESC_HDR_MODE0_DEU_3DES |
2626 DESC_HDR_SEL1_MDEUB |
2627 DESC_HDR_MODE1_MDEU_INIT |
2628 DESC_HDR_MODE1_MDEU_PAD |
2629 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2631 { .type = CRYPTO_ALG_TYPE_AEAD,
2634 .cra_name = "authenc(hmac(md5),cbc(aes))",
2635 .cra_driver_name = "authenc-hmac-md5-"
2636 "cbc-aes-talitos",
2637 .cra_blocksize = AES_BLOCK_SIZE,
2638 .cra_flags = CRYPTO_ALG_ASYNC |
2639 CRYPTO_ALG_ALLOCATES_MEMORY,
2641 .ivsize = AES_BLOCK_SIZE,
2642 .maxauthsize = MD5_DIGEST_SIZE,
2644 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2645 DESC_HDR_SEL0_AESU |
2646 DESC_HDR_MODE0_AESU_CBC |
2647 DESC_HDR_SEL1_MDEUA |
2648 DESC_HDR_MODE1_MDEU_INIT |
2649 DESC_HDR_MODE1_MDEU_PAD |
2650 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2652 { .type = CRYPTO_ALG_TYPE_AEAD,
2653 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2656 .cra_name = "authenc(hmac(md5),cbc(aes))",
2657 .cra_driver_name = "authenc-hmac-md5-"
2658 "cbc-aes-talitos-hsna",
2659 .cra_blocksize = AES_BLOCK_SIZE,
2660 .cra_flags = CRYPTO_ALG_ASYNC |
2661 CRYPTO_ALG_ALLOCATES_MEMORY,
2663 .ivsize = AES_BLOCK_SIZE,
2664 .maxauthsize = MD5_DIGEST_SIZE,
2666 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2667 DESC_HDR_SEL0_AESU |
2668 DESC_HDR_MODE0_AESU_CBC |
2669 DESC_HDR_SEL1_MDEUA |
2670 DESC_HDR_MODE1_MDEU_INIT |
2671 DESC_HDR_MODE1_MDEU_PAD |
2672 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2674 { .type = CRYPTO_ALG_TYPE_AEAD,
2677 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2678 .cra_driver_name = "authenc-hmac-md5-"
2679 "cbc-3des-talitos",
2680 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2681 .cra_flags = CRYPTO_ALG_ASYNC |
2682 CRYPTO_ALG_ALLOCATES_MEMORY,
2684 .ivsize = DES3_EDE_BLOCK_SIZE,
2685 .maxauthsize = MD5_DIGEST_SIZE,
2686 .setkey = aead_des3_setkey,
2688 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2689 DESC_HDR_SEL0_DEU |
2690 DESC_HDR_MODE0_DEU_CBC |
2691 DESC_HDR_MODE0_DEU_3DES |
2692 DESC_HDR_SEL1_MDEUA |
2693 DESC_HDR_MODE1_MDEU_INIT |
2694 DESC_HDR_MODE1_MDEU_PAD |
2695 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2697 { .type = CRYPTO_ALG_TYPE_AEAD,
2698 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2701 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2702 .cra_driver_name = "authenc-hmac-md5-"
2703 "cbc-3des-talitos-hsna",
2704 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2705 .cra_flags = CRYPTO_ALG_ASYNC |
2706 CRYPTO_ALG_ALLOCATES_MEMORY,
2708 .ivsize = DES3_EDE_BLOCK_SIZE,
2709 .maxauthsize = MD5_DIGEST_SIZE,
2710 .setkey = aead_des3_setkey,
2712 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2713 DESC_HDR_SEL0_DEU |
2714 DESC_HDR_MODE0_DEU_CBC |
2715 DESC_HDR_MODE0_DEU_3DES |
2716 DESC_HDR_SEL1_MDEUA |
2717 DESC_HDR_MODE1_MDEU_INIT |
2718 DESC_HDR_MODE1_MDEU_PAD |
2719 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2721 /* SKCIPHER algorithms. */
2722 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2724 .base.cra_name = "ecb(aes)",
2725 .base.cra_driver_name = "ecb-aes-talitos",
2726 .base.cra_blocksize = AES_BLOCK_SIZE,
2727 .base.cra_flags = CRYPTO_ALG_ASYNC |
2728 CRYPTO_ALG_ALLOCATES_MEMORY,
2729 .min_keysize = AES_MIN_KEY_SIZE,
2730 .max_keysize = AES_MAX_KEY_SIZE,
2731 .setkey = skcipher_aes_setkey,
2733 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2734 DESC_HDR_SEL0_AESU,
2736 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2738 .base.cra_name = "cbc(aes)",
2739 .base.cra_driver_name = "cbc-aes-talitos",
2740 .base.cra_blocksize = AES_BLOCK_SIZE,
2741 .base.cra_flags = CRYPTO_ALG_ASYNC |
2742 CRYPTO_ALG_ALLOCATES_MEMORY,
2743 .min_keysize = AES_MIN_KEY_SIZE,
2744 .max_keysize = AES_MAX_KEY_SIZE,
2745 .ivsize = AES_BLOCK_SIZE,
2746 .setkey = skcipher_aes_setkey,
2748 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2749 DESC_HDR_SEL0_AESU |
2750 DESC_HDR_MODE0_AESU_CBC,
2752 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2754 .base.cra_name = "ctr(aes)",
2755 .base.cra_driver_name = "ctr-aes-talitos",
2756 .base.cra_blocksize = 1,
2757 .base.cra_flags = CRYPTO_ALG_ASYNC |
2758 CRYPTO_ALG_ALLOCATES_MEMORY,
2759 .min_keysize = AES_MIN_KEY_SIZE,
2760 .max_keysize = AES_MAX_KEY_SIZE,
2761 .ivsize = AES_BLOCK_SIZE,
2762 .setkey = skcipher_aes_setkey,
2764 .desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
2765 DESC_HDR_SEL0_AESU |
2766 DESC_HDR_MODE0_AESU_CTR,
2768 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2770 .base.cra_name = "ctr(aes)",
2771 .base.cra_driver_name = "ctr-aes-talitos",
2772 .base.cra_blocksize = 1,
2773 .base.cra_flags = CRYPTO_ALG_ASYNC |
2774 CRYPTO_ALG_ALLOCATES_MEMORY,
2775 .min_keysize = AES_MIN_KEY_SIZE,
2776 .max_keysize = AES_MAX_KEY_SIZE,
2777 .ivsize = AES_BLOCK_SIZE,
2778 .setkey = skcipher_aes_setkey,
2780 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2781 DESC_HDR_SEL0_AESU |
2782 DESC_HDR_MODE0_AESU_CTR,
2784 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2786 .base.cra_name = "ecb(des)",
2787 .base.cra_driver_name = "ecb-des-talitos",
2788 .base.cra_blocksize = DES_BLOCK_SIZE,
2789 .base.cra_flags = CRYPTO_ALG_ASYNC |
2790 CRYPTO_ALG_ALLOCATES_MEMORY,
2791 .min_keysize = DES_KEY_SIZE,
2792 .max_keysize = DES_KEY_SIZE,
2793 .setkey = skcipher_des_setkey,
2795 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2796 DESC_HDR_SEL0_DEU,
2798 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2800 .base.cra_name = "cbc(des)",
2801 .base.cra_driver_name = "cbc-des-talitos",
2802 .base.cra_blocksize = DES_BLOCK_SIZE,
2803 .base.cra_flags = CRYPTO_ALG_ASYNC |
2804 CRYPTO_ALG_ALLOCATES_MEMORY,
2805 .min_keysize = DES_KEY_SIZE,
2806 .max_keysize = DES_KEY_SIZE,
2807 .ivsize = DES_BLOCK_SIZE,
2808 .setkey = skcipher_des_setkey,
2810 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2811 DESC_HDR_SEL0_DEU |
2812 DESC_HDR_MODE0_DEU_CBC,
2814 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2816 .base.cra_name = "ecb(des3_ede)",
2817 .base.cra_driver_name = "ecb-3des-talitos",
2818 .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2819 .base.cra_flags = CRYPTO_ALG_ASYNC |
2820 CRYPTO_ALG_ALLOCATES_MEMORY,
2821 .min_keysize = DES3_EDE_KEY_SIZE,
2822 .max_keysize = DES3_EDE_KEY_SIZE,
2823 .setkey = skcipher_des3_setkey,
2825 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2826 DESC_HDR_SEL0_DEU |
2827 DESC_HDR_MODE0_DEU_3DES,
2829 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2831 .base.cra_name = "cbc(des3_ede)",
2832 .base.cra_driver_name = "cbc-3des-talitos",
2833 .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2834 .base.cra_flags = CRYPTO_ALG_ASYNC |
2835 CRYPTO_ALG_ALLOCATES_MEMORY,
2836 .min_keysize = DES3_EDE_KEY_SIZE,
2837 .max_keysize = DES3_EDE_KEY_SIZE,
2838 .ivsize = DES3_EDE_BLOCK_SIZE,
2839 .setkey = skcipher_des3_setkey,
2841 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2842 DESC_HDR_SEL0_DEU |
2843 DESC_HDR_MODE0_DEU_CBC |
2844 DESC_HDR_MODE0_DEU_3DES,
2846 /* AHASH algorithms. */
2847 { .type = CRYPTO_ALG_TYPE_AHASH,
2849 .halg.digestsize = MD5_DIGEST_SIZE,
2850 .halg.statesize = sizeof(struct talitos_export_state),
2852 .cra_name = "md5",
2853 .cra_driver_name = "md5-talitos",
2854 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
2855 .cra_flags = CRYPTO_ALG_ASYNC |
2856 CRYPTO_ALG_ALLOCATES_MEMORY,
2859 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2860 DESC_HDR_SEL0_MDEUA |
2861 DESC_HDR_MODE0_MDEU_MD5,
2863 { .type = CRYPTO_ALG_TYPE_AHASH,
2865 .halg.digestsize = SHA1_DIGEST_SIZE,
2866 .halg.statesize = sizeof(struct talitos_export_state),
2868 .cra_name = "sha1",
2869 .cra_driver_name = "sha1-talitos",
2870 .cra_blocksize = SHA1_BLOCK_SIZE,
2871 .cra_flags = CRYPTO_ALG_ASYNC |
2872 CRYPTO_ALG_ALLOCATES_MEMORY,
2875 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2876 DESC_HDR_SEL0_MDEUA |
2877 DESC_HDR_MODE0_MDEU_SHA1,
2879 { .type = CRYPTO_ALG_TYPE_AHASH,
2881 .halg.digestsize = SHA224_DIGEST_SIZE,
2882 .halg.statesize = sizeof(struct talitos_export_state),
2884 .cra_name = "sha224",
2885 .cra_driver_name = "sha224-talitos",
2886 .cra_blocksize = SHA224_BLOCK_SIZE,
2887 .cra_flags = CRYPTO_ALG_ASYNC |
2888 CRYPTO_ALG_ALLOCATES_MEMORY,
2891 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2892 DESC_HDR_SEL0_MDEUA |
2893 DESC_HDR_MODE0_MDEU_SHA224,
2895 { .type = CRYPTO_ALG_TYPE_AHASH,
2897 .halg.digestsize = SHA256_DIGEST_SIZE,
2898 .halg.statesize = sizeof(struct talitos_export_state),
2900 .cra_name = "sha256",
2901 .cra_driver_name = "sha256-talitos",
2902 .cra_blocksize = SHA256_BLOCK_SIZE,
2903 .cra_flags = CRYPTO_ALG_ASYNC |
2904 CRYPTO_ALG_ALLOCATES_MEMORY,
2907 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2908 DESC_HDR_SEL0_MDEUA |
2909 DESC_HDR_MODE0_MDEU_SHA256,
2911 { .type = CRYPTO_ALG_TYPE_AHASH,
2913 .halg.digestsize = SHA384_DIGEST_SIZE,
2914 .halg.statesize = sizeof(struct talitos_export_state),
2916 .cra_name = "sha384",
2917 .cra_driver_name = "sha384-talitos",
2918 .cra_blocksize = SHA384_BLOCK_SIZE,
2919 .cra_flags = CRYPTO_ALG_ASYNC |
2920 CRYPTO_ALG_ALLOCATES_MEMORY,
2923 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2924 DESC_HDR_SEL0_MDEUB |
2925 DESC_HDR_MODE0_MDEUB_SHA384,
2927 { .type = CRYPTO_ALG_TYPE_AHASH,
2929 .halg.digestsize = SHA512_DIGEST_SIZE,
2930 .halg.statesize = sizeof(struct talitos_export_state),
2932 .cra_name = "sha512",
2933 .cra_driver_name = "sha512-talitos",
2934 .cra_blocksize = SHA512_BLOCK_SIZE,
2935 .cra_flags = CRYPTO_ALG_ASYNC |
2936 CRYPTO_ALG_ALLOCATES_MEMORY,
2939 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2940 DESC_HDR_SEL0_MDEUB |
2941 DESC_HDR_MODE0_MDEUB_SHA512,
2943 { .type = CRYPTO_ALG_TYPE_AHASH,
2945 .halg.digestsize = MD5_DIGEST_SIZE,
2946 .halg.statesize = sizeof(struct talitos_export_state),
2948 .cra_name = "hmac(md5)",
2949 .cra_driver_name = "hmac-md5-talitos",
2950 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
2951 .cra_flags = CRYPTO_ALG_ASYNC |
2952 CRYPTO_ALG_ALLOCATES_MEMORY,
2955 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2956 DESC_HDR_SEL0_MDEUA |
2957 DESC_HDR_MODE0_MDEU_MD5,
2959 { .type = CRYPTO_ALG_TYPE_AHASH,
2961 .halg.digestsize = SHA1_DIGEST_SIZE,
2962 .halg.statesize = sizeof(struct talitos_export_state),
2964 .cra_name = "hmac(sha1)",
2965 .cra_driver_name = "hmac-sha1-talitos",
2966 .cra_blocksize = SHA1_BLOCK_SIZE,
2967 .cra_flags = CRYPTO_ALG_ASYNC |
2968 CRYPTO_ALG_ALLOCATES_MEMORY,
2971 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2972 DESC_HDR_SEL0_MDEUA |
2973 DESC_HDR_MODE0_MDEU_SHA1,
2975 { .type = CRYPTO_ALG_TYPE_AHASH,
2977 .halg.digestsize = SHA224_DIGEST_SIZE,
2978 .halg.statesize = sizeof(struct talitos_export_state),
2980 .cra_name = "hmac(sha224)",
2981 .cra_driver_name = "hmac-sha224-talitos",
2982 .cra_blocksize = SHA224_BLOCK_SIZE,
2983 .cra_flags = CRYPTO_ALG_ASYNC |
2984 CRYPTO_ALG_ALLOCATES_MEMORY,
2987 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2988 DESC_HDR_SEL0_MDEUA |
2989 DESC_HDR_MODE0_MDEU_SHA224,
2991 { .type = CRYPTO_ALG_TYPE_AHASH,
2993 .halg.digestsize = SHA256_DIGEST_SIZE,
2994 .halg.statesize = sizeof(struct talitos_export_state),
2996 .cra_name = "hmac(sha256)",
2997 .cra_driver_name = "hmac-sha256-talitos",
2998 .cra_blocksize = SHA256_BLOCK_SIZE,
2999 .cra_flags = CRYPTO_ALG_ASYNC |
3000 CRYPTO_ALG_ALLOCATES_MEMORY,
3003 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3004 DESC_HDR_SEL0_MDEUA |
3005 DESC_HDR_MODE0_MDEU_SHA256,
3007 { .type = CRYPTO_ALG_TYPE_AHASH,
3009 .halg.digestsize = SHA384_DIGEST_SIZE,
3010 .halg.statesize = sizeof(struct talitos_export_state),
3012 .cra_name = "hmac(sha384)",
3013 .cra_driver_name = "hmac-sha384-talitos",
3014 .cra_blocksize = SHA384_BLOCK_SIZE,
3015 .cra_flags = CRYPTO_ALG_ASYNC |
3016 CRYPTO_ALG_ALLOCATES_MEMORY,
3019 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3020 DESC_HDR_SEL0_MDEUB |
3021 DESC_HDR_MODE0_MDEUB_SHA384,
3023 { .type = CRYPTO_ALG_TYPE_AHASH,
3025 .halg.digestsize = SHA512_DIGEST_SIZE,
3026 .halg.statesize = sizeof(struct talitos_export_state),
3028 .cra_name = "hmac(sha512)",
3029 .cra_driver_name = "hmac-sha512-talitos",
3030 .cra_blocksize = SHA512_BLOCK_SIZE,
3031 .cra_flags = CRYPTO_ALG_ASYNC |
3032 CRYPTO_ALG_ALLOCATES_MEMORY,
3035 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3036 DESC_HDR_SEL0_MDEUB |
3037 DESC_HDR_MODE0_MDEUB_SHA512,
3038 },
3039 };
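/*
 * Illustrative consumer sketch (not driver code): once registered, these
 * algorithms are reached through the regular crypto API and win over
 * software implementations by cra_priority, e.g.:
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("hmac(sha256)", 0, 0);
 *
 *	if (!IS_ERR(tfm))
 *		crypto_free_ahash(tfm);
 *
 * crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)) would then typically
 * report "hmac-sha256-talitos" on systems with this device.
 */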
3041 struct talitos_crypto_alg {
3042 struct list_head entry;
3043 struct device *dev;
3044 struct talitos_alg_template algt;
3045 };
3047 static int talitos_init_common(struct talitos_ctx *ctx,
3048 struct talitos_crypto_alg *talitos_alg)
3050 struct talitos_private *priv;
3052 /* update context with ptr to dev */
3053 ctx->dev = talitos_alg->dev;
3055 /* assign SEC channel to tfm in round-robin fashion */
3056 priv = dev_get_drvdata(ctx->dev);
3057 ctx->ch = atomic_inc_return(&priv->last_chan) &
3058 (priv->num_channels - 1);
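/*
 * Masking with num_channels - 1 is a cheap modulo; talitos_probe()
 * rejects devices whose fsl,num-channels property is not a power of two,
 * so the mask is always valid.
 */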
3060 /* copy descriptor header template value */
3061 ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
3063 /* select done notification */
3064 ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
3066 return 0;
3069 static int talitos_cra_init_aead(struct crypto_aead *tfm)
3071 struct aead_alg *alg = crypto_aead_alg(tfm);
3072 struct talitos_crypto_alg *talitos_alg;
3073 struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
3075 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3076 algt.alg.aead);
3078 return talitos_init_common(ctx, talitos_alg);
3081 static int talitos_cra_init_skcipher(struct crypto_skcipher *tfm)
3083 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3084 struct talitos_crypto_alg *talitos_alg;
3085 struct talitos_ctx *ctx = crypto_skcipher_ctx(tfm);
3087 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3088 algt.alg.skcipher);
3090 return talitos_init_common(ctx, talitos_alg);
3093 static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3095 struct crypto_alg *alg = tfm->__crt_alg;
3096 struct talitos_crypto_alg *talitos_alg;
3097 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3099 talitos_alg = container_of(__crypto_ahash_alg(alg),
3100 struct talitos_crypto_alg,
3101 algt.alg.hash);
3104 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3105 sizeof(struct talitos_ahash_req_ctx));
3107 return talitos_init_common(ctx, talitos_alg);
3110 static void talitos_cra_exit(struct crypto_tfm *tfm)
3112 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3113 struct device *dev = ctx->dev;
3115 if (ctx->keylen)
3116 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3119 /*
3120 * given the alg's descriptor header template, determine whether descriptor
3121 * type and primary/secondary execution units required match the hw
3122 * capabilities description provided in the device tree node.
3123 */
3124 static int hw_supports(struct device *dev, __be32 desc_hdr_template)
3126 struct talitos_private *priv = dev_get_drvdata(dev);
3127 int ret;
3129 ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
3130 (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
3132 if (SECONDARY_EU(desc_hdr_template))
3133 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
3134 & priv->exec_units);
3136 return ret;
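/*
 * Example: an IPSEC_ESP AEAD template that names the AESU as primary and
 * the MDEU as secondary EU is only registered when its descriptor type
 * bit is set in fsl,descriptor-types-mask and both EU bits are set in
 * fsl,exec-units-mask.
 */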
3139 static int talitos_remove(struct platform_device *ofdev)
3141 struct device *dev = &ofdev->dev;
3142 struct talitos_private *priv = dev_get_drvdata(dev);
3143 struct talitos_crypto_alg *t_alg, *n;
3144 int i;
3146 list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
3147 switch (t_alg->algt.type) {
3148 case CRYPTO_ALG_TYPE_SKCIPHER:
3149 crypto_unregister_skcipher(&t_alg->algt.alg.skcipher);
3150 break;
3151 case CRYPTO_ALG_TYPE_AEAD:
3152 crypto_unregister_aead(&t_alg->algt.alg.aead);
3153 break;
3154 case CRYPTO_ALG_TYPE_AHASH:
3155 crypto_unregister_ahash(&t_alg->algt.alg.hash);
3156 break;
3157 }
3158 list_del(&t_alg->entry);
3161 if (hw_supports(dev, DESC_HDR_SEL0_RNG))
3162 talitos_unregister_rng(dev);
3164 for (i = 0; i < 2; i++)
3165 if (priv->irq[i]) {
3166 free_irq(priv->irq[i], dev);
3167 irq_dispose_mapping(priv->irq[i]);
3168 }
3170 tasklet_kill(&priv->done_task[0]);
3171 if (priv->irq[1])
3172 tasklet_kill(&priv->done_task[1]);
3174 return 0;
3177 static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
3178 struct talitos_alg_template
3179 *template)
3181 struct talitos_private *priv = dev_get_drvdata(dev);
3182 struct talitos_crypto_alg *t_alg;
3183 struct crypto_alg *alg;
3185 t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
3186 GFP_KERNEL);
3187 if (!t_alg)
3188 return ERR_PTR(-ENOMEM);
3190 t_alg->algt = *template;
3192 switch (t_alg->algt.type) {
3193 case CRYPTO_ALG_TYPE_SKCIPHER:
3194 alg = &t_alg->algt.alg.skcipher.base;
3195 alg->cra_exit = talitos_cra_exit;
3196 t_alg->algt.alg.skcipher.init = talitos_cra_init_skcipher;
3197 t_alg->algt.alg.skcipher.setkey =
3198 t_alg->algt.alg.skcipher.setkey ?: skcipher_setkey;
3199 t_alg->algt.alg.skcipher.encrypt = skcipher_encrypt;
3200 t_alg->algt.alg.skcipher.decrypt = skcipher_decrypt;
3201 if (!strcmp(alg->cra_name, "ctr(aes)") && !has_ftr_sec1(priv) &&
3202 DESC_TYPE(t_alg->algt.desc_hdr_template) !=
3203 DESC_TYPE(DESC_HDR_TYPE_AESU_CTR_NONSNOOP)) {
3204 devm_kfree(dev, t_alg);
3205 return ERR_PTR(-ENOTSUPP);
3206 }
3207 break;
3208 case CRYPTO_ALG_TYPE_AEAD:
3209 alg = &t_alg->algt.alg.aead.base;
3210 alg->cra_exit = talitos_cra_exit;
3211 t_alg->algt.alg.aead.init = talitos_cra_init_aead;
3212 t_alg->algt.alg.aead.setkey = t_alg->algt.alg.aead.setkey ?:
3213 aead_setkey;
3214 t_alg->algt.alg.aead.encrypt = aead_encrypt;
3215 t_alg->algt.alg.aead.decrypt = aead_decrypt;
3216 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
3217 !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
3218 devm_kfree(dev, t_alg);
3219 return ERR_PTR(-ENOTSUPP);
3220 }
3221 break;
3222 case CRYPTO_ALG_TYPE_AHASH:
3223 alg = &t_alg->algt.alg.hash.halg.base;
3224 alg->cra_init = talitos_cra_init_ahash;
3225 alg->cra_exit = talitos_cra_exit;
3226 t_alg->algt.alg.hash.init = ahash_init;
3227 t_alg->algt.alg.hash.update = ahash_update;
3228 t_alg->algt.alg.hash.final = ahash_final;
3229 t_alg->algt.alg.hash.finup = ahash_finup;
3230 t_alg->algt.alg.hash.digest = ahash_digest;
3231 if (!strncmp(alg->cra_name, "hmac", 4))
3232 t_alg->algt.alg.hash.setkey = ahash_setkey;
3233 t_alg->algt.alg.hash.import = ahash_import;
3234 t_alg->algt.alg.hash.export = ahash_export;
3236 if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
3237 !strncmp(alg->cra_name, "hmac", 4)) {
3238 devm_kfree(dev, t_alg);
3239 return ERR_PTR(-ENOTSUPP);
3241 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
3242 (!strcmp(alg->cra_name, "sha224") ||
3243 !strcmp(alg->cra_name, "hmac(sha224)"))) {
3244 t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
3245 t_alg->algt.desc_hdr_template =
3246 DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3247 DESC_HDR_SEL0_MDEUA |
3248 DESC_HDR_MODE0_MDEU_SHA256;
3249 }
3250 break;
3251 default:
3252 dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
3253 devm_kfree(dev, t_alg);
3254 return ERR_PTR(-EINVAL);
3255 }
3257 alg->cra_module = THIS_MODULE;
3258 if (t_alg->algt.priority)
3259 alg->cra_priority = t_alg->algt.priority;
3260 else
3261 alg->cra_priority = TALITOS_CRA_PRIORITY;
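/*
 * A 4-byte alignmask is advertised on SEC1, presumably because that
 * generation cannot DMA from unaligned buffers; SEC2+ accepts data at
 * any alignment.
 */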
3262 if (has_ftr_sec1(priv))
3263 alg->cra_alignmask = 3;
3264 else
3265 alg->cra_alignmask = 0;
3266 alg->cra_ctxsize = sizeof(struct talitos_ctx);
3267 alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;
3269 t_alg->dev = dev;
3271 return t_alg;
3274 static int talitos_probe_irq(struct platform_device *ofdev)
3276 struct device *dev = &ofdev->dev;
3277 struct device_node *np = ofdev->dev.of_node;
3278 struct talitos_private *priv = dev_get_drvdata(dev);
3279 int err;
3280 bool is_sec1 = has_ftr_sec1(priv);
3282 priv->irq[0] = irq_of_parse_and_map(np, 0);
3283 if (!priv->irq[0]) {
3284 dev_err(dev, "failed to map irq\n");
3285 return -EINVAL;
3286 }
3287 if (is_sec1) {
3288 err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
3289 dev_driver_string(dev), dev);
3290 goto primary_out;
3291 }
3293 priv->irq[1] = irq_of_parse_and_map(np, 1);
3295 /* get the primary irq line */
3296 if (!priv->irq[1]) {
3297 err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
3298 dev_driver_string(dev), dev);
3299 goto primary_out;
3300 }
3302 err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
3303 dev_driver_string(dev), dev);
3304 if (err)
3305 goto primary_out;
3307 /* get the secondary irq line */
3308 err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
3309 dev_driver_string(dev), dev);
3310 if (err) {
3311 dev_err(dev, "failed to request secondary irq\n");
3312 irq_dispose_mapping(priv->irq[1]);
3313 priv->irq[1] = 0;
3314 }
3316 return err;
3318 primary_out:
3319 if (err) {
3320 dev_err(dev, "failed to request primary irq\n");
3321 irq_dispose_mapping(priv->irq[0]);
3322 priv->irq[0] = 0;
3323 }
3325 return err;
3328 static int talitos_probe(struct platform_device *ofdev)
3330 struct device *dev = &ofdev->dev;
3331 struct device_node *np = ofdev->dev.of_node;
3332 struct talitos_private *priv;
3333 int i, err;
3334 int stride;
3335 struct resource *res;
3337 priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
3338 if (!priv)
3339 return -ENOMEM;
3341 INIT_LIST_HEAD(&priv->alg_list);
3343 dev_set_drvdata(dev, priv);
3345 priv->ofdev = ofdev;
3347 spin_lock_init(&priv->reg_lock);
3349 res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
3350 if (!res)
3351 return -ENXIO;
3352 priv->reg = devm_ioremap(dev, res->start, resource_size(res));
3353 if (!priv->reg) {
3354 dev_err(dev, "failed to of_iomap\n");
3355 err = -ENOMEM;
3356 goto err_out;
3357 }
3359 /* get SEC version capabilities from device tree */
3360 of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
3361 of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
3362 of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
3363 of_property_read_u32(np, "fsl,descriptor-types-mask",
3364 &priv->desc_types);
3366 if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
3367 !priv->exec_units || !priv->desc_types) {
3368 dev_err(dev, "invalid property data in device tree node\n");
3369 return -EINVAL;
3370 }
3373 if (of_device_is_compatible(np, "fsl,sec3.0"))
3374 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
3376 if (of_device_is_compatible(np, "fsl,sec2.1"))
3377 priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
3378 TALITOS_FTR_SHA224_HWINIT |
3379 TALITOS_FTR_HMAC_OK;
3381 if (of_device_is_compatible(np, "fsl,sec1.0"))
3382 priv->features |= TALITOS_FTR_SEC1;
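/*
 * Select the per-variant register map: SEC1.x parts lay out the
 * execution unit registers differently from SEC2.x/3.x and use a
 * different channel stride, chosen below.
 */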
3384 if (of_device_is_compatible(np, "fsl,sec1.2")) {
3385 priv->reg_deu = priv->reg + TALITOS12_DEU;
3386 priv->reg_aesu = priv->reg + TALITOS12_AESU;
3387 priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
3388 stride = TALITOS1_CH_STRIDE;
3389 } else if (of_device_is_compatible(np, "fsl,sec1.0")) {
3390 priv->reg_deu = priv->reg + TALITOS10_DEU;
3391 priv->reg_aesu = priv->reg + TALITOS10_AESU;
3392 priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
3393 priv->reg_afeu = priv->reg + TALITOS10_AFEU;
3394 priv->reg_rngu = priv->reg + TALITOS10_RNGU;
3395 priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
3396 stride = TALITOS1_CH_STRIDE;
3398 priv->reg_deu = priv->reg + TALITOS2_DEU;
3399 priv->reg_aesu = priv->reg + TALITOS2_AESU;
3400 priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
3401 priv->reg_afeu = priv->reg + TALITOS2_AFEU;
3402 priv->reg_rngu = priv->reg + TALITOS2_RNGU;
3403 priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
3404 priv->reg_keu = priv->reg + TALITOS2_KEU;
3405 priv->reg_crcu = priv->reg + TALITOS2_CRCU;
3406 stride = TALITOS2_CH_STRIDE;
3409 err = talitos_probe_irq(ofdev);
3410 if (err)
3411 goto err_out;
3413 if (has_ftr_sec1(priv)) {
3414 if (priv->num_channels == 1)
3415 tasklet_init(&priv->done_task[0], talitos1_done_ch0,
3416 (unsigned long)dev);
3417 else
3418 tasklet_init(&priv->done_task[0], talitos1_done_4ch,
3419 (unsigned long)dev);
3420 } else {
3421 if (priv->irq[1]) {
3422 tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
3423 (unsigned long)dev);
3424 tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
3425 (unsigned long)dev);
3426 } else if (priv->num_channels == 1) {
3427 tasklet_init(&priv->done_task[0], talitos2_done_ch0,
3428 (unsigned long)dev);
3429 } else {
3430 tasklet_init(&priv->done_task[0], talitos2_done_4ch,
3431 (unsigned long)dev);
3435 priv->chan = devm_kcalloc(dev,
3436 priv->num_channels,
3437 sizeof(struct talitos_channel),
3438 GFP_KERNEL);
3439 if (!priv->chan) {
3440 dev_err(dev, "failed to allocate channel management space\n");
3441 err = -ENOMEM;
3442 goto err_out;
3443 }
3445 priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);
3447 for (i = 0; i < priv->num_channels; i++) {
3448 priv->chan[i].reg = priv->reg + stride * (i + 1);
3449 if (!priv->irq[1] || !(i & 1))
3450 priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;
3452 spin_lock_init(&priv->chan[i].head_lock);
3453 spin_lock_init(&priv->chan[i].tail_lock);
3455 priv->chan[i].fifo = devm_kcalloc(dev,
3456 priv->fifo_len,
3457 sizeof(struct talitos_request),
3458 GFP_KERNEL);
3459 if (!priv->chan[i].fifo) {
3460 dev_err(dev, "failed to allocate request fifo %d\n", i);
3461 err = -ENOMEM;
3462 goto err_out;
3463 }
3465 atomic_set(&priv->chan[i].submit_count,
3466 -(priv->chfifo_len - 1));
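/*
 * The counter is biased negative so that atomic_inc_return() in the
 * submit path turns positive exactly when the channel fifo would
 * overflow, which is how the driver applies -EAGAIN backpressure.
 */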
3469 dma_set_mask(dev, DMA_BIT_MASK(36));
3471 /* reset and initialize the h/w */
3472 err = init_device(dev);
3473 if (err) {
3474 dev_err(dev, "failed to initialize device\n");
3475 goto err_out;
3476 }
3478 /* register the RNG, if available */
3479 if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
3480 err = talitos_register_rng(dev);
3481 if (err) {
3482 dev_err(dev, "failed to register hwrng: %d\n", err);
3483 goto err_out;
3484 } else
3485 dev_info(dev, "hwrng\n");
3488 /* register crypto algorithms the device supports */
3489 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3490 if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
3491 struct talitos_crypto_alg *t_alg;
3492 struct crypto_alg *alg = NULL;
3494 t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
3495 if (IS_ERR(t_alg)) {
3496 err = PTR_ERR(t_alg);
3497 if (err == -ENOTSUPP)
3498 continue;
3499 goto err_out;
3500 }
3502 switch (t_alg->algt.type) {
3503 case CRYPTO_ALG_TYPE_SKCIPHER:
3504 err = crypto_register_skcipher(
3505 &t_alg->algt.alg.skcipher);
3506 alg = &t_alg->algt.alg.skcipher.base;
3507 break;
3509 case CRYPTO_ALG_TYPE_AEAD:
3510 err = crypto_register_aead(
3511 &t_alg->algt.alg.aead);
3512 alg = &t_alg->algt.alg.aead.base;
3513 break;
3515 case CRYPTO_ALG_TYPE_AHASH:
3516 err = crypto_register_ahash(
3517 &t_alg->algt.alg.hash);
3518 alg = &t_alg->algt.alg.hash.halg.base;
3519 break;
3520 }
3521 if (err) {
3522 dev_err(dev, "%s alg registration failed\n",
3523 alg->cra_driver_name);
3524 devm_kfree(dev, t_alg);
3525 } else
3526 list_add_tail(&t_alg->entry, &priv->alg_list);
3529 if (!list_empty(&priv->alg_list))
3530 dev_info(dev, "%s algorithms registered in /proc/crypto\n",
3531 (char *)of_get_property(np, "compatible", NULL));
3533 return 0;
3535 err_out:
3536 talitos_remove(ofdev);
3538 return err;
3541 static const struct of_device_id talitos_match[] = {
3542 #ifdef CONFIG_CRYPTO_DEV_TALITOS1
3543 {
3544 .compatible = "fsl,sec1.0",
3545 },
3546 #endif
3547 #ifdef CONFIG_CRYPTO_DEV_TALITOS2
3548 {
3549 .compatible = "fsl,sec2.0",
3550 },
3551 #endif
3552 {},
3553 };
3554 MODULE_DEVICE_TABLE(of, talitos_match);
3556 static struct platform_driver talitos_driver = {
3557 .driver = {
3558 .name = "talitos",
3559 .of_match_table = talitos_match,
3560 },
3561 .probe = talitos_probe,
3562 .remove = talitos_remove,
3563 };
3565 module_platform_driver(talitos_driver);
3567 MODULE_LICENSE("GPL");
3568 MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
3569 MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");