/**
 * Copyright (C) ST-Ericsson SA 2010
 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
 * Author: Andreas Westin <andreas.westin@stericsson.com> for ST-Ericsson.
 * License terms: GNU General Public License (GPL) version 2
 */
#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/crypto.h>
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/irqreturn.h>
#include <linux/klist.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/regulator/consumer.h>
#include <linux/semaphore.h>
#include <linux/platform_data/dma-ste-dma40.h>

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/des.h>
#include <crypto/scatterwalk.h>

#include <linux/platform_data/crypto-ux500.h>

#include "cryp_p.h"
#include "cryp.h"
#define CRYP_MAX_KEY_SIZE	32
#define BYTES_PER_WORD		4

static int cryp_mode;
static atomic_t session_id;
static struct stedma40_chan_cfg *mem_to_engine;
static struct stedma40_chan_cfg *engine_to_mem;
/**
 * struct cryp_driver_data - data specific to the driver.
 *
 * @device_list: A list of registered devices to choose from.
 * @device_allocation: A semaphore initialized with number of devices.
 */
struct cryp_driver_data {
	struct klist device_list;
	struct semaphore device_allocation;
};
/**
 * struct cryp_ctx - Crypto context
 * @config: Crypto mode.
 * @key: Key buffer of up to CRYP_MAX_KEY_SIZE bytes.
 * @keylen: Length of key.
 * @iv: Pointer to initialization vector.
 * @indata: Pointer to indata.
 * @outdata: Pointer to outdata.
 * @datalen: Length of indata.
 * @outlen: Length of outdata.
 * @blocksize: Size of blocks.
 * @updated: Updated flag.
 * @dev_ctx: Device dependent context.
 * @device: Pointer to the device.
 * @session_id: Session id, compared against the global session_id counter.
 */
struct cryp_ctx {
	struct cryp_config config;
	u8 key[CRYP_MAX_KEY_SIZE];
	u32 keylen;
	u8 *iv;
	const u8 *indata;
	u8 *outdata;
	u32 datalen;
	u32 outlen;
	u32 blocksize;
	u8 updated;
	struct cryp_device_context dev_ctx;
	struct cryp_device_data *device;
	u32 session_id;
};
static struct cryp_driver_data driver_data;
/**
 * uint8p_to_uint32_be - 4*uint8 to uint32 big endian
 * @in: Data to convert.
 */
static inline u32 uint8p_to_uint32_be(u8 *in)
{
	u32 *data = (u32 *)in;

	return cpu_to_be32p(data);
}
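/*
 * Worked example (editor's illustration, not part of the original driver):
 * on a little-endian ARM core the bytes {0x12, 0x34, 0x56, 0x78} are read
 * as the u32 0x78563412; cpu_to_be32p() then byte-swaps that value, so the
 * function returns 0x12345678, i.e. in[0] ends up in the most significant
 * byte.
 */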
/**
 * swap_bits_in_byte - mirror the bits in a byte
 * @b: the byte to be mirrored
 *
 * The bits are swapped the following way:
 *  Byte b include bits 0-7, nibble 1 (n1) include bits 0-3 and
 *  nibble 2 (n2) bits 4-7.
 *
 *  Nibble 1 (n1):
 *  (The "old" (moved) bit is replaced with a zero)
 *  1. Move bit 6 and 7, 4 positions to the left.
 *  2. Move bit 3 and 5, 2 positions to the left.
 *  3. Move bit 1-4, 1 position to the left.
 *
 *  Nibble 2 (n2):
 *  1. Move bit 0 and 1, 4 positions to the right.
 *  2. Move bit 2 and 4, 2 positions to the right.
 *  3. Move bit 3-6, 1 position to the right.
 *
 *  Combine the two nibbles to a complete and swapped byte.
 */
static inline u8 swap_bits_in_byte(u8 b)
{
#define R_SHIFT_4_MASK	0xc0 /* Bits 6 and 7, right shift 4 */
#define R_SHIFT_2_MASK	0x28 /* (After right shift 4) Bits 3 and 5,
				right shift 2 */
#define R_SHIFT_1_MASK	0x1e /* (After right shift 2) Bits 1-4,
				right shift 1 */
#define L_SHIFT_4_MASK	0x03 /* Bits 0 and 1, left shift 4 */
#define L_SHIFT_2_MASK	0x14 /* (After left shift 4) Bits 2 and 4,
				left shift 2 */
#define L_SHIFT_1_MASK	0x78 /* (After left shift 1) Bits 3-6,
				left shift 1 */

	u8 n1;
	u8 n2;

	/* Swap most significant nibble */
	/* Right shift 4, bits 6 and 7 */
	n1 = ((b  & R_SHIFT_4_MASK) >> 4) | (b  & ~(R_SHIFT_4_MASK >> 4));
	/* Right shift 2, bits 3 and 5 */
	n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
	/* Right shift 1, bits 1-4 */
	n1 = (n1  & R_SHIFT_1_MASK) >> 1;

	/* Swap least significant nibble */
	/* Left shift 4, bits 0 and 1 */
	n2 = ((b  & L_SHIFT_4_MASK) << 4) | (b  & ~(L_SHIFT_4_MASK << 4));
	/* Left shift 2, bits 2 and 4 */
	n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
	/* Left shift 1, bits 3-6 */
	n2 = (n2  & L_SHIFT_1_MASK) << 1;

	return n1 | n2;
}
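/*
 * Worked example (editor's illustration): the mirroring is its own inverse.
 * swap_bits_in_byte(0x01) == 0x80 and swap_bits_in_byte(0x35) == 0xac
 * (0011 0101 read backwards is 1010 1100); applying the function twice
 * returns the original byte.
 */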
static inline void swap_words_in_key_and_bits_in_byte(const u8 *in,
						      u8 *out, u32 len)
{
	unsigned int i = 0;
	int j;
	int index = 0;

	j = len - BYTES_PER_WORD;
	while (j >= 0) {
		for (i = 0; i < BYTES_PER_WORD; i++) {
			index = len - j - BYTES_PER_WORD + i;
			out[j + i] =
				swap_bits_in_byte(in[index]);
		}
		j -= BYTES_PER_WORD;
	}
}
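/*
 * Worked example (editor's illustration): for a 16-byte AES key the loop
 * writes input word 0 to output word 3, input word 1 to output word 2, and
 * so on, emitting the 32-bit words in reverse order while every byte in
 * them is bit-mirrored by swap_bits_in_byte() on the way out.
 */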
static void add_session_id(struct cryp_ctx *ctx)
{
	/*
	 * We never want 0 to be a valid value, since this is the default value
	 * for the software context.
	 */
	if (unlikely(atomic_inc_and_test(&session_id)))
		atomic_inc(&session_id);

	ctx->session_id = atomic_read(&session_id);
}
static irqreturn_t cryp_interrupt_handler(int irq, void *param)
{
	struct cryp_ctx *ctx;
	int i;
	struct cryp_device_data *device_data;

	if (param == NULL) {
		BUG_ON(!param);
		return IRQ_HANDLED;
	}

	/* The device is coming from the one found in hw_crypt_noxts. */
	device_data = (struct cryp_device_data *)param;

	ctx = device_data->current_ctx;

	if (ctx == NULL) {
		BUG_ON(!ctx);
		return IRQ_HANDLED;
	}

	dev_dbg(ctx->device->dev, "[%s] (len: %d) %s, ", __func__, ctx->outlen,
		cryp_pending_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO) ?
		"out" : "in");

	if (cryp_pending_irq_src(device_data,
				 CRYP_IRQ_SRC_OUTPUT_FIFO)) {
		if (ctx->outlen / ctx->blocksize > 0) {
			for (i = 0; i < ctx->blocksize / 4; i++) {
				*(u32 *)ctx->outdata = readl_relaxed(
						&device_data->base->dout);
				ctx->outdata += 4;
				ctx->outlen -= 4;
			}

			if (ctx->outlen == 0) {
				cryp_disable_irq_src(device_data,
						     CRYP_IRQ_SRC_OUTPUT_FIFO);
			}
		}
	} else if (cryp_pending_irq_src(device_data,
					CRYP_IRQ_SRC_INPUT_FIFO)) {
		if (ctx->datalen / ctx->blocksize > 0) {
			for (i = 0 ; i < ctx->blocksize / 4; i++) {
				writel_relaxed(*(const u32 *)ctx->indata,
					       &device_data->base->din);
				ctx->indata += 4;
				ctx->datalen -= 4;
			}

			if (ctx->datalen == 0)
				cryp_disable_irq_src(device_data,
						     CRYP_IRQ_SRC_INPUT_FIFO);

			if (ctx->config.algomode == CRYP_ALGO_AES_XTS) {
				CRYP_PUT_BITS(&device_data->base->cr,
					      CRYP_START_ENABLE,
					      CRYP_CR_START_POS,
					      CRYP_CR_START_MASK);

				cryp_wait_until_done(device_data);
			}
		}
	}

	return IRQ_HANDLED;
}
static int mode_is_aes(enum cryp_algo_mode mode)
{
	return	CRYP_ALGO_AES_ECB == mode ||
		CRYP_ALGO_AES_CBC == mode ||
		CRYP_ALGO_AES_CTR == mode ||
		CRYP_ALGO_AES_XTS == mode;
}
static int cfg_iv(struct cryp_device_data *device_data, u32 left, u32 right,
		  enum cryp_init_vector_index index)
{
	struct cryp_init_vector_value vector_value;

	dev_dbg(device_data->dev, "[%s]", __func__);

	vector_value.init_value_left = left;
	vector_value.init_value_right = right;

	return cryp_configure_init_vector(device_data,
					  index,
					  vector_value);
}
static int cfg_ivs(struct cryp_device_data *device_data, struct cryp_ctx *ctx)
{
	int i;
	int status = 0;
	int num_of_regs = ctx->blocksize / 8;
	u32 iv[AES_BLOCK_SIZE / 4];

	dev_dbg(device_data->dev, "[%s]", __func__);

	/*
	 * Since we loop on num_of_regs we need to have a check in case
	 * someone provides an incorrect blocksize which would force calling
	 * cfg_iv with i greater than 2 which is an error.
	 */
	if (num_of_regs > 2) {
		dev_err(device_data->dev, "[%s] Incorrect blocksize %d",
			__func__, ctx->blocksize);
		return -EINVAL;
	}

	for (i = 0; i < ctx->blocksize / 4; i++)
		iv[i] = uint8p_to_uint32_be(ctx->iv + i*4);

	for (i = 0; i < num_of_regs; i++) {
		status = cfg_iv(device_data, iv[i*2], iv[i*2+1],
				(enum cryp_init_vector_index) i);
		if (status != 0)
			return status;
	}
	return status;
}
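/*
 * Worked example (editor's illustration): for AES the blocksize is 16, so
 * the 16-byte IV is packed into four big-endian words iv[0..3] and written
 * as two register pairs: cfg_iv(iv[0], iv[1], index 0) and
 * cfg_iv(iv[2], iv[3], index 1). num_of_regs = 16 / 8 = 2, which is why
 * anything above 2 is rejected as a bad blocksize.
 */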
static int set_key(struct cryp_device_data *device_data,
		   u32 left_key,
		   u32 right_key,
		   enum cryp_key_reg_index index)
{
	struct cryp_key_value key_value;
	int cryp_error;

	dev_dbg(device_data->dev, "[%s]", __func__);

	key_value.key_value_left = left_key;
	key_value.key_value_right = right_key;

	cryp_error = cryp_configure_key_values(device_data,
					       index,
					       key_value);
	if (cryp_error != 0)
		dev_err(device_data->dev, "[%s]: "
			"cryp_configure_key_values() failed!", __func__);

	return cryp_error;
}
static int cfg_keys(struct cryp_ctx *ctx)
{
	int i;
	int num_of_regs = ctx->keylen / 8;
	u32 swapped_key[CRYP_MAX_KEY_SIZE / 4];
	int cryp_error = 0;

	dev_dbg(ctx->device->dev, "[%s]", __func__);

	if (mode_is_aes(ctx->config.algomode)) {
		swap_words_in_key_and_bits_in_byte((u8 *)ctx->key,
						   (u8 *)swapped_key,
						   ctx->keylen);
	} else {
		for (i = 0; i < ctx->keylen / 4; i++)
			swapped_key[i] = uint8p_to_uint32_be(ctx->key + i*4);
	}

	for (i = 0; i < num_of_regs; i++) {
		cryp_error = set_key(ctx->device,
				     *(((u32 *)swapped_key)+i*2),
				     *(((u32 *)swapped_key)+i*2+1),
				     (enum cryp_key_reg_index) i);

		if (cryp_error != 0) {
			dev_err(ctx->device->dev, "[%s]: set_key() failed!",
				__func__);
			return cryp_error;
		}
	}
	return cryp_error;
}
static int cryp_setup_context(struct cryp_ctx *ctx,
			      struct cryp_device_data *device_data)
{
	u32 control_register = CRYP_CR_DEFAULT;

	switch (cryp_mode) {
	case CRYP_MODE_INTERRUPT:
		writel_relaxed(CRYP_IMSC_DEFAULT, &device_data->base->imsc);
		break;

	case CRYP_MODE_DMA:
		writel_relaxed(CRYP_DMACR_DEFAULT, &device_data->base->dmacr);
		break;

	default:
		break;
	}

	if (ctx->updated == 0) {
		cryp_flush_inoutfifo(device_data);
		if (cfg_keys(ctx) != 0) {
			dev_err(ctx->device->dev, "[%s]: cfg_keys failed!",
				__func__);
			return -EINVAL;
		}

		if (ctx->iv &&
		    CRYP_ALGO_AES_ECB != ctx->config.algomode &&
		    CRYP_ALGO_DES_ECB != ctx->config.algomode &&
		    CRYP_ALGO_TDES_ECB != ctx->config.algomode) {
			if (cfg_ivs(device_data, ctx) != 0)
				return -EPERM;
		}

		cryp_set_configuration(device_data, &ctx->config,
				       &control_register);
		add_session_id(ctx);
	} else if (ctx->updated == 1 &&
		   ctx->session_id != atomic_read(&session_id)) {
		cryp_flush_inoutfifo(device_data);
		cryp_restore_device_context(device_data, &ctx->dev_ctx);

		add_session_id(ctx);
		control_register = ctx->dev_ctx.cr;
	} else
		control_register = ctx->dev_ctx.cr;

	writel(control_register |
	       (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS),
	       &device_data->base->cr);

	return 0;
}
static int cryp_get_device_data(struct cryp_ctx *ctx,
				struct cryp_device_data **device_data)
{
	int ret;
	struct klist_iter device_iterator;
	struct klist_node *device_node;
	struct cryp_device_data *local_device_data = NULL;
	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	/* Wait until a device is available */
	ret = down_interruptible(&driver_data.device_allocation);
	if (ret)
		return ret; /* Interrupted */

	/* Select a device */
	klist_iter_init(&driver_data.device_list, &device_iterator);

	device_node = klist_next(&device_iterator);
	while (device_node) {
		local_device_data = container_of(device_node,
					struct cryp_device_data, list_node);
		spin_lock(&local_device_data->ctx_lock);
		/* current_ctx allocates a device, NULL = unallocated */
		if (local_device_data->current_ctx) {
			device_node = klist_next(&device_iterator);
		} else {
			local_device_data->current_ctx = ctx;
			ctx->device = local_device_data;
			spin_unlock(&local_device_data->ctx_lock);
			break;
		}
		spin_unlock(&local_device_data->ctx_lock);
	}
	klist_iter_exit(&device_iterator);

	if (!device_node) {
		/*
		 * No free device found.
		 * Since we allocated a device with down_interruptible, this
		 * should not be able to happen.
		 * Number of available devices, which are contained in
		 * device_allocation, is therefore decremented by not doing
		 * an up(device_allocation).
		 */
		return -EBUSY;
	}

	*device_data = local_device_data;

	return 0;
}
static void cryp_dma_setup_channel(struct cryp_device_data *device_data,
				   struct device *dev)
{
	dma_cap_zero(device_data->dma.mask);
	dma_cap_set(DMA_SLAVE, device_data->dma.mask);

	device_data->dma.cfg_mem2cryp = mem_to_engine;
	device_data->dma.chan_mem2cryp =
		dma_request_channel(device_data->dma.mask,
				    stedma40_filter,
				    device_data->dma.cfg_mem2cryp);

	device_data->dma.cfg_cryp2mem = engine_to_mem;
	device_data->dma.chan_cryp2mem =
		dma_request_channel(device_data->dma.mask,
				    stedma40_filter,
				    device_data->dma.cfg_cryp2mem);

	init_completion(&device_data->dma.cryp_dma_complete);
}
static void cryp_dma_out_callback(void *data)
{
	struct cryp_ctx *ctx = (struct cryp_ctx *) data;
	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	complete(&ctx->device->dma.cryp_dma_complete);
}
static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
				 struct scatterlist *sg,
				 int len,
				 enum dma_data_direction direction)
{
	struct dma_async_tx_descriptor *desc;
	struct dma_chan *channel = NULL;
	dma_cookie_t cookie;

	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	if (unlikely(!IS_ALIGNED((u32)sg, 4))) {
		dev_err(ctx->device->dev, "[%s]: Data in sg list isn't "
			"aligned! Addr: 0x%08x", __func__, (u32)sg);
		return -EFAULT;
	}

	switch (direction) {
	case DMA_TO_DEVICE:
		channel = ctx->device->dma.chan_mem2cryp;
		ctx->device->dma.sg_src = sg;
		ctx->device->dma.sg_src_len = dma_map_sg(channel->device->dev,
						 ctx->device->dma.sg_src,
						 ctx->device->dma.nents_src,
						 direction);

		if (!ctx->device->dma.sg_src_len) {
			dev_dbg(ctx->device->dev,
				"[%s]: Could not map the sg list (TO_DEVICE)",
				__func__);
			return -EFAULT;
		}

		dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
			"(TO_DEVICE)", __func__);

		desc = channel->device->device_prep_slave_sg(channel,
				ctx->device->dma.sg_src,
				ctx->device->dma.sg_src_len,
				direction, DMA_CTRL_ACK, NULL);
		break;

	case DMA_FROM_DEVICE:
		channel = ctx->device->dma.chan_cryp2mem;
		ctx->device->dma.sg_dst = sg;
		ctx->device->dma.sg_dst_len = dma_map_sg(channel->device->dev,
						 ctx->device->dma.sg_dst,
						 ctx->device->dma.nents_dst,
						 direction);

		if (!ctx->device->dma.sg_dst_len) {
			dev_dbg(ctx->device->dev,
				"[%s]: Could not map the sg list (FROM_DEVICE)",
				__func__);
			return -EFAULT;
		}

		dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
			"(FROM_DEVICE)", __func__);

		desc = channel->device->device_prep_slave_sg(channel,
				ctx->device->dma.sg_dst,
				ctx->device->dma.sg_dst_len,
				direction,
				DMA_CTRL_ACK |
				DMA_PREP_INTERRUPT, NULL);

		desc->callback = cryp_dma_out_callback;
		desc->callback_param = ctx;
		break;

	default:
		dev_dbg(ctx->device->dev, "[%s]: Invalid DMA direction",
			__func__);
		return -EFAULT;
	}

	cookie = desc->tx_submit(desc);
	dma_async_issue_pending(channel);

	return 0;
}
static void cryp_dma_done(struct cryp_ctx *ctx)
{
	struct dma_chan *chan;

	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	chan = ctx->device->dma.chan_mem2cryp;
	chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
		     ctx->device->dma.sg_src_len, DMA_TO_DEVICE);

	chan = ctx->device->dma.chan_cryp2mem;
	chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
		     ctx->device->dma.sg_dst_len, DMA_FROM_DEVICE);
}
static int cryp_dma_write(struct cryp_ctx *ctx, struct scatterlist *sg,
			  int len)
{
	int error = cryp_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);
	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	if (error) {
		dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
			"failed", __func__);
		return error;
	}

	return len;
}

static int cryp_dma_read(struct cryp_ctx *ctx, struct scatterlist *sg, int len)
{
	int error = cryp_set_dma_transfer(ctx, sg, len, DMA_FROM_DEVICE);
	if (error) {
		dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
			"failed", __func__);
		return error;
	}

	return len;
}
static void cryp_polling_mode(struct cryp_ctx *ctx,
			      struct cryp_device_data *device_data)
{
	int len = ctx->blocksize / BYTES_PER_WORD;
	int remaining_length = ctx->datalen;
	u32 *indata = (u32 *)ctx->indata;
	u32 *outdata = (u32 *)ctx->outdata;

	while (remaining_length > 0) {
		writesl(&device_data->base->din, indata, len);
		indata += len;
		remaining_length -= (len * BYTES_PER_WORD);
		cryp_wait_until_done(device_data);

		readsl(&device_data->base->dout, outdata, len);
		outdata += len;
		cryp_wait_until_done(device_data);
	}
}
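/*
 * Worked example (editor's illustration): for a 48-byte AES request the
 * blocksize is 16, so len = 16 / 4 = 4 words per block; the loop runs
 * three times, each iteration pushing one block into DIN, busy-waiting
 * until the engine is done, and pulling the processed block out of DOUT.
 */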
static int cryp_disable_power(struct device *dev,
			      struct cryp_device_data *device_data,
			      bool save_device_context)
{
	int ret = 0;

	dev_dbg(dev, "[%s]", __func__);

	spin_lock(&device_data->power_state_spinlock);
	if (!device_data->power_state)
		goto out;

	spin_lock(&device_data->ctx_lock);
	if (save_device_context && device_data->current_ctx) {
		cryp_save_device_context(device_data,
				&device_data->current_ctx->dev_ctx,
				cryp_mode);
		device_data->restore_dev_ctx = true;
	}
	spin_unlock(&device_data->ctx_lock);

	clk_disable(device_data->clk);
	ret = regulator_disable(device_data->pwr_regulator);
	if (ret)
		dev_err(dev, "[%s]: "
			"regulator_disable() failed!",
			__func__);

	device_data->power_state = false;

out:
	spin_unlock(&device_data->power_state_spinlock);

	return ret;
}
static int cryp_enable_power(
		struct device *dev,
		struct cryp_device_data *device_data,
		bool restore_device_context)
{
	int ret = 0;

	dev_dbg(dev, "[%s]", __func__);

	spin_lock(&device_data->power_state_spinlock);
	if (!device_data->power_state) {
		ret = regulator_enable(device_data->pwr_regulator);
		if (ret) {
			dev_err(dev, "[%s]: regulator_enable() failed!",
				__func__);
			goto out;
		}

		ret = clk_enable(device_data->clk);
		if (ret) {
			dev_err(dev, "[%s]: clk_enable() failed!",
				__func__);
			regulator_disable(device_data->pwr_regulator);
			goto out;
		}
		device_data->power_state = true;
	}

	if (device_data->restore_dev_ctx) {
		spin_lock(&device_data->ctx_lock);
		if (restore_device_context && device_data->current_ctx) {
			device_data->restore_dev_ctx = false;
			cryp_restore_device_context(device_data,
					&device_data->current_ctx->dev_ctx);
		}
		spin_unlock(&device_data->ctx_lock);
	}
out:
	spin_unlock(&device_data->power_state_spinlock);

	return ret;
}
static int hw_crypt_noxts(struct cryp_ctx *ctx,
			  struct cryp_device_data *device_data)
{
	int ret = 0;

	const u8 *indata = ctx->indata;
	u8 *outdata = ctx->outdata;
	u32 datalen = ctx->datalen;
	u32 outlen = datalen;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->outlen = ctx->datalen;

	if (unlikely(!IS_ALIGNED((u32)indata, 4))) {
		pr_debug(DEV_DBG_NAME " [%s]: Data isn't aligned! Addr: "
			 "0x%08x", __func__, (u32)indata);
		return -EINVAL;
	}

	ret = cryp_setup_context(ctx, device_data);

	if (ret)
		goto out;

	if (cryp_mode == CRYP_MODE_INTERRUPT) {
		cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO |
				    CRYP_IRQ_SRC_OUTPUT_FIFO);

		/*
		 * ctx->outlen is decremented in the cryp_interrupt_handler
		 * function. We had to add cpu_relax() (barrier) to make sure
		 * that gcc didn't optimize away this variable.
		 */
		while (ctx->outlen > 0)
			cpu_relax();
	} else if (cryp_mode == CRYP_MODE_POLLING ||
		   cryp_mode == CRYP_MODE_DMA) {
		/*
		 * The reason for having DMA in this if case is that if we are
		 * running cryp_mode = 2, then we use separate DMA routines for
		 * handling cipher/plaintext larger than the blocksize, except
		 * when running the normal CRYPTO_ALG_TYPE_CIPHER, where we
		 * still use the polling mode. The overhead of doing the DMA
		 * setup eats up the benefit of using it.
		 */
		cryp_polling_mode(ctx, device_data);
	} else {
		dev_err(ctx->device->dev, "[%s]: Invalid operation mode!",
			__func__);
		ret = -EPERM;
		goto out;
	}

	cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
	ctx->updated = 1;

out:
	ctx->indata = indata;
	ctx->outdata = outdata;
	ctx->datalen = datalen;
	ctx->outlen = outlen;

	return ret;
}
static int get_nents(struct scatterlist *sg, int nbytes)
{
	int nents = 0;

	while (nbytes > 0) {
		nbytes -= sg->length;
		sg = scatterwalk_sg_next(sg);
		nents++;
	}

	return nents;
}
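/*
 * Worked example (editor's illustration): with scatterlist entries of 64,
 * 32 and 16 bytes and nbytes = 100, the loop counts entries until the
 * running total covers the request (64 + 32 = 96 < 100, 96 + 16 >= 100),
 * so get_nents() returns 3.
 */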
static int ablk_dma_crypt(struct ablkcipher_request *areq)
{
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	struct cryp_device_data *device_data;

	int bytes_written = 0;
	int bytes_read = 0;
	int ret;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->datalen = areq->nbytes;
	ctx->outlen = areq->nbytes;

	ret = cryp_get_device_data(ctx, &device_data);
	if (ret)
		return ret;

	ret = cryp_setup_context(ctx, device_data);
	if (ret)
		goto out;

	/* We have the device now, so store the nents in the dma struct. */
	ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen);
	ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen);

	/* Enable DMA in- and output. */
	cryp_configure_for_dma(device_data, CRYP_DMA_ENABLE_BOTH_DIRECTIONS);

	bytes_written = cryp_dma_write(ctx, areq->src, ctx->datalen);
	bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written);

	wait_for_completion(&ctx->device->dma.cryp_dma_complete);
	cryp_dma_done(ctx);

	cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
	ctx->updated = 1;

out:
	spin_lock(&device_data->ctx_lock);
	device_data->current_ctx = NULL;
	ctx->device = NULL;
	spin_unlock(&device_data->ctx_lock);

	/*
	 * The down_interruptible part for this semaphore is called in
	 * cryp_get_device_data.
	 */
	up(&driver_data.device_allocation);

	if (unlikely(bytes_written != bytes_read))
		return -EPERM;

	return ret;
}
static int ablk_crypt(struct ablkcipher_request *areq)
{
	struct ablkcipher_walk walk;
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	struct cryp_device_data *device_data;
	unsigned long src_paddr;
	unsigned long dst_paddr;
	int ret;
	int nbytes;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ret = cryp_get_device_data(ctx, &device_data);
	if (ret)
		goto out;

	ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes);
	ret = ablkcipher_walk_phys(areq, &walk);

	if (ret) {
		pr_err(DEV_DBG_NAME "[%s]: ablkcipher_walk_phys() failed!",
			__func__);
		goto out;
	}

	while ((nbytes = walk.nbytes) > 0) {
		ctx->iv = walk.iv;
		src_paddr = (page_to_phys(walk.src.page) + walk.src.offset);
		ctx->indata = phys_to_virt(src_paddr);

		dst_paddr = (page_to_phys(walk.dst.page) + walk.dst.offset);
		ctx->outdata = phys_to_virt(dst_paddr);

		ctx->datalen = nbytes - (nbytes % ctx->blocksize);

		ret = hw_crypt_noxts(ctx, device_data);
		if (ret)
			goto out;

		nbytes -= ctx->datalen;
		ret = ablkcipher_walk_done(areq, &walk, nbytes);
		if (ret)
			goto out;
	}
	ablkcipher_walk_complete(&walk);

out:
	/* Release the device */
	spin_lock(&device_data->ctx_lock);
	device_data->current_ctx = NULL;
	ctx->device = NULL;
	spin_unlock(&device_data->ctx_lock);

	/*
	 * The down_interruptible part for this semaphore is called in
	 * cryp_get_device_data.
	 */
	up(&driver_data.device_allocation);

	return ret;
}
static int aes_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	u32 *flags = &cipher->base.crt_flags;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	switch (keylen) {
	case AES_KEYSIZE_128:
		ctx->config.keysize = CRYP_KEY_SIZE_128;
		break;

	case AES_KEYSIZE_192:
		ctx->config.keysize = CRYP_KEY_SIZE_192;
		break;

	case AES_KEYSIZE_256:
		ctx->config.keysize = CRYP_KEY_SIZE_256;
		break;

	default:
		pr_err(DEV_DBG_NAME "[%s]: Unknown keylen!", __func__);
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->updated = 0;

	return 0;
}
static int des_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	u32 *flags = &cipher->base.crt_flags;
	u32 tmp[DES_EXPKEY_WORDS];
	int ret;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);
	if (keylen != DES_KEY_SIZE) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_RES_BAD_KEY_LEN",
			 __func__);
		return -EINVAL;
	}

	ret = des_ekey(tmp, key);
	if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
		pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_REQ_WEAK_KEY",
			 __func__);
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->updated = 0;
	return 0;
}
static int des3_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
				  const u8 *key, unsigned int keylen)
{
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	u32 *flags = &cipher->base.crt_flags;
	const u32 *K = (const u32 *)key;
	u32 tmp[DES3_EDE_EXPKEY_WORDS];
	int i, ret;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);
	if (keylen != DES3_EDE_KEY_SIZE) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_RES_BAD_KEY_LEN",
			 __func__);
		return -EINVAL;
	}

	/* Checking key interdependency for weak key detection. */
	if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
		     !((K[2] ^ K[4]) | (K[3] ^ K[5]))) &&
	    (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
		pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_REQ_WEAK_KEY",
			 __func__);
		return -EINVAL;
	}
	for (i = 0; i < 3; i++) {
		ret = des_ekey(tmp, key + i*DES_KEY_SIZE);
		if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
			*flags |= CRYPTO_TFM_RES_WEAK_KEY;
			pr_debug(DEV_DBG_NAME " [%s]: "
				 "CRYPTO_TFM_REQ_WEAK_KEY", __func__);
			return -EINVAL;
		}
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->updated = 0;
	return 0;
}
static int cryp_blk_encrypt(struct ablkcipher_request *areq)
{
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT;

	/* DMA does not work for DES due to a hw bug */
	if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
		return ablk_dma_crypt(areq);

	/* For everything except DMA, we run the non DMA version. */
	return ablk_crypt(areq);
}

static int cryp_blk_decrypt(struct ablkcipher_request *areq)
{
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->config.algodir = CRYP_ALGORITHM_DECRYPT;

	/* DMA does not work for DES due to a hw bug */
	if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
		return ablk_dma_crypt(areq);

	/* For everything except DMA, we run the non DMA version. */
	return ablk_crypt(areq);
}
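/*
 * Usage sketch (editor's illustration, not part of the driver): a kernel
 * client reaches these entry points through the ablkcipher API of this
 * era, e.g.:
 *
 *	struct crypto_ablkcipher *tfm =
 *		crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	struct ablkcipher_request *req =
 *		ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_ablkcipher_setkey(tfm, key, 16);	  calls aes_ablkcipher_setkey()
 *	ablkcipher_request_set_crypt(req, src, dst, nbytes, iv);
 *	crypto_ablkcipher_encrypt(req);		  calls cryp_blk_encrypt()
 */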
struct cryp_algo_template {
	enum cryp_algo_mode algomode;
	struct crypto_alg crypto;
};
static int cryp_cra_init(struct crypto_tfm *tfm)
{
	struct cryp_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cryp_algo_template *cryp_alg = container_of(alg,
			struct cryp_algo_template,
			crypto);

	ctx->config.algomode = cryp_alg->algomode;
	ctx->blocksize = crypto_tfm_alg_blocksize(tfm);

	return 0;
}
static struct cryp_algo_template cryp_algs[] = {
	{
		.algomode = CRYP_ALGO_AES_ECB,
		.crypto = {
			.cra_name = "aes",
			.cra_driver_name = "aes-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = AES_MIN_KEY_SIZE,
					.max_keysize = AES_MAX_KEY_SIZE,
					.setkey = aes_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_AES_ECB,
		.crypto = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "ecb-aes-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = AES_MIN_KEY_SIZE,
					.max_keysize = AES_MAX_KEY_SIZE,
					.setkey = aes_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_AES_CBC,
		.crypto = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "cbc-aes-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = AES_MIN_KEY_SIZE,
					.max_keysize = AES_MAX_KEY_SIZE,
					.setkey = aes_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
					.ivsize = AES_BLOCK_SIZE,
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_AES_CTR,
		.crypto = {
			.cra_name = "ctr(aes)",
			.cra_driver_name = "ctr-aes-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = AES_MIN_KEY_SIZE,
					.max_keysize = AES_MAX_KEY_SIZE,
					.setkey = aes_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
					.ivsize = AES_BLOCK_SIZE,
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_DES_ECB,
		.crypto = {
			.cra_name = "des",
			.cra_driver_name = "des-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = DES_KEY_SIZE,
					.max_keysize = DES_KEY_SIZE,
					.setkey = des_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_TDES_ECB,
		.crypto = {
			.cra_name = "des3_ede",
			.cra_driver_name = "des3_ede-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = DES3_EDE_KEY_SIZE,
					.max_keysize = DES3_EDE_KEY_SIZE,
					.setkey = des3_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_DES_ECB,
		.crypto = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "ecb-des-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = DES_KEY_SIZE,
					.max_keysize = DES_KEY_SIZE,
					.setkey = des_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_TDES_ECB,
		.crypto = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "ecb-des3_ede-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = DES3_EDE_KEY_SIZE,
					.max_keysize = DES3_EDE_KEY_SIZE,
					.setkey = des3_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_DES_CBC,
		.crypto = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "cbc-des-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = DES_KEY_SIZE,
					.max_keysize = DES_KEY_SIZE,
					.setkey = des_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
					.ivsize = DES_BLOCK_SIZE,
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_TDES_CBC,
		.crypto = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "cbc-des3_ede-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = DES3_EDE_KEY_SIZE,
					.max_keysize = DES3_EDE_KEY_SIZE,
					.setkey = des3_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
					.ivsize = DES3_EDE_BLOCK_SIZE,
				}
			}
		}
	}
};
/**
 * cryp_algs_register_all - register all CRYP algorithms with the crypto API
 */
static int cryp_algs_register_all(void)
{
	int ret;
	int i;
	int count;

	pr_debug("[%s]", __func__);

	for (i = 0; i < ARRAY_SIZE(cryp_algs); i++) {
		ret = crypto_register_alg(&cryp_algs[i].crypto);
		if (ret) {
			count = i;
			pr_err("[%s] alg registration failed",
				cryp_algs[i].crypto.cra_driver_name);
			goto unreg;
		}
	}
	return 0;
unreg:
	for (i = 0; i < count; i++)
		crypto_unregister_alg(&cryp_algs[i].crypto);
	return ret;
}
/**
 * cryp_algs_unregister_all - unregister all CRYP algorithms
 */
static void cryp_algs_unregister_all(void)
{
	int i;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	for (i = 0; i < ARRAY_SIZE(cryp_algs); i++)
		crypto_unregister_alg(&cryp_algs[i].crypto);
}
static int ux500_cryp_probe(struct platform_device *pdev)
{
	int ret;
	int cryp_error = 0;
	struct resource *res = NULL;
	struct resource *res_irq = NULL;
	struct cryp_device_data *device_data;
	struct cryp_protection_config prot = {
		.privilege_access = CRYP_STATE_ENABLE
	};
	struct device *dev = &pdev->dev;

	dev_dbg(dev, "[%s]", __func__);
	device_data = kzalloc(sizeof(struct cryp_device_data), GFP_ATOMIC);
	if (!device_data) {
		dev_err(dev, "[%s]: kzalloc() failed!", __func__);
		ret = -ENOMEM;
		goto out;
	}

	device_data->dev = dev;
	device_data->current_ctx = NULL;

	/* Grab the DMA configuration from platform data. */
	mem_to_engine = &((struct cryp_platform_data *)
			dev->platform_data)->mem_to_engine;
	engine_to_mem = &((struct cryp_platform_data *)
			dev->platform_data)->engine_to_mem;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		dev_err(dev, "[%s]: platform_get_resource() failed",
			__func__);
		ret = -ENODEV;
		goto out_kfree;
	}

	res = request_mem_region(res->start, resource_size(res), pdev->name);
	if (res == NULL) {
		dev_err(dev, "[%s]: request_mem_region() failed",
			__func__);
		ret = -EBUSY;
		goto out_kfree;
	}

	device_data->base = ioremap(res->start, resource_size(res));
	if (!device_data->base) {
		dev_err(dev, "[%s]: ioremap failed!", __func__);
		ret = -ENOMEM;
		goto out_free_mem;
	}

	spin_lock_init(&device_data->ctx_lock);
	spin_lock_init(&device_data->power_state_spinlock);

	/* Enable power for CRYP hardware block */
	device_data->pwr_regulator = regulator_get(&pdev->dev, "v-ape");
	if (IS_ERR(device_data->pwr_regulator)) {
		dev_err(dev, "[%s]: could not get cryp regulator", __func__);
		ret = PTR_ERR(device_data->pwr_regulator);
		device_data->pwr_regulator = NULL;
		goto out_unmap;
	}

	/* Enable the clk for CRYP hardware block */
	device_data->clk = clk_get(&pdev->dev, NULL);
	if (IS_ERR(device_data->clk)) {
		dev_err(dev, "[%s]: clk_get() failed!", __func__);
		ret = PTR_ERR(device_data->clk);
		goto out_regulator;
	}

	/* Enable device power (and clock) */
	ret = cryp_enable_power(device_data->dev, device_data, false);
	if (ret) {
		dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
		goto out_clk;
	}

	cryp_error = cryp_check(device_data);
	if (cryp_error != 0) {
		dev_err(dev, "[%s]: cryp_init() failed!", __func__);
		ret = -EINVAL;
		goto out_power;
	}

	cryp_error = cryp_configure_protection(device_data, &prot);
	if (cryp_error != 0) {
		dev_err(dev, "[%s]: cryp_configure_protection() failed!",
			__func__);
		ret = -EINVAL;
		goto out_power;
	}

	res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res_irq) {
		dev_err(dev, "[%s]: IORESOURCE_IRQ unavailable",
			__func__);
		ret = -ENODEV;
		goto out_power;
	}

	ret = request_irq(res_irq->start,
			  cryp_interrupt_handler,
			  0,
			  "cryp1",
			  device_data);
	if (ret) {
		dev_err(dev, "[%s]: Unable to request IRQ", __func__);
		goto out_power;
	}

	if (cryp_mode == CRYP_MODE_DMA)
		cryp_dma_setup_channel(device_data, dev);

	platform_set_drvdata(pdev, device_data);

	/* Put the new device into the device list... */
	klist_add_tail(&device_data->list_node, &driver_data.device_list);

	/* ... and signal that a new device is available. */
	up(&driver_data.device_allocation);

	atomic_set(&session_id, 1);

	ret = cryp_algs_register_all();
	if (ret) {
		dev_err(dev, "[%s]: cryp_algs_register_all() failed!",
			__func__);
		goto out_power;
	}

	return 0;

out_power:
	cryp_disable_power(device_data->dev, device_data, false);

out_clk:
	clk_put(device_data->clk);

out_regulator:
	regulator_put(device_data->pwr_regulator);

out_unmap:
	iounmap(device_data->base);

out_free_mem:
	release_mem_region(res->start, resource_size(res));

out_kfree:
	kfree(device_data);
out:
	return ret;
}
static int ux500_cryp_remove(struct platform_device *pdev)
{
	struct resource *res = NULL;
	struct resource *res_irq = NULL;
	struct cryp_device_data *device_data;

	dev_dbg(&pdev->dev, "[%s]", __func__);
	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
			__func__);
		return -ENOMEM;
	}

	/* Try to decrease the number of available devices. */
	if (down_trylock(&driver_data.device_allocation))
		return -EBUSY;

	/* Check that the device is free */
	spin_lock(&device_data->ctx_lock);
	/* current_ctx allocates a device, NULL = unallocated */
	if (device_data->current_ctx) {
		/* The device is busy */
		spin_unlock(&device_data->ctx_lock);
		/* Return the device to the pool. */
		up(&driver_data.device_allocation);
		return -EBUSY;
	}

	spin_unlock(&device_data->ctx_lock);

	/* Remove the device from the list */
	if (klist_node_attached(&device_data->list_node))
		klist_remove(&device_data->list_node);

	/* If this was the last device, remove the services */
	if (list_empty(&driver_data.device_list.k_list))
		cryp_algs_unregister_all();

	res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res_irq)
		dev_err(&pdev->dev, "[%s]: IORESOURCE_IRQ, unavailable",
			__func__);
	else {
		disable_irq(res_irq->start);
		free_irq(res_irq->start, device_data);
	}

	if (cryp_disable_power(&pdev->dev, device_data, false))
		dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
			__func__);

	clk_put(device_data->clk);
	regulator_put(device_data->pwr_regulator);

	iounmap(device_data->base);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (res)
		release_mem_region(res->start, resource_size(res));

	kfree(device_data);

	return 0;
}
static void ux500_cryp_shutdown(struct platform_device *pdev)
{
	struct resource *res_irq = NULL;
	struct cryp_device_data *device_data;

	dev_dbg(&pdev->dev, "[%s]", __func__);

	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
			__func__);
		return;
	}

	/* Check that the device is free */
	spin_lock(&device_data->ctx_lock);
	/* current_ctx allocates a device, NULL = unallocated */
	if (!device_data->current_ctx) {
		if (down_trylock(&driver_data.device_allocation))
			dev_dbg(&pdev->dev, "[%s]: Cryp still in use! "
				"Shutting down anyway...", __func__);
		/*
		 * (Allocate the device)
		 * Need to set this to non-null (dummy) value,
		 * to avoid usage during context switching.
		 */
		device_data->current_ctx++;
	}
	spin_unlock(&device_data->ctx_lock);

	/* Remove the device from the list */
	if (klist_node_attached(&device_data->list_node))
		klist_remove(&device_data->list_node);

	/* If this was the last device, remove the services */
	if (list_empty(&driver_data.device_list.k_list))
		cryp_algs_unregister_all();

	res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res_irq)
		dev_err(&pdev->dev, "[%s]: IORESOURCE_IRQ, unavailable",
			__func__);
	else {
		disable_irq(res_irq->start);
		free_irq(res_irq->start, device_data);
	}

	if (cryp_disable_power(&pdev->dev, device_data, false))
		dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
			__func__);
}
static int ux500_cryp_suspend(struct device *dev)
{
	int ret;
	struct platform_device *pdev = to_platform_device(dev);
	struct cryp_device_data *device_data;
	struct resource *res_irq;
	struct cryp_ctx *temp_ctx = NULL;

	dev_dbg(dev, "[%s]", __func__);

	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
		return -ENOMEM;
	}

	res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res_irq)
		dev_err(dev, "[%s]: IORESOURCE_IRQ, unavailable", __func__);
	else
		disable_irq(res_irq->start);

	spin_lock(&device_data->ctx_lock);
	if (!device_data->current_ctx)
		device_data->current_ctx++;
	spin_unlock(&device_data->ctx_lock);

	if (device_data->current_ctx == ++temp_ctx) {
		if (down_interruptible(&driver_data.device_allocation))
			dev_dbg(dev, "[%s]: down_interruptible() failed",
				__func__);
		ret = cryp_disable_power(dev, device_data, false);

	} else
		ret = cryp_disable_power(dev, device_data, true);

	if (ret)
		dev_err(dev, "[%s]: cryp_disable_power()", __func__);

	return ret;
}
static int ux500_cryp_resume(struct device *dev)
{
	int ret = 0;
	struct platform_device *pdev = to_platform_device(dev);
	struct cryp_device_data *device_data;
	struct resource *res_irq;
	struct cryp_ctx *temp_ctx = NULL;

	dev_dbg(dev, "[%s]", __func__);

	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
		return -ENOMEM;
	}

	spin_lock(&device_data->ctx_lock);
	if (device_data->current_ctx == ++temp_ctx)
		device_data->current_ctx = NULL;
	spin_unlock(&device_data->ctx_lock);

	if (!device_data->current_ctx)
		up(&driver_data.device_allocation);

	ret = cryp_enable_power(dev, device_data, true);

	if (ret)
		dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
	else {
		res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
		if (res_irq)
			enable_irq(res_irq->start);
	}

	return ret;
}
static SIMPLE_DEV_PM_OPS(ux500_cryp_pm, ux500_cryp_suspend, ux500_cryp_resume);

static struct platform_driver cryp_driver = {
	.probe  = ux500_cryp_probe,
	.remove = ux500_cryp_remove,
	.shutdown = ux500_cryp_shutdown,
	.driver = {
		.owner = THIS_MODULE,
		.name  = "cryp1",
		.pm    = &ux500_cryp_pm,
	}
};
static int __init ux500_cryp_mod_init(void)
{
	pr_debug("[%s] is called!", __func__);
	klist_init(&driver_data.device_list, NULL, NULL);
	/* Initialize the semaphore to 0 devices (locked state) */
	sema_init(&driver_data.device_allocation, 0);
	return platform_driver_register(&cryp_driver);
}

static void __exit ux500_cryp_mod_fini(void)
{
	pr_debug("[%s] is called!", __func__);
	platform_driver_unregister(&cryp_driver);
}

module_init(ux500_cryp_mod_init);
module_exit(ux500_cryp_mod_fini);

module_param(cryp_mode, int, 0);

MODULE_DESCRIPTION("Driver for ST-Ericsson UX500 CRYP crypto engine.");
MODULE_ALIAS("aes-all");
MODULE_ALIAS("des-all");

MODULE_LICENSE("GPL");