// SPDX-License-Identifier: GPL-2.0
//
// Driver for AT91 USART Controllers as SPI
//
// Copyright (C) 2018 Microchip Technology Inc.
//
// Author: Radu Pirea <radu.pirea@microchip.com>

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-direction.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_platform.h>
#include <linux/of_gpio.h>
#include <linux/pinctrl/consumer.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>

#include <linux/spi/spi.h>

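/*
 * USART register offsets (per the Atmel/Microchip AT91 USART register map;
 * verify against the datasheet of the target SoC).
 */
#define US_CR                   0x00
#define US_MR                   0x04
#define US_IER                  0x08
#define US_IDR                  0x0C
#define US_CSR                  0x14
#define US_RHR                  0x18
#define US_THR                  0x1C
#define US_BRGR                 0x20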
#define US_VERSION              0xFC

#define US_CR_RSTRX             BIT(2)
#define US_CR_RSTTX             BIT(3)
#define US_CR_RXEN              BIT(4)
#define US_CR_RXDIS             BIT(5)
#define US_CR_TXEN              BIT(6)
#define US_CR_TXDIS             BIT(7)

#define US_MR_SPI_MASTER        0x0E
#define US_MR_CHRL              GENMASK(7, 6)
#define US_MR_CPHA              BIT(8)
#define US_MR_CPOL              BIT(16)
#define US_MR_CLKO              BIT(18)
#define US_MR_WRDBT             BIT(20)
#define US_MR_LOOP              BIT(15)

#define US_IR_RXRDY             BIT(0)
#define US_IR_TXRDY             BIT(1)
#define US_IR_OVRE              BIT(5)

#define US_BRGR_SIZE            BIT(16)

#define US_MIN_CLK_DIV          0x06
#define US_MAX_CLK_DIV          BIT(16)

#define US_RESET                (US_CR_RSTRX | US_CR_RSTTX)
#define US_DISABLE              (US_CR_RXDIS | US_CR_TXDIS)
#define US_ENABLE               (US_CR_RXEN | US_CR_TXEN)
#define US_OVRE_RXRDY_IRQS      (US_IR_OVRE | US_IR_RXRDY)

#define US_INIT \
        (US_MR_SPI_MASTER | US_MR_CHRL | US_MR_CLKO | US_MR_WRDBT)
#define US_DMA_MIN_BYTES        16
#define US_DMA_TIMEOUT          (msecs_to_jiffies(1000))

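/*
 * BRGR is a 16-bit divider of the peripheral ("usart") clock, which bounds
 * the SPI clock range advertised in probe(): max_speed_hz is spi_clk /
 * US_MIN_CLK_DIV and min_speed_hz is spi_clk / US_MAX_CLK_DIV. Transfers
 * shorter than US_DMA_MIN_BYTES are not worth the DMA setup cost and are
 * done in PIO instead.
 */
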
/* Register access macros */
#define at91_usart_spi_readl(port, reg) \
        readl_relaxed((port)->regs + US_##reg)
#define at91_usart_spi_writel(port, reg, value) \
        writel_relaxed((value), (port)->regs + US_##reg)

#define at91_usart_spi_readb(port, reg) \
        readb_relaxed((port)->regs + US_##reg)
#define at91_usart_spi_writeb(port, reg, value) \
        writeb_relaxed((value), (port)->regs + US_##reg)

struct at91_usart_spi {
        struct platform_device  *mpdev;
        struct spi_transfer     *current_transfer;
        void __iomem            *regs;
        struct device           *dev;
        struct clk              *clk;
        phys_addr_t             phybase;

        struct completion       xfer_completion;

        /* used in the interrupt handler to protect data reading */
        spinlock_t              lock;

        unsigned int            spi_clk;
        u32                     status;

        bool                    xfer_failed;
        bool                    use_dma;

        unsigned int            current_tx_remaining_bytes;
        unsigned int            current_rx_remaining_bytes;
};

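/*
 * Completion callback for the RX DMA descriptor: the receive side is now
 * drained, so re-enable the RX ready interrupt and wake up the transfer
 * waiting in at91_usart_spi_transfer_one().
 */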
static void dma_callback(void *data)
{
        struct spi_controller *ctlr = data;
        struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);

        at91_usart_spi_writel(aus, IER, US_IR_RXRDY);
        aus->current_rx_remaining_bytes = 0;
        complete(&aus->xfer_completion);
}

static bool at91_usart_spi_can_dma(struct spi_controller *ctrl,
                                   struct spi_device *spi,
                                   struct spi_transfer *xfer)
{
        struct at91_usart_spi *aus = spi_master_get_devdata(ctrl);

        return aus->use_dma && xfer->len >= US_DMA_MIN_BYTES;
}

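/*
 * Request the "tx" and "rx" DMA channels from the parent USART device and
 * point them at the byte-wide holding registers (THR/RHR). If either
 * channel is missing or cannot be configured, the channels are released
 * and the driver falls back to PIO-only operation.
 */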
static int at91_usart_spi_configure_dma(struct spi_controller *ctlr,
                                        struct at91_usart_spi *aus)
{
        struct dma_slave_config slave_config;
        struct device *dev = &aus->mpdev->dev;
        phys_addr_t phybase = aus->phybase;
        dma_cap_mask_t mask;
        int err = 0;

        dma_cap_zero(mask);
        dma_cap_set(DMA_SLAVE, mask);

        ctlr->dma_tx = dma_request_chan(dev, "tx");
        if (IS_ERR_OR_NULL(ctlr->dma_tx)) {
                if (IS_ERR(ctlr->dma_tx)) {
                        err = PTR_ERR(ctlr->dma_tx);
                        goto at91_usart_spi_error_clear;
                }

                dev_dbg(dev,
                        "DMA TX channel not available, SPI unable to use DMA\n");
                err = -EBUSY;
                goto at91_usart_spi_error_clear;
        }

        ctlr->dma_rx = dma_request_chan(dev, "rx");
        if (IS_ERR_OR_NULL(ctlr->dma_rx)) {
                if (IS_ERR(ctlr->dma_rx)) {
                        err = PTR_ERR(ctlr->dma_rx);
                        goto at91_usart_spi_error;
                }

                dev_dbg(dev,
                        "DMA RX channel not available, SPI unable to use DMA\n");
                err = -EBUSY;
                goto at91_usart_spi_error;
        }

        slave_config.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        slave_config.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        slave_config.dst_addr = (dma_addr_t)phybase + US_THR;
        slave_config.src_addr = (dma_addr_t)phybase + US_RHR;
        slave_config.src_maxburst = 1;
        slave_config.dst_maxburst = 1;
        slave_config.device_fc = false;

        slave_config.direction = DMA_DEV_TO_MEM;
        if (dmaengine_slave_config(ctlr->dma_rx, &slave_config)) {
                dev_err(dev,
                        "failed to configure rx dma channel\n");
                err = -EINVAL;
                goto at91_usart_spi_error;
        }

        slave_config.direction = DMA_MEM_TO_DEV;
        if (dmaengine_slave_config(ctlr->dma_tx, &slave_config)) {
                dev_err(dev,
                        "failed to configure tx dma channel\n");
                err = -EINVAL;
                goto at91_usart_spi_error;
        }

        aus->use_dma = true;

        return 0;

at91_usart_spi_error:
        if (!IS_ERR_OR_NULL(ctlr->dma_tx))
                dma_release_channel(ctlr->dma_tx);
        if (!IS_ERR_OR_NULL(ctlr->dma_rx))
                dma_release_channel(ctlr->dma_rx);
        ctlr->dma_tx = NULL;
        ctlr->dma_rx = NULL;

at91_usart_spi_error_clear:
        return err;
}

static void at91_usart_spi_release_dma(struct spi_controller *ctlr)
{
        if (ctlr->dma_rx)
                dma_release_channel(ctlr->dma_rx);
        if (ctlr->dma_tx)
                dma_release_channel(ctlr->dma_tx);
}

static void at91_usart_spi_stop_dma(struct spi_controller *ctlr)
{
        if (ctlr->dma_rx)
                dmaengine_terminate_all(ctlr->dma_rx);
        if (ctlr->dma_tx)
                dmaengine_terminate_all(ctlr->dma_tx);
}

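/*
 * Queue one spi_transfer as a pair of slave_sg descriptors. RXRDY is
 * masked while DMA owns the data registers; only the RX descriptor gets a
 * completion callback, since the transfer is done once the last byte has
 * been received. On any failure the interrupt is re-enabled so the caller
 * can fall back to PIO.
 */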
static int at91_usart_spi_dma_transfer(struct spi_controller *ctlr,
                                       struct spi_transfer *xfer)
{
        struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
        struct dma_chan *rxchan = ctlr->dma_rx;
        struct dma_chan *txchan = ctlr->dma_tx;
        struct dma_async_tx_descriptor *rxdesc;
        struct dma_async_tx_descriptor *txdesc;
        dma_cookie_t cookie;

        /* Disable RX interrupt */
        at91_usart_spi_writel(aus, IDR, US_IR_RXRDY);

        rxdesc = dmaengine_prep_slave_sg(rxchan,
                                         xfer->rx_sg.sgl,
                                         xfer->rx_sg.nents,
                                         DMA_DEV_TO_MEM,
                                         DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!rxdesc)
                goto at91_usart_spi_err_dma;

        txdesc = dmaengine_prep_slave_sg(txchan,
                                         xfer->tx_sg.sgl,
                                         xfer->tx_sg.nents,
                                         DMA_MEM_TO_DEV,
                                         DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!txdesc)
                goto at91_usart_spi_err_dma;

        rxdesc->callback = dma_callback;
        rxdesc->callback_param = ctlr;

        cookie = rxdesc->tx_submit(rxdesc);
        if (dma_submit_error(cookie))
                goto at91_usart_spi_err_dma;

        cookie = txdesc->tx_submit(txdesc);
        if (dma_submit_error(cookie))
                goto at91_usart_spi_err_dma;

        rxchan->device->device_issue_pending(rxchan);
        txchan->device->device_issue_pending(txchan);

        return 0;

at91_usart_spi_err_dma:
        /* Enable RX interrupt if something fails and fallback to PIO */
        at91_usart_spi_writel(aus, IER, US_IR_RXRDY);
        at91_usart_spi_stop_dma(ctlr);

        return -ENOMEM;
}

static unsigned long at91_usart_spi_dma_timeout(struct at91_usart_spi *aus)
{
        return wait_for_completion_timeout(&aus->xfer_completion,
                                           US_DMA_TIMEOUT);
}

static inline u32 at91_usart_spi_tx_ready(struct at91_usart_spi *aus)
{
        return aus->status & US_IR_TXRDY;
}

static inline u32 at91_usart_spi_rx_ready(struct at91_usart_spi *aus)
{
        return aus->status & US_IR_RXRDY;
}

static inline u32 at91_usart_spi_check_overrun(struct at91_usart_spi *aus)
{
        return aus->status & US_IR_OVRE;
}

static inline u32 at91_usart_spi_read_status(struct at91_usart_spi *aus)
{
        aus->status = at91_usart_spi_readl(aus, CSR);
        return aus->status;
}

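/*
 * PIO helpers: one byte is moved per TXRDY/RXRDY event, indexed from the
 * end of the buffer by the number of bytes still outstanding.
 */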
static inline void at91_usart_spi_tx(struct at91_usart_spi *aus)
{
        unsigned int len = aus->current_transfer->len;
        unsigned int remaining = aus->current_tx_remaining_bytes;
        const u8 *tx_buf = aus->current_transfer->tx_buf;

        if (!remaining)
                return;

        if (at91_usart_spi_tx_ready(aus)) {
                at91_usart_spi_writeb(aus, THR, tx_buf[len - remaining]);
                aus->current_tx_remaining_bytes--;
        }
}

static inline void at91_usart_spi_rx(struct at91_usart_spi *aus)
{
        int len = aus->current_transfer->len;
        int remaining = aus->current_rx_remaining_bytes;
        u8 *rx_buf = aus->current_transfer->rx_buf;

        if (!remaining)
                return;

        rx_buf[len - remaining] = at91_usart_spi_readb(aus, RHR);
        aus->current_rx_remaining_bytes--;
}

static void
at91_usart_spi_set_xfer_speed(struct at91_usart_spi *aus,
                              struct spi_transfer *xfer)
{
        at91_usart_spi_writel(aus, BRGR,
                              DIV_ROUND_UP(aus->spi_clk, xfer->speed_hz));
}

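/*
 * Interrupt handler used for PIO transfers: an overrun aborts the current
 * transfer, otherwise each RXRDY event drains one byte under aus->lock.
 */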
static irqreturn_t at91_usart_spi_interrupt(int irq, void *dev_id)
{
        struct spi_controller *controller = dev_id;
        struct at91_usart_spi *aus = spi_master_get_devdata(controller);

        spin_lock(&aus->lock);
        at91_usart_spi_read_status(aus);

        if (at91_usart_spi_check_overrun(aus)) {
                aus->xfer_failed = true;
                at91_usart_spi_writel(aus, IDR, US_IR_OVRE | US_IR_RXRDY);
                spin_unlock(&aus->lock);
                return IRQ_HANDLED;
        }

        if (at91_usart_spi_rx_ready(aus)) {
                at91_usart_spi_rx(aus);
                spin_unlock(&aus->lock);
                return IRQ_HANDLED;
        }

        spin_unlock(&aus->lock);

        return IRQ_NONE;
}

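/*
 * Per-device setup: fold SPI_CPOL/SPI_CPHA/SPI_LOOP into a USART mode
 * register value and cache it in spi->controller_state; it is written to
 * MR when a message for this device is prepared.
 */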
static int at91_usart_spi_setup(struct spi_device *spi)
{
        struct at91_usart_spi *aus = spi_master_get_devdata(spi->controller);
        u32 *ausd = spi->controller_state;
        unsigned int mr = at91_usart_spi_readl(aus, MR);

        if (spi->mode & SPI_CPOL)
                mr |= US_MR_CPOL;
        else
                mr &= ~US_MR_CPOL;

        if (spi->mode & SPI_CPHA)
                mr |= US_MR_CPHA;
        else
                mr &= ~US_MR_CPHA;

        if (spi->mode & SPI_LOOP)
                mr |= US_MR_LOOP;
        else
                mr &= ~US_MR_LOOP;

        if (!ausd) {
                ausd = kzalloc(sizeof(*ausd), GFP_KERNEL);
                if (!ausd)
                        return -ENOMEM;

                spi->controller_state = ausd;
        }

        *ausd = mr;

        dev_dbg(&spi->dev,
                "setup: bpw %u mode 0x%x -> mr %d %08x\n",
                spi->bits_per_word, spi->mode, spi->chip_select, mr);

        return 0;
}

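/*
 * Transfer path: transfers that qualify for DMA are queued and awaited via
 * the RX completion; everything else (including any DMA setup failure) is
 * serviced byte by byte from the TXRDY/RXRDY interrupt.
 */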
static int at91_usart_spi_transfer_one(struct spi_controller *ctlr,
                                       struct spi_device *spi,
                                       struct spi_transfer *xfer)
{
        struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
        unsigned long dma_timeout = 0;
        int ret = 0;

        at91_usart_spi_set_xfer_speed(aus, xfer);
        aus->xfer_failed = false;
        aus->current_transfer = xfer;
        aus->current_tx_remaining_bytes = xfer->len;
        aus->current_rx_remaining_bytes = xfer->len;

        while ((aus->current_tx_remaining_bytes ||
                aus->current_rx_remaining_bytes) && !aus->xfer_failed) {
                reinit_completion(&aus->xfer_completion);
                if (at91_usart_spi_can_dma(ctlr, spi, xfer) &&
                    !ret) {
                        ret = at91_usart_spi_dma_transfer(ctlr, xfer);
                        if (ret)
                                continue;

                        dma_timeout = at91_usart_spi_dma_timeout(aus);

                        if (WARN_ON(dma_timeout == 0)) {
                                dev_err(&spi->dev, "DMA transfer timeout\n");
                                return -EIO;
                        }
                        aus->current_tx_remaining_bytes = 0;
                } else {
                        at91_usart_spi_read_status(aus);
                        at91_usart_spi_tx(aus);
                }
        }

        if (aus->xfer_failed) {
                dev_err(aus->dev, "Overrun!\n");
                return -EIO;
        }

        return 0;
}

static int at91_usart_spi_prepare_message(struct spi_controller *ctlr,
                                          struct spi_message *message)
{
        struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
        struct spi_device *spi = message->spi;
        u32 *ausd = spi->controller_state;

        at91_usart_spi_writel(aus, CR, US_ENABLE);
        at91_usart_spi_writel(aus, IER, US_OVRE_RXRDY_IRQS);
        at91_usart_spi_writel(aus, MR, *ausd);

        return 0;
}

static int at91_usart_spi_unprepare_message(struct spi_controller *ctlr,
                                            struct spi_message *message)
{
        struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);

        at91_usart_spi_writel(aus, CR, US_RESET | US_DISABLE);
        at91_usart_spi_writel(aus, IDR, US_OVRE_RXRDY_IRQS);

        return 0;
}

static void at91_usart_spi_cleanup(struct spi_device *spi)
{
        struct at91_usart_spi_device *ausd = spi->controller_state;

        spi->controller_state = NULL;
        kfree(ausd);
}

static void at91_usart_spi_init(struct at91_usart_spi *aus)
{
        at91_usart_spi_writel(aus, MR, US_INIT);
        at91_usart_spi_writel(aus, CR, US_RESET | US_DISABLE);
}

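/*
 * Chip selects are plain GPIOs listed in the parent node's "cs-gpios"
 * property; claim each valid one as an output before registering the
 * controller.
 */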
static int at91_usart_gpio_setup(struct platform_device *pdev)
{
        struct device_node *np = pdev->dev.parent->of_node;
        int i;
        int ret;
        int nb;

        if (!np)
                return -EINVAL;

        nb = of_gpio_named_count(np, "cs-gpios");
        for (i = 0; i < nb; i++) {
                int cs_gpio = of_get_named_gpio(np, "cs-gpios", i);

                if (cs_gpio < 0)
                        return cs_gpio;

                if (gpio_is_valid(cs_gpio)) {
                        ret = devm_gpio_request_one(&pdev->dev, cs_gpio,
                                                    GPIOF_DIR_OUT,
                                                    dev_name(&pdev->dev));
                        if (ret)
                                return ret;
                }
        }

        return 0;
}

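/*
 * The SPI function is a child of the at91-usart MFD device, so the
 * register window, IRQ and "usart" clock are looked up on pdev->dev.parent
 * rather than on this platform device.
 */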
static int at91_usart_spi_probe(struct platform_device *pdev)
{
        struct resource *regs;
        struct spi_controller *controller;
        struct at91_usart_spi *aus;
        struct clk *clk;
        int irq;
        int ret;

        regs = platform_get_resource(to_platform_device(pdev->dev.parent),
                                     IORESOURCE_MEM, 0);
        if (!regs)
                return -EINVAL;

        irq = platform_get_irq(to_platform_device(pdev->dev.parent), 0);
        if (irq < 0)
                return irq;

        clk = devm_clk_get(pdev->dev.parent, "usart");
        if (IS_ERR(clk))
                return PTR_ERR(clk);

        ret = -ENOMEM;
        controller = spi_alloc_master(&pdev->dev, sizeof(*aus));
        if (!controller)
                goto at91_usart_spi_probe_fail;

        ret = at91_usart_gpio_setup(pdev);
        if (ret)
                goto at91_usart_spi_probe_fail;

        controller->mode_bits = SPI_CPOL | SPI_CPHA | SPI_LOOP | SPI_CS_HIGH;
        controller->dev.of_node = pdev->dev.parent->of_node;
        controller->bits_per_word_mask = SPI_BPW_MASK(8);
        controller->setup = at91_usart_spi_setup;
        controller->flags = SPI_MASTER_MUST_RX | SPI_MASTER_MUST_TX;
        controller->transfer_one = at91_usart_spi_transfer_one;
        controller->prepare_message = at91_usart_spi_prepare_message;
        controller->unprepare_message = at91_usart_spi_unprepare_message;
        controller->can_dma = at91_usart_spi_can_dma;
        controller->cleanup = at91_usart_spi_cleanup;
        controller->max_speed_hz = DIV_ROUND_UP(clk_get_rate(clk),
                                                US_MIN_CLK_DIV);
        controller->min_speed_hz = DIV_ROUND_UP(clk_get_rate(clk),
                                                US_MAX_CLK_DIV);
        platform_set_drvdata(pdev, controller);

        aus = spi_master_get_devdata(controller);

        aus->dev = &pdev->dev;
        aus->regs = devm_ioremap_resource(&pdev->dev, regs);
        if (IS_ERR(aus->regs)) {
                ret = PTR_ERR(aus->regs);
                goto at91_usart_spi_probe_fail;
        }

        aus->clk = clk;

        ret = devm_request_irq(&pdev->dev, irq, at91_usart_spi_interrupt, 0,
                               dev_name(&pdev->dev), controller);
        if (ret)
                goto at91_usart_spi_probe_fail;

        ret = clk_prepare_enable(clk);
        if (ret)
                goto at91_usart_spi_probe_fail;

        aus->spi_clk = clk_get_rate(clk);
        at91_usart_spi_init(aus);

        aus->phybase = regs->start;

        aus->mpdev = to_platform_device(pdev->dev.parent);

        ret = at91_usart_spi_configure_dma(controller, aus);
        if (ret)
                goto at91_usart_fail_dma;

        spin_lock_init(&aus->lock);
        init_completion(&aus->xfer_completion);

        ret = devm_spi_register_master(&pdev->dev, controller);
        if (ret)
                goto at91_usart_fail_register_master;

        dev_info(&pdev->dev,
                 "AT91 USART SPI Controller version 0x%x at %pa (irq %d)\n",
                 at91_usart_spi_readl(aus, VERSION),
                 &regs->start, irq);

        return 0;

at91_usart_fail_register_master:
        at91_usart_spi_release_dma(controller);
at91_usart_fail_dma:
        clk_disable_unprepare(clk);
at91_usart_spi_probe_fail:
        spi_master_put(controller);
        return ret;
}

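/*
 * Power management: runtime PM gates the peripheral clock and pinctrl
 * state; system suspend additionally quiesces the SPI controller, and
 * resume reprograms the mode/control registers via at91_usart_spi_init().
 */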
__maybe_unused static int at91_usart_spi_runtime_suspend(struct device *dev)
{
        struct spi_controller *ctlr = dev_get_drvdata(dev);
        struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);

        clk_disable_unprepare(aus->clk);
        pinctrl_pm_select_sleep_state(dev);

        return 0;
}

__maybe_unused static int at91_usart_spi_runtime_resume(struct device *dev)
{
        struct spi_controller *ctrl = dev_get_drvdata(dev);
        struct at91_usart_spi *aus = spi_master_get_devdata(ctrl);

        pinctrl_pm_select_default_state(dev);

        return clk_prepare_enable(aus->clk);
}

__maybe_unused static int at91_usart_spi_suspend(struct device *dev)
{
        struct spi_controller *ctrl = dev_get_drvdata(dev);
        int ret;

        ret = spi_controller_suspend(ctrl);
        if (ret)
                return ret;

        if (!pm_runtime_suspended(dev))
                at91_usart_spi_runtime_suspend(dev);

        return 0;
}

__maybe_unused static int at91_usart_spi_resume(struct device *dev)
{
        struct spi_controller *ctrl = dev_get_drvdata(dev);
        struct at91_usart_spi *aus = spi_master_get_devdata(ctrl);
        int ret;

        if (!pm_runtime_suspended(dev)) {
                ret = at91_usart_spi_runtime_resume(dev);
                if (ret)
                        return ret;
        }

        at91_usart_spi_init(aus);

        return spi_controller_resume(ctrl);
}

static int at91_usart_spi_remove(struct platform_device *pdev)
{
        struct spi_controller *ctlr = platform_get_drvdata(pdev);
        struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);

        at91_usart_spi_release_dma(ctlr);
        clk_disable_unprepare(aus->clk);

        return 0;
}

static const struct dev_pm_ops at91_usart_spi_pm_ops = {
        SET_SYSTEM_SLEEP_PM_OPS(at91_usart_spi_suspend, at91_usart_spi_resume)
        SET_RUNTIME_PM_OPS(at91_usart_spi_runtime_suspend,
                           at91_usart_spi_runtime_resume, NULL)
};

static struct platform_driver at91_usart_spi_driver = {
        .driver = {
                .name = "at91_usart_spi",
                .pm = &at91_usart_spi_pm_ops,
        },
        .probe = at91_usart_spi_probe,
        .remove = at91_usart_spi_remove,
};

module_platform_driver(at91_usart_spi_driver);

MODULE_DESCRIPTION("Microchip AT91 USART SPI Controller driver");
MODULE_AUTHOR("Radu Pirea <radu.pirea@microchip.com>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:at91_usart_spi");