// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2018 Xilinx
 *
 * Xilinx ZynqMP Generic Quad-SPI(QSPI) controller driver(master mode only)
 */
#include <clk.h>
#include <cpu_func.h>
#include <dm.h>
#include <log.h>
#include <malloc.h>
#include <memalign.h>
#include <spi.h>
#include <spi-mem.h>
#include <ubi_uboot.h>
#include <wait_bit.h>
#include <zynqmp_firmware.h>
#include <asm/arch/sys_proto.h>
#include <asm/cache.h>
#include <asm/io.h>
#include <dm/device_compat.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <linux/sizes.h>
28 #define GQSPI_GFIFO_STRT_MODE_MASK BIT(29)
29 #define GQSPI_CONFIG_MODE_EN_MASK (3 << 30)
30 #define GQSPI_CONFIG_DMA_MODE (2 << 30)
31 #define GQSPI_CONFIG_CPHA_MASK BIT(2)
32 #define GQSPI_CONFIG_CPOL_MASK BIT(1)
/*
 * QSPI Interrupt Registers bit Masks
 *
 * All the four interrupt registers (Status/Mask/Enable/Disable) have the same
 * bit definitions.
 */
40 #define GQSPI_IXR_TXNFULL_MASK 0x00000004 /* QSPI TX FIFO Overflow */
41 #define GQSPI_IXR_TXFULL_MASK 0x00000008 /* QSPI TX FIFO is full */
42 #define GQSPI_IXR_TXFIFOEMPTY_MASK 0x00000100 /* QSPI TX FIFO is Empty */
43 #define GQSPI_IXR_RXNEMTY_MASK 0x00000010 /* QSPI RX FIFO Not Empty */
44 #define GQSPI_IXR_GFEMTY_MASK 0x00000080 /* QSPI Generic FIFO Empty */
45 #define GQSPI_IXR_GFNFULL_MASK 0x00000200 /* QSPI GENFIFO not full */
46 #define GQSPI_IXR_ALL_MASK (GQSPI_IXR_TXNFULL_MASK | \
47 GQSPI_IXR_RXNEMTY_MASK)
/*
 * QSPI Enable Register bit Masks
 *
 * This register is used to enable or disable the QSPI controller
 */
54 #define GQSPI_ENABLE_ENABLE_MASK 0x00000001 /* QSPI Enable Bit Mask */
56 #define GQSPI_GFIFO_LOW_BUS BIT(14)
57 #define GQSPI_GFIFO_CS_LOWER BIT(12)
58 #define GQSPI_GFIFO_UP_BUS BIT(15)
59 #define GQSPI_GFIFO_CS_UPPER BIT(13)
60 #define GQSPI_SPI_MODE_QSPI (3 << 10)
61 #define GQSPI_SPI_MODE_SPI BIT(10)
62 #define GQSPI_SPI_MODE_DUAL_SPI (2 << 10)
63 #define GQSPI_IMD_DATA_CS_ASSERT 5
64 #define GQSPI_IMD_DATA_CS_DEASSERT 5
65 #define GQSPI_GFIFO_TX BIT(16)
66 #define GQSPI_GFIFO_RX BIT(17)
67 #define GQSPI_GFIFO_STRIPE_MASK BIT(18)
68 #define GQSPI_GFIFO_IMD_MASK 0xFF
69 #define GQSPI_GFIFO_EXP_MASK BIT(9)
70 #define GQSPI_GFIFO_DATA_XFR_MASK BIT(8)
71 #define GQSPI_STRT_GEN_FIFO BIT(28)
72 #define GQSPI_GEN_FIFO_STRT_MOD BIT(29)
73 #define GQSPI_GFIFO_WP_HOLD BIT(19)
74 #define GQSPI_BAUD_DIV_MASK (7 << 3)
75 #define GQSPI_DFLT_BAUD_RATE_DIV BIT(3)
76 #define GQSPI_GFIFO_ALL_INT_MASK 0xFBE
77 #define GQSPI_DMA_DST_I_STS_DONE BIT(1)
78 #define GQSPI_DMA_DST_I_STS_MASK 0xFE
81 #define GQSPI_GFIFO_SELECT BIT(0)
82 #define GQSPI_FIFO_THRESHOLD 1
83 #define GQSPI_GENFIFO_THRESHOLD 31
85 #define SPI_XFER_ON_BOTH 0
86 #define SPI_XFER_ON_LOWER 1
87 #define SPI_XFER_ON_UPPER 2
89 #define GQSPI_DMA_ALIGN 0x4
90 #define GQSPI_MAX_BAUD_RATE_VAL 7
91 #define GQSPI_DFLT_BAUD_RATE_VAL 2
93 #define GQSPI_TIMEOUT 100000000
95 #define GQSPI_BAUD_DIV_SHIFT 2
96 #define GQSPI_LPBK_DLY_ADJ_LPBK_SHIFT 5
97 #define GQSPI_LPBK_DLY_ADJ_DLY_1 0x2
98 #define GQSPI_LPBK_DLY_ADJ_DLY_1_SHIFT 3
99 #define GQSPI_LPBK_DLY_ADJ_DLY_0 0x3
100 #define GQSPI_USE_DATA_DLY 0x1
101 #define GQSPI_USE_DATA_DLY_SHIFT 31
102 #define GQSPI_DATA_DLY_ADJ_VALUE 0x2
103 #define GQSPI_DATA_DLY_ADJ_SHIFT 28
104 #define TAP_DLY_BYPASS_LQSPI_RX_VALUE 0x1
105 #define TAP_DLY_BYPASS_LQSPI_RX_SHIFT 2
106 #define GQSPI_DATA_DLY_ADJ_OFST 0x000001F8
/*
 * Tap-delay bypass register lives at a different address on ZynqMP vs
 * Versal/Versal-NET. Parenthesize the whole ternary so the macro expands
 * safely in any expression context.
 */
#define IOU_TAPDLY_BYPASS_OFST		(!(IS_ENABLED(CONFIG_ARCH_VERSAL) || \
					   IS_ENABLED(CONFIG_ARCH_VERSAL_NET)) ? \
					  0xFF180390 : 0xF103003C)
110 #define GQSPI_LPBK_DLY_ADJ_LPBK_MASK 0x00000020
111 #define GQSPI_FREQ_37_5MHZ 37500000
112 #define GQSPI_FREQ_40MHZ 40000000
113 #define GQSPI_FREQ_100MHZ 100000000
114 #define GQSPI_FREQ_150MHZ 150000000
115 #define IOU_TAPDLY_BYPASS_MASK 0x7
117 #define GQSPI_REG_OFFSET 0x100
118 #define GQSPI_DMA_REG_OFFSET 0x800
120 /* QSPI register offsets */
121 struct zynqmp_qspi_regs {
122 u32 confr; /* 0x00 */
125 u32 idisr; /* 0x0C */
126 u32 imaskr; /* 0x10 */
129 u32 txd0r; /* 0x1C */
132 u32 txftr; /* 0x28 */
133 u32 rxftr; /* 0x2C */
134 u32 gpior; /* 0x30 */
135 u32 reserved0; /* 0x34 */
136 u32 lpbkdly; /* 0x38 */
137 u32 reserved1; /* 0x3C */
138 u32 genfifo; /* 0x40 */
139 u32 gqspisel; /* 0x44 */
140 u32 reserved2; /* 0x48 */
141 u32 gqfifoctrl; /* 0x4C */
142 u32 gqfthr; /* 0x50 */
143 u32 gqpollcfg; /* 0x54 */
144 u32 gqpollto; /* 0x58 */
145 u32 gqxfersts; /* 0x5C */
146 u32 gqfifosnap; /* 0x60 */
147 u32 gqrxcpy; /* 0x64 */
148 u32 reserved3[36]; /* 0x68 */
149 u32 gqspidlyadj; /* 0xF8 */
152 struct zynqmp_qspi_dma_regs {
153 u32 dmadst; /* 0x00 */
154 u32 dmasize; /* 0x04 */
155 u32 dmasts; /* 0x08 */
156 u32 dmactrl; /* 0x0C */
157 u32 reserved0; /* 0x10 */
158 u32 dmaisr; /* 0x14 */
159 u32 dmaier; /* 0x18 */
160 u32 dmaidr; /* 0x1C */
161 u32 dmaimr; /* 0x20 */
162 u32 dmactrl2; /* 0x24 */
163 u32 dmadstmsb; /* 0x28 */
166 struct zynqmp_qspi_plat {
167 struct zynqmp_qspi_regs *regs;
168 struct zynqmp_qspi_dma_regs *dma_regs;
171 unsigned int io_mode;
/*
 * Per-device runtime state.
 * tx_buf/rx_buf/len restored: they are read and advanced by the
 * fill/start helpers and set by zynqmp_qspi_exec_op().
 */
struct zynqmp_qspi_priv {
	struct zynqmp_qspi_regs *regs;
	struct zynqmp_qspi_dma_regs *dma_regs;
	const void *tx_buf;		/* current TX position, advanced as sent */
	void *rx_buf;			/* current RX position, advanced as received */
	unsigned int len;		/* bytes remaining in current data phase */
	unsigned int io_mode;		/* nonzero: IO (PIO) reads instead of DMA */
	int bytes_to_transfer;
	int bytes_to_receive;
	const struct spi_mem_op *op;	/* op currently being executed */
};
186 static int zynqmp_qspi_of_to_plat(struct udevice *bus)
188 struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
190 debug("%s\n", __func__);
192 plat->regs = (struct zynqmp_qspi_regs *)(dev_read_addr(bus) +
194 plat->dma_regs = (struct zynqmp_qspi_dma_regs *)
195 (dev_read_addr(bus) + GQSPI_DMA_REG_OFFSET);
197 plat->io_mode = dev_read_bool(bus, "has-io-mode");
202 static void zynqmp_qspi_init_hw(struct zynqmp_qspi_priv *priv)
205 struct zynqmp_qspi_regs *regs = priv->regs;
207 writel(GQSPI_GFIFO_SELECT, ®s->gqspisel);
208 writel(GQSPI_GFIFO_ALL_INT_MASK, ®s->idisr);
209 writel(GQSPI_FIFO_THRESHOLD, ®s->txftr);
210 writel(GQSPI_FIFO_THRESHOLD, ®s->rxftr);
211 writel(GQSPI_GENFIFO_THRESHOLD, ®s->gqfthr);
212 writel(GQSPI_GFIFO_ALL_INT_MASK, ®s->isr);
213 writel(~GQSPI_ENABLE_ENABLE_MASK, ®s->enbr);
215 config_reg = readl(®s->confr);
216 config_reg &= ~(GQSPI_GFIFO_STRT_MODE_MASK |
217 GQSPI_CONFIG_MODE_EN_MASK);
218 config_reg |= GQSPI_GFIFO_WP_HOLD | GQSPI_DFLT_BAUD_RATE_DIV;
219 config_reg |= GQSPI_GFIFO_STRT_MODE_MASK;
221 config_reg |= GQSPI_CONFIG_DMA_MODE;
223 writel(config_reg, ®s->confr);
225 writel(GQSPI_ENABLE_ENABLE_MASK, ®s->enbr);
228 static u32 zynqmp_qspi_bus_select(struct zynqmp_qspi_priv *priv)
230 u32 gqspi_fifo_reg = 0;
232 gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS |
233 GQSPI_GFIFO_CS_LOWER;
235 return gqspi_fifo_reg;
238 static u32 zynqmp_qspi_genfifo_mode(u8 buswidth)
242 return GQSPI_SPI_MODE_SPI;
244 return GQSPI_SPI_MODE_DUAL_SPI;
246 return GQSPI_SPI_MODE_QSPI;
248 debug("Unsupported bus width %u\n", buswidth);
249 return GQSPI_SPI_MODE_SPI;
253 static void zynqmp_qspi_fill_gen_fifo(struct zynqmp_qspi_priv *priv,
256 struct zynqmp_qspi_regs *regs = priv->regs;
260 writel(gqspi_fifo_reg, ®s->genfifo);
262 config_reg = readl(®s->confr);
263 /* Manual start if needed */
264 config_reg |= GQSPI_STRT_GEN_FIFO;
265 writel(config_reg, ®s->confr);
267 /* Enable interrupts */
268 ier = readl(®s->ier);
269 ier |= GQSPI_IXR_GFEMTY_MASK;
270 writel(ier, ®s->ier);
272 /* Wait until the gen fifo is empty to write the new command */
273 ret = wait_for_bit_le32(®s->isr, GQSPI_IXR_GFEMTY_MASK, 1,
276 printf("%s Timeout\n", __func__);
280 static void zynqmp_qspi_chipselect(struct zynqmp_qspi_priv *priv, int is_on)
282 u32 gqspi_fifo_reg = 0;
285 gqspi_fifo_reg = zynqmp_qspi_bus_select(priv);
286 gqspi_fifo_reg |= GQSPI_SPI_MODE_SPI |
287 GQSPI_IMD_DATA_CS_ASSERT;
289 gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS;
290 gqspi_fifo_reg |= GQSPI_IMD_DATA_CS_DEASSERT;
293 debug("GFIFO_CMD_CS: 0x%x\n", gqspi_fifo_reg);
295 zynqmp_qspi_fill_gen_fifo(priv, gqspi_fifo_reg);
298 static void zynqmp_qspi_set_tapdelay(struct udevice *bus, u32 baudrateval)
300 struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
301 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
302 struct zynqmp_qspi_regs *regs = priv->regs;
303 u32 tapdlybypass = 0, lpbkdlyadj = 0, datadlyadj = 0, clk_rate;
306 clk_rate = plat->frequency;
307 reqhz = (clk_rate / (GQSPI_BAUD_DIV_SHIFT << baudrateval));
309 debug("%s, req_hz:%d, clk_rate:%d, baudrateval:%d\n",
310 __func__, reqhz, clk_rate, baudrateval);
312 if (!(IS_ENABLED(CONFIG_ARCH_VERSAL) ||
313 IS_ENABLED(CONFIG_ARCH_VERSAL_NET))) {
314 if (reqhz <= GQSPI_FREQ_40MHZ) {
315 tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
316 TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
317 } else if (reqhz <= GQSPI_FREQ_100MHZ) {
318 tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
319 TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
320 lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK;
321 datadlyadj = (GQSPI_USE_DATA_DLY <<
322 GQSPI_USE_DATA_DLY_SHIFT) |
323 (GQSPI_DATA_DLY_ADJ_VALUE <<
324 GQSPI_DATA_DLY_ADJ_SHIFT);
325 } else if (reqhz <= GQSPI_FREQ_150MHZ) {
326 lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK |
327 GQSPI_LPBK_DLY_ADJ_DLY_0;
329 zynqmp_mmio_write(IOU_TAPDLY_BYPASS_OFST,
330 IOU_TAPDLY_BYPASS_MASK, tapdlybypass);
332 if (reqhz <= GQSPI_FREQ_37_5MHZ) {
333 tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
334 TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
335 } else if (reqhz <= GQSPI_FREQ_100MHZ) {
336 tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
337 TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
338 lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK;
339 datadlyadj = GQSPI_USE_DATA_DLY <<
340 GQSPI_USE_DATA_DLY_SHIFT;
341 } else if (reqhz <= GQSPI_FREQ_150MHZ) {
342 lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK |
343 (GQSPI_LPBK_DLY_ADJ_DLY_1 <<
344 GQSPI_LPBK_DLY_ADJ_DLY_1_SHIFT);
346 writel(tapdlybypass, IOU_TAPDLY_BYPASS_OFST);
348 writel(lpbkdlyadj, ®s->lpbkdly);
349 writel(datadlyadj, ®s->gqspidlyadj);
352 static int zynqmp_qspi_set_speed(struct udevice *bus, uint speed)
354 struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
355 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
356 struct zynqmp_qspi_regs *regs = priv->regs;
358 u8 baud_rate_val = 0;
360 debug("%s\n", __func__);
361 if (speed > plat->frequency)
362 speed = plat->frequency;
364 if (plat->speed_hz != speed) {
365 /* Set the clock frequency */
366 /* If speed == 0, default to lowest speed */
367 while ((baud_rate_val < 8) &&
369 (2 << baud_rate_val)) > speed))
372 if (baud_rate_val > GQSPI_MAX_BAUD_RATE_VAL)
373 baud_rate_val = GQSPI_DFLT_BAUD_RATE_VAL;
375 plat->speed_hz = plat->frequency / (2 << baud_rate_val);
377 confr = readl(®s->confr);
378 confr &= ~GQSPI_BAUD_DIV_MASK;
379 confr |= (baud_rate_val << 3);
380 writel(confr, ®s->confr);
381 zynqmp_qspi_set_tapdelay(bus, baud_rate_val);
383 debug("regs=%p, speed=%d\n", priv->regs, plat->speed_hz);
389 static int zynqmp_qspi_probe(struct udevice *bus)
391 struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
392 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
397 debug("%s: bus:%p, priv:%p\n", __func__, bus, priv);
399 priv->regs = plat->regs;
400 priv->dma_regs = plat->dma_regs;
401 priv->io_mode = plat->io_mode;
403 ret = clk_get_by_index(bus, 0, &clk);
405 dev_err(bus, "failed to get clock\n");
409 clock = clk_get_rate(&clk);
410 if (IS_ERR_VALUE(clock)) {
411 dev_err(bus, "failed to get rate\n");
414 debug("%s: CLK %ld\n", __func__, clock);
416 ret = clk_enable(&clk);
418 dev_err(bus, "failed to enable clock\n");
421 plat->frequency = clock;
422 plat->speed_hz = plat->frequency / 2;
424 /* init the zynq spi hw */
425 zynqmp_qspi_init_hw(priv);
430 static int zynqmp_qspi_set_mode(struct udevice *bus, uint mode)
432 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
433 struct zynqmp_qspi_regs *regs = priv->regs;
436 debug("%s\n", __func__);
437 /* Set the SPI Clock phase and polarities */
438 confr = readl(®s->confr);
439 confr &= ~(GQSPI_CONFIG_CPHA_MASK | GQSPI_CONFIG_CPOL_MASK);
442 confr |= GQSPI_CONFIG_CPHA_MASK;
444 confr |= GQSPI_CONFIG_CPOL_MASK;
446 writel(confr, ®s->confr);
451 static int zynqmp_qspi_fill_tx_fifo(struct zynqmp_qspi_priv *priv, u32 size)
455 struct zynqmp_qspi_regs *regs = priv->regs;
456 u32 *buf = (u32 *)priv->tx_buf;
459 debug("TxFIFO: 0x%x, size: 0x%x\n", readl(®s->isr),
463 ret = wait_for_bit_le32(®s->isr, GQSPI_IXR_TXNFULL_MASK, 1,
466 printf("%s: Timeout\n", __func__);
471 writel(*buf, ®s->txd0r);
479 data |= GENMASK(31, 8);
482 data = *((u16 *)buf);
484 data |= GENMASK(31, 16);
489 data |= GENMASK(31, 24);
492 writel(data, ®s->txd0r);
497 ret = wait_for_bit_le32(®s->isr, GQSPI_IXR_TXFIFOEMPTY_MASK, 1,
500 printf("%s: Timeout\n", __func__);
508 static void zynqmp_qspi_genfifo_cmd(struct zynqmp_qspi_priv *priv)
510 const struct spi_mem_op *op = priv->op;
512 u8 i, dummy_cycles, addr;
515 gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
516 gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->cmd.buswidth);
517 gen_fifo_cmd |= GQSPI_GFIFO_TX;
518 gen_fifo_cmd |= op->cmd.opcode;
519 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
522 for (i = 0; i < op->addr.nbytes; i++) {
523 addr = op->addr.val >> (8 * (op->addr.nbytes - i - 1));
525 gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
526 gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->addr.buswidth);
527 gen_fifo_cmd |= GQSPI_GFIFO_TX;
528 gen_fifo_cmd |= addr;
530 debug("GFIFO_CMD_Cmd = 0x%x\n", gen_fifo_cmd);
532 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
536 if (op->dummy.nbytes) {
537 dummy_cycles = op->dummy.nbytes * 8 / op->dummy.buswidth;
539 gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
540 gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->dummy.buswidth);
541 gen_fifo_cmd &= ~(GQSPI_GFIFO_TX | GQSPI_GFIFO_RX);
542 gen_fifo_cmd |= GQSPI_GFIFO_DATA_XFR_MASK;
543 gen_fifo_cmd |= dummy_cycles;
544 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
548 static u32 zynqmp_qspi_calc_exp(struct zynqmp_qspi_priv *priv,
555 if (priv->len > 255) {
556 if (priv->len & (1 << expval)) {
557 *gen_fifo_cmd &= ~GQSPI_GFIFO_IMD_MASK;
558 *gen_fifo_cmd |= GQSPI_GFIFO_EXP_MASK;
559 *gen_fifo_cmd |= expval;
560 priv->len -= (1 << expval);
565 *gen_fifo_cmd &= ~(GQSPI_GFIFO_IMD_MASK |
566 GQSPI_GFIFO_EXP_MASK);
567 *gen_fifo_cmd |= (u8)priv->len;
575 static int zynqmp_qspi_genfifo_fill_tx(struct zynqmp_qspi_priv *priv)
581 gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
582 gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
583 gen_fifo_cmd |= GQSPI_GFIFO_TX | GQSPI_GFIFO_DATA_XFR_MASK;
586 len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
587 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
589 debug("GFIFO_CMD_TX:0x%x\n", gen_fifo_cmd);
591 if (gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK)
592 ret = zynqmp_qspi_fill_tx_fifo(priv, 1 << len);
594 ret = zynqmp_qspi_fill_tx_fifo(priv, len);
602 static int zynqmp_qspi_start_io(struct zynqmp_qspi_priv *priv,
603 u32 gen_fifo_cmd, u32 *buf)
606 u32 actuallen = priv->len;
607 u32 config_reg, ier, isr;
608 u32 timeout = GQSPI_TIMEOUT;
609 struct zynqmp_qspi_regs *regs = priv->regs;
614 len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
615 /* If exponent bit is set, reset immediate to be 2^len */
616 if (gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK)
617 priv->bytes_to_receive = (1 << len);
619 priv->bytes_to_receive = len;
620 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
621 debug("GFIFO_CMD_RX:0x%x\n", gen_fifo_cmd);
623 config_reg = readl(®s->confr);
624 config_reg |= GQSPI_STRT_GEN_FIFO;
625 writel(config_reg, ®s->confr);
626 /* Enable RX interrupts for IO mode */
627 ier = readl(®s->ier);
628 ier |= GQSPI_IXR_ALL_MASK;
629 writel(ier, ®s->ier);
630 while (priv->bytes_to_receive && timeout) {
631 isr = readl(®s->isr);
632 if (isr & GQSPI_IXR_RXNEMTY_MASK) {
633 if (priv->bytes_to_receive >= 4) {
634 *traverse = readl(®s->drxr);
636 priv->bytes_to_receive -= 4;
638 last_bits = readl(®s->drxr);
639 memcpy(traverse, &last_bits,
640 priv->bytes_to_receive);
641 priv->bytes_to_receive = 0;
643 timeout = GQSPI_TIMEOUT;
650 debug("buf:0x%lx, rxbuf:0x%lx, *buf:0x%x len: 0x%x\n",
651 (unsigned long)buf, (unsigned long)priv->rx_buf,
654 printf("IO timeout: %d\n", readl(®s->isr));
662 static int zynqmp_qspi_start_dma(struct zynqmp_qspi_priv *priv,
663 u32 gen_fifo_cmd, u32 *buf)
667 u32 actuallen = priv->len;
668 u32 totallen = priv->len;
670 struct zynqmp_qspi_dma_regs *dma_regs = priv->dma_regs;
673 if (totallen >= SZ_512M)
676 priv->len = totallen;
678 totallen -= priv->len; /* Save remaining bytes length to read */
679 actuallen = priv->len; /* Actual number of bytes reading */
681 writel((unsigned long)buf, &dma_regs->dmadst);
682 writel(roundup(priv->len, GQSPI_DMA_ALIGN), &dma_regs->dmasize);
683 writel(GQSPI_DMA_DST_I_STS_MASK, &dma_regs->dmaier);
684 addr = (unsigned long)buf;
685 size = roundup(priv->len, GQSPI_DMA_ALIGN);
686 flush_dcache_range(addr, addr + size);
689 zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
690 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
692 debug("GFIFO_CMD_RX:0x%x\n", gen_fifo_cmd);
695 ret = wait_for_bit_le32(&dma_regs->dmaisr,
696 GQSPI_DMA_DST_I_STS_DONE, 1,
699 printf("DMA Timeout:0x%x\n", readl(&dma_regs->dmaisr));
703 writel(GQSPI_DMA_DST_I_STS_DONE, &dma_regs->dmaisr);
705 debug("buf:0x%lx, rxbuf:0x%lx, *buf:0x%x len: 0x%x\n",
706 (unsigned long)buf, (unsigned long)priv->rx_buf, *buf,
709 if (buf != priv->rx_buf)
710 memcpy(priv->rx_buf, buf, actuallen);
712 buf = (u32 *)((u8 *)buf + actuallen);
713 priv->rx_buf = (u8 *)priv->rx_buf + actuallen;
719 static int zynqmp_qspi_genfifo_fill_rx(struct zynqmp_qspi_priv *priv)
723 u32 actuallen = priv->len;
725 gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
726 gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
727 gen_fifo_cmd |= GQSPI_GFIFO_RX | GQSPI_GFIFO_DATA_XFR_MASK;
730 * Check if receive buffer is aligned to 4 byte and length
731 * is multiples of four byte as we are using dma to receive.
733 if ((!((unsigned long)priv->rx_buf & (GQSPI_DMA_ALIGN - 1)) &&
734 !(actuallen % GQSPI_DMA_ALIGN)) || priv->io_mode) {
735 buf = (u32 *)priv->rx_buf;
737 return zynqmp_qspi_start_io(priv, gen_fifo_cmd, buf);
739 return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
742 ALLOC_CACHE_ALIGN_BUFFER(u8, tmp, roundup(priv->len, GQSPI_DMA_ALIGN));
744 return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
747 static int zynqmp_qspi_claim_bus(struct udevice *dev)
749 struct udevice *bus = dev->parent;
750 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
751 struct zynqmp_qspi_regs *regs = priv->regs;
753 writel(GQSPI_ENABLE_ENABLE_MASK, ®s->enbr);
758 static int zynqmp_qspi_release_bus(struct udevice *dev)
760 struct udevice *bus = dev->parent;
761 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
762 struct zynqmp_qspi_regs *regs = priv->regs;
764 writel(~GQSPI_ENABLE_ENABLE_MASK, ®s->enbr);
769 static int zynqmp_qspi_exec_op(struct spi_slave *slave,
770 const struct spi_mem_op *op)
772 struct zynqmp_qspi_priv *priv = dev_get_priv(slave->dev->parent);
776 priv->tx_buf = op->data.buf.out;
777 priv->rx_buf = op->data.buf.in;
778 priv->len = op->data.nbytes;
780 zynqmp_qspi_chipselect(priv, 1);
782 /* Send opcode, addr, dummy */
783 zynqmp_qspi_genfifo_cmd(priv);
785 /* Request the transfer */
786 if (op->data.dir == SPI_MEM_DATA_IN)
787 ret = zynqmp_qspi_genfifo_fill_rx(priv);
788 else if (op->data.dir == SPI_MEM_DATA_OUT)
789 ret = zynqmp_qspi_genfifo_fill_tx(priv);
791 zynqmp_qspi_chipselect(priv, 0);
796 static const struct spi_controller_mem_ops zynqmp_qspi_mem_ops = {
797 .exec_op = zynqmp_qspi_exec_op,
800 static const struct dm_spi_ops zynqmp_qspi_ops = {
801 .claim_bus = zynqmp_qspi_claim_bus,
802 .release_bus = zynqmp_qspi_release_bus,
803 .set_speed = zynqmp_qspi_set_speed,
804 .set_mode = zynqmp_qspi_set_mode,
805 .mem_ops = &zynqmp_qspi_mem_ops,
808 static const struct udevice_id zynqmp_qspi_ids[] = {
809 { .compatible = "xlnx,zynqmp-qspi-1.0" },
810 { .compatible = "xlnx,versal-qspi-1.0" },
814 U_BOOT_DRIVER(zynqmp_qspi) = {
815 .name = "zynqmp_qspi",
817 .of_match = zynqmp_qspi_ids,
818 .ops = &zynqmp_qspi_ops,
819 .of_to_plat = zynqmp_qspi_of_to_plat,
820 .plat_auto = sizeof(struct zynqmp_qspi_plat),
821 .priv_auto = sizeof(struct zynqmp_qspi_priv),
822 .probe = zynqmp_qspi_probe,