1 // SPDX-License-Identifier: GPL-2.0+
3 * Arasan NAND Flash Controller Driver
5 * Copyright (C) 2014 - 2015 Xilinx, Inc.
11 #include <linux/errno.h>
12 #include <linux/mtd/mtd.h>
13 #include <linux/mtd/rawnand.h>
14 #include <linux/mtd/partitions.h>
15 #include <linux/mtd/nand_ecc.h>
16 #include <asm/arch/hardware.h>
17 #include <asm/arch/sys_proto.h>
/*
 * Per-chip driver state, attached via nand_set_controller_data().
 * Further members exist beyond those visible here.
 */
20 struct arasan_nand_info {
21 void __iomem *nand_base;	/* controller register base for this chip */
23 bool on_die_ecc_enabled;	/* set when Micron on-die ECC was enabled */

/* Controller register window; struct nand_regs layout declared elsewhere. */
57 #define arasan_nand_base ((struct nand_regs __iomem *)ARASAN_NAND_BASEADDR)

/* Descriptor pairing a NAND opcode (cmd1/cmd2) with its address-cycle
 * class and the pgm_reg transfer-type mask used to start it. */
59 struct arasan_nand_command_format {

/* ONFI SET/GET FEATURES address and value for Micron on-die ECC. */
66 #define ONDIE_ECC_FEATURE_ADDR 0x90
67 #define ENABLE_ONDIE_ECC 0x08

/* pgm_reg transfer-type masks — one bit per controller operation. */
69 #define ARASAN_PROG_RD_MASK 0x00000001
70 #define ARASAN_PROG_BLK_ERS_MASK 0x00000004
71 #define ARASAN_PROG_RD_ID_MASK 0x00000040
72 #define ARASAN_PROG_RD_STS_MASK 0x00000008
73 #define ARASAN_PROG_PG_PROG_MASK 0x00000010
74 #define ARASAN_PROG_RD_PARAM_PG_MASK 0x00000080
75 #define ARASAN_PROG_RST_MASK 0x00000100
76 #define ARASAN_PROG_GET_FTRS_MASK 0x00000200
77 #define ARASAN_PROG_SET_FTRS_MASK 0x00000400
78 #define ARASAN_PROG_CHNG_ROWADR_END_MASK 0x00400000

/* cmd_reg fields: ECC enable, cmd1/cmd2 bytes, page size, address cycles. */
80 #define ARASAN_NAND_CMD_ECC_ON_MASK 0x80000000
81 #define ARASAN_NAND_CMD_CMD12_MASK 0xFFFF
82 #define ARASAN_NAND_CMD_PG_SIZE_MASK 0x3800000
83 #define ARASAN_NAND_CMD_PG_SIZE_SHIFT 23
84 #define ARASAN_NAND_CMD_CMD2_SHIFT 8
85 #define ARASAN_NAND_CMD_ADDR_CYCL_MASK 0x70000000
86 #define ARASAN_NAND_CMD_ADDR_CYCL_SHIFT 28

/* memadr_reg1/2 fields: page/column address, chip select, BCH mode. */
88 #define ARASAN_NAND_MEM_ADDR1_PAGE_MASK 0xFFFF0000
89 #define ARASAN_NAND_MEM_ADDR1_COL_MASK 0xFFFF
90 #define ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT 16
91 #define ARASAN_NAND_MEM_ADDR2_PAGE_MASK 0xFF
92 #define ARASAN_NAND_MEM_ADDR2_CS_MASK 0xC0000000
93 #define ARASAN_NAND_MEM_ADDR2_CS0_MASK (0x3 << 30)
94 #define ARASAN_NAND_MEM_ADDR2_CS1_MASK (0x1 << 30)
95 #define ARASAN_NAND_MEM_ADDR2_BCH_MASK 0xE000000
96 #define ARASAN_NAND_MEM_ADDR2_BCH_SHIFT 25

/* intsts_reg / intsts_enr status bits polled throughout the driver. */
98 #define ARASAN_NAND_INT_STS_ERR_EN_MASK 0x10
99 #define ARASAN_NAND_INT_STS_MUL_BIT_ERR_MASK 0x08
100 #define ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK 0x02
101 #define ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK 0x01
102 #define ARASAN_NAND_INT_STS_XFR_CMPLT_MASK 0x04

/* pkt_reg fields: packet count and per-packet size of a transfer. */
104 #define ARASAN_NAND_PKT_REG_PKT_CNT_MASK 0xFFF000
105 #define ARASAN_NAND_PKT_REG_PKT_SIZE_MASK 0x7FF
106 #define ARASAN_NAND_PKT_REG_PKT_CNT_SHFT 12

/* Row/column address-cycle nibbles of the ONFI addr_cycles byte. */
108 #define ARASAN_NAND_ROW_ADDR_CYCL_MASK 0x0F
109 #define ARASAN_NAND_COL_ADDR_CYCL_MASK 0xF0
110 #define ARASAN_NAND_COL_ADDR_CYCL_SHIFT 4

/* ecc_reg field shifts. */
112 #define ARASAN_NAND_ECC_SIZE_SHIFT 16
113 #define ARASAN_NAND_ECC_BCH_SHIFT 27

/* Data-port packet sizes selected from the chip's ECC step size. */
115 #define ARASAN_NAND_PKTSIZE_1K 1024
116 #define ARASAN_NAND_PKTSIZE_512 512

/* Busy-wait iteration budget for all status polls. */
118 #define ARASAN_NAND_POLL_TIMEOUT 1000000
/* Sentinel returned by arasan_nand_get_addrcycle() on failure. */
119 #define ARASAN_NAND_INVALID_ADDR_CYCL 0xFF

121 #define ERR_ADDR_CYCLE -1
/* Size of the byte-at-a-time read cache used by arasan_nand_read_byte(). */
122 #define READ_BUFF_SIZE 0x4000

/* Command descriptor selected by the last cmdfunc() call. */
124 static struct arasan_nand_command_format *curr_cmd;
/*
 * Command table walked by arasan_nand_cmd_function(): maps each MTD
 * command to its second-cycle opcode (or NAND_CMD_NONE), the address
 * cycles it needs, and the pgm_reg mask that starts the transfer.
 * The NAND_CMD_NONE entry terminates the lookup.
 */
134 static struct arasan_nand_command_format arasan_nand_commands[] = {
135 {NAND_CMD_READ0, NAND_CMD_READSTART, NAND_ADDR_CYCL_BOTH,
136 ARASAN_PROG_RD_MASK},
137 {NAND_CMD_RNDOUT, NAND_CMD_RNDOUTSTART, NAND_ADDR_CYCL_COL,
138 ARASAN_PROG_RD_MASK},
139 {NAND_CMD_READID, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
140 ARASAN_PROG_RD_ID_MASK},
141 {NAND_CMD_STATUS, NAND_CMD_NONE, NAND_ADDR_CYCL_NONE,
142 ARASAN_PROG_RD_STS_MASK},
143 {NAND_CMD_SEQIN, NAND_CMD_PAGEPROG, NAND_ADDR_CYCL_BOTH,
144 ARASAN_PROG_PG_PROG_MASK},
145 {NAND_CMD_RNDIN, NAND_CMD_NONE, NAND_ADDR_CYCL_COL,
146 ARASAN_PROG_CHNG_ROWADR_END_MASK},
147 {NAND_CMD_ERASE1, NAND_CMD_ERASE2, NAND_ADDR_CYCL_ROW,
148 ARASAN_PROG_BLK_ERS_MASK},
149 {NAND_CMD_RESET, NAND_CMD_NONE, NAND_ADDR_CYCL_NONE,
150 ARASAN_PROG_RST_MASK},
151 {NAND_CMD_PARAM, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
152 ARASAN_PROG_RD_PARAM_PG_MASK},
153 {NAND_CMD_GET_FEATURES, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
154 ARASAN_PROG_GET_FTRS_MASK},
155 {NAND_CMD_SET_FEATURES, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
156 ARASAN_PROG_SET_FTRS_MASK},
157 {NAND_CMD_NONE, NAND_CMD_NONE, NAND_ADDR_CYCL_NONE, 0},
/*
 * One supported HW-ECC configuration. Further members (page size, ECC
 * bits, BCH flag/value, ECC byte count) follow in the elided lines;
 * the table initializers below fix the column order as:
 *   {pagesize, ecc_codeword_size, eccbits, bch, bchval, eccaddr-ish,
 *    eccsize} — exact member names for the last two are not visible
 *    here; see the struct definition. (NOTE(review): confirm.)
 */
160 struct arasan_ecc_matrix {
162 u32 ecc_codeword_size;

/*
 * Supported ECC configurations, grouped by page size (512B codewords
 * plus one 1KiB/24-bit BCH entry per page size). arasan_nand_ecc_init()
 * picks the first row matching the chip's writesize whose codeword size
 * and ECC strength cover the ONFI-advertised requirements.
 */
170 static const struct arasan_ecc_matrix ecc_matrix[] = {
171 {512, 512, 1, 0, 0, 0x20D, 0x3},
172 {512, 512, 4, 1, 3, 0x209, 0x7},
173 {512, 512, 8, 1, 2, 0x203, 0xD},
177 {2048, 512, 1, 0, 0, 0x834, 0xC},
178 {2048, 512, 4, 1, 3, 0x826, 0x1A},
179 {2048, 512, 8, 1, 2, 0x80c, 0x34},
180 {2048, 512, 12, 1, 1, 0x822, 0x4E},
181 {2048, 512, 16, 1, 0, 0x808, 0x68},
182 {2048, 1024, 24, 1, 4, 0x81c, 0x54},
186 {4096, 512, 1, 0, 0, 0x1068, 0x18},
187 {4096, 512, 4, 1, 3, 0x104c, 0x34},
188 {4096, 512, 8, 1, 2, 0x1018, 0x68},
189 {4096, 512, 12, 1, 1, 0x1044, 0x9C},
190 {4096, 512, 16, 1, 0, 0x1010, 0xD0},
191 {4096, 1024, 24, 1, 4, 0x1038, 0xA8},
195 {8192, 512, 1, 0, 0, 0x20d0, 0x30},
196 {8192, 512, 4, 1, 3, 0x2098, 0x68},
197 {8192, 512, 8, 1, 2, 0x2030, 0xD0},
198 {8192, 512, 12, 1, 1, 0x2088, 0x138},
199 {8192, 512, 16, 1, 0, 0x2020, 0x1A0},
200 {8192, 1024, 24, 1, 4, 0x2070, 0x150},
204 {16384, 512, 1, 0, 0, 0x4460, 0x60},
205 {16384, 512, 4, 1, 3, 0x43f0, 0xD0},
206 {16384, 512, 8, 1, 2, 0x4320, 0x1A0},
207 {16384, 512, 12, 1, 1, 0x4250, 0x270},
208 {16384, 512, 16, 1, 0, 0x4180, 0x340},
209 {16384, 1024, 24, 1, 4, 0x4220, 0x2A0}
/*
 * OOB layout for 64-byte-OOB chips running Micron on-die ECC: the
 * device stores 8 ECC bytes per 16-byte OOB quarter, leaving four
 * 4-byte free regions for the upper layers.
 */
212 static struct nand_ecclayout ondie_nand_oob_64 = {
216 8, 9, 10, 11, 12, 13, 14, 15,
217 24, 25, 26, 27, 28, 29, 30, 31,
218 40, 41, 42, 43, 44, 45, 46, 47,
219 56, 57, 58, 59, 60, 61, 62, 63
223 { .offset = 4, .length = 4 },
224 { .offset = 20, .length = 4 },
225 { .offset = 36, .length = 4 },
226 { .offset = 52, .length = 4 }
231 * bbt descriptors for chips with on-die ECC and
232 * chips with 64-byte OOB
/* Signatures identifying the primary and mirror bad-block tables. */
234 static u8 bbt_pattern[] = {'B', 'b', 't', '0' };
235 static u8 mirror_pattern[] = {'1', 't', 'b', 'B' };

/* Primary BBT: stored per chip in the last blocks, versioned, 2-bit
 * entries; offs/len/maxblocks fields are in the elided lines. */
237 static struct nand_bbt_descr bbt_main_descr = {
238 .options = NAND_BBT_LASTBLOCK | NAND_BBT_CREATE | NAND_BBT_WRITE |
239 NAND_BBT_2BIT | NAND_BBT_VERSION | NAND_BBT_PERCHIP,
244 .pattern = bbt_pattern

/* Mirror BBT: same options as the primary, different pattern. */
247 static struct nand_bbt_descr bbt_mirror_descr = {
248 .options = NAND_BBT_LASTBLOCK | NAND_BBT_CREATE | NAND_BBT_WRITE |
249 NAND_BBT_2BIT | NAND_BBT_VERSION | NAND_BBT_PERCHIP,
254 .pattern = mirror_pattern

/* Cache filled by read_buf and drained one byte per read_byte() call. */
257 static u8 buf_data[READ_BUFF_SIZE];
258 static u32 buf_index;

/* ECC layout built at runtime by arasan_nand_ecc_init(). */
260 static struct nand_ecclayout nand_oob;

262 static struct nand_chip nand_chip[CONFIG_SYS_MAX_NAND_DEVICE];
/*
 * Select the target chip by programming the CS field of memadr_reg2:
 * chip 0 clears both CS bits, chip 1 sets the CS1 encoding. Other
 * values (including -1/deselect) leave the register untouched.
 */
264 static void arasan_nand_select_chip(struct mtd_info *mtd, int chip)
268 reg_val = readl(&arasan_nand_base->memadr_reg2);
270 reg_val &= ~ARASAN_NAND_MEM_ADDR2_CS0_MASK;
271 writel(reg_val, &arasan_nand_base->memadr_reg2);
272 } else if (chip == 1) {
273 reg_val |= ARASAN_NAND_MEM_ADDR2_CS1_MASK;
274 writel(reg_val, &arasan_nand_base->memadr_reg2);
/* Turn on the controller's hardware ECC engine for the next command
 * by setting the ECC bit in cmd_reg (read-modify-write). */
278 static void arasan_nand_enable_ecc(void)
282 reg_val = readl(&arasan_nand_base->cmd_reg);
283 reg_val |= ARASAN_NAND_CMD_ECC_ON_MASK;
285 writel(reg_val, &arasan_nand_base->cmd_reg);
/*
 * Return the number of address cycles the current command needs,
 * derived from the ONFI parameter page's addr_cycles byte: low nibble
 * is the row-cycle count, high nibble the column-cycle count. Returns
 * ARASAN_NAND_INVALID_ADDR_CYCL (0xFF) for an unknown cycle class.
 */
288 static u8 arasan_nand_get_addrcycle(struct mtd_info *mtd)
291 struct nand_chip *chip = mtd_to_nand(mtd);
293 switch (curr_cmd->addr_cycles) {
294 case NAND_ADDR_CYCL_NONE:
297 case NAND_ADDR_CYCL_ONE:
300 case NAND_ADDR_CYCL_ROW:
301 addrcycles = chip->onfi_params.addr_cycles &
302 ARASAN_NAND_ROW_ADDR_CYCL_MASK;
304 case NAND_ADDR_CYCL_COL:
305 addrcycles = (chip->onfi_params.addr_cycles &
306 ARASAN_NAND_COL_ADDR_CYCL_MASK) >>
307 ARASAN_NAND_COL_ADDR_CYCL_SHIFT;
/* BOTH: row cycles plus column cycles. */
309 case NAND_ADDR_CYCL_BOTH:
310 addrcycles = chip->onfi_params.addr_cycles &
311 ARASAN_NAND_ROW_ADDR_CYCL_MASK;
312 addrcycles += (chip->onfi_params.addr_cycles &
313 ARASAN_NAND_COL_ADDR_CYCL_MASK) >>
314 ARASAN_NAND_COL_ADDR_CYCL_SHIFT;
317 addrcycles = ARASAN_NAND_INVALID_ADDR_CYCL;
/*
 * Core page-read path: program the packet register, optionally enable
 * controller ECC (with a RNDOUT spare-area command for reading the
 * stored ECC bytes), start the transfer, then drain the data FIFO one
 * packet at a time whenever BUF_RD_RDY asserts. Finally wait for
 * XFR_CMPLT and account single/multi-bit ECC errors.
 * Returns 0 on success, ERR_ADDR_CYCLE or a timeout/ECC error code
 * (set in elided lines) otherwise.
 */
323 static int arasan_nand_read_page(struct mtd_info *mtd, u8 *buf, u32 size)
325 struct nand_chip *chip = mtd_to_nand(mtd);
326 struct arasan_nand_info *nand = nand_get_controller_data(chip);
327 u32 reg_val, i, pktsize, pktnum;
328 u32 *bufptr = (u32 *)buf;
/* Packet size follows the chip's ECC step size (512B or 1KiB). */
333 if (chip->ecc_step_ds >= ARASAN_NAND_PKTSIZE_1K)
334 pktsize = ARASAN_NAND_PKTSIZE_1K;
336 pktsize = ARASAN_NAND_PKTSIZE_512;
/* One extra packet when size is not a multiple of pktsize. */
339 pktnum = size/pktsize + 1;
341 pktnum = size/pktsize;
/* Arm ECC-error interrupts before starting the transfer. */
343 reg_val = readl(&arasan_nand_base->intsts_enr);
344 reg_val |= ARASAN_NAND_INT_STS_ERR_EN_MASK |
345 ARASAN_NAND_INT_STS_MUL_BIT_ERR_MASK;
346 writel(reg_val, &arasan_nand_base->intsts_enr);
348 reg_val = readl(&arasan_nand_base->pkt_reg);
349 reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
350 ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
351 reg_val |= (pktnum << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) |
353 writel(reg_val, &arasan_nand_base->pkt_reg);
/* Controller ECC path: the ECC engine fetches spare bytes via a
 * RNDOUT command programmed into ecc_sprcmd_reg. */
355 if (!nand->on_die_ecc_enabled) {
356 arasan_nand_enable_ecc();
357 addr_cycles = arasan_nand_get_addrcycle(mtd);
358 if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
359 return ERR_ADDR_CYCLE;
361 writel((NAND_CMD_RNDOUTSTART << ARASAN_NAND_CMD_CMD2_SHIFT) |
362 NAND_CMD_RNDOUT | (addr_cycles <<
363 ARASAN_NAND_CMD_ADDR_CYCL_SHIFT),
364 &arasan_nand_base->ecc_sprcmd_reg);
/* Kick off the transfer programmed by the preceding cmdfunc(). */
366 writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);
368 while (rdcount < pktnum) {
369 timeout = ARASAN_NAND_POLL_TIMEOUT;
370 while (!(readl(&arasan_nand_base->intsts_reg) &
371 ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK) && timeout) {
376 puts("arasan_read_page: timedout:Buff RDY\n");
/* Last packet: switch the enabled interrupt to XFR_CMPLT;
 * otherwise re-arm BUF_RD_RDY for the next packet. */
382 if (pktnum == rdcount) {
383 reg_val = readl(&arasan_nand_base->intsts_enr);
384 reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
385 writel(reg_val, &arasan_nand_base->intsts_enr);
387 reg_val = readl(&arasan_nand_base->intsts_enr);
388 writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
389 &arasan_nand_base->intsts_enr);
/* Acknowledge the ready status before draining the FIFO. */
391 reg_val = readl(&arasan_nand_base->intsts_reg);
392 writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
393 &arasan_nand_base->intsts_reg);
395 for (i = 0; i < pktsize/4; i++)
396 bufptr[i] = readl(&arasan_nand_base->buf_dataport);
401 if (rdcount >= pktnum)
404 writel(ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
405 &arasan_nand_base->intsts_enr);
/* Wait for the controller to signal transfer completion. */
408 timeout = ARASAN_NAND_POLL_TIMEOUT;
410 while (!(readl(&arasan_nand_base->intsts_reg) &
411 ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
416 puts("arasan rd_page timedout:Xfer CMPLT\n");
/* Ack XFR_CMPLT in both enable and status registers. */
420 reg_val = readl(&arasan_nand_base->intsts_enr);
421 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
422 &arasan_nand_base->intsts_enr);
423 reg_val = readl(&arasan_nand_base->intsts_reg);
424 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
425 &arasan_nand_base->intsts_reg);
/* Controller-ECC error reporting. NOTE(review): the printed labels
 * look swapped relative to the bits checked (MUL_BIT_ERR prints
 * "sbiterror", ERR_EN prints "multibiterror") — confirm against the
 * controller databook before relying on the messages. */
427 if (!nand->on_die_ecc_enabled) {
428 if (readl(&arasan_nand_base->intsts_reg) &
429 ARASAN_NAND_INT_STS_MUL_BIT_ERR_MASK) {
430 printf("arasan rd_page:sbiterror\n");
434 if (readl(&arasan_nand_base->intsts_reg) &
435 ARASAN_NAND_INT_STS_ERR_EN_MASK) {
436 mtd->ecc_stats.failed++;
437 printf("arasan rd_page:multibiterror\n");
/*
 * ecc.read_page hook: read the data area through the HW-ECC path and,
 * when the caller asked for it (elided condition), fetch the OOB too.
 */
445 static int arasan_nand_read_page_hwecc(struct mtd_info *mtd,
446 struct nand_chip *chip, u8 *buf, int oob_required, int page)
450 status = arasan_nand_read_page(mtd, buf, (mtd->writesize));
453 chip->ecc.read_oob(mtd, chip, page);
/*
 * Feed len bytes into the write FIFO, honouring source alignment:
 * byte/halfword accesses are used until buf reaches 4-byte alignment,
 * the bulk is pushed as 32-bit words, and any tail is written with
 * narrower accesses (loop bounds are in the elided lines).
 */
458 static void arasan_nand_fill_tx(const u8 *buf, int len)
460 u32 __iomem *nand = &arasan_nand_base->buf_dataport;
/* Align the source pointer before doing word-wide FIFO writes. */
462 if (((unsigned long)buf & 0x3) != 0) {
463 if (((unsigned long)buf & 0x1) != 0) {
471 if (((unsigned long)buf & 0x3) != 0) {
473 writew(*(u16 *)buf, nand);
481 writel(*(u32 *)buf, nand);
488 writew(*(u16 *)buf, nand);
/*
 * ecc.write_page hook: mirror of the read path. Program the packet
 * register, enable controller ECC with a RNDIN spare command (so the
 * engine can append ECC bytes), start the SEQIN/PAGEPROG transfer set
 * up by cmdfunc(), and feed the FIFO one packet per BUF_WR_RDY.
 * Writes OOB afterwards when oob_required (elided condition).
 */
498 static int arasan_nand_write_page_hwecc(struct mtd_info *mtd,
499 struct nand_chip *chip, const u8 *buf, int oob_required,
502 u32 reg_val, i, pktsize, pktnum;
503 const u32 *bufptr = (const u32 *)buf;
504 u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
505 u32 size = mtd->writesize;
507 u8 column_addr_cycles;
508 struct arasan_nand_info *nand = nand_get_controller_data(chip);
/* Packet size tracks the ECC step size, as in the read path. */
510 if (chip->ecc_step_ds >= ARASAN_NAND_PKTSIZE_1K)
511 pktsize = ARASAN_NAND_PKTSIZE_1K;
513 pktsize = ARASAN_NAND_PKTSIZE_512;
516 pktnum = size/pktsize + 1;
518 pktnum = size/pktsize;
520 reg_val = readl(&arasan_nand_base->pkt_reg);
521 reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
522 ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
523 reg_val |= (pktnum << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | pktsize;
524 writel(reg_val, &arasan_nand_base->pkt_reg);
/* Controller ECC: RNDIN spare command lets the engine place ECC. */
526 if (!nand->on_die_ecc_enabled) {
527 arasan_nand_enable_ecc();
528 column_addr_cycles = (chip->onfi_params.addr_cycles &
529 ARASAN_NAND_COL_ADDR_CYCL_MASK) >>
530 ARASAN_NAND_COL_ADDR_CYCL_SHIFT;
531 writel((NAND_CMD_RNDIN | (column_addr_cycles << 28)),
532 &arasan_nand_base->ecc_sprcmd_reg);
534 writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);
/* Push pktnum packets, waiting for BUF_WR_RDY before each. */
536 while (rdcount < pktnum) {
537 timeout = ARASAN_NAND_POLL_TIMEOUT;
538 while (!(readl(&arasan_nand_base->intsts_reg) &
539 ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK) && timeout) {
545 puts("arasan_write_page: timedout:Buff RDY\n");
/* Last packet: arm XFR_CMPLT instead of BUF_WR_RDY. */
551 if (pktnum == rdcount) {
552 reg_val = readl(&arasan_nand_base->intsts_enr);
553 reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
554 writel(reg_val, &arasan_nand_base->intsts_enr);
556 reg_val = readl(&arasan_nand_base->intsts_enr);
557 writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
558 &arasan_nand_base->intsts_enr);
561 reg_val = readl(&arasan_nand_base->intsts_reg);
562 writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
563 &arasan_nand_base->intsts_reg);
565 for (i = 0; i < pktsize/4; i++)
566 writel(bufptr[i], &arasan_nand_base->buf_dataport);
570 if (rdcount >= pktnum)
573 writel(ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
574 &arasan_nand_base->intsts_enr);
/* Wait for and acknowledge transfer completion. */
577 timeout = ARASAN_NAND_POLL_TIMEOUT;
579 while (!(readl(&arasan_nand_base->intsts_reg) &
580 ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
585 puts("arasan write_page timedout:Xfer CMPLT\n");
589 reg_val = readl(&arasan_nand_base->intsts_enr);
590 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
591 &arasan_nand_base->intsts_enr);
592 reg_val = readl(&arasan_nand_base->intsts_reg);
593 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
594 &arasan_nand_base->intsts_reg);
/* Program the OOB for the page cached at SEQIN time. */
597 chip->ecc.write_oob(mtd, chip, nand->page);
/* ecc.read_oob hook: issue READOOB for the page and pull the whole
 * spare area into chip->oob_poi via the generic read_buf path. */
602 static int arasan_nand_read_oob(struct mtd_info *mtd, struct nand_chip *chip,
605 chip->cmdfunc(mtd, NAND_CMD_READOOB, 0, page);
606 chip->read_buf(mtd, chip->oob_poi, (mtd->oobsize));
/* ecc.write_oob hook: SEQIN at column mtd->writesize positions the
 * program pointer at the spare area, then write oob_poi. The final
 * status/return handling is in the elided lines. */
611 static int arasan_nand_write_oob(struct mtd_info *mtd, struct nand_chip *chip,
615 const u8 *buf = chip->oob_poi;
617 chip->cmdfunc(mtd, NAND_CMD_SEQIN, mtd->writesize, page);
618 chip->write_buf(mtd, buf, mtd->oobsize);
/*
 * Issue NAND_CMD_RESET through the controller: program cmd_reg with
 * the opcode pair, start the transfer, poll for XFR_CMPLT with a
 * bounded busy-wait, then acknowledge the status bit.
 */
623 static int arasan_nand_reset(struct arasan_nand_command_format *curr_cmd)
625 u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
628 writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
629 &arasan_nand_base->intsts_enr);
630 cmd_reg = readl(&arasan_nand_base->cmd_reg);
/* Replace only the command-byte field; keep other cmd_reg settings. */
631 cmd_reg &= ~ARASAN_NAND_CMD_CMD12_MASK;
633 cmd_reg |= curr_cmd->cmd1 |
634 (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
635 writel(cmd_reg, &arasan_nand_base->cmd_reg);
636 writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);
638 while (!(readl(&arasan_nand_base->intsts_reg) &
639 ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
644 printf("ERROR:%s timedout\n", __func__);
/* Ack completion in both the enable and status registers. */
648 writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
649 &arasan_nand_base->intsts_enr);
651 writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
652 &arasan_nand_base->intsts_reg);
/* Map mtd->writesize to the controller's page-size encoding for
 * cmd_reg (cases elided); pages above 16K are rejected. */
657 static u8 arasan_nand_page(struct mtd_info *mtd)
661 switch (mtd->writesize) {
681 printf("%s:Pagesize>16K\n", __func__);
/*
 * Set up a write-direction command (SEQIN / SET_FEATURES): program the
 * opcode pair, page-size field (for SEQIN), address-cycle count, and
 * the page/column address registers. The transfer itself is started
 * later by write_buf/write_page. Returns ERR_ADDR_CYCLE on a bad
 * address-cycle lookup.
 */
688 static int arasan_nand_send_wrcmd(struct arasan_nand_command_format *curr_cmd,
689 int column, int page_addr, struct mtd_info *mtd)
692 u8 page_val, addr_cycles;
694 writel(ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
695 &arasan_nand_base->intsts_enr);
696 reg_val = readl(&arasan_nand_base->cmd_reg);
697 reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
698 reg_val |= curr_cmd->cmd1 |
699 (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
/* Page program needs the page-size encoding in cmd_reg. */
700 if (curr_cmd->cmd1 == NAND_CMD_SEQIN) {
701 reg_val &= ~ARASAN_NAND_CMD_PG_SIZE_MASK;
702 page_val = arasan_nand_page(mtd);
703 reg_val |= (page_val << ARASAN_NAND_CMD_PG_SIZE_SHIFT);
706 reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;
707 addr_cycles = arasan_nand_get_addrcycle(mtd);
709 if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
710 return ERR_ADDR_CYCLE;
712 reg_val |= (addr_cycles <<
713 ARASAN_NAND_CMD_ADDR_CYCL_SHIFT);
714 writel(reg_val, &arasan_nand_base->cmd_reg);
/* memadr_reg1 holds low 16 page bits + 16 column bits;
 * memadr_reg2 holds the upper page-address byte. */
719 page = (page_addr << ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT) &
720 ARASAN_NAND_MEM_ADDR1_PAGE_MASK;
721 column &= ARASAN_NAND_MEM_ADDR1_COL_MASK;
722 writel(page|column, &arasan_nand_base->memadr_reg1);
724 reg_val = readl(&arasan_nand_base->memadr_reg2);
725 reg_val &= ~ARASAN_NAND_MEM_ADDR2_PAGE_MASK;
726 reg_val |= (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT);
727 writel(reg_val, &arasan_nand_base->memadr_reg2);
/*
 * write_buf hook: send len bytes as a single packet. Programs pkt_reg
 * for one packet of size len, starts the transfer, waits for
 * BUF_WR_RDY, streams the data with arasan_nand_fill_tx(), then waits
 * for and acknowledges XFR_CMPLT. All waits are bounded busy-polls.
 */
732 static void arasan_nand_write_buf(struct mtd_info *mtd, const u8 *buf, int len)
735 u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
737 reg_val = readl(&arasan_nand_base->pkt_reg);
738 reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
739 ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
741 reg_val |= (1 << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | len;
742 writel(reg_val, &arasan_nand_base->pkt_reg);
743 writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);
745 while (!(readl(&arasan_nand_base->intsts_reg) &
746 ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK) && timeout) {
752 puts("ERROR:arasan_nand_write_buf timedout:Buff RDY\n");
/* Arm XFR_CMPLT and ack BUF_WR_RDY before filling the FIFO. */
754 reg_val = readl(&arasan_nand_base->intsts_enr);
755 reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
756 writel(reg_val, &arasan_nand_base->intsts_enr);
757 writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
758 &arasan_nand_base->intsts_enr);
759 reg_val = readl(&arasan_nand_base->intsts_reg);
760 writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
761 &arasan_nand_base->intsts_reg);
763 arasan_nand_fill_tx(buf, len);
765 timeout = ARASAN_NAND_POLL_TIMEOUT;
766 while (!(readl(&arasan_nand_base->intsts_reg) &
767 ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
772 puts("ERROR:arasan_nand_write_buf timedout:Xfer CMPLT\n");
774 writel(readl(&arasan_nand_base->intsts_enr) |
775 ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
776 &arasan_nand_base->intsts_enr);
777 writel(readl(&arasan_nand_base->intsts_reg) |
778 ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
779 &arasan_nand_base->intsts_reg);
/*
 * Block erase (ERASE1/ERASE2): program opcodes and row address cycles,
 * split page_addr across memadr_reg1 (low bits) and memadr_reg2 (high
 * byte), start the transfer and poll for completion.
 * The caller-supplied column is overwritten — erase is row-addressed.
 */
782 static int arasan_nand_erase(struct arasan_nand_command_format *curr_cmd,
783 int column, int page_addr, struct mtd_info *mtd)
786 u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
789 writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
790 &arasan_nand_base->intsts_enr);
791 reg_val = readl(&arasan_nand_base->cmd_reg);
792 reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
793 reg_val |= curr_cmd->cmd1 |
794 (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
795 row_addr_cycles = arasan_nand_get_addrcycle(mtd);
797 if (row_addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
798 return ERR_ADDR_CYCLE;
800 reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;
801 reg_val |= (row_addr_cycles <<
802 ARASAN_NAND_CMD_ADDR_CYCL_SHIFT);
804 writel(reg_val, &arasan_nand_base->cmd_reg);
/* For erase, the row address itself is split into the two 16-bit
 * halves of memadr_reg1 (no column address is involved). */
806 page = (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT) &
807 ARASAN_NAND_MEM_ADDR1_COL_MASK;
808 column = page_addr & ARASAN_NAND_MEM_ADDR1_COL_MASK;
809 writel(column | (page << ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT),
810 &arasan_nand_base->memadr_reg1);
812 reg_val = readl(&arasan_nand_base->memadr_reg2);
813 reg_val &= ~ARASAN_NAND_MEM_ADDR2_PAGE_MASK;
814 reg_val |= (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT);
815 writel(reg_val, &arasan_nand_base->memadr_reg2);
816 writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);
818 while (!(readl(&arasan_nand_base->intsts_reg) &
819 ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
824 printf("ERROR:%s timedout:Xfer CMPLT\n", __func__);
828 reg_val = readl(&arasan_nand_base->intsts_enr);
829 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
830 &arasan_nand_base->intsts_enr);
831 reg_val = readl(&arasan_nand_base->intsts_reg);
832 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
833 &arasan_nand_base->intsts_reg);
/*
 * Issue NAND_CMD_STATUS: a one-byte, one-packet read. The status byte
 * is later fetched from flash_sts_reg by arasan_nand_read_byte().
 */
838 static int arasan_nand_read_status(struct arasan_nand_command_format *curr_cmd,
839 int column, int page_addr, struct mtd_info *mtd)
842 u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
845 writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
846 &arasan_nand_base->intsts_enr);
847 reg_val = readl(&arasan_nand_base->cmd_reg);
848 reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
849 reg_val |= curr_cmd->cmd1 |
850 (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
851 addr_cycles = arasan_nand_get_addrcycle(mtd);
853 if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
854 return ERR_ADDR_CYCLE;
856 reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;
857 reg_val |= (addr_cycles <<
858 ARASAN_NAND_CMD_ADDR_CYCL_SHIFT);
860 writel(reg_val, &arasan_nand_base->cmd_reg);
/* One packet of one byte — the chip's status register. */
862 reg_val = readl(&arasan_nand_base->pkt_reg);
863 reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
864 ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
865 reg_val |= (1 << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | 1;
866 writel(reg_val, &arasan_nand_base->pkt_reg);
868 writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);
869 while (!(readl(&arasan_nand_base->intsts_reg) &
870 ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
876 printf("ERROR:%s: timedout:Xfer CMPLT\n", __func__);
880 reg_val = readl(&arasan_nand_base->intsts_enr);
881 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
882 &arasan_nand_base->intsts_enr);
883 reg_val = readl(&arasan_nand_base->intsts_reg);
884 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
885 &arasan_nand_base->intsts_reg);
/*
 * Set up a read-direction command (READ0 / READID / PARAM / RNDOUT /
 * GET_FEATURES): opcode pair, page size for page reads, address
 * cycles, and the page/column address registers. ECC is explicitly
 * disabled here; read_page re-enables it when needed. The transfer is
 * started later by read_buf/read_page.
 */
890 static int arasan_nand_send_rdcmd(struct arasan_nand_command_format *curr_cmd,
891 int column, int page_addr, struct mtd_info *mtd)
893 u32 reg_val, addr_cycles, page;
896 reg_val = readl(&arasan_nand_base->intsts_enr);
897 writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
898 &arasan_nand_base->intsts_enr);
900 reg_val = readl(&arasan_nand_base->cmd_reg);
901 reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
902 reg_val |= curr_cmd->cmd1 |
903 (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
/* Page reads need the page-size encoding in cmd_reg. */
905 if (curr_cmd->cmd1 == NAND_CMD_RNDOUT ||
906 curr_cmd->cmd1 == NAND_CMD_READ0) {
907 reg_val &= ~ARASAN_NAND_CMD_PG_SIZE_MASK;
908 page_val = arasan_nand_page(mtd);
909 reg_val |= (page_val << ARASAN_NAND_CMD_PG_SIZE_SHIFT);
/* ECC off by default for plain reads (ID/PARAM/OOB etc.). */
912 reg_val &= ~ARASAN_NAND_CMD_ECC_ON_MASK;
914 reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;
916 addr_cycles = arasan_nand_get_addrcycle(mtd);
918 if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
919 return ERR_ADDR_CYCLE;
/* 28 == ARASAN_NAND_CMD_ADDR_CYCL_SHIFT. */
921 reg_val |= (addr_cycles << 28);
922 writel(reg_val, &arasan_nand_base->cmd_reg);
927 page = (page_addr << ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT) &
928 ARASAN_NAND_MEM_ADDR1_PAGE_MASK;
929 column &= ARASAN_NAND_MEM_ADDR1_COL_MASK;
930 writel(page | column, &arasan_nand_base->memadr_reg1);
932 reg_val = readl(&arasan_nand_base->memadr_reg2);
933 reg_val &= ~ARASAN_NAND_MEM_ADDR2_PAGE_MASK;
934 reg_val |= (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT);
935 writel(reg_val, &arasan_nand_base->memadr_reg2);
/*
 * read_buf hook: pull size bytes as a single packet. Programs pkt_reg,
 * starts the transfer, waits for BUF_RD_RDY, drains the FIFO word by
 * word (with a tail read for the non-multiple-of-4 remainder, guarded
 * by an elided condition), then waits for and acks XFR_CMPLT.
 */
942 static void arasan_nand_read_buf(struct mtd_info *mtd, u8 *buf, int size)
945 u32 *bufptr = (u32 *)buf;
946 u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
948 reg_val = readl(&arasan_nand_base->pkt_reg);
949 reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
950 ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
951 reg_val |= (1 << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | size;
952 writel(reg_val, &arasan_nand_base->pkt_reg);
954 writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);
956 while (!(readl(&arasan_nand_base->intsts_reg) &
957 ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK) && timeout) {
963 puts("ERROR:arasan_nand_read_buf timedout:Buff RDY\n");
/* Arm XFR_CMPLT and ack BUF_RD_RDY before draining the FIFO. */
965 reg_val = readl(&arasan_nand_base->intsts_enr);
966 reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
967 writel(reg_val, &arasan_nand_base->intsts_enr);
969 writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
970 &arasan_nand_base->intsts_enr);
971 reg_val = readl(&arasan_nand_base->intsts_reg);
972 writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
973 &arasan_nand_base->intsts_reg);
976 for (i = 0; i < size / 4; i++)
977 bufptr[i] = readl(&arasan_nand_base->buf_dataport);
/* Tail word for sizes not divisible by 4 (condition elided). */
980 bufptr[i] = readl(&arasan_nand_base->buf_dataport);
982 timeout = ARASAN_NAND_POLL_TIMEOUT;
984 while (!(readl(&arasan_nand_base->intsts_reg) &
985 ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
991 puts("ERROR:arasan_nand_read_buf timedout:Xfer CMPLT\n");
993 reg_val = readl(&arasan_nand_base->intsts_enr);
994 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
995 &arasan_nand_base->intsts_enr);
996 reg_val = readl(&arasan_nand_base->intsts_reg);
997 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
998 &arasan_nand_base->intsts_reg);
/*
 * read_byte hook: on the first call after a command, determine how
 * many bytes that command returns (ID length, ONFI parameter page,
 * extended parameter page, feature bytes), bulk-read them into
 * buf_data via read_buf, then hand back one byte per call from the
 * cache. NAND_CMD_STATUS short-circuits to the flash status register.
 */
1001 static u8 arasan_nand_read_byte(struct mtd_info *mtd)
1003 struct nand_chip *chip = mtd_to_nand(mtd);
1006 struct nand_onfi_params *p;
1008 if (buf_index == 0) {
1009 p = &chip->onfi_params;
1010 if (curr_cmd->cmd1 == NAND_CMD_READID)
1012 else if (curr_cmd->cmd1 == NAND_CMD_PARAM)
1013 size = sizeof(struct nand_onfi_params);
/* RNDOUT here serves the ONFI extended parameter page. */
1014 else if (curr_cmd->cmd1 == NAND_CMD_RNDOUT)
1015 size = le16_to_cpu(p->ext_param_page_length) * 16;
1016 else if (curr_cmd->cmd1 == NAND_CMD_GET_FEATURES)
1018 else if (curr_cmd->cmd1 == NAND_CMD_STATUS)
1019 return readb(&arasan_nand_base->flash_sts_reg);
1022 chip->read_buf(mtd, &buf_data[0], size);
/* Serve the next cached byte; index advance is in elided lines. */
1025 val = *(&buf_data[0] + buf_index);
/*
 * cmdfunc hook: translate an MTD command into the matching entry of
 * arasan_nand_commands[] and dispatch to the per-command setup
 * routine. READOOB on large-page chips is rewritten as READ0 at
 * column writesize. SEQIN caches page_addr for the later OOB write.
 * Errors from the helpers are reported via the printf at the end
 * (guard condition elided).
 */
1031 static void arasan_nand_cmd_function(struct mtd_info *mtd, unsigned int command,
1032 int column, int page_addr)
1035 struct nand_chip *chip = mtd_to_nand(mtd);
1036 struct arasan_nand_info *nand = nand_get_controller_data(chip);
1039 writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
1040 &arasan_nand_base->intsts_enr);
/* Large-page chips read OOB as a READ0 offset past the data area. */
1042 if ((command == NAND_CMD_READOOB) &&
1043 (mtd->writesize > 512)) {
1044 column += mtd->writesize;
1045 command = NAND_CMD_READ0;
1048 /* Get the command format */
1049 for (i = 0; (arasan_nand_commands[i].cmd1 != NAND_CMD_NONE ||
1050 arasan_nand_commands[i].cmd2 != NAND_CMD_NONE); i++) {
1051 if (command == arasan_nand_commands[i].cmd1) {
1052 curr_cmd = &arasan_nand_commands[i];
1057 if (curr_cmd == NULL) {
1058 printf("Unsupported Command; 0x%x\n", command);
1062 if (curr_cmd->cmd1 == NAND_CMD_RESET)
1063 ret = arasan_nand_reset(curr_cmd);
1065 if ((curr_cmd->cmd1 == NAND_CMD_READID) ||
1066 (curr_cmd->cmd1 == NAND_CMD_PARAM) ||
1067 (curr_cmd->cmd1 == NAND_CMD_RNDOUT) ||
1068 (curr_cmd->cmd1 == NAND_CMD_GET_FEATURES) ||
1069 (curr_cmd->cmd1 == NAND_CMD_READ0))
1070 ret = arasan_nand_send_rdcmd(curr_cmd, column, page_addr, mtd);
1072 if ((curr_cmd->cmd1 == NAND_CMD_SET_FEATURES) ||
1073 (curr_cmd->cmd1 == NAND_CMD_SEQIN)) {
1074 nand->page = page_addr;
1075 ret = arasan_nand_send_wrcmd(curr_cmd, column, page_addr, mtd);
1078 if (curr_cmd->cmd1 == NAND_CMD_ERASE1)
1079 ret = arasan_nand_erase(curr_cmd, column, page_addr, mtd);
1081 if (curr_cmd->cmd1 == NAND_CMD_STATUS)
1082 ret = arasan_nand_read_status(curr_cmd, column, page_addr, mtd);
1085 printf("ERROR:%s:command:0x%x\n", __func__, curr_cmd->cmd1);
/*
 * Probe for Micron chips with on-die ECC: reset, read the ID, and for
 * the known Micron device IDs issue SET FEATURES at 0x90 to enable
 * on-die ECC, reading the feature back to confirm. On success set
 * nand->on_die_ecc_enabled and install the flash-based BBT
 * descriptors (the descriptor hookup's guard condition is elided).
 */
1088 static void arasan_check_ondie(struct mtd_info *mtd)
1090 struct nand_chip *nand_chip = mtd_to_nand(mtd);
1091 struct arasan_nand_info *nand = nand_get_controller_data(nand_chip);
1094 u8 set_feature[4] = {ENABLE_ONDIE_ECC, 0x00, 0x00, 0x00};
1097 /* Send the command for reading device ID */
1098 nand_chip->cmdfunc(mtd, NAND_CMD_RESET, -1, -1);
1099 nand_chip->cmdfunc(mtd, NAND_CMD_READID, 0, -1);
1101 /* Read manufacturer and device IDs */
1102 maf_id = nand_chip->read_byte(mtd);
1103 dev_id = nand_chip->read_byte(mtd);
/* Micron device-ID allowlist for parts with on-die ECC support. */
1105 if ((maf_id == NAND_MFR_MICRON) &&
1106 ((dev_id == 0xf1) || (dev_id == 0xa1) || (dev_id == 0xb1) ||
1107 (dev_id == 0xaa) || (dev_id == 0xba) || (dev_id == 0xda) ||
1108 (dev_id == 0xca) || (dev_id == 0xac) || (dev_id == 0xbc) ||
1109 (dev_id == 0xdc) || (dev_id == 0xcc) || (dev_id == 0xa3) ||
1110 (dev_id == 0xb3) || (dev_id == 0xd3) || (dev_id == 0xc3))) {
1111 nand_chip->cmdfunc(mtd, NAND_CMD_SET_FEATURES,
1112 ONDIE_ECC_FEATURE_ADDR, -1);
1114 nand_chip->write_buf(mtd, &set_feature[0], 4);
/* Read the feature back to verify the ECC enable stuck. */
1115 nand_chip->cmdfunc(mtd, NAND_CMD_GET_FEATURES,
1116 ONDIE_ECC_FEATURE_ADDR, -1);
1118 for (i = 0; i < 4; i++)
1119 get_feature[i] = nand_chip->read_byte(mtd);
1121 if (get_feature[0] & ENABLE_ONDIE_ECC)
1122 nand->on_die_ecc_enabled = true;
1124 printf("%s: Unable to enable OnDie ECC\n", __func__);
1126 /* Use the BBT pattern descriptors */
1127 nand_chip->bbt_td = &bbt_main_descr;
1128 nand_chip->bbt_md = &bbt_mirror_descr;
/*
 * Configure controller hardware ECC: find the first ecc_matrix row for
 * this page size that meets the chip's ONFI codeword size and ECC
 * strength, program ecc_reg (ECC byte address/size and BCH enable),
 * set the BCH mode bits in memadr_reg2 when applicable, build the
 * runtime OOB layout (ECC bytes at the end of OOB), and fill in the
 * nand_chip ecc fields. Returns non-zero when no row fits (the 'found'
 * bookkeeping is in elided lines).
 */
1132 static int arasan_nand_ecc_init(struct mtd_info *mtd)
1135 u32 regval, eccpos_start, i, eccaddr;
1136 struct nand_chip *nand_chip = mtd_to_nand(mtd);
1138 for (i = 0; i < ARRAY_SIZE(ecc_matrix); i++) {
1139 if ((ecc_matrix[i].pagesize == mtd->writesize) &&
1140 (ecc_matrix[i].ecc_codeword_size >=
1141 nand_chip->ecc_step_ds)) {
1142 if (ecc_matrix[i].eccbits >=
1143 nand_chip->ecc_strength_ds) {
/* ECC bytes live at the very end of the OOB area. */
1154 eccaddr = mtd->writesize + mtd->oobsize -
1155 ecc_matrix[found].eccsize;
1158 (ecc_matrix[found].eccsize << ARASAN_NAND_ECC_SIZE_SHIFT) |
1159 (ecc_matrix[found].bch << ARASAN_NAND_ECC_BCH_SHIFT);
1160 writel(regval, &arasan_nand_base->ecc_reg);
1162 if (ecc_matrix[found].bch) {
1163 regval = readl(&arasan_nand_base->memadr_reg2);
1164 regval &= ~ARASAN_NAND_MEM_ADDR2_BCH_MASK;
1165 regval |= (ecc_matrix[found].bchval <<
1166 ARASAN_NAND_MEM_ADDR2_BCH_SHIFT);
1167 writel(regval, &arasan_nand_base->memadr_reg2);
/* Build the software OOB layout to match the HW placement. */
1170 nand_oob.eccbytes = ecc_matrix[found].eccsize;
1171 eccpos_start = mtd->oobsize - nand_oob.eccbytes;
1173 for (i = 0; i < nand_oob.eccbytes; i++)
1174 nand_oob.eccpos[i] = eccpos_start + i;
/* Bytes 0-1 are reserved for the bad-block marker. */
1176 nand_oob.oobfree[0].offset = 2;
1177 nand_oob.oobfree[0].length = eccpos_start - 2;
1179 nand_chip->ecc.size = ecc_matrix[found].ecc_codeword_size;
1180 nand_chip->ecc.strength = ecc_matrix[found].eccbits;
1181 nand_chip->ecc.bytes = ecc_matrix[found].eccsize;
1182 nand_chip->ecc.layout = &nand_oob;
/*
 * Probe and register one controller instance: allocate driver state,
 * install the chip callbacks, clear cmd/pgm registers, run
 * nand_scan_ident(), pick on-die or controller ECC, then
 * nand_scan_tail() and nand_register(). Returns non-zero on any
 * failure (error-path cleanup is in elided lines).
 */
1187 static int arasan_nand_init(struct nand_chip *nand_chip, int devnum)
1189 struct arasan_nand_info *nand;
1190 struct mtd_info *mtd;
1193 nand = calloc(1, sizeof(struct arasan_nand_info));
1195 printf("%s: failed to allocate\n", __func__);
1199 nand->nand_base = arasan_nand_base;
1200 mtd = nand_to_mtd(nand_chip);
1201 nand_set_controller_data(nand_chip, nand);
1203 #ifdef CONFIG_SYS_NAND_NO_SUBPAGE_WRITE
1204 nand_chip->options |= NAND_NO_SUBPAGE_WRITE;
1207 /* Set the driver entry points for MTD */
1208 nand_chip->cmdfunc = arasan_nand_cmd_function;
1209 nand_chip->select_chip = arasan_nand_select_chip;
1210 nand_chip->read_byte = arasan_nand_read_byte;
1212 /* Buffer read/write routines */
1213 nand_chip->read_buf = arasan_nand_read_buf;
1214 nand_chip->write_buf = arasan_nand_write_buf;
1215 nand_chip->bbt_options = NAND_BBT_USE_FLASH;
/* Start from a clean controller command state. */
1217 writel(0x0, &arasan_nand_base->cmd_reg);
1218 writel(0x0, &arasan_nand_base->pgm_reg);
1220 /* first scan to find the device and get the page size */
1221 if (nand_scan_ident(mtd, CONFIG_SYS_NAND_MAX_CHIPS, NULL)) {
1222 printf("%s: nand_scan_ident failed\n", __func__);
1226 nand_chip->ecc.mode = NAND_ECC_HW;
1227 nand_chip->ecc.hwctl = NULL;
1228 nand_chip->ecc.read_page = arasan_nand_read_page_hwecc;
1229 nand_chip->ecc.write_page = arasan_nand_write_page_hwecc;
1230 nand_chip->ecc.read_oob = arasan_nand_read_oob;
1231 nand_chip->ecc.write_oob = arasan_nand_write_oob;
1233 arasan_check_ondie(mtd);
1236 * If on die supported, then give priority to on-die ecc and use
1237 * it instead of controller ecc.
1239 if (nand->on_die_ecc_enabled) {
1240 nand_chip->ecc.strength = 1;
1241 nand_chip->ecc.size = mtd->writesize;
1242 nand_chip->ecc.bytes = 0;
1243 nand_chip->ecc.layout = &ondie_nand_oob_64;
1245 if (arasan_nand_ecc_init(mtd)) {
1246 printf("%s: nand_ecc_init failed\n", __func__);
1251 if (nand_scan_tail(mtd)) {
1252 printf("%s: nand_scan_tail failed\n", __func__);
1256 if (nand_register(devnum, mtd)) {
1257 printf("Nand Register Fail\n");
/* Board hook called by the U-Boot NAND core: bring up controller 0. */
1267 void board_nand_init(void)
1269 struct nand_chip *nand = &nand_chip[0];
1271 if (arasan_nand_init(nand, 0))
1272 puts("NAND init failed\n");