+#if defined(CONFIG_DMA_LPC32XX)
+/*
+ * Prepares DMA descriptors for NAND RD/WR operations
+ * If the size is < 256 Bytes then it is assumed to be
+ * an OOB transfer
+ */
+static void lpc32xx_nand_dma_configure(struct nand_chip *chip,
+ const u8 *buffer, int size,
+ int read)
+{
+ u32 i, dmasrc, ctrl, ecc_ctrl, oob_ctrl, dmadst;
+ struct lpc32xx_dmac_ll *dmalist_cur;
+ struct lpc32xx_dmac_ll *dmalist_cur_ecc;
+
+ /*
+ * CTRL descriptor entry for reading ECC
+ * Copy Multiple times to sync DMA with Flash Controller
+ * NOTE(review): the low bits appear to be the transfer size in
+ * 32-bit words (cf. ctrl/oob_ctrl below, which use byte-count/4
+ * in the same position); 0x5 words here -- confirm against the
+ * DMAC channel-control TransferSize field.
+ */
+ ecc_ctrl = 0x5 |
+ DMAC_CHAN_SRC_BURST_1 |
+ DMAC_CHAN_DEST_BURST_1 |
+ DMAC_CHAN_SRC_WIDTH_32 |
+ DMAC_CHAN_DEST_WIDTH_32 |
+ DMAC_CHAN_DEST_AHB1;
+
+ /* CTRL descriptor entry for reading/writing Data */
+ ctrl = (CONFIG_SYS_NAND_ECCSIZE / 4) |
+ DMAC_CHAN_SRC_BURST_4 |
+ DMAC_CHAN_DEST_BURST_4 |
+ DMAC_CHAN_SRC_WIDTH_32 |
+ DMAC_CHAN_DEST_WIDTH_32 |
+ DMAC_CHAN_DEST_AHB1;
+
+ /* CTRL descriptor entry for reading/writing Spare Area */
+ oob_ctrl = (CONFIG_SYS_NAND_OOBSIZE / 4) |
+ DMAC_CHAN_SRC_BURST_4 |
+ DMAC_CHAN_DEST_BURST_4 |
+ DMAC_CHAN_SRC_WIDTH_32 |
+ DMAC_CHAN_DEST_WIDTH_32 |
+ DMAC_CHAN_DEST_AHB1;
+
+ /*
+ * Pick DMA endpoints by direction; only the memory-side address
+ * auto-increments, the flash-controller side stays fixed.
+ */
+ if (read) {
+ dmasrc = lpc32xx_dmac_set_dma_data();
+ dmadst = (u32)buffer;
+ ctrl |= DMAC_CHAN_DEST_AUTOINC;
+ } else {
+ dmadst = lpc32xx_dmac_set_dma_data();
+ dmasrc = (u32)buffer;
+ ctrl |= DMAC_CHAN_SRC_AUTOINC;
+ }
+
+ /*
+ * Write Operation Sequence for Small Block NAND
+ * ----------------------------------------------------------
+ * 1. X'fer 256 bytes of data from Memory to Flash.
+ * 2. Copy generated ECC data from Register to Spare Area
+ * 3. X'fer next 256 bytes of data from Memory to Flash.
+ * 4. Copy generated ECC data from Register to Spare Area.
+ * 5. X'fer 16 bytes of Spare area from Memory to Flash.
+ * Read Operation Sequence for Small Block NAND
+ * ----------------------------------------------------------
+ * 1. X'fer 256 bytes of data from Flash to Memory.
+ * 2. Copy generated ECC data from Register to ECC calc Buffer.
+ * 3. X'fer next 256 bytes of data from Flash to Memory.
+ * 4. Copy generated ECC data from Register to ECC calc Buffer.
+ * 5. X'fer 16 bytes of Spare area from Flash to Memory.
+ * Write Operation Sequence for Large Block NAND
+ * ----------------------------------------------------------
+ * 1. Steps(1-4) of Write Operations repeat four times
+ * which generates 16 DMA descriptors to X'fer 2048 bytes of
+ * data & 32 bytes of ECC data.
+ * 2. X'fer 64 bytes of Spare area from Memory to Flash.
+ * Read Operation Sequence for Large Block NAND
+ * ----------------------------------------------------------
+ * 1. Steps(1-4) of Read Operations repeat four times
+ * which generates 16 DMA descriptors to X'fer 2048 bytes of
+ * data & 32 bytes of ECC data.
+ * 2. X'fer 64 bytes of Spare area from Flash to Memory.
+ */
+
+ /*
+ * Build one (data, ECC-readback) descriptor pair per ECC chunk.
+ * The memory-side address advances 256 bytes per chunk (256 is
+ * presumably CONFIG_SYS_NAND_ECCSIZE -- TODO confirm they match);
+ * each ECC descriptor drains the hardware ECC word into
+ * ecc_buffer[i] and chains to the next pair.
+ */
+ for (i = 0; i < size/CONFIG_SYS_NAND_ECCSIZE; i++) {
+ dmalist_cur = &dmalist[i * 2];
+ dmalist_cur_ecc = &dmalist[(i * 2) + 1];
+
+ dmalist_cur->dma_src = (read ? (dmasrc) : (dmasrc + (i*256)));
+ dmalist_cur->dma_dest = (read ? (dmadst + (i*256)) : dmadst);
+ dmalist_cur->next_lli = lpc32xx_dmac_next_lli(dmalist_cur_ecc);
+ dmalist_cur->next_ctrl = ctrl;
+
+ dmalist_cur_ecc->dma_src = lpc32xx_dmac_set_ecc();
+ dmalist_cur_ecc->dma_dest = (u32)&ecc_buffer[i];
+ dmalist_cur_ecc->next_lli =
+ lpc32xx_dmac_next_lli(&dmalist[(i * 2) + 2]);
+ dmalist_cur_ecc->next_ctrl = ecc_ctrl;
+ }
+
+ /*
+ * If any data descriptors were built (i != 0), the loop left the
+ * last ECC descriptor pointing one pair past the end: terminate
+ * the chain there and raise the terminal-count interrupt on it.
+ * Transfers smaller than one ECC chunk fall through to the
+ * OOB-only path below.
+ */
+ if (i) { /* Data only transfer */
+ dmalist_cur_ecc = &dmalist[(i * 2) - 1];
+ dmalist_cur_ecc->next_lli = 0;
+ dmalist_cur_ecc->next_ctrl |= DMAC_CHAN_INT_TC_EN;
+ return;
+ }
+
+ /* OOB only transfer */
+ if (read) {
+ dmasrc = lpc32xx_dmac_set_dma_data();
+ dmadst = (u32)buffer;
+ oob_ctrl |= DMAC_CHAN_DEST_AUTOINC;
+ } else {
+ dmadst = lpc32xx_dmac_set_dma_data();
+ dmasrc = (u32)buffer;
+ oob_ctrl |= DMAC_CHAN_SRC_AUTOINC;
+ }
+
+ /* Read/ Write Spare Area Data To/From Flash */
+ dmalist_cur = &dmalist[i * 2];
+ dmalist_cur->dma_src = dmasrc;
+ dmalist_cur->dma_dest = dmadst;
+ dmalist_cur->next_lli = 0;
+ dmalist_cur->next_ctrl = (oob_ctrl | DMAC_CHAN_INT_TC_EN);
+}
+
+/*
+ * Run one DMA-driven transfer of 'len' bytes between 'buf' and the
+ * SLC NAND controller. read != 0: flash -> memory; read == 0:
+ * memory -> flash. Busy-waits until both the NAND device and the
+ * DMA channel report completion, then tears down DMA and H/W ECC.
+ */
+static void lpc32xx_nand_xfer(struct mtd_info *mtd, const u8 *buf,
+ int len, int read)
+{
+ struct nand_chip *chip = mtd->priv;
+ u32 config;
+ int ret;
+
+ /* DMA Channel Configuration: flow direction plus the NAND
+ * peripheral ID on whichever end is the peripheral side. */
+ config = (read ? DMAC_CHAN_FLOW_D_P2M : DMAC_CHAN_FLOW_D_M2P) |
+ (read ? DMAC_DEST_PERIP(0) : DMAC_DEST_PERIP(DMA_PERID_NAND1)) |
+ (read ? DMAC_SRC_PERIP(DMA_PERID_NAND1) : DMAC_SRC_PERIP(0)) |
+ DMAC_CHAN_ENABLE;
+
+ /* Prepare DMA descriptors */
+ lpc32xx_nand_dma_configure(chip, buf, len, read);
+
+ /* Setup SLC controller and start transfer */
+ if (read)
+ setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
+ else /* NAND_ECC_WRITE */
+ clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
+ setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_BURST);
+
+ /*
+ * Write length for new transfers: only when no transfer is in
+ * flight (FIFO empty and transfer counter zero). For a data
+ * transfer (len != oobsize) the spare area presumably follows in
+ * the same operation, so the count covers data + OOB -- confirm
+ * against the SLC TC register semantics.
+ */
+ if (!((readl(&lpc32xx_nand_slc_regs->stat) & STAT_DMA_FIFO) |
+ readl(&lpc32xx_nand_slc_regs->tc))) {
+ int tmp = (len != mtd->oobsize) ? mtd->oobsize : 0;
+ writel(len + tmp, &lpc32xx_nand_slc_regs->tc);
+ }
+
+ setbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
+
+ /* Start DMA transfers */
+ ret = lpc32xx_dma_start_xfer(dmachan, dmalist, config);
+ if (unlikely(ret < 0))
+ BUG();
+
+
+ /* Wait for NAND to be ready */
+ while (!lpc32xx_nand_dev_ready(mtd))
+ ;
+
+ /* Wait till DMA transfer is DONE */
+ if (lpc32xx_dma_wait_status(dmachan))
+ pr_err("NAND DMA transfer error!\r\n");
+
+ /* Stop DMA & HW ECC */
+ clrbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
+ clrbits_le32(&lpc32xx_nand_slc_regs->cfg,
+ CFG_DMA_DIR | CFG_DMA_BURST | CFG_ECC_EN | CFG_DMA_ECC);
+}
+
+/*
+ * Serialize 'count' raw 32-bit hardware ECC words from 'ecc' into the
+ * 3-bytes-per-step layout expected in 'spare': each word is shifted
+ * left by 2, complemented, masked to 24 bits, and stored MSB-first.
+ * Always returns 0 (matches the ecc.calculate hook's success return).
+ */
+static u32 slc_ecc_copy_to_buffer(u8 *spare, const u32 *ecc, int count)
+{
+ int i;
+ /* i walks the output in ECCBYTES-sized strides; one input word
+ * produces one 3-byte group per iteration. */
+ for (i = 0; i < (count * CONFIG_SYS_NAND_ECCBYTES);
+ i += CONFIG_SYS_NAND_ECCBYTES) {
+ u32 ce = ecc[i / CONFIG_SYS_NAND_ECCBYTES];
+ ce = ~(ce << 2) & 0xFFFFFF;
+ spare[i+2] = (u8)(ce & 0xFF); ce >>= 8;
+ spare[i+1] = (u8)(ce & 0xFF); ce >>= 8;
+ spare[i] = (u8)(ce & 0xFF);
+ }
+ return 0;
+}
+
+/*
+ * NAND interface ecc.calculate hook. 'dat' is intentionally unused:
+ * the ECC words were already deposited into ecc_buffer by the DMA
+ * descriptor chain during the preceding transfer; this only
+ * serializes them into 'ecc_code'. Returns 0.
+ */
+static int lpc32xx_ecc_calculate(struct mtd_info *mtd, const uint8_t *dat,
+ uint8_t *ecc_code)
+{
+ return slc_ecc_copy_to_buffer(ecc_code, ecc_buffer, ECCSTEPS);
+}
+
+/*
+ * Enables and prepares SLC NAND controller
+ * for doing data transfers with H/W ECC enabled.
+ */
+static void lpc32xx_hwecc_enable(struct mtd_info *mtd, int mode)
+{
+ /* Clear ECC state from any previous operation */
+ writel(CTRL_ECC_CLEAR, &lpc32xx_nand_slc_regs->ctrl);
+
+ /* Setup SLC controller for H/W ECC operations: enable the ECC
+ * engine and route ECC results over DMA ('mode' is unused --
+ * the same setup serves both NAND_ECC_READ and NAND_ECC_WRITE;
+ * direction is set later in lpc32xx_nand_xfer). */
+ setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_ECC_EN | CFG_DMA_ECC);
+}
+
+/*
+ * lpc32xx_correct_data - [NAND Interface] Detect and correct bit error(s)
+ * mtd: MTD block structure
+ * dat: raw data read from the chip
+ * read_ecc: ECC from the chip
+ * calc_ecc: the ECC calculated from raw data
+ *
+ * Detect and correct a 1 bit error for 256 byte block
+ */
+int lpc32xx_correct_data(struct mtd_info *mtd, u_char *dat,
+ u_char *read_ecc, u_char *calc_ecc)
+{
+ unsigned int i;
+ int ret1, ret2 = 0;
+ u_char *r = read_ecc;
+ u_char *c = calc_ecc;
+ u16 data_offset = 0;
+
+ for (i = 0 ; i < ECCSTEPS ; i++) {
+ /*
+ * Correct the current ECCSIZE-byte sub-page against its
+ * ECC bytes BEFORE advancing the cursors. The previous
+ * code stepped r/c/data_offset at the top of the loop,
+ * which skipped sub-page 0 entirely and read one ECC
+ * group past the end of read_ecc/calc_ecc on the final
+ * iteration.
+ */
+ ret1 = nand_correct_data(mtd, dat + data_offset, r, c);
+ if (ret1 < 0)
+ return -EBADMSG;
+ else
+ ret2 += ret1;
+
+ /* Step to the next sub-page and its ECC bytes */
+ r += CONFIG_SYS_NAND_ECCBYTES;
+ c += CONFIG_SYS_NAND_ECCBYTES;
+ data_offset += CONFIG_SYS_NAND_ECCSIZE;
+ }
+
+ /* Total number of corrected bitflips (0 if data was clean) */
+ return ret2;
+}
+#endif
+
+#if defined(CONFIG_DMA_LPC32XX)
+/* read_buf hook: read 'len' bytes from flash into 'buf' via DMA */
+static void lpc32xx_dma_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
+{
+ lpc32xx_nand_xfer(mtd, buf, len, 1);
+}
+#else