lpc32xx_nand_slc.c

// SPDX-License-Identifier: GPL-2.0+
/*
 * LPC32xx SLC NAND flash controller driver
 *
 * (C) Copyright 2015 Vladimir Zapolskiy <vz@mleia.com>
 *
 * Hardware ECC support original source code
 * Copyright (C) 2008 by NXP Semiconductors
 * Author: Kevin Wells
 *
 * Copyright (c) 2015 Tyco Fire Protection Products.
 */

#include <common.h>
#include <nand.h>
#include <linux/mtd/nand_ecc.h>
#include <linux/errno.h>
#include <asm/io.h>
#include <asm/arch/config.h>
#include <asm/arch/clk.h>
#include <asm/arch/sys_proto.h>
#include <asm/arch/dma.h>
#include <asm/arch/cpu.h>

#if defined(CONFIG_DMA_LPC32XX) && defined(CONFIG_SPL_BUILD)
#warning "DMA support in SPL image is not tested"
#endif
struct lpc32xx_nand_slc_regs {
	u32 data;
	u32 addr;
	u32 cmd;
	u32 stop;
	u32 ctrl;
	u32 cfg;
	u32 stat;
	u32 int_stat;
	u32 ien;
	u32 isr;
	u32 icr;
	u32 tac;
	u32 tc;
	u32 ecc;
	u32 dma_data;
};
/* CFG register */
#define CFG_CE_LOW	(1 << 5)
#define CFG_DMA_ECC	(1 << 4) /* Enable DMA ECC bit */
#define CFG_ECC_EN	(1 << 3) /* ECC enable bit */
#define CFG_DMA_BURST	(1 << 2) /* DMA burst bit */
#define CFG_DMA_DIR	(1 << 1) /* DMA write(0)/read(1) bit */

/* CTRL register */
#define CTRL_SW_RESET	(1 << 2)
#define CTRL_ECC_CLEAR	(1 << 1) /* Reset ECC bit */
#define CTRL_DMA_START	(1 << 0) /* Start DMA channel bit */

/* STAT register */
#define STAT_DMA_FIFO	(1 << 2) /* DMA FIFO has data bit */
#define STAT_NAND_READY	(1 << 0)

/* INT_STAT register */
#define INT_STAT_TC	(1 << 1)
#define INT_STAT_RDY	(1 << 0)

/* TAC register bits, be aware of overflows */
#define TAC_W_RDY(n)	(max_t(uint32_t, (n), 0xF) << 28)
#define TAC_W_WIDTH(n)	(max_t(uint32_t, (n), 0xF) << 24)
#define TAC_W_HOLD(n)	(max_t(uint32_t, (n), 0xF) << 20)
#define TAC_W_SETUP(n)	(max_t(uint32_t, (n), 0xF) << 16)
#define TAC_R_RDY(n)	(max_t(uint32_t, (n), 0xF) << 12)
#define TAC_R_WIDTH(n)	(max_t(uint32_t, (n), 0xF) << 8)
#define TAC_R_HOLD(n)	(max_t(uint32_t, (n), 0xF) << 4)
#define TAC_R_SETUP(n)	(max_t(uint32_t, (n), 0xF) << 0)
/*
 * NAND ECC Layout for small page NAND devices
 * Note: For large page devices, the default layouts are used.
 */
static struct nand_ecclayout lpc32xx_nand_oob_16 = {
	.eccbytes = 6,
	.eccpos = {10, 11, 12, 13, 14, 15},
	.oobfree = {
		{.offset = 0, .length = 4},
		{.offset = 6, .length = 4}
	}
};
#if defined(CONFIG_DMA_LPC32XX)
#define ECCSTEPS	(CONFIG_SYS_NAND_PAGE_SIZE / CONFIG_SYS_NAND_ECCSIZE)

/*
 * DMA Descriptors
 * For Large Block: 17 descriptors = ((16 Data and ECC Read) + 1 Spare Area)
 * For Small Block: 5 descriptors = ((4 Data and ECC Read) + 1 Spare Area)
 */
static struct lpc32xx_dmac_ll dmalist[ECCSTEPS * 2 + 1];
static u32 ecc_buffer[8]; /* MAX ECC size */
static unsigned int dmachan = (unsigned int)-1; /* Invalid channel */

/*
 * Helper macros for the DMA client (i.e. NAND SLC):
 * - to write the next DMA linked list item address
 *   (see arch/include/asm/arch-lpc32xx/dma.h),
 * - to assign the DMA data register to the DMA source or destination address,
 * - to assign the ECC register to the DMA source or destination address.
 */
#define lpc32xx_dmac_next_lli(x)	((u32)x)
#define lpc32xx_dmac_set_dma_data()	((u32)&lpc32xx_nand_slc_regs->dma_data)
#define lpc32xx_dmac_set_ecc()		((u32)&lpc32xx_nand_slc_regs->ecc)
#endif
static struct lpc32xx_nand_slc_regs __iomem *lpc32xx_nand_slc_regs =
	(struct lpc32xx_nand_slc_regs __iomem *)SLC_NAND_BASE;
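
/*
 * Reset the SLC controller, disable DMA and ECC, clear and mask all
 * interrupts, and program the TAC timing register from the current HCLK
 * rate and the board-configured setup/width/hold/ready timings.
 */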
static void lpc32xx_nand_init(void)
{
	uint32_t hclk = get_hclk_clk_rate();

	/* Reset SLC NAND controller */
	writel(CTRL_SW_RESET, &lpc32xx_nand_slc_regs->ctrl);

	/* 8-bit bus, no DMA, no ECC, ordinary CE signal */
	writel(0, &lpc32xx_nand_slc_regs->cfg);

	/* Interrupts disabled and cleared */
	writel(0, &lpc32xx_nand_slc_regs->ien);
	writel(INT_STAT_TC | INT_STAT_RDY,
	       &lpc32xx_nand_slc_regs->icr);

	/* Configure NAND flash timings */
	writel(TAC_W_RDY(CONFIG_LPC32XX_NAND_SLC_WDR_CLKS) |
	       TAC_W_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_WWIDTH) |
	       TAC_W_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_WHOLD) |
	       TAC_W_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_WSETUP) |
	       TAC_R_RDY(CONFIG_LPC32XX_NAND_SLC_RDR_CLKS) |
	       TAC_R_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_RWIDTH) |
	       TAC_R_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_RHOLD) |
	       TAC_R_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_RSETUP),
	       &lpc32xx_nand_slc_regs->tac);
}
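
/*
 * Drive the chip-enable line via CFG_CE_LOW and latch command or address
 * bytes into the SLC CMD or ADDR register depending on the CLE/ALE flags.
 */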
static void lpc32xx_nand_cmd_ctrl(struct mtd_info *mtd,
				  int cmd, unsigned int ctrl)
{
	debug("ctrl: 0x%08x, cmd: 0x%08x\n", ctrl, cmd);

	if (ctrl & NAND_NCE)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
	else
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);

	if (cmd == NAND_CMD_NONE)
		return;

	if (ctrl & NAND_CLE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->cmd);
	else if (ctrl & NAND_ALE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->addr);
}
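
/* Report the NAND ready/busy state from the SLC STAT register */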
static int lpc32xx_nand_dev_ready(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->stat) & STAT_NAND_READY;
}
#if defined(CONFIG_DMA_LPC32XX)
/*
 * Prepares DMA descriptors for NAND RD/WR operations.
 * If the size is < 256 bytes then it is assumed to be
 * an OOB transfer.
 */
static void lpc32xx_nand_dma_configure(struct nand_chip *chip,
				       const u8 *buffer, int size,
				       int read)
{
	u32 i, dmasrc, ctrl, ecc_ctrl, oob_ctrl, dmadst;
	struct lpc32xx_dmac_ll *dmalist_cur;
	struct lpc32xx_dmac_ll *dmalist_cur_ecc;

	/*
	 * CTRL descriptor entry for reading ECC
	 * Copy multiple times to sync DMA with Flash Controller
	 */
	ecc_ctrl = 0x5 |
		   DMAC_CHAN_SRC_BURST_1 |
		   DMAC_CHAN_DEST_BURST_1 |
		   DMAC_CHAN_SRC_WIDTH_32 |
		   DMAC_CHAN_DEST_WIDTH_32 |
		   DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Data */
	ctrl = (CONFIG_SYS_NAND_ECCSIZE / 4) |
	       DMAC_CHAN_SRC_BURST_4 |
	       DMAC_CHAN_DEST_BURST_4 |
	       DMAC_CHAN_SRC_WIDTH_32 |
	       DMAC_CHAN_DEST_WIDTH_32 |
	       DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Spare Area */
	oob_ctrl = (CONFIG_SYS_NAND_OOBSIZE / 4) |
		   DMAC_CHAN_SRC_BURST_4 |
		   DMAC_CHAN_DEST_BURST_4 |
		   DMAC_CHAN_SRC_WIDTH_32 |
		   DMAC_CHAN_DEST_WIDTH_32 |
		   DMAC_CHAN_DEST_AHB1;

	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/*
	 * Write Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Memory to Flash.
	 * 2. Copy generated ECC data from Register to Spare Area.
	 * 3. X'fer next 256 bytes of data from Memory to Flash.
	 * 4. Copy generated ECC data from Register to Spare Area.
	 * 5. X'fer 16 bytes of Spare Area from Memory to Flash.
	 *
	 * Read Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Flash to Memory.
	 * 2. Copy generated ECC data from Register to ECC calc Buffer.
	 * 3. X'fer next 256 bytes of data from Flash to Memory.
	 * 4. Copy generated ECC data from Register to ECC calc Buffer.
	 * 5. X'fer 16 bytes of Spare Area from Flash to Memory.
	 *
	 * Write Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Write Operation are repeated four times,
	 *    which generates 16 DMA descriptors to X'fer 2048 bytes of
	 *    data and 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare Area from Memory to Flash.
	 *
	 * Read Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Read Operation are repeated four times,
	 *    which generates 16 DMA descriptors to X'fer 2048 bytes of
	 *    data and 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare Area from Flash to Memory.
	 */
	for (i = 0; i < size / CONFIG_SYS_NAND_ECCSIZE; i++) {
		dmalist_cur = &dmalist[i * 2];
		dmalist_cur_ecc = &dmalist[(i * 2) + 1];

		dmalist_cur->dma_src = (read ? (dmasrc) : (dmasrc + (i * 256)));
		dmalist_cur->dma_dest = (read ? (dmadst + (i * 256)) : dmadst);
		dmalist_cur->next_lli = lpc32xx_dmac_next_lli(dmalist_cur_ecc);
		dmalist_cur->next_ctrl = ctrl;

		dmalist_cur_ecc->dma_src = lpc32xx_dmac_set_ecc();
		dmalist_cur_ecc->dma_dest = (u32)&ecc_buffer[i];
		dmalist_cur_ecc->next_lli =
			lpc32xx_dmac_next_lli(&dmalist[(i * 2) + 2]);
		dmalist_cur_ecc->next_ctrl = ecc_ctrl;
	}

	if (i) { /* Data only transfer */
		dmalist_cur_ecc = &dmalist[(i * 2) - 1];
		dmalist_cur_ecc->next_lli = 0;
		dmalist_cur_ecc->next_ctrl |= DMAC_CHAN_INT_TC_EN;
		return;
	}

	/* OOB only transfer */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/* Read/Write Spare Area Data To/From Flash */
	dmalist_cur = &dmalist[i * 2];
	dmalist_cur->dma_src = dmasrc;
	dmalist_cur->dma_dest = dmadst;
	dmalist_cur->next_lli = 0;
	dmalist_cur->next_ctrl = (oob_ctrl | DMAC_CHAN_INT_TC_EN);
}
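
/*
 * Run a DMA transfer between memory and the SLC data register: set up the
 * channel configuration and descriptor list, program the SLC transfer
 * counter and DMA direction/burst bits, start the transfer, then wait for
 * both NAND ready and DMA completion before disabling DMA and ECC again.
 */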
static void lpc32xx_nand_xfer(struct mtd_info *mtd, const u8 *buf,
			      int len, int read)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 config;
	int ret;

	/* DMA Channel Configuration */
	config = (read ? DMAC_CHAN_FLOW_D_P2M : DMAC_CHAN_FLOW_D_M2P) |
		 (read ? DMAC_DEST_PERIP(0) : DMAC_DEST_PERIP(DMA_PERID_NAND1)) |
		 (read ? DMAC_SRC_PERIP(DMA_PERID_NAND1) : DMAC_SRC_PERIP(0)) |
		 DMAC_CHAN_ENABLE;

	/* Prepare DMA descriptors */
	lpc32xx_nand_dma_configure(chip, buf, len, read);

	/* Setup SLC controller and start transfer */
	if (read)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	else /* NAND_ECC_WRITE */
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_BURST);

	/* Write length for new transfers */
	if (!((readl(&lpc32xx_nand_slc_regs->stat) & STAT_DMA_FIFO) |
	      readl(&lpc32xx_nand_slc_regs->tc))) {
		int tmp = (len != mtd->oobsize) ? mtd->oobsize : 0;
		writel(len + tmp, &lpc32xx_nand_slc_regs->tc);
	}
	setbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);

	/* Start DMA transfers */
	ret = lpc32xx_dma_start_xfer(dmachan, dmalist, config);
	if (unlikely(ret < 0))
		BUG();

	/* Wait for NAND to be ready */
	while (!lpc32xx_nand_dev_ready(mtd))
		;

	/* Wait till DMA transfer is DONE */
	if (lpc32xx_dma_wait_status(dmachan))
		pr_err("NAND DMA transfer error!\r\n");

	/* Stop DMA & HW ECC */
	clrbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
	clrbits_le32(&lpc32xx_nand_slc_regs->cfg,
		     CFG_DMA_DIR | CFG_DMA_BURST | CFG_ECC_EN | CFG_DMA_ECC);
}
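
/*
 * Repack the hardware ECC words collected by DMA into the 3-byte-per-step
 * ECC format stored in the spare area (shift, invert and mask to 24 bits).
 */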
static u32 slc_ecc_copy_to_buffer(u8 *spare, const u32 *ecc, int count)
{
	int i;

	for (i = 0; i < (count * CONFIG_SYS_NAND_ECCBYTES);
	     i += CONFIG_SYS_NAND_ECCBYTES) {
		u32 ce = ecc[i / CONFIG_SYS_NAND_ECCBYTES];
		ce = ~(ce << 2) & 0xFFFFFF;
		spare[i + 2] = (u8)(ce & 0xFF); ce >>= 8;
		spare[i + 1] = (u8)(ce & 0xFF); ce >>= 8;
		spare[i] = (u8)(ce & 0xFF);
	}

	return 0;
}
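
/*
 * The ECC values were already latched into ecc_buffer[] by the DMA
 * descriptor chain, so "calculation" only converts them to OOB format.
 */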
static int lpc32xx_ecc_calculate(struct mtd_info *mtd, const uint8_t *dat,
				 uint8_t *ecc_code)
{
	return slc_ecc_copy_to_buffer(ecc_code, ecc_buffer, ECCSTEPS);
}
/*
 * Enables and prepares SLC NAND controller
 * for doing data transfers with H/W ECC enabled.
 */
static void lpc32xx_hwecc_enable(struct mtd_info *mtd, int mode)
{
	/* Clear ECC */
	writel(CTRL_ECC_CLEAR, &lpc32xx_nand_slc_regs->ctrl);

	/* Setup SLC controller for H/W ECC operations */
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_ECC_EN | CFG_DMA_ECC);
}
/*
 * lpc32xx_correct_data - [NAND Interface] Detect and correct bit error(s)
 * mtd: MTD block structure
 * dat: raw data read from the chip
 * read_ecc: ECC from the chip
 * calc_ecc: the ECC calculated from raw data
 *
 * Detect and correct a 1 bit error for each 256 byte block
 */
int lpc32xx_correct_data(struct mtd_info *mtd, u_char *dat,
			 u_char *read_ecc, u_char *calc_ecc)
{
	unsigned int i;
	int ret1, ret2 = 0;
	u_char *r = read_ecc;
	u_char *c = calc_ecc;
	u16 data_offset = 0;

	for (i = 0; i < ECCSTEPS; i++) {
		r += CONFIG_SYS_NAND_ECCBYTES;
		c += CONFIG_SYS_NAND_ECCBYTES;
		data_offset += CONFIG_SYS_NAND_ECCSIZE;

		ret1 = nand_correct_data(mtd, dat + data_offset, r, c);
		if (ret1 < 0)
			return -EBADMSG;
		else
			ret2 += ret1;
	}

	return ret2;
}
#endif
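
/*
 * Buffer read path: with CONFIG_DMA_LPC32XX the data is transferred by the
 * DMA engine, otherwise it is read with 32-bit PIO accesses to the SLC
 * data register.
 */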
#if defined(CONFIG_DMA_LPC32XX)
static void lpc32xx_dma_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 1);
}
#else
static void lpc32xx_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	while (len-- > 0)
		*buf++ = readl(&lpc32xx_nand_slc_regs->data);
}
#endif
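
/* Single byte read; the SLC data register must be accessed as a 32-bit word */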
static uint8_t lpc32xx_read_byte(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->data);
}
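
/*
 * Buffer write path: with CONFIG_DMA_LPC32XX the data is transferred by the
 * DMA engine, otherwise it is written with 32-bit PIO accesses to the SLC
 * data register.
 */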
#if defined(CONFIG_DMA_LPC32XX)
static void lpc32xx_dma_write_buf(struct mtd_info *mtd, const uint8_t *buf,
				  int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 0);
}
#else
static void lpc32xx_write_buf(struct mtd_info *mtd, const uint8_t *buf, int len)
{
	while (len-- > 0)
		writel(*buf++, &lpc32xx_nand_slc_regs->data);
}
#endif
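
/* Single byte write; the SLC data register must be accessed as a 32-bit word */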
static void lpc32xx_write_byte(struct mtd_info *mtd, uint8_t byte)
{
	writel(byte, &lpc32xx_nand_slc_regs->data);
}
#if defined(CONFIG_DMA_LPC32XX)
/* Reuse the logic from "nand_read_page_hwecc()" */
static int lpc32xx_read_page_hwecc(struct mtd_info *mtd, struct nand_chip *chip,
				   uint8_t *buf, int oob_required, int page)
{
	int i;
	int stat;
	uint8_t *p = buf;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	uint8_t *ecc_code = chip->buffers->ecccode;
	uint32_t *eccpos = chip->ecc.layout->eccpos;
	unsigned int max_bitflips = 0;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC and DMA controllers allow a single
	 * DMA transaction of a full page using the DMA controller
	 * scatter/gather mode through a linked list; the ECC read is done
	 * without any software intervention.
	 */
	lpc32xx_hwecc_enable(mtd, NAND_ECC_READ);
	lpc32xx_dma_read_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);
	lpc32xx_dma_read_buf(mtd, chip->oob_poi, mtd->oobsize);

	for (i = 0; i < chip->ecc.total; i++)
		ecc_code[i] = chip->oob_poi[eccpos[i]];

	stat = chip->ecc.correct(mtd, p, &ecc_code[0], &ecc_calc[0]);
	if (stat < 0) {
		mtd->ecc_stats.failed++;
	} else {
		mtd->ecc_stats.corrected += stat;
		max_bitflips = max_t(unsigned int, max_bitflips, stat);
	}

	return max_bitflips;
}

/* Reuse the logic from "nand_write_page_hwecc()" */
static int lpc32xx_write_page_hwecc(struct mtd_info *mtd,
				    struct nand_chip *chip,
				    const uint8_t *buf, int oob_required,
				    int page)
{
	int i;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	const uint8_t *p = buf;
	uint32_t *eccpos = chip->ecc.layout->eccpos;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC and DMA controllers allow a single
	 * DMA transaction of a full page using the DMA controller
	 * scatter/gather mode through a linked list; the ECC calculation is
	 * done without any software intervention.
	 */
	lpc32xx_hwecc_enable(mtd, NAND_ECC_WRITE);
	lpc32xx_dma_write_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);

	for (i = 0; i < chip->ecc.total; i++)
		chip->oob_poi[eccpos[i]] = ecc_calc[i];

	lpc32xx_dma_write_buf(mtd, chip->oob_poi, mtd->oobsize);

	return 0;
}
#endif

/*
 * LPC32xx has only one SLC NAND controller; CONFIG_SYS_NAND_SELF_INIT is
 * not used so that this function can be reused in both the SPL NAND and
 * U-Boot images.
 */
int board_nand_init(struct nand_chip *lpc32xx_chip)
{
#if defined(CONFIG_DMA_LPC32XX)
	int ret;

	/* Acquire a channel for our use */
	ret = lpc32xx_dma_get_channel();
	if (unlikely(ret < 0)) {
		pr_info("Unable to get free DMA channel for NAND transfers\n");
		return -1;
	}
	dmachan = (unsigned int)ret;
#endif

	lpc32xx_chip->cmd_ctrl = lpc32xx_nand_cmd_ctrl;
	lpc32xx_chip->dev_ready = lpc32xx_nand_dev_ready;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to the data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_byte = lpc32xx_read_byte;
	lpc32xx_chip->write_byte = lpc32xx_write_byte;

#if defined(CONFIG_DMA_LPC32XX)
	/* Hardware ECC calculation is supported when the DMA driver is selected */
	lpc32xx_chip->ecc.mode = NAND_ECC_HW;
	lpc32xx_chip->read_buf = lpc32xx_dma_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_dma_write_buf;
	lpc32xx_chip->ecc.calculate = lpc32xx_ecc_calculate;
	lpc32xx_chip->ecc.correct = lpc32xx_correct_data;
	lpc32xx_chip->ecc.hwctl = lpc32xx_hwecc_enable;
	lpc32xx_chip->chip_delay = 2000;

	lpc32xx_chip->ecc.read_page = lpc32xx_read_page_hwecc;
	lpc32xx_chip->ecc.write_page = lpc32xx_write_page_hwecc;
	lpc32xx_chip->options |= NAND_NO_SUBPAGE_WRITE;
#else
	/*
	 * Hardware ECC calculation is not supported by the driver,
	 * because it requires DMA support; see the LPC32x0 User Manual,
	 * note after the SLC_ECC register description (UM10326, p.198).
	 */
	lpc32xx_chip->ecc.mode = NAND_ECC_SOFT;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to the data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_buf = lpc32xx_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_write_buf;
#endif

	/*
	 * These values are predefined
	 * for both small and large page NAND flash devices.
	 */
	lpc32xx_chip->ecc.size = CONFIG_SYS_NAND_ECCSIZE;
	lpc32xx_chip->ecc.bytes = CONFIG_SYS_NAND_ECCBYTES;
	lpc32xx_chip->ecc.strength = 1;

	if (CONFIG_SYS_NAND_PAGE_SIZE != NAND_LARGE_BLOCK_PAGE_SIZE)
		lpc32xx_chip->ecc.layout = &lpc32xx_nand_oob_16;

#if defined(CONFIG_SYS_NAND_USE_FLASH_BBT)
	lpc32xx_chip->bbt_options |= NAND_BBT_USE_FLASH;
#endif

	/* Initialize NAND interface */
	lpc32xx_nand_init();

	return 0;
}