2 * drivers/mtd/nand/pxa3xx_nand.c
4 * Copyright © 2005 Intel Corporation
5 * Copyright © 2006 Marvell International Ltd.
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License version 2 as
9 * published by the Free Software Foundation.
12 #include <linux/kernel.h>
13 #include <linux/module.h>
14 #include <linux/interrupt.h>
15 #include <linux/platform_device.h>
16 #include <linux/dma-mapping.h>
17 #include <linux/delay.h>
18 #include <linux/clk.h>
19 #include <linux/mtd/mtd.h>
20 #include <linux/mtd/nand.h>
21 #include <linux/mtd/partitions.h>
23 #include <linux/irq.h>
24 #include <linux/slab.h>
27 #include <plat/pxa3xx_nand.h>
29 #define CHIP_DELAY_TIMEOUT (2 * HZ/10)
31 /* registers and bit definitions */
32 #define NDCR (0x00) /* Control register */
33 #define NDTR0CS0 (0x04) /* Timing Parameter 0 for CS0 */
34 #define NDTR1CS0 (0x0C) /* Timing Parameter 1 for CS0 */
35 #define NDSR (0x14) /* Status Register */
36 #define NDPCR (0x18) /* Page Count Register */
37 #define NDBDR0 (0x1C) /* Bad Block Register 0 */
38 #define NDBDR1 (0x20) /* Bad Block Register 1 */
39 #define NDDB (0x40) /* Data Buffer */
40 #define NDCB0 (0x48) /* Command Buffer0 */
41 #define NDCB1 (0x4C) /* Command Buffer1 */
42 #define NDCB2 (0x50) /* Command Buffer2 */
44 #define NDCR_SPARE_EN (0x1 << 31)
45 #define NDCR_ECC_EN (0x1 << 30)
46 #define NDCR_DMA_EN (0x1 << 29)
47 #define NDCR_ND_RUN (0x1 << 28)
48 #define NDCR_DWIDTH_C (0x1 << 27)
49 #define NDCR_DWIDTH_M (0x1 << 26)
50 #define NDCR_PAGE_SZ (0x1 << 24)
51 #define NDCR_NCSX (0x1 << 23)
52 #define NDCR_ND_MODE (0x3 << 21)
53 #define NDCR_NAND_MODE (0x0)
54 #define NDCR_CLR_PG_CNT (0x1 << 20)
55 #define NDCR_CLR_ECC (0x1 << 19)
56 #define NDCR_RD_ID_CNT_MASK (0x7 << 16)
57 #define NDCR_RD_ID_CNT(x) (((x) << 16) & NDCR_RD_ID_CNT_MASK)
59 #define NDCR_RA_START (0x1 << 15)
60 #define NDCR_PG_PER_BLK (0x1 << 14)
61 #define NDCR_ND_ARB_EN (0x1 << 12)
63 #define NDSR_MASK (0xfff)
64 #define NDSR_RDY (0x1 << 11)
65 #define NDSR_CS0_PAGED (0x1 << 10)
66 #define NDSR_CS1_PAGED (0x1 << 9)
67 #define NDSR_CS0_CMDD (0x1 << 8)
68 #define NDSR_CS1_CMDD (0x1 << 7)
69 #define NDSR_CS0_BBD (0x1 << 6)
70 #define NDSR_CS1_BBD (0x1 << 5)
71 #define NDSR_DBERR (0x1 << 4)
72 #define NDSR_SBERR (0x1 << 3)
73 #define NDSR_WRDREQ (0x1 << 2)
74 #define NDSR_RDDREQ (0x1 << 1)
75 #define NDSR_WRCMDREQ (0x1)
77 #define NDCB0_AUTO_RS (0x1 << 25)
78 #define NDCB0_CSEL (0x1 << 24)
79 #define NDCB0_CMD_TYPE_MASK (0x7 << 21)
80 #define NDCB0_CMD_TYPE(x) (((x) << 21) & NDCB0_CMD_TYPE_MASK)
81 #define NDCB0_NC (0x1 << 20)
82 #define NDCB0_DBC (0x1 << 19)
83 #define NDCB0_ADDR_CYC_MASK (0x7 << 16)
84 #define NDCB0_ADDR_CYC(x) (((x) << 16) & NDCB0_ADDR_CYC_MASK)
85 #define NDCB0_CMD2_MASK (0xff << 8)
86 #define NDCB0_CMD1_MASK (0xff)
87 #define NDCB0_ADDR_CYC_SHIFT (16)
89 /* macros for registers read/write */
90 #define nand_writel(info, off, val) \
91 __raw_writel((val), (info)->mmio_base + (off))
93 #define nand_readl(info, off) \
94 __raw_readl((info)->mmio_base + (off))
96 /* error code and state */
116 struct pxa3xx_nand_info
{
117 struct nand_chip nand_chip
;
119 struct platform_device
*pdev
;
120 const struct pxa3xx_nand_flash
*flash_info
;
123 void __iomem
*mmio_base
;
124 unsigned long mmio_phys
;
126 unsigned int buf_start
;
127 unsigned int buf_count
;
129 /* DMA information */
133 unsigned char *data_buff
;
134 dma_addr_t data_buff_phys
;
135 size_t data_buff_size
;
137 struct pxa_dma_desc
*data_desc
;
138 dma_addr_t data_desc_addr
;
142 /* saved column/page_addr during CMD_SEQIN */
146 /* relate to the command */
149 int use_ecc
; /* use HW ECC ? */
150 int use_dma
; /* use DMA ? */
152 size_t data_size
; /* data size in FIFO */
154 struct completion cmd_complete
;
156 /* generated NDCBx register values */
161 /* calculated from pxa3xx_nand_flash data */
163 size_t read_id_bytes
;
165 unsigned int col_addr_cycles
;
166 unsigned int row_addr_cycles
;
169 static int use_dma
= 1;
170 module_param(use_dma
, bool, 0444);
171 MODULE_PARM_DESC(use_dma
, "enable DMA for data transfering to/from NAND HW");
174 * Default NAND flash controller configuration setup by the
175 * bootloader. This configuration is used only when pdata->keep_config is set
177 static struct pxa3xx_nand_timing default_timing
;
178 static struct pxa3xx_nand_flash default_flash
;
180 static struct pxa3xx_nand_cmdset smallpage_cmdset
= {
184 .read_status
= 0x0070,
190 .lock_status
= 0x007A,
193 static struct pxa3xx_nand_cmdset largepage_cmdset
= {
197 .read_status
= 0x0070,
203 .lock_status
= 0x007A,
206 #ifdef CONFIG_MTD_NAND_PXA3xx_BUILTIN
207 static struct pxa3xx_nand_timing samsung512MbX16_timing
= {
219 static struct pxa3xx_nand_flash samsung512MbX16
= {
220 .timing
= &samsung512MbX16_timing
,
221 .cmdset
= &smallpage_cmdset
,
222 .page_per_block
= 32,
230 static struct pxa3xx_nand_flash samsung2GbX8
= {
231 .timing
= &samsung512MbX16_timing
,
232 .cmdset
= &smallpage_cmdset
,
233 .page_per_block
= 64,
241 static struct pxa3xx_nand_flash samsung32GbX8
= {
242 .timing
= &samsung512MbX16_timing
,
243 .cmdset
= &smallpage_cmdset
,
244 .page_per_block
= 128,
252 static struct pxa3xx_nand_timing micron_timing
= {
264 static struct pxa3xx_nand_flash micron1GbX8
= {
265 .timing
= µn_timing
,
266 .cmdset
= &largepage_cmdset
,
267 .page_per_block
= 64,
275 static struct pxa3xx_nand_flash micron1GbX16
= {
276 .timing
= µn_timing
,
277 .cmdset
= &largepage_cmdset
,
278 .page_per_block
= 64,
286 static struct pxa3xx_nand_flash micron4GbX8
= {
287 .timing
= µn_timing
,
288 .cmdset
= &largepage_cmdset
,
289 .page_per_block
= 64,
297 static struct pxa3xx_nand_flash micron4GbX16
= {
298 .timing
= µn_timing
,
299 .cmdset
= &largepage_cmdset
,
300 .page_per_block
= 64,
308 static struct pxa3xx_nand_timing stm2GbX16_timing
= {
320 static struct pxa3xx_nand_flash stm2GbX16
= {
321 .timing
= &stm2GbX16_timing
,
322 .cmdset
= &largepage_cmdset
,
323 .page_per_block
= 64,
331 static struct pxa3xx_nand_flash
*builtin_flash_types
[] = {
341 #endif /* CONFIG_MTD_NAND_PXA3xx_BUILTIN */
343 #define NDTR0_tCH(c) (min((c), 7) << 19)
344 #define NDTR0_tCS(c) (min((c), 7) << 16)
345 #define NDTR0_tWH(c) (min((c), 7) << 11)
346 #define NDTR0_tWP(c) (min((c), 7) << 8)
347 #define NDTR0_tRH(c) (min((c), 7) << 3)
348 #define NDTR0_tRP(c) (min((c), 7) << 0)
350 #define NDTR1_tR(c) (min((c), 65535) << 16)
351 #define NDTR1_tWHR(c) (min((c), 15) << 4)
352 #define NDTR1_tAR(c) (min((c), 15) << 0)
354 #define tCH_NDTR0(r) (((r) >> 19) & 0x7)
355 #define tCS_NDTR0(r) (((r) >> 16) & 0x7)
356 #define tWH_NDTR0(r) (((r) >> 11) & 0x7)
357 #define tWP_NDTR0(r) (((r) >> 8) & 0x7)
358 #define tRH_NDTR0(r) (((r) >> 3) & 0x7)
359 #define tRP_NDTR0(r) (((r) >> 0) & 0x7)
361 #define tR_NDTR1(r) (((r) >> 16) & 0xffff)
362 #define tWHR_NDTR1(r) (((r) >> 4) & 0xf)
363 #define tAR_NDTR1(r) (((r) >> 0) & 0xf)
365 /* convert nano-seconds to nand flash controller clock cycles */
366 #define ns2cycle(ns, clk) (int)((ns) * (clk / 1000000) / 1000)
368 /* convert nand flash controller clock cycles to nano-seconds */
369 #define cycle2ns(c, clk) ((((c) + 1) * 1000000 + clk / 500) / (clk / 1000))
371 static void pxa3xx_nand_set_timing(struct pxa3xx_nand_info
*info
,
372 const struct pxa3xx_nand_timing
*t
)
374 unsigned long nand_clk
= clk_get_rate(info
->clk
);
375 uint32_t ndtr0
, ndtr1
;
377 ndtr0
= NDTR0_tCH(ns2cycle(t
->tCH
, nand_clk
)) |
378 NDTR0_tCS(ns2cycle(t
->tCS
, nand_clk
)) |
379 NDTR0_tWH(ns2cycle(t
->tWH
, nand_clk
)) |
380 NDTR0_tWP(ns2cycle(t
->tWP
, nand_clk
)) |
381 NDTR0_tRH(ns2cycle(t
->tRH
, nand_clk
)) |
382 NDTR0_tRP(ns2cycle(t
->tRP
, nand_clk
));
384 ndtr1
= NDTR1_tR(ns2cycle(t
->tR
, nand_clk
)) |
385 NDTR1_tWHR(ns2cycle(t
->tWHR
, nand_clk
)) |
386 NDTR1_tAR(ns2cycle(t
->tAR
, nand_clk
));
388 nand_writel(info
, NDTR0CS0
, ndtr0
);
389 nand_writel(info
, NDTR1CS0
, ndtr1
);
392 #define WAIT_EVENT_TIMEOUT 10
394 static int wait_for_event(struct pxa3xx_nand_info
*info
, uint32_t event
)
396 int timeout
= WAIT_EVENT_TIMEOUT
;
400 ndsr
= nand_readl(info
, NDSR
) & NDSR_MASK
;
402 nand_writel(info
, NDSR
, ndsr
);
411 static int prepare_read_prog_cmd(struct pxa3xx_nand_info
*info
,
412 uint16_t cmd
, int column
, int page_addr
)
414 const struct pxa3xx_nand_flash
*f
= info
->flash_info
;
415 const struct pxa3xx_nand_cmdset
*cmdset
= f
->cmdset
;
417 /* calculate data size */
418 switch (f
->page_size
) {
420 info
->data_size
= (info
->use_ecc
) ? 2088 : 2112;
423 info
->data_size
= (info
->use_ecc
) ? 520 : 528;
429 /* generate values for NDCBx registers */
430 info
->ndcb0
= cmd
| ((cmd
& 0xff00) ? NDCB0_DBC
: 0);
433 info
->ndcb0
|= NDCB0_ADDR_CYC(info
->row_addr_cycles
+ info
->col_addr_cycles
);
435 if (info
->col_addr_cycles
== 2) {
436 /* large block, 2 cycles for column address
437 * row address starts from 3rd cycle
439 info
->ndcb1
|= page_addr
<< 16;
440 if (info
->row_addr_cycles
== 3)
441 info
->ndcb2
= (page_addr
>> 16) & 0xff;
443 /* small block, 1 cycles for column address
444 * row address starts from 2nd cycle
446 info
->ndcb1
= page_addr
<< 8;
448 if (cmd
== cmdset
->program
)
449 info
->ndcb0
|= NDCB0_CMD_TYPE(1) | NDCB0_AUTO_RS
;
/*
 * Build the NDCBx command-buffer values for a block erase:
 * NDCB0 carries the command byte(s) (NDCB0_DBC is set when a second
 * command byte is present in bits 15:8), command type 2 (erase) with
 * automatic read-status and three row-address cycles; NDCB1 carries
 * the page (row) address.
 *
 * NOTE(review): the source extraction dropped lines from this
 * function (opening/closing braces, any NDCB2 setup, and the return
 * statement); the surviving text is preserved unchanged below and
 * should be reconciled against the upstream driver before use.
 */
454 static int prepare_erase_cmd(struct pxa3xx_nand_info
*info
,
455 uint16_t cmd
, int page_addr
)
457 info
->ndcb0
= cmd
| ((cmd
& 0xff00) ? NDCB0_DBC
: 0);
458 info
->ndcb0
|= NDCB0_CMD_TYPE(2) | NDCB0_AUTO_RS
| NDCB0_ADDR_CYC(3);
459 info
->ndcb1
= page_addr
;
464 static int prepare_other_cmd(struct pxa3xx_nand_info
*info
, uint16_t cmd
)
466 const struct pxa3xx_nand_cmdset
*cmdset
= info
->flash_info
->cmdset
;
468 info
->ndcb0
= cmd
| ((cmd
& 0xff00) ? NDCB0_DBC
: 0);
472 if (cmd
== cmdset
->read_id
) {
473 info
->ndcb0
|= NDCB0_CMD_TYPE(3);
475 } else if (cmd
== cmdset
->read_status
) {
476 info
->ndcb0
|= NDCB0_CMD_TYPE(4);
478 } else if (cmd
== cmdset
->reset
|| cmd
== cmdset
->lock
||
479 cmd
== cmdset
->unlock
) {
480 info
->ndcb0
|= NDCB0_CMD_TYPE(5);
487 static void enable_int(struct pxa3xx_nand_info
*info
, uint32_t int_mask
)
491 ndcr
= nand_readl(info
, NDCR
);
492 nand_writel(info
, NDCR
, ndcr
& ~int_mask
);
495 static void disable_int(struct pxa3xx_nand_info
*info
, uint32_t int_mask
)
499 ndcr
= nand_readl(info
, NDCR
);
500 nand_writel(info
, NDCR
, ndcr
| int_mask
);
503 /* NOTE: it is a must to set ND_RUN firstly, then write command buffer
504 * otherwise, it does not work
506 static int write_cmd(struct pxa3xx_nand_info
*info
)
510 /* clear status bits and run */
511 nand_writel(info
, NDSR
, NDSR_MASK
);
513 ndcr
= info
->reg_ndcr
;
515 ndcr
|= info
->use_ecc
? NDCR_ECC_EN
: 0;
516 ndcr
|= info
->use_dma
? NDCR_DMA_EN
: 0;
519 nand_writel(info
, NDCR
, ndcr
);
521 if (wait_for_event(info
, NDSR_WRCMDREQ
)) {
522 printk(KERN_ERR
"timed out writing command\n");
526 nand_writel(info
, NDCB0
, info
->ndcb0
);
527 nand_writel(info
, NDCB0
, info
->ndcb1
);
528 nand_writel(info
, NDCB0
, info
->ndcb2
);
532 static int handle_data_pio(struct pxa3xx_nand_info
*info
)
534 int ret
, timeout
= CHIP_DELAY_TIMEOUT
;
536 switch (info
->state
) {
537 case STATE_PIO_WRITING
:
538 __raw_writesl(info
->mmio_base
+ NDDB
, info
->data_buff
,
539 DIV_ROUND_UP(info
->data_size
, 4));
541 enable_int(info
, NDSR_CS0_BBD
| NDSR_CS0_CMDD
);
543 ret
= wait_for_completion_timeout(&info
->cmd_complete
, timeout
);
545 printk(KERN_ERR
"program command time out\n");
549 case STATE_PIO_READING
:
550 __raw_readsl(info
->mmio_base
+ NDDB
, info
->data_buff
,
551 DIV_ROUND_UP(info
->data_size
, 4));
554 printk(KERN_ERR
"%s: invalid state %d\n", __func__
,
559 info
->state
= STATE_READY
;
563 static void start_data_dma(struct pxa3xx_nand_info
*info
, int dir_out
)
565 struct pxa_dma_desc
*desc
= info
->data_desc
;
566 int dma_len
= ALIGN(info
->data_size
, 32);
568 desc
->ddadr
= DDADR_STOP
;
569 desc
->dcmd
= DCMD_ENDIRQEN
| DCMD_WIDTH4
| DCMD_BURST32
| dma_len
;
572 desc
->dsadr
= info
->data_buff_phys
;
573 desc
->dtadr
= info
->mmio_phys
+ NDDB
;
574 desc
->dcmd
|= DCMD_INCSRCADDR
| DCMD_FLOWTRG
;
576 desc
->dtadr
= info
->data_buff_phys
;
577 desc
->dsadr
= info
->mmio_phys
+ NDDB
;
578 desc
->dcmd
|= DCMD_INCTRGADDR
| DCMD_FLOWSRC
;
581 DRCMR(info
->drcmr_dat
) = DRCMR_MAPVLD
| info
->data_dma_ch
;
582 DDADR(info
->data_dma_ch
) = info
->data_desc_addr
;
583 DCSR(info
->data_dma_ch
) |= DCSR_RUN
;
586 static void pxa3xx_nand_data_dma_irq(int channel
, void *data
)
588 struct pxa3xx_nand_info
*info
= data
;
591 dcsr
= DCSR(channel
);
592 DCSR(channel
) = dcsr
;
594 if (dcsr
& DCSR_BUSERR
) {
595 info
->retcode
= ERR_DMABUSERR
;
596 complete(&info
->cmd_complete
);
599 if (info
->state
== STATE_DMA_WRITING
) {
600 info
->state
= STATE_DMA_DONE
;
601 enable_int(info
, NDSR_CS0_BBD
| NDSR_CS0_CMDD
);
603 info
->state
= STATE_READY
;
604 complete(&info
->cmd_complete
);
608 static irqreturn_t
pxa3xx_nand_irq(int irq
, void *devid
)
610 struct pxa3xx_nand_info
*info
= devid
;
613 status
= nand_readl(info
, NDSR
);
615 if (status
& (NDSR_RDDREQ
| NDSR_DBERR
| NDSR_SBERR
)) {
616 if (status
& NDSR_DBERR
)
617 info
->retcode
= ERR_DBERR
;
618 else if (status
& NDSR_SBERR
)
619 info
->retcode
= ERR_SBERR
;
621 disable_int(info
, NDSR_RDDREQ
| NDSR_DBERR
| NDSR_SBERR
);
624 info
->state
= STATE_DMA_READING
;
625 start_data_dma(info
, 0);
627 info
->state
= STATE_PIO_READING
;
628 complete(&info
->cmd_complete
);
630 } else if (status
& NDSR_WRDREQ
) {
631 disable_int(info
, NDSR_WRDREQ
);
633 info
->state
= STATE_DMA_WRITING
;
634 start_data_dma(info
, 1);
636 info
->state
= STATE_PIO_WRITING
;
637 complete(&info
->cmd_complete
);
639 } else if (status
& (NDSR_CS0_BBD
| NDSR_CS0_CMDD
)) {
640 if (status
& NDSR_CS0_BBD
)
641 info
->retcode
= ERR_BBERR
;
643 disable_int(info
, NDSR_CS0_BBD
| NDSR_CS0_CMDD
);
644 info
->state
= STATE_READY
;
645 complete(&info
->cmd_complete
);
647 nand_writel(info
, NDSR
, status
);
651 static int pxa3xx_nand_do_cmd(struct pxa3xx_nand_info
*info
, uint32_t event
)
654 int ret
, timeout
= CHIP_DELAY_TIMEOUT
;
656 if (write_cmd(info
)) {
657 info
->retcode
= ERR_SENDCMD
;
661 info
->state
= STATE_CMD_HANDLE
;
663 enable_int(info
, event
);
665 ret
= wait_for_completion_timeout(&info
->cmd_complete
, timeout
);
667 printk(KERN_ERR
"command execution timed out\n");
668 info
->retcode
= ERR_SENDCMD
;
672 if (info
->use_dma
== 0 && info
->data_size
> 0)
673 if (handle_data_pio(info
))
679 ndcr
= nand_readl(info
, NDCR
);
680 nand_writel(info
, NDCR
, ndcr
& ~NDCR_ND_RUN
);
685 static int pxa3xx_nand_dev_ready(struct mtd_info
*mtd
)
687 struct pxa3xx_nand_info
*info
= mtd
->priv
;
688 return (nand_readl(info
, NDSR
) & NDSR_RDY
) ? 1 : 0;
/*
 * Return 1 if the first @len bytes of @buf are all 0xFF, i.e. the
 * buffer looks like erased ("blank") flash, and 0 otherwise.  An
 * empty buffer (@len == 0) is considered blank.  Used to suppress
 * spurious double-bit ECC errors on blank pages (see the caller's
 * "blank page (all 0xff)" comment).
 *
 * NOTE(review): the extraction dropped the loop body and return
 * statements; reconstructed here from the visible loop header and
 * the blank-page semantics documented at the call site.
 */
static inline int is_buf_blank(uint8_t *buf, size_t len)
{
	for (; len > 0; len--)
		if (*buf++ != 0xff)
			return 0;
	return 1;
}
699 static void pxa3xx_nand_cmdfunc(struct mtd_info
*mtd
, unsigned command
,
700 int column
, int page_addr
)
702 struct pxa3xx_nand_info
*info
= mtd
->priv
;
703 const struct pxa3xx_nand_flash
*flash_info
= info
->flash_info
;
704 const struct pxa3xx_nand_cmdset
*cmdset
= flash_info
->cmdset
;
707 info
->use_dma
= (use_dma
) ? 1 : 0;
710 info
->state
= STATE_READY
;
712 init_completion(&info
->cmd_complete
);
715 case NAND_CMD_READOOB
:
716 /* disable HW ECC to get all the OOB data */
717 info
->buf_count
= mtd
->writesize
+ mtd
->oobsize
;
718 info
->buf_start
= mtd
->writesize
+ column
;
719 memset(info
->data_buff
, 0xFF, info
->buf_count
);
721 if (prepare_read_prog_cmd(info
, cmdset
->read1
, column
, page_addr
))
724 pxa3xx_nand_do_cmd(info
, NDSR_RDDREQ
| NDSR_DBERR
| NDSR_SBERR
);
726 /* We only read the OOB area, so if the data has an error, it does not matter */
727 if (info
->retcode
== ERR_DBERR
)
728 info
->retcode
= ERR_NONE
;
733 info
->retcode
= ERR_NONE
;
734 info
->buf_start
= column
;
735 info
->buf_count
= mtd
->writesize
+ mtd
->oobsize
;
736 memset(info
->data_buff
, 0xFF, info
->buf_count
);
738 if (prepare_read_prog_cmd(info
, cmdset
->read1
, column
, page_addr
))
741 pxa3xx_nand_do_cmd(info
, NDSR_RDDREQ
| NDSR_DBERR
| NDSR_SBERR
);
743 if (info
->retcode
== ERR_DBERR
) {
744 /* for blank page (all 0xff), HW will calculate its ECC as
745 * 0, which is different from the ECC information within
746 * OOB, ignore such double bit errors
748 if (is_buf_blank(info
->data_buff
, mtd
->writesize
))
749 info
->retcode
= ERR_NONE
;
753 info
->buf_start
= column
;
754 info
->buf_count
= mtd
->writesize
+ mtd
->oobsize
;
755 memset(info
->data_buff
, 0xff, info
->buf_count
);
757 /* save column/page_addr for next CMD_PAGEPROG */
758 info
->seqin_column
= column
;
759 info
->seqin_page_addr
= page_addr
;
761 case NAND_CMD_PAGEPROG
:
762 info
->use_ecc
= (info
->seqin_column
>= mtd
->writesize
) ? 0 : 1;
764 if (prepare_read_prog_cmd(info
, cmdset
->program
,
765 info
->seqin_column
, info
->seqin_page_addr
))
768 pxa3xx_nand_do_cmd(info
, NDSR_WRDREQ
);
770 case NAND_CMD_ERASE1
:
771 if (prepare_erase_cmd(info
, cmdset
->erase
, page_addr
))
774 pxa3xx_nand_do_cmd(info
, NDSR_CS0_BBD
| NDSR_CS0_CMDD
);
776 case NAND_CMD_ERASE2
:
778 case NAND_CMD_READID
:
779 case NAND_CMD_STATUS
:
780 info
->use_dma
= 0; /* force PIO read */
782 info
->buf_count
= (command
== NAND_CMD_READID
) ?
783 info
->read_id_bytes
: 1;
785 if (prepare_other_cmd(info
, (command
== NAND_CMD_READID
) ?
786 cmdset
->read_id
: cmdset
->read_status
))
789 pxa3xx_nand_do_cmd(info
, NDSR_RDDREQ
);
792 if (prepare_other_cmd(info
, cmdset
->reset
))
795 ret
= pxa3xx_nand_do_cmd(info
, NDSR_CS0_CMDD
);
801 if (nand_readl(info
, NDSR
) & NDSR_RDY
)
806 ndcr
= nand_readl(info
, NDCR
);
807 nand_writel(info
, NDCR
, ndcr
& ~NDCR_ND_RUN
);
811 printk(KERN_ERR
"non-supported command.\n");
815 if (info
->retcode
== ERR_DBERR
) {
816 printk(KERN_ERR
"double bit error @ page %08x\n", page_addr
);
817 info
->retcode
= ERR_NONE
;
821 static uint8_t pxa3xx_nand_read_byte(struct mtd_info
*mtd
)
823 struct pxa3xx_nand_info
*info
= mtd
->priv
;
826 if (info
->buf_start
< info
->buf_count
)
827 /* Has just send a new command? */
828 retval
= info
->data_buff
[info
->buf_start
++];
/*
 * nand_chip .read_word hook: return the 16-bit word at the current
 * read position in the driver's bounce buffer, advancing buf_start
 * by two.  The read is only performed when buf_start is even (16-bit
 * aligned) and within buf_count.
 *
 * NOTE(review): the extraction dropped lines from this function (the
 * declaration/initialization of `retval` and the final return); the
 * upstream driver initializes retval to 0xFFFF for the out-of-range
 * case -- TODO confirm against the original file.  Surviving text is
 * preserved unchanged below.
 */
833 static u16
pxa3xx_nand_read_word(struct mtd_info
*mtd
)
835 struct pxa3xx_nand_info
*info
= mtd
->priv
;
838 if (!(info
->buf_start
& 0x01) && info
->buf_start
< info
->buf_count
) {
839 retval
= *((u16
*)(info
->data_buff
+info
->buf_start
));
840 info
->buf_start
+= 2;
845 static void pxa3xx_nand_read_buf(struct mtd_info
*mtd
, uint8_t *buf
, int len
)
847 struct pxa3xx_nand_info
*info
= mtd
->priv
;
848 int real_len
= min_t(size_t, len
, info
->buf_count
- info
->buf_start
);
850 memcpy(buf
, info
->data_buff
+ info
->buf_start
, real_len
);
851 info
->buf_start
+= real_len
;
854 static void pxa3xx_nand_write_buf(struct mtd_info
*mtd
,
855 const uint8_t *buf
, int len
)
857 struct pxa3xx_nand_info
*info
= mtd
->priv
;
858 int real_len
= min_t(size_t, len
, info
->buf_count
- info
->buf_start
);
860 memcpy(info
->data_buff
+ info
->buf_start
, buf
, real_len
);
861 info
->buf_start
+= real_len
;
864 static int pxa3xx_nand_verify_buf(struct mtd_info
*mtd
,
865 const uint8_t *buf
, int len
)
870 static void pxa3xx_nand_select_chip(struct mtd_info
*mtd
, int chip
)
875 static int pxa3xx_nand_waitfunc(struct mtd_info
*mtd
, struct nand_chip
*this)
877 struct pxa3xx_nand_info
*info
= mtd
->priv
;
879 /* pxa3xx_nand_send_command has waited for command complete */
880 if (this->state
== FL_WRITING
|| this->state
== FL_ERASING
) {
881 if (info
->retcode
== ERR_NONE
)
885 * any error make it return 0x01 which will tell
886 * the caller the erase and write fail
895 static void pxa3xx_nand_ecc_hwctl(struct mtd_info
*mtd
, int mode
)
900 static int pxa3xx_nand_ecc_calculate(struct mtd_info
*mtd
,
901 const uint8_t *dat
, uint8_t *ecc_code
)
906 static int pxa3xx_nand_ecc_correct(struct mtd_info
*mtd
,
907 uint8_t *dat
, uint8_t *read_ecc
, uint8_t *calc_ecc
)
909 struct pxa3xx_nand_info
*info
= mtd
->priv
;
911 * Any error, including ERR_SEND_CMD, ERR_DBERR and ERR_BUSERR, is
912 * treated as an ECC error, which tells the caller that the
913 * read failed.  We do distinguish all the errors internally, but
914 * nand_read_ecc only checks this function's return value
916 * Corrected (single-bit) errors must also be noted.
918 if (info
->retcode
== ERR_SBERR
)
920 else if (info
->retcode
!= ERR_NONE
)
926 static int __readid(struct pxa3xx_nand_info
*info
, uint32_t *id
)
928 const struct pxa3xx_nand_flash
*f
= info
->flash_info
;
929 const struct pxa3xx_nand_cmdset
*cmdset
= f
->cmdset
;
933 if (prepare_other_cmd(info
, cmdset
->read_id
)) {
934 printk(KERN_ERR
"failed to prepare command\n");
942 /* Wait for CMDDM(command done successfully) */
943 if (wait_for_event(info
, NDSR_RDDREQ
))
946 __raw_readsl(info
->mmio_base
+ NDDB
, id_buff
, 2);
947 *id
= id_buff
[0] | (id_buff
[1] << 8);
951 ndcr
= nand_readl(info
, NDCR
);
952 nand_writel(info
, NDCR
, ndcr
& ~NDCR_ND_RUN
);
957 static int pxa3xx_nand_config_flash(struct pxa3xx_nand_info
*info
,
958 const struct pxa3xx_nand_flash
*f
)
960 struct platform_device
*pdev
= info
->pdev
;
961 struct pxa3xx_nand_platform_data
*pdata
= pdev
->dev
.platform_data
;
962 uint32_t ndcr
= 0x00000FFF; /* disable all interrupts */
964 if (f
->page_size
!= 2048 && f
->page_size
!= 512)
967 if (f
->flash_width
!= 16 && f
->flash_width
!= 8)
970 /* calculate flash information */
971 info
->oob_size
= (f
->page_size
== 2048) ? 64 : 16;
972 info
->read_id_bytes
= (f
->page_size
== 2048) ? 4 : 2;
974 /* calculate addressing information */
975 info
->col_addr_cycles
= (f
->page_size
== 2048) ? 2 : 1;
977 if (f
->num_blocks
* f
->page_per_block
> 65536)
978 info
->row_addr_cycles
= 3;
980 info
->row_addr_cycles
= 2;
982 ndcr
|= (pdata
->enable_arbiter
) ? NDCR_ND_ARB_EN
: 0;
983 ndcr
|= (info
->col_addr_cycles
== 2) ? NDCR_RA_START
: 0;
984 ndcr
|= (f
->page_per_block
== 64) ? NDCR_PG_PER_BLK
: 0;
985 ndcr
|= (f
->page_size
== 2048) ? NDCR_PAGE_SZ
: 0;
986 ndcr
|= (f
->flash_width
== 16) ? NDCR_DWIDTH_M
: 0;
987 ndcr
|= (f
->dfc_width
== 16) ? NDCR_DWIDTH_C
: 0;
989 ndcr
|= NDCR_RD_ID_CNT(info
->read_id_bytes
);
990 ndcr
|= NDCR_SPARE_EN
; /* enable spare by default */
992 info
->reg_ndcr
= ndcr
;
994 pxa3xx_nand_set_timing(info
, f
->timing
);
995 info
->flash_info
= f
;
999 static void pxa3xx_nand_detect_timing(struct pxa3xx_nand_info
*info
,
1000 struct pxa3xx_nand_timing
*t
)
1002 unsigned long nand_clk
= clk_get_rate(info
->clk
);
1003 uint32_t ndtr0
= nand_readl(info
, NDTR0CS0
);
1004 uint32_t ndtr1
= nand_readl(info
, NDTR1CS0
);
1006 t
->tCH
= cycle2ns(tCH_NDTR0(ndtr0
), nand_clk
);
1007 t
->tCS
= cycle2ns(tCS_NDTR0(ndtr0
), nand_clk
);
1008 t
->tWH
= cycle2ns(tWH_NDTR0(ndtr0
), nand_clk
);
1009 t
->tWP
= cycle2ns(tWP_NDTR0(ndtr0
), nand_clk
);
1010 t
->tRH
= cycle2ns(tRH_NDTR0(ndtr0
), nand_clk
);
1011 t
->tRP
= cycle2ns(tRP_NDTR0(ndtr0
), nand_clk
);
1013 t
->tR
= cycle2ns(tR_NDTR1(ndtr1
), nand_clk
);
1014 t
->tWHR
= cycle2ns(tWHR_NDTR1(ndtr1
), nand_clk
);
1015 t
->tAR
= cycle2ns(tAR_NDTR1(ndtr1
), nand_clk
);
1018 static int pxa3xx_nand_detect_config(struct pxa3xx_nand_info
*info
)
1020 uint32_t ndcr
= nand_readl(info
, NDCR
);
1021 struct nand_flash_dev
*type
= NULL
;
1025 default_flash
.page_per_block
= ndcr
& NDCR_PG_PER_BLK
? 64 : 32;
1026 default_flash
.page_size
= ndcr
& NDCR_PAGE_SZ
? 2048 : 512;
1027 default_flash
.flash_width
= ndcr
& NDCR_DWIDTH_M
? 16 : 8;
1028 default_flash
.dfc_width
= ndcr
& NDCR_DWIDTH_C
? 16 : 8;
1030 if (default_flash
.page_size
== 2048)
1031 default_flash
.cmdset
= &largepage_cmdset
;
1033 default_flash
.cmdset
= &smallpage_cmdset
;
1035 /* set info fields needed to __readid */
1036 info
->flash_info
= &default_flash
;
1037 info
->read_id_bytes
= (default_flash
.page_size
== 2048) ? 4 : 2;
1038 info
->reg_ndcr
= ndcr
;
1040 if (__readid(info
, &id
))
1043 /* Lookup the flash id */
1044 id
= (id
>> 8) & 0xff; /* device id is byte 2 */
1045 for (i
= 0; nand_flash_ids
[i
].name
!= NULL
; i
++) {
1046 if (id
== nand_flash_ids
[i
].id
) {
1047 type
= &nand_flash_ids
[i
];
1055 /* fill the missing flash information */
1056 i
= __ffs(default_flash
.page_per_block
* default_flash
.page_size
);
1057 default_flash
.num_blocks
= type
->chipsize
<< (20 - i
);
1059 info
->oob_size
= (default_flash
.page_size
== 2048) ? 64 : 16;
1061 /* calculate addressing information */
1062 info
->col_addr_cycles
= (default_flash
.page_size
== 2048) ? 2 : 1;
1064 if (default_flash
.num_blocks
* default_flash
.page_per_block
> 65536)
1065 info
->row_addr_cycles
= 3;
1067 info
->row_addr_cycles
= 2;
1069 pxa3xx_nand_detect_timing(info
, &default_timing
);
1070 default_flash
.timing
= &default_timing
;
1075 static int pxa3xx_nand_detect_flash(struct pxa3xx_nand_info
*info
,
1076 const struct pxa3xx_nand_platform_data
*pdata
)
1078 const struct pxa3xx_nand_flash
*f
;
1082 if (pdata
->keep_config
)
1083 if (pxa3xx_nand_detect_config(info
) == 0)
1086 for (i
= 0; i
<pdata
->num_flash
; ++i
) {
1087 f
= pdata
->flash
+ i
;
1089 if (pxa3xx_nand_config_flash(info
, f
))
1092 if (__readid(info
, &id
))
1095 if (id
== f
->chip_id
)
1099 #ifdef CONFIG_MTD_NAND_PXA3xx_BUILTIN
1100 for (i
= 0; i
< ARRAY_SIZE(builtin_flash_types
); i
++) {
1102 f
= builtin_flash_types
[i
];
1104 if (pxa3xx_nand_config_flash(info
, f
))
1107 if (__readid(info
, &id
))
1110 if (id
== f
->chip_id
)
1115 dev_warn(&info
->pdev
->dev
,
1116 "failed to detect configured nand flash; found %04x instead of\n",
1121 /* the maximum possible buffer size for large page with OOB data
1122 * is: 2048 + 64 = 2112 bytes, allocate a page here for both the
1123 * data buffer and the DMA descriptor
1125 #define MAX_BUFF_SIZE PAGE_SIZE
1127 static int pxa3xx_nand_init_buff(struct pxa3xx_nand_info
*info
)
1129 struct platform_device
*pdev
= info
->pdev
;
1130 int data_desc_offset
= MAX_BUFF_SIZE
- sizeof(struct pxa_dma_desc
);
1133 info
->data_buff
= kmalloc(MAX_BUFF_SIZE
, GFP_KERNEL
);
1134 if (info
->data_buff
== NULL
)
1139 info
->data_buff
= dma_alloc_coherent(&pdev
->dev
, MAX_BUFF_SIZE
,
1140 &info
->data_buff_phys
, GFP_KERNEL
);
1141 if (info
->data_buff
== NULL
) {
1142 dev_err(&pdev
->dev
, "failed to allocate dma buffer\n");
1146 info
->data_buff_size
= MAX_BUFF_SIZE
;
1147 info
->data_desc
= (void *)info
->data_buff
+ data_desc_offset
;
1148 info
->data_desc_addr
= info
->data_buff_phys
+ data_desc_offset
;
1150 info
->data_dma_ch
= pxa_request_dma("nand-data", DMA_PRIO_LOW
,
1151 pxa3xx_nand_data_dma_irq
, info
);
1152 if (info
->data_dma_ch
< 0) {
1153 dev_err(&pdev
->dev
, "failed to request data dma\n");
1154 dma_free_coherent(&pdev
->dev
, info
->data_buff_size
,
1155 info
->data_buff
, info
->data_buff_phys
);
1156 return info
->data_dma_ch
;
1162 static struct nand_ecclayout hw_smallpage_ecclayout
= {
1164 .eccpos
= {8, 9, 10, 11, 12, 13 },
1165 .oobfree
= { {2, 6} }
1168 static struct nand_ecclayout hw_largepage_ecclayout
= {
1171 40, 41, 42, 43, 44, 45, 46, 47,
1172 48, 49, 50, 51, 52, 53, 54, 55,
1173 56, 57, 58, 59, 60, 61, 62, 63},
1174 .oobfree
= { {2, 38} }
1177 static void pxa3xx_nand_init_mtd(struct mtd_info
*mtd
,
1178 struct pxa3xx_nand_info
*info
)
1180 const struct pxa3xx_nand_flash
*f
= info
->flash_info
;
1181 struct nand_chip
*this = &info
->nand_chip
;
1183 this->options
= (f
->flash_width
== 16) ? NAND_BUSWIDTH_16
: 0;
1185 this->waitfunc
= pxa3xx_nand_waitfunc
;
1186 this->select_chip
= pxa3xx_nand_select_chip
;
1187 this->dev_ready
= pxa3xx_nand_dev_ready
;
1188 this->cmdfunc
= pxa3xx_nand_cmdfunc
;
1189 this->read_word
= pxa3xx_nand_read_word
;
1190 this->read_byte
= pxa3xx_nand_read_byte
;
1191 this->read_buf
= pxa3xx_nand_read_buf
;
1192 this->write_buf
= pxa3xx_nand_write_buf
;
1193 this->verify_buf
= pxa3xx_nand_verify_buf
;
1195 this->ecc
.mode
= NAND_ECC_HW
;
1196 this->ecc
.hwctl
= pxa3xx_nand_ecc_hwctl
;
1197 this->ecc
.calculate
= pxa3xx_nand_ecc_calculate
;
1198 this->ecc
.correct
= pxa3xx_nand_ecc_correct
;
1199 this->ecc
.size
= f
->page_size
;
1201 if (f
->page_size
== 2048)
1202 this->ecc
.layout
= &hw_largepage_ecclayout
;
1204 this->ecc
.layout
= &hw_smallpage_ecclayout
;
1206 this->chip_delay
= 25;
1209 static int pxa3xx_nand_probe(struct platform_device
*pdev
)
1211 struct pxa3xx_nand_platform_data
*pdata
;
1212 struct pxa3xx_nand_info
*info
;
1213 struct nand_chip
*this;
1214 struct mtd_info
*mtd
;
1218 pdata
= pdev
->dev
.platform_data
;
1221 dev_err(&pdev
->dev
, "no platform data defined\n");
1225 mtd
= kzalloc(sizeof(struct mtd_info
) + sizeof(struct pxa3xx_nand_info
),
1228 dev_err(&pdev
->dev
, "failed to allocate memory\n");
1232 info
= (struct pxa3xx_nand_info
*)(&mtd
[1]);
1235 this = &info
->nand_chip
;
1237 mtd
->owner
= THIS_MODULE
;
1239 info
->clk
= clk_get(&pdev
->dev
, NULL
);
1240 if (IS_ERR(info
->clk
)) {
1241 dev_err(&pdev
->dev
, "failed to get nand clock\n");
1242 ret
= PTR_ERR(info
->clk
);
1245 clk_enable(info
->clk
);
1247 r
= platform_get_resource(pdev
, IORESOURCE_DMA
, 0);
1249 dev_err(&pdev
->dev
, "no resource defined for data DMA\n");
1253 info
->drcmr_dat
= r
->start
;
1255 r
= platform_get_resource(pdev
, IORESOURCE_DMA
, 1);
1257 dev_err(&pdev
->dev
, "no resource defined for command DMA\n");
1261 info
->drcmr_cmd
= r
->start
;
1263 irq
= platform_get_irq(pdev
, 0);
1265 dev_err(&pdev
->dev
, "no IRQ resource defined\n");
1270 r
= platform_get_resource(pdev
, IORESOURCE_MEM
, 0);
1272 dev_err(&pdev
->dev
, "no IO memory resource defined\n");
1277 r
= request_mem_region(r
->start
, resource_size(r
), pdev
->name
);
1279 dev_err(&pdev
->dev
, "failed to request memory resource\n");
1284 info
->mmio_base
= ioremap(r
->start
, resource_size(r
));
1285 if (info
->mmio_base
== NULL
) {
1286 dev_err(&pdev
->dev
, "ioremap() failed\n");
1290 info
->mmio_phys
= r
->start
;
1292 ret
= pxa3xx_nand_init_buff(info
);
1296 /* initialize all interrupts to be disabled */
1297 disable_int(info
, NDSR_MASK
);
1299 ret
= request_irq(irq
, pxa3xx_nand_irq
, IRQF_DISABLED
,
1302 dev_err(&pdev
->dev
, "failed to request IRQ\n");
1306 ret
= pxa3xx_nand_detect_flash(info
, pdata
);
1308 dev_err(&pdev
->dev
, "failed to detect flash\n");
1313 pxa3xx_nand_init_mtd(mtd
, info
);
1315 platform_set_drvdata(pdev
, mtd
);
1317 if (nand_scan(mtd
, 1)) {
1318 dev_err(&pdev
->dev
, "failed to scan nand\n");
1323 #ifdef CONFIG_MTD_PARTITIONS
1324 if (mtd_has_cmdlinepart()) {
1325 static const char *probes
[] = { "cmdlinepart", NULL
};
1326 struct mtd_partition
*parts
;
1329 nr_parts
= parse_mtd_partitions(mtd
, probes
, &parts
, 0);
1332 return add_mtd_partitions(mtd
, parts
, nr_parts
);
1335 return add_mtd_partitions(mtd
, pdata
->parts
, pdata
->nr_parts
);
1341 free_irq(irq
, info
);
1344 pxa_free_dma(info
->data_dma_ch
);
1345 dma_free_coherent(&pdev
->dev
, info
->data_buff_size
,
1346 info
->data_buff
, info
->data_buff_phys
);
1348 kfree(info
->data_buff
);
1350 iounmap(info
->mmio_base
);
1352 release_mem_region(r
->start
, resource_size(r
));
1354 clk_disable(info
->clk
);
1361 static int pxa3xx_nand_remove(struct platform_device
*pdev
)
1363 struct mtd_info
*mtd
= platform_get_drvdata(pdev
);
1364 struct pxa3xx_nand_info
*info
= mtd
->priv
;
1368 platform_set_drvdata(pdev
, NULL
);
1370 del_mtd_device(mtd
);
1371 #ifdef CONFIG_MTD_PARTITIONS
1372 del_mtd_partitions(mtd
);
1374 irq
= platform_get_irq(pdev
, 0);
1376 free_irq(irq
, info
);
1378 pxa_free_dma(info
->data_dma_ch
);
1379 dma_free_writecombine(&pdev
->dev
, info
->data_buff_size
,
1380 info
->data_buff
, info
->data_buff_phys
);
1382 kfree(info
->data_buff
);
1384 iounmap(info
->mmio_base
);
1385 r
= platform_get_resource(pdev
, IORESOURCE_MEM
, 0);
1386 release_mem_region(r
->start
, resource_size(r
));
1388 clk_disable(info
->clk
);
1396 static int pxa3xx_nand_suspend(struct platform_device
*pdev
, pm_message_t state
)
1398 struct mtd_info
*mtd
= (struct mtd_info
*)platform_get_drvdata(pdev
);
1399 struct pxa3xx_nand_info
*info
= mtd
->priv
;
1401 if (info
->state
!= STATE_READY
) {
1402 dev_err(&pdev
->dev
, "driver busy, state = %d\n", info
->state
);
1409 static int pxa3xx_nand_resume(struct platform_device
*pdev
)
1411 struct mtd_info
*mtd
= (struct mtd_info
*)platform_get_drvdata(pdev
);
1412 struct pxa3xx_nand_info
*info
= mtd
->priv
;
1414 clk_enable(info
->clk
);
1416 return pxa3xx_nand_config_flash(info
, info
->flash_info
);
1419 #define pxa3xx_nand_suspend NULL
1420 #define pxa3xx_nand_resume NULL
1423 static struct platform_driver pxa3xx_nand_driver
= {
1425 .name
= "pxa3xx-nand",
1427 .probe
= pxa3xx_nand_probe
,
1428 .remove
= pxa3xx_nand_remove
,
1429 .suspend
= pxa3xx_nand_suspend
,
1430 .resume
= pxa3xx_nand_resume
,
1433 static int __init
pxa3xx_nand_init(void)
1435 return platform_driver_register(&pxa3xx_nand_driver
);
1437 module_init(pxa3xx_nand_init
);
1439 static void __exit
pxa3xx_nand_exit(void)
1441 platform_driver_unregister(&pxa3xx_nand_driver
);
1443 module_exit(pxa3xx_nand_exit
);
1445 MODULE_LICENSE("GPL");
1446 MODULE_DESCRIPTION("PXA3xx NAND controller driver");