Lines Matching full:ctrl
309 struct brcmnand_controller *ctrl; member
592 static inline u32 nand_readreg(struct brcmnand_controller *ctrl, u32 offs) in nand_readreg() argument
594 return brcmnand_readl(ctrl->nand_base + offs); in nand_readreg()
597 static inline void nand_writereg(struct brcmnand_controller *ctrl, u32 offs, in nand_writereg() argument
600 brcmnand_writel(val, ctrl->nand_base + offs); in nand_writereg()
603 static int brcmnand_revision_init(struct brcmnand_controller *ctrl) in brcmnand_revision_init() argument
613 ctrl->nand_version = nand_readreg(ctrl, 0) & 0xffff; in brcmnand_revision_init()
616 if (ctrl->nand_version < 0x0201) { in brcmnand_revision_init()
617 dev_err(ctrl->dev, "version %#x not supported\n", in brcmnand_revision_init()
618 ctrl->nand_version); in brcmnand_revision_init()
623 if (ctrl->nand_version >= 0x0702) in brcmnand_revision_init()
624 ctrl->reg_offsets = brcmnand_regs_v72; in brcmnand_revision_init()
625 else if (ctrl->nand_version == 0x0701) in brcmnand_revision_init()
626 ctrl->reg_offsets = brcmnand_regs_v71; in brcmnand_revision_init()
627 else if (ctrl->nand_version >= 0x0600) in brcmnand_revision_init()
628 ctrl->reg_offsets = brcmnand_regs_v60; in brcmnand_revision_init()
629 else if (ctrl->nand_version >= 0x0500) in brcmnand_revision_init()
630 ctrl->reg_offsets = brcmnand_regs_v50; in brcmnand_revision_init()
631 else if (ctrl->nand_version >= 0x0303) in brcmnand_revision_init()
632 ctrl->reg_offsets = brcmnand_regs_v33; in brcmnand_revision_init()
633 else if (ctrl->nand_version >= 0x0201) in brcmnand_revision_init()
634 ctrl->reg_offsets = brcmnand_regs_v21; in brcmnand_revision_init()
637 if (ctrl->nand_version >= 0x0701) in brcmnand_revision_init()
638 ctrl->reg_spacing = 0x14; in brcmnand_revision_init()
640 ctrl->reg_spacing = 0x10; in brcmnand_revision_init()
643 if (ctrl->nand_version >= 0x0701) { in brcmnand_revision_init()
644 ctrl->cs_offsets = brcmnand_cs_offsets_v71; in brcmnand_revision_init()
646 ctrl->cs_offsets = brcmnand_cs_offsets; in brcmnand_revision_init()
649 if (ctrl->nand_version >= 0x0303 && in brcmnand_revision_init()
650 ctrl->nand_version <= 0x0500) in brcmnand_revision_init()
651 ctrl->cs0_offsets = brcmnand_cs_offsets_cs0; in brcmnand_revision_init()
655 if (ctrl->nand_version >= 0x0701) { in brcmnand_revision_init()
657 ctrl->max_page_size = 16 * 1024; in brcmnand_revision_init()
658 ctrl->max_block_size = 2 * 1024 * 1024; in brcmnand_revision_init()
660 if (ctrl->nand_version >= 0x0304) in brcmnand_revision_init()
661 ctrl->page_sizes = page_sizes_v3_4; in brcmnand_revision_init()
662 else if (ctrl->nand_version >= 0x0202) in brcmnand_revision_init()
663 ctrl->page_sizes = page_sizes_v2_2; in brcmnand_revision_init()
665 ctrl->page_sizes = page_sizes_v2_1; in brcmnand_revision_init()
667 if (ctrl->nand_version >= 0x0202) in brcmnand_revision_init()
668 ctrl->page_size_shift = CFG_PAGE_SIZE_SHIFT; in brcmnand_revision_init()
670 ctrl->page_size_shift = CFG_PAGE_SIZE_SHIFT_v2_1; in brcmnand_revision_init()
672 if (ctrl->nand_version >= 0x0600) in brcmnand_revision_init()
673 ctrl->block_sizes = block_sizes_v6; in brcmnand_revision_init()
674 else if (ctrl->nand_version >= 0x0400) in brcmnand_revision_init()
675 ctrl->block_sizes = block_sizes_v4; in brcmnand_revision_init()
676 else if (ctrl->nand_version >= 0x0202) in brcmnand_revision_init()
677 ctrl->block_sizes = block_sizes_v2_2; in brcmnand_revision_init()
679 ctrl->block_sizes = block_sizes_v2_1; in brcmnand_revision_init()
681 if (ctrl->nand_version < 0x0400) { in brcmnand_revision_init()
682 if (ctrl->nand_version < 0x0202) in brcmnand_revision_init()
683 ctrl->max_page_size = 2048; in brcmnand_revision_init()
685 ctrl->max_page_size = 4096; in brcmnand_revision_init()
686 ctrl->max_block_size = 512 * 1024; in brcmnand_revision_init()
691 if (ctrl->nand_version == 0x0702) in brcmnand_revision_init()
692 ctrl->max_oob = 128; in brcmnand_revision_init()
693 else if (ctrl->nand_version >= 0x0600) in brcmnand_revision_init()
694 ctrl->max_oob = 64; in brcmnand_revision_init()
695 else if (ctrl->nand_version >= 0x0500) in brcmnand_revision_init()
696 ctrl->max_oob = 32; in brcmnand_revision_init()
698 ctrl->max_oob = 16; in brcmnand_revision_init()
701 if (ctrl->nand_version >= 0x0600 && ctrl->nand_version != 0x0601) in brcmnand_revision_init()
702 ctrl->features |= BRCMNAND_HAS_PREFETCH; in brcmnand_revision_init()
708 if (ctrl->nand_version >= 0x0700) in brcmnand_revision_init()
709 ctrl->features |= BRCMNAND_HAS_CACHE_MODE; in brcmnand_revision_init()
711 if (ctrl->nand_version >= 0x0500) in brcmnand_revision_init()
712 ctrl->features |= BRCMNAND_HAS_1K_SECTORS; in brcmnand_revision_init()
714 if (ctrl->nand_version >= 0x0700) in brcmnand_revision_init()
715 ctrl->features |= BRCMNAND_HAS_WP; in brcmnand_revision_init()
716 else if (of_property_read_bool(ctrl->dev->of_node, "brcm,nand-has-wp")) in brcmnand_revision_init()
717 ctrl->features |= BRCMNAND_HAS_WP; in brcmnand_revision_init()
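These hits appear to come from the Linux kernel's Broadcom STB NAND driver (drivers/mtd/nand/raw/brcmnand/brcmnand.c). The brcmnand_revision_init() lines above show the driver's central pattern: a 16-bit controller revision read from register 0 (0x0702 meaning v7.2, anything below 0x0201 rejected) selects register-offset tables, page/block-size tables, OOB limits and feature flags. A minimal standalone sketch of that dispatch, with placeholder tables and an illustrative function name rather than the driver's real data:

#include <stdint.h>

/* Placeholder offset tables; the driver keeps one per controller revision. */
static const uint16_t regs_v72[] = { 0x00, 0x04, 0x08 };
static const uint16_t regs_v60[] = { 0x00, 0x04, 0x0c };
static const uint16_t regs_v21[] = { 0x00, 0x08, 0x10 };

static const uint16_t *pick_reg_table(uint32_t nand_version)
{
    if (nand_version < 0x0201)
        return 0;                   /* unsupported, mirrors the check above */
    if (nand_version >= 0x0702)
        return regs_v72;
    if (nand_version >= 0x0600)
        return regs_v60;
    return regs_v21;                /* intermediate revisions elided here */
}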
722 static void brcmnand_flash_dma_revision_init(struct brcmnand_controller *ctrl) in brcmnand_flash_dma_revision_init() argument
725 if (ctrl->nand_version >= 0x0703) in brcmnand_flash_dma_revision_init()
726 ctrl->flash_dma_offsets = flash_dma_regs_v4; in brcmnand_flash_dma_revision_init()
727 else if (ctrl->nand_version == 0x0602) in brcmnand_flash_dma_revision_init()
728 ctrl->flash_dma_offsets = flash_dma_regs_v0; in brcmnand_flash_dma_revision_init()
730 ctrl->flash_dma_offsets = flash_dma_regs_v1; in brcmnand_flash_dma_revision_init()
733 static inline u32 brcmnand_read_reg(struct brcmnand_controller *ctrl, in brcmnand_read_reg() argument
736 u16 offs = ctrl->reg_offsets[reg]; in brcmnand_read_reg()
739 return nand_readreg(ctrl, offs); in brcmnand_read_reg()
744 static inline void brcmnand_write_reg(struct brcmnand_controller *ctrl, in brcmnand_write_reg() argument
747 u16 offs = ctrl->reg_offsets[reg]; in brcmnand_write_reg()
750 nand_writereg(ctrl, offs, val); in brcmnand_write_reg()
753 static inline void brcmnand_rmw_reg(struct brcmnand_controller *ctrl, in brcmnand_rmw_reg() argument
757 u32 tmp = brcmnand_read_reg(ctrl, reg); in brcmnand_rmw_reg()
761 brcmnand_write_reg(ctrl, reg, tmp); in brcmnand_rmw_reg()
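brcmnand_rmw_reg() shows up above only through its read and write-back lines; the mask/shift manipulation in between contains no "ctrl" and so is not listed. The field update it presumably performs is the usual clear-then-insert read-modify-write, sketched here standalone:

#include <stdint.h>

/* 'mask' covers the field in place, 'val' is the new field value,
 * 'shift' its bit position within the register. */
static inline void rmw_field(volatile uint32_t *reg, uint32_t mask,
                             unsigned int shift, uint32_t val)
{
    uint32_t tmp = *reg;            /* brcmnand_read_reg() in the driver */

    tmp &= ~mask;                   /* clear the old field */
    tmp |= val << shift;            /* insert the new value */
    *reg = tmp;                     /* brcmnand_write_reg() in the driver */
}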
764 static inline u32 brcmnand_read_fc(struct brcmnand_controller *ctrl, int word) in brcmnand_read_fc() argument
766 return __raw_readl(ctrl->nand_fc + word * 4); in brcmnand_read_fc()
769 static inline void brcmnand_write_fc(struct brcmnand_controller *ctrl, in brcmnand_write_fc() argument
772 __raw_writel(val, ctrl->nand_fc + word * 4); in brcmnand_write_fc()
775 static inline void edu_writel(struct brcmnand_controller *ctrl, in edu_writel() argument
778 u16 offs = ctrl->edu_offsets[reg]; in edu_writel()
780 brcmnand_writel(val, ctrl->edu_base + offs); in edu_writel()
783 static inline u32 edu_readl(struct brcmnand_controller *ctrl, in edu_readl() argument
786 u16 offs = ctrl->edu_offsets[reg]; in edu_readl()
788 return brcmnand_readl(ctrl->edu_base + offs); in edu_readl()
791 static void brcmnand_clear_ecc_addr(struct brcmnand_controller *ctrl) in brcmnand_clear_ecc_addr() argument
795 brcmnand_write_reg(ctrl, BRCMNAND_UNCORR_ADDR, 0); in brcmnand_clear_ecc_addr()
796 brcmnand_write_reg(ctrl, BRCMNAND_CORR_ADDR, 0); in brcmnand_clear_ecc_addr()
797 brcmnand_write_reg(ctrl, BRCMNAND_UNCORR_EXT_ADDR, 0); in brcmnand_clear_ecc_addr()
798 brcmnand_write_reg(ctrl, BRCMNAND_CORR_EXT_ADDR, 0); in brcmnand_clear_ecc_addr()
801 static u64 brcmnand_get_uncorrecc_addr(struct brcmnand_controller *ctrl) in brcmnand_get_uncorrecc_addr() argument
805 err_addr = brcmnand_read_reg(ctrl, BRCMNAND_UNCORR_ADDR); in brcmnand_get_uncorrecc_addr()
806 err_addr |= ((u64)(brcmnand_read_reg(ctrl, in brcmnand_get_uncorrecc_addr()
813 static u64 brcmnand_get_correcc_addr(struct brcmnand_controller *ctrl) in brcmnand_get_correcc_addr() argument
817 err_addr = brcmnand_read_reg(ctrl, BRCMNAND_CORR_ADDR); in brcmnand_get_correcc_addr()
818 err_addr |= ((u64)(brcmnand_read_reg(ctrl, in brcmnand_get_correcc_addr()
829 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_set_cmd_addr() local
831 brcmnand_write_reg(ctrl, BRCMNAND_CMD_EXT_ADDRESS, in brcmnand_set_cmd_addr()
833 (void)brcmnand_read_reg(ctrl, BRCMNAND_CMD_EXT_ADDRESS); in brcmnand_set_cmd_addr()
834 brcmnand_write_reg(ctrl, BRCMNAND_CMD_ADDRESS, in brcmnand_set_cmd_addr()
836 (void)brcmnand_read_reg(ctrl, BRCMNAND_CMD_ADDRESS); in brcmnand_set_cmd_addr()
839 static inline u16 brcmnand_cs_offset(struct brcmnand_controller *ctrl, int cs, in brcmnand_cs_offset() argument
842 u16 offs_cs0 = ctrl->reg_offsets[BRCMNAND_CS0_BASE]; in brcmnand_cs_offset()
843 u16 offs_cs1 = ctrl->reg_offsets[BRCMNAND_CS1_BASE]; in brcmnand_cs_offset()
846 if (cs == 0 && ctrl->cs0_offsets) in brcmnand_cs_offset()
847 cs_offs = ctrl->cs0_offsets[reg]; in brcmnand_cs_offset()
849 cs_offs = ctrl->cs_offsets[reg]; in brcmnand_cs_offset()
852 return offs_cs1 + (cs - 1) * ctrl->reg_spacing + cs_offs; in brcmnand_cs_offset()
854 return offs_cs0 + cs * ctrl->reg_spacing + cs_offs; in brcmnand_cs_offset()
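brcmnand_cs_offset() turns a (chip-select, register) pair into an MMIO offset: CS0 may use a dedicated offset table (the 3.3-5.0 controllers selected earlier set cs0_offsets), while higher chip selects sit reg_spacing bytes apart from the CS1 base. The branch condition itself does not appear in the hits, so the cs >= 1 test below is an assumption; the arithmetic matches the two return lines above:

#include <stdint.h>

static uint16_t cs_reg_offset(uint16_t cs0_base, uint16_t cs1_base,
                              uint16_t reg_spacing, uint16_t cs_offs, int cs)
{
    if (cs >= 1)                    /* assumed condition */
        return cs1_base + (cs - 1) * reg_spacing + cs_offs;

    return cs0_base + cs * reg_spacing + cs_offs;
}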
857 static inline u32 brcmnand_count_corrected(struct brcmnand_controller *ctrl) in brcmnand_count_corrected() argument
859 if (ctrl->nand_version < 0x0600) in brcmnand_count_corrected()
861 return brcmnand_read_reg(ctrl, BRCMNAND_CORR_COUNT); in brcmnand_count_corrected()
866 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_wr_corr_thresh() local
871 if (!ctrl->reg_offsets[reg]) in brcmnand_wr_corr_thresh()
874 if (ctrl->nand_version == 0x0702) in brcmnand_wr_corr_thresh()
876 else if (ctrl->nand_version >= 0x0600) in brcmnand_wr_corr_thresh()
878 else if (ctrl->nand_version >= 0x0500) in brcmnand_wr_corr_thresh()
883 if (ctrl->nand_version >= 0x0702) { in brcmnand_wr_corr_thresh()
887 } else if (ctrl->nand_version >= 0x0600) { in brcmnand_wr_corr_thresh()
892 brcmnand_rmw_reg(ctrl, reg, (bits - 1) << shift, shift, val); in brcmnand_wr_corr_thresh()
895 static inline int brcmnand_cmd_shift(struct brcmnand_controller *ctrl) in brcmnand_cmd_shift() argument
897 if (ctrl->nand_version < 0x0602) in brcmnand_cmd_shift()
926 static inline u32 brcmnand_spare_area_mask(struct brcmnand_controller *ctrl) in brcmnand_spare_area_mask() argument
928 if (ctrl->nand_version == 0x0702) in brcmnand_spare_area_mask()
930 else if (ctrl->nand_version >= 0x0600) in brcmnand_spare_area_mask()
932 else if (ctrl->nand_version >= 0x0303) in brcmnand_spare_area_mask()
941 static inline u32 brcmnand_ecc_level_mask(struct brcmnand_controller *ctrl) in brcmnand_ecc_level_mask() argument
943 u32 mask = (ctrl->nand_version >= 0x0600) ? 0x1f : 0x0f; in brcmnand_ecc_level_mask()
948 if (ctrl->nand_version >= 0x0702) in brcmnand_ecc_level_mask()
956 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_set_ecc_enabled() local
957 u16 offs = brcmnand_cs_offset(ctrl, host->cs, BRCMNAND_CS_ACC_CONTROL); in brcmnand_set_ecc_enabled()
958 u32 acc_control = nand_readreg(ctrl, offs); in brcmnand_set_ecc_enabled()
967 acc_control &= ~brcmnand_ecc_level_mask(ctrl); in brcmnand_set_ecc_enabled()
970 nand_writereg(ctrl, offs, acc_control); in brcmnand_set_ecc_enabled()
973 static inline int brcmnand_sector_1k_shift(struct brcmnand_controller *ctrl) in brcmnand_sector_1k_shift() argument
975 if (ctrl->nand_version >= 0x0702) in brcmnand_sector_1k_shift()
977 else if (ctrl->nand_version >= 0x0600) in brcmnand_sector_1k_shift()
979 else if (ctrl->nand_version >= 0x0500) in brcmnand_sector_1k_shift()
987 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_get_sector_size_1k() local
988 int shift = brcmnand_sector_1k_shift(ctrl); in brcmnand_get_sector_size_1k()
989 u16 acc_control_offs = brcmnand_cs_offset(ctrl, host->cs, in brcmnand_get_sector_size_1k()
995 return (nand_readreg(ctrl, acc_control_offs) >> shift) & 0x1; in brcmnand_get_sector_size_1k()
1000 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_set_sector_size_1k() local
1001 int shift = brcmnand_sector_1k_shift(ctrl); in brcmnand_set_sector_size_1k()
1002 u16 acc_control_offs = brcmnand_cs_offset(ctrl, host->cs, in brcmnand_set_sector_size_1k()
1009 tmp = nand_readreg(ctrl, acc_control_offs); in brcmnand_set_sector_size_1k()
1012 nand_writereg(ctrl, acc_control_offs, tmp); in brcmnand_set_sector_size_1k()
1024 static int bcmnand_ctrl_poll_status(struct brcmnand_controller *ctrl, in bcmnand_ctrl_poll_status() argument
1036 val = brcmnand_read_reg(ctrl, BRCMNAND_INTFC_STATUS); in bcmnand_ctrl_poll_status()
1043 dev_warn(ctrl->dev, "timeout on status poll (expected %x got %x)\n", in bcmnand_ctrl_poll_status()
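bcmnand_ctrl_poll_status() is the driver's wait-for-ready primitive: read BRCMNAND_INTFC_STATUS, mask it, compare against the expected value, and warn on timeout. A simplified standalone version of the idiom (the real function uses a time-based timeout rather than a retry count):

#include <stdbool.h>
#include <stdint.h>

static bool poll_status(uint32_t (*read_status)(void *ctx), void *ctx,
                        uint32_t mask, uint32_t expected, unsigned int tries)
{
    while (tries--) {
        if ((read_status(ctx) & mask) == expected)
            return true;
    }
    return false;   /* caller logs "timeout on status poll ..." */
}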
1049 static inline void brcmnand_set_wp(struct brcmnand_controller *ctrl, bool en) in brcmnand_set_wp() argument
1053 brcmnand_rmw_reg(ctrl, BRCMNAND_CS_SELECT, CS_SELECT_NAND_WP, 0, val); in brcmnand_set_wp()
1060 static inline bool has_flash_dma(struct brcmnand_controller *ctrl) in has_flash_dma() argument
1062 return ctrl->flash_dma_base; in has_flash_dma()
1065 static inline bool has_edu(struct brcmnand_controller *ctrl) in has_edu() argument
1067 return ctrl->edu_base; in has_edu()
1070 static inline bool use_dma(struct brcmnand_controller *ctrl) in use_dma() argument
1072 return has_flash_dma(ctrl) || has_edu(ctrl); in use_dma()
1075 static inline void disable_ctrl_irqs(struct brcmnand_controller *ctrl) in disable_ctrl_irqs() argument
1077 if (ctrl->pio_poll_mode) in disable_ctrl_irqs()
1080 if (has_flash_dma(ctrl)) { in disable_ctrl_irqs()
1081 ctrl->flash_dma_base = NULL; in disable_ctrl_irqs()
1082 disable_irq(ctrl->dma_irq); in disable_ctrl_irqs()
1085 disable_irq(ctrl->irq); in disable_ctrl_irqs()
1086 ctrl->pio_poll_mode = true; in disable_ctrl_irqs()
1095 static inline void flash_dma_writel(struct brcmnand_controller *ctrl, in flash_dma_writel() argument
1098 u16 offs = ctrl->flash_dma_offsets[dma_reg]; in flash_dma_writel()
1100 brcmnand_writel(val, ctrl->flash_dma_base + offs); in flash_dma_writel()
1103 static inline u32 flash_dma_readl(struct brcmnand_controller *ctrl, in flash_dma_readl() argument
1106 u16 offs = ctrl->flash_dma_offsets[dma_reg]; in flash_dma_readl()
1108 return brcmnand_readl(ctrl->flash_dma_base + offs); in flash_dma_readl()
1123 static inline bool is_hamming_ecc(struct brcmnand_controller *ctrl, in is_hamming_ecc() argument
1126 if (ctrl->nand_version <= 0x0701) in is_hamming_ecc()
1287 if (is_hamming_ecc(host->ctrl, p)) { in brcmstb_choose_ecc_layout()
1319 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_wp() local
1321 if ((ctrl->features & BRCMNAND_HAS_WP) && wp_on == 1) { in brcmnand_wp()
1326 dev_dbg(ctrl->dev, "WP %s\n", wp ? "on" : "off"); in brcmnand_wp()
1331 * make sure ctrl/flash ready before and after in brcmnand_wp()
1334 ret = bcmnand_ctrl_poll_status(ctrl, NAND_CTRL_RDY | in brcmnand_wp()
1341 brcmnand_set_wp(ctrl, wp); in brcmnand_wp()
1344 ret = bcmnand_ctrl_poll_status(ctrl, in brcmnand_wp()
1360 static inline u8 oob_reg_read(struct brcmnand_controller *ctrl, u32 offs) in oob_reg_read() argument
1364 offset0 = ctrl->reg_offsets[BRCMNAND_OOB_READ_BASE]; in oob_reg_read()
1365 offset10 = ctrl->reg_offsets[BRCMNAND_OOB_READ_10_BASE]; in oob_reg_read()
1367 if (offs >= ctrl->max_oob) in oob_reg_read()
1375 return nand_readreg(ctrl, reg_offs) >> (24 - ((offs & 0x03) << 3)); in oob_reg_read()
1378 static inline void oob_reg_write(struct brcmnand_controller *ctrl, u32 offs, in oob_reg_write() argument
1383 offset0 = ctrl->reg_offsets[BRCMNAND_OOB_WRITE_BASE]; in oob_reg_write()
1384 offset10 = ctrl->reg_offsets[BRCMNAND_OOB_WRITE_10_BASE]; in oob_reg_write()
1386 if (offs >= ctrl->max_oob) in oob_reg_write()
1394 nand_writereg(ctrl, reg_offs, data); in oob_reg_write()
1399 * @ctrl: NAND controller
1405 static int read_oob_from_regs(struct brcmnand_controller *ctrl, int i, u8 *oob, in read_oob_from_regs() argument
1413 tbytes = max(0, tbytes - (int)ctrl->max_oob); in read_oob_from_regs()
1414 tbytes = min_t(int, tbytes, ctrl->max_oob); in read_oob_from_regs()
1417 oob[j] = oob_reg_read(ctrl, j); in read_oob_from_regs()
1428 static int write_oob_to_regs(struct brcmnand_controller *ctrl, int i, in write_oob_to_regs() argument
1436 tbytes = max(0, tbytes - (int)ctrl->max_oob); in write_oob_to_regs()
1437 tbytes = min_t(int, tbytes, ctrl->max_oob); in write_oob_to_regs()
1440 oob_reg_write(ctrl, j, in write_oob_to_regs()
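The OOB helpers above access the spare area through 32-bit registers, four OOB bytes per word with the lowest offset in the most significant byte; the shift 24 - ((offs & 0x03) << 3) in oob_reg_read() selects the byte lane. A small sketch of that packing (the driver's write path assembles whole words in write_oob_to_regs() rather than merging per byte; the merge helper here only illustrates the layout):

#include <stdint.h>

/* Extract OOB byte 'offs' from the 32-bit register word holding it. */
static uint8_t oob_byte(uint32_t word, uint32_t offs)
{
    return (uint8_t)(word >> (24 - ((offs & 0x03) << 3)));
}

/* Place byte 'b' into the lane for OOB offset 'offs' (illustrative). */
static uint32_t oob_merge(uint32_t word, uint32_t offs, uint8_t b)
{
    unsigned int shift = 24 - ((offs & 0x03) << 3);

    return (word & ~((uint32_t)0xff << shift)) | ((uint32_t)b << shift);
}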
1448 static void brcmnand_edu_init(struct brcmnand_controller *ctrl) in brcmnand_edu_init() argument
1451 edu_writel(ctrl, EDU_ERR_STATUS, 0); in brcmnand_edu_init()
1452 edu_readl(ctrl, EDU_ERR_STATUS); in brcmnand_edu_init()
1453 edu_writel(ctrl, EDU_DONE, 0); in brcmnand_edu_init()
1454 edu_writel(ctrl, EDU_DONE, 0); in brcmnand_edu_init()
1455 edu_writel(ctrl, EDU_DONE, 0); in brcmnand_edu_init()
1456 edu_writel(ctrl, EDU_DONE, 0); in brcmnand_edu_init()
1457 edu_readl(ctrl, EDU_DONE); in brcmnand_edu_init()
1463 struct brcmnand_controller *ctrl = data; in brcmnand_edu_irq() local
1465 if (ctrl->edu_count) { in brcmnand_edu_irq()
1466 ctrl->edu_count--; in brcmnand_edu_irq()
1467 while (!(edu_readl(ctrl, EDU_DONE) & EDU_DONE_MASK)) in brcmnand_edu_irq()
1469 edu_writel(ctrl, EDU_DONE, 0); in brcmnand_edu_irq()
1470 edu_readl(ctrl, EDU_DONE); in brcmnand_edu_irq()
1473 if (ctrl->edu_count) { in brcmnand_edu_irq()
1474 ctrl->edu_dram_addr += FC_BYTES; in brcmnand_edu_irq()
1475 ctrl->edu_ext_addr += FC_BYTES; in brcmnand_edu_irq()
1477 edu_writel(ctrl, EDU_DRAM_ADDR, (u32)ctrl->edu_dram_addr); in brcmnand_edu_irq()
1478 edu_readl(ctrl, EDU_DRAM_ADDR); in brcmnand_edu_irq()
1479 edu_writel(ctrl, EDU_EXT_ADDR, ctrl->edu_ext_addr); in brcmnand_edu_irq()
1480 edu_readl(ctrl, EDU_EXT_ADDR); in brcmnand_edu_irq()
1483 edu_writel(ctrl, EDU_CMD, ctrl->edu_cmd); in brcmnand_edu_irq()
1484 edu_readl(ctrl, EDU_CMD); in brcmnand_edu_irq()
1489 complete(&ctrl->edu_done); in brcmnand_edu_irq()
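brcmnand_edu_irq() drives the EDU engine one flash-cache unit at a time: each completion decrements edu_count, and while chunks remain it advances edu_dram_addr and edu_ext_addr by FC_BYTES and re-issues EDU_CMD; only when the count reaches zero is edu_done completed. The bookkeeping reduces to roughly the following (the struct, names and FC_BYTES value are assumptions for the sketch):

#include <stdbool.h>
#include <stdint.h>

#define FC_BYTES 512u               /* assumed flash-cache unit size */

struct edu_xfer {
    uint64_t dram_addr;             /* DMA address of the current chunk */
    uint64_t ext_addr;              /* flash (extended) address of the chunk */
    unsigned int count;             /* chunks still outstanding */
};

/* Returns true if another EDU command should be issued, false once the
 * whole transfer is done and the waiter can be completed. */
static bool edu_next_chunk(struct edu_xfer *x)
{
    if (x->count == 0 || --x->count == 0)
        return false;

    x->dram_addr += FC_BYTES;
    x->ext_addr += FC_BYTES;
    return true;
}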
1496 struct brcmnand_controller *ctrl = data; in brcmnand_ctlrdy_irq() local
1499 if (ctrl->dma_pending) in brcmnand_ctlrdy_irq()
1503 if (ctrl->edu_pending) { in brcmnand_ctlrdy_irq()
1504 if (irq == ctrl->irq && ((int)ctrl->edu_irq >= 0)) in brcmnand_ctlrdy_irq()
1512 complete(&ctrl->done); in brcmnand_ctlrdy_irq()
1519 struct brcmnand_controller *ctrl = data; in brcmnand_irq() local
1521 if (ctrl->soc->ctlrdy_ack(ctrl->soc)) in brcmnand_irq()
1529 struct brcmnand_controller *ctrl = data; in brcmnand_dma_irq() local
1531 complete(&ctrl->dma_done); in brcmnand_dma_irq()
1538 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_send_cmd() local
1542 cmd_addr = brcmnand_read_reg(ctrl, BRCMNAND_CMD_ADDRESS); in brcmnand_send_cmd()
1544 dev_dbg(ctrl->dev, "send native cmd %d addr 0x%llx\n", cmd, cmd_addr); in brcmnand_send_cmd()
1546 BUG_ON(ctrl->cmd_pending != 0); in brcmnand_send_cmd()
1547 ctrl->cmd_pending = cmd; in brcmnand_send_cmd()
1549 ret = bcmnand_ctrl_poll_status(ctrl, NAND_CTRL_RDY, NAND_CTRL_RDY, 0); in brcmnand_send_cmd()
1553 brcmnand_write_reg(ctrl, BRCMNAND_CMD_START, in brcmnand_send_cmd()
1554 cmd << brcmnand_cmd_shift(ctrl)); in brcmnand_send_cmd()
1562 unsigned int ctrl) in brcmnand_cmd_ctrl() argument
1570 struct brcmnand_controller *ctrl = host->ctrl; in brcmstb_nand_wait_for_completion() local
1577 disable_ctrl_irqs(ctrl); in brcmstb_nand_wait_for_completion()
1578 sts = bcmnand_ctrl_poll_status(ctrl, NAND_CTRL_RDY, in brcmstb_nand_wait_for_completion()
1585 sts = wait_for_completion_timeout(&ctrl->done, timeo); in brcmstb_nand_wait_for_completion()
1595 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_waitfunc() local
1598 dev_dbg(ctrl->dev, "wait on native cmd %d\n", ctrl->cmd_pending); in brcmnand_waitfunc()
1599 if (ctrl->cmd_pending) in brcmnand_waitfunc()
1603 u32 cmd = brcmnand_read_reg(ctrl, BRCMNAND_CMD_START) in brcmnand_waitfunc()
1604 >> brcmnand_cmd_shift(ctrl); in brcmnand_waitfunc()
1606 dev_err_ratelimited(ctrl->dev, in brcmnand_waitfunc()
1608 dev_err_ratelimited(ctrl->dev, "intfc status %08x\n", in brcmnand_waitfunc()
1609 brcmnand_read_reg(ctrl, BRCMNAND_INTFC_STATUS)); in brcmnand_waitfunc()
1611 ctrl->cmd_pending = 0; in brcmnand_waitfunc()
1612 return brcmnand_read_reg(ctrl, BRCMNAND_INTFC_STATUS) & in brcmnand_waitfunc()
1631 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_low_level_op() local
1656 dev_dbg(ctrl->dev, "ll_op cmd %#x\n", tmp); in brcmnand_low_level_op()
1658 brcmnand_write_reg(ctrl, BRCMNAND_LL_OP, tmp); in brcmnand_low_level_op()
1659 (void)brcmnand_read_reg(ctrl, BRCMNAND_LL_OP); in brcmnand_low_level_op()
1670 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_cmdfunc() local
1681 dev_dbg(ctrl->dev, "cmd 0x%x addr 0x%llx\n", command, in brcmnand_cmdfunc()
1738 u32 *flash_cache = (u32 *)ctrl->flash_cache; in brcmnand_cmdfunc()
1741 brcmnand_soc_data_bus_prepare(ctrl->soc, true); in brcmnand_cmdfunc()
1752 flash_cache[i] = be32_to_cpu(brcmnand_read_fc(ctrl, i)); in brcmnand_cmdfunc()
1754 brcmnand_soc_data_bus_unprepare(ctrl->soc, true); in brcmnand_cmdfunc()
1770 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_read_byte() local
1777 ret = brcmnand_read_reg(ctrl, BRCMNAND_ID) >> in brcmnand_read_byte()
1780 ret = brcmnand_read_reg(ctrl, BRCMNAND_ID_EXT) >> in brcmnand_read_byte()
1785 ret = oob_reg_read(ctrl, host->last_byte); in brcmnand_read_byte()
1789 ret = brcmnand_read_reg(ctrl, BRCMNAND_INTFC_STATUS) & in brcmnand_read_byte()
1804 ret = ctrl->flash_cache[offs]; in brcmnand_read_byte()
1813 ret = brcmnand_read_reg(ctrl, BRCMNAND_LL_RDATA) & 0xff; in brcmnand_read_byte()
1817 dev_dbg(ctrl->dev, "read byte = 0x%02x\n", ret); in brcmnand_read_byte()
1855 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_edu_trans() local
1863 pa = dma_map_single(ctrl->dev, buf, len, dir); in brcmnand_edu_trans()
1864 if (dma_mapping_error(ctrl->dev, pa)) { in brcmnand_edu_trans()
1865 dev_err(ctrl->dev, "unable to map buffer for EDU DMA\n"); in brcmnand_edu_trans()
1869 ctrl->edu_pending = true; in brcmnand_edu_trans()
1870 ctrl->edu_dram_addr = pa; in brcmnand_edu_trans()
1871 ctrl->edu_ext_addr = addr; in brcmnand_edu_trans()
1872 ctrl->edu_cmd = edu_cmd; in brcmnand_edu_trans()
1873 ctrl->edu_count = trans; in brcmnand_edu_trans()
1875 edu_writel(ctrl, EDU_DRAM_ADDR, (u32)ctrl->edu_dram_addr); in brcmnand_edu_trans()
1876 edu_readl(ctrl, EDU_DRAM_ADDR); in brcmnand_edu_trans()
1877 edu_writel(ctrl, EDU_EXT_ADDR, ctrl->edu_ext_addr); in brcmnand_edu_trans()
1878 edu_readl(ctrl, EDU_EXT_ADDR); in brcmnand_edu_trans()
1879 edu_writel(ctrl, EDU_LENGTH, FC_BYTES); in brcmnand_edu_trans()
1880 edu_readl(ctrl, EDU_LENGTH); in brcmnand_edu_trans()
1884 edu_writel(ctrl, EDU_CMD, ctrl->edu_cmd); in brcmnand_edu_trans()
1885 edu_readl(ctrl, EDU_CMD); in brcmnand_edu_trans()
1887 if (wait_for_completion_timeout(&ctrl->edu_done, timeo) <= 0) { in brcmnand_edu_trans()
1888 dev_err(ctrl->dev, in brcmnand_edu_trans()
1890 edu_readl(ctrl, EDU_STATUS), in brcmnand_edu_trans()
1891 edu_readl(ctrl, EDU_ERR_STATUS)); in brcmnand_edu_trans()
1894 dma_unmap_single(ctrl->dev, pa, len, dir); in brcmnand_edu_trans()
1897 if (((brcmnand_read_reg(ctrl, BRCMNAND_INTFC_STATUS) & in brcmnand_edu_trans()
1900 dev_info(ctrl->dev, "program failed at %llx\n", in brcmnand_edu_trans()
1906 if (edu_readl(ctrl, EDU_STATUS) & EDU_STATUS_ACTIVE) in brcmnand_edu_trans()
1907 dev_warn(ctrl->dev, "EDU still active: %#x\n", in brcmnand_edu_trans()
1908 edu_readl(ctrl, EDU_STATUS)); in brcmnand_edu_trans()
1910 if (unlikely(edu_readl(ctrl, EDU_ERR_STATUS) & EDU_ERR_STATUS_ERRACK)) { in brcmnand_edu_trans()
1911 dev_warn(ctrl->dev, "EDU RBUS error at addr %llx\n", in brcmnand_edu_trans()
1916 ctrl->edu_pending = false; in brcmnand_edu_trans()
1917 brcmnand_edu_init(ctrl); in brcmnand_edu_trans()
1918 edu_writel(ctrl, EDU_STOP, 0); /* force stop */ in brcmnand_edu_trans()
1919 edu_readl(ctrl, EDU_STOP); in brcmnand_edu_trans()
1928 err_addr = brcmnand_get_uncorrecc_addr(ctrl); in brcmnand_edu_trans()
1930 err_addr = brcmnand_get_correcc_addr(ctrl); in brcmnand_edu_trans()
1978 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_dma_run() local
1981 flash_dma_writel(ctrl, FLASH_DMA_FIRST_DESC, lower_32_bits(desc)); in brcmnand_dma_run()
1982 (void)flash_dma_readl(ctrl, FLASH_DMA_FIRST_DESC); in brcmnand_dma_run()
1983 if (ctrl->nand_version > 0x0602) { in brcmnand_dma_run()
1984 flash_dma_writel(ctrl, FLASH_DMA_FIRST_DESC_EXT, in brcmnand_dma_run()
1986 (void)flash_dma_readl(ctrl, FLASH_DMA_FIRST_DESC_EXT); in brcmnand_dma_run()
1990 ctrl->dma_pending = true; in brcmnand_dma_run()
1992 flash_dma_writel(ctrl, FLASH_DMA_CTRL, 0x03); /* wake | run */ in brcmnand_dma_run()
1994 if (wait_for_completion_timeout(&ctrl->dma_done, timeo) <= 0) { in brcmnand_dma_run()
1995 dev_err(ctrl->dev, in brcmnand_dma_run()
1997 flash_dma_readl(ctrl, FLASH_DMA_STATUS), in brcmnand_dma_run()
1998 flash_dma_readl(ctrl, FLASH_DMA_ERROR_STATUS)); in brcmnand_dma_run()
2000 ctrl->dma_pending = false; in brcmnand_dma_run()
2001 flash_dma_writel(ctrl, FLASH_DMA_CTRL, 0); /* force stop */ in brcmnand_dma_run()
2007 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_dma_trans() local
2011 buf_pa = dma_map_single(ctrl->dev, buf, len, dir); in brcmnand_dma_trans()
2012 if (dma_mapping_error(ctrl->dev, buf_pa)) { in brcmnand_dma_trans()
2013 dev_err(ctrl->dev, "unable to map buffer for DMA\n"); in brcmnand_dma_trans()
2017 brcmnand_fill_dma_desc(host, ctrl->dma_desc, addr, buf_pa, len, in brcmnand_dma_trans()
2020 brcmnand_dma_run(host, ctrl->dma_pa); in brcmnand_dma_trans()
2022 dma_unmap_single(ctrl->dev, buf_pa, len, dir); in brcmnand_dma_trans()
2024 if (ctrl->dma_desc->status_valid & FLASH_DMA_ECC_ERROR) in brcmnand_dma_trans()
2026 else if (ctrl->dma_desc->status_valid & FLASH_DMA_CORR_ERROR) in brcmnand_dma_trans()
2040 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_read_by_pio() local
2043 brcmnand_clear_ecc_addr(ctrl); in brcmnand_read_by_pio()
2052 brcmnand_soc_data_bus_prepare(ctrl->soc, false); in brcmnand_read_by_pio()
2055 *buf = brcmnand_read_fc(ctrl, j); in brcmnand_read_by_pio()
2057 brcmnand_soc_data_bus_unprepare(ctrl->soc, false); in brcmnand_read_by_pio()
2061 oob += read_oob_from_regs(ctrl, i, oob, in brcmnand_read_by_pio()
2066 *err_addr = brcmnand_get_uncorrecc_addr(ctrl); in brcmnand_read_by_pio()
2073 *err_addr = brcmnand_get_correcc_addr(ctrl); in brcmnand_read_by_pio()
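brcmnand_read_by_pio() clears the ECC address registers, drains the controller's flash cache a 32-bit word at a time between the data-bus prepare/unprepare calls, copies OOB bytes out of the OOB registers, and finally samples the uncorrectable/correctable error address registers. The inner copy is essentially the loop below (FC_WORDS is an assumption: a 512-byte flash cache read as 128 words):

#include <stdint.h>

#define FC_WORDS 128u

static void drain_flash_cache(uint32_t *dst,
                              uint32_t (*read_fc)(void *ctx, int word),
                              void *ctx)
{
    unsigned int j;

    for (j = 0; j < FC_WORDS; j++)
        dst[j] = read_fc(ctx, j);   /* word index; scaled to a byte offset inside read_fc */
}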
2139 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_read() local
2145 dev_dbg(ctrl->dev, "read %llx -> %p\n", (unsigned long long)addr, buf); in brcmnand_read()
2148 brcmnand_clear_ecc_addr(ctrl); in brcmnand_read()
2150 if (ctrl->dma_trans && !oob && flash_dma_buf_ok(buf)) { in brcmnand_read()
2151 err = ctrl->dma_trans(host, addr, buf, in brcmnand_read()
2162 if (has_edu(ctrl) && err_addr) in brcmnand_read()
2182 if ((ctrl->nand_version == 0x0700) || in brcmnand_read()
2183 (ctrl->nand_version == 0x0701)) { in brcmnand_read()
2194 if (ctrl->nand_version < 0x0702) { in brcmnand_read()
2202 dev_dbg(ctrl->dev, "uncorrectable error at 0x%llx\n", in brcmnand_read()
2210 unsigned int corrected = brcmnand_count_corrected(ctrl); in brcmnand_read()
2217 dev_dbg(ctrl->dev, "corrected error at 0x%llx\n", in brcmnand_read()
2283 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_write() local
2287 dev_dbg(ctrl->dev, "write %llx <- %p\n", (unsigned long long)addr, buf); in brcmnand_write()
2290 dev_warn(ctrl->dev, "unaligned buffer: %p\n", buf); in brcmnand_write()
2296 for (i = 0; i < ctrl->max_oob; i += 4) in brcmnand_write()
2297 oob_reg_write(ctrl, i, 0xffffffff); in brcmnand_write()
2299 if (use_dma(ctrl) && !oob && flash_dma_buf_ok(buf)) { in brcmnand_write()
2300 if (ctrl->dma_trans(host, addr, (u32 *)buf, mtd->writesize, in brcmnand_write()
2313 brcmnand_soc_data_bus_prepare(ctrl->soc, false); in brcmnand_write()
2316 brcmnand_write_fc(ctrl, j, *buf); in brcmnand_write()
2318 brcmnand_soc_data_bus_unprepare(ctrl->soc, false); in brcmnand_write()
2321 brcmnand_write_fc(ctrl, j, 0xffffffff); in brcmnand_write()
2325 oob += write_oob_to_regs(ctrl, i, oob, in brcmnand_write()
2335 dev_info(ctrl->dev, "program failed at %llx\n", in brcmnand_write()
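On the write side, brcmnand_write() pre-fills the OOB registers with 0xffffffff and writes 0xffffffff into any flash-cache words the caller's buffer does not cover, so flash bytes that were not supplied stay erased. A minimal sketch of that padding rule (helper name and buffer handling are illustrative):

#include <stdint.h>

static void fill_flash_cache(uint32_t *fc, const uint32_t *buf,
                             unsigned int buf_words, unsigned int fc_words)
{
    unsigned int j;

    for (j = 0; j < fc_words; j++)
        fc[j] = (buf && j < buf_words) ? buf[j] : 0xffffffff;
}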
2402 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_set_cfg() local
2404 u16 cfg_offs = brcmnand_cs_offset(ctrl, host->cs, BRCMNAND_CS_CFG); in brcmnand_set_cfg()
2405 u16 cfg_ext_offs = brcmnand_cs_offset(ctrl, host->cs, in brcmnand_set_cfg()
2407 u16 acc_control_offs = brcmnand_cs_offset(ctrl, host->cs, in brcmnand_set_cfg()
2412 if (ctrl->block_sizes) { in brcmnand_set_cfg()
2415 for (i = 0, found = 0; ctrl->block_sizes[i]; i++) in brcmnand_set_cfg()
2416 if (ctrl->block_sizes[i] * 1024 == cfg->block_size) { in brcmnand_set_cfg()
2421 dev_warn(ctrl->dev, "invalid block size %u\n", in brcmnand_set_cfg()
2429 if (cfg->block_size < BRCMNAND_MIN_BLOCKSIZE || (ctrl->max_block_size && in brcmnand_set_cfg()
2430 cfg->block_size > ctrl->max_block_size)) { in brcmnand_set_cfg()
2431 dev_warn(ctrl->dev, "invalid block size %u\n", in brcmnand_set_cfg()
2436 if (ctrl->page_sizes) { in brcmnand_set_cfg()
2439 for (i = 0, found = 0; ctrl->page_sizes[i]; i++) in brcmnand_set_cfg()
2440 if (ctrl->page_sizes[i] == cfg->page_size) { in brcmnand_set_cfg()
2445 dev_warn(ctrl->dev, "invalid page size %u\n", in brcmnand_set_cfg()
2453 if (cfg->page_size < BRCMNAND_MIN_PAGESIZE || (ctrl->max_page_size && in brcmnand_set_cfg()
2454 cfg->page_size > ctrl->max_page_size)) { in brcmnand_set_cfg()
2455 dev_warn(ctrl->dev, "invalid page size %u\n", cfg->page_size); in brcmnand_set_cfg()
2460 dev_warn(ctrl->dev, "invalid device size 0x%llx\n", in brcmnand_set_cfg()
2472 tmp |= (page_size << ctrl->page_size_shift) | in brcmnand_set_cfg()
2474 nand_writereg(ctrl, cfg_offs, tmp); in brcmnand_set_cfg()
2476 nand_writereg(ctrl, cfg_offs, tmp); in brcmnand_set_cfg()
2479 nand_writereg(ctrl, cfg_ext_offs, tmp); in brcmnand_set_cfg()
2482 tmp = nand_readreg(ctrl, acc_control_offs); in brcmnand_set_cfg()
2483 tmp &= ~brcmnand_ecc_level_mask(ctrl); in brcmnand_set_cfg()
2484 tmp &= ~brcmnand_spare_area_mask(ctrl); in brcmnand_set_cfg()
2485 if (ctrl->nand_version >= 0x0302) { in brcmnand_set_cfg()
2489 nand_writereg(ctrl, acc_control_offs, tmp); in brcmnand_set_cfg()
2511 if (is_hamming_ecc(host->ctrl, cfg)) in brcmnand_print_cfg()
2537 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_setup_dev() local
2555 if (cfg->spare_area_size > ctrl->max_oob) in brcmnand_setup_dev()
2556 cfg->spare_area_size = ctrl->max_oob; in brcmnand_setup_dev()
2571 dev_err(ctrl->dev, "only HW ECC supported; selected: %d\n", in brcmnand_setup_dev()
2587 dev_err(ctrl->dev, "invalid Hamming params: %d bits per %d bytes\n", in brcmnand_setup_dev()
2598 dev_info(ctrl->dev, "Using ECC step-size %d, strength %d\n", in brcmnand_setup_dev()
2612 if (!(ctrl->features & BRCMNAND_HAS_1K_SECTORS)) { in brcmnand_setup_dev()
2613 dev_err(ctrl->dev, "1KB sectors not supported\n"); in brcmnand_setup_dev()
2617 dev_err(ctrl->dev, in brcmnand_setup_dev()
2626 dev_err(ctrl->dev, "unsupported ECC size: %d\n", in brcmnand_setup_dev()
2644 dev_info(ctrl->dev, "detected %s\n", msg); in brcmnand_setup_dev()
2647 offs = brcmnand_cs_offset(ctrl, host->cs, BRCMNAND_CS_ACC_CONTROL); in brcmnand_setup_dev()
2648 tmp = nand_readreg(ctrl, offs); in brcmnand_setup_dev()
2653 if (ctrl->nand_version >= 0x0702) in brcmnand_setup_dev()
2656 if (ctrl->features & BRCMNAND_HAS_PREFETCH) in brcmnand_setup_dev()
2659 nand_writereg(ctrl, offs, tmp); in brcmnand_setup_dev()
2692 if (is_hamming_ecc(host->ctrl, &host->hwcfg)) { in brcmnand_attach_chip()
2706 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_init_cs() local
2749 chip->controller = &ctrl->controller; in brcmnand_init_cs()
2756 cfg_offs = brcmnand_cs_offset(ctrl, host->cs, BRCMNAND_CS_CFG); in brcmnand_init_cs()
2757 nand_writereg(ctrl, cfg_offs, in brcmnand_init_cs()
2758 nand_readreg(ctrl, cfg_offs) & ~CFG_BUS_WIDTH); in brcmnand_init_cs()
2774 struct brcmnand_controller *ctrl = host->ctrl; in brcmnand_save_restore_cs_config() local
2775 u16 cfg_offs = brcmnand_cs_offset(ctrl, host->cs, BRCMNAND_CS_CFG); in brcmnand_save_restore_cs_config()
2776 u16 cfg_ext_offs = brcmnand_cs_offset(ctrl, host->cs, in brcmnand_save_restore_cs_config()
2778 u16 acc_control_offs = brcmnand_cs_offset(ctrl, host->cs, in brcmnand_save_restore_cs_config()
2780 u16 t1_offs = brcmnand_cs_offset(ctrl, host->cs, BRCMNAND_CS_TIMING1); in brcmnand_save_restore_cs_config()
2781 u16 t2_offs = brcmnand_cs_offset(ctrl, host->cs, BRCMNAND_CS_TIMING2); in brcmnand_save_restore_cs_config()
2784 nand_writereg(ctrl, cfg_offs, host->hwcfg.config); in brcmnand_save_restore_cs_config()
2786 nand_writereg(ctrl, cfg_ext_offs, in brcmnand_save_restore_cs_config()
2788 nand_writereg(ctrl, acc_control_offs, host->hwcfg.acc_control); in brcmnand_save_restore_cs_config()
2789 nand_writereg(ctrl, t1_offs, host->hwcfg.timing_1); in brcmnand_save_restore_cs_config()
2790 nand_writereg(ctrl, t2_offs, host->hwcfg.timing_2); in brcmnand_save_restore_cs_config()
2792 host->hwcfg.config = nand_readreg(ctrl, cfg_offs); in brcmnand_save_restore_cs_config()
2795 nand_readreg(ctrl, cfg_ext_offs); in brcmnand_save_restore_cs_config()
2796 host->hwcfg.acc_control = nand_readreg(ctrl, acc_control_offs); in brcmnand_save_restore_cs_config()
2797 host->hwcfg.timing_1 = nand_readreg(ctrl, t1_offs); in brcmnand_save_restore_cs_config()
2798 host->hwcfg.timing_2 = nand_readreg(ctrl, t2_offs); in brcmnand_save_restore_cs_config()
2804 struct brcmnand_controller *ctrl = dev_get_drvdata(dev); in brcmnand_suspend() local
2807 list_for_each_entry(host, &ctrl->host_list, node) in brcmnand_suspend()
2810 ctrl->nand_cs_nand_select = brcmnand_read_reg(ctrl, BRCMNAND_CS_SELECT); in brcmnand_suspend()
2811 ctrl->nand_cs_nand_xor = brcmnand_read_reg(ctrl, BRCMNAND_CS_XOR); in brcmnand_suspend()
2812 ctrl->corr_stat_threshold = in brcmnand_suspend()
2813 brcmnand_read_reg(ctrl, BRCMNAND_CORR_THRESHOLD); in brcmnand_suspend()
2815 if (has_flash_dma(ctrl)) in brcmnand_suspend()
2816 ctrl->flash_dma_mode = flash_dma_readl(ctrl, FLASH_DMA_MODE); in brcmnand_suspend()
2817 else if (has_edu(ctrl)) in brcmnand_suspend()
2818 ctrl->edu_config = edu_readl(ctrl, EDU_CONFIG); in brcmnand_suspend()
2825 struct brcmnand_controller *ctrl = dev_get_drvdata(dev); in brcmnand_resume() local
2828 if (has_flash_dma(ctrl)) { in brcmnand_resume()
2829 flash_dma_writel(ctrl, FLASH_DMA_MODE, ctrl->flash_dma_mode); in brcmnand_resume()
2830 flash_dma_writel(ctrl, FLASH_DMA_ERROR_STATUS, 0); in brcmnand_resume()
2833 if (has_edu(ctrl)) { in brcmnand_resume()
2834 ctrl->edu_config = edu_readl(ctrl, EDU_CONFIG); in brcmnand_resume()
2835 edu_writel(ctrl, EDU_CONFIG, ctrl->edu_config); in brcmnand_resume()
2836 edu_readl(ctrl, EDU_CONFIG); in brcmnand_resume()
2837 brcmnand_edu_init(ctrl); in brcmnand_resume()
2840 brcmnand_write_reg(ctrl, BRCMNAND_CS_SELECT, ctrl->nand_cs_nand_select); in brcmnand_resume()
2841 brcmnand_write_reg(ctrl, BRCMNAND_CS_XOR, ctrl->nand_cs_nand_xor); in brcmnand_resume()
2842 brcmnand_write_reg(ctrl, BRCMNAND_CORR_THRESHOLD, in brcmnand_resume()
2843 ctrl->corr_stat_threshold); in brcmnand_resume()
2844 if (ctrl->soc) { in brcmnand_resume()
2846 ctrl->soc->ctlrdy_ack(ctrl->soc); in brcmnand_resume()
2847 ctrl->soc->ctlrdy_set_enabled(ctrl->soc, true); in brcmnand_resume()
2850 list_for_each_entry(host, &ctrl->host_list, node) { in brcmnand_resume()
2890 struct brcmnand_controller *ctrl = dev_get_drvdata(&pdev->dev); in brcmnand_edu_setup() local
2896 ctrl->edu_base = devm_ioremap_resource(dev, res); in brcmnand_edu_setup()
2897 if (IS_ERR(ctrl->edu_base)) in brcmnand_edu_setup()
2898 return PTR_ERR(ctrl->edu_base); in brcmnand_edu_setup()
2900 ctrl->edu_offsets = edu_regs; in brcmnand_edu_setup()
2902 edu_writel(ctrl, EDU_CONFIG, EDU_CONFIG_MODE_NAND | in brcmnand_edu_setup()
2904 edu_readl(ctrl, EDU_CONFIG); in brcmnand_edu_setup()
2907 brcmnand_edu_init(ctrl); in brcmnand_edu_setup()
2909 ctrl->edu_irq = platform_get_irq_optional(pdev, 1); in brcmnand_edu_setup()
2910 if (ctrl->edu_irq < 0) { in brcmnand_edu_setup()
2914 ret = devm_request_irq(dev, ctrl->edu_irq, in brcmnand_edu_setup()
2916 "brcmnand-edu", ctrl); in brcmnand_edu_setup()
2918 dev_err(ctrl->dev, "can't allocate IRQ %d: error %d\n", in brcmnand_edu_setup()
2919 ctrl->edu_irq, ret); in brcmnand_edu_setup()
2924 ctrl->edu_irq); in brcmnand_edu_setup()
2935 struct brcmnand_controller *ctrl; in brcmnand_probe() local
2946 ctrl = devm_kzalloc(dev, sizeof(*ctrl), GFP_KERNEL); in brcmnand_probe()
2947 if (!ctrl) in brcmnand_probe()
2950 dev_set_drvdata(dev, ctrl); in brcmnand_probe()
2951 ctrl->dev = dev; in brcmnand_probe()
2953 init_completion(&ctrl->done); in brcmnand_probe()
2954 init_completion(&ctrl->dma_done); in brcmnand_probe()
2955 init_completion(&ctrl->edu_done); in brcmnand_probe()
2956 nand_controller_init(&ctrl->controller); in brcmnand_probe()
2957 ctrl->controller.ops = &brcmnand_controller_ops; in brcmnand_probe()
2958 INIT_LIST_HEAD(&ctrl->host_list); in brcmnand_probe()
2962 ctrl->nand_base = devm_ioremap_resource(dev, res); in brcmnand_probe()
2963 if (IS_ERR(ctrl->nand_base)) in brcmnand_probe()
2964 return PTR_ERR(ctrl->nand_base); in brcmnand_probe()
2967 ctrl->clk = devm_clk_get(dev, "nand"); in brcmnand_probe()
2968 if (!IS_ERR(ctrl->clk)) { in brcmnand_probe()
2969 ret = clk_prepare_enable(ctrl->clk); in brcmnand_probe()
2973 ret = PTR_ERR(ctrl->clk); in brcmnand_probe()
2977 ctrl->clk = NULL; in brcmnand_probe()
2981 ret = brcmnand_revision_init(ctrl); in brcmnand_probe()
2991 ctrl->nand_fc = devm_ioremap_resource(dev, res); in brcmnand_probe()
2992 if (IS_ERR(ctrl->nand_fc)) { in brcmnand_probe()
2993 ret = PTR_ERR(ctrl->nand_fc); in brcmnand_probe()
2997 ctrl->nand_fc = ctrl->nand_base + in brcmnand_probe()
2998 ctrl->reg_offsets[BRCMNAND_FC_BASE]; in brcmnand_probe()
3004 ctrl->flash_dma_base = devm_ioremap_resource(dev, res); in brcmnand_probe()
3005 if (IS_ERR(ctrl->flash_dma_base)) { in brcmnand_probe()
3006 ret = PTR_ERR(ctrl->flash_dma_base); in brcmnand_probe()
3011 brcmnand_flash_dma_revision_init(ctrl); in brcmnand_probe()
3014 if (ctrl->nand_version >= 0x0700) in brcmnand_probe()
3024 flash_dma_writel(ctrl, FLASH_DMA_MODE, FLASH_DMA_MODE_MASK); in brcmnand_probe()
3025 flash_dma_writel(ctrl, FLASH_DMA_ERROR_STATUS, 0); in brcmnand_probe()
3028 ctrl->dma_desc = dmam_alloc_coherent(dev, in brcmnand_probe()
3029 sizeof(*ctrl->dma_desc), in brcmnand_probe()
3030 &ctrl->dma_pa, GFP_KERNEL); in brcmnand_probe()
3031 if (!ctrl->dma_desc) { in brcmnand_probe()
3036 ctrl->dma_irq = platform_get_irq(pdev, 1); in brcmnand_probe()
3037 if ((int)ctrl->dma_irq < 0) { in brcmnand_probe()
3043 ret = devm_request_irq(dev, ctrl->dma_irq, in brcmnand_probe()
3045 ctrl); in brcmnand_probe()
3048 ctrl->dma_irq, ret); in brcmnand_probe()
3054 ctrl->dma_trans = brcmnand_dma_trans; in brcmnand_probe()
3060 if (has_edu(ctrl)) in brcmnand_probe()
3062 ctrl->dma_trans = brcmnand_edu_trans; in brcmnand_probe()
3066 brcmnand_rmw_reg(ctrl, BRCMNAND_CS_SELECT, in brcmnand_probe()
3069 brcmnand_rmw_reg(ctrl, BRCMNAND_CS_XOR, 0xff, 0, 0); in brcmnand_probe()
3071 if (ctrl->features & BRCMNAND_HAS_WP) { in brcmnand_probe()
3074 brcmnand_set_wp(ctrl, false); in brcmnand_probe()
3080 ctrl->irq = platform_get_irq(pdev, 0); in brcmnand_probe()
3081 if ((int)ctrl->irq < 0) { in brcmnand_probe()
3092 ctrl->soc = soc; in brcmnand_probe()
3094 ret = devm_request_irq(dev, ctrl->irq, brcmnand_irq, 0, in brcmnand_probe()
3095 DRV_NAME, ctrl); in brcmnand_probe()
3098 ctrl->soc->ctlrdy_ack(ctrl->soc); in brcmnand_probe()
3099 ctrl->soc->ctlrdy_set_enabled(ctrl->soc, true); in brcmnand_probe()
3102 ret = devm_request_irq(dev, ctrl->irq, brcmnand_ctlrdy_irq, 0, in brcmnand_probe()
3103 DRV_NAME, ctrl); in brcmnand_probe()
3107 ctrl->irq, ret); in brcmnand_probe()
3122 host->ctrl = ctrl; in brcmnand_probe()
3130 list_add_tail(&host->node, &ctrl->host_list); in brcmnand_probe()
3135 if (list_empty(&ctrl->host_list)) { in brcmnand_probe()
3143 clk_disable_unprepare(ctrl->clk); in brcmnand_probe()
3151 struct brcmnand_controller *ctrl = dev_get_drvdata(&pdev->dev); in brcmnand_remove() local
3156 list_for_each_entry(host, &ctrl->host_list, node) { in brcmnand_remove()
3163 clk_disable_unprepare(ctrl->clk); in brcmnand_remove()