Lines matching refs:sp — references to the controller-state pointer sp (struct mtk_nor *) in the MediaTek SPI NOR controller driver, drivers/spi/spi-mtk-nor.c. Each entry shows the source line number, the code fragment, the enclosing function, and whether sp is an argument or a local there.
96 #define CLK_TO_US(sp, clkcnt) DIV_ROUND_UP(clkcnt, sp->spi_freq / 1000000) argument
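CLK_TO_US() bounds how long a transfer of clkcnt SPI clocks can take by dividing by the bus frequency expressed in MHz and rounding up. As a worked example (the 26 MHz rate is an illustration, not taken from this listing): with sp->spi_freq = 26000000, a 6-byte command of 48 clocks gives CLK_TO_US(sp, 48) = DIV_ROUND_UP(48, 26000000 / 1000000) = DIV_ROUND_UP(48, 26) = 2 microseconds.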
114 static inline void mtk_nor_rmw(struct mtk_nor *sp, u32 reg, u32 set, u32 clr) in mtk_nor_rmw() argument
116 u32 val = readl(sp->base + reg); in mtk_nor_rmw()
120 writel(val, sp->base + reg); in mtk_nor_rmw()
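Only the sp-referencing lines of the read-modify-write helper appear above; a minimal sketch of the whole function, filling in the set/clear lines the listing elides, would be:

static inline void mtk_nor_rmw(struct mtk_nor *sp, u32 reg, u32 set, u32 clr)
{
	u32 val = readl(sp->base + reg);	/* read the current register value */

	val &= ~clr;				/* clear the requested bits */
	val |= set;				/* then set the requested bits */
	writel(val, sp->base + reg);		/* write the result back */
}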
123 static inline int mtk_nor_cmd_exec(struct mtk_nor *sp, u32 cmd, ulong clk) in mtk_nor_cmd_exec() argument
125 ulong delay = CLK_TO_US(sp, clk); in mtk_nor_cmd_exec()
129 writel(cmd, sp->base + MTK_NOR_REG_CMD); in mtk_nor_cmd_exec()
130 ret = readl_poll_timeout(sp->base + MTK_NOR_REG_CMD, reg, !(reg & cmd), in mtk_nor_cmd_exec()
133 dev_err(sp->dev, "command %u timeout.\n", cmd); in mtk_nor_cmd_exec()
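mtk_nor_cmd_exec() kicks a command and busy-waits until the controller clears the command bits, with a timeout scaled from the expected clock count. A hedged sketch (the poll interval and timeout multiplier are assumptions, not shown in the listing):

static inline int mtk_nor_cmd_exec(struct mtk_nor *sp, u32 cmd, ulong clk)
{
	ulong delay = CLK_TO_US(sp, clk);	/* expected duration in us */
	u32 reg;
	int ret;

	writel(cmd, sp->base + MTK_NOR_REG_CMD);	/* start the command */
	/* wait for the hardware to clear the command bit(s) */
	ret = readl_poll_timeout(sp->base + MTK_NOR_REG_CMD, reg, !(reg & cmd),
				 delay / 3, (delay + 1) * 100);
	if (ret < 0)
		dev_err(sp->dev, "command %u timeout.\n", cmd);

	return ret;
}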
137 static void mtk_nor_set_addr(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_set_addr() argument
143 writeb(addr & 0xff, sp->base + MTK_NOR_REG_RADR(i)); in mtk_nor_set_addr()
147 writeb(addr & 0xff, sp->base + MTK_NOR_REG_RADR3); in mtk_nor_set_addr()
148 mtk_nor_rmw(sp, MTK_NOR_REG_BUSCFG, MTK_NOR_4B_ADDR, 0); in mtk_nor_set_addr()
150 mtk_nor_rmw(sp, MTK_NOR_REG_BUSCFG, 0, MTK_NOR_4B_ADDR); in mtk_nor_set_addr()
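Address setup writes one address byte per RADR register and toggles the 4-byte-address bit in BUSCFG based on op->addr.nbytes. A sketch that fills in the loop and the conditional elided by the listing:

static void mtk_nor_set_addr(struct mtk_nor *sp, const struct spi_mem_op *op)
{
	u32 addr = op->addr.val;
	int i;

	/* the low three address bytes go into RADR0..RADR2 */
	for (i = 0; i < 3; i++) {
		writeb(addr & 0xff, sp->base + MTK_NOR_REG_RADR(i));
		addr >>= 8;
	}
	if (op->addr.nbytes == 4) {
		/* the fourth byte has its own register; switch to 4-byte mode */
		writeb(addr & 0xff, sp->base + MTK_NOR_REG_RADR3);
		mtk_nor_rmw(sp, MTK_NOR_REG_BUSCFG, MTK_NOR_4B_ADDR, 0);
	} else {
		mtk_nor_rmw(sp, MTK_NOR_REG_BUSCFG, 0, MTK_NOR_4B_ADDR);
	}
}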
154 static bool need_bounce(struct mtk_nor *sp, const struct spi_mem_op *op) in need_bounce() argument
252 struct mtk_nor *sp = spi_controller_get_devdata(mem->spi->master); in mtk_nor_adjust_op_size() local
267 else if (!need_bounce(sp, op)) in mtk_nor_adjust_op_size()
314 static void mtk_nor_setup_bus(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_setup_bus() argument
323 writeb(op->cmd.opcode, sp->base + MTK_NOR_REG_PRGDATA(4)); in mtk_nor_setup_bus()
328 writeb(op->cmd.opcode, sp->base + MTK_NOR_REG_PRGDATA(3)); in mtk_nor_setup_bus()
333 mtk_nor_rmw(sp, MTK_NOR_REG_CFG1, MTK_NOR_FAST_READ, 0); in mtk_nor_setup_bus()
335 mtk_nor_rmw(sp, MTK_NOR_REG_CFG1, 0, MTK_NOR_FAST_READ); in mtk_nor_setup_bus()
337 mtk_nor_rmw(sp, MTK_NOR_REG_BUSCFG, reg, MTK_NOR_BUS_MODE_MASK); in mtk_nor_setup_bus()
340 static int mtk_nor_dma_exec(struct mtk_nor *sp, u32 from, unsigned int length, in mtk_nor_dma_exec() argument
347 writel(from, sp->base + MTK_NOR_REG_DMA_FADR); in mtk_nor_dma_exec()
348 writel(dma_addr, sp->base + MTK_NOR_REG_DMA_DADR); in mtk_nor_dma_exec()
349 writel(dma_addr + length, sp->base + MTK_NOR_REG_DMA_END_DADR); in mtk_nor_dma_exec()
351 if (sp->high_dma) { in mtk_nor_dma_exec()
353 sp->base + MTK_NOR_REG_DMA_DADR_HB); in mtk_nor_dma_exec()
355 sp->base + MTK_NOR_REG_DMA_END_DADR_HB); in mtk_nor_dma_exec()
358 if (sp->has_irq) { in mtk_nor_dma_exec()
359 reinit_completion(&sp->op_done); in mtk_nor_dma_exec()
360 mtk_nor_rmw(sp, MTK_NOR_REG_IRQ_EN, MTK_NOR_IRQ_DMA, 0); in mtk_nor_dma_exec()
363 mtk_nor_rmw(sp, MTK_NOR_REG_DMA_CTL, MTK_NOR_DMA_START, 0); in mtk_nor_dma_exec()
365 delay = CLK_TO_US(sp, (length + 5) * BITS_PER_BYTE); in mtk_nor_dma_exec()
367 if (sp->has_irq) { in mtk_nor_dma_exec()
368 if (!wait_for_completion_timeout(&sp->op_done, in mtk_nor_dma_exec()
372 ret = readl_poll_timeout(sp->base + MTK_NOR_REG_DMA_CTL, reg, in mtk_nor_dma_exec()
378 dev_err(sp->dev, "dma read timeout.\n"); in mtk_nor_dma_exec()
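The DMA helper programs the flash source offset and the destination window, adds the high 32-bit halves on controllers with wide DMA, starts the engine, and then either waits on the op_done completion (IRQ path) or polls the start bit. A condensed sketch; the high-address handling, timeout margins and poll parameters are assumptions:

static int mtk_nor_dma_exec(struct mtk_nor *sp, u32 from, unsigned int length,
			    dma_addr_t dma_addr)
{
	int ret = 0;
	u32 reg;
	ulong delay;

	writel(from, sp->base + MTK_NOR_REG_DMA_FADR);		/* flash offset */
	writel(dma_addr, sp->base + MTK_NOR_REG_DMA_DADR);	/* buffer start */
	writel(dma_addr + length, sp->base + MTK_NOR_REG_DMA_END_DADR);

	if (sp->high_dma) {
		/* upper halves of the 64-bit DMA window */
		writel(upper_32_bits(dma_addr),
		       sp->base + MTK_NOR_REG_DMA_DADR_HB);
		writel(upper_32_bits(dma_addr + length),
		       sp->base + MTK_NOR_REG_DMA_END_DADR_HB);
	}

	if (sp->has_irq) {
		reinit_completion(&sp->op_done);
		mtk_nor_rmw(sp, MTK_NOR_REG_IRQ_EN, MTK_NOR_IRQ_DMA, 0);
	}

	mtk_nor_rmw(sp, MTK_NOR_REG_DMA_CTL, MTK_NOR_DMA_START, 0);

	/* budget: payload plus ~5 bytes of command/address overhead */
	delay = CLK_TO_US(sp, (length + 5) * BITS_PER_BYTE);

	if (sp->has_irq) {
		if (!wait_for_completion_timeout(&sp->op_done,
						 usecs_to_jiffies(delay + 1)))
			ret = -ETIMEDOUT;
	} else {
		ret = readl_poll_timeout(sp->base + MTK_NOR_REG_DMA_CTL, reg,
					 !(reg & MTK_NOR_DMA_START),
					 delay / 3, (delay + 1) * 100);
	}

	if (ret < 0)
		dev_err(sp->dev, "dma read timeout.\n");

	return ret;
}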
383 static int mtk_nor_read_bounce(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_read_bounce() argument
393 ret = mtk_nor_dma_exec(sp, op->addr.val, rdlen, sp->buffer_dma); in mtk_nor_read_bounce()
396 memcpy(op->data.buf.in, sp->buffer, op->data.nbytes); in mtk_nor_read_bounce()
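Reads that cannot be DMA-mapped directly go through the driver's coherent bounce buffer: the length is rounded up to the controller's DMA granularity, the DMA lands in sp->buffer, and only the requested bytes are copied out. A sketch; the rounding to MTK_NOR_DMA_ALIGN is an assumption based on the alignment mask checked in probe:

static int mtk_nor_read_bounce(struct mtk_nor *sp, const struct spi_mem_op *op)
{
	unsigned int rdlen;
	int ret;

	/* round the DMA length up to the controller's alignment */
	rdlen = ALIGN(op->data.nbytes, MTK_NOR_DMA_ALIGN);

	ret = mtk_nor_dma_exec(sp, op->addr.val, rdlen, sp->buffer_dma);
	if (!ret)
		memcpy(op->data.buf.in, sp->buffer, op->data.nbytes);

	return ret;
}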
401 static int mtk_nor_read_dma(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_read_dma() argument
406 if (need_bounce(sp, op)) in mtk_nor_read_dma()
407 return mtk_nor_read_bounce(sp, op); in mtk_nor_read_dma()
409 dma_addr = dma_map_single(sp->dev, op->data.buf.in, in mtk_nor_read_dma()
412 if (dma_mapping_error(sp->dev, dma_addr)) in mtk_nor_read_dma()
415 ret = mtk_nor_dma_exec(sp, op->addr.val, op->data.nbytes, dma_addr); in mtk_nor_read_dma()
417 dma_unmap_single(sp->dev, dma_addr, op->data.nbytes, DMA_FROM_DEVICE); in mtk_nor_read_dma()
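When the caller's buffer is DMA-friendly it is streaming-mapped directly; otherwise the bounce path above is used. The fragments give nearly the whole routine (the -EINVAL on mapping failure is an assumption):

static int mtk_nor_read_dma(struct mtk_nor *sp, const struct spi_mem_op *op)
{
	dma_addr_t dma_addr;
	int ret;

	if (need_bounce(sp, op))
		return mtk_nor_read_bounce(sp, op);

	dma_addr = dma_map_single(sp->dev, op->data.buf.in,
				  op->data.nbytes, DMA_FROM_DEVICE);
	if (dma_mapping_error(sp->dev, dma_addr))
		return -EINVAL;

	ret = mtk_nor_dma_exec(sp, op->addr.val, op->data.nbytes, dma_addr);

	dma_unmap_single(sp->dev, dma_addr, op->data.nbytes, DMA_FROM_DEVICE);

	return ret;
}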
422 static int mtk_nor_read_pio(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_read_pio() argument
427 ret = mtk_nor_cmd_exec(sp, MTK_NOR_CMD_READ, 6 * BITS_PER_BYTE); in mtk_nor_read_pio()
429 buf[0] = readb(sp->base + MTK_NOR_REG_RDATA); in mtk_nor_read_pio()
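Single-byte reads use PIO: one READ command budgeted as six byte-times (opcode, address and data), then the byte is picked out of RDATA. A sketch:

static int mtk_nor_read_pio(struct mtk_nor *sp, const struct spi_mem_op *op)
{
	u8 *buf = op->data.buf.in;
	int ret;

	ret = mtk_nor_cmd_exec(sp, MTK_NOR_CMD_READ, 6 * BITS_PER_BYTE);
	if (!ret)
		buf[0] = readb(sp->base + MTK_NOR_REG_RDATA);

	return ret;
}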
433 static int mtk_nor_write_buffer_enable(struct mtk_nor *sp) in mtk_nor_write_buffer_enable() argument
438 if (sp->wbuf_en) in mtk_nor_write_buffer_enable()
441 val = readl(sp->base + MTK_NOR_REG_CFG2); in mtk_nor_write_buffer_enable()
442 writel(val | MTK_NOR_WR_BUF_EN, sp->base + MTK_NOR_REG_CFG2); in mtk_nor_write_buffer_enable()
443 ret = readl_poll_timeout(sp->base + MTK_NOR_REG_CFG2, val, in mtk_nor_write_buffer_enable()
446 sp->wbuf_en = true; in mtk_nor_write_buffer_enable()
450 static int mtk_nor_write_buffer_disable(struct mtk_nor *sp) in mtk_nor_write_buffer_disable() argument
455 if (!sp->wbuf_en) in mtk_nor_write_buffer_disable()
457 val = readl(sp->base + MTK_NOR_REG_CFG2); in mtk_nor_write_buffer_disable()
458 writel(val & ~MTK_NOR_WR_BUF_EN, sp->base + MTK_NOR_REG_CFG2); in mtk_nor_write_buffer_disable()
459 ret = readl_poll_timeout(sp->base + MTK_NOR_REG_CFG2, val, in mtk_nor_write_buffer_disable()
462 sp->wbuf_en = false; in mtk_nor_write_buffer_disable()
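Both write-buffer helpers share one toggle-and-confirm pattern: flip MTK_NOR_WR_BUF_EN in CFG2, poll until the controller reflects the new state, then cache it in sp->wbuf_en so repeated calls return immediately. A sketch of the enable side (the poll arguments are assumptions); the disable side is the mirror image with the bit cleared and the poll condition inverted:

static int mtk_nor_write_buffer_enable(struct mtk_nor *sp)
{
	u32 val;
	int ret;

	if (sp->wbuf_en)
		return 0;			/* already enabled */

	val = readl(sp->base + MTK_NOR_REG_CFG2);
	writel(val | MTK_NOR_WR_BUF_EN, sp->base + MTK_NOR_REG_CFG2);
	/* wait until the controller confirms the write buffer is on */
	ret = readl_poll_timeout(sp->base + MTK_NOR_REG_CFG2, val,
				 val & MTK_NOR_WR_BUF_EN, 0, 10000);
	if (!ret)
		sp->wbuf_en = true;

	return ret;
}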
466 static int mtk_nor_pp_buffered(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_pp_buffered() argument
472 ret = mtk_nor_write_buffer_enable(sp); in mtk_nor_pp_buffered()
479 writel(val, sp->base + MTK_NOR_REG_PP_DATA); in mtk_nor_pp_buffered()
481 return mtk_nor_cmd_exec(sp, MTK_NOR_CMD_WRITE, in mtk_nor_pp_buffered()
485 static int mtk_nor_pp_unbuffered(struct mtk_nor *sp, in mtk_nor_pp_unbuffered() argument
491 ret = mtk_nor_write_buffer_disable(sp); in mtk_nor_pp_unbuffered()
494 writeb(buf[0], sp->base + MTK_NOR_REG_WDATA); in mtk_nor_pp_unbuffered()
495 return mtk_nor_cmd_exec(sp, MTK_NOR_CMD_WRITE, 6 * BITS_PER_BYTE); in mtk_nor_pp_unbuffered()
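Unbuffered page program pushes a single data byte through WDATA per WRITE command after making sure the write buffer is off; the buffered variant above instead fills PP_DATA 32 bits at a time before issuing the command. A sketch of the unbuffered path:

static int mtk_nor_pp_unbuffered(struct mtk_nor *sp,
				 const struct spi_mem_op *op)
{
	const u8 *buf = op->data.buf.out;
	int ret;

	ret = mtk_nor_write_buffer_disable(sp);
	if (ret < 0)
		return ret;

	writeb(buf[0], sp->base + MTK_NOR_REG_WDATA);
	/* opcode + address + one data byte, about six byte-times */
	return mtk_nor_cmd_exec(sp, MTK_NOR_CMD_WRITE, 6 * BITS_PER_BYTE);
}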
498 static int mtk_nor_spi_mem_prg(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_spi_mem_prg() argument
528 reg = sp->base + MTK_NOR_REG_PRGDATA(reg_offset); in mtk_nor_spi_mem_prg()
534 reg = sp->base + MTK_NOR_REG_PRGDATA(reg_offset); in mtk_nor_spi_mem_prg()
541 reg = sp->base + MTK_NOR_REG_PRGDATA(reg_offset); in mtk_nor_spi_mem_prg()
546 reg = sp->base + MTK_NOR_REG_PRGDATA(reg_offset); in mtk_nor_spi_mem_prg()
552 reg = sp->base + MTK_NOR_REG_PRGDATA(reg_offset); in mtk_nor_spi_mem_prg()
557 writel(prg_len * BITS_PER_BYTE, sp->base + MTK_NOR_REG_PRG_CNT); in mtk_nor_spi_mem_prg()
558 ret = mtk_nor_cmd_exec(sp, MTK_NOR_CMD_PROGRAM, in mtk_nor_spi_mem_prg()
567 reg = sp->base + MTK_NOR_REG_SHIFT(reg_offset); in mtk_nor_spi_mem_prg()
577 struct mtk_nor *sp = spi_controller_get_devdata(mem->spi->master); in mtk_nor_exec_op() local
582 return mtk_nor_spi_mem_prg(sp, op); in mtk_nor_exec_op()
585 mtk_nor_set_addr(sp, op); in mtk_nor_exec_op()
586 writeb(op->cmd.opcode, sp->base + MTK_NOR_REG_PRGDATA0); in mtk_nor_exec_op()
588 return mtk_nor_pp_buffered(sp, op); in mtk_nor_exec_op()
589 return mtk_nor_pp_unbuffered(sp, op); in mtk_nor_exec_op()
593 ret = mtk_nor_write_buffer_disable(sp); in mtk_nor_exec_op()
596 mtk_nor_setup_bus(sp, op); in mtk_nor_exec_op()
598 mtk_nor_set_addr(sp, op); in mtk_nor_exec_op()
599 return mtk_nor_read_pio(sp, op); in mtk_nor_exec_op()
601 return mtk_nor_read_dma(sp, op); in mtk_nor_exec_op()
605 return mtk_nor_spi_mem_prg(sp, op); in mtk_nor_exec_op()
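mtk_nor_exec_op() is the spi-mem entry point, and the sp-referencing lines above are the branches of its dispatch: data-less (or otherwise unhandled) ops go through the generic program sequence, writes pick the buffered or unbuffered page-program path, and reads use PIO for a single byte or DMA otherwise. A condensed sketch of that dispatch; the exact predicates (including the MTK_NOR_PP_SIZE comparison) are assumptions inferred from the fragments:

static int mtk_nor_exec_op(struct spi_mem *mem, const struct spi_mem_op *op)
{
	struct mtk_nor *sp = spi_controller_get_devdata(mem->spi->master);
	int ret;

	if (!op->data.nbytes)			/* command/address only */
		return mtk_nor_spi_mem_prg(sp, op);

	if (op->data.dir == SPI_MEM_DATA_OUT) {	/* page-program path */
		mtk_nor_set_addr(sp, op);
		writeb(op->cmd.opcode, sp->base + MTK_NOR_REG_PRGDATA0);
		if (op->data.nbytes == MTK_NOR_PP_SIZE)
			return mtk_nor_pp_buffered(sp, op);
		return mtk_nor_pp_unbuffered(sp, op);
	}

	if (op->data.dir == SPI_MEM_DATA_IN) {	/* read path */
		ret = mtk_nor_write_buffer_disable(sp);
		if (ret < 0)
			return ret;
		mtk_nor_setup_bus(sp, op);
		if (op->data.nbytes == 1) {
			mtk_nor_set_addr(sp, op);
			return mtk_nor_read_pio(sp, op);
		}
		return mtk_nor_read_dma(sp, op);
	}

	/* anything else falls back to the generic program sequence */
	return mtk_nor_spi_mem_prg(sp, op);
}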
610 struct mtk_nor *sp = spi_controller_get_devdata(spi->master); in mtk_nor_setup() local
612 if (spi->max_speed_hz && (spi->max_speed_hz < sp->spi_freq)) { in mtk_nor_setup()
614 sp->spi_freq); in mtk_nor_setup()
617 spi->max_speed_hz = sp->spi_freq; in mtk_nor_setup()
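mtk_nor_setup() only validates the requested clock: the controller runs at a fixed spi_freq, so a slower request is rejected and the device's max_speed_hz is pinned to that rate. A sketch (the error message wording is assumed):

static int mtk_nor_setup(struct spi_device *spi)
{
	struct mtk_nor *sp = spi_controller_get_devdata(spi->master);

	if (spi->max_speed_hz && (spi->max_speed_hz < sp->spi_freq)) {
		dev_err(&spi->dev, "spi clock should be %u Hz.\n",
			sp->spi_freq);
		return -EINVAL;
	}
	spi->max_speed_hz = sp->spi_freq;

	return 0;
}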
625 struct mtk_nor *sp = spi_controller_get_devdata(master); in mtk_nor_transfer_one_message() local
638 reg = sp->base + MTK_NOR_REG_PRGDATA(reg_offset); in mtk_nor_transfer_one_message()
647 writel(trx_len * BITS_PER_BYTE, sp->base + MTK_NOR_REG_PRG_CNT); in mtk_nor_transfer_one_message()
649 stat = mtk_nor_cmd_exec(sp, MTK_NOR_CMD_PROGRAM, in mtk_nor_transfer_one_message()
658 reg = sp->base + MTK_NOR_REG_SHIFT(reg_offset); in mtk_nor_transfer_one_message()
672 static void mtk_nor_disable_clk(struct mtk_nor *sp) in mtk_nor_disable_clk() argument
674 clk_disable_unprepare(sp->spi_clk); in mtk_nor_disable_clk()
675 clk_disable_unprepare(sp->ctlr_clk); in mtk_nor_disable_clk()
676 clk_disable_unprepare(sp->axi_clk); in mtk_nor_disable_clk()
679 static int mtk_nor_enable_clk(struct mtk_nor *sp) in mtk_nor_enable_clk() argument
683 ret = clk_prepare_enable(sp->spi_clk); in mtk_nor_enable_clk()
687 ret = clk_prepare_enable(sp->ctlr_clk); in mtk_nor_enable_clk()
689 clk_disable_unprepare(sp->spi_clk); in mtk_nor_enable_clk()
693 ret = clk_prepare_enable(sp->axi_clk); in mtk_nor_enable_clk()
695 clk_disable_unprepare(sp->spi_clk); in mtk_nor_enable_clk()
696 clk_disable_unprepare(sp->ctlr_clk); in mtk_nor_enable_clk()
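Clock bring-up enables spi_clk, ctlr_clk and axi_clk in order, unwinding whatever was already enabled when a later one fails; mtk_nor_disable_clk() above simply drops all three. A sketch of the enable path:

static int mtk_nor_enable_clk(struct mtk_nor *sp)
{
	int ret;

	ret = clk_prepare_enable(sp->spi_clk);
	if (ret)
		return ret;

	ret = clk_prepare_enable(sp->ctlr_clk);
	if (ret) {
		clk_disable_unprepare(sp->spi_clk);
		return ret;
	}

	ret = clk_prepare_enable(sp->axi_clk);
	if (ret) {
		/* unwind the two clocks that are already running */
		clk_disable_unprepare(sp->spi_clk);
		clk_disable_unprepare(sp->ctlr_clk);
		return ret;
	}

	return 0;
}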
703 static void mtk_nor_init(struct mtk_nor *sp) in mtk_nor_init() argument
705 writel(0, sp->base + MTK_NOR_REG_IRQ_EN); in mtk_nor_init()
706 writel(MTK_NOR_IRQ_MASK, sp->base + MTK_NOR_REG_IRQ_STAT); in mtk_nor_init()
708 writel(MTK_NOR_ENABLE_SF_CMD, sp->base + MTK_NOR_REG_WP); in mtk_nor_init()
709 mtk_nor_rmw(sp, MTK_NOR_REG_CFG2, MTK_NOR_WR_CUSTOM_OP_EN, 0); in mtk_nor_init()
710 mtk_nor_rmw(sp, MTK_NOR_REG_CFG3, in mtk_nor_init()
716 struct mtk_nor *sp = data; in mtk_nor_irq_handler() local
719 irq_status = readl(sp->base + MTK_NOR_REG_IRQ_STAT); in mtk_nor_irq_handler()
720 irq_enabled = readl(sp->base + MTK_NOR_REG_IRQ_EN); in mtk_nor_irq_handler()
722 writel(irq_status, sp->base + MTK_NOR_REG_IRQ_STAT); in mtk_nor_irq_handler()
728 complete(&sp->op_done); in mtk_nor_irq_handler()
729 writel(0, sp->base + MTK_NOR_REG_IRQ_EN); in mtk_nor_irq_handler()
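The interrupt handler acknowledges every pending status bit and, when the DMA-done interrupt is among the enabled sources, completes op_done and masks further interrupts until the next DMA is armed. A hedged sketch:

static irqreturn_t mtk_nor_irq_handler(int irq, void *data)
{
	struct mtk_nor *sp = data;
	u32 irq_status, irq_enabled;

	irq_status = readl(sp->base + MTK_NOR_REG_IRQ_STAT);
	irq_enabled = readl(sp->base + MTK_NOR_REG_IRQ_EN);
	/* write-1-to-clear whatever is pending */
	writel(irq_status, sp->base + MTK_NOR_REG_IRQ_STAT);

	if (!(irq_status & irq_enabled))
		return IRQ_NONE;

	if (irq_status & MTK_NOR_IRQ_DMA) {
		complete(&sp->op_done);
		writel(0, sp->base + MTK_NOR_REG_IRQ_EN);	/* one-shot */
	}

	return IRQ_HANDLED;
}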
756 struct mtk_nor *sp; in mtk_nor_probe() local
784 ctlr = devm_spi_alloc_master(&pdev->dev, sizeof(*sp)); in mtk_nor_probe()
802 sp = spi_controller_get_devdata(ctlr); in mtk_nor_probe()
803 sp->base = base; in mtk_nor_probe()
804 sp->has_irq = false; in mtk_nor_probe()
805 sp->wbuf_en = false; in mtk_nor_probe()
806 sp->ctlr = ctlr; in mtk_nor_probe()
807 sp->dev = &pdev->dev; in mtk_nor_probe()
808 sp->spi_clk = spi_clk; in mtk_nor_probe()
809 sp->ctlr_clk = ctlr_clk; in mtk_nor_probe()
810 sp->axi_clk = axi_clk; in mtk_nor_probe()
811 sp->high_dma = (dma_bits > 32); in mtk_nor_probe()
812 sp->buffer = dmam_alloc_coherent(&pdev->dev, in mtk_nor_probe()
814 &sp->buffer_dma, GFP_KERNEL); in mtk_nor_probe()
815 if (!sp->buffer) in mtk_nor_probe()
818 if ((uintptr_t)sp->buffer & MTK_NOR_DMA_ALIGN_MASK) { in mtk_nor_probe()
819 dev_err(sp->dev, "misaligned allocation of internal buffer.\n"); in mtk_nor_probe()
823 ret = mtk_nor_enable_clk(sp); in mtk_nor_probe()
827 sp->spi_freq = clk_get_rate(sp->spi_clk); in mtk_nor_probe()
829 mtk_nor_init(sp); in mtk_nor_probe()
834 dev_warn(sp->dev, "IRQ not available."); in mtk_nor_probe()
836 ret = devm_request_irq(sp->dev, irq, mtk_nor_irq_handler, 0, in mtk_nor_probe()
837 pdev->name, sp); in mtk_nor_probe()
839 dev_warn(sp->dev, "failed to request IRQ."); in mtk_nor_probe()
841 init_completion(&sp->op_done); in mtk_nor_probe()
842 sp->has_irq = true; in mtk_nor_probe()
859 dev_info(&pdev->dev, "spi frequency: %d Hz\n", sp->spi_freq); in mtk_nor_probe()
868 mtk_nor_disable_clk(sp); in mtk_nor_probe()
876 struct mtk_nor *sp = spi_controller_get_devdata(ctlr); in mtk_nor_remove() local
882 mtk_nor_disable_clk(sp); in mtk_nor_remove()
890 struct mtk_nor *sp = spi_controller_get_devdata(ctlr); in mtk_nor_runtime_suspend() local
892 mtk_nor_disable_clk(sp); in mtk_nor_runtime_suspend()
900 struct mtk_nor *sp = spi_controller_get_devdata(ctlr); in mtk_nor_runtime_resume() local
902 return mtk_nor_enable_clk(sp); in mtk_nor_runtime_resume()
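The runtime-PM callbacks just gate and restore the three clocks around idle periods. A sketch, assuming the controller is retrieved from the device's drvdata as is usual for SPI controller drivers:

static int __maybe_unused mtk_nor_runtime_suspend(struct device *dev)
{
	struct spi_controller *ctlr = dev_get_drvdata(dev);
	struct mtk_nor *sp = spi_controller_get_devdata(ctlr);

	mtk_nor_disable_clk(sp);		/* gate all three clocks */

	return 0;
}

static int __maybe_unused mtk_nor_runtime_resume(struct device *dev)
{
	struct spi_controller *ctlr = dev_get_drvdata(dev);
	struct mtk_nor *sp = spi_controller_get_devdata(ctlr);

	return mtk_nor_enable_clk(sp);		/* re-enable before use */
}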