Lines matching refs:mdata. Each entry gives the source line number, the matching line, and the enclosing function; a trailing "argument" or "local" marks the lines where mdata is declared as a function parameter or as a local variable.

181 static void mtk_spi_reset(struct mtk_spi *mdata)  in mtk_spi_reset()  argument
186 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_reset()
188 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_reset()
190 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_reset()
192 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_reset()
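The two read/write pairs above are the whole body of mtk_spi_reset(): set a reset bit in SPI_CMD_REG, then clear it again. A minimal sketch, assuming the SPI_CMD_RST bit name from the upstream spi-mt65xx driver:

static void mtk_spi_reset(struct mtk_spi *mdata)
{
	u32 reg_val;

	/* set the software reset bit in SPI_CMD_REG */
	reg_val = readl(mdata->base + SPI_CMD_REG);
	reg_val |= SPI_CMD_RST;
	writel(reg_val, mdata->base + SPI_CMD_REG);

	/* clear it again to take the controller out of reset */
	reg_val = readl(mdata->base + SPI_CMD_REG);
	reg_val &= ~SPI_CMD_RST;
	writel(reg_val, mdata->base + SPI_CMD_REG);
}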
202 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_prepare_message() local
207 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_prepare_message()
235 if (mdata->dev_comp->enhance_timing) { in mtk_spi_prepare_message()
257 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_prepare_message()
260 if (mdata->dev_comp->need_pad_sel) in mtk_spi_prepare_message()
261 writel(mdata->pad_sel[spi->chip_select], in mtk_spi_prepare_message()
262 mdata->base + SPI_PAD_SEL_REG); in mtk_spi_prepare_message()
270 struct mtk_spi *mdata = spi_master_get_devdata(spi->master); in mtk_spi_set_cs() local
275 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_set_cs()
278 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_set_cs()
281 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_set_cs()
282 mdata->state = MTK_SPI_IDLE; in mtk_spi_set_cs()
283 mtk_spi_reset(mdata); in mtk_spi_set_cs()
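mtk_spi_set_cs() drives chip select through the controller's pause mode rather than a GPIO; the two writes to SPI_CMD_REG set or clear that mode, and the deassert path additionally marks the controller idle and resets it. A hedged sketch, with SPI_CMD_PAUSE_EN assumed from the upstream driver:

static void mtk_spi_set_cs(struct spi_device *spi, bool enable)
{
	u32 reg_val;
	struct mtk_spi *mdata = spi_master_get_devdata(spi->master);

	if (spi->mode & SPI_CS_HIGH)
		enable = !enable;

	reg_val = readl(mdata->base + SPI_CMD_REG);
	if (!enable) {
		/* enter pause mode: CS stays asserted between transfers */
		reg_val |= SPI_CMD_PAUSE_EN;
		writel(reg_val, mdata->base + SPI_CMD_REG);
	} else {
		/* leave pause mode, return to idle and reset the FIFOs */
		reg_val &= ~SPI_CMD_PAUSE_EN;
		writel(reg_val, mdata->base + SPI_CMD_REG);
		mdata->state = MTK_SPI_IDLE;
		mtk_spi_reset(mdata);
	}
}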
291 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_prepare_transfer() local
293 spi_clk_hz = clk_get_rate(mdata->spi_clk); in mtk_spi_prepare_transfer()
302 if (mdata->dev_comp->enhance_timing) { in mtk_spi_prepare_transfer()
307 writel(reg_val, mdata->base + SPI_CFG2_REG); in mtk_spi_prepare_transfer()
312 writel(reg_val, mdata->base + SPI_CFG0_REG); in mtk_spi_prepare_transfer()
319 writel(reg_val, mdata->base + SPI_CFG0_REG); in mtk_spi_prepare_transfer()
322 reg_val = readl(mdata->base + SPI_CFG1_REG); in mtk_spi_prepare_transfer()
325 writel(reg_val, mdata->base + SPI_CFG1_REG); in mtk_spi_prepare_transfer()
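mtk_spi_prepare_transfer() derives the bus timing from the rate of spi_clk and programs it into SPI_CFG0/CFG1 (and SPI_CFG2 on enhance_timing parts). A sketch of just the divider math that feeds those register writes, assuming the rounding used upstream:

	spi_clk_hz = clk_get_rate(mdata->spi_clk);
	if (xfer->speed_hz < spi_clk_hz / 2)
		div = DIV_ROUND_UP(spi_clk_hz, xfer->speed_hz);
	else
		div = 1;

	sck_time = (div + 1) / 2;	/* half the divided period per SCK phase */
	cs_time = sck_time * 2;		/* CS setup/hold/idle in SCK-phase units */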
331 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_setup_packet() local
333 packet_size = min_t(u32, mdata->xfer_len, MTK_SPI_PACKET_SIZE); in mtk_spi_setup_packet()
334 packet_loop = mdata->xfer_len / packet_size; in mtk_spi_setup_packet()
336 reg_val = readl(mdata->base + SPI_CFG1_REG); in mtk_spi_setup_packet()
340 writel(reg_val, mdata->base + SPI_CFG1_REG); in mtk_spi_setup_packet()
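mtk_spi_setup_packet() splits mdata->xfer_len into a packet length (capped at MTK_SPI_PACKET_SIZE) and a loop count, then programs both fields of SPI_CFG1_REG. A sketch, with the mask/offset macro names assumed from the upstream driver:

static void mtk_spi_setup_packet(struct spi_master *master)
{
	u32 packet_size, packet_loop, reg_val;
	struct mtk_spi *mdata = spi_master_get_devdata(master);

	/* one hardware packet is at most MTK_SPI_PACKET_SIZE bytes */
	packet_size = min_t(u32, mdata->xfer_len, MTK_SPI_PACKET_SIZE);
	packet_loop = mdata->xfer_len / packet_size;

	reg_val = readl(mdata->base + SPI_CFG1_REG);
	reg_val &= ~(SPI_CFG1_PACKET_LENGTH_MASK | SPI_CFG1_PACKET_LOOP_MASK);
	reg_val |= (packet_size - 1) << SPI_CFG1_PACKET_LENGTH_OFFSET;
	reg_val |= (packet_loop - 1) << SPI_CFG1_PACKET_LOOP_OFFSET;
	writel(reg_val, mdata->base + SPI_CFG1_REG);
}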
346 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_enable_transfer() local
348 cmd = readl(mdata->base + SPI_CMD_REG); in mtk_spi_enable_transfer()
349 if (mdata->state == MTK_SPI_IDLE) in mtk_spi_enable_transfer()
353 writel(cmd, mdata->base + SPI_CMD_REG); in mtk_spi_enable_transfer()
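mtk_spi_enable_transfer() kicks the hardware with either an activate or a resume command, depending on whether the controller is idle or paused between chunks. A sketch, with SPI_CMD_ACT and SPI_CMD_RESUME assumed from the upstream driver:

static void mtk_spi_enable_transfer(struct spi_master *master)
{
	u32 cmd;
	struct mtk_spi *mdata = spi_master_get_devdata(master);

	cmd = readl(mdata->base + SPI_CMD_REG);
	if (mdata->state == MTK_SPI_IDLE)
		cmd |= SPI_CMD_ACT;	/* start a new transfer */
	else
		cmd |= SPI_CMD_RESUME;	/* continue a paused one */
	writel(cmd, mdata->base + SPI_CMD_REG);
}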
371 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_update_mdata_len() local
373 if (mdata->tx_sgl_len && mdata->rx_sgl_len) { in mtk_spi_update_mdata_len()
374 if (mdata->tx_sgl_len > mdata->rx_sgl_len) { in mtk_spi_update_mdata_len()
375 mult_delta = mtk_spi_get_mult_delta(mdata->rx_sgl_len); in mtk_spi_update_mdata_len()
376 mdata->xfer_len = mdata->rx_sgl_len - mult_delta; in mtk_spi_update_mdata_len()
377 mdata->rx_sgl_len = mult_delta; in mtk_spi_update_mdata_len()
378 mdata->tx_sgl_len -= mdata->xfer_len; in mtk_spi_update_mdata_len()
380 mult_delta = mtk_spi_get_mult_delta(mdata->tx_sgl_len); in mtk_spi_update_mdata_len()
381 mdata->xfer_len = mdata->tx_sgl_len - mult_delta; in mtk_spi_update_mdata_len()
382 mdata->tx_sgl_len = mult_delta; in mtk_spi_update_mdata_len()
383 mdata->rx_sgl_len -= mdata->xfer_len; in mtk_spi_update_mdata_len()
385 } else if (mdata->tx_sgl_len) { in mtk_spi_update_mdata_len()
386 mult_delta = mtk_spi_get_mult_delta(mdata->tx_sgl_len); in mtk_spi_update_mdata_len()
387 mdata->xfer_len = mdata->tx_sgl_len - mult_delta; in mtk_spi_update_mdata_len()
388 mdata->tx_sgl_len = mult_delta; in mtk_spi_update_mdata_len()
389 } else if (mdata->rx_sgl_len) { in mtk_spi_update_mdata_len()
390 mult_delta = mtk_spi_get_mult_delta(mdata->rx_sgl_len); in mtk_spi_update_mdata_len()
391 mdata->xfer_len = mdata->rx_sgl_len - mult_delta; in mtk_spi_update_mdata_len()
392 mdata->rx_sgl_len = mult_delta; in mtk_spi_update_mdata_len()
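The length bookkeeping above leans on mtk_spi_get_mult_delta(), which does not appear in this match list because it never touches mdata: it returns the tail of a scatterlist segment that is not a whole multiple of the packet size, so the current chunk stays packet-aligned and the remainder is carried into the next one. A sketch of that helper as it likely reads upstream:

static int mtk_spi_get_mult_delta(u32 xfer_len)
{
	u32 mult_delta;

	if (xfer_len > MTK_SPI_PACKET_SIZE)
		mult_delta = xfer_len % MTK_SPI_PACKET_SIZE;
	else
		mult_delta = 0;

	return mult_delta;
}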
399 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_setup_dma_addr() local
401 if (mdata->tx_sgl) { in mtk_spi_setup_dma_addr()
403 mdata->base + SPI_TX_SRC_REG); in mtk_spi_setup_dma_addr()
405 if (mdata->dev_comp->dma_ext) in mtk_spi_setup_dma_addr()
407 mdata->base + SPI_TX_SRC_REG_64); in mtk_spi_setup_dma_addr()
411 if (mdata->rx_sgl) { in mtk_spi_setup_dma_addr()
413 mdata->base + SPI_RX_DST_REG); in mtk_spi_setup_dma_addr()
415 if (mdata->dev_comp->dma_ext) in mtk_spi_setup_dma_addr()
417 mdata->base + SPI_RX_DST_REG_64); in mtk_spi_setup_dma_addr()
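mtk_spi_setup_dma_addr() programs the low 32 bits of each DMA address and, on parts with dma_ext, the upper bits into the *_64 registers. A hedged sketch of the TX half (the RX half mirrors it; MTK_SPI_32BITS_MASK and the CONFIG_ARCH_DMA_ADDR_T_64BIT guard are assumptions based on the upstream driver):

	if (mdata->tx_sgl) {
		writel((u32)(xfer->tx_dma & MTK_SPI_32BITS_MASK),
		       mdata->base + SPI_TX_SRC_REG);
#ifdef CONFIG_ARCH_DMA_ADDR_T_64BIT
		if (mdata->dev_comp->dma_ext)
			writel((u32)(xfer->tx_dma >> 32),
			       mdata->base + SPI_TX_SRC_REG_64);
#endif
	}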
428 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_fifo_transfer() local
430 mdata->cur_transfer = xfer; in mtk_spi_fifo_transfer()
431 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, xfer->len); in mtk_spi_fifo_transfer()
432 mdata->num_xfered = 0; in mtk_spi_fifo_transfer()
438 iowrite32_rep(mdata->base + SPI_TX_DATA_REG, xfer->tx_buf, cnt); in mtk_spi_fifo_transfer()
443 writel(reg_val, mdata->base + SPI_TX_DATA_REG); in mtk_spi_fifo_transfer()
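In the FIFO path the TX buffer is pushed as whole 32-bit words via iowrite32_rep(), and a trailing 1-3 byte remainder is packed into one final register write. A sketch of that remainder handling, reusing the variables from the matched lines:

	cnt = mdata->xfer_len / 4;
	iowrite32_rep(mdata->base + SPI_TX_DATA_REG, xfer->tx_buf, cnt);

	remainder = mdata->xfer_len % 4;
	if (remainder > 0) {
		reg_val = 0;
		memcpy(&reg_val, xfer->tx_buf + (cnt * 4), remainder);
		writel(reg_val, mdata->base + SPI_TX_DATA_REG);
	}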
457 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_dma_transfer() local
459 mdata->tx_sgl = NULL; in mtk_spi_dma_transfer()
460 mdata->rx_sgl = NULL; in mtk_spi_dma_transfer()
461 mdata->tx_sgl_len = 0; in mtk_spi_dma_transfer()
462 mdata->rx_sgl_len = 0; in mtk_spi_dma_transfer()
463 mdata->cur_transfer = xfer; in mtk_spi_dma_transfer()
464 mdata->num_xfered = 0; in mtk_spi_dma_transfer()
468 cmd = readl(mdata->base + SPI_CMD_REG); in mtk_spi_dma_transfer()
473 writel(cmd, mdata->base + SPI_CMD_REG); in mtk_spi_dma_transfer()
476 mdata->tx_sgl = xfer->tx_sg.sgl; in mtk_spi_dma_transfer()
478 mdata->rx_sgl = xfer->rx_sg.sgl; in mtk_spi_dma_transfer()
480 if (mdata->tx_sgl) { in mtk_spi_dma_transfer()
481 xfer->tx_dma = sg_dma_address(mdata->tx_sgl); in mtk_spi_dma_transfer()
482 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl); in mtk_spi_dma_transfer()
484 if (mdata->rx_sgl) { in mtk_spi_dma_transfer()
485 xfer->rx_dma = sg_dma_address(mdata->rx_sgl); in mtk_spi_dma_transfer()
486 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl); in mtk_spi_dma_transfer()
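Before the scatterlists are walked, mtk_spi_dma_transfer() turns on the controller's DMA engines in SPI_CMD_REG for whichever directions the transfer uses. A sketch of that read-modify-write, with the SPI_CMD_TX_DMA and SPI_CMD_RX_DMA bit names assumed:

	cmd = readl(mdata->base + SPI_CMD_REG);
	if (xfer->tx_buf)
		cmd |= SPI_CMD_TX_DMA;
	if (xfer->rx_buf)
		cmd |= SPI_CMD_RX_DMA;
	writel(cmd, mdata->base + SPI_CMD_REG);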
519 struct mtk_spi *mdata = spi_master_get_devdata(spi->master); in mtk_spi_setup() local
524 if (mdata->dev_comp->need_pad_sel && gpio_is_valid(spi->cs_gpio)) in mtk_spi_setup()
534 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_interrupt() local
535 struct spi_transfer *trans = mdata->cur_transfer; in mtk_spi_interrupt()
537 reg_val = readl(mdata->base + SPI_STATUS0_REG); in mtk_spi_interrupt()
539 mdata->state = MTK_SPI_PAUSED; in mtk_spi_interrupt()
541 mdata->state = MTK_SPI_IDLE; in mtk_spi_interrupt()
545 cnt = mdata->xfer_len / 4; in mtk_spi_interrupt()
546 ioread32_rep(mdata->base + SPI_RX_DATA_REG, in mtk_spi_interrupt()
547 trans->rx_buf + mdata->num_xfered, cnt); in mtk_spi_interrupt()
548 remainder = mdata->xfer_len % 4; in mtk_spi_interrupt()
550 reg_val = readl(mdata->base + SPI_RX_DATA_REG); in mtk_spi_interrupt()
552 mdata->num_xfered + in mtk_spi_interrupt()
559 mdata->num_xfered += mdata->xfer_len; in mtk_spi_interrupt()
560 if (mdata->num_xfered == trans->len) { in mtk_spi_interrupt()
565 len = trans->len - mdata->num_xfered; in mtk_spi_interrupt()
566 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, len); in mtk_spi_interrupt()
569 cnt = mdata->xfer_len / 4; in mtk_spi_interrupt()
570 iowrite32_rep(mdata->base + SPI_TX_DATA_REG, in mtk_spi_interrupt()
571 trans->tx_buf + mdata->num_xfered, cnt); in mtk_spi_interrupt()
573 remainder = mdata->xfer_len % 4; in mtk_spi_interrupt()
577 trans->tx_buf + (cnt * 4) + mdata->num_xfered, in mtk_spi_interrupt()
579 writel(reg_val, mdata->base + SPI_TX_DATA_REG); in mtk_spi_interrupt()
587 if (mdata->tx_sgl) in mtk_spi_interrupt()
588 trans->tx_dma += mdata->xfer_len; in mtk_spi_interrupt()
589 if (mdata->rx_sgl) in mtk_spi_interrupt()
590 trans->rx_dma += mdata->xfer_len; in mtk_spi_interrupt()
592 if (mdata->tx_sgl && (mdata->tx_sgl_len == 0)) { in mtk_spi_interrupt()
593 mdata->tx_sgl = sg_next(mdata->tx_sgl); in mtk_spi_interrupt()
594 if (mdata->tx_sgl) { in mtk_spi_interrupt()
595 trans->tx_dma = sg_dma_address(mdata->tx_sgl); in mtk_spi_interrupt()
596 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl); in mtk_spi_interrupt()
599 if (mdata->rx_sgl && (mdata->rx_sgl_len == 0)) { in mtk_spi_interrupt()
600 mdata->rx_sgl = sg_next(mdata->rx_sgl); in mtk_spi_interrupt()
601 if (mdata->rx_sgl) { in mtk_spi_interrupt()
602 trans->rx_dma = sg_dma_address(mdata->rx_sgl); in mtk_spi_interrupt()
603 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl); in mtk_spi_interrupt()
607 if (!mdata->tx_sgl && !mdata->rx_sgl) { in mtk_spi_interrupt()
609 cmd = readl(mdata->base + SPI_CMD_REG); in mtk_spi_interrupt()
612 writel(cmd, mdata->base + SPI_CMD_REG); in mtk_spi_interrupt()
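The last matched lines of the interrupt handler are the DMA completion path: once both scatterlists are exhausted, the handler switches the DMA enables back off and finalizes the transfer; otherwise it reprograms the next chunk and restarts. A hedged sketch of that tail:

	if (!mdata->tx_sgl && !mdata->rx_sgl) {
		/* both directions drained: disable DMA again */
		cmd = readl(mdata->base + SPI_CMD_REG);
		cmd &= ~SPI_CMD_TX_DMA;
		cmd &= ~SPI_CMD_RX_DMA;
		writel(cmd, mdata->base + SPI_CMD_REG);

		spi_finalize_current_transfer(master);
		return IRQ_HANDLED;
	}

	/* more scatterlist data: queue the next chunk and resume */
	mtk_spi_update_mdata_len(master);
	mtk_spi_setup_packet(master);
	mtk_spi_setup_dma_addr(master, trans);
	mtk_spi_enable_transfer(master);

	return IRQ_HANDLED;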
629 struct mtk_spi *mdata; in mtk_spi_probe() local
633 master = spi_alloc_master(&pdev->dev, sizeof(*mdata)); in mtk_spi_probe()
656 mdata = spi_master_get_devdata(master); in mtk_spi_probe()
657 mdata->dev_comp = of_id->data; in mtk_spi_probe()
659 if (mdata->dev_comp->enhance_timing) in mtk_spi_probe()
662 if (mdata->dev_comp->must_tx) in mtk_spi_probe()
665 if (mdata->dev_comp->need_pad_sel) { in mtk_spi_probe()
666 mdata->pad_num = of_property_count_u32_elems( in mtk_spi_probe()
669 if (mdata->pad_num < 0) { in mtk_spi_probe()
676 mdata->pad_sel = devm_kmalloc_array(&pdev->dev, mdata->pad_num, in mtk_spi_probe()
678 if (!mdata->pad_sel) { in mtk_spi_probe()
683 for (i = 0; i < mdata->pad_num; i++) { in mtk_spi_probe()
686 i, &mdata->pad_sel[i]); in mtk_spi_probe()
687 if (mdata->pad_sel[i] > MT8173_SPI_MAX_PAD_SEL) { in mtk_spi_probe()
689 i, mdata->pad_sel[i]); in mtk_spi_probe()
697 mdata->base = devm_platform_ioremap_resource(pdev, 0); in mtk_spi_probe()
698 if (IS_ERR(mdata->base)) { in mtk_spi_probe()
699 ret = PTR_ERR(mdata->base); in mtk_spi_probe()
719 mdata->parent_clk = devm_clk_get(&pdev->dev, "parent-clk"); in mtk_spi_probe()
720 if (IS_ERR(mdata->parent_clk)) { in mtk_spi_probe()
721 ret = PTR_ERR(mdata->parent_clk); in mtk_spi_probe()
726 mdata->sel_clk = devm_clk_get(&pdev->dev, "sel-clk"); in mtk_spi_probe()
727 if (IS_ERR(mdata->sel_clk)) { in mtk_spi_probe()
728 ret = PTR_ERR(mdata->sel_clk); in mtk_spi_probe()
733 mdata->spi_clk = devm_clk_get(&pdev->dev, "spi-clk"); in mtk_spi_probe()
734 if (IS_ERR(mdata->spi_clk)) { in mtk_spi_probe()
735 ret = PTR_ERR(mdata->spi_clk); in mtk_spi_probe()
740 ret = clk_prepare_enable(mdata->spi_clk); in mtk_spi_probe()
746 ret = clk_set_parent(mdata->sel_clk, mdata->parent_clk); in mtk_spi_probe()
749 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_probe()
753 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_probe()
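The clock dance in probe (lines 740-753 above) enables spi_clk only long enough to reparent sel_clk onto parent-clk, then drops it again, since runtime PM re-enables spi_clk per transfer. A hedged sketch of that sequence with error handling (the goto label and message text are assumptions):

	ret = clk_prepare_enable(mdata->spi_clk);
	if (ret < 0) {
		dev_err(&pdev->dev, "failed to enable spi_clk (%d)\n", ret);
		goto err_put_master;
	}

	ret = clk_set_parent(mdata->sel_clk, mdata->parent_clk);
	if (ret < 0) {
		dev_err(&pdev->dev, "failed to clk_set_parent (%d)\n", ret);
		clk_disable_unprepare(mdata->spi_clk);
		goto err_put_master;
	}

	/* runtime PM turns spi_clk back on whenever a transfer runs */
	clk_disable_unprepare(mdata->spi_clk);

	pm_runtime_enable(&pdev->dev);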
763 if (mdata->dev_comp->need_pad_sel) { in mtk_spi_probe()
764 if (mdata->pad_num != master->num_chipselect) { in mtk_spi_probe()
767 mdata->pad_num, master->num_chipselect); in mtk_spi_probe()
793 if (mdata->dev_comp->dma_ext) in mtk_spi_probe()
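Near the end of probe, dma_ext widens the device's DMA mask past 32 bits so that the *_64 address registers written in mtk_spi_setup_dma_addr() can actually be used. A sketch, with the bit-width constant names assumed from the upstream driver:

	if (mdata->dev_comp->dma_ext)
		addr_bits = DMA_ADDR_EXT_BITS;	/* extended (>32-bit) addressing */
	else
		addr_bits = DMA_ADDR_DEF_BITS;	/* plain 32-bit addressing */
	ret = dma_set_mask(&pdev->dev, DMA_BIT_MASK(addr_bits));
	if (ret)
		dev_notice(&pdev->dev, "SPI dma_set_mask(%d) failed, ret:%d\n",
			   addr_bits, ret);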
815 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_remove() local
819 mtk_spi_reset(mdata); in mtk_spi_remove()
829 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_suspend() local
836 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_suspend()
845 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_resume() local
848 ret = clk_prepare_enable(mdata->spi_clk); in mtk_spi_resume()
857 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_resume()
867 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_runtime_suspend() local
869 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_runtime_suspend()
877 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_runtime_resume() local
880 ret = clk_prepare_enable(mdata->spi_clk); in mtk_spi_runtime_resume()
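All four power-management callbacks only gate spi_clk (system suspend/resume additionally go through spi_master_suspend()/spi_master_resume()). As an illustration, the runtime-resume path likely looks like the sketch below; the error message text is an assumption:

static int mtk_spi_runtime_resume(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct mtk_spi *mdata = spi_master_get_devdata(master);
	int ret;

	ret = clk_prepare_enable(mdata->spi_clk);
	if (ret < 0) {
		dev_err(dev, "failed to enable spi_clk (%d)\n", ret);
		return ret;
	}

	return 0;
}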