From 8d6b6d7e135e9bbfc923d34a45cb0e72695e63ed Mon Sep 17 00:00:00 2001
From: Abhishek Sahu <absahu@codeaurora.org>
Date: Mon, 25 Sep 2017 13:21:26 +0530
Subject: [PATCH 3/7] mtd: nand: qcom: support for command descriptor formation

1. Add a function for command descriptor preparation. It will be used
   only by BAM DMA and will form the DMA descriptors containing command
   elements.

2. Use the DMA_PREP_CMD flag when forming command DMA descriptors.

Reviewed-by: Archit Taneja <architt@codeaurora.org>
Signed-off-by: Abhishek Sahu <absahu@codeaurora.org>
Signed-off-by: Boris Brezillon <boris.brezillon@free-electrons.com>
---
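Note (illustration only, not part of the change): below is a minimal, user-space
sketch of the bookkeeping that prep_bam_dma_desc_cmd() performs in this patch.
Command elements are appended at bam_ce_pos and, when NAND_BAM_NEXT_SGL is passed,
the accumulated run is closed out as a single command SGL entry. The names and
types here (mock_cmd_element, mock_bam_txn, mock_prep_cmd) are simplified
stand-ins for illustration, not the kernel/BAM APIs used by the driver.

#include <stdio.h>
#include <stdint.h>

#define MOCK_NEXT_SGL	(1 << 0)	/* stands in for NAND_BAM_NEXT_SGL */

struct mock_cmd_element {		/* stands in for struct bam_cmd_element */
	uint32_t reg_addr;
	uint32_t data;
};

struct mock_sgl_entry {			/* one command SGL entry */
	struct mock_cmd_element *buf;
	unsigned int len;
};

struct mock_bam_txn {			/* subset of struct bam_transaction */
	struct mock_cmd_element bam_ce[32];
	struct mock_sgl_entry cmd_sgl[8];
	int bam_ce_pos;
	int bam_ce_start;
	int cmd_sgl_pos;
};

/* Append num_regs write command elements; close the SGL on MOCK_NEXT_SGL. */
static void mock_prep_cmd(struct mock_bam_txn *txn, uint32_t reg_off,
			  const uint32_t *vals, int num_regs, unsigned int flags)
{
	int i;

	/* one command element per register, addresses 4 bytes apart */
	for (i = 0; i < num_regs; i++) {
		txn->bam_ce[txn->bam_ce_pos + i].reg_addr = reg_off + 4 * i;
		txn->bam_ce[txn->bam_ce_pos + i].data = vals[i];
	}
	txn->bam_ce_pos += num_regs;

	/* close the accumulated run into a single command SGL entry */
	if (flags & MOCK_NEXT_SGL) {
		txn->cmd_sgl[txn->cmd_sgl_pos].buf =
			&txn->bam_ce[txn->bam_ce_start];
		txn->cmd_sgl[txn->cmd_sgl_pos].len =
			txn->bam_ce_pos - txn->bam_ce_start;
		txn->cmd_sgl_pos++;
		txn->bam_ce_start = txn->bam_ce_pos;
	}
}

int main(void)
{
	struct mock_bam_txn txn = { 0 };
	uint32_t cfg[2] = { 0x11, 0x22 }, cmd[1] = { 0x33 };

	mock_prep_cmd(&txn, 0x20, cfg, 2, 0);		  /* keep accumulating */
	mock_prep_cmd(&txn, 0x00, cmd, 1, MOCK_NEXT_SGL); /* close one SGL */

	printf("SGL entries: %d, elements in first SGL: %u\n",
	       txn.cmd_sgl_pos, txn.cmd_sgl[0].len);	  /* -> 1, 3 */
	return 0;
}

Run standalone, this prints one SGL entry holding three command elements, which
mirrors how read_reg_dma()/write_reg_dma() batch several register accesses into
one command SGL before submit_descs() submits it with DMA_PREP_CMD.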
drivers/mtd/nand/qcom_nandc.c | 108 +++++++++++++++++++++++++++++++++++-------
1 file changed, 92 insertions(+), 16 deletions(-)

--- a/drivers/mtd/nand/qcom_nandc.c
+++ b/drivers/mtd/nand/qcom_nandc.c
@@ -200,6 +200,14 @@ nandc_set_reg(nandc, NAND_READ_LOCATION_
*/
#define dev_cmd_reg_addr(nandc, reg) ((nandc)->props->dev_cmd_reg_start + (reg))

+/* Returns the NAND register physical address */
+#define nandc_reg_phys(chip, offset) ((chip)->base_phys + (offset))
+
+/* Returns the dma address for reg read buffer */
+#define reg_buf_dma_addr(chip, vaddr) \
+ ((chip)->reg_read_dma + \
+ ((uint8_t *)(vaddr) - (uint8_t *)(chip)->reg_read_buf))
+
#define QPIC_PER_CW_CMD_ELEMENTS 32
#define QPIC_PER_CW_CMD_SGL 32
#define QPIC_PER_CW_DATA_SGL 8
@@ -317,7 +325,8 @@ struct nandc_regs {
* controller
* @dev: parent device
* @base: MMIO base
- * @base_dma: physical base address of controller registers
+ * @base_phys: physical base address of controller registers
+ * @base_dma: dma base address of controller registers
* @core_clk: controller clock
* @aon_clk: another controller clock
*
@@ -350,6 +359,7 @@ struct qcom_nand_controller {
struct device *dev;

void __iomem *base;
+ phys_addr_t base_phys;
dma_addr_t base_dma;

struct clk *core_clk;
@@ -751,6 +761,66 @@ static int prepare_bam_async_desc(struct
}

/*
+ * Prepares the command descriptor for BAM DMA which will be used for NAND
+ * register reads and writes. The command descriptor requires the command
+ * to be formed in command element type so this function uses the command
+ * element from bam transaction ce array and fills the same with required
+ * data. A single SGL can contain multiple command elements so
+ * NAND_BAM_NEXT_SGL will be used for starting the separate SGL
+ * after the current command element.
+ */
+static int prep_bam_dma_desc_cmd(struct qcom_nand_controller *nandc, bool read,
+ int reg_off, const void *vaddr,
+ int size, unsigned int flags)
+{
+ int bam_ce_size;
+ int i, ret;
+ struct bam_cmd_element *bam_ce_buffer;
+ struct bam_transaction *bam_txn = nandc->bam_txn;
+
+ bam_ce_buffer = &bam_txn->bam_ce[bam_txn->bam_ce_pos];
+
+ /* fill the command desc */
+ for (i = 0; i < size; i++) {
+ if (read)
+ bam_prep_ce(&bam_ce_buffer[i],
+ nandc_reg_phys(nandc, reg_off + 4 * i),
+ BAM_READ_COMMAND,
+ reg_buf_dma_addr(nandc,
+ (__le32 *)vaddr + i));
+ else
+ bam_prep_ce_le32(&bam_ce_buffer[i],
+ nandc_reg_phys(nandc, reg_off + 4 * i),
+ BAM_WRITE_COMMAND,
+ *((__le32 *)vaddr + i));
+ }
+
+ bam_txn->bam_ce_pos += size;
+
+ /* use the separate sgl after this command */
+ if (flags & NAND_BAM_NEXT_SGL) {
+ bam_ce_buffer = &bam_txn->bam_ce[bam_txn->bam_ce_start];
+ bam_ce_size = (bam_txn->bam_ce_pos -
+ bam_txn->bam_ce_start) *
+ sizeof(struct bam_cmd_element);
+ sg_set_buf(&bam_txn->cmd_sgl[bam_txn->cmd_sgl_pos],
+ bam_ce_buffer, bam_ce_size);
+ bam_txn->cmd_sgl_pos++;
+ bam_txn->bam_ce_start = bam_txn->bam_ce_pos;
+
+ if (flags & NAND_BAM_NWD) {
+ ret = prepare_bam_async_desc(nandc, nandc->cmd_chan,
+ DMA_PREP_FENCE |
+ DMA_PREP_CMD);
+ if (ret)
+ return ret;
+ }
+ }
+
+ return 0;
+}
+
+/*
* Prepares the data descriptor for BAM DMA which will be used for NAND
* data reads and writes.
*/
@@ -868,19 +938,22 @@ static int read_reg_dma(struct qcom_nand
{
bool flow_control = false;
void *vaddr;
- int size;

- if (first == NAND_READ_ID || first == NAND_FLASH_STATUS)
- flow_control = true;
+ vaddr = nandc->reg_read_buf + nandc->reg_read_pos;
+ nandc->reg_read_pos += num_regs;

if (first == NAND_DEV_CMD_VLD || first == NAND_DEV_CMD1)
first = dev_cmd_reg_addr(nandc, first);

- size = num_regs * sizeof(u32);
- vaddr = nandc->reg_read_buf + nandc->reg_read_pos;
- nandc->reg_read_pos += num_regs;
+ if (nandc->props->is_bam)
+ return prep_bam_dma_desc_cmd(nandc, true, first, vaddr,
+ num_regs, flags);
+
+ if (first == NAND_READ_ID || first == NAND_FLASH_STATUS)
+ flow_control = true;

- return prep_adm_dma_desc(nandc, true, first, vaddr, size, flow_control);
+ return prep_adm_dma_desc(nandc, true, first, vaddr,
+ num_regs * sizeof(u32), flow_control);
}

/*
@@ -897,13 +970,9 @@ static int write_reg_dma(struct qcom_nan
bool flow_control = false;
struct nandc_regs *regs = nandc->regs;
void *vaddr;
- int size;

vaddr = offset_to_nandc_reg(regs, first);

- if (first == NAND_FLASH_CMD)
- flow_control = true;
-
if (first == NAND_ERASED_CW_DETECT_CFG) {
if (flags & NAND_ERASED_CW_SET)
vaddr = &regs->erased_cw_detect_cfg_set;
@@ -920,10 +989,15 @@ static int write_reg_dma(struct qcom_nan
if (first == NAND_DEV_CMD_VLD_RESTORE || first == NAND_DEV_CMD_VLD)
first = dev_cmd_reg_addr(nandc, NAND_DEV_CMD_VLD);

- size = num_regs * sizeof(u32);
+ if (nandc->props->is_bam)
+ return prep_bam_dma_desc_cmd(nandc, false, first, vaddr,
+ num_regs, flags);
+
+ if (first == NAND_FLASH_CMD)
+ flow_control = true;

- return prep_adm_dma_desc(nandc, false, first, vaddr, size,
- flow_control);
+ return prep_adm_dma_desc(nandc, false, first, vaddr,
+ num_regs * sizeof(u32), flow_control);
}

/*
@@ -1187,7 +1261,8 @@ static int submit_descs(struct qcom_nand
}

if (bam_txn->cmd_sgl_pos > bam_txn->cmd_sgl_start) {
- r = prepare_bam_async_desc(nandc, nandc->cmd_chan, 0);
+ r = prepare_bam_async_desc(nandc, nandc->cmd_chan,
+ DMA_PREP_CMD);
if (r)
return r;
}
@@ -2725,6 +2800,7 @@ static int qcom_nandc_probe(struct platf
if (IS_ERR(nandc->base))
return PTR_ERR(nandc->base);

+ nandc->base_phys = res->start;
nandc->base_dma = phys_to_dma(dev, (phys_addr_t)res->start);

nandc->core_clk = devm_clk_get(dev, "core");