Skip to content

Commit

Permalink
dmaengine: stm32-dma: add support to trigger STM32 MDMA
Browse files Browse the repository at this point in the history
STM32 MDMA can be triggered by an STM32 DMA channel's transfer complete event.
The "request line number" triggering STM32 MDMA is the STM32 DMAMUX channel
id set by stm32-dmamux driver in dma_spec->args[3].

The stm32-dma driver fills the struct stm32_dma_mdma_config used to configure
the MDMA, and passes it via struct dma_slave_config's .peripheral_config and
.peripheral_size fields.

Signed-off-by: Amelie Delaunay <amelie.delaunay@foss.st.com>
  • Loading branch information
ADESTM authored and intel-lab-lkp committed Jul 13, 2022
1 parent d5e1057 commit e1f4515
Showing 1 changed file with 55 additions and 1 deletion.
56 changes: 55 additions & 1 deletion drivers/dma/stm32-dma.c
Expand Up @@ -142,6 +142,8 @@
#define STM32_DMA_DIRECT_MODE_GET(n) (((n) & STM32_DMA_DIRECT_MODE_MASK) >> 2)
#define STM32_DMA_ALT_ACK_MODE_MASK BIT(4)
#define STM32_DMA_ALT_ACK_MODE_GET(n) (((n) & STM32_DMA_ALT_ACK_MODE_MASK) >> 4)
#define STM32_DMA_MDMA_STREAM_ID_MASK GENMASK(19, 16)
#define STM32_DMA_MDMA_STREAM_ID_GET(n) (((n) & STM32_DMA_MDMA_STREAM_ID_MASK) >> 16)

enum stm32_dma_width {
STM32_DMA_BYTE,
Expand Down Expand Up @@ -195,6 +197,19 @@ struct stm32_dma_desc {
struct stm32_dma_sg_req sg_req[];
};

/**
 * struct stm32_dma_mdma_config - STM32 DMA MDMA configuration
 * @stream_id: DMA request line number used to trigger the STM32 MDMA transfer
 * @ifcr: physical address of the DMA interrupt flag clear register
 * (LIFCR or HIFCR depending on the channel id),
 * used by STM32 MDMA to clear the DMA Transfer Complete flag
 * @tcf: DMA Transfer Complete flag mask (TCIFx bit) within @ifcr
 */
struct stm32_dma_mdma_config {
u32 stream_id;
u32 ifcr;
u32 tcf;
};

struct stm32_dma_chan {
struct virt_dma_chan vchan;
bool config_init;
Expand All @@ -209,6 +224,8 @@ struct stm32_dma_chan {
u32 mem_burst;
u32 mem_width;
enum dma_status status;
bool trig_mdma;
struct stm32_dma_mdma_config mdma_config;
};

struct stm32_dma_device {
Expand Down Expand Up @@ -388,6 +405,13 @@ static int stm32_dma_slave_config(struct dma_chan *c,

memcpy(&chan->dma_sconfig, config, sizeof(*config));

/* Check if user is requesting DMA to trigger STM32 MDMA */
if (config->peripheral_size) {
config->peripheral_config = &chan->mdma_config;
config->peripheral_size = sizeof(chan->mdma_config);
chan->trig_mdma = true;
}

chan->config_init = true;

return 0;
Expand Down Expand Up @@ -576,6 +600,10 @@ static void stm32_dma_start_transfer(struct stm32_dma_chan *chan)
sg_req = &chan->desc->sg_req[chan->next_sg];
reg = &sg_req->chan_reg;

/* When DMA triggers STM32 MDMA, DMA Transfer Complete is managed by STM32 MDMA */
if (chan->trig_mdma && chan->dma_sconfig.direction != DMA_MEM_TO_DEV)
reg->dma_scr &= ~STM32_DMA_SCR_TCIE;

reg->dma_scr &= ~STM32_DMA_SCR_EN;
stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), reg->dma_scr);
stm32_dma_write(dmadev, STM32_DMA_SPAR(chan->id), reg->dma_spar);
Expand Down Expand Up @@ -725,6 +753,8 @@ static void stm32_dma_handle_chan_done(struct stm32_dma_chan *chan, u32 scr)

if (chan->desc->cyclic) {
vchan_cyclic_callback(&chan->desc->vdesc);
if (chan->trig_mdma)
return;
stm32_dma_sg_inc(chan);
/* cyclic while CIRC/DBM disable => post resume reconfiguration needed */
if (!(scr & (STM32_DMA_SCR_CIRC | STM32_DMA_SCR_DBM)))
Expand Down Expand Up @@ -1099,6 +1129,10 @@ static struct dma_async_tx_descriptor *stm32_dma_prep_slave_sg(
else
chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_PFCTRL;

/* Activate Double Buffer Mode if DMA triggers STM32 MDMA and more than 1 sg */
if (chan->trig_mdma && sg_len > 1)
chan->chan_reg.dma_scr |= STM32_DMA_SCR_DBM;

for_each_sg(sgl, sg, sg_len, i) {
ret = stm32_dma_set_xfer_param(chan, direction, &buswidth,
sg_dma_len(sg),
Expand All @@ -1120,6 +1154,8 @@ static struct dma_async_tx_descriptor *stm32_dma_prep_slave_sg(
desc->sg_req[i].chan_reg.dma_spar = chan->chan_reg.dma_spar;
desc->sg_req[i].chan_reg.dma_sm0ar = sg_dma_address(sg);
desc->sg_req[i].chan_reg.dma_sm1ar = sg_dma_address(sg);
if (chan->trig_mdma)
desc->sg_req[i].chan_reg.dma_sm1ar += sg_dma_len(sg);
desc->sg_req[i].chan_reg.dma_sndtr = nb_data_items;
}

Expand Down Expand Up @@ -1207,8 +1243,11 @@ static struct dma_async_tx_descriptor *stm32_dma_prep_dma_cyclic(
desc->sg_req[i].chan_reg.dma_spar = chan->chan_reg.dma_spar;
desc->sg_req[i].chan_reg.dma_sm0ar = buf_addr;
desc->sg_req[i].chan_reg.dma_sm1ar = buf_addr;
if (chan->trig_mdma)
desc->sg_req[i].chan_reg.dma_sm1ar += period_len;
desc->sg_req[i].chan_reg.dma_sndtr = nb_data_items;
buf_addr += period_len;
if (!chan->trig_mdma)
buf_addr += period_len;
}

desc->num_sgs = num_periods;
Expand Down Expand Up @@ -1491,6 +1530,7 @@ static void stm32_dma_set_config(struct stm32_dma_chan *chan,
chan->threshold = STM32_DMA_FIFO_THRESHOLD_NONE;
if (STM32_DMA_ALT_ACK_MODE_GET(cfg->features))
chan->chan_reg.dma_scr |= STM32_DMA_SCR_TRBUFF;
chan->mdma_config.stream_id = STM32_DMA_MDMA_STREAM_ID_GET(cfg->features);
}

static struct dma_chan *stm32_dma_of_xlate(struct of_phandle_args *dma_spec,
Expand Down Expand Up @@ -1630,6 +1670,20 @@ static int stm32_dma_probe(struct platform_device *pdev)
chan->id = i;
chan->vchan.desc_free = stm32_dma_desc_free;
vchan_init(&chan->vchan, dd);

chan->mdma_config.ifcr = res->start;
chan->mdma_config.ifcr += (chan->id & 4) ? STM32_DMA_HIFCR : STM32_DMA_LIFCR;

chan->mdma_config.tcf = STM32_DMA_TCI;
/*
* bit0 of chan->id represents the need to left shift by 6
* bit1 of chan->id represents the need to extra left shift by 16
* TCIF0, chan->id = b0000; TCIF4, chan->id = b0100: left shift by 0*6 + 0*16
* TCIF1, chan->id = b0001; TCIF5, chan->id = b0101: left shift by 1*6 + 0*16
* TCIF2, chan->id = b0010; TCIF6, chan->id = b0110: left shift by 0*6 + 1*16
* TCIF3, chan->id = b0011; TCIF7, chan->id = b0111: left shift by 1*6 + 1*16
*/
chan->mdma_config.tcf <<= (6 * (chan->id & 0x1) + 16 * ((chan->id & 0x2) >> 1));
}

ret = dma_async_device_register(dd);
Expand Down

0 comments on commit e1f4515

Please sign in to comment.