dmaengine: stm32-mdma: Fix incomplete Hw descriptors allocator
Only one hardware descriptor is currently allocated. Loop over the required number of hardware descriptors so that each one is allocated properly.

Signed-off-by: Pierre-Yves MORDRET <pierre-yves.mordret@st.com>
Signed-off-by: Vinod Koul <vkoul@kernel.org>
parent ee6de9ac52
commit bbb5a4e1e7
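For context, a minimal userspace sketch of the allocation pattern the fix adopts: a container struct ending in a flexible array member is sized for count nodes up front, each node's hardware descriptor is then allocated in a loop, and a partial failure unwinds only what was actually allocated. This is a sketch only, not driver code: plain calloc()/free() stand in for kzalloc(), dma_pool_alloc() and dma_pool_free(), and the type names are simplified stand-ins for the driver's stm32_mdma_* structures. The same unwind order appears in the err: path of stm32_mdma_alloc_desc() in the diff below.

/*
 * Sketch only: calloc()/free() replace kzalloc(), dma_pool_alloc() and
 * dma_pool_free(); the types below are simplified stand-ins for the
 * driver's stm32_mdma_* structures.
 */
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

struct hwdesc {                          /* stand-in hardware descriptor */
        unsigned int ctcr, cbndtr, clar;
};

struct desc_node {                       /* one node per hardware descriptor */
        struct hwdesc *hwdesc;
};

struct desc {
        unsigned int count;
        struct desc_node node[];         /* flexible array, sized at alloc time */
};

static struct desc *alloc_desc(unsigned int count)
{
        struct desc *desc;
        unsigned int i;

        /*
         * Size the container for 'count' nodes; the kernel fix uses
         * offsetof(typeof(*desc), node[count]) for the same purpose.
         */
        desc = calloc(1, sizeof(*desc) + count * sizeof(desc->node[0]));
        if (!desc)
                return NULL;

        for (i = 0; i < count; i++) {
                desc->node[i].hwdesc = calloc(1, sizeof(*desc->node[i].hwdesc));
                if (!desc->node[i].hwdesc)
                        goto err;
        }

        desc->count = count;
        return desc;

err:
        /* Free only the nodes that were allocated, then the container. */
        while (i-- > 0)
                free(desc->node[i].hwdesc);
        free(desc);
        return NULL;
}

static void free_desc(struct desc *desc)
{
        unsigned int i;

        for (i = 0; i < desc->count; i++)
                free(desc->node[i].hwdesc);
        free(desc);
}

int main(void)
{
        struct desc *d = alloc_desc(4);

        if (!d)
                return 1;
        printf("allocated %u hw descriptors\n", d->count);
        free_desc(d);
        return 0;
}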
@@ -252,13 +252,17 @@ struct stm32_mdma_hwdesc {
 	u32 cmdr;
 } __aligned(64);
 
+struct stm32_mdma_desc_node {
+	struct stm32_mdma_hwdesc *hwdesc;
+	dma_addr_t hwdesc_phys;
+};
+
 struct stm32_mdma_desc {
 	struct virt_dma_desc vdesc;
 	u32 ccr;
-	struct stm32_mdma_hwdesc *hwdesc;
-	dma_addr_t hwdesc_phys;
 	bool cyclic;
 	u32 count;
+	struct stm32_mdma_desc_node node[];
 };
 
 struct stm32_mdma_chan {
@@ -344,30 +348,42 @@ static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
 		struct stm32_mdma_chan *chan, u32 count)
 {
 	struct stm32_mdma_desc *desc;
+	int i;
 
-	desc = kzalloc(sizeof(*desc), GFP_NOWAIT);
+	desc = kzalloc(offsetof(typeof(*desc), node[count]), GFP_NOWAIT);
 	if (!desc)
 		return NULL;
 
-	desc->hwdesc = dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
-				      &desc->hwdesc_phys);
-	if (!desc->hwdesc) {
-		dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
-		kfree(desc);
-		return NULL;
+	for (i = 0; i < count; i++) {
+		desc->node[i].hwdesc =
+			dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
+				       &desc->node[i].hwdesc_phys);
+		if (!desc->node[i].hwdesc)
+			goto err;
 	}
 
 	desc->count = count;
 
 	return desc;
+
+err:
+	dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
+	while (--i >= 0)
+		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
+			      desc->node[i].hwdesc_phys);
+	kfree(desc);
+	return NULL;
 }
 
 static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
 {
 	struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
+	int i;
 
-	dma_pool_free(chan->desc_pool, desc->hwdesc, desc->hwdesc_phys);
+	for (i = 0; i < desc->count; i++)
+		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
+			      desc->node[i].hwdesc_phys);
 	kfree(desc);
 }
 
@@ -666,18 +682,18 @@ static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
 }
 
 static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
-				    struct stm32_mdma_hwdesc *hwdesc)
+				    struct stm32_mdma_desc_node *node)
 {
-	dev_dbg(chan2dev(chan), "hwdesc: 0x%p\n", hwdesc);
-	dev_dbg(chan2dev(chan), "CTCR: 0x%08x\n", hwdesc->ctcr);
-	dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n", hwdesc->cbndtr);
-	dev_dbg(chan2dev(chan), "CSAR: 0x%08x\n", hwdesc->csar);
-	dev_dbg(chan2dev(chan), "CDAR: 0x%08x\n", hwdesc->cdar);
-	dev_dbg(chan2dev(chan), "CBRUR: 0x%08x\n", hwdesc->cbrur);
-	dev_dbg(chan2dev(chan), "CLAR: 0x%08x\n", hwdesc->clar);
-	dev_dbg(chan2dev(chan), "CTBR: 0x%08x\n", hwdesc->ctbr);
-	dev_dbg(chan2dev(chan), "CMAR: 0x%08x\n", hwdesc->cmar);
-	dev_dbg(chan2dev(chan), "CMDR: 0x%08x\n\n", hwdesc->cmdr);
+	dev_dbg(chan2dev(chan), "hwdesc: %pad\n", &node->hwdesc_phys);
+	dev_dbg(chan2dev(chan), "CTCR: 0x%08x\n", node->hwdesc->ctcr);
+	dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n", node->hwdesc->cbndtr);
+	dev_dbg(chan2dev(chan), "CSAR: 0x%08x\n", node->hwdesc->csar);
+	dev_dbg(chan2dev(chan), "CDAR: 0x%08x\n", node->hwdesc->cdar);
+	dev_dbg(chan2dev(chan), "CBRUR: 0x%08x\n", node->hwdesc->cbrur);
+	dev_dbg(chan2dev(chan), "CLAR: 0x%08x\n", node->hwdesc->clar);
+	dev_dbg(chan2dev(chan), "CTBR: 0x%08x\n", node->hwdesc->ctbr);
+	dev_dbg(chan2dev(chan), "CMAR: 0x%08x\n", node->hwdesc->cmar);
+	dev_dbg(chan2dev(chan), "CMDR: 0x%08x\n\n", node->hwdesc->cmdr);
 }
 
 static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
@@ -691,7 +707,7 @@ static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
 	struct stm32_mdma_hwdesc *hwdesc;
 	u32 next = count + 1;
 
-	hwdesc = &desc->hwdesc[count];
+	hwdesc = desc->node[count].hwdesc;
 	hwdesc->ctcr = ctcr;
 	hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
 			    STM32_MDMA_CBNDTR_BRDUM |
@@ -701,19 +717,20 @@ static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
 	hwdesc->csar = src_addr;
 	hwdesc->cdar = dst_addr;
 	hwdesc->cbrur = 0;
-	hwdesc->clar = desc->hwdesc_phys + next * sizeof(*hwdesc);
 	hwdesc->ctbr = ctbr;
 	hwdesc->cmar = config->mask_addr;
 	hwdesc->cmdr = config->mask_data;
 
 	if (is_last) {
 		if (is_cyclic)
-			hwdesc->clar = desc->hwdesc_phys;
+			hwdesc->clar = desc->node[0].hwdesc_phys;
 		else
 			hwdesc->clar = 0;
+	} else {
+		hwdesc->clar = desc->node[next].hwdesc_phys;
 	}
 
-	stm32_mdma_dump_hwdesc(chan, hwdesc);
+	stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
 }
 
 static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
@@ -777,7 +794,7 @@ stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
 {
 	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
 	struct stm32_mdma_desc *desc;
-	int ret;
+	int i, ret;
 
 	/*
 	 * Once DMA is in setup cyclic mode the channel we cannot assign this
@@ -803,7 +820,9 @@ stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
 	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
 
 xfer_setup_err:
-	dma_pool_free(chan->desc_pool, &desc->hwdesc, desc->hwdesc_phys);
+	for (i = 0; i < desc->count; i++)
+		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
+			      desc->node[i].hwdesc_phys);
 	kfree(desc);
 	return NULL;
 }
@@ -892,7 +911,9 @@ stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
 	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
 
 xfer_setup_err:
-	dma_pool_free(chan->desc_pool, &desc->hwdesc, desc->hwdesc_phys);
+	for (i = 0; i < desc->count; i++)
+		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
+			      desc->node[i].hwdesc_phys);
 	kfree(desc);
 	return NULL;
 }
@@ -1006,7 +1027,7 @@ stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
 		ctcr |= STM32_MDMA_CTCR_PKE;
 
 		/* Prepare hardware descriptor */
-		hwdesc = desc->hwdesc;
+		hwdesc = desc->node[0].hwdesc;
 		hwdesc->ctcr = ctcr;
 		hwdesc->cbndtr = cbndtr;
 		hwdesc->csar = src;
@@ -1017,7 +1038,7 @@ stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
 		hwdesc->cmar = 0;
 		hwdesc->cmdr = 0;
 
-		stm32_mdma_dump_hwdesc(chan, hwdesc);
+		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
 	} else {
 		/* Setup a LLI transfer */
 		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
@@ -1117,7 +1138,7 @@ static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
 	}
 
 	chan->desc = to_stm32_mdma_desc(vdesc);
-	hwdesc = chan->desc->hwdesc;
+	hwdesc = chan->desc->node[0].hwdesc;
 	chan->curr_hwdesc = 0;
 
 	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
@@ -1195,7 +1216,7 @@ static int stm32_mdma_resume(struct dma_chan *c)
 	unsigned long flags;
 	u32 status, reg;
 
-	hwdesc = &chan->desc->hwdesc[chan->curr_hwdesc];
+	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;
 
 	spin_lock_irqsave(&chan->vchan.lock, flags);
 
@@ -1265,13 +1286,13 @@ static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
 				      u32 curr_hwdesc)
 {
 	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
+	struct stm32_mdma_hwdesc *hwdesc = desc->node[0].hwdesc;
 	u32 cbndtr, residue, modulo, burst_size;
 	int i;
 
 	residue = 0;
 	for (i = curr_hwdesc + 1; i < desc->count; i++) {
-		struct stm32_mdma_hwdesc *hwdesc = &desc->hwdesc[i];
-
+		hwdesc = desc->node[i].hwdesc;
 		residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
 	}
 	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));