
Merge branch 'topic/sprd' into for-linus

Author: Vinod Koul
Date:   2018-12-31 19:32:21 +05:30
Commit: 990beed934

2 changed files with 194 additions and 20 deletions

drivers/dma/sprd-dma.c

@@ -36,6 +36,8 @@
#define SPRD_DMA_GLB_CHN_EN_STS 0x1c
#define SPRD_DMA_GLB_DEBUG_STS 0x20
#define SPRD_DMA_GLB_ARB_SEL_STS 0x24
+#define SPRD_DMA_GLB_2STAGE_GRP1 0x28
+#define SPRD_DMA_GLB_2STAGE_GRP2 0x2c
#define SPRD_DMA_GLB_REQ_UID(uid) (0x4 * ((uid) - 1))
#define SPRD_DMA_GLB_REQ_UID_OFFSET 0x2000
@@ -57,6 +59,18 @@
#define SPRD_DMA_CHN_SRC_BLK_STEP 0x38
#define SPRD_DMA_CHN_DES_BLK_STEP 0x3c
+/* SPRD_DMA_GLB_2STAGE_GRP register definition */
+#define SPRD_DMA_GLB_2STAGE_EN BIT(24)
+#define SPRD_DMA_GLB_CHN_INT_MASK GENMASK(23, 20)
+#define SPRD_DMA_GLB_LIST_DONE_TRG BIT(19)
+#define SPRD_DMA_GLB_TRANS_DONE_TRG BIT(18)
+#define SPRD_DMA_GLB_BLOCK_DONE_TRG BIT(17)
+#define SPRD_DMA_GLB_FRAG_DONE_TRG BIT(16)
+#define SPRD_DMA_GLB_TRG_OFFSET 16
+#define SPRD_DMA_GLB_DEST_CHN_MASK GENMASK(13, 8)
+#define SPRD_DMA_GLB_DEST_CHN_OFFSET 8
+#define SPRD_DMA_GLB_SRC_CHN_MASK GENMASK(5, 0)
/* SPRD_DMA_CHN_INTC register definition */
#define SPRD_DMA_INT_MASK GENMASK(4, 0)
#define SPRD_DMA_INT_CLR_OFFSET 24
@@ -118,6 +132,10 @@
#define SPRD_DMA_SRC_TRSF_STEP_OFFSET 0
#define SPRD_DMA_TRSF_STEP_MASK GENMASK(15, 0)
+/* define DMA channel mode & trigger mode mask */
+#define SPRD_DMA_CHN_MODE_MASK GENMASK(7, 0)
+#define SPRD_DMA_TRG_MODE_MASK GENMASK(7, 0)
/* define the DMA transfer step type */
#define SPRD_DMA_NONE_STEP 0
#define SPRD_DMA_BYTE_STEP 1
@@ -159,6 +177,7 @@ struct sprd_dma_chn_hw {
struct sprd_dma_desc {
struct virt_dma_desc vd;
struct sprd_dma_chn_hw chn_hw;
+	enum dma_transfer_direction dir;
};
/* dma channel description */
@@ -169,6 +188,8 @@ struct sprd_dma_chn {
struct dma_slave_config slave_cfg;
u32 chn_num;
u32 dev_id;
+	enum sprd_dma_chn_mode chn_mode;
+	enum sprd_dma_trg_mode trg_mode;
struct sprd_dma_desc *cur_desc;
};
@@ -205,6 +226,16 @@ static inline struct sprd_dma_desc *to_sprd_dma_desc(struct virt_dma_desc *vd)
return container_of(vd, struct sprd_dma_desc, vd);
}
+static void sprd_dma_glb_update(struct sprd_dma_dev *sdev, u32 reg,
+				u32 mask, u32 val)
+{
+	u32 orig = readl(sdev->glb_base + reg);
+	u32 tmp;
+
+	tmp = (orig & ~mask) | val;
+	writel(tmp, sdev->glb_base + reg);
+}
static void sprd_dma_chn_update(struct sprd_dma_chn *schan, u32 reg,
u32 mask, u32 val)
{
@@ -331,6 +362,17 @@ static void sprd_dma_stop_and_disable(struct sprd_dma_chn *schan)
sprd_dma_disable_chn(schan);
}
+static unsigned long sprd_dma_get_src_addr(struct sprd_dma_chn *schan)
+{
+	unsigned long addr, addr_high;
+
+	addr = readl(schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
+	addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_PTR) &
+		    SPRD_DMA_HIGH_ADDR_MASK;
+
+	return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
+}
static unsigned long sprd_dma_get_dst_addr(struct sprd_dma_chn *schan)
{
unsigned long addr, addr_high;
@@ -377,6 +419,49 @@ static enum sprd_dma_req_mode sprd_dma_get_req_type(struct sprd_dma_chn *schan)
return (frag_reg >> SPRD_DMA_REQ_MODE_OFFSET) & SPRD_DMA_REQ_MODE_MASK;
}
+static int sprd_dma_set_2stage_config(struct sprd_dma_chn *schan)
+{
+	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
+	u32 val, chn = schan->chn_num + 1;
+
+	switch (schan->chn_mode) {
+	case SPRD_DMA_SRC_CHN0:
+		val = chn & SPRD_DMA_GLB_SRC_CHN_MASK;
+		val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET;
+		val |= SPRD_DMA_GLB_2STAGE_EN;
+		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP1, val, val);
+		break;
+
+	case SPRD_DMA_SRC_CHN1:
+		val = chn & SPRD_DMA_GLB_SRC_CHN_MASK;
+		val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET;
+		val |= SPRD_DMA_GLB_2STAGE_EN;
+		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP2, val, val);
+		break;
+
+	case SPRD_DMA_DST_CHN0:
+		val = (chn << SPRD_DMA_GLB_DEST_CHN_OFFSET) &
+		      SPRD_DMA_GLB_DEST_CHN_MASK;
+		val |= SPRD_DMA_GLB_2STAGE_EN;
+		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP1, val, val);
+		break;
+
+	case SPRD_DMA_DST_CHN1:
+		val = (chn << SPRD_DMA_GLB_DEST_CHN_OFFSET) &
+		      SPRD_DMA_GLB_DEST_CHN_MASK;
+		val |= SPRD_DMA_GLB_2STAGE_EN;
+		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP2, val, val);
+		break;
+
+	default:
+		dev_err(sdev->dma_dev.dev, "invalid channel mode setting %d\n",
+			schan->chn_mode);
+		return -EINVAL;
+	}
+
+	return 0;
+}
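As a worked illustration of the group-register layout above (an editorial example, not part of the commit): assume a source channel of group 1 whose hardware channel number is 2 (so chn = 3) and trg_mode = SPRD_DMA_FRAG_DONE_TRG (1). The value programmed into SPRD_DMA_GLB_2STAGE_GRP1 is then:

	u32 val = (3 & SPRD_DMA_GLB_SRC_CHN_MASK)	  /* bits [5:0]: source channel number */
		| (BIT(1 - 1) << SPRD_DMA_GLB_TRG_OFFSET) /* bit 16: fragment-done trigger */
		| SPRD_DMA_GLB_2STAGE_EN;		  /* bit 24: 2-stage enable */

	/* val == 0x01010003. Because sprd_dma_glb_update() is called with
	 * mask == val, exactly these bits are cleared and rewritten, while
	 * the rest of the group register is preserved.
	 */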
static void sprd_dma_set_chn_config(struct sprd_dma_chn *schan,
struct sprd_dma_desc *sdesc)
{
@@ -410,6 +495,13 @@ static void sprd_dma_start(struct sprd_dma_chn *schan)
list_del(&vd->node);
schan->cur_desc = to_sprd_dma_desc(vd);
+	/*
+	 * Set 2-stage configuration if the channel starts one 2-stage
+	 * transfer.
+	 */
+	if (schan->chn_mode && sprd_dma_set_2stage_config(schan))
+		return;
+
/*
* Copy the DMA configuration from DMA descriptor to this hardware
* channel.
@@ -427,6 +519,7 @@ static void sprd_dma_stop(struct sprd_dma_chn *schan)
sprd_dma_stop_and_disable(schan);
sprd_dma_unset_uid(schan);
sprd_dma_clear_int(schan);
+	schan->cur_desc = NULL;
}
static bool sprd_dma_check_trans_done(struct sprd_dma_desc *sdesc,
@@ -450,7 +543,7 @@ static irqreturn_t dma_irq_handle(int irq, void *dev_id)
struct sprd_dma_desc *sdesc;
enum sprd_dma_req_mode req_type;
enum sprd_dma_int_type int_type;
-	bool trans_done = false;
+	bool trans_done = false, cyclic = false;
u32 i;
while (irq_status) {
@@ -465,13 +558,19 @@ static irqreturn_t dma_irq_handle(int irq, void *dev_id)
sdesc = schan->cur_desc;
-		/* Check if the dma request descriptor is done. */
-		trans_done = sprd_dma_check_trans_done(sdesc, int_type,
-						       req_type);
-		if (trans_done == true) {
-			vchan_cookie_complete(&sdesc->vd);
-			schan->cur_desc = NULL;
-			sprd_dma_start(schan);
-		}
+		/* cyclic mode schedule callback */
+		cyclic = schan->linklist.phy_addr ? true : false;
+		if (cyclic == true) {
+			vchan_cyclic_callback(&sdesc->vd);
+		} else {
+			/* Check if the dma request descriptor is done. */
+			trans_done = sprd_dma_check_trans_done(sdesc, int_type,
+							       req_type);
+			if (trans_done == true) {
+				vchan_cookie_complete(&sdesc->vd);
+				schan->cur_desc = NULL;
+				sprd_dma_start(schan);
+			}
+		}
spin_unlock(&schan->vc.lock);
}
@@ -534,7 +633,12 @@ static enum dma_status sprd_dma_tx_status(struct dma_chan *chan,
else
pos = 0;
} else if (schan->cur_desc && schan->cur_desc->vd.tx.cookie == cookie) {
-		pos = sprd_dma_get_dst_addr(schan);
+		struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);
+
+		if (sdesc->dir == DMA_DEV_TO_MEM)
+			pos = sprd_dma_get_dst_addr(schan);
+		else
+			pos = sprd_dma_get_src_addr(schan);
} else {
pos = 0;
}
@@ -593,6 +697,7 @@ static int sprd_dma_fill_desc(struct dma_chan *chan,
{
struct sprd_dma_dev *sdev = to_sprd_dma_dev(chan);
struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
+	enum sprd_dma_chn_mode chn_mode = schan->chn_mode;
u32 req_mode = (flags >> SPRD_DMA_REQ_SHIFT) & SPRD_DMA_REQ_MODE_MASK;
u32 int_mode = flags & SPRD_DMA_INT_MASK;
int src_datawidth, dst_datawidth, src_step, dst_step;
@@ -604,7 +709,16 @@ static int sprd_dma_fill_desc(struct dma_chan *chan,
dev_err(sdev->dma_dev.dev, "invalid source step\n");
return src_step;
}
-		dst_step = SPRD_DMA_NONE_STEP;
+		/*
+		 * For 2-stage transfer, destination channel step can not be 0,
+		 * since destination device is AON IRAM.
+		 */
+		if (chn_mode == SPRD_DMA_DST_CHN0 ||
+		    chn_mode == SPRD_DMA_DST_CHN1)
+			dst_step = src_step;
+		else
+			dst_step = SPRD_DMA_NONE_STEP;
} else {
dst_step = sprd_dma_get_step(slave_cfg->dst_addr_width);
if (dst_step < 0) {
@@ -674,13 +788,11 @@ static int sprd_dma_fill_desc(struct dma_chan *chan,
	/* link-list configuration */
	if (schan->linklist.phy_addr) {
-		if (sg_index == sglen - 1)
-			hw->frg_len |= SPRD_DMA_LLIST_END;
		hw->cfg |= SPRD_DMA_LINKLIST_EN;

		/* link-list index */
-		temp = (sg_index + 1) % sglen;
+		temp = sglen ? (sg_index + 1) % sglen : 0;
/* Next link-list configuration's physical address offset */
temp = temp * sizeof(*hw) + SPRD_DMA_CHN_SRC_ADDR;
/*
@@ -804,6 +916,8 @@ sprd_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
if (!sdesc)
return NULL;
+	sdesc->dir = dir;
for_each_sg(sgl, sg, sglen, i) {
len = sg_dma_len(sg);
@@ -831,6 +945,12 @@ sprd_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
}
}
+	/* Set channel mode and trigger mode for 2-stage transfer */
+	schan->chn_mode =
+		(flags >> SPRD_DMA_CHN_MODE_SHIFT) & SPRD_DMA_CHN_MODE_MASK;
+	schan->trg_mode =
+		(flags >> SPRD_DMA_TRG_MODE_SHIFT) & SPRD_DMA_TRG_MODE_MASK;
ret = sprd_dma_fill_desc(chan, &sdesc->chn_hw, 0, 0, src, dst, len,
dir, flags, slave_cfg);
if (ret) {
@@ -847,9 +967,6 @@ static int sprd_dma_slave_config(struct dma_chan *chan,
struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
struct dma_slave_config *slave_cfg = &schan->slave_cfg;
-	if (!is_slave_direction(config->direction))
-		return -EINVAL;
memcpy(slave_cfg, config, sizeof(*config));
return 0;
}
@@ -1109,4 +1226,5 @@ module_platform_driver(sprd_dma_driver);
MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("DMA driver for Spreadtrum");
MODULE_AUTHOR("Baolin Wang <baolin.wang@spreadtrum.com>");
MODULE_AUTHOR("Eric Long <eric.long@spreadtrum.com>");
MODULE_ALIAS("platform:sprd-dma");

include/linux/dma/sprd-dma.h

@@ -3,9 +3,65 @@
#ifndef _SPRD_DMA_H_
#define _SPRD_DMA_H_
-#define SPRD_DMA_REQ_SHIFT 16
-#define SPRD_DMA_FLAGS(req_mode, int_type) \
-	((req_mode) << SPRD_DMA_REQ_SHIFT | (int_type))
+#define SPRD_DMA_REQ_SHIFT 8
+#define SPRD_DMA_TRG_MODE_SHIFT 16
+#define SPRD_DMA_CHN_MODE_SHIFT 24
+#define SPRD_DMA_FLAGS(chn_mode, trg_mode, req_mode, int_type) \
+	((chn_mode) << SPRD_DMA_CHN_MODE_SHIFT | \
+	 (trg_mode) << SPRD_DMA_TRG_MODE_SHIFT | \
+	 (req_mode) << SPRD_DMA_REQ_SHIFT | (int_type))
+/*
+ * The Spreadtrum DMA controller supports channel 2-stage transfer, which means
+ * we can request 2 dma channels, one as the source channel and another as the
+ * destination channel. Each channel is independent, and has its own
+ * configurations. Once the source channel's transaction is done, it will
+ * trigger the destination channel's transaction automatically by hardware
+ * signal.
+ *
+ * To support 2-stage transfer, we must configure the channel mode and trigger
+ * mode as defined below.
+ */
+/*
+ * enum sprd_dma_chn_mode: define the DMA channel mode for 2-stage transfer
+ * @SPRD_DMA_CHN_MODE_NONE: No channel mode setting, which means the channel
+ * does not support 2-stage transfer.
+ * @SPRD_DMA_SRC_CHN0: Channel used as source channel 0.
+ * @SPRD_DMA_SRC_CHN1: Channel used as source channel 1.
+ * @SPRD_DMA_DST_CHN0: Channel used as destination channel 0.
+ * @SPRD_DMA_DST_CHN1: Channel used as destination channel 1.
+ *
+ * Now the DMA controller can support 2 groups of 2-stage transfers.
+ */
+enum sprd_dma_chn_mode {
+	SPRD_DMA_CHN_MODE_NONE,
+	SPRD_DMA_SRC_CHN0,
+	SPRD_DMA_SRC_CHN1,
+	SPRD_DMA_DST_CHN0,
+	SPRD_DMA_DST_CHN1,
+};
+/*
+ * enum sprd_dma_trg_mode: define the DMA channel trigger mode for 2-stage
+ * transfer
+ * @SPRD_DMA_NO_TRG: No trigger setting.
+ * @SPRD_DMA_FRAG_DONE_TRG: Trigger the transaction of destination channel
+ * automatically once the source channel's fragment request is done.
+ * @SPRD_DMA_BLOCK_DONE_TRG: Trigger the transaction of destination channel
+ * automatically once the source channel's block request is done.
+ * @SPRD_DMA_TRANS_DONE_TRG: Trigger the transaction of destination channel
+ * automatically once the source channel's transfer request is done.
+ * @SPRD_DMA_LIST_DONE_TRG: Trigger the transaction of destination channel
+ * automatically once the source channel's link-list request is done.
+ */
+enum sprd_dma_trg_mode {
+	SPRD_DMA_NO_TRG,
+	SPRD_DMA_FRAG_DONE_TRG,
+	SPRD_DMA_BLOCK_DONE_TRG,
+	SPRD_DMA_TRANS_DONE_TRG,
+	SPRD_DMA_LIST_DONE_TRG,
+};
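To tie the definitions together, here is a minimal client-side sketch, not part of the commit: the function name and transfer parameters are hypothetical, and SPRD_DMA_FRAG_REQ / SPRD_DMA_TRANS_INT are assumed from this header's request-mode and interrupt-type enums. It prepares the source leg of a group-1 2-stage transfer through the standard dmaengine API; the driver's prep_slave_sg() extracts chn_mode and trg_mode from the flags word as shown in the first file above.

	#include <linux/dmaengine.h>
	#include <linux/dma/sprd-dma.h>

	/* Illustrative only: start the source leg of a 2-stage transfer on
	 * group 1. "chan" must be a channel requested from the sprd-dma
	 * controller; buf/len describe a DMA-mapped buffer.
	 */
	static int example_start_2stage_src(struct dma_chan *chan,
					    dma_addr_t buf, size_t len)
	{
		struct dma_slave_config cfg = {
			.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
			.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
		};
		struct dma_async_tx_descriptor *desc;
		/* Channel mode and trigger mode in the high bytes, request
		 * mode and interrupt type below them.
		 */
		unsigned long flags = SPRD_DMA_FLAGS(SPRD_DMA_SRC_CHN0,
						     SPRD_DMA_FRAG_DONE_TRG,
						     SPRD_DMA_FRAG_REQ,
						     SPRD_DMA_TRANS_INT);
		int ret;

		ret = dmaengine_slave_config(chan, &cfg);
		if (ret)
			return ret;

		desc = dmaengine_prep_slave_single(chan, buf, len,
						   DMA_MEM_TO_DEV, flags);
		if (!desc)
			return -ENOMEM;

		dmaengine_submit(desc);
		dma_async_issue_pending(chan);
		return 0;
	}

The destination leg would be prepared the same way on a second channel, with SPRD_DMA_DST_CHN0 and SPRD_DMA_NO_TRG; it is then started by the hardware trigger rather than by software.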
/*
* enum sprd_dma_req_mode: define the DMA request mode