dmaengine: dw: enable and disable controller when needed
Enable the controller automatically when the first user requests a channel and
disable it when the last user is gone.

Signed-off-by: Andy Shevchenko <andriy.shevchenko@linux.intel.com>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
Parent: 2540f74b18
Commit: 99d9bf4ed2
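In short, the patch makes controller power state a function of a per-channel
bitmask: the first allocated channel switches the DMA block on, and freeing the
last channel switches it off. Below is a standalone userspace sketch of that
accounting (controller_on()/controller_off() and the channel masks are
illustrative placeholders, not driver code):

    /* Minimal sketch of the first-user/last-user accounting used by the patch. */
    #include <stdint.h>
    #include <stdio.h>

    static uint8_t in_use;                  /* one bit per allocated channel */

    static void controller_on(void)  { puts("controller enabled"); }
    static void controller_off(void) { puts("controller disabled"); }

    static void alloc_chan(uint8_t chan_mask)
    {
            if (!in_use)                    /* first user powers the block up */
                    controller_on();
            in_use |= chan_mask;
    }

    static void free_chan(uint8_t chan_mask)
    {
            in_use &= ~chan_mask;
            if (!in_use)                    /* last user powers it back down */
                    controller_off();
    }

    int main(void)
    {
            alloc_chan(1 << 0);             /* enables the controller */
            alloc_chan(1 << 3);             /* already on, only the mask changes */
            free_chan(1 << 0);
            free_chan(1 << 3);              /* last user gone -> disabled */
            return 0;
    }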
drivers/dma/dw/core.c
@@ -1094,6 +1094,31 @@ static void dwc_issue_pending(struct dma_chan *chan)
 	spin_unlock_irqrestore(&dwc->lock, flags);
 }
 
+/*----------------------------------------------------------------------*/
+
+static void dw_dma_off(struct dw_dma *dw)
+{
+	int i;
+
+	dma_writel(dw, CFG, 0);
+
+	channel_clear_bit(dw, MASK.XFER, dw->all_chan_mask);
+	channel_clear_bit(dw, MASK.SRC_TRAN, dw->all_chan_mask);
+	channel_clear_bit(dw, MASK.DST_TRAN, dw->all_chan_mask);
+	channel_clear_bit(dw, MASK.ERROR, dw->all_chan_mask);
+
+	while (dma_readl(dw, CFG) & DW_CFG_DMA_EN)
+		cpu_relax();
+
+	for (i = 0; i < dw->dma.chancnt; i++)
+		dw->chan[i].initialized = false;
+}
+
+static void dw_dma_on(struct dw_dma *dw)
+{
+	dma_writel(dw, CFG, DW_CFG_DMA_EN);
+}
+
 static int dwc_alloc_chan_resources(struct dma_chan *chan)
 {
 	struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
@@ -1118,6 +1143,11 @@ static int dwc_alloc_chan_resources(struct dma_chan *chan)
 	 * doesn't mean what you think it means), and status writeback.
 	 */
 
+	/* Enable controller here if needed */
+	if (!dw->in_use)
+		dw_dma_on(dw);
+	dw->in_use |= dwc->mask;
+
 	spin_lock_irqsave(&dwc->lock, flags);
 	i = dwc->descs_allocated;
 	while (dwc->descs_allocated < NR_DESCS_PER_CHANNEL) {
@@ -1182,6 +1212,11 @@ static void dwc_free_chan_resources(struct dma_chan *chan)
 
 	spin_unlock_irqrestore(&dwc->lock, flags);
 
+	/* Disable controller in case it was a last user */
+	dw->in_use &= ~dwc->mask;
+	if (!dw->in_use)
+		dw_dma_off(dw);
+
 	list_for_each_entry_safe(desc, _desc, &list, desc_node) {
 		dev_vdbg(chan2dev(chan), "  freeing descriptor %p\n", desc);
 		dma_pool_free(dw->desc_pool, desc, desc->txd.phys);
@@ -1452,29 +1487,6 @@ EXPORT_SYMBOL(dw_dma_cyclic_free);
 
 /*----------------------------------------------------------------------*/
 
-static void dw_dma_off(struct dw_dma *dw)
-{
-	int i;
-
-	dma_writel(dw, CFG, 0);
-
-	channel_clear_bit(dw, MASK.XFER, dw->all_chan_mask);
-	channel_clear_bit(dw, MASK.SRC_TRAN, dw->all_chan_mask);
-	channel_clear_bit(dw, MASK.DST_TRAN, dw->all_chan_mask);
-	channel_clear_bit(dw, MASK.ERROR, dw->all_chan_mask);
-
-	while (dma_readl(dw, CFG) & DW_CFG_DMA_EN)
-		cpu_relax();
-
-	for (i = 0; i < dw->dma.chancnt; i++)
-		dw->chan[i].initialized = false;
-}
-
-static void dw_dma_on(struct dw_dma *dw)
-{
-	dma_writel(dw, CFG, DW_CFG_DMA_EN);
-}
-
 int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata)
 {
 	struct dw_dma *dw;
@@ -1648,8 +1660,6 @@ int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata)
 	dw->dma.device_tx_status = dwc_tx_status;
 	dw->dma.device_issue_pending = dwc_issue_pending;
 
-	dw_dma_on(dw);
-
 	err = dma_async_device_register(&dw->dma);
 	if (err)
 		goto err_dma_register;

drivers/dma/dw/regs.h
@@ -281,6 +281,7 @@ struct dw_dma {
 	/* channels */
 	struct dw_dma_chan	*chan;
 	u8			all_chan_mask;
+	u8			in_use;
 
 	/* hardware configuration */
 	unsigned char		nr_masters;
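For context, these driver callbacks are invoked by the dmaengine core: a client
grabbing a channel ends up in dwc_alloc_chan_resources() (which now enables the
controller for the first user), and releasing it ends up in
dwc_free_chan_resources() (which disables it once the last user is gone). A
hedged client-side sketch, with "mydev" and the "rx" channel name as
placeholders:

    #include <linux/dmaengine.h>
    #include <linux/errno.h>

    static int example_claim_and_release(struct device *mydev)
    {
            struct dma_chan *chan;

            /* Requesting a channel triggers device_alloc_chan_resources(). */
            chan = dma_request_slave_channel(mydev, "rx");
            if (!chan)
                    return -ENODEV;

            /* ... prepare and submit transfers here ... */

            /* Releasing it triggers device_free_chan_resources(). */
            dma_release_channel(chan);
            return 0;
    }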