@@ -573,29 +573,21 @@
 			}
 		}
 		spin_unlock(&schan->vc.lock);
 	}
 
 	return IRQ_HANDLED;
 }
 
 static int sprd_dma_alloc_chan_resources(struct dma_chan *chan)
 {
-	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
-	int ret;
-
-	ret = pm_runtime_get_sync(chan->device->dev);
-	if (ret < 0)
-		return ret;
-
-	schan->dev_id = SPRD_DMA_SOFTWARE_UID;
-	return 0;
+	return pm_runtime_get_sync(chan->device->dev);
 }
 
 static void sprd_dma_free_chan_resources(struct dma_chan *chan)
 {
 	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
 	unsigned long flags;
 
 	spin_lock_irqsave(&schan->vc.lock, flags);
 	sprd_dma_stop(schan);
 	spin_unlock_irqrestore(&schan->vc.lock, flags);
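The hunk above and the hunk below work together: sprd_dma_alloc_chan_resources() no longer hard-codes schan->dev_id to SPRD_DMA_SOFTWARE_UID, and sprd_dma_filter_fn() instead records the slave id passed as the filter parameter. A minimal client-side sketch of the effect, under these assumptions: request_sprd_chan() is a hypothetical helper that is not part of the patch, and it calls the (normally static) filter directly for illustration, whereas in-tree the filter is typically reached through the driver's OF DMA translation path.

#include <linux/dmaengine.h>

/* Hypothetical helper, for illustration only. */
static struct dma_chan *request_sprd_chan(u32 slave_id)
{
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/*
	 * With the change below, sprd_dma_filter_fn() stores *param in
	 * schan->dev_id and accepts the channel, instead of matching
	 * param against the channel number.
	 */
	return dma_request_channel(mask, sprd_dma_filter_fn, &slave_id);
}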
@@ -1014,27 +1006,24 @@
 static void sprd_dma_free_desc(struct virt_dma_desc *vd)
 {
 	struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);
 
 	kfree(sdesc);
 }
 
 static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param)
 {
 	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
-	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
-	u32 req = *(u32 *)param;
+	u32 slave_id = *(u32 *)param;
 
-	if (req < sdev->total_chns)
-		return req == schan->chn_num + 1;
-	else
-		return false;
+	schan->dev_id = slave_id;
+	return true;
 }
 
 static int sprd_dma_probe(struct platform_device *pdev)
 {
 	struct device_node *np = pdev->dev.of_node;
 	struct sprd_dma_dev *sdev;
 	struct sprd_dma_chn *dma_chn;
 	struct resource *res;
 	u32 chn_count;
 	int ret, i;