@@ -778,6 +778,51 @@ static void xilinx_dma_free_chan_resources(struct dma_chan *dchan)
 	}
 }
 
+/**
+ * xilinx_dma_get_residue - Compute residue for a given descriptor
+ * @chan: Driver specific dma channel
+ * @desc: dma transaction descriptor
+ *
+ * Return: The number of residue bytes for the descriptor.
+ */
+static u32 xilinx_dma_get_residue(struct xilinx_dma_chan *chan,
+				  struct xilinx_dma_tx_descriptor *desc)
+{
+	struct xilinx_cdma_tx_segment *cdma_seg;
+	struct xilinx_axidma_tx_segment *axidma_seg;
+	struct xilinx_cdma_desc_hw *cdma_hw;
+	struct xilinx_axidma_desc_hw *axidma_hw;
+	struct list_head *entry;
+	u32 residue = 0;
+
+	/*
+	 * VDMA and simple mode do not support residue reporting, so the
+	 * residue field will always be 0.
+	 */
+	if (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA || !chan->has_sg)
+		return residue;
+
+	list_for_each(entry, &desc->segments) {
+		if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) {
+			cdma_seg = list_entry(entry,
+					      struct xilinx_cdma_tx_segment,
+					      node);
+			cdma_hw = &cdma_seg->hw;
+			residue += (cdma_hw->control - cdma_hw->status) &
+				   chan->xdev->max_buffer_len;
+		} else {
+			axidma_seg = list_entry(entry,
+						struct xilinx_axidma_tx_segment,
+						node);
+			axidma_hw = &axidma_seg->hw;
+			residue += (axidma_hw->control - axidma_hw->status) &
+				   chan->xdev->max_buffer_len;
+		}
+	}
+
+	return residue;
+}
+
 /**
  * xilinx_dma_chan_handle_cyclic - Cyclic dma callback
  * @chan: Driver specific dma channel
@@ -959,33 +1004,22 @@ static enum dma_status xilinx_dma_tx_status(struct dma_chan *dchan,
 {
 	struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
 	struct xilinx_dma_tx_descriptor *desc;
-	struct xilinx_axidma_tx_segment *segment;
-	struct xilinx_axidma_desc_hw *hw;
 	enum dma_status ret;
 	unsigned long flags;
-	u32 residue = 0;
 
 	ret = dma_cookie_status(dchan, cookie, txstate);
 	if (ret == DMA_COMPLETE || !txstate)
 		return ret;
 
-	if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
-		spin_lock_irqsave(&chan->lock, flags);
+	spin_lock_irqsave(&chan->lock, flags);
 
-		desc = list_last_entry(&chan->active_list,
-				       struct xilinx_dma_tx_descriptor, node);
-		if (chan->has_sg) {
-			list_for_each_entry(segment, &desc->segments, node) {
-				hw = &segment->hw;
-				residue += (hw->control - hw->status) &
-					   chan->xdev->max_buffer_len;
-			}
-		}
-		spin_unlock_irqrestore(&chan->lock, flags);
+	desc = list_last_entry(&chan->active_list,
+			       struct xilinx_dma_tx_descriptor, node);
+	chan->residue = xilinx_dma_get_residue(chan, desc);
 
-		chan->residue = residue;
-		dma_set_residue(txstate, chan->residue);
-	}
+	spin_unlock_irqrestore(&chan->lock, flags);
+
+	dma_set_residue(txstate, chan->residue);
 
 	return ret;
 }
@@ -2681,14 +2715,17 @@ static int xilinx_dma_probe(struct platform_device *pdev)
 		xdev->common.device_prep_slave_sg = xilinx_dma_prep_slave_sg;
 		xdev->common.device_prep_dma_cyclic =
 					  xilinx_dma_prep_dma_cyclic;
-		/* Residue calculation is supported by only AXI DMA */
+		/* Residue calculation is supported by only AXI DMA and CDMA */
 		xdev->common.residue_granularity =
 					  DMA_RESIDUE_GRANULARITY_SEGMENT;
 	} else if (xdev->dma_config->dmatype == XDMA_TYPE_CDMA) {
 		dma_cap_set(DMA_MEMCPY, xdev->common.cap_mask);
 		dma_cap_set(DMA_SG, xdev->common.cap_mask);
 		xdev->common.device_prep_dma_memcpy = xilinx_cdma_prep_memcpy;
 		xdev->common.device_prep_dma_sg = xilinx_cdma_prep_sg;
+		/* Residue calculation is supported by only AXI DMA and CDMA */
+		xdev->common.residue_granularity =
+					  DMA_RESIDUE_GRANULARITY_SEGMENT;
 	} else {
 		xdev->common.device_prep_interleaved_dma =
 				xilinx_vdma_dma_prep_interleaved;
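For context, a DMA client never calls xilinx_dma_get_residue() directly; it sees the residue this patch computes through the generic dmaengine API. The following is a minimal, hypothetical client sketch (the helper name example_query_residue is illustrative and not part of the patch), assuming a channel backed by AXI DMA or AXI CDMA operating in SG mode:

#include <linux/dmaengine.h>

/*
 * Hypothetical client helper (not part of this patch): report how many
 * bytes of the transfer identified by @cookie are still outstanding on
 * @chan.
 */
static u32 example_query_residue(struct dma_chan *chan, dma_cookie_t cookie)
{
	struct dma_tx_state state;
	enum dma_status status;

	/* Invokes the driver's device_tx_status, i.e. xilinx_dma_tx_status() */
	status = dmaengine_tx_status(chan, cookie, &state);
	if (status == DMA_COMPLETE)
		return 0;	/* transfer finished, nothing left */

	/*
	 * With DMA_RESIDUE_GRANULARITY_SEGMENT the value is only refreshed
	 * at descriptor-segment boundaries, so treat it as an upper bound
	 * on the bytes still to be transferred.
	 */
	return state.residue;
}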