@@ -26,6 +26,9 @@
 #include <linux/intel_mid_dma.h>
 #include <linux/pci.h>
 
+#define RX_BUSY 0
+#define TX_BUSY 1
+
 struct mid_dma {
 	struct intel_mid_dma_slave	dmas_tx;
 	struct intel_mid_dma_slave	dmas_rx;
@@ -98,15 +101,14 @@ static void mid_spi_dma_exit(struct dw_spi *dws)
 }
 
 /*
- * dws->dma_chan_done is cleared before the dma transfer starts,
- * callback for rx/tx channel will each increment it by 1.
- * Reaching 2 means the whole spi transaction is done.
+ * dws->dma_chan_busy is set before the dma transfer starts, callback for tx
+ * channel will clear a corresponding bit.
  */
-static void dw_spi_dma_done(void *arg)
+static void dw_spi_dma_tx_done(void *arg)
 {
 	struct dw_spi *dws = arg;
 
-	if (++dws->dma_chan_done != 2)
+	if (test_and_clear_bit(TX_BUSY, &dws->dma_chan_busy) & BIT(RX_BUSY))
 		return;
 	dw_spi_xfer_done(dws);
 }
@@ -116,6 +118,9 @@ static struct dma_async_tx_descriptor *dw_spi_dma_prepare_tx(struct dw_spi *dws)
 	struct dma_slave_config txconf;
 	struct dma_async_tx_descriptor *txdesc;
 
+	if (!dws->tx_dma)
+		return NULL;
+
 	txconf.direction = DMA_MEM_TO_DEV;
 	txconf.dst_addr = dws->dma_addr;
 	txconf.dst_maxburst = LNW_DMA_MSIZE_16;
@@ -134,17 +139,33 @@ static struct dma_async_tx_descriptor *dw_spi_dma_prepare_tx(struct dw_spi *dws)
 				1,
 				DMA_MEM_TO_DEV,
 				DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
-	txdesc->callback = dw_spi_dma_done;
+	txdesc->callback = dw_spi_dma_tx_done;
 	txdesc->callback_param = dws;
 
 	return txdesc;
 }
 
+/*
+ * dws->dma_chan_busy is set before the dma transfer starts, callback for rx
+ * channel will clear a corresponding bit.
+ */
+static void dw_spi_dma_rx_done(void *arg)
+{
+	struct dw_spi *dws = arg;
+
+	if (test_and_clear_bit(RX_BUSY, &dws->dma_chan_busy) & BIT(TX_BUSY))
+		return;
+	dw_spi_xfer_done(dws);
+}
+
 static struct dma_async_tx_descriptor *dw_spi_dma_prepare_rx(struct dw_spi *dws)
 {
 	struct dma_slave_config rxconf;
 	struct dma_async_tx_descriptor *rxdesc;
 
+	if (!dws->rx_dma)
+		return NULL;
+
 	rxconf.direction = DMA_DEV_TO_MEM;
 	rxconf.src_addr = dws->dma_addr;
 	rxconf.src_maxburst = LNW_DMA_MSIZE_16;
@@ -163,7 +184,7 @@ static struct dma_async_tx_descriptor *dw_spi_dma_prepare_rx(struct dw_spi *dws)
 				1,
 				DMA_DEV_TO_MEM,
 				DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
-	rxdesc->callback = dw_spi_dma_done;
+	rxdesc->callback = dw_spi_dma_rx_done;
 	rxdesc->callback_param = dws;
 
 	return rxdesc;
@@ -195,20 +216,24 @@ static int mid_spi_dma_transfer(struct dw_spi *dws, int cs_change)
 	if (cs_change)
 		dw_spi_dma_setup(dws);
 
-	dws->dma_chan_done = 0;
-
 	/* 2. Prepare the TX dma transfer */
 	txdesc = dw_spi_dma_prepare_tx(dws);
 
 	/* 3. Prepare the RX dma transfer */
 	rxdesc = dw_spi_dma_prepare_rx(dws);
 
 	/* rx must be started before tx due to spi instinct */
-	dmaengine_submit(rxdesc);
-	dma_async_issue_pending(dws->rxchan);
-
-	dmaengine_submit(txdesc);
-	dma_async_issue_pending(dws->txchan);
+	if (rxdesc) {
+		set_bit(RX_BUSY, &dws->dma_chan_busy);
+		dmaengine_submit(rxdesc);
+		dma_async_issue_pending(dws->rxchan);
+	}
+
+	if (txdesc) {
+		set_bit(TX_BUSY, &dws->dma_chan_busy);
+		dmaengine_submit(txdesc);
+		dma_async_issue_pending(dws->txchan);
+	}
 
 	return 0;
 }
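
For readers unfamiliar with the pattern, below is a minimal, self-contained userspace sketch of the bookkeeping the new comments describe: both bits are set before the DMA transfer starts, each callback clears its own bit, and only the callback that clears the last remaining bit invokes the completion hook. The names dma_chan_busy, RX_BUSY and TX_BUSY mirror the patch; fetch_and_clear_bit() is a hypothetical helper built on C11 atomics, not the kernel bitops API, and it returns the whole previous word so the `& BIT(other)` check can inspect the other channel's bit. A channel whose prepare routine returns NULL never gets its bit set, so in the half-duplex case the other callback completes the transfer alone.

#include <stdatomic.h>
#include <stdio.h>

#define RX_BUSY	0
#define TX_BUSY	1
#define BIT(nr)	(1UL << (nr))

static atomic_ulong dma_chan_busy;

/* Hypothetical stand-in for an atomic bit clear: clears one bit and
 * returns the previous word so the caller can test the other bit. */
static unsigned long fetch_and_clear_bit(int nr, atomic_ulong *word)
{
	return atomic_fetch_and(word, ~BIT(nr));
}

static void xfer_done(void)
{
	printf("whole SPI transaction done\n");
}

/* Each callback clears its own bit; whichever one clears the last
 * remaining bit signals completion of the whole transfer. */
static void tx_done(void)
{
	if (fetch_and_clear_bit(TX_BUSY, &dma_chan_busy) & BIT(RX_BUSY))
		return;	/* RX channel still busy */
	xfer_done();
}

static void rx_done(void)
{
	if (fetch_and_clear_bit(RX_BUSY, &dma_chan_busy) & BIT(TX_BUSY))
		return;	/* TX channel still busy */
	xfer_done();
}

int main(void)
{
	/* Both bits are set before the transfer starts, mirroring the
	 * set_bit() calls added in mid_spi_dma_transfer(). */
	atomic_fetch_or(&dma_chan_busy, BIT(RX_BUSY) | BIT(TX_BUSY));

	tx_done();	/* first completion: RX still busy, nothing printed */
	rx_done();	/* last completion: prints the done message */
	return 0;
}

Compared with the old dma_chan_done counter, which assumed exactly two callbacks would always fire, per-channel busy bits let a transfer complete correctly even when only one of the two DMA descriptors was submitted.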