@@ -334,6 +334,7 @@ dw_edma_device_transfer(struct dw_edma_transfer *xfer)
struct dw_edma_chunk *chunk;
struct dw_edma_burst *burst;
struct dw_edma_desc *desc;
+ bool read = false;
u32 cnt = 0;
int i;
 
@@ -424,7 +425,36 @@ dw_edma_device_transfer(struct dw_edma_transfer *xfer)
chunk->ll_region.sz += burst->sz;
desc->alloc_sz += burst->sz;
 
- if (chan->dir == EDMA_DIR_WRITE) {
+ /****************************************************************
+  *
+  *        Root Complex                           Endpoint
+  * +-----------------------+             +----------------------+
+  * |                       |    TX CH    |                      |
+  * |                       |             |                      |
+  * |      DEV_TO_MEM       <-------------+     MEM_TO_DEV       |
+  * |                       |             |                      |
+  * |                       |             |                      |
+  * |      MEM_TO_DEV       +------------->     DEV_TO_MEM       |
+  * |                       |             |                      |
+  * |                       |    RX CH    |                      |
+  * +-----------------------+             +----------------------+
+  *
+  * If eDMA is controlled by the Root complex, TX channel
+  * (EDMA_DIR_WRITE) is used for memory read (DEV_TO_MEM) and RX
+  * channel (EDMA_DIR_READ) is used for memory write (MEM_TO_DEV).
+  *
+  * If eDMA is controlled by the endpoint, RX channel
+  * (EDMA_DIR_READ) is used for memory read (DEV_TO_MEM) and TX
+  * channel (EDMA_DIR_WRITE) is used for memory write (MEM_TO_DEV).
+  *
+  ****************************************************************/
+
+ if ((dir == DMA_DEV_TO_MEM && chan->dir == EDMA_DIR_READ) ||
+ (dir == DMA_DEV_TO_MEM && chan->dir == EDMA_DIR_WRITE))
+ read = true;
+
+ /* Program the source and destination addresses for DMA read/write */
+ if (read) {
burst->sar = src_addr;
if (xfer->type == EDMA_XFER_CYCLIC) {
burst->dar = xfer->xfer.cyclic.paddr;
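
For reference, here is a small stand-alone sketch, outside the kernel, of the decision the new condition makes. The enums below are simplified stand-ins for the kernel's enum dma_transfer_direction and the driver's enum dw_edma_dir, and is_read() is a made-up helper name used only for illustration; it walks the four (direction, channel) pairings from the diagram above. Note that, as written in the hunk, both halves of the || test dir == DMA_DEV_TO_MEM, so read effectively tracks whether the dmaengine direction is DEV_TO_MEM; the eDMA channel direction only decides which channel carries the transfer.

/*
 * Stand-alone illustration (not kernel code). The enums are minimal
 * stand-ins for enum dma_transfer_direction and enum dw_edma_dir;
 * is_read() is a made-up helper that mirrors the condition added above.
 */
#include <stddef.h>
#include <stdbool.h>
#include <stdio.h>

enum dma_transfer_direction { DMA_MEM_TO_DEV, DMA_DEV_TO_MEM };
enum dw_edma_dir { EDMA_DIR_WRITE, EDMA_DIR_READ };

static bool is_read(enum dma_transfer_direction dir, enum dw_edma_dir chan_dir)
{
	/* Same test as in dw_edma_device_transfer() above. */
	return (dir == DMA_DEV_TO_MEM && chan_dir == EDMA_DIR_READ) ||
	       (dir == DMA_DEV_TO_MEM && chan_dir == EDMA_DIR_WRITE);
}

int main(void)
{
	/*
	 * The four (transfer direction, eDMA channel) pairings from the
	 * diagram: the first two occur when the Root Complex drives the
	 * eDMA, the last two when the Endpoint drives it.
	 */
	static const struct {
		const char *label;
		enum dma_transfer_direction dir;
		enum dw_edma_dir chan_dir;
	} cases[] = {
		{ "RC: DEV_TO_MEM on TX (EDMA_DIR_WRITE)", DMA_DEV_TO_MEM, EDMA_DIR_WRITE },
		{ "RC: MEM_TO_DEV on RX (EDMA_DIR_READ) ", DMA_MEM_TO_DEV, EDMA_DIR_READ },
		{ "EP: DEV_TO_MEM on RX (EDMA_DIR_READ) ", DMA_DEV_TO_MEM, EDMA_DIR_READ },
		{ "EP: MEM_TO_DEV on TX (EDMA_DIR_WRITE)", DMA_MEM_TO_DEV, EDMA_DIR_WRITE },
	};
	size_t i;

	for (i = 0; i < sizeof(cases) / sizeof(cases[0]); i++)
		printf("%s -> read = %s\n", cases[i].label,
		       is_read(cases[i].dir, cases[i].chan_dir) ? "true" : "false");

	return 0;
}

Built with any C compiler, this prints read = true only for the two DEV_TO_MEM rows, which matches the read branch above programming burst->sar from src_addr and burst->dar from the memory side (cyclic paddr or dst_addr).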