0001 // SPDX-License-Identifier: GPL-2.0-or-later
0002 /*
0003  * DMA driver for Xilinx ZynqMP DMA Engine
0004  *
0005  * Copyright (C) 2016 Xilinx, Inc. All rights reserved.
0006  */
0007 
0008 #include <linux/bitops.h>
0009 #include <linux/dma-mapping.h>
0010 #include <linux/init.h>
0011 #include <linux/interrupt.h>
0012 #include <linux/io.h>
0013 #include <linux/module.h>
0014 #include <linux/of_dma.h>
0015 #include <linux/of_platform.h>
0016 #include <linux/slab.h>
0017 #include <linux/clk.h>
0018 #include <linux/io-64-nonatomic-lo-hi.h>
0019 #include <linux/pm_runtime.h>
0020 
0021 #include "../dmaengine.h"
0022 
0023 /* Register Offsets */
0024 #define ZYNQMP_DMA_ISR          0x100
0025 #define ZYNQMP_DMA_IMR          0x104
0026 #define ZYNQMP_DMA_IER          0x108
0027 #define ZYNQMP_DMA_IDS          0x10C
0028 #define ZYNQMP_DMA_CTRL0        0x110
0029 #define ZYNQMP_DMA_CTRL1        0x114
0030 #define ZYNQMP_DMA_DATA_ATTR        0x120
0031 #define ZYNQMP_DMA_DSCR_ATTR        0x124
0032 #define ZYNQMP_DMA_SRC_DSCR_WRD0    0x128
0033 #define ZYNQMP_DMA_SRC_DSCR_WRD1    0x12C
0034 #define ZYNQMP_DMA_SRC_DSCR_WRD2    0x130
0035 #define ZYNQMP_DMA_SRC_DSCR_WRD3    0x134
0036 #define ZYNQMP_DMA_DST_DSCR_WRD0    0x138
0037 #define ZYNQMP_DMA_DST_DSCR_WRD1    0x13C
0038 #define ZYNQMP_DMA_DST_DSCR_WRD2    0x140
0039 #define ZYNQMP_DMA_DST_DSCR_WRD3    0x144
0040 #define ZYNQMP_DMA_SRC_START_LSB    0x158
0041 #define ZYNQMP_DMA_SRC_START_MSB    0x15C
0042 #define ZYNQMP_DMA_DST_START_LSB    0x160
0043 #define ZYNQMP_DMA_DST_START_MSB    0x164
0044 #define ZYNQMP_DMA_TOTAL_BYTE       0x188
0045 #define ZYNQMP_DMA_RATE_CTRL        0x18C
0046 #define ZYNQMP_DMA_IRQ_SRC_ACCT     0x190
0047 #define ZYNQMP_DMA_IRQ_DST_ACCT     0x194
0048 #define ZYNQMP_DMA_CTRL2        0x200
0049 
0050 /* Interrupt registers bit field definitions */
0051 #define ZYNQMP_DMA_DONE         BIT(10)
0052 #define ZYNQMP_DMA_AXI_WR_DATA      BIT(9)
0053 #define ZYNQMP_DMA_AXI_RD_DATA      BIT(8)
0054 #define ZYNQMP_DMA_AXI_RD_DST_DSCR  BIT(7)
0055 #define ZYNQMP_DMA_AXI_RD_SRC_DSCR  BIT(6)
0056 #define ZYNQMP_DMA_IRQ_DST_ACCT_ERR BIT(5)
0057 #define ZYNQMP_DMA_IRQ_SRC_ACCT_ERR BIT(4)
0058 #define ZYNQMP_DMA_BYTE_CNT_OVRFL   BIT(3)
0059 #define ZYNQMP_DMA_DST_DSCR_DONE    BIT(2)
0060 #define ZYNQMP_DMA_INV_APB      BIT(0)
0061 
0062 /* Control 0 register bit field definitions */
0063 #define ZYNQMP_DMA_OVR_FETCH        BIT(7)
0064 #define ZYNQMP_DMA_POINT_TYPE_SG    BIT(6)
0065 #define ZYNQMP_DMA_RATE_CTRL_EN     BIT(3)
0066 
0067 /* Control 1 register bit field definitions */
0068 #define ZYNQMP_DMA_SRC_ISSUE        GENMASK(4, 0)
0069 
0070 /* Data Attribute register bit field definitions */
0071 #define ZYNQMP_DMA_ARBURST      GENMASK(27, 26)
0072 #define ZYNQMP_DMA_ARCACHE      GENMASK(25, 22)
0073 #define ZYNQMP_DMA_ARCACHE_OFST     22
0074 #define ZYNQMP_DMA_ARQOS        GENMASK(21, 18)
0075 #define ZYNQMP_DMA_ARQOS_OFST       18
0076 #define ZYNQMP_DMA_ARLEN        GENMASK(17, 14)
0077 #define ZYNQMP_DMA_ARLEN_OFST       14
0078 #define ZYNQMP_DMA_AWBURST      GENMASK(13, 12)
0079 #define ZYNQMP_DMA_AWCACHE      GENMASK(11, 8)
0080 #define ZYNQMP_DMA_AWCACHE_OFST     8
0081 #define ZYNQMP_DMA_AWQOS        GENMASK(7, 4)
0082 #define ZYNQMP_DMA_AWQOS_OFST       4
0083 #define ZYNQMP_DMA_AWLEN        GENMASK(3, 0)
0084 #define ZYNQMP_DMA_AWLEN_OFST       0
0085 
0086 /* Descriptor Attribute register bit field definitions */
0087 #define ZYNQMP_DMA_AXCOHRNT     BIT(8)
0088 #define ZYNQMP_DMA_AXCACHE      GENMASK(7, 4)
0089 #define ZYNQMP_DMA_AXCACHE_OFST     4
0090 #define ZYNQMP_DMA_AXQOS        GENMASK(3, 0)
0091 #define ZYNQMP_DMA_AXQOS_OFST       0
0092 
0093 /* Control register 2 bit field definitions */
0094 #define ZYNQMP_DMA_ENABLE       BIT(0)
0095 
0096 /* Buffer Descriptor definitions */
0097 #define ZYNQMP_DMA_DESC_CTRL_STOP   0x10
0098 #define ZYNQMP_DMA_DESC_CTRL_COMP_INT   0x4
0099 #define ZYNQMP_DMA_DESC_CTRL_SIZE_256   0x2
0100 #define ZYNQMP_DMA_DESC_CTRL_COHRNT 0x1
0101 
0102 /* Interrupt Mask specific definitions */
0103 #define ZYNQMP_DMA_INT_ERR  (ZYNQMP_DMA_AXI_RD_DATA | \
0104                 ZYNQMP_DMA_AXI_WR_DATA | \
0105                 ZYNQMP_DMA_AXI_RD_DST_DSCR | \
0106                 ZYNQMP_DMA_AXI_RD_SRC_DSCR | \
0107                 ZYNQMP_DMA_INV_APB)
0108 #define ZYNQMP_DMA_INT_OVRFL    (ZYNQMP_DMA_BYTE_CNT_OVRFL | \
0109                 ZYNQMP_DMA_IRQ_SRC_ACCT_ERR | \
0110                 ZYNQMP_DMA_IRQ_DST_ACCT_ERR)
0111 #define ZYNQMP_DMA_INT_DONE (ZYNQMP_DMA_DONE | ZYNQMP_DMA_DST_DSCR_DONE)
0112 #define ZYNQMP_DMA_INT_EN_DEFAULT_MASK  (ZYNQMP_DMA_INT_DONE | \
0113                     ZYNQMP_DMA_INT_ERR | \
0114                     ZYNQMP_DMA_INT_OVRFL | \
0115                     ZYNQMP_DMA_DST_DSCR_DONE)
0116 
0117 /* Max number of descriptors per channel */
0118 #define ZYNQMP_DMA_NUM_DESCS    32
0119 
0120 /* Max transfer size per descriptor */
0121 #define ZYNQMP_DMA_MAX_TRANS_LEN    0x40000000
0122 
0123 /* Max burst lengths */
0124 #define ZYNQMP_DMA_MAX_DST_BURST_LEN    32768U
0125 #define ZYNQMP_DMA_MAX_SRC_BURST_LEN    32768U
0126 
0127 /* Reset values for data attributes */
0128 #define ZYNQMP_DMA_AXCACHE_VAL      0xF
0129 
0130 #define ZYNQMP_DMA_SRC_ISSUE_RST_VAL    0x1F
0131 
0132 #define ZYNQMP_DMA_IDS_DEFAULT_MASK 0xFFF
0133 
0134 /* Bus width in bits */
0135 #define ZYNQMP_DMA_BUS_WIDTH_64     64
0136 #define ZYNQMP_DMA_BUS_WIDTH_128    128
0137 
0138 #define ZDMA_PM_TIMEOUT         100
0139 
0140 #define ZYNQMP_DMA_DESC_SIZE(chan)  (chan->desc_size)
0141 
0142 #define to_chan(chan)       container_of(chan, struct zynqmp_dma_chan, \
0143                          common)
0144 #define tx_to_desc(tx)      container_of(tx, struct zynqmp_dma_desc_sw, \
0145                          async_tx)
0146 
0147 /**
0148  * struct zynqmp_dma_desc_ll - Hw linked list descriptor
0149  * @addr: Buffer address
0150  * @size: Size of the buffer
0151  * @ctrl: Control word
0152  * @nxtdscraddr: Next descriptor base address
0153  * @rsvd: Reserved field for HW internal use
0154  */
0155 struct zynqmp_dma_desc_ll {
0156     u64 addr;
0157     u32 size;
0158     u32 ctrl;
0159     u64 nxtdscraddr;
0160     u64 rsvd;
0161 };
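     /*
      * The driver always uses these HW descriptors in pairs: a source
      * descriptor immediately followed by its destination descriptor in
      * the coherent pool (see zynqmp_dma_alloc_chan_resources()).
      */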
0162 
0163 /**
0164  * struct zynqmp_dma_desc_sw - Per Transaction structure
0165  * @src: Source address for simple mode dma
0166  * @dst: Destination address for simple mode dma
0167  * @len: Transfer length for simple mode dma
0168  * @node: Node in the channel descriptor list
0169  * @tx_list: List head for the current transfer
0170  * @async_tx: Async transaction descriptor
0171  * @src_v: Virtual address of the src descriptor
0172  * @src_p: Physical address of the src descriptor
0173  * @dst_v: Virtual address of the dst descriptor
0174  * @dst_p: Physical address of the dst descriptor
0175  */
0176 struct zynqmp_dma_desc_sw {
0177     u64 src;
0178     u64 dst;
0179     u32 len;
0180     struct list_head node;
0181     struct list_head tx_list;
0182     struct dma_async_tx_descriptor async_tx;
0183     struct zynqmp_dma_desc_ll *src_v;
0184     dma_addr_t src_p;
0185     struct zynqmp_dma_desc_ll *dst_v;
0186     dma_addr_t dst_p;
0187 };
0188 
0189 /**
0190  * struct zynqmp_dma_chan - Driver specific DMA channel structure
0191  * @zdev: Driver specific device structure
0192  * @regs: Base address of the channel control registers
0193  * @lock: Descriptor operation lock
0194  * @pending_list: Descriptors waiting
0195  * @free_list: Descriptors free
0196  * @active_list: Descriptors active
0197  * @sw_desc_pool: SW descriptor pool
0198  * @done_list: Complete descriptors
0199  * @common: DMA common channel
0200  * @desc_pool_v: Virtual address of the HW descriptor pool
0201  * @desc_pool_p: Physical (DMA) address of the HW descriptor pool
0202  * @desc_free_cnt: Descriptor available count
0203  * @dev: The dma device
0204  * @irq: Channel IRQ
0205  * @is_dmacoherent: Tells whether dma operations are coherent or not
0206  * @tasklet: Cleanup work after irq
0207  * @idle: Channel status
0208  * @desc_size: Size of the low level descriptor
0209  * @err: Channel has errors
0210  * @bus_width: Bus width
0211  * @src_burst_len: Source burst length
0212  * @dst_burst_len: Dest burst length
0213  */
0214 struct zynqmp_dma_chan {
0215     struct zynqmp_dma_device *zdev;
0216     void __iomem *regs;
0217     spinlock_t lock;
0218     struct list_head pending_list;
0219     struct list_head free_list;
0220     struct list_head active_list;
0221     struct zynqmp_dma_desc_sw *sw_desc_pool;
0222     struct list_head done_list;
0223     struct dma_chan common;
0224     void *desc_pool_v;
0225     dma_addr_t desc_pool_p;
0226     u32 desc_free_cnt;
0227     struct device *dev;
0228     int irq;
0229     bool is_dmacoherent;
0230     struct tasklet_struct tasklet;
0231     bool idle;
0232     size_t desc_size;
0233     bool err;
0234     u32 bus_width;
0235     u32 src_burst_len;
0236     u32 dst_burst_len;
0237 };
0238 
0239 /**
0240  * struct zynqmp_dma_device - DMA device structure
0241  * @dev: Device Structure
0242  * @common: DMA device structure
0243  * @chan: Driver specific DMA channel
0244  * @clk_main: Pointer to main clock
0245  * @clk_apb: Pointer to apb clock
0246  */
0247 struct zynqmp_dma_device {
0248     struct device *dev;
0249     struct dma_device common;
0250     struct zynqmp_dma_chan *chan;
0251     struct clk *clk_main;
0252     struct clk *clk_apb;
0253 };
0254 
0255 static inline void zynqmp_dma_writeq(struct zynqmp_dma_chan *chan, u32 reg,
0256                      u64 value)
0257 {
0258     lo_hi_writeq(value, chan->regs + reg);
0259 }
0260 
0261 /**
0262  * zynqmp_dma_update_desc_to_ctrlr - Program descriptor addresses into the controller
0263  * @chan: ZynqMP DMA channel pointer
0264  * @desc: Transaction descriptor pointer
0265  */
0266 static void zynqmp_dma_update_desc_to_ctrlr(struct zynqmp_dma_chan *chan,
0267                       struct zynqmp_dma_desc_sw *desc)
0268 {
0269     dma_addr_t addr;
0270 
0271     addr = desc->src_p;
0272     zynqmp_dma_writeq(chan, ZYNQMP_DMA_SRC_START_LSB, addr);
0273     addr = desc->dst_p;
0274     zynqmp_dma_writeq(chan, ZYNQMP_DMA_DST_START_LSB, addr);
0275 }
0276 
0277 /**
0278  * zynqmp_dma_desc_config_eod - Mark the descriptor as end descriptor
0279  * @chan: ZynqMP DMA channel pointer
0280  * @desc: Hw descriptor pointer
0281  */
0282 static void zynqmp_dma_desc_config_eod(struct zynqmp_dma_chan *chan,
0283                        void *desc)
0284 {
0285     struct zynqmp_dma_desc_ll *hw = (struct zynqmp_dma_desc_ll *)desc;
0286 
0287     hw->ctrl |= ZYNQMP_DMA_DESC_CTRL_STOP;
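         /* The paired destination descriptor immediately follows the source one */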
0288     hw++;
0289     hw->ctrl |= ZYNQMP_DMA_DESC_CTRL_COMP_INT | ZYNQMP_DMA_DESC_CTRL_STOP;
0290 }
0291 
0292 /**
0293  * zynqmp_dma_config_sg_ll_desc - Configure the linked list descriptor
0294  * @chan: ZynqMP DMA channel pointer
0295  * @sdesc: Hw descriptor pointer
0296  * @src: Source buffer address
0297  * @dst: Destination buffer address
0298  * @len: Transfer length
0299  * @prev: Previous hw descriptor pointer
0300  */
0301 static void zynqmp_dma_config_sg_ll_desc(struct zynqmp_dma_chan *chan,
0302                    struct zynqmp_dma_desc_ll *sdesc,
0303                    dma_addr_t src, dma_addr_t dst, size_t len,
0304                    struct zynqmp_dma_desc_ll *prev)
0305 {
0306     struct zynqmp_dma_desc_ll *ddesc = sdesc + 1;
0307 
0308     sdesc->size = ddesc->size = len;
0309     sdesc->addr = src;
0310     ddesc->addr = dst;
0311 
0312     sdesc->ctrl = ddesc->ctrl = ZYNQMP_DMA_DESC_CTRL_SIZE_256;
0313     if (chan->is_dmacoherent) {
0314         sdesc->ctrl |= ZYNQMP_DMA_DESC_CTRL_COHRNT;
0315         ddesc->ctrl |= ZYNQMP_DMA_DESC_CTRL_COHRNT;
0316     }
0317 
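         /*
          * Chain this src/dst descriptor pair after the previous pair:
          * derive its bus address from its offset in the coherent pool and
          * store it in both of the previous pair's next-descriptor fields.
          */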
0318     if (prev) {
0319         dma_addr_t addr = chan->desc_pool_p +
0320                 ((uintptr_t)sdesc - (uintptr_t)chan->desc_pool_v);
0321         ddesc = prev + 1;
0322         prev->nxtdscraddr = addr;
0323         ddesc->nxtdscraddr = addr + ZYNQMP_DMA_DESC_SIZE(chan);
0324     }
0325 }
0326 
0327 /**
0328  * zynqmp_dma_init - Initialize the channel
0329  * @chan: ZynqMP DMA channel pointer
0330  */
0331 static void zynqmp_dma_init(struct zynqmp_dma_chan *chan)
0332 {
0333     u32 val;
0334 
0335     writel(ZYNQMP_DMA_IDS_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IDS);
0336     val = readl(chan->regs + ZYNQMP_DMA_ISR);
0337     writel(val, chan->regs + ZYNQMP_DMA_ISR);
0338 
0339     if (chan->is_dmacoherent) {
0340         val = ZYNQMP_DMA_AXCOHRNT;
0341         val = (val & ~ZYNQMP_DMA_AXCACHE) |
0342             (ZYNQMP_DMA_AXCACHE_VAL << ZYNQMP_DMA_AXCACHE_OFST);
0343         writel(val, chan->regs + ZYNQMP_DMA_DSCR_ATTR);
0344     }
0345 
0346     val = readl(chan->regs + ZYNQMP_DMA_DATA_ATTR);
0347     if (chan->is_dmacoherent) {
0348         val = (val & ~ZYNQMP_DMA_ARCACHE) |
0349             (ZYNQMP_DMA_AXCACHE_VAL << ZYNQMP_DMA_ARCACHE_OFST);
0350         val = (val & ~ZYNQMP_DMA_AWCACHE) |
0351             (ZYNQMP_DMA_AXCACHE_VAL << ZYNQMP_DMA_AWCACHE_OFST);
0352     }
0353     writel(val, chan->regs + ZYNQMP_DMA_DATA_ATTR);
0354 
0355     /* Clear the interrupt accounting registers by reading them */
0356     val = readl(chan->regs + ZYNQMP_DMA_IRQ_SRC_ACCT);
0357     val = readl(chan->regs + ZYNQMP_DMA_IRQ_DST_ACCT);
0358 
0359     chan->idle = true;
0360 }
0361 
0362 /**
0363  * zynqmp_dma_tx_submit - Submit DMA transaction
0364  * @tx: Async transaction descriptor pointer
0365  *
0366  * Return: cookie value
0367  */
0368 static dma_cookie_t zynqmp_dma_tx_submit(struct dma_async_tx_descriptor *tx)
0369 {
0370     struct zynqmp_dma_chan *chan = to_chan(tx->chan);
0371     struct zynqmp_dma_desc_sw *desc, *new;
0372     dma_cookie_t cookie;
0373     unsigned long irqflags;
0374 
0375     new = tx_to_desc(tx);
0376     spin_lock_irqsave(&chan->lock, irqflags);
0377     cookie = dma_cookie_assign(tx);
0378 
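         /*
          * If descriptors are already pending, append the new pair to the
          * tail of the hardware chain: point the last source/destination
          * descriptors at the new ones and clear their STOP bits so the
          * engine fetches past them.
          */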
0379     if (!list_empty(&chan->pending_list)) {
0380         desc = list_last_entry(&chan->pending_list,
0381                      struct zynqmp_dma_desc_sw, node);
0382         if (!list_empty(&desc->tx_list))
0383             desc = list_last_entry(&desc->tx_list,
0384                            struct zynqmp_dma_desc_sw, node);
0385         desc->src_v->nxtdscraddr = new->src_p;
0386         desc->src_v->ctrl &= ~ZYNQMP_DMA_DESC_CTRL_STOP;
0387         desc->dst_v->nxtdscraddr = new->dst_p;
0388         desc->dst_v->ctrl &= ~ZYNQMP_DMA_DESC_CTRL_STOP;
0389     }
0390 
0391     list_add_tail(&new->node, &chan->pending_list);
0392     spin_unlock_irqrestore(&chan->lock, irqflags);
0393 
0394     return cookie;
0395 }
0396 
0397 /**
0398  * zynqmp_dma_get_descriptor - Get the sw descriptor from the pool
0399  * @chan: ZynqMP DMA channel pointer
0400  *
0401  * Return: The sw descriptor
0402  */
0403 static struct zynqmp_dma_desc_sw *
0404 zynqmp_dma_get_descriptor(struct zynqmp_dma_chan *chan)
0405 {
0406     struct zynqmp_dma_desc_sw *desc;
0407     unsigned long irqflags;
0408 
0409     spin_lock_irqsave(&chan->lock, irqflags);
0410     desc = list_first_entry(&chan->free_list,
0411                 struct zynqmp_dma_desc_sw, node);
0412     list_del(&desc->node);
0413     spin_unlock_irqrestore(&chan->lock, irqflags);
0414 
0415     INIT_LIST_HEAD(&desc->tx_list);
0416     /* Clear the src and dst descriptor memory */
0417     memset((void *)desc->src_v, 0, ZYNQMP_DMA_DESC_SIZE(chan));
0418     memset((void *)desc->dst_v, 0, ZYNQMP_DMA_DESC_SIZE(chan));
0419 
0420     return desc;
0421 }
0422 
0423 /**
0424  * zynqmp_dma_free_descriptor - Move a descriptor and its children to the free list
0425  * @chan: ZynqMP DMA channel pointer
0426  * @sdesc: Transaction descriptor pointer
0427  */
0428 static void zynqmp_dma_free_descriptor(struct zynqmp_dma_chan *chan,
0429                  struct zynqmp_dma_desc_sw *sdesc)
0430 {
0431     struct zynqmp_dma_desc_sw *child, *next;
0432 
0433     chan->desc_free_cnt++;
0434     list_move_tail(&sdesc->node, &chan->free_list);
0435     list_for_each_entry_safe(child, next, &sdesc->tx_list, node) {
0436         chan->desc_free_cnt++;
0437         list_move_tail(&child->node, &chan->free_list);
0438     }
0439 }
0440 
0441 /**
0442  * zynqmp_dma_free_desc_list - Free descriptors list
0443  * @chan: ZynqMP DMA channel pointer
0444  * @list: List to parse and delete the descriptor
0445  */
0446 static void zynqmp_dma_free_desc_list(struct zynqmp_dma_chan *chan,
0447                       struct list_head *list)
0448 {
0449     struct zynqmp_dma_desc_sw *desc, *next;
0450 
0451     list_for_each_entry_safe(desc, next, list, node)
0452         zynqmp_dma_free_descriptor(chan, desc);
0453 }
0454 
0455 /**
0456  * zynqmp_dma_alloc_chan_resources - Allocate channel resources
0457  * @dchan: DMA channel
0458  *
0459  * Return: Number of descriptors on success and failure value on error
0460  */
0461 static int zynqmp_dma_alloc_chan_resources(struct dma_chan *dchan)
0462 {
0463     struct zynqmp_dma_chan *chan = to_chan(dchan);
0464     struct zynqmp_dma_desc_sw *desc;
0465     int i, ret;
0466 
0467     ret = pm_runtime_resume_and_get(chan->dev);
0468     if (ret < 0)
0469         return ret;
0470 
0471     chan->sw_desc_pool = kcalloc(ZYNQMP_DMA_NUM_DESCS, sizeof(*desc),
0472                      GFP_KERNEL);
0473     if (!chan->sw_desc_pool)
0474         return -ENOMEM;
0475 
0476     chan->idle = true;
0477     chan->desc_free_cnt = ZYNQMP_DMA_NUM_DESCS;
0478 
0479     INIT_LIST_HEAD(&chan->free_list);
0480 
0481     for (i = 0; i < ZYNQMP_DMA_NUM_DESCS; i++) {
0482         desc = chan->sw_desc_pool + i;
0483         dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
0484         desc->async_tx.tx_submit = zynqmp_dma_tx_submit;
0485         list_add_tail(&desc->node, &chan->free_list);
0486     }
0487 
0488     chan->desc_pool_v = dma_alloc_coherent(chan->dev,
0489                            (2 * ZYNQMP_DMA_DESC_SIZE(chan) *
0490                            ZYNQMP_DMA_NUM_DESCS),
0491                            &chan->desc_pool_p, GFP_KERNEL);
0492     if (!chan->desc_pool_v)
0493         return -ENOMEM;
0494 
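         /*
          * Carve the coherent pool into per-transaction slots: each SW
          * descriptor owns a source HW descriptor immediately followed by
          * its destination HW descriptor.
          */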
0495     for (i = 0; i < ZYNQMP_DMA_NUM_DESCS; i++) {
0496         desc = chan->sw_desc_pool + i;
0497         desc->src_v = (struct zynqmp_dma_desc_ll *) (chan->desc_pool_v +
0498                     (i * ZYNQMP_DMA_DESC_SIZE(chan) * 2));
0499         desc->dst_v = (struct zynqmp_dma_desc_ll *) (desc->src_v + 1);
0500         desc->src_p = chan->desc_pool_p +
0501                 (i * ZYNQMP_DMA_DESC_SIZE(chan) * 2);
0502         desc->dst_p = desc->src_p + ZYNQMP_DMA_DESC_SIZE(chan);
0503     }
0504 
0505     return ZYNQMP_DMA_NUM_DESCS;
0506 }
0507 
0508 /**
0509  * zynqmp_dma_start - Start DMA channel
0510  * @chan: ZynqMP DMA channel pointer
0511  */
0512 static void zynqmp_dma_start(struct zynqmp_dma_chan *chan)
0513 {
0514     writel(ZYNQMP_DMA_INT_EN_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IER);
0515     writel(0, chan->regs + ZYNQMP_DMA_TOTAL_BYTE);
0516     chan->idle = false;
0517     writel(ZYNQMP_DMA_ENABLE, chan->regs + ZYNQMP_DMA_CTRL2);
0518 }
0519 
0520 /**
0521  * zynqmp_dma_handle_ovfl_int - Process the overflow interrupt
0522  * @chan: ZynqMP DMA channel pointer
0523  * @status: Interrupt status value
0524  */
0525 static void zynqmp_dma_handle_ovfl_int(struct zynqmp_dma_chan *chan, u32 status)
0526 {
0527     if (status & ZYNQMP_DMA_BYTE_CNT_OVRFL)
0528         writel(0, chan->regs + ZYNQMP_DMA_TOTAL_BYTE);
0529     if (status & ZYNQMP_DMA_IRQ_DST_ACCT_ERR)
0530         readl(chan->regs + ZYNQMP_DMA_IRQ_DST_ACCT);
0531     if (status & ZYNQMP_DMA_IRQ_SRC_ACCT_ERR)
0532         readl(chan->regs + ZYNQMP_DMA_IRQ_SRC_ACCT);
0533 }
0534 
0535 static void zynqmp_dma_config(struct zynqmp_dma_chan *chan)
0536 {
0537     u32 val, burst_val;
0538 
0539     val = readl(chan->regs + ZYNQMP_DMA_CTRL0);
0540     val |= ZYNQMP_DMA_POINT_TYPE_SG;
0541     writel(val, chan->regs + ZYNQMP_DMA_CTRL0);
0542 
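         /* Burst lengths are programmed as log2 values in ARLEN/AWLEN */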
0543     val = readl(chan->regs + ZYNQMP_DMA_DATA_ATTR);
0544     burst_val = __ilog2_u32(chan->src_burst_len);
0545     val = (val & ~ZYNQMP_DMA_ARLEN) |
0546         ((burst_val << ZYNQMP_DMA_ARLEN_OFST) & ZYNQMP_DMA_ARLEN);
0547     burst_val = __ilog2_u32(chan->dst_burst_len);
0548     val = (val & ~ZYNQMP_DMA_AWLEN) |
0549         ((burst_val << ZYNQMP_DMA_AWLEN_OFST) & ZYNQMP_DMA_AWLEN);
0550     writel(val, chan->regs + ZYNQMP_DMA_DATA_ATTR);
0551 }
0552 
0553 /**
0554  * zynqmp_dma_device_config - ZynqMP DMA device configuration
0555  * @dchan: DMA channel
0556  * @config: DMA device config
0557  *
0558  * Return: 0 always
0559  */
0560 static int zynqmp_dma_device_config(struct dma_chan *dchan,
0561                     struct dma_slave_config *config)
0562 {
0563     struct zynqmp_dma_chan *chan = to_chan(dchan);
0564 
0565     chan->src_burst_len = clamp(config->src_maxburst, 1U,
0566         ZYNQMP_DMA_MAX_SRC_BURST_LEN);
0567     chan->dst_burst_len = clamp(config->dst_maxburst, 1U,
0568         ZYNQMP_DMA_MAX_DST_BURST_LEN);
0569 
0570     return 0;
0571 }
0572 
0573 /**
0574  * zynqmp_dma_start_transfer - Initiate the new transfer
0575  * @chan: ZynqMP DMA channel pointer
0576  */
0577 static void zynqmp_dma_start_transfer(struct zynqmp_dma_chan *chan)
0578 {
0579     struct zynqmp_dma_desc_sw *desc;
0580 
0581     if (!chan->idle)
0582         return;
0583 
0584     zynqmp_dma_config(chan);
0585 
0586     desc = list_first_entry_or_null(&chan->pending_list,
0587                     struct zynqmp_dma_desc_sw, node);
0588     if (!desc)
0589         return;
0590 
0591     list_splice_tail_init(&chan->pending_list, &chan->active_list);
0592     zynqmp_dma_update_desc_to_ctrlr(chan, desc);
0593     zynqmp_dma_start(chan);
0594 }
0595 
0596 
0597 /**
0598  * zynqmp_dma_chan_desc_cleanup - Cleanup the completed descriptors
0599  * @chan: ZynqMP DMA channel
0600  */
0601 static void zynqmp_dma_chan_desc_cleanup(struct zynqmp_dma_chan *chan)
0602 {
0603     struct zynqmp_dma_desc_sw *desc, *next;
0604     unsigned long irqflags;
0605 
0606     spin_lock_irqsave(&chan->lock, irqflags);
0607 
0608     list_for_each_entry_safe(desc, next, &chan->done_list, node) {
0609         struct dmaengine_desc_callback cb;
0610 
0611         dmaengine_desc_get_callback(&desc->async_tx, &cb);
0612         if (dmaengine_desc_callback_valid(&cb)) {
0613             spin_unlock_irqrestore(&chan->lock, irqflags);
0614             dmaengine_desc_callback_invoke(&cb, NULL);
0615             spin_lock_irqsave(&chan->lock, irqflags);
0616         }
0617 
0618         /* Run any dependencies, then free the descriptor */
0619         zynqmp_dma_free_descriptor(chan, desc);
0620     }
0621 
0622     spin_unlock_irqrestore(&chan->lock, irqflags);
0623 }
0624 
0625 /**
0626  * zynqmp_dma_complete_descriptor - Mark the active descriptor as complete
0627  * @chan: ZynqMP DMA channel pointer
0628  */
0629 static void zynqmp_dma_complete_descriptor(struct zynqmp_dma_chan *chan)
0630 {
0631     struct zynqmp_dma_desc_sw *desc;
0632 
0633     desc = list_first_entry_or_null(&chan->active_list,
0634                     struct zynqmp_dma_desc_sw, node);
0635     if (!desc)
0636         return;
0637     list_del(&desc->node);
0638     dma_cookie_complete(&desc->async_tx);
0639     list_add_tail(&desc->node, &chan->done_list);
0640 }
0641 
0642 /**
0643  * zynqmp_dma_issue_pending - Issue pending transactions
0644  * @dchan: DMA channel pointer
0645  */
0646 static void zynqmp_dma_issue_pending(struct dma_chan *dchan)
0647 {
0648     struct zynqmp_dma_chan *chan = to_chan(dchan);
0649     unsigned long irqflags;
0650 
0651     spin_lock_irqsave(&chan->lock, irqflags);
0652     zynqmp_dma_start_transfer(chan);
0653     spin_unlock_irqrestore(&chan->lock, irqflags);
0654 }
0655 
0656 /**
0657  * zynqmp_dma_free_descriptors - Free channel descriptors
0658  * @chan: ZynqMP DMA channel pointer
0659  */
0660 static void zynqmp_dma_free_descriptors(struct zynqmp_dma_chan *chan)
0661 {
0662     unsigned long irqflags;
0663 
0664     spin_lock_irqsave(&chan->lock, irqflags);
0665     zynqmp_dma_free_desc_list(chan, &chan->active_list);
0666     zynqmp_dma_free_desc_list(chan, &chan->pending_list);
0667     zynqmp_dma_free_desc_list(chan, &chan->done_list);
0668     spin_unlock_irqrestore(&chan->lock, irqflags);
0669 }
0670 
0671 /**
0672  * zynqmp_dma_free_chan_resources - Free channel resources
0673  * @dchan: DMA channel pointer
0674  */
0675 static void zynqmp_dma_free_chan_resources(struct dma_chan *dchan)
0676 {
0677     struct zynqmp_dma_chan *chan = to_chan(dchan);
0678 
0679     zynqmp_dma_free_descriptors(chan);
0680     dma_free_coherent(chan->dev,
0681         (2 * ZYNQMP_DMA_DESC_SIZE(chan) * ZYNQMP_DMA_NUM_DESCS),
0682         chan->desc_pool_v, chan->desc_pool_p);
0683     kfree(chan->sw_desc_pool);
0684     pm_runtime_mark_last_busy(chan->dev);
0685     pm_runtime_put_autosuspend(chan->dev);
0686 }
0687 
0688 /**
0689  * zynqmp_dma_reset - Reset the channel
0690  * @chan: ZynqMP DMA channel pointer
0691  */
0692 static void zynqmp_dma_reset(struct zynqmp_dma_chan *chan)
0693 {
0694     unsigned long irqflags;
0695 
0696     writel(ZYNQMP_DMA_IDS_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IDS);
0697 
0698     spin_lock_irqsave(&chan->lock, irqflags);
0699     zynqmp_dma_complete_descriptor(chan);
0700     spin_unlock_irqrestore(&chan->lock, irqflags);
0701     zynqmp_dma_chan_desc_cleanup(chan);
0702     zynqmp_dma_free_descriptors(chan);
0703 
0704     zynqmp_dma_init(chan);
0705 }
0706 
0707 /**
0708  * zynqmp_dma_irq_handler - ZynqMP DMA Interrupt handler
0709  * @irq: IRQ number
0710  * @data: Pointer to the ZynqMP DMA channel structure
0711  *
0712  * Return: IRQ_HANDLED/IRQ_NONE
0713  */
0714 static irqreturn_t zynqmp_dma_irq_handler(int irq, void *data)
0715 {
0716     struct zynqmp_dma_chan *chan = (struct zynqmp_dma_chan *)data;
0717     u32 isr, imr, status;
0718     irqreturn_t ret = IRQ_NONE;
0719 
0720     isr = readl(chan->regs + ZYNQMP_DMA_ISR);
0721     imr = readl(chan->regs + ZYNQMP_DMA_IMR);
0722     status = isr & ~imr;
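         /* Handle only the sources that are pending and not masked */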
0723 
0724     writel(isr, chan->regs + ZYNQMP_DMA_ISR);
0725     if (status & ZYNQMP_DMA_INT_DONE) {
0726         tasklet_schedule(&chan->tasklet);
0727         ret = IRQ_HANDLED;
0728     }
0729 
0730     if (status & ZYNQMP_DMA_DONE)
0731         chan->idle = true;
0732 
0733     if (status & ZYNQMP_DMA_INT_ERR) {
0734         chan->err = true;
0735         tasklet_schedule(&chan->tasklet);
0736         dev_err(chan->dev, "Channel %p has errors\n", chan);
0737         ret = IRQ_HANDLED;
0738     }
0739 
0740     if (status & ZYNQMP_DMA_INT_OVRFL) {
0741         zynqmp_dma_handle_ovfl_int(chan, status);
0742         dev_dbg(chan->dev, "Channel %p overflow interrupt\n", chan);
0743         ret = IRQ_HANDLED;
0744     }
0745 
0746     return ret;
0747 }
0748 
0749 /**
0750  * zynqmp_dma_do_tasklet - Completion tasklet callback
0751  * @t: Pointer to the tasklet embedded in the ZynqMP DMA channel structure
0752  */
0753 static void zynqmp_dma_do_tasklet(struct tasklet_struct *t)
0754 {
0755     struct zynqmp_dma_chan *chan = from_tasklet(chan, t, tasklet);
0756     u32 count;
0757     unsigned long irqflags;
0758 
0759     if (chan->err) {
0760         zynqmp_dma_reset(chan);
0761         chan->err = false;
0762         return;
0763     }
0764 
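         /*
          * The destination interrupt accounting register reports how many
          * transfers completed since it was last read (the read clears it);
          * retire one active descriptor per completed transfer.
          */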
0765     spin_lock_irqsave(&chan->lock, irqflags);
0766     count = readl(chan->regs + ZYNQMP_DMA_IRQ_DST_ACCT);
0767     while (count) {
0768         zynqmp_dma_complete_descriptor(chan);
0769         count--;
0770     }
0771     spin_unlock_irqrestore(&chan->lock, irqflags);
0772 
0773     zynqmp_dma_chan_desc_cleanup(chan);
0774 
0775     if (chan->idle) {
0776         spin_lock_irqsave(&chan->lock, irqflags);
0777         zynqmp_dma_start_transfer(chan);
0778         spin_unlock_irqrestore(&chan->lock, irqflags);
0779     }
0780 }
0781 
0782 /**
0783  * zynqmp_dma_device_terminate_all - Aborts all transfers on a channel
0784  * @dchan: DMA channel pointer
0785  *
0786  * Return: Always '0'
0787  */
0788 static int zynqmp_dma_device_terminate_all(struct dma_chan *dchan)
0789 {
0790     struct zynqmp_dma_chan *chan = to_chan(dchan);
0791 
0792     writel(ZYNQMP_DMA_IDS_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IDS);
0793     zynqmp_dma_free_descriptors(chan);
0794 
0795     return 0;
0796 }
0797 
0798 /**
0799  * zynqmp_dma_prep_memcpy - prepare descriptors for memcpy transaction
0800  * @dchan: DMA channel
0801  * @dma_dst: Destination buffer address
0802  * @dma_src: Source buffer address
0803  * @len: Transfer length
0804  * @flags: transfer ack flags
0805  *
0806  * Return: Async transaction descriptor on success and NULL on failure
0807  */
0808 static struct dma_async_tx_descriptor *zynqmp_dma_prep_memcpy(
0809                 struct dma_chan *dchan, dma_addr_t dma_dst,
0810                 dma_addr_t dma_src, size_t len, ulong flags)
0811 {
0812     struct zynqmp_dma_chan *chan;
0813     struct zynqmp_dma_desc_sw *new, *first = NULL;
0814     void *desc = NULL, *prev = NULL;
0815     size_t copy;
0816     u32 desc_cnt;
0817     unsigned long irqflags;
0818 
0819     chan = to_chan(dchan);
0820 
0821     desc_cnt = DIV_ROUND_UP(len, ZYNQMP_DMA_MAX_TRANS_LEN);
0822 
0823     spin_lock_irqsave(&chan->lock, irqflags);
0824     if (desc_cnt > chan->desc_free_cnt) {
0825         spin_unlock_irqrestore(&chan->lock, irqflags);
0826         dev_dbg(chan->dev, "chan %p descs are not available\n", chan);
0827         return NULL;
0828     }
0829     chan->desc_free_cnt = chan->desc_free_cnt - desc_cnt;
0830     spin_unlock_irqrestore(&chan->lock, irqflags);
0831 
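         /*
          * Split the request into chunks of at most ZYNQMP_DMA_MAX_TRANS_LEN
          * bytes, one descriptor pair per chunk, each chained to the previous.
          */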
0832     do {
0833         /* Allocate and populate the descriptor */
0834         new = zynqmp_dma_get_descriptor(chan);
0835 
0836         copy = min_t(size_t, len, ZYNQMP_DMA_MAX_TRANS_LEN);
0837         desc = (struct zynqmp_dma_desc_ll *)new->src_v;
0838         zynqmp_dma_config_sg_ll_desc(chan, desc, dma_src,
0839                          dma_dst, copy, prev);
0840         prev = desc;
0841         len -= copy;
0842         dma_src += copy;
0843         dma_dst += copy;
0844         if (!first)
0845             first = new;
0846         else
0847             list_add_tail(&new->node, &first->tx_list);
0848     } while (len);
0849 
0850     zynqmp_dma_desc_config_eod(chan, desc);
0851     async_tx_ack(&first->async_tx);
0852     first->async_tx.flags = (enum dma_ctrl_flags)flags;
0853     return &first->async_tx;
0854 }
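
     /*
      * Illustrative client usage (a sketch, not part of this driver): a
      * dmaengine consumer would typically drive this memcpy channel as shown
      * below. "dst", "src" and "len" stand for a caller-provided DMA-mapped
      * destination, source and length; completion handling is omitted, and a
      * real client waits for the transfer to finish before releasing the
      * channel.
      *
      *   dma_cap_mask_t mask;
      *   struct dma_chan *chan;
      *   struct dma_async_tx_descriptor *tx;
      *
      *   dma_cap_zero(mask);
      *   dma_cap_set(DMA_MEMCPY, mask);
      *   chan = dma_request_chan_by_mask(&mask);
      *   if (!IS_ERR(chan)) {
      *       tx = dmaengine_prep_dma_memcpy(chan, dst, src, len,
      *                                      DMA_CTRL_ACK);
      *       if (tx) {
      *           dmaengine_submit(tx);
      *           dma_async_issue_pending(chan);
      *       }
      *       dma_release_channel(chan);
      *   }
      */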
0855 
0856 /**
0857  * zynqmp_dma_chan_remove - Channel remove function
0858  * @chan: ZynqMP DMA channel pointer
0859  */
0860 static void zynqmp_dma_chan_remove(struct zynqmp_dma_chan *chan)
0861 {
0862     if (!chan)
0863         return;
0864 
0865     if (chan->irq)
0866         devm_free_irq(chan->zdev->dev, chan->irq, chan);
0867     tasklet_kill(&chan->tasklet);
0868     list_del(&chan->common.device_node);
0869 }
0870 
0871 /**
0872  * zynqmp_dma_chan_probe - Per Channel Probing
0873  * @zdev: Driver specific device structure
0874  * @pdev: Pointer to the platform_device structure
0875  *
0876  * Return: '0' on success and failure value on error
0877  */
0878 static int zynqmp_dma_chan_probe(struct zynqmp_dma_device *zdev,
0879                struct platform_device *pdev)
0880 {
0881     struct zynqmp_dma_chan *chan;
0882     struct resource *res;
0883     struct device_node *node = pdev->dev.of_node;
0884     int err;
0885 
0886     chan = devm_kzalloc(zdev->dev, sizeof(*chan), GFP_KERNEL);
0887     if (!chan)
0888         return -ENOMEM;
0889     chan->dev = zdev->dev;
0890     chan->zdev = zdev;
0891 
0892     res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
0893     chan->regs = devm_ioremap_resource(&pdev->dev, res);
0894     if (IS_ERR(chan->regs))
0895         return PTR_ERR(chan->regs);
0896 
0897     chan->bus_width = ZYNQMP_DMA_BUS_WIDTH_64;
0898     chan->dst_burst_len = ZYNQMP_DMA_MAX_DST_BURST_LEN;
0899     chan->src_burst_len = ZYNQMP_DMA_MAX_SRC_BURST_LEN;
0900     err = of_property_read_u32(node, "xlnx,bus-width", &chan->bus_width);
0901     if (err < 0) {
0902         dev_err(&pdev->dev, "missing xlnx,bus-width property\n");
0903         return err;
0904     }
0905 
0906     if (chan->bus_width != ZYNQMP_DMA_BUS_WIDTH_64 &&
0907         chan->bus_width != ZYNQMP_DMA_BUS_WIDTH_128) {
0908         dev_err(zdev->dev, "invalid bus-width value");
0909         return -EINVAL;
0910     }
0911 
0912     chan->is_dmacoherent =  of_property_read_bool(node, "dma-coherent");
0913     zdev->chan = chan;
0914     tasklet_setup(&chan->tasklet, zynqmp_dma_do_tasklet);
0915     spin_lock_init(&chan->lock);
0916     INIT_LIST_HEAD(&chan->active_list);
0917     INIT_LIST_HEAD(&chan->pending_list);
0918     INIT_LIST_HEAD(&chan->done_list);
0919     INIT_LIST_HEAD(&chan->free_list);
0920 
0921     dma_cookie_init(&chan->common);
0922     chan->common.device = &zdev->common;
0923     list_add_tail(&chan->common.device_node, &zdev->common.channels);
0924 
0925     zynqmp_dma_init(chan);
0926     chan->irq = platform_get_irq(pdev, 0);
0927     if (chan->irq < 0)
0928         return -ENXIO;
0929     err = devm_request_irq(&pdev->dev, chan->irq, zynqmp_dma_irq_handler, 0,
0930                    "zynqmp-dma", chan);
0931     if (err)
0932         return err;
0933 
0934     chan->desc_size = sizeof(struct zynqmp_dma_desc_ll);
0935     chan->idle = true;
0936     return 0;
0937 }
0938 
0939 /**
0940  * of_zynqmp_dma_xlate - Translation function
0941  * @dma_spec: Pointer to DMA specifier as found in the device tree
0942  * @ofdma: Pointer to DMA controller data
0943  *
0944  * Return: DMA channel pointer on success and NULL on error
0945  */
0946 static struct dma_chan *of_zynqmp_dma_xlate(struct of_phandle_args *dma_spec,
0947                         struct of_dma *ofdma)
0948 {
0949     struct zynqmp_dma_device *zdev = ofdma->of_dma_data;
0950 
0951     return dma_get_slave_channel(&zdev->chan->common);
0952 }
0953 
0954 /**
0955  * zynqmp_dma_suspend - Suspend method for the driver
0956  * @dev:    Address of the device structure
0957  *
0958  * Put the driver into low power mode.
0959  * Return: 0 on success and failure value on error
0960  */
0961 static int __maybe_unused zynqmp_dma_suspend(struct device *dev)
0962 {
0963     if (!device_may_wakeup(dev))
0964         return pm_runtime_force_suspend(dev);
0965 
0966     return 0;
0967 }
0968 
0969 /**
0970  * zynqmp_dma_resume - Resume from suspend
0971  * @dev:    Address of the device structure
0972  *
0973  * Resume operation after suspend.
0974  * Return: 0 on success and failure value on error
0975  */
0976 static int __maybe_unused zynqmp_dma_resume(struct device *dev)
0977 {
0978     if (!device_may_wakeup(dev))
0979         return pm_runtime_force_resume(dev);
0980 
0981     return 0;
0982 }
0983 
0984 /**
0985  * zynqmp_dma_runtime_suspend - Runtime suspend method for the driver
0986  * @dev:    Address of the device structure
0987  *
0988  * Put the driver into low power mode.
0989  * Return: 0 always
0990  */
0991 static int __maybe_unused zynqmp_dma_runtime_suspend(struct device *dev)
0992 {
0993     struct zynqmp_dma_device *zdev = dev_get_drvdata(dev);
0994 
0995     clk_disable_unprepare(zdev->clk_main);
0996     clk_disable_unprepare(zdev->clk_apb);
0997 
0998     return 0;
0999 }
1000 
1001 /**
1002  * zynqmp_dma_runtime_resume - Runtime resume method for the driver
1003  * @dev:    Address of the device structure
1004  *
1005  * Bring the device out of low power mode.
1006  * Return: 0 always
1007  */
1008 static int __maybe_unused zynqmp_dma_runtime_resume(struct device *dev)
1009 {
1010     struct zynqmp_dma_device *zdev = dev_get_drvdata(dev);
1011     int err;
1012 
1013     err = clk_prepare_enable(zdev->clk_main);
1014     if (err) {
1015         dev_err(dev, "Unable to enable main clock.\n");
1016         return err;
1017     }
1018 
1019     err = clk_prepare_enable(zdev->clk_apb);
1020     if (err) {
1021         dev_err(dev, "Unable to enable apb clock.\n");
1022         clk_disable_unprepare(zdev->clk_main);
1023         return err;
1024     }
1025 
1026     return 0;
1027 }
1028 
1029 static const struct dev_pm_ops zynqmp_dma_dev_pm_ops = {
1030     SET_SYSTEM_SLEEP_PM_OPS(zynqmp_dma_suspend, zynqmp_dma_resume)
1031     SET_RUNTIME_PM_OPS(zynqmp_dma_runtime_suspend,
1032                zynqmp_dma_runtime_resume, NULL)
1033 };
1034 
1035 /**
1036  * zynqmp_dma_probe - Driver probe function
1037  * @pdev: Pointer to the platform_device structure
1038  *
1039  * Return: '0' on success and failure value on error
1040  */
1041 static int zynqmp_dma_probe(struct platform_device *pdev)
1042 {
1043     struct zynqmp_dma_device *zdev;
1044     struct dma_device *p;
1045     int ret;
1046 
1047     zdev = devm_kzalloc(&pdev->dev, sizeof(*zdev), GFP_KERNEL);
1048     if (!zdev)
1049         return -ENOMEM;
1050 
1051     zdev->dev = &pdev->dev;
1052     INIT_LIST_HEAD(&zdev->common.channels);
1053 
1054     dma_set_mask(&pdev->dev, DMA_BIT_MASK(44));
1055     dma_cap_set(DMA_MEMCPY, zdev->common.cap_mask);
1056 
1057     p = &zdev->common;
1058     p->device_prep_dma_memcpy = zynqmp_dma_prep_memcpy;
1059     p->device_terminate_all = zynqmp_dma_device_terminate_all;
1060     p->device_issue_pending = zynqmp_dma_issue_pending;
1061     p->device_alloc_chan_resources = zynqmp_dma_alloc_chan_resources;
1062     p->device_free_chan_resources = zynqmp_dma_free_chan_resources;
1063     p->device_tx_status = dma_cookie_status;
1064     p->device_config = zynqmp_dma_device_config;
1065     p->dev = &pdev->dev;
1066 
1067     zdev->clk_main = devm_clk_get(&pdev->dev, "clk_main");
1068     if (IS_ERR(zdev->clk_main))
1069         return dev_err_probe(&pdev->dev, PTR_ERR(zdev->clk_main),
1070                      "main clock not found.\n");
1071 
1072     zdev->clk_apb = devm_clk_get(&pdev->dev, "clk_apb");
1073     if (IS_ERR(zdev->clk_apb))
1074         return dev_err_probe(&pdev->dev, PTR_ERR(zdev->clk_apb),
1075                      "apb clock not found.\n");
1076 
1077     platform_set_drvdata(pdev, zdev);
1078     pm_runtime_set_autosuspend_delay(zdev->dev, ZDMA_PM_TIMEOUT);
1079     pm_runtime_use_autosuspend(zdev->dev);
1080     pm_runtime_enable(zdev->dev);
1081     ret = pm_runtime_resume_and_get(zdev->dev);
1082     if (ret < 0) {
1083         dev_err(&pdev->dev, "device wakeup failed.\n");
1084         pm_runtime_disable(zdev->dev);
1085     }
1086     if (!pm_runtime_enabled(zdev->dev)) {
1087         ret = zynqmp_dma_runtime_resume(zdev->dev);
1088         if (ret)
1089             return ret;
1090     }
1091 
1092     ret = zynqmp_dma_chan_probe(zdev, pdev);
1093     if (ret) {
1094         dev_err_probe(&pdev->dev, ret, "Probing channel failed\n");
1095         goto err_disable_pm;
1096     }
1097 
1098     p->dst_addr_widths = BIT(zdev->chan->bus_width / 8);
1099     p->src_addr_widths = BIT(zdev->chan->bus_width / 8);
1100 
1101     ret = dma_async_device_register(&zdev->common);
1102     if (ret) {
1103         dev_err(zdev->dev, "failed to register the dma device\n");
1104         goto free_chan_resources;
1105     }
1106 
1107     ret = of_dma_controller_register(pdev->dev.of_node,
1108                      of_zynqmp_dma_xlate, zdev);
1109     if (ret) {
1110         dev_err_probe(&pdev->dev, ret, "Unable to register DMA to DT\n");
1111         dma_async_device_unregister(&zdev->common);
1112         goto free_chan_resources;
1113     }
1114 
1115     pm_runtime_mark_last_busy(zdev->dev);
1116     pm_runtime_put_sync_autosuspend(zdev->dev);
1117 
1118     return 0;
1119 
1120 free_chan_resources:
1121     zynqmp_dma_chan_remove(zdev->chan);
1122 err_disable_pm:
1123     if (!pm_runtime_enabled(zdev->dev))
1124         zynqmp_dma_runtime_suspend(zdev->dev);
1125     pm_runtime_disable(zdev->dev);
1126     return ret;
1127 }
1128 
1129 /**
1130  * zynqmp_dma_remove - Driver remove function
1131  * @pdev: Pointer to the platform_device structure
1132  *
1133  * Return: Always '0'
1134  */
1135 static int zynqmp_dma_remove(struct platform_device *pdev)
1136 {
1137     struct zynqmp_dma_device *zdev = platform_get_drvdata(pdev);
1138 
1139     of_dma_controller_free(pdev->dev.of_node);
1140     dma_async_device_unregister(&zdev->common);
1141 
1142     zynqmp_dma_chan_remove(zdev->chan);
1143     pm_runtime_disable(zdev->dev);
1144     if (!pm_runtime_enabled(zdev->dev))
1145         zynqmp_dma_runtime_suspend(zdev->dev);
1146 
1147     return 0;
1148 }
1149 
1150 static const struct of_device_id zynqmp_dma_of_match[] = {
1151     { .compatible = "xlnx,zynqmp-dma-1.0", },
1152     {}
1153 };
1154 MODULE_DEVICE_TABLE(of, zynqmp_dma_of_match);
1155 
1156 static struct platform_driver zynqmp_dma_driver = {
1157     .driver = {
1158         .name = "xilinx-zynqmp-dma",
1159         .of_match_table = zynqmp_dma_of_match,
1160         .pm = &zynqmp_dma_dev_pm_ops,
1161     },
1162     .probe = zynqmp_dma_probe,
1163     .remove = zynqmp_dma_remove,
1164 };
1165 
1166 module_platform_driver(zynqmp_dma_driver);
1167 
1168 MODULE_LICENSE("GPL");
1169 MODULE_AUTHOR("Xilinx, Inc.");
1170 MODULE_DESCRIPTION("Xilinx ZynqMP DMA driver");