// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * DMA driver for Xilinx Video DMA Engine
 *
 * Copyright (C) 2010-2014 Xilinx, Inc. All rights reserved.
 *
 * Based on the Freescale DMA driver.
 *
 * Description:
 * The AXI Video Direct Memory Access (AXI VDMA) core is a soft Xilinx IP
 * core that provides high-bandwidth direct memory access between memory
 * and AXI4-Stream type video target peripherals. The core provides efficient
 * two dimensional DMA operations with independent asynchronous read (S2MM)
 * and write (MM2S) channel operation. It can be configured to have either
 * one channel or two channels. If configured as two channels, one is to
 * transmit to the video device (MM2S) and another is to receive from the
 * video device (S2MM). Initialization, status, interrupt and management
 * registers are accessed through an AXI4-Lite slave interface.
 *
 * The AXI Direct Memory Access (AXI DMA) core is a soft Xilinx IP core that
 * provides high-bandwidth one dimensional direct memory access between memory
 * and AXI4-Stream target peripherals. It supports one receive and one
 * transmit channel, both of them optional at synthesis time.
 *
 * The AXI CDMA is a soft IP that provides high-bandwidth Direct Memory
 * Access (DMA) between a memory-mapped source address and a memory-mapped
 * destination address.
 *
 * The AXI Multichannel Direct Memory Access (AXI MCDMA) core is a soft
 * Xilinx IP that provides high-bandwidth direct memory access between
 * memory and AXI4-Stream target peripherals. It provides a scatter-gather
 * (SG) interface with independent configuration support for multiple
 * channels.
 */

#include <linux/bitops.h>
#include <linux/dmapool.h>
#include <linux/dma/xilinx_dma.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_dma.h>
#include <linux/of_irq.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include <linux/clk.h>
#include <linux/io-64-nonatomic-lo-hi.h>

#include "../dmaengine.h"

/* Register/Descriptor Offsets */
#define XILINX_DMA_MM2S_CTRL_OFFSET	…
#define XILINX_DMA_S2MM_CTRL_OFFSET	…
#define XILINX_VDMA_MM2S_DESC_OFFSET	…
#define XILINX_VDMA_S2MM_DESC_OFFSET	…

/* Control Registers */
#define XILINX_DMA_REG_DMACR	…
#define XILINX_DMA_DMACR_DELAY_MAX	…
#define XILINX_DMA_DMACR_DELAY_SHIFT	…
#define XILINX_DMA_DMACR_FRAME_COUNT_MAX	…
#define XILINX_DMA_DMACR_FRAME_COUNT_SHIFT	…
#define XILINX_DMA_DMACR_ERR_IRQ	…
#define XILINX_DMA_DMACR_DLY_CNT_IRQ	…
#define XILINX_DMA_DMACR_FRM_CNT_IRQ	…
#define XILINX_DMA_DMACR_MASTER_SHIFT	…
#define XILINX_DMA_DMACR_FSYNCSRC_SHIFT	…
#define XILINX_DMA_DMACR_FRAMECNT_EN	…
#define XILINX_DMA_DMACR_GENLOCK_EN	…
#define XILINX_DMA_DMACR_RESET	…
#define XILINX_DMA_DMACR_CIRC_EN	…
#define XILINX_DMA_DMACR_RUNSTOP	…
#define XILINX_DMA_DMACR_FSYNCSRC_MASK	…
#define XILINX_DMA_DMACR_DELAY_MASK	…
#define XILINX_DMA_DMACR_FRAME_COUNT_MASK	…
#define XILINX_DMA_DMACR_MASTER_MASK	…
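/*
 * Illustrative sketch only (assumption, not the driver's
 * xilinx_dma_reset()): the DMACR reset bit above is self-clearing, so a
 * soft reset typically sets it and then polls for completion. The
 * 1us/1000us poll bounds here are example values, not driver constants.
 */
static inline int example_dmacr_soft_reset(void __iomem *dmacr)
{
	u32 val;

	writel(readl(dmacr) | XILINX_DMA_DMACR_RESET, dmacr);
	/* Hardware clears the bit once the reset has completed. */
	return readl_poll_timeout(dmacr, val,
				  !(val & XILINX_DMA_DMACR_RESET), 1, 1000);
}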
#define XILINX_DMA_REG_DMASR	…
#define XILINX_DMA_DMASR_EOL_LATE_ERR	…
#define XILINX_DMA_DMASR_ERR_IRQ	…
#define XILINX_DMA_DMASR_DLY_CNT_IRQ	…
#define XILINX_DMA_DMASR_FRM_CNT_IRQ	…
#define XILINX_DMA_DMASR_SOF_LATE_ERR	…
#define XILINX_DMA_DMASR_SG_DEC_ERR	…
#define XILINX_DMA_DMASR_SG_SLV_ERR	…
#define XILINX_DMA_DMASR_EOF_EARLY_ERR	…
#define XILINX_DMA_DMASR_SOF_EARLY_ERR	…
#define XILINX_DMA_DMASR_DMA_DEC_ERR	…
#define XILINX_DMA_DMASR_DMA_SLAVE_ERR	…
#define XILINX_DMA_DMASR_DMA_INT_ERR	…
#define XILINX_DMA_DMASR_SG_MASK	…
#define XILINX_DMA_DMASR_IDLE	…
#define XILINX_DMA_DMASR_HALTED	…
#define XILINX_DMA_DMASR_DELAY_MASK	…
#define XILINX_DMA_DMASR_FRAME_COUNT_MASK	…

#define XILINX_DMA_REG_CURDESC	…
#define XILINX_DMA_REG_TAILDESC	…
#define XILINX_DMA_REG_REG_INDEX	…
#define XILINX_DMA_REG_FRMSTORE	…
#define XILINX_DMA_REG_THRESHOLD	…
#define XILINX_DMA_REG_FRMPTR_STS	…
#define XILINX_DMA_REG_PARK_PTR	…
#define XILINX_DMA_PARK_PTR_WR_REF_SHIFT	…
#define XILINX_DMA_PARK_PTR_WR_REF_MASK	…
#define XILINX_DMA_PARK_PTR_RD_REF_SHIFT	…
#define XILINX_DMA_PARK_PTR_RD_REF_MASK	…
#define XILINX_DMA_REG_VDMA_VERSION	…

/* Register Direct Mode Registers */
#define XILINX_DMA_REG_VSIZE	…
#define XILINX_DMA_VSIZE_MASK	…
#define XILINX_DMA_REG_HSIZE	…
#define XILINX_DMA_HSIZE_MASK	…

#define XILINX_DMA_REG_FRMDLY_STRIDE	…
#define XILINX_DMA_FRMDLY_STRIDE_FRMDLY_SHIFT	…
#define XILINX_DMA_FRMDLY_STRIDE_STRIDE_SHIFT	…

#define XILINX_VDMA_REG_START_ADDRESS(n)	…
#define XILINX_VDMA_REG_START_ADDRESS_64(n)	…

#define XILINX_VDMA_REG_ENABLE_VERTICAL_FLIP	…
#define XILINX_VDMA_ENABLE_VERTICAL_FLIP	…

/* HW specific definitions */
#define XILINX_MCDMA_MAX_CHANS_PER_DEVICE	…
#define XILINX_DMA_MAX_CHANS_PER_DEVICE	…
#define XILINX_CDMA_MAX_CHANS_PER_DEVICE	…

#define XILINX_DMA_DMAXR_ALL_IRQ_MASK	…

#define XILINX_DMA_DMASR_ALL_ERR_MASK	…

/*
 * Recoverable errors are DMA Internal error, SOF Early, EOF Early
 * and SOF Late. They are only recoverable when C_FLUSH_ON_FSYNC
 * is enabled in the h/w system.
 */
#define XILINX_DMA_DMASR_ERR_RECOVER_MASK	…

/* Axi VDMA Flush on Fsync bits */
#define XILINX_DMA_FLUSH_S2MM	…
#define XILINX_DMA_FLUSH_MM2S	…
#define XILINX_DMA_FLUSH_BOTH	…

/* Delay loop counter to prevent hardware failure */
#define XILINX_DMA_LOOP_COUNT	…

/* AXI DMA Specific Registers/Offsets */
#define XILINX_DMA_REG_SRCDSTADDR	…
#define XILINX_DMA_REG_BTT	…

/* AXI DMA Specific Masks/Bit fields */
#define XILINX_DMA_MAX_TRANS_LEN_MIN	…
#define XILINX_DMA_MAX_TRANS_LEN_MAX	…
#define XILINX_DMA_V2_MAX_TRANS_LEN_MAX	…
#define XILINX_DMA_CR_COALESCE_MAX	…
#define XILINX_DMA_CR_DELAY_MAX	…
#define XILINX_DMA_CR_CYCLIC_BD_EN_MASK	…
#define XILINX_DMA_CR_COALESCE_SHIFT	…
#define XILINX_DMA_CR_DELAY_SHIFT	…
#define XILINX_DMA_BD_SOP	…
#define XILINX_DMA_BD_EOP	…
#define XILINX_DMA_BD_COMP_MASK	…
#define XILINX_DMA_COALESCE_MAX	…
#define XILINX_DMA_NUM_DESCS	…
#define XILINX_DMA_NUM_APP_WORDS	…

/* AXI CDMA Specific Registers/Offsets */
#define XILINX_CDMA_REG_SRCADDR	…
#define XILINX_CDMA_REG_DSTADDR	…

/* AXI CDMA Specific Masks */
#define XILINX_CDMA_CR_SGMODE	…

#define xilinx_prep_dma_addr_t(addr)	…

/* AXI MCDMA Specific Registers/Offsets */
#define XILINX_MCDMA_MM2S_CTRL_OFFSET	…
#define XILINX_MCDMA_S2MM_CTRL_OFFSET	…
#define XILINX_MCDMA_CHEN_OFFSET	…
#define XILINX_MCDMA_CH_ERR_OFFSET	…
#define XILINX_MCDMA_RXINT_SER_OFFSET	…
#define XILINX_MCDMA_TXINT_SER_OFFSET	…
#define XILINX_MCDMA_CHAN_CR_OFFSET(x)	…
#define XILINX_MCDMA_CHAN_SR_OFFSET(x)	…
#define XILINX_MCDMA_CHAN_CDESC_OFFSET(x)	…
#define XILINX_MCDMA_CHAN_TDESC_OFFSET(x)	…

/* AXI MCDMA Specific Masks/Shifts */
#define XILINX_MCDMA_COALESCE_SHIFT	…
#define XILINX_MCDMA_COALESCE_MAX	…
#define XILINX_MCDMA_IRQ_ALL_MASK	…
#define XILINX_MCDMA_COALESCE_MASK	…
#define XILINX_MCDMA_CR_RUNSTOP_MASK	…
#define XILINX_MCDMA_IRQ_IOC_MASK	…
#define XILINX_MCDMA_IRQ_DELAY_MASK	…
#define XILINX_MCDMA_IRQ_ERR_MASK	…
#define XILINX_MCDMA_BD_EOP	…
#define XILINX_MCDMA_BD_SOP	…
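/*
 * Illustrative sketch only (assumption, not driver code): how the error
 * masks above combine. Per the comment on
 * XILINX_DMA_DMASR_ERR_RECOVER_MASK, the recoverable bits only count as
 * fatal when flush-on-fsync is not enabled in the hardware.
 */
static inline bool example_dmasr_err_is_fatal(u32 dmasr, bool flush_on_fsync)
{
	u32 errors = dmasr & XILINX_DMA_DMASR_ALL_ERR_MASK;

	if (flush_on_fsync)
		errors &= ~XILINX_DMA_DMASR_ERR_RECOVER_MASK;

	return errors != 0;
}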
/**
 * struct xilinx_vdma_desc_hw - Hardware Descriptor
 * @next_desc: Next Descriptor Pointer @0x00
 * @pad1: Reserved @0x04
 * @buf_addr: Buffer address @0x08
 * @buf_addr_msb: MSB of Buffer address @0x0C
 * @vsize: Vertical Size @0x10
 * @hsize: Horizontal Size @0x14
 * @stride: Number of bytes between the first
 *	    pixels of each horizontal line @0x18
 */
struct xilinx_vdma_desc_hw { … } __aligned(…);

/**
 * struct xilinx_axidma_desc_hw - Hardware Descriptor for AXI DMA
 * @next_desc: Next Descriptor Pointer @0x00
 * @next_desc_msb: MSB of Next Descriptor Pointer @0x04
 * @buf_addr: Buffer address @0x08
 * @buf_addr_msb: MSB of Buffer address @0x0C
 * @reserved1: Reserved @0x10
 * @reserved2: Reserved @0x14
 * @control: Control field @0x18
 * @status: Status field @0x1C
 * @app: APP Fields @0x20 - 0x30
 */
struct xilinx_axidma_desc_hw { … } __aligned(…);

/**
 * struct xilinx_aximcdma_desc_hw - Hardware Descriptor for AXI MCDMA
 * @next_desc: Next Descriptor Pointer @0x00
 * @next_desc_msb: MSB of Next Descriptor Pointer @0x04
 * @buf_addr: Buffer address @0x08
 * @buf_addr_msb: MSB of Buffer address @0x0C
 * @rsvd: Reserved field @0x10
 * @control: Control Information field @0x14
 * @status: Status field @0x18
 * @sideband_status: Status of sideband signals @0x1C
 * @app: APP Fields @0x20 - 0x30
 */
struct xilinx_aximcdma_desc_hw { … } __aligned(…);

/**
 * struct xilinx_cdma_desc_hw - Hardware Descriptor
 * @next_desc: Next Descriptor Pointer @0x00
 * @next_desc_msb: Next Descriptor Pointer MSB @0x04
 * @src_addr: Source address @0x08
 * @src_addr_msb: Source address MSB @0x0C
 * @dest_addr: Destination address @0x10
 * @dest_addr_msb: Destination address MSB @0x14
 * @control: Control field @0x18
 * @status: Status field @0x1C
 */
struct xilinx_cdma_desc_hw { … } __aligned(…);

/**
 * struct xilinx_vdma_tx_segment - Descriptor segment
 * @hw: Hardware descriptor
 * @node: Node in the descriptor segments list
 * @phys: Physical address of segment
 */
struct xilinx_vdma_tx_segment { … } __aligned(…);

/**
 * struct xilinx_axidma_tx_segment - Descriptor segment
 * @hw: Hardware descriptor
 * @node: Node in the descriptor segments list
 * @phys: Physical address of segment
 */
struct xilinx_axidma_tx_segment { … } __aligned(…);

/**
 * struct xilinx_aximcdma_tx_segment - Descriptor segment
 * @hw: Hardware descriptor
 * @node: Node in the descriptor segments list
 * @phys: Physical address of segment
 */
struct xilinx_aximcdma_tx_segment { … } __aligned(…);

/**
 * struct xilinx_cdma_tx_segment - Descriptor segment
 * @hw: Hardware descriptor
 * @node: Node in the descriptor segments list
 * @phys: Physical address of segment
 */
struct xilinx_cdma_tx_segment { … } __aligned(…);

/**
 * struct xilinx_dma_tx_descriptor - Per Transaction structure
 * @async_tx: Async transaction descriptor
 * @segments: TX segments list
 * @node: Node in the channel descriptors list
 * @cyclic: Check for cyclic transfers.
 * @err: Whether the descriptor has an error.
 * @residue: Residue of the completed descriptor
 */
struct xilinx_dma_tx_descriptor { … };
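/*
 * Illustrative sketch only (assumption): a transaction descriptor owns
 * a list of segments; in hardware, each segment chains to the next via
 * its next_desc pointer. Walking the software-side list looks roughly
 * like this:
 */
static inline void
example_dump_axidma_segments(struct xilinx_dma_tx_descriptor *desc)
{
	struct xilinx_axidma_tx_segment *segment;

	list_for_each_entry(segment, &desc->segments, node)
		pr_debug("segment at %pad\n", &segment->phys);
}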
/**
 * struct xilinx_dma_chan - Driver specific DMA channel structure
 * @xdev: Driver specific device structure
 * @ctrl_offset: Control registers offset
 * @desc_offset: TX descriptor registers offset
 * @lock: Descriptor operation lock
 * @pending_list: Descriptors waiting
 * @active_list: Descriptors ready to submit
 * @done_list: Complete descriptors
 * @free_seg_list: Free descriptors
 * @common: DMA common channel
 * @desc_pool: Descriptors pool
 * @dev: The dma device
 * @irq: Channel IRQ
 * @id: Channel ID
 * @direction: Transfer direction
 * @num_frms: Number of frames
 * @has_sg: Support scatter transfers
 * @cyclic: Check for cyclic transfers.
 * @genlock: Support genlock mode
 * @err: Channel has errors
 * @idle: Check for channel idle
 * @terminating: Check for channel being synchronized by user
 * @tasklet: Cleanup work after irq
 * @config: Device configuration info
 * @flush_on_fsync: Flush on Frame sync
 * @desc_pendingcount: Descriptor pending count
 * @ext_addr: Indicates 64 bit addressing is supported by dma channel
 * @desc_submitcount: Descriptor h/w submitted count
 * @seg_v: Statically allocated segments base
 * @seg_mv: Statically allocated segments base for MCDMA
 * @seg_p: Physical allocated segments base
 * @cyclic_seg_v: Statically allocated segment base for cyclic transfers
 * @cyclic_seg_p: Physical allocated segments base for cyclic dma
 * @start_transfer: Differentiate b/w DMA IP's transfer
 * @stop_transfer: Differentiate b/w DMA IP's quiesce
 * @tdest: TDEST value for mcdma
 * @has_vflip: S2MM vertical flip
 * @irq_delay: Interrupt delay timeout
 */
struct xilinx_dma_chan { … };

/**
 * enum xdma_ip_type - DMA IP type.
 *
 * @XDMA_TYPE_AXIDMA: Axi dma ip.
 * @XDMA_TYPE_CDMA: Axi cdma ip.
 * @XDMA_TYPE_VDMA: Axi vdma ip.
 * @XDMA_TYPE_AXIMCDMA: Axi MCDMA ip.
 */
enum xdma_ip_type { … };

struct xilinx_dma_config { … };

/**
 * struct xilinx_dma_device - DMA device structure
 * @regs: I/O mapped base address
 * @dev: Device Structure
 * @common: DMA device structure
 * @chan: Driver specific DMA channel
 * @flush_on_fsync: Flush on frame sync
 * @ext_addr: Indicates 64 bit addressing is supported by dma device
 * @pdev: Platform device structure pointer
 * @dma_config: DMA config structure
 * @axi_clk: DMA Axi4-lite interface clock
 * @tx_clk: DMA mm2s clock
 * @txs_clk: DMA mm2s stream clock
 * @rx_clk: DMA s2mm clock
 * @rxs_clk: DMA s2mm stream clock
 * @s2mm_chan_id: DMA s2mm channel identifier
 * @mm2s_chan_id: DMA mm2s channel identifier
 * @max_buffer_len: Max buffer length
 * @has_axistream_connected: AXI DMA connected to AXI Stream IP
 */
struct xilinx_dma_device { … };

/* Macros */
#define to_xilinx_chan(chan)	…
#define to_dma_tx_descriptor(tx)	…
#define xilinx_dma_poll_timeout(chan, reg, val, cond, delay_us, timeout_us)	…

/* IO accessors */
static inline u32 dma_read(struct xilinx_dma_chan *chan, u32 reg)
{ … }

static inline void dma_write(struct xilinx_dma_chan *chan, u32 reg, u32 value)
{ … }

static inline void vdma_desc_write(struct xilinx_dma_chan *chan, u32 reg,
				   u32 value)
{ … }

static inline u32 dma_ctrl_read(struct xilinx_dma_chan *chan, u32 reg)
{ … }

static inline void dma_ctrl_write(struct xilinx_dma_chan *chan, u32 reg,
				  u32 value)
{ … }

static inline void dma_ctrl_clr(struct xilinx_dma_chan *chan, u32 reg,
				u32 clr)
{ … }

static inline void dma_ctrl_set(struct xilinx_dma_chan *chan, u32 reg,
				u32 set)
{ … }
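/*
 * Illustrative sketch only (assumption, bodies are elided above): the
 * accessors are thin MMIO wrappers over the channel's register window,
 * with the control variants adding the per-direction ctrl_offset:
 */
static inline u32 example_dma_ctrl_read(struct xilinx_dma_chan *chan, u32 reg)
{
	return ioread32(chan->xdev->regs + chan->ctrl_offset + reg);
}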
/**
 * vdma_desc_write_64 - 64-bit descriptor write
 * @chan: Driver specific VDMA channel
 * @reg: Register to write
 * @value_lsb: lower address of the descriptor.
 * @value_msb: upper address of the descriptor.
 *
 * Since the vdma driver may write to a register offset which is not a
 * multiple of 64 bits (e.g. 0x5c), the value is written as two separate
 * 32-bit writes instead of a single 64-bit register write.
 */
static inline void vdma_desc_write_64(struct xilinx_dma_chan *chan, u32 reg,
				      u32 value_lsb, u32 value_msb)
{ … }

static inline void dma_writeq(struct xilinx_dma_chan *chan, u32 reg, u64 value)
{ … }

static inline void xilinx_write(struct xilinx_dma_chan *chan, u32 reg,
				dma_addr_t addr)
{ … }

static inline void xilinx_axidma_buf(struct xilinx_dma_chan *chan,
				     struct xilinx_axidma_desc_hw *hw,
				     dma_addr_t buf_addr, size_t sg_used,
				     size_t period_len)
{ … }

static inline void xilinx_aximcdma_buf(struct xilinx_dma_chan *chan,
				       struct xilinx_aximcdma_desc_hw *hw,
				       dma_addr_t buf_addr, size_t sg_used)
{ … }

/**
 * xilinx_dma_get_metadata_ptr - Populate metadata pointer and payload length
 * @tx: async transaction descriptor
 * @payload_len: metadata payload length
 * @max_len: metadata max length
 *
 * Return: The app field pointer.
 */
static void *xilinx_dma_get_metadata_ptr(struct dma_async_tx_descriptor *tx,
					 size_t *payload_len, size_t *max_len)
{ … }

static struct dma_descriptor_metadata_ops xilinx_dma_metadata_ops = …;

/* -----------------------------------------------------------------------------
 * Descriptors and segments alloc and free
 */

/**
 * xilinx_vdma_alloc_tx_segment - Allocate transaction segment
 * @chan: Driver specific DMA channel
 *
 * Return: The allocated segment on success and NULL on failure.
 */
static struct xilinx_vdma_tx_segment *
xilinx_vdma_alloc_tx_segment(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_cdma_alloc_tx_segment - Allocate transaction segment
 * @chan: Driver specific DMA channel
 *
 * Return: The allocated segment on success and NULL on failure.
 */
static struct xilinx_cdma_tx_segment *
xilinx_cdma_alloc_tx_segment(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_axidma_alloc_tx_segment - Allocate transaction segment
 * @chan: Driver specific DMA channel
 *
 * Return: The allocated segment on success and NULL on failure.
 */
static struct xilinx_axidma_tx_segment *
xilinx_axidma_alloc_tx_segment(struct xilinx_dma_chan *chan)
{ … }
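/*
 * Illustrative sketch only (assumption, the real bodies are elided
 * above): such segment allocators typically follow the standard
 * dma_pool pattern, allocating a zeroed hardware descriptor from
 * chan->desc_pool and recording its bus address. GFP_ATOMIC is an
 * assumption for the example.
 */
static struct xilinx_cdma_tx_segment *
example_cdma_alloc_tx_segment(struct xilinx_dma_chan *chan)
{
	struct xilinx_cdma_tx_segment *segment;
	dma_addr_t phys;

	segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
	if (!segment)
		return NULL;

	segment->phys = phys;
	return segment;
}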
/**
 * xilinx_aximcdma_alloc_tx_segment - Allocate transaction segment
 * @chan: Driver specific DMA channel
 *
 * Return: The allocated segment on success and NULL on failure.
 */
static struct xilinx_aximcdma_tx_segment *
xilinx_aximcdma_alloc_tx_segment(struct xilinx_dma_chan *chan)
{ … }

static void xilinx_dma_clean_hw_desc(struct xilinx_axidma_desc_hw *hw)
{ … }

static void xilinx_mcdma_clean_hw_desc(struct xilinx_aximcdma_desc_hw *hw)
{ … }

/**
 * xilinx_dma_free_tx_segment - Free transaction segment
 * @chan: Driver specific DMA channel
 * @segment: DMA transaction segment
 */
static void xilinx_dma_free_tx_segment(struct xilinx_dma_chan *chan,
				       struct xilinx_axidma_tx_segment *segment)
{ … }

/**
 * xilinx_mcdma_free_tx_segment - Free transaction segment
 * @chan: Driver specific DMA channel
 * @segment: DMA transaction segment
 */
static void xilinx_mcdma_free_tx_segment(struct xilinx_dma_chan *chan,
					 struct xilinx_aximcdma_tx_segment *segment)
{ … }

/**
 * xilinx_cdma_free_tx_segment - Free transaction segment
 * @chan: Driver specific DMA channel
 * @segment: DMA transaction segment
 */
static void xilinx_cdma_free_tx_segment(struct xilinx_dma_chan *chan,
					struct xilinx_cdma_tx_segment *segment)
{ … }

/**
 * xilinx_vdma_free_tx_segment - Free transaction segment
 * @chan: Driver specific DMA channel
 * @segment: DMA transaction segment
 */
static void xilinx_vdma_free_tx_segment(struct xilinx_dma_chan *chan,
					struct xilinx_vdma_tx_segment *segment)
{ … }

/**
 * xilinx_dma_alloc_tx_descriptor - Allocate transaction descriptor
 * @chan: Driver specific DMA channel
 *
 * Return: The allocated descriptor on success and NULL on failure.
 */
static struct xilinx_dma_tx_descriptor *
xilinx_dma_alloc_tx_descriptor(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_dma_free_tx_descriptor - Free transaction descriptor
 * @chan: Driver specific DMA channel
 * @desc: DMA transaction descriptor
 */
static void
xilinx_dma_free_tx_descriptor(struct xilinx_dma_chan *chan,
			      struct xilinx_dma_tx_descriptor *desc)
{ … }

/* Required functions */

/**
 * xilinx_dma_free_desc_list - Free descriptors list
 * @chan: Driver specific DMA channel
 * @list: List to parse and delete the descriptor
 */
static void xilinx_dma_free_desc_list(struct xilinx_dma_chan *chan,
				      struct list_head *list)
{ … }

/**
 * xilinx_dma_free_descriptors - Free channel descriptors
 * @chan: Driver specific DMA channel
 */
static void xilinx_dma_free_descriptors(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_dma_free_chan_resources - Free channel resources
 * @dchan: DMA channel
 */
static void xilinx_dma_free_chan_resources(struct dma_chan *dchan)
{ … }
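/*
 * Illustrative sketch only (assumption): freeing a descriptor list
 * follows the usual safe-iteration pattern, unlinking each descriptor
 * before handing it back, roughly:
 */
static void example_free_desc_list(struct xilinx_dma_chan *chan,
				   struct list_head *list)
{
	struct xilinx_dma_tx_descriptor *desc, *next;

	list_for_each_entry_safe(desc, next, list, node) {
		list_del(&desc->node);
		xilinx_dma_free_tx_descriptor(chan, desc);
	}
}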
/**
 * xilinx_dma_get_residue - Compute residue for a given descriptor
 * @chan: Driver specific dma channel
 * @desc: dma transaction descriptor
 *
 * Return: The number of residue bytes for the descriptor.
 */
static u32 xilinx_dma_get_residue(struct xilinx_dma_chan *chan,
				  struct xilinx_dma_tx_descriptor *desc)
{ … }

/**
 * xilinx_dma_chan_handle_cyclic - Cyclic dma callback
 * @chan: Driver specific dma channel
 * @desc: dma transaction descriptor
 * @flags: flags for spin lock
 */
static void xilinx_dma_chan_handle_cyclic(struct xilinx_dma_chan *chan,
					  struct xilinx_dma_tx_descriptor *desc,
					  unsigned long *flags)
{ … }

/**
 * xilinx_dma_chan_desc_cleanup - Clean channel descriptors
 * @chan: Driver specific DMA channel
 */
static void xilinx_dma_chan_desc_cleanup(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_dma_do_tasklet - Schedule completion tasklet
 * @t: Pointer to the Xilinx DMA channel structure
 */
static void xilinx_dma_do_tasklet(struct tasklet_struct *t)
{ … }

/**
 * xilinx_dma_alloc_chan_resources - Allocate channel resources
 * @dchan: DMA channel
 *
 * Return: '0' on success and failure value on error
 */
static int xilinx_dma_alloc_chan_resources(struct dma_chan *dchan)
{ … }

/**
 * xilinx_dma_calc_copysize - Calculate the amount of data to copy
 * @chan: Driver specific DMA channel
 * @size: Total data that needs to be copied
 * @done: Amount of data that has been already copied
 *
 * Return: Amount of data that has to be copied
 */
static int xilinx_dma_calc_copysize(struct xilinx_dma_chan *chan,
				    int size, int done)
{ … }

/**
 * xilinx_dma_tx_status - Get DMA transaction status
 * @dchan: DMA channel
 * @cookie: Transaction identifier
 * @txstate: Transaction state
 *
 * Return: DMA transaction status
 */
static enum dma_status xilinx_dma_tx_status(struct dma_chan *dchan,
					    dma_cookie_t cookie,
					    struct dma_tx_state *txstate)
{ … }

/**
 * xilinx_dma_stop_transfer - Halt DMA channel
 * @chan: Driver specific DMA channel
 *
 * Return: '0' on success and failure value on error
 */
static int xilinx_dma_stop_transfer(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_cdma_stop_transfer - Wait for the current transfer to complete
 * @chan: Driver specific DMA channel
 *
 * Return: '0' on success and failure value on error
 */
static int xilinx_cdma_stop_transfer(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_dma_start - Start DMA channel
 * @chan: Driver specific DMA channel
 */
static void xilinx_dma_start(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_vdma_start_transfer - Starts VDMA transfer
 * @chan: Driver specific channel struct pointer
 */
static void xilinx_vdma_start_transfer(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_cdma_start_transfer - Starts cdma transfer
 * @chan: Driver specific channel struct pointer
 */
static void xilinx_cdma_start_transfer(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_dma_start_transfer - Starts DMA transfer
 * @chan: Driver specific channel struct pointer
 */
static void xilinx_dma_start_transfer(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_mcdma_start_transfer - Starts MCDMA transfer
 * @chan: Driver specific channel struct pointer
 */
static void xilinx_mcdma_start_transfer(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_dma_issue_pending - Issue pending transactions
 * @dchan: DMA channel
 */
static void xilinx_dma_issue_pending(struct dma_chan *dchan)
{ … }
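/*
 * Client-side usage sketch (illustrative, not part of this driver):
 * the standard dmaengine flow that eventually invokes the callbacks
 * above. The channel name "tx" is an assumption for the example;
 * completion handling and channel release are omitted for brevity.
 */
static int example_client_xfer(struct device *dev, dma_addr_t buf, size_t len)
{
	struct dma_async_tx_descriptor *tx;
	struct dma_chan *chan;
	dma_cookie_t cookie;

	chan = dma_request_chan(dev, "tx");
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	tx = dmaengine_prep_slave_single(chan, buf, len, DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx) {
		dma_release_channel(chan);
		return -EIO;
	}

	cookie = dmaengine_submit(tx);		/* -> xilinx_dma_tx_submit() */
	if (dma_submit_error(cookie)) {
		dma_release_channel(chan);
		return -EIO;
	}

	dma_async_issue_pending(chan);		/* -> xilinx_dma_issue_pending() */
	return 0;
}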
/**
 * xilinx_dma_device_config - Configure the DMA channel
 * @dchan: DMA channel
 * @config: channel configuration
 *
 * Return: 0 always.
 */
static int xilinx_dma_device_config(struct dma_chan *dchan,
				    struct dma_slave_config *config)
{ … }

/**
 * xilinx_dma_complete_descriptor - Mark the active descriptor as complete
 * @chan : xilinx DMA channel
 *
 * CONTEXT: hardirq
 */
static void xilinx_dma_complete_descriptor(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_dma_reset - Reset DMA channel
 * @chan: Driver specific DMA channel
 *
 * Return: '0' on success and failure value on error
 */
static int xilinx_dma_reset(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_dma_chan_reset - Reset DMA channel and enable interrupts
 * @chan: Driver specific DMA channel
 *
 * Return: '0' on success and failure value on error
 */
static int xilinx_dma_chan_reset(struct xilinx_dma_chan *chan)
{ … }

/**
 * xilinx_mcdma_irq_handler - MCDMA Interrupt handler
 * @irq: IRQ number
 * @data: Pointer to the Xilinx MCDMA channel structure
 *
 * Return: IRQ_HANDLED/IRQ_NONE
 */
static irqreturn_t xilinx_mcdma_irq_handler(int irq, void *data)
{ … }

/**
 * xilinx_dma_irq_handler - DMA Interrupt handler
 * @irq: IRQ number
 * @data: Pointer to the Xilinx DMA channel structure
 *
 * Return: IRQ_HANDLED/IRQ_NONE
 */
static irqreturn_t xilinx_dma_irq_handler(int irq, void *data)
{ … }

/**
 * append_desc_queue - Queuing descriptor
 * @chan: Driver specific dma channel
 * @desc: dma transaction descriptor
 */
static void append_desc_queue(struct xilinx_dma_chan *chan,
			      struct xilinx_dma_tx_descriptor *desc)
{ … }

/**
 * xilinx_dma_tx_submit - Submit DMA transaction
 * @tx: Async transaction descriptor
 *
 * Return: cookie value on success and failure value on error
 */
static dma_cookie_t xilinx_dma_tx_submit(struct dma_async_tx_descriptor *tx)
{ … }

/**
 * xilinx_vdma_dma_prep_interleaved - prepare a descriptor for a
 *	DMA_SLAVE transaction
 * @dchan: DMA channel
 * @xt: Interleaved template pointer
 * @flags: transfer ack flags
 *
 * Return: Async transaction descriptor on success and NULL on failure
 */
static struct dma_async_tx_descriptor *
xilinx_vdma_dma_prep_interleaved(struct dma_chan *dchan,
				 struct dma_interleaved_template *xt,
				 unsigned long flags)
{ … }

/**
 * xilinx_cdma_prep_memcpy - prepare descriptors for a memcpy transaction
 * @dchan: DMA channel
 * @dma_dst: destination address
 * @dma_src: source address
 * @len: transfer length
 * @flags: transfer ack flags
 *
 * Return: Async transaction descriptor on success and NULL on failure
 */
static struct dma_async_tx_descriptor *
xilinx_cdma_prep_memcpy(struct dma_chan *dchan, dma_addr_t dma_dst,
			dma_addr_t dma_src, size_t len, unsigned long flags)
{ … }

/**
 * xilinx_dma_prep_slave_sg - prepare descriptors for a DMA_SLAVE transaction
 * @dchan: DMA channel
 * @sgl: scatterlist to transfer to/from
 * @sg_len: number of entries in @sgl
 * @direction: DMA direction
 * @flags: transfer ack flags
 * @context: APP words of the descriptor
 *
 * Return: Async transaction descriptor on success and NULL on failure
 */
static struct dma_async_tx_descriptor *xilinx_dma_prep_slave_sg(
	struct dma_chan *dchan, struct scatterlist *sgl, unsigned int sg_len,
	enum dma_transfer_direction direction, unsigned long flags,
	void *context)
{ … }
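/*
 * Illustrative sketch only (assumption): a slave_sg prep callback walks
 * the scatterlist with for_each_sg(), turning each entry into one or
 * more hardware segments (the splitting against the device's maximum
 * transfer length is omitted here).
 */
static void example_walk_sgl(struct xilinx_dma_chan *chan,
			     struct scatterlist *sgl, unsigned int sg_len)
{
	struct scatterlist *sg;
	unsigned int i;

	for_each_sg(sgl, sg, sg_len, i) {
		dma_addr_t addr = sg_dma_address(sg);
		size_t len = sg_dma_len(sg);

		pr_debug("entry %u: %pad + %zu\n", i, &addr, len);
	}
}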
/**
 * xilinx_dma_prep_dma_cyclic - prepare descriptors for a DMA_SLAVE transaction
 * @dchan: DMA channel
 * @buf_addr: Physical address of the buffer
 * @buf_len: Total length of the cyclic buffers
 * @period_len: length of individual cyclic buffer
 * @direction: DMA direction
 * @flags: transfer ack flags
 *
 * Return: Async transaction descriptor on success and NULL on failure
 */
static struct dma_async_tx_descriptor *xilinx_dma_prep_dma_cyclic(
	struct dma_chan *dchan, dma_addr_t buf_addr, size_t buf_len,
	size_t period_len, enum dma_transfer_direction direction,
	unsigned long flags)
{ … }

/**
 * xilinx_mcdma_prep_slave_sg - prepare descriptors for a DMA_SLAVE transaction
 * @dchan: DMA channel
 * @sgl: scatterlist to transfer to/from
 * @sg_len: number of entries in @sgl
 * @direction: DMA direction
 * @flags: transfer ack flags
 * @context: APP words of the descriptor
 *
 * Return: Async transaction descriptor on success and NULL on failure
 */
static struct dma_async_tx_descriptor *
xilinx_mcdma_prep_slave_sg(struct dma_chan *dchan, struct scatterlist *sgl,
			   unsigned int sg_len,
			   enum dma_transfer_direction direction,
			   unsigned long flags, void *context)
{ … }

/**
 * xilinx_dma_terminate_all - Halt the channel and free descriptors
 * @dchan: Driver specific DMA Channel pointer
 *
 * Return: '0' always.
 */
static int xilinx_dma_terminate_all(struct dma_chan *dchan)
{ … }

static void xilinx_dma_synchronize(struct dma_chan *dchan)
{ … }

/**
 * xilinx_vdma_channel_set_config - Configure VDMA channel
 * Run-time configuration for Axi VDMA, supports:
 * . halt the channel
 * . configure interrupt coalescing and inter-packet delay threshold
 * . start/stop parking
 * . enable genlock
 *
 * @dchan: DMA channel
 * @cfg: VDMA device configuration pointer
 *
 * Return: '0' on success and failure value on error
 */
int xilinx_vdma_channel_set_config(struct dma_chan *dchan,
				   struct xilinx_vdma_config *cfg)
{ … }
EXPORT_SYMBOL(…);

/* -----------------------------------------------------------------------------
 * Probe and remove
 */

/**
 * xilinx_dma_chan_remove - Per Channel remove function
 * @chan: Driver specific DMA channel
 */
static void xilinx_dma_chan_remove(struct xilinx_dma_chan *chan)
{ … }

static int axidma_clk_init(struct platform_device *pdev, struct clk **axi_clk,
			   struct clk **tx_clk, struct clk **rx_clk,
			   struct clk **sg_clk, struct clk **tmp_clk)
{ … }

static int axicdma_clk_init(struct platform_device *pdev, struct clk **axi_clk,
			    struct clk **dev_clk, struct clk **tmp_clk,
			    struct clk **tmp1_clk, struct clk **tmp2_clk)
{ … }

static int axivdma_clk_init(struct platform_device *pdev, struct clk **axi_clk,
			    struct clk **tx_clk, struct clk **txs_clk,
			    struct clk **rx_clk, struct clk **rxs_clk)
{ … }

static void xdma_disable_allclks(struct xilinx_dma_device *xdev)
{ … }

/**
 * xilinx_dma_chan_probe - Per Channel Probing
 * It gets channel features from the device tree entry and
 * initializes special channel handling routines
 *
 * @xdev: Driver specific device structure
 * @node: Device node
 *
 * Return: '0' on success and failure value on error
 */
static int xilinx_dma_chan_probe(struct xilinx_dma_device *xdev,
				 struct device_node *node)
{ … }
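/*
 * Illustrative sketch only (assumption): the clk_init helpers above
 * typically follow the devm_clk_get()/clk_prepare_enable() pattern per
 * clock. The clock name string is the caller's choice (e.g.
 * "s_axi_lite_aclk" is an assumption here, taken from the @axi_clk
 * description, not from this listing).
 */
static int example_get_enable_clk(struct platform_device *pdev,
				  const char *name, struct clk **out)
{
	struct clk *clk;
	int err;

	clk = devm_clk_get(&pdev->dev, name);
	if (IS_ERR(clk))
		return PTR_ERR(clk);

	err = clk_prepare_enable(clk);
	if (err)
		return err;

	*out = clk;
	return 0;
}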
/**
 * xilinx_dma_child_probe - Per child node probe
 * It gets the number of dma-channels per child node from
 * the device tree and initializes all the channels.
 *
 * @xdev: Driver specific device structure
 * @node: Device node
 *
 * Return: '0' on success and failure value on error.
 */
static int xilinx_dma_child_probe(struct xilinx_dma_device *xdev,
				  struct device_node *node)
{ … }

/**
 * of_dma_xilinx_xlate - Translation function
 * @dma_spec: Pointer to DMA specifier as found in the device tree
 * @ofdma: Pointer to DMA controller data
 *
 * Return: DMA channel pointer on success and NULL on error
 */
static struct dma_chan *of_dma_xilinx_xlate(struct of_phandle_args *dma_spec,
					    struct of_dma *ofdma)
{ … }

static const struct xilinx_dma_config axidma_config = …;

static const struct xilinx_dma_config aximcdma_config = …;

static const struct xilinx_dma_config axicdma_config = …;

static const struct xilinx_dma_config axivdma_config = …;

static const struct of_device_id xilinx_dma_of_ids[] = …;
MODULE_DEVICE_TABLE(of, xilinx_dma_of_ids);

/**
 * xilinx_dma_probe - Driver probe function
 * @pdev: Pointer to the platform_device structure
 *
 * Return: '0' on success and failure value on error
 */
static int xilinx_dma_probe(struct platform_device *pdev)
{ … }

/**
 * xilinx_dma_remove - Driver remove function
 * @pdev: Pointer to the platform_device structure
 */
static void xilinx_dma_remove(struct platform_device *pdev)
{ … }

static struct platform_driver xilinx_vdma_driver = …;

module_platform_driver(…) …;

MODULE_AUTHOR(…) …;
MODULE_DESCRIPTION(…) …;
MODULE_LICENSE(…) …;