// SPDX-License-Identifier: GPL-2.0-or-later /* * DMA driver for Xilinx DMA/Bridge Subsystem * * Copyright (C) 2017-2020 Xilinx, Inc. All rights reserved. * Copyright (C) 2022, Advanced Micro Devices, Inc. */ /* * The DMA/Bridge Subsystem for PCI Express allows for the movement of data * between Host memory and the DMA subsystem. It does this by operating on * 'descriptors' that contain information about the source, destination and * amount of data to transfer. These direct memory transfers can be performed * in both the Host to Card (H2C) and Card to Host (C2H) directions. The DMA can be * configured to have a single AXI4 Master interface shared by all channels * or one AXI4-Stream interface for each channel enabled. Memory transfers are * specified on a per-channel basis in descriptor linked lists, which the DMA * fetches from host memory and processes. Events such as descriptor completion * and errors are signaled using interrupts. The core also provides up to 16 * user interrupt wires that generate interrupts to the host. 
*/ #include <linux/mod_devicetable.h> #include <linux/bitfield.h> #include <linux/dmapool.h> #include <linux/regmap.h> #include <linux/dmaengine.h> #include <linux/dma/amd_xdma.h> #include <linux/platform_device.h> #include <linux/platform_data/amd_xdma.h> #include <linux/dma-mapping.h> #include <linux/pci.h> #include "../virt-dma.h" #include "xdma-regs.h" /* mmio regmap config for all XDMA registers */ static const struct regmap_config xdma_regmap_config = …; /** * struct xdma_desc_block - Descriptor block * @virt_addr: Virtual address of block start * @dma_addr: DMA address of block start */ struct xdma_desc_block { … }; /** * struct xdma_chan - Driver specific DMA channel structure * @vchan: Virtual channel * @xdev_hdl: Pointer to DMA device structure * @base: Offset of channel registers * @desc_pool: Descriptor pool * @busy: Busy flag of the channel * @dir: Transferring direction of the channel * @cfg: Transferring config of the channel * @irq: IRQ assigned to the channel */ struct xdma_chan { … }; /** * struct xdma_desc - DMA desc structure * @vdesc: Virtual DMA descriptor * @chan: DMA channel pointer * @dir: Transferring direction of the request * @desc_blocks: Hardware descriptor blocks * @dblk_num: Number of hardware descriptor blocks * @desc_num: Number of hardware descriptors * @completed_desc_num: Completed hardware descriptors * @cyclic: Cyclic transfer vs. 
scatter-gather * @interleaved_dma: Interleaved DMA transfer * @periods: Number of periods in the cyclic transfer * @period_size: Size of a period in bytes in cyclic transfers * @frames_left: Number of frames left in interleaved DMA transfer * @error: tx error flag */ struct xdma_desc { … }; #define XDMA_DEV_STATUS_REG_DMA … #define XDMA_DEV_STATUS_INIT_MSIX … /** * struct xdma_device - DMA device structure * @pdev: Platform device pointer * @dma_dev: DMA device structure * @rmap: MMIO regmap for DMA registers * @h2c_chans: Host to Card channels * @c2h_chans: Card to Host channels * @h2c_chan_num: Number of H2C channels * @c2h_chan_num: Number of C2H channels * @irq_start: Start IRQ assigned to device * @irq_num: Number of IRQ assigned to device * @status: Initialization status */ struct xdma_device { … }; #define xdma_err(xdev, fmt, args...) … #define XDMA_CHAN_NUM(_xd) … /* Get the last desc in a desc block */ static inline void *xdma_blk_last_desc(struct xdma_desc_block *block) { … } /** * xdma_link_sg_desc_blocks - Link SG descriptor blocks for DMA transfer * @sw_desc: Tx descriptor pointer */ static void xdma_link_sg_desc_blocks(struct xdma_desc *sw_desc) { … } /** * xdma_link_cyclic_desc_blocks - Link cyclic descriptor blocks for DMA transfer * @sw_desc: Tx descriptor pointer */ static void xdma_link_cyclic_desc_blocks(struct xdma_desc *sw_desc) { … } static inline struct xdma_chan *to_xdma_chan(struct dma_chan *chan) { … } static inline struct xdma_desc *to_xdma_desc(struct virt_dma_desc *vdesc) { … } /** * xdma_channel_init - Initialize DMA channel registers * @chan: DMA channel pointer */ static int xdma_channel_init(struct xdma_chan *chan) { … } /** * xdma_free_desc - Free descriptor * @vdesc: Virtual DMA descriptor */ static void xdma_free_desc(struct virt_dma_desc *vdesc) { … } /** * xdma_alloc_desc - Allocate descriptor * @chan: DMA channel pointer * @desc_num: Number of hardware descriptors * @cyclic: Whether this is a cyclic transfer */ static struct 
xdma_desc * xdma_alloc_desc(struct xdma_chan *chan, u32 desc_num, bool cyclic) { … } /** * xdma_xfer_start - Start DMA transfer * @xchan: DMA channel pointer */ static int xdma_xfer_start(struct xdma_chan *xchan) { … } /** * xdma_xfer_stop - Stop DMA transfer * @xchan: DMA channel pointer */ static int xdma_xfer_stop(struct xdma_chan *xchan) { … } /** * xdma_alloc_channels - Detect and allocate DMA channels * @xdev: DMA device pointer * @dir: Channel direction */ static int xdma_alloc_channels(struct xdma_device *xdev, enum dma_transfer_direction dir) { … } /** * xdma_issue_pending - Issue pending transactions * @chan: DMA channel pointer */ static void xdma_issue_pending(struct dma_chan *chan) { … } /** * xdma_terminate_all - Terminate all transactions * @chan: DMA channel pointer */ static int xdma_terminate_all(struct dma_chan *chan) { … } /** * xdma_synchronize - Synchronize terminated transactions * @chan: DMA channel pointer */ static void xdma_synchronize(struct dma_chan *chan) { … } /** * xdma_fill_descs() - Fill hardware descriptors for one contiguous memory chunk. * More than one descriptor will be used if the size is bigger * than XDMA_DESC_BLEN_MAX. 
* @sw_desc: Descriptor container * @src_addr: First value for the ->src_addr field * @dst_addr: First value for the ->dst_addr field * @size: Size of the contiguous memory block * @filled_descs_num: Index of the first descriptor to take care of in @sw_desc */ static inline u32 xdma_fill_descs(struct xdma_desc *sw_desc, u64 src_addr, u64 dst_addr, u32 size, u32 filled_descs_num) { … } /** * xdma_prep_device_sg - prepare a descriptor for a DMA transaction * @chan: DMA channel pointer * @sgl: Transfer scatter gather list * @sg_len: Length of scatter gather list * @dir: Transfer direction * @flags: transfer ack flags * @context: APP words of the descriptor */ static struct dma_async_tx_descriptor * xdma_prep_device_sg(struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction dir, unsigned long flags, void *context) { … } /** * xdma_prep_dma_cyclic - prepare for cyclic DMA transactions * @chan: DMA channel pointer * @address: Device DMA address to access * @size: Total length to transfer * @period_size: Period size to use for each transfer * @dir: Transfer direction * @flags: Transfer ack flags */ static struct dma_async_tx_descriptor * xdma_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t address, size_t size, size_t period_size, enum dma_transfer_direction dir, unsigned long flags) { … } /** * xdma_prep_interleaved_dma - Prepare virtual descriptor for interleaved DMA transfers * @chan: DMA channel * @xt: DMA transfer template * @flags: tx flags */ static struct dma_async_tx_descriptor * xdma_prep_interleaved_dma(struct dma_chan *chan, struct dma_interleaved_template *xt, unsigned long flags) { … } /** * xdma_device_config - Configure the DMA channel * @chan: DMA channel * @cfg: channel configuration */ static int xdma_device_config(struct dma_chan *chan, struct dma_slave_config *cfg) { … } /** * xdma_free_chan_resources - Free channel resources * @chan: DMA channel */ static void xdma_free_chan_resources(struct dma_chan *chan) { 
… } /** * xdma_alloc_chan_resources - Allocate channel resources * @chan: DMA channel */ static int xdma_alloc_chan_resources(struct dma_chan *chan) { … } static enum dma_status xdma_tx_status(struct dma_chan *chan, dma_cookie_t cookie, struct dma_tx_state *state) { … } /** * xdma_channel_isr - XDMA channel interrupt handler * @irq: IRQ number * @dev_id: Pointer to the DMA channel structure */ static irqreturn_t xdma_channel_isr(int irq, void *dev_id) { … } /** * xdma_irq_fini - Uninitialize IRQ * @xdev: DMA device pointer */ static void xdma_irq_fini(struct xdma_device *xdev) { … } /** * xdma_set_vector_reg - configure hardware IRQ registers * @xdev: DMA device pointer * @vec_tbl_start: Start of IRQ registers * @irq_start: Start of IRQ * @irq_num: Number of IRQ */ static int xdma_set_vector_reg(struct xdma_device *xdev, u32 vec_tbl_start, u32 irq_start, u32 irq_num) { … } /** * xdma_irq_init - initialize IRQs * @xdev: DMA device pointer */ static int xdma_irq_init(struct xdma_device *xdev) { … } static bool xdma_filter_fn(struct dma_chan *chan, void *param) { … } /** * xdma_disable_user_irq - Disable user interrupt * @pdev: Pointer to the platform_device structure * @irq_num: System IRQ number */ void xdma_disable_user_irq(struct platform_device *pdev, u32 irq_num) { … } EXPORT_SYMBOL(…); /** * xdma_enable_user_irq - Enable user logic interrupt * @pdev: Pointer to the platform_device structure * @irq_num: System IRQ number */ int xdma_enable_user_irq(struct platform_device *pdev, u32 irq_num) { … } EXPORT_SYMBOL(…); /** * xdma_get_user_irq - Get system IRQ number * @pdev: Pointer to the platform_device structure * @user_irq_index: User logic IRQ wire index * * Return: The system IRQ number allocated for the given wire index. 
*/ int xdma_get_user_irq(struct platform_device *pdev, u32 user_irq_index) { … } EXPORT_SYMBOL(…); /** * xdma_remove - Driver remove function * @pdev: Pointer to the platform_device structure */ static void xdma_remove(struct platform_device *pdev) { … } /** * xdma_probe - Driver probe function * @pdev: Pointer to the platform_device structure */ static int xdma_probe(struct platform_device *pdev) { … } static const struct platform_device_id xdma_id_table[] = …; MODULE_DEVICE_TABLE(platform, xdma_id_table); static struct platform_driver xdma_driver = …; module_platform_driver(…) …; MODULE_DESCRIPTION(…) …; MODULE_AUTHOR(…) …; MODULE_LICENSE(…) …;