linux/drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c

// SPDX-License-Identifier: GPL-2.0
// (C) 2017-2018 Synopsys, Inc. (www.synopsys.com)

/*
 * Synopsys DesignWare AXI DMA Controller driver.
 *
 * Author: Eugeniy Paltsev <Eugeniy.Paltsev@synopsys.com>
 */

#include <linux/bitops.h>
#include <linux/delay.h>
#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/dmapool.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/io-64-nonatomic-lo-hi.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/property.h>
#include <linux/reset.h>
#include <linux/slab.h>
#include <linux/types.h>

#include "dw-axi-dmac.h"
#include "../dmaengine.h"
#include "../virt-dma.h"

/*
 * The set of bus widths supported by the DMA controller. DW AXI DMAC supports
 * master data bus width up to 512 bits (for both AXI master interfaces), but
 * it depends on IP block configuration.
 */
#define AXI_DMA_BUSWIDTHS		  \
	(DMA_SLAVE_BUSWIDTH_1_BYTE	| \
	DMA_SLAVE_BUSWIDTH_2_BYTES	| \
	DMA_SLAVE_BUSWIDTH_4_BYTES	| \
	DMA_SLAVE_BUSWIDTH_8_BYTES	| \
	DMA_SLAVE_BUSWIDTH_16_BYTES	| \
	DMA_SLAVE_BUSWIDTH_32_BYTES	| \
	DMA_SLAVE_BUSWIDTH_64_BYTES)

#define AXI_DMA_FLAG_HAS_APB_REGS	BIT(0)
#define AXI_DMA_FLAG_HAS_RESETS		BIT(1)
#define AXI_DMA_FLAG_USE_CFG2		BIT(2)

static inline void
axi_dma_iowrite32(struct axi_dma_chip *chip, u32 reg, u32 val)
{
	iowrite32(val, chip->regs + reg);
}

static inline u32 axi_dma_ioread32(struct axi_dma_chip *chip, u32 reg)
{
	return ioread32(chip->regs + reg);
}

static inline void
axi_dma_iowrite64(struct axi_dma_chip *chip, u32 reg, u64 val)
{
	iowrite64(val, chip->regs + reg);
}

static inline u64 axi_dma_ioread64(struct axi_dma_chip *chip, u32 reg)
{
	return ioread64(chip->regs + reg);
}

static inline void
axi_chan_iowrite32(struct axi_dma_chan *chan, u32 reg, u32 val)
{
	iowrite32(val, chan->chan_regs + reg);
}

static inline u32 axi_chan_ioread32(struct axi_dma_chan *chan, u32 reg)
{
	return ioread32(chan->chan_regs + reg);
}

static inline void
axi_chan_iowrite64(struct axi_dma_chan *chan, u32 reg, u64 val)
{
	/*
	 * We split one 64 bit write into two 32 bit writes as some HW
	 * doesn't support 64 bit access.
	 */
	iowrite32(lower_32_bits(val), chan->chan_regs + reg);
	iowrite32(upper_32_bits(val), chan->chan_regs + reg + 4);
}

static inline void axi_chan_config_write(struct axi_dma_chan *chan,
					 struct axi_dma_chan_config *config)
{}

static inline void axi_dma_disable(struct axi_dma_chip *chip)
{}

static inline void axi_dma_enable(struct axi_dma_chip *chip)
{}

static inline void axi_dma_irq_disable(struct axi_dma_chip *chip)
{}

static inline void axi_dma_irq_enable(struct axi_dma_chip *chip)
{}

static inline void axi_chan_irq_disable(struct axi_dma_chan *chan, u32 irq_mask)
{}

static inline void axi_chan_irq_set(struct axi_dma_chan *chan, u32 irq_mask)
{
	axi_chan_iowrite32(chan, CH_INTSTATUS_ENA, irq_mask);
}

static inline void axi_chan_irq_sig_set(struct axi_dma_chan *chan, u32 irq_mask)
{
	axi_chan_iowrite32(chan, CH_INTSIGNAL_ENA, irq_mask);
}

static inline void axi_chan_irq_clear(struct axi_dma_chan *chan, u32 irq_mask)
{
	axi_chan_iowrite32(chan, CH_INTCLEAR, irq_mask);
}

static inline u32 axi_chan_irq_read(struct axi_dma_chan *chan)
{
	return axi_chan_ioread32(chan, CH_INTSTATUS);
}

static inline void axi_chan_disable(struct axi_dma_chan *chan)
{}

static inline void axi_chan_enable(struct axi_dma_chan *chan)
{}

static inline bool axi_chan_is_hw_enable(struct axi_dma_chan *chan)
{}

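/*
 * Controller bring-up (hedged summary of the mainline behaviour): hw_init
 * disables every channel and masks its interrupts so the controller starts
 * from a quiet state, then configures the device's DMA addressing mask.
 */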
static void axi_dma_hw_init(struct axi_dma_chip *chip)
{}

static u32 axi_chan_get_xfer_width(struct axi_dma_chan *chan, dma_addr_t src,
				   dma_addr_t dst, size_t len)
{}

static inline const char *axi_chan_name(struct axi_dma_chan *chan)
{
	return dma_chan_name(&chan->vc.chan);
}

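/*
 * Descriptor lifecycle (as in the mainline driver): an axi_dma_desc is a
 * refcounted container for a chain of hardware LLIs. The LLIs are carved
 * out of a per-channel dma_pool, so axi_desc_get() returns one LLI plus
 * its bus address and axi_desc_put() releases the whole chain back to the
 * pool once the virtual descriptor is freed.
 */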
static struct axi_dma_desc *axi_desc_alloc(u32 num)
{}

static struct axi_dma_lli *axi_desc_get(struct axi_dma_chan *chan,
					dma_addr_t *addr)
{}

static void axi_desc_put(struct axi_dma_desc *desc)
{}

static void vchan_desc_put(struct virt_dma_desc *vdesc)
{}

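/*
 * .device_tx_status hook: the usual virt-dma pattern applies here; ask
 * dma_cookie_status() first and, if the cookie is still in flight, derive
 * the residue from the descriptor looked up via vchan_find_desc() under
 * the channel lock.
 */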
static enum dma_status
dma_chan_tx_status(struct dma_chan *dchan, dma_cookie_t cookie,
		  struct dma_tx_state *txstate)
{}

static void write_desc_llp(struct axi_dma_hw_desc *desc, dma_addr_t adr)
{
	desc->lli->llp = cpu_to_le64(adr);
}

static void write_chan_llp(struct axi_dma_chan *chan, dma_addr_t adr)
{
	axi_chan_iowrite64(chan, CH_LLP, adr);
}

static void dw_axi_dma_set_byte_halfword(struct axi_dma_chan *chan, bool set)
{}
/* Called in chan locked context */
static void axi_chan_block_xfer_start(struct axi_dma_chan *chan,
				      struct axi_dma_desc *first)
{}

static void axi_chan_start_first_queued(struct axi_dma_chan *chan)
{}

static void dma_chan_issue_pending(struct dma_chan *dchan)
{}

static void dw_axi_dma_synchronize(struct dma_chan *dchan)
{}

static int dma_chan_alloc_chan_resources(struct dma_chan *dchan)
{}

static void dma_chan_free_chan_resources(struct dma_chan *dchan)
{}

static void dw_axi_dma_set_hw_channel(struct axi_dma_chan *chan, bool set)
{}

/*
 * If DW_axi_dmac sees CHx_CTL.ShadowReg_Or_LLI_Last bit of the fetched LLI
 * as 1, it understands that the current block is the final block in the
 * transfer and completes the DMA transfer operation at the end of current
 * block transfer.
 */
static void set_desc_last(struct axi_dma_hw_desc *desc)
{
	u32 val;

	val = le32_to_cpu(desc->lli->ctl_hi);
	val |= CH_CTL_H_LLI_LAST;
	desc->lli->ctl_hi = cpu_to_le32(val);
}

static void write_desc_sar(struct axi_dma_hw_desc *desc, dma_addr_t adr)
{
	desc->lli->sar = cpu_to_le64(adr);
}

static void write_desc_dar(struct axi_dma_hw_desc *desc, dma_addr_t adr)
{
	desc->lli->dar = cpu_to_le64(adr);
}

static void set_desc_src_master(struct axi_dma_hw_desc *desc)
{}

static void set_desc_dest_master(struct axi_dma_hw_desc *hw_desc,
				 struct axi_dma_desc *desc)
{}

static int dw_axi_dma_set_hw_desc(struct axi_dma_chan *chan,
				  struct axi_dma_hw_desc *hw_desc,
				  dma_addr_t mem_addr, size_t len)
{}

static size_t calculate_block_len(struct axi_dma_chan *chan,
				  dma_addr_t dma_addr, size_t buf_len,
				  enum dma_transfer_direction direction)
{}

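/*
 * Cyclic transfers (mainline behaviour): one hardware descriptor is built
 * per period and the last LLI is linked back to the first, so the block
 * chain wraps around endlessly while a completion interrupt fires for
 * each elapsed period.
 */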
static struct dma_async_tx_descriptor *
dw_axi_dma_chan_prep_cyclic(struct dma_chan *dchan, dma_addr_t dma_addr,
			    size_t buf_len, size_t period_len,
			    enum dma_transfer_direction direction,
			    unsigned long flags)
{}

static struct dma_async_tx_descriptor *
dw_axi_dma_chan_prep_slave_sg(struct dma_chan *dchan, struct scatterlist *sgl,
			      unsigned int sg_len,
			      enum dma_transfer_direction direction,
			      unsigned long flags, void *context)
{}

static struct dma_async_tx_descriptor *
dma_chan_prep_dma_memcpy(struct dma_chan *dchan, dma_addr_t dst_adr,
			 dma_addr_t src_adr, size_t len, unsigned long flags)
{}

static int dw_axi_dma_chan_slave_config(struct dma_chan *dchan,
					struct dma_slave_config *config)
{}

static void axi_chan_dump_lli(struct axi_dma_chan *chan,
			      struct axi_dma_hw_desc *desc)
{}

static void axi_chan_list_dump_lli(struct axi_dma_chan *chan,
				   struct axi_dma_desc *desc_head)
{}

static noinline void axi_chan_handle_err(struct axi_dma_chan *chan, u32 status)
{}

static void axi_chan_block_xfer_complete(struct axi_dma_chan *chan)
{}

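/*
 * Interrupt handling: the handler demuxes the shared line by reading each
 * channel's interrupt status; transfer-done bits are routed to
 * axi_chan_block_xfer_complete() and error bits to axi_chan_handle_err().
 * Per-channel interrupt lines, where present, are wired up by
 * axi_req_irqs() below.
 */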
static irqreturn_t dw_axi_dma_interrupt(int irq, void *dev_id)
{}

static int dma_chan_terminate_all(struct dma_chan *dchan)
{}

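/*
 * Pause/resume: pausing requests a channel suspend and then polls the
 * channel's status for the suspended flag for a bounded number of
 * attempts, failing if the hardware never reports it; resume simply
 * clears the suspend request again.
 */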
static int dma_chan_pause(struct dma_chan *dchan)
{}

/* Called in chan locked context */
static inline void axi_chan_resume(struct axi_dma_chan *chan)
{}

static int dma_chan_resume(struct dma_chan *dchan)
{}

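/*
 * Power management: suspend quiesces the controller (interrupts masked,
 * DMAC disabled) and gates its clocks; resume ungates the clocks and
 * re-enables the controller. The runtime PM callbacks below bounce to
 * these chip-level helpers.
 */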
static int axi_dma_suspend(struct axi_dma_chip *chip)
{}

static int axi_dma_resume(struct axi_dma_chip *chip)
{}

static int __maybe_unused axi_dma_runtime_suspend(struct device *dev)
{}

static int __maybe_unused axi_dma_runtime_resume(struct device *dev)
{}

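/*
 * DT translation: the single #dma-cells argument in a client's "dmas"
 * phandle names the hardware handshake interface. In the mainline driver
 * of_xlate grabs any free channel via dma_get_any_slave_channel() and
 * records that handshake number for later channel-config programming.
 */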
static struct dma_chan *dw_axi_dma_of_xlate(struct of_phandle_args *dma_spec,
					    struct of_dma *ofdma)
{}

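/*
 * Properties consumed here follow the snps,dw-axi-dmac DT binding (names
 * quoted from memory, so treat as an assumption): dma-channels,
 * snps,dma-masters, snps,data-width, snps,block-size, snps,priority and
 * snps,axi-max-burst-len, each validated against the IP configuration
 * limits.
 */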
static int parse_device_properties(struct axi_dma_chip *chip)
{}

static int axi_req_irqs(struct platform_device *pdev, struct axi_dma_chip *chip)
{}

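/*
 * Probe, in outline: map the register space (plus the optional APB window),
 * acquire clocks and optional resets according to the match-data flags,
 * parse the properties above, initialize each channel's virt-dma state,
 * request the IRQ(s), fill in the dma_device capabilities and callbacks,
 * and finally register with dmaengine and of_dma.
 */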
static int dw_probe(struct platform_device *pdev)
{}

static void dw_remove(struct platform_device *pdev)
{}

static const struct dev_pm_ops dw_axi_dma_pm_ops = {
	SET_RUNTIME_PM_OPS(axi_dma_runtime_suspend, axi_dma_runtime_resume, NULL)
};

static const struct of_device_id dw_dma_of_id_table[] = {
	{
		.compatible = "snps,axi-dma-1.01a"
	}, {
		.compatible = "intel,kmb-axi-dma",
		.data = (void *)AXI_DMA_FLAG_HAS_APB_REGS,
	}, {
		.compatible = "starfive,jh7110-axi-dma",
		.data = (void *)(AXI_DMA_FLAG_HAS_RESETS | AXI_DMA_FLAG_USE_CFG2),
	},
	{}
};
MODULE_DEVICE_TABLE(of, dw_dma_of_id_table);

static struct platform_driver dw_driver = {
	.probe		= dw_probe,
	.remove		= dw_remove,
	.driver = {
		.name	= KBUILD_MODNAME,
		.of_match_table = dw_dma_of_id_table,
		.pm = &dw_axi_dma_pm_ops,
	},
};
module_platform_driver(dw_driver);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("Synopsys DesignWare AXI DMA Controller platform driver");
MODULE_AUTHOR("Eugeniy Paltsev <Eugeniy.Paltsev@synopsys.com>");