/* linux/drivers/dma/xgene-dma.c */

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Applied Micro X-Gene SoC DMA engine Driver
 *
 * Copyright (c) 2015, Applied Micro Circuits Corporation
 * Authors: Rameshwar Prasad Sahu <[email protected]>
 *	    Loc Ho <[email protected]>
 *
 * NOTE: PM support is currently not available.
 */

#include <linux/acpi.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/dmapool.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/irq.h>
#include <linux/mod_devicetable.h>
#include <linux/module.h>
#include <linux/platform_device.h>

#include "dmaengine.h"

/* X-Gene DMA ring csr registers and bit definitions */
/*
 * NOTE(review): the numeric values / expansions of all macros below were
 * stripped during extraction ("#define NAME" with no replacement text).
 * Restore the register offsets and bitfield helpers from the original
 * driver before attempting to compile this file.
 */
#define XGENE_DMA_RING_CONFIG
#define XGENE_DMA_RING_ENABLE
#define XGENE_DMA_RING_ID
#define XGENE_DMA_RING_ID_SETUP(v)
#define XGENE_DMA_RING_ID_BUF
#define XGENE_DMA_RING_ID_BUF_SETUP(v)
#define XGENE_DMA_RING_THRESLD0_SET1
#define XGENE_DMA_RING_THRESLD0_SET1_VAL
#define XGENE_DMA_RING_THRESLD1_SET1
#define XGENE_DMA_RING_THRESLD1_SET1_VAL
#define XGENE_DMA_RING_HYSTERESIS
#define XGENE_DMA_RING_HYSTERESIS_VAL
#define XGENE_DMA_RING_STATE
#define XGENE_DMA_RING_STATE_WR_BASE
#define XGENE_DMA_RING_NE_INT_MODE
#define XGENE_DMA_RING_NE_INT_MODE_SET(m, v)
#define XGENE_DMA_RING_NE_INT_MODE_RESET(m, v)
#define XGENE_DMA_RING_CLKEN
#define XGENE_DMA_RING_SRST
#define XGENE_DMA_RING_MEM_RAM_SHUTDOWN
#define XGENE_DMA_RING_BLK_MEM_RDY
#define XGENE_DMA_RING_BLK_MEM_RDY_VAL
#define XGENE_DMA_RING_ID_GET(owner, num)
#define XGENE_DMA_RING_DST_ID(v)
#define XGENE_DMA_RING_CMD_OFFSET
#define XGENE_DMA_RING_CMD_BASE_OFFSET(v)
#define XGENE_DMA_RING_COHERENT_SET(m)
#define XGENE_DMA_RING_ADDRL_SET(m, v)
#define XGENE_DMA_RING_ADDRH_SET(m, v)
#define XGENE_DMA_RING_ACCEPTLERR_SET(m)
#define XGENE_DMA_RING_SIZE_SET(m, v)
#define XGENE_DMA_RING_RECOMBBUF_SET(m)
#define XGENE_DMA_RING_RECOMTIMEOUTL_SET(m)
#define XGENE_DMA_RING_RECOMTIMEOUTH_SET(m)
#define XGENE_DMA_RING_SELTHRSH_SET(m)
#define XGENE_DMA_RING_TYPE_SET(m, v)

/* X-Gene DMA device csr registers and bit definitions */
/* NOTE(review): macro values elided by extraction; see original driver. */
#define XGENE_DMA_IPBRR
#define XGENE_DMA_DEV_ID_RD(v)
#define XGENE_DMA_BUS_ID_RD(v)
#define XGENE_DMA_REV_NO_RD(v)
#define XGENE_DMA_GCR
#define XGENE_DMA_CH_SETUP(v)
#define XGENE_DMA_ENABLE(v)
#define XGENE_DMA_DISABLE(v)
#define XGENE_DMA_RAID6_CONT
#define XGENE_DMA_RAID6_MULTI_CTRL(v)
#define XGENE_DMA_INT
#define XGENE_DMA_INT_MASK
#define XGENE_DMA_INT_ALL_MASK
#define XGENE_DMA_INT_ALL_UNMASK
#define XGENE_DMA_INT_MASK_SHIFT
#define XGENE_DMA_RING_INT0_MASK
#define XGENE_DMA_RING_INT1_MASK
#define XGENE_DMA_RING_INT2_MASK
#define XGENE_DMA_RING_INT3_MASK
#define XGENE_DMA_RING_INT4_MASK
#define XGENE_DMA_CFG_RING_WQ_ASSOC
#define XGENE_DMA_ASSOC_RING_MNGR1
#define XGENE_DMA_MEM_RAM_SHUTDOWN
#define XGENE_DMA_BLK_MEM_RDY
#define XGENE_DMA_BLK_MEM_RDY_VAL
#define XGENE_DMA_RING_CMD_SM_OFFSET

/* X-Gene SoC EFUSE csr register and bit definition */
#define XGENE_SOC_JTAG1_SHADOW
#define XGENE_DMA_PQ_DISABLE_MASK

/* X-Gene DMA Descriptor format */
/* NOTE(review): macro values elided by extraction; see original driver. */
#define XGENE_DMA_DESC_NV_BIT
#define XGENE_DMA_DESC_IN_BIT
#define XGENE_DMA_DESC_C_BIT
#define XGENE_DMA_DESC_DR_BIT
#define XGENE_DMA_DESC_ELERR_POS
#define XGENE_DMA_DESC_RTYPE_POS
#define XGENE_DMA_DESC_LERR_POS
#define XGENE_DMA_DESC_BUFLEN_POS
#define XGENE_DMA_DESC_HOENQ_NUM_POS
#define XGENE_DMA_DESC_ELERR_RD(m)
#define XGENE_DMA_DESC_LERR_RD(m)
#define XGENE_DMA_DESC_STATUS(elerr, lerr)

/* X-Gene DMA descriptor empty s/w signature */
#define XGENE_DMA_DESC_EMPTY_SIGNATURE

/* X-Gene DMA configurable parameters defines */
#define XGENE_DMA_RING_NUM
#define XGENE_DMA_BUFNUM
#define XGENE_DMA_CPU_BUFNUM
#define XGENE_DMA_RING_OWNER_DMA
#define XGENE_DMA_RING_OWNER_CPU
#define XGENE_DMA_RING_TYPE_REGULAR
#define XGENE_DMA_RING_WQ_DESC_SIZE
#define XGENE_DMA_RING_NUM_CONFIG
#define XGENE_DMA_MAX_CHANNEL
#define XGENE_DMA_XOR_CHANNEL
#define XGENE_DMA_PQ_CHANNEL
#define XGENE_DMA_MAX_BYTE_CNT
#define XGENE_DMA_MAX_64B_DESC_BYTE_CNT
#define XGENE_DMA_MAX_XOR_SRC
#define XGENE_DMA_16K_BUFFER_LEN_CODE
#define XGENE_DMA_INVALID_LEN_CODE

/* X-Gene DMA descriptor error codes */
/* NOTE(review): macro values elided by extraction; see original driver. */
#define ERR_DESC_AXI
#define ERR_BAD_DESC
#define ERR_READ_DATA_AXI
#define ERR_WRITE_DATA_AXI
#define ERR_FBP_TIMEOUT
#define ERR_ECC
#define ERR_DIFF_SIZE
#define ERR_SCT_GAT_LEN
#define ERR_CRC_ERR
#define ERR_CHKSUM
#define ERR_DIF

/* X-Gene DMA error interrupt codes */
#define ERR_DIF_SIZE_INT
#define ERR_GS_ERR_INT
#define ERR_FPB_TIMEO_INT
#define ERR_WFIFO_OVF_INT
#define ERR_RFIFO_OVF_INT
#define ERR_WR_TIMEO_INT
#define ERR_RD_TIMEO_INT
#define ERR_WR_ERR_INT
#define ERR_RD_ERR_INT
#define ERR_BAD_DESC_INT
#define ERR_DESC_DST_INT
#define ERR_DESC_SRC_INT

/* X-Gene DMA flyby operation code (XOR with 2..5 sources) */
#define FLYBY_2SRC_XOR
#define FLYBY_3SRC_XOR
#define FLYBY_4SRC_XOR
#define FLYBY_5SRC_XOR

/* X-Gene DMA SW descriptor flags */
#define XGENE_DMA_FLAG_64B_DESC

/* Define to dump X-Gene DMA descriptor */
#define XGENE_DMA_DESC_DUMP(desc, m)

/* Container-of style accessors; expansions elided by extraction */
#define to_dma_desc_sw(tx)
#define to_dma_chan(dchan)

/* Per-channel dev_dbg()/dev_err() wrappers; expansions elided by extraction */
#define chan_dbg(chan, fmt, arg...)
#define chan_err(chan, fmt, arg...)

/*
 * NOTE(review): the bodies of the following struct/enum definitions were
 * elided by extraction; member layout must come from the original driver.
 */
/* Hardware descriptor layout written to the DMA rings. */
struct xgene_dma_desc_hw {};

/* Supported ring configuration sizes. */
enum xgene_dma_ring_cfgsize {};

/* Per-ring bookkeeping (id, owner, buf num, csr state, etc. — elided). */
struct xgene_dma_ring {};

/* Software descriptor wrapping a dma_async_tx_descriptor. */
struct xgene_dma_desc_sw {};

/**
 * struct xgene_dma_chan - internal representation of an X-Gene DMA channel
 * @dma_chan: dmaengine channel object member
 * @pdma: X-Gene DMA device structure reference
 * @dev: struct device reference for dma mapping api
 * @id: raw id of this channel
 * @rx_irq: channel IRQ
 * @name: name of X-Gene DMA channel
 * @lock: serializes enqueue/dequeue operations to the descriptor pool
 * @pending: number of transaction requests pushed to the DMA controller for
 *	execution, but still waiting for completion
 * @max_outstanding: max number of outstanding requests we can push to channel
 * @ld_pending: descriptors which are queued to run, but have not yet been
 *	submitted to the hardware for execution
 * @ld_running: descriptors which are currently being executed by the hardware
 * @ld_completed: descriptors which have finished execution by the hardware.
 *	These descriptors have already had their cleanup actions run. They
 *	are waiting for the ACK bit to be set by the async tx API.
 * @desc_pool: descriptor pool for DMA operations
 * @tasklet: bottom half where all completed descriptors are cleaned up
 * @tx_ring: transmit ring descriptor that we use to prepare actual
 *	descriptors for further executions
 * @rx_ring: receive ring descriptor that we use to get completed DMA
 *	descriptors during cleanup time
 */
/* NOTE(review): struct body elided by extraction; see original driver. */
struct xgene_dma_chan {};

/**
 * struct xgene_dma - internal representation of an X-Gene DMA device
 * @dev: reference to this device's struct device
 * @clk: reference to this device's clock
 * @err_irq: DMA error irq number
 * @ring_num: start id number for DMA ring
 * @csr_dma: base for DMA register access
 * @csr_ring: base for DMA ring register access
 * @csr_ring_cmd: base for DMA ring command register access
 * @csr_efuse: base for efuse register access
 * @dma_dev: embedded struct dma_device
 * @chan: reference to X-Gene DMA channels
 */
/* NOTE(review): struct body elided by extraction; see original driver. */
struct xgene_dma {};

/*
 * Human-readable strings for descriptor and device error codes.
 * NOTE(review): array initializers elided by extraction ("=;" is not
 * valid C); restore from the original driver.
 */
static const char * const xgene_dma_desc_err[] =;

static const char * const xgene_dma_err[] =;

/*
 * NOTE(review): all function bodies in this file were elided by extraction
 * ("{}" stubs); the comments below describe intent inferred from names and
 * the surrounding macros — confirm against the original driver.
 */

/* Report whether the PQ (RAID6) engine is usable; presumably reads the
 * efuse PQ-disable bit (XGENE_DMA_PQ_DISABLE_MASK) — TODO confirm. */
static bool is_pq_enabled(struct xgene_dma *pdma)
{}

/* Encode a transfer length into the hw descriptor length code field. */
static u64 xgene_dma_encode_len(size_t len)
{}

/* Map an XOR source count (2..5) to a FLYBY_*SRC_XOR opcode — TODO confirm. */
static u8 xgene_dma_encode_xor_flyby(u32 src_cnt)
{}

/* Fill one extended-8B source-buffer slot from *paddr, consuming up to the
 * per-descriptor byte limit from *len — TODO confirm semantics. */
static void xgene_dma_set_src_buffer(__le64 *ext8, size_t *len,
				     dma_addr_t *paddr)
{}

/* Return a pointer to the idx'th extended-8B word of a hw descriptor. */
static __le64 *xgene_dma_lookup_ext8(struct xgene_dma_desc_hw *desc, int idx)
{}

/* Initialize common fields of a hw descriptor, including the destination
 * (completion) ring number. Body elided by extraction. */
static void xgene_dma_init_desc(struct xgene_dma_desc_hw *desc,
				u16 dst_ring_num)
{}

/* Build the hw descriptor(s) for an XOR operation over src_cnt sources,
 * applying the scf coefficients (also used for PQ) — TODO confirm. */
static void xgene_dma_prep_xor_desc(struct xgene_dma_chan *chan,
				    struct xgene_dma_desc_sw *desc_sw,
				    dma_addr_t *dst, dma_addr_t *src,
				    u32 src_cnt, size_t *nbytes,
				    const u8 *scf)
{}

/* dmaengine .tx_submit callback: assign a cookie and queue the descriptor
 * on the channel's pending list — TODO confirm. */
static dma_cookie_t xgene_dma_tx_submit(struct dma_async_tx_descriptor *tx)
{}

/* Return a finished descriptor to the channel's dma pool. */
static void xgene_dma_clean_descriptor(struct xgene_dma_chan *chan,
				       struct xgene_dma_desc_sw *desc)
{}

/* Allocate and initialize one sw descriptor from the channel's dma pool. */
static struct xgene_dma_desc_sw *xgene_dma_alloc_descriptor(
				 struct xgene_dma_chan *chan)
{}

/**
 * xgene_dma_clean_completed_descriptor - free all descriptors which
 * have been completed and acked
 * @chan: X-Gene DMA channel
 *
 * This function is used on all completed and acked descriptors.
 */
static void xgene_dma_clean_completed_descriptor(struct xgene_dma_chan *chan)
{}

/**
 * xgene_dma_run_tx_complete_actions - cleanup a single link descriptor
 * @chan: X-Gene DMA channel
 * @desc: descriptor to cleanup and free
 *
 * This function is used on a descriptor which has been executed by the DMA
 * controller. It will run any callbacks, submit any dependencies.
 */
static void xgene_dma_run_tx_complete_actions(struct xgene_dma_chan *chan,
					      struct xgene_dma_desc_sw *desc)
{}

/**
 * xgene_dma_clean_running_descriptor - move the completed descriptor from
 * ld_running to ld_completed
 * @chan: X-Gene DMA channel
 * @desc: the descriptor which is completed
 *
 * Free the descriptor directly if acked by async_tx api,
 * else move it to queue ld_completed.
 */
static void xgene_dma_clean_running_descriptor(struct xgene_dma_chan *chan,
					       struct xgene_dma_desc_sw *desc)
{}

/* Push one prepared sw descriptor to the hardware tx ring — body elided. */
static void xgene_chan_xfer_request(struct xgene_dma_chan *chan,
				    struct xgene_dma_desc_sw *desc_sw)
{}

/**
 * xgene_chan_xfer_ld_pending - push any pending transactions to hw
 * @chan : X-Gene DMA channel
 *
 * LOCKING: must hold chan->lock
 */
static void xgene_chan_xfer_ld_pending(struct xgene_dma_chan *chan)
{}

/**
 * xgene_dma_cleanup_descriptors - cleanup link descriptors which are completed
 * and move them to ld_completed to free until flag 'ack' is set
 * @chan: X-Gene DMA channel
 *
 * This function is used on descriptors which have been executed by the DMA
 * controller. It will run any callbacks, submit any dependencies, then
 * free these descriptors if flag 'ack' is set.
 */
static void xgene_dma_cleanup_descriptors(struct xgene_dma_chan *chan)
{}

/* dmaengine .device_alloc_chan_resources callback — body elided. */
static int xgene_dma_alloc_chan_resources(struct dma_chan *dchan)
{}

/**
 * xgene_dma_free_desc_list - Free all descriptors in a queue
 * @chan: X-Gene DMA channel
 * @list: the list to free
 *
 * LOCKING: must hold chan->lock
 */
static void xgene_dma_free_desc_list(struct xgene_dma_chan *chan,
				     struct list_head *list)
{}

/* dmaengine .device_free_chan_resources callback — body elided. */
static void xgene_dma_free_chan_resources(struct dma_chan *dchan)
{}

/* dmaengine .device_prep_dma_xor callback: prepare an XOR of src_cnt
 * sources into dst — body elided by extraction. */
static struct dma_async_tx_descriptor *xgene_dma_prep_xor(
	struct dma_chan *dchan, dma_addr_t dst,	dma_addr_t *src,
	u32 src_cnt, size_t len, unsigned long flags)
{}

/* dmaengine .device_prep_dma_pq callback: prepare a P+Q (RAID6) operation
 * with coefficients scf — body elided by extraction. */
static struct dma_async_tx_descriptor *xgene_dma_prep_pq(
	struct dma_chan *dchan, dma_addr_t *dst, dma_addr_t *src,
	u32 src_cnt, const u8 *scf, size_t len, unsigned long flags)
{}

/* dmaengine .device_issue_pending callback — body elided. */
static void xgene_dma_issue_pending(struct dma_chan *dchan)
{}

/* dmaengine .device_tx_status callback — body elided. */
static enum dma_status xgene_dma_tx_status(struct dma_chan *dchan,
					   dma_cookie_t cookie,
					   struct dma_tx_state *txstate)
{}

/* Bottom-half tasklet: presumably runs xgene_dma_cleanup_descriptors()
 * for the channel — TODO confirm. */
static void xgene_dma_tasklet_cb(struct tasklet_struct *t)
{}

/* Per-channel completion-ring interrupt handler — body elided. */
static irqreturn_t xgene_dma_chan_ring_isr(int irq, void *id)
{}

/* Device-level error interrupt handler — body elided. */
static irqreturn_t xgene_dma_err_isr(int irq, void *id)
{}

/* Write the cached ring state words to the ring csr — body elided. */
static void xgene_dma_wr_ring_state(struct xgene_dma_ring *ring)
{}

/* Zero the cached ring state and write it out — TODO confirm. */
static void xgene_dma_clr_ring_state(struct xgene_dma_ring *ring)
{}

/* Program ring id, buffer, size and type registers for one ring — elided. */
static void xgene_dma_setup_ring(struct xgene_dma_ring *ring)
{}

/* Tear down one ring's hardware configuration — body elided. */
static void xgene_dma_clear_ring(struct xgene_dma_ring *ring)
{}

/* Compute/record the ring's command register address — TODO confirm. */
static void xgene_dma_set_ring_cmd(struct xgene_dma_ring *ring)
{}

/* Translate a cfgsize enum into the ring size in bytes — body elided. */
static int xgene_dma_get_ring_size(struct xgene_dma_chan *chan,
				   enum xgene_dma_ring_cfgsize cfgsize)
{}

/* Free one ring: clear hw config and release its descriptor memory. */
static void xgene_dma_delete_ring_one(struct xgene_dma_ring *ring)
{}

/* Delete both rings (tx and rx) belonging to a channel — TODO confirm. */
static void xgene_dma_delete_chan_rings(struct xgene_dma_chan *chan)
{}

/* Allocate and configure one ring of the given cfgsize for a channel. */
static int xgene_dma_create_ring_one(struct xgene_dma_chan *chan,
				     struct xgene_dma_ring *ring,
				     enum xgene_dma_ring_cfgsize cfgsize)
{}

/* Create the tx and rx rings for one channel — body elided. */
static int xgene_dma_create_chan_rings(struct xgene_dma_chan *chan)
{}

/* Create rings for every channel of the device — body elided. */
static int xgene_dma_init_rings(struct xgene_dma *pdma)
{}

/* Enable the DMA engine via the global control register (XGENE_DMA_GCR). */
static void xgene_dma_enable(struct xgene_dma *pdma)
{}

/* Disable the DMA engine via the global control register. */
static void xgene_dma_disable(struct xgene_dma *pdma)
{}

/* Mask all DMA error/ring interrupts — body elided. */
static void xgene_dma_mask_interrupts(struct xgene_dma *pdma)
{}

/* Unmask all DMA error/ring interrupts — body elided. */
static void xgene_dma_unmask_interrupts(struct xgene_dma *pdma)
{}

/* One-time hardware initialization (ring/WQ association, RAID6 control,
 * revision read-out) — TODO confirm against original driver. */
static void xgene_dma_init_hw(struct xgene_dma *pdma)
{}

/* Bring the ring manager out of shutdown and wait for memory ready. */
static int xgene_dma_init_ring_mngr(struct xgene_dma *pdma)
{}

/* Bring the DMA block RAM out of shutdown and wait for memory ready. */
static int xgene_dma_init_mem(struct xgene_dma *pdma)
{}

/* Request the error IRQ and each channel's ring IRQ — body elided. */
static int xgene_dma_request_irqs(struct xgene_dma *pdma)
{}

/* Free all IRQs requested by xgene_dma_request_irqs(). */
static void xgene_dma_free_irqs(struct xgene_dma *pdma)
{}

/* Advertise the dmaengine capabilities (XOR and, when is_pq_enabled(),
 * PQ) for one channel on its dma_device — TODO confirm. */
static void xgene_dma_set_caps(struct xgene_dma_chan *chan,
			       struct dma_device *dma_dev)
{}

/* Register the id'th channel's dma_device with the dmaengine core. */
static int xgene_dma_async_register(struct xgene_dma *pdma, int id)
{}

/* Register every channel; unwinds on partial failure — TODO confirm. */
static int xgene_dma_init_async(struct xgene_dma *pdma)
{}

/* Unregister all channels' dma_devices from the dmaengine core. */
static void xgene_dma_async_unregister(struct xgene_dma *pdma)
{}

/* Initialize per-channel software state (ids, names, locks, lists). */
static void xgene_dma_init_channels(struct xgene_dma *pdma)
{}

/* Map csr regions and fetch IRQs/clock from platform resources. */
static int xgene_dma_get_resources(struct platform_device *pdev,
				   struct xgene_dma *pdma)
{}

/* Platform driver probe: allocate device state, init hw, rings, IRQs and
 * register with dmaengine — body elided by extraction. */
static int xgene_dma_probe(struct platform_device *pdev)
{}

/* Platform driver remove: reverse of probe — body elided. */
static void xgene_dma_remove(struct platform_device *pdev)
{}

/*
 * NOTE(review): all initializers below were elided by extraction
 * ("=;" and empty macro argument lists are not valid C); restore the
 * ACPI/OF match tables, driver struct and MODULE_* strings from the
 * original driver.
 */
#ifdef CONFIG_ACPI
static const struct acpi_device_id xgene_dma_acpi_match_ptr[] =;
MODULE_DEVICE_TABLE(acpi, xgene_dma_acpi_match_ptr);
#endif

static const struct of_device_id xgene_dma_of_match_ptr[] =;
MODULE_DEVICE_TABLE(of, xgene_dma_of_match_ptr);

static struct platform_driver xgene_dma_driver =;

module_platform_driver();

MODULE_DESCRIPTION();
MODULE_AUTHOR();
MODULE_AUTHOR();
MODULE_LICENSE();
MODULE_VERSION();