/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Virtual DMA channel support for DMAengine
 *
 * Copyright (C) 2012 Russell King
 */
/*
 * NOTE(review): all struct and inline-function bodies in this view are
 * elided ("{ … }"); comments below describe only what the visible
 * declarations and existing kernel-doc establish.
 */
#ifndef VIRT_DMA_H
#define VIRT_DMA_H

#include <linux/dmaengine.h>
#include <linux/interrupt.h>

#include "dmaengine.h"

/* Per-descriptor bookkeeping for the virtual-channel core (body elided). */
struct virt_dma_desc { … };

/* A virtual DMA channel layered over struct dma_chan (body elided). */
struct virt_dma_chan { … };

/*
 * Convert a struct dma_chan into its enclosing virt_dma_chan.
 * (Body elided; presumably a container_of() — confirm in full source.)
 */
static inline struct virt_dma_chan *to_virt_chan(struct dma_chan *chan) { … }

/* Out-of-line helpers: declared here, defined in the implementation file. */
void vchan_dma_desc_free_list(struct virt_dma_chan *vc, struct list_head *head);
void vchan_init(struct virt_dma_chan *vc, struct dma_device *dmadev);
struct virt_dma_desc *vchan_find_desc(struct virt_dma_chan *, dma_cookie_t);
extern dma_cookie_t vchan_tx_submit(struct dma_async_tx_descriptor *);
extern int vchan_tx_desc_free(struct dma_async_tx_descriptor *);

/**
 * vchan_tx_prep - prepare a descriptor
 * @vc: virtual channel allocating this descriptor
 * @vd: virtual descriptor to prepare
 * @tx_flags: flags argument passed in to prepare function
 */
static inline struct dma_async_tx_descriptor *vchan_tx_prep(struct virt_dma_chan *vc,
	struct virt_dma_desc *vd, unsigned long tx_flags)
{ … }

/**
 * vchan_issue_pending - move submitted descriptors to issued list
 * @vc: virtual channel to update
 *
 * vc.lock must be held by caller
 */
static inline bool vchan_issue_pending(struct virt_dma_chan *vc) { … }

/**
 * vchan_cookie_complete - report completion of a descriptor
 * @vd: virtual descriptor to update
 *
 * vc.lock must be held by caller
 */
static inline void vchan_cookie_complete(struct virt_dma_desc *vd) { … }

/**
 * vchan_vdesc_fini - Free or reuse a descriptor
 * @vd: virtual descriptor to free/reuse
 */
static inline void vchan_vdesc_fini(struct virt_dma_desc *vd) { … }

/**
 * vchan_cyclic_callback - report the completion of a period
 * @vd: virtual descriptor
 */
static inline void vchan_cyclic_callback(struct virt_dma_desc *vd) { … }

/**
 * vchan_terminate_vdesc - Disable pending cyclic callback
 * @vd: virtual descriptor to be terminated
 *
 * vc.lock must be held by caller
 */
static inline void vchan_terminate_vdesc(struct virt_dma_desc *vd) { … }

/**
 * vchan_next_desc - peek at the next descriptor to be processed
 * @vc: virtual channel to obtain descriptor from
 *
 * vc.lock must be held by caller
 */
static inline struct virt_dma_desc *vchan_next_desc(struct virt_dma_chan *vc) { … }

/**
 * vchan_get_all_descriptors - obtain all submitted and issued descriptors
 * @vc: virtual channel to get descriptors from
 * @head: list of descriptors found
 *
 * vc.lock must be held by caller
 *
 * Removes all submitted and issued descriptors from internal lists, and
 * provides a list of all descriptors found
 */
static inline void vchan_get_all_descriptors(struct virt_dma_chan *vc,
	struct list_head *head)
{ … }

/*
 * Release channel resources (body elided; no kernel-doc in original —
 * see implementation for exact semantics).
 */
static inline void vchan_free_chan_resources(struct virt_dma_chan *vc) { … }

/**
 * vchan_synchronize() - synchronize callback execution to the current context
 * @vc: virtual channel to synchronize
 *
 * Makes sure that all scheduled or active callbacks have finished running. For
 * proper operation the caller has to ensure that no new callbacks are scheduled
 * after the invocation of this function started.
 * Free up the terminated cyclic descriptor to prevent memory leakage.
 */
static inline void vchan_synchronize(struct virt_dma_chan *vc) { … }

#endif