#include <linux/sched/mm.h>
#include <linux/dma-fence-array.h>
#include <drm/drm_gem.h>
#include "display/intel_display.h"
#include "display/intel_frontbuffer.h"
#include "gem/i915_gem_lmem.h"
#include "gem/i915_gem_object_frontbuffer.h"
#include "gem/i915_gem_tiling.h"
#include "gt/intel_engine.h"
#include "gt/intel_engine_heartbeat.h"
#include "gt/intel_gt.h"
#include "gt/intel_gt_pm.h"
#include "gt/intel_gt_requests.h"
#include "gt/intel_tlb.h"
#include "i915_drv.h"
#include "i915_gem_evict.h"
#include "i915_sw_fence_work.h"
#include "i915_trace.h"
#include "i915_vma.h"
#include "i915_vma_resource.h"
static inline void assert_vma_held_evict(const struct i915_vma *vma)
{ … }
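/* Slab cache backing all struct i915_vma allocations; see i915_vma_module_init(). */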
static struct kmem_cache *slab_vmas;
static struct i915_vma *i915_vma_alloc(void)
{ … }
static void i915_vma_free(struct i915_vma *vma)
{ … }
#if IS_ENABLED(CONFIG_DRM_I915_ERRLOG_GEM) && IS_ENABLED(CONFIG_DRM_DEBUG_MM)
#include <linux/stackdepot.h>
static void vma_print_allocator(struct i915_vma *vma, const char *reason)
{
	char buf[512];

	/* Dump the stack trace captured by drm_mm at insertion time, if any. */
	if (!vma->node.stack) {
		drm_dbg(vma->obj->base.dev,
			"vma.node [%08llx + %08llx] %s: unknown owner\n",
			vma->node.start, vma->node.size, reason);
		return;
	}

	stack_depot_snprint(vma->node.stack, buf, sizeof(buf), 0);
	drm_dbg(vma->obj->base.dev,
		"vma.node [%08llx + %08llx] %s: inserted at %s\n",
		vma->node.start, vma->node.size, reason, buf);
}
#else
static void vma_print_allocator(struct i915_vma *vma, const char *reason)
{ … }
#endif
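/*
 * i915_active hooks: __i915_vma_active() keeps the vma (and its backing
 * object) alive while requests using it are in flight, and
 * __i915_vma_retire() runs once the last such request completes.
 */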
static inline struct i915_vma *active_to_vma(struct i915_active *ref)
{ … }
static int __i915_vma_active(struct i915_active *ref)
{ … }
static void __i915_vma_retire(struct i915_active *ref)
{ … }
static struct i915_vma *
vma_create(struct drm_i915_gem_object *obj,
	   struct i915_address_space *vm,
	   const struct i915_gtt_view *view)
{ … }
static struct i915_vma *
i915_vma_lookup(struct drm_i915_gem_object *obj,
		struct i915_address_space *vm,
		const struct i915_gtt_view *view)
{ … }
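/*
 * i915_vma_instance() returns the singleton vma for the (obj, vm, view)
 * triplet, creating one via vma_create() if i915_vma_lookup() finds no
 * match. Returns the vma or an ERR_PTR.
 */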
struct i915_vma *
i915_vma_instance(struct drm_i915_gem_object *obj,
		  struct i915_address_space *vm,
		  const struct i915_gtt_view *view)
{ … }
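/*
 * Binding can be deferred: i915_vma_work wraps the actual PTE insertion in a
 * dma_fence_work (bind_ops: __vma_bind()/__vma_release()) so it can be
 * ordered behind whatever fences the binding depends on.
 */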
struct i915_vma_work { … };
static void __vma_bind(struct dma_fence_work *work)
{ … }
static void __vma_release(struct dma_fence_work *work)
{ … }
static const struct dma_fence_work_ops bind_ops = …;
struct i915_vma_work *i915_vma_work(void)
{ … }
int i915_vma_wait_for_bind(struct i915_vma *vma)
{ … }
#if IS_ENABLED(CONFIG_DRM_I915_DEBUG_GEM)
static int i915_vma_verify_bind_complete(struct i915_vma *vma)
{
	struct dma_fence *fence = i915_active_fence_get(&vma->active.excl);
	int err;

	if (!fence)
		return 0;

	/* The exclusive fence tracks the bind; unsignaled means still in flight. */
	if (dma_fence_is_signaled(fence))
		err = fence->error;
	else
		err = -EBUSY;

	dma_fence_put(fence);

	return err;
}
#else
#define i915_vma_verify_bind_complete(_vma) …
#endif
I915_SELFTEST_EXPORT void
i915_vma_resource_init_from_vma(struct i915_vma_resource *vma_res,
				struct i915_vma *vma)
{ … }
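/*
 * i915_vma_bind() sets up the PTEs for @vma in its address space, using the
 * DMA addresses from the object's (or view's) scatter-gather table and the
 * given @pat_index. @work and @vma_res must be preallocated by the caller;
 * @vma_res is consumed or freed.
 */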
int i915_vma_bind(struct i915_vma *vma,
		  unsigned int pat_index,
		  u32 flags,
		  struct i915_vma_work *work,
		  struct i915_vma_resource *vma_res)
{ … }
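/*
 * i915_vma_pin_iomap() maps a pinned GGTT vma for CPU access through the
 * mappable aperture (or directly from local memory on discrete parts) and
 * takes an extra pin that i915_vma_unpin_iomap() releases.
 */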
void __iomem *i915_vma_pin_iomap(struct i915_vma *vma)
{ … }
void i915_vma_flush_writes(struct i915_vma *vma)
{ … }
void i915_vma_unpin_iomap(struct i915_vma *vma)
{ … }
void i915_vma_unpin_and_release(struct i915_vma **p_vma, unsigned int flags)
{ … }
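/*
 * i915_vma_misplaced() reports whether the vma's current node still satisfies
 * the requested size, alignment and placement flags; a misplaced vma must be
 * unbound before it can be rebound at a suitable address.
 */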
bool i915_vma_misplaced(const struct i915_vma *vma,
			u64 size, u64 alignment, u64 flags)
{ … }
void __i915_vma_set_map_and_fenceable(struct i915_vma *vma)
{ … }
bool i915_gem_valid_gtt_space(struct i915_vma *vma, unsigned long color)
{ … }
static int
i915_vma_insert(struct i915_vma *vma, struct i915_gem_ww_ctx *ww,
		u64 size, u64 alignment, u64 flags)
{ … }
static void
i915_vma_detach(struct i915_vma *vma)
{ … }
static bool try_qad_pin(struct i915_vma *vma, unsigned int flags)
{ … }
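/*
 * The helpers below construct the scatter-gather tables for the special GTT
 * views used by display: rotated, remapped (de-tiled to linear) and partial
 * views of an object.
 */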
static struct scatterlist *
rotate_pages(struct drm_i915_gem_object *obj, unsigned int offset,
	     unsigned int width, unsigned int height,
	     unsigned int src_stride, unsigned int dst_stride,
	     struct sg_table *st, struct scatterlist *sg)
{ … }
static noinline struct sg_table *
intel_rotate_pages(struct intel_rotation_info *rot_info,
		   struct drm_i915_gem_object *obj)
{ … }
static struct scatterlist *
add_padding_pages(unsigned int count,
		  struct sg_table *st, struct scatterlist *sg)
{ … }
static struct scatterlist *
remap_tiled_color_plane_pages(struct drm_i915_gem_object *obj,
			      unsigned long offset, unsigned int alignment_pad,
			      unsigned int width, unsigned int height,
			      unsigned int src_stride, unsigned int dst_stride,
			      struct sg_table *st, struct scatterlist *sg,
			      unsigned int *gtt_offset)
{ … }
static struct scatterlist *
remap_contiguous_pages(struct drm_i915_gem_object *obj,
		       pgoff_t obj_offset,
		       unsigned int count,
		       struct sg_table *st, struct scatterlist *sg)
{ … }
static struct scatterlist *
remap_linear_color_plane_pages(struct drm_i915_gem_object *obj,
			       pgoff_t obj_offset, unsigned int alignment_pad,
			       unsigned int size,
			       struct sg_table *st, struct scatterlist *sg,
			       unsigned int *gtt_offset)
{ … }
static struct scatterlist *
remap_color_plane_pages(const struct intel_remapped_info *rem_info,
			struct drm_i915_gem_object *obj,
			int color_plane,
			struct sg_table *st, struct scatterlist *sg,
			unsigned int *gtt_offset)
{ … }
static noinline struct sg_table *
intel_remap_pages(struct intel_remapped_info *rem_info,
		  struct drm_i915_gem_object *obj)
{ … }
static noinline struct sg_table *
intel_partial_pages(const struct i915_gtt_view *view,
		    struct drm_i915_gem_object *obj)
{ … }
static int
__i915_vma_get_pages(struct i915_vma *vma)
{ … }
I915_SELFTEST_EXPORT int i915_vma_get_pages(struct i915_vma *vma)
{ … }
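/*
 * vma_invalidate_tlb() records, per GT, the TLB invalidation that must
 * complete before the pages that were bound by this vma can be reused,
 * so no stale translations survive the unbind.
 */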
void vma_invalidate_tlb(struct i915_address_space *vm, u32 *tlb)
{ … }
static void __vma_put_pages(struct i915_vma *vma, unsigned int count)
{ … }
I915_SELFTEST_EXPORT void i915_vma_put_pages(struct i915_vma *vma)
{ … }
static void vma_unbind_pages(struct i915_vma *vma)
{ … }
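/*
 * i915_vma_pin_ww() is the main pinning entry point: it acquires the backing
 * pages, inserts the vma into its address space (evicting neighbours if
 * necessary) and binds the PTEs, all under the supplied ww acquire context.
 */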
int i915_vma_pin_ww(struct i915_vma *vma, struct i915_gem_ww_ctx *ww,
		    u64 size, u64 alignment, u64 flags)
{ … }
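/*
 * Illustrative caller sketch (not part of this file), assuming @obj, @vm and
 * a ww transaction @ww are already set up:
 *
 *	struct i915_vma *vma;
 *	int err;
 *
 *	vma = i915_vma_instance(obj, vm, NULL);
 *	if (IS_ERR(vma))
 *		return PTR_ERR(vma);
 *
 *	err = i915_vma_pin_ww(vma, &ww, 0, 0, PIN_USER);
 *	if (err)
 *		return err;
 *
 *	... use the mapping ...
 *
 *	i915_vma_unpin(vma);
 */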
static void flush_idle_contexts(struct intel_gt *gt)
{ … }
static int __i915_ggtt_pin(struct i915_vma *vma, struct i915_gem_ww_ctx *ww,
			   u32 align, unsigned int flags)
{ … }
int i915_ggtt_pin(struct i915_vma *vma, struct i915_gem_ww_ctx *ww,
		  u32 align, unsigned int flags)
{ … }
void i915_ggtt_clear_scanout(struct drm_i915_gem_object *obj)
{ … }
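/*
 * Closing defers destruction: __vma_close() parks the vma on the GT's
 * closed_vma list, from where it is either reopened or finally reaped by
 * i915_vma_parked() when the GT idles.
 */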
static void __vma_close(struct i915_vma *vma, struct intel_gt *gt)
{ … }
void i915_vma_close(struct i915_vma *vma)
{ … }
static void __i915_vma_remove_closed(struct i915_vma *vma)
{ … }
void i915_vma_reopen(struct i915_vma *vma)
{ … }
static void force_unbind(struct i915_vma *vma)
{ … }
static void release_references(struct i915_vma *vma, struct intel_gt *gt,
			       bool vm_ddestroy)
{ … }
void i915_vma_destroy_locked(struct i915_vma *vma)
{ … }
void i915_vma_destroy(struct i915_vma *vma)
{ … }
void i915_vma_parked(struct intel_gt *gt)
{ … }
static void __i915_vma_iounmap(struct i915_vma *vma)
{ … }
void i915_vma_revoke_mmap(struct i915_vma *vma)
{ … }
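/*
 * Before a request may sample a vma, the request must await the vma's bind
 * fence, and the vma is tracked as active so it cannot be unbound until the
 * request (and its fences) complete.
 */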
static int
__i915_request_await_bind(struct i915_request *rq, struct i915_vma *vma)
{ … }
static int __i915_vma_move_to_active(struct i915_vma *vma, struct i915_request *rq)
{ … }
int _i915_vma_move_to_active(struct i915_vma *vma,
			     struct i915_request *rq,
			     struct dma_fence *fence,
			     unsigned int flags)
{ … }
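/*
 * The unbind family: __i915_vma_evict() revokes mmaps, releases any fence
 * register and tears down the PTEs (returning the unbind fence when async),
 * while the i915_vma_unbind*() variants wrap it in the required locking and
 * waiting.
 */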
struct dma_fence *__i915_vma_evict(struct i915_vma *vma, bool async)
{ … }
int __i915_vma_unbind(struct i915_vma *vma)
{ … }
static struct dma_fence *__i915_vma_unbind_async(struct i915_vma *vma)
{ … }
int i915_vma_unbind(struct i915_vma *vma)
{ … }
int i915_vma_unbind_async(struct i915_vma *vma, bool trylock_vm)
{ … }
int i915_vma_unbind_unlocked(struct i915_vma *vma)
{ … }
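/*
 * Shrinker interaction: a vma in use moves its object's pages off the
 * shrink lists (make_unshrinkable) and marks them shrinkable or purgeable
 * again once it is safe for the shrinker to reap them.
 */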
struct i915_vma *i915_vma_make_unshrinkable(struct i915_vma *vma)
{ … }
void i915_vma_make_shrinkable(struct i915_vma *vma)
{ … }
void i915_vma_make_purgeable(struct i915_vma *vma)
{ … }
#if IS_ENABLED(CONFIG_DRM_I915_SELFTEST)
#include "selftests/i915_vma.c"
#endif
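/*
 * Module-level setup: i915_vma_module_init() creates the slab_vmas cache
 * used by i915_vma_alloc(), and i915_vma_module_exit() destroys it.
 */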
void i915_vma_module_exit(void)
{ … }
int __init i915_vma_module_init(void)
{ … }