// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2013 Red Hat
 * Copyright (c) 2014-2018, 2020-2021 The Linux Foundation. All rights reserved.
 * Copyright (c) 2022-2023 Qualcomm Innovation Center, Inc. All rights reserved.
 *
 * Author: Rob Clark <[email protected]>
 */

#define pr_fmt(fmt)
#include <linux/debugfs.h>
#include <linux/kthread.h>
#include <linux/seq_file.h>

#include <drm/drm_atomic.h>
#include <drm/drm_crtc.h>
#include <drm/drm_file.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_framebuffer.h>

#include "msm_drv.h"
#include "dpu_kms.h"
#include "dpu_hwio.h"
#include "dpu_hw_catalog.h"
#include "dpu_hw_intf.h"
#include "dpu_hw_ctl.h"
#include "dpu_hw_dspp.h"
#include "dpu_hw_dsc.h"
#include "dpu_hw_merge3d.h"
#include "dpu_hw_cdm.h"
#include "dpu_formats.h"
#include "dpu_encoder_phys.h"
#include "dpu_crtc.h"
#include "dpu_trace.h"
#include "dpu_core_irq.h"
#include "disp/msm_disp_snapshot.h"

#define DPU_DEBUG_ENC(e, fmt, ...)

#define DPU_ERROR_ENC(e, fmt, ...)

#define DPU_ERROR_ENC_RATELIMITED(e, fmt, ...)

/*
 * Two to anticipate panels that can do cmd/vid dynamic switching;
 * the plan is to create all possible physical encoder types and switch
 * between them at runtime.
 */
#define NUM_PHYS_ENCODER_TYPES

#define MAX_PHYS_ENCODERS_PER_VIRTUAL

#define MAX_CHANNELS_PER_ENC

#define IDLE_SHORT_TIMEOUT

#define MAX_HDISPLAY_SPLIT

/* timeout in frames waiting for frame done */
#define DPU_ENCODER_FRAME_DONE_TIMEOUT_FRAMES

/**
 * enum dpu_enc_rc_events - events for resource control state machine
 * @DPU_ENC_RC_EVENT_KICKOFF:
 *	This event happens at NORMAL priority.
 *	Event that signals the start of the transfer. When this event is
 *	received, enable MDP/DSI core clocks. Regardless of the previous
 *	state, the resource should be in ON state at the end of this event.
 * @DPU_ENC_RC_EVENT_FRAME_DONE:
 *	This event happens at INTERRUPT level.
 *	Event signals the end of the data transfer after the PP FRAME_DONE
 *	event. At the end of this event, a delayed work is scheduled to go to
 *	IDLE_PC state after IDLE_TIMEOUT time.
 * @DPU_ENC_RC_EVENT_PRE_STOP:
 *	This event happens at NORMAL priority.
 *	This event, when received during the ON state, leaves the RC state
 *	in the PRE_OFF state. It should be followed by the STOP event as
 *	part of encoder disable.
 *	If received during IDLE or OFF states, it will do nothing.
 * @DPU_ENC_RC_EVENT_STOP:
 *	This event happens at NORMAL priority.
 *	When this event is received, disable all the MDP/DSI core clocks, and
 *	disable IRQs. It should be called from the PRE_OFF or IDLE states.
 *	IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
 *	PRE_OFF is expected when PRE_STOP was executed during the ON state.
 *	Resource state should be in OFF at the end of the event.
 * @DPU_ENC_RC_EVENT_ENTER_IDLE:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there were no frame updates for IDLE_TIMEOUT time.
 *	This would disable MDP/DSI core clocks and change the resource state
 *	to IDLE.
 */
enum dpu_enc_rc_events {
	DPU_ENC_RC_EVENT_KICKOFF,
	DPU_ENC_RC_EVENT_FRAME_DONE,
	DPU_ENC_RC_EVENT_PRE_STOP,
	DPU_ENC_RC_EVENT_STOP,
	DPU_ENC_RC_EVENT_ENTER_IDLE
};
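
/*
 * Illustrative sketch only, not part of the driver: the order in which the
 * events above are raised across the life of a frame and an encoder disable.
 * The @dispatch callback stands in for dpu_encoder_resource_control(), which
 * is defined further down in this file; the helper itself is hypothetical.
 */
static inline void dpu_enc_rc_event_order_sketch(struct drm_encoder *drm_enc,
		int (*dispatch)(struct drm_encoder *drm_enc, u32 sw_event))
{
	/* start of transfer: clocks and IRQs on, resource state ends up ON */
	dispatch(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);

	/* PP FRAME_DONE interrupt: end of transfer, the idle work is scheduled */
	dispatch(drm_enc, DPU_ENC_RC_EVENT_FRAME_DONE);

	/* no further updates for IDLE_TIMEOUT: the delayed work enters idle PC */
	dispatch(drm_enc, DPU_ENC_RC_EVENT_ENTER_IDLE);

	/* encoder disable: PRE_STOP (a no-op from IDLE) followed by STOP -> OFF */
	dispatch(drm_enc, DPU_ENC_RC_EVENT_PRE_STOP);
	dispatch(drm_enc, DPU_ENC_RC_EVENT_STOP);
}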

/**
 * enum dpu_enc_rc_states - states that the resource control maintains
 * @DPU_ENC_RC_STATE_OFF: Resource is in OFF state
 * @DPU_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
 * @DPU_ENC_RC_STATE_ON: Resource is in ON state
 * @DPU_ENC_RC_STATE_MODESET: Resource is in modeset state
 * @DPU_ENC_RC_STATE_IDLE: Resource is in IDLE state
 */
enum dpu_enc_rc_states {
	DPU_ENC_RC_STATE_OFF,
	DPU_ENC_RC_STATE_PRE_OFF,
	DPU_ENC_RC_STATE_ON,
	DPU_ENC_RC_STATE_MODESET,
	DPU_ENC_RC_STATE_IDLE
};
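
/*
 * Illustrative sketch, not part of the driver: the state each RC event drives
 * towards, as described in the comments above.  The real transition logic
 * lives in dpu_encoder_resource_control() and also depends on the current
 * state (e.g. PRE_STOP does nothing when received in IDLE or OFF).
 */
static inline enum dpu_enc_rc_states
dpu_enc_rc_target_state_sketch(enum dpu_enc_rc_events event)
{
	switch (event) {
	case DPU_ENC_RC_EVENT_KICKOFF:
		return DPU_ENC_RC_STATE_ON;
	case DPU_ENC_RC_EVENT_PRE_STOP:
		return DPU_ENC_RC_STATE_PRE_OFF;
	case DPU_ENC_RC_EVENT_STOP:
		return DPU_ENC_RC_STATE_OFF;
	case DPU_ENC_RC_EVENT_ENTER_IDLE:
		return DPU_ENC_RC_STATE_IDLE;
	case DPU_ENC_RC_EVENT_FRAME_DONE:
	default:
		/* FRAME_DONE only schedules the idle work; state is left as-is */
		return DPU_ENC_RC_STATE_ON;
	}
}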

/**
 * struct dpu_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. Virtual encoder manages one "logical" display. Physical
 *	encoders manage one intf block, tied to a specific panel/sub-panel.
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @enabled:		True if the encoder is active, protected by enc_lock
 * @commit_done_timedout: True if there has been a timeout on commit after
 *			enabling the encoder.
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode; an
 *			optimization. Only valid after enable, cleared at disable.
 * @cur_slave:		As above but for the slave encoder.
 * @hw_pp:		Handles to the pingpong blocks used for the display. The
 *			number of pingpong blocks can differ from num_phys_encs.
 * @hw_dsc:		Handle to the DSC blocks used for the display.
 * @dsc_mask:		Bitmask of used DSC blocks.
 * @intfs_swapped:	Whether or not the phys_enc interfaces have been swapped
 *			for partial update right-only cases, such as pingpong
 *			split where virtual pingpong does not generate IRQs
 * @crtc:		Pointer to the currently assigned crtc. Normally you
 *			would use crtc->state->encoder_mask to determine the
 *			link between encoder/crtc. However in this case we need
 *			to track crtc in the disable() hook which is called
 *			_after_ encoder_mask is cleared.
 * @connector:		If a mode is set, cached pointer to the active connector
 * @enc_lock:			Lock around physical encoder
 *				create/destroy/enable/disable
 * @frame_busy_mask:		Bitmask tracking which phys_encs are still
 *				busy processing the current command.
 *				Bit0 = phys_encs[0] etc.
 * @frame_done_timeout_ms:	frame done timeout in ms
 * @frame_done_timeout_cnt:	atomic counter tracking the number of frame
 * 				done timeouts
 * @frame_done_timer:		watchdog timer for frame done event
 * @disp_info:			local copy of msm_display_info struct
 * @idle_pc_supported:		indicates whether idle power collapse is supported
 * @rc_lock:			resource control mutex lock to protect
 *				virt encoder over various state changes
 * @rc_state:			resource controller state
 * @delayed_off_work:		delayed worker to schedule disabling of
 *				clks and resources after IDLE_TIMEOUT time.
 * @topology:                   topology of the display
 * @idle_timeout:		idle timeout duration in milliseconds
 * @wide_bus_en:		wide bus is enabled on this interface
 * @dsc:			drm_dsc_config pointer, for DSC-enabled encoders
 */
struct dpu_encoder_virt {};

#define to_dpu_encoder_virt(x)
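
/*
 * Illustrative sketch, not part of the driver: the common access pattern for
 * the container documented above.  A drm_encoder embedded as @base is mapped
 * back to its dpu_encoder_virt (to_dpu_encoder_virt() above is conventionally
 * a container_of() wrapper) and the physical encoders are then walked.  The
 * helper and its callback are hypothetical.
 */
static inline void dpu_encoder_for_each_phys_sketch(struct drm_encoder *drm_enc,
		void (*cb)(struct dpu_encoder_phys *phys))
{
	struct dpu_encoder_virt *dpu_enc =
		container_of(drm_enc, struct dpu_encoder_virt, base);
	unsigned int i;

	/* phys_encs[]/num_phys_encs hold the managed physical encoders */
	for (i = 0; i < dpu_enc->num_phys_encs; i++)
		cb(dpu_enc->phys_encs[i]);
}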

static u32 dither_matrix[DITHER_MATRIX_SZ] =;

u32 dpu_encoder_get_drm_fmt(struct dpu_encoder_phys *phys_enc)
{}

bool dpu_encoder_needs_periph_flush(struct dpu_encoder_phys *phys_enc)
{}

bool dpu_encoder_is_widebus_enabled(const struct drm_encoder *drm_enc)
{}

bool dpu_encoder_is_dsc_enabled(const struct drm_encoder *drm_enc)
{}

int dpu_encoder_get_crc_values_cnt(const struct drm_encoder *drm_enc)
{}

void dpu_encoder_setup_misr(const struct drm_encoder *drm_enc)
{}

int dpu_encoder_get_crc(const struct drm_encoder *drm_enc, u32 *crcs, int pos)
{}

static void _dpu_encoder_setup_dither(struct dpu_hw_pingpong *hw_pp, unsigned bpc)
{}

static char *dpu_encoder_helper_get_intf_type(enum dpu_intf_mode intf_mode)
{}

void dpu_encoder_helper_report_irq_timeout(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{}

static int dpu_encoder_helper_wait_event_timeout(int32_t drm_id,
		unsigned int irq_idx, struct dpu_encoder_wait_info *info);

int dpu_encoder_helper_wait_for_irq(struct dpu_encoder_phys *phys_enc,
		unsigned int irq_idx,
		void (*func)(void *arg),
		struct dpu_encoder_wait_info *wait_info)
{}

int dpu_encoder_get_vsync_count(struct drm_encoder *drm_enc)
{}

int dpu_encoder_get_linecount(struct drm_encoder *drm_enc)
{}

void dpu_encoder_helper_split_config(
		struct dpu_encoder_phys *phys_enc,
		enum dpu_intf interface)
{}

bool dpu_encoder_use_dsc_merge(struct drm_encoder *drm_enc)
{}

struct drm_dsc_config *dpu_encoder_get_dsc_config(struct drm_encoder *drm_enc)
{}

static struct msm_display_topology dpu_encoder_get_topology(
			struct dpu_encoder_virt *dpu_enc,
			struct dpu_kms *dpu_kms,
			struct drm_display_mode *mode,
			struct drm_crtc_state *crtc_state,
			struct drm_dsc_config *dsc)
{}

static int dpu_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{}

static void _dpu_encoder_update_vsync_source(struct dpu_encoder_virt *dpu_enc,
			struct msm_display_info *disp_info)
{}

static void _dpu_encoder_irq_enable(struct drm_encoder *drm_enc)
{}

static void _dpu_encoder_irq_disable(struct drm_encoder *drm_enc)
{}

static void _dpu_encoder_resource_enable(struct drm_encoder *drm_enc)
{}

static void _dpu_encoder_resource_disable(struct drm_encoder *drm_enc)
{}

static int dpu_encoder_resource_control(struct drm_encoder *drm_enc,
		u32 sw_event)
{}

void dpu_encoder_prepare_wb_job(struct drm_encoder *drm_enc,
		struct drm_writeback_job *job)
{}

void dpu_encoder_cleanup_wb_job(struct drm_encoder *drm_enc,
		struct drm_writeback_job *job)
{}

static void dpu_encoder_virt_atomic_mode_set(struct drm_encoder *drm_enc,
					     struct drm_crtc_state *crtc_state,
					     struct drm_connector_state *conn_state)
{}

static void _dpu_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{}

void dpu_encoder_virt_runtime_resume(struct drm_encoder *drm_enc)
{}

static void dpu_encoder_virt_atomic_enable(struct drm_encoder *drm_enc,
					struct drm_atomic_state *state)
{}

static void dpu_encoder_virt_atomic_disable(struct drm_encoder *drm_enc,
					struct drm_atomic_state *state)
{}

static struct dpu_hw_intf *dpu_encoder_get_intf(const struct dpu_mdss_cfg *catalog,
		struct dpu_rm *dpu_rm,
		enum dpu_intf_type type, u32 controller_id)
{}

void dpu_encoder_vblank_callback(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phy_enc)
{}

void dpu_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phy_enc)
{}

void dpu_encoder_assign_crtc(struct drm_encoder *drm_enc, struct drm_crtc *crtc)
{}

void dpu_encoder_toggle_vblank_for_crtc(struct drm_encoder *drm_enc,
					struct drm_crtc *crtc, bool enable)
{}

void dpu_encoder_frame_done_callback(
		struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *ready_phys, u32 event)
{}

static void dpu_encoder_off_work(struct work_struct *work)
{}

/**
 * _dpu_encoder_trigger_flush - trigger flush for a physical encoder
 * @drm_enc: Pointer to drm encoder structure
 * @phys: Pointer to physical encoder structure
 * @extra_flush_bits: Additional bit mask to include in flush trigger
 */
static void _dpu_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phys, uint32_t extra_flush_bits)
{}
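
/*
 * Minimal sketch of the flush step documented above, assuming the usual
 * dpu_hw_ctl callbacks update_pending_flush() and trigger_flush(); this is
 * not a verbatim copy of the implementation.
 */
static inline void dpu_encoder_trigger_flush_sketch(struct dpu_encoder_phys *phys,
		uint32_t extra_flush_bits)
{
	struct dpu_hw_ctl *ctl = phys->hw_ctl;

	if (!ctl)
		return;

	/* fold any extra bits from the virtual encoder into the pending mask */
	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	/* write the accumulated mask out to the CTL flush register */
	if (ctl->ops.trigger_flush)
		ctl->ops.trigger_flush(ctl);
}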

/**
 * _dpu_encoder_trigger_start - trigger start for a physical encoder
 * @phys: Pointer to physical encoder structure
 */
static void _dpu_encoder_trigger_start(struct dpu_encoder_phys *phys)
{}

void dpu_encoder_helper_trigger_start(struct dpu_encoder_phys *phys_enc)
{}

static int dpu_encoder_helper_wait_event_timeout(
		int32_t drm_id,
		unsigned int irq_idx,
		struct dpu_encoder_wait_info *info)
{}

static void dpu_encoder_helper_hw_reset(struct dpu_encoder_phys *phys_enc)
{}

/**
 * _dpu_encoder_kickoff_phys - handle physical encoder kickoff
 *	Iterate through the physical encoders and perform consolidated flush
 *	and/or control start triggering as needed. This is done in the virtual
 *	encoder rather than the individual physical ones in order to handle
 *	use cases that require visibility into multiple physical encoders at
 *	a time.
 * @dpu_enc: Pointer to virtual encoder structure
 */
static void _dpu_encoder_kickoff_phys(struct dpu_encoder_virt *dpu_enc)
{}
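
/*
 * Minimal sketch of the consolidated kickoff described above, ignoring the
 * split-display/single-flush special cases the real helper handles: every
 * physical encoder gets its flush triggered, then the current master is
 * started.
 */
static inline void dpu_encoder_kickoff_phys_sketch(struct dpu_encoder_virt *dpu_enc)
{
	unsigned int i;

	for (i = 0; i < dpu_enc->num_phys_encs; i++)
		_dpu_encoder_trigger_flush(&dpu_enc->base,
					   dpu_enc->phys_encs[i], 0x0);

	if (dpu_enc->cur_master)
		_dpu_encoder_trigger_start(dpu_enc->cur_master);
}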

void dpu_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
{}

static u32 _dpu_encoder_calculate_linetime(struct dpu_encoder_virt *dpu_enc,
		struct drm_display_mode *mode)
{}

int dpu_encoder_vsync_time(struct drm_encoder *drm_enc, ktime_t *wakeup_time)
{}

static u32
dpu_encoder_dsc_initial_line_calc(struct drm_dsc_config *dsc,
				  u32 enc_ip_width)
{}

static void dpu_encoder_dsc_pipe_cfg(struct dpu_hw_ctl *ctl,
				     struct dpu_hw_dsc *hw_dsc,
				     struct dpu_hw_pingpong *hw_pp,
				     struct drm_dsc_config *dsc,
				     u32 common_mode,
				     u32 initial_lines)
{}

static void dpu_encoder_prep_dsc(struct dpu_encoder_virt *dpu_enc,
				 struct drm_dsc_config *dsc)
{}

void dpu_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc)
{}

bool dpu_encoder_is_valid_for_commit(struct drm_encoder *drm_enc)
{}

void dpu_encoder_kickoff(struct drm_encoder *drm_enc)
{}

static void dpu_encoder_helper_reset_mixers(struct dpu_encoder_phys *phys_enc)
{}

static void dpu_encoder_dsc_pipe_clr(struct dpu_hw_ctl *ctl,
				     struct dpu_hw_dsc *hw_dsc,
				     struct dpu_hw_pingpong *hw_pp)
{}

static void dpu_encoder_unprep_dsc(struct dpu_encoder_virt *dpu_enc)
{}

void dpu_encoder_helper_phys_cleanup(struct dpu_encoder_phys *phys_enc)
{}

void dpu_encoder_helper_phys_setup_cdm(struct dpu_encoder_phys *phys_enc,
				       const struct msm_format *dpu_fmt,
				       u32 output_type)
{}

#ifdef CONFIG_DEBUG_FS
static int _dpu_encoder_status_show(struct seq_file *s, void *data)
{}

DEFINE_SHOW_ATTRIBUTE(_dpu_encoder_status);

static void dpu_encoder_debugfs_init(struct drm_encoder *drm_enc, struct dentry *root)
{}
#else
#define dpu_encoder_debugfs_init
#endif

static int dpu_encoder_virt_add_phys_encs(
		struct drm_device *dev,
		struct msm_display_info *disp_info,
		struct dpu_encoder_virt *dpu_enc,
		struct dpu_enc_phys_init_params *params)
{}

static int dpu_encoder_setup_display(struct dpu_encoder_virt *dpu_enc,
				 struct dpu_kms *dpu_kms,
				 struct msm_display_info *disp_info)
{}

static void dpu_encoder_frame_done_timeout(struct timer_list *t)
{}

static const struct drm_encoder_helper_funcs dpu_encoder_helper_funcs =;

static const struct drm_encoder_funcs dpu_encoder_funcs =;

struct drm_encoder *dpu_encoder_init(struct drm_device *dev,
		int drm_enc_mode,
		struct msm_display_info *disp_info)
{}

/**
 * dpu_encoder_wait_for_commit_done() - Wait for encoder to flush pending state
 * @drm_enc:	encoder pointer
 *
 * Wait for the hardware to have flushed the current pending changes out at
 * a vblank or CTL_START. Physical encoders will map this differently depending
 * on the type: vid mode -> vsync_irq, cmd mode -> CTL_START.
 *
 * Return: 0 on success, -EWOULDBLOCK if already signaled, error otherwise
 */
int dpu_encoder_wait_for_commit_done(struct drm_encoder *drm_enc)
{}

/**
 * dpu_encoder_wait_for_tx_complete() - Wait for encoder to transfer pixels to panel
 * @drm_enc:	encoder pointer
 *
 * Wait for the hardware to transfer all the pixels to the panel. Physical
 * encoders will map this differently depending on the type: vid mode -> vsync_irq,
 * cmd mode -> pp_done.
 *
 * Return: 0 on success, -EWOULDBLOCK if already signaled, error otherwise
 */
int dpu_encoder_wait_for_tx_complete(struct drm_encoder *drm_enc)
{}
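
/*
 * Minimal sketch of the shared pattern behind the two wait helpers above,
 * assuming the dpu_encoder_phys_ops wait_for_commit_done() hook keeps its
 * usual shape: the virtual encoder simply asks every physical encoder to
 * wait and returns the first error.  dpu_encoder_wait_for_tx_complete()
 * follows the same shape with the wait_for_tx_complete() hook.
 */
static inline int dpu_encoder_wait_for_commit_done_sketch(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	unsigned int i;
	int ret;

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		/* vid mode waits on vsync, cmd mode on CTL_START (see above) */
		if (phys->ops.wait_for_commit_done) {
			ret = phys->ops.wait_for_commit_done(phys);
			if (ret)
				return ret;
		}
	}

	return 0;
}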

enum dpu_intf_mode dpu_encoder_get_intf_mode(struct drm_encoder *encoder)
{}

unsigned int dpu_encoder_helper_get_dsc(struct dpu_encoder_phys *phys_enc)
{}

void dpu_encoder_phys_init(struct dpu_encoder_phys *phys_enc,
			  struct dpu_enc_phys_init_params *p)
{}