#include <linux/delay.h>
#include <linux/kernel.h>
#include <linux/firmware.h>
#include <linux/module.h>
#include <linux/pci.h>
#include "amdgpu.h"
#include "amdgpu_gfx.h"
#include "amdgpu_psp.h"
#include "nv.h"
#include "nvd.h"
#include "gc/gc_10_1_0_offset.h"
#include "gc/gc_10_1_0_sh_mask.h"
#include "smuio/smuio_11_0_0_offset.h"
#include "smuio/smuio_11_0_0_sh_mask.h"
#include "navi10_enum.h"
#include "ivsrcid/gfx/irqsrcs_gfx_10_1.h"
#include "soc15.h"
#include "soc15d.h"
#include "soc15_common.h"
#include "clearstate_gfx10.h"
#include "v10_structs.h"
#include "gfx_v10_0.h"
#include "nbio_v2_3.h"
#define GFX10_NUM_GFX_RINGS_NV1X …
#define GFX10_NUM_GFX_RINGS_Sienna_Cichlid …
#define GFX10_MEC_HPD_SIZE …
#define F32_CE_PROGRAM_RAM_SIZE …
#define RLCG_UCODE_LOADING_START_ADDRESS …
#define mmCGTT_GS_NGG_CLK_CTRL …
#define mmCGTT_GS_NGG_CLK_CTRL_BASE_IDX …
#define mmCGTT_SPI_RA0_CLK_CTRL …
#define mmCGTT_SPI_RA0_CLK_CTRL_BASE_IDX …
#define mmCGTT_SPI_RA1_CLK_CTRL …
#define mmCGTT_SPI_RA1_CLK_CTRL_BASE_IDX …
#define GB_ADDR_CONFIG__NUM_PKRS__SHIFT …
#define GB_ADDR_CONFIG__NUM_PKRS_MASK …
#define mmCGTS_TCC_DISABLE_gc_10_3 …
#define mmCGTS_TCC_DISABLE_gc_10_3_BASE_IDX …
#define mmCGTS_USER_TCC_DISABLE_gc_10_3 …
#define mmCGTS_USER_TCC_DISABLE_gc_10_3_BASE_IDX …
#define mmCP_MEC_CNTL_Sienna_Cichlid …
#define mmCP_MEC_CNTL_Sienna_Cichlid_BASE_IDX …
#define mmRLC_SAFE_MODE_Sienna_Cichlid …
#define mmRLC_SAFE_MODE_Sienna_Cichlid_BASE_IDX …
#define mmRLC_CP_SCHEDULERS_Sienna_Cichlid …
#define mmRLC_CP_SCHEDULERS_Sienna_Cichlid_BASE_IDX …
#define mmSPI_CONFIG_CNTL_Sienna_Cichlid …
#define mmSPI_CONFIG_CNTL_Sienna_Cichlid_BASE_IDX …
#define mmVGT_ESGS_RING_SIZE_Sienna_Cichlid …
#define mmVGT_ESGS_RING_SIZE_Sienna_Cichlid_BASE_IDX …
#define mmVGT_GSVS_RING_SIZE_Sienna_Cichlid …
#define mmVGT_GSVS_RING_SIZE_Sienna_Cichlid_BASE_IDX …
#define mmVGT_TF_RING_SIZE_Sienna_Cichlid …
#define mmVGT_TF_RING_SIZE_Sienna_Cichlid_BASE_IDX …
#define mmVGT_HS_OFFCHIP_PARAM_Sienna_Cichlid …
#define mmVGT_HS_OFFCHIP_PARAM_Sienna_Cichlid_BASE_IDX …
#define mmVGT_TF_MEMORY_BASE_Sienna_Cichlid …
#define mmVGT_TF_MEMORY_BASE_Sienna_Cichlid_BASE_IDX …
#define mmVGT_TF_MEMORY_BASE_HI_Sienna_Cichlid …
#define mmVGT_TF_MEMORY_BASE_HI_Sienna_Cichlid_BASE_IDX …
#define GRBM_STATUS2__RLC_BUSY_Sienna_Cichlid__SHIFT …
#define GRBM_STATUS2__RLC_BUSY_Sienna_Cichlid_MASK …
#define CP_RB_DOORBELL_RANGE_LOWER__DOORBELL_RANGE_LOWER_Sienna_Cichlid_MASK …
#define CP_RB_DOORBELL_RANGE_LOWER__DOORBELL_RANGE_LOWER_Sienna_Cichlid__SHIFT …
#define CP_RB_DOORBELL_RANGE_UPPER__DOORBELL_RANGE_UPPER_Sienna_Cichlid_MASK …
#define mmGCR_GENERAL_CNTL_Sienna_Cichlid …
#define mmGCR_GENERAL_CNTL_Sienna_Cichlid_BASE_IDX …
#define mmGOLDEN_TSC_COUNT_UPPER_Cyan_Skillfish …
#define mmGOLDEN_TSC_COUNT_UPPER_Cyan_Skillfish_BASE_IDX …
#define mmGOLDEN_TSC_COUNT_LOWER_Cyan_Skillfish …
#define mmGOLDEN_TSC_COUNT_LOWER_Cyan_Skillfish_BASE_IDX …
#define mmGOLDEN_TSC_COUNT_UPPER_Vangogh …
#define mmGOLDEN_TSC_COUNT_UPPER_Vangogh_BASE_IDX …
#define mmGOLDEN_TSC_COUNT_LOWER_Vangogh …
#define mmGOLDEN_TSC_COUNT_LOWER_Vangogh_BASE_IDX …
#define mmGOLDEN_TSC_COUNT_UPPER_GC_10_3_6 …
#define mmGOLDEN_TSC_COUNT_UPPER_GC_10_3_6_BASE_IDX …
#define mmGOLDEN_TSC_COUNT_LOWER_GC_10_3_6 …
#define mmGOLDEN_TSC_COUNT_LOWER_GC_10_3_6_BASE_IDX …
#define mmSPI_CONFIG_CNTL_1_Vangogh …
#define mmSPI_CONFIG_CNTL_1_Vangogh_BASE_IDX …
#define mmVGT_TF_MEMORY_BASE_HI_Vangogh …
#define mmVGT_TF_MEMORY_BASE_HI_Vangogh_BASE_IDX …
#define mmVGT_HS_OFFCHIP_PARAM_Vangogh …
#define mmVGT_HS_OFFCHIP_PARAM_Vangogh_BASE_IDX …
#define mmVGT_TF_RING_SIZE_Vangogh …
#define mmVGT_TF_RING_SIZE_Vangogh_BASE_IDX …
#define mmVGT_GSVS_RING_SIZE_Vangogh …
#define mmVGT_GSVS_RING_SIZE_Vangogh_BASE_IDX …
#define mmVGT_TF_MEMORY_BASE_Vangogh …
#define mmVGT_TF_MEMORY_BASE_Vangogh_BASE_IDX …
#define mmVGT_ESGS_RING_SIZE_Vangogh …
#define mmVGT_ESGS_RING_SIZE_Vangogh_BASE_IDX …
#define mmSPI_CONFIG_CNTL_Vangogh …
#define mmSPI_CONFIG_CNTL_Vangogh_BASE_IDX …
#define mmGCR_GENERAL_CNTL_Vangogh …
#define mmGCR_GENERAL_CNTL_Vangogh_BASE_IDX …
#define RLC_PG_DELAY_3__CGCG_ACTIVE_BEFORE_CGPG_MASK_Vangogh …
#define mmCP_HYP_PFP_UCODE_ADDR …
#define mmCP_HYP_PFP_UCODE_ADDR_BASE_IDX …
#define mmCP_HYP_PFP_UCODE_DATA …
#define mmCP_HYP_PFP_UCODE_DATA_BASE_IDX …
#define mmCP_HYP_CE_UCODE_ADDR …
#define mmCP_HYP_CE_UCODE_ADDR_BASE_IDX …
#define mmCP_HYP_CE_UCODE_DATA …
#define mmCP_HYP_CE_UCODE_DATA_BASE_IDX …
#define mmCP_HYP_ME_UCODE_ADDR …
#define mmCP_HYP_ME_UCODE_ADDR_BASE_IDX …
#define mmCP_HYP_ME_UCODE_DATA …
#define mmCP_HYP_ME_UCODE_DATA_BASE_IDX …
#define mmCPG_PSP_DEBUG …
#define mmCPG_PSP_DEBUG_BASE_IDX …
#define mmCPC_PSP_DEBUG …
#define mmCPC_PSP_DEBUG_BASE_IDX …
#define CPC_PSP_DEBUG__GPA_OVERRIDE_MASK …
#define CPG_PSP_DEBUG__GPA_OVERRIDE_MASK …
#define mmCC_GC_SA_UNIT_DISABLE …
#define mmCC_GC_SA_UNIT_DISABLE_BASE_IDX …
#define CC_GC_SA_UNIT_DISABLE__SA_DISABLE__SHIFT …
#define CC_GC_SA_UNIT_DISABLE__SA_DISABLE_MASK …
#define mmGC_USER_SA_UNIT_DISABLE …
#define mmGC_USER_SA_UNIT_DISABLE_BASE_IDX …
#define GC_USER_SA_UNIT_DISABLE__SA_DISABLE__SHIFT …
#define GC_USER_SA_UNIT_DISABLE__SA_DISABLE_MASK …
#define mmPA_SC_ENHANCE_3 …
#define mmPA_SC_ENHANCE_3_BASE_IDX …
#define PA_SC_ENHANCE_3__FORCE_PBB_WORKLOAD_MODE_TO_ZERO__SHIFT …
#define PA_SC_ENHANCE_3__FORCE_PBB_WORKLOAD_MODE_TO_ZERO_MASK …
#define mmCGTT_SPI_CS_CLK_CTRL …
#define mmCGTT_SPI_CS_CLK_CTRL_BASE_IDX …
#define mmGCUTCL2_CGTT_CLK_CTRL_Sienna_Cichlid …
#define mmGCUTCL2_CGTT_CLK_CTRL_Sienna_Cichlid_BASE_IDX …
#define mmGCVM_L2_CGTT_CLK_CTRL_Sienna_Cichlid …
#define mmGCVM_L2_CGTT_CLK_CTRL_Sienna_Cichlid_BASE_IDX …
#define mmGC_THROTTLE_CTRL_Sienna_Cichlid …
#define mmGC_THROTTLE_CTRL_Sienna_Cichlid_BASE_IDX …
#define mmRLC_SPARE_INT_0_Sienna_Cichlid …
#define mmRLC_SPARE_INT_0_Sienna_Cichlid_BASE_IDX …
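/*
 * GFX firmware images (PFP, ME, CE, MEC, RLC, ...) this IP block may request
 * at runtime; declaring them with MODULE_FIRMWARE() exposes the file names
 * via modinfo so initramfs tooling can bundle them.
 */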
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
MODULE_FIRMWARE(…) …;
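/*
 * Register lists captured by the IP dump support (gfx_v10_ip_dump() /
 * gfx_v10_ip_print() below) for post-mortem debugging.
 */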
static const struct amdgpu_hwip_reg_entry gc_reg_list_10_1[] = …;
static const struct amdgpu_hwip_reg_entry gc_cp_reg_list_10[] = …;
static const struct amdgpu_hwip_reg_entry gc_gfx_queue_reg_list_10[] = …;
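/*
 * Per-ASIC "golden" register settings, applied at init time by
 * gfx_v10_0_init_golden_registers() and
 * gfx_v10_0_init_spm_golden_registers() to override hardware defaults.
 */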
static const struct soc15_reg_golden golden_settings_gc_10_1[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_0_nv10[] = …;
static const struct soc15_reg_golden golden_settings_gc_rlc_spm_10_0_nv10[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_1_1[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_1_2[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_1_nv14[] = …;
static const struct soc15_reg_golden golden_settings_gc_rlc_spm_10_1_nv14[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_1_2_nv12[] = …;
static const struct soc15_reg_golden golden_settings_gc_rlc_spm_10_1_2_nv12[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_3[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_3_sienna_cichlid[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_3_2[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_3_vangogh[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_3_3[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_3_4[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_3_5[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_0_cyan_skillfish[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_3_6[] = …;
static const struct soc15_reg_golden golden_settings_gc_10_3_7[] = …;
#define DEFAULT_SH_MEM_CONFIG …
#define CYAN_SKILLFISH_GB_ADDR_CONFIG_GOLDEN …
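/* Forward declarations for callbacks wired into the function tables below. */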
static void gfx_v10_0_set_ring_funcs(struct amdgpu_device *adev);
static void gfx_v10_0_set_irq_funcs(struct amdgpu_device *adev);
static void gfx_v10_0_set_gds_init(struct amdgpu_device *adev);
static void gfx_v10_0_set_rlc_funcs(struct amdgpu_device *adev);
static void gfx_v10_0_set_mqd_funcs(struct amdgpu_device *adev);
static int gfx_v10_0_get_cu_info(struct amdgpu_device *adev,
struct amdgpu_cu_info *cu_info);
static uint64_t gfx_v10_0_get_gpu_clock_counter(struct amdgpu_device *adev);
static void gfx_v10_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,
u32 sh_num, u32 instance, int xcc_id);
static u32 gfx_v10_0_get_wgp_active_bitmap_per_sh(struct amdgpu_device *adev);
static int gfx_v10_0_rlc_backdoor_autoload_buffer_init(struct amdgpu_device *adev);
static void gfx_v10_0_rlc_backdoor_autoload_buffer_fini(struct amdgpu_device *adev);
static int gfx_v10_0_rlc_backdoor_autoload_enable(struct amdgpu_device *adev);
static int gfx_v10_0_wait_for_rlc_autoload_complete(struct amdgpu_device *adev);
static void gfx_v10_0_ring_emit_ce_meta(struct amdgpu_ring *ring, bool resume);
static void gfx_v10_0_ring_emit_de_meta(struct amdgpu_ring *ring, bool resume);
static void gfx_v10_0_ring_emit_frame_cntl(struct amdgpu_ring *ring, bool start, bool secure);
static u32 gfx_v10_3_get_disabled_sa(struct amdgpu_device *adev);
static void gfx_v10_3_program_pbb_mode(struct amdgpu_device *adev);
static void gfx_v10_3_set_power_brake_sequence(struct amdgpu_device *adev);
static void gfx_v10_0_ring_invalidate_tlbs(struct amdgpu_ring *ring,
uint16_t pasid, uint32_t flush_type,
bool all_hub, uint8_t dst_sel);
static void gfx_v10_0_update_spm_vmid_internal(struct amdgpu_device *adev,
unsigned int vmid);
static int gfx_v10_0_set_powergating_state(void *handle,
enum amd_powergating_state state);
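/*
 * KIQ (Kernel Interface Queue) packet builders: emit the PM4 packets the KIQ
 * firmware expects for setting queue resources, mapping/unmapping compute
 * queues, querying queue status and invalidating TLBs.
 */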
static void gfx10_kiq_set_resources(struct amdgpu_ring *kiq_ring, uint64_t queue_mask)
{ … }
static void gfx10_kiq_map_queues(struct amdgpu_ring *kiq_ring,
struct amdgpu_ring *ring)
{ … }
static void gfx10_kiq_unmap_queues(struct amdgpu_ring *kiq_ring,
struct amdgpu_ring *ring,
enum amdgpu_unmap_queues_action action,
u64 gpu_addr, u64 seq)
{ … }
static void gfx10_kiq_query_status(struct amdgpu_ring *kiq_ring,
struct amdgpu_ring *ring,
u64 addr,
u64 seq)
{ … }
static void gfx10_kiq_invalidate_tlbs(struct amdgpu_ring *kiq_ring,
uint16_t pasid, uint32_t flush_type,
bool all_hub)
{ … }
static const struct kiq_pm4_funcs gfx_v10_0_kiq_pm4_funcs = …;
static void gfx_v10_0_set_kiq_pm4_funcs(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_init_spm_golden_registers(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_init_golden_registers(struct amdgpu_device *adev)
{ … }
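/*
 * PM4 emission helpers and the ring/IB self-tests used to verify that a ring
 * can execute packets after bring-up.
 */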
static void gfx_v10_0_write_data_to_reg(struct amdgpu_ring *ring, int eng_sel,
bool wc, uint32_t reg, uint32_t val)
{ … }
static void gfx_v10_0_wait_reg_mem(struct amdgpu_ring *ring, int eng_sel,
int mem_space, int opt, uint32_t addr0,
uint32_t addr1, uint32_t ref, uint32_t mask,
uint32_t inv)
{ … }
static int gfx_v10_0_ring_test_ring(struct amdgpu_ring *ring)
{ … }
static int gfx_v10_0_ring_test_ib(struct amdgpu_ring *ring, long timeout)
{ … }
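/*
 * Microcode handling: request and validate the per-ASIC firmware files
 * declared above and record their versions; the GFXOFF checks decide whether
 * GFXOFF stays enabled for a given firmware/ASIC combination.
 */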
static void gfx_v10_0_free_microcode(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_check_fw_write_wait(struct amdgpu_device *adev)
{ … }
static bool gfx_v10_0_navi10_gfxoff_should_enable(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_check_gfxoff_flag(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_init_microcode(struct amdgpu_device *adev)
{ … }
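/* Clear-state buffer (CSB) construction and RLC/MEC object init/teardown. */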
static u32 gfx_v10_0_get_csb_size(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_get_csb_buffer(struct amdgpu_device *adev,
volatile u32 *buffer)
{ … }
static void gfx_v10_0_rlc_fini(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_init_rlcg_reg_access_ctrl(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_rlc_init(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_mec_fini(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_me_init(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_mec_init(struct amdgpu_device *adev)
{ … }
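/*
 * Wave/GPR access helpers backing the debugfs wave and register dump
 * interfaces (read wave status, SGPRs and VGPRs for a selected SIMD/wave).
 */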
static uint32_t wave_read_ind(struct amdgpu_device *adev, uint32_t wave, uint32_t address)
{ … }
static void wave_read_regs(struct amdgpu_device *adev, uint32_t wave,
uint32_t thread, uint32_t regno,
uint32_t num, uint32_t *out)
{ … }
static void gfx_v10_0_read_wave_data(struct amdgpu_device *adev, uint32_t xcc_id, uint32_t simd, uint32_t wave, uint32_t *dst, int *no_fields)
{ … }
static void gfx_v10_0_read_wave_sgprs(struct amdgpu_device *adev, uint32_t xcc_id, uint32_t simd,
uint32_t wave, uint32_t start,
uint32_t size, uint32_t *dst)
{ … }
static void gfx_v10_0_read_wave_vgprs(struct amdgpu_device *adev, uint32_t xcc_id, uint32_t simd,
uint32_t wave, uint32_t thread,
uint32_t start, uint32_t size,
uint32_t *dst)
{ … }
static void gfx_v10_0_select_me_pipe_q(struct amdgpu_device *adev,
u32 me, u32 pipe, u32 q, u32 vm, u32 xcc_id)
{ … }
static void gfx_v10_0_update_perfmon_mgcg(struct amdgpu_device *adev,
bool enable)
{ … }
static const struct amdgpu_gfx_funcs gfx_v10_0_gfx_funcs = …;
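/*
 * sw_init phase: early per-ASIC configuration and creation of the gfx and
 * compute rings, IRQ sources and IP dump buffers.
 */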
static void gfx_v10_0_gpu_early_init(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_gfx_ring_init(struct amdgpu_device *adev, int ring_id,
int me, int pipe, int queue)
{ … }
static int gfx_v10_0_compute_ring_init(struct amdgpu_device *adev, int ring_id,
int mec, int pipe, int queue)
{ … }
static void gfx_v10_0_alloc_ip_dump(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_sw_init(void *handle)
{ … }
static void gfx_v10_0_pfp_fini(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_ce_fini(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_me_fini(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_sw_fini(void *handle)
{ … }
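/*
 * GRBM SE/SH selection plus the RB harvesting, compute VMID, GDS and TCC
 * setup performed when constants are initialized.
 */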
static void gfx_v10_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,
u32 sh_num, u32 instance, int xcc_id)
{ … }
static u32 gfx_v10_0_get_rb_active_bitmap(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_setup_rb(struct amdgpu_device *adev)
{ … }
static u32 gfx_v10_0_init_pa_sc_tile_steering_override(struct amdgpu_device *adev)
{ … }
#define DEFAULT_SH_MEM_BASES …
static void gfx_v10_0_debug_trap_config_init(struct amdgpu_device *adev,
uint32_t first_vmid,
uint32_t last_vmid)
{ … }
static void gfx_v10_0_init_compute_vmid(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_init_gds_vmid(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_tcp_harvest(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_get_tcc_info(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_constants_init(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_enable_gui_idle_interrupt(struct amdgpu_device *adev,
bool enable)
{ … }
static int gfx_v10_0_init_csb(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_rlc_stop(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_rlc_reset(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_rlc_smu_handshake_cntl(struct amdgpu_device *adev,
bool enable)
{ … }
static void gfx_v10_0_rlc_start(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_rlc_enable_srm(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_rlc_load_microcode(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_rlc_resume(struct amdgpu_device *adev)
{ … }
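/*
 * RLC backdoor autoload: when front-door (PSP) loading is not used, firmware
 * images are staged in a driver-allocated bo described by a TOC and the RLC
 * loads them into the CP/SDMA engines itself.
 */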
static struct { … } rlc_autoload_info[FIRMWARE_ID_MAX];
static int gfx_v10_0_parse_rlc_toc(struct amdgpu_device *adev)
{ … }
static uint32_t gfx_v10_0_calc_toc_total_size(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_rlc_backdoor_autoload_buffer_init(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_rlc_backdoor_autoload_buffer_fini(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_rlc_backdoor_autoload_copy_ucode(struct amdgpu_device *adev,
FIRMWARE_ID id,
const void *fw_data,
uint32_t fw_size)
{ … }
static void gfx_v10_0_rlc_backdoor_autoload_copy_toc_ucode(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_rlc_backdoor_autoload_copy_gfx_ucode(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_rlc_backdoor_autoload_copy_sdma_ucode(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_rlc_backdoor_autoload_enable(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_rlc_backdoor_autoload_config_me_cache(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_rlc_backdoor_autoload_config_ce_cache(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_rlc_backdoor_autoload_config_pfp_cache(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_rlc_backdoor_autoload_config_mec_cache(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_wait_for_rlc_autoload_complete(struct amdgpu_device *adev)
{ … }
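/*
 * CP bring-up: direct (non-PSP) PFP/CE/ME/MEC microcode loading and
 * configuration of the gfx ring buffers and doorbells.
 */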
static int gfx_v10_0_cp_gfx_enable(struct amdgpu_device *adev, bool enable)
{ … }
static int gfx_v10_0_cp_gfx_load_pfp_microcode(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_cp_gfx_load_ce_microcode(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_cp_gfx_load_me_microcode(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_cp_gfx_load_microcode(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_cp_gfx_start(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_cp_gfx_switch_pipe(struct amdgpu_device *adev,
CP_PIPE_ID pipe)
{ … }
static void gfx_v10_0_cp_gfx_set_doorbell(struct amdgpu_device *adev,
struct amdgpu_ring *ring)
{ … }
static int gfx_v10_0_cp_gfx_resume(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_cp_compute_enable(struct amdgpu_device *adev, bool enable)
{ … }
static int gfx_v10_0_cp_compute_load_microcode(struct amdgpu_device *adev)
{ … }
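/*
 * MQD (memory queue descriptor) initialization and KIQ/KCQ bring-up; compute
 * queues are mapped through the KIQ using the packet helpers defined earlier.
 */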
static void gfx_v10_0_kiq_setting(struct amdgpu_ring *ring)
{ … }
static void gfx_v10_0_gfx_mqd_set_priority(struct amdgpu_device *adev,
struct v10_gfx_mqd *mqd,
struct amdgpu_mqd_prop *prop)
{ … }
static int gfx_v10_0_gfx_mqd_init(struct amdgpu_device *adev, void *m,
struct amdgpu_mqd_prop *prop)
{ … }
static int gfx_v10_0_gfx_init_queue(struct amdgpu_ring *ring)
{ … }
static int gfx_v10_0_cp_async_gfx_ring_resume(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_compute_mqd_init(struct amdgpu_device *adev, void *m,
struct amdgpu_mqd_prop *prop)
{ … }
static int gfx_v10_0_kiq_init_register(struct amdgpu_ring *ring)
{ … }
static int gfx_v10_0_kiq_init_queue(struct amdgpu_ring *ring)
{ … }
static int gfx_v10_0_kcq_init_queue(struct amdgpu_ring *ring)
{ … }
static int gfx_v10_0_kiq_resume(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_kcq_resume(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_cp_resume(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_cp_enable(struct amdgpu_device *adev, bool enable)
{ … }
static bool gfx_v10_0_check_grbm_cam_remapping(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_setup_grbm_cam_remapping(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_disable_gpa_mode(struct amdgpu_device *adev)
{ … }
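/* IP block hooks: hw init/fini, suspend/resume, idle check and soft reset. */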
static int gfx_v10_0_hw_init(void *handle)
{ … }
static int gfx_v10_0_hw_fini(void *handle)
{ … }
static int gfx_v10_0_suspend(void *handle)
{ … }
static int gfx_v10_0_resume(void *handle)
{ … }
static bool gfx_v10_0_is_idle(void *handle)
{ … }
static int gfx_v10_0_wait_for_idle(void *handle)
{ … }
static int gfx_v10_0_soft_reset(void *handle)
{ … }
static uint64_t gfx_v10_0_get_gpu_clock_counter(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_ring_emit_gds_switch(struct amdgpu_ring *ring,
uint32_t vmid,
uint32_t gds_base, uint32_t gds_size,
uint32_t gws_base, uint32_t gws_size,
uint32_t oa_base, uint32_t oa_size)
{ … }
static int gfx_v10_0_early_init(void *handle)
{ … }
static int gfx_v10_0_late_init(void *handle)
{ … }
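/*
 * RLC safe mode handling and the clock/power gating programming behind
 * set_clockgating_state() and set_powergating_state().
 */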
static bool gfx_v10_0_is_rlc_enabled(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_set_safe_mode(struct amdgpu_device *adev, int xcc_id)
{ … }
static void gfx_v10_0_unset_safe_mode(struct amdgpu_device *adev, int xcc_id)
{ … }
static void gfx_v10_0_update_medium_grain_clock_gating(struct amdgpu_device *adev,
bool enable)
{ … }
static void gfx_v10_0_update_3d_clock_gating(struct amdgpu_device *adev,
bool enable)
{ … }
static void gfx_v10_0_update_coarse_grain_clock_gating(struct amdgpu_device *adev,
bool enable)
{ … }
static void gfx_v10_0_update_fine_grain_clock_gating(struct amdgpu_device *adev,
bool enable)
{ … }
static void gfx_v10_0_apply_medium_grain_clock_gating_workaround(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_update_gfx_clock_gating(struct amdgpu_device *adev,
bool enable)
{ … }
static void gfx_v10_0_update_spm_vmid_internal(struct amdgpu_device *adev,
unsigned int vmid)
{ … }
static void gfx_v10_0_update_spm_vmid(struct amdgpu_device *adev, struct amdgpu_ring *ring, unsigned int vmid)
{ … }
static bool gfx_v10_0_check_rlcg_range(struct amdgpu_device *adev,
uint32_t offset,
struct soc15_reg_rlcg *entries, int arr_size)
{ … }
static bool gfx_v10_0_is_rlcg_access_range(struct amdgpu_device *adev, u32 offset)
{ … }
static void gfx_v10_cntl_power_gating(struct amdgpu_device *adev, bool enable)
{ … }
static void gfx_v10_cntl_pg(struct amdgpu_device *adev, bool enable)
{ … }
static const struct amdgpu_rlc_funcs gfx_v10_0_rlc_funcs = …;
static const struct amdgpu_rlc_funcs gfx_v10_0_rlc_funcs_sriov = …;
static int gfx_v10_0_set_powergating_state(void *handle,
enum amd_powergating_state state)
{ … }
static int gfx_v10_0_set_clockgating_state(void *handle,
enum amd_clockgating_state state)
{ … }
static void gfx_v10_0_get_clockgating_state(void *handle, u64 *flags)
{ … }
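/*
 * amdgpu_ring callbacks: rptr/wptr accessors and PM4 emit helpers for IBs,
 * fences, pipeline syncs, VM flushes, register access and frame control.
 */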
static u64 gfx_v10_0_ring_get_rptr_gfx(struct amdgpu_ring *ring)
{ … }
static u64 gfx_v10_0_ring_get_wptr_gfx(struct amdgpu_ring *ring)
{ … }
static void gfx_v10_0_ring_set_wptr_gfx(struct amdgpu_ring *ring)
{ … }
static u64 gfx_v10_0_ring_get_rptr_compute(struct amdgpu_ring *ring)
{ … }
static u64 gfx_v10_0_ring_get_wptr_compute(struct amdgpu_ring *ring)
{ … }
static void gfx_v10_0_ring_set_wptr_compute(struct amdgpu_ring *ring)
{ … }
static void gfx_v10_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
{ … }
static void gfx_v10_0_ring_emit_ib_gfx(struct amdgpu_ring *ring,
struct amdgpu_job *job,
struct amdgpu_ib *ib,
uint32_t flags)
{ … }
static void gfx_v10_0_ring_emit_ib_compute(struct amdgpu_ring *ring,
struct amdgpu_job *job,
struct amdgpu_ib *ib,
uint32_t flags)
{ … }
static void gfx_v10_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
u64 seq, unsigned int flags)
{ … }
static void gfx_v10_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
{ … }
static void gfx_v10_0_ring_invalidate_tlbs(struct amdgpu_ring *ring,
uint16_t pasid, uint32_t flush_type,
bool all_hub, uint8_t dst_sel)
{ … }
static void gfx_v10_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
unsigned int vmid, uint64_t pd_addr)
{ … }
static void gfx_v10_0_ring_emit_fence_kiq(struct amdgpu_ring *ring, u64 addr,
u64 seq, unsigned int flags)
{ … }
static void gfx_v10_0_ring_emit_sb(struct amdgpu_ring *ring)
{ … }
static void gfx_v10_0_ring_emit_cntxcntl(struct amdgpu_ring *ring,
uint32_t flags)
{ … }
static unsigned int gfx_v10_0_ring_emit_init_cond_exec(struct amdgpu_ring *ring,
uint64_t addr)
{ … }
static int gfx_v10_0_ring_preempt_ib(struct amdgpu_ring *ring)
{ … }
static void gfx_v10_0_ring_emit_ce_meta(struct amdgpu_ring *ring, bool resume)
{ … }
static void gfx_v10_0_ring_emit_de_meta(struct amdgpu_ring *ring, bool resume)
{ … }
static void gfx_v10_0_ring_emit_frame_cntl(struct amdgpu_ring *ring, bool start,
bool secure)
{ … }
static void gfx_v10_0_ring_emit_rreg(struct amdgpu_ring *ring, uint32_t reg,
uint32_t reg_val_offs)
{ … }
static void gfx_v10_0_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg,
uint32_t val)
{ … }
static void gfx_v10_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
uint32_t val, uint32_t mask)
{ … }
static void gfx_v10_0_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring,
uint32_t reg0, uint32_t reg1,
uint32_t ref, uint32_t mask)
{ … }
static void gfx_v10_0_ring_soft_recovery(struct amdgpu_ring *ring,
unsigned int vmid)
{ … }
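/*
 * Interrupt state programming and handlers for EOP, privileged
 * register/instruction faults and KIQ events.
 */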
static void
gfx_v10_0_set_gfx_eop_interrupt_state(struct amdgpu_device *adev,
uint32_t me, uint32_t pipe,
enum amdgpu_interrupt_state state)
{ … }
static void gfx_v10_0_set_compute_eop_interrupt_state(struct amdgpu_device *adev,
int me, int pipe,
enum amdgpu_interrupt_state state)
{ … }
static int gfx_v10_0_set_eop_interrupt_state(struct amdgpu_device *adev,
struct amdgpu_irq_src *src,
unsigned int type,
enum amdgpu_interrupt_state state)
{ … }
static int gfx_v10_0_eop_irq(struct amdgpu_device *adev,
struct amdgpu_irq_src *source,
struct amdgpu_iv_entry *entry)
{ … }
static int gfx_v10_0_set_priv_reg_fault_state(struct amdgpu_device *adev,
struct amdgpu_irq_src *source,
unsigned int type,
enum amdgpu_interrupt_state state)
{ … }
static int gfx_v10_0_set_priv_inst_fault_state(struct amdgpu_device *adev,
struct amdgpu_irq_src *source,
unsigned int type,
enum amdgpu_interrupt_state state)
{ … }
static void gfx_v10_0_handle_priv_fault(struct amdgpu_device *adev,
struct amdgpu_iv_entry *entry)
{ … }
static int gfx_v10_0_priv_reg_irq(struct amdgpu_device *adev,
struct amdgpu_irq_src *source,
struct amdgpu_iv_entry *entry)
{ … }
static int gfx_v10_0_priv_inst_irq(struct amdgpu_device *adev,
struct amdgpu_irq_src *source,
struct amdgpu_iv_entry *entry)
{ … }
static int gfx_v10_0_kiq_set_interrupt_state(struct amdgpu_device *adev,
struct amdgpu_irq_src *src,
unsigned int type,
enum amdgpu_interrupt_state state)
{ … }
static int gfx_v10_0_kiq_irq(struct amdgpu_device *adev,
struct amdgpu_irq_src *source,
struct amdgpu_iv_entry *entry)
{ … }
static void gfx_v10_0_emit_mem_sync(struct amdgpu_ring *ring)
{ … }
static void gfx_v10_ip_print(void *handle, struct drm_printer *p)
{ … }
static void gfx_v10_ip_dump(void *handle)
{ … }
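/*
 * Function tables and the vtable wiring (set_ring_funcs(), set_irq_funcs(),
 * set_rlc_funcs(), ...) hooked up from gfx_v10_0_early_init().
 */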
static const struct amd_ip_funcs gfx_v10_0_ip_funcs = …;
static const struct amdgpu_ring_funcs gfx_v10_0_ring_funcs_gfx = …;
static const struct amdgpu_ring_funcs gfx_v10_0_ring_funcs_compute = …;
static const struct amdgpu_ring_funcs gfx_v10_0_ring_funcs_kiq = …;
static void gfx_v10_0_set_ring_funcs(struct amdgpu_device *adev)
{ … }
static const struct amdgpu_irq_src_funcs gfx_v10_0_eop_irq_funcs = …;
static const struct amdgpu_irq_src_funcs gfx_v10_0_priv_reg_irq_funcs = …;
static const struct amdgpu_irq_src_funcs gfx_v10_0_priv_inst_irq_funcs = …;
static const struct amdgpu_irq_src_funcs gfx_v10_0_kiq_irq_funcs = …;
static void gfx_v10_0_set_irq_funcs(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_set_rlc_funcs(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_set_gds_init(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_set_mqd_funcs(struct amdgpu_device *adev)
{ … }
static void gfx_v10_0_set_user_wgp_inactive_bitmap_per_sh(struct amdgpu_device *adev,
u32 bitmap)
{ … }
static u32 gfx_v10_0_get_wgp_active_bitmap_per_sh(struct amdgpu_device *adev)
{ … }
static u32 gfx_v10_0_get_cu_active_bitmap_per_sh(struct amdgpu_device *adev)
{ … }
static int gfx_v10_0_get_cu_info(struct amdgpu_device *adev,
struct amdgpu_cu_info *cu_info)
{ … }
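/*
 * GC 10.3 specific helpers: SA harvesting query, PBB workload mode
 * programming and the power brake sequence used on some 10.3 parts.
 */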
static u32 gfx_v10_3_get_disabled_sa(struct amdgpu_device *adev)
{ … }
static void gfx_v10_3_program_pbb_mode(struct amdgpu_device *adev)
{ … }
static void gfx_v10_3_set_power_brake_sequence(struct amdgpu_device *adev)
{ … }
const struct amdgpu_ip_block_version gfx_v10_0_ip_block = …;