#include "umc_v6_1.h"
#include "amdgpu_ras.h"
#include "amdgpu_umc.h"
#include "amdgpu.h"
#include "rsmu/rsmu_0_0_2_offset.h"
#include "rsmu/rsmu_0_0_2_sh_mask.h"
#include "umc/umc_6_1_1_offset.h"
#include "umc/umc_6_1_1_sh_mask.h"
#include "umc/umc_6_1_2_offset.h"
#define UMC_6_INST_DIST …
const uint32_t
umc_v6_1_channel_idx_tbl[UMC_V6_1_UMC_INSTANCE_NUM][UMC_V6_1_CHANNEL_INSTANCE_NUM] = …;
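/*
 * The next three helpers toggle and read back UMC register index mode through
 * the RSMU registers included above (rsmu_0_0_2). The assumption is that they
 * set, clear and query an RSMU_UMC_INDEX_MODE_EN-style field, and that the
 * error-count/address queries below run with index mode disabled so each
 * channel's registers can be addressed directly.
 */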
static void umc_v6_1_enable_umc_index_mode(struct amdgpu_device *adev)
{ … }
static void umc_v6_1_disable_umc_index_mode(struct amdgpu_device *adev)
{ … }
static uint32_t umc_v6_1_get_umc_index_mode_state(struct amdgpu_device *adev)
{ … }
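/*
 * Likely body (elided): a flat register offset built from the instance and
 * channel strides, roughly
 *
 *	return UMC_6_INST_DIST * umc_inst + <per-channel stride> * ch_inst;
 *
 * The per-channel stride symbol is an assumption; only UMC_6_INST_DIST is
 * visible above.
 */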
static inline uint32_t get_umc_6_reg_offset(struct amdgpu_device *adev,
uint32_t umc_inst,
uint32_t ch_inst)
{ … }
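/*
 * Per-channel clear: expected to reset the channel's ECC error counter and
 * MCA status registers at umc_reg_offset. Register names are assumptions
 * based on the umc_6_1_1/umc_6_1_2 headers included above, which provide the
 * two register layout variants.
 */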
static void umc_v6_1_clear_error_count_per_channel(struct amdgpu_device *adev,
uint32_t umc_reg_offset)
{ … }
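/*
 * Presumably walks every UMC instance and channel, translating each pair
 * through get_umc_6_reg_offset() and delegating to the per-channel clear
 * above.
 */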
static void umc_v6_1_clear_error_count(struct amdgpu_device *adev)
{ … }
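/*
 * Correctable (CE) path: presumably reads the channel's ECC error counter
 * (adjusting for the initial seed programmed by the err_cnt_init code below)
 * and the MCA status register, adding the result to *error_count.
 */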
static void umc_v6_1_query_correctable_error_count(struct amdgpu_device *adev,
uint32_t umc_reg_offset,
unsigned long *error_count)
{ … }
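/*
 * Uncorrectable (UE) path: presumably inspects the channel's MCA status
 * register for valid, deferred and uncorrected-error indications and bumps
 * *error_count accordingly; the exact status bits are assumptions.
 */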
static void umc_v6_1_querry_uncorrectable_error_count(struct amdgpu_device *adev,
uint32_t umc_reg_offset,
unsigned long *error_count)
{ … }
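/*
 * .query_ras_error_count callback; ras_error_status is expected to be a
 * struct ras_err_data. Assumed flow: temporarily disable UMC index mode,
 * iterate all instance/channel pairs accumulating CE and UE counts via the
 * two helpers above, clear the hardware counters, then restore the saved
 * index mode.
 */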
static void umc_v6_1_query_ras_error_count(struct amdgpu_device *adev,
void *ras_error_status)
{ … }
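/*
 * Per-channel address query: presumably decodes the channel's MCA address
 * register when the status register reports a valid error, converts it to a
 * retired page address, maps (umc_inst, ch_inst) through
 * umc_v6_1_channel_idx_tbl and appends the record to err_data.
 */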
static void umc_v6_1_query_error_address(struct amdgpu_device *adev,
struct ras_err_data *err_data,
uint32_t umc_reg_offset,
uint32_t ch_inst,
uint32_t umc_inst)
{ … }
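/*
 * .query_ras_error_address callback: expected to mirror the error-count
 * query, looping over every instance/channel pair and calling
 * umc_v6_1_query_error_address() for each, with index mode handled around
 * the loop.
 */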
static void umc_v6_1_query_ras_error_address(struct amdgpu_device *adev,
void *ras_error_status)
{ … }
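/*
 * Per-channel counter init: presumably selects the ECC error-counter source,
 * enables the ECC error interrupt and seeds the counter with a known initial
 * value so later CE reads can subtract the bias; the exact field names are
 * assumptions.
 */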
static void umc_v6_1_err_cnt_init_per_channel(struct amdgpu_device *adev,
uint32_t umc_reg_offset)
{ … }
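/*
 * .err_cnt_init hook wired up in umc_v6_1_ras below: presumably saves and
 * disables UMC index mode, runs the per-channel init on every instance and
 * channel, then restores the previous mode.
 */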
static void umc_v6_1_err_cnt_init(struct amdgpu_device *adev)
{ … }
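/*
 * RAS glue (initializers elided): umc_v6_1_ras_hw_ops is expected to point
 * .query_ras_error_count and .query_ras_error_address at the two query
 * functions above, and umc_v6_1_ras to embed those hw_ops plus the
 * umc_v6_1_err_cnt_init hook for the amdgpu RAS framework.
 */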
const struct amdgpu_ras_block_hw_ops umc_v6_1_ras_hw_ops = …;
struct amdgpu_umc_ras umc_v6_1_ras = …;