Search hits for the identifier "err_data", grouped by source directory.

/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_umc.c
     32  struct ras_err_data *err_data, uint64_t err_addr,   [amdgpu_umc_convert_error_address() argument]
     38  err_data, err_addr, ch_inst, umc_inst);   [in amdgpu_umc_convert_error_address()]
     52  struct ras_err_data err_data;   [amdgpu_umc_page_retirement_mca() local]
     55  ret = amdgpu_ras_error_data_init(&err_data);   [in amdgpu_umc_page_retirement_mca()]
     59  err_data.err_addr =   [in amdgpu_umc_page_retirement_mca()]
     62  if (!err_data.err_addr) {   [in amdgpu_umc_page_retirement_mca()]
     69  err_data.err_addr_len = adev->umc.max_ras_err_cnt_per_query;   [in amdgpu_umc_page_retirement_mca()]
     74  ret = amdgpu_umc_convert_error_address(adev, &err_data, err_addr,   [in amdgpu_umc_page_retirement_mca()]
     80  amdgpu_ras_add_bad_pages(adev, err_data.err_addr,   [in amdgpu_umc_page_retirement_mca()]
     81  err_data ...   [in amdgpu_umc_page_retirement_mca()]
     97  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [amdgpu_umc_handle_bad_pages() local]
    193  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [amdgpu_umc_do_page_retirement() local]
    228  struct ras_err_data err_data;   [amdgpu_umc_pasid_poison_handler() local]
    361  amdgpu_umc_fill_error_record(struct ras_err_data *err_data, uint64_t err_addr, uint64_t retired_page, uint32_t channel_index, uint32_t umc_inst)   [amdgpu_umc_fill_error_record() argument]
    492  amdgpu_umc_pages_in_a_row(struct amdgpu_device *adev, struct ras_err_data *err_data, uint64_t pa_addr)   [amdgpu_umc_pages_in_a_row() argument]
    512  struct ras_err_data err_data;   [amdgpu_umc_lookup_bad_pages_in_a_row() local]
    [all ...]
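The amdgpu_umc_page_retirement_mca() hits above (source lines 52-81) outline one complete use of the structure: initialize the error data, allocate room for retired-page records, translate the reported MCA address, then hand the records to the bad-page list. The following is a condensed sketch of that flow assembled from the visible calls, not a verbatim copy of the kernel function; the allocation size, GFP flags and error handling are assumptions added for readability.

#include "amdgpu.h"
#include "amdgpu_ras.h"
#include "amdgpu_umc.h"

/* Sketch only: follows the call sequence visible in the hits above. */
static int example_umc_page_retirement_mca(struct amdgpu_device *adev,
                                           uint64_t err_addr,
                                           uint32_t ch_inst, uint32_t umc_inst)
{
        struct ras_err_data err_data;
        int ret;

        ret = amdgpu_ras_error_data_init(&err_data);
        if (ret)
                return ret;

        /* Room for the worst-case number of retired pages per query
         * (record type and GFP flags are assumed here). */
        err_data.err_addr = kcalloc(adev->umc.max_ras_err_cnt_per_query,
                                    sizeof(struct eeprom_table_record), GFP_KERNEL);
        if (!err_data.err_addr) {
                ret = -ENOMEM;
                goto out;
        }
        err_data.err_addr_len = adev->umc.max_ras_err_cnt_per_query;

        /* Translate the MCA address into retired-page records. */
        ret = amdgpu_umc_convert_error_address(adev, &err_data, err_addr,
                                               ch_inst, umc_inst);
        if (!ret && err_data.err_addr_cnt)
                amdgpu_ras_add_bad_pages(adev, err_data.err_addr,
                                         err_data.err_addr_cnt, false);

        kfree(err_data.err_addr);
out:
        amdgpu_ras_error_data_fini(&err_data);
        return ret;
}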
amdgpu_ras.c
    168  struct ras_err_data err_data;   [amdgpu_reserve_page_direct() local]
    187  ret = amdgpu_ras_error_data_init(&err_data);   [in amdgpu_reserve_page_direct()]
    192  err_data.err_addr = &err_rec;   [in amdgpu_reserve_page_direct()]
    193  amdgpu_umc_fill_error_record(&err_data, address, address, 0, 0);   [in amdgpu_reserve_page_direct()]
    196  amdgpu_ras_add_bad_pages(adev, err_data.err_addr,   [in amdgpu_reserve_page_direct()]
    197  err_data.err_addr_cnt, false);   [in amdgpu_reserve_page_direct()]
    201  amdgpu_ras_error_data_fini(&err_data);   [in amdgpu_reserve_page_direct()]
    660  amdgpu_ras_error_data_fini(&obj->err_data);   [in put_obj()]
    692  if (amdgpu_ras_error_data_init(&obj->err_data))   [in amdgpu_ras_create_obj()]
   1019  static void amdgpu_ras_get_ecc_info(struct amdgpu_device *adev, struct ras_err_data *err_data)   [amdgpu_ras_get_ecc_info() argument]
   1053  amdgpu_ras_error_print_error_data(struct amdgpu_device *adev, struct ras_manager *ras_mgr, struct ras_err_data *err_data, struct ras_query_context *qctx, const char *blk_name, bool is_ue, bool is_de)   [amdgpu_ras_error_print_error_data() argument]
   1145  amdgpu_ras_error_generate_report(struct amdgpu_device *adev, struct ras_query_if *query_if, struct ras_err_data *err_data, struct ras_query_context *qctx)   [amdgpu_ras_error_generate_report() argument]
   1224  amdgpu_ras_virt_error_generate_report(struct amdgpu_device *adev, struct ras_query_if *query_if, struct ras_err_data *err_data, struct ras_query_context *qctx)   [amdgpu_ras_virt_error_generate_report() argument]
   1258  amdgpu_rasmgr_error_data_statistic_update(struct ras_manager *obj, struct ras_err_data *err_data)   [amdgpu_rasmgr_error_data_statistic_update() argument]
   1282  amdgpu_ras_mgr_virt_error_data_statistics_update(struct ras_manager *obj, struct ras_err_data *err_data)   [amdgpu_ras_mgr_virt_error_data_statistics_update() argument]
   1330  amdgpu_aca_log_ras_error_data(struct amdgpu_device *adev, enum amdgpu_ras_block blk, enum aca_error_type type, struct ras_err_data *err_data, struct ras_query_context *qctx)   [amdgpu_aca_log_ras_error_data() argument]
   1362  amdgpu_ras_query_error_status_helper(struct amdgpu_device *adev, struct ras_query_if *info, struct ras_err_data *err_data, struct ras_query_context *qctx, unsigned int error_query_mode)   [amdgpu_ras_query_error_status_helper() argument]
   1428  struct ras_err_data err_data;   [amdgpu_ras_query_error_status_with_event() local]
   2274  struct ras_err_data err_data;   [amdgpu_ras_interrupt_umc_handler() local]
   2744  amdgpu_ras_mca2pa_by_idx(struct amdgpu_device *adev, struct eeprom_table_record *bps, struct ras_err_data *err_data)   [amdgpu_ras_mca2pa_by_idx() argument]
   2773  amdgpu_ras_mca2pa(struct amdgpu_device *adev, struct eeprom_table_record *bps, struct ras_err_data *err_data)   [amdgpu_ras_mca2pa() argument]
   2837  __amdgpu_ras_convert_rec_array_from_rom(struct amdgpu_device *adev, struct eeprom_table_record *bps, struct ras_err_data *err_data, enum amdgpu_memory_partition nps)   [__amdgpu_ras_convert_rec_array_from_rom() argument]
   2895  __amdgpu_ras_convert_rec_from_rom(struct amdgpu_device *adev, struct eeprom_table_record *bps, struct ras_err_data *err_data, enum amdgpu_memory_partition nps)   [__amdgpu_ras_convert_rec_from_rom() argument]
   2941  struct ras_err_data err_data;   [amdgpu_ras_add_bad_pages() local]
   3289  struct ras_err_data err_data;   [amdgpu_ras_do_page_retirement() local]
   5000  amdgpu_ras_error_data_init(struct ras_err_data *err_data)   [amdgpu_ras_error_data_init() argument]
   5018  amdgpu_ras_error_data_fini(struct ras_err_data *err_data)   [amdgpu_ras_error_data_fini() argument]
   5026  amdgpu_ras_error_find_node_by_id(struct ras_err_data *err_data, struct amdgpu_smuio_mcm_config_info *mcm_info)   [amdgpu_ras_error_find_node_by_id() argument]
   5074  amdgpu_ras_error_get_info(struct ras_err_data *err_data, struct amdgpu_smuio_mcm_config_info *mcm_info)   [amdgpu_ras_error_get_info() argument]
   5096  amdgpu_ras_error_statistic_ue_count(struct ras_err_data *err_data, struct amdgpu_smuio_mcm_config_info *mcm_info, u64 count)   [amdgpu_ras_error_statistic_ue_count() argument]
   5118  amdgpu_ras_error_statistic_ce_count(struct ras_err_data *err_data, struct amdgpu_smuio_mcm_config_info *mcm_info, u64 count)   [amdgpu_ras_error_statistic_ce_count() argument]
   5140  amdgpu_ras_error_statistic_de_count(struct ras_err_data *err_data, struct amdgpu_smuio_mcm_config_info *mcm_info, u64 count)   [amdgpu_ras_error_statistic_de_count() argument]
    [all ...]
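amdgpu_ras.c owns the ras_err_data lifecycle: amdgpu_ras_error_data_init()/_fini() (lines 5000/5018) bracket every use, and the statistic_*_count helpers (lines 5096-5140) fold per-MCM counts into it. The amdgpu_reserve_page_direct() hits (lines 168-201) show the smallest complete round trip, retiring a single page from a record on the stack. A hedged sketch of that path follows; the record type, the zero-initialisation and the omission of the EEPROM save step are assumptions drawn from the snippets.

#include <linux/string.h>
#include "amdgpu.h"
#include "amdgpu_ras.h"
#include "amdgpu_umc.h"

/* Sketch of the single-page path in amdgpu_reserve_page_direct(); the
 * error checks of the real function are simplified. */
static void example_reserve_page_direct(struct amdgpu_device *adev, uint64_t address)
{
        struct eeprom_table_record err_rec;     /* record type assumed */
        struct ras_err_data err_data;

        if (amdgpu_ras_error_data_init(&err_data))
                return;

        memset(&err_rec, 0, sizeof(err_rec));
        err_data.err_addr = &err_rec;

        /* err_addr == retired_page == address, channel 0, UMC instance 0. */
        amdgpu_umc_fill_error_record(&err_data, address, address, 0, 0);

        amdgpu_ras_add_bad_pages(adev, err_data.err_addr,
                                 err_data.err_addr_cnt, false);

        amdgpu_ras_error_data_fini(&err_data);
}

Note also that each ras_manager object embeds its own err_data (hits at lines 660 and 692), initialised when the object is created and released in put_obj().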
umc_v8_7.c
     93  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [umc_v8_7_ecc_info_query_ras_error_count() local]
    104  &(err_data->ce_count));   [in umc_v8_7_ecc_info_query_ras_error_count()]
    107  &(err_data->ue_count));   [in umc_v8_7_ecc_info_query_ras_error_count()]
    112  struct ras_err_data *err_data, uint64_t err_addr,   [umc_v8_7_convert_error_address() argument]
    126  amdgpu_umc_fill_error_record(err_data, err_addr,   [in umc_v8_7_convert_error_address()]
    131  struct ras_err_data *err_data,   [umc_v8_7_ecc_info_query_error_address() argument]
    145  if (!err_data->err_addr)   [in umc_v8_7_ecc_info_query_error_address()]
    155  umc_v8_7_convert_error_address(adev, err_data, err_addr,   [in umc_v8_7_ecc_info_query_error_address()]
    163  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [umc_v8_7_ecc_info_query_ras_error_address() local]
    174  err_data,   [in umc_v8_7_ecc_info_query_ras_error_address()]
    304  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [umc_v8_7_query_ras_error_count() local]
    327  umc_v8_7_query_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint32_t umc_reg_offset, uint32_t ch_inst, uint32_t umc_inst)   [umc_v8_7_query_error_address() argument]
    371  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [umc_v8_7_query_ras_error_address() local]
    [all ...]
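umc_v8_7.c (like umc_v6_1, umc_v8_10 and umc_v8_14 below) follows the legacy flat-counter pattern: the void pointer handed in by the RAS core is cast back to ras_err_data, per-channel correctable/uncorrectable totals are added to ce_count/ue_count, and addresses are recorded only when err_data->err_addr has been set up by the caller (line 145). A schematic sketch; the loop bounds and the two counter-read helpers are hypothetical placeholders for the per-IP register code, which the snippets do not show.

#include "amdgpu.h"
#include "amdgpu_ras.h"

/* Hypothetical stand-ins for the real per-channel register reads. */
static void example_read_ce(struct amdgpu_device *adev, uint32_t umc_inst,
                            uint32_t ch_inst, unsigned long *ce_count)
{
        /* real code would read the channel's correctable-error counter here */
}

static void example_read_ue(struct amdgpu_device *adev, uint32_t umc_inst,
                            uint32_t ch_inst, unsigned long *ue_count)
{
        /* real code would read the channel's uncorrectable-error status here */
}

static void example_query_ras_error_count(struct amdgpu_device *adev,
                                          void *ras_error_status,
                                          uint32_t umc_inst_num,
                                          uint32_t ch_inst_num)
{
        struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;
        uint32_t umc_inst, ch_inst;

        for (umc_inst = 0; umc_inst < umc_inst_num; umc_inst++)
                for (ch_inst = 0; ch_inst < ch_inst_num; ch_inst++) {
                        example_read_ce(adev, umc_inst, ch_inst, &err_data->ce_count);
                        example_read_ue(adev, umc_inst, ch_inst, &err_data->ue_count);
                }
}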
umc_v8_10.c
    147  struct ras_err_data *err_data = (struct ras_err_data *)data;   [umc_v8_10_query_ecc_error_count() local]
    153  &(err_data->ce_count));   [in umc_v8_10_query_ecc_error_count()]
    156  &(err_data->ue_count));   [in umc_v8_10_query_ecc_error_count()]
    206  struct ras_err_data *err_data, uint64_t err_addr,   [umc_v8_10_convert_error_address() argument]
    239  amdgpu_umc_fill_error_record(err_data, na_err_addr,   [in umc_v8_10_convert_error_address()]
    251  struct ras_err_data *err_data = (struct ras_err_data *)data;   [umc_v8_10_query_error_address() local]
    262  if (!err_data->err_addr) {   [in umc_v8_10_query_error_address()]
    277  umc_v8_10_convert_error_address(adev, err_data, err_addr,   [in umc_v8_10_query_error_address()]
    383  struct ras_err_data *err_data = (struct ras_err_data *)data;   [umc_v8_10_ecc_info_query_ecc_error_count() local]
    387  &(err_data ...   [in umc_v8_10_ecc_info_query_ecc_error_count()]
    407  struct ras_err_data *err_data = (struct ras_err_data *)data;   [umc_v8_10_ecc_info_query_error_address() local]
    [all ...]
umc_v12_0.c
    141  struct ras_err_data *err_data = (struct ras_err_data *)data;   [umc_v12_0_query_error_count() local]
    161  amdgpu_ras_error_statistic_ue_count(err_data, &mcm_info, ue_count);   [in umc_v12_0_query_error_count()]
    162  amdgpu_ras_error_statistic_ce_count(err_data, &mcm_info, ce_count);   [in umc_v12_0_query_error_count()]
    163  amdgpu_ras_error_statistic_de_count(err_data, &mcm_info, de_count);   [in umc_v12_0_query_error_count()]
    236  struct ras_err_data *err_data,   [umc_v12_0_convert_error_address() argument]
    297  if (!err_data && !dump_addr)   [in umc_v12_0_convert_error_address()]
    317  if (err_data)   [in umc_v12_0_convert_error_address()]
    318  amdgpu_umc_fill_error_record(err_data, err_addr,   [in umc_v12_0_convert_error_address()]
    330  struct ras_err_data *err_data = (struct ras_err_data *)data;   [umc_v12_0_query_error_address() local]
    346  if (!err_data ...   [in umc_v12_0_query_error_address()]
    631  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [umc_v12_0_fill_error_record() local]
    [all ...]
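umc_v12_0_query_error_count() (lines 141-163) uses the newer node-based accounting: instead of bumping the flat ce_count/ue_count fields, each count is attached to a per-MCM entry through the statistic helpers declared in amdgpu_ras.h. A minimal sketch follows; the socket_id/die_id field names of amdgpu_smuio_mcm_config_info are assumptions, and the counts are taken as parameters rather than read from hardware.

#include "amdgpu.h"
#include "amdgpu_ras.h"

static void example_log_counts_per_mcm(struct ras_err_data *err_data,
                                       uint32_t socket_id, uint32_t die_id,
                                       u64 ue_count, u64 ce_count, u64 de_count)
{
        struct amdgpu_smuio_mcm_config_info mcm_info = {
                .socket_id = socket_id,         /* assumed field names */
                .die_id = die_id,
        };

        /* Each helper finds (or creates) the node for this MCM instance
         * and adds the count to it. */
        amdgpu_ras_error_statistic_ue_count(err_data, &mcm_info, ue_count);
        amdgpu_ras_error_statistic_ce_count(err_data, &mcm_info, ce_count);
        amdgpu_ras_error_statistic_de_count(err_data, &mcm_info, de_count);
}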
umc_v6_1.c
    257  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [umc_v6_1_query_ras_error_count() local]
    279  &(err_data->ce_count));   [in umc_v6_1_query_ras_error_count()]
    282  &(err_data->ue_count));   [in umc_v6_1_query_ras_error_count()]
    296  struct ras_err_data *err_data,   [umc_v6_1_query_error_address() argument]
    324  if (!err_data->err_addr) {   [in umc_v6_1_query_error_address()]
    345  amdgpu_umc_fill_error_record(err_data, err_addr,   [in umc_v6_1_query_error_address()]
    356  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [umc_v6_1_query_ras_error_address() local]
    377  err_data,   [in umc_v6_1_query_ras_error_address()]
gfx_v9_4.c
    689  struct ras_err_data *err_data)   [gfx_v9_4_query_utc_edc_status() argument]
    715  err_data->ce_count += sec_count;   [in gfx_v9_4_query_utc_edc_status()]
    723  err_data->ue_count += ded_count;   [in gfx_v9_4_query_utc_edc_status()]
    737  err_data->ce_count += sec_count;   [in gfx_v9_4_query_utc_edc_status()]
    746  err_data->ue_count += ded_count;   [in gfx_v9_4_query_utc_edc_status()]
    759  err_data->ce_count += sec_count;   [in gfx_v9_4_query_utc_edc_status()]
    767  err_data->ue_count += ded_count;   [in gfx_v9_4_query_utc_edc_status()]
    781  err_data->ce_count += sec_count;   [in gfx_v9_4_query_utc_edc_status()]
    790  err_data->ue_count += ded_count;   [in gfx_v9_4_query_utc_edc_status()]
    804  err_data ...   [in gfx_v9_4_query_utc_edc_status()]
    868  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [gfx_v9_4_query_ras_error_count() local]
    [all ...]
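The repeated pairs in gfx_v9_4_query_utc_edc_status() all express one rule: SEC (single-error-correct) events are correctable and feed ce_count, while DED (double-error-detect) events are uncorrectable and feed ue_count; the MMHUB and gfx_v9_4_2 count functions further down also zero both fields before accumulating. A condensed sketch with the per-register SEC/DED extraction replaced by plain arrays supplied by the caller:

#include "amdgpu_ras.h"

static void example_accumulate_edc(void *ras_error_status,
                                   const uint32_t *sec_counts,
                                   const uint32_t *ded_counts,
                                   unsigned int nr_regs)
{
        struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;
        unsigned int i;

        /* Counts accumulate across blocks, so each query starts from zero. */
        err_data->ce_count = 0;
        err_data->ue_count = 0;

        for (i = 0; i < nr_regs; i++) {
                err_data->ce_count += sec_counts[i];    /* SEC -> correctable   */
                err_data->ue_count += ded_counts[i];    /* DED -> uncorrectable */
        }
}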
umc_v8_14.c
     96  struct ras_err_data *err_data = (struct ras_err_data *)data;   [umc_v8_14_query_error_count_per_channel() local]
    102  &(err_data->ce_count));   [in umc_v8_14_query_error_count_per_channel()]
    105  &(err_data->ue_count));   [in umc_v8_14_query_error_count_per_channel()]
amdgpu_ras.h
    603  #define for_each_ras_error(err_node, err_data) \   [argument]
    604  list_for_each_entry(err_node, &(err_data)->err_node_list, node)
    616  void *err_data,
    652  struct ras_err_data err_data;   [member]
    944  int amdgpu_ras_error_data_init(struct ras_err_data *err_data);
    945  void amdgpu_ras_error_data_fini(struct ras_err_data *err_data);
    946  int amdgpu_ras_error_statistic_ce_count(struct ras_err_data *err_data,
    949  int amdgpu_ras_error_statistic_ue_count(struct ras_err_data *err_data,
    952  int amdgpu_ras_error_statistic_de_count(struct ras_err_data *err_data,
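The for_each_ras_error() macro at line 603 walks the err_node_list that the statistic helpers populate. A usage sketch; the ras_err_node layout (an err_info holding per-node counters) is an assumption inferred from how the helpers are used elsewhere in this listing, so consult amdgpu_ras.h for the real field names.

#include "amdgpu_ras.h"

static u64 example_total_ce(struct ras_err_data *err_data)
{
        struct ras_err_node *err_node;
        u64 total = 0;

        /* Iterate every per-MCM node attached to this err_data. */
        for_each_ras_error(err_node, err_data)
                total += err_node->err_info.ce_count;   /* assumed field path */

        return total;
}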
amdgpu_aca.c
    494  static int aca_log_aca_error_data(struct aca_bank_error *bank_error, enum aca_error_type type, struct ras_err_data *err_data)   [aca_log_aca_error_data() argument]
    513  amdgpu_ras_error_statistic_ue_count(err_data, &mcm_info, count);   [in aca_log_aca_error_data()]
    516  amdgpu_ras_error_statistic_ce_count(err_data, &mcm_info, count);   [in aca_log_aca_error_data()]
    519  amdgpu_ras_error_statistic_de_count(err_data, &mcm_info, count);   [in aca_log_aca_error_data()]
    528  static int aca_log_aca_error(struct aca_handle *handle, enum aca_error_type type, struct ras_err_data *err_data)   [aca_log_aca_error() argument]
    540  aca_log_aca_error_data(bank_error, type, err_data);   [in aca_log_aca_error()]
    551  struct ras_err_data *err_data, struct ras_query_context *qctx)   [__aca_get_error_data() argument]
    575  aca_log_aca_error(handle, ACA_ERROR_TYPE_DEFERRED, err_data);   [in __aca_get_error_data()]
    577  return aca_log_aca_error(handle, type, err_data);   [in __aca_get_error_data()]
    589  enum aca_error_type type, struct ras_err_data *err_data,   [amdgpu_aca_get_error_data() argument]
    [all ...]
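aca_log_aca_error_data() (lines 494-519) routes one bank-error count into the matching ras_err_data statistic helper based on the ACA error type. A sketch of that dispatch; only ACA_ERROR_TYPE_DEFERRED is visible in the snippets, so the UE/CE enumerator names are assumptions, and errno.h is included only for the fallback return.

#include <linux/errno.h>
#include "amdgpu.h"
#include "amdgpu_ras.h"
#include "amdgpu_aca.h"

static int example_log_bank_error(enum aca_error_type type,
                                  struct amdgpu_smuio_mcm_config_info *mcm_info,
                                  u64 count, struct ras_err_data *err_data)
{
        switch (type) {
        case ACA_ERROR_TYPE_UE:         /* assumed enumerator name */
                return amdgpu_ras_error_statistic_ue_count(err_data, mcm_info, count);
        case ACA_ERROR_TYPE_CE:         /* assumed enumerator name */
                return amdgpu_ras_error_statistic_ce_count(err_data, mcm_info, count);
        case ACA_ERROR_TYPE_DEFERRED:
                return amdgpu_ras_error_statistic_de_count(err_data, mcm_info, count);
        default:
                return -EINVAL;
        }
}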
amdgpu_umc.h
    109  struct ras_err_data *err_data,
    162  int amdgpu_umc_fill_error_record(struct ras_err_data *err_data,
    185  struct ras_err_data *err_data, uint64_t pa_addr);
sdma_v4_4.c
    199  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [sdma_v4_4_query_ras_error_count_by_instance() local]
    219  * err_data->ue_count should be initialized to 0   [in sdma_v4_4_query_ras_error_count_by_instance()]
    225  err_data->ue_count += sec_count;   [in sdma_v4_4_query_ras_error_count_by_instance()]
    231  err_data->ce_count = 0;   [in sdma_v4_4_query_ras_error_count_by_instance()]
amdgpu_xgmi.c
   1400  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [amdgpu_xgmi_legacy_query_ras_error_count() local]
   1408  err_data->ue_count = 0;   [in amdgpu_xgmi_legacy_query_ras_error_count()]
   1409  err_data->ce_count = 0;   [in amdgpu_xgmi_legacy_query_ras_error_count()]
   1490  err_data->ue_count += ue_cnt;   [in amdgpu_xgmi_legacy_query_ras_error_count()]
   1491  err_data->ce_count += ce_cnt;   [in amdgpu_xgmi_legacy_query_ras_error_count()]
   1519  u64 mca_base, struct ras_err_data *err_data)   [__xgmi_v6_4_0_query_error_count() argument]
   1530  amdgpu_ras_error_statistic_ue_count(err_data, mcm_info, 1ULL);   [in __xgmi_v6_4_0_query_error_count()]
   1533  amdgpu_ras_error_statistic_ce_count(err_data, mcm_info, 1ULL);   [in __xgmi_v6_4_0_query_error_count()]
   1542  static void xgmi_v6_4_0_query_error_count(struct amdgpu_device *adev, int xgmi_inst, struct ras_err_data *err_data)   [xgmi_v6_4_0_query_error_count() argument]
   1551  __xgmi_v6_4_0_query_error_count(adev, &mcm_info, xgmi_v6_4_0_mca_base_array[i], err_data);   [in xgmi_v6_4_0_query_error_count()]
   1556  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [xgmi_v6_4_0_query_ras_error_count() local]
    [all ...]
umc_v6_7.h
     75  struct ras_err_data *err_data, uint64_t err_addr,
mmhub_v1_0.c
    797  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [mmhub_v1_0_query_ras_error_count() local]
    802  err_data->ue_count = 0;   [in mmhub_v1_0_query_ras_error_count()]
    803  err_data->ce_count = 0;   [in mmhub_v1_0_query_ras_error_count()]
    814  err_data->ce_count += sec_count;   [in mmhub_v1_0_query_ras_error_count()]
    815  err_data->ue_count += ded_count;   [in mmhub_v1_0_query_ras_error_count()]
mmhub_v1_7.c
   1271  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [mmhub_v1_7_query_ras_error_count() local]
   1276  err_data->ue_count = 0;   [in mmhub_v1_7_query_ras_error_count()]
   1277  err_data->ce_count = 0;   [in mmhub_v1_7_query_ras_error_count()]
   1287  err_data->ce_count += sec_count;   [in mmhub_v1_7_query_ras_error_count()]
   1288  err_data->ue_count += ded_count;   [in mmhub_v1_7_query_ras_error_count()]
gfx_v9_4_2.c
   1648  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [gfx_v9_4_2_query_ras_error_count() local]
   1654  err_data->ue_count = 0;   [in gfx_v9_4_2_query_ras_error_count()]
   1655  err_data->ce_count = 0;   [in gfx_v9_4_2_query_ras_error_count()]
   1658  err_data->ce_count += sec_count;   [in gfx_v9_4_2_query_ras_error_count()]
   1659  err_data->ue_count += ded_count;   [in gfx_v9_4_2_query_ras_error_count()]
   1662  err_data->ce_count += sec_count;   [in gfx_v9_4_2_query_ras_error_count()]
   1663  err_data->ue_count += ded_count;   [in gfx_v9_4_2_query_ras_error_count()]
mmhub_v1_8.c
    695  struct ras_err_data *err_data = (struct ras_err_data *)ras_err_status;   [mmhub_v1_8_inst_query_ras_error_count() local]
    722  amdgpu_ras_error_statistic_ce_count(err_data, &mcm_info, ce_count);   [in mmhub_v1_8_inst_query_ras_error_count()]
    723  amdgpu_ras_error_statistic_ue_count(err_data, &mcm_info, ue_count);   [in mmhub_v1_8_inst_query_ras_error_count()]
amdgpu_aca.h
    226  enum aca_error_type type, struct ras_err_data *err_data,
mmhub_v9_4.c
   1632  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [mmhub_v9_4_query_ras_error_count() local]
   1637  err_data->ue_count = 0;   [in mmhub_v9_4_query_ras_error_count()]
   1638  err_data->ce_count = 0;   [in mmhub_v9_4_query_ras_error_count()]
   1648  err_data->ce_count += sec_count;   [in mmhub_v9_4_query_ras_error_count()]
   1649  err_data->ue_count += ded_count;   [in mmhub_v9_4_query_ras_error_count()]
amdgpu_gfx.c
   1000  void *err_data,   [amdgpu_gfx_process_ras_data_cb() argument]
   1013  adev->gfx.ras->ras_block.hw_ops->query_ras_error_count(adev, err_data);   [in amdgpu_gfx_process_ras_data_cb()]
   1046  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;   [amdgpu_gfx_ras_error_func() local]
   1048  if (err_data) {   [in amdgpu_gfx_ras_error_func()]
   1049  err_data->ue_count = 0;   [in amdgpu_gfx_ras_error_func()]
   1050  err_data->ce_count = 0;   [in amdgpu_gfx_ras_error_func()]
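These hits show how the generic RAS core hands an opaque err_data pointer to the GFX block, which resets the counters and dispatches through its ras_block hw_ops table. A sketch of that callback shape; the NULL checks around the hw_ops chain are defensive additions for the example, and the interrupt plumbing around the real callback is omitted.

#include "amdgpu.h"
#include "amdgpu_ras.h"

static void example_gfx_query(struct amdgpu_device *adev, void *ras_error_status)
{
        struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;

        if (err_data) {
                err_data->ue_count = 0;
                err_data->ce_count = 0;
        }

        /* Dispatch to the per-IP implementation registered in the RAS block. */
        if (adev->gfx.ras && adev->gfx.ras->ras_block.hw_ops &&
            adev->gfx.ras->ras_block.hw_ops->query_ras_error_count)
                adev->gfx.ras->ras_block.hw_ops->query_ras_error_count(adev, err_data);
}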
/linux/drivers/misc/
smpro-errmon.c
    251  unsigned char err_data[MAX_READ_BLOCK_LENGTH];   [smpro_error_data_read() local]
    271  memset(err_data, 0x00, MAX_READ_BLOCK_LENGTH);   [in smpro_error_data_read()]
    272  ret = regmap_noinc_read(errmon->regmap, err_info->data, err_data, err_length);   [in smpro_error_data_read()]
    285  return sysfs_emit(buf, "%*phN\n", MAX_READ_BLOCK_LENGTH, err_data);   [in smpro_error_data_read()]
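Here err_data is a plain byte buffer: smpro_error_data_read() zeroes it, fills it from a non-incrementing register block, and emits it to sysfs as a bare hex string ("%*phN" prints hex digits with no separators). A condensed sketch; the err_info lookup and status checks of the real function are omitted, and MAX_READ_BLOCK_LENGTH comes from the driver source.

#include <linux/regmap.h>
#include <linux/string.h>
#include <linux/sysfs.h>

static ssize_t example_error_data_read(struct regmap *regmap, unsigned int data_reg,
                                       size_t err_length, char *buf)
{
        unsigned char err_data[MAX_READ_BLOCK_LENGTH];
        int ret;

        memset(err_data, 0x00, MAX_READ_BLOCK_LENGTH);

        /* Block read from a FIFO-style register that does not auto-increment. */
        ret = regmap_noinc_read(regmap, data_reg, err_data, err_length);
        if (ret)
                return ret;

        return sysfs_emit(buf, "%*phN\n", MAX_READ_BLOCK_LENGTH, err_data);
}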
/linux/drivers/mtd/nand/raw/
nuvoton-ma35d1-nand-controller.c
    290  u32 err_data[6];   [ma35_nfi_correct() local]
    317  err_data[i] = readl(nand->regs + MA35_NFI_REG_NANDECCED0 + i * 4);   [in ma35_nfi_correct()]
    320  temp_data[i * 4 + 0] = err_data[i] & 0xff;   [in ma35_nfi_correct()]
    321  temp_data[i * 4 + 1] = (err_data[i] >> 8) & 0xff;   [in ma35_nfi_correct()]
    322  temp_data[i * 4 + 2] = (err_data[i] >> 16) & 0xff;   [in ma35_nfi_correct()]
    323  temp_data[i * 4 + 3] = (err_data[i] >> 24) & 0xff;   [in ma35_nfi_correct()]
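In ma35_nfi_correct(), err_data holds six 32-bit ECC "error data" registers that are split into bytes so the correction loop can patch individual positions in the data buffer. A sketch of just that unpacking step; the register name and count come from the snippets, while the caller, the temp_data sizing and the actual correction are left out.

#include <linux/io.h>
#include <linux/types.h>

static void example_read_ecc_error_bytes(void __iomem *regs, u8 *temp_data)
{
        u32 err_data[6];
        int i;

        for (i = 0; i < 6; i++) {
                err_data[i] = readl(regs + MA35_NFI_REG_NANDECCED0 + i * 4);

                /* Little-endian split of each 32-bit word into four bytes. */
                temp_data[i * 4 + 0] = err_data[i] & 0xff;
                temp_data[i * 4 + 1] = (err_data[i] >> 8) & 0xff;
                temp_data[i * 4 + 2] = (err_data[i] >> 16) & 0xff;
                temp_data[i * 4 + 3] = (err_data[i] >> 24) & 0xff;
        }
}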
/linux/drivers/clk/hisilicon/
clk.c
     78  goto err_data;   [in hisi_clk_init()]
     84  err_data:   [in hisi_clk_init()]
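In hisi_clk_init(), and in p54_parse_rssical() below, err_data is not a buffer at all but a cleanup label, the usual kernel goto-unwind idiom for undoing earlier allocations on failure. A generic sketch of the idiom; the container struct and the allocations are placeholders, not the real clock code.

#include <linux/slab.h>

struct example_ctx {                    /* hypothetical container */
        void *base;
        struct clk **clks;
};

static struct example_ctx *example_init(int nr_clks)
{
        struct example_ctx *ctx;

        ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
        if (!ctx)
                return NULL;

        ctx->clks = kcalloc(nr_clks, sizeof(*ctx->clks), GFP_KERNEL);
        if (!ctx->clks)
                goto err_data;          /* unwind the earlier allocation */

        return ctx;

err_data:
        kfree(ctx);
        return NULL;
}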
/linux/drivers/net/wireless/intersil/p54/
eeprom.c
    529  goto err_data;   [in p54_parse_rssical()]
    546  goto err_data;   [in p54_parse_rssical()]
    593  err_data:   [in p54_parse_rssical()]