| /linux/drivers/gpu/drm/amd/amdgpu/ |
| H A D | amdgpu_mmhub.c |
    27  struct amdgpu_mmhub_ras *ras;  in amdgpu_mmhub_ras_sw_init() local
    29  if (!adev->mmhub.ras)  in amdgpu_mmhub_ras_sw_init()
    32  ras = adev->mmhub.ras;  in amdgpu_mmhub_ras_sw_init()
    33  err = amdgpu_ras_register_ras_block(adev, &ras->ras_block);  in amdgpu_mmhub_ras_sw_init()
    39  strcpy(ras->ras_block.ras_comm.name, "mmhub");  in amdgpu_mmhub_ras_sw_init()
    40  ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__MMHUB;  in amdgpu_mmhub_ras_sw_init()
    41  ras->ras_block.ras_comm.type = AMDGPU_RAS_ERROR__MULTI_UNCORRECTABLE;  in amdgpu_mmhub_ras_sw_init()
    42  adev->mmhub.ras_if = &ras->ras_block.ras_comm;  in amdgpu_mmhub_ras_sw_init()
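The mmhub hits above (and the nbio, mca, sdma, and jpeg entries further down) all show the same software-init shape: return early if the IP left its ras pointer NULL, register the block with the RAS core, fill in the common name/block/type fields, and publish ras_if for the generic code. Below is a minimal standalone sketch of that shape; the struct layouts and register_ras_block() helper are simplified stand-ins, not the real amdgpu types.

/* Simplified stand-ins for the amdgpu RAS structures; the real kernel
 * types carry many more fields. */
#include <stdio.h>
#include <string.h>

enum ras_block_id { RAS_BLOCK_MMHUB, RAS_BLOCK_PCIE_BIF };
enum ras_error_type { RAS_ERROR_MULTI_UNCORRECTABLE, RAS_ERROR_POISON };

struct ras_common_if {
    enum ras_block_id block;
    enum ras_error_type type;
    char name[32];
};

struct ras_block_object {
    struct ras_common_if ras_comm;
};

struct ip_block {
    struct ras_block_object *ras;   /* NULL if the IP has no RAS support */
    struct ras_common_if *ras_if;   /* handle used by the common RAS code */
};

/* Hypothetical stand-in for amdgpu_ras_register_ras_block(). */
static int register_ras_block(struct ras_block_object *obj)
{
    (void)obj;
    return 0;
}

/* Mirrors the shape of amdgpu_mmhub_ras_sw_init(): early-out, register,
 * fill in the common fields, publish ras_if. */
static int mmhub_ras_sw_init(struct ip_block *mmhub)
{
    struct ras_block_object *ras;
    int err;

    if (!mmhub->ras)
        return 0;       /* nothing to do on parts without mmhub RAS */

    ras = mmhub->ras;
    err = register_ras_block(ras);
    if (err)
        return err;

    strcpy(ras->ras_comm.name, "mmhub");
    ras->ras_comm.block = RAS_BLOCK_MMHUB;
    ras->ras_comm.type = RAS_ERROR_MULTI_UNCORRECTABLE;
    mmhub->ras_if = &ras->ras_comm;
    return 0;
}

int main(void)
{
    struct ras_block_object obj = { 0 };
    struct ip_block mmhub = { .ras = &obj };

    if (!mmhub_ras_sw_init(&mmhub))
        printf("registered RAS block '%s'\n", mmhub.ras_if->name);
    return 0;
}

Judging from the hits below, the nbio, mca, sdma, and jpeg variants differ only in the name string, block id, error type, and (for sdma and jpeg) an optional default ras_late_init hook.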
|
| H A D | amdgpu_nbio.c |
    28  struct amdgpu_nbio_ras *ras;  in amdgpu_nbio_ras_sw_init() local
    30  if (!adev->nbio.ras)  in amdgpu_nbio_ras_sw_init()
    33  ras = adev->nbio.ras;  in amdgpu_nbio_ras_sw_init()
    34  err = amdgpu_ras_register_ras_block(adev, &ras->ras_block);  in amdgpu_nbio_ras_sw_init()
    40  strcpy(ras->ras_block.ras_comm.name, "pcie_bif");  in amdgpu_nbio_ras_sw_init()
    41  ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__PCIE_BIF;  in amdgpu_nbio_ras_sw_init()
    42  ras->ras_block.ras_comm.type = AMDGPU_RAS_ERROR__MULTI_UNCORRECTABLE;  in amdgpu_nbio_ras_sw_init()
    43  adev->nbio.ras_if = &ras->ras_block.ras_comm;  in amdgpu_nbio_ras_sw_init()
|
| H A D | amdgpu_mca.c |
    33  if (adev->umc.ras->check_ecc_err_status)  in amdgpu_mca_is_deferred_error()
    34  return adev->umc.ras->check_ecc_err_status(adev,  in amdgpu_mca_is_deferred_error()
    87  struct amdgpu_mca_ras_block *ras;  in amdgpu_mca_mp0_ras_sw_init() local
    89  if (!adev->mca.mp0.ras)  in amdgpu_mca_mp0_ras_sw_init()
    92  ras = adev->mca.mp0.ras;  in amdgpu_mca_mp0_ras_sw_init()
    94  err = amdgpu_ras_register_ras_block(adev, &ras->ras_block);  in amdgpu_mca_mp0_ras_sw_init()
    100  strcpy(ras->ras_block.ras_comm.name, "mca.mp0");  in amdgpu_mca_mp0_ras_sw_init()
    101  ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__MCA;  in amdgpu_mca_mp0_ras_sw_init()
    102  ras->ras_block.ras_comm.type = AMDGPU_RAS_ERROR__MULTI_UNCORRECTABLE;  in amdgpu_mca_mp0_ras_sw_init()
    103  adev->mca.mp0.ras_if = &ras->ras_block.ras_comm;  in amdgpu_mca_mp0_ras_sw_init()
    [all …]
|
| H A D | amdgpu_ras.c |
    1094  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in amdgpu_ras_get_ecc_info() local
    1101  ret = amdgpu_dpm_get_ecc_info(adev, (void *)&(ras->umc_ecc));  in amdgpu_ras_get_ecc_info()
    1103  if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops &&  in amdgpu_ras_get_ecc_info()
    1104  adev->umc.ras->ras_block.hw_ops->query_ras_error_count)  in amdgpu_ras_get_ecc_info()
    1105  adev->umc.ras->ras_block.hw_ops->query_ras_error_count(adev, err_data);  in amdgpu_ras_get_ecc_info()
    1110  if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops &&  in amdgpu_ras_get_ecc_info()
    1111  adev->umc.ras->ras_block.hw_ops->query_ras_error_address)  in amdgpu_ras_get_ecc_info()
    1112  adev->umc.ras->ras_block.hw_ops->query_ras_error_address(adev, err_data);  in amdgpu_ras_get_ecc_info()
    1114  if (adev->umc.ras &&  in amdgpu_ras_get_ecc_info()
    1115  adev->umc.ras->ecc_info_query_ras_error_count)  in amdgpu_ras_get_ecc_info()
    [all …]
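The amdgpu_ras_get_ecc_info() hits show the driver's guarded-dispatch idiom: every UMC hook is called only after checking the ras object, its hw_ops table, and the individual callback pointer. A compilable sketch of that idiom, using hypothetical stand-in types rather than the amdgpu structures:

#include <stdio.h>

struct err_data {
    unsigned long ce_count;
    unsigned long ue_count;
};

/* Optional hardware ops; any pointer may be NULL on a given chip. */
struct umc_hw_ops {
    void (*query_ras_error_count)(struct err_data *data);
    void (*query_ras_error_address)(struct err_data *data);
};

struct umc_ras {
    const struct umc_hw_ops *hw_ops;    /* may be NULL */
};

struct device_ctx {
    struct umc_ras *umc_ras;            /* may be NULL */
};

/* Mirrors the guard chain used in amdgpu_ras_get_ecc_info(): only call a
 * hook if the ras object, the ops table, and the hook all exist. */
static void get_ecc_info(struct device_ctx *dev, struct err_data *data)
{
    if (dev->umc_ras && dev->umc_ras->hw_ops &&
        dev->umc_ras->hw_ops->query_ras_error_count)
        dev->umc_ras->hw_ops->query_ras_error_count(data);

    if (dev->umc_ras && dev->umc_ras->hw_ops &&
        dev->umc_ras->hw_ops->query_ras_error_address)
        dev->umc_ras->hw_ops->query_ras_error_address(data);
}

static void count_stub(struct err_data *data) { data->ce_count = 1; }

int main(void)
{
    struct umc_hw_ops ops = { .query_ras_error_count = count_stub };
    struct umc_ras ras = { .hw_ops = &ops };
    struct device_ctx dev = { .umc_ras = &ras };
    struct err_data data = { 0 };

    get_ecc_info(&dev, &data);      /* address hook is NULL and is skipped */
    printf("ce=%lu ue=%lu\n", data.ce_count, data.ue_count);
    return 0;
}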
|
| H A D | amdgpu_sdma.c |
    314  struct amdgpu_sdma_ras *ras = NULL;  in amdgpu_sdma_ras_sw_init() local
    319  if (!adev->sdma.ras)  in amdgpu_sdma_ras_sw_init()
    322  ras = adev->sdma.ras;  in amdgpu_sdma_ras_sw_init()
    324  err = amdgpu_ras_register_ras_block(adev, &ras->ras_block);  in amdgpu_sdma_ras_sw_init()
    330  strcpy(ras->ras_block.ras_comm.name, "sdma");  in amdgpu_sdma_ras_sw_init()
    331  ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__SDMA;  in amdgpu_sdma_ras_sw_init()
    332  ras->ras_block.ras_comm.type = AMDGPU_RAS_ERROR__MULTI_UNCORRECTABLE;  in amdgpu_sdma_ras_sw_init()
    333  adev->sdma.ras_if = &ras->ras_block.ras_comm;  in amdgpu_sdma_ras_sw_init()
    336  if (!ras->ras_block.ras_late_init)  in amdgpu_sdma_ras_sw_init()
    337  ras->ras_block.ras_late_init = amdgpu_sdma_ras_late_init;  in amdgpu_sdma_ras_sw_init()
    [all …]
|
| H A D | umc_v6_7.c |
    101  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in umc_v6_7_ecc_info_query_correctable_error_count() local
    109  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;  in umc_v6_7_ecc_info_query_correctable_error_count()
    116  if (ras->umc_ecc.record_ce_addr_supported) {  in umc_v6_7_ecc_info_query_correctable_error_count()
    121  err_addr = ras->umc_ecc.ecc[eccinfo_table_idx].mca_ceumc_addr;  in umc_v6_7_ecc_info_query_correctable_error_count()
    143  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in umc_v6_7_ecc_info_querry_uncorrectable_error_count() local
    150  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;  in umc_v6_7_ecc_info_querry_uncorrectable_error_count()
    228  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in umc_v6_7_ecc_info_query_error_address() local
    232  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;  in umc_v6_7_ecc_info_query_error_address()
    244  err_addr = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_addr;  in umc_v6_7_ecc_info_query_error_address()
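The umc_v6_7 ecc_info queries (and the umc_v8_7 / umc_v8_10 ones below) do not read hardware registers; they index a saved per-channel record in ras->umc_ecc and inspect its MCA status word. A rough standalone sketch of that table lookup follows; the record layout and the status bits are invented for illustration (the real driver decodes MCA_UMC status fields with REG_GET_FIELD()).

#include <stdint.h>
#include <stdio.h>

#define MAX_CHANNELS 8

/* Invented layout: one saved MCA record per UMC channel. */
struct ecc_record {
    uint64_t mca_umc_status;
    uint64_t mca_umc_addr;
};

struct umc_ecc_info {
    struct ecc_record ecc[MAX_CHANNELS];
};

/* Illustrative status bits only; the real register layout differs. */
#define STATUS_VALID     (1ull << 0)
#define STATUS_CORRECTED (1ull << 1)

/* Count correctable errors for one channel from the saved record,
 * mirroring the shape of umc_v6_7_ecc_info_query_correctable_error_count(). */
static unsigned long query_ce_count(const struct umc_ecc_info *info,
                                    unsigned int channel)
{
    uint64_t status = info->ecc[channel].mca_umc_status;

    if ((status & STATUS_VALID) && (status & STATUS_CORRECTED))
        return 1;
    return 0;
}

int main(void)
{
    struct umc_ecc_info info = { 0 };

    info.ecc[3].mca_umc_status = STATUS_VALID | STATUS_CORRECTED;
    printf("channel 3 CE count: %lu\n", query_ce_count(&info, 3));
    return 0;
}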
|
| H A D | amdgpu_jpeg.c |
    317  struct amdgpu_jpeg_ras *ras;  in amdgpu_jpeg_ras_sw_init() local
    319  if (!adev->jpeg.ras)  in amdgpu_jpeg_ras_sw_init()
    322  ras = adev->jpeg.ras;  in amdgpu_jpeg_ras_sw_init()
    323  err = amdgpu_ras_register_ras_block(adev, &ras->ras_block);  in amdgpu_jpeg_ras_sw_init()
    329  strcpy(ras->ras_block.ras_comm.name, "jpeg");  in amdgpu_jpeg_ras_sw_init()
    330  ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__JPEG;  in amdgpu_jpeg_ras_sw_init()
    331  ras->ras_block.ras_comm.type = AMDGPU_RAS_ERROR__POISON;  in amdgpu_jpeg_ras_sw_init()
    332  adev->jpeg.ras_if = &ras->ras_block.ras_comm;  in amdgpu_jpeg_ras_sw_init()
    334  if (!ras->ras_block.ras_late_init)  in amdgpu_jpeg_ras_sw_init()
    335  ras->ras_block.ras_late_init = amdgpu_jpeg_ras_late_init;  in amdgpu_jpeg_ras_sw_init()
|
| H A D | umc_v8_7.c |
    56  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in umc_v8_7_ecc_info_query_correctable_error_count() local
    63  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;  in umc_v8_7_ecc_info_query_correctable_error_count()
    75  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in umc_v8_7_ecc_info_querry_uncorrectable_error_count() local
    80  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;  in umc_v8_7_ecc_info_querry_uncorrectable_error_count()
    137  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in umc_v8_7_ecc_info_query_error_address() local
    140  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;  in umc_v8_7_ecc_info_query_error_address()
    152  err_addr = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_addr;  in umc_v8_7_ecc_info_query_error_address()
|
| H A D | umc_v8_10.c |
    341  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in umc_v8_10_ecc_info_query_correctable_error_count() local
    349  ecc_ce_cnt = ras->umc_ecc.ecc[eccinfo_table_idx].ce_count_lo_chip;  in umc_v8_10_ecc_info_query_correctable_error_count()
    360  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in umc_v8_10_ecc_info_query_uncorrectable_error_count() local
    368  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;  in umc_v8_10_ecc_info_query_uncorrectable_error_count()
    408  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in umc_v8_10_ecc_info_query_error_address() local
    415  mc_umc_status = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_status;  in umc_v8_10_ecc_info_query_error_address()
    428  err_addr = ras->umc_ecc.ecc[eccinfo_table_idx].mca_umc_addr;  in umc_v8_10_ecc_info_query_error_address()
|
| H A D | amdgpu_virt.c |
    908  ratelimit_state_init(&adev->virt.ras.ras_error_cnt_rs, 5 * HZ, 1);  in amdgpu_virt_init_ras()
    909  ratelimit_state_init(&adev->virt.ras.ras_cper_dump_rs, 5 * HZ, 1);  in amdgpu_virt_init_ras()
    910  ratelimit_state_init(&adev->virt.ras.ras_chk_criti_rs, 5 * HZ, 1);  in amdgpu_virt_init_ras()
    912  ratelimit_set_flags(&adev->virt.ras.ras_error_cnt_rs,  in amdgpu_virt_init_ras()
    914  ratelimit_set_flags(&adev->virt.ras.ras_cper_dump_rs,  in amdgpu_virt_init_ras()
    916  ratelimit_set_flags(&adev->virt.ras.ras_chk_criti_rs,  in amdgpu_virt_init_ras()
    919  mutex_init(&adev->virt.ras.ras_telemetry_mutex);  in amdgpu_virt_init_ras()
    922  adev->virt.ras.cper_rptr = 0;  in amdgpu_virt_init_ras()
    1625  if (__ratelimit(&virt->ras.ras_error_cnt_rs) || force_update) {  in amdgpu_virt_req_ras_err_count_internal()
    1626  mutex_lock(&virt->ras.ras_telemetry_mutex);  in amdgpu_virt_req_ras_err_count_internal()
    [all …]
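amdgpu_virt_init_ras() arms one ratelimit state per telemetry request type (error count, CPER dump, critical check), and the request path only goes to the host when __ratelimit() permits it or the caller forces an update, under a shared mutex. A small user-space sketch of that "at most one request per interval, unless forced" pattern; the timestamp check below is a simplified stand-in for the kernel's ratelimit_state, and the mutex is omitted.

#include <stdbool.h>
#include <stdio.h>
#include <time.h>

/* Simplified stand-in for struct ratelimit_state: allow one call per
 * 'interval' seconds. */
struct rate_limit {
    time_t interval;
    time_t last;
};

static bool rate_limit_allow(struct rate_limit *rl)
{
    time_t now = time(NULL);

    if (now - rl->last < rl->interval)
        return false;
    rl->last = now;
    return true;
}

/* Mirrors the shape of amdgpu_virt_req_ras_err_count_internal(): skip the
 * expensive host request unless the limiter opens up or the caller forces it. */
static void req_ras_err_count(struct rate_limit *rl, bool force_update)
{
    if (rate_limit_allow(rl) || force_update)
        printf("requesting RAS error counts from host\n");
    else
        printf("rate limited, using cached counts\n");
}

int main(void)
{
    struct rate_limit rl = { .interval = 5 };   /* ~5 s, like 5 * HZ */

    req_ras_err_count(&rl, false);  /* first call goes through */
    req_ras_err_count(&rl, false);  /* rate limited */
    req_ras_err_count(&rl, true);   /* forced update bypasses the limit */
    return 0;
}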
|
| H A D | gfx_v11_0_3.c |
    95  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in gfx_v11_0_3_poison_consumption_handler() local
    97  ras->gpu_reset_flags |= AMDGPU_RAS_GPU_RESET_MODE2_RESET;  in gfx_v11_0_3_poison_consumption_handler()
|
| H A D | gmc_v9_0.c |
    1421  adev->umc.ras = &umc_v6_1_ras;  in gmc_v9_0_set_umc_funcs()
    1430  adev->umc.ras = &umc_v6_1_ras;  in gmc_v9_0_set_umc_funcs()
    1440  adev->umc.ras = &umc_v6_7_ras;  in gmc_v9_0_set_umc_funcs()
    1455  adev->umc.ras = &umc_v12_0_ras;  in gmc_v9_0_set_umc_funcs()
    1485  adev->mmhub.ras = &mmhub_v1_0_ras;  in gmc_v9_0_set_mmhub_ras_funcs()
    1488  adev->mmhub.ras = &mmhub_v9_4_ras;  in gmc_v9_0_set_mmhub_ras_funcs()
    1491  adev->mmhub.ras = &mmhub_v1_7_ras;  in gmc_v9_0_set_mmhub_ras_funcs()
    1495  adev->mmhub.ras = &mmhub_v1_8_ras;  in gmc_v9_0_set_mmhub_ras_funcs()
    1513  adev->hdp.ras = &hdp_v4_0_ras;  in gmc_v9_0_set_hdp_ras_funcs()
    1524  mca->mp0.ras = &mca_v3_0_mp0_ras;  in gmc_v9_0_set_mca_ras_funcs()
    [all …]
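The gmc_v9_0 hits are the selection step: for each detected IP revision the matching version-specific ras object is hung off adev (umc, mmhub, hdp, mca), and blocks that stay NULL are simply skipped later by the sw_init helpers. A trimmed sketch of that switch, assuming a hypothetical version enum and two stand-in descriptors rather than the real IP-version checks:

#include <stdio.h>

/* Hypothetical IP versions and stand-in ras descriptors. */
enum umc_ip_version { UMC_6_1, UMC_6_7, UMC_UNKNOWN };

struct umc_ras_desc { const char *name; };

static struct umc_ras_desc umc_v6_1_ras = { "umc_v6_1" };
static struct umc_ras_desc umc_v6_7_ras = { "umc_v6_7" };

struct umc_block {
    struct umc_ras_desc *ras;   /* left NULL when there is no RAS support */
};

/* Mirrors the switch in gmc_v9_0_set_umc_funcs(): one case per IP
 * revision, each installing the matching ras object. */
static void set_umc_ras(struct umc_block *umc, enum umc_ip_version ver)
{
    switch (ver) {
    case UMC_6_1:
        umc->ras = &umc_v6_1_ras;
        break;
    case UMC_6_7:
        umc->ras = &umc_v6_7_ras;
        break;
    default:
        umc->ras = NULL;    /* unsupported: later sw_init bails out */
        break;
    }
}

int main(void)
{
    struct umc_block umc = { 0 };

    set_umc_ras(&umc, UMC_6_7);
    printf("selected %s\n", umc.ras ? umc.ras->name : "none");
    return 0;
}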
|
| H A D | soc15.c |
    508  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in soc15_asic_baco_reset() local
    512  if (ras && adev->ras_enabled)  in soc15_asic_baco_reset()
    520  if (ras && adev->ras_enabled)  in soc15_asic_baco_reset()
    531  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in soc15_asic_reset_method() local
    565  if (ras && adev->ras_enabled &&  in soc15_asic_reset_method()
    1337  if (adev->nbio.ras &&  in soc15_common_hw_fini()
    1338  adev->nbio.ras->init_ras_controller_interrupt)  in soc15_common_hw_fini()
    1340  if (adev->nbio.ras &&  in soc15_common_hw_fini()
    1341  adev->nbio.ras->init_ras_err_event_athub_interrupt)  in soc15_common_hw_fini()
|
| H A D | amdgpu_mmhub.h | 71 struct amdgpu_mmhub_ras *ras; member
|
| /linux/drivers/gpu/drm/amd/pm/powerplay/hwmgr/ |
| H A D | vega20_baco.c |
    75  struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);  in vega20_baco_set_state() local
    86  if (!ras || !adev->ras_enabled) {  in vega20_baco_set_state()
|
| /linux/drivers/edac/ |
| H A D | i5100_edac.c |
    433  unsigned ras,  in i5100_handle_ce() argument
    441  bank, cas, ras);  in i5100_handle_ce()
    455  unsigned ras,  in i5100_handle_ue() argument
    463  bank, cas, ras);  in i5100_handle_ue()
    483  unsigned ras;  in i5100_read_log() local
    503  ras = i5100_recmemb_ras(dw2);  in i5100_read_log()
    512  i5100_handle_ce(mci, chan, bank, rank, syndrome, cas, ras, msg);  in i5100_read_log()
    525  ras = i5100_nrecmemb_ras(dw2);  in i5100_read_log()
    534  i5100_handle_ue(mci, chan, bank, rank, syndrome, cas, ras, msg);  in i5100_read_log()
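In i5100_edac.c the RAS/CAS/bank/rank coordinates decoded from the error log registers are handed to small CE/UE handlers that format them into the report. A compilable sketch of that reporting step, with a plain printf standing in for the EDAC reporting call and invented example values:

#include <stdio.h>

/* Stand-in for the decoded DIMM coordinates pulled out of the i5100
 * memory error log registers (recmemb/nrecmemb). */
struct mem_err_loc {
    unsigned chan;
    unsigned bank;
    unsigned rank;
    unsigned cas;
    unsigned ras;
    unsigned syndrome;
};

/* Mirrors the shape of i5100_handle_ce(): build a human-readable message
 * from the coordinates; the real driver hands this to the EDAC core. */
static void handle_ce(const struct mem_err_loc *loc)
{
    char msg[128];

    snprintf(msg, sizeof(msg),
             "bank %u, cas %u, ras %u", loc->bank, loc->cas, loc->ras);
    printf("CE chan %u rank %u syndrome 0x%x (%s)\n",
           loc->chan, loc->rank, loc->syndrome, msg);
}

int main(void)
{
    struct mem_err_loc loc = {
        .chan = 0, .bank = 2, .rank = 1,
        .cas = 0x3a, .ras = 0x1fe, .syndrome = 0x5,
    };

    handle_ce(&loc);
    return 0;
}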
|
| /linux/drivers/ras/ |
| H A D | Kconfig |
    34  source "arch/x86/ras/Kconfig"
    35  source "drivers/ras/amd/atl/Kconfig"
|
| H A D | Makefile | 2 obj-$(CONFIG_RAS) += ras.o
|
| /linux/include/linux/netfilter/ |
| H A D | nf_conntrack_h323_asn1.h | 91 int DecodeRasMessage(unsigned char *buf, size_t sz, RasMessage * ras);
|
| /linux/drivers/cxl/core/ |
| H A D | Makefile | 17 cxl_core-y += ras.o
|
| H A D | pci.c |
    654  return __cxl_handle_cor_ras(cxlds, cxlds->regs.ras);  in cxl_handle_endpoint_cor_ras()
    714  return __cxl_handle_ras(cxlds, cxlds->regs.ras);  in cxl_handle_endpoint_ras()
    739  if (!map->component_map.ras.valid)  in cxl_dport_map_ras()
    795  return __cxl_handle_cor_ras(cxlds, dport->regs.ras);  in cxl_handle_rdport_cor_ras()
    801  return __cxl_handle_ras(cxlds, dport->regs.ras);  in cxl_handle_rdport_ras()
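The CXL handlers take a mapped RAS capability base (cxlds->regs.ras or dport->regs.ras) and work on the status registers inside it. The sketch below shows the generic read-status-then-write-one-to-clear shape such handlers usually follow, over a fake in-memory register block; the index and bit meanings are invented and do not reproduce the CXL register layout.

#include <stdint.h>
#include <stdio.h>

#define COR_STATUS_IDX 0    /* invented index for the correctable status register */

/* Generic correctable-error handling over a write-one-to-clear status
 * register: read it, report any set bits, write the observed bits back.
 * On real hardware that write clears them; this fake in-memory block
 * does not emulate the RW1C behaviour. */
static void handle_cor_ras(volatile uint32_t *ras_base)
{
    uint32_t status = ras_base[COR_STATUS_IDX];

    if (!status)
        return;

    printf("correctable RAS status: 0x%08x\n", (unsigned)status);
    ras_base[COR_STATUS_IDX] = status;  /* RW1C clear on real hardware */
}

int main(void)
{
    uint32_t fake_ras_regs[4] = { 0x3, 0, 0, 0 };

    handle_cor_ras(fake_ras_regs);
    return 0;
}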
|
| H A D | regs.c |
    93  rmap = &map->ras;  in cxl_probe_component_regs()
    213  { &map->component_map.ras, &regs->ras },  in cxl_map_component_regs()
|
| /linux/arch/x86/ras/ |
| H A D | Kconfig | 20 Add extra files to (debugfs)/ras/cec to test the correctable error
|
| /linux/Documentation/translations/zh_CN/admin-guide/ |
| H A D | index.rst | 117 * ras
|
| /linux/Documentation/translations/zh_TW/admin-guide/ |
| H A D | index.rst | 120 * ras
|