Lines matching refs: umc (AMDGPU UMC RAS handling code; each hit shows its source line and containing function)

60 		kcalloc(adev->umc.max_ras_err_cnt_per_query,  in amdgpu_umc_page_retirement_mca()
69 err_data.err_addr_len = adev->umc.max_ras_err_cnt_per_query; in amdgpu_umc_page_retirement_mca()
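Lines 60 and 69 above show the retirement path pre-sizing its error-address buffer from adev->umc.max_ras_err_cnt_per_query before the query runs. The element type is truncated in the listing, so the record below is a hypothetical stand-in; this is a minimal userspace sketch of the sizing pattern (calloc standing in for kcalloc), not the driver's actual structures.

#include <stdint.h>
#include <stdlib.h>

/* Hypothetical stand-in for one reported error-address record. */
struct fake_err_record {
	uint64_t retired_page;
};

/* Simplified stand-in for the err_data fields visible in the listing. */
struct fake_err_data {
	struct fake_err_record *err_addr;
	uint32_t err_addr_len;
};

/* Pre-size the buffer so a query callback can report up to
 * max_ras_err_cnt_per_query records without reallocating. */
static int alloc_err_addr(struct fake_err_data *err_data,
			  uint32_t max_ras_err_cnt_per_query)
{
	err_data->err_addr = calloc(max_ras_err_cnt_per_query,
				    sizeof(*err_data->err_addr));
	if (!err_data->err_addr)
		return -1;

	err_data->err_addr_len = max_ras_err_cnt_per_query;
	return 0;
}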
109 if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops && in amdgpu_umc_handle_bad_pages()
110 adev->umc.ras->ras_block.hw_ops->query_ras_error_count) in amdgpu_umc_handle_bad_pages()
111 adev->umc.ras->ras_block.hw_ops->query_ras_error_count(adev, ras_error_status); in amdgpu_umc_handle_bad_pages()
113 if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops && in amdgpu_umc_handle_bad_pages()
114 adev->umc.ras->ras_block.hw_ops->query_ras_error_address && in amdgpu_umc_handle_bad_pages()
115 adev->umc.max_ras_err_cnt_per_query) { in amdgpu_umc_handle_bad_pages()
117 kcalloc(adev->umc.max_ras_err_cnt_per_query, in amdgpu_umc_handle_bad_pages()
127 err_data->err_addr_len = adev->umc.max_ras_err_cnt_per_query; in amdgpu_umc_handle_bad_pages()
132 adev->umc.ras->ras_block.hw_ops->query_ras_error_address(adev, ras_error_status); in amdgpu_umc_handle_bad_pages()
136 if (adev->umc.ras && in amdgpu_umc_handle_bad_pages()
137 adev->umc.ras->ecc_info_query_ras_error_count) in amdgpu_umc_handle_bad_pages()
138 adev->umc.ras->ecc_info_query_ras_error_count(adev, ras_error_status); in amdgpu_umc_handle_bad_pages()
140 if (adev->umc.ras && in amdgpu_umc_handle_bad_pages()
141 adev->umc.ras->ecc_info_query_ras_error_address && in amdgpu_umc_handle_bad_pages()
142 adev->umc.max_ras_err_cnt_per_query) { in amdgpu_umc_handle_bad_pages()
144 kcalloc(adev->umc.max_ras_err_cnt_per_query, in amdgpu_umc_handle_bad_pages()
154 err_data->err_addr_len = adev->umc.max_ras_err_cnt_per_query; in amdgpu_umc_handle_bad_pages()
159 adev->umc.ras->ecc_info_query_ras_error_address(adev, ras_error_status); in amdgpu_umc_handle_bad_pages()
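Lines 109-159 repeat one guard chain: every optional pointer (ras, hw_ops, and the specific hook) is NULL-checked before the call, once for the hardware query path (hw_ops->query_ras_error_*) and once for the ECC-info path (ecc_info_query_ras_error_*). The listing does not show how the caller chooses between the two paths. A stand-alone sketch of the guard shape, with hypothetical stand-in types rather than the real amdgpu definitions:

/* Hypothetical stand-ins for the optional callback tables. */
struct fake_hw_ops {
	void (*query_ras_error_count)(void *adev, void *status);
	void (*query_ras_error_address)(void *adev, void *status);
};

struct fake_umc_ras {
	struct fake_hw_ops *hw_ops;
	void (*ecc_info_query_ras_error_count)(void *adev, void *status);
	void (*ecc_info_query_ras_error_address)(void *adev, void *status);
};

struct fake_umc {
	struct fake_umc_ras *ras;
	unsigned int max_ras_err_cnt_per_query;
};

/* Hardware path: each level of the pointer chain is optional, so each
 * level is checked before the call. */
static void query_via_hw_ops(void *adev, struct fake_umc *umc, void *status)
{
	if (umc->ras && umc->ras->hw_ops &&
	    umc->ras->hw_ops->query_ras_error_count)
		umc->ras->hw_ops->query_ras_error_count(adev, status);

	if (umc->ras && umc->ras->hw_ops &&
	    umc->ras->hw_ops->query_ras_error_address &&
	    umc->max_ras_err_cnt_per_query)
		umc->ras->hw_ops->query_ras_error_address(adev, status);
}

/* ECC-info path: same shape, different set of optional hooks. */
static void query_via_ecc_info(void *adev, struct fake_umc *umc, void *status)
{
	if (umc->ras && umc->ras->ecc_info_query_ras_error_count)
		umc->ras->ecc_info_query_ras_error_count(adev, status);

	if (umc->ras && umc->ras->ecc_info_query_ras_error_address &&
	    umc->max_ras_err_cnt_per_query)
		umc->ras->ecc_info_query_ras_error_address(adev, status);
}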
289 if (!adev->umc.ras) in amdgpu_umc_ras_sw_init()
292 ras = adev->umc.ras; in amdgpu_umc_ras_sw_init()
300 strcpy(adev->umc.ras->ras_block.ras_comm.name, "umc"); in amdgpu_umc_ras_sw_init()
303 adev->umc.ras_if = &ras->ras_block.ras_comm; in amdgpu_umc_ras_sw_init()
332 if (adev->umc.ras && in amdgpu_umc_ras_late_init()
333 adev->umc.ras->err_cnt_init) in amdgpu_umc_ras_late_init()
334 adev->umc.ras->err_cnt_init(adev); in amdgpu_umc_ras_late_init()
347 struct ras_common_if *ras_if = adev->umc.ras_if; in amdgpu_umc_process_ecc_irq()
398 if (adev->umc.node_inst_num) { in amdgpu_umc_loop_channels()
424 if (adev->umc.ras->update_ecc_status) in amdgpu_umc_update_ecc_status()
425 return adev->umc.ras->update_ecc_status(adev, in amdgpu_umc_update_ecc_status()
456 err_data->err_addr_len = adev->umc.retire_unit; in amdgpu_umc_pages_in_a_row()
459 if (adev->umc.ras && adev->umc.ras->convert_ras_err_addr) in amdgpu_umc_pages_in_a_row()
460 return adev->umc.ras->convert_ras_err_addr(adev, err_data, NULL, in amdgpu_umc_pages_in_a_row()
472 err_data.err_addr = kcalloc(adev->umc.retire_unit, in amdgpu_umc_lookup_bad_pages_in_a_row()
483 for (i = 0; i < adev->umc.retire_unit; i++) { in amdgpu_umc_lookup_bad_pages_in_a_row()
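Lines 456-483 size the address buffer by adev->umc.retire_unit and then walk retire_unit entries, i.e. one reported error address is expanded into a whole retirement unit of pages. A small sketch of that expansion loop, again with placeholder types; the per-entry contents and the conversion callback that fills them are not shown in the listing.

#include <stdint.h>
#include <stdlib.h>

/* Hypothetical per-page record; the real entry carries more fields. */
struct fake_page {
	uint64_t retired_page;
};

/* One reported error address covers a whole retirement unit, so the
 * buffer is sized by retire_unit and every slot is visited. */
static int pages_in_a_row(uint32_t retire_unit, void (*visit)(uint64_t pfn))
{
	struct fake_page *err_addr;
	uint32_t i;

	err_addr = calloc(retire_unit, sizeof(*err_addr));
	if (!err_addr)
		return -1;

	/* In the driver a convert/lookup callback fills these entries
	 * before the loop; they are left zeroed here for illustration. */
	for (i = 0; i < retire_unit; i++)
		visit(err_addr[i].retired_page);

	free(err_addr);
	return 0;
}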
497 uint64_t err_addr, uint32_t ch, uint32_t umc, in amdgpu_umc_mca_to_addr() argument
507 addr_in.ma.umc_inst = umc; in amdgpu_umc_mca_to_addr()
511 if (adev->umc.ras && adev->umc.ras->convert_ras_err_addr) { in amdgpu_umc_mca_to_addr()
512 ret = adev->umc.ras->convert_ras_err_addr(adev, NULL, &addr_in, in amdgpu_umc_mca_to_addr()
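Lines 497-512 show the MCA-to-address helper filling an input descriptor with the channel and UMC instance and then handing it to the optional convert_ras_err_addr hook. Only the umc_inst assignment and the first three callback arguments are visible in the listing, so the other fields and the trailing parameters below are placeholders; a hedged sketch of the shape:

#include <stdint.h>
#include <stddef.h>

/* Hypothetical stand-ins; only the names visible above are taken from
 * the listing, the rest is simplified for illustration. */
struct fake_mca_addr {
	uint64_t err_addr;
	uint32_t ch_inst;
	uint32_t umc_inst;
};

struct fake_addr_in {
	struct fake_mca_addr ma;
};

struct fake_umc_ras {
	int (*convert_ras_err_addr)(void *adev, void *err_data,
				    struct fake_addr_in *addr_in,
				    void *addr_out);
};

/* Describe the error in MCA terms, then convert it only if the
 * per-ASIC hook is provided. */
static int mca_to_addr(void *adev, struct fake_umc_ras *ras,
		       uint64_t err_addr, uint32_t ch, uint32_t umc,
		       struct fake_addr_in *addr_in, void *addr_out)
{
	int ret = 0;

	addr_in->ma.err_addr = err_addr;
	addr_in->ma.ch_inst = ch;
	addr_in->ma.umc_inst = umc;

	if (ras && ras->convert_ras_err_addr)
		ret = ras->convert_ras_err_addr(adev, NULL, addr_in, addr_out);

	return ret;
}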