
Searched refs:dpa (Results 1 – 25 of 25) sorted by relevance

/linux/tools/testing/cxl/test/
cxl_translate.c
141 static int run_translation_test(u64 dpa, int pos, u8 r_eiw, u16 r_eig, in run_translation_test() argument
148 translated_spa = to_hpa(dpa, pos, r_eiw, r_eig, hb_ways, math); in run_translation_test()
157 if (reverse_dpa != dpa) { in run_translation_test()
159 dpa, reverse_dpa); in run_translation_test()
188 static int parse_test_vector(const char *entry, u64 *dpa, int *pos, u8 *r_eiw, in parse_test_vector() argument
195 parsed = sscanf(entry, "%llu %d %u %u %u %d %llu", dpa, pos, &tmp_r_eiw, in parse_test_vector()
250 u64 dpa, hpa, reverse_dpa; in test_random_params() local
261 dpa = get_random_u64() >> 12; in test_random_params()
266 hpa = cxl_calculate_hpa_offset(dpa, pos, eiw, eig); in test_random_params()
270 if (reverse_dpa == dpa && reverse_pos == pos) in test_random_params()
[all …]
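
The cxl_translate.c hits above exercise a DPA-to-HPA translation and then check that the reverse mapping returns the original DPA and position. Below is a minimal standalone sketch of that round-trip idea using the textbook power-of-two interleave math; dpa_to_hpa(), hpa_to_dpa() and all parameter values are hypothetical stand-ins, not the kernel's to_hpa()/cxl_calculate_hpa_offset() implementation.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical forward map: insert the interleave-position bits above the granularity. */
static uint64_t dpa_to_hpa(uint64_t dpa, int pos, int ways, uint64_t gran)
{
	return (dpa / gran) * ways * gran + (uint64_t)pos * gran + dpa % gran;
}

/* Hypothetical reverse map: recover the interleave position and the original DPA. */
static uint64_t hpa_to_dpa(uint64_t hpa, int ways, uint64_t gran, int *pos)
{
	*pos = (int)((hpa / gran) % (uint64_t)ways);
	return hpa / (gran * ways) * gran + hpa % gran;
}

int main(void)
{
	uint64_t dpa = 0x12345000, gran = 256;
	int ways = 4, pos = 2, rpos;
	uint64_t hpa = dpa_to_hpa(dpa, pos, ways, gran);
	uint64_t rdpa = hpa_to_dpa(hpa, ways, gran, &rpos);

	if (rdpa != dpa || rpos != pos)
		printf("round-trip failed: dpa=%#llx reverse=%#llx\n",
		       (unsigned long long)dpa, (unsigned long long)rdpa);
	else
		printf("round-trip ok: hpa=%#llx\n", (unsigned long long)hpa);
	return 0;
}
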
mem.c
1129 u64 dpa; member
1137 u64 dpa; in cxl_get_injected_po() local
1146 if (mock_poison_list[i].dpa < offset || in cxl_get_injected_po()
1147 mock_poison_list[i].dpa > offset + length - 1) in cxl_get_injected_po()
1150 dpa = mock_poison_list[i].dpa + CXL_POISON_SOURCE_INJECTED; in cxl_get_injected_po()
1151 po->record[nr_records].address = cpu_to_le64(dpa); in cxl_get_injected_po()
1195 static int mock_poison_add(struct cxl_dev_state *cxlds, u64 dpa) in mock_poison_add() argument
1208 mock_poison_list[i].dpa = dpa; in mock_poison_add()
1219 static bool mock_poison_found(struct cxl_dev_state *cxlds, u64 dpa) in mock_poison_found() argument
1223 mock_poison_list[i].dpa == dpa) in mock_poison_found()
[all …]
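
The mock poison hits above filter a stored DPA against the queried window and report it with an "injected" source code added into the low bits of the aligned address. A small hedged sketch of just that containment-and-tag step, with a placeholder value standing in for CXL_POISON_SOURCE_INJECTED:

#include <stdbool.h>
#include <stdint.h>

#define DEMO_POISON_SOURCE_INJECTED 0x3ULL	/* placeholder for CXL_POISON_SOURCE_INJECTED */

/* Is the stored DPA inside the queried [offset, offset + length) window? */
static bool dpa_in_window(uint64_t dpa, uint64_t offset, uint64_t length)
{
	return dpa >= offset && dpa <= offset + length - 1;
}

/* Report the (aligned) DPA with the "injected" source code added into its low bits. */
static uint64_t tag_injected(uint64_t dpa)
{
	return dpa + DEMO_POISON_SOURCE_INJECTED;
}
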
/linux/drivers/nvdimm/
dimm.c
47 ndd->dpa.name = dev_name(dev); in nvdimm_probe()
50 ndd->dpa.start = 0; in nvdimm_probe()
51 ndd->dpa.end = -1; in nvdimm_probe()
label.h
91 __le64 dpa; member
128 __le64 dpa; member
168 __le64 dpa; member
nd.h
35 struct resource dpa; member
121 return __le64_to_cpu(nd_label->cxl.dpa); in nsl_get_dpa()
122 return __le64_to_cpu(nd_label->efi.dpa); in nsl_get_dpa()
126 struct nd_namespace_label *nd_label, u64 dpa) in nsl_set_dpa() argument
129 nd_label->cxl.dpa = __cpu_to_le64(dpa); in nsl_set_dpa()
131 nd_label->efi.dpa = __cpu_to_le64(dpa); in nsl_set_dpa()
362 for (res = (ndd)->dpa.child; res; res = res->sibling)
365 for (res = (ndd)->dpa.child, next = res ? res->sibling : NULL; \
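
The nd.h hits above show nsl_get_dpa()/nsl_set_dpa() hiding which on-media label layout (CXL or EFI) carries the DPA field. A standalone sketch of that dispatch, assuming a simple is_cxl flag; the real labels store the field as __le64 and convert with __le64_to_cpu()/__cpu_to_le64().

#include <stdbool.h>
#include <stdint.h>

struct cxl_label { uint64_t dpa; /* ... */ };
struct efi_label { uint64_t dpa; /* ... */ };

/* Hypothetical label wrapper: one of two on-media formats holds the DPA. */
struct ns_label {
	bool is_cxl;
	union {
		struct cxl_label cxl;
		struct efi_label efi;
	};
};

static uint64_t nsl_get_dpa_sketch(const struct ns_label *l)
{
	return l->is_cxl ? l->cxl.dpa : l->efi.dpa;
}

static void nsl_set_dpa_sketch(struct ns_label *l, uint64_t dpa)
{
	if (l->is_cxl)
		l->cxl.dpa = dpa;
	else
		l->efi.dpa = dpa;
}
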
region_devs.c
542 unsigned long val, dpa; in align_store() local
558 dpa = div_u64_rem(val, mappings, &remainder); in align_store()
559 if (!is_power_of_2(dpa) || dpa < PAGE_SIZE in align_store()
dimm_devs.c
810 __release_region(&ndd->dpa, res->start, resource_size(res)); in nvdimm_free_dpa()
824 res = __request_region(&ndd->dpa, start, n, name, 0); in nvdimm_allocate_dpa()
label.c
575 u64 dpa = nsl_get_dpa(ndd, nd_label); in nd_label_active_count() local
579 slot, label_slot, dpa, size); in nd_label_active_count()
/linux/drivers/video/fbdev/via/
vt1636.c
179 struct VT1636_DPA_SETTING dpa = {0x00, 0x00}, dpa_16x12 = {0x0B, 0x03}, in viafb_vt1636_patch_skew_on_vt3324() local
195 pdpa = &dpa; in viafb_vt1636_patch_skew_on_vt3324()
204 struct VT1636_DPA_SETTING dpa = {0x00, 0x00}; in viafb_vt1636_patch_skew_on_vt3327() local
215 set_dpa_vt1636(plvds_setting_info, plvds_chip_info, &dpa); in viafb_vt1636_patch_skew_on_vt3327()
/linux/drivers/cxl/
mem.c
86 static int cxl_debugfs_poison_inject(void *data, u64 dpa) in cxl_debugfs_poison_inject() argument
90 return cxl_inject_poison(cxlmd, dpa); in cxl_debugfs_poison_inject()
96 static int cxl_debugfs_poison_clear(void *data, u64 dpa) in cxl_debugfs_poison_clear() argument
100 return cxl_clear_poison(cxlmd, dpa); in cxl_debugfs_poison_clear()
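
The cxl/mem.c hits above are write callbacks of the form int fn(void *data, u64 dpa), which is the shape debugfs attribute setters take. A hedged sketch of how such a hook is typically wired up with DEFINE_DEBUGFS_ATTRIBUTE(); the fops name, file name and stored variable are illustrative, not necessarily what drivers/cxl/mem.c does.

#include <linux/debugfs.h>

static u64 last_injected_dpa;

static int poison_inject_set(void *data, u64 dpa)
{
	/* a real driver would pass its device via @data and call its inject routine */
	last_injected_dpa = dpa;
	return 0;
}
DEFINE_DEBUGFS_ATTRIBUTE(poison_inject_fops, NULL, poison_inject_set, "%llx\n");

/* at init time, e.g.:
 * debugfs_create_file_unsafe("inject_poison", 0200, parent_dentry, dev_cookie,
 *			      &poison_inject_fops);
 */
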
/linux/drivers/cxl/core/
edac.c
903 u64 dpa; member
925 rec = xa_load(&array_rec->rec_gen_media, attrbs->dpa); in cxl_find_rec_gen_media()
946 rec = xa_load(&array_rec->rec_dram, attrbs->dpa); in cxl_find_rec_dram()
1187 u64 dpa; member
1299 attrbs.dpa = ctx->dpa; in cxl_mem_get_rec_dram()
1347 ACQUIRE(rwsem_read_intr, dpa_rwsem)(&cxl_rwsem.dpa); in cxl_mem_perform_sparing()
1440 CXL_SPARING_GET_ATTR(dpa, u64) in CXL_SPARING_GET_ATTR()
1517 static int cxl_mem_sparing_set_dpa(struct device *dev, void *drv_data, u64 dpa) in cxl_mem_sparing_set_dpa() argument
1523 if (!cxl_resource_contains_addr(&cxlds->dpa_res, dpa)) in cxl_mem_sparing_set_dpa()
1526 ctx->dpa = dpa; in cxl_mem_sparing_set_dpa()
[all …]
hdm.c
21 .dpa = __RWSEM_INITIALIZER(cxl_rwsem.dpa),
210 guard(rwsem_read)(&cxl_rwsem.dpa); in cxl_dpa_debug()
262 lockdep_assert_held_write(&cxl_rwsem.dpa); in __cxl_dpa_release()
277 guard(rwsem_write)(&cxl_rwsem.dpa); in cxl_dpa_release()
289 lockdep_assert_held_write(&cxl_rwsem.dpa); in devm_cxl_dpa_release()
357 lockdep_assert_held_write(&cxl_rwsem.dpa); in __cxl_dpa_reserve()
466 guard(rwsem_write)(&cxl_rwsem.dpa); in cxl_dpa_setup()
512 scoped_guard(rwsem_write, &cxl_rwsem.dpa) in devm_cxl_dpa_reserve()
524 guard(rwsem_read)(&cxl_rwsem.dpa); in cxl_dpa_size()
535 lockdep_assert_held(&cxl_rwsem.dpa); in cxl_dpa_resource_start()
[all …]
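
Nearly every hdm.c hit above takes cxl_rwsem.dpa through the scope-based guard()/scoped_guard() helpers from <linux/cleanup.h>, which release the semaphore automatically when the scope ends. A hedged, self-contained sketch of that pattern; demo_rwsem and the two functions are stand-ins, only the __RWSEM_INITIALIZER() usage mirrors the excerpt.

#include <linux/cleanup.h>
#include <linux/rwsem.h>

/* Hypothetical stand-in for the cxl_rwsem bundle referenced in the hits. */
static struct {
	struct rw_semaphore dpa;
} demo_rwsem = {
	.dpa = __RWSEM_INITIALIZER(demo_rwsem.dpa),
};

static int read_dpa_state(void)
{
	guard(rwsem_read)(&demo_rwsem.dpa);	/* dropped automatically at any return */
	/* ... walk DPA reservations while holding the read side ... */
	return 0;
}

static void update_dpa_state(void)
{
	scoped_guard(rwsem_write, &demo_rwsem.dpa) {
		/* write side is held only inside this block */
	}
}
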
trace.h
460 __field(u64, dpa)
485 __entry->dpa = le64_to_cpu(rec->media_hdr.phys_addr);
486 __entry->dpa_flags = __entry->dpa & CXL_DPA_FLAGS_MASK;
488 __entry->dpa &= CXL_DPA_MASK;
523 __entry->dpa, show_dpa_flags(__entry->dpa_flags),
603 __field(u64, dpa)
634 __entry->dpa = le64_to_cpu(rec->media_hdr.phys_addr);
635 __entry->dpa_flags = __entry->dpa & CXL_DPA_FLAGS_MASK;
636 __entry->dpa &= CXL_DPA_MASK;
678 __entry->dpa, show_dpa_flags(__entry->dpa_flags),
[all …]
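
The trace.h hits above split the event's physical-address field into a DPA and a small flags field carried in its low bits, masking before the address is reported. A standalone illustration of that split; the mask values are placeholders, not the kernel's CXL_DPA_MASK/CXL_DPA_FLAGS_MASK definitions.

#include <stdint.h>
#include <stdio.h>

#define DEMO_DPA_FLAGS_MASK 0x3fULL		/* low bits: flag field (placeholder width) */
#define DEMO_DPA_MASK       (~DEMO_DPA_FLAGS_MASK)

int main(void)
{
	uint64_t raw = 0x1234500000ULL | 0x1;	/* address field with a flag bit set */
	uint64_t flags = raw & DEMO_DPA_FLAGS_MASK;
	uint64_t dpa = raw & DEMO_DPA_MASK;

	printf("dpa=%#llx flags=%#llx\n",
	       (unsigned long long)dpa, (unsigned long long)flags);
	return 0;
}
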
region.c
2258 guard(rwsem_read)(&cxl_rwsem.dpa); in __attach_target()
2262 guard(rwsem_read)(&cxl_rwsem.dpa); in __attach_target()
2901 u64 dpa; member
2909 u64 dpa = ctx->dpa; in __cxl_dpa_to_region() local
2918 if (!cxl_resource_contains_addr(cxled->dpa_res, dpa)) in __cxl_dpa_to_region()
2928 dev_dbg(dev, "dpa:0x%llx mapped in region:%s\n", dpa, in __cxl_dpa_to_region()
2931 dev_dbg(dev, "dpa:0x%llx mapped in endpoint:%s\n", dpa, in __cxl_dpa_to_region()
2939 struct cxl_region *cxl_dpa_to_region(const struct cxl_memdev *cxlmd, u64 dpa) in cxl_dpa_to_region() argument
2945 .dpa = dpa, in cxl_dpa_to_region()
3116 u64 dpa) in cxl_dpa_to_hpa() argument
[all …]
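
The region.c hits above resolve a DPA to the region whose endpoint decoder reserved a DPA range containing it. A minimal standalone sketch of that containment walk; the types, array and names are hypothetical, whereas the kernel iterates child decoder devices and uses cxl_resource_contains_addr().

#include <stdint.h>
#include <stdio.h>

struct dpa_window {
	uint64_t start, end;	/* inclusive range reserved by one decoder */
	const char *region;
};

static const struct dpa_window *dpa_to_window(const struct dpa_window *w,
					      int nr, uint64_t dpa)
{
	for (int i = 0; i < nr; i++)
		if (dpa >= w[i].start && dpa <= w[i].end)
			return &w[i];
	return NULL;	/* DPA not mapped by any region */
}

int main(void)
{
	const struct dpa_window windows[] = {
		{ 0x0,        0xfffffff,  "region0" },
		{ 0x10000000, 0x1fffffff, "region1" },
	};
	const struct dpa_window *hit = dpa_to_window(windows, 2, 0x10004000);

	printf("dpa:0x10004000 mapped in %s\n", hit ? hit->region : "nothing");
	return 0;
}
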
cdat.c
574 struct range dpa = { in dpa_perf_contains() local
579 return range_contains(&perf->dpa_range, &dpa); in dpa_perf_contains()
987 lockdep_assert_held(&cxl_rwsem.dpa); in cxl_region_shared_upstream_bandwidth_update()
1057 lockdep_assert_held(&cxl_rwsem.dpa); in cxl_region_perf_data_calculate()
mbox.c
908 u64 dpa, hpa = ULLONG_MAX, hpa_alias = ULLONG_MAX; in cxl_event_trace_record() local
917 guard(rwsem_read)(&cxl_rwsem.dpa); in cxl_event_trace_record()
919 dpa = le64_to_cpu(evt->media_hdr.phys_addr) & CXL_DPA_MASK; in cxl_event_trace_record()
920 cxlr = cxl_dpa_to_region(cxlmd, dpa); in cxl_event_trace_record()
924 hpa = cxl_dpa_to_hpa(cxlr, cxlmd, dpa); in cxl_event_trace_record()
port.c
241 guard(rwsem_read)(&cxl_rwsem.dpa); in dpa_resource_show()
/linux/drivers/edac/
mem_repair.c
85 MR_ATTR_SHOW(dpa, get_dpa, u64, "0x%llx\n")
123 MR_ATTR_STORE(dpa, set_dpa, u64, kstrtou64) in MR_ATTR_STORE()
296 [MR_DPA] = __ATTR_RW(dpa),
/linux/include/linux/
edac.h
818 int (*get_dpa)(struct device *dev, void *drv_data, u64 *dpa);
819 int (*set_dpa)(struct device *dev, void *drv_data, u64 dpa);
820 int (*get_min_dpa)(struct device *dev, void *drv_data, u64 *dpa);
821 int (*get_max_dpa)(struct device *dev, void *drv_data, u64 *dpa);
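
The edac.h hits above declare get/set hooks that a memory-repair driver fills in so the core can read back or validate the target DPA. A standalone sketch of how a driver might back two of those hooks; all names are hypothetical and the struct device argument is dropped to keep the example self-contained, only the callback shapes mirror the excerpt.

#include <errno.h>
#include <stdint.h>

struct repair_ctx {
	uint64_t min_dpa, max_dpa;
	uint64_t dpa;			/* DPA selected for the next repair */
};

static int demo_get_dpa(void *drv_data, uint64_t *dpa)
{
	*dpa = ((struct repair_ctx *)drv_data)->dpa;
	return 0;
}

static int demo_set_dpa(void *drv_data, uint64_t dpa)
{
	struct repair_ctx *ctx = drv_data;

	if (dpa < ctx->min_dpa || dpa > ctx->max_dpa)
		return -EINVAL;		/* reject addresses outside the device */
	ctx->dpa = dpa;
	return 0;
}

struct repair_ops {
	int (*get_dpa)(void *drv_data, uint64_t *dpa);
	int (*set_dpa)(void *drv_data, uint64_t dpa);
};

static const struct repair_ops demo_ops = {
	.get_dpa = demo_get_dpa,
	.set_dpa = demo_set_dpa,
};
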
/linux/tools/testing/nvdimm/test/
nfit_test.h
53 __u64 dpa; member
nfit.c
674 u64 dpa; in nfit_test_search_spa() local
684 dpa = ctx.addr - nd_region->ndr_start; in nfit_test_search_spa()
698 spa->devices[0].dpa = dpa; in nfit_test_search_spa()
/linux/drivers/gpu/drm/xe/
xe_svm.c
405 u64 dpa; in xe_vram_region_page_to_dpa() local
413 dpa = vr->dpa_base + offset; in xe_vram_region_page_to_dpa()
415 return dpa; in xe_vram_region_page_to_dpa()
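
The xe_svm.c hit computes a page's device physical address as the VRAM region's DPA base plus the page's byte offset into that region. A tiny standalone sketch of that arithmetic; the helper name, PFN-based parameters and 4 KiB page size are assumptions.

#include <stdint.h>
#include <stdio.h>

#define DEMO_PAGE_SHIFT 12

static uint64_t page_to_dpa(uint64_t dpa_base, uint64_t pfn, uint64_t region_start_pfn)
{
	/* byte offset of this page within the VRAM region, added to the region's DPA base */
	uint64_t offset = (pfn - region_start_pfn) << DEMO_PAGE_SHIFT;

	return dpa_base + offset;
}

int main(void)
{
	/* e.g. region base 0x8000000000, page 5 pages into the region */
	printf("dpa=%#llx\n",
	       (unsigned long long)page_to_dpa(0x8000000000ULL, 0x105, 0x100));
	return 0;
}
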
/linux/arch/arm64/boot/dts/apple/
t8012-pmgr.dtsi
462 label = "dpa";
t8011-pmgr.dtsi
427 label = "dpa";
/linux/drivers/scsi/qla2xxx/
qla_def.h
3155 } dpa; member