Lines Matching full:device
28 /* Cached device registers (and derived data) for easy access */
47 static inline struct dsa_state *to_dsa_state(struct vfio_pci_device *device) in to_dsa_state() argument
49 return device->driver.region.vaddr; in to_dsa_state()
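For context, the to_dsa_state() helper above just reinterprets the driver's DMA-mapped scratch region as the DSA driver's private state. A minimal sketch of the idea, with hypothetical stand-in definitions for the vfio_pci_device and dsa_state types (the real ones live in the VFIO selftest library):

	#include <stddef.h>

	/* Hypothetical stand-ins; the real definitions live in the selftest library. */
	struct vfio_pci_driver_region { void *vaddr; size_t size; };
	struct vfio_pci_driver { struct vfio_pci_driver_region region; };
	struct vfio_pci_device { struct vfio_pci_driver driver; };

	/* Per-driver scratch state kept in the DMA-mapped driver region:
	 * cached capability registers, descriptors, completion records, ... */
	struct dsa_state { unsigned long gen_cap_bits; /* trimmed for the sketch */ };

	static inline struct dsa_state *to_dsa_state(struct vfio_pci_device *device)
	{
		/* The driver owns the whole region, so just reinterpret its base. */
		return device->driver.region.vaddr;
	}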
52 static bool dsa_int_handle_request_required(struct vfio_pci_device *device) in dsa_int_handle_request_required() argument
54 void *bar0 = device->bars[0].vaddr; in dsa_int_handle_request_required()
66 static int dsa_probe(struct vfio_pci_device *device) in dsa_probe() argument
68 if (!vfio_pci_device_match(device, PCI_VENDOR_ID_INTEL, in dsa_probe()
72 if (dsa_int_handle_request_required(device)) { in dsa_probe()
73 dev_err(device, "Device requires requesting interrupt handles\n"); in dsa_probe()
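dsa_probe() only has to decide whether this driver can handle the device: it matches on Intel's vendor ID plus a DSA device ID, and refuses devices whose capabilities say interrupt handles must be explicitly requested (a flow this driver does not implement). A sketch with hypothetical stand-ins; the 0x0b25 device ID and the helper bodies are assumptions, not taken from the lines above:

	#include <errno.h>
	#include <stdbool.h>
	#include <stdint.h>
	#include <stdio.h>

	#define PCI_VENDOR_ID_INTEL	0x8086
	#define DSA_DEVICE_ID		0x0b25	/* assumption: DSA 1.x device ID */

	/* Hypothetical stand-ins for the selftest library. */
	struct vfio_pci_device { uint16_t vendor_id, device_id; };

	static bool vfio_pci_device_match(struct vfio_pci_device *device,
					  uint16_t vendor, uint16_t device_id)
	{
		return device->vendor_id == vendor && device->device_id == device_id;
	}

	static bool dsa_int_handle_request_required(struct vfio_pci_device *device)
	{
		(void)device;
		return false;	/* stand-in: the real check reads a GENCAP bit in BAR 0 */
	}

	static int dsa_probe(struct vfio_pci_device *device)
	{
		if (!vfio_pci_device_match(device, PCI_VENDOR_ID_INTEL, DSA_DEVICE_ID))
			return -EINVAL;

		if (dsa_int_handle_request_required(device)) {
			fprintf(stderr, "Device requires requesting interrupt handles\n");
			return -EINVAL;
		}

		return 0;
	}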
80 static void dsa_check_sw_err(struct vfio_pci_device *device) in dsa_check_sw_err() argument
82 void *reg = device->bars[0].vaddr + IDXD_SWERR_OFFSET; in dsa_check_sw_err()
94 dev_err(device, "SWERR: 0x%016lx 0x%016lx 0x%016lx 0x%016lx\n", in dsa_check_sw_err()
97 dev_err(device, " valid: 0x%x\n", err.valid); in dsa_check_sw_err()
98 dev_err(device, " overflow: 0x%x\n", err.overflow); in dsa_check_sw_err()
99 dev_err(device, " desc_valid: 0x%x\n", err.desc_valid); in dsa_check_sw_err()
100 dev_err(device, " wq_idx_valid: 0x%x\n", err.wq_idx_valid); in dsa_check_sw_err()
101 dev_err(device, " batch: 0x%x\n", err.batch); in dsa_check_sw_err()
102 dev_err(device, " fault_rw: 0x%x\n", err.fault_rw); in dsa_check_sw_err()
103 dev_err(device, " priv: 0x%x\n", err.priv); in dsa_check_sw_err()
104 dev_err(device, " error: 0x%x\n", err.error); in dsa_check_sw_err()
105 dev_err(device, " wq_idx: 0x%x\n", err.wq_idx); in dsa_check_sw_err()
106 dev_err(device, " operation: 0x%x\n", err.operation); in dsa_check_sw_err()
107 dev_err(device, " pasid: 0x%x\n", err.pasid); in dsa_check_sw_err()
108 dev_err(device, " batch_idx: 0x%x\n", err.batch_idx); in dsa_check_sw_err()
109 dev_err(device, " invalid_flags: 0x%x\n", err.invalid_flags); in dsa_check_sw_err()
110 dev_err(device, " fault_addr: 0x%lx\n", err.fault_addr); in dsa_check_sw_err()
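The SWERR dump above reads a multi-word software-error register block out of BAR 0 and decodes its bit-fields. A trimmed sketch that only checks the valid bit and prints the raw words; the 0xc0 offset and the bit-0 "valid" position follow the public DSA spec and should be treated as assumptions here:

	#include <inttypes.h>
	#include <stdint.h>
	#include <stdio.h>

	#define IDXD_SWERR_OFFSET	0xc0	/* assumption: SWERR block offset in BAR 0 */

	struct vfio_pci_bar { void *vaddr; };
	struct vfio_pci_device { struct vfio_pci_bar bars[6]; };	/* stand-in */

	static void dsa_check_sw_err(struct vfio_pci_device *device)
	{
		volatile uint64_t *reg = (volatile uint64_t *)
			((char *)device->bars[0].vaddr + IDXD_SWERR_OFFSET);
		uint64_t err[4];

		for (int i = 0; i < 4; i++)
			err[i] = reg[i];

		if (!(err[0] & 0x1))	/* bit 0: error record valid */
			return;

		fprintf(stderr, "SWERR: 0x%016" PRIx64 " 0x%016" PRIx64
			" 0x%016" PRIx64 " 0x%016" PRIx64 "\n",
			err[0], err[1], err[2], err[3]);
		/* The real helper then decodes overflow, desc_valid, wq_idx, operation,
		 * pasid, fault_addr, ... from the individual bit-fields. */
	}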
115 static void dsa_command(struct vfio_pci_device *device, u32 cmd) in dsa_command() argument
119 void *bar0 = device->bars[0].vaddr; in dsa_command()
126 dsa_check_sw_err(device); in dsa_command()
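Device-level commands (reset, enable device, enable WQ) are issued through an MMIO command register in BAR 0 and complete asynchronously, which is why the helper re-checks SWERR afterwards. A sketch of the write-then-poll pattern; the CMD/CMDSTS offsets and the bit-31 "active" flag follow the public spec and are assumptions here, and the cmd value is assumed to be pre-encoded by the caller:

	#include <assert.h>
	#include <stdint.h>

	#define IDXD_CMD_OFFSET		0xa0	/* assumption */
	#define IDXD_CMDSTS_OFFSET	0xa8	/* assumption */
	#define IDXD_CMDSTS_ACTIVE	(1u << 31)

	struct vfio_pci_bar { void *vaddr; };
	struct vfio_pci_device { struct vfio_pci_bar bars[6]; };	/* stand-in */

	static void dsa_check_sw_err(struct vfio_pci_device *device)
	{
		(void)device;	/* see the SWERR sketch above */
	}

	static void dsa_command(struct vfio_pci_device *device, uint32_t cmd)
	{
		char *bar0 = device->bars[0].vaddr;
		volatile uint32_t *cmdreg = (volatile uint32_t *)(bar0 + IDXD_CMD_OFFSET);
		volatile uint32_t *cmdsts = (volatile uint32_t *)(bar0 + IDXD_CMDSTS_OFFSET);

		*cmdreg = cmd;

		/* Spin until the device clears the "command active" bit ... */
		while (*cmdsts & IDXD_CMDSTS_ACTIVE)
			;

		/* ... then require a zero status code before moving on. */
		assert((*cmdsts & 0xff) == 0);

		dsa_check_sw_err(device);
	}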
140 static void dsa_wq_init(struct vfio_pci_device *device) in dsa_wq_init() argument
142 struct dsa_state *dsa = to_dsa_state(device); in dsa_wq_init()
170 static void dsa_group_init(struct vfio_pci_device *device) in dsa_group_init() argument
172 struct dsa_state *dsa = to_dsa_state(device); in dsa_group_init()
184 static void dsa_register_cache_init(struct vfio_pci_device *device) in dsa_register_cache_init() argument
186 struct dsa_state *dsa = to_dsa_state(device); in dsa_register_cache_init()
187 void *bar0 = device->bars[0].vaddr; in dsa_register_cache_init()
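Judging by the later uses of dsa->gen_cap (the max_xfer_shift line inside dsa_init() below), the register cache holds at least the 64-bit GENCAP capability register read out of BAR 0. A sketch of caching it; the 0x10 offset and the two bit-field positions are taken from the public DSA spec and should be read as assumptions:

	#include <stdint.h>

	#define IDXD_GENCAP_OFFSET	0x10	/* assumption */

	union gen_cap_reg {
		struct {
			uint64_t caps_lo:16;		/* other capability bits, not decoded here */
			uint64_t max_xfer_shift:5;	/* log2(max transfer size) */
			uint64_t max_batch_shift:4;	/* log2(max batch size) */
			uint64_t caps_hi:39;
		};
		uint64_t bits;
	};

	struct dsa_state { union gen_cap_reg gen_cap; };	/* trimmed stand-in */
	struct vfio_pci_bar { void *vaddr; };
	struct vfio_pci_device { struct vfio_pci_bar bars[6]; };

	static void dsa_register_cache_init(struct vfio_pci_device *device,
					    struct dsa_state *dsa)
	{
		void *bar0 = device->bars[0].vaddr;

		/* Snapshot the capability register so later code can derive limits
		 * (e.g. 1UL << max_xfer_shift) without touching MMIO again. */
		dsa->gen_cap.bits =
			*(volatile uint64_t *)((char *)bar0 + IDXD_GENCAP_OFFSET);
	}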
207 static void dsa_init(struct vfio_pci_device *device) in dsa_init() argument
209 struct dsa_state *dsa = to_dsa_state(device); in dsa_init()
211 VFIO_ASSERT_GE(device->driver.region.size, sizeof(*dsa)); in dsa_init()
213 vfio_pci_config_writew(device, PCI_COMMAND, in dsa_init()
218 dsa_command(device, IDXD_CMD_RESET_DEVICE); in dsa_init()
220 dsa_register_cache_init(device); in dsa_init()
221 dsa_wq_init(device); in dsa_init()
222 dsa_group_init(device); in dsa_init()
224 dsa_command(device, IDXD_CMD_ENABLE_DEVICE); in dsa_init()
225 dsa_command(device, IDXD_CMD_ENABLE_WQ); in dsa_init()
227 vfio_pci_msix_enable(device, MSIX_VECTOR, 1); in dsa_init()
229 device->driver.max_memcpy_count = in dsa_init()
231 device->driver.max_memcpy_size = 1UL << dsa->gen_cap.max_xfer_shift; in dsa_init()
232 device->driver.msi = MSIX_VECTOR; in dsa_init()
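Pieced together, the init lines above give the bring-up order: verify the scratch region can hold the driver state, enable memory decode and bus mastering in PCI_COMMAND, reset the device, cache the capability registers, configure the WQ and group, enable the device and then the WQ, enable one MSI-X vector, and export memcpy limits derived from GENCAP. A sketch of that order; the PCI_COMMAND flags and the max_memcpy_count derivation are assumptions filling in the truncated lines, the enum values are placeholders, and the extern declarations stand in for the selftest library:

	#include <stdint.h>

	#define PCI_COMMAND		0x04
	#define PCI_COMMAND_MEMORY	0x2
	#define PCI_COMMAND_MASTER	0x4
	#define MSIX_VECTOR		1	/* assumption */

	/* Command opcodes; placeholder values for the sketch only. */
	enum { IDXD_CMD_RESET_DEVICE, IDXD_CMD_ENABLE_DEVICE, IDXD_CMD_ENABLE_WQ };

	/* Assumed library/driver API, mirroring names in the lines above. */
	struct vfio_pci_device;
	extern void vfio_pci_config_writew(struct vfio_pci_device *device, int off, uint16_t val);
	extern void vfio_pci_msix_enable(struct vfio_pci_device *device, int vector, int count);
	extern void dsa_command(struct vfio_pci_device *device, uint32_t cmd);
	extern void dsa_register_cache_init(struct vfio_pci_device *device);
	extern void dsa_wq_init(struct vfio_pci_device *device);
	extern void dsa_group_init(struct vfio_pci_device *device);

	static void dsa_init_sketch(struct vfio_pci_device *device)
	{
		/* The real function first asserts that the driver region is at
		 * least sizeof(struct dsa_state) bytes. */

		/* Memory decode for BAR access, bus mastering for DMA
		 * (assumed values for the truncated PCI_COMMAND line above). */
		vfio_pci_config_writew(device, PCI_COMMAND,
				       PCI_COMMAND_MEMORY | PCI_COMMAND_MASTER);

		dsa_command(device, IDXD_CMD_RESET_DEVICE);

		dsa_register_cache_init(device);
		dsa_wq_init(device);
		dsa_group_init(device);

		dsa_command(device, IDXD_CMD_ENABLE_DEVICE);
		dsa_command(device, IDXD_CMD_ENABLE_WQ);

		vfio_pci_msix_enable(device, MSIX_VECTOR, 1);

		/* max_memcpy_size = 1UL << gen_cap.max_xfer_shift (line above);
		 * max_memcpy_count is assumed to come from the max batch size. */
	}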
235 static void dsa_remove(struct vfio_pci_device *device) in dsa_remove() argument
237 dsa_command(device, IDXD_CMD_RESET_DEVICE); in dsa_remove()
238 vfio_pci_msix_disable(device); in dsa_remove()
241 static int dsa_completion_wait(struct vfio_pci_device *device, in dsa_completion_wait() argument
247 dsa_check_sw_err(device); in dsa_completion_wait()
259 dev_err(device, "Error detected during memcpy operation: 0x%x\n", status); in dsa_completion_wait()
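Completion is signalled by the device writing a completion record back into the scratch region; the helper spins on the record's status byte (re-checking SWERR while it waits) and treats anything other than "success" as an error. A sketch assuming the DSA convention that the status byte is the first byte of the record and that 1 means success:

	#include <stdint.h>
	#include <stdio.h>

	#define DSA_COMP_SUCCESS	1	/* assumption: spec value for success */

	/* Trimmed stand-in; the real record also carries bytes completed,
	 * fault address, and operation-specific result fields. */
	struct dsa_completion_record {
		volatile uint8_t status;
		uint8_t rsvd[31];
	};

	static int dsa_completion_wait(struct dsa_completion_record *completion)
	{
		uint8_t status;

		/* Busy-wait for the device to write the record back.  The real
		 * helper also calls dsa_check_sw_err() on each iteration so a
		 * wedged device is reported instead of hanging the test. */
		while (!(status = completion->status))
			;

		if (status == DSA_COMP_SUCCESS)
			return 0;

		fprintf(stderr, "Error detected during memcpy operation: 0x%x\n", status);
		return -1;
	}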
263 static void dsa_copy_desc_init(struct vfio_pci_device *device, in dsa_copy_desc_init() argument
268 struct dsa_state *dsa = to_dsa_state(device); in dsa_copy_desc_init()
283 .completion_addr = to_iova(device, &dsa->copy_completion), in dsa_copy_desc_init()
288 static void dsa_batch_desc_init(struct vfio_pci_device *device, in dsa_batch_desc_init() argument
292 struct dsa_state *dsa = to_dsa_state(device); in dsa_batch_desc_init()
298 .completion_addr = to_iova(device, &dsa->batch_completion), in dsa_batch_desc_init()
299 .desc_list_addr = to_iova(device, &dsa->copy[0]), in dsa_batch_desc_init()
304 static void dsa_desc_write(struct vfio_pci_device *device, struct dsa_hw_desc *desc) in dsa_desc_write() argument
306 /* Write the contents (not address) of the 64-byte descriptor to the device. */ in dsa_desc_write()
307 iosubmit_cmds512(device->bars[2].vaddr, desc, 1); in dsa_desc_write()
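Work is submitted by storing the whole 64-byte descriptor into the work-queue portal in BAR 2 with a single 64-byte write; in the kernel that primitive is iosubmit_cmds512(), which boils down to the MOVDIR64B instruction. A userspace-flavoured sketch of the same submission; the inline asm mirrors the kernel's movdir64b() helper and assumes an x86 CPU with MOVDIR64B support:

	#include <stdint.h>

	/* 64-byte DSA descriptor; the real layout is struct dsa_hw_desc in the
	 * <linux/idxd.h> UAPI header. */
	struct dsa_hw_desc_raw { uint8_t bytes[64]; };

	/* MOVDIR64B: destination portal address in RAX, source memory operand
	 * addressed by RDX (the same encoding the kernel helper uses). */
	static inline void movdir64b(volatile void *portal, const void *desc)
	{
		asm volatile(".byte 0x66, 0x0f, 0x38, 0xf8, 0x02"
			     : : "a" (portal), "d" (desc) : "memory");
	}

	static void dsa_desc_write(void *wq_portal, const struct dsa_hw_desc_raw *desc)
	{
		/* Write the contents (not address) of the 64-byte descriptor to the device. */
		movdir64b(wq_portal, desc);
	}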
310 static void dsa_memcpy_one(struct vfio_pci_device *device, in dsa_memcpy_one() argument
313 struct dsa_state *dsa = to_dsa_state(device); in dsa_memcpy_one()
317 dsa_copy_desc_init(device, &dsa->copy[0], src, dst, size, interrupt); in dsa_memcpy_one()
318 dsa_desc_write(device, &dsa->copy[0]); in dsa_memcpy_one()
321 static void dsa_memcpy_batch(struct vfio_pci_device *device, in dsa_memcpy_batch() argument
324 struct dsa_state *dsa = to_dsa_state(device); in dsa_memcpy_batch()
332 dsa_copy_desc_init(device, copy_desc, src, dst, size, false); in dsa_memcpy_batch()
355 dsa_batch_desc_init(device, batch_desc, nr_copies); in dsa_memcpy_batch()
361 dsa_desc_write(device, batch_desc); in dsa_memcpy_batch()
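For the batch path, the copy descriptors are laid out in the scratch region, and a single batch descriptor pointing at that array (by IOVA, via to_iova() as in the lines above) is what actually gets submitted. A sketch of how such descriptors are filled in, using struct dsa_hw_desc and the opcode/flag names from the <linux/idxd.h> UAPI header; the helper name, its IOVA parameters, and the flag choice (request a completion record at a valid address) are assumptions:

	#include <linux/idxd.h>		/* struct dsa_hw_desc, DSA_OPCODE_*, IDXD_OP_FLAG_* */
	#include <stdint.h>

	/* Hypothetical helper: all addresses are IOVAs already mapped for the device. */
	static void fill_batch(struct dsa_hw_desc *copies, uint32_t nr_copies,
			       uint64_t src_iova, uint64_t dst_iova, uint32_t size,
			       uint64_t copy_completion_iova,
			       struct dsa_hw_desc *batch, uint64_t desc_list_iova,
			       uint64_t batch_completion_iova)
	{
		for (uint32_t i = 0; i < nr_copies; i++) {
			copies[i] = (struct dsa_hw_desc) {
				.opcode = DSA_OPCODE_MEMMOVE,
				.flags = IDXD_OP_FLAG_CRAV | IDXD_OP_FLAG_RCR,
				.src_addr = src_iova,
				.dst_addr = dst_iova,
				.xfer_size = size,
				.completion_addr = copy_completion_iova,
			};
		}

		/* The batch descriptor does no copying itself; it just points the
		 * device at the array of copy descriptors. */
		*batch = (struct dsa_hw_desc) {
			.opcode = DSA_OPCODE_BATCH,
			.flags = IDXD_OP_FLAG_CRAV | IDXD_OP_FLAG_RCR,
			.desc_list_addr = desc_list_iova,
			.desc_count = nr_copies,
			.completion_addr = batch_completion_iova,
		};
	}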
367 static void dsa_memcpy_start(struct vfio_pci_device *device, in dsa_memcpy_start() argument
370 struct dsa_state *dsa = to_dsa_state(device); in dsa_memcpy_start()
374 dsa_memcpy_one(device, src, dst, size, false); in dsa_memcpy_start()
376 dsa_memcpy_batch(device, src, dst, size, count); in dsa_memcpy_start()
381 static int dsa_memcpy_wait(struct vfio_pci_device *device) in dsa_memcpy_wait() argument
383 struct dsa_state *dsa = to_dsa_state(device); in dsa_memcpy_wait()
387 r = dsa_completion_wait(device, &dsa->copy_completion); in dsa_memcpy_wait()
389 r = dsa_completion_wait(device, &dsa->batch_completion); in dsa_memcpy_wait()
396 static void dsa_send_msi(struct vfio_pci_device *device) in dsa_send_msi() argument
398 struct dsa_state *dsa = to_dsa_state(device); in dsa_send_msi()
400 dsa_memcpy_one(device, in dsa_send_msi()
401 to_iova(device, &dsa->send_msi_src), in dsa_send_msi()
402 to_iova(device, &dsa->send_msi_dst), in dsa_send_msi()
405 VFIO_ASSERT_EQ(dsa_completion_wait(device, &dsa->copy_completion), 0); in dsa_send_msi()
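dsa_send_msi() reuses the copy path to fire an interrupt: it queues a small copy between two dedicated scratch buffers with the "request completion interrupt" flag set, then waits for the completion record, at which point the MSI-X vector should have fired. A sketch of such a descriptor using the UAPI names; the MSIX_VECTOR value and the assumption that the interrupt handle equals the vector index (valid only when the device does not require requesting handles, which probe checks above) are mine:

	#include <linux/idxd.h>		/* struct dsa_hw_desc, DSA_OPCODE_*, IDXD_OP_FLAG_* */
	#include <stdint.h>

	/* Hypothetical: vector 0 on IDXD devices is used for device-level events,
	 * so work completions typically target vector 1 or higher. */
	#define MSIX_VECTOR 1

	/* A 1-byte copy whose only purpose is to raise an MSI-X interrupt on
	 * completion; all addresses are IOVAs already mapped for the device. */
	static void fill_send_msi_desc(struct dsa_hw_desc *desc, uint64_t src_iova,
				       uint64_t dst_iova, uint64_t completion_iova)
	{
		*desc = (struct dsa_hw_desc) {
			.opcode = DSA_OPCODE_MEMMOVE,
			.flags = IDXD_OP_FLAG_CRAV | IDXD_OP_FLAG_RCR |
				 IDXD_OP_FLAG_RCI,	/* request a completion interrupt */
			.src_addr = src_iova,
			.dst_addr = dst_iova,
			.xfer_size = 1,
			.int_handle = MSIX_VECTOR,	/* assumption: handle == vector index */
			.completion_addr = completion_iova,
		};
	}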