Lines matching refs: mdev — each entry lists the source line number, the matching source line, and the enclosing function.

22 vp_modern_map_capability(struct virtio_pci_modern_device *mdev, int off,  in vp_modern_map_capability()  argument
26 struct pci_dev *dev = mdev->pci_dev; in vp_modern_map_capability()
40 if (bar >= PCI_STD_NUM_BARS || !(mdev->modern_bars & (1 << bar))) { in vp_modern_map_capability()
223 int vp_modern_probe(struct virtio_pci_modern_device *mdev) in vp_modern_probe() argument
225 struct pci_dev *pci_dev = mdev->pci_dev; in vp_modern_probe()
233 if (mdev->device_id_check) { in vp_modern_probe()
234 devid = mdev->device_id_check(pci_dev); in vp_modern_probe()
237 mdev->id.device = devid; in vp_modern_probe()
247 mdev->id.device = pci_dev->subsystem_device; in vp_modern_probe()
250 mdev->id.device = pci_dev->device - 0x1040; in vp_modern_probe()
253 mdev->id.vendor = pci_dev->subsystem_vendor; in vp_modern_probe()
258 &mdev->modern_bars); in vp_modern_probe()
268 &mdev->modern_bars); in vp_modern_probe()
271 &mdev->modern_bars); in vp_modern_probe()
280 mdev->dma_mask ? : DMA_BIT_MASK(64)); in vp_modern_probe()
292 &mdev->modern_bars); in vp_modern_probe()
294 err = pci_request_selected_regions(pci_dev, mdev->modern_bars, in vp_modern_probe()
300 mdev->common = vp_modern_map_capability(mdev, common, in vp_modern_probe()
304 &mdev->common_len, NULL); in vp_modern_probe()
305 if (!mdev->common) in vp_modern_probe()
307 mdev->isr = vp_modern_map_capability(mdev, isr, sizeof(u8), 1, in vp_modern_probe()
310 if (!mdev->isr) in vp_modern_probe()
317 &mdev->notify_offset_multiplier); in vp_modern_probe()
334 mdev->notify_base = vp_modern_map_capability(mdev, notify, in vp_modern_probe()
337 &mdev->notify_len, in vp_modern_probe()
338 &mdev->notify_pa); in vp_modern_probe()
339 if (!mdev->notify_base) in vp_modern_probe()
342 mdev->notify_map_cap = notify; in vp_modern_probe()
349 mdev->device = vp_modern_map_capability(mdev, device, 0, 4, in vp_modern_probe()
351 &mdev->device_len, in vp_modern_probe()
353 if (!mdev->device) in vp_modern_probe()
360 if (mdev->notify_base) in vp_modern_probe()
361 pci_iounmap(pci_dev, mdev->notify_base); in vp_modern_probe()
363 pci_iounmap(pci_dev, mdev->isr); in vp_modern_probe()
365 pci_iounmap(pci_dev, mdev->common); in vp_modern_probe()
367 pci_release_selected_regions(pci_dev, mdev->modern_bars); in vp_modern_probe()
376 void vp_modern_remove(struct virtio_pci_modern_device *mdev) in vp_modern_remove() argument
378 struct pci_dev *pci_dev = mdev->pci_dev; in vp_modern_remove()
380 if (mdev->device) in vp_modern_remove()
381 pci_iounmap(pci_dev, mdev->device); in vp_modern_remove()
382 if (mdev->notify_base) in vp_modern_remove()
383 pci_iounmap(pci_dev, mdev->notify_base); in vp_modern_remove()
384 pci_iounmap(pci_dev, mdev->isr); in vp_modern_remove()
385 pci_iounmap(pci_dev, mdev->common); in vp_modern_remove()
386 pci_release_selected_regions(pci_dev, mdev->modern_bars); in vp_modern_remove()
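
For context, a minimal sketch of how a PCI driver might pair vp_modern_probe() with vp_modern_remove(); my_dev, my_pci_probe() and my_pci_remove() are illustrative names rather than symbols from this file, and error handling is reduced to the essentials.

#include <linux/pci.h>
#include <linux/virtio_pci_modern.h>

struct my_dev {
	struct virtio_pci_modern_device mdev;
};

static int my_pci_probe(struct pci_dev *pci_dev, const struct pci_device_id *id)
{
	struct my_dev *d;
	int err;

	d = devm_kzalloc(&pci_dev->dev, sizeof(*d), GFP_KERNEL);
	if (!d)
		return -ENOMEM;

	err = pcim_enable_device(pci_dev);
	if (err)
		return err;

	/* pci_dev is the one field that must be set before vp_modern_probe(). */
	d->mdev.pci_dev = pci_dev;

	/* Maps the common, ISR, notify and device-specific capabilities. */
	err = vp_modern_probe(&d->mdev);
	if (err)
		return err;

	pci_set_drvdata(pci_dev, d);
	return 0;
}

static void my_pci_remove(struct pci_dev *pci_dev)
{
	struct my_dev *d = pci_get_drvdata(pci_dev);

	/* Unmaps the capabilities and releases the claimed BARs. */
	vp_modern_remove(&d->mdev);
}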
396 u64 vp_modern_get_features(struct virtio_pci_modern_device *mdev) in vp_modern_get_features() argument
398 struct virtio_pci_common_cfg __iomem *cfg = mdev->common; in vp_modern_get_features()
417 u64 vp_modern_get_driver_features(struct virtio_pci_modern_device *mdev) in vp_modern_get_driver_features() argument
419 struct virtio_pci_common_cfg __iomem *cfg = mdev->common; in vp_modern_get_driver_features()
437 void vp_modern_set_features(struct virtio_pci_modern_device *mdev, in vp_modern_set_features() argument
440 struct virtio_pci_common_cfg __iomem *cfg = mdev->common; in vp_modern_set_features()
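
A hedged sketch of feature negotiation built on the accessors above; my_negotiate_features() and the supported-feature mask passed in are illustrative, not part of this file.

#include <linux/virtio_config.h>
#include <linux/virtio_pci_modern.h>

static u64 my_negotiate_features(struct virtio_pci_modern_device *mdev,
				 u64 supported)
{
	/* 64-bit device features, read by the helper as two 32-bit halves. */
	u64 features = vp_modern_get_features(mdev);

	/* Accept only what both sides support; VIRTIO_F_VERSION_1 is mandatory. */
	features &= supported | BIT_ULL(VIRTIO_F_VERSION_1);
	vp_modern_set_features(mdev, features);

	/* Read back what the device actually recorded. */
	return vp_modern_get_driver_features(mdev);
}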
455 u32 vp_modern_generation(struct virtio_pci_modern_device *mdev) in vp_modern_generation() argument
457 struct virtio_pci_common_cfg __iomem *cfg = mdev->common; in vp_modern_generation()
469 u8 vp_modern_get_status(struct virtio_pci_modern_device *mdev) in vp_modern_get_status() argument
471 struct virtio_pci_common_cfg __iomem *cfg = mdev->common; in vp_modern_get_status()
482 void vp_modern_set_status(struct virtio_pci_modern_device *mdev, in vp_modern_set_status() argument
485 struct virtio_pci_common_cfg __iomem *cfg = mdev->common; in vp_modern_set_status()
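
A small sketch of the FEATURES_OK handshake using the two status accessors; my_set_features_ok() is an illustrative helper name.

#include <linux/virtio_config.h>
#include <linux/virtio_pci_modern.h>

static int my_set_features_ok(struct virtio_pci_modern_device *mdev)
{
	u8 status = vp_modern_get_status(mdev);

	vp_modern_set_status(mdev, status | VIRTIO_CONFIG_S_FEATURES_OK);

	/* The device may reject the negotiated features; confirm the bit stuck. */
	if (!(vp_modern_get_status(mdev) & VIRTIO_CONFIG_S_FEATURES_OK))
		return -ENODEV;

	return 0;
}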
501 int vp_modern_get_queue_reset(struct virtio_pci_modern_device *mdev, u16 index) in vp_modern_get_queue_reset() argument
505 cfg = (struct virtio_pci_modern_common_cfg __iomem *)mdev->common; in vp_modern_get_queue_reset()
517 void vp_modern_set_queue_reset(struct virtio_pci_modern_device *mdev, u16 index) in vp_modern_set_queue_reset() argument
521 cfg = (struct virtio_pci_modern_common_cfg __iomem *)mdev->common; in vp_modern_set_queue_reset()
542 u16 vp_modern_queue_vector(struct virtio_pci_modern_device *mdev, in vp_modern_queue_vector() argument
545 struct virtio_pci_common_cfg __iomem *cfg = mdev->common; in vp_modern_queue_vector()
561 u16 vp_modern_config_vector(struct virtio_pci_modern_device *mdev, in vp_modern_config_vector() argument
564 struct virtio_pci_common_cfg __iomem *cfg = mdev->common; in vp_modern_config_vector()
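
A minimal sketch of routing configuration-change interrupts with vp_modern_config_vector(); the vector value is assumed to come from the caller's MSI-X setup.

#include <linux/virtio_pci_modern.h>

static int my_set_config_vector(struct virtio_pci_modern_device *mdev, u16 vector)
{
	/* The device echoes the accepted vector back; a mismatch means rejection. */
	if (vp_modern_config_vector(mdev, vector) != vector)
		return -EBUSY;

	return 0;
}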
582 void vp_modern_queue_address(struct virtio_pci_modern_device *mdev, in vp_modern_queue_address() argument
586 struct virtio_pci_common_cfg __iomem *cfg = mdev->common; in vp_modern_queue_address()
605 void vp_modern_set_queue_enable(struct virtio_pci_modern_device *mdev, in vp_modern_set_queue_enable() argument
608 vp_iowrite16(index, &mdev->common->queue_select); in vp_modern_set_queue_enable()
609 vp_iowrite16(enable, &mdev->common->queue_enable); in vp_modern_set_queue_enable()
620 bool vp_modern_get_queue_enable(struct virtio_pci_modern_device *mdev, in vp_modern_get_queue_enable() argument
623 vp_iowrite16(index, &mdev->common->queue_select); in vp_modern_get_queue_enable()
625 return vp_ioread16(&mdev->common->queue_enable); in vp_modern_get_queue_enable()
635 void vp_modern_set_queue_size(struct virtio_pci_modern_device *mdev, in vp_modern_set_queue_size() argument
638 vp_iowrite16(index, &mdev->common->queue_select); in vp_modern_set_queue_size()
639 vp_iowrite16(size, &mdev->common->queue_size); in vp_modern_set_queue_size()
651 u16 vp_modern_get_queue_size(struct virtio_pci_modern_device *mdev, in vp_modern_get_queue_size() argument
654 vp_iowrite16(index, &mdev->common->queue_select); in vp_modern_get_queue_size()
656 return vp_ioread16(&mdev->common->queue_size); in vp_modern_get_queue_size()
667 u16 vp_modern_get_num_queues(struct virtio_pci_modern_device *mdev) in vp_modern_get_num_queues() argument
669 return vp_ioread16(&mdev->common->num_queues); in vp_modern_get_num_queues()
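
Pulling the per-queue helpers above together, a hedged sketch of programming one virtqueue; the ring DMA addresses and MSI-X vector are assumed to come from the caller, and my_setup_vq() is an illustrative name.

#include <linux/virtio_pci_modern.h>

static int my_setup_vq(struct virtio_pci_modern_device *mdev, u16 index, u16 num,
		       u64 desc_addr, u64 driver_addr, u64 device_addr, u16 msix_vec)
{
	if (index >= vp_modern_get_num_queues(mdev))
		return -ENOENT;

	/* The device advertises the maximum ring size; never exceed it. */
	if (num > vp_modern_get_queue_size(mdev, index))
		return -EINVAL;

	vp_modern_set_queue_size(mdev, index, num);
	vp_modern_queue_address(mdev, index, desc_addr, driver_addr, device_addr);

	/* The device echoes the accepted vector back. */
	if (vp_modern_queue_vector(mdev, index, msix_vec) != msix_vec)
		return -EBUSY;

	vp_modern_set_queue_enable(mdev, index, true);
	return 0;
}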
680 static u16 vp_modern_get_queue_notify_off(struct virtio_pci_modern_device *mdev, in vp_modern_get_queue_notify_off() argument
683 vp_iowrite16(index, &mdev->common->queue_select); in vp_modern_get_queue_notify_off()
685 return vp_ioread16(&mdev->common->queue_notify_off); in vp_modern_get_queue_notify_off()
697 void __iomem *vp_modern_map_vq_notify(struct virtio_pci_modern_device *mdev, in vp_modern_map_vq_notify() argument
700 u16 off = vp_modern_get_queue_notify_off(mdev, index); in vp_modern_map_vq_notify()
702 if (mdev->notify_base) { in vp_modern_map_vq_notify()
704 if ((u64)off * mdev->notify_offset_multiplier + 2 in vp_modern_map_vq_notify()
705 > mdev->notify_len) { in vp_modern_map_vq_notify()
706 dev_warn(&mdev->pci_dev->dev, in vp_modern_map_vq_notify()
709 off, mdev->notify_offset_multiplier, in vp_modern_map_vq_notify()
710 index, mdev->notify_len); in vp_modern_map_vq_notify()
714 *pa = mdev->notify_pa + in vp_modern_map_vq_notify()
715 off * mdev->notify_offset_multiplier; in vp_modern_map_vq_notify()
716 return mdev->notify_base + off * mdev->notify_offset_multiplier; in vp_modern_map_vq_notify()
718 return vp_modern_map_capability(mdev, in vp_modern_map_vq_notify()
719 mdev->notify_map_cap, 2, 2, in vp_modern_map_vq_notify()
720 off * mdev->notify_offset_multiplier, 2, in vp_modern_map_vq_notify()
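
A hedged sketch of using the mapping above: map a queue's notify address once at setup time, then kick the queue by writing its index to that address; my_map_notify() and my_kick_vq() are illustrative names.

#include <linux/virtio_pci_modern.h>

static void __iomem *my_map_notify(struct virtio_pci_modern_device *mdev,
				   u16 index, resource_size_t *pa)
{
	/* Returns NULL when the computed offset falls outside the notify region. */
	return vp_modern_map_vq_notify(mdev, index, pa);
}

static void my_kick_vq(void __iomem *notify_addr, u16 index)
{
	vp_iowrite16(index, notify_addr);
}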
726 u16 vp_modern_avq_num(struct virtio_pci_modern_device *mdev) in vp_modern_avq_num() argument
730 cfg = (struct virtio_pci_modern_common_cfg __iomem *)mdev->common; in vp_modern_avq_num()
735 u16 vp_modern_avq_index(struct virtio_pci_modern_device *mdev) in vp_modern_avq_index() argument
739 cfg = (struct virtio_pci_modern_common_cfg __iomem *)mdev->common; in vp_modern_avq_index()