Lines Matching refs:vgpu
112 #define vgpu_cfg_space(vgpu) ((vgpu)->cfg_space.virtual_cfg_space)
125 #define vgpu_opregion(vgpu) (&(vgpu->opregion))
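vgpu_cfg_space() returns the byte array backing the vGPU's virtual PCI configuration space, and vgpu_opregion() the virtual OpRegion state. A minimal sketch of peeking at the guest-visible PCI command word through the first macro; the helper name is illustrative and not part of gvt.h:

    #include <linux/pci_regs.h>

    /* Illustrative only: read the guest-visible PCI_COMMAND word out of the
     * virtual configuration space exposed by vgpu_cfg_space().
     */
    static u16 example_virtual_pci_command(struct intel_vgpu *vgpu)
    {
            return *(u16 *)(vgpu_cfg_space(vgpu) + PCI_COMMAND);
    }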
145 int (*init)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
146 void (*clean)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
147 void (*reset)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
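The init/clean/reset callbacks above all take an intel_engine_mask_t, so one ops table can act on a single engine or on several at once. A hedged sketch of such a table being driven per engine; the structure name and wrapper below are hypothetical, not the actual gvt.h definitions:

    /* Illustrative only: an engine-mask based ops table like the callbacks
     * listed above, invoked for a single engine.
     */
    struct example_vgpu_engine_ops {
            int (*init)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
            void (*clean)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
            void (*reset)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
    };

    static void example_reset_one_engine(struct intel_vgpu *vgpu,
                                         const struct example_vgpu_engine_ops *ops,
                                         unsigned int engine_id)
    {
            ops->reset(vgpu, BIT(engine_id));       /* one bit per engine */
    }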
193 * scheduler structure, so the two vgpu data fields below are protected
323 * not yet protected by special locks (vgpu and scheduler lock).
326 /* scheduler-scope lock, protects gvt and vgpu scheduling-related data */
422 #define vgpu_aperture_offset(vgpu) ((vgpu)->gm.low_gm_node.start)
423 #define vgpu_hidden_offset(vgpu) ((vgpu)->gm.high_gm_node.start)
424 #define vgpu_aperture_sz(vgpu) ((vgpu)->gm.aperture_sz)
425 #define vgpu_hidden_sz(vgpu) ((vgpu)->gm.hidden_sz)
427 #define vgpu_aperture_pa_base(vgpu) \
428 (gvt_aperture_pa_base(vgpu->gvt) + vgpu_aperture_offset(vgpu))
430 #define vgpu_ggtt_gm_sz(vgpu) ((vgpu)->gm.aperture_sz + (vgpu)->gm.hidden_sz)
432 #define vgpu_aperture_pa_end(vgpu) \
433 (vgpu_aperture_pa_base(vgpu) + vgpu_aperture_sz(vgpu) - 1)
435 #define vgpu_aperture_gmadr_base(vgpu) (vgpu_aperture_offset(vgpu))
436 #define vgpu_aperture_gmadr_end(vgpu) \
437 (vgpu_aperture_gmadr_base(vgpu) + vgpu_aperture_sz(vgpu) - 1)
439 #define vgpu_hidden_gmadr_base(vgpu) (vgpu_hidden_offset(vgpu))
440 #define vgpu_hidden_gmadr_end(vgpu) \
441 (vgpu_hidden_gmadr_base(vgpu) + vgpu_hidden_sz(vgpu) - 1)
443 #define vgpu_fence_sz(vgpu) (vgpu->fence.size)
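Taken together, the macros above describe each vGPU's slice of global graphics memory: a CPU-visible aperture (low GM) window, a hidden (high GM) window, their combined GGTT size, and the vGPU's fence register count. A minimal sketch, using only these macros, of translating an aperture guest address into a host physical address; the helper is illustrative:

    /* Illustrative only: translate a guest graphics memory address that falls
     * inside this vGPU's aperture window into a host physical address.
     * Returns 0 for out-of-range input, which a real helper would report as a
     * proper error instead.
     */
    static u64 example_aperture_gmadr_to_pa(struct intel_vgpu *vgpu, u64 gmadr)
    {
            if (gmadr < vgpu_aperture_gmadr_base(vgpu) ||
                gmadr > vgpu_aperture_gmadr_end(vgpu))
                    return 0;

            return vgpu_aperture_pa_base(vgpu) +
                   (gmadr - vgpu_aperture_gmadr_base(vgpu));
    }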
448 int intel_vgpu_alloc_resource(struct intel_vgpu *vgpu,
450 void intel_vgpu_reset_resource(struct intel_vgpu *vgpu);
451 void intel_vgpu_free_resource(struct intel_vgpu *vgpu);
452 void intel_vgpu_write_fence(struct intel_vgpu *vgpu,
459 #define vgpu_vreg_t(vgpu, reg) \
460 (*(u32 *)(vgpu->mmio.vreg + i915_mmio_reg_offset(reg)))
461 #define vgpu_vreg(vgpu, offset) \
462 (*(u32 *)(vgpu->mmio.vreg + (offset)))
463 #define vgpu_vreg64_t(vgpu, reg) \
464 (*(u64 *)(vgpu->mmio.vreg + i915_mmio_reg_offset(reg)))
465 #define vgpu_vreg64(vgpu, offset) \
466 (*(u64 *)(vgpu->mmio.vreg + (offset)))
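The vreg macros give typed (u32/u64) access into the vGPU's virtual MMIO register block, either by i915_reg_t (the _t variants) or by raw offset. A small usage sketch; the 0x2030 offset is only a placeholder, not a claim about a specific register:

    /* Illustrative only: read-modify-write a guest-visible virtual register
     * via the raw-offset accessor.
     */
    static void example_set_vreg_bit(struct intel_vgpu *vgpu)
    {
            u32 val = vgpu_vreg(vgpu, 0x2030);      /* guest-visible value */

            vgpu_vreg(vgpu, 0x2030) = val | BIT(0); /* write back modified */
    }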
468 #define for_each_active_vgpu(gvt, vgpu, id) \
469 idr_for_each_entry((&(gvt)->vgpu_idr), (vgpu), (id)) \
470 for_each_if(test_bit(INTEL_VGPU_STATUS_ACTIVE, vgpu->status))
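for_each_active_vgpu() walks the gvt->vgpu_idr IDR and skips any vGPU whose INTEL_VGPU_STATUS_ACTIVE bit is clear. A usage sketch; callers in the driver appear to hold the gvt-level lock around such walks, so check the surrounding code before relying on this pattern:

    /* Illustrative only: count the vGPUs currently marked active. */
    static int example_count_active_vgpus(struct intel_gvt *gvt)
    {
            struct intel_vgpu *vgpu;
            int id, count = 0;

            for_each_active_vgpu(gvt, vgpu, id)
                    count++;

            return count;
    }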
472 static inline void intel_vgpu_write_pci_bar(struct intel_vgpu *vgpu,
479 pval = (u32 *)(vgpu_cfg_space(vgpu) + offset);
496 void intel_gvt_destroy_idle_vgpu(struct intel_vgpu *vgpu);
497 int intel_gvt_create_vgpu(struct intel_vgpu *vgpu,
499 void intel_gvt_destroy_vgpu(struct intel_vgpu *vgpu);
500 void intel_gvt_release_vgpu(struct intel_vgpu *vgpu);
501 void intel_gvt_reset_vgpu_locked(struct intel_vgpu *vgpu, bool dmlr,
503 void intel_gvt_reset_vgpu(struct intel_vgpu *vgpu);
504 void intel_gvt_activate_vgpu(struct intel_vgpu *vgpu);
505 void intel_gvt_deactivate_vgpu(struct intel_vgpu *vgpu);
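The declarations above cover the vGPU lifecycle: creation, locked/unlocked reset (the dmlr flag selects a device-model-level reset), release, activation/deactivation for scheduling, and final destruction. A heavily simplified teardown sketch, assuming the usual stop-then-free ordering; the real call chain interposes further steps (workload cleanup, release) between these calls:

    /* Illustrative only: the teardown side of the lifecycle suggested by the
     * declarations above. Not the driver's actual call sequence.
     */
    static void example_teardown_vgpu(struct intel_vgpu *vgpu)
    {
            intel_gvt_deactivate_vgpu(vgpu);        /* stop scheduling it */
            intel_gvt_destroy_vgpu(vgpu);           /* free its resources */
    }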
507 int intel_gvt_set_opregion(struct intel_vgpu *vgpu);
508 int intel_gvt_set_edid(struct intel_vgpu *vgpu, int port_num);
511 #define vgpu_gmadr_is_aperture(vgpu, gmadr) \
512 ((gmadr >= vgpu_aperture_gmadr_base(vgpu)) && \
513 (gmadr <= vgpu_aperture_gmadr_end(vgpu)))
515 #define vgpu_gmadr_is_hidden(vgpu, gmadr) \
516 ((gmadr >= vgpu_hidden_gmadr_base(vgpu)) && \
517 (gmadr <= vgpu_hidden_gmadr_end(vgpu)))
519 #define vgpu_gmadr_is_valid(vgpu, gmadr) \
520 ((vgpu_gmadr_is_aperture(vgpu, gmadr) || \
521 (vgpu_gmadr_is_hidden(vgpu, gmadr))))
535 bool intel_gvt_ggtt_validate_range(struct intel_vgpu *vgpu, u64 addr, u32 size);
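vgpu_gmadr_is_valid() accepts an address falling in either window, while intel_gvt_ggtt_validate_range() is declared for checking whole ranges. A sketch of the range check such a validator presumably needs: both endpoints must land in the same window, since two individually valid endpoints could still straddle the gap between aperture and hidden GM:

    /* Illustrative only: a same-window range check built from the macros
     * above; defer to intel_gvt_ggtt_validate_range() in real code.
     */
    static bool example_gm_range_in_one_window(struct intel_vgpu *vgpu,
                                               u64 start, u32 size)
    {
            u64 end = start + size - 1;

            if (size == 0)
                    return vgpu_gmadr_is_valid(vgpu, start);

            return (vgpu_gmadr_is_aperture(vgpu, start) &&
                    vgpu_gmadr_is_aperture(vgpu, end)) ||
                   (vgpu_gmadr_is_hidden(vgpu, start) &&
                    vgpu_gmadr_is_hidden(vgpu, end));
    }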
537 void intel_vgpu_init_cfg_space(struct intel_vgpu *vgpu,
539 void intel_vgpu_reset_cfg_space(struct intel_vgpu *vgpu);
541 int intel_vgpu_emulate_cfg_read(struct intel_vgpu *vgpu, unsigned int offset,
544 int intel_vgpu_emulate_cfg_write(struct intel_vgpu *vgpu, unsigned int offset,
547 void intel_vgpu_emulate_hotplug(struct intel_vgpu *vgpu, bool connected);
549 static inline u64 intel_vgpu_get_bar_gpa(struct intel_vgpu *vgpu, int bar)
552 return (*(u64 *)(vgpu->cfg_space.virtual_cfg_space + bar)) &
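intel_vgpu_get_bar_gpa() reads the 64-bit BAR value out of the virtual configuration space (the masking term on the continuation line is elided by this listing), which lets trap handlers turn a guest physical address into an offset within that BAR. A hedged usage sketch; the caller below is illustrative:

    #include <linux/pci_regs.h>

    /* Illustrative only: convert a trapped guest physical address into an
     * offset inside the vGPU's first BAR.
     */
    static u64 example_gpa_to_bar0_offset(struct intel_vgpu *vgpu, u64 gpa)
    {
            return gpa - intel_vgpu_get_bar_gpa(vgpu, PCI_BASE_ADDRESS_0);
    }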
556 void intel_vgpu_clean_opregion(struct intel_vgpu *vgpu);
557 int intel_vgpu_init_opregion(struct intel_vgpu *vgpu);
558 int intel_vgpu_opregion_base_write_handler(struct intel_vgpu *vgpu, u32 gpa);
560 int intel_vgpu_emulate_opregion_request(struct intel_vgpu *vgpu, u32 swsci);
561 void populate_pvinfo_page(struct intel_vgpu *vgpu);
564 void enter_failsafe_mode(struct intel_vgpu *vgpu, int reason);
565 void intel_vgpu_detach_regions(struct intel_vgpu *vgpu);
680 void intel_gvt_debugfs_add_vgpu(struct intel_vgpu *vgpu);
712 * @vgpu: a vGPU
720 static inline int intel_gvt_read_gpa(struct intel_vgpu *vgpu, unsigned long gpa,
723 if (!test_bit(INTEL_VGPU_STATUS_ATTACHED, vgpu->status))
725 return vfio_dma_rw(&vgpu->vfio_device, gpa, buf, len, false);
730 * @vgpu: a vGPU
738 static inline int intel_gvt_write_gpa(struct intel_vgpu *vgpu,
741 if (!test_bit(INTEL_VGPU_STATUS_ATTACHED, vgpu->status))
743 return vfio_dma_rw(&vgpu->vfio_device, gpa, buf, len, true);
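Both helpers return an error when the vGPU is not attached to its VFIO device and otherwise delegate to vfio_dma_rw(), with the final argument selecting read (false) or write (true). A small round-trip sketch; purely illustrative:

    /* Illustrative only: read a guest dword, flip a bit, write it back.
     * Either call can fail (e.g. when the vGPU is not attached), so the
     * return codes are checked.
     */
    static int example_toggle_guest_dword(struct intel_vgpu *vgpu,
                                          unsigned long gpa)
    {
            u32 val;
            int ret;

            ret = intel_gvt_read_gpa(vgpu, gpa, &val, sizeof(val));
            if (ret)
                    return ret;

            val ^= BIT(0);

            return intel_gvt_write_gpa(vgpu, gpa, &val, sizeof(val));
    }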
746 void intel_gvt_debugfs_remove_vgpu(struct intel_vgpu *vgpu);
752 int intel_gvt_dma_pin_guest_page(struct intel_vgpu *vgpu, dma_addr_t dma_addr);
753 int intel_gvt_dma_map_guest_page(struct intel_vgpu *vgpu, unsigned long gfn,
755 void intel_gvt_dma_unmap_guest_page(struct intel_vgpu *vgpu,
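These three functions manage the DMA mappings that back guest pages referenced by shadowed GPU structures; the trailing parameters of the map/unmap prototypes are elided here because those lines do not mention vgpu. A sketch under the assumption that map takes a size plus an output dma_addr_t and unmap takes that dma_addr_t back; verify against the full prototypes before use:

    /* Illustrative only, and the elided parameters are assumptions: map one
     * guest page frame for DMA, use the address, then unmap it.
     */
    static int example_map_one_guest_page(struct intel_vgpu *vgpu,
                                          unsigned long gfn)
    {
            dma_addr_t dma_addr;
            int ret;

            ret = intel_gvt_dma_map_guest_page(vgpu, gfn, PAGE_SIZE, &dma_addr);
            if (ret)
                    return ret;

            /* ... program dma_addr into a shadow entry here ... */

            intel_gvt_dma_unmap_guest_page(vgpu, dma_addr);
            return 0;
    }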