
Searched refs:vmid (Results 1 – 25 of 173) sorted by relevance


/linux/arch/arm64/kvm/
vmid.c
32 #define vmid2idx(vmid) ((vmid) & ~VMID_MASK) argument
42 #define vmid_gen_match(vmid) \ argument
43 (!(((vmid) ^ atomic64_read(&vmid_generation)) >> kvm_arm_vmid_bits))
48 u64 vmid; in flush_context() local
53 vmid = atomic64_xchg_relaxed(&per_cpu(active_vmids, cpu), 0); in flush_context()
56 if (vmid == 0) in flush_context()
57 vmid = per_cpu(reserved_vmids, cpu); in flush_context()
58 __set_bit(vmid2idx(vmid), vmid_map); in flush_context()
59 per_cpu(reserved_vmids, cpu) = vmid; in flush_context()
72 static bool check_update_reserved_vmid(u64 vmid, u64 newvmid) in check_update_reserved_vmid() argument
[all …]
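The two macros at the top of this entry implement a generation-tagged VMID: the low kvm_arm_vmid_bits carry the index and the bits above them carry a rollover generation, so a cached VMID stays valid only while its generation matches the global counter. A minimal standalone sketch of that check (the 8-bit width, the helper names, and the plain global in place of the kernel's atomic64_t are assumptions for illustration):

#include <stdint.h>
#include <stdio.h>

/* Illustrative parameters; the kernel derives the width from kvm_arm_vmid_bits
 * and keeps the generation counter in an atomic64_t. */
#define ID_BITS  8
#define ID_MASK  ((1ULL << ID_BITS) - 1)

static uint64_t current_generation = 1ULL << ID_BITS;  /* generations live above the ID bits */

/* Same shape as vmid2idx(): keep only the low index bits. */
static uint64_t id_to_index(uint64_t id)
{
    return id & ID_MASK;
}

/* Same shape as vmid_gen_match(): XOR against the global generation, then
 * shift the index bits away; the result is zero exactly when the cached
 * generation equals the current one. */
static int generation_matches(uint64_t id)
{
    return !((id ^ current_generation) >> ID_BITS);
}

int main(void)
{
    uint64_t vmid = current_generation | 5;      /* index 5 tagged with the current generation */

    printf("idx=%llu match=%d\n",
           (unsigned long long)id_to_index(vmid), generation_matches(vmid));

    current_generation += 1ULL << ID_BITS;       /* simulate a generation rollover/flush */
    printf("after rollover: match=%d\n", generation_matches(vmid));
    return 0;
}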
/linux/drivers/virt/acrn/
hypercall.h
80 static inline long hcall_start_vm(u64 vmid) in hcall_start_vm() argument
82 return acrn_hypercall1(HC_START_VM, vmid); in hcall_start_vm()
91 static inline long hcall_pause_vm(u64 vmid) in hcall_pause_vm() argument
93 return acrn_hypercall1(HC_PAUSE_VM, vmid); in hcall_pause_vm()
102 static inline long hcall_destroy_vm(u64 vmid) in hcall_destroy_vm() argument
104 return acrn_hypercall1(HC_DESTROY_VM, vmid); in hcall_destroy_vm()
113 static inline long hcall_reset_vm(u64 vmid) in hcall_reset_vm() argument
115 return acrn_hypercall1(HC_RESET_VM, vmid); in hcall_reset_vm()
125 static inline long hcall_set_vcpu_regs(u64 vmid, u64 regs_state) in hcall_set_vcpu_regs() argument
127 return acrn_hypercall2(HC_SET_VCPU_REGS, vmid, regs_state); in hcall_set_vcpu_regs()
[all …]
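Each wrapper above is a one-line shim that forwards a fixed hypercall number plus the target vmid through acrn_hypercall1()/acrn_hypercall2(). A hedged standalone sketch of that dispatch pattern (the HC_* values and the printing stub stand in for the real hypercall transport and are not the kernel's):

#include <stdint.h>
#include <stdio.h>

/* Stand-in command numbers and transport; in the driver these are the real
 * HC_* constants issued through acrn_hypercall1()/acrn_hypercall2(). */
enum { HC_START_VM = 0x10, HC_PAUSE_VM = 0x11, HC_DESTROY_VM = 0x13 };

static long acrn_hypercall1(unsigned long nr, uint64_t arg0)
{
    printf("hypercall %#lx(vmid=%llu)\n", nr, (unsigned long long)arg0);
    return 0;  /* pretend the hypervisor accepted the request */
}

/* Same shape as the wrappers in hypercall.h: one inline shim per VM
 * operation, each taking only the target vmid. */
static inline long hcall_start_vm(uint64_t vmid)   { return acrn_hypercall1(HC_START_VM, vmid); }
static inline long hcall_pause_vm(uint64_t vmid)   { return acrn_hypercall1(HC_PAUSE_VM, vmid); }
static inline long hcall_destroy_vm(uint64_t vmid) { return acrn_hypercall1(HC_DESTROY_VM, vmid); }

int main(void)
{
    uint64_t vmid = 3;  /* illustrative vmid handed out at VM creation */

    if (hcall_start_vm(vmid) < 0)
        return 1;
    hcall_pause_vm(vmid);
    hcall_destroy_vm(vmid);
    return 0;
}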
vm.c
31 if (ret < 0 || vm_param->vmid == ACRN_INVALID_VMID) { in acrn_vm_create()
40 vm->vmid = vm_param->vmid; in acrn_vm_create()
44 hcall_destroy_vm(vm_param->vmid); in acrn_vm_create()
45 vm->vmid = ACRN_INVALID_VMID; in acrn_vm_create()
55 dev_dbg(acrn_dev.this_device, "VM %u created.\n", vm->vmid); in acrn_vm_create()
63 if (vm->vmid == ACRN_INVALID_VMID || in acrn_vm_destroy()
67 ret = hcall_destroy_vm(vm->vmid); in acrn_vm_destroy()
70 "Failed to destroy VM %u\n", vm->vmid); in acrn_vm_destroy()
91 dev_dbg(acrn_dev.this_device, "VM %u destroyed.\n", vm->vmid); in acrn_vm_destroy()
92 vm->vmid = ACRN_INVALID_VMID; in acrn_vm_destroy()
[all …]
hsm.c
37 vm->vmid = ACRN_INVALID_VMID; in acrn_dev_open()
126 if (vm->vmid == ACRN_INVALID_VMID && cmd != ACRN_IOCTL_CREATE_VM) { in acrn_dev_ioctl()
160 ret = hcall_start_vm(vm->vmid); in acrn_dev_ioctl()
163 "Failed to start VM %u!\n", vm->vmid); in acrn_dev_ioctl()
166 ret = hcall_pause_vm(vm->vmid); in acrn_dev_ioctl()
169 "Failed to pause VM %u!\n", vm->vmid); in acrn_dev_ioctl()
172 ret = hcall_reset_vm(vm->vmid); in acrn_dev_ioctl()
175 "Failed to restart VM %u!\n", vm->vmid); in acrn_dev_ioctl()
211 ret = hcall_set_vcpu_regs(vm->vmid, virt_to_phys(cpu_regs)); in acrn_dev_ioctl()
215 vm->vmid); in acrn_dev_ioctl()
[all …]
mm.c
28 regions->vmid = vm->vmid; in modify_region()
35 "Failed to set memory region for VM[%u]!\n", vm->vmid); in modify_region()
126 "Add memory region failed, VM[%u]!\n", vm->vmid); in acrn_vm_memseg_map()
144 "Del memory region failed, VM[%u]!\n", vm->vmid); in acrn_vm_memseg_unmap()
298 regions_info->vmid = vm->vmid; in acrn_vm_ram_map()
323 "Failed to set regions, VM[%u]!\n", vm->vmid); in acrn_vm_ram_map()
330 __func__, vm->vmid, in acrn_vm_ram_map()
/linux/arch/riscv/kvm/
tlb.c
23 void kvm_riscv_local_hfence_gvma_vmid_gpa(unsigned long vmid, in kvm_riscv_local_hfence_gvma_vmid_gpa() argument
30 kvm_riscv_local_hfence_gvma_vmid_all(vmid); in kvm_riscv_local_hfence_gvma_vmid_gpa()
38 : : "r" (pos >> 2), "r" (vmid) : "memory"); in kvm_riscv_local_hfence_gvma_vmid_gpa()
43 : : "r" (pos >> 2), "r" (vmid) : "memory"); in kvm_riscv_local_hfence_gvma_vmid_gpa()
47 void kvm_riscv_local_hfence_gvma_vmid_all(unsigned long vmid) in kvm_riscv_local_hfence_gvma_vmid_all() argument
49 asm volatile(HFENCE_GVMA(zero, %0) : : "r" (vmid) : "memory"); in kvm_riscv_local_hfence_gvma_vmid_all()
80 void kvm_riscv_local_hfence_vvma_asid_gva(unsigned long vmid, in kvm_riscv_local_hfence_vvma_asid_gva() argument
89 kvm_riscv_local_hfence_vvma_asid_all(vmid, asid); in kvm_riscv_local_hfence_vvma_asid_gva()
93 hgatp = csr_swap(CSR_HGATP, vmid << HGATP_VMID_SHIFT); in kvm_riscv_local_hfence_vvma_asid_gva()
110 void kvm_riscv_local_hfence_vvma_asid_all(unsigned long vmid, in kvm_riscv_local_hfence_vvma_asid_all() argument
[all …]
vmid.c
51 kvm->arch.vmid.vmid_version = 0; in kvm_riscv_gstage_vmid_init()
52 kvm->arch.vmid.vmid = 0; in kvm_riscv_gstage_vmid_init()
57 bool kvm_riscv_gstage_vmid_ver_changed(struct kvm_vmid *vmid) in kvm_riscv_gstage_vmid_ver_changed() argument
62 return unlikely(READ_ONCE(vmid->vmid_version) != in kvm_riscv_gstage_vmid_ver_changed()
75 struct kvm_vmid *vmid = &vcpu->kvm->arch.vmid; in kvm_riscv_gstage_vmid_update() local
77 if (!kvm_riscv_gstage_vmid_ver_changed(vmid)) in kvm_riscv_gstage_vmid_update()
86 if (!kvm_riscv_gstage_vmid_ver_changed(vmid)) { in kvm_riscv_gstage_vmid_update()
113 vmid->vmid = vmid_next; in kvm_riscv_gstage_vmid_update()
117 WRITE_ONCE(vmid->vmid_version, READ_ONCE(vmid_version)); in kvm_riscv_gstage_vmid_update()
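The RISC-V allocator keeps the version as a separate field (struct kvm_vmid above) and pairs a lockless READ_ONCE check with a locked, re-checked update path. A rough standalone model of that double-checked pattern, using C11 atomics and a pthread mutex in place of the kernel primitives (all names here are illustrative):

#include <pthread.h>
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

/* Per-VM state: a version-stamped ID, same shape as the vmid/vmid_version
 * pair in struct kvm_vmid from the listing above. */
struct guest_id {
    atomic_ulong version;
    unsigned long id;
};

static atomic_ulong global_version = 1;
static unsigned long next_id = 1;
static pthread_mutex_t id_lock = PTHREAD_MUTEX_INITIALIZER;

/* Fast path: lockless check, like kvm_riscv_gstage_vmid_ver_changed(). */
static bool version_changed(struct guest_id *g)
{
    return atomic_load(&g->version) != atomic_load(&global_version);
}

/* Slow path: take the lock, re-check (another vCPU may have refreshed the
 * ID already), hand out a fresh id, and publish the new version last. */
static void update_id(struct guest_id *g)
{
    if (!version_changed(g))
        return;

    pthread_mutex_lock(&id_lock);
    if (version_changed(g)) {
        g->id = next_id++;
        atomic_store(&g->version, atomic_load(&global_version));
    }
    pthread_mutex_unlock(&id_lock);
}

int main(void)
{
    struct guest_id g = { .version = 0, .id = 0 };

    update_id(&g);
    printf("id=%lu version=%lu\n", g.id, (unsigned long)atomic_load(&g.version));
    return 0;
}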
mmu.c
29 gstage.vmid = READ_ONCE(kvm->arch.vmid.vmid); in mmu_wp_memory_region()
54 gstage.vmid = READ_ONCE(kvm->arch.vmid.vmid); in kvm_riscv_mmu_ioremap()
94 gstage.vmid = READ_ONCE(kvm->arch.vmid.vmid); in kvm_riscv_mmu_iounmap()
114 gstage.vmid = READ_ONCE(kvm->arch.vmid.vmid); in kvm_arch_mmu_enable_log_dirty_pt_masked()
146 gstage.vmid = READ_ONCE(kvm->arch.vmid.vmid); in kvm_arch_flush_shadow_memslot()
254 gstage.vmid = READ_ONCE(kvm->arch.vmid.vmid); in kvm_unmap_gfn_range()
276 gstage.vmid = READ_ONCE(kvm->arch.vmid.vmid); in kvm_age_gfn()
299 gstage.vmid = READ_ONCE(kvm->arch.vmid.vmid); in kvm_test_age_gfn()
328 gstage.vmid = READ_ONCE(kvm->arch.vmid.vmid); in kvm_riscv_mmu_map()
447 gstage.vmid = READ_ONCE(kvm->arch.vmid.vmid); in kvm_riscv_mmu_free_pgd()
[all …]
vcpu_sbi_v01.c
26 unsigned long vmid; in kvm_sbi_ext_v01_handler() local
81 vmid = READ_ONCE(vcpu->kvm->arch.vmid.vmid); in kvm_sbi_ext_v01_handler()
83 kvm_riscv_hfence_vvma_all(vcpu->kvm, 0, hmask, vmid); in kvm_sbi_ext_v01_handler()
86 cp->a2, PAGE_SHIFT, vmid); in kvm_sbi_ext_v01_handler()
88 vmid = READ_ONCE(vcpu->kvm->arch.vmid.vmid); in kvm_sbi_ext_v01_handler()
91 cp->a3, vmid); in kvm_sbi_ext_v01_handler()
95 cp->a3, vmid); in kvm_sbi_ext_v01_handler()
vcpu_sbi_replace.c
99 unsigned long vmid; in kvm_sbi_ext_rfence_handler() local
107 vmid = READ_ONCE(vcpu->kvm->arch.vmid.vmid); in kvm_sbi_ext_rfence_handler()
109 kvm_riscv_hfence_vvma_all(vcpu->kvm, hbase, hmask, vmid); in kvm_sbi_ext_rfence_handler()
112 cp->a2, cp->a3, PAGE_SHIFT, vmid); in kvm_sbi_ext_rfence_handler()
116 vmid = READ_ONCE(vcpu->kvm->arch.vmid.vmid); in kvm_sbi_ext_rfence_handler()
119 cp->a4, vmid); in kvm_sbi_ext_rfence_handler()
122 cp->a3, PAGE_SHIFT, cp->a4, vmid); in kvm_sbi_ext_rfence_handler()
/linux/arch/riscv/include/asm/
kvm_tlb.h
24 unsigned long vmid; member
34 void kvm_riscv_local_hfence_gvma_vmid_gpa(unsigned long vmid,
37 void kvm_riscv_local_hfence_gvma_vmid_all(unsigned long vmid);
41 void kvm_riscv_local_hfence_vvma_asid_gva(unsigned long vmid,
46 void kvm_riscv_local_hfence_vvma_asid_all(unsigned long vmid,
48 void kvm_riscv_local_hfence_vvma_gva(unsigned long vmid,
51 void kvm_riscv_local_hfence_vvma_all(unsigned long vmid);
65 unsigned long order, unsigned long vmid);
68 unsigned long vmid);
73 unsigned long vmid);
[all …]
/linux/drivers/gpu/drm/amd/display/modules/vmid/
vmid.c
41 static void add_ptb_to_table(struct core_vmid *core_vmid, unsigned int vmid, uint64_t ptb) in add_ptb_to_table() argument
43 if (vmid < MAX_VMID) { in add_ptb_to_table()
44 core_vmid->ptb_assigned_to_vmid[vmid] = ptb; in add_ptb_to_table()
49 static void clear_entry_from_vmid_table(struct core_vmid *core_vmid, unsigned int vmid) in clear_entry_from_vmid_table() argument
51 if (vmid < MAX_VMID) { in clear_entry_from_vmid_table()
52 core_vmid->ptb_assigned_to_vmid[vmid] = 0; in clear_entry_from_vmid_table()
98 int vmid = 0; in mod_vmid_get_for_ptb() local
104 vmid = get_existing_vmid_for_ptb(core_vmid, ptb); in mod_vmid_get_for_ptb()
106 if (vmid == -1) { in mod_vmid_get_for_ptb()
114 vmid = get_next_available_vmid(core_vmid); in mod_vmid_get_for_ptb()
[all …]
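The display module above keeps a small fixed-size table mapping a page-table base (PTB) to a VMID slot: reuse an existing entry when the PTB is already present, otherwise claim a free one, with every store bounds-checked against MAX_VMID. A hedged standalone sketch of that get-or-assign idiom (the table size, zero sentinel, and helper names are ours, not the driver's):

#include <stdint.h>
#include <stdio.h>

#define MAX_SLOTS 16

static uint64_t ptb_for_slot[MAX_SLOTS];  /* 0 means "slot free" in this sketch */

static int find_existing_slot(uint64_t ptb)
{
    for (int i = 0; i < MAX_SLOTS; i++)
        if (ptb_for_slot[i] == ptb)
            return i;
    return -1;
}

static int find_free_slot(void)
{
    return find_existing_slot(0);   /* a zero entry is an unused slot */
}

/* Mirrors the shape of mod_vmid_get_for_ptb(): reuse a slot that already
 * holds this PTB, otherwise claim a free one, bounds-checked like
 * add_ptb_to_table(). Returns -1 when the table is full. */
static int get_slot_for_ptb(uint64_t ptb)
{
    int slot = find_existing_slot(ptb);

    if (slot == -1) {
        slot = find_free_slot();
        if (slot >= 0 && slot < MAX_SLOTS)
            ptb_for_slot[slot] = ptb;
    }
    return slot;
}

int main(void)
{
    printf("%d\n", get_slot_for_ptb(0x1000));  /* first request claims a slot */
    printf("%d\n", get_slot_for_ptb(0x1000));  /* the same PTB reuses it */
    return 0;
}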
/linux/drivers/gpu/drm/amd/amdkfd/
cik_event_interrupt.c
37 unsigned int vmid; in cik_event_interrupt_isr() local
53 vmid = f2g->read_vmid_from_vmfault_reg(dev->adev); in cik_event_interrupt_isr()
54 ret = f2g->get_atc_vmid_pasid_mapping_info(dev->adev, vmid, &pasid); in cik_event_interrupt_isr()
57 tmp_ihre->ring_id |= vmid << 8; in cik_event_interrupt_isr()
61 vmid >= dev->vm_info.first_vmid_kfd && in cik_event_interrupt_isr()
62 vmid <= dev->vm_info.last_vmid_kfd; in cik_event_interrupt_isr()
66 vmid = (ihre->ring_id & 0x0000ff00) >> 8; in cik_event_interrupt_isr()
67 if (vmid < dev->vm_info.first_vmid_kfd || in cik_event_interrupt_isr()
68 vmid > dev->vm_info.last_vmid_kfd) in cik_event_interrupt_isr()
/linux/drivers/gpu/drm/amd/display/dc/dcn20/
dcn20_vmid.c
32 vmid->regs->reg
35 vmid->ctx
39 vmid->shifts->field_name, vmid->masks->field_name
44 static void dcn20_wait_for_vmid_ready(struct dcn20_vmid *vmid) in dcn20_wait_for_vmid_ready() argument
76 void dcn20_vmid_setup(struct dcn20_vmid *vmid, const struct dcn_vmid_page_table_config *config) in dcn20_vmid_setup() argument
98 dcn20_wait_for_vmid_ready(vmid); in dcn20_vmid_setup()
/linux/drivers/gpu/drm/amd/amdgpu/
gmc_v12_0.c
156 entry->src_id, entry->ring_id, entry->vmid, entry->pasid); in gmc_v12_0_process_interrupt()
213 uint8_t vmid, uint16_t *p_pasid) in gmc_v12_0_get_vmid_pasid_mapping_info() argument
215 *p_pasid = RREG32(SOC15_REG_OFFSET(OSSSYS, 0, regIH_VMID_0_LUT) + vmid) & 0xffff; in gmc_v12_0_get_vmid_pasid_mapping_info()
227 static void gmc_v12_0_flush_vm_hub(struct amdgpu_device *adev, uint32_t vmid, in gmc_v12_0_flush_vm_hub() argument
232 u32 inv_req = hub->vmhub_funcs->get_invalidate_req(vmid, flush_type); in gmc_v12_0_flush_vm_hub()
272 tmp &= 1 << vmid; in gmc_v12_0_flush_vm_hub()
319 static void gmc_v12_0_flush_gpu_tlb(struct amdgpu_device *adev, uint32_t vmid, in gmc_v12_0_flush_gpu_tlb() argument
335 u32 inv_req = hub->vmhub_funcs->get_invalidate_req(vmid, flush_type); in gmc_v12_0_flush_gpu_tlb()
340 1 << vmid, GET_INST(GC, 0)); in gmc_v12_0_flush_gpu_tlb()
344 gmc_v12_0_flush_vm_hub(adev, vmid, vmhub, 0); in gmc_v12_0_flush_gpu_tlb()
[all …]
amdgpu_amdkfd_gfx_v7.c
49 uint32_t queue, uint32_t vmid) in lock_srbm() argument
51 uint32_t value = PIPEID(pipe) | MEID(mec) | VMID(vmid) | QUEUEID(queue); in lock_srbm()
77 static void kgd_program_sh_mem_settings(struct amdgpu_device *adev, uint32_t vmid, in kgd_program_sh_mem_settings() argument
83 lock_srbm(adev, 0, 0, 0, vmid); in kgd_program_sh_mem_settings()
94 unsigned int vmid, uint32_t inst) in kgd_set_pasid_vmid_mapping() argument
105 WREG32(mmATC_VMID0_PASID_MAPPING + vmid, pasid_mapping); in kgd_set_pasid_vmid_mapping()
107 while (!(RREG32(mmATC_VMID_PASID_MAPPING_UPDATE_STATUS) & (1U << vmid))) in kgd_set_pasid_vmid_mapping()
109 WREG32(mmATC_VMID_PASID_MAPPING_UPDATE_STATUS, 1U << vmid); in kgd_set_pasid_vmid_mapping()
112 WREG32(mmIH_VMID_0_LUT + vmid, pasid_mapping); in kgd_set_pasid_vmid_mapping()
521 uint8_t vmid, uint16_t *p_pasid) in get_atc_vmid_pasid_mapping_info() argument
[all …]
gmc_v11_0.c
163 entry->src_id, entry->ring_id, entry->vmid, entry->pasid); in gmc_v11_0_process_interrupt()
220 uint8_t vmid, uint16_t *p_pasid) in gmc_v11_0_get_vmid_pasid_mapping_info() argument
222 *p_pasid = RREG32(SOC15_REG_OFFSET(OSSSYS, 0, regIH_VMID_0_LUT) + vmid) & 0xffff; in gmc_v11_0_get_vmid_pasid_mapping_info()
237 static void gmc_v11_0_flush_gpu_tlb(struct amdgpu_device *adev, uint32_t vmid, in gmc_v11_0_flush_gpu_tlb() argument
242 u32 inv_req = hub->vmhub_funcs->get_invalidate_req(vmid, flush_type); in gmc_v11_0_flush_gpu_tlb()
266 1 << vmid, GET_INST(GC, 0)); in gmc_v11_0_flush_gpu_tlb()
300 tmp &= 1 << vmid; in gmc_v11_0_flush_gpu_tlb()
346 int vmid, i; in gmc_v11_0_flush_gpu_tlb_pasid() local
348 for (vmid = 1; vmid < 16; vmid++) { in gmc_v11_0_flush_gpu_tlb_pasid()
351 valid = gmc_v11_0_get_vmid_pasid_mapping_info(adev, vmid, in gmc_v11_0_flush_gpu_tlb_pasid()
[all …]
amdgpu_amdkfd_gfx_v8.c
43 uint32_t queue, uint32_t vmid) in lock_srbm() argument
45 uint32_t value = PIPEID(pipe) | MEID(mec) | VMID(vmid) | QUEUEID(queue); in lock_srbm()
71 static void kgd_program_sh_mem_settings(struct amdgpu_device *adev, uint32_t vmid, in kgd_program_sh_mem_settings() argument
77 lock_srbm(adev, 0, 0, 0, vmid); in kgd_program_sh_mem_settings()
88 unsigned int vmid, uint32_t inst) in kgd_set_pasid_vmid_mapping() argument
100 WREG32(mmATC_VMID0_PASID_MAPPING + vmid, pasid_mapping); in kgd_set_pasid_vmid_mapping()
102 while (!(RREG32(mmATC_VMID_PASID_MAPPING_UPDATE_STATUS) & (1U << vmid))) in kgd_set_pasid_vmid_mapping()
104 WREG32(mmATC_VMID_PASID_MAPPING_UPDATE_STATUS, 1U << vmid); in kgd_set_pasid_vmid_mapping()
107 WREG32(mmIH_VMID_0_LUT + vmid, pasid_mapping); in kgd_set_pasid_vmid_mapping()
532 uint8_t vmid, uint16_t *p_pasid) in get_atc_vmid_pasid_mapping_info() argument
[all …]
gmc_v10_0.c
166 entry->src_id, entry->ring_id, entry->vmid, entry->pasid); in gmc_v10_0_process_interrupt()
224 uint8_t vmid, uint16_t *p_pasid) in gmc_v10_0_get_atc_vmid_pasid_mapping_info() argument
229 + vmid); in gmc_v10_0_get_atc_vmid_pasid_mapping_info()
252 static void gmc_v10_0_flush_gpu_tlb(struct amdgpu_device *adev, uint32_t vmid, in gmc_v10_0_flush_gpu_tlb() argument
257 u32 inv_req = hub->vmhub_funcs->get_invalidate_req(vmid, flush_type); in gmc_v10_0_flush_gpu_tlb()
278 1 << vmid, GET_INST(GC, 0)); in gmc_v10_0_flush_gpu_tlb()
320 tmp &= 1 << vmid; in gmc_v10_0_flush_gpu_tlb()
354 int vmid, i; in gmc_v10_0_flush_gpu_tlb_pasid() local
356 for (vmid = 1; vmid < AMDGPU_NUM_VMID; vmid++) { in gmc_v10_0_flush_gpu_tlb_pasid()
359 valid = gmc_v10_0_get_atc_vmid_pasid_mapping_info(adev, vmid, in gmc_v10_0_flush_gpu_tlb_pasid()
[all …]
vcn_sw_ring.c
47 uint32_t vmid = AMDGPU_JOB_GET_VMID(job); in vcn_dec_sw_ring_emit_ib() local
50 amdgpu_ring_write(ring, vmid); in vcn_dec_sw_ring_emit_ib()
66 uint32_t vmid, uint64_t pd_addr) in vcn_dec_sw_ring_emit_vm_flush() argument
71 pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr); in vcn_dec_sw_ring_emit_vm_flush()
74 data0 = hub->ctx0_ptb_addr_lo32 + vmid * hub->ctx_addr_distance; in vcn_dec_sw_ring_emit_vm_flush()
amdgpu_gmc.h
110 uint32_t (*get_invalidate_req)(unsigned int vmid, uint32_t flush_type);
148 void (*flush_gpu_tlb)(struct amdgpu_device *adev, uint32_t vmid,
155 uint64_t (*emit_flush_gpu_tlb)(struct amdgpu_ring *ring, unsigned vmid,
158 void (*emit_pasid_mapping)(struct amdgpu_ring *ring, unsigned vmid,
362 #define amdgpu_gmc_emit_flush_gpu_tlb(r, vmid, addr) (r)->adev->gmc.gmc_funcs->emit_flush_gpu_tlb((… argument
363 #define amdgpu_gmc_emit_pasid_mapping(r, vmid, pasid) (r)->adev->gmc.gmc_funcs->emit_pasid_mapping(… argument
433 void amdgpu_gmc_flush_gpu_tlb(struct amdgpu_device *adev, uint32_t vmid,
gmc_v7_0.c
439 int vmid; in gmc_v7_0_flush_gpu_tlb_pasid() local
441 for (vmid = 1; vmid < 16; vmid++) { in gmc_v7_0_flush_gpu_tlb_pasid()
442 u32 tmp = RREG32(mmATC_VMID0_PASID_MAPPING + vmid); in gmc_v7_0_flush_gpu_tlb_pasid()
446 mask |= 1 << vmid; in gmc_v7_0_flush_gpu_tlb_pasid()
470 static void gmc_v7_0_flush_gpu_tlb(struct amdgpu_device *adev, uint32_t vmid, in gmc_v7_0_flush_gpu_tlb() argument
474 WREG32(mmVM_INVALIDATE_REQUEST, 1 << vmid); in gmc_v7_0_flush_gpu_tlb()
478 unsigned int vmid, uint64_t pd_addr) in gmc_v7_0_emit_flush_gpu_tlb() argument
482 if (vmid < 8) in gmc_v7_0_emit_flush_gpu_tlb()
483 reg = mmVM_CONTEXT0_PAGE_TABLE_BASE_ADDR + vmid; in gmc_v7_0_emit_flush_gpu_tlb()
485 reg = mmVM_CONTEXT8_PAGE_TABLE_BASE_ADDR + vmid - 8; in gmc_v7_0_emit_flush_gpu_tlb()
[all …]
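The PASID flush above scans VMIDs 1..15, reads each VMID's PASID mapping register, and collects the matching VMIDs into a bitmask before requesting invalidation. A toy standalone model of that scan (the valid bit, field layout, and the array standing in for the registers are assumptions):

#include <stdint.h>
#include <stdio.h>

#define NUM_VMIDS 16
#define PASID_VALID_BIT (1u << 31)

static uint32_t vmid_pasid_mapping[NUM_VMIDS];  /* stands in for the per-VMID PASID mapping registers */

/* Walk VMIDs 1..15 and build a bitmask of the ones bound to this PASID,
 * mirroring the loop in gmc_v7_0_flush_gpu_tlb_pasid(). */
static uint32_t vmids_bound_to_pasid(uint16_t pasid)
{
    uint32_t mask = 0;

    for (int vmid = 1; vmid < NUM_VMIDS; vmid++) {
        uint32_t tmp = vmid_pasid_mapping[vmid];

        if ((tmp & PASID_VALID_BIT) && (tmp & 0xffff) == pasid)
            mask |= 1u << vmid;
    }
    return mask;
}

int main(void)
{
    vmid_pasid_mapping[3] = PASID_VALID_BIT | 0x42;
    vmid_pasid_mapping[7] = PASID_VALID_BIT | 0x42;

    /* The driver would then write this mask (or per-bit requests) to the
     * TLB invalidate register to flush those VMIDs' translations. */
    printf("invalidate mask = %#x\n", vmids_bound_to_pasid(0x42));
    return 0;
}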
gmc_v9_0.c
597 ret = amdgpu_vm_handle_fault(adev, entry->pasid, entry->vmid, node_id, in gmc_v9_0_process_interrupt()
620 if (amdgpu_vm_handle_fault(adev, entry->pasid, entry->vmid, node_id, in gmc_v9_0_process_interrupt()
635 entry->src_id, entry->ring_id, entry->vmid, entry->pasid); in gmc_v9_0_process_interrupt()
766 static uint32_t gmc_v9_0_get_invalidate_req(unsigned int vmid, in gmc_v9_0_get_invalidate_req() argument
772 PER_VMID_INVALIDATE_REQ, 1 << vmid); in gmc_v9_0_get_invalidate_req()
807 uint8_t vmid, uint16_t *p_pasid) in gmc_v9_0_get_atc_vmid_pasid_mapping_info() argument
812 + vmid); in gmc_v9_0_get_atc_vmid_pasid_mapping_info()
835 static void gmc_v9_0_flush_gpu_tlb(struct amdgpu_device *adev, uint32_t vmid, in gmc_v9_0_flush_gpu_tlb() argument
846 inv_req = gmc_v9_0_get_invalidate_req(vmid, flush_type); in gmc_v9_0_flush_gpu_tlb()
865 1 << vmid, inst); in gmc_v9_0_flush_gpu_tlb()
[all …]
/linux/samples/acrn/
vm-sample.c
30 __u16 vmid; variable
40 ioctl(hsm_fd, ACRN_IOCTL_PAUSE_VM, vmid); in vm_exit()
67 vmid = create_vm.vmid; in main()
101 ret = ioctl(hsm_fd, ACRN_IOCTL_START_VM, vmid); in main()
120 notify.vmid = vmid; in main()
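The sample above drives the ACRN HSM character device from userspace: create a VM, read back the assigned vmid, start it, and pause it on exit. A hedged minimal sketch of that ioctl flow (the /dev/acrn_hsm path and the zero-filled creation struct are assumptions drawn from the sample, not a verified recipe):

#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <unistd.h>

#include <linux/acrn.h>   /* ACRN_IOCTL_* and struct acrn_vm_creation */

int main(void)
{
    struct acrn_vm_creation create_vm = { 0 };  /* real callers fill in more fields */
    __u16 vmid;
    int hsm_fd;

    hsm_fd = open("/dev/acrn_hsm", O_RDWR | O_CLOEXEC);   /* assumed device node */
    if (hsm_fd < 0) {
        perror("open");
        return 1;
    }

    if (ioctl(hsm_fd, ACRN_IOCTL_CREATE_VM, &create_vm) < 0) {
        perror("ACRN_IOCTL_CREATE_VM");
        close(hsm_fd);
        return 1;
    }
    vmid = create_vm.vmid;                          /* the HSM fills in the new vmid */

    if (ioctl(hsm_fd, ACRN_IOCTL_START_VM, vmid) < 0)   /* vmid passed by value, as in the sample */
        perror("ACRN_IOCTL_START_VM");

    ioctl(hsm_fd, ACRN_IOCTL_PAUSE_VM, vmid);       /* mirrors the sample's vm_exit() path */

    close(hsm_fd);
    return 0;
}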
/linux/drivers/gpu/drm/radeon/
radeon_trace.h
51 TP_PROTO(unsigned vmid, int ring),
52 TP_ARGS(vmid, ring),
54 __field(u32, vmid)
59 __entry->vmid = vmid;
62 TP_printk("vmid=%u, ring=%u", __entry->vmid, __entry->ring)
