
Searched refs:vm_id (Results 1 – 25 of 36) sorted by relevance


/linux/drivers/firmware/arm_ffa/
driver.c
94 u16 vm_id; member
201 static int ffa_rxtx_unmap(u16 vm_id) in ffa_rxtx_unmap() argument
206 .a0 = FFA_RXTX_UNMAP, .a1 = PACK_TARGET_INFO(vm_id, 0), in ffa_rxtx_unmap()
383 static int ffa_id_get(u16 *vm_id) in ffa_id_get() argument
394 *vm_id = FIELD_GET(VM_ID_MASK, (id.a2)); in ffa_id_get()
620 mem_region->sender_id = drv_info->vm_id; in ffa_setup_and_transmit()
739 .a1 = drv_info->vm_id, .a2 = vcpu_count, in ffa_notification_bitmap_create()
754 .a1 = drv_info->vm_id, in ffa_notification_bitmap_destroy()
795 u32 func, src_dst_ids = PACK_TARGET_INFO(dst_id, drv_info->vm_id); in ffa_notification_bind_common()
842 u16 src_id = drv_info->vm_id; in ffa_notification_get()
[all …]
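The driver.c hits above show both ends of the FF-A partition-ID plumbing: FFA_ID_GET returns the local ID (extracted with FIELD_GET(VM_ID_MASK, id.a2)) and PACK_TARGET_INFO() folds it into the 32-bit target-info word passed to calls such as FFA_RXTX_UNMAP. Below is a minimal standalone sketch of that packing, assuming the FF-A convention of sender ID in bits 31:16 and receiver ID in bits 15:0; the helper names are illustrative, not the kernel macros.

/*
 * Illustrative sketch only (not the kernel's PACK_TARGET_INFO/VM_ID_MASK
 * macros): pack a sender/receiver partition-ID pair into the 32-bit
 * target-info word used by FF-A calls, assuming sender in bits 31:16 and
 * receiver in bits 15:0.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t pack_target_info(uint16_t sender_vm_id, uint16_t receiver_id)
{
	return ((uint32_t)sender_vm_id << 16) | receiver_id;
}

/* Extract the local partition ID from an FFA_ID_GET-style result register. */
static uint16_t vm_id_from_id_get(uint32_t a2)
{
	return (uint16_t)(a2 & 0xffff);	/* low 16 bits carry the ID */
}

int main(void)
{
	uint16_t vm_id = vm_id_from_id_get(0x00008001);

	printf("vm_id=%#x target_info=%#x\n",
	       vm_id, pack_target_info(vm_id, 0));
	return 0;
}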
bus.c
72 ffa_dev->vm_id, &ffa_dev->uuid); in ffa_device_uevent()
80 return sysfs_emit(buf, SCMI_UEVENT_MODALIAS_FMT, ffa_dev->vm_id, in modalias_show()
90 return sprintf(buf, "0x%04x\n", ffa_dev->vm_id); in partition_id_show()
218 ffa_dev->vm_id = part_info->id; in ffa_device_register()
/linux/drivers/gpu/drm/radeon/
radeon_vm.c
180 struct radeon_vm_id *vm_id = &vm->ids[ring]; in radeon_vm_grab_id() local
186 if (vm_id->id && vm_id->last_id_use && in radeon_vm_grab_id()
187 vm_id->last_id_use == rdev->vm_manager.active[vm_id->id]) in radeon_vm_grab_id()
191 vm_id->pd_gpu_addr = ~0ll; in radeon_vm_grab_id()
199 vm_id->id = i; in radeon_vm_grab_id()
212 vm_id->id = choices[i]; in radeon_vm_grab_id()
240 struct radeon_vm_id *vm_id = &vm->ids[ring]; in radeon_vm_flush() local
242 if (pd_addr != vm_id->pd_gpu_addr || !vm_id->flushed_updates || in radeon_vm_flush()
243 radeon_fence_is_earlier(vm_id->flushed_updates, updates)) { in radeon_vm_flush()
246 radeon_fence_unref(&vm_id->flushed_updates); in radeon_vm_flush()
[all …]
si_dma.c
187 unsigned vm_id, uint64_t pd_addr) in si_dma_vm_flush() argument
191 if (vm_id < 8) { in si_dma_vm_flush()
192 radeon_ring_write(ring, (0xf << 16) | ((VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2)) >> 2)); in si_dma_vm_flush()
194 radeon_ring_write(ring, (0xf << 16) | ((VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((vm_id - 8) << 2)) >> 2)); in si_dma_vm_flush()
206 radeon_ring_write(ring, 1 << vm_id); in si_dma_vm_flush()
212 radeon_ring_write(ring, 1 << vm_id); /* mask */ in si_dma_vm_flush()
ni_dma.c
125 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in cayman_dma_ring_ib_execute() local
143 radeon_ring_write(ring, DMA_IB_PACKET(DMA_PACKET_INDIRECT_BUFFER, vm_id, 0)); in cayman_dma_ring_ib_execute()
449 unsigned vm_id, uint64_t pd_addr) in cayman_dma_vm_flush() argument
452 radeon_ring_write(ring, (0xf << 16) | ((VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2)) >> 2)); in cayman_dma_vm_flush()
463 radeon_ring_write(ring, 1 << vm_id); in cayman_dma_vm_flush()
cik_sdma.c
945 unsigned vm_id, uint64_t pd_addr) in cik_dma_vm_flush() argument
951 if (vm_id < 8) { in cik_dma_vm_flush()
952 radeon_ring_write(ring, (VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2)) >> 2); in cik_dma_vm_flush()
954 radeon_ring_write(ring, (VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((vm_id - 8) << 2)) >> 2); in cik_dma_vm_flush()
961 radeon_ring_write(ring, VMID(vm_id)); in cik_dma_vm_flush()
989 radeon_ring_write(ring, 1 << vm_id); in cik_dma_vm_flush()
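The si_dma.c and cik_sdma.c flush hits follow the same pattern: VM contexts 0-7 and 8-15 keep their page-table base registers in two separate banks, so the register offset is derived from vm_id before the flush packet is emitted. A small sketch of that selection follows, with placeholder register values (the real VM_CONTEXT*_PAGE_TABLE_BASE_ADDR offsets live in the radeon register headers) and hypothetical helper names.

#include <stdint.h>

/* Placeholder register offsets; the real values come from the radeon headers. */
#define VM_CONTEXT0_PAGE_TABLE_BASE_ADDR	0x1400
#define VM_CONTEXT8_PAGE_TABLE_BASE_ADDR	0x1440

/* Byte offset of the page-table base register for a given VM context. */
static uint32_t vm_pt_base_reg(unsigned int vm_id)
{
	if (vm_id < 8)
		return VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2);

	return VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((vm_id - 8) << 2);
}

/* The DMA packets in the hits above carry the register as a dword index. */
static uint32_t vm_pt_base_dword(unsigned int vm_id)
{
	return vm_pt_base_reg(vm_id) >> 2;
}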
radeon_asic.h
615 unsigned vm_id, uint64_t pd_addr);
641 unsigned vm_id, uint64_t pd_addr);
723 unsigned vm_id, uint64_t pd_addr);
746 unsigned vm_id, uint64_t pd_addr);
830 unsigned vm_id, uint64_t pd_addr);
849 unsigned vm_id, uint64_t pd_addr);
ni.c
1402 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in cayman_ring_ib_execute() local
1425 radeon_ring_write(ring, ib->length_dw | (vm_id << 24)); in cayman_ring_ib_execute()
1432 radeon_ring_write(ring, (vm_id << 24) | 10); /* poll interval */ in cayman_ring_ib_execute()
2664 unsigned vm_id, uint64_t pd_addr) in cayman_vm_flush() argument
2666 radeon_ring_write(ring, PACKET0(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2), 0)); in cayman_vm_flush()
2675 radeon_ring_write(ring, 1 << vm_id); in cayman_vm_flush()
si.c
3383 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in si_ring_ib_execute() local
3419 radeon_ring_write(ring, ib->length_dw | (vm_id << 24)); in si_ring_ib_execute()
3425 radeon_ring_write(ring, vm_id); in si_ring_ib_execute()
5057 unsigned vm_id, uint64_t pd_addr) in si_vm_flush() argument
5064 if (vm_id < 8) { in si_vm_flush()
5066 (VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2)) >> 2); in si_vm_flush()
5069 (VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((vm_id - 8) << 2)) >> 2); in si_vm_flush()
5088 radeon_ring_write(ring, 1 << vm_id); in si_vm_flush()
radeon_gem.c
685 if (args->vm_id) { in radeon_gem_va_ioctl()
/linux/arch/x86/kvm/svm/
avic.c
49 #define __AVIC_GATAG(vm_id, vcpu_id) ((((vm_id) & AVIC_VM_ID_MASK) << AVIC_VM_ID_SHIFT) | \ argument
51 #define AVIC_GATAG(vm_id, vcpu_id) \ argument
53 u32 ga_tag = __AVIC_GATAG(vm_id, vcpu_id); \
56 WARN_ON_ONCE(AVIC_GATAG_TO_VMID(ga_tag) != (vm_id)); \
148 u32 vm_id = AVIC_GATAG_TO_VMID(ga_tag); in avic_ga_log_notifier() local
151 pr_debug("SVM: %s: vm_id=%#x, vcpu_id=%#x\n", __func__, vm_id, vcpu_id); in avic_ga_log_notifier()
152 trace_kvm_avic_ga_log(vm_id, vcpu_id); in avic_ga_log_notifier()
155 hash_for_each_possible(svm_vm_data_hash, kvm_svm, hnode, vm_id) { in avic_ga_log_notifier()
156 if (kvm_svm->avic_vm_id != vm_id) in avic_ga_log_notifier()
200 u32 vm_id; in avic_vm_init() local
[all …]
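In the avic.c hits, vm_id and vcpu_id are packed into a single 32-bit GA tag so that avic_ga_log_notifier() can recover the VM (via AVIC_GATAG_TO_VMID) and use it to hash into svm_vm_data_hash. A standalone sketch of that style of tag packing follows; the bit split below is a placeholder, not the kernel's AVIC_VM_ID_SHIFT/AVIC_VM_ID_MASK values.

#include <stdint.h>

/* Placeholder split of the 32-bit tag; not the kernel's AVIC_* constants. */
#define GATAG_VM_ID_BITS	8
#define GATAG_VCPU_ID_BITS	(32 - GATAG_VM_ID_BITS)
#define GATAG_VM_ID_MASK	((1u << GATAG_VM_ID_BITS) - 1)
#define GATAG_VCPU_ID_MASK	((1u << GATAG_VCPU_ID_BITS) - 1)

/* Combine a VM ID and vCPU ID into one tag. */
static uint32_t make_ga_tag(uint32_t vm_id, uint32_t vcpu_id)
{
	return ((vm_id & GATAG_VM_ID_MASK) << GATAG_VCPU_ID_BITS) |
	       (vcpu_id & GATAG_VCPU_ID_MASK);
}

/* Recover either half from a tag reported by the GA log. */
static uint32_t ga_tag_to_vm_id(uint32_t ga_tag)
{
	return ga_tag >> GATAG_VCPU_ID_BITS;
}

static uint32_t ga_tag_to_vcpu_id(uint32_t ga_tag)
{
	return ga_tag & GATAG_VCPU_ID_MASK;
}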
hyperv.h
25 hv_vcpu->nested.vm_id = hve->hv_vm_id; in nested_svm_hv_update_vm_vp_ids()
/linux/Documentation/gpu/rfc/
i915_vm_bind.h
112 __u32 vm_id; member
177 __u32 vm_id; member
289 __u32 vm_id; member
/linux/include/uapi/drm/
xe_drm.h
783 __u32 vm_id; member
866 __u32 vm_id; member
877 __u32 vm_id; member
1048 __u32 vm_id; member
1124 __u32 vm_id; member
panthor_drm.h
603 __u32 vm_id; member
644 __u32 vm_id; member
823 __u32 vm_id; member
965 __u32 vm_id; member
radeon_drm.h
944 __u32 vm_id; member
/linux/drivers/gpu/drm/panthor/
panthor_drv.c
1128 vm = panthor_vm_pool_get_vm(pfile->vms, args->vm_id); in panthor_ioctl_tiler_heap_create()
1151 args->handle = (args->vm_id << 16) | ret; in panthor_ioctl_tiler_heap_create()
1202 vm = panthor_vm_pool_get_vm(pfile->vms, args->vm_id); in panthor_ioctl_vm_bind_async()
1270 vm = panthor_vm_pool_get_vm(pfile->vms, args->vm_id); in panthor_ioctl_vm_bind_sync()
1321 vm = panthor_vm_pool_get_vm(pfile->vms, args->vm_id); in panthor_ioctl_vm_get_state()
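The panthor_drv.c tiler-heap hit also shows a second, purely numeric use of vm_id: the ioctl's returned handle packs the owning vm_id into the upper 16 bits and the per-VM heap slot into the lower 16, so both can be recovered from a single value later. A trivial sketch of that encoding with hypothetical helper names:

#include <stdint.h>

/* Pack a VM ID and a per-VM heap slot into one 32-bit handle. */
static uint32_t make_heap_handle(uint16_t vm_id, uint16_t heap_slot)
{
	return ((uint32_t)vm_id << 16) | heap_slot;
}

static uint16_t heap_handle_vm_id(uint32_t handle)
{
	return handle >> 16;
}

static uint16_t heap_handle_slot(uint32_t handle)
{
	return handle & 0xffff;
}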
/linux/drivers/tee/tstee/
core.c
51 .impl_caps = lower_16_bits(tstee->ffa_dev->vm_id), in tstee_get_version()
249 .receiver = tstee->ffa_dev->vm_id, in tstee_shm_register()
/linux/Documentation/gpu/
drm-vm-bind-async.rst
268 /** @vm_id: The ID of the VM to bind to */
269 __u32 vm_id;
273 * and exec queue must have same vm_id. If zero, the default VM bind engine
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_device_queue_manager.h
60 uint32_t vm_id:4; member
/linux/include/linux/
arm_ffa.h
135 int vm_id; member
/linux/drivers/infiniband/hw/irdma/
type.h
557 u16 vm_id; member
714 u16 vm_id; member
/linux/drivers/gpu/drm/xe/
xe_vm.c
1855 args->vm_id = id; in xe_vm_create_ioctl()
1879 vm = xa_load(&xef->vm.xa, args->vm_id); in xe_vm_destroy_ioctl()
1885 xa_erase(&xef->vm.xa, args->vm_id); in xe_vm_destroy_ioctl()
3020 vm = xe_vm_lookup(xef, args->vm_id); in xe_vm_bind_ioctl()
xe_exec_queue.c
612 vm = xe_vm_lookup(xef, args->vm_id); in xe_exec_queue_create_ioctl()
/linux/drivers/tee/optee/
ffa_abi.c
278 .receiver = ffa_dev->vm_id, in optee_ffa_shm_register()
