
Searched refs:bos (Results 1 – 25 of 49) sorted by relevance

/linux/drivers/gpu/drm/qxl/
qxl_release.c
100 INIT_LIST_HEAD(&release->bos); in qxl_release_alloc()
122 while (!list_empty(&release->bos)) { in qxl_release_free_list()
125 entry = container_of(release->bos.next, in qxl_release_free_list()
148 WARN_ON(list_empty(&release->bos)); in qxl_release_free()
173 list_for_each_entry(entry, &release->bos, list) { in qxl_release_list_add()
184 list_add_tail(&entry->list, &release->bos); in qxl_release_list_add()
218 if (list_is_singular(&release->bos)) in qxl_release_reserve_list()
224 list_for_each_entry(entry, &release->bos, list) { in qxl_release_reserve_list()
234 list_for_each_entry(entry, &release->bos, list) { in qxl_release_reserve_list()
249 if (list_is_singular(&release->bos)) in qxl_release_backoff_reserve_list()
[all …]
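Every qxl hit above is the same idiom: the release object tracks its buffer objects on an intrusive linked list (INIT_LIST_HEAD, list_add_tail, container_of, list_for_each_entry, and a drain loop guarded by list_empty). A minimal user-space sketch of that idiom, with the list helpers re-implemented locally and fake_bo_entry invented purely for illustration:

```c
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

/* Minimal re-implementation of the kernel's intrusive list idiom. */
struct list_head { struct list_head *next, *prev; };

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

static void INIT_LIST_HEAD(struct list_head *h) { h->next = h->prev = h; }
static int list_empty(const struct list_head *h) { return h->next == h; }

static void list_add_tail(struct list_head *new, struct list_head *head)
{
	new->prev = head->prev;
	new->next = head;
	head->prev->next = new;
	head->prev = new;
}

static void list_del(struct list_head *e)
{
	e->prev->next = e->next;
	e->next->prev = e->prev;
	e->next = e->prev = NULL;
}

/* Hypothetical stand-in for qxl's per-BO list entry. */
struct fake_bo_entry {
	int handle;
	struct list_head list;	/* linked into release->bos */
};

int main(void)
{
	struct list_head bos;	/* plays the role of release->bos */
	INIT_LIST_HEAD(&bos);

	for (int i = 0; i < 3; i++) {
		struct fake_bo_entry *e = malloc(sizeof(*e));
		e->handle = 100 + i;
		list_add_tail(&e->list, &bos);	/* what qxl_release_list_add() does */
	}

	/* Teardown mirrors qxl_release_free_list(): pop entries until empty. */
	while (!list_empty(&bos)) {
		struct fake_bo_entry *e =
			container_of(bos.next, struct fake_bo_entry, list);
		printf("releasing bo handle %d\n", e->handle);
		list_del(&e->list);
		free(e);
	}
	return 0;
}
```

The point of the idiom is that the list node lives inside each entry, so container_of() recovers the entry from the node with no extra per-node allocation.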
/linux/drivers/gpu/drm/msm/
msm_gem_submit.c
40 sz = struct_size(submit, bos, nr_bos) + in submit_create()
68 submit->cmd = (void *)&submit->bos[nr_bos]; in submit_create()
127 u64_to_user_ptr(args->bos + (i * sizeof(submit_bo))); in submit_lookup_objects()
132 submit->bos[i].flags = 0; in submit_lookup_objects()
150 submit->bos[i].handle = submit_bo.handle; in submit_lookup_objects()
151 submit->bos[i].flags = submit_bo.flags; in submit_lookup_objects()
162 obj = idr_find(&file->object_idr, submit->bos[i].handle); in submit_lookup_objects()
164 ret = SUBMIT_ERROR(EINVAL, submit, "invalid handle %u at index %u\n", submit->bos[i].handle, i); in submit_lookup_objects()
170 submit->bos[i].obj = obj; in submit_lookup_objects()
256 struct drm_gem_object *obj = submit->bos[i].obj; in submit_lock_objects()
[all …]
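The submit_create()/submit_lookup_objects() hits show a single allocation sized with struct_size(submit, bos, nr_bos) that also carries the cmd array right behind the trailing bos[] array (submit->cmd = (void *)&submit->bos[nr_bos]). A hedged user-space sketch of that layout; fake_submit, fake_bo and fake_cmd are invented stand-ins, and the size math approximates struct_size() without its overflow checking:

```c
#include <stdio.h>
#include <stdlib.h>

/* Invented stand-ins for the kernel structures. */
struct fake_bo  { unsigned int handle; unsigned int flags; };
struct fake_cmd { unsigned int type; unsigned int size_dwords; };

struct fake_submit {
	unsigned int nr_bos;
	unsigned int nr_cmds;
	struct fake_cmd *cmd;		/* points just past bos[nr_bos] */
	struct fake_bo bos[];		/* flexible array member */
};

int main(void)
{
	unsigned int nr_bos = 4, nr_cmds = 2;

	/* Roughly struct_size(submit, bos, nr_bos) + nr_cmds * sizeof(*cmd). */
	size_t sz = sizeof(struct fake_submit) +
		    nr_bos * sizeof(struct fake_bo) +
		    nr_cmds * sizeof(struct fake_cmd);

	struct fake_submit *submit = calloc(1, sz);
	if (!submit)
		return 1;

	submit->nr_bos = nr_bos;
	submit->nr_cmds = nr_cmds;
	/* The cmd array lives immediately after the trailing bos[] array. */
	submit->cmd = (struct fake_cmd *)&submit->bos[nr_bos];

	submit->bos[0].handle = 42;
	submit->cmd[0].type = 1;

	printf("one allocation of %zu bytes holds submit + %u bos + %u cmds\n",
	       sz, nr_bos, nr_cmds);
	free(submit);
	return 0;
}
```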
msm_fb.c
33 const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos);
141 struct drm_gem_object *bos[4] = {0}; in msm_framebuffer_create() local
146 bos[i] = drm_gem_object_lookup(file, mode_cmd->handles[i]); in msm_framebuffer_create()
147 if (!bos[i]) { in msm_framebuffer_create()
153 fb = msm_framebuffer_init(dev, mode_cmd, bos); in msm_framebuffer_create()
163 drm_gem_object_put(bos[i]); in msm_framebuffer_create()
168 const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos) in msm_framebuffer_init() argument
217 if (bos[i]->size < min_size) { in msm_framebuffer_init()
222 msm_fb->base.obj[i] = bos[i]; in msm_framebuffer_init()
msm_rd.c
315 struct drm_gem_object *obj = submit->bos[idx].obj; in snapshot_buf()
320 offset = iova - submit->bos[idx].iova; in snapshot_buf()
322 iova = submit->bos[idx].iova; in snapshot_buf()
337 if (!(submit->bos[idx].flags & MSM_SUBMIT_BO_READ)) in snapshot_buf()
/linux/drivers/gpu/drm/lima/
lima_sched.c
115 struct lima_bo **bos, int num_bos, in lima_sched_task_init() argument
120 task->bos = kmemdup(bos, sizeof(*bos) * num_bos, GFP_KERNEL); in lima_sched_task_init()
121 if (!task->bos) in lima_sched_task_init()
125 drm_gem_object_get(&bos[i]->base.base); in lima_sched_task_init()
129 kfree(task->bos); in lima_sched_task_init()
147 if (task->bos) { in lima_sched_task_fini()
149 drm_gem_object_put(&task->bos[i]->base.base); in lima_sched_task_fini()
150 kfree(task->bos); in lima_sched_task_fini()
306 struct lima_bo *bo = task->bos[i]; in lima_sched_build_error_task_list()
351 struct lima_bo *bo = task->bos[i]; in lima_sched_build_error_task_list()
[all …]
lima_drv.c
107 struct drm_lima_gem_submit_bo *bos; in lima_ioctl_gem_submit() local
125 bos = kvcalloc(args->nr_bos, sizeof(*submit.bos) + sizeof(*submit.lbos), GFP_KERNEL); in lima_ioctl_gem_submit()
126 if (!bos) in lima_ioctl_gem_submit()
129 size = args->nr_bos * sizeof(*submit.bos); in lima_ioctl_gem_submit()
130 if (copy_from_user(bos, u64_to_user_ptr(args->bos), size)) { in lima_ioctl_gem_submit()
158 submit.bos = bos; in lima_ioctl_gem_submit()
159 submit.lbos = (void *)bos + size; in lima_ioctl_gem_submit()
175 kvfree(bos); in lima_ioctl_gem_submit()
lima_sched.h
26 struct lima_bo **bos; member
90 struct lima_bo **bos, int num_bos,
lima_drv.h
33 struct drm_lima_gem_submit_bo *bos; member
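lima_ioctl_gem_submit() makes one kvcalloc() of nr_bos * (sizeof(*submit.bos) + sizeof(*submit.lbos)), fills the front half from user space, and aims lbos at the remainder (submit.lbos = (void *)bos + size). A user-space sketch of that one-allocation/two-array trick; user_bo_desc and lima_bo_ptr are invented placeholders:

```c
#include <stdio.h>
#include <stdlib.h>

/* Invented placeholders: the ABI-visible bo descriptor and the
 * driver-internal pointer array that shares the allocation with it. */
struct user_bo_desc { unsigned int handle; unsigned int flags; };
struct lima_bo_ptr  { void *bo; };

int main(void)
{
	unsigned int nr_bos = 8;

	/* One allocation sized for both arrays, like the kvcalloc() above. */
	void *mem = calloc(nr_bos,
			   sizeof(struct user_bo_desc) + sizeof(struct lima_bo_ptr));
	if (!mem)
		return 1;

	struct user_bo_desc *bos = mem;
	size_t size = nr_bos * sizeof(*bos);

	/* In the driver this front part is filled by copy_from_user(). */
	for (unsigned int i = 0; i < nr_bos; i++)
		bos[i].handle = i + 1;

	/* The second array starts right after the first: lbos = (void *)bos + size. */
	struct lima_bo_ptr *lbos = (struct lima_bo_ptr *)((char *)bos + size);
	for (unsigned int i = 0; i < nr_bos; i++)
		lbos[i].bo = NULL;	/* would hold looked-up lima_bo pointers */

	printf("bos at %p, lbos at %p (offset %zu)\n",
	       (void *)bos, (void *)lbos, size);

	free(mem);
	return 0;
}
```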
/linux/drivers/gpu/drm/omapdrm/
omap_fb.c
343 struct drm_gem_object *bos[4]; in omap_framebuffer_create() local
348 bos[i] = drm_gem_object_lookup(file, mode_cmd->handles[i]); in omap_framebuffer_create()
349 if (!bos[i]) { in omap_framebuffer_create()
355 fb = omap_framebuffer_init(dev, mode_cmd, bos); in omap_framebuffer_create()
363 drm_gem_object_put(bos[i]); in omap_framebuffer_create()
369 const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos) in omap_framebuffer_init() argument
431 if (size > omap_gem_mmap_size(bos[i]) - mode_cmd->offsets[i]) { in omap_framebuffer_init()
434 bos[i]->size - mode_cmd->offsets[i], size); in omap_framebuffer_init()
439 fb->obj[i] = bos[i]; in omap_framebuffer_init()
omap_fb.h
25 const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos);
/linux/drivers/gpu/drm/panfrost/
panfrost_job.c
266 static int panfrost_acquire_object_fences(struct drm_gem_object **bos, in panfrost_acquire_object_fences() argument
273 ret = dma_resv_reserve_fences(bos[i]->resv, 1); in panfrost_acquire_object_fences()
278 ret = drm_sched_job_add_implicit_dependencies(job, bos[i], in panfrost_acquire_object_fences()
287 static void panfrost_attach_object_fences(struct drm_gem_object **bos, in panfrost_attach_object_fences() argument
294 dma_resv_add_fence(bos[i]->resv, fence, DMA_RESV_USAGE_WRITE); in panfrost_attach_object_fences()
303 ret = drm_gem_lock_reservations(job->bos, job->bo_count, in panfrost_job_push()
313 ret = panfrost_acquire_object_fences(job->bos, job->bo_count, in panfrost_job_push()
326 panfrost_attach_object_fences(job->bos, job->bo_count, in panfrost_job_push()
330 drm_gem_unlock_reservations(job->bos, job->bo_count, &acquire_ctx); in panfrost_job_push()
355 if (job->bos) { in panfrost_job_cleanup()
[all …]
panfrost_dump.c
138 dbo = job->bos[i]; in panfrost_core_dump()
203 bo = to_panfrost_bo(job->bos[i]); in panfrost_core_dump()
/linux/include/net/
mpls.h
33 bool bos) in mpls_entry_encode() argument
40 (bos ? (1 << MPLS_LS_S_SHIFT) : 0) | in mpls_entry_encode()
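mpls_entry_encode() packs label, traffic class, the bottom-of-stack (bos) flag and TTL into one big-endian 32-bit label stack entry, with S occupying bit 8. A self-contained sketch of that bit layout; the shift constants are the standard RFC 3032 LSE values used by the kernel header:

```c
#include <stdint.h>
#include <stdio.h>
#include <arpa/inet.h>	/* htonl()/ntohl() */

/* MPLS label stack entry layout (RFC 3032):
 * | label (20 bits) | TC (3 bits) | S (1 bit) | TTL (8 bits) |
 */
#define MPLS_LS_LABEL_SHIFT	12
#define MPLS_LS_TC_SHIFT	9
#define MPLS_LS_S_SHIFT		8
#define MPLS_LS_TTL_SHIFT	0

static uint32_t mpls_entry_encode(uint32_t label, unsigned int ttl,
				  unsigned int tc, int bos)
{
	return htonl((label << MPLS_LS_LABEL_SHIFT) |
		     (tc << MPLS_LS_TC_SHIFT) |
		     (bos ? (1u << MPLS_LS_S_SHIFT) : 0) |
		     (ttl << MPLS_LS_TTL_SHIFT));
}

int main(void)
{
	/* Push a two-label stack: only the innermost entry carries S=1,
	 * which is the bos=true / bos=false dance visible in mpls_xmit(). */
	uint32_t inner = mpls_entry_encode(16, 64, 0, 1);	/* bottom of stack */
	uint32_t outer = mpls_entry_encode(100, 64, 0, 0);

	printf("outer LSE: 0x%08x\n", (unsigned)ntohl(outer));
	printf("inner LSE: 0x%08x (S bit set)\n", (unsigned)ntohl(inner));
	return 0;
}
```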
/linux/net/mpls/
mpls_iptunnel.c
50 bool bos; in mpls_xmit() local
124 bos = true; in mpls_xmit()
127 ttl, 0, bos); in mpls_xmit()
128 bos = false; in mpls_xmit()
af_mpls.c
194 if (!dec.bos) in mpls_multipath_hash()
435 if (unlikely(!new_header_size && dec.bos)) { in mpls_forward()
440 bool bos; in mpls_forward() local
446 bos = dec.bos; in mpls_forward()
449 dec.ttl, 0, bos); in mpls_forward()
450 bos = false; in mpls_forward()
1691 bool bos; in nla_put_labels() local
1698 bos = true; in nla_put_labels()
1700 nla_label[i] = mpls_entry_encode(label[i], 0, 0, bos); in nla_put_labels()
1701 bos = false; in nla_put_labels()
[all …]
internal.h
15 u8 bos; member
182 result.bos = (entry & MPLS_LS_S_MASK) >> MPLS_LS_S_SHIFT; in mpls_entry_decode()
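mpls_entry_decode() in internal.h does the inverse, pulling the bos flag out with MPLS_LS_S_MASK >> MPLS_LS_S_SHIFT; af_mpls.c then tests dec.bos to know it has reached the bottom of the label stack. A matching user-space decode sketch using the standard LSE masks:

```c
#include <stdint.h>
#include <stdio.h>
#include <arpa/inet.h>

#define MPLS_LS_LABEL_MASK	0xFFFFF000u
#define MPLS_LS_LABEL_SHIFT	12
#define MPLS_LS_TC_MASK		0x00000E00u
#define MPLS_LS_TC_SHIFT	9
#define MPLS_LS_S_MASK		0x00000100u
#define MPLS_LS_S_SHIFT		8
#define MPLS_LS_TTL_MASK	0x000000FFu

struct mpls_entry_decoded {
	uint32_t label;
	uint8_t ttl;
	uint8_t tc;
	uint8_t bos;	/* bottom-of-stack: 1 on the innermost label */
};

static struct mpls_entry_decoded mpls_entry_decode(uint32_t be_entry)
{
	uint32_t entry = ntohl(be_entry);
	struct mpls_entry_decoded result = {
		.label = (entry & MPLS_LS_LABEL_MASK) >> MPLS_LS_LABEL_SHIFT,
		.ttl   = entry & MPLS_LS_TTL_MASK,
		.tc    = (entry & MPLS_LS_TC_MASK) >> MPLS_LS_TC_SHIFT,
		.bos   = (entry & MPLS_LS_S_MASK) >> MPLS_LS_S_SHIFT,
	};
	return result;
}

int main(void)
{
	/* Label 16, TC 0, S=1, TTL 64 -> 0x00010140 on the wire. */
	uint32_t wire = htonl((16u << MPLS_LS_LABEL_SHIFT) |
			      (1u << MPLS_LS_S_SHIFT) | 64u);
	struct mpls_entry_decoded dec = mpls_entry_decode(wire);

	printf("label=%u tc=%u bos=%u ttl=%u\n",
	       (unsigned)dec.label, (unsigned)dec.tc,
	       (unsigned)dec.bos, (unsigned)dec.ttl);
	return 0;
}
```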
/linux/drivers/gpu/drm/msm/adreno/
adreno_gpu.c
755 for (i = 0; state->bos && i < state->nr_bos; i++) in adreno_gpu_state_destroy()
756 kvfree(state->bos[i].data); in adreno_gpu_state_destroy()
758 kfree(state->bos); in adreno_gpu_state_destroy()
905 if (state->bos) { in adreno_show()
910 state->bos[i].iova); in adreno_show()
911 drm_printf(p, " size: %zd\n", state->bos[i].size); in adreno_show()
912 drm_printf(p, " flags: 0x%x\n", state->bos[i].flags); in adreno_show()
913 drm_printf(p, " name: %-32s\n", state->bos[i].name); in adreno_show()
915 adreno_show_object(p, &state->bos[i].data, in adreno_show()
916 state->bos[i].size, &state->bos[i].encoded); in adreno_show()
/linux/drivers/net/ethernet/mellanox/mlx5/core/en/tc/act/
mpls.c
32 mpls_info->bos = act->mpls_push.bos; in copy_mpls_info()
/linux/drivers/gpu/drm/ttm/tests/
ttm_bo_validate_test.c
820 struct ttm_buffer_object *bos, *bo_val; in ttm_bo_validate_happy_evict() local
832 bos = kunit_kmalloc_array(test, bo_no, sizeof(*bos), GFP_KERNEL); in ttm_bo_validate_happy_evict()
833 KUNIT_ASSERT_NOT_NULL(test, bos); in ttm_bo_validate_happy_evict()
835 memset(bos, 0, sizeof(*bos) * bo_no); in ttm_bo_validate_happy_evict()
837 drm_gem_private_object_init(priv->drm, &bos[i].base, bo_sizes[i]); in ttm_bo_validate_happy_evict()
838 err = ttm_bo_init_reserved(priv->ttm_dev, &bos[i], bo_type, placement, in ttm_bo_validate_happy_evict()
841 dma_resv_unlock(bos[i].base.resv); in ttm_bo_validate_happy_evict()
852 KUNIT_EXPECT_EQ(test, bos[0].resource->mem_type, mem_type_evict); in ttm_bo_validate_happy_evict()
853 KUNIT_EXPECT_TRUE(test, bos[0].ttm->page_flags & TTM_TT_FLAG_ZERO_ALLOC); in ttm_bo_validate_happy_evict()
854 KUNIT_EXPECT_TRUE(test, bos[0].ttm->page_flags & TTM_TT_FLAG_PRIV_POPULATED); in ttm_bo_validate_happy_evict()
[all …]
/linux/drivers/gpu/drm/etnaviv/
etnaviv_dump.c
146 obj = submit->bos[i].obj; in etnaviv_core_dump()
205 obj = submit->bos[i].obj; in etnaviv_core_dump()
206 vram = submit->bos[i].mapping; in etnaviv_core_dump()
/linux/drivers/usb/gadget/
composite.c
729 struct usb_bos_descriptor *bos = cdev->req->buf; in bos_desc()
732 bos->bLength = USB_DT_BOS_SIZE; in bos_desc()
733 bos->bDescriptorType = USB_DT_BOS; in bos_desc()
735 bos->wTotalLength = cpu_to_le16(USB_DT_BOS_SIZE); in bos_desc()
736 bos->bNumDeviceCaps = 0; in bos_desc()
766 usb_ext = cdev->req->buf + le16_to_cpu(bos->wTotalLength); in bos_desc()
767 bos->bNumDeviceCaps++; in bos_desc()
768 le16_add_cpu(&bos->wTotalLength, USB_DT_USB_EXT_CAP_SIZE); in bos_desc()
783 ss_cap = cdev->req->buf + le16_to_cpu(bos->wTotalLength); in bos_desc()
784 bos->bNumDeviceCaps++; in bos_desc()
[all …]
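In composite.c, bos is not a buffer object but the USB Binary Object Store descriptor: bos_desc() writes the 5-byte BOS header, then appends each device-capability descriptor at req->buf + wTotalLength while bumping bNumDeviceCaps and wTotalLength. A hedged user-space sketch of that accumulation for a single USB 2.0 Extension capability; the descriptor constants are the standard ch9 values:

```c
#include <stdint.h>
#include <stdio.h>

/* Standard USB descriptor constants (ch9). */
#define USB_DT_BOS			0x0f
#define USB_DT_DEVICE_CAPABILITY	0x10
#define USB_DT_BOS_SIZE			5
#define USB_DT_USB_EXT_CAP_SIZE		7
#define USB_CAP_TYPE_EXT		2
#define USB_LPM_SUPPORT			(1u << 1)	/* Link Power Management */

int main(void)
{
	uint8_t buf[64] = {0};

	/* BOS header: bLength, bDescriptorType, wTotalLength (LE), bNumDeviceCaps. */
	buf[0] = USB_DT_BOS_SIZE;
	buf[1] = USB_DT_BOS;
	uint16_t total = USB_DT_BOS_SIZE;
	uint8_t ncaps = 0;

	/* Append a USB 2.0 Extension capability at the current end of the blob,
	 * just as bos_desc() places usb_ext at req->buf + wTotalLength. */
	uint8_t *ext = buf + total;
	uint32_t attrs = USB_LPM_SUPPORT;
	ext[0] = USB_DT_USB_EXT_CAP_SIZE;
	ext[1] = USB_DT_DEVICE_CAPABILITY;
	ext[2] = USB_CAP_TYPE_EXT;
	ext[3] = attrs & 0xff;			/* bmAttributes, little-endian */
	ext[4] = (attrs >> 8) & 0xff;
	ext[5] = (attrs >> 16) & 0xff;
	ext[6] = (attrs >> 24) & 0xff;
	ncaps++;
	total += USB_DT_USB_EXT_CAP_SIZE;

	/* Patch the running totals back into the header, like le16_add_cpu(). */
	buf[2] = total & 0xff;
	buf[3] = total >> 8;
	buf[4] = ncaps;

	printf("BOS: wTotalLength=%u, bNumDeviceCaps=%u\n",
	       (unsigned)total, (unsigned)ncaps);
	return 0;
}
```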
/linux/drivers/accel/amdxdna/
amdxdna_ctx.c
346 if (!job->bos[i]) in amdxdna_arg_bos_put()
348 drm_gem_object_put(job->bos[i]); in amdxdna_arg_bos_put()
374 job->bos[i] = gobj; in amdxdna_arg_bos_lookup()
387 job->bos[i] = gobj; in amdxdna_arg_bos_lookup()
414 job = kzalloc(struct_size(job, bos, arg_bo_cnt), GFP_KERNEL); in amdxdna_cmd_submit()
aie2_ctx.c
829 ret = drm_gem_lock_reservations(job->bos, job->bo_cnt, &acquire_ctx); in aie2_cmd_submit()
836 ret = dma_resv_reserve_fences(job->bos[i]->resv, 1); in aie2_cmd_submit()
839 drm_gem_unlock_reservations(job->bos, job->bo_cnt, &acquire_ctx); in aie2_cmd_submit()
846 abo = to_xdna_obj(job->bos[i]); in aie2_cmd_submit()
849 drm_gem_unlock_reservations(job->bos, job->bo_cnt, &acquire_ctx); in aie2_cmd_submit()
869 dma_resv_add_fence(job->bos[i]->resv, job->out_fence, DMA_RESV_USAGE_WRITE); in aie2_cmd_submit()
879 drm_gem_unlock_reservations(job->bos, job->bo_cnt, &acquire_ctx); in aie2_cmd_submit()
/linux/drivers/usb/core/
hub.c
168 if (!udev->bos) in usb_device_supports_lpm()
175 if (udev->bos->ext_cap && in usb_device_supports_lpm()
177 le32_to_cpu(udev->bos->ext_cap->bmAttributes))) in usb_device_supports_lpm()
187 if (!udev->bos->ss_cap) { in usb_device_supports_lpm()
192 if (udev->bos->ss_cap->bU1devExitLat == 0 && in usb_device_supports_lpm()
193 udev->bos->ss_cap->bU2DevExitLat == 0) { in usb_device_supports_lpm()
348 if (!udev->bos) in usb_set_lpm_parameters()
358 udev_u1_del = udev->bos->ss_cap->bU1devExitLat; in usb_set_lpm_parameters()
359 udev_u2_del = le16_to_cpu(udev->bos->ss_cap->bU2DevExitLat); in usb_set_lpm_parameters()
360 hub_u1_del = udev->parent->bos->ss_cap->bU1devExitLat; in usb_set_lpm_parameters()
[all …]
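On the host side, hub.c only enables LPM when the device's cached BOS descriptor advertises it (the ext_cap bmAttributes check) and, for SuperSpeed links, when the U1/U2 exit latencies in the SS capability look sane. A minimal sketch that walks a raw BOS blob and checks the LPM bit; bos_supports_lpm() is an invented helper, not a kernel function:

```c
#include <stdint.h>
#include <stdio.h>

#define USB_DT_BOS			0x0f
#define USB_DT_DEVICE_CAPABILITY	0x10
#define USB_CAP_TYPE_EXT		2
#define USB_LPM_SUPPORT			(1u << 1)

/* Walk the capability descriptors that follow the 5-byte BOS header and
 * report whether the USB 2.0 Extension capability advertises LPM. */
static int bos_supports_lpm(const uint8_t *bos, uint16_t len)
{
	if (len < 5 || bos[1] != USB_DT_BOS)
		return 0;

	uint16_t total = bos[2] | (bos[3] << 8);
	if (total > len)
		total = len;

	uint16_t off = bos[0];	/* skip the BOS header itself */
	while (off + 3 <= total) {
		uint8_t blen = bos[off];
		uint8_t dtype = bos[off + 1];
		uint8_t ctype = bos[off + 2];

		if (blen < 3 || off + blen > total)
			break;	/* malformed descriptor, stop walking */

		if (dtype == USB_DT_DEVICE_CAPABILITY &&
		    ctype == USB_CAP_TYPE_EXT && blen >= 7) {
			uint32_t attrs = bos[off + 3] | (bos[off + 4] << 8) |
					 (bos[off + 5] << 16) |
					 ((uint32_t)bos[off + 6] << 24);
			return !!(attrs & USB_LPM_SUPPORT);
		}
		off += blen;
	}
	return 0;
}

int main(void)
{
	/* BOS header + USB 2.0 Extension capability with the LPM bit set. */
	const uint8_t blob[] = {
		0x05, 0x0f, 0x0c, 0x00, 0x01,		/* wTotalLength = 12, 1 cap */
		0x07, 0x10, 0x02, 0x02, 0x00, 0x00, 0x00,
	};
	printf("LPM supported: %d\n", bos_supports_lpm(blob, sizeof(blob)));
	return 0;
}
```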
/linux/drivers/usb/host/
xhci-hub.c
39 struct usb_bos_descriptor *bos; in xhci_create_usb3x_bos_desc() local
53 bos = (struct usb_bos_descriptor *)buf; in xhci_create_usb3x_bos_desc()
54 bos->bLength = USB_DT_BOS_SIZE; in xhci_create_usb3x_bos_desc()
55 bos->bDescriptorType = USB_DT_BOS; in xhci_create_usb3x_bos_desc()
56 bos->wTotalLength = cpu_to_le16(USB_DT_BOS_SIZE + in xhci_create_usb3x_bos_desc()
58 bos->bNumDeviceCaps = 1; in xhci_create_usb3x_bos_desc()
92 bos->bNumDeviceCaps++; in xhci_create_usb3x_bos_desc()
93 bos->wTotalLength = cpu_to_le16(USB_DT_BOS_SIZE + in xhci_create_usb3x_bos_desc()
122 if (wLength < le16_to_cpu(bos->wTotalLength)) in xhci_create_usb3x_bos_desc()
126 return le16_to_cpu(bos->wTotalLength); in xhci_create_usb3x_bos_desc()
[all …]
