
Searched refs:ib (Results 1 – 25 of 97) sorted by relevance


/linux/drivers/gpu/drm/radeon/
radeon_ib.c
61 struct radeon_ib *ib, struct radeon_vm *vm, in radeon_ib_get() argument
66 r = radeon_sa_bo_new(&rdev->ring_tmp_bo, &ib->sa_bo, size, 256); in radeon_ib_get()
72 radeon_sync_create(&ib->sync); in radeon_ib_get()
74 ib->ring = ring; in radeon_ib_get()
75 ib->fence = NULL; in radeon_ib_get()
76 ib->ptr = radeon_sa_bo_cpu_addr(ib->sa_bo); in radeon_ib_get()
77 ib->vm = vm; in radeon_ib_get()
82 ib->gpu_addr = drm_suballoc_soffset(ib->sa_bo) + RADEON_VA_IB_OFFSET; in radeon_ib_get()
84 ib->gpu_addr = radeon_sa_bo_gpu_addr(ib->sa_bo); in radeon_ib_get()
86 ib->is_const_ib = false; in radeon_ib_get()
[all …]
ni_dma.c
122 struct radeon_ib *ib) in cayman_dma_ring_ib_execute() argument
124 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cayman_dma_ring_ib_execute()
125 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in cayman_dma_ring_ib_execute()
144 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in cayman_dma_ring_ib_execute()
145 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute()
315 struct radeon_ib *ib, in cayman_dma_vm_copy_pages() argument
326 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in cayman_dma_vm_copy_pages()
328 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cayman_dma_vm_copy_pages()
329 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cayman_dma_vm_copy_pages()
330 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_copy_pages()
[all …]
si_dma.c
69 struct radeon_ib *ib, in si_dma_vm_copy_pages() argument
78 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in si_dma_vm_copy_pages()
80 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pages()
81 ib->ptr[ib->length_dw++] = lower_32_bits(src); in si_dma_vm_copy_pages()
82 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_copy_pages()
83 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; in si_dma_vm_copy_pages()
105 struct radeon_ib *ib, in si_dma_vm_write_pages() argument
119 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pages()
120 ib->ptr[ib->length_dw++] = pe; in si_dma_vm_write_pages()
121 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_write_pages()
[all …]
cik_sdma.c
133 struct radeon_ib *ib) in cik_sdma_ring_ib_execute() argument
135 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cik_sdma_ring_ib_execute()
136 u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf; in cik_sdma_ring_ib_execute()
154 radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_ib_execute()
155 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr)); in cik_sdma_ring_ib_execute()
156 radeon_ring_write(ring, ib->length_dw); in cik_sdma_ring_ib_execute()
703 struct radeon_ib ib; in cik_sdma_ib_test() local
720 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); in cik_sdma_ib_test()
726 ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0); in cik_sdma_ib_test()
727 ib.ptr[1] = lower_32_bits(gpu_addr); in cik_sdma_ib_test()
[all …]
radeon_vm.c
359 struct radeon_ib *ib, in radeon_vm_set_pages() argument
368 radeon_asic_vm_copy_pages(rdev, ib, pe, src, count); in radeon_vm_set_pages()
371 radeon_asic_vm_write_pages(rdev, ib, pe, addr, in radeon_vm_set_pages()
375 radeon_asic_vm_set_pages(rdev, ib, pe, addr, in radeon_vm_set_pages()
390 struct radeon_ib ib; in radeon_vm_clear_bo() local
406 r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, 256); in radeon_vm_clear_bo()
410 ib.length_dw = 0; in radeon_vm_clear_bo()
412 radeon_vm_set_pages(rdev, &ib, addr, 0, entries, 0, 0); in radeon_vm_clear_bo()
413 radeon_asic_vm_pad_ib(rdev, &ib); in radeon_vm_clear_bo()
414 WARN_ON(ib.length_dw > 64); in radeon_vm_clear_bo()
[all …]
r600_dma.c
338 struct radeon_ib ib; in r600_dma_ib_test() local
352 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); in r600_dma_ib_test()
358 ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1); in r600_dma_ib_test()
359 ib.ptr[1] = lower_32_bits(gpu_addr); in r600_dma_ib_test()
360 ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff; in r600_dma_ib_test()
361 ib.ptr[3] = 0xDEADBEEF; in r600_dma_ib_test()
362 ib.length_dw = 4; in r600_dma_ib_test()
364 r = radeon_ib_schedule(rdev, &ib, NULL, false); in r600_dma_ib_test()
366 radeon_ib_free(rdev, &ib); in r600_dma_ib_test()
370 r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies( in r600_dma_ib_test()
[all …]
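Read together, the r600_dma.c hits above walk the whole indirect-buffer lifecycle: allocate an IB with radeon_ib_get(), fill ib.ptr with a DMA write packet, submit it with radeon_ib_schedule(), wait on ib.fence, and release it with radeon_ib_free(). The following is a minimal sketch of that sequence, not the driver's actual test: the helper name is hypothetical, the scratch buffer that supplies gpu_addr/cpu_ptr is assumed to be set up by the caller, and the radeon driver's internal headers (radeon.h plus the ASIC header that defines DMA_PACKET/DMA_PACKET_WRITE) are assumed to be in scope.

/*
 * Hypothetical helper modelled on the r600_dma_ib_test() excerpt above.
 * Assumptions: the caller already owns a pinned scratch buffer and passes
 * its GPU address (gpu_addr) and CPU mapping (cpu_ptr).
 */
static int example_dma_ib_write(struct radeon_device *rdev,
				struct radeon_ring *ring,
				u64 gpu_addr, volatile u32 *cpu_ptr)
{
	struct radeon_ib ib;
	int r;

	/* Allocate a small IB (256 bytes) from the ring's suballocator. */
	r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
	if (r)
		return r;

	/* Single DMA WRITE packet: destination address, then one payload dword. */
	ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1);
	ib.ptr[1] = lower_32_bits(gpu_addr);
	ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;
	ib.ptr[3] = 0xDEADBEEF;
	ib.length_dw = 4;

	/* Submit the IB; on success a fence is attached as ib.fence. */
	r = radeon_ib_schedule(rdev, &ib, NULL, false);
	if (r)
		goto free_ib;

	/* Wait roughly one second for the fence; the real test uses the
	 * driver's IB-test timeout constant. */
	r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies(1000000));
	if (r < 0)
		goto free_ib;
	if (r == 0) {		/* fence wait timed out */
		r = -ETIMEDOUT;
		goto free_ib;
	}

	/* Check that the DMA engine actually wrote the pattern. */
	r = (*cpu_ptr == 0xDEADBEEF) ? 0 : -EINVAL;

free_ib:
	radeon_ib_free(rdev, &ib);
	return r;
}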
evergreen_dma.c
67 struct radeon_ib *ib) in evergreen_dma_ring_ib_execute() argument
69 struct radeon_ring *ring = &rdev->ring[ib->ring]; in evergreen_dma_ring_ib_execute()
88 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in evergreen_dma_ring_ib_execute()
89 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in evergreen_dma_ring_ib_execute()
/linux/drivers/net/ethernet/amd/
7990.c
100 t, ib->brx_ring[t].rmd1_hadr, ib->brx_ring[t].rmd0, \
101 ib->brx_ring[t].length, \
102 ib->brx_ring[t].mblength, ib->brx_ring[t].rmd1_bits); \
106 t, ib->btx_ring[t].tmd1_hadr, ib->btx_ring[t].tmd0, \
107 ib->btx_ring[t].length, \
108 ib->btx_ring[t].misc, ib->btx_ring[t].tmd1_bits); \
140 volatile struct lance_init_block *ib = lp->init_block; in lance_init_ring() local
150 ib->mode = LE_MO_PROM; /* normal, enable Tx & Rx */ in lance_init_ring()
163 ib->phys_addr[0] = dev->dev_addr[1]; in lance_init_ring()
164 ib->phys_addr[1] = dev->dev_addr[0]; in lance_init_ring()
[all …]
sunlance.c
319 struct lance_init_block *ib = lp->init_block_mem; in lance_init_ring_dvma() local
332 ib->phys_addr [0] = dev->dev_addr [1]; in lance_init_ring_dvma()
333 ib->phys_addr [1] = dev->dev_addr [0]; in lance_init_ring_dvma()
334 ib->phys_addr [2] = dev->dev_addr [3]; in lance_init_ring_dvma()
335 ib->phys_addr [3] = dev->dev_addr [2]; in lance_init_ring_dvma()
336 ib->phys_addr [4] = dev->dev_addr [5]; in lance_init_ring_dvma()
337 ib->phys_addr [5] = dev->dev_addr [4]; in lance_init_ring_dvma()
342 ib->btx_ring [i].tmd0 = leptr; in lance_init_ring_dvma()
343 ib->btx_ring [i].tmd1_hadr = leptr >> 16; in lance_init_ring_dvma()
344 ib->btx_ring [i].tmd1_bits = 0; in lance_init_ring_dvma()
[all …]
a2065.c
149 volatile struct lance_init_block *ib = lp->init_block; in lance_init_ring() local
160 ib->mode = 0; in lance_init_ring()
165 ib->phys_addr[0] = dev->dev_addr[1]; in lance_init_ring()
166 ib->phys_addr[1] = dev->dev_addr[0]; in lance_init_ring()
167 ib->phys_addr[2] = dev->dev_addr[3]; in lance_init_ring()
168 ib->phys_addr[3] = dev->dev_addr[2]; in lance_init_ring()
169 ib->phys_addr[4] = dev->dev_addr[5]; in lance_init_ring()
170 ib->phys_addr[5] = dev->dev_addr[4]; in lance_init_ring()
176 ib->btx_ring[i].tmd0 = leptr; in lance_init_ring()
177 ib->btx_ring[i].tmd1_hadr = leptr >> 16; in lance_init_ring()
[all …]
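The LANCE drivers above (7990.c, sunlance.c, a2065.c) all load the MAC address into the init block with adjacent bytes swapped: dev_addr[1] goes into phys_addr[0], dev_addr[0] into phys_addr[1], and so on, apparently because the chip consumes the init block as 16-bit words whose byte order differs from the host's. Below is a compact, illustrative equivalent of that assignment pattern; the real drivers write the six assignments out by hand, and the struct/parameter names follow the a2065.c excerpt rather than any one driver exactly.

/* Illustrative only: stores the six MAC bytes into the LANCE init block
 * with each 16-bit pair byte-swapped, matching the hand-written
 * assignments in the excerpts above. */
static void example_lance_set_mac(volatile struct lance_init_block *ib,
				  const u8 *dev_addr)
{
	int i;

	for (i = 0; i < 6; i += 2) {
		ib->phys_addr[i]     = dev_addr[i + 1];
		ib->phys_addr[i + 1] = dev_addr[i];
	}
}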
declance.c
235 #define lib_ptr(ib, rt, type) \ argument
236 ((volatile u16 *)((u8 *)(ib) + lib_off(rt, type)))
453 volatile u16 *ib = (volatile u16 *)dev->mem_start; in lance_init_ring() local
465 *lib_ptr(ib, phys_addr[0], lp->type) = (dev->dev_addr[1] << 8) | in lance_init_ring()
467 *lib_ptr(ib, phys_addr[1], lp->type) = (dev->dev_addr[3] << 8) | in lance_init_ring()
469 *lib_ptr(ib, phys_addr[2], lp->type) = (dev->dev_addr[5] << 8) | in lance_init_ring()
475 *lib_ptr(ib, rx_len, lp->type) = (LANCE_LOG_RX_BUFFERS << 13) | in lance_init_ring()
477 *lib_ptr(ib, rx_ptr, lp->type) = leptr; in lance_init_ring()
484 *lib_ptr(ib, tx_len, lp->type) = (LANCE_LOG_TX_BUFFERS << 13) | in lance_init_ring()
486 *lib_ptr(ib, tx_ptr, lp->type) = leptr; in lance_init_ring()
[all …]
/linux/drivers/infiniband/hw/mlx4/
ah.c
48 ah->av.ib.port_pd = cpu_to_be32(to_mpd(ib_ah->pd)->pdn | in create_ib_ah()
50 ah->av.ib.g_slid = rdma_ah_get_path_bits(ah_attr); in create_ib_ah()
51 ah->av.ib.sl_tclass_flowlabel = in create_ib_ah()
56 ah->av.ib.g_slid |= 0x80; in create_ib_ah()
57 ah->av.ib.gid_index = grh->sgid_index; in create_ib_ah()
58 ah->av.ib.hop_limit = grh->hop_limit; in create_ib_ah()
59 ah->av.ib.sl_tclass_flowlabel |= in create_ib_ah()
62 memcpy(ah->av.ib.dgid, grh->dgid.raw, 16); in create_ib_ah()
65 ah->av.ib.dlid = cpu_to_be16(rdma_ah_get_dlid(ah_attr)); in create_ib_ah()
73 ah->av.ib.stat_rate = static_rate; in create_ib_ah()
[all …]
/linux/drivers/gpu/drm/amd/amdgpu/
sdma_v6_0.c
266 struct amdgpu_ib *ib, in sdma_v6_0_ring_emit_ib() argument
285 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v6_0_ring_emit_ib()
286 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v6_0_ring_emit_ib()
287 amdgpu_ring_write(ring, ib->length_dw); in sdma_v6_0_ring_emit_ib()
983 struct amdgpu_ib ib; in sdma_v6_0_ring_test_ib() local
991 memset(&ib, 0, sizeof(ib)); in sdma_v6_0_ring_test_ib()
1002 r = amdgpu_ib_get(adev, NULL, 256, AMDGPU_IB_POOL_DIRECT, &ib); in sdma_v6_0_ring_test_ib()
1008 ib.ptr[0] = SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_WRITE) | in sdma_v6_0_ring_test_ib()
1010 ib.ptr[1] = lower_32_bits(gpu_addr); in sdma_v6_0_ring_test_ib()
1011 ib.ptr[2] = upper_32_bits(gpu_addr); in sdma_v6_0_ring_test_ib()
[all …]
amdgpu_ring.h
233 struct amdgpu_ib *ib);
236 struct amdgpu_ib *ib);
243 struct amdgpu_ib *ib,
263 void (*pad_ib)(struct amdgpu_ring *ring, struct amdgpu_ib *ib);
423 #define amdgpu_ring_parse_cs(r, p, job, ib) ((r)->funcs->parse_cs((p), (job), (ib))) argument
424 #define amdgpu_ring_patch_cs_in_place(r, p, job, ib) ((r)->funcs->patch_cs_in_place((p), (job), (ib argument
430 #define amdgpu_ring_emit_ib(r, job, ib, flags) ((r)->funcs->emit_ib((r), (job), (ib), (flags))) argument
444 #define amdgpu_ring_pad_ib(r, ib) ((r)->funcs->pad_ib((r), (ib))) argument
461 void amdgpu_ring_generic_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib);
549 static inline u32 amdgpu_ib_get_value(struct amdgpu_ib *ib, int idx) in amdgpu_ib_get_value() argument
[all …]
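The amdgpu_ring.h hits show how the per-ASIC IB operations are reached: the amdgpu_ring_* wrappers are macros that forward to callbacks in the ring's function table. A minimal illustration of that dispatch, using the pad_ib hook whose prototype appears in the excerpt (the function name here is hypothetical):

/* What amdgpu_ring_pad_ib(ring, ib) expands to: the wrapper simply calls
 * the ASIC-specific pad_ib callback installed in ring->funcs. */
static void example_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
{
	ring->funcs->pad_ib(ring, ib);
}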
amdgpu_jpeg.c
193 struct amdgpu_ib *ib; in amdgpu_jpeg_dec_set_reg() local
204 ib = &job->ibs[0]; in amdgpu_jpeg_dec_set_reg()
206 ib->ptr[0] = PACKETJ(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0, 0, PACKETJ_TYPE0); in amdgpu_jpeg_dec_set_reg()
207 ib->ptr[1] = 0xDEADBEEF; in amdgpu_jpeg_dec_set_reg()
209 ib->ptr[i] = PACKETJ(0, 0, 0, PACKETJ_TYPE6); in amdgpu_jpeg_dec_set_reg()
210 ib->ptr[i+1] = 0; in amdgpu_jpeg_dec_set_reg()
212 ib->length_dw = 16; in amdgpu_jpeg_dec_set_reg()
566 struct amdgpu_ib *ib) in amdgpu_jpeg_dec_parse_cs() argument
571 for (i = 0; i < ib->length_dw ; i += 2) { in amdgpu_jpeg_dec_parse_cs()
572 reg = CP_PACKETJ_GET_REG(ib->ptr[i]); in amdgpu_jpeg_dec_parse_cs()
[all …]
amdgpu_sdma.h
150 void (*emit_copy_buffer)(struct amdgpu_ib *ib,
166 void (*emit_fill_buffer)(struct amdgpu_ib *ib,
178 #define amdgpu_emit_copy_buffer(adev, ib, s, d, b, t) (adev)->mman.buffer_funcs->emit_copy_buffer(( argument
179 #define amdgpu_emit_fill_buffer(adev, ib, s, d, b) (adev)->mman.buffer_funcs->emit_fill_buffer((ib)… argument
vcn_sw_ring.c
45 struct amdgpu_ib *ib, uint32_t flags) in vcn_dec_sw_ring_emit_ib() argument
51 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr)); in vcn_dec_sw_ring_emit_ib()
52 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in vcn_dec_sw_ring_emit_ib()
53 amdgpu_ring_write(ring, ib->length_dw); in vcn_dec_sw_ring_emit_ib()
amdgpu_vpe.c
526 struct amdgpu_ib *ib, in vpe_ring_emit_ib() argument
536 amdgpu_ring_write(ring, ib->gpu_addr & 0xffffffe0); in vpe_ring_emit_ib()
537 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in vpe_ring_emit_ib()
538 amdgpu_ring_write(ring, ib->length_dw); in vpe_ring_emit_ib()
820 struct amdgpu_ib ib = {}; in vpe_ring_test_ib() local
835 ret = amdgpu_ib_get(adev, NULL, 256, AMDGPU_IB_POOL_DIRECT, &ib); in vpe_ring_test_ib()
839 ib.ptr[0] = VPE_CMD_HEADER(VPE_CMD_OPCODE_FENCE, 0); in vpe_ring_test_ib()
840 ib.ptr[1] = lower_32_bits(wb_addr); in vpe_ring_test_ib()
841 ib.ptr[2] = upper_32_bits(wb_addr); in vpe_ring_test_ib()
842 ib.ptr[3] = test_pattern; in vpe_ring_test_ib()
[all …]
/linux/fs/smb/server/
ksmbd_work.c
98 static inline void __ksmbd_iov_pin(struct ksmbd_work *work, void *ib, in __ksmbd_iov_pin() argument
101 work->iov[++work->iov_idx].iov_base = ib; in __ksmbd_iov_pin()
106 static int __ksmbd_iov_pin_rsp(struct ksmbd_work *work, void *ib, int len, in __ksmbd_iov_pin_rsp() argument
142 __ksmbd_iov_pin(work, ib, len); in __ksmbd_iov_pin_rsp()
156 int ksmbd_iov_pin_rsp(struct ksmbd_work *work, void *ib, int len) in ksmbd_iov_pin_rsp() argument
158 return __ksmbd_iov_pin_rsp(work, ib, len, NULL, 0); in ksmbd_iov_pin_rsp()
161 int ksmbd_iov_pin_rsp_read(struct ksmbd_work *work, void *ib, int len, in ksmbd_iov_pin_rsp_read() argument
164 return __ksmbd_iov_pin_rsp(work, ib, len, aux_buf, aux_size); in ksmbd_iov_pin_rsp_read()
/linux/fs/smb/client/
smbdirect.c
509 ib_dma_unmap_single(sc->ib.dev, in send_done()
922 sc->ib.dev = sc->rdma.cm_id->device; in smbd_ia_open()
924 if (!frwr_is_supported(&sc->ib.dev->attrs)) { in smbd_ia_open()
927 sc->ib.dev->attrs.device_cap_flags, in smbd_ia_open()
928 sc->ib.dev->attrs.max_fast_reg_page_list_len); in smbd_ia_open()
934 sc->ib.dev->attrs.max_fast_reg_page_list_len); in smbd_ia_open()
936 if (sc->ib.dev->attrs.kernel_cap_flags & IBK_SG_GAPS_REG) in smbd_ia_open()
981 sc->ib.dev, (void *)packet, in smbd_post_send_negotiate_req()
983 if (ib_dma_mapping_error(sc->ib.dev, request->sge[0].addr)) { in smbd_post_send_negotiate_req()
989 request->sge[0].lkey = sc->ib.pd->local_dma_lkey; in smbd_post_send_negotiate_req()
[all …]
/linux/drivers/net/ethernet/brocade/bna/
bna_hw_defs.h
236 struct bna_ib *ib = _ib; \
237 if ((ib->intr_type == BNA_INTR_T_INTX)) { \
239 intx_mask &= ~(ib->intr_vector); \
242 bna_ib_coalescing_timer_set(&ib->door_bell, \
243 ib->coalescing_timeo); \
245 bna_ib_ack(&ib->door_bell, 0); \
251 struct bna_ib *ib = _ib; \
253 ib->door_bell.doorbell_addr); \
254 if (ib->intr_type == BNA_INTR_T_INTX) { \
256 intx_mask |= ib->intr_vector; \
/linux/drivers/iio/
industrialio-buffer.c
143 struct iio_dev_buffer_pair *ib = filp->private_data; in iio_buffer_read() local
144 struct iio_buffer *rb = ib->buffer; in iio_buffer_read()
145 struct iio_dev *indio_dev = ib->indio_dev; in iio_buffer_read()
212 struct iio_dev_buffer_pair *ib = filp->private_data; in iio_buffer_write() local
213 struct iio_buffer *rb = ib->buffer; in iio_buffer_write()
214 struct iio_dev *indio_dev = ib->indio_dev; in iio_buffer_write()
275 struct iio_dev_buffer_pair *ib = filp->private_data; in iio_buffer_poll() local
276 struct iio_buffer *rb = ib->buffer; in iio_buffer_poll()
277 struct iio_dev *indio_dev = ib->indio_dev; in iio_buffer_poll()
301 struct iio_dev_buffer_pair *ib = filp->private_data; in iio_buffer_read_wrapper() local
[all …]
/linux/drivers/tee/qcomtee/
core.c
401 int i, ib, ob, io, oo; in qcomtee_prepare_msg() local
415 ib = 0; in qcomtee_prepare_msg()
419 msg->args[ib].b.offset = offset; in qcomtee_prepare_msg()
420 msg->args[ib].b.size = u[i].b.size; in qcomtee_prepare_msg()
430 ib++; in qcomtee_prepare_msg()
433 ob = ib; in qcomtee_prepare_msg()
461 qcomtee_msg_init(msg, object_id, op, ib, ob, io, oo); in qcomtee_prepare_msg()
484 int i, ib, ob, io, oo; in qcomtee_update_args() local
490 ib = 0; in qcomtee_update_args()
492 ib++; in qcomtee_update_args()
[all …]
/linux/arch/arm/boot/dts/marvell/
kirkwood-ib62x0.dts
9 …compatible = "raidsonic,ib-nas6210-b", "raidsonic,ib-nas6220-b", "raidsonic,ib-nas6210", "raidsoni…
/linux/drivers/pinctrl/meson/
pinctrl-meson.h
146 #define BANK_DS(n, f, l, fi, li, per, peb, pr, pb, dr, db, or, ob, ir, ib, \ argument
159 [MESON_REG_IN] = { ir, ib }, \
164 #define BANK(n, f, l, fi, li, per, peb, pr, pb, dr, db, or, ob, ir, ib) \ argument
165 BANK_DS(n, f, l, fi, li, per, peb, pr, pb, dr, db, or, ob, ir, ib, 0, 0)
