
Searched refs:ib (Results 1 – 25 of 101) sorted by relevance


/linux/drivers/gpu/drm/radeon/
radeon_ib.c
61 struct radeon_ib *ib, struct radeon_vm *vm, in radeon_ib_get() argument
66 r = radeon_sa_bo_new(&rdev->ring_tmp_bo, &ib->sa_bo, size, 256); in radeon_ib_get()
72 radeon_sync_create(&ib->sync); in radeon_ib_get()
74 ib->ring = ring; in radeon_ib_get()
75 ib->fence = NULL; in radeon_ib_get()
76 ib->ptr = radeon_sa_bo_cpu_addr(ib->sa_bo); in radeon_ib_get()
77 ib->vm = vm; in radeon_ib_get()
82 ib->gpu_addr = drm_suballoc_soffset(ib->sa_bo) + RADEON_VA_IB_OFFSET; in radeon_ib_get()
84 ib->gpu_addr = radeon_sa_bo_gpu_addr(ib->sa_bo); in radeon_ib_get()
86 ib->is_const_ib = false; in radeon_ib_get()
[all …]
si_dma.c
69 struct radeon_ib *ib, in si_dma_vm_copy_pages() argument
78 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in si_dma_vm_copy_pages()
80 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pages()
81 ib->ptr[ib->length_dw++] = lower_32_bits(src); in si_dma_vm_copy_pages()
82 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_copy_pages()
83 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; in si_dma_vm_copy_pages()
105 struct radeon_ib *ib, in si_dma_vm_write_pages() argument
119 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pages()
120 ib->ptr[ib->length_dw++] = pe; in si_dma_vm_write_pages()
121 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_write_pages()
[all …]
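
The si_dma.c and ni_dma.c hits share one idiom: commands are appended to the indirect buffer a dword at a time through ib->ptr[ib->length_dw++]. Below is a minimal sketch of that pattern modeled on si_dma_vm_write_pages(); the helper name is hypothetical, and DMA_PACKET() is the SI-local five-argument variant shown in the snippet.

    /* Emit a WRITE packet header: opcode plus payload count, then the
     * 40-bit destination address split into a low dword and a high
     * byte. The caller appends ndw payload dwords afterwards. */
    static void example_emit_write_header(struct radeon_ib *ib,
                                          uint64_t pe, unsigned ndw)
    {
            ib->ptr[ib->length_dw++] =
                    DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);
            ib->ptr[ib->length_dw++] = lower_32_bits(pe);        /* bits 0-31 */
            ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; /* bits 32-39 */
    }
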
ni_dma.c
122 struct radeon_ib *ib) in cayman_dma_ring_ib_execute() argument
124 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cayman_dma_ring_ib_execute()
125 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in cayman_dma_ring_ib_execute()
144 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in cayman_dma_ring_ib_execute()
145 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute()
315 struct radeon_ib *ib, in cayman_dma_vm_copy_pages() argument
326 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in cayman_dma_vm_copy_pages()
328 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cayman_dma_vm_copy_pages()
329 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cayman_dma_vm_copy_pages()
330 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_copy_pages()
[all …]
cik_sdma.c
133 struct radeon_ib *ib) in cik_sdma_ring_ib_execute() argument
135 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cik_sdma_ring_ib_execute()
136 u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf; in cik_sdma_ring_ib_execute()
154 radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_ib_execute()
155 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr)); in cik_sdma_ring_ib_execute()
156 radeon_ring_write(ring, ib->length_dw); in cik_sdma_ring_ib_execute()
703 struct radeon_ib ib; in cik_sdma_ib_test() local
720 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); in cik_sdma_ib_test()
726 ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0); in cik_sdma_ib_test()
727 ib.ptr[1] = lower_32_bits(gpu_addr); in cik_sdma_ib_test()
[all …]
radeon_vm.c
359 struct radeon_ib *ib, in radeon_vm_set_pages() argument
368 radeon_asic_vm_copy_pages(rdev, ib, pe, src, count); in radeon_vm_set_pages()
371 radeon_asic_vm_write_pages(rdev, ib, pe, addr, in radeon_vm_set_pages()
375 radeon_asic_vm_set_pages(rdev, ib, pe, addr, in radeon_vm_set_pages()
390 struct radeon_ib ib; in radeon_vm_clear_bo() local
406 r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, 256); in radeon_vm_clear_bo()
410 ib.length_dw = 0; in radeon_vm_clear_bo()
412 radeon_vm_set_pages(rdev, &ib, addr, 0, entries, 0, 0); in radeon_vm_clear_bo()
413 radeon_asic_vm_pad_ib(rdev, &ib); in radeon_vm_clear_bo()
414 WARN_ON(ib.length_dw > 64); in radeon_vm_clear_bo()
[all …]
r600_dma.c
338 struct radeon_ib ib; in r600_dma_ib_test() local
352 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); in r600_dma_ib_test()
358 ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1); in r600_dma_ib_test()
359 ib.ptr[1] = lower_32_bits(gpu_addr); in r600_dma_ib_test()
360 ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff; in r600_dma_ib_test()
361 ib.ptr[3] = 0xDEADBEEF; in r600_dma_ib_test()
362 ib.length_dw = 4; in r600_dma_ib_test()
364 r = radeon_ib_schedule(rdev, &ib, NULL, false); in r600_dma_ib_test()
366 radeon_ib_free(rdev, &ib); in r600_dma_ib_test()
370 r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies( in r600_dma_ib_test()
[all …]
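
Taken together, the radeon hits trace the full IB lifecycle: allocate from the ring's suballocator, pack dwords, schedule on a ring, wait on the resulting fence, free. A condensed, hedged sketch of r600_dma_ib_test() from the snippet above; the timeout argument is truncated there, so a one-second value is assumed here.

    static int example_ib_round_trip(struct radeon_device *rdev,
                                     struct radeon_ring *ring, u64 gpu_addr)
    {
            struct radeon_ib ib;
            long t;
            int r;

            r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
            if (r)
                    return r;

            /* one-dword write to gpu_addr (r600-style 4-argument packet) */
            ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1);
            ib.ptr[1] = lower_32_bits(gpu_addr);
            ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;
            ib.ptr[3] = 0xDEADBEEF;        /* pattern to read back */
            ib.length_dw = 4;

            r = radeon_ib_schedule(rdev, &ib, NULL, false);
            if (r)
                    goto out;

            /* block until the DMA engine has consumed the IB */
            t = radeon_fence_wait_timeout(ib.fence, false,
                                          usecs_to_jiffies(1000000));
            if (t <= 0)
                    r = t ? (int)t : -ETIMEDOUT;
    out:
            radeon_ib_free(rdev, &ib);
            return r;
    }
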
evergreen_dma.c
67 struct radeon_ib *ib) in evergreen_dma_ring_ib_execute() argument
69 struct radeon_ring *ring = &rdev->ring[ib->ring]; in evergreen_dma_ring_ib_execute()
88 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in evergreen_dma_ring_ib_execute()
89 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in evergreen_dma_ring_ib_execute()
/linux/drivers/net/ethernet/amd/
7990.c
100 t, ib->brx_ring[t].rmd1_hadr, ib->brx_ring[t].rmd0, \
101 ib->brx_ring[t].length, \
102 ib->brx_ring[t].mblength, ib->brx_ring[t].rmd1_bits); \
106 t, ib->btx_ring[t].tmd1_hadr, ib->btx_ring[t].tmd0, \
107 ib->btx_ring[t].length, \
108 ib->btx_ring[t].misc, ib->btx_ring[t].tmd1_bits); \
140 volatile struct lance_init_block *ib = lp->init_block; in lance_init_ring() local
150 ib->mode = LE_MO_PROM; /* normal, enable Tx & Rx */ in lance_init_ring()
163 ib->phys_addr[0] = dev->dev_addr[1]; in lance_init_ring()
164 ib->phys_addr[1] = dev->dev_addr[0]; in lance_init_ring()
[all …]
sunlance.c
319 struct lance_init_block *ib = lp->init_block_mem; in lance_init_ring_dvma() local
332 ib->phys_addr [0] = dev->dev_addr [1]; in lance_init_ring_dvma()
333 ib->phys_addr [1] = dev->dev_addr [0]; in lance_init_ring_dvma()
334 ib->phys_addr [2] = dev->dev_addr [3]; in lance_init_ring_dvma()
335 ib->phys_addr [3] = dev->dev_addr [2]; in lance_init_ring_dvma()
336 ib->phys_addr [4] = dev->dev_addr [5]; in lance_init_ring_dvma()
337 ib->phys_addr [5] = dev->dev_addr [4]; in lance_init_ring_dvma()
342 ib->btx_ring [i].tmd0 = leptr; in lance_init_ring_dvma()
343 ib->btx_ring [i].tmd1_hadr = leptr >> 16; in lance_init_ring_dvma()
344 ib->btx_ring [i].tmd1_bits = 0; in lance_init_ring_dvma()
[all …]
a2065.c
149 volatile struct lance_init_block *ib = lp->init_block; in lance_init_ring() local
160 ib->mode = 0; in lance_init_ring()
165 ib->phys_addr[0] = dev->dev_addr[1]; in lance_init_ring()
166 ib->phys_addr[1] = dev->dev_addr[0]; in lance_init_ring()
167 ib->phys_addr[2] = dev->dev_addr[3]; in lance_init_ring()
168 ib->phys_addr[3] = dev->dev_addr[2]; in lance_init_ring()
169 ib->phys_addr[4] = dev->dev_addr[5]; in lance_init_ring()
170 ib->phys_addr[5] = dev->dev_addr[4]; in lance_init_ring()
176 ib->btx_ring[i].tmd0 = leptr; in lance_init_ring()
177 ib->btx_ring[i].tmd1_hadr = leptr >> 16; in lance_init_ring()
[all …]
declance.c
235 #define lib_ptr(ib, rt, type) \ argument
236 ((volatile u16 *)((u8 *)(ib) + lib_off(rt, type)))
453 volatile u16 *ib = (volatile u16 *)dev->mem_start; in lance_init_ring() local
465 *lib_ptr(ib, phys_addr[0], lp->type) = (dev->dev_addr[1] << 8) | in lance_init_ring()
467 *lib_ptr(ib, phys_addr[1], lp->type) = (dev->dev_addr[3] << 8) | in lance_init_ring()
469 *lib_ptr(ib, phys_addr[2], lp->type) = (dev->dev_addr[5] << 8) | in lance_init_ring()
475 *lib_ptr(ib, rx_len, lp->type) = (LANCE_LOG_RX_BUFFERS << 13) | in lance_init_ring()
477 *lib_ptr(ib, rx_ptr, lp->type) = leptr; in lance_init_ring()
484 *lib_ptr(ib, tx_len, lp->type) = (LANCE_LOG_TX_BUFFERS << 13) | in lance_init_ring()
486 *lib_ptr(ib, tx_ptr, lp->type) = leptr; in lance_init_ring()
559 volatile u16 *ib = (volatile u16 *)dev->mem_start; lance_rx() local
646 volatile u16 *ib = (volatile u16 *)dev->mem_start; lance_tx() local
782 volatile u16 *ib = (volatile u16 *)dev->mem_start; lance_open() local
902 volatile u16 *ib = (volatile u16 *)dev->mem_start; lance_start_xmit() local
946 volatile u16 *ib = (volatile u16 *)dev->mem_start; lance_load_multicast() local
975 volatile u16 *ib = (volatile u16 *)dev->mem_start; lance_set_multicast() local
[all …]
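
The lib_ptr() macro at declance.c line 235 above is what makes these hits tick: the LANCE init block sits behind a bus that is only safe to access 16 bits at a time, so the macro turns a field designator into a volatile u16 pointer at the correct byte offset for the board type. A hedged fragment in the spirit of lance_init_ring(); dev and lp are the net_device and driver-private pointers used throughout the snippets.

    volatile u16 *ib = (volatile u16 *)dev->mem_start;

    /* each access expands to one 16-bit cycle, e.g.
     * (volatile u16 *)((u8 *)(ib) + lib_off(rx_ptr, lp->type)) */
    *lib_ptr(ib, rx_ptr, lp->type) = leptr;              /* rx ring base */
    *lib_ptr(ib, phys_addr[0], lp->type) =
            (dev->dev_addr[1] << 8) | dev->dev_addr[0];  /* swapped MAC */
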
/linux/drivers/infiniband/hw/mlx4/
ah.c
48 ah->av.ib.port_pd = cpu_to_be32(to_mpd(ib_ah->pd)->pdn | in create_ib_ah()
50 ah->av.ib.g_slid = rdma_ah_get_path_bits(ah_attr); in create_ib_ah()
51 ah->av.ib.sl_tclass_flowlabel = in create_ib_ah()
56 ah->av.ib.g_slid |= 0x80; in create_ib_ah()
57 ah->av.ib.gid_index = grh->sgid_index; in create_ib_ah()
58 ah->av.ib.hop_limit = grh->hop_limit; in create_ib_ah()
59 ah->av.ib.sl_tclass_flowlabel |= in create_ib_ah()
62 memcpy(ah->av.ib.dgid, grh->dgid.raw, 16); in create_ib_ah()
65 ah->av.ib.dlid = cpu_to_be16(rdma_ah_get_dlid(ah_attr)); in create_ib_ah()
73 ah->av.ib.stat_rate = static_rate; in create_ib_ah()
[all …]
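
In the mlx4 hit, ib selects the InfiniBand member of the address-vector union in the driver's AH; line 56 above ORs the "GRH present" flag into the top bit of the byte that also carries the source path bits. A hedged sketch of just that manipulation, assuming the rdma_ah_attr accessors visible in the snippet and a grh obtained via rdma_ah_read_grh():

    static void example_set_grh_bits(struct mlx4_ib_ah *ah,
                                     struct rdma_ah_attr *ah_attr)
    {
            ah->av.ib.g_slid = rdma_ah_get_path_bits(ah_attr); /* bits 0-6 */
            if (rdma_ah_get_ah_flags(ah_attr) & IB_AH_GRH) {
                    const struct ib_global_route *grh =
                            rdma_ah_read_grh(ah_attr);

                    ah->av.ib.g_slid |= 0x80;        /* bit 7: GRH present */
                    ah->av.ib.gid_index = grh->sgid_index;
                    ah->av.ib.hop_limit = grh->hop_limit;
            }
    }
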
/linux/drivers/gpu/drm/amd/amdgpu/
sdma_v6_0.c
259 * @ib: IB object to schedule
267 struct amdgpu_ib *ib, in sdma_v6_0_ring_emit_ib()
286 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v6_0_ring_emit_ib()
287 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v6_0_ring_emit_ib()
288 amdgpu_ring_write(ring, ib->length_dw); in sdma_v6_0_ring_emit_ib()
984 struct amdgpu_ib ib; in sdma_v6_0_ring_test_ib()
992 memset(&ib, 0, sizeof(ib)); in sdma_v6_0_ring_test_ib()
1003 r = amdgpu_ib_get(adev, NULL, 256, AMDGPU_IB_POOL_DIRECT, &ib); in sdma_v6_0_ring_test_ib()
1005 drm_err(adev_to_drm(adev), "failed to get ib ( in sdma_v6_0_ring_test_ib()
266 sdma_v6_0_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags) sdma_v6_0_ring_emit_ib() argument
983 struct amdgpu_ib ib; sdma_v6_0_ring_test_ib() local
1059 sdma_v6_0_vm_copy_pte(struct amdgpu_ib *ib, uint64_t pe, uint64_t src, unsigned count) sdma_v6_0_vm_copy_pte() argument
1087 sdma_v6_0_vm_write_pte(struct amdgpu_ib *ib, uint64_t pe, uint64_t value, unsigned count, uint32_t incr) sdma_v6_0_vm_write_pte() argument
1117 sdma_v6_0_vm_set_pte_pde(struct amdgpu_ib *ib, uint64_t pe, uint64_t addr, unsigned count, uint32_t incr, uint64_t flags) sdma_v6_0_vm_set_pte_pde() argument
1142 sdma_v6_0_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib) sdma_v6_0_ring_pad_ib() argument
1837 sdma_v6_0_emit_copy_buffer(struct amdgpu_ib *ib, uint64_t src_offset, uint64_t dst_offset, uint32_t byte_count, uint32_t copy_flags) sdma_v6_0_emit_copy_buffer() argument
1864 sdma_v6_0_emit_fill_buffer(struct amdgpu_ib *ib, uint32_t src_data, uint64_t dst_offset, uint32_t byte_count) sdma_v6_0_emit_fill_buffer() argument
[all …]
amdgpu_ib.c
66 struct amdgpu_ib *ib) in amdgpu_ib_get() argument
72 &ib->sa_bo, size); in amdgpu_ib_get()
78 ib->ptr = amdgpu_sa_bo_cpu_addr(ib->sa_bo); in amdgpu_ib_get()
80 ib->flags = AMDGPU_IB_FLAG_EMIT_MEM_SYNC; in amdgpu_ib_get()
83 ib->gpu_addr = amdgpu_sa_bo_gpu_addr(ib->sa_bo); in amdgpu_ib_get()
97 void amdgpu_ib_free(struct amdgpu_ib *ib, struct dma_fence *f) in amdgpu_ib_free() argument
99 amdgpu_sa_bo_free(&ib->sa_bo, f); in amdgpu_ib_free()
129 struct amdgpu_ib *ib = &ibs[0]; in amdgpu_ib_schedule() local
189 if ((ib->flags & AMDGPU_IB_FLAGS_SECURE) && in amdgpu_ib_schedule()
218 if ((ib->flags & AMDGPU_IB_FLAG_EMIT_MEM_SYNC) && ring->funcs->emit_mem_sync) in amdgpu_ib_schedule()
[all …]
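
The amdgpu side mirrors the radeon lifecycle but makes the allocation pool explicit. A minimal sketch assuming the signatures visible in the amdgpu_ib.c and sdma_v6_0.c hits: amdgpu_ib_get() takes a pool enum (and a NULL vm for direct allocations), and amdgpu_ib_free() takes the fence the suballocation must outlive.

    static int example_direct_ib(struct amdgpu_device *adev,
                                 struct dma_fence *fence)
    {
            struct amdgpu_ib ib;
            int r;

            memset(&ib, 0, sizeof(ib));
            r = amdgpu_ib_get(adev, NULL, 256, AMDGPU_IB_POOL_DIRECT, &ib);
            if (r)
                    return r;

            /* ... fill ib.ptr[] / ib.length_dw and submit via a job ... */

            /* the suballocation is recycled once 'fence' signals */
            amdgpu_ib_free(&ib, fence);
            return 0;
    }
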
amdgpu_ring.h
240 struct amdgpu_ib *ib);
243 struct amdgpu_ib *ib);
250 struct amdgpu_ib *ib,
270 void (*pad_ib)(struct amdgpu_ring *ring, struct amdgpu_ib *ib);
430 #define amdgpu_ring_parse_cs(r, p, job, ib) ((r)->funcs->parse_cs((p), (job), (ib)))
431 #define amdgpu_ring_patch_cs_in_place(r, p, job, ib) ((r)->funcs->patch_cs_in_place((p), (job), (ib)))
437 #define amdgpu_ring_emit_ib(r, job, ib, flags) ((r)->funcs->emit_ib((r), (job), (ib), (flags)))
428 amdgpu_ring_parse_cs(r, p, job, ib) global() argument
429 amdgpu_ring_patch_cs_in_place(r, p, job, ib) global() argument
435 amdgpu_ring_emit_ib(r, job, ib, flags) global() argument
449 amdgpu_ring_pad_ib(r, ib) global() argument
554 amdgpu_ib_get_value(struct amdgpu_ib *ib, int idx) amdgpu_ib_get_value() argument
559 amdgpu_ib_set_value(struct amdgpu_ib *ib, int idx, uint32_t value) amdgpu_ib_set_value() argument
[all …]
amdgpu_jpeg.c
193 struct amdgpu_ib *ib; in amdgpu_jpeg_dec_set_reg() local
204 ib = &job->ibs[0]; in amdgpu_jpeg_dec_set_reg()
206 ib->ptr[0] = PACKETJ(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0, 0, PACKETJ_TYPE0); in amdgpu_jpeg_dec_set_reg()
207 ib->ptr[1] = 0xDEADBEEF; in amdgpu_jpeg_dec_set_reg()
209 ib->ptr[i] = PACKETJ(0, 0, 0, PACKETJ_TYPE6); in amdgpu_jpeg_dec_set_reg()
210 ib->ptr[i+1] = 0; in amdgpu_jpeg_dec_set_reg()
212 ib->length_dw = 16; in amdgpu_jpeg_dec_set_reg()
566 struct amdgpu_ib *ib) in amdgpu_jpeg_dec_parse_cs() argument
571 for (i = 0; i < ib->length_dw ; i += 2) { in amdgpu_jpeg_dec_parse_cs()
572 reg = CP_PACKETJ_GET_REG(ib->ptr[i]); in amdgpu_jpeg_dec_parse_cs()
[all …]
amdgpu_sdma.h
163 void (*emit_copy_buffer)(struct amdgpu_ib *ib,
179 void (*emit_fill_buffer)(struct amdgpu_ib *ib, argument
191 #define amdgpu_emit_copy_buffer(adev, ib, s, d, b, t) (adev)->mman.buffer_funcs->emit_copy_buffer((ib), (s), (d), (b), (t))
192 #define amdgpu_emit_fill_buffer(adev, ib, s, d, b) (adev)->mman.buffer_funcs->emit_fill_buffer((ib), (s), (d), (b))
178 amdgpu_emit_copy_buffer(adev, ib, s, d, b, t) global() argument
vcn_sw_ring.c
45 struct amdgpu_ib *ib, uint32_t flags) in vcn_dec_sw_ring_emit_ib() argument
51 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr)); in vcn_dec_sw_ring_emit_ib()
52 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in vcn_dec_sw_ring_emit_ib()
53 amdgpu_ring_write(ring, ib->length_dw); in vcn_dec_sw_ring_emit_ib()
/linux/fs/smb/server/
transport_rdma.c
187 ib_dma_unmap_single(sc->ib.dev,
440 sc->ib.dev = sc->rdma.cm_id->device; in alloc_transport()
494 if (sc->ib.qp) { in free_transport()
495 ib_drain_qp(sc->ib.qp); in free_transport()
496 sc->ib.qp = NULL; in free_transport()
516 if (sc->ib.send_cq)
517 ib_free_cq(sc->ib.send_cq);
518 if (sc->ib.recv_cq) in smb_direct_alloc_sendmsg()
519 ib_free_cq(sc->ib.recv_cq); in smb_direct_alloc_sendmsg()
520 if (sc->ib in smb_direct_alloc_sendmsg()
[all …]
ksmbd_work.c
98 static inline void __ksmbd_iov_pin(struct ksmbd_work *work, void *ib, in __ksmbd_iov_pin() argument
101 work->iov[++work->iov_idx].iov_base = ib; in __ksmbd_iov_pin()
106 static int __ksmbd_iov_pin_rsp(struct ksmbd_work *work, void *ib, int len, in __ksmbd_iov_pin_rsp() argument
142 __ksmbd_iov_pin(work, ib, len); in __ksmbd_iov_pin_rsp()
156 int ksmbd_iov_pin_rsp(struct ksmbd_work *work, void *ib, int len) in ksmbd_iov_pin_rsp() argument
158 return __ksmbd_iov_pin_rsp(work, ib, len, NULL, 0); in ksmbd_iov_pin_rsp()
161 int ksmbd_iov_pin_rsp_read(struct ksmbd_work *work, void *ib, int len, in ksmbd_iov_pin_rsp_read() argument
164 return __ksmbd_iov_pin_rsp(work, ib, len, aux_buf, aux_size); in ksmbd_iov_pin_rsp_read()
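
In ksmbd, ib is not an indirect buffer at all; it is a response-buffer pointer being pinned into the work's iov array until transmission completes. A hedged sketch of a caller, assuming a response already assembled in work->response_buf; resp_len, data_buf, and data_len are illustrative.

    static int example_pin_response(struct ksmbd_work *work, int resp_len,
                                    void *data_buf, int data_len)
    {
            if (!data_buf)      /* header-only response: one iov entry */
                    return ksmbd_iov_pin_rsp(work, work->response_buf,
                                             resp_len);

            /* header plus auxiliary read payload as extra iov entries */
            return ksmbd_iov_pin_rsp_read(work, work->response_buf, resp_len,
                                          data_buf, data_len);
    }
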
/linux/fs/smb/client/
smbdirect.c
552 ib_dma_unmap_page(sc->ib.dev, in process_negotiation_response()
1060 sc->ib.dev = sc->rdma.cm_id->device;
1062 if (!frwr_is_supported(&sc->ib.dev->attrs)) { in manage_keep_alive_before_sending()
1065 sc->ib.dev->attrs.device_cap_flags, in manage_keep_alive_before_sending()
1066 sc->ib.dev->attrs.max_fast_reg_page_list_len); in manage_keep_alive_before_sending()
1072 sc->ib.dev->attrs.max_fast_reg_page_list_len); in manage_keep_alive_before_sending()
1074 if (sc->ib.dev->attrs.kernel_cap_flags & IBK_SG_GAPS_REG) in manage_keep_alive_before_sending()
1115 sc->ib.dev, (void *)packet, in smbd_post_send_iter()
1117 if (ib_dma_mapping_error(sc->ib.dev, request->sge[0].addr)) { in smbd_post_send_iter()
1124 request->sge[0].lkey = sc->ib in smbd_post_send_iter()
[all …]
/linux/drivers/net/ethernet/brocade/bna/
bna_hw_defs.h
236 struct bna_ib *ib = _ib; \
237 if ((ib->intr_type == BNA_INTR_T_INTX)) { \
239 intx_mask &= ~(ib->intr_vector); \
242 bna_ib_coalescing_timer_set(&ib->door_bell, \
243 ib->coalescing_timeo); \
245 bna_ib_ack(&ib->door_bell, 0); \
251 struct bna_ib *ib = _ib; \
253 ib->door_bell.doorbell_addr); \
254 if (ib->intr_type == BNA_INTR_T_INTX) { \
256 intx_mask |= ib->intr_vector; \
/linux/drivers/iio/
industrialio-buffer.c
143 struct iio_dev_buffer_pair *ib = filp->private_data; in iio_buffer_read() local
144 struct iio_buffer *rb = ib->buffer; in iio_buffer_read()
145 struct iio_dev *indio_dev = ib->indio_dev; in iio_buffer_read()
212 struct iio_dev_buffer_pair *ib = filp->private_data; in iio_buffer_write() local
213 struct iio_buffer *rb = ib->buffer; in iio_buffer_write()
214 struct iio_dev *indio_dev = ib->indio_dev; in iio_buffer_write()
275 struct iio_dev_buffer_pair *ib = filp->private_data; in iio_buffer_poll() local
276 struct iio_buffer *rb = ib->buffer; in iio_buffer_poll()
277 struct iio_dev *indio_dev = ib->indio_dev; in iio_buffer_poll()
301 struct iio_dev_buffer_pair *ib = filp->private_data; in iio_buffer_read_wrapper() local
[all …]
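
Every file operation in the industrialio-buffer.c hits opens the same way: ib is the struct iio_dev_buffer_pair stashed in filp->private_data when the buffer chardev was opened, pairing the file with one specific buffer of the device. A condensed sketch of that shared prologue; the function name is hypothetical.

    static ssize_t example_buffer_fop(struct file *filp, char __user *buf,
                                      size_t n, loff_t *f_ps)
    {
            struct iio_dev_buffer_pair *ib = filp->private_data;
            struct iio_buffer *rb = ib->buffer;
            struct iio_dev *indio_dev = ib->indio_dev;

            /* ... check indio_dev state, then move up to n bytes
             *     between rb and buf ... */
            return 0;
    }
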
/linux/drivers/tee/qcomtee/
core.c
401 int i, ib, ob, io, oo; in qcomtee_prepare_msg() local
415 ib = 0; in qcomtee_prepare_msg()
419 msg->args[ib].b.offset = offset; in qcomtee_prepare_msg()
420 msg->args[ib].b.size = u[i].b.size; in qcomtee_prepare_msg()
430 ib++; in qcomtee_prepare_msg()
433 ob = ib; in qcomtee_prepare_msg()
461 qcomtee_msg_init(msg, object_id, op, ib, ob, io, oo); in qcomtee_prepare_msg()
484 int i, ib, ob, io, oo; in qcomtee_update_args() local
490 ib = 0; in qcomtee_update_args()
492 ib++; in qcomtee_update_args()
[all …]
/linux/arch/arm/boot/dts/marvell/
kirkwood-ib62x0.dts
9 …compatible = "raidsonic,ib-nas6210-b", "raidsonic,ib-nas6220-b", "raidsonic,ib-nas6210", "raidsoni…
