| /linux/drivers/gpu/drm/radeon/ |
| radeon_ib.c |
      61  struct radeon_ib *ib, struct radeon_vm *vm,    in radeon_ib_get() argument
      66  r = radeon_sa_bo_new(&rdev->ring_tmp_bo, &ib->sa_bo, size, 256);    in radeon_ib_get()
      72  radeon_sync_create(&ib->sync);    in radeon_ib_get()
      74  ib->ring = ring;    in radeon_ib_get()
      75  ib->fence = NULL;    in radeon_ib_get()
      76  ib->ptr = radeon_sa_bo_cpu_addr(ib->sa_bo);    in radeon_ib_get()
      77  ib->vm = vm;    in radeon_ib_get()
      82  ib->gpu_addr = drm_suballoc_soffset(ib->sa_bo) + RADEON_VA_IB_OFFSET;    in radeon_ib_get()
      84  ib->gpu_addr = radeon_sa_bo_gpu_addr(ib->sa_bo);    in radeon_ib_get()
      86  ib->is_const_ib = false;    in radeon_ib_get()
      [all …]
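
The radeon_ib_get() hits above cover the whole allocation path for an indirect buffer (IB): a 256-byte-aligned slice of the ring_tmp_bo suballocator, with CPU and GPU addresses resolved from the same suballocation. A minimal sketch reassembled from the matched lines; error handling is trimmed and the function name is a placeholder, not the driver's own:

    int example_ib_get(struct radeon_device *rdev, int ring,
                       struct radeon_ib *ib, struct radeon_vm *vm,
                       unsigned size)
    {
            int r;

            r = radeon_sa_bo_new(&rdev->ring_tmp_bo, &ib->sa_bo, size, 256);
            if (r)
                    return r;                       /* suballocator exhausted */

            radeon_sync_create(&ib->sync);          /* fences this IB must wait on */
            ib->ring = ring;
            ib->fence = NULL;                       /* filled in at schedule time */
            ib->ptr = radeon_sa_bo_cpu_addr(ib->sa_bo);
            ib->vm = vm;
            if (vm)
                    /* With a VM, the IB is reached through a fixed virtual window. */
                    ib->gpu_addr = drm_suballoc_soffset(ib->sa_bo) + RADEON_VA_IB_OFFSET;
            else
                    ib->gpu_addr = radeon_sa_bo_gpu_addr(ib->sa_bo);
            ib->is_const_ib = false;
            return 0;
    }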
|
| ni_dma.c |
     122  struct radeon_ib *ib)    in cayman_dma_ring_ib_execute() argument
     124  struct radeon_ring *ring = &rdev->ring[ib->ring];    in cayman_dma_ring_ib_execute()
     125  unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;    in cayman_dma_ring_ib_execute()
     144  radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));    in cayman_dma_ring_ib_execute()
     145  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));    in cayman_dma_ring_ib_execute()
     315  struct radeon_ib *ib,    in cayman_dma_vm_copy_pages() argument
     326  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,    in cayman_dma_vm_copy_pages()
     328  ib->ptr[ib->length_dw++] = lower_32_bits(pe);    in cayman_dma_vm_copy_pages()
     329  ib->ptr[ib->length_dw++] = lower_32_bits(src);    in cayman_dma_vm_copy_pages()
     330  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;    in cayman_dma_vm_copy_pages()
     [all …]
|
| si_dma.c |
      69  struct radeon_ib *ib,    in si_dma_vm_copy_pages() argument
      78  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,    in si_dma_vm_copy_pages()
      80  ib->ptr[ib->length_dw++] = lower_32_bits(pe);    in si_dma_vm_copy_pages()
      81  ib->ptr[ib->length_dw++] = lower_32_bits(src);    in si_dma_vm_copy_pages()
      82  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;    in si_dma_vm_copy_pages()
      83  ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;    in si_dma_vm_copy_pages()
     105  struct radeon_ib *ib,    in si_dma_vm_write_pages() argument
     119  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);    in si_dma_vm_write_pages()
     120  ib->ptr[ib->length_dw++] = pe;    in si_dma_vm_write_pages()
     121  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;    in si_dma_vm_write_pages()
     [all …]
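
ni_dma.c and si_dma.c build the same 5-dword copy packet for moving page-table entries out of the GART table: header, destination lo/hi, source lo/hi. A sketch of the loop around the matched stores, assuming the driver's DMA_PACKET() header macro; the 0xFFFF8 per-packet byte clamp is an assumption taken from the surrounding driver code, not visible in the hits:

    static void example_vm_copy_pages(struct radeon_ib *ib,
                                      uint64_t pe, uint64_t src, unsigned count)
    {
            while (count) {
                    unsigned bytes = count * 8;     /* 8-byte PTEs */

                    if (bytes > 0xFFFF8)            /* per-packet limit (assumed) */
                            bytes = 0xFFFF8;

                    ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,
                                                          1, 0, 0, bytes);
                    ib->ptr[ib->length_dw++] = lower_32_bits(pe);
                    ib->ptr[ib->length_dw++] = lower_32_bits(src);
                    ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
                    ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;

                    pe += bytes;
                    src += bytes;
                    count -= bytes / 8;
            }
    }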
|
| cik_sdma.c |
     133  struct radeon_ib *ib)    in cik_sdma_ring_ib_execute() argument
     135  struct radeon_ring *ring = &rdev->ring[ib->ring];    in cik_sdma_ring_ib_execute()
     136  u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf;    in cik_sdma_ring_ib_execute()
     154  radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */    in cik_sdma_ring_ib_execute()
     155  radeon_ring_write(ring, upper_32_bits(ib->gpu_addr));    in cik_sdma_ring_ib_execute()
     156  radeon_ring_write(ring, ib->length_dw);    in cik_sdma_ring_ib_execute()
     703  struct radeon_ib ib;    in cik_sdma_ib_test() local
     720  r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);    in cik_sdma_ib_test()
     726  ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0);    in cik_sdma_ib_test()
     727  ib.ptr[1] = lower_32_bits(gpu_addr);    in cik_sdma_ib_test()
     [all …]
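
The cik_sdma_ring_ib_execute() hits show how a finished IB is launched: the ring receives an INDIRECT_BUFFER packet carrying the 32-byte-aligned base, the upper address bits, and the length in dwords, with the VM id folded into the packet's extra bits. A sketch; the SDMA_OPCODE_INDIRECT_BUFFER name is assumed from the same header family as the SDMA_PACKET() hit:

    static void example_sdma_ib_execute(struct radeon_device *rdev,
                                        struct radeon_ib *ib)
    {
            struct radeon_ring *ring = &rdev->ring[ib->ring];
            u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf;

            radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_INDIRECT_BUFFER,
                                                0, extra_bits));
            radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* 32-byte aligned */
            radeon_ring_write(ring, upper_32_bits(ib->gpu_addr));
            radeon_ring_write(ring, ib->length_dw);
    }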
|
| evergreen_cs.c |
     452  uint32_t *ib = p->ib.ptr;    in evergreen_cs_track_validate_cb() local
     474  ib[track->cb_color_slice_idx[id]] = slice;    in evergreen_cs_track_validate_cb()
    1099  u32 tmp, *ib;    in evergreen_cs_handle_reg() local
    1102  ib = p->ib.ptr;    in evergreen_cs_handle_reg()
    1150  ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);    in evergreen_cs_handle_reg()
    1179  ib[idx] &= ~Z_ARRAY_MODE(0xf);    in evergreen_cs_handle_reg()
    1181  ib[idx] |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));    in evergreen_cs_handle_reg()
    1189  ib[idx] |= DB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks));    in evergreen_cs_handle_reg()
    1190  ib[idx] |= DB_TILE_SPLIT(tile_split) |    in evergreen_cs_handle_reg()
    1222  ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);    in evergreen_cs_handle_reg()
    [all …]
|
| radeon_vm.c |
     358  struct radeon_ib *ib,    in radeon_vm_set_pages() argument
     367  radeon_asic_vm_copy_pages(rdev, ib, pe, src, count);    in radeon_vm_set_pages()
     370  radeon_asic_vm_write_pages(rdev, ib, pe, addr,    in radeon_vm_set_pages()
     374  radeon_asic_vm_set_pages(rdev, ib, pe, addr,    in radeon_vm_set_pages()
     389  struct radeon_ib ib;    in radeon_vm_clear_bo() local
     405  r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, 256);    in radeon_vm_clear_bo()
     409  ib.length_dw = 0;    in radeon_vm_clear_bo()
     411  radeon_vm_set_pages(rdev, &ib, addr, 0, entries, 0, 0);    in radeon_vm_clear_bo()
     412  radeon_asic_vm_pad_ib(rdev, &ib);    in radeon_vm_clear_bo()
     413  WARN_ON(ib.length_dw > 64);    in radeon_vm_clear_bo()
     [all …]
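
radeon_vm_set_pages() is the dispatcher visible above: it picks one of three ASIC hooks depending on where the PTEs live. A sketch; the exact predicates (GART mask check, system-page flag, small-count cutoff) are assumptions reconstructed from the surrounding driver code, not shown in the hits themselves:

    static void example_vm_set_pages(struct radeon_device *rdev,
                                     struct radeon_ib *ib,
                                     uint64_t pe, uint64_t addr,
                                     unsigned count, uint32_t incr,
                                     uint32_t flags)
    {
            if ((flags & R600_PTE_GART_MASK) == R600_PTE_GART_MASK) {
                    /* PTEs already live in the GART table: DMA-copy them. */
                    uint64_t src = rdev->gart.table_addr + (addr >> 12) * 8;

                    radeon_asic_vm_copy_pages(rdev, ib, pe, src, count);
            } else if ((flags & R600_PTE_SYSTEM) || (count < 3)) {
                    /* System pages or a tiny update: write PTE values inline. */
                    radeon_asic_vm_write_pages(rdev, ib, pe, addr, count,
                                               incr, flags);
            } else {
                    /* Large linear range: let the ASIC generate the PTEs. */
                    radeon_asic_vm_set_pages(rdev, ib, pe, addr, count,
                                             incr, flags);
            }
    }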
|
| r600_cs.c |
     356  volatile u32 *ib = p->ib.ptr;    in r600_cs_track_validate_cb() local
     468  ib[track->cb_color_size_idx[i]] = tmp;    in r600_cs_track_validate_cb()
     527  volatile u32 *ib = p->ib.ptr;    in r600_cs_track_validate_db() local
     567  ib[track->db_depth_size_idx] = S_028000_SLICE_TILE_MAX(tmp - 1) | (track->db_depth_size & 0x3FF);    in r600_cs_track_validate_db()
     837  volatile uint32_t *ib;    in r600_cs_common_vline_parse() local
     839  ib = p->ib.ptr;    in r600_cs_common_vline_parse()
     902  ib[h_idx + 2] = PACKET2(0);    in r600_cs_common_vline_parse()
     903  ib[h_idx + 3] = PACKET2(0);    in r600_cs_common_vline_parse()
     904  ib[h_idx + 4] = PACKET2(0);    in r600_cs_common_vline_parse()
     905  ib[h_idx + 5] = PACKET2(0);    in r600_cs_common_vline_parse()
     [all …]
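
Both the r600 and evergreen CS checkers patch the user-supplied command stream in place. The statement that recurs in the hits folds a relocated buffer address into the dword at idx; registers of this class hold the address in 256-byte units, hence the shift. A sketch:

    static void example_patch_reloc(u32 *ib, unsigned idx,
                                    const struct radeon_bo_list *reloc)
    {
            /* gpu_offset is 64-bit; the register takes bits 39:8 of it. */
            ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
    }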
|
| r600_dma.c |
     338  struct radeon_ib ib;    in r600_dma_ib_test() local
     352  r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);    in r600_dma_ib_test()
     358  ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1);    in r600_dma_ib_test()
     359  ib.ptr[1] = lower_32_bits(gpu_addr);    in r600_dma_ib_test()
     360  ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;    in r600_dma_ib_test()
     361  ib.ptr[3] = 0xDEADBEEF;    in r600_dma_ib_test()
     362  ib.length_dw = 4;    in r600_dma_ib_test()
     364  r = radeon_ib_schedule(rdev, &ib, NULL, false);    in r600_dma_ib_test()
     366  radeon_ib_free(rdev, &ib);    in r600_dma_ib_test()
     370  r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies(    in r600_dma_ib_test()
     [all …]
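
The r600_dma_ib_test() hits show radeon's standard IB self-test: write a magic value through a 4-dword DMA WRITE packet, submit, wait on the IB's fence, then check that the write landed. A reassembled sketch; the scratch gpu_addr/cpu_ptr pair and the timeout semantics are assumptions filled in by the caller's setup:

    static int example_dma_ib_test(struct radeon_device *rdev,
                                   struct radeon_ring *ring,
                                   u64 gpu_addr, volatile u32 *cpu_ptr)
    {
            struct radeon_ib ib;
            int r;

            r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
            if (r)
                    return r;

            ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1); /* 1 payload dword */
            ib.ptr[1] = lower_32_bits(gpu_addr);
            ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;
            ib.ptr[3] = 0xDEADBEEF;
            ib.length_dw = 4;

            r = radeon_ib_schedule(rdev, &ib, NULL, false);
            if (r)
                    goto out;

            r = radeon_fence_wait_timeout(ib.fence, false,
                                          usecs_to_jiffies(1000000));
            if (r == 0)
                    r = -ETIMEDOUT;         /* fence never signaled */
            else if (r > 0)
                    r = (*cpu_ptr == 0xDEADBEEF) ? 0 : -EINVAL;
    out:
            radeon_ib_free(rdev, &ib);
            return r;
    }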
|
| /linux/arch/s390/include/asm/ |
| idals.h |
     135  struct idal_buffer *ib;    in idal_buffer_alloc() local
     140  ib = kmalloc_flex(*ib, data, nr_ptrs, GFP_DMA | GFP_KERNEL);    in idal_buffer_alloc()
     141  if (!ib)    in idal_buffer_alloc()
     143  ib->size = size;    in idal_buffer_alloc()
     144  ib->page_order = page_order;    in idal_buffer_alloc()
     147  ib->data[i] = dma64_add(ib->data[i - 1], IDA_BLOCK_SIZE);    in idal_buffer_alloc()
     153  ib->data[i] = virt_to_dma64(vaddr);    in idal_buffer_alloc()
     155  return ib;    in idal_buffer_alloc()
     159  vaddr = dma64_to_virt(ib->data[i]);    in idal_buffer_alloc()
     160  free_pages((unsigned long)vaddr, ib->page_order);    in idal_buffer_alloc()
     [all …]
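
Lines 147 and 153 show how the indirect data address words (IDAWs) are chained: the first entry of each allocated region gets a fresh virtual-to-DMA translation, and the following entries are derived by stepping IDA_BLOCK_SIZE at a time. A sketch of just that chaining; the helper and its parameters are hypothetical, for illustration only:

    static void example_fill_idaws(struct idal_buffer *ib, void *vaddr,
                                   int first, int per_region)
    {
            int i;

            ib->data[first] = virt_to_dma64(vaddr);
            for (i = first + 1; i < first + per_region; i++)
                    ib->data[i] = dma64_add(ib->data[i - 1], IDA_BLOCK_SIZE);
    }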
|
| /linux/drivers/net/ethernet/amd/ |
| 7990.c |
     100  t, ib->brx_ring[t].rmd1_hadr, ib->brx_ring[t].rmd0, \
     101  ib->brx_ring[t].length, \
     102  ib->brx_ring[t].mblength, ib->brx_ring[t].rmd1_bits); \
     106  t, ib->btx_ring[t].tmd1_hadr, ib->btx_ring[t].tmd0, \
     107  ib->btx_ring[t].length, \
     108  ib->btx_ring[t].misc, ib->btx_ring[t].tmd1_bits); \
     140  volatile struct lance_init_block *ib = lp->init_block;    in lance_init_ring() local
     150  ib->mode = LE_MO_PROM; /* normal, enable Tx & Rx */    in lance_init_ring()
     163  ib->phys_addr[0] = dev->dev_addr[1];    in lance_init_ring()
     164  ib->phys_addr[1] = dev->dev_addr[0];    in lance_init_ring()
     [all …]
|
| sunlance.c |
     319  struct lance_init_block *ib = lp->init_block_mem;    in lance_init_ring_dvma() local
     332  ib->phys_addr [0] = dev->dev_addr [1];    in lance_init_ring_dvma()
     333  ib->phys_addr [1] = dev->dev_addr [0];    in lance_init_ring_dvma()
     334  ib->phys_addr [2] = dev->dev_addr [3];    in lance_init_ring_dvma()
     335  ib->phys_addr [3] = dev->dev_addr [2];    in lance_init_ring_dvma()
     336  ib->phys_addr [4] = dev->dev_addr [5];    in lance_init_ring_dvma()
     337  ib->phys_addr [5] = dev->dev_addr [4];    in lance_init_ring_dvma()
     342  ib->btx_ring [i].tmd0 = leptr;    in lance_init_ring_dvma()
     343  ib->btx_ring [i].tmd1_hadr = leptr >> 16;    in lance_init_ring_dvma()
     344  ib->btx_ring [i].tmd1_bits = 0;    in lance_init_ring_dvma()
     [all …]
|
| a2065.c |
     149  volatile struct lance_init_block *ib = lp->init_block;    in lance_init_ring() local
     160  ib->mode = 0;    in lance_init_ring()
     165  ib->phys_addr[0] = dev->dev_addr[1];    in lance_init_ring()
     166  ib->phys_addr[1] = dev->dev_addr[0];    in lance_init_ring()
     167  ib->phys_addr[2] = dev->dev_addr[3];    in lance_init_ring()
     168  ib->phys_addr[3] = dev->dev_addr[2];    in lance_init_ring()
     169  ib->phys_addr[4] = dev->dev_addr[5];    in lance_init_ring()
     170  ib->phys_addr[5] = dev->dev_addr[4];    in lance_init_ring()
     176  ib->btx_ring[i].tmd0 = leptr;    in lance_init_ring()
     177  ib->btx_ring[i].tmd1_hadr = leptr >> 16;    in lance_init_ring()
     [all …]
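
All three LANCE drivers above fill the init block's station address with byte pairs swapped, a byte-order quirk of the chip's 16-bit-word init block on these big-endian platforms: address bytes 0,1 land in slots 1,0 and so on. A sketch of the shared pattern:

    static void example_lance_set_mac(volatile struct lance_init_block *ib,
                                      const u8 *dev_addr)
    {
            int i;

            /* Swap each byte pair: bytes 0,1 -> phys_addr 1,0, etc. */
            for (i = 0; i < 6; i += 2) {
                    ib->phys_addr[i]     = dev_addr[i + 1];
                    ib->phys_addr[i + 1] = dev_addr[i];
            }
    }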
|
| /linux/drivers/infiniband/hw/mlx4/ |
| ah.c |
      48  ah->av.ib.port_pd = cpu_to_be32(to_mpd(ib_ah->pd)->pdn |    in create_ib_ah()
      50  ah->av.ib.g_slid = rdma_ah_get_path_bits(ah_attr);    in create_ib_ah()
      51  ah->av.ib.sl_tclass_flowlabel =    in create_ib_ah()
      56  ah->av.ib.g_slid |= 0x80;    in create_ib_ah()
      57  ah->av.ib.gid_index = grh->sgid_index;    in create_ib_ah()
      58  ah->av.ib.hop_limit = grh->hop_limit;    in create_ib_ah()
      59  ah->av.ib.sl_tclass_flowlabel |=    in create_ib_ah()
      62  memcpy(ah->av.ib.dgid, grh->dgid.raw, 16);    in create_ib_ah()
      65  ah->av.ib.dlid = cpu_to_be16(rdma_ah_get_dlid(ah_attr));    in create_ib_ah()
      73  ah->av.ib.stat_rate = static_rate;    in create_ib_ah()
      [all …]
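
The create_ib_ah() hits show mlx4 translating a generic rdma_ah_attr into its hardware address vector. A sketch of just the GRH branch (lines 56 to 62): set the GRH-present bit in g_slid, record the source GID index and hop limit, and copy the 16-byte DGID. The traffic_class/flow_label bit packing shown here is an assumption about the device layout, not visible in the hits:

    static void example_set_grh(struct mlx4_ib_ah *ah,
                                struct rdma_ah_attr *ah_attr)
    {
            const struct ib_global_route *grh = rdma_ah_read_grh(ah_attr);

            ah->av.ib.g_slid |= 0x80;               /* GRH present */
            ah->av.ib.gid_index = grh->sgid_index;
            ah->av.ib.hop_limit = grh->hop_limit;
            ah->av.ib.sl_tclass_flowlabel |=
                    cpu_to_be32((grh->traffic_class << 20) | grh->flow_label);
            memcpy(ah->av.ib.dgid, grh->dgid.raw, 16);
    }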
|
| /linux/drivers/gpu/drm/amd/amdgpu/ |
| sdma_v6_0.c |
     267  struct amdgpu_ib *ib,    in sdma_v6_0_ring_emit_ib() argument
     286  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);    in sdma_v6_0_ring_emit_ib()
     287  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));    in sdma_v6_0_ring_emit_ib()
     288  amdgpu_ring_write(ring, ib->length_dw);    in sdma_v6_0_ring_emit_ib()
     984  struct amdgpu_ib ib;    in sdma_v6_0_ring_test_ib() local
     992  memset(&ib, 0, sizeof(ib));    in sdma_v6_0_ring_test_ib()
    1003  r = amdgpu_ib_get(adev, NULL, 256, AMDGPU_IB_POOL_DIRECT, &ib);    in sdma_v6_0_ring_test_ib()
    1009  ib.ptr[0] = SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_WRITE) |    in sdma_v6_0_ring_test_ib()
    1011  ib.ptr[1] = lower_32_bits(gpu_addr);    in sdma_v6_0_ring_test_ib()
    1012  ib.ptr[2] = upper_32_bits(gpu_addr);    in sdma_v6_0_ring_test_ib()
    [all …]
|
| sdma_v5_0.c |
     432  struct amdgpu_ib *ib,    in sdma_v5_0_ring_emit_ib() argument
     451  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);    in sdma_v5_0_ring_emit_ib()
     452  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));    in sdma_v5_0_ring_emit_ib()
     453  amdgpu_ring_write(ring, ib->length_dw);    in sdma_v5_0_ring_emit_ib()
    1077  struct amdgpu_ib ib;    in sdma_v5_0_ring_test_ib() local
    1085  memset(&ib, 0, sizeof(ib));    in sdma_v5_0_ring_test_ib()
    1097  AMDGPU_IB_POOL_DIRECT, &ib);    in sdma_v5_0_ring_test_ib()
    1103  ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |    in sdma_v5_0_ring_test_ib()
    1105  ib.ptr[1] = lower_32_bits(gpu_addr);    in sdma_v5_0_ring_test_ib()
    1106  ib.ptr[2] = upper_32_bits(gpu_addr);    in sdma_v5_0_ring_test_ib()
    [all …]
|
| sdma_v5_2.c |
     280  struct amdgpu_ib *ib,    in sdma_v5_2_ring_emit_ib() argument
     299  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);    in sdma_v5_2_ring_emit_ib()
     300  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));    in sdma_v5_2_ring_emit_ib()
     301  amdgpu_ring_write(ring, ib->length_dw);    in sdma_v5_2_ring_emit_ib()
     977  struct amdgpu_ib ib;    in sdma_v5_2_ring_test_ib() local
     985  memset(&ib, 0, sizeof(ib));    in sdma_v5_2_ring_test_ib()
     996  r = amdgpu_ib_get(adev, NULL, 256, AMDGPU_IB_POOL_DIRECT, &ib);    in sdma_v5_2_ring_test_ib()
    1002  ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |    in sdma_v5_2_ring_test_ib()
    1004  ib.ptr[1] = lower_32_bits(gpu_addr);    in sdma_v5_2_ring_test_ib()
    1005  ib.ptr[2] = upper_32_bits(gpu_addr);    in sdma_v5_2_ring_test_ib()
    [all …]
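
All three amdgpu SDMA generations above share one test shape: take a 256-byte IB from the DIRECT pool, emit a linear WRITE to a scratch address, submit, wait on the returned fence, and compare against the magic value. A sketch; the SUB_OP and COUNT packet macros are assumptions from the same header family as the SDMA_PKT_HEADER_OP() hits, and amdgpu_ib_free()'s signature follows the amdgpu_ib.c hits below:

    static int example_sdma_test_ib(struct amdgpu_device *adev,
                                    struct amdgpu_ring *ring,
                                    u64 gpu_addr, volatile u32 *cpu_ptr)
    {
            struct amdgpu_ib ib;
            struct dma_fence *f = NULL;
            long r;

            memset(&ib, 0, sizeof(ib));
            r = amdgpu_ib_get(adev, NULL, 256, AMDGPU_IB_POOL_DIRECT, &ib);
            if (r)
                    return r;

            ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
                        SDMA_PKT_HEADER_SUB_OP(SDMA_SUBOP_WRITE_LINEAR);
            ib.ptr[1] = lower_32_bits(gpu_addr);
            ib.ptr[2] = upper_32_bits(gpu_addr);
            ib.ptr[3] = SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(0); /* one dword */
            ib.ptr[4] = 0xDEADBEEF;
            ib.length_dw = 5;

            r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
            if (r)
                    goto err;

            r = dma_fence_wait_timeout(f, false, msecs_to_jiffies(5000));
            if (r == 0)
                    r = -ETIMEDOUT;
            else if (r > 0)
                    r = (*cpu_ptr == 0xDEADBEEF) ? 0 : -EINVAL;
    err:
            amdgpu_ib_free(&ib, f);
            dma_fence_put(f);
            return r;
    }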
|
| amdgpu_ib.c |
      66  struct amdgpu_ib *ib)    in amdgpu_ib_get() argument
      72  &ib->sa_bo, size);    in amdgpu_ib_get()
      78  ib->ptr = amdgpu_sa_bo_cpu_addr(ib->sa_bo);    in amdgpu_ib_get()
      80  ib->flags = AMDGPU_IB_FLAG_EMIT_MEM_SYNC;    in amdgpu_ib_get()
      83  ib->gpu_addr = amdgpu_sa_bo_gpu_addr(ib->sa_bo);    in amdgpu_ib_get()
      97  void amdgpu_ib_free(struct amdgpu_ib *ib, struct dma_fence *f)    in amdgpu_ib_free() argument
      99  amdgpu_sa_bo_free(&ib->sa_bo, f);    in amdgpu_ib_free()
     129  struct amdgpu_ib *ib = &ibs[0];    in amdgpu_ib_schedule() local
     189  if ((ib->flags & AMDGPU_IB_FLAGS_SECURE) &&    in amdgpu_ib_schedule()
     218  if ((ib->flags & AMDGPU_IB_FLAG_EMIT_MEM_SYNC) && ring->funcs->emit_mem_sync)    in amdgpu_ib_schedule()
     [all …]
|
| amdgpu_ring.h |
     240  struct amdgpu_ib *ib);
     243  struct amdgpu_ib *ib);
     250  struct amdgpu_ib *ib,
     270  void (*pad_ib)(struct amdgpu_ring *ring, struct amdgpu_ib *ib);
     430  #define amdgpu_ring_parse_cs(r, p, job, ib) ((r)->funcs->parse_cs((p), (job), (ib)))    argument
     431  #define amdgpu_ring_patch_cs_in_place(r, p, job, ib) ((r)->funcs->patch_cs_in_place((p), (job), (ib…    argument
     437  #define amdgpu_ring_emit_ib(r, job, ib, flags) ((r)->funcs->emit_ib((r), (job), (ib), (flags)))    argument
     451  #define amdgpu_ring_pad_ib(r, ib) ((r)->funcs->pad_ib((r), (ib)))    argument
     468  void amdgpu_ring_generic_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib);
     556  static inline u32 amdgpu_ib_get_value(struct amdgpu_ib *ib, int idx)    in amdgpu_ib_get_value() argument
     [all …]
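
The macros above are thin wrappers over the amdgpu_ring_funcs vtable, letting each IP block supply its own packet formats. A sketch of the equivalent open-coded calls for one IB; all names come from the header hits:

    static void example_emit_one_ib(struct amdgpu_ring *ring,
                                    struct amdgpu_job *job,
                                    struct amdgpu_ib *ib, u32 flags)
    {
            /* amdgpu_ring_pad_ib(): pad the IB to the backend's alignment. */
            ring->funcs->pad_ib(ring, ib);
            /* amdgpu_ring_emit_ib(): write the INDIRECT_BUFFER packet. */
            ring->funcs->emit_ib(ring, job, ib, flags);
    }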
|
| amdgpu_jpeg.c |
     193  struct amdgpu_ib *ib;    in amdgpu_jpeg_dec_set_reg() local
     204  ib = &job->ibs[0];    in amdgpu_jpeg_dec_set_reg()
     206  ib->ptr[0] = PACKETJ(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0, 0, PACKETJ_TYPE0);    in amdgpu_jpeg_dec_set_reg()
     207  ib->ptr[1] = 0xDEADBEEF;    in amdgpu_jpeg_dec_set_reg()
     209  ib->ptr[i] = PACKETJ(0, 0, 0, PACKETJ_TYPE6);    in amdgpu_jpeg_dec_set_reg()
     210  ib->ptr[i+1] = 0;    in amdgpu_jpeg_dec_set_reg()
     212  ib->length_dw = 16;    in amdgpu_jpeg_dec_set_reg()
     566  struct amdgpu_ib *ib)    in amdgpu_jpeg_dec_parse_cs() argument
     571  for (i = 0; i < ib->length_dw ; i += 2) {    in amdgpu_jpeg_dec_parse_cs()
     572  reg = CP_PACKETJ_GET_REG(ib->ptr[i]);    in amdgpu_jpeg_dec_parse_cs()
     [all …]
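
The amdgpu_jpeg_dec_parse_cs() hits show the CS validation loop: the IB is walked two dwords at a time (packet header, payload), the register is extracted from each PACKETJ, and anything outside the allowed decode window is rejected. A sketch; the window bounds are placeholders, not the driver's real values:

    static int example_jpeg_parse_cs(struct amdgpu_ib *ib,
                                     u32 reg_min, u32 reg_max)
    {
            u32 i, reg;

            for (i = 0; i < ib->length_dw; i += 2) {
                    reg = CP_PACKETJ_GET_REG(ib->ptr[i]);
                    if (reg < reg_min || reg > reg_max)
                            return -EINVAL; /* register not whitelisted */
            }
            return 0;
    }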
|
| /linux/fs/smb/server/ |
| transport_rdma.c |
     187  ib_dma_unmap_single(sc->ib.dev,    in put_recvmsg()
     440  sc->ib.dev = sc->rdma.cm_id->device;    in alloc_transport()
     494  if (sc->ib.qp) {    in free_transport()
     495  ib_drain_qp(sc->ib.qp);    in free_transport()
     496  sc->ib.qp = NULL;    in free_transport()
     516  if (sc->ib.send_cq)    in free_transport()
     517  ib_free_cq(sc->ib.send_cq);    in free_transport()
     518  if (sc->ib.recv_cq)    in free_transport()
     519  ib_free_cq(sc->ib.recv_cq);    in free_transport()
     520  if (sc->ib.pd)    in free_transport()
     [all …]
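
The free_transport() hits show the usual RDMA teardown ordering: drain the QP so posted work requests complete, then free the CQs, then the PD last. A sketch; the smbdirect_socket type name and the QP destroy step (elided between the drain and the CQ teardown in the hits) are assumptions:

    static void example_free_rdma(struct smbdirect_socket *sc)
    {
            if (sc->ib.qp) {
                    ib_drain_qp(sc->ib.qp); /* flush in-flight WRs */
                    sc->ib.qp = NULL;
                    /* ... QP destroy happens here in the real driver ... */
            }
            if (sc->ib.send_cq)
                    ib_free_cq(sc->ib.send_cq);
            if (sc->ib.recv_cq)
                    ib_free_cq(sc->ib.recv_cq);
            if (sc->ib.pd)
                    ib_dealloc_pd(sc->ib.pd);
    }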
|
| ksmbd_work.c |
      98  static inline void __ksmbd_iov_pin(struct ksmbd_work *work, void *ib,    in __ksmbd_iov_pin() argument
     101  work->iov[++work->iov_idx].iov_base = ib;    in __ksmbd_iov_pin()
     106  static int __ksmbd_iov_pin_rsp(struct ksmbd_work *work, void *ib, int len,    in __ksmbd_iov_pin_rsp() argument
     142  __ksmbd_iov_pin(work, ib, len);    in __ksmbd_iov_pin_rsp()
     156  int ksmbd_iov_pin_rsp(struct ksmbd_work *work, void *ib, int len)    in ksmbd_iov_pin_rsp() argument
     158  return __ksmbd_iov_pin_rsp(work, ib, len, NULL, 0);    in ksmbd_iov_pin_rsp()
     161  int ksmbd_iov_pin_rsp_read(struct ksmbd_work *work, void *ib, int len,    in ksmbd_iov_pin_rsp_read() argument
     164  return __ksmbd_iov_pin_rsp(work, ib, len, aux_buf, aux_size);    in ksmbd_iov_pin_rsp_read()
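
The pin step above appends each response buffer to the work's iovec array so the transport can send the whole response with one vectored write. A sketch; only the iov_base assignment is visible in the hits, so the iov_len and iov_cnt updates are assumptions:

    static inline void example_iov_pin(struct ksmbd_work *work,
                                       void *ib, unsigned int len)
    {
            work->iov[++work->iov_idx].iov_base = ib;
            work->iov[work->iov_idx].iov_len = len;    /* assumed */
            work->iov_cnt++;                           /* assumed */
    }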
|
| /linux/drivers/s390/char/ |
| fs3270.c |
     242  struct idal_buffer *ib;    in fs3270_read() local
     250  ib = idal_buffer_alloc(count, 0);    in fs3270_read()
     251  if (IS_ERR(ib))    in fs3270_read()
     258  raw3270_request_set_idal(rq, ib);    in fs3270_read()
     265  if (idal_buffer_to_user(ib, data, count) != 0)    in fs3270_read()
     275  idal_buffer_free(ib);    in fs3270_read()
     287  struct idal_buffer *ib;    in fs3270_write() local
     294  ib = idal_buffer_alloc(count, 0);    in fs3270_write()
     295  if (IS_ERR(ib))    in fs3270_write()
     299  if (idal_buffer_from_user(ib, data, count) == 0) {    in fs3270_write()
     [all …]
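
The fs3270_read() hits show the IDAL buffer from idals.h in use: build an IDAL for the user's byte count, attach it to a raw3270 channel request, and copy the result out afterwards. A condensed sketch; request start/wait and most error paths are elided:

    static ssize_t example_fs3270_read(struct raw3270_request *rq,
                                       char __user *data, size_t count)
    {
            struct idal_buffer *ib;
            ssize_t rc = count;

            ib = idal_buffer_alloc(count, 0);
            if (IS_ERR(ib))
                    return -ENOMEM;

            raw3270_request_set_idal(rq, ib); /* channel program fills ib */
            /* ... start the request and wait for completion here ... */
            if (idal_buffer_to_user(ib, data, count) != 0)
                    rc = -EFAULT;

            idal_buffer_free(ib);
            return rc;
    }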
|
| /linux/fs/smb/client/ |
| smbdirect.c |
     552  ib_dma_unmap_page(sc->ib.dev,    in smbd_free_send_io()
    1060  sc->ib.dev = sc->rdma.cm_id->device;    in smbd_ia_open()
    1062  if (!frwr_is_supported(&sc->ib.dev->attrs)) {    in smbd_ia_open()
    1065  sc->ib.dev->attrs.device_cap_flags,    in smbd_ia_open()
    1066  sc->ib.dev->attrs.max_fast_reg_page_list_len);    in smbd_ia_open()
    1072  sc->ib.dev->attrs.max_fast_reg_page_list_len);    in smbd_ia_open()
    1074  if (sc->ib.dev->attrs.kernel_cap_flags & IBK_SG_GAPS_REG)    in smbd_ia_open()
    1115  sc->ib.dev, (void *)packet,    in smbd_post_send_negotiate_req()
    1117  if (ib_dma_mapping_error(sc->ib.dev, request->sge[0].addr)) {    in smbd_post_send_negotiate_req()
    1124  request->sge[0].lkey = sc->ib.pd->local_dma_lkey;    in smbd_post_send_negotiate_req()
    [all …]
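
The smbd_post_send_negotiate_req() hits show the standard RDMA DMA-mapping dance: map the buffer, verify with ib_dma_mapping_error() before using the address, and fill the SGE with the PD's local_dma_lkey. A sketch; the helper name and the smbdirect_socket type are placeholders:

    static int example_map_for_send(struct smbdirect_socket *sc, void *packet,
                                    size_t len, struct ib_sge *sge)
    {
            sge->addr = ib_dma_map_single(sc->ib.dev, packet, len,
                                          DMA_TO_DEVICE);
            if (ib_dma_mapping_error(sc->ib.dev, sge->addr))
                    return -EIO;    /* never post an unverified mapping */

            sge->length = len;
            sge->lkey = sc->ib.pd->local_dma_lkey;
            return 0;
    }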
|
| /linux/drivers/net/ethernet/brocade/bna/ |
| bna_hw_defs.h |
     236  struct bna_ib *ib = _ib; \
     237  if ((ib->intr_type == BNA_INTR_T_INTX)) { \
     239  intx_mask &= ~(ib->intr_vector); \
     242  bna_ib_coalescing_timer_set(&ib->door_bell, \
     243  ib->coalescing_timeo); \
     245  bna_ib_ack(&ib->door_bell, 0); \
     251  struct bna_ib *ib = _ib; \
     253  ib->door_bell.doorbell_addr); \
     254  if (ib->intr_type == BNA_INTR_T_INTX) { \
     256  intx_mask |= ib->intr_vector; \
|
| /linux/drivers/iio/ |
| industrialio-buffer.c |
     143  struct iio_dev_buffer_pair *ib = filp->private_data;    in iio_buffer_read() local
     144  struct iio_buffer *rb = ib->buffer;    in iio_buffer_read()
     145  struct iio_dev *indio_dev = ib->indio_dev;    in iio_buffer_read()
     212  struct iio_dev_buffer_pair *ib = filp->private_data;    in iio_buffer_write() local
     213  struct iio_buffer *rb = ib->buffer;    in iio_buffer_write()
     214  struct iio_dev *indio_dev = ib->indio_dev;    in iio_buffer_write()
     277  struct iio_dev_buffer_pair *ib = filp->private_data;    in iio_buffer_poll() local
     278  struct iio_buffer *rb = ib->buffer;    in iio_buffer_poll()
     279  struct iio_dev *indio_dev = ib->indio_dev;    in iio_buffer_poll()
     303  struct iio_dev_buffer_pair *ib = filp->private_data;    in iio_buffer_read_wrapper() local
     [all …]
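
Every file operation in the hits starts the same way: the chardev's private_data is an iio_dev_buffer_pair binding one buffer to its parent device, so read, write, and poll can all recover both objects. A sketch of that unpacking as a helper; the helper itself is hypothetical, for illustration:

    static struct iio_buffer *example_get_buffer(struct file *filp,
                                                 struct iio_dev **indio_dev)
    {
            struct iio_dev_buffer_pair *ib = filp->private_data;

            *indio_dev = ib->indio_dev;     /* owning device */
            return ib->buffer;              /* this chardev's buffer */
    }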
|