/linux/drivers/gpu/drm/amd/amdkfd/

kfd_packet_manager_v9.c
     67  packet->sq_shader_tba_lo = lower_32_bits(qpd->tba_addr >> 8);   in pm_map_process_v9()
     74  packet->sq_shader_tma_lo = lower_32_bits(qpd->tma_addr >> 8);   in pm_map_process_v9()
     78  packet->gds_addr_lo = lower_32_bits(qpd->gds_context_area);   in pm_map_process_v9()
     82  lower_32_bits(vm_page_table_base_addr);   in pm_map_process_v9()
    130  packet->sq_shader_tba_lo = lower_32_bits(qpd->tba_addr >> 8);   in pm_map_process_aldebaran()
    132  packet->sq_shader_tma_lo = lower_32_bits(qpd->tma_addr >> 8);   in pm_map_process_aldebaran()
    136  packet->gds_addr_lo = lower_32_bits(qpd->gds_context_area);   in pm_map_process_aldebaran()
    140  lower_32_bits(vm_page_table_base_addr);   in pm_map_process_aldebaran()
    184  packet->ordinal2 = lower_32_bits(ib);   in pm_runlist_v9()
    209  packet->gws_mask_lo = lower_32_bits(res->gws_mask);   in pm_set_resources_v9()
    [all …]

kfd_packet_manager_vi.c
     69  packet->gds_addr_lo = lower_32_bits(qpd->gds_context_area);   in pm_map_process_vi()
    108  packet->ordinal2 = lower_32_bits(ib);   in pm_runlist_vi()
    133  packet->gws_mask_lo = lower_32_bits(res->gws_mask);   in pm_set_resources_vi()
    136  packet->queue_mask_lo = lower_32_bits(res->queue_mask);   in pm_set_resources_vi()
    186  lower_32_bits(q->gart_mqd_addr);   in pm_map_queues_vi()
    192  lower_32_bits((uint64_t)q->properties.write_ptr);   in pm_map_queues_vi()
    264  packet->addr_lo = lower_32_bits((uint64_t)fence_address);   in pm_query_status_vi()
    266  packet->data_lo = lower_32_bits((uint64_t)fence_value);   in pm_query_status_vi()

kfd_mqd_manager_v12.c
    128  m->cp_mqd_base_addr_lo = lower_32_bits(addr);   in init_mqd()
    152  lower_32_bits(q->ctx_save_restore_area_address);   in init_mqd()
    194  m->cp_hqd_pq_base_lo = lower_32_bits((uint64_t)q->queue_address >> 8);   in update_mqd()
    197  m->cp_hqd_pq_rptr_report_addr_lo = lower_32_bits((uint64_t)q->read_ptr);   in update_mqd()
    199  m->cp_hqd_pq_wptr_poll_addr_lo = lower_32_bits((uint64_t)q->write_ptr);   in update_mqd()
    220  lower_32_bits(q->eop_ring_buffer_address >> 8);   in update_mqd()
    336  m->sdmax_rlcx_rb_base = lower_32_bits(q->queue_address >> 8);   in update_mqd_sdma()
    338  m->sdmax_rlcx_rb_rptr_addr_lo = lower_32_bits((uint64_t)q->read_ptr);   in update_mqd_sdma()
    340  m->sdmax_rlcx_rb_wptr_poll_addr_lo = lower_32_bits((uint64_t)q->write_ptr);   in update_mqd_sdma()

kfd_mqd_manager_vi.c
    116  m->cp_mqd_base_addr_lo = lower_32_bits(addr);   in init_mqd()
    130  m->compute_tba_lo = lower_32_bits(q->tba_addr >> 8);   in init_mqd()
    132  m->compute_tma_lo = lower_32_bits(q->tma_addr >> 8);   in init_mqd()
    142  lower_32_bits(q->ctx_save_restore_area_address);   in init_mqd()
    184  m->cp_hqd_pq_base_lo = lower_32_bits((uint64_t)q->queue_address >> 8);   in __update_mqd()
    187  m->cp_hqd_pq_rptr_report_addr_lo = lower_32_bits((uint64_t)q->read_ptr);   in __update_mqd()
    189  m->cp_hqd_pq_wptr_poll_addr_lo = lower_32_bits((uint64_t)q->write_ptr);   in __update_mqd()
    215  lower_32_bits(q->eop_ring_buffer_address >> 8);   in __update_mqd()
    370  m->sdmax_rlcx_rb_base = lower_32_bits(q->queue_address >> 8);   in update_mqd_sdma()
    372  m->sdmax_rlcx_rb_rptr_addr_lo = lower_32_bits((uint64_t)q->read_ptr);   in update_mqd_sdma()

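A note on the pattern in the amdkfd hits above: each *_lo field in these packet and MQD structures receives the low half of a 64-bit GPU address, and fields such as sq_shader_tba_lo and cp_hqd_pq_base_lo take the address pre-shifted right by 8 because the hardware field holds it in 256-byte units. The matching upper_32_bits() writes typically sit on neighbouring source lines and simply do not show up in a search for lower_32_bits. Below is a minimal userspace sketch of that split; the struct, field names and address value are illustrative rather than the real packet layout, and the macros are stand-ins equivalent to the kernel helpers.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Userspace stand-ins equivalent to the kernel's lower/upper_32_bits helpers. */
#define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))
#define upper_32_bits(n) ((uint32_t)(((n) >> 16) >> 16))

/* Illustrative packet fields, not the real MQD/packet layout. */
struct demo_map_process {
	uint32_t sq_shader_tba_lo;   /* low 32 bits of (tba_addr >> 8) */
	uint32_t sq_shader_tba_hi;   /* high 32 bits of (tba_addr >> 8) */
};

int main(void)
{
	uint64_t tba_addr = 0x0000123456789a00ULL;   /* 256-byte aligned */
	struct demo_map_process pkt;

	/* Same shape as the pm_map_process_v9() lines above: shift, then split. */
	pkt.sq_shader_tba_lo = lower_32_bits(tba_addr >> 8);
	pkt.sq_shader_tba_hi = upper_32_bits(tba_addr >> 8);

	printf("tba_lo=0x%08" PRIx32 " tba_hi=0x%08" PRIx32 "\n",
	       pkt.sq_shader_tba_lo, pkt.sq_shader_tba_hi);

	/* Reassemble and undo the shift to confirm the round trip. */
	uint64_t back = (((uint64_t)pkt.sq_shader_tba_hi << 32) |
			 pkt.sq_shader_tba_lo) << 8;
	printf("round trip %s\n", back == tba_addr ? "ok" : "mismatch");
	return 0;
}

The round trip only works because the address is 256-byte aligned, which is what makes shifting before the split lossless.
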
/linux/drivers/firmware/smccc/

kvm_guest.c
     33  val[0] = lower_32_bits(res.a0);   in kvm_init_hyp_services()
     34  val[1] = lower_32_bits(res.a1);   in kvm_init_hyp_services()
     35  val[2] = lower_32_bits(res.a2);   in kvm_init_hyp_services()
     36  val[3] = lower_32_bits(res.a3);   in kvm_init_hyp_services()

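The kvm_guest.c hits keep only the low word of each 64-bit SMCCC result register; as the code reads, only the low 32 bits of each register carry data here, and the kernel goes on to build its hypervisor-services bitmap from these words. A hedged sketch of just that extraction step, with a made-up stand-in for struct arm_smccc_res and invented register values:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))

/* Stand-in for struct arm_smccc_res: four 64-bit result registers. */
struct demo_smccc_res {
	uint64_t a0, a1, a2, a3;
};

int main(void)
{
	/* Made-up reply; only the low word of each register carries data here. */
	struct demo_smccc_res res = {
		.a0 = 0x00000000c0ffee01ULL,
		.a1 = 0x0000000000000003ULL,
		.a2 = 0x0000000080000000ULL,
		.a3 = 0x0000000000000000ULL,
	};
	uint32_t val[4];

	/* Same shape as the kvm_init_hyp_services() lines: keep the low words. */
	val[0] = lower_32_bits(res.a0);
	val[1] = lower_32_bits(res.a1);
	val[2] = lower_32_bits(res.a2);
	val[3] = lower_32_bits(res.a3);

	for (int i = 0; i < 4; i++)
		printf("val[%d] = 0x%08" PRIx32 "\n", i, val[i]);
	return 0;
}
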
/linux/drivers/pci/controller/mobiveil/

pcie-mobiveil.c
    151  (lower_32_bits(size64) & WIN_SIZE_MASK);   in program_ib_windows()
    157  mobiveil_csr_writel(pcie, lower_32_bits(cpu_addr),   in program_ib_windows()
    162  mobiveil_csr_writel(pcie, lower_32_bits(pci_addr),   in program_ib_windows()
    192  (lower_32_bits(size64) & WIN_SIZE_MASK);   in program_ob_windows()
    203  lower_32_bits(cpu_addr) & (~AXI_WINDOW_ALIGN_MASK),   in program_ob_windows()
    208  mobiveil_csr_writel(pcie, lower_32_bits(pci_addr),   in program_ob_windows()

/linux/drivers/gpu/drm/amd/amdgpu/

vcn_v2_0.c
    391  lower_32_bits(adev->vcn.inst->gpu_addr));   in vcn_v2_0_mc_resume()
    403  lower_32_bits(adev->vcn.inst->gpu_addr + offset));   in vcn_v2_0_mc_resume()
    411  lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));   in vcn_v2_0_mc_resume()
    419  lower_32_bits(adev->vcn.inst->fw_shared.gpu_addr));   in vcn_v2_0_mc_resume()
    457  lower_32_bits(adev->vcn.inst->gpu_addr), 0, indirect);   in vcn_v2_0_mc_resume_dpg_mode()
    478  lower_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect);   in vcn_v2_0_mc_resume_dpg_mode()
    498  lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect);   in vcn_v2_0_mc_resume_dpg_mode()
    510  lower_32_bits(adev->vcn.inst->fw_shared.gpu_addr), 0, indirect);   in vcn_v2_0_mc_resume_dpg_mode()
    952  lower_32_bits(ring->gpu_addr));   in vcn_v2_0_start_dpg_mode()
    963  lower_32_bits(ring->wptr));   in vcn_v2_0_start_dpg_mode()
    [all …]

sdma_v7_0.c
    148  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v7_0_ring_init_cond_exec()
    242  lower_32_bits(ring->wptr << 2),   in sdma_v7_0_ring_set_wptr()
    255  lower_32_bits(ring->wptr << 2),   in sdma_v7_0_ring_set_wptr()
    261  lower_32_bits(ring->wptr << 2));   in sdma_v7_0_ring_set_wptr()
    309  sdma_v7_0_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);   in sdma_v7_0_ring_emit_ib()
    314  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);   in sdma_v7_0_ring_emit_ib()
    317  amdgpu_ring_write(ring, lower_32_bits(csa_mc_addr));   in sdma_v7_0_ring_emit_ib()
    393  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v7_0_ring_emit_fence()
    395  amdgpu_ring_write(ring, lower_32_bits(seq));   in sdma_v7_0_ring_emit_fence()
    404  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v7_0_ring_emit_fence()
    [all …]

sdma_v6_0.c
    147  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v6_0_ring_init_cond_exec()
    213  lower_32_bits(ring->wptr << 2),   in sdma_v6_0_ring_set_wptr()
    226  lower_32_bits(ring->wptr << 2),   in sdma_v6_0_ring_set_wptr()
    231  lower_32_bits(ring->wptr << 2));   in sdma_v6_0_ring_set_wptr()
    277  sdma_v6_0_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);   in sdma_v6_0_ring_emit_ib()
    282  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);   in sdma_v6_0_ring_emit_ib()
    285  amdgpu_ring_write(ring, lower_32_bits(csa_mc_addr));   in sdma_v6_0_ring_emit_ib()
    361  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v6_0_ring_emit_fence()
    363  amdgpu_ring_write(ring, lower_32_bits(seq));   in sdma_v6_0_ring_emit_fence()
    372  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v6_0_ring_emit_fence()
    [all …]

vcn_v3_0.c
    506  lower_32_bits(adev->vcn.inst[inst].gpu_addr));   in vcn_v3_0_mc_resume()
    517  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset));   in vcn_v3_0_mc_resume()
    525  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));   in vcn_v3_0_mc_resume()
    533  lower_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr));   in vcn_v3_0_mc_resume()
    569  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);   in vcn_v3_0_mc_resume_dpg_mode()
    590  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);   in vcn_v3_0_mc_resume_dpg_mode()
    610  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect);   in vcn_v3_0_mc_resume_dpg_mode()
    622  lower_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect);   in vcn_v3_0_mc_resume_dpg_mode()
   1110  lower_32_bits(ring->gpu_addr));   in vcn_v3_0_start_dpg_mode()
   1121  lower_32_bits(ring->wptr));   in vcn_v3_0_start_dpg_mode()
    [all …]

sdma_v5_2.c
    147  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v5_2_ring_init_cond_exec()
    220  lower_32_bits(ring->wptr << 2),   in sdma_v5_2_ring_set_wptr()
    234  lower_32_bits(ring->wptr << 2));   in sdma_v5_2_ring_set_wptr()
    243  lower_32_bits(ring->wptr << 2),   in sdma_v5_2_ring_set_wptr()
    247  lower_32_bits(ring->wptr << 2));   in sdma_v5_2_ring_set_wptr()
    292  sdma_v5_2_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);   in sdma_v5_2_ring_emit_ib()
    297  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);   in sdma_v5_2_ring_emit_ib()
    300  amdgpu_ring_write(ring, lower_32_bits(csa_mc_addr));   in sdma_v5_2_ring_emit_ib()
    380  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v5_2_ring_emit_fence()
    382  amdgpu_ring_write(ring, lower_32_bits(seq));   in sdma_v5_2_ring_emit_fence()
    [all …]

vce_v4_0.c
    109  *ring->wptr_cpu_addr = lower_32_bits(ring->wptr);   in vce_v4_0_ring_set_wptr()
    110  WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));   in vce_v4_0_ring_set_wptr()
    116  lower_32_bits(ring->wptr));   in vce_v4_0_ring_set_wptr()
    119  lower_32_bits(ring->wptr));   in vce_v4_0_ring_set_wptr()
    122  lower_32_bits(ring->wptr));   in vce_v4_0_ring_set_wptr()
    164  WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_MMSCH_VF_CTX_ADDR_LO), lower_32_bits(addr));   in vce_v4_0_mmsch_start()
    235  lower_32_bits(ring->gpu_addr));   in vce_v4_0_sriov_start()
    343  WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR), lower_32_bits(ring->wptr));   in vce_v4_0_start()
    344  WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR), lower_32_bits(ring->wptr));   in vce_v4_0_start()
    351  WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR2), lower_32_bits(ring->wptr));   in vce_v4_0_start()
    [all …]

vcn_v2_5.c
    479  lower_32_bits(adev->vcn.inst[i].gpu_addr));   in vcn_v2_5_mc_resume()
    490  lower_32_bits(adev->vcn.inst[i].gpu_addr + offset));   in vcn_v2_5_mc_resume()
    498  lower_32_bits(adev->vcn.inst[i].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));   in vcn_v2_5_mc_resume()
    506  lower_32_bits(adev->vcn.inst[i].fw_shared.gpu_addr));   in vcn_v2_5_mc_resume()
    543  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);   in vcn_v2_5_mc_resume_dpg_mode()
    564  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect);   in vcn_v2_5_mc_resume_dpg_mode()
    584  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect);   in vcn_v2_5_mc_resume_dpg_mode()
    596  lower_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect);   in vcn_v2_5_mc_resume_dpg_mode()
    988  lower_32_bits(ring->gpu_addr));   in vcn_v2_5_start_dpg_mode()
    999  lower_32_bits(ring->wptr));   in vcn_v2_5_start_dpg_mode()
    [all …]

si_dma.c
     71  while ((lower_32_bits(ring->wptr) & 7) != 5)   in si_dma_ring_emit_ib()
    153  WREG32(DMA_RB_RPTR_ADDR_LO + sdma_offsets[i], lower_32_bits(rptr_addr));   in si_dma_start()
    214  amdgpu_ring_write(ring, lower_32_bits(gpu_addr));   in si_dma_ring_test_ring()
    267  ib.ptr[1] = lower_32_bits(gpu_addr);   in si_dma_ring_test_ib()
    314  ib->ptr[ib->length_dw++] = lower_32_bits(pe);   in si_dma_vm_copy_pte()
    315  ib->ptr[ib->length_dw++] = lower_32_bits(src);   in si_dma_vm_copy_pte()
    338  ib->ptr[ib->length_dw++] = lower_32_bits(pe);   in si_dma_vm_write_pte()
    341  ib->ptr[ib->length_dw++] = lower_32_bits(value);   in si_dma_vm_write_pte()
    381  ib->ptr[ib->length_dw++] = lower_32_bits(flags); /* mask */   in si_dma_vm_set_pte_pde()
    421  amdgpu_ring_write(ring, lower_32_bits(addr));   in si_dma_ring_emit_pipeline_sync()
    [all …]

sdma_v2_4.c
    255  sdma_v2_4_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);   in sdma_v2_4_ring_emit_ib()
    260  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);   in sdma_v2_4_ring_emit_ib()
    313  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v2_4_ring_emit_fence()
    315  amdgpu_ring_write(ring, lower_32_bits(seq));   in sdma_v2_4_ring_emit_fence()
    321  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v2_4_ring_emit_fence()
    447  lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);   in sdma_v2_4_gfx_resume()
    554  amdgpu_ring_write(ring, lower_32_bits(gpu_addr));   in sdma_v2_4_ring_test_ring()
    609  ib.ptr[1] = lower_32_bits(gpu_addr);   in sdma_v2_4_ring_test_ib()
    663  ib->ptr[ib->length_dw++] = lower_32_bits(src);   in sdma_v2_4_vm_copy_pte()
    665  ib->ptr[ib->length_dw++] = lower_32_bits(pe);   in sdma_v2_4_vm_copy_pte()
    [all …]

sdma_v5_0.c
    307  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v5_0_ring_init_cond_exec()
    410  lower_32_bits(ring->wptr << 2),   in sdma_v5_0_ring_set_wptr()
    423  lower_32_bits(ring->wptr << 2),   in sdma_v5_0_ring_set_wptr()
    428  lower_32_bits(ring->wptr << 2));   in sdma_v5_0_ring_set_wptr()
    475  sdma_v5_0_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);   in sdma_v5_0_ring_emit_ib()
    480  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);   in sdma_v5_0_ring_emit_ib()
    483  amdgpu_ring_write(ring, lower_32_bits(csa_mc_addr));   in sdma_v5_0_ring_emit_ib()
    561  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v5_0_ring_emit_fence()
    563  amdgpu_ring_write(ring, lower_32_bits(seq));   in sdma_v5_0_ring_emit_fence()
    572  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v5_0_ring_emit_fence()
    [all …]

sdma_v3_0.c
    431  sdma_v3_0_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);   in sdma_v3_0_ring_emit_ib()
    436  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);   in sdma_v3_0_ring_emit_ib()
    489  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v3_0_ring_emit_fence()
    491  amdgpu_ring_write(ring, lower_32_bits(seq));   in sdma_v3_0_ring_emit_fence()
    497  amdgpu_ring_write(ring, lower_32_bits(addr));   in sdma_v3_0_ring_emit_fence()
    688  lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);   in sdma_v3_0_gfx_resume()
    710  lower_32_bits(wptr_gpu_addr));   in sdma_v3_0_gfx_resume()
    828  amdgpu_ring_write(ring, lower_32_bits(gpu_addr));   in sdma_v3_0_ring_test_ring()
    883  ib.ptr[1] = lower_32_bits(gpu_addr);   in sdma_v3_0_ring_test_ib()
    936  ib->ptr[ib->length_dw++] = lower_32_bits(src);   in sdma_v3_0_vm_copy_pte()
    [all …]

cik_sdma.c
    232  cik_sdma_ring_insert_nop(ring, (4 - lower_32_bits(ring->wptr)) & 7);   in cik_sdma_ring_emit_ib()
    285  amdgpu_ring_write(ring, lower_32_bits(addr));   in cik_sdma_ring_emit_fence()
    287  amdgpu_ring_write(ring, lower_32_bits(seq));   in cik_sdma_ring_emit_fence()
    293  amdgpu_ring_write(ring, lower_32_bits(addr));   in cik_sdma_ring_emit_fence()
    623  amdgpu_ring_write(ring, lower_32_bits(gpu_addr));   in cik_sdma_ring_test_ring()
    678  ib.ptr[1] = lower_32_bits(gpu_addr);   in cik_sdma_ring_test_ib()
    728  ib->ptr[ib->length_dw++] = lower_32_bits(src);   in cik_sdma_vm_copy_pte()
    730  ib->ptr[ib->length_dw++] = lower_32_bits(pe);   in cik_sdma_vm_copy_pte()
    753  ib->ptr[ib->length_dw++] = lower_32_bits(pe);   in cik_sdma_vm_write_pte()
    757  ib->ptr[ib->length_dw++] = lower_32_bits(value);   in cik_sdma_vm_write_pte()
    [all …]

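Across the SDMA and CIK ring functions above, 64-bit quantities (fence write-back address, sequence number, IB address) are streamed into the command ring one 32-bit dword at a time, low half first; the upper-half writes sit on the lines in between and are not matched by a search for lower_32_bits. Below is a simplified model of that emit path over a toy dword ring. It is a sketch only: real packets also carry opcode and header dwords, the layout differs per ASIC, and whether the sequence is emitted as one or two dwords depends on fence flags.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))
#define upper_32_bits(n) ((uint32_t)(((n) >> 16) >> 16))

/* Toy command ring: a dword buffer plus a monotonically growing write pointer. */
struct demo_ring {
	uint32_t buf[32];
	unsigned int wptr;
};

static void demo_ring_write(struct demo_ring *ring, uint32_t dw)
{
	ring->buf[ring->wptr & 31] = dw;   /* wrap with a mask, as real rings do */
	ring->wptr++;
}

/*
 * Simplified fence emit: stream a 64-bit write-back address and a 64-bit
 * sequence number as four dwords, low half first. Opcode/header dwords are
 * deliberately omitted.
 */
static void demo_emit_fence(struct demo_ring *ring, uint64_t addr, uint64_t seq)
{
	demo_ring_write(ring, lower_32_bits(addr));
	demo_ring_write(ring, upper_32_bits(addr));
	demo_ring_write(ring, lower_32_bits(seq));
	demo_ring_write(ring, upper_32_bits(seq));
}

int main(void)
{
	struct demo_ring ring = { .wptr = 0 };

	demo_emit_fence(&ring, 0x0000008000001000ULL, 0x0000000100000002ULL);
	for (unsigned int i = 0; i < ring.wptr; i++)
		printf("dw[%u] = 0x%08" PRIx32 "\n", i, ring.buf[i]);
	return 0;
}
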
/linux/drivers/tee/tstee/

core.c
     37  args[0] = lower_32_bits(data->data0);   in arg_list_from_ffa_data()
     38  args[1] = lower_32_bits(data->data1);   in arg_list_from_ffa_data()
     39  args[2] = lower_32_bits(data->data2);   in arg_list_from_ffa_data()
     40  args[3] = lower_32_bits(data->data3);   in arg_list_from_ffa_data()
     41  args[4] = lower_32_bits(data->data4);   in arg_list_from_ffa_data()
    190  shm_id = lower_32_bits(param[0].u.value.a);   in tstee_invoke_func()
    191  req_len = lower_32_bits(param[0].u.value.b);   in tstee_invoke_func()
    212  ffa_args[TS_RPC_SERVICE_MEM_HANDLE_LSW] = lower_32_bits(handle);   in tstee_invoke_func()
    281  lower_32_bits(shm->sec_world_id);   in tstee_shm_register()
    319  lower_32_bits(shm->sec_world_id);   in tstee_shm_unregister()

/linux/drivers/iio/test/

iio-test-format.c
    211  values[0] = lower_32_bits(value);   in iio_test_iio_format_value_integer_64()
    217  values[0] = lower_32_bits(value);   in iio_test_iio_format_value_integer_64()
    223  values[0] = lower_32_bits(value);   in iio_test_iio_format_value_integer_64()
    229  values[0] = lower_32_bits(value);   in iio_test_iio_format_value_integer_64()
    235  values[0] = lower_32_bits(value);   in iio_test_iio_format_value_integer_64()
    241  values[0] = lower_32_bits(value);   in iio_test_iio_format_value_integer_64()
    247  values[0] = lower_32_bits(value);   in iio_test_iio_format_value_integer_64()

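The IIO format test splits a signed 64-bit value into a two-word array before formatting. Only the low-word assignment shows up in this search; the natural counterpart for values[1] is upper_32_bits(), though that line is not in the excerpt. The sketch below shows that the split and a later reassembly of (hi << 32) | lo preserve the sign; the reassembly step is the assumed consumer side, not code from the test.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))
#define upper_32_bits(n) ((uint32_t)(((n) >> 16) >> 16))

int main(void)
{
	/* A negative 64-bit quantity, similar to what the IIO format test feeds in. */
	int64_t value = -1234567890123456789LL;
	uint32_t values[2];

	/* Split into two 32-bit words; shifting the unsigned image of the value
	 * keeps the behaviour well defined and loses no bits. */
	values[0] = lower_32_bits((uint64_t)value);
	values[1] = upper_32_bits((uint64_t)value);

	/* What a consumer of the two words does to get the s64 back. */
	int64_t back = (int64_t)(((uint64_t)values[1] << 32) | values[0]);

	printf("lo=0x%08" PRIx32 " hi=0x%08" PRIx32 "\n", values[0], values[1]);
	printf("%" PRId64 " -> %" PRId64 " (%s)\n",
	       value, back, back == value ? "ok" : "mismatch");
	return 0;
}
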
/linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/

gv100.c
     47  nvkm_wo32(chan->inst, 0x008, lower_32_bits(userd));   in gv100_chan_ramfc_write()
     51  nvkm_wo32(chan->inst, 0x048, lower_32_bits(offset));   in gv100_chan_ramfc_write()
    102  nvkm_wo32(chan->inst, 0x210, lower_32_bits(addr));   in gv100_ectx_bind()
    122  nvkm_wo32(chan->inst, 0x220, lower_32_bits(bar2));   in gv100_ectx_ce_bind()
    188  nvkm_wo32(memory, offset + 0x0, lower_32_bits(user) | chan->runq << 1);   in gv100_runl_insert_chan()
    190  nvkm_wo32(memory, offset + 0x8, lower_32_bits(inst) | chan->id);   in gv100_runl_insert_chan()

/linux/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/

gm20b.c
     73  hdr.code_dma_base = lower_32_bits((addr + adjust) >> 8);   in gm20b_pmu_acr_bld_patch()
     76  hdr.data_dma_base = lower_32_bits((addr + adjust) >> 8);   in gm20b_pmu_acr_bld_patch()
     79  hdr.overlay_dma_base = lower_32_bits((addr + adjust) << 8);   in gm20b_pmu_acr_bld_patch()
     95  .code_dma_base = lower_32_bits(code),   in gm20b_pmu_acr_bld_write()
     99  .data_dma_base = lower_32_bits(data),   in gm20b_pmu_acr_bld_write()
    101  .overlay_dma_base = lower_32_bits(code),   in gm20b_pmu_acr_bld_write()

/linux/drivers/pci/controller/

pci-xgene.c
    291  val = (val32 & 0x0000ffff) | (lower_32_bits(mask) << 16);   in xgene_pcie_set_ib_mask()
    295  val = (val32 & 0xffff0000) | (lower_32_bits(mask) >> 16);   in xgene_pcie_set_ib_mask()
    388  xgene_pcie_writel(port, offset, lower_32_bits(cpu_addr));   in xgene_pcie_setup_ob_reg()
    390  xgene_pcie_writel(port, offset + 0x08, lower_32_bits(mask));   in xgene_pcie_setup_ob_reg()
    392  xgene_pcie_writel(port, offset + 0x10, lower_32_bits(pci_addr));   in xgene_pcie_setup_ob_reg()
    400  xgene_pcie_writel(port, CFGBARL, lower_32_bits(addr));   in xgene_pcie_setup_cfg_reg()
    449  xgene_pcie_writel(port, pim_reg, lower_32_bits(pim));   in xgene_pcie_setup_pims()
    452  xgene_pcie_writel(port, pim_reg + 0x10, lower_32_bits(size));   in xgene_pcie_setup_pims()
    515  xgene_pcie_writel(port, IR2MSK, lower_32_bits(mask));   in xgene_pcie_setup_ib_reg()
    521  xgene_pcie_writel(port, IR3MSKL, lower_32_bits(mask));   in xgene_pcie_setup_ib_reg()

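The first two pci-xgene.c hits are a slightly different use of the helper: the low 32 bits of a mask are spread across two registers 16 bits at a time, with a read-modify-write that preserves the other half of each register. The sketch below reproduces only that bit manipulation; the two-entry register array and index assignments are invented for illustration, and the real register offsets and meaning live in pci-xgene.c.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))

/* Two fake registers standing in for the bridge's config space. */
static uint32_t regs[2];

static uint32_t demo_readl(int idx)              { return regs[idx]; }
static void     demo_writel(int idx, uint32_t v) { regs[idx] = v; }

/*
 * Spread the low 32 bits of a mask across two registers, 16 bits each,
 * read-modify-writing so the other half of each register is preserved,
 * matching the shape of the two xgene_pcie_set_ib_mask() lines above.
 */
static void demo_set_ib_mask(uint64_t mask)
{
	uint32_t val32, val;

	val32 = demo_readl(0);
	val = (val32 & 0x0000ffff) | (lower_32_bits(mask) << 16);
	demo_writel(0, val);

	val32 = demo_readl(1);
	val = (val32 & 0xffff0000) | (lower_32_bits(mask) >> 16);
	demo_writel(1, val);
}

int main(void)
{
	regs[0] = 0x1111aaaa;   /* low half must survive the update */
	regs[1] = 0xbbbb2222;   /* high half must survive the update */

	demo_set_ib_mask(0x0000000012345678ULL);

	printf("reg0 = 0x%08" PRIx32 "\n", regs[0]);   /* expect 0x5678aaaa */
	printf("reg1 = 0x%08" PRIx32 "\n", regs[1]);   /* expect 0xbbbb1234 */
	return 0;
}
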
/linux/drivers/media/pci/pt3/

pt3_dma.c
     52  iowrite32(lower_32_bits(adap->desc_buf[0].b_addr),   in pt3_start_dma()
    184  d->next_l = lower_32_bits(desc_addr);   in pt3_alloc_dmabuf()
    190  d->addr_l = lower_32_bits(data_addr);   in pt3_alloc_dmabuf()
    195  d->next_l = lower_32_bits(desc_addr);   in pt3_alloc_dmabuf()
    204  d->next_l = lower_32_bits(desc_addr);   in pt3_alloc_dmabuf()

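pt3_dma.c stores bus addresses of DMA data buffers and of the next descriptor in a chain as lo/hi word pairs inside each descriptor. Below is a sketch of building such a chain with made-up bus addresses; the field names loosely follow the snippet (addr_l, next_l), while addr_h, next_h and the size field are assumed counterparts for illustration, not the verified pt3 descriptor layout.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))
#define upper_32_bits(n) ((uint32_t)(((n) >> 16) >> 16))

/* Illustrative descriptor: a data-buffer bus address plus a link to the next
 * descriptor, each kept as a lo/hi pair of 32-bit words. */
struct demo_desc {
	uint32_t addr_l, addr_h;   /* data buffer bus address */
	uint32_t size;
	uint32_t next_l, next_h;   /* bus address of the next descriptor */
};

int main(void)
{
	/* Pretend bus addresses of the descriptor table and the data buffers. */
	const uint64_t desc_base = 0x00000001f0000000ULL;
	const uint64_t data_base = 0x00000002c0000000ULL;
	struct demo_desc chain[4];

	for (int i = 0; i < 4; i++) {
		uint64_t data_addr = data_base + (uint64_t)i * 4096;
		/* Link each descriptor to the next; the last one wraps around. */
		uint64_t next_addr = desc_base +
			(uint64_t)((i + 1) % 4) * sizeof(struct demo_desc);

		chain[i].addr_l = lower_32_bits(data_addr);
		chain[i].addr_h = upper_32_bits(data_addr);
		chain[i].size   = 4096;
		chain[i].next_l = lower_32_bits(next_addr);
		chain[i].next_h = upper_32_bits(next_addr);
	}

	for (int i = 0; i < 4; i++)
		printf("desc[%d]: data=%08" PRIx32 ":%08" PRIx32
		       " next=%08" PRIx32 ":%08" PRIx32 "\n", i,
		       chain[i].addr_h, chain[i].addr_l,
		       chain[i].next_h, chain[i].next_l);
	return 0;
}
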
/linux/drivers/gpu/drm/nouveau/nvkm/engine/gr/

gm20b.c
     41  hdr.code_dma_base = lower_32_bits((addr + adjust) >> 8);   in gm20b_gr_acr_bld_patch()
     44  hdr.data_dma_base = lower_32_bits((addr + adjust) >> 8);   in gm20b_gr_acr_bld_patch()
     60  .code_dma_base = lower_32_bits(code),   in gm20b_gr_acr_bld_write()
     64  .data_dma_base = lower_32_bits(data),   in gm20b_gr_acr_bld_write()