/linux/drivers/gpu/drm/amd/amdkfd/

kfd_packet_manager_v9.c
    71   packet->sq_shader_tba_hi = upper_32_bits(qpd->tba_addr >> 8);  in pm_map_process_v9()
    75   packet->sq_shader_tma_hi = upper_32_bits(qpd->tma_addr >> 8);  in pm_map_process_v9()
    79   packet->gds_addr_hi = upper_32_bits(qpd->gds_context_area);  in pm_map_process_v9()
    84   upper_32_bits(vm_page_table_base_addr);  in pm_map_process_v9()
    132  packet->sq_shader_tba_hi = upper_32_bits(qpd->tba_addr >> 8);  in pm_map_process_aldebaran()
    134  packet->sq_shader_tma_hi = upper_32_bits(qpd->tma_addr >> 8);  in pm_map_process_aldebaran()
    138  packet->gds_addr_hi = upper_32_bits(qpd->gds_context_area);  in pm_map_process_aldebaran()
    143  upper_32_bits(vm_page_table_base_addr);  in pm_map_process_aldebaran()
    186  packet->ib_base_hi = upper_32_bits(ib);  in pm_runlist_v9()
    213  packet->gws_mask_hi = upper_32_bits(res->gws_mask);  in pm_set_resources_v9()
    [all …]
kfd_mqd_manager_v12.c
    129  m->cp_mqd_base_addr_hi = upper_32_bits(addr);  in init_mqd()
    154  upper_32_bits(q->ctx_save_restore_area_address);  in init_mqd()
    195  m->cp_hqd_pq_base_hi = upper_32_bits((uint64_t)q->queue_address >> 8);  in update_mqd()
    198  m->cp_hqd_pq_rptr_report_addr_hi = upper_32_bits((uint64_t)q->read_ptr);  in update_mqd()
    200  m->cp_hqd_pq_wptr_poll_addr_hi = upper_32_bits((uint64_t)q->write_ptr);  in update_mqd()
    222  upper_32_bits(q->eop_ring_buffer_address >> 8);  in update_mqd()
    337  m->sdmax_rlcx_rb_base_hi = upper_32_bits(q->queue_address >> 8);  in update_mqd_sdma()
    339  m->sdmax_rlcx_rb_rptr_addr_hi = upper_32_bits((uint64_t)q->read_ptr);  in update_mqd_sdma()
    341  m->sdmax_rlcx_rb_wptr_poll_addr_hi = upper_32_bits((uint64_t)q->write_ptr);  in update_mqd_sdma()
kfd_mqd_manager_vi.c
    117  m->cp_mqd_base_addr_hi = upper_32_bits(addr);  in init_mqd()
    131  m->compute_tba_hi = upper_32_bits(q->tba_addr >> 8);  in init_mqd()
    133  m->compute_tma_hi = upper_32_bits(q->tma_addr >> 8);  in init_mqd()
    144  upper_32_bits(q->ctx_save_restore_area_address);  in init_mqd()
    185  m->cp_hqd_pq_base_hi = upper_32_bits((uint64_t)q->queue_address >> 8);  in __update_mqd()
    188  m->cp_hqd_pq_rptr_report_addr_hi = upper_32_bits((uint64_t)q->read_ptr);  in __update_mqd()
    190  m->cp_hqd_pq_wptr_poll_addr_hi = upper_32_bits((uint64_t)q->write_ptr);  in __update_mqd()
    217  upper_32_bits(q->eop_ring_buffer_address >> 8);  in __update_mqd()
    371  m->sdmax_rlcx_rb_base_hi = upper_32_bits(q->queue_address >> 8);  in update_mqd_sdma()
    373  m->sdmax_rlcx_rb_rptr_addr_hi = upper_32_bits((uint64_t)q->read_ptr);  in update_mqd_sdma()
kfd_mqd_manager_v11.c
    162  m->cp_mqd_base_addr_hi = upper_32_bits(addr);  in init_mqd()
    191  upper_32_bits(q->ctx_save_restore_area_address);  in init_mqd()
    232  m->cp_hqd_pq_base_hi = upper_32_bits((uint64_t)q->queue_address >> 8);  in update_mqd()
    235  m->cp_hqd_pq_rptr_report_addr_hi = upper_32_bits((uint64_t)q->read_ptr);  in update_mqd()
    237  m->cp_hqd_pq_wptr_poll_addr_hi = upper_32_bits((uint64_t)q->write_ptr);  in update_mqd()
    259  upper_32_bits(q->eop_ring_buffer_address >> 8);  in update_mqd()
    436  m->sdmax_rlcx_rb_base_hi = upper_32_bits(q->queue_address >> 8);  in update_mqd_sdma()
    438  m->sdmax_rlcx_rb_rptr_addr_hi = upper_32_bits((uint64_t)q->read_ptr);  in update_mqd_sdma()
    440  m->sdmax_rlcx_rb_wptr_poll_addr_hi = upper_32_bits((uint64_t)q->write_ptr);  in update_mqd_sdma()
kfd_mqd_manager_v10.c
    115  m->cp_mqd_base_addr_hi = upper_32_bits(addr);  in init_mqd()
    137  upper_32_bits(q->ctx_save_restore_area_address);  in init_mqd()
    179  m->cp_hqd_pq_base_hi = upper_32_bits((uint64_t)q->queue_address >> 8);  in update_mqd()
    182  m->cp_hqd_pq_rptr_report_addr_hi = upper_32_bits((uint64_t)q->read_ptr);  in update_mqd()
    184  m->cp_hqd_pq_wptr_poll_addr_hi = upper_32_bits((uint64_t)q->write_ptr);  in update_mqd()
    206  upper_32_bits(q->eop_ring_buffer_address >> 8);  in update_mqd()
    377  m->sdmax_rlcx_rb_base_hi = upper_32_bits(q->queue_address >> 8);  in update_mqd_sdma()
    379  m->sdmax_rlcx_rb_rptr_addr_hi = upper_32_bits((uint64_t)q->read_ptr);  in update_mqd_sdma()
kfd_mqd_manager_cik.c
    117  m->cp_mqd_base_addr_hi = upper_32_bits(addr);  in init_mqd()
    193  m->cp_hqd_pq_base_hi = upper_32_bits((uint64_t)q->queue_address >> 8);  in __update_mqd()
    195  m->cp_hqd_pq_rptr_report_addr_hi = upper_32_bits((uint64_t)q->read_ptr);  in __update_mqd()
    237  m->sdma_rlc_rb_base_hi = upper_32_bits(q->queue_address >> 8);  in update_mqd_sdma()
    239  m->sdma_rlc_rb_rptr_addr_hi = upper_32_bits((uint64_t)q->read_ptr);  in update_mqd_sdma()
    353  m->cp_hqd_pq_base_hi = upper_32_bits((uint64_t)q->queue_address >> 8);  in update_mqd_hiq()
    355  m->cp_hqd_pq_rptr_report_addr_hi = upper_32_bits((uint64_t)q->read_ptr);  in update_mqd_hiq()
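All of the amdkfd hits above follow the same convention: a 64-bit address is split into a _lo/_hi register pair, and queue, EOP, and trap-handler base addresses are shifted right by 8 first, which suggests the hardware fields store them in 256-byte units. A minimal standalone sketch of that split; the helper macros mirror the kernel's upper_32_bits()/lower_32_bits(), while the struct and field names are illustrative rather than the real MQD layout:

    #include <stdint.h>
    #include <stdio.h>

    /* Mirrors the kernel helpers; the double 16-bit shift avoids undefined
     * behaviour when the argument happens to be only 32 bits wide. */
    #define upper_32_bits(n) ((uint32_t)(((n) >> 16) >> 16))
    #define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))

    struct demo_mqd {                  /* hypothetical stand-in for an MQD */
        uint32_t cp_hqd_pq_base_lo;
        uint32_t cp_hqd_pq_base_hi;
    };

    int main(void)
    {
        uint64_t queue_address = 0x0000123456789000ULL;
        struct demo_mqd m;

        /* Base is assumed to be programmed in 256-byte units, hence >> 8. */
        m.cp_hqd_pq_base_lo = lower_32_bits(queue_address >> 8);
        m.cp_hqd_pq_base_hi = upper_32_bits(queue_address >> 8);

        printf("lo=0x%08x hi=0x%08x\n",
               m.cp_hqd_pq_base_lo, m.cp_hqd_pq_base_hi);
        return 0;
    }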
/linux/drivers/gpu/drm/radeon/

si_dma.c
    82   ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_copy_pages()
    83   ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;  in si_dma_vm_copy_pages()
    121  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_write_pages()
    133  ib->ptr[ib->length_dw++] = upper_32_bits(value);  in si_dma_vm_write_pages()
    173  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_set_pages()
    177  ib->ptr[ib->length_dw++] = upper_32_bits(value);  in si_dma_vm_set_pages()
    265  radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);  in si_copy_dma()
    266  radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);  in si_copy_dma()
ni_dma.c
    134  radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);  in cayman_dma_ring_ib_execute()
    145  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in cayman_dma_ring_ib_execute()
    222  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);  in cayman_dma_resume()
    330  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in cayman_dma_vm_copy_pages()
    331  ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;  in cayman_dma_vm_copy_pages()
    370  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in cayman_dma_vm_write_pages()
    382  ib->ptr[ib->length_dw++] = upper_32_bits(value);  in cayman_dma_vm_write_pages()
    422  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in cayman_dma_vm_set_pages()
    426  ib->ptr[ib->length_dw++] = upper_32_bits(value);  in cayman_dma_vm_set_pages()
r600_dma.c
    143  upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF);  in r600_dma_resume()
    255  radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xff);  in r600_dma_ring_test()
    295  radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));  in r600_dma_fence_ring_emit()
    322  radeon_ring_write(ring, upper_32_bits(addr) & 0xff);  in r600_dma_semaphore_ring_emit()
    360  ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;  in r600_dma_ib_test()
    415  radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);  in r600_dma_ring_ib_execute()
    426  radeon_ring_write(ring, (ib->length_dw << 16) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in r600_dma_ring_ib_execute()
    478  radeon_ring_write(ring, (((upper_32_bits(dst_offset) & 0xff) << 16) |  in r600_copy_dma()
    479  (upper_32_bits(src_offset) & 0xff)));  in r600_copy_dma()
evergreen_dma.c
    48   radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));  in evergreen_dma_fence_ring_emit()
    78   radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);  in evergreen_dma_ring_ib_execute()
    89   radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in evergreen_dma_ring_ib_execute()
    142  radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);  in evergreen_copy_dma()
    143  radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);  in evergreen_copy_dma()
cik_sdma.c
    145  radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr));  in cik_sdma_ring_ib_execute()
    155  radeon_ring_write(ring, upper_32_bits(ib->gpu_addr));  in cik_sdma_ring_ib_execute()
    208  radeon_ring_write(ring, upper_32_bits(addr));  in cik_sdma_fence_ring_emit()
    237  radeon_ring_write(ring, upper_32_bits(addr));  in cik_sdma_semaphore_ring_emit()
    400  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in cik_sdma_gfx_resume()
    614  radeon_ring_write(ring, upper_32_bits(src_offset));  in cik_copy_dma()
    616  radeon_ring_write(ring, upper_32_bits(dst_offset));  in cik_copy_dma()
    670  radeon_ring_write(ring, upper_32_bits(gpu_addr));  in cik_sdma_ring_test()
    728  ib.ptr[2] = upper_32_bits(gpu_addr);  in cik_sdma_ib_test()
    817  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in cik_sdma_vm_copy_pages()
    [all …]
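The pre-CIK radeon DMA hits mask the upper half with 0xff before emitting it, i.e. only bits 39:32 go into the packet, which points at a 40-bit GPU address space on those parts; the CIK SDMA hits write the full upper dword. A standalone sketch of the masked variant, with a placeholder header dword rather than a real packet opcode:

    #include <stdint.h>
    #include <stdio.h>

    #define upper_32_bits(n) ((uint32_t)(((n) >> 16) >> 16))
    #define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))

    int main(void)
    {
        uint64_t pe = 0x00000089abcdef00ULL;        /* illustrative address */
        uint32_t ib[8];
        unsigned int length_dw = 0;

        ib[length_dw++] = 0x00000000;               /* placeholder header   */
        ib[length_dw++] = lower_32_bits(pe);        /* full low 32 bits     */
        ib[length_dw++] = upper_32_bits(pe) & 0xff; /* bits 39:32 only      */

        printf("dw1=0x%08x dw2=0x%08x\n", ib[1], ib[2]);
        return 0;
    }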
/linux/drivers/pci/controller/mobiveil/

pcie-mobiveil.c
    154  mobiveil_csr_writel(pcie, upper_32_bits(size64),  in program_ib_windows()
    159  mobiveil_csr_writel(pcie, upper_32_bits(cpu_addr),  in program_ib_windows()
    164  mobiveil_csr_writel(pcie, upper_32_bits(pci_addr),  in program_ib_windows()
    195  mobiveil_csr_writel(pcie, upper_32_bits(size64),  in program_ob_windows()
    205  mobiveil_csr_writel(pcie, upper_32_bits(cpu_addr),  in program_ob_windows()
    210  mobiveil_csr_writel(pcie, upper_32_bits(pci_addr),  in program_ob_windows()
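The Mobiveil window setup writes each 64-bit quantity (window size, CPU address, PCI address) as a pair of 32-bit CSR accesses. A sketch of that lower/upper pairing with the MMIO write stubbed out; the register offsets below are invented for the demo, not the controller's real map:

    #include <stdint.h>
    #include <stdio.h>

    #define upper_32_bits(n) ((uint32_t)(((n) >> 16) >> 16))
    #define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))

    /* Stand-in for an MMIO write so the sketch runs in user space. */
    static void csr_writel(uint32_t val, unsigned int off)
    {
        printf("reg 0x%03x <= 0x%08x\n", off, val);
    }

    int main(void)
    {
        uint64_t cpu_addr = 0x0000004000000000ULL;  /* illustrative values */
        uint64_t pci_addr = 0x0000000080000000ULL;
        uint64_t size64   = 0x0000000100000000ULL;  /* 4 GiB window */

        csr_writel(lower_32_bits(size64),   0x100); /* offsets are made up */
        csr_writel(upper_32_bits(size64),   0x104);
        csr_writel(lower_32_bits(cpu_addr), 0x108);
        csr_writel(upper_32_bits(cpu_addr), 0x10c);
        csr_writel(lower_32_bits(pci_addr), 0x110);
        csr_writel(upper_32_bits(pci_addr), 0x114);
        return 0;
    }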
/linux/drivers/gpu/drm/amd/amdgpu/

sdma_v7_0.c
    151  amdgpu_ring_write(ring, upper_32_bits(addr));  in sdma_v7_0_ring_init_cond_exec()
    219  upper_32_bits(ring->wptr << 2));  in sdma_v7_0_ring_set_wptr()
    233  upper_32_bits(ring->wptr << 2));  in sdma_v7_0_ring_set_wptr()
    241  upper_32_bits(ring->wptr << 2));  in sdma_v7_0_ring_set_wptr()
    290  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));  in sdma_v7_0_ring_emit_ib()
    293  amdgpu_ring_write(ring, upper_32_bits(csa_mc_addr));  in sdma_v7_0_ring_emit_ib()
    369  amdgpu_ring_write(ring, upper_32_bits(addr));  in sdma_v7_0_ring_emit_fence()
    380  amdgpu_ring_write(ring, upper_32_bits(addr));  in sdma_v7_0_ring_emit_fence()
    381  amdgpu_ring_write(ring, upper_32_bits(seq));  in sdma_v7_0_ring_emit_fence()
    503  …WREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_RPTR_HI), upper_32_bits(r…  in sdma_v7_0_gfx_resume_instance()
    [all …]
sdma_v6_0.c
    151  amdgpu_ring_write(ring, upper_32_bits(addr));  in sdma_v6_0_ring_init_cond_exec()
    217  upper_32_bits(ring->wptr << 2));  in sdma_v6_0_ring_set_wptr()
    231  upper_32_bits(ring->wptr << 2));  in sdma_v6_0_ring_set_wptr()
    237  upper_32_bits(ring->wptr << 2));  in sdma_v6_0_ring_set_wptr()
    286  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));  in sdma_v6_0_ring_emit_ib()
    289  amdgpu_ring_write(ring, upper_32_bits(csa_mc_addr));  in sdma_v6_0_ring_emit_ib()
    365  amdgpu_ring_write(ring, upper_32_bits(addr));  in sdma_v6_0_ring_emit_fence()
    376  amdgpu_ring_write(ring, upper_32_bits(addr));  in sdma_v6_0_ring_emit_fence()
    377  amdgpu_ring_write(ring, upper_32_bits(seq));  in sdma_v6_0_ring_emit_fence()
    511  …WREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_RPTR_HI), upper_32_bits(r…  in sdma_v6_0_gfx_resume_instance()
    [all …]
sdma_v5_2.c
    150  amdgpu_ring_write(ring, upper_32_bits(addr));  in sdma_v5_2_ring_init_cond_exec()
    223  upper_32_bits(ring->wptr << 2));  in sdma_v5_2_ring_set_wptr()
    238  upper_32_bits(ring->wptr << 2));  in sdma_v5_2_ring_set_wptr()
    247  upper_32_bits(ring->wptr << 2));  in sdma_v5_2_ring_set_wptr()
    251  upper_32_bits(ring->wptr << 2));  in sdma_v5_2_ring_set_wptr()
    300  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));  in sdma_v5_2_ring_emit_ib()
    303  amdgpu_ring_write(ring, upper_32_bits(csa_mc_addr));  in sdma_v5_2_ring_emit_ib()
    383  amdgpu_ring_write(ring, upper_32_bits(addr));  in sdma_v5_2_ring_emit_fence()
    394  amdgpu_ring_write(ring, upper_32_bits(addr));  in sdma_v5_2_ring_emit_fence()
    395  amdgpu_ring_write(ring, upper_32_bits(seq));  in sdma_v5_2_ring_emit_fence()
    [all …]
amdgpu_cper.c
    298  reg_data.status_hi = upper_32_bits(bank->regs[ACA_REG_IDX_STATUS]);  in amdgpu_cper_generate_ue_record()
    300  reg_data.addr_hi = upper_32_bits(bank->regs[ACA_REG_IDX_ADDR]);  in amdgpu_cper_generate_ue_record()
    302  reg_data.ipid_hi = upper_32_bits(bank->regs[ACA_REG_IDX_IPID]);  in amdgpu_cper_generate_ue_record()
    304  reg_data.synd_hi = upper_32_bits(bank->regs[ACA_REG_IDX_SYND]);  in amdgpu_cper_generate_ue_record()
    392  reg_data[CPER_ACA_REG_CTL_HI] = upper_32_bits(bank->regs[ACA_REG_IDX_CTL]);  in amdgpu_cper_generate_ce_records()
    394  reg_data[CPER_ACA_REG_STATUS_HI] = upper_32_bits(bank->regs[ACA_REG_IDX_STATUS]);  in amdgpu_cper_generate_ce_records()
    396  reg_data[CPER_ACA_REG_ADDR_HI] = upper_32_bits(bank->regs[ACA_REG_IDX_ADDR]);  in amdgpu_cper_generate_ce_records()
    398  reg_data[CPER_ACA_REG_MISC0_HI] = upper_32_bits(bank->regs[ACA_REG_IDX_MISC0]);  in amdgpu_cper_generate_ce_records()
    400  reg_data[CPER_ACA_REG_CONFIG_HI] = upper_32_bits(bank->regs[ACA_REG_IDX_CONFIG]);  in amdgpu_cper_generate_ce_records()
    402  reg_data[CPER_ACA_REG_IPID_HI] = upper_32_bits(bank->regs[ACA_REG_IDX_IPID]);  in amdgpu_cper_generate_ce_records()
    [all …]
sdma_v5_0.c
    310  amdgpu_ring_write(ring, upper_32_bits(addr));  in sdma_v5_0_ring_init_cond_exec()
    383  upper_32_bits(ring->wptr << 2));  in sdma_v5_0_ring_set_wptr()
    397  upper_32_bits(ring->wptr << 2));  in sdma_v5_0_ring_set_wptr()
    403  upper_32_bits(ring->wptr << 2));  in sdma_v5_0_ring_set_wptr()
    452  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));  in sdma_v5_0_ring_emit_ib()
    455  amdgpu_ring_write(ring, upper_32_bits(csa_mc_addr));  in sdma_v5_0_ring_emit_ib()
    533  amdgpu_ring_write(ring, upper_32_bits(addr));  in sdma_v5_0_ring_emit_fence()
    544  amdgpu_ring_write(ring, upper_32_bits(addr));  in sdma_v5_0_ring_emit_fence()
    545  amdgpu_ring_write(ring, upper_32_bits(seq));  in sdma_v5_0_ring_emit_fence()
    718  …WREG32_SOC15_IP(GC, sdma_v5_0_get_reg_offset(adev, i, mmSDMA0_GFX_RB_RPTR_HI), upper_32_bits(ring-…  in sdma_v5_0_gfx_resume_instance()
    [all …]
umsch_mm_v4_0.c
    92   upper_32_bits(adev->umsch_mm.irq_start_addr >> 2));  in umsch_mm_v4_0_load_microcode()
    97   upper_32_bits(adev->umsch_mm.uc_start_addr >> 2));  in umsch_mm_v4_0_load_microcode()
    104  WREG32_SOC15_UMSCH(regVCN_MES_LOCAL_INSTR_MASK_HI, upper_32_bits(data));  in umsch_mm_v4_0_load_microcode()
    109  WREG32_SOC15_UMSCH(regVCN_MES_IC_BASE_HI, upper_32_bits(data));  in umsch_mm_v4_0_load_microcode()
    116  upper_32_bits(adev->umsch_mm.data_start_addr));  in umsch_mm_v4_0_load_microcode()
    125  WREG32_SOC15_UMSCH(regVCN_MES_DC_BASE_HI, upper_32_bits(data));  in umsch_mm_v4_0_load_microcode()
    148  WREG32_SOC15_UMSCH(regVCN_MES_GP0_HI, upper_32_bits(umsch->log_gpu_addr));  in umsch_mm_v4_0_load_microcode()
    229  WREG32_SOC15(VCN, 0, regVCN_UMSCH_RB_BASE_HI, upper_32_bits(ring->gpu_addr));  in umsch_mm_v4_0_ring_start()
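Several of the SDMA hits split ring->wptr << 2 rather than the raw write pointer: the ring tracks its position in dwords, and the shift presumably converts it to the byte offset the WPTR_LO/WPTR_HI register pair expects before the halves are taken. That step in isolation:

    #include <stdint.h>
    #include <stdio.h>

    #define upper_32_bits(n) ((uint32_t)(((n) >> 16) >> 16))
    #define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))

    int main(void)
    {
        uint64_t wptr = 0x0000000040000001ULL;  /* write pointer in dwords */
        uint64_t wptr_bytes = wptr << 2;        /* dwords -> bytes         */

        printf("WPTR_LO=0x%08x WPTR_HI=0x%08x\n",
               lower_32_bits(wptr_bytes), upper_32_bits(wptr_bytes));
        return 0;
    }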
/linux/drivers/iio/test/

iio-test-format.c
    212  values[1] = upper_32_bits(value);  in iio_test_iio_format_value_integer_64()
    218  values[1] = upper_32_bits(value);  in iio_test_iio_format_value_integer_64()
    224  values[1] = upper_32_bits(value);  in iio_test_iio_format_value_integer_64()
    230  values[1] = upper_32_bits(value);  in iio_test_iio_format_value_integer_64()
    236  values[1] = upper_32_bits(value);  in iio_test_iio_format_value_integer_64()
    242  values[1] = upper_32_bits(value);  in iio_test_iio_format_value_integer_64()
    248  values[1] = upper_32_bits(value);  in iio_test_iio_format_value_integer_64()
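The IIO format test packs a 64-bit test value into a two-element array before handing it to the formatter; the listing shows the upper half going into values[1], and presumably values[0] holds the lower half. A standalone round-trip of that packing, in plain C with no IIO API involved:

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define upper_32_bits(n) ((uint32_t)(((n) >> 16) >> 16))
    #define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))

    int main(void)
    {
        int64_t value = -1234567890123456789LL;
        uint32_t values[2];

        values[0] = lower_32_bits((uint64_t)value);  /* low half first */
        values[1] = upper_32_bits((uint64_t)value);  /* high half next */

        /* Reassemble the halves the way a consumer of the pair would. */
        int64_t out = (int64_t)(((uint64_t)values[1] << 32) | values[0]);
        printf("%" PRId64 "\n", out);   /* prints the original value */
        return 0;
    }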
/linux/drivers/pci/controller/

pci-xgene.c
    296  val = (val32 & 0x0000ffff) | (upper_32_bits(mask) << 16);  in xgene_pcie_set_ib_mask()
    300  val = (val32 & 0xffff0000) | (upper_32_bits(mask) >> 16);  in xgene_pcie_set_ib_mask()
    386  xgene_pcie_writel(port, offset + 0x04, upper_32_bits(cpu_addr));  in xgene_pcie_setup_ob_reg()
    388  xgene_pcie_writel(port, offset + 0x0c, upper_32_bits(mask));  in xgene_pcie_setup_ob_reg()
    390  xgene_pcie_writel(port, offset + 0x14, upper_32_bits(pci_addr));  in xgene_pcie_setup_ob_reg()
    398  xgene_pcie_writel(port, CFGBARH, upper_32_bits(addr));  in xgene_pcie_setup_cfg_reg()
    448  upper_32_bits(pim) | EN_COHERENCY);  in xgene_pcie_setup_pims()
    450  xgene_pcie_writel(port, pim_reg + 0x14, upper_32_bits(size));  in xgene_pcie_setup_pims()
    507  writel(upper_32_bits(cpu_addr), bar_addr + 0x4);  in xgene_pcie_setup_ib_reg()
    517  xgene_pcie_writel(port, IBAR3L + 0x4, upper_32_bits(cpu_addr));  in xgene_pcie_setup_ib_reg()
    [all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/

gm20b.c
    74   hdr.code_dma_base1 = upper_32_bits((addr + adjust) >> 8);  in gm20b_pmu_acr_bld_patch()
    77   hdr.data_dma_base1 = upper_32_bits((addr + adjust) >> 8);  in gm20b_pmu_acr_bld_patch()
    80   hdr.overlay_dma_base1 = upper_32_bits((addr + adjust) >> 8);  in gm20b_pmu_acr_bld_patch()
    104  .code_dma_base1 = upper_32_bits(code),  in gm20b_pmu_acr_bld_write()
    105  .data_dma_base1 = upper_32_bits(data),  in gm20b_pmu_acr_bld_write()
    106  .overlay_dma_base1 = upper_32_bits(code),  in gm20b_pmu_acr_bld_write()
/linux/drivers/net/ethernet/apm/xgene-v2/

ring.c
    28   dma_h = upper_32_bits(next_dma);  in xge_setup_desc()
    40   xge_wr_csr(pdata, DMATXDESCH, upper_32_bits(dma_addr));  in xge_update_tx_desc_addr()
    52   xge_wr_csr(pdata, DMARXDESCH, upper_32_bits(dma_addr));  in xge_update_rx_desc_addr()
/linux/drivers/media/pci/pt3/

pt3_dma.c
    54   iowrite32(upper_32_bits(adap->desc_buf[0].b_addr),  in pt3_start_dma()
    185  d->next_h = upper_32_bits(desc_addr);  in pt3_alloc_dmabuf()
    191  d->addr_h = upper_32_bits(data_addr);  in pt3_alloc_dmabuf()
    196  d->next_h = upper_32_bits(desc_addr);  in pt3_alloc_dmabuf()
    205  d->next_h = upper_32_bits(desc_addr);  in pt3_alloc_dmabuf()
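The pt3 driver stores both the data-buffer address and the bus address of the next descriptor as explicit lo/hi 32-bit fields, the usual way of chaining DMA descriptors when the descriptor words are 32 bits wide. A sketch with an invented descriptor layout, not the pt3 hardware format:

    #include <stdint.h>
    #include <stdio.h>

    #define upper_32_bits(n) ((uint32_t)(((n) >> 16) >> 16))
    #define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))

    struct demo_desc {                /* illustrative layout only */
        uint32_t addr_l, addr_h;      /* data buffer bus address  */
        uint32_t size;
        uint32_t next_l, next_h;      /* next descriptor address  */
    };

    static void fill_desc(struct demo_desc *d, uint64_t data_addr,
                          uint32_t size, uint64_t next_addr)
    {
        d->addr_l = lower_32_bits(data_addr);
        d->addr_h = upper_32_bits(data_addr);
        d->size   = size;
        d->next_l = lower_32_bits(next_addr);
        d->next_h = upper_32_bits(next_addr);
    }

    int main(void)
    {
        struct demo_desc d;

        fill_desc(&d, 0x0000012345678000ULL, 4096, 0x0000012345679000ULL);
        printf("data %08x:%08x next %08x:%08x\n",
               d.addr_h, d.addr_l, d.next_h, d.next_l);
        return 0;
    }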
/linux/drivers/gpu/drm/nouveau/nvkm/engine/gr/

gm20b.c
    42   hdr.code_dma_base1 = upper_32_bits((addr + adjust) >> 8);  in gm20b_gr_acr_bld_patch()
    45   hdr.data_dma_base1 = upper_32_bits((addr + adjust) >> 8);  in gm20b_gr_acr_bld_patch()
    66   .code_dma_base1 = upper_32_bits(code),  in gm20b_gr_acr_bld_write()
    67   .data_dma_base1 = upper_32_bits(data),  in gm20b_gr_acr_bld_write()
/linux/arch/x86/include/asm/

mshyperv.h
    95   u32 input_address_hi = upper_32_bits(input_address);  in hv_do_hypercall()
    97   u32 output_address_hi = upper_32_bits(output_address);  in hv_do_hypercall()
    139  u32 input1_hi = upper_32_bits(input1);  in _hv_do_fast_hypercall8()
    189  u32 input1_hi = upper_32_bits(input1);  in _hv_do_fast_hypercall16()
    191  u32 input2_hi = upper_32_bits(input2);  in _hv_do_fast_hypercall16()
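In the 32-bit Hyper-V hypercall paths, each 64-bit argument (the input/output GPAs, or the fast-call payload) has to be handed over as a pair of 32-bit register values, so the header precomputes *_hi and *_lo halves up front. A sketch of only that preparation step, with the actual hypercall replaced by a stub and the values purely illustrative:

    #include <stdint.h>
    #include <stdio.h>

    #define upper_32_bits(n) ((uint32_t)(((n) >> 16) >> 16))
    #define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))

    /* Stand-in for the real vmcall; just shows what each register holds. */
    static void fake_hypercall(uint32_t ctrl_hi, uint32_t ctrl_lo,
                               uint32_t in_hi, uint32_t in_lo)
    {
        printf("control=%08x:%08x input=%08x:%08x\n",
               ctrl_hi, ctrl_lo, in_hi, in_lo);
    }

    int main(void)
    {
        uint64_t control       = 0x0000000000010008ULL;
        uint64_t input_address = 0x00000001fee00000ULL;

        fake_hypercall(upper_32_bits(control), lower_32_bits(control),
                       upper_32_bits(input_address),
                       lower_32_bits(input_address));
        return 0;
    }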