/linux/drivers/gpu/drm/xe/compat-i915-headers/
intel_uncore.h
     32  return xe_mmio_read32(__compat_uncore_to_gt(uncore), reg);  in intel_uncore_read()
     60  upper = xe_mmio_read32(__compat_uncore_to_gt(uncore), upper_reg);  in intel_uncore_read64_2x32()
     63  lower = xe_mmio_read32(__compat_uncore_to_gt(uncore), lower_reg);  in intel_uncore_read64_2x32()
     64  upper = xe_mmio_read32(__compat_uncore_to_gt(uncore), upper_reg);  in intel_uncore_read64_2x32()
     75  xe_mmio_read32(__compat_uncore_to_gt(uncore), reg);  in intel_uncore_posting_read()
    131  return xe_mmio_read32(__compat_uncore_to_gt(uncore), reg);  in intel_uncore_read_fw()
    147  return xe_mmio_read32(__compat_uncore_to_gt(uncore), reg);  in intel_uncore_read_notrace()

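These hits are the compat shim that lets unmodified i915 display code run on xe: every uncore read is forwarded to xe_mmio_read32() on the GT behind the fake uncore. A minimal sketch of that wrapper, assuming the XE_REG() and i915_mmio_reg_offset() helpers seen elsewhere in the tree; treat the exact shape as illustrative, not the verified header contents.

/*
 * Sketch of the compat wrapper pattern suggested by the hits above:
 * translate the i915 register handle into a struct xe_reg and read it
 * on the GT derived from the compat uncore.
 */
static inline u32 intel_uncore_read(struct intel_uncore *uncore,
				    i915_reg_t i915_reg)
{
	struct xe_reg reg = XE_REG(i915_mmio_reg_offset(i915_reg));

	return xe_mmio_read32(__compat_uncore_to_gt(uncore), reg);
}
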
/linux/drivers/gpu/drm/xe/
xe_irq.c
     35  u32 val = xe_mmio_read32(mmio, reg);  in assert_iir_is_zero()
     44  xe_mmio_read32(mmio, reg);  in assert_iir_is_zero()
     46  xe_mmio_read32(mmio, reg);  in assert_iir_is_zero()
     67  xe_mmio_read32(mmio, IMR(irqregs));  in unmask_and_enable()
     77  xe_mmio_read32(mmio, IMR(irqregs));  in mask_and_disable()
     83  xe_mmio_read32(mmio, IIR(irqregs));  in mask_and_disable()
     85  xe_mmio_read32(mmio, IIR(irqregs));  in mask_and_disable()
    100  return xe_mmio_read32(mmio, GFX_MSTR_IRQ);  in xelp_intr_disable()
    112  iir = xe_mmio_read32(mmio, IIR(GU_MISC_IRQ_OFFSET));  in gu_misc_irq_ack()
    125  xe_mmio_read32(mmio, GFX_MSTR_IRQ);  in xelp_intr_enable()
    [all …]

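Several of these reads are posting reads: reading the register back forces the preceding write to land before the next step. A hedged sketch of the mask-and-disable sequence implied by the hits, with the IMR()/IIR() macros, the irqregs offset, and the double IIR clear taken on trust from the hit context rather than from the verified source:

static void mask_and_disable_sketch(struct xe_gt *mmio, u32 irqregs)
{
	/* Mask every interrupt, then posting-read to flush the write. */
	xe_mmio_write32(mmio, IMR(irqregs), ~0);
	xe_mmio_read32(mmio, IMR(irqregs));

	/* IIR can hold a second event behind the live one: clear it twice. */
	xe_mmio_write32(mmio, IIR(irqregs), ~0);
	xe_mmio_read32(mmio, IIR(irqregs));
	xe_mmio_write32(mmio, IIR(irqregs), ~0);
	xe_mmio_read32(mmio, IIR(irqregs));
}
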
xe_guc_pc.c
    337  reg = xe_mmio_read32(gt, MTL_MPE_FREQUENCY);  in mtl_update_rpe_value()
    339  reg = xe_mmio_read32(gt, MTL_GT_RPE_FREQUENCY);  in mtl_update_rpe_value()
    356  reg = xe_mmio_read32(gt, PVC_RP_STATE_CAP);  in tgl_update_rpe_value()
    358  reg = xe_mmio_read32(gt, FREQ_INFO_REC);  in tgl_update_rpe_value()
    395  freq = xe_mmio_read32(gt, MTL_MIRROR_TARGET_WP1);  in xe_guc_pc_get_act_freq()
    398  freq = xe_mmio_read32(gt, GT_PERF_STATUS);  in xe_guc_pc_get_act_freq()
    428  *freq = xe_mmio_read32(gt, RPNSWREQ);  in xe_guc_pc_get_cur_freq()
    615  reg = xe_mmio_read32(gt, MTL_MIRROR_TARGET_WP1);  in xe_guc_pc_c_status()
    618  reg = xe_mmio_read32(gt, GT_CORE_STATUS);  in xe_guc_pc_c_status()
    641  reg = xe_mmio_read32(gt, GT_GFX_RC6);  in xe_guc_pc_rc6_residency()
    [all …]

xe_gt_clock.c
     20  u32 ts_override = xe_mmio_read32(gt, TIMESTAMP_OVERRIDE);  in read_reference_ts_freq()
     60  u32 ctc_reg = xe_mmio_read32(gt, CTC_MODE);  in xe_gt_clock_init()
     69  u32 c0 = xe_mmio_read32(gt, RPM_CONFIG0);  in xe_gt_clock_init()

xe_mmio.c
    238  u32 xe_mmio_read32(struct xe_gt *gt, struct xe_reg reg)  in xe_mmio_read32() function
    261  old = xe_mmio_read32(gt, reg);  in xe_mmio_rmw32()
    274  reg_val = xe_mmio_read32(gt, reg);  in xe_mmio_write32_and_verify()
    321  oldudw = xe_mmio_read32(gt, reg_udw);  in xe_mmio_read64_2x32()
    323  ldw = xe_mmio_read32(gt, reg);  in xe_mmio_read64_2x32()
    324  udw = xe_mmio_read32(gt, reg_udw);  in xe_mmio_read64_2x32()
    349  read = xe_mmio_read32(gt, reg);  in __xe_mmio_wait32()
    375  read = xe_mmio_read32(gt, reg);  in __xe_mmio_wait32()

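Lines 321-324 are the stable-upper-dword dance for reading a 64-bit counter through two 32-bit registers: sample the upper half, read the lower half, re-read the upper half, and retry if it rolled over in between. A sketch of that loop, assuming xe_mmio_read64_2x32() works roughly this way; names are placeholders, not the verified implementation.

static u64 read64_2x32_sketch(struct xe_gt *gt, struct xe_reg lower,
			      struct xe_reg upper)
{
	u32 oldudw, ldw, udw;

	oldudw = xe_mmio_read32(gt, upper);
	do {
		udw = oldudw;
		ldw = xe_mmio_read32(gt, lower);
		oldudw = xe_mmio_read32(gt, upper);
	} while (udw != oldudw);	/* retry if the upper dword changed */

	return (u64)udw << 32 | ldw;
}
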
xe_gsc.c
    182  return xe_mmio_read32(gt, HECI_FWSTS1(MTL_GSC_HECI1_BASE)) &  in gsc_fw_is_loaded()
    333  er_status = xe_mmio_read32(gt, GSCI_TIMER_STATUS) & GSCI_TIMER_STATUS_VALUE;  in gsc_er_complete()
    616  xe_mmio_read32(gt, HECI_FWSTS1(MTL_GSC_HECI1_BASE)),  in xe_gsc_print_info()
    617  xe_mmio_read32(gt, HECI_FWSTS2(MTL_GSC_HECI1_BASE)),  in xe_gsc_print_info()
    618  xe_mmio_read32(gt, HECI_FWSTS3(MTL_GSC_HECI1_BASE)),  in xe_gsc_print_info()
    619  xe_mmio_read32(gt, HECI_FWSTS4(MTL_GSC_HECI1_BASE)),  in xe_gsc_print_info()
    620  xe_mmio_read32(gt, HECI_FWSTS5(MTL_GSC_HECI1_BASE)),  in xe_gsc_print_info()
    621  xe_mmio_read32(gt, HECI_FWSTS6(MTL_GSC_HECI1_BASE)));  in xe_gsc_print_info()

xe_wopcm.c
    126  u32 reg_base = xe_mmio_read32(gt, DMA_GUC_WOPCM_OFFSET);  in __wopcm_regs_locked()
    127  u32 reg_size = xe_mmio_read32(gt, GUC_WOPCM_SIZE);  in __wopcm_regs_locked()
    172  xe_mmio_read32(gt, DMA_GUC_WOPCM_OFFSET));  in __wopcm_init_regs()
    175  xe_mmio_read32(gt, GUC_WOPCM_SIZE));  in __wopcm_init_regs()

xe_pcode.c
     47  err = xe_mmio_read32(tile->primary_gt, PCODE_MAILBOX) & PCODE_ERROR_MASK;  in pcode_mailbox_status()
     67  if ((xe_mmio_read32(mmio, PCODE_MAILBOX) & PCODE_READY) != 0)  in __pcode_mailbox_rw()
     80  *data0 = xe_mmio_read32(mmio, PCODE_DATA0);  in __pcode_mailbox_rw()
     82  *data1 = xe_mmio_read32(mmio, PCODE_DATA1);  in __pcode_mailbox_rw()

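The pcode hits follow a simple mailbox handshake: refuse to start while a previous request still holds the mailbox, read the reply data registers once the firmware finishes, then check the error field. A reduced sketch under those assumptions, with the request write and the ready-poll elided and the error-code mapping invented for illustration:

static int pcode_read_sketch(struct xe_gt *mmio, u32 *data0, u32 *data1)
{
	/* A previous transaction still owns the mailbox. */
	if ((xe_mmio_read32(mmio, PCODE_MAILBOX) & PCODE_READY) != 0)
		return -EAGAIN;

	/* ... write the request and wait for PCODE_READY to clear ... */

	*data0 = xe_mmio_read32(mmio, PCODE_DATA0);
	if (data1)
		*data1 = xe_mmio_read32(mmio, PCODE_DATA1);

	return xe_mmio_read32(mmio, PCODE_MAILBOX) & PCODE_ERROR_MASK ? -EIO : 0;
}
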
xe_gt_topology.c
     28  fuse_val[i] = xe_mmio_read32(gt, va_arg(argp, struct xe_reg));  in load_dss_mask()
     38  u32 reg_val = xe_mmio_read32(gt, XELP_EU_ENABLE);  in load_eu_mask()
    130  u32 fuse3 = xe_mmio_read32(gt, MIRROR_FUSE3);  in load_l3_bank_mask()
    144  u32 fuse4 = xe_mmio_read32(gt, XEHP_FUSE4);  in load_l3_bank_mask()

xe_guc.c
    444  guc_status = xe_mmio_read32(gt, GUC_STATUS);  in xe_guc_reset()
    660  xe_mmio_read32(gt, GUC_HEADER_INFO));  in guc_wait_ucode()
    675  xe_mmio_read32(gt, SOFT_SCRATCH(13)));  in guc_wait_ucode()
    827  msg = xe_mmio_read32(gt, SOFT_SCRATCH(15));  in guc_handle_mmio_msg()
    952  xe_mmio_read32(gt, MED_VF_SW_FLAG(LAST_INDEX));  in xe_guc_mmio_send_recv()
    957  xe_mmio_read32(gt, VF_SW_FLAG(LAST_INDEX));  in xe_guc_mmio_send_recv()
    972  header = xe_mmio_read32(gt, reply_reg);  in xe_guc_mmio_send_recv()
   1035  response_buf[i] = xe_mmio_read32(gt, reply_reg);  in xe_guc_mmio_send_recv()
   1158  status = xe_mmio_read32(gt, GUC_STATUS);  in xe_guc_print_info()
   1173  i, xe_mmio_read32(gt, SOFT_SCRATCH(i)));  in xe_guc_print_info()
    [all …]

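The hit at line 827 is the GuC-to-host notification path: an out-of-band message is left in a scratch register and the driver snapshots it. A hedged sketch of that step; the write-back-to-zero acknowledgement and the omission of the interesting-bits mask are assumptions, not taken from the hits above.

static u32 guc_handle_mmio_msg_sketch(struct xe_gt *gt)
{
	/* Snapshot the notification the GuC left for the host ... */
	u32 msg = xe_mmio_read32(gt, SOFT_SCRATCH(15));

	/* ... and clear the register so the next event is not lost. */
	xe_mmio_write32(gt, SOFT_SCRATCH(15), 0);

	return msg;
}
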
xe_hwmon.c
    169  reg_val = xe_mmio_read32(mmio, rapl_limit);  in xe_hwmon_power_max_read()
    205  reg_val = xe_mmio_read32(mmio, rapl_limit);  in xe_hwmon_power_max_write()
    234  reg_val = xe_mmio_read32(mmio, reg);  in xe_hwmon_power_rated_max_read()
    266  reg_val = xe_mmio_read32(mmio, xe_hwmon_get_reg(hwmon, REG_PKG_ENERGY_STATUS,  in xe_hwmon_energy_get()
    294  r = xe_mmio_read32(mmio, xe_hwmon_get_reg(hwmon, REG_PKG_RAPL_LIMIT, sensor_index));  in xe_hwmon_power_max_interval_show()
    504  reg_val = xe_mmio_read32(mmio, xe_hwmon_get_reg(hwmon, REG_GT_PERF_STATUS, channel));  in xe_hwmon_get_voltage()
    797  val_sku_unit = xe_mmio_read32(mmio, pkg_power_sku_unit);  in xe_hwmon_get_preregistration_info()

xe_mocs.c
    281  reg_val = xe_mmio_read32(gt, XELP_LNCFCMOCS(i));  in xelp_lncf_dump()
    313  reg_val = xe_mmio_read32(gt, XELP_GLOBAL_MOCS(i));  in xelp_mocs_dump()
    386  reg_val = xe_mmio_read32(gt, XELP_LNCFCMOCS(i));  in xehp_lncf_dump()
    431  reg_val = xe_mmio_read32(gt, XELP_LNCFCMOCS(i));  in pvc_mocs_dump()
    513  reg_val = xe_mmio_read32(gt, XELP_GLOBAL_MOCS(i));  in mtl_mocs_dump()
    556  reg_val = xe_mmio_read32(gt, XELP_GLOBAL_MOCS(i));  in xe2_mocs_dump()

xe_hw_engine.c
    318  return xe_mmio_read32(hwe->gt, reg);  in xe_hw_engine_mmio_read32()
    357  return xe_mmio_read32(hwe->gt, XEHP_FUSE4) & CFEG_WMTP_DISABLE;  in xe_rtp_cfeg_wmtp_disabled()
    615  media_fuse = xe_mmio_read32(gt, GT_VEBOX_VDBOX_DISABLE);  in read_media_fuses()
    660  bcs_mask = xe_mmio_read32(gt, MIRROR_FUSE3);  in read_copy_fuses()
    707  ccs_mask = xe_mmio_read32(gt, XEHP_FUSE4);  in read_compute_fuses_from_reg()
    823  xe_mmio_read32(gt, SC_INSTDONE);  in xe_hw_engine_snapshot_instdone_capture()
    825  xe_mmio_read32(gt, SC_INSTDONE_EXTRA);  in xe_hw_engine_snapshot_instdone_capture()
    827  xe_mmio_read32(gt, SC_INSTDONE_EXTRA2);  in xe_hw_engine_snapshot_instdone_capture()
    962  snapshot->reg.rcu_mode = xe_mmio_read32(hwe->gt, RCU_MODE);  in xe_hw_engine_snapshot_capture()

xe_pat.c
    189  u32 pat = xe_mmio_read32(gt, XE_REG(_PAT_INDEX(i)));  in xelp_dump()
    281  pat = xe_mmio_read32(gt, XE_REG(_PAT_INDEX(i)));  in xelpg_dump()
    339  pat = xe_mmio_read32(gt, XE_REG(_PAT_INDEX(i)));  in xe2_dump()
    358  pat = xe_mmio_read32(gt, XE_REG(_PAT_PTA));  in xe2_dump()

xe_gt_throttle.c
     44  reg = xe_mmio_read32(gt, MTL_MEDIA_PERF_LIMIT_REASONS);  in xe_gt_throttle_get_limit_reasons()
     46  reg = xe_mmio_read32(gt, GT0_PERF_LIMIT_REASONS);  in xe_gt_throttle_get_limit_reasons()

xe_execlist.c
     81  xe_mmio_read32(gt, RING_HWS_PGA(hwe->mmio_base));  in __start_lrc()
    171  lo = xe_mmio_read32(gt, RING_EXECLIST_STATUS_LO(hwe->mmio_base));  in read_execlist_status()
    172  hi = xe_mmio_read32(gt, RING_EXECLIST_STATUS_HI(hwe->mmio_base));  in read_execlist_status()

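read_execlist_status() stitches a 64-bit execlist status out of the LO/HI pair; unlike the xe_mmio.c counter helper above, the hits show no re-read loop, presumably because the two halves are not a free-running counter. A sketch under that assumption:

static u64 read_execlist_status_sketch(struct xe_gt *gt,
				       struct xe_hw_engine *hwe)
{
	u32 lo = xe_mmio_read32(gt, RING_EXECLIST_STATUS_LO(hwe->mmio_base));
	u32 hi = xe_mmio_read32(gt, RING_EXECLIST_STATUS_HI(hwe->mmio_base));

	return (u64)hi << 32 | lo;
}
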
xe_mmio.h
     20  u32 xe_mmio_read32(struct xe_gt *gt, struct xe_reg reg);

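The prototype above is the whole contract: registers are passed by value as struct xe_reg. A minimal, hypothetical usage sketch, assuming the XE_REG() helper is reachable through xe_mmio.h; the 0x1234 offset is made up purely for illustration.

#include "xe_mmio.h"

/* Hypothetical register for illustration only; not a real xe register. */
#define EXAMPLE_SCRATCH		XE_REG(0x1234)

static u32 read_example_scratch(struct xe_gt *gt)
{
	return xe_mmio_read32(gt, EXAMPLE_SCRATCH);
}
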
xe_huc.c
    232  return xe_mmio_read32(gt, huc_auth_modes[type].reg) & huc_auth_modes[type].val;  in xe_huc_is_authenticated()
    311  xe_mmio_read32(gt, HUC_KERNEL_LOAD_INFO));  in xe_huc_print_info()

xe_sriov.c
     38  u32 value = xe_mmio_read32(xe_root_mmio_gt(xe), VF_CAP_REG);  in test_is_vf()

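The single xe_sriov.c hit is the VF probe: whether the driver runs as an SR-IOV virtual function is decided by one capability read on the root GT. A sketch of that predicate; the VF_CAP bit name is an assumption standing in for whatever mask the real code tests.

static bool test_is_vf_sketch(struct xe_device *xe)
{
	u32 value = xe_mmio_read32(xe_root_mmio_gt(xe), VF_CAP_REG);

	return !!(value & VF_CAP);	/* assumed capability bit */
}
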
xe_query.c
     96  upper = xe_mmio_read32(gt, upper_reg);  in __read_timestamps()
    100  lower = xe_mmio_read32(gt, lower_reg);  in __read_timestamps()
    103  upper = xe_mmio_read32(gt, upper_reg);  in __read_timestamps()

xe_reg_sr.c
    167  xe_mmio_read32(gt, reg)) & (~entry->clr_bits);  in apply_one_mmio()

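apply_one_mmio() here, like the emit_wa_job() hit in xe_gt.c further down, is a clear/set read-modify-write built on xe_mmio_read32(). A sketch of that step with the entry fields spelled out as plain parameters; this is an illustration of the pattern, not the verified save/restore code.

static void apply_clr_set_sketch(struct xe_gt *gt, struct xe_reg reg,
				 u32 clr_bits, u32 set_bits)
{
	/* Drop the bits the entry clears, or in the bits it sets. */
	u32 val = xe_mmio_read32(gt, reg) & ~clr_bits;

	val |= set_bits;
	xe_mmio_write32(gt, reg, val);
}
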
xe_ttm_stolen_mgr.c
    122  ggc = xe_mmio_read32(xe_root_mmio_gt(xe), GGC);  in detect_bar2_integrated()

xe_gt.c
    247  xe_mmio_read32(gt, reg)) & (~entry->clr_bits);  in emit_wa_job()
    442  gt->info.gmdid = xe_mmio_read32(gt, GMD_ID);  in gt_fw_domain_init()

xe_vram.c
    172  reg = xe_mmio_read32(gt, MIRROR_FUSE3);  in get_flat_ccs_offset()

/linux/drivers/gpu/drm/xe/tests/
xe_mocs.c
     58  reg_val = xe_mmio_read32(gt, XELP_LNCFCMOCS(i >> 1));  in read_l3cc_table()
     97  reg_val = xe_mmio_read32(gt, XELP_GLOBAL_MOCS(i));  in read_mocs_table()