References to the guc_to_gt() helper across the i915 and xe GPU drivers, grouped by directory. Each match shows the source line number, the matching line, and the enclosing function as reported by the cross-reference search; truncated match lists are marked [all …].

/linux/drivers/gpu/drm/i915/gt/uc/
intel_guc_print.h
  13: gt_##_level(guc_to_gt(_guc), "GUC: " _fmt, ##__VA_ARGS__)
  40: gt_WARN(guc_to_gt(_guc), _cond, "GUC: " _fmt, ##__VA_ARGS__)
  43: gt_WARN_ONCE(guc_to_gt(_guc), _cond, "GUC: " _fmt, ##__VA_ARGS__)
  46: gt_WARN(guc_to_gt(_guc), _cond, "%s(%s)", "guc_WARN_ON", __stringify(_cond))
  49: gt_WARN_ONCE(guc_to_gt(_guc), _cond, "%s(%s)", "guc_WARN_ON_ONCE", __stringify(_cond))

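The guc_printk() wrapper at line 13 routes every GuC message through the gt_* logging helpers: it resolves the GT with guc_to_gt(_guc), pastes the level name into a gt_<level>() call with the ## operator, and prepends a "GUC: " prefix at compile time. A minimal, runnable user-space sketch of that token-pasting pattern follows; the printf-based gt_err()/gt_warn() shims are hypothetical stand-ins, and the real macros also take a _guc device argument that is dropped here to keep the sketch self-contained.

/*
 * User-space sketch of the token-pasting log-wrapper pattern from
 * intel_guc_print.h. gt_err()/gt_warn() below are hypothetical printf
 * shims, not the real i915 helpers.
 */
#include <stdio.h>

#define gt_err(fmt, ...)  fprintf(stderr, "ERR: "  fmt "\n", ##__VA_ARGS__)
#define gt_warn(fmt, ...) fprintf(stderr, "WARN: " fmt "\n", ##__VA_ARGS__)

/* Paste _level into a gt_<level>() call and prepend the subsystem prefix. */
#define guc_printk(_level, _fmt, ...) \
	gt_##_level("GUC: " _fmt, ##__VA_ARGS__)

#define guc_err(_fmt, ...)  guc_printk(err,  _fmt, ##__VA_ARGS__)
#define guc_warn(_fmt, ...) guc_printk(warn, _fmt, ##__VA_ARGS__)

int main(void)
{
	guc_err("CTB send failed (%d)", -5); /* "ERR: GUC: CTB send failed (-5)" */
	guc_warn("firmware not loaded");     /* "WARN: GUC: firmware not loaded" */
	return 0;
}

Both the level token and the "GUC: " prefix are combined during preprocessing, so string literals concatenate at no runtime cost and each call site stays a single line.
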
intel_guc.c
  44: struct intel_gt *gt = guc_to_gt(guc);    in intel_guc_notify()
  66: struct intel_gt *gt = guc_to_gt(guc);    in intel_guc_init_send_regs()
  83: struct intel_gt *gt = guc_to_gt(guc);    in gen9_reset_guc_interrupts()
  94: struct intel_gt *gt = guc_to_gt(guc);    in gen9_enable_guc_interrupts()
  109: struct intel_gt *gt = guc_to_gt(guc);    in gen9_disable_guc_interrupts()
  134: struct intel_gt *gt = guc_to_gt(guc);    in gen11_reset_guc_interrupts()
  143: struct intel_gt *gt = guc_to_gt(guc);    in gen11_enable_guc_interrupts()
  154: struct intel_gt *gt = guc_to_gt(guc);    in gen11_disable_guc_interrupts()
  165: struct intel_gt *gt = guc_to_gt(guc);    in guc_dead_worker_func()
  179: struct intel_gt *gt = guc_to_gt(guc);    in intel_guc_init_early()
  [all …]

intel_guc_ads.c
  163: struct intel_gt *gt = guc_to_gt(guc);    in guc_policies_init()
  206: struct intel_gt *gt = guc_to_gt(guc);    in intel_guc_global_policies_update()
  435: struct intel_gt *gt = guc_to_gt(guc);    in guc_mmio_reg_state_create()
  467: struct intel_gt *gt = guc_to_gt(guc);    in guc_mmio_reg_state_init()
  531: struct intel_gt *gt = guc_to_gt(guc);    in guc_prep_golden_context()
  621: struct intel_gt *gt = guc_to_gt(guc);    in guc_init_golden_context()
  706: struct intel_gt *gt = guc_to_gt(guc);    in guc_capture_prep_lists()
  838: struct intel_gt *gt = guc_to_gt(guc);    in guc_waklv_init()
  887: struct intel_gt *gt = guc_to_gt(guc);    in __guc_ads_init()
  916: guc_mapping_table_init(guc_to_gt(guc), &info_map);    in __guc_ads_init()

intel_guc_submission.c
  685: if (!intel_uc_uses_guc_submission(&guc_to_gt(guc)->uc))    in intel_guc_wait_for_idle()
  1123: intel_gt_pm_put_async_untracked(guc_to_gt(guc));    in scrub_guc_desc_for_outstanding_g2h()
  1286: struct intel_gt *gt = guc_to_gt(guc);    in guc_update_pm_timestamp()
  1415: if (mutex_is_locked(&guc_to_gt(guc)->reset.mutex) ||    in guc_cancel_busyness_worker()
  1416: test_bit(I915_RESET_BACKOFF, &guc_to_gt(guc)->reset.flags))    in guc_cancel_busyness_worker()
  1424: struct intel_gt *gt = guc_to_gt(guc);    in __reset_guc_busyness_stats()
  1443: struct intel_gt *gt = guc_to_gt(guc);    in __update_guc_busyness_stats()
  1484: struct intel_gt *gt = guc_to_gt(guc);    in guc_timestamp_ping()
  1558: struct intel_gt *gt = guc_to_gt(guc);    in guc_init_engine_stats()
  1627: intel_gt_is_wedged(guc_to_gt(guc)));    in submission_disabled()
  [all …]

intel_guc_fw.c
  156: struct intel_gt *gt = guc_to_gt(guc);    in guc_wait_ucode()
  288: struct intel_gt *gt = guc_to_gt(guc);    in intel_guc_fw_upload()

intel_uc.c
  184: intel_uncore_write(guc_to_gt(guc)->uncore, SOFT_SCRATCH(15), 0);    in guc_clear_mmio_msg()
  193: val = intel_uncore_read(guc_to_gt(guc)->uncore, SOFT_SCRATCH(15));    in guc_get_mmio_msg()
  221: struct intel_gt *gt = guc_to_gt(guc);    in guc_enable_communication()
  711: struct intel_gt *gt = guc_to_gt(guc);    in __uc_resume()

intel_guc_rc.c
  52: struct intel_gt *gt = guc_to_gt(guc);    in __guc_rc_control()

intel_guc_capture.c
  295: struct intel_gt *gt = guc_to_gt(guc);    in guc_capture_alloc_steered_lists()
  613: struct intel_gt *gt = guc_to_gt(guc);    in guc_capture_output_min_size_est()

intel_guc_slpc.c
  25: return guc_to_gt(slpc_to_guc(slpc));    in slpc_to_gt()

intel_guc_log.c
  784: with_intel_runtime_pm(guc_to_gt(guc)->uncore->rpm, wakeref)    in intel_guc_log_relay_flush()

intel_guc_ct.c
  1392: intel_klog_error_capture(guc_to_gt(guc), (intel_engine_mask_t)~0U);    in ct_dead_ct_worker_func()

/linux/drivers/gpu/drm/xe/
xe_guc.c
  143: struct xe_gt *gt = guc_to_gt(guc);    in guc_ctl_wa_flags()
  188: struct xe_gt *gt = guc_to_gt(guc);    in guc_print_params()
  234: struct xe_gt *gt = guc_to_gt(guc);    in guc_write_params()
  248: struct xe_gt *gt = guc_to_gt(guc);    in guc_fini_hw()
  251: xe_uc_fini_hw(&guc_to_gt(guc)->uc);    in guc_fini_hw()
  263: struct xe_gt *gt = guc_to_gt(guc);    in xe_guc_comm_init_early()
  273: struct xe_tile *tile = gt_to_tile(guc_to_gt(guc));    in xe_guc_realloc_post_hwconfig()
  319: struct xe_gt *gt = guc_to_gt(guc);    in xe_guc_init()
  374: err = xe_guc_submit_init(guc, xe_gt_sriov_vf_guc_ids(guc_to_gt(guc)));    in vf_guc_init_post_hwconfig()
  427: struct xe_gt *gt = guc_to_gt(guc);    in xe_guc_reset()
  [all …]

xe_guc_submit.c
  282: struct xe_gt *gt = guc_to_gt(guc);    in xe_guc_submit_init()
  810: queue_work(guc_to_gt(guc)->ordered_wq, &q->guc->lr_tdr);    in xe_guc_exec_queue_trigger_cleanup()
  829: xe_gt_assert(guc_to_gt(guc), guc_to_xe(guc)->wedged.mode);    in xe_guc_submit_wedge()
  918: struct xe_gt *gt = guc_to_gt(exec_queue_to_guc(q));    in check_timeout()
  966: xe_gt_assert(guc_to_gt(guc), !exec_queue_destroyed(q));    in enable_scheduling()
  967: xe_gt_assert(guc_to_gt(guc), exec_queue_registered(q));    in enable_scheduling()
  968: xe_gt_assert(guc_to_gt(guc), !exec_queue_pending_disable(q));    in enable_scheduling()
  969: xe_gt_assert(guc_to_gt(guc), !exec_queue_pending_enable(q));    in enable_scheduling()
  982: xe_gt_warn(guc_to_gt(guc), "Schedule enable failed to respond");    in enable_scheduling()
  994: xe_gt_assert(guc_to_gt(guc), !exec_queue_destroyed(q));    in disable_scheduling()
  [all …]

xe_guc.h
  75: static inline struct xe_gt *guc_to_gt(struct xe_guc *guc)    in guc_to_gt() [definition]
  82: return gt_to_xe(guc_to_gt(guc));    in guc_to_xe()

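The search output truncates the guc_to_gt() definition at its signature (line 75). Helpers of this shape conventionally recover the enclosing GT with container_of() over an embedded member rather than a stored back-pointer. Below is a self-contained user-space sketch of that pattern; the uc.guc nesting and all fields are assumptions for illustration, not the verified xe layout.

/*
 * Standalone sketch of the container_of pattern that helpers like
 * guc_to_gt() conventionally rely on: given a pointer to a member
 * embedded in a larger struct, recover the enclosing struct without
 * storing a back-pointer. Struct layouts here are hypothetical.
 */
#include <stdio.h>
#include <stddef.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct xe_guc { int fw_loaded; };
struct xe_uc  { struct xe_guc guc; };
struct xe_gt  { int gt_id; struct xe_uc uc; };

/* Assumed shape of the real helper: walk back from the embedded member. */
static inline struct xe_gt *guc_to_gt(struct xe_guc *guc)
{
	return container_of(guc, struct xe_gt, uc.guc);
}

int main(void)
{
	struct xe_gt gt = { .gt_id = 1 };
	struct xe_guc *guc = &gt.uc.guc;

	/* Recovers &gt from the embedded guc pointer. */
	printf("gt_id = %d\n", guc_to_gt(guc)->gt_id);
	return 0;
}

Because the offset arithmetic is resolved at compile time, the conversion costs one constant subtraction, which is why the call sites above use it freely, often several times per function.
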
xe_memirq.c
  40: return xe_gt_is_media_type(guc_to_gt(guc)) ? "media GuC" : "GuC";    in guc_name()
  277: bool is_media = xe_gt_is_media_type(guc_to_gt(guc));    in xe_memirq_init_guc()

xe_gt_tlb_invalidation.c
  175: struct xe_gt *gt = guc_to_gt(guc);    in send_tlb_invalidation()
  429: struct xe_gt *gt = guc_to_gt(guc);    in xe_guc_tlb_invalidation_done_handler()

xe_guc_hwconfig.c
  55: struct xe_gt *gt = guc_to_gt(guc);    in xe_guc_hwconfig_init()

xe_guc_db_mgr.c
  38: return guc_to_gt(dbm_to_guc(dbm));    in dbm_to_gt()

xe_guc_id_mgr.c
  24: return guc_to_gt(idm_to_guc(idm));    in idm_to_gt()

xe_guc_pc.c
  89: return guc_to_gt(pc_to_guc(pc));    in pc_to_gt()

xe_guc_relay.c
  50: return guc_to_gt(relay_to_guc(relay));    in relay_to_gt()

xe_gt_sriov_vf.c
  256: xe_gt_WARN_ON(guc_to_gt(guc), value_len > 3);    in guc_action_query_single_klv()

/linux/drivers/gpu/drm/i915/gt/
intel_gt.h
  102: static inline struct intel_gt *guc_to_gt(struct intel_guc *guc)    in guc_to_gt() [definition]
  124: return guc_to_gt(guc)->i915;    in guc_to_i915()

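The i915 definition (line 102) is truncated the same way, but line 124 shows how these converters compose: guc_to_i915() is just guc_to_gt(guc)->i915, and intel_guc_slpc.c (line 25 above) builds slpc_to_gt() as guc_to_gt(slpc_to_guc(slpc)). A compilable sketch of that layering follows, reusing the container_of() idiom from the xe example; the struct layouts are again hypothetical stand-ins, not the verified i915 types.

/*
 * Sketch of layered converters: each helper composes lower-level ones,
 * so call sites never walk intermediate structs by hand. Layouts are
 * hypothetical.
 */
#include <stdio.h>
#include <stddef.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct drm_i915_private { const char *name; };
struct intel_guc_slpc  { int enabled; };
struct intel_guc       { struct intel_guc_slpc slpc; };
struct intel_gt        { struct drm_i915_private *i915;
			 struct { struct intel_guc guc; } uc; };

static struct intel_gt *guc_to_gt(struct intel_guc *guc)
{
	return container_of(guc, struct intel_gt, uc.guc);
}

/* Composition, as in slpc_to_gt() and guc_to_i915(): */
static struct intel_guc *slpc_to_guc(struct intel_guc_slpc *slpc)
{
	return container_of(slpc, struct intel_guc, slpc);
}

static struct drm_i915_private *guc_to_i915(struct intel_guc *guc)
{
	return guc_to_gt(guc)->i915;
}

int main(void)
{
	struct drm_i915_private i915 = { .name = "demo" };
	struct intel_gt gt = { .i915 = &i915 };
	struct intel_guc_slpc *slpc = &gt.uc.guc.slpc;

	printf("%s\n", guc_to_gt(slpc_to_guc(slpc))->i915->name); /* slpc_to_gt() path */
	printf("%s\n", guc_to_i915(&gt.uc.guc)->name);
	return 0;
}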