Cross-reference search hits for guc_to_gt() in the Linux kernel tree.

/linux/drivers/gpu/drm/xe/

xe_guc.c
    55:  addr = __xe_bo_ggtt_addr(bo, gt_to_tile(guc_to_gt(guc))->id);  in guc_bo_ggtt_addr()
    181: struct xe_gt *gt = guc_to_gt(guc);  in guc_ctl_wa_flags()
    226: struct xe_gt *gt = guc_to_gt(guc);  in guc_print_params()
    272: struct xe_gt *gt = guc_to_gt(guc);  in guc_write_params()
    286: struct xe_gt *gt = guc_to_gt(guc);  in guc_action_register_g2g_buffer()
    306: struct xe_gt *gt = guc_to_gt(guc);  in guc_action_deregister_g2g_buffer()
    396: struct xe_gt *near_gt = guc_to_gt(near_guc);  in guc_g2g_register()
    434: struct xe_gt *gt = guc_to_gt(guc);  in guc_g2g_size()
    456: struct xe_gt *gt = guc_to_gt(guc);  in guc_g2g_alloc()
    509: struct xe_gt *far_gt, *gt = guc_to_gt(guc);  in guc_g2g_start()
    [all …]

xe_guc_submit.c
    283: struct xe_gt *gt = guc_to_gt(guc);  in xe_guc_submit_init()
    419: xe_gt_assert(guc_to_gt(guc), exec_queue_registered(q));  in init_policies()
    457: xe_gt_assert(guc_to_gt(guc), xe_exec_queue_is_parallel(q));  in __register_mlrc_exec_queue()
    480: xe_gt_assert(guc_to_gt(guc), len <= MAX_MLRC_REG_SIZE);  in __register_mlrc_exec_queue()
    514: xe_gt_assert(guc_to_gt(guc), !exec_queue_registered(q));  in register_exec_queue()
    604: xe_gt_assert(guc_to_gt(guc), FIELD_FIT(WQ_LEN_MASK, len_dw));  in wq_noop_append()
    644: xe_gt_assert(guc_to_gt(guc), i == wqi_size / sizeof(u32));  in wq_item_append()
    650: xe_gt_assert(guc_to_gt(guc), q->guc->wqi_tail <= WQ_SIZE);  in wq_item_append()
    669: xe_gt_assert(guc_to_gt(guc), exec_queue_registered(q));  in submit_exec_queue()
    719: xe_gt_assert(guc_to_gt(guc), !(exec_queue_destroyed(q) || exec_queue_pending_disable(q)) ||  in guc_exec_queue_run_job()
    [all …]

xe_guc.h
    75: static inline struct xe_gt *guc_to_gt(struct xe_guc *guc)  in guc_to_gt() (function definition)
    82: return gt_to_xe(guc_to_gt(guc));  in guc_to_xe()

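The xe_guc.h hits are the definition site, but the search snippet elides the function bodies (only the signature at line 75 and the guc_to_xe() body at line 82 survive). Helpers of this kind are typically thin wrappers around the kernel's container_of(): the GuC struct is embedded inside the GT, so the enclosing GT is recovered by pointer arithmetic rather than a stored back-pointer. Below is a minimal, compilable sketch of that pattern; the stub types and the uc.guc member path are assumptions for illustration, not the driver's actual layout.

#include <stddef.h>

/* Stub types standing in for the real xe structs; the uc.guc member
 * path below is an assumption for illustration. */
struct xe_device { int dummy; };
struct xe_guc { int dummy; };
struct xe_gt {
	struct xe_device *xe;
	struct { struct xe_guc guc; } uc;
};

/* container_of(), simplified from the kernel's definition: recover the
 * enclosing struct from a pointer to one of its members. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

static inline struct xe_device *gt_to_xe(struct xe_gt *gt)
{
	return gt->xe; /* stand-in for the real accessor */
}

/* Hit 75: no back-pointer is stored; the GT is recovered by
 * subtracting the offset of the embedded GuC member. */
static inline struct xe_gt *guc_to_gt(struct xe_guc *guc)
{
	return container_of(guc, struct xe_gt, uc.guc);
}

/* Hit 82, verbatim shape: accessors compose, GuC -> GT -> device. */
static inline struct xe_device *guc_to_xe(struct xe_guc *guc)
{
	return gt_to_xe(guc_to_gt(guc));
}

The composition at hit 82 (GuC to GT to device) is the same convention the chained helpers later in this listing follow.
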
xe_guc_capture.c
    398:  struct xe_gt *gt = guc_to_gt(guc);  in guc_capture_alloc_steered_lists()
    472:  xe_gt_dbg(guc_to_gt(guc), "capture found %d ext-regs.\n", total);  in guc_capture_alloc_steered_lists()
    514:  xe_gt_dbg(guc_to_gt(guc), "Got short capture reglist init: %d out-of %d.\n",  in guc_capture_list_init()
    558:  struct xe_gt *gt = guc_to_gt(guc);  in guc_capture_getlistsize()
    766:  struct xe_gt *gt = guc_to_gt(guc);  in guc_capture_output_size_est()
    831:  xe_gt_dbg(guc_to_gt(guc),  in check_guc_capture_size()
    835:  xe_gt_dbg(guc_to_gt(guc), "Error state capture buffer maybe small: %d < %d\n",  in check_guc_capture_size()
    838:  xe_gt_dbg(guc_to_gt(guc),  in check_guc_capture_size()
    1030: xe_gt_warn(guc_to_gt(guc),  in guc_capture_log_remove_bytes()
    1146: struct xe_gt *gt = guc_to_gt(guc);  in guc_capture_extract_reglists()
    [all …]

xe_memirq.c
    50:  return xe_gt_is_media_type(guc_to_gt(guc)) ? "media GuC" : "GuC";  in guc_name()
    339: bool is_media = xe_gt_is_media_type(guc_to_gt(guc));  in xe_memirq_init_guc()

xe_gt_tlb_invalidation.c
    185: struct xe_gt *gt = guc_to_gt(guc);  in send_tlb_invalidation()
    451: struct xe_gt *gt = guc_to_gt(guc);  in xe_guc_tlb_invalidation_done_handler()

xe_guc_hwconfig.c
    55: struct xe_gt *gt = guc_to_gt(guc);  in xe_guc_hwconfig_init()

xe_guc_db_mgr.c
    38: return guc_to_gt(dbm_to_guc(dbm));  in dbm_to_gt()

xe_guc_id_mgr.c
    24: return guc_to_gt(idm_to_guc(idm));  in idm_to_gt()

xe_gt_pagefault.c
    322: struct xe_gt *gt = guc_to_gt(guc);  in xe_guc_pagefault_handler()
    664: struct xe_gt *gt = guc_to_gt(guc);  in xe_guc_access_counter_notify_handler()

xe_guc_pc.c
    90: return guc_to_gt(pc_to_guc(pc));  in pc_to_gt()

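Several hits in this listing (dbm_to_gt() at xe_guc_db_mgr.c:38, idm_to_gt() at xe_guc_id_mgr.c:24, pc_to_gt() above, and slpc_to_gt() on the i915 side) are one-line chained accessors: each sub-component defines a helper to its immediate parent only, and longer hops are built by composition. A hedged, compilable sketch of that convention, again with stub types and assumed member names:

#include <stddef.h>

/* Stub types; the pc and uc.guc member paths are assumptions. */
struct xe_guc_pc { int dummy; };
struct xe_guc { struct xe_guc_pc pc; };
struct xe_gt { struct { struct xe_guc guc; } uc; };

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* One step up: PC state to its containing GuC. */
static inline struct xe_guc *pc_to_guc(struct xe_guc_pc *pc)
{
	return container_of(pc, struct xe_guc, pc);
}

/* One step up: GuC to its containing GT. */
static inline struct xe_gt *guc_to_gt(struct xe_guc *guc)
{
	return container_of(guc, struct xe_gt, uc.guc);
}

/* The shape of the hit at xe_guc_pc.c:90: two single-step accessors
 * composed, so no struct needs a back-pointer to anything but its
 * direct container. */
static inline struct xe_gt *pc_to_gt(struct xe_guc_pc *pc)
{
	return guc_to_gt(pc_to_guc(pc));
}
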
xe_gt_sriov_vf.c
    295: xe_gt_WARN_ON(guc_to_gt(guc), value_len > 3);  in guc_action_query_single_klv()

/linux/drivers/gpu/drm/i915/gt/uc/

intel_guc_print.h
    13: gt_##_level(guc_to_gt(_guc), "GUC: " _fmt, ##__VA_ARGS__)
    40: gt_WARN(guc_to_gt(_guc), _cond, "GUC: " _fmt, ##__VA_ARGS__)
    43: gt_WARN_ONCE(guc_to_gt(_guc), _cond, "GUC: " _fmt, ##__VA_ARGS__)
    46: gt_WARN(guc_to_gt(_guc), _cond, "%s(%s)", "guc_WARN_ON", __stringify(_cond))
    49: gt_WARN_ONCE(guc_to_gt(_guc), _cond, "%s(%s)", "guc_WARN_ON_ONCE", __stringify(_cond))

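The intel_guc_print.h hits are macro bodies: each GuC-level print resolves the GT once via guc_to_gt(), prefixes the message with "GUC: ", and token-pastes the severity into the matching gt_-level helper. A compilable sketch of the pattern; the stand-in gt_err(), the simplified guc_to_gt(), and the caller-facing guc_err() name are illustrative assumptions, not the header's exact contents.

#include <stdio.h>

/* Stand-in types; the real helper recovers the GT via container_of(). */
struct intel_gt { int info_id; };
struct intel_guc { struct intel_gt *gt; };

static inline struct intel_gt *guc_to_gt(struct intel_guc *guc)
{
	return guc->gt; /* stand-in back-pointer for the sketch */
}

/* Stand-in for the gt-level logger; in i915 this routes through the
 * GT's drm device loggers. */
#define gt_err(gt, fmt, ...) \
	fprintf(stderr, "GT%d: " fmt, (gt)->info_id, ##__VA_ARGS__)

/* The shape of the hit at line 13: paste the severity token into a
 * gt_-level call, so every GuC message carries its GT and a "GUC: "
 * prefix without each call site naming the GT. */
#define __guc_printk(_guc, _level, _fmt, ...) \
	gt_##_level(guc_to_gt(_guc), "GUC: " _fmt, ##__VA_ARGS__)

#define guc_err(_guc, _fmt, ...) \
	__guc_printk((_guc), err, _fmt, ##__VA_ARGS__)

/* usage: guc_err(guc, "firmware load failed: %d\n", err); */
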
intel_guc.c
    44:  struct intel_gt *gt = guc_to_gt(guc);  in intel_guc_notify()
    66:  struct intel_gt *gt = guc_to_gt(guc);  in intel_guc_init_send_regs()
    83:  struct intel_gt *gt = guc_to_gt(guc);  in gen9_reset_guc_interrupts()
    94:  struct intel_gt *gt = guc_to_gt(guc);  in gen9_enable_guc_interrupts()
    109: struct intel_gt *gt = guc_to_gt(guc);  in gen9_disable_guc_interrupts()
    134: struct intel_gt *gt = guc_to_gt(guc);  in gen11_reset_guc_interrupts()
    143: struct intel_gt *gt = guc_to_gt(guc);  in gen11_enable_guc_interrupts()
    154: struct intel_gt *gt = guc_to_gt(guc);  in gen11_disable_guc_interrupts()
    165: struct intel_gt *gt = guc_to_gt(guc);  in guc_dead_worker_func()
    179: struct intel_gt *gt = guc_to_gt(guc);  in intel_guc_init_early()
    [all …]

intel_guc_ads.c
    163: struct intel_gt *gt = guc_to_gt(guc);  in guc_policies_init()
    206: struct intel_gt *gt = guc_to_gt(guc);  in intel_guc_global_policies_update()
    435: struct intel_gt *gt = guc_to_gt(guc);  in guc_mmio_reg_state_create()
    467: struct intel_gt *gt = guc_to_gt(guc);  in guc_mmio_reg_state_init()
    531: struct intel_gt *gt = guc_to_gt(guc);  in guc_prep_golden_context()
    621: struct intel_gt *gt = guc_to_gt(guc);  in guc_init_golden_context()
    706: struct intel_gt *gt = guc_to_gt(guc);  in guc_capture_prep_lists()
    838: struct intel_gt *gt = guc_to_gt(guc);  in guc_waklv_init()
    887: struct intel_gt *gt = guc_to_gt(guc);  in __guc_ads_init()
    916: guc_mapping_table_init(guc_to_gt(guc), &info_map);  in __guc_ads_init()

intel_guc_submission.c
    685:  if (!intel_uc_uses_guc_submission(&guc_to_gt(guc)->uc))  in intel_guc_wait_for_idle()
    1123: intel_gt_pm_put_async_untracked(guc_to_gt(guc));  in scrub_guc_desc_for_outstanding_g2h()
    1301: struct intel_gt *gt = guc_to_gt(guc);  in guc_update_pm_timestamp()
    1433: if (mutex_is_locked(&guc_to_gt(guc)->reset.mutex) ||  in guc_cancel_busyness_worker()
    1434: test_bit(I915_RESET_BACKOFF, &guc_to_gt(guc)->reset.flags))  in guc_cancel_busyness_worker()
    1442: struct intel_gt *gt = guc_to_gt(guc);  in __reset_guc_busyness_stats()
    1474: struct intel_gt *gt = guc_to_gt(guc);  in __update_guc_busyness_running_state()
    1487: struct intel_gt *gt = guc_to_gt(guc);  in __update_guc_busyness_stats()
    1528: struct intel_gt *gt = guc_to_gt(guc);  in guc_timestamp_ping()
    1590: struct intel_gt *gt = guc_to_gt(guc);  in guc_action_enable_usage_stats()
    [all …]

intel_uc.c
    184: intel_uncore_write(guc_to_gt(guc)->uncore, SOFT_SCRATCH(15), 0);  in guc_clear_mmio_msg()
    193: val = intel_uncore_read(guc_to_gt(guc)->uncore, SOFT_SCRATCH(15));  in guc_get_mmio_msg()
    221: struct intel_gt *gt = guc_to_gt(guc);  in guc_enable_communication()
    711: struct intel_gt *gt = guc_to_gt(guc);  in __uc_resume()

intel_guc_rc.c
    52: struct intel_gt *gt = guc_to_gt(guc);  in __guc_rc_control()

intel_guc_capture.c
    295: struct intel_gt *gt = guc_to_gt(guc);  in guc_capture_alloc_steered_lists()
    613: struct intel_gt *gt = guc_to_gt(guc);  in guc_capture_output_min_size_est()

intel_guc_slpc.c
    25: return guc_to_gt(slpc_to_guc(slpc));  in slpc_to_gt()

intel_guc_log.c
    784: with_intel_runtime_pm(guc_to_gt(guc)->uncore->rpm, wakeref)  in intel_guc_log_relay_flush()

intel_guc_ct.c
    1392: intel_klog_error_capture(guc_to_gt(guc), (intel_engine_mask_t)~0U);  in ct_dead_ct_worker_func()

/linux/drivers/gpu/drm/i915/gt/

intel_gt.h
    102: static inline struct intel_gt *guc_to_gt(struct intel_guc *guc)  in guc_to_gt() (function definition)
    124: return guc_to_gt(guc)->i915;  in guc_to_i915()
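
The i915 definition at line 102 mirrors the xe helper, and the guc_to_i915() hit at line 124 shows one more hop from GT to device. A minimal compilable sketch under the same caveats as the xe block earlier: the stub types and the uc.guc member path are assumptions, not the header's exact contents.

#include <stddef.h>

/* Stub types; real definitions live in the i915 headers. */
struct drm_i915_private { int dummy; };
struct intel_guc { int dummy; };
struct intel_gt {
	struct drm_i915_private *i915;
	struct { struct intel_guc guc; } uc;
};

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Hit 102: same container_of pattern as the xe sketch, here recovering
 * the i915 GT from its embedded GuC. */
static inline struct intel_gt *guc_to_gt(struct intel_guc *guc)
{
	return container_of(guc, struct intel_gt, uc.guc);
}

/* Hit 124, verbatim shape: one more hop from the GT to the device. */
static inline struct drm_i915_private *guc_to_i915(struct intel_guc *guc)
{
	return guc_to_gt(guc)->i915;
}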