
Searched refs:guc (Results 1 – 25 of 41) sorted by relevance


/linux/drivers/gpu/drm/i915/gt/uc/
intel_guc_ads.c
84 static u32 guc_ads_regset_size(struct intel_guc *guc) in guc_ads_regset_size() argument
86 GEM_BUG_ON(!guc->ads_regset_size); in guc_ads_regset_size()
87 return guc->ads_regset_size; in guc_ads_regset_size()
90 static u32 guc_ads_golden_ctxt_size(struct intel_guc *guc) in guc_ads_golden_ctxt_size() argument
92 return PAGE_ALIGN(guc->ads_golden_ctxt_size); in guc_ads_golden_ctxt_size()
95 static u32 guc_ads_waklv_size(struct intel_guc *guc) in guc_ads_waklv_size() argument
97 return PAGE_ALIGN(guc->ads_waklv_size); in guc_ads_waklv_size()
100 static u32 guc_ads_capture_size(struct intel_guc *guc) in guc_ads_capture_size() argument
102 return PAGE_ALIGN(guc->ads_capture_size); in guc_ads_capture_size()
105 static u32 guc_ads_private_data_size(struct intel_guc *guc) in guc_ads_private_data_size() argument
[all …]
intel_uc.c
119 intel_guc_init_early(&uc->guc); in intel_uc_init_early()
133 intel_guc_init_late(&uc->guc); in intel_uc_init_late()
151 intel_guc_init_send_regs(&uc->guc); in intel_uc_init_mmio()
156 struct intel_guc *guc = &uc->guc; in __uc_capture_load_err_log() local
158 if (guc->log.vma && !uc->load_err_log) in __uc_capture_load_err_log()
159 uc->load_err_log = i915_gem_object_get(guc->log.vma->obj); in __uc_capture_load_err_log()
183 static void guc_clear_mmio_msg(struct intel_guc *guc) in guc_clear_mmio_msg() argument
185 intel_uncore_write(guc_to_gt(guc)->uncore, SOFT_SCRATCH(15), 0); in guc_clear_mmio_msg()
188 static void guc_get_mmio_msg(struct intel_guc *guc) in guc_get_mmio_msg() argument
192 spin_lock_irq(&guc->irq_lock); in guc_get_mmio_msg()
[all …]
intel_guc_debugfs.c
19 struct intel_guc *guc = m->private; in guc_info_show() local
22 if (!intel_guc_is_supported(guc)) in guc_info_show()
25 intel_guc_load_status(guc, &p); in guc_info_show()
27 intel_guc_log_info(&guc->log, &p); in guc_info_show()
29 if (!intel_guc_submission_is_used(guc)) in guc_info_show()
32 intel_guc_ct_print_info(&guc->ct, &p); in guc_info_show()
33 intel_guc_submission_print_info(guc, &p); in guc_info_show()
34 intel_guc_ads_print_policy_info(guc, &p); in guc_info_show()
42 struct intel_guc *guc = m->private; in guc_registered_contexts_show() local
45 if (!intel_guc_submission_is_used(guc)) in guc_registered_contexts_show()
[all …]
intel_guc_rc.c
13 static bool __guc_rc_supported(struct intel_guc *guc) in __guc_rc_supported() argument
16 return guc->submission_supported && in __guc_rc_supported()
17 GRAPHICS_VER(guc_to_i915(guc)) >= 12; in __guc_rc_supported()
20 static bool __guc_rc_selected(struct intel_guc *guc) in __guc_rc_selected() argument
22 if (!intel_guc_rc_is_supported(guc)) in __guc_rc_selected()
25 return guc->submission_selected; in __guc_rc_selected()
28 void intel_guc_rc_init_early(struct intel_guc *guc) in intel_guc_rc_init_early() argument
30 guc->rc_supported = __guc_rc_supported(guc); in intel_guc_rc_init_early()
31 guc->rc_selected = __guc_rc_selected(guc); in intel_guc_rc_init_early()
34 static int guc_action_control_gucrc(struct intel_guc *guc, bool enable) in guc_action_control_gucrc() argument
[all …]
intel_guc_capture.c
292 guc_capture_alloc_steered_lists(struct intel_guc *guc, in guc_capture_alloc_steered_lists() argument
295 struct intel_gt *gt = guc_to_gt(guc); in guc_capture_alloc_steered_lists()
307 if (!list || guc->capture->extlists) in guc_capture_alloc_steered_lists()
347 guc_dbg(guc, "capture found %d ext-regs.\n", num_tot_regs); in guc_capture_alloc_steered_lists()
348 guc->capture->extlists = extlists; in guc_capture_alloc_steered_lists()
352 guc_capture_get_device_reglist(struct intel_guc *guc) in guc_capture_get_device_reglist() argument
354 struct drm_i915_private *i915 = guc_to_i915(guc); in guc_capture_get_device_reglist()
368 guc_capture_alloc_steered_lists(guc, lists); in guc_capture_get_device_reglist()
412 guc_capture_list_init(struct intel_guc *guc, u32 owner, u32 type, u32 classid, in guc_capture_list_init() argument
416 const struct __guc_mmio_reg_descr_group *reglists = guc->capture->reglists; in guc_capture_list_init()
[all …]
intel_guc_submission.h
16 void intel_guc_submission_init_early(struct intel_guc *guc);
17 int intel_guc_submission_init(struct intel_guc *guc);
18 int intel_guc_submission_enable(struct intel_guc *guc);
19 void intel_guc_submission_disable(struct intel_guc *guc);
20 void intel_guc_submission_fini(struct intel_guc *guc);
21 int intel_guc_preempt_work_create(struct intel_guc *guc);
22 void intel_guc_preempt_work_destroy(struct intel_guc *guc);
24 void intel_guc_submission_print_info(struct intel_guc *guc,
26 void intel_guc_submission_print_context_info(struct intel_guc *guc,
36 int intel_guc_wait_for_pending_msg(struct intel_guc *guc,
[all …]
intel_guc_rc.h
11 void intel_guc_rc_init_early(struct intel_guc *guc);
13 static inline bool intel_guc_rc_is_supported(struct intel_guc *guc) in intel_guc_rc_is_supported() argument
15 return guc->rc_supported; in intel_guc_rc_is_supported()
18 static inline bool intel_guc_rc_is_wanted(struct intel_guc *guc) in intel_guc_rc_is_wanted() argument
20 return guc->submission_selected && intel_guc_rc_is_supported(guc); in intel_guc_rc_is_wanted()
23 static inline bool intel_guc_rc_is_used(struct intel_guc *guc) in intel_guc_rc_is_used() argument
25 return intel_guc_submission_is_used(guc) && intel_guc_rc_is_wanted(guc); in intel_guc_rc_is_used()
28 int intel_guc_rc_enable(struct intel_guc *guc);
29 int intel_guc_rc_disable(struct intel_guc *guc);
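
The intel_guc_rc.h helpers above encode a common gating ladder: the platform must support the feature, policy must select it, and the parent feature (GuC submission) must itself be in use before GuC RC counts as "used". Below is a minimal standalone sketch of that ladder, using made-up struct and field names rather than the real i915 API, and simplifying "submission is used" to supported-and-selected:

#include <stdbool.h>
#include <stdio.h>

/* Stand-in for struct intel_guc; the fields loosely mirror the flags the
 * intel_guc_rc_is_supported()/is_wanted()/is_used() helpers consult. */
struct fake_guc {
	bool submission_supported;
	bool submission_selected;
	bool rc_supported;
};

static bool rc_is_supported(const struct fake_guc *guc)
{
	return guc->rc_supported;
}

static bool rc_is_wanted(const struct fake_guc *guc)
{
	/* Wanted only if the parent feature was selected and RC is possible. */
	return guc->submission_selected && rc_is_supported(guc);
}

static bool rc_is_used(const struct fake_guc *guc)
{
	/* "Submission is used" is simplified here to supported && selected. */
	return guc->submission_supported && guc->submission_selected &&
	       rc_is_wanted(guc);
}

int main(void)
{
	struct fake_guc guc = {
		.submission_supported = true,
		.submission_selected = true,
		.rc_supported = true,
	};

	printf("GuC RC in use: %s\n", rc_is_used(&guc) ? "yes" : "no");
	return 0;
}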
selftest_guc.c
147 struct intel_guc *guc = gt_to_guc(gt); in intel_guc_steal_guc_ids() local
154 int number_guc_id_stolen = guc->number_guc_id_stolen; in intel_guc_steal_guc_ids()
158 guc_err(guc, "Context array allocation failed\n"); in intel_guc_steal_guc_ids()
164 sv = guc->submission_state.num_guc_ids; in intel_guc_steal_guc_ids()
165 guc->submission_state.num_guc_ids = 512; in intel_guc_steal_guc_ids()
171 guc_err(guc, "Failed to create context: %pe\n", ce[context_index]); in intel_guc_steal_guc_ids()
177 guc_err(guc, "Failed to create spinner: %pe\n", ERR_PTR(ret)); in intel_guc_steal_guc_ids()
184 guc_err(guc, "Failed to create spinner request: %pe\n", spin_rq); in intel_guc_steal_guc_ids()
189 guc_err(guc, "Failed to add Spinner request: %pe\n", ERR_PTR(ret)); in intel_guc_steal_guc_ids()
198 guc_err(guc, "Failed to create context: %pe\n", ce[context_index]); in intel_guc_steal_guc_ids()
[all …]
intel_guc_ads.h
16 int intel_guc_ads_create(struct intel_guc *guc);
17 void intel_guc_ads_destroy(struct intel_guc *guc);
18 void intel_guc_ads_init_late(struct intel_guc *guc);
19 void intel_guc_ads_reset(struct intel_guc *guc);
20 void intel_guc_ads_print_policy_info(struct intel_guc *guc,
23 u32 intel_guc_engine_usage_offset(struct intel_guc *guc);
intel_guc_capture.h
26 void intel_guc_capture_process(struct intel_guc *guc);
27 int intel_guc_capture_getlist(struct intel_guc *guc, u32 owner, u32 type, u32 classid,
29 int intel_guc_capture_getlistsize(struct intel_guc *guc, u32 owner, u32 type, u32 classid,
31 int intel_guc_capture_getnullheader(struct intel_guc *guc, void **outptr, size_t *size);
32 void intel_guc_capture_destroy(struct intel_guc *guc);
33 int intel_guc_capture_init(struct intel_guc *guc);
intel_uc.h
33 struct intel_guc guc; member
89 uc_state_checkers(guc, guc);
91 uc_state_checkers(guc, guc_submission);
92 uc_state_checkers(guc, guc_slpc);
93 uc_state_checkers(guc, guc_rc);
101 return intel_guc_wait_for_idle(&uc->guc, timeout); in intel_uc_wait_for_idle()
intel_guc_log_debugfs.c
33 struct intel_guc *guc = log_to_guc(log); in guc_log_dump_size() local
35 if (!intel_guc_is_supported(guc)) in guc_log_dump_size()
61 struct intel_guc *guc = log_to_guc(log); in guc_load_err_dump_size() local
62 struct intel_uc *uc = container_of(guc, struct intel_uc, guc); in guc_load_err_dump_size()
64 if (!intel_guc_is_supported(guc)) in guc_load_err_dump_size()
intel_guc_ct.c
188 static int guc_action_control_ctb(struct intel_guc *guc, u32 control) in guc_action_control_ctb() argument
201 ret = intel_guc_send_mmio(guc, request, ARRAY_SIZE(request), NULL, 0); in guc_action_control_ctb()
260 struct intel_guc *guc = ct_to_guc(ct); in intel_guc_ct_init() local
269 err = i915_inject_probe_error(guc_to_i915(guc), -ENXIO); in intel_guc_ct_init()
276 err = intel_guc_allocate_and_map_vma(guc, blob_size, &ct->vma, &blob); in intel_guc_ct_init()
283 CT_DEBUG(ct, "base=%#x size=%u\n", intel_guc_ggtt_offset(guc, ct->vma), blob_size); in intel_guc_ct_init()
333 struct intel_guc *guc = ct_to_guc(ct); in intel_guc_ct_enable() local
343 base = intel_guc_ggtt_offset(guc, ct->vma); in intel_guc_ct_enable()
396 struct intel_guc *guc = ct_to_guc(ct); in intel_guc_ct_disable() local
402 if (intel_guc_is_fw_running(guc)) { in intel_guc_ct_disable()
[all …]
intel_uc_debugfs.c
58 uc->guc.dbgfs_node = root; in intel_uc_debugfs_register()
63 intel_guc_debugfs_register(&uc->guc, root); in intel_uc_debugfs_register()
/linux/drivers/gpu/drm/xe/
xe_guc_hwconfig.c
18 static int send_get_hwconfig(struct xe_guc *guc, u64 ggtt_addr, u32 size) in send_get_hwconfig() argument
27 return xe_guc_mmio_send(guc, action, ARRAY_SIZE(action)); in send_get_hwconfig()
30 static int guc_hwconfig_size(struct xe_guc *guc, u32 *size) in guc_hwconfig_size() argument
32 int ret = send_get_hwconfig(guc, 0, 0); in guc_hwconfig_size()
41 static int guc_hwconfig_copy(struct xe_guc *guc) in guc_hwconfig_copy() argument
43 int ret = send_get_hwconfig(guc, xe_bo_ggtt_addr(guc->hwconfig.bo), in guc_hwconfig_copy()
44 guc->hwconfig.size); in guc_hwconfig_copy()
52 int xe_guc_hwconfig_init(struct xe_guc *guc) in xe_guc_hwconfig_init() argument
54 struct xe_device *xe = guc_to_xe(guc); in xe_guc_hwconfig_init()
55 struct xe_gt *gt = guc_to_gt(guc); in xe_guc_hwconfig_init()
[all …]
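
The xe_guc_hwconfig.c hits above suggest a two-step query: guc_hwconfig_size() sends the action with a zero address and zero size to learn the blob size, and guc_hwconfig_copy() repeats the action with a real buffer to fetch the data. Here is a self-contained sketch of that size-then-copy pattern; the dummy fw_get_hwconfig() backend and all names are hypothetical stand-ins for the real MMIO send, not the xe driver API:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Dummy "firmware" call: with len == 0 it reports how big the payload is,
 * otherwise it copies the payload out. */
static const char fake_blob[] = "hwconfig-table-bytes";

static int fw_get_hwconfig(void *buf, size_t len)
{
	if (len == 0)
		return (int)sizeof(fake_blob);	/* size query */
	if (len < sizeof(fake_blob))
		return -1;			/* buffer too small */
	memcpy(buf, fake_blob, sizeof(fake_blob));
	return 0;				/* copy done */
}

int main(void)
{
	int size = fw_get_hwconfig(NULL, 0);	/* step 1: ask for the size */
	char *buf;

	if (size <= 0)
		return 1;

	buf = malloc(size);
	if (!buf)
		return 1;

	if (fw_get_hwconfig(buf, size)) {	/* step 2: fetch the data */
		free(buf);
		return 1;
	}

	printf("fetched %d bytes: %s\n", size, buf);
	free(buf);
	return 0;
}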
xe_uc.c
40 ret = xe_guc_init_noalloc(&uc->guc); in xe_uc_init_noalloc()
60 ret = xe_guc_init(&uc->guc); in xe_uc_init()
81 ret = xe_guc_min_load_for_hwconfig(&uc->guc); in xe_uc_init()
109 err = xe_guc_init_post_hwconfig(&uc->guc); in xe_uc_init_post_hwconfig()
125 ret = xe_guc_reset(&uc->guc); in uc_reset()
137 xe_guc_sanitize(&uc->guc); in xe_uc_sanitize()
155 err = xe_guc_enable_communication(&uc->guc); in vf_uc_load_hw()
163 uc->guc.submission_state.enabled = true; in vf_uc_load_hw()
165 err = xe_guc_opt_in_features_enable(&uc->guc); in vf_uc_load_hw()
176 xe_guc_sanitize(&uc->guc); in vf_uc_load_hw()
[all …]
xe_guc_capture.h
43 void xe_guc_capture_process(struct xe_guc *guc);
44 int xe_guc_capture_getlist(struct xe_guc *guc, u32 owner, u32 type,
46 int xe_guc_capture_getlistsize(struct xe_guc *guc, u32 owner, u32 type,
48 int xe_guc_capture_getnullheader(struct xe_guc *guc, void **outptr, size_t *size);
49 size_t xe_guc_capture_ads_input_worst_size(struct xe_guc *guc);
57 void xe_guc_capture_steered_list_init(struct xe_guc *guc);
58 void xe_guc_capture_put_matched_nodes(struct xe_guc *guc);
59 int xe_guc_capture_init(struct xe_guc *guc);
xe_guc_hwconfig.h
14 int xe_guc_hwconfig_init(struct xe_guc *guc);
15 u32 xe_guc_hwconfig_size(struct xe_guc *guc);
16 void xe_guc_hwconfig_copy(struct xe_guc *guc, void *dst);
17 void xe_guc_hwconfig_dump(struct xe_guc *guc, struct drm_printer *p);
18 int xe_guc_hwconfig_lookup_u32(struct xe_guc *guc, u32 attribute, u32 *val);
xe_devcoredump.c
80 return &q->gt->uc.guc; in exec_queue_to_guc()
119 xe_guc_log_snapshot_print(ss->guc.log, &p); in __xe_devcoredump_read()
121 xe_guc_ct_snapshot_print(ss->guc.ct, &p); in __xe_devcoredump_read()
147 xe_guc_log_snapshot_free(ss->guc.log); in xe_devcoredump_snapshot_free()
148 ss->guc.log = NULL; in xe_devcoredump_snapshot_free()
150 xe_guc_ct_snapshot_free(ss->guc.ct); in xe_devcoredump_snapshot_free()
151 ss->guc.ct = NULL; in xe_devcoredump_snapshot_free()
153 xe_guc_capture_put_matched_nodes(&ss->gt->uc.guc); in xe_devcoredump_snapshot_free()
333 struct xe_guc *guc = exec_queue_to_guc(q); in devcoredump_snapshot() local
356 ss->guc.log = xe_guc_log_snapshot_capture(&guc->log, true); in devcoredump_snapshot()
[all …]
xe_gt_sriov_pf_monitor.c
32 gt->sriov.pf.vfs[vfid].monitor.guc.events[e] = 0; in xe_gt_sriov_pf_monitor_flr()
41 gt->sriov.pf.vfs[vfid].monitor.guc.events[e]++; in pf_update_event_counter()
130 if (data->guc.events[e]) in xe_gt_sriov_pf_monitor_print_events()
139 #define __value(TAG, NAME, ...) , #NAME, data->guc.events[MAKE_XE_GUC_KLV_THRESHOLD_INDEX(TAG)] in xe_gt_sriov_pf_monitor_print_events()
xe_gt_sriov_pf_migration.c
154 static int guc_action_vf_save_restore(struct xe_guc *guc, u32 vfid, u32 opcode, in guc_action_vf_save_restore() argument
168 return xe_guc_ct_send_block(&guc->ct, request, ARRAY_SIZE(request)); in guc_action_vf_save_restore()
176 ret = guc_action_vf_save_restore(&gt->uc.guc, vfid, GUC_PF_OPCODE_VF_SAVE, 0, 0); in pf_send_guc_query_vf_mig_data_size()
185 struct xe_guc *guc = &gt->uc.guc; in pf_send_guc_save_vf_mig_data() local
186 CLASS(xe_guc_buf, buf)(&guc->buf, ndwords); in pf_send_guc_save_vf_mig_data()
198 ret = guc_action_vf_save_restore(guc, vfid, GUC_PF_OPCODE_VF_SAVE, in pf_send_guc_save_vf_mig_data()
215 struct xe_guc *guc = &gt->uc.guc; in pf_send_guc_restore_vf_mig_data() local
216 CLASS(xe_guc_buf_from_data, buf)(&guc->buf, src, size); in pf_send_guc_restore_vf_mig_data()
225 ret = guc_action_vf_save_restore(guc, vfid, GUC_PF_OPCODE_VF_RESTORE, in pf_send_guc_restore_vf_mig_data()
1029 if (GUC_FIRMWARE_VER(&gt->uc.guc) < MAKE_GUC_VER(70, 54, 0)) in pf_gt_migration_check_support()
xe_guc_log.c
31 return container_of(log, struct xe_gt, uc.guc.log); in log_to_gt()
146 struct xe_guc *guc = log_to_guc(log); in xe_guc_log_snapshot_capture() local
177 snapshot->ver_found = guc->fw.versions.found[XE_UC_FW_VER_RELEASE]; in xe_guc_log_snapshot_capture()
178 snapshot->ver_want = guc->fw.versions.wanted; in xe_guc_log_snapshot_capture()
179 snapshot->path = guc->fw.path; in xe_guc_log_snapshot_capture()
/linux/drivers/gpu/drm/i915/gt/
intel_gt.h
102 static inline struct intel_gt *guc_to_gt(struct intel_guc *guc) in guc_to_gt() argument
104 return container_of(guc, struct intel_gt, uc.guc); in guc_to_gt()
122 static inline struct drm_i915_private *guc_to_i915(struct intel_guc *guc) in guc_to_i915() argument
124 return guc_to_gt(guc)->i915; in guc_to_i915()
129 return &gt->uc.guc; in gt_to_guc()
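
The intel_gt.h helpers above recover the enclosing struct intel_gt from an embedded struct intel_guc with container_of(). A standalone sketch of that pointer walk-back follows; it re-defines container_of() for userspace and uses made-up struct names rather than the real i915 types:

#include <stddef.h>
#include <stdio.h>

/* Userspace re-implementation of the kernel macro, for illustration only. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct fake_guc { int fw_loaded; };
struct fake_uc  { struct fake_guc guc; };
struct fake_gt  { int id; struct fake_uc uc; };

/* Mirrors guc_to_gt(): walk back from the embedded member to its container. */
static struct fake_gt *guc_to_gt(struct fake_guc *guc)
{
	return container_of(guc, struct fake_gt, uc.guc);
}

int main(void)
{
	struct fake_gt gt = { .id = 7 };
	struct fake_guc *guc = &gt.uc.guc;

	printf("gt id recovered via guc pointer: %d\n", guc_to_gt(guc)->id);
	return 0;
}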
/linux/drivers/gpu/drm/i915/selftests/
intel_scheduler_helpers.c
70 err = intel_guc_global_policies_update(&engine->gt->uc.guc); in intel_selftest_modify_policy()
89 return intel_guc_global_policies_update(&engine->gt->uc.guc); in intel_selftest_restore_policy()
/linux/drivers/gpu/drm/xe/tests/
xe_guc_buf_kunit.c
60 struct xe_guc *guc; in guc_buf_test_init() local
66 guc = &xe_device_get_gt(test->priv, 0)->uc.guc; in guc_buf_test_init()
75 KUNIT_ASSERT_EQ(test, 0, xe_guc_buf_cache_init(&guc->buf)); in guc_buf_test_init()
77 test->priv = &guc->buf; in guc_buf_test_init()
