Lines Matching defs:engine

41  *   engine is reset. It's also possible that a set of engine classes share a
45 * driver is to tie those workarounds to the first compute/render engine that
46 * is registered. When executing with GuC submission, engine resets are
48 * written once, on engine initialization, and then passed to GuC, which
56 * engine's MMIO range but that are part of the common RCS/CCS reset domain
88 * engine registers are restored in a context restore sequence. This is
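
The comment fragments above describe the basic model of this file: workarounds are recorded once into per-GT, per-engine, or per-context lists and then re-applied whenever the corresponding registers lose their values (or handed to GuC to restore around engine resets). Below is a minimal standalone sketch of that record-then-apply idea; struct wa, struct wa_list, and the register offsets are hypothetical simplifications, not the driver's real i915_wa types.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical, simplified stand-ins for the driver's i915_wa / i915_wa_list. */
    struct wa {
        uint32_t reg;    /* MMIO offset */
        uint32_t clr;    /* bits to clear */
        uint32_t set;    /* bits to set */
    };

    struct wa_list {
        struct wa entries[8];
        unsigned int count;
    };

    /* Record a read-modify-write workaround once, at init time. */
    static void wa_add(struct wa_list *wal, uint32_t reg, uint32_t clr, uint32_t set)
    {
        struct wa *wa = &wal->entries[wal->count++];

        wa->reg = reg;
        wa->clr = clr;
        wa->set = set;
    }

    /* Re-apply the whole list, e.g. after the relevant reset domain was reset. */
    static void wa_list_apply(const struct wa_list *wal, uint32_t *mmio)
    {
        for (unsigned int i = 0; i < wal->count; i++) {
            const struct wa *wa = &wal->entries[i];

            mmio[wa->reg / 4] = (mmio[wa->reg / 4] & ~wa->clr) | wa->set;
        }
    }

    int main(void)
    {
        uint32_t fake_mmio[64] = { 0 };
        struct wa_list gt_wa_list = { .count = 0 };

        wa_add(&gt_wa_list, 0x20, 0x3, 0x1);    /* hypothetical register and bits */
        wa_list_apply(&gt_wa_list, fake_mmio);  /* after init, and again after reset */
        printf("reg 0x20 = 0x%x\n", fake_mmio[0x20 / 4]);
        return 0;
    }

Keeping the list as data rather than open-coded register writes is what lets the same entries be re-applied after resets and verified later.
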
336 static void gen6_ctx_workarounds_init(struct intel_engine_cs *engine,
342 static void gen7_ctx_workarounds_init(struct intel_engine_cs *engine,
348 static void gen8_ctx_workarounds_init(struct intel_engine_cs *engine,
396 static void bdw_ctx_workarounds_init(struct intel_engine_cs *engine,
399 struct drm_i915_private *i915 = engine->i915;
401 gen8_ctx_workarounds_init(engine, wal);
424 static void chv_ctx_workarounds_init(struct intel_engine_cs *engine,
427 gen8_ctx_workarounds_init(engine, wal);
436 static void gen9_ctx_workarounds_init(struct intel_engine_cs *engine,
439 struct drm_i915_private *i915 = engine->i915;
532 static void skl_tune_iz_hashing(struct intel_engine_cs *engine,
535 struct intel_gt *gt = engine->gt;
572 static void skl_ctx_workarounds_init(struct intel_engine_cs *engine,
575 gen9_ctx_workarounds_init(engine, wal);
576 skl_tune_iz_hashing(engine, wal);
579 static void bxt_ctx_workarounds_init(struct intel_engine_cs *engine,
582 gen9_ctx_workarounds_init(engine, wal);
593 static void kbl_ctx_workarounds_init(struct intel_engine_cs *engine,
596 struct drm_i915_private *i915 = engine->i915;
598 gen9_ctx_workarounds_init(engine, wal);
610 static void glk_ctx_workarounds_init(struct intel_engine_cs *engine,
613 gen9_ctx_workarounds_init(engine, wal);
620 static void cfl_ctx_workarounds_init(struct intel_engine_cs *engine,
623 gen9_ctx_workarounds_init(engine, wal);
634 static void icl_ctx_workarounds_init(struct intel_engine_cs *engine,
678 static void dg2_ctx_gt_tuning_init(struct intel_engine_cs *engine,
688 static void gen12_ctx_workarounds_init(struct intel_engine_cs *engine,
691 struct drm_i915_private *i915 = engine->i915;
753 static void dg1_ctx_workarounds_init(struct intel_engine_cs *engine,
756 gen12_ctx_workarounds_init(engine, wal);
767 static void dg2_ctx_workarounds_init(struct intel_engine_cs *engine,
770 dg2_ctx_gt_tuning_init(engine, wal);
789 static void xelpg_ctx_gt_tuning_init(struct intel_engine_cs *engine,
792 struct intel_gt *gt = engine->gt;
794 dg2_ctx_gt_tuning_init(engine, wal);
806 static void xelpg_ctx_workarounds_init(struct intel_engine_cs *engine,
809 struct intel_gt *gt = engine->gt;
811 xelpg_ctx_gt_tuning_init(engine, wal);
837 static void fakewa_disable_nestedbb_mode(struct intel_engine_cs *engine,
865 wa_masked_dis(wal, RING_MI_MODE(engine->mmio_base), TGL_NESTED_BB_EN);
868 static void gen12_ctx_gt_mocs_init(struct intel_engine_cs *engine,
878 if (engine->class == COPY_ENGINE_CLASS) {
879 mocs = engine->gt->mocs.uc_index;
881 BLIT_CCTL(engine->mmio_base),
894 gen12_ctx_gt_fake_wa_init(struct intel_engine_cs *engine,
897 if (GRAPHICS_VER_FULL(engine->i915) >= IP_VER(12, 55))
898 fakewa_disable_nestedbb_mode(engine, wal);
900 gen12_ctx_gt_mocs_init(engine, wal);
904 __intel_engine_init_ctx_wa(struct intel_engine_cs *engine,
908 struct drm_i915_private *i915 = engine->i915;
910 wa_init_start(wal, engine->gt, name, engine->name);
918 gen12_ctx_gt_fake_wa_init(engine, wal);
920 if (engine->class != RENDER_CLASS)
923 if (IS_GFX_GT_IP_RANGE(engine->gt, IP_VER(12, 70), IP_VER(12, 74)))
924 xelpg_ctx_workarounds_init(engine, wal);
926 dg2_ctx_workarounds_init(engine, wal);
928 dg1_ctx_workarounds_init(engine, wal);
930 gen12_ctx_workarounds_init(engine, wal);
932 icl_ctx_workarounds_init(engine, wal);
934 cfl_ctx_workarounds_init(engine, wal);
936 glk_ctx_workarounds_init(engine, wal);
938 kbl_ctx_workarounds_init(engine, wal);
940 bxt_ctx_workarounds_init(engine, wal);
942 skl_ctx_workarounds_init(engine, wal);
944 chv_ctx_workarounds_init(engine, wal);
946 bdw_ctx_workarounds_init(engine, wal);
948 gen7_ctx_workarounds_init(engine, wal);
950 gen6_ctx_workarounds_init(engine, wal);
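
The fragments from __intel_engine_init_ctx_wa (lines 904-950) show a newest-to-oldest platform dispatch, with several per-platform initializers chaining back into a shared older routine (skl_ctx_workarounds_init calling gen9_ctx_workarounds_init, dg1 calling gen12, and so on). A small sketch of that cascade pattern follows; the platform checks are hypothetical stand-ins for the real GRAPHICS_VER()/IS_*() tests.

    #include <stdbool.h>
    #include <stdio.h>

    /* Hypothetical platform description, not the real engine->i915 checks. */
    struct engine {
        int graphics_ver;
        bool is_skl;
    };

    struct wa_list {
        int count;
    };

    static void gen9_ctx_init(struct wa_list *wal)
    {
        wal->count++;              /* workarounds common to all gen9 parts */
    }

    static void skl_ctx_init(struct wa_list *wal)
    {
        gen9_ctx_init(wal);        /* reuse the shared gen9 set ... */
        wal->count++;              /* ... then add SKL-only tuning on top */
    }

    /* Newest platforms first; exactly one branch of the cascade is taken. */
    static void init_ctx_wa(const struct engine *e, struct wa_list *wal)
    {
        if (e->graphics_ver >= 12) {
            /* gen12+ platforms would chain their own initializers here */
        } else if (e->is_skl) {
            skl_ctx_init(wal);
        } else if (e->graphics_ver == 9) {
            gen9_ctx_init(wal);
        }
    }

    int main(void)
    {
        const struct engine skl = { .graphics_ver = 9, .is_skl = true };
        struct wa_list wal = { 0 };

        init_ctx_wa(&skl, &wal);
        printf("%d context workarounds recorded\n", wal.count);
        return 0;
    }
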
960 void intel_engine_init_ctx_wa(struct intel_engine_cs *engine)
962 __intel_engine_init_ctx_wa(engine, &engine->ctx_wa_list, "context");
967 struct i915_wa_list *wal = &rq->engine->ctx_wa_list;
968 struct intel_uncore *uncore = rq->engine->uncore;
979 ret = rq->engine->emit_flush(rq, EMIT_BARRIER);
983 if ((IS_GFX_GT_IP_RANGE(rq->engine->gt, IP_VER(12, 70), IP_VER(12, 74)) ||
984 IS_DG2(rq->i915)) && rq->engine->class == RENDER_CLASS)
1019 if ((IS_GFX_GT_IP_RANGE(rq->engine->gt, IP_VER(12, 70), IP_VER(12, 74)) ||
1020 IS_DG2(rq->i915)) && rq->engine->class == RENDER_CLASS) {
1033 ret = rq->engine->emit_flush(rq, EMIT_BARRIER);
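
The fragments around lines 967-1033 come from the path that emits the context workaround list into a request: the register writes are bracketed by emit_flush(rq, EMIT_BARRIER) calls before and after. A toy sketch of that "barrier, write the list, barrier" ordering follows; the emit helpers are hypothetical placeholders, not the real ring-emission code.

    #include <stdint.h>
    #include <stdio.h>

    struct wa {
        uint32_t reg;
        uint32_t set;
    };

    struct wa_list {
        struct wa entries[4];
        unsigned int count;
    };

    /* Hypothetical placeholder for rq->engine->emit_flush(rq, EMIT_BARRIER). */
    static int emit_barrier(void)
    {
        puts("barrier");
        return 0;
    }

    /* Hypothetical placeholder for the load-register-immediate emission. */
    static int emit_register_writes(const struct wa_list *wal)
    {
        for (unsigned int i = 0; i < wal->count; i++)
            printf("LRI reg=0x%x val=0x%x\n",
                   wal->entries[i].reg, wal->entries[i].set);
        return 0;
    }

    /* Mirrors the ordering in the fragments above: barrier, writes, barrier. */
    static int emit_ctx_wa(const struct wa_list *wal)
    {
        int ret;

        ret = emit_barrier();           /* let prior work settle first */
        if (ret)
            return ret;

        ret = emit_register_writes(wal);
        if (ret)
            return ret;

        return emit_barrier();          /* writes land before the context runs */
    }

    int main(void)
    {
        const struct wa_list wal = {
            .entries = { { .reg = 0x2580, .set = 0x1 } },   /* hypothetical */
            .count = 1,
        };

        return emit_ctx_wa(&wal);
    }
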
1460 * Though there are per-engine instances of these registers,
1461 * they retain their value through engine resets and should
1463 * the engine-specific workaround list.
1468 struct intel_engine_cs *engine;
1471 for_each_engine(engine, gt, id) {
1472 if (engine->class != VIDEO_DECODE_CLASS ||
1473 (engine->instance % 2))
1476 wa_write_or(wal, VDBOX_CGCTL3F10(engine->mmio_base),
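
The comment around lines 1460-1463 explains why some registers with per-engine instances still go on the GT workaround list: their values survive engine resets, so the GT list is the right place to restore them from. The loop fragments that follow iterate the video-decode engines and add a register derived from each engine's mmio_base. A condensed sketch of that shape, with a hypothetical class constant, register offset, and mmio bases:

    #include <stdint.h>
    #include <stdio.h>

    #define FAKE_VIDEO_DECODE_CLASS 1   /* hypothetical stand-in */

    struct engine {
        int class;
        int instance;
        uint32_t mmio_base;
    };

    struct wa_list {
        int count;
    };

    /* Hypothetical stand-in for wa_write_or(): set bits, record on the list. */
    static void wa_write_or_sketch(struct wa_list *wal, uint32_t reg, uint32_t bits)
    {
        wal->count++;
        printf("GT list: set 0x%x in reg 0x%x\n", bits, reg);
    }

    static void gt_add_vdbox_was(struct wa_list *gt_wal,
                                 struct engine *engines, int nengines)
    {
        for (int i = 0; i < nengines; i++) {
            const struct engine *e = &engines[i];

            /* Only even-numbered video-decode instances, as in the fragment. */
            if (e->class != FAKE_VIDEO_DECODE_CLASS || (e->instance % 2))
                continue;

            /* Per-engine MMIO offset, but recorded on the GT list because the
             * value survives engine-only resets. */
            wa_write_or_sketch(gt_wal, e->mmio_base + 0x3f10, 1u << 0);
        }
    }

    int main(void)
    {
        struct engine engines[] = {
            { .class = FAKE_VIDEO_DECODE_CLASS, .instance = 0, .mmio_base = 0x1c0000 },
            { .class = FAKE_VIDEO_DECODE_CLASS, .instance = 1, .mmio_base = 0x1c4000 },
        };
        struct wa_list gt_wal = { 0 };

        gt_add_vdbox_was(&gt_wal, engines, 2);
        return 0;
    }
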
1515 /* Empirical testing shows this register is unaffected by engine reset. */
1590 struct intel_engine_cs *engine;
1593 for_each_engine(engine, gt, id)
1594 if (engine->class == VIDEO_DECODE_CLASS)
1595 wa_write_or(wal, VDBOX_CGCTL3F1C(engine->mmio_base),
1616 * Issue is seen in a media KPI test running on the VDBOX engine
1634 * engine resets and also are not part of any engine's register state context.
1897 static void skl_whitelist_build(struct intel_engine_cs *engine)
1899 struct i915_wa_list *w = &engine->whitelist;
1901 if (engine->class != RENDER_CLASS)
1910 static void bxt_whitelist_build(struct intel_engine_cs *engine)
1912 if (engine->class != RENDER_CLASS)
1915 gen9_whitelist_build(&engine->whitelist);
1918 static void kbl_whitelist_build(struct intel_engine_cs *engine)
1920 struct i915_wa_list *w = &engine->whitelist;
1922 if (engine->class != RENDER_CLASS)
1931 static void glk_whitelist_build(struct intel_engine_cs *engine)
1933 struct i915_wa_list *w = &engine->whitelist;
1935 if (engine->class != RENDER_CLASS)
1944 static void cfl_whitelist_build(struct intel_engine_cs *engine)
1946 struct i915_wa_list *w = &engine->whitelist;
1948 if (engine->class != RENDER_CLASS)
1967 static void allow_read_ctx_timestamp(struct intel_engine_cs *engine)
1969 struct i915_wa_list *w = &engine->whitelist;
1971 if (engine->class != RENDER_CLASS)
1973 RING_CTX_TIMESTAMP(engine->mmio_base),
1977 static void cml_whitelist_build(struct intel_engine_cs *engine)
1979 allow_read_ctx_timestamp(engine);
1981 cfl_whitelist_build(engine);
1984 static void icl_whitelist_build(struct intel_engine_cs *engine)
1986 struct i915_wa_list *w = &engine->whitelist;
1988 allow_read_ctx_timestamp(engine);
1990 switch (engine->class) {
2017 whitelist_reg_ext(w, _MMIO(0x2000 + engine->mmio_base),
2020 whitelist_reg_ext(w, _MMIO(0x2014 + engine->mmio_base),
2023 whitelist_reg_ext(w, _MMIO(0x23B0 + engine->mmio_base),
2032 static void tgl_whitelist_build(struct intel_engine_cs *engine)
2034 struct i915_wa_list *w = &engine->whitelist;
2036 allow_read_ctx_timestamp(engine);
2038 switch (engine->class) {
2073 static void dg2_whitelist_build(struct intel_engine_cs *engine)
2075 struct i915_wa_list *w = &engine->whitelist;
2077 switch (engine->class) {
2088 static void xelpg_whitelist_build(struct intel_engine_cs *engine)
2090 struct i915_wa_list *w = &engine->whitelist;
2092 switch (engine->class) {
2103 void intel_engine_init_whitelist(struct intel_engine_cs *engine)
2105 struct drm_i915_private *i915 = engine->i915;
2106 struct i915_wa_list *w = &engine->whitelist;
2108 wa_init_start(w, engine->gt, "whitelist", engine->name);
2110 if (engine->gt->type == GT_MEDIA)
2112 else if (IS_GFX_GT_IP_RANGE(engine->gt, IP_VER(12, 70), IP_VER(12, 74)))
2113 xelpg_whitelist_build(engine);
2115 dg2_whitelist_build(engine);
2117 tgl_whitelist_build(engine);
2119 icl_whitelist_build(engine);
2121 cml_whitelist_build(engine);
2123 cfl_whitelist_build(engine);
2125 glk_whitelist_build(engine);
2127 kbl_whitelist_build(engine);
2129 bxt_whitelist_build(engine);
2131 skl_whitelist_build(engine);
2140 void intel_engine_apply_whitelist(struct intel_engine_cs *engine)
2142 const struct i915_wa_list *wal = &engine->whitelist;
2143 struct intel_uncore *uncore = engine->uncore;
2144 const u32 base = engine->mmio_base;
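
The whitelist fragments (lines 1897-2144) show per-platform build functions filling engine->whitelist with registers that user space batch buffers are allowed to touch, and intel_engine_apply_whitelist then programming the result relative to the engine's mmio_base (the real driver writes the entries into the engine's RING_FORCE_TO_NONPRIV slots). Below is a toy model of that build-then-apply idea; the slot count, slot offset, and register offsets are hypothetical.

    #include <stdint.h>
    #include <stdio.h>

    #define MAX_WHITELIST_SLOTS 12   /* hypothetical slot count */

    struct whitelist {
        uint32_t reg[MAX_WHITELIST_SLOTS];
        unsigned int count;
    };

    /* Record a register that user space batch buffers may access. */
    static void whitelist_reg(struct whitelist *w, uint32_t reg)
    {
        if (w->count < MAX_WHITELIST_SLOTS)
            w->reg[w->count++] = reg;
    }

    /* Program every slot relative to the engine's mmio base; unused slots are
     * zeroed in this toy model (the real driver points them at a benign
     * default register instead). */
    static void apply_whitelist(const struct whitelist *w, uint32_t mmio_base)
    {
        for (unsigned int i = 0; i < MAX_WHITELIST_SLOTS; i++) {
            uint32_t slot = mmio_base + 0x4d0 + 4 * i;   /* hypothetical offset */
            uint32_t val = i < w->count ? w->reg[i] : 0;

            printf("write slot 0x%x = 0x%x\n", slot, val);
        }
    }

    int main(void)
    {
        struct whitelist w = { .count = 0 };

        whitelist_reg(&w, 0x23b0);     /* e.g. a ctx-timestamp style register */
        apply_whitelist(&w, 0x2000);   /* hypothetical RCS-style mmio base */
        return 0;
    }
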
2171 engine_fake_wa_init(struct intel_engine_cs *engine, struct i915_wa_list *wal)
2183 if (GRAPHICS_VER(engine->i915) >= 12) {
2184 mocs_r = engine->gt->mocs.uc_index;
2185 mocs_w = engine->gt->mocs.uc_index;
2187 if (HAS_L3_CCS_READ(engine->i915) &&
2188 engine->class == COMPUTE_CLASS) {
2189 mocs_r = engine->gt->mocs.wb_index;
2197 drm_WARN_ON(&engine->i915->drm, mocs_r == 0);
2201 RING_CMD_CCTL(engine->mmio_base),
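
engine_fake_wa_init (lines 2171-2201) picks MOCS indices for RING_CMD_CCTL on gen12+: normally the uncached index for both reads and writes, but a write-back read index on compute engines when HAS_L3_CCS_READ is set, and it records the result as a "fake" workaround so the value is re-applied and verified like any real one. A small sketch of that selection follows; the bit layout of the two fields is hypothetical.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical field layout: MOCS read index in bits 15:8, write index in 7:0. */
    static uint32_t cmd_cctl_val(uint32_t mocs_read, uint32_t mocs_write)
    {
        return (mocs_read << 8) | mocs_write;
    }

    static uint32_t pick_cmd_cctl(bool is_compute, bool has_l3_ccs_read,
                                  uint32_t uc_index, uint32_t wb_index)
    {
        uint32_t mocs_r = uc_index;
        uint32_t mocs_w = uc_index;

        /* Compute engines that read compression metadata through L3 want a
         * write-back read index instead of the uncached one. */
        if (is_compute && has_l3_ccs_read)
            mocs_r = wb_index;

        return cmd_cctl_val(mocs_r, mocs_w);
    }

    int main(void)
    {
        printf("render  CMD_CCTL = 0x%x\n", pick_cmd_cctl(false, false, 3, 2));
        printf("compute CMD_CCTL = 0x%x\n", pick_cmd_cctl(true, true, 3, 2));
        return 0;
    }
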
2208 rcs_engine_wa_init(struct intel_engine_cs *engine, struct i915_wa_list *wal)
2210 struct drm_i915_private *i915 = engine->i915;
2211 struct intel_gt *gt = engine->gt;
2685 xcs_engine_wa_init(struct intel_engine_cs *engine, struct i915_wa_list *wal)
2687 struct drm_i915_private *i915 = engine->i915;
2692 RING_SEMA_WAIT_POLL(engine->mmio_base),
2696 if (NEEDS_FASTCOLOR_BLT_WABB(engine))
2697 wa_masked_field_set(wal, ECOSKPD(engine->mmio_base),
2703 ccs_engine_wa_init(struct intel_engine_cs *engine, struct i915_wa_list *wal)
2705 /* boilerplate for any CCS engine workaround */
2716 * part of an engine's register state context. If a register is part of a
2742 static void ccs_engine_wa_mode(struct intel_engine_cs *engine, struct i915_wa_list *wal)
2744 struct intel_gt *gt = engine->gt;
2769 * specific engine. Since all render+compute engines get reset
2772 * here and then add them to just a single RCS or CCS engine's
2773 * workaround list (whichever engine has the I915_ENGINE_FIRST_RENDER_COMPUTE flag).
2776 general_render_compute_wa_init(struct intel_engine_cs *engine, struct i915_wa_list *wal)
2778 struct drm_i915_private *i915 = engine->i915;
2779 struct intel_gt *gt = engine->gt;
2886 engine_init_workarounds(struct intel_engine_cs *engine, struct i915_wa_list *wal)
2888 if (GRAPHICS_VER(engine->i915) < 4)
2891 engine_fake_wa_init(engine, wal);
2895 * to a single RCS/CCS engine's workaround list since
2898 if (engine->flags & I915_ENGINE_FIRST_RENDER_COMPUTE) {
2899 general_render_compute_wa_init(engine, wal);
2900 ccs_engine_wa_mode(engine, wal);
2903 if (engine->class == COMPUTE_CLASS)
2904 ccs_engine_wa_init(engine, wal);
2905 else if (engine->class == RENDER_CLASS)
2906 rcs_engine_wa_init(engine, wal);
2908 xcs_engine_wa_init(engine, wal);
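
engine_init_workarounds (lines 2886-2908) shows the overall per-engine flow: fake workarounds first, then the shared render/compute workarounds only on the engine flagged I915_ENGINE_FIRST_RENDER_COMPUTE (one list is enough because the whole render/compute domain resets together), then a per-class initializer. A hedged sketch of that dispatch, with simplified stand-ins for the engine classes and flag:

    #include <stdbool.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for the driver's engine classes and flag. */
    enum engine_class { RENDER, COMPUTE, COPY, VIDEO };

    struct engine {
        enum engine_class class;
        bool first_render_compute;   /* analogous to I915_ENGINE_FIRST_RENDER_COMPUTE */
    };

    struct wa_list {
        int count;
    };

    static void add(struct wa_list *wal, const char *what)
    {
        wal->count++;
        printf("  %s\n", what);
    }

    static void engine_init_workarounds(const struct engine *e, struct wa_list *wal)
    {
        add(wal, "fake workarounds (e.g. CMD_CCTL MOCS)");

        /* Shared render/compute registers only need to live on one engine's
         * list, since the whole render/compute domain resets together. */
        if (e->first_render_compute)
            add(wal, "general render/compute workarounds");

        if (e->class == COMPUTE)
            add(wal, "CCS workarounds");
        else if (e->class == RENDER)
            add(wal, "RCS workarounds");
        else
            add(wal, "XCS workarounds");
    }

    int main(void)
    {
        const struct engine rcs0 = { .class = RENDER, .first_render_compute = true };
        struct wa_list wal = { 0 };

        puts("rcs0:");
        engine_init_workarounds(&rcs0, &wal);
        printf("%d entries\n", wal.count);
        return 0;
    }
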
2911 void intel_engine_init_workarounds(struct intel_engine_cs *engine)
2913 struct i915_wa_list *wal = &engine->wa_list;
2915 wa_init_start(wal, engine->gt, "engine", engine->name);
2916 engine_init_workarounds(engine, wal);
2920 void intel_engine_apply_workarounds(struct intel_engine_cs *engine)
2922 wa_list_apply(&engine->wa_list);
3041 vma = __vm_create_scratch_for_read(&ce->engine->gt->ggtt->vm,
3046 intel_engine_pm_get(ce->engine);
3113 intel_engine_pm_put(ce->engine);
3118 int intel_engine_verify_workarounds(struct intel_engine_cs *engine,
3121 return engine_wa_list_verify(engine->kernel_context,
3122 &engine->wa_list,
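
The final fragments (lines 3041-3122) are from the workaround verification path, which reads the listed registers back through a scratch buffer while holding an engine power reference and checks each observed value against the recorded entry. A simplified sketch of what such a per-entry check could look like; the real driver's check also uses a per-entry readback mask, which is omitted here.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    struct wa {
        uint32_t reg;
        uint32_t clr;
        uint32_t set;
    };

    /* Check a value read back from hardware against a recorded workaround:
     * every 'set' bit must still be present and every 'clr' bit absent. */
    static bool wa_verify_sketch(const struct wa *wa, uint32_t observed)
    {
        if ((observed & wa->set) != wa->set)
            return false;
        if (observed & wa->clr)
            return false;
        return true;
    }

    int main(void)
    {
        const struct wa wa = { .reg = 0x7004, .clr = 0x2, .set = 0x1 };   /* hypothetical */

        printf("readback 0x5 -> %s\n", wa_verify_sketch(&wa, 0x5) ? "ok" : "lost");
        printf("readback 0x6 -> %s\n", wa_verify_sketch(&wa, 0x6) ? "ok" : "lost");
        return 0;
    }
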