Searched full:engine (Results 1 – 25 of 1603) sorted by relevance

/linux/drivers/gpu/drm/i915/gt/
intel_engine_heartbeat.c
17 * While the engine is active, we send a periodic pulse along the engine
19 * is stuck, and we fail to preempt it, we declare the engine hung and
23 static bool next_heartbeat(struct intel_engine_cs *engine) in next_heartbeat() argument
28 delay = READ_ONCE(engine->props.heartbeat_interval_ms); in next_heartbeat()
30 rq = engine->heartbeat.systole; in next_heartbeat()
42 delay == engine->defaults.heartbeat_interval_ms) { in next_heartbeat()
50 longer = READ_ONCE(engine->props.preempt_timeout_ms) * 2; in next_heartbeat()
51 longer = intel_clamp_heartbeat_interval_ms(engine, longer); in next_heartbeat()
62 mod_delayed_work(system_highpri_wq, &engine->heartbeat.work, delay + 1); in next_heartbeat()
79 static void idle_pulse(struct intel_engine_cs *engine, struct i915_request *rq) in idle_pulse() argument
[all …]
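
The comments quoted in the intel_engine_heartbeat.c result describe i915's hang detection: while an engine is busy, a low-priority pulse request is sent along it at a fixed interval, and if an earlier pulse has still not managed to run (i.e. preemption failed), the driver escalates and eventually declares the engine hung. As a rough, self-contained sketch of that shape only (not the driver's actual code; toy_engine, toy_heartbeat and the escalation comment are hypothetical), a self-rearming delayed work could look like this:

    /* Minimal sketch of a self-rearming heartbeat; all names are hypothetical. */
    #include <linux/container_of.h>
    #include <linux/jiffies.h>
    #include <linux/printk.h>
    #include <linux/types.h>
    #include <linux/workqueue.h>

    struct toy_engine {
            struct delayed_work heartbeat;
            unsigned long interval_ms;      /* pulse period while the engine is busy */
            bool last_pulse_done;           /* did the previous pulse get to run? */
    };

    static void toy_heartbeat(struct work_struct *work)
    {
            struct toy_engine *e =
                    container_of(work, struct toy_engine, heartbeat.work);

            if (!e->last_pulse_done) {
                    /* The previous pulse never ran: a real driver would bump
                     * its priority here and, failing that, declare the engine
                     * hung and trigger a reset.
                     */
                    pr_warn("toy engine appears hung\n");
            }

            /* Submit the next idle pulse here, then re-arm for one more period. */
            e->last_pulse_done = false;
            schedule_delayed_work(&e->heartbeat,
                                  msecs_to_jiffies(e->interval_ms) + 1);
    }

To start the cycle, the owner would INIT_DELAYED_WORK(&e->heartbeat, toy_heartbeat) and queue it once; the work then keeps re-queueing itself while the engine stays busy.
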
mock_engine.c
60 static struct intel_ring *mock_ring(struct intel_engine_cs *engine) in mock_ring() argument
75 ring->vma = create_ring_vma(engine->gt->ggtt, PAGE_SIZE); in mock_ring()
93 static struct i915_request *first_request(struct mock_engine *engine) in first_request() argument
95 return list_first_entry_or_null(&engine->hw_queue, in first_request()
106 intel_engine_signal_breadcrumbs(request->engine); in advance()
111 struct mock_engine *engine = timer_container_of(engine, t, hw_delay); in hw_delay_complete() local
115 spin_lock_irqsave(&engine->hw_lock, flags); in hw_delay_complete()
118 request = first_request(engine); in hw_delay_complete()
126 while ((request = first_request(engine))) { in hw_delay_complete()
128 mod_timer(&engine->hw_delay, in hw_delay_complete()
[all …]
selftest_engine_heartbeat.c
14 static void reset_heartbeat(struct intel_engine_cs *engine) in reset_heartbeat() argument
16 intel_engine_set_heartbeat(engine, in reset_heartbeat()
17 engine->defaults.heartbeat_interval_ms); in reset_heartbeat()
37 static int engine_sync_barrier(struct intel_engine_cs *engine) in engine_sync_barrier() argument
39 return timeline_sync(engine->kernel_context->timeline); in engine_sync_barrier()
90 static int __live_idle_pulse(struct intel_engine_cs *engine, in __live_idle_pulse() argument
96 GEM_BUG_ON(!intel_engine_pm_is_awake(engine)); in __live_idle_pulse()
106 err = i915_active_acquire_preallocate_barrier(&p->active, engine); in __live_idle_pulse()
116 GEM_BUG_ON(llist_empty(&engine->barrier_tasks)); in __live_idle_pulse()
118 err = fn(engine); in __live_idle_pulse()
[all …]
intel_engine_pm.h
17 intel_engine_pm_is_awake(const struct intel_engine_cs *engine) in intel_engine_pm_is_awake() argument
19 return intel_wakeref_is_active(&engine->wakeref); in intel_engine_pm_is_awake()
22 static inline void __intel_engine_pm_get(struct intel_engine_cs *engine) in __intel_engine_pm_get() argument
24 __intel_wakeref_get(&engine->wakeref); in __intel_engine_pm_get()
27 static inline void intel_engine_pm_get(struct intel_engine_cs *engine) in intel_engine_pm_get() argument
29 intel_wakeref_get(&engine->wakeref); in intel_engine_pm_get()
32 static inline bool intel_engine_pm_get_if_awake(struct intel_engine_cs *engine) in intel_engine_pm_get_if_awake() argument
34 return intel_wakeref_get_if_active(&engine->wakeref); in intel_engine_pm_get_if_awake()
37 static inline void intel_engine_pm_might_get(struct intel_engine_cs *engine) in intel_engine_pm_might_get() argument
39 if (!intel_engine_is_virtual(engine)) { in intel_engine_pm_might_get()
[all …]
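
The intel_engine_pm.h result above is the wakeref-based power-management interface: intel_engine_pm_get() wakes a parked engine and pins it awake, intel_engine_pm_get_if_awake() only takes a reference if the engine is already running, and intel_engine_pm_is_awake() queries the state. A hedged sketch of the usual caller-side pattern, where toy_touch_engine() and do_work_on_hw() are invented placeholders and the matching intel_engine_pm_put() is assumed from the same header:

    #include "intel_engine_pm.h"    /* the header shown in the result above */

    /* do_work_on_hw() is a stand-in for whatever actually touches the HW. */
    static int do_work_on_hw(struct intel_engine_cs *engine);

    static int toy_touch_engine(struct intel_engine_cs *engine)
    {
            int err;

            intel_engine_pm_get(engine);    /* wake the engine if it was parked */
            err = do_work_on_hw(engine);    /* hypothetical HW access */
            intel_engine_pm_put(engine);    /* allow it to park again when idle */

            return err;
    }
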
intel_engine_user.c
39 void intel_engine_add_user(struct intel_engine_cs *engine) in intel_engine_add_user() argument
41 llist_add(&engine->uabi_llist, &engine->i915->uabi_engines_llist); in intel_engine_add_user()
87 struct intel_engine_cs *engine = in sort_engines() local
88 container_of(pos, typeof(*engine), uabi_llist); in sort_engines()
89 list_add(&engine->uabi_list, engines); in sort_engines()
97 u8 engine; in set_scheduler_caps() member
106 struct intel_engine_cs *engine; in set_scheduler_caps() local
111 for_each_uabi_engine(engine, i915) { /* all engines must agree! */ in set_scheduler_caps()
114 if (engine->sched_engine->schedule) in set_scheduler_caps()
121 if (intel_uc_uses_guc_submission(&engine->gt->uc)) in set_scheduler_caps()
[all …]
intel_engine_pm.c
20 static void intel_gsc_idle_msg_enable(struct intel_engine_cs *engine) in intel_gsc_idle_msg_enable() argument
22 struct drm_i915_private *i915 = engine->i915; in intel_gsc_idle_msg_enable()
24 if (MEDIA_VER(i915) >= 13 && engine->id == GSC0) { in intel_gsc_idle_msg_enable()
25 intel_uncore_write(engine->gt->uncore, in intel_gsc_idle_msg_enable()
29 intel_uncore_write(engine->gt->uncore, in intel_gsc_idle_msg_enable()
42 int type = intel_gt_coherent_map_type(ce->engine->gt, obj, true); in dbg_poison_ce()
60 struct intel_engine_cs *engine = in __engine_unpark() local
61 container_of(wf, typeof(*engine), wakeref); in __engine_unpark()
64 ENGINE_TRACE(engine, "\n"); in __engine_unpark()
66 engine->wakeref_track = intel_gt_pm_get(engine->gt); in __engine_unpark()
[all …]
selftest_context.c
75 static int __live_context_size(struct intel_engine_cs *engine) in __live_context_size() argument
82 ce = intel_context_create(engine); in __live_context_size()
91 intel_gt_coherent_map_type(engine->gt, in __live_context_size()
112 vaddr += engine->context_size - I915_GTT_PAGE_SIZE; in __live_context_size()
127 rq = intel_engine_create_kernel_request(engine); in __live_context_size()
137 pr_err("%s context overwrote trailing red-zone!", engine->name); in __live_context_size()
151 struct intel_engine_cs *engine; in live_context_size() local
160 for_each_engine(engine, gt, id) { in live_context_size()
163 if (!engine->context_size) in live_context_size()
166 intel_engine_pm_get(engine); in live_context_size()
[all …]
selftest_engine_pm.c
76 struct intel_engine_cs *engine = ce->engine; in __measure_timestamps() local
77 u32 *sema = memset32(engine->status_page.addr + 1000, 0, 5); in __measure_timestamps()
78 u32 offset = i915_ggtt_offset(engine->status_page.vma); in __measure_timestamps()
96 cs = emit_srm(cs, RING_TIMESTAMP(engine->mmio_base), offset + 4000); in __measure_timestamps()
97 cs = emit_srm(cs, RING_CTX_TIMESTAMP(engine->mmio_base), offset + 4004); in __measure_timestamps()
102 cs = emit_srm(cs, RING_TIMESTAMP(engine->mmio_base), offset + 4016); in __measure_timestamps()
103 cs = emit_srm(cs, RING_CTX_TIMESTAMP(engine->mmio_base), offset + 4012); in __measure_timestamps()
108 intel_engine_flush_submission(engine); in __measure_timestamps()
132 engine->name, sema[1], sema[3], sema[0], sema[4]); in __measure_timestamps()
139 static int __live_engine_timestamps(struct intel_engine_cs *engine) in __live_engine_timestamps() argument
[all …]
selftest_workarounds.c
34 } engine[I915_NUM_ENGINES]; member
64 struct intel_engine_cs *engine; in reference_lists_init() local
73 for_each_engine(engine, gt, id) { in reference_lists_init()
74 struct i915_wa_list *wal = &lists->engine[id].wa_list; in reference_lists_init()
76 wa_init_start(wal, gt, "REF", engine->name); in reference_lists_init()
77 engine_init_workarounds(engine, wal); in reference_lists_init()
80 __intel_engine_init_ctx_wa(engine, in reference_lists_init()
81 &lists->engine[id].ctx_wa_list, in reference_lists_init()
89 struct intel_engine_cs *engine; in reference_lists_fini() local
92 for_each_engine(engine, gt, id) in reference_lists_fini()
[all …]
selftest_execlists.c
24 #define CS_GPR(engine, n) ((engine)->mmio_base + 0x600 + (n) * 4) argument
42 static int wait_for_submit(struct intel_engine_cs *engine, in wait_for_submit() argument
47 tasklet_hi_schedule(&engine->sched_engine->tasklet); in wait_for_submit()
57 intel_engine_flush_submission(engine); in wait_for_submit()
58 if (!READ_ONCE(engine->execlists.pending[0]) && is_active(rq)) in wait_for_submit()
68 static int wait_for_reset(struct intel_engine_cs *engine, in wait_for_reset() argument
76 intel_engine_flush_submission(engine); in wait_for_reset()
78 if (READ_ONCE(engine->execlists.pending[0])) in wait_for_reset()
90 engine->name, in wait_for_reset()
100 engine->name, in wait_for_reset()
[all …]
selftest_ring_submission.c
9 static struct i915_vma *create_wally(struct intel_engine_cs *engine) in create_wally() argument
16 obj = i915_gem_object_create_internal(engine->i915, 4096); in create_wally()
20 vma = i915_vma_instance(obj, engine->gt->vm, NULL); in create_wally()
44 if (GRAPHICS_VER(engine->i915) >= 6) { in create_wally()
47 } else if (GRAPHICS_VER(engine->i915) >= 4) { in create_wally()
61 vma->private = intel_context_create(engine); /* dummy residuals */ in create_wally()
89 static int new_context_sync(struct intel_engine_cs *engine) in new_context_sync() argument
94 ce = intel_context_create(engine); in new_context_sync()
104 static int mixed_contexts_sync(struct intel_engine_cs *engine, u32 *result) in mixed_contexts_sync() argument
111 err = context_sync(engine->kernel_context); in mixed_contexts_sync()
[all …]
selftest_mocs.c
24 static struct intel_context *mocs_context_create(struct intel_engine_cs *engine) in mocs_context_create() argument
28 ce = intel_context_create(engine); in mocs_context_create()
134 struct intel_gt *gt = rq->engine->gt; in read_mocs_table()
143 addr = mocs_offset(rq->engine); in read_mocs_table()
160 static int check_mocs_table(struct intel_engine_cs *engine, in check_mocs_table() argument
173 engine->name, i, **vaddr, expect); in check_mocs_table()
192 static int check_l3cc_table(struct intel_engine_cs *engine, in check_l3cc_table() argument
205 if (!mcr_range(engine->i915, reg) && **vaddr != expect) { in check_l3cc_table()
207 engine->name, i, **vaddr, expect); in check_l3cc_table()
238 if (!err && ce->engine->class == RENDER_CLASS) in check_mocs_engine()
[all …]
selftest_engine_cs.c
44 static i915_reg_t timestamp_reg(struct intel_engine_cs *engine) in timestamp_reg() argument
46 struct drm_i915_private *i915 = engine->i915; in timestamp_reg()
49 return RING_TIMESTAMP_UDW(engine->mmio_base); in timestamp_reg()
51 return RING_TIMESTAMP(engine->mmio_base); in timestamp_reg()
70 *cs++ = i915_mmio_reg_offset(timestamp_reg(rq->engine)); in write_timestamp()
86 obj = i915_gem_object_create_internal(ce->engine->i915, PAGE_SIZE); in create_empty_batch()
136 struct intel_engine_cs *engine; in perf_mi_bb_start() local
145 for_each_engine(engine, gt, id) { in perf_mi_bb_start()
146 struct intel_context *ce = engine->kernel_context; in perf_mi_bb_start()
151 if (GRAPHICS_VER(engine->i915) < 7 && engine->id != RCS0) in perf_mi_bb_start()
[all …]
selftest_gt_pm.c
39 static u32 read_timestamp(struct intel_engine_cs *engine) in read_timestamp() argument
41 struct drm_i915_private *i915 = engine->i915; in read_timestamp()
44 ENGINE_READ_FW(engine, RING_TIMESTAMP); in read_timestamp()
47 return ENGINE_READ_FW(engine, RING_TIMESTAMP_UDW); in read_timestamp()
49 return ENGINE_READ_FW(engine, RING_TIMESTAMP); in read_timestamp()
52 static void measure_clocks(struct intel_engine_cs *engine, in measure_clocks() argument
61 cycles[i] = -read_timestamp(engine); in measure_clocks()
66 cycles[i] += read_timestamp(engine); in measure_clocks()
82 struct intel_engine_cs *engine; in live_gt_clocks() local
98 for_each_engine(engine, gt, id) { in live_gt_clocks()
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/core/
engine.c
24 #include <core/engine.h>
31 nvkm_engine_chsw_load(struct nvkm_engine *engine) in nvkm_engine_chsw_load() argument
33 if (engine->func->chsw_load) in nvkm_engine_chsw_load()
34 return engine->func->chsw_load(engine); in nvkm_engine_chsw_load()
39 nvkm_engine_reset(struct nvkm_engine *engine) in nvkm_engine_reset() argument
41 if (engine->func->reset) in nvkm_engine_reset()
42 return engine->func->reset(engine); in nvkm_engine_reset()
44 nvkm_subdev_fini(&engine->subdev, false); in nvkm_engine_reset()
45 return nvkm_subdev_init(&engine->subdev); in nvkm_engine_reset()
51 struct nvkm_engine *engine = *pengine; in nvkm_engine_unref() local
[all …]
/linux/drivers/gpu/drm/sun4i/
sunxi_engine.h
27 * This callback allows to prepare our engine for an atomic
34 void (*atomic_begin)(struct sunxi_engine *engine,
51 int (*atomic_check)(struct sunxi_engine *engine,
63 void (*commit)(struct sunxi_engine *engine,
71 * the layers supported by that engine.
81 struct sunxi_engine *engine);
87 * engine. This is useful only for the composite output.
91 void (*apply_color_correction)(struct sunxi_engine *engine);
97 * engine. This is useful only for the composite output.
101 void (*disable_color_correction)(struct sunxi_engine *engine);
[all …]
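
The sunxi_engine.h result documents an ops table: each display backend fills in only the callbacks it supports (atomic_begin, atomic_check, commit, and the composite-only color-correction hooks), and callers test for NULL before dispatching. A minimal sketch of that pattern, with every name other than the callback names invented for illustration:

    struct toy_engine;      /* forward declaration for the ops table */

    /* Per-backend callbacks, mirroring the kernel-doc quoted above. */
    struct toy_engine_ops {
            void (*commit)(struct toy_engine *engine);
            void (*apply_color_correction)(struct toy_engine *engine);
    };

    struct toy_engine {
            const struct toy_engine_ops *ops;
    };

    /* NULL-checked dispatch: a backend that does not implement a callback
     * simply leaves it unset.
     */
    static inline void toy_engine_commit(struct toy_engine *engine)
    {
            if (engine->ops->commit)
                    engine->ops->commit(engine);
    }
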
/linux/drivers/video/fbdev/via/
accel.c
13 static int viafb_set_bpp(void __iomem *engine, u8 bpp) in viafb_set_bpp() argument
19 gemode = readl(engine + VIA_REG_GEMODE) & 0xfffffcfc; in viafb_set_bpp()
34 writel(gemode, engine + VIA_REG_GEMODE); in viafb_set_bpp()
39 static int hw_bitblt_1(void __iomem *engine, u8 op, u32 width, u32 height, in hw_bitblt_1() argument
79 ret = viafb_set_bpp(engine, dst_bpp); in hw_bitblt_1()
91 writel(tmp, engine + 0x08); in hw_bitblt_1()
100 writel(tmp, engine + 0x0C); in hw_bitblt_1()
108 writel(tmp, engine + 0x10); in hw_bitblt_1()
111 writel(fg_color, engine + 0x18); in hw_bitblt_1()
114 writel(bg_color, engine + 0x1C); in hw_bitblt_1()
[all …]
/linux/Documentation/devicetree/bindings/display/
allwinner,sun4i-a10-display-engine.yaml
4 $id: http://devicetree.org/schemas/display/allwinner,sun4i-a10-display-engine.yaml#
7 title: Allwinner A10 Display Engine Pipeline
14 The display engine pipeline (and its entry point, since it can be
52 - allwinner,sun4i-a10-display-engine
53 - allwinner,sun5i-a10s-display-engine
54 - allwinner,sun5i-a13-display-engine
55 - allwinner,sun6i-a31-display-engine
56 - allwinner,sun6i-a31s-display-engine
57 - allwinner,sun7i-a20-display-engine
58 - allwinner,sun8i-a23-display-engine
[all …]
/linux/drivers/gpu/drm/i915/selftests/
intel_scheduler_helpers.c
21 struct intel_engine_cs *engine; in intel_selftest_find_any_engine() local
24 for_each_engine(engine, gt, id) in intel_selftest_find_any_engine()
25 return engine; in intel_selftest_find_any_engine()
27 pr_err("No valid engine found!\n"); in intel_selftest_find_any_engine()
31 int intel_selftest_modify_policy(struct intel_engine_cs *engine, in intel_selftest_modify_policy() argument
37 saved->reset = engine->i915->params.reset; in intel_selftest_modify_policy()
38 saved->flags = engine->flags; in intel_selftest_modify_policy()
39 saved->timeslice = engine->props.timeslice_duration_ms; in intel_selftest_modify_policy()
40 saved->preempt_timeout = engine->props.preempt_timeout_ms; in intel_selftest_modify_policy()
46 * together with engine reset on pre-emption timeout. in intel_selftest_modify_policy()
[all …]
/linux/drivers/gpu/drm/xe/
xe_hw_engine_types.h
13 /* See "Engine ID Definition" struct in the Icelake PRM */
69 * struct xe_hw_engine_class_intf - per hw engine class struct interface
71 * Contains all the hw engine properties per engine class.
82 /** @sched_props.set_job_timeout: Set job timeout in ms for engine */
84 /** @sched_props.job_timeout_min: Min job timeout in ms for engine */
86 /** @sched_props.job_timeout_max: Max job timeout in ms for engine */
104 * struct xe_hw_engine - Hardware engine
106 * Contains all the hardware engine state for physical instances.
109 /** @gt: GT structure this hw engine belongs to */
111 /** @name: name of this hw engine */
[all …]
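
The xe_hw_engine_types.h result describes per-engine-class scheduling limits: a job timeout plus the minimum and maximum values it may be set to. The usual way such limits are applied is to clamp a requested value into the [min, max] range before storing it; a small illustrative sketch (struct, field and function names here are hypothetical, not the xe driver's):

    #include <linux/minmax.h>
    #include <linux/types.h>

    struct toy_sched_props {
            u32 job_timeout_ms;     /* current value */
            u32 job_timeout_min;    /* lower bound, in ms */
            u32 job_timeout_max;    /* upper bound, in ms */
    };

    static void toy_set_job_timeout(struct toy_sched_props *p, u32 timeout_ms)
    {
            p->job_timeout_ms = clamp(timeout_ms,
                                      p->job_timeout_min, p->job_timeout_max);
    }
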
/linux/drivers/dma/
Kconfig
3 # DMA engine configuration
7 bool "DMA Engine support"
18 bool "DMA Engine debugging"
22 say N here. This enables DMA engine core and driver debugging.
25 bool "DMA Engine verbose debugging"
30 the DMA engine core and drivers.
61 tristate "Altera / Intel mSGDMA Engine"
74 provide DMA engine support. This includes the original ARM
130 tristate "Broadcom SBA RAID engine support"
139 Enable support for Broadcom SBA RAID Engine. The SBA RAID
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/engine/dma/
Kbuild
2 nvkm-y += nvkm/engine/dma/base.o
3 nvkm-y += nvkm/engine/dma/nv04.o
4 nvkm-y += nvkm/engine/dma/nv50.o
5 nvkm-y += nvkm/engine/dma/gf100.o
6 nvkm-y += nvkm/engine/dma/gf119.o
7 nvkm-y += nvkm/engine/dma/gv100.o
9 nvkm-y += nvkm/engine/dma/user.o
10 nvkm-y += nvkm/engine/dma/usernv04.o
11 nvkm-y += nvkm/engine/dma/usernv50.o
12 nvkm-y += nvkm/engine/dma/usergf100.o
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/engine/gr/
base.c
26 #include <engine/fifo.h>
56 nvkm_gr_chsw_load(struct nvkm_engine *engine) in nvkm_gr_chsw_load() argument
58 struct nvkm_gr *gr = nvkm_gr(engine); in nvkm_gr_chsw_load()
65 nvkm_gr_tile(struct nvkm_engine *engine, int region, struct nvkm_fb_tile *tile) in nvkm_gr_tile() argument
67 struct nvkm_gr *gr = nvkm_gr(engine); in nvkm_gr_tile()
91 struct nvkm_gr *gr = nvkm_gr(oclass->engine); in nvkm_gr_oclass_get()
115 struct nvkm_gr *gr = nvkm_gr(oclass->engine); in nvkm_gr_cclass_new()
122 nvkm_gr_intr(struct nvkm_engine *engine) in nvkm_gr_intr() argument
124 struct nvkm_gr *gr = nvkm_gr(engine); in nvkm_gr_intr()
129 nvkm_gr_nonstall(struct nvkm_engine *engine) in nvkm_gr_nonstall() argument
[all …]
/linux/Documentation/devicetree/bindings/media/
allwinner,sun4i-a10-video-engine.yaml
4 $id: http://devicetree.org/schemas/media/allwinner,sun4i-a10-video-engine.yaml#
7 title: Allwinner A10 Video Engine
16 - allwinner,sun4i-a10-video-engine
17 - allwinner,sun5i-a13-video-engine
18 - allwinner,sun7i-a20-video-engine
19 - allwinner,sun8i-a33-video-engine
20 - allwinner,sun8i-h3-video-engine
21 - allwinner,sun8i-v3s-video-engine
22 - allwinner,sun8i-r40-video-engine
23 - allwinner,sun20i-d1-video-engine
[all …]
/linux/drivers/soc/sunxi/
sunxi_mbus.c
13 * The display engine virtual devices are not strictly speaking
18 "allwinner,sun4i-a10-display-engine",
19 "allwinner,sun5i-a10s-display-engine",
20 "allwinner,sun5i-a13-display-engine",
21 "allwinner,sun6i-a31-display-engine",
22 "allwinner,sun6i-a31s-display-engine",
23 "allwinner,sun7i-a20-display-engine",
24 "allwinner,sun8i-a23-display-engine",
25 "allwinner,sun8i-a33-display-engine",
26 "allwinner,sun9i-a80-display-engine",
[all …]
