| /linux/drivers/gpu/drm/i915/gt/ |
| intel_engine_heartbeat.c | While an engine is active, a periodic heartbeat pulse is sent along it; if the pulse gets stuck and preemption fails, the engine is declared hung. The matched lines show next_heartbeat() reading props.heartbeat_interval_ms, stretching the default interval to at least twice the preemption timeout via intel_clamp_heartbeat_interval_ms() and re-arming the delayed work on system_highpri_wq; the other matches are idle_pulse(), show_heartbeat(), reset_engine(), heartbeat(), intel_engine_unpark_heartbeat(), intel_engine_park_heartbeat(), intel_gt_unpark_heartbeats(), intel_gt_park_heartbeats(), intel_engine_init_heartbeat(), __intel_engine_pulse(), set_heartbeat(), intel_engine_set_heartbeat(), intel_engine_pulse() and intel_engine_flush_barriers(). A sketch of the interval handling follows below. |
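The interval handling visible in the next_heartbeat() fragments can be pieced together roughly as follows. This is a hedged sketch reconstructed from the matched lines only; the surrounding control flow, the engine->heartbeat.work field name and the jiffies conversion are assumptions, not the driver's exact code.

```c
/* Sketch of next_heartbeat() based on the matched fragments above;
 * not the verbatim driver code. */
static bool next_heartbeat(struct intel_engine_cs *engine)
{
	unsigned long delay;

	delay = READ_ONCE(engine->props.heartbeat_interval_ms);
	if (!delay)
		return false;	/* heartbeat disabled for this engine */

	/*
	 * When running with the default interval, stretch the heartbeat so
	 * it never fires faster than twice the preemption timeout, giving a
	 * stuck pulse time to be preempted before the engine is called hung.
	 */
	if (delay == engine->defaults.heartbeat_interval_ms) {
		unsigned long longer;

		longer = READ_ONCE(engine->props.preempt_timeout_ms) * 2;
		longer = intel_clamp_heartbeat_interval_ms(engine, longer);
		if (longer > delay)
			delay = longer;
	}

	/* Re-arm the delayed work; field name engine->heartbeat.work assumed. */
	mod_delayed_work(system_highpri_wq, &engine->heartbeat.work,
			 msecs_to_jiffies(delay));
	return true;
}
```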
| mock_engine.c | Mock engine used by the GT selftests: mock_ring() builds a ring backed by create_ring_vma(engine->gt->ggtt, PAGE_SIZE), first_request() pops the head of engine->hw_queue with list_first_entry_or_null(), advance() signals breadcrumbs via intel_engine_signal_breadcrumbs(), and the hw_delay_complete() timer callback (obtained with timer_container_of()) walks hw_queue under engine->hw_lock, re-arming engine->hw_delay with mod_timer() as it goes. The general pattern is sketched below. |
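The hw_delay_complete() fragments show the usual shape of a mock device driven by a timer: pop requests off a software queue under a spinlock and either complete them or re-arm the timer for the next delayed one. A rough sketch of that pattern; the advance() completion step and the per-request delay field are assumptions for illustration, not taken verbatim from the file.

```c
/* Timer-driven completion pattern suggested by the mock_engine.c
 * fragments (sketch; request->delay is an assumed field). */
static void hw_delay_complete(struct timer_list *t)
{
	struct mock_engine *engine = timer_container_of(engine, t, hw_delay);
	struct i915_request *request;
	unsigned long flags;

	spin_lock_irqsave(&engine->hw_lock, flags);

	/* Complete the request whose delay has just expired... */
	request = first_request(engine);
	if (request)
		advance(request);

	/* ...then either re-arm for the next delayed request or drain the
	 * immediately-ready ones. */
	while ((request = first_request(engine))) {
		if (request->delay) {
			mod_timer(&engine->hw_delay,
				  jiffies + request->delay);
			break;
		}
		advance(request);
	}

	spin_unlock_irqrestore(&engine->hw_lock, flags);
}
```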
|
| selftest_engine_heartbeat.c | Heartbeat selftests: reset_heartbeat() restores engine->defaults.heartbeat_interval_ms via intel_engine_set_heartbeat(), engine_sync_barrier() syncs against the kernel context timeline, and __live_idle_pulse() asserts the engine is awake, preallocates idle barriers with i915_active_acquire_preallocate_barrier(), checks that engine->barrier_tasks is populated and invokes the test callback. |
|
| intel_engine_pm.h | Engine runtime-PM inlines built on the engine wakeref: intel_engine_pm_is_awake() wraps intel_wakeref_is_active(), __intel_engine_pm_get() and intel_engine_pm_get() take a wakeref, intel_engine_pm_get_if_awake() only takes one if the engine is already active, and intel_engine_pm_might_get() applies its annotation only to non-virtual engines. A usage sketch follows below. |
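These helpers follow the usual wakeref pattern: hold a reference while touching the engine, or opportunistically grab one only if the engine is already awake. A minimal usage sketch, assuming the matching intel_engine_pm_put() release helper from the same header and a hypothetical do_work()/poke() callback:

```c
/* Hold the engine awake for the duration of some work (sketch). */
static int with_engine_awake(struct intel_engine_cs *engine,
			     int (*do_work)(struct intel_engine_cs *engine))
{
	int err;

	intel_engine_pm_get(engine);	/* takes engine->wakeref */
	err = do_work(engine);
	intel_engine_pm_put(engine);	/* assumed counterpart release */

	return err;
}

/* Opportunistic variant: only act if the engine is already awake. */
static void maybe_poke_engine(struct intel_engine_cs *engine,
			      void (*poke)(struct intel_engine_cs *engine))
{
	if (!intel_engine_pm_get_if_awake(engine))
		return;

	poke(engine);
	intel_engine_pm_put(engine);
}
```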
|
| intel_engine_user.c | Registration of engines with userspace: intel_engine_add_user() pushes the engine onto i915->uabi_engines_llist with llist_add(), sort_engines() moves each entry onto a sorted uabi_list via container_of(), and set_scheduler_caps() walks for_each_uabi_engine() so that all engines agree on the advertised scheduler capabilities. Further matches: add_legacy_ring(), engine_rename(), intel_engines_driver_register() and intel_engines_has_context_isolation(). |
| intel_engine_pm.c | Engine power-management callbacks: intel_gsc_idle_msg_enable() programs the GSC idle-messaging registers on MEDIA_VER >= 13 for the GSC0 engine, dbg_poison_ce() picks a coherent map type via intel_gt_coherent_map_type(), and __engine_unpark() (the wakeref callback recovered with container_of()) traces the event and takes a GT PM reference with intel_gt_pm_get(). |
|
| selftest_context.c | Context selftests: __live_context_size() creates a context on the engine, maps its state with intel_gt_coherent_map_type(), places a red zone at the tail of the context image (engine->context_size - I915_GTT_PAGE_SIZE), submits a kernel request and reports "%s context overwrote trailing red-zone!" if it is clobbered; live_context_size() iterates for_each_engine(), skipping engines with no context_size, with the engine PM held. Further matches: __live_active_context(), live_active_context(), __live_remote_context() and live_remote_context(). |
| selftest_engine_pm.c | Engine PM selftests: __measure_timestamps() clears a scratch area in the engine status page with memset32(), uses emit_srm() and i915_ggtt_offset() to store RING_TIMESTAMP and RING_CTX_TIMESTAMP into the status page, flushes submission with intel_engine_flush_submission() and compares the captured semaphore values; __live_engine_timestamps() drives the measurement. |
|
| selftest_workarounds.c | Workaround selftests keep a per-engine array of reference lists: reference_lists_init() walks for_each_engine(), starts a "REF" wa_list per engine with wa_init_start(), fills it via engine_init_workarounds() and builds the context workarounds with __intel_engine_init_ctx_wa(); reference_lists_fini() tears the lists down again. |
|
| selftest_execlists.c | Execlists selftests. wait_for_submit() kicks the scheduler tasklet with tasklet_hi_schedule(), flushes submission and polls until execlists.pending[0] clears and the request is active; wait_for_reset() polls similarly for a pending reset to complete. The remaining matches are the individual tests and their helpers: live_sanitycheck(), live_unlite_restore(), live_unlite_ring(), live_pin_rewind(), live_hold_reset(), live_error_interrupt(), the semaphore and timeslice tests (semaphore_queue(), release_queue(), slice_semaphore_queue(), live_timeslice_preempt(), live_timeslice_rewind(), live_timeslice_queue(), live_timeslice_nopreempt()), the preemption tests (live_busywait_preempt(), live_preempt(), live_late_preempt(), live_nopreempt(), live_suppress_self_preempt(), live_chain_preempt(), live_preempt_ring(), live_preempt_gang(), live_preempt_user(), live_preempt_timeout()), create_gang(), create_gpr_user(), create_gpr_client(), smoke_crescendo(), and the virtual-engine tests live_virtual_engine(), preserved_virtual_engine() and reset_virtual_engine(). The submit-polling idiom is sketched below. |
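The wait_for_submit() fragments outline a common selftest idiom: kick the scheduling tasklet, flush submission, and poll until the request is visibly active with nothing still pending in the ELSP. A sketch of that loop, with the timeout handling and the activity check assumed rather than copied from the file:

```c
/* Polling idiom suggested by the wait_for_submit() fragments (sketch). */
static int wait_for_submit(struct intel_engine_cs *engine,
			   struct i915_request *rq,
			   unsigned long timeout)
{
	/* Make sure the submission tasklet has had a chance to run. */
	tasklet_hi_schedule(&engine->sched_engine->tasklet);

	timeout += jiffies;
	do {
		intel_engine_flush_submission(engine);

		/* Done once nothing is pending and the request is running. */
		if (!READ_ONCE(engine->execlists.pending[0]) &&
		    i915_request_is_active(rq))
			return 0;

		cond_resched();
	} while (time_before(jiffies, timeout));

	return -ETIME;
}
```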
| selftest_ring_submission.c | Ring-submission selftests: create_wally() allocates a 4096-byte internal object, binds it as a VMA in engine->gt->vm, emits generation-dependent commands (GRAPHICS_VER >= 6 versus >= 4) and attaches a dummy context to vma->private for the residuals workaround; new_context_sync() creates and syncs a fresh context, while mixed_contexts_sync() alternates with the engine's kernel context. |
|
| selftest_mocs.c | MOCS selftests: mocs_context_create() spawns a context on the engine, read_mocs_table() reads the table at mocs_offset(rq->engine), check_mocs_table() and check_l3cc_table() compare the values read back against the expected entries (skipping mcr_range() registers for L3CC) and report mismatches per engine, and check_mocs_engine() runs an extra step only on RENDER_CLASS engines. |
|
| selftest_engine_cs.c | Engine command-streamer perf selftests: timestamp_reg() picks RING_TIMESTAMP_UDW or RING_TIMESTAMP for the engine's mmio_base depending on the platform, write_timestamp() emits a store of that register, create_empty_batch() builds a minimal batch, and perf_mi_bb_start() measures batch-start overhead on each engine's kernel context, skipping non-RCS0 engines before GRAPHICS_VER 7. |
|
| /linux/drivers/gpu/drm/nouveau/nvkm/core/ |
| engine.c | NVKM engine core: nvkm_engine_chsw_load() forwards to the engine's chsw_load() hook when one is provided, nvkm_engine_reset() uses the engine's reset() hook if present and otherwise falls back to nvkm_subdev_fini() followed by nvkm_subdev_init() on the engine's subdev, and nvkm_engine_unref() drops a reference to the engine. The fallback pattern is sketched below. |
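The fragments show the NVKM pattern of optional per-engine hooks with a generic fallback: reset uses the backend's own hook if it exists, otherwise it simply cycles the underlying subdev. Sketched from the matched lines; the default return value when chsw_load() is absent is an assumption.

```c
/* Optional-hook-with-fallback pattern from the nvkm_engine_reset()
 * fragments (sketch, not verbatim). */
int
nvkm_engine_reset(struct nvkm_engine *engine)
{
	if (engine->func->reset)
		return engine->func->reset(engine);

	/* No dedicated reset hook: fall back to a fini/init cycle. */
	nvkm_subdev_fini(&engine->subdev, false);
	return nvkm_subdev_init(&engine->subdev);
}

bool
nvkm_engine_chsw_load(struct nvkm_engine *engine)
{
	if (engine->func->chsw_load)
		return engine->func->chsw_load(engine);
	return false;	/* assumed default when the hook is absent */
}
```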
|
| /linux/drivers/gpu/drm/sun4i/ |
| sunxi_engine.h | Interface for the sun4i display engines, expressed as a set of optional callbacks: atomic_begin() prepares the engine for an atomic commit, atomic_check() validates the state, commit() applies it, layers_init() instantiates the layers supported by that engine, and apply_color_correction()/disable_color_correction() toggle colour correction, which only matters for the composite output. A dispatch sketch follows below. |
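Because every callback in the interface is optional, callers have to guard each invocation. A hedged sketch of that NULL-checked dispatch, using only the two colour-correction hooks whose signatures (engine pointer only) are visible above; the example_* wrapper names are illustrative, not the header's own helpers.

```c
/* NULL-guarded dispatch over the optional sunxi_engine callbacks
 * (sketch; other callbacks take additional state arguments). */
static void example_apply_color_correction(struct sunxi_engine *engine)
{
	if (engine->ops->apply_color_correction)
		engine->ops->apply_color_correction(engine);
}

static void example_disable_color_correction(struct sunxi_engine *engine)
{
	if (engine->ops->disable_color_correction)
		engine->ops->disable_color_correction(engine);
}
```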
|
| /linux/drivers/video/fbdev/via/ |
| accel.c | VIA framebuffer 2D acceleration: viafb_set_bpp() does a read-modify-write of VIA_REG_GEMODE (clearing the depth bits with a 0xfffffcfc mask) to select the destination depth, and hw_bitblt_1() programs a blit by setting the depth and then writing the geometry registers at engine + 0x08/0x0C/0x10 and the foreground/background colours at engine + 0x18/0x1C. The register update is sketched below. |
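viafb_set_bpp() is a straightforward read-modify-write of the GE mode register: clear the depth field, or in the bits for the requested destination depth, and write it back. A sketch built from the visible mask and register name; the depth bit values are illustrative placeholders, since they are not present in the matched lines.

```c
/* Illustrative depth values; the driver's own constants may differ. */
#define EXAMPLE_GEMODE_8BPP	0x00000000
#define EXAMPLE_GEMODE_16BPP	0x00000100
#define EXAMPLE_GEMODE_32BPP	0x00000300

/* Read-modify-write of VIA_REG_GEMODE suggested by the fragments above. */
static int viafb_set_bpp(void __iomem *engine, u8 bpp)
{
	u32 gemode;

	gemode = readl(engine + VIA_REG_GEMODE) & 0xfffffcfc;
	switch (bpp) {
	case 8:
		gemode |= EXAMPLE_GEMODE_8BPP;
		break;
	case 16:
		gemode |= EXAMPLE_GEMODE_16BPP;
		break;
	case 32:
		gemode |= EXAMPLE_GEMODE_32BPP;
		break;
	default:
		return -EINVAL;	/* unsupported destination depth */
	}
	writel(gemode, engine + VIA_REG_GEMODE);
	return 0;
}
```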
|
| /linux/drivers/crypto/marvell/cesa/ |
| cesa.c | Marvell Cryptographic Engine and Security Accelerator (CESA) driver, including TDMA support on platforms that provide it: mv_cesa_dequeue_req_locked() pulls the next request (and its backlog entry) off engine->queue with crypto_dequeue_request(), and mv_cesa_rearm_engine() takes engine->lock and, if the engine is idle (engine->req is NULL), dequeues a request and installs it as engine->req. The rearm pattern is sketched below. |
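The rearm path visible above is a standard single-in-flight crypto-engine pattern: under the engine lock, claim the next request off the crypto queue only if nothing is currently being processed. A sketch assembled from the matched fragments; the backlog notification and the mv_cesa_engine_step() launch helper are assumptions, not the driver's exact calls.

```c
/* Dequeue-and-rearm pattern from the mv_cesa_rearm_engine() fragments
 * (sketch; mv_cesa_engine_step() is a hypothetical launch helper). */
static void mv_cesa_rearm_engine(struct mv_cesa_engine *engine)
{
	struct crypto_async_request *req = NULL, *backlog = NULL;

	spin_lock_bh(&engine->lock);
	if (!engine->req) {
		/* Engine idle: claim the next queued request. */
		req = mv_cesa_dequeue_req_locked(engine, &backlog);
		engine->req = req;
	}
	spin_unlock_bh(&engine->lock);

	if (!req)
		return;

	/* Let a backlogged submitter know its request is now in flight
	 * (assumed notification helper). */
	if (backlog)
		crypto_request_complete(backlog, -EINPROGRESS);

	mv_cesa_engine_step(engine, req);
}
```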
|
| /linux/Documentation/devicetree/bindings/display/ |
| allwinner,sun4i-a10-display-engine.yaml | Devicetree binding for the Allwinner A10 Display Engine Pipeline (the pipeline's entry point). Matched compatible strings include allwinner,sun4i-a10-display-engine, allwinner,sun5i-a10s-display-engine, allwinner,sun5i-a13-display-engine, allwinner,sun6i-a31-display-engine, allwinner,sun6i-a31s-display-engine, allwinner,sun7i-a20-display-engine and allwinner,sun8i-a23-display-engine. |
|
| /linux/drivers/gpu/drm/i915/selftests/ |
| intel_scheduler_helpers.c | Selftest scheduling helpers: intel_selftest_find_any_engine() returns the first engine reported by for_each_engine() and complains "No valid engine found!" if there is none; intel_selftest_modify_policy() saves the module reset parameter, engine->flags, the timeslice duration and the preemption timeout before adjusting the scheduling policy, for example to combine a test with engine reset on pre-emption timeout. The save/restore pairing is sketched below. |
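intel_selftest_modify_policy() snapshots the engine's scheduling knobs before a test changes them, which implies a matching restore step. A sketch of that pairing, assuming a saved-policy struct with exactly the four fields visible above; the struct layout, field types and example_* names are assumptions, and the real helper also handles locking and engine flags more carefully.

```c
/* Save/restore pairing implied by the intel_selftest_modify_policy()
 * fragments (sketch; struct layout is an assumption). */
struct example_saved_policy {
	unsigned int reset;
	unsigned long flags;
	unsigned long timeslice;
	unsigned long preempt_timeout;
};

static void example_save_policy(struct intel_engine_cs *engine,
				struct example_saved_policy *saved)
{
	saved->reset = engine->i915->params.reset;
	saved->flags = engine->flags;
	saved->timeslice = engine->props.timeslice_duration_ms;
	saved->preempt_timeout = engine->props.preempt_timeout_ms;
}

static void example_restore_policy(struct intel_engine_cs *engine,
				   const struct example_saved_policy *saved)
{
	engine->i915->params.reset = saved->reset;
	engine->flags = saved->flags;
	engine->props.timeslice_duration_ms = saved->timeslice;
	engine->props.preempt_timeout_ms = saved->preempt_timeout;
}
```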
|
| /linux/drivers/gpu/drm/xe/ |
| xe_hw_engine_types.h | Hardware-engine types for the xe driver (see the "Engine ID Definition" struct in the Icelake PRM): struct xe_hw_engine_class_intf collects the per-class properties, including the scheduling limits sched_props.set_job_timeout, sched_props.job_timeout_min and sched_props.job_timeout_max (all in ms), while struct xe_hw_engine holds the state of a physical engine instance, starting with the GT it belongs to and its name. |
|
| /linux/drivers/dma/ |
| Kconfig | DMA engine configuration: matched prompts and help text include "DMA Engine support", "DMA Engine debugging" and "DMA Engine verbose debugging" (both covering the DMA engine core and drivers), "Altera / Intel mSGDMA Engine", a help text noting that the option "provide[s] DMA engine support. This includes the original ARM" controller, and "Broadcom SBA RAID engine support" for the Broadcom SBA RAID Engine. |
|
| /linux/Documentation/devicetree/bindings/crypto/ |
| qcom,inline-crypto-engine.yaml | Devicetree binding for the Qualcomm Technologies, Inc. (QTI) Inline Crypto Engine. Matched compatible strings: qcom,kaanapali-inline-crypto-engine, qcom,qcs8300-inline-crypto-engine, qcom,sa8775p-inline-crypto-engine, qcom,sc7180-inline-crypto-engine, qcom,sc7280-inline-crypto-engine, qcom,sm8450-inline-crypto-engine, qcom,sm8550-inline-crypto-engine and qcom,sm8650-inline-crypto-engine. |
|
| /linux/drivers/gpu/drm/nouveau/nvkm/engine/dma/ |
| Kbuild | Build list for the NVKM DMA engine: adds nvkm/engine/dma/base.o, nv04.o, nv50.o, gf100.o, gf119.o and gv100.o, plus the user-object variants user.o, usernv04.o, usernv50.o and usergf100.o, to nvkm-y. |
|
| /linux/drivers/gpu/drm/nouveau/nvkm/engine/gr/ |
| base.c | NVKM graphics-engine base class: each entry point (nvkm_gr_chsw_load(), nvkm_gr_tile(), nvkm_gr_oclass_get(), nvkm_gr_cclass_new(), nvkm_gr_intr(), nvkm_gr_nonstall()) recovers the struct nvkm_gr from the generic engine or object class with nvkm_gr() and then dispatches to the GR-specific implementation. The downcast pattern is sketched below. |
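All of these entry points rely on recovering the graphics-engine object from the generic struct nvkm_engine it embeds, the classic container_of() downcast. A sketch of that pattern; the macro definition, the embedded-field name and the chsw_load() dispatch are stated as assumptions for illustration.

```c
#include <linux/container_of.h>

/* Downcast pattern implied by the nvkm_gr(engine) calls above;
 * the macro and field name are assumptions. */
#define nvkm_gr(p) container_of((p), struct nvkm_gr, engine)

static bool
example_gr_chsw_load(struct nvkm_engine *engine)
{
	struct nvkm_gr *gr = nvkm_gr(engine);

	/* Dispatch to the GR-specific implementation. */
	return gr->func->chsw_load(gr);
}
```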
|
| /linux/Documentation/devicetree/bindings/media/ |
| allwinner,sun4i-a10-video-engine.yaml | Devicetree binding for the Allwinner A10 Video Engine. Matched compatible strings: allwinner,sun4i-a10-video-engine, allwinner,sun5i-a13-video-engine, allwinner,sun7i-a20-video-engine, allwinner,sun8i-a33-video-engine, allwinner,sun8i-h3-video-engine, allwinner,sun8i-v3s-video-engine, allwinner,sun8i-r40-video-engine and allwinner,sun20i-d1-video-engine. |
|