xref: /linux/drivers/gpu/drm/xe/xe_preempt_fence.c (revision dd08ebf6c3525a7ea2186e636df064ea47281987)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2022 Intel Corporation
4  */
5 
6 #include <linux/slab.h>
7 
8 #include "xe_engine.h"
9 #include "xe_preempt_fence.h"
10 #include "xe_vm.h"
11 
12 static void preempt_fence_work_func(struct work_struct *w)
13 {
14 	bool cookie = dma_fence_begin_signalling();
15 	struct xe_preempt_fence *pfence =
16 		container_of(w, typeof(*pfence), preempt_work);
17 	struct xe_engine *e = pfence->engine;
18 
19 	if (pfence->error)
20 		dma_fence_set_error(&pfence->base, pfence->error);
21 	else
22 		e->ops->suspend_wait(e);
23 
24 	dma_fence_signal(&pfence->base);
25 	dma_fence_end_signalling(cookie);
26 
27 	queue_work(system_unbound_wq, &e->vm->preempt.rebind_work);
28 
29 	xe_engine_put(e);
30 }
31 
/* dma_fence_ops callback: driver name reported for this fence. */
static const char *preempt_fence_get_driver_name(struct dma_fence *fence)
{
	return "xe";
}
37 
/* dma_fence_ops callback: timeline name reported for this fence. */
static const char *preempt_fence_get_timeline_name(struct dma_fence *fence)
{
	return "preempt";
}
43 
44 static bool preempt_fence_enable_signaling(struct dma_fence *fence)
45 {
46 	struct xe_preempt_fence *pfence =
47 		container_of(fence, typeof(*pfence), base);
48 	struct xe_engine *e = pfence->engine;
49 
50 	pfence->error = e->ops->suspend(e);
51 	queue_work(system_unbound_wq, &pfence->preempt_work);
52 	return true;
53 }
54 
/*
 * Preempt fences signal from a worker once the engine suspend has
 * completed, so only the minimal dma_fence_ops callbacks are needed.
 */
static const struct dma_fence_ops preempt_fence_ops = {
	.get_driver_name = preempt_fence_get_driver_name,
	.get_timeline_name = preempt_fence_get_timeline_name,
	.enable_signaling = preempt_fence_enable_signaling,
};
60 
61 /**
62  * xe_preempt_fence_alloc() - Allocate a preempt fence with minimal
63  * initialization
64  *
65  * Allocate a preempt fence, and initialize its list head.
66  * If the preempt_fence allocated has been armed with
67  * xe_preempt_fence_arm(), it must be freed using dma_fence_put(). If not,
68  * it must be freed using xe_preempt_fence_free().
69  *
70  * Return: A struct xe_preempt_fence pointer used for calling into
71  * xe_preempt_fence_arm() or xe_preempt_fence_free().
72  * An error pointer on error.
73  */
74 struct xe_preempt_fence *xe_preempt_fence_alloc(void)
75 {
76 	struct xe_preempt_fence *pfence;
77 
78 	pfence = kmalloc(sizeof(*pfence), GFP_KERNEL);
79 	if (!pfence)
80 		return ERR_PTR(-ENOMEM);
81 
82 	INIT_LIST_HEAD(&pfence->link);
83 	INIT_WORK(&pfence->preempt_work, preempt_fence_work_func);
84 
85 	return pfence;
86 }
87 
/**
 * xe_preempt_fence_free() - Free a preempt fence allocated using
 * xe_preempt_fence_alloc().
 * @pfence: pointer obtained from xe_preempt_fence_alloc();
 *
 * Free a preempt fence that has not yet been armed.
 */
void xe_preempt_fence_free(struct xe_preempt_fence *pfence)
{
	/* Unlink from whatever pending list the fence may sit on. */
	list_del(&pfence->link);
	kfree(pfence);
}
100 
101 /**
102  * xe_preempt_fence_arm() - Arm a preempt fence allocated using
103  * xe_preempt_fence_alloc().
104  * @pfence: The struct xe_preempt_fence pointer returned from
105  *          xe_preempt_fence_alloc().
106  * @e: The struct xe_engine used for arming.
107  * @context: The dma-fence context used for arming.
108  * @seqno: The dma-fence seqno used for arming.
109  *
110  * Inserts the preempt fence into @context's timeline, takes @link off any
111  * list, and registers the struct xe_engine as the xe_engine to be preempted.
112  *
113  * Return: A pointer to a struct dma_fence embedded into the preempt fence.
114  * This function doesn't error.
115  */
116 struct dma_fence *
117 xe_preempt_fence_arm(struct xe_preempt_fence *pfence, struct xe_engine *e,
118 		     u64 context, u32 seqno)
119 {
120 	list_del_init(&pfence->link);
121 	pfence->engine = xe_engine_get(e);
122 	dma_fence_init(&pfence->base, &preempt_fence_ops,
123 		      &e->compute.lock, context, seqno);
124 
125 	return &pfence->base;
126 }
127 
128 /**
129  * xe_preempt_fence_create() - Helper to create and arm a preempt fence.
130  * @e: The struct xe_engine used for arming.
131  * @context: The dma-fence context used for arming.
132  * @seqno: The dma-fence seqno used for arming.
133  *
134  * Allocates and inserts the preempt fence into @context's timeline,
135  * and registers @e as the struct xe_engine to be preempted.
136  *
137  * Return: A pointer to the resulting struct dma_fence on success. An error
138  * pointer on error. In particular if allocation fails it returns
139  * ERR_PTR(-ENOMEM);
140  */
141 struct dma_fence *
142 xe_preempt_fence_create(struct xe_engine *e,
143 			u64 context, u32 seqno)
144 {
145 	struct xe_preempt_fence *pfence;
146 
147 	pfence = xe_preempt_fence_alloc();
148 	if (IS_ERR(pfence))
149 		return ERR_CAST(pfence);
150 
151 	return xe_preempt_fence_arm(pfence, e, context, seqno);
152 }
153 
/**
 * xe_fence_is_xe_preempt() - Check whether a dma-fence is an xe preempt fence.
 * @fence: The dma-fence to check.
 *
 * Return: true if @fence was created by this file's preempt fence code
 * (identified by its ops table), false otherwise.
 */
bool xe_fence_is_xe_preempt(const struct dma_fence *fence)
{
	return fence->ops == &preempt_fence_ops;
}
158