// SPDX-License-Identifier: GPL-2.0+

#include <linux/dma-fence.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_managed.h>
#include <drm/drm_print.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_vblank.h>
#include <drm/drm_vblank_helper.h>

#include "vkms_drv.h"

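/*
 * Called back from the vblank timer emulation when a vblank period has
 * elapsed. Signals the vblank event and, if the composer is enabled, hands
 * the current composer state to the composer worker so it can compute CRCs.
 */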
static bool vkms_crtc_handle_vblank_timeout(struct drm_crtc *crtc)
{
	struct vkms_output *output = drm_crtc_to_vkms_output(crtc);
	struct vkms_crtc_state *state;
	bool ret, fence_cookie;

	fence_cookie = dma_fence_begin_signalling();

	spin_lock(&output->lock);
	ret = drm_crtc_handle_vblank(crtc);
	if (!ret)
		DRM_ERROR("vkms failure on handling vblank\n");

	state = output->composer_state;
	spin_unlock(&output->lock);

	if (state && output->composer_enabled) {
		u64 frame = drm_crtc_accurate_vblank_count(crtc);

		/* update frame_start only if a queued vkms_composer_worker()
		 * has read the data
		 */
		spin_lock(&output->composer_lock);
		if (!state->crc_pending)
			state->frame_start = frame;
		else
			DRM_DEBUG_DRIVER("crc worker falling behind, frame_start: %llu, frame_end: %llu\n",
					 state->frame_start, frame);
		state->frame_end = frame;
		state->crc_pending = true;
		spin_unlock(&output->composer_lock);

		ret = queue_work(output->composer_workq, &state->composer_work);
		if (!ret)
			DRM_DEBUG_DRIVER("Composer worker already queued\n");
	}

	dma_fence_end_signalling(fence_cookie);

	return true;
}

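/*
 * Duplicate the current CRTC state into a fresh vkms_crtc_state and
 * initialize the work item that backs the composer worker for it.
 */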
static struct drm_crtc_state *
vkms_atomic_crtc_duplicate_state(struct drm_crtc *crtc)
{
	struct vkms_crtc_state *vkms_state;

	if (WARN_ON(!crtc->state))
		return NULL;

	vkms_state = kzalloc_obj(*vkms_state);
	if (!vkms_state)
		return NULL;

	__drm_atomic_helper_crtc_duplicate_state(crtc, &vkms_state->base);

	INIT_WORK(&vkms_state->composer_work, vkms_composer_worker);

	return &vkms_state->base;
}

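/*
 * Release a vkms_crtc_state. No composer work may still be pending at
 * this point, hence the WARN_ON() on a queued work item.
 */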
static void vkms_atomic_crtc_destroy_state(struct drm_crtc *crtc,
					   struct drm_crtc_state *state)
{
	struct vkms_crtc_state *vkms_state = to_vkms_crtc_state(state);

	__drm_atomic_helper_crtc_destroy_state(state);

	WARN_ON(work_pending(&vkms_state->composer_work));
	kfree(vkms_state->active_planes);
	kfree(vkms_state);
}

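/*
 * Reset the CRTC to a freshly allocated state. The allocation is not
 * checked before __drm_atomic_helper_crtc_reset(): base is the first
 * member of struct vkms_crtc_state, so on failure the helper just sees
 * a NULL state and the INIT_WORK() below is skipped.
 */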
static void vkms_atomic_crtc_reset(struct drm_crtc *crtc)
{
	struct vkms_crtc_state *vkms_state = kzalloc_obj(*vkms_state);

	if (crtc->state)
		vkms_atomic_crtc_destroy_state(crtc, crtc->state);

	__drm_atomic_helper_crtc_reset(crtc, &vkms_state->base);
	if (vkms_state)
		INIT_WORK(&vkms_state->composer_work, vkms_composer_worker);
}

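/*
 * VKMS has no hardware vblank interrupt; DRM_CRTC_VBLANK_TIMER_FUNCS
 * wires up the timer-based vblank emulation from <drm/drm_vblank_helper.h>.
 */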
static const struct drm_crtc_funcs vkms_crtc_funcs = {
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.reset = vkms_atomic_crtc_reset,
	.atomic_duplicate_state = vkms_atomic_crtc_duplicate_state,
	.atomic_destroy_state = vkms_atomic_crtc_destroy_state,
	DRM_CRTC_VBLANK_TIMER_FUNCS,
	.get_crc_sources = vkms_get_crc_sources,
	.set_crc_source = vkms_set_crc_source,
	.verify_crc_source = vkms_verify_crc_source,
};

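/*
 * Cache the visible planes of the new CRTC state in ->active_planes so
 * the composer worker does not have to walk the plane list at compose
 * time. Runs in two passes: count the visible planes, then collect them.
 */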
static int vkms_crtc_atomic_check(struct drm_crtc *crtc,
				  struct drm_atomic_state *state)
{
	struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state,
									  crtc);
	struct vkms_crtc_state *vkms_state = to_vkms_crtc_state(crtc_state);
	struct drm_plane *plane;
	struct drm_plane_state *plane_state;
	int i = 0, ret;

	if (vkms_state->active_planes)
		return 0;

	ret = drm_atomic_add_affected_planes(crtc_state->state, crtc);
	if (ret < 0)
		return ret;

	drm_for_each_plane_mask(plane, crtc->dev, crtc_state->plane_mask) {
		plane_state = drm_atomic_get_new_plane_state(crtc_state->state, plane);
		WARN_ON(!plane_state);

		if (!plane_state->visible)
			continue;

		i++;
	}

	vkms_state->active_planes = kzalloc_objs(*vkms_state->active_planes, i);
	if (!vkms_state->active_planes)
		return -ENOMEM;
	vkms_state->num_active_planes = i;

	i = 0;
	drm_for_each_plane_mask(plane, crtc->dev, crtc_state->plane_mask) {
		plane_state = drm_atomic_get_new_plane_state(crtc_state->state, plane);

		if (!plane_state->visible)
			continue;

		vkms_state->active_planes[i++] =
			to_vkms_plane_state(plane_state);
	}

	return 0;
}

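/*
 * Take output->lock across the commit; vkms_crtc_atomic_flush() drops it
 * again, so the vblank timer cannot pick up a half-updated composer state
 * in between.
 */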
static void vkms_crtc_atomic_begin(struct drm_crtc *crtc,
				   struct drm_atomic_state *state)
	__acquires(&vkms_output->lock)
{
	struct vkms_output *vkms_output = drm_crtc_to_vkms_output(crtc);

	/* This lock is held across the atomic commit to block vblank timer
	 * from scheduling vkms_composer_worker until the composer is updated
	 */
	spin_lock_irq(&vkms_output->lock);
}

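/*
 * Send or arm the pageflip event, publish the new state for the composer
 * worker via ->composer_state, and release the lock taken in
 * vkms_crtc_atomic_begin().
 */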
static void vkms_crtc_atomic_flush(struct drm_crtc *crtc,
				   struct drm_atomic_state *state)
	__releases(&vkms_output->lock)
{
	struct vkms_output *vkms_output = drm_crtc_to_vkms_output(crtc);

	if (crtc->state->event) {
		spin_lock(&crtc->dev->event_lock);

		if (drm_crtc_vblank_get(crtc) != 0)
			drm_crtc_send_vblank_event(crtc, crtc->state->event);
		else
			drm_crtc_arm_vblank_event(crtc, crtc->state->event);

		spin_unlock(&crtc->dev->event_lock);

		crtc->state->event = NULL;
	}

	vkms_output->composer_state = to_vkms_crtc_state(crtc->state);

	spin_unlock_irq(&vkms_output->lock);
}

static const struct drm_crtc_helper_funcs vkms_crtc_helper_funcs = {
	.atomic_check = vkms_crtc_atomic_check,
	.atomic_begin = vkms_crtc_atomic_begin,
	.atomic_flush = vkms_crtc_atomic_flush,
	.atomic_enable = drm_crtc_vblank_atomic_enable,
	.atomic_disable = drm_crtc_vblank_atomic_disable,
	.handle_vblank_timeout = vkms_crtc_handle_vblank_timeout,
};

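/*
 * Allocate the CRTC together with its primary and cursor planes. All
 * resources are DRM-managed (drmm_*), so no explicit cleanup is needed
 * in the error paths or on unload.
 */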
struct vkms_output *vkms_crtc_init(struct drm_device *dev, struct drm_plane *primary,
				   struct drm_plane *cursor)
{
	struct vkms_output *vkms_out;
	struct drm_crtc *crtc;
	int ret;

	vkms_out = drmm_crtc_alloc_with_planes(dev, struct vkms_output, crtc,
					       primary, cursor,
					       &vkms_crtc_funcs, NULL);
	if (IS_ERR(vkms_out)) {
		DRM_DEV_ERROR(dev->dev, "Failed to init CRTC\n");
		return vkms_out;
	}

	crtc = &vkms_out->crtc;

	drm_crtc_helper_add(crtc, &vkms_crtc_helper_funcs);

	ret = drm_mode_crtc_set_gamma_size(crtc, VKMS_LUT_SIZE);
	if (ret) {
		DRM_ERROR("Failed to set gamma size\n");
		return ERR_PTR(ret);
	}

	drm_crtc_enable_color_mgmt(crtc, 0, false, VKMS_LUT_SIZE);

	spin_lock_init(&vkms_out->lock);
	spin_lock_init(&vkms_out->composer_lock);

	vkms_out->composer_workq = drmm_alloc_ordered_workqueue(dev, "vkms_composer", 0);
	if (IS_ERR(vkms_out->composer_workq))
		return ERR_CAST(vkms_out->composer_workq);

	return vkms_out;
}