xref: /linux/drivers/gpu/drm/nouveau/nvkm/engine/disp/r535.c (revision f868cd2517763c66783c6000b29d97f0b966b311)
1 /*
2  * Copyright 2023 Red Hat Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  */
22 #include "priv.h"
23 #include "chan.h"
24 #include "conn.h"
25 #include "dp.h"
26 #include "head.h"
27 #include "ior.h"
28 #include "outp.h"
29 
30 #include <core/ramht.h>
31 #include <subdev/bios.h>
32 #include <subdev/bios/conn.h>
33 #include <subdev/gsp.h>
34 #include <subdev/mmu.h>
35 #include <subdev/vfn.h>
36 
37 #include <nvhw/drf.h>
38 
39 #include <nvrm/nvtypes.h>
40 #include <nvrm/535.113.01/common/sdk/nvidia/inc/class/cl2080_notification.h>
41 #include <nvrm/535.113.01/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073dfp.h>
42 #include <nvrm/535.113.01/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073dp.h>
43 #include <nvrm/535.113.01/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073specific.h>
44 #include <nvrm/535.113.01/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073system.h>
45 #include <nvrm/535.113.01/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080internal.h>
46 #include <nvrm/535.113.01/common/sdk/nvidia/inc/nvos.h>
47 #include <nvrm/535.113.01/nvidia/generated/g_allclasses.h>
48 #include <nvrm/535.113.01/nvidia/generated/g_mem_desc_nvoc.h>
49 #include <nvrm/535.113.01/nvidia/inc/kernel/os/nv_memory_type.h>
50 
51 #include <linux/acpi.h>
52 
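/* Map a display channel class (core 0x7d, window 0x7e, wimm 0x7b, cursor 0x7a)
 * to the offset and size of its user-mode register region.
 */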
53 static u64
54 r535_chan_user(struct nvkm_disp_chan *chan, u64 *psize)
55 {
56 	switch (chan->object.oclass & 0xff) {
57 	case 0x7d: *psize = 0x10000; return 0x680000;
58 	case 0x7e: *psize = 0x01000; return 0x690000 + (chan->head * *psize);
59 	case 0x7b: *psize = 0x01000; return 0x6b0000 + (chan->head * *psize);
60 	case 0x7a: *psize = 0x01000; return 0x6d8000 + (chan->head * *psize);
61 	default:
62 		BUG_ON(1);
63 		break;
64 	}
65 
66 	return 0ULL;
67 }
68 
69 static void
70 r535_chan_intr(struct nvkm_disp_chan *chan, bool en)
71 {
72 }
73 
74 static void
75 r535_chan_fini(struct nvkm_disp_chan *chan)
76 {
77 	nvkm_gsp_rm_free(&chan->rm.object);
78 }
79 
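/* Describe the channel's pushbuffer (address space, cache snooping, physical
 * address and limit) to GSP-RM before the channel object is allocated.
 */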
80 static int
81 r535_chan_push(struct nvkm_disp_chan *chan)
82 {
83 	struct nvkm_gsp *gsp = chan->disp->engine.subdev.device->gsp;
84 	NV2080_CTRL_INTERNAL_DISPLAY_CHANNEL_PUSHBUFFER_PARAMS *ctrl;
85 
86 	ctrl = nvkm_gsp_rm_ctrl_get(&gsp->internal.device.subdevice,
87 				    NV2080_CTRL_CMD_INTERNAL_DISPLAY_CHANNEL_PUSHBUFFER,
88 				    sizeof(*ctrl));
89 	if (IS_ERR(ctrl))
90 		return PTR_ERR(ctrl);
91 
92 	if (chan->memory) {
93 		switch (nvkm_memory_target(chan->memory)) {
94 		case NVKM_MEM_TARGET_NCOH:
95 			ctrl->addressSpace = ADDR_SYSMEM;
96 			ctrl->cacheSnoop = 0;
97 			break;
98 		case NVKM_MEM_TARGET_HOST:
99 			ctrl->addressSpace = ADDR_SYSMEM;
100 			ctrl->cacheSnoop = 1;
101 			break;
102 		case NVKM_MEM_TARGET_VRAM:
103 			ctrl->addressSpace = ADDR_FBMEM;
104 			break;
105 		default:
106 			WARN_ON(1);
107 			return -EINVAL;
108 		}
109 
110 		ctrl->physicalAddr = nvkm_memory_addr(chan->memory);
111 		ctrl->limit = nvkm_memory_size(chan->memory) - 1;
112 	}
113 
114 	ctrl->hclass = chan->object.oclass;
115 	ctrl->channelInstance = chan->head;
116 	ctrl->valid = ((chan->object.oclass & 0xff) != 0x7a) ? 1 : 0;
117 
118 	return nvkm_gsp_rm_ctrl_wr(&gsp->internal.device.subdevice, ctrl);
119 }
120 
121 static int
122 r535_curs_init(struct nvkm_disp_chan *chan)
123 {
124 	NV50VAIO_CHANNELPIO_ALLOCATION_PARAMETERS *args;
125 	int ret;
126 
127 	ret = r535_chan_push(chan);
128 	if (ret)
129 		return ret;
130 
131 	args = nvkm_gsp_rm_alloc_get(&chan->disp->rm.object,
132 				     (chan->object.oclass << 16) | chan->head,
133 				     chan->object.oclass, sizeof(*args), &chan->rm.object);
134 	if (IS_ERR(args))
135 		return PTR_ERR(args);
136 
137 	args->channelInstance = chan->head;
138 
139 	return nvkm_gsp_rm_alloc_wr(&chan->rm.object, args);
140 }
141 
142 static const struct nvkm_disp_chan_func
143 r535_curs_func = {
144 	.init = r535_curs_init,
145 	.fini = r535_chan_fini,
146 	.intr = r535_chan_intr,
147 	.user = r535_chan_user,
148 };
149 
150 static const struct nvkm_disp_chan_user
151 r535_curs = {
152 	.func = &r535_curs_func,
153 	.user = 73,
154 };
155 
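/* Bind a DMA object into the channel's hash table; the context value encodes
 * the user channel ID and the low bits of the RM client handle.
 */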
156 static int
157 r535_dmac_bind(struct nvkm_disp_chan *chan, struct nvkm_object *object, u32 handle)
158 {
159 	return nvkm_ramht_insert(chan->disp->ramht, object, chan->chid.user, -9, handle,
160 				 chan->chid.user << 25 |
161 				 (chan->disp->rm.client.object.handle & 0x3fff));
162 }
163 
164 static void
165 r535_dmac_fini(struct nvkm_disp_chan *chan)
166 {
167 	struct nvkm_device *device = chan->disp->engine.subdev.device;
168 	const u32 uoff = (chan->chid.user - 1) * 0x1000;
169 
170 	chan->suspend_put = nvkm_rd32(device, 0x690000 + uoff);
171 	r535_chan_fini(chan);
172 }
173 
174 static int
175 r535_dmac_init(struct nvkm_disp_chan *chan)
176 {
177 	NV50VAIO_CHANNELDMA_ALLOCATION_PARAMETERS *args;
178 	int ret;
179 
180 	ret = r535_chan_push(chan);
181 	if (ret)
182 		return ret;
183 
184 	args = nvkm_gsp_rm_alloc_get(&chan->disp->rm.object,
185 				     (chan->object.oclass << 16) | chan->head,
186 				     chan->object.oclass, sizeof(*args), &chan->rm.object);
187 	if (IS_ERR(args))
188 		return PTR_ERR(args);
189 
190 	args->channelInstance = chan->head;
191 	args->offset = chan->suspend_put;
192 
193 	return nvkm_gsp_rm_alloc_wr(&chan->rm.object, args);
194 }
195 
196 static int
197 r535_dmac_push(struct nvkm_disp_chan *chan, u64 memory)
198 {
199 	chan->memory = nvkm_umem_search(chan->object.client, memory);
200 	if (IS_ERR(chan->memory))
201 		return PTR_ERR(chan->memory);
202 
203 	return 0;
204 }
205 
206 static const struct nvkm_disp_chan_func
207 r535_dmac_func = {
208 	.push = r535_dmac_push,
209 	.init = r535_dmac_init,
210 	.fini = r535_dmac_fini,
211 	.intr = r535_chan_intr,
212 	.user = r535_chan_user,
213 	.bind = r535_dmac_bind,
214 };
215 
216 static const struct nvkm_disp_chan_func
217 r535_wimm_func = {
218 	.push = r535_dmac_push,
219 	.init = r535_dmac_init,
220 	.fini = r535_dmac_fini,
221 	.intr = r535_chan_intr,
222 	.user = r535_chan_user,
223 };
224 
225 static const struct nvkm_disp_chan_user
226 r535_wimm = {
227 	.func = &r535_wimm_func,
228 	.user = 33,
229 };
230 
231 static const struct nvkm_disp_chan_user
232 r535_wndw = {
233 	.func = &r535_dmac_func,
234 	.user = 1,
235 };
236 
237 static void
238 r535_core_fini(struct nvkm_disp_chan *chan)
239 {
240 	struct nvkm_device *device = chan->disp->engine.subdev.device;
241 
242 	chan->suspend_put = nvkm_rd32(device, 0x680000);
243 	r535_chan_fini(chan);
244 }
245 
246 static const struct nvkm_disp_chan_func
247 r535_core_func = {
248 	.push = r535_dmac_push,
249 	.init = r535_dmac_init,
250 	.fini = r535_core_fini,
251 	.intr = r535_chan_intr,
252 	.user = r535_chan_user,
253 	.bind = r535_dmac_bind,
254 };
255 
256 static const struct nvkm_disp_chan_user
257 r535_core = {
258 	.func = &r535_core_func,
259 	.user = 0,
260 };
261 
262 static int
263 r535_sor_bl_set(struct nvkm_ior *sor, int lvl)
264 {
265 	struct nvkm_disp *disp = sor->disp;
266 	NV0073_CTRL_SPECIFIC_BACKLIGHT_BRIGHTNESS_PARAMS *ctrl;
267 
268 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
269 				    NV0073_CTRL_CMD_SPECIFIC_SET_BACKLIGHT_BRIGHTNESS,
270 				    sizeof(*ctrl));
271 	if (IS_ERR(ctrl))
272 		return PTR_ERR(ctrl);
273 
274 	ctrl->displayId = BIT(sor->asy.outp->index);
275 	ctrl->brightness = lvl;
276 
277 	return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
278 }
279 
280 static int
281 r535_sor_bl_get(struct nvkm_ior *sor)
282 {
283 	struct nvkm_disp *disp = sor->disp;
284 	NV0073_CTRL_SPECIFIC_BACKLIGHT_BRIGHTNESS_PARAMS *ctrl;
285 	int ret, lvl;
286 
287 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
288 				    NV0073_CTRL_CMD_SPECIFIC_GET_BACKLIGHT_BRIGHTNESS,
289 				    sizeof(*ctrl));
290 	if (IS_ERR(ctrl))
291 		return PTR_ERR(ctrl);
292 
293 	ctrl->displayId = BIT(sor->asy.outp->index);
294 
295 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
296 	if (ret) {
297 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
298 		return ret;
299 	}
300 
301 	lvl = ctrl->brightness;
302 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
303 	return lvl;
304 }
305 
306 static const struct nvkm_ior_func_bl
307 r535_sor_bl = {
308 	.get = r535_sor_bl_get,
309 	.set = r535_sor_bl_set,
310 };
311 
312 static void
313 r535_sor_hda_eld(struct nvkm_ior *sor, int head, u8 *data, u8 size)
314 {
315 	struct nvkm_disp *disp = sor->disp;
316 	NV0073_CTRL_DFP_SET_ELD_AUDIO_CAP_PARAMS *ctrl;
317 
318 	if (WARN_ON(size > sizeof(ctrl->bufferELD)))
319 		return;
320 
321 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
322 				    NV0073_CTRL_CMD_DFP_SET_ELD_AUDIO_CAPS, sizeof(*ctrl));
323 	if (WARN_ON(IS_ERR(ctrl)))
324 		return;
325 
326 	ctrl->displayId = BIT(sor->asy.outp->index);
327 	ctrl->numELDSize = size;
328 	memcpy(ctrl->bufferELD, data, size);
329 	ctrl->maxFreqSupported = 0; //XXX
330 	ctrl->ctrl  = NVDEF(NV0073, CTRL_DFP_ELD_AUDIO_CAPS_CTRL, PD, TRUE);
331 	ctrl->ctrl |= NVDEF(NV0073, CTRL_DFP_ELD_AUDIO_CAPS_CTRL, ELDV, TRUE);
332 	ctrl->deviceEntry = head;
333 
334 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
335 }
336 
337 static void
338 r535_sor_hda_hpd(struct nvkm_ior *sor, int head, bool present)
339 {
340 	struct nvkm_disp *disp = sor->disp;
341 	NV0073_CTRL_DFP_SET_ELD_AUDIO_CAP_PARAMS *ctrl;
342 
343 	if (present)
344 		return;
345 
346 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
347 				    NV0073_CTRL_CMD_DFP_SET_ELD_AUDIO_CAPS, sizeof(*ctrl));
348 	if (WARN_ON(IS_ERR(ctrl)))
349 		return;
350 
351 	ctrl->displayId = BIT(sor->asy.outp->index);
352 	ctrl->deviceEntry = head;
353 
354 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
355 }
356 
357 static const struct nvkm_ior_func_hda
358 r535_sor_hda = {
359 	.hpd = r535_sor_hda_hpd,
360 	.eld = r535_sor_hda_eld,
361 };
362 
363 static void
364 r535_sor_dp_audio_mute(struct nvkm_ior *sor, bool mute)
365 {
366 	struct nvkm_disp *disp = sor->disp;
367 	NV0073_CTRL_DP_SET_AUDIO_MUTESTREAM_PARAMS *ctrl;
368 
369 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
370 				    NV0073_CTRL_CMD_DP_SET_AUDIO_MUTESTREAM, sizeof(*ctrl));
371 	if (WARN_ON(IS_ERR(ctrl)))
372 		return;
373 
374 	ctrl->displayId = BIT(sor->asy.outp->index);
375 	ctrl->mute = mute;
376 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
377 }
378 
379 static void
380 r535_sor_dp_audio(struct nvkm_ior *sor, int head, bool enable)
381 {
382 	struct nvkm_disp *disp = sor->disp;
383 	NV0073_CTRL_DFP_SET_AUDIO_ENABLE_PARAMS *ctrl;
384 
385 	if (!enable)
386 		r535_sor_dp_audio_mute(sor, true);
387 
388 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
389 				    NV0073_CTRL_CMD_DFP_SET_AUDIO_ENABLE, sizeof(*ctrl));
390 	if (WARN_ON(IS_ERR(ctrl)))
391 		return;
392 
393 	ctrl->displayId = BIT(sor->asy.outp->index);
394 	ctrl->enable = enable;
395 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
396 
397 	if (enable)
398 		r535_sor_dp_audio_mute(sor, false);
399 }
400 
401 static void
402 r535_sor_dp_vcpi(struct nvkm_ior *sor, int head, u8 slot, u8 slot_nr, u16 pbn, u16 aligned_pbn)
403 {
404 	struct nvkm_disp *disp = sor->disp;
405 	struct NV0073_CTRL_CMD_DP_CONFIG_STREAM_PARAMS *ctrl;
406 
407 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
408 				    NV0073_CTRL_CMD_DP_CONFIG_STREAM, sizeof(*ctrl));
409 	if (WARN_ON(IS_ERR(ctrl)))
410 		return;
411 
412 	ctrl->subDeviceInstance = 0;
413 	ctrl->head = head;
414 	ctrl->sorIndex = sor->id;
415 	ctrl->dpLink = sor->asy.link == 2;
416 	ctrl->bEnableOverride = 1;
417 	ctrl->bMST = 1;
418 	ctrl->hBlankSym = 0;
419 	ctrl->vBlankSym = 0;
420 	ctrl->colorFormat = 0;
421 	ctrl->bEnableTwoHeadOneOr = 0;
422 	ctrl->singleHeadMultistreamMode = 0;
423 	ctrl->MST.slotStart = slot;
424 	ctrl->MST.slotEnd = slot + slot_nr - 1;
425 	ctrl->MST.PBN = pbn;
426 	ctrl->MST.Timeslice = aligned_pbn;
427 	ctrl->MST.sendACT = 0;
428 	ctrl->MST.singleHeadMSTPipeline = 0;
429 	ctrl->MST.bEnableAudioOverRightPanel = 0;
430 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
431 }
432 
433 static int
434 r535_sor_dp_sst(struct nvkm_ior *sor, int head, bool ef,
435 		u32 watermark, u32 hblanksym, u32 vblanksym)
436 {
437 	struct nvkm_disp *disp = sor->disp;
438 	struct NV0073_CTRL_CMD_DP_CONFIG_STREAM_PARAMS *ctrl;
439 
440 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
441 				    NV0073_CTRL_CMD_DP_CONFIG_STREAM, sizeof(*ctrl));
442 	if (IS_ERR(ctrl))
443 		return PTR_ERR(ctrl);
444 
445 	ctrl->subDeviceInstance = 0;
446 	ctrl->head = head;
447 	ctrl->sorIndex = sor->id;
448 	ctrl->dpLink = sor->asy.link == 2;
449 	ctrl->bEnableOverride = 1;
450 	ctrl->bMST = 0;
451 	ctrl->hBlankSym = hblanksym;
452 	ctrl->vBlankSym = vblanksym;
453 	ctrl->colorFormat = 0;
454 	ctrl->bEnableTwoHeadOneOr = 0;
455 	ctrl->SST.bEnhancedFraming = ef;
456 	ctrl->SST.tuSize = 64;
457 	ctrl->SST.waterMark = watermark;
458 	ctrl->SST.bEnableAudioOverRightPanel = 0;
459 	return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
460 }
461 
462 static const struct nvkm_ior_func_dp
463 r535_sor_dp = {
464 	.sst = r535_sor_dp_sst,
465 	.vcpi = r535_sor_dp_vcpi,
466 	.audio = r535_sor_dp_audio,
467 };
468 
469 static void
470 r535_sor_hdmi_scdc(struct nvkm_ior *sor, u32 khz, bool support, bool scrambling,
471 		   bool scrambling_low_rates)
472 {
473 	struct nvkm_outp *outp = sor->asy.outp;
474 	struct nvkm_disp *disp = outp->disp;
475 	NV0073_CTRL_SPECIFIC_SET_HDMI_SINK_CAPS_PARAMS *ctrl;
476 
477 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
478 				    NV0073_CTRL_CMD_SPECIFIC_SET_HDMI_SINK_CAPS, sizeof(*ctrl));
479 	if (WARN_ON(IS_ERR(ctrl)))
480 		return;
481 
482 	ctrl->displayId = BIT(outp->index);
483 	ctrl->caps = 0;
484 	if (support)
485 		ctrl->caps |= NVDEF(NV0073_CTRL_CMD_SPECIFIC, SET_HDMI_SINK_CAPS, SCDC_SUPPORTED, TRUE);
486 	if (scrambling)
487 		ctrl->caps |= NVDEF(NV0073_CTRL_CMD_SPECIFIC, SET_HDMI_SINK_CAPS, GT_340MHZ_CLOCK_SUPPORTED, TRUE);
488 	if (scrambling_low_rates)
489 		ctrl->caps |= NVDEF(NV0073_CTRL_CMD_SPECIFIC, SET_HDMI_SINK_CAPS, LTE_340MHZ_SCRAMBLING_SUPPORTED, TRUE);
490 
491 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
492 }
493 
494 static void
495 r535_sor_hdmi_ctrl_audio_mute(struct nvkm_outp *outp, bool mute)
496 {
497 	struct nvkm_disp *disp = outp->disp;
498 	NV0073_CTRL_CMD_SPECIFIC_SET_HDMI_AUDIO_MUTESTREAM_PARAMS *ctrl;
499 
500 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
501 				    NV0073_CTRL_CMD_SPECIFIC_SET_HDMI_AUDIO_MUTESTREAM, sizeof(*ctrl));
502 	if (WARN_ON(IS_ERR(ctrl)))
503 		return;
504 
505 	ctrl->displayId = BIT(outp->index);
506 	ctrl->mute = mute;
507 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
508 }
509 
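/* The 10-byte payload looks like an HDMI General Control Packet (type 0x03):
 * byte 3 selects Clear_AVMUTE (0x10) when enabling audio, Set_AVMUTE (0x01)
 * when disabling it.
 */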
510 static void
511 r535_sor_hdmi_ctrl_audio(struct nvkm_outp *outp, bool enable)
512 {
513 	struct nvkm_disp *disp = outp->disp;
514 	NV0073_CTRL_SPECIFIC_SET_OD_PACKET_PARAMS *ctrl;
515 
516 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
517 				    NV0073_CTRL_CMD_SPECIFIC_SET_OD_PACKET, sizeof(*ctrl));
518 	if (WARN_ON(IS_ERR(ctrl)))
519 		return;
520 
521 	ctrl->displayId = BIT(outp->index);
522 	ctrl->transmitControl =
523 		NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, ENABLE, YES) |
524 		NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, OTHER_FRAME, DISABLE) |
525 		NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, SINGLE_FRAME, DISABLE) |
526 		NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, ON_HBLANK, DISABLE) |
527 		NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, VIDEO_FMT, SW_CONTROLLED) |
528 		NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, RESERVED_LEGACY_MODE, NO);
529 	ctrl->packetSize = 10;
530 	ctrl->aPacket[0] = 0x03;
531 	ctrl->aPacket[1] = 0x00;
532 	ctrl->aPacket[2] = 0x00;
533 	ctrl->aPacket[3] = enable ? 0x10 : 0x01;
534 	ctrl->aPacket[4] = 0x00;
535 	ctrl->aPacket[5] = 0x00;
536 	ctrl->aPacket[6] = 0x00;
537 	ctrl->aPacket[7] = 0x00;
538 	ctrl->aPacket[8] = 0x00;
539 	ctrl->aPacket[9] = 0x00;
540 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
541 }
542 
543 static void
544 r535_sor_hdmi_audio(struct nvkm_ior *sor, int head, bool enable)
545 {
546 	struct nvkm_device *device = sor->disp->engine.subdev.device;
547 	const u32 hdmi = head * 0x400;
548 
549 	r535_sor_hdmi_ctrl_audio(sor->asy.outp, enable);
550 	r535_sor_hdmi_ctrl_audio_mute(sor->asy.outp, !enable);
551 
552 	/* General Control (GCP). */
553 	nvkm_mask(device, 0x6f00c0 + hdmi, 0x00000001, 0x00000000);
554 	nvkm_wr32(device, 0x6f00cc + hdmi, !enable ? 0x00000001 : 0x00000010);
555 	nvkm_mask(device, 0x6f00c0 + hdmi, 0x00000001, 0x00000001);
556 }
557 
558 static void
559 r535_sor_hdmi_ctrl(struct nvkm_ior *sor, int head, bool enable, u8 max_ac_packet, u8 rekey)
560 {
561 	struct nvkm_disp *disp = sor->disp;
562 	NV0073_CTRL_SPECIFIC_SET_HDMI_ENABLE_PARAMS *ctrl;
563 
564 	if (!enable)
565 		return;
566 
567 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
568 				    NV0073_CTRL_CMD_SPECIFIC_SET_HDMI_ENABLE, sizeof(*ctrl));
569 	if (WARN_ON(IS_ERR(ctrl)))
570 		return;
571 
572 	ctrl->displayId = BIT(sor->asy.outp->index);
573 	ctrl->enable = enable;
574 
575 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
576 }
577 
578 static const struct nvkm_ior_func_hdmi
579 r535_sor_hdmi = {
580 	.ctrl = r535_sor_hdmi_ctrl,
581 	.scdc = r535_sor_hdmi_scdc,
582 	/*TODO: SF_USER -> KMS. */
583 	.infoframe_avi = gv100_sor_hdmi_infoframe_avi,
584 	.infoframe_vsi = gv100_sor_hdmi_infoframe_vsi,
585 	.audio = r535_sor_hdmi_audio,
586 };
587 
588 static const struct nvkm_ior_func
589 r535_sor = {
590 	.hdmi = &r535_sor_hdmi,
591 	.dp = &r535_sor_dp,
592 	.hda = &r535_sor_hda,
593 	.bl = &r535_sor_bl,
594 };
595 
596 static int
597 r535_sor_new(struct nvkm_disp *disp, int id)
598 {
599 	return nvkm_ior_new_(&r535_sor, disp, SOR, id, true/*XXX: hda cap*/);
600 }
601 
602 static int
603 r535_sor_cnt(struct nvkm_disp *disp, unsigned long *pmask)
604 {
605 	*pmask = 0xf;
606 	return 4;
607 }
608 
609 static void
610 r535_head_vblank_put(struct nvkm_head *head)
611 {
612 	struct nvkm_device *device = head->disp->engine.subdev.device;
613 
614 	nvkm_mask(device, 0x611d80 + (head->id * 4), 0x00000002, 0x00000000);
615 }
616 
617 static void
618 r535_head_vblank_get(struct nvkm_head *head)
619 {
620 	struct nvkm_device *device = head->disp->engine.subdev.device;
621 
622 	nvkm_wr32(device, 0x611800 + (head->id * 4), 0x00000002);
623 	nvkm_mask(device, 0x611d80 + (head->id * 4), 0x00000002, 0x00000002);
624 }
625 
626 static void
627 r535_head_state(struct nvkm_head *head, struct nvkm_head_state *state)
628 {
629 }
630 
631 static const struct nvkm_head_func
632 r535_head = {
633 	.state = r535_head_state,
634 	.vblank_get = r535_head_vblank_get,
635 	.vblank_put = r535_head_vblank_put,
636 };
637 
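/* Look up (or create) the nvkm_conn for a display ID from connector data
 * queried via GSP-RM, reusing an existing connector with the same index.
 */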
638 static struct nvkm_conn *
639 r535_conn_new(struct nvkm_disp *disp, u32 id)
640 {
641 	NV0073_CTRL_SPECIFIC_GET_CONNECTOR_DATA_PARAMS *ctrl;
642 	struct nvbios_connE dcbE = {};
643 	struct nvkm_conn *conn;
644 	int ret, index;
645 
646 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
647 				    NV0073_CTRL_CMD_SPECIFIC_GET_CONNECTOR_DATA, sizeof(*ctrl));
648 	if (IS_ERR(ctrl))
649 		return (void *)ctrl;
650 
651 	ctrl->subDeviceInstance = 0;
652 	ctrl->displayId = BIT(id);
653 
654 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
655 	if (ret) {
656 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
657 		return ERR_PTR(ret);
658 	}
659 
660 	list_for_each_entry(conn, &disp->conns, head) {
661 		if (conn->index == ctrl->data[0].index) {
662 			nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
663 			return conn;
664 		}
665 	}
666 
667 	dcbE.type = ctrl->data[0].type;
668 	index = ctrl->data[0].index;
669 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
670 
671 	ret = nvkm_conn_new(disp, index, &dcbE, &conn);
672 	if (ret)
673 		return ERR_PTR(ret);
674 
675 	list_add_tail(&conn->head, &disp->conns);
676 	return conn;
677 }
678 
679 static void
680 r535_outp_release(struct nvkm_outp *outp)
681 {
682 	outp->disp->rm.assigned_sors &= ~BIT(outp->ior->id);
683 	outp->ior->asy.outp = NULL;
684 	outp->ior = NULL;
685 }
686 
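/* Ask GSP-RM to assign an SOR to this output (optionally with audio), record
 * it in the assigned_sors mask and attach the matching IOR to the output.
 */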
687 static int
688 r535_outp_acquire(struct nvkm_outp *outp, bool hda)
689 {
690 	struct nvkm_disp *disp = outp->disp;
691 	struct nvkm_ior *ior;
692 	NV0073_CTRL_DFP_ASSIGN_SOR_PARAMS *ctrl;
693 	int ret, or;
694 
695 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
696 				    NV0073_CTRL_CMD_DFP_ASSIGN_SOR, sizeof(*ctrl));
697 	if (IS_ERR(ctrl))
698 		return PTR_ERR(ctrl);
699 
700 	ctrl->subDeviceInstance = 0;
701 	ctrl->displayId = BIT(outp->index);
702 	ctrl->sorExcludeMask = disp->rm.assigned_sors;
703 	if (hda)
704 		ctrl->flags |= NVDEF(NV0073_CTRL, DFP_ASSIGN_SOR_FLAGS, AUDIO, OPTIMAL);
705 
706 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
707 	if (ret) {
708 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
709 		return ret;
710 	}
711 
712 	for (or = 0; or < ARRAY_SIZE(ctrl->sorAssignListWithTag); or++) {
713 		if (ctrl->sorAssignListWithTag[or].displayMask & BIT(outp->index)) {
714 			disp->rm.assigned_sors |= BIT(or);
715 			break;
716 		}
717 	}
718 
719 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
720 
721 	if (WARN_ON(or == ARRAY_SIZE(ctrl->sorAssignListWithTag)))
722 		return -EINVAL;
723 
724 	ior = nvkm_ior_find(disp, SOR, or);
725 	if (WARN_ON(!ior))
726 		return -EINVAL;
727 
728 	nvkm_outp_acquire_ior(outp, NVKM_OUTP_USER, ior);
729 	return 0;
730 }
731 
732 static int
733 r535_disp_head_displayid(struct nvkm_disp *disp, int head, u32 *displayid)
734 {
735 	NV0073_CTRL_SYSTEM_GET_ACTIVE_PARAMS *ctrl;
736 	int ret;
737 
738 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
739 				    NV0073_CTRL_CMD_SYSTEM_GET_ACTIVE, sizeof(*ctrl));
740 	if (IS_ERR(ctrl))
741 		return PTR_ERR(ctrl);
742 
743 	ctrl->subDeviceInstance = 0;
744 	ctrl->head = head;
745 
746 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
747 	if (ret) {
748 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
749 		return ret;
750 	}
751 
752 	*displayid = ctrl->displayId;
753 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
754 	return 0;
755 }
756 
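/* Inherit the SOR/protocol state GSP-RM already has driving this output
 * (e.g. a display lit by firmware at boot), so nvkm's view matches.
 */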
757 static struct nvkm_ior *
758 r535_outp_inherit(struct nvkm_outp *outp)
759 {
760 	struct nvkm_disp *disp = outp->disp;
761 	struct nvkm_head *head;
762 	u32 displayid;
763 	int ret;
764 
765 	list_for_each_entry(head, &disp->heads, head) {
766 		ret = r535_disp_head_displayid(disp, head->id, &displayid);
767 		if (WARN_ON(ret))
768 			return NULL;
769 
770 		if (displayid == BIT(outp->index)) {
771 			NV0073_CTRL_SPECIFIC_OR_GET_INFO_PARAMS *ctrl;
772 			u32 id, proto;
773 			struct nvkm_ior *ior;
774 
775 			ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
776 						    NV0073_CTRL_CMD_SPECIFIC_OR_GET_INFO,
777 						    sizeof(*ctrl));
778 			if (IS_ERR(ctrl))
779 				return NULL;
780 
781 			ctrl->subDeviceInstance = 0;
782 			ctrl->displayId = displayid;
783 
784 			ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
785 			if (ret) {
786 				nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
787 				return NULL;
788 			}
789 
790 			id = ctrl->index;
791 			proto = ctrl->protocol;
792 			nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
793 
794 			ior = nvkm_ior_find(disp, SOR, id);
795 			if (WARN_ON(!ior))
796 				return NULL;
797 
798 			switch (proto) {
799 			case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_SINGLE_TMDS_A:
800 				ior->arm.proto = TMDS;
801 				ior->arm.link = 1;
802 				break;
803 			case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_SINGLE_TMDS_B:
804 				ior->arm.proto = TMDS;
805 				ior->arm.link = 2;
806 				break;
807 			case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DUAL_TMDS:
808 				ior->arm.proto = TMDS;
809 				ior->arm.link = 3;
810 				break;
811 			case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DP_A:
812 				ior->arm.proto = DP;
813 				ior->arm.link = 1;
814 				break;
815 			case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DP_B:
816 				ior->arm.proto = DP;
817 				ior->arm.link = 2;
818 				break;
819 			default:
820 				WARN_ON(1);
821 				return NULL;
822 			}
823 
824 			ior->arm.proto_evo = proto;
825 			ior->arm.head = BIT(head->id);
826 			disp->rm.assigned_sors |= BIT(ior->id);
827 			return ior;
828 		}
829 	}
830 
831 	return NULL;
832 }
833 
834 static int
835 r535_outp_dfp_get_info(struct nvkm_outp *outp)
836 {
837 	NV0073_CTRL_DFP_GET_INFO_PARAMS *ctrl;
838 	struct nvkm_disp *disp = outp->disp;
839 	int ret;
840 
841 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom, NV0073_CTRL_CMD_DFP_GET_INFO, sizeof(*ctrl));
842 	if (IS_ERR(ctrl))
843 		return PTR_ERR(ctrl);
844 
845 	ctrl->displayId = BIT(outp->index);
846 
847 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
848 	if (ret) {
849 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
850 		return ret;
851 	}
852 
853 	nvkm_debug(&disp->engine.subdev, "DFP %08x: flags:%08x flags2:%08x\n",
854 		   ctrl->displayId, ctrl->flags, ctrl->flags2);
855 
856 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
857 	return 0;
858 }
859 
860 static int
861 r535_outp_detect(struct nvkm_outp *outp)
862 {
863 	NV0073_CTRL_SYSTEM_GET_CONNECT_STATE_PARAMS *ctrl;
864 	struct nvkm_disp *disp = outp->disp;
865 	int ret;
866 
867 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
868 				    NV0073_CTRL_CMD_SYSTEM_GET_CONNECT_STATE, sizeof(*ctrl));
869 	if (IS_ERR(ctrl))
870 		return PTR_ERR(ctrl);
871 
872 	ctrl->subDeviceInstance = 0;
873 	ctrl->displayMask = BIT(outp->index);
874 
875 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
876 	if (ret) {
877 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
878 		return ret;
879 	}
880 
881 	if (ctrl->displayMask & BIT(outp->index)) {
882 		ret = r535_outp_dfp_get_info(outp);
883 		if (ret == 0)
884 			ret = 1;
885 	} else {
886 		ret = 0;
887 	}
888 
889 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
890 	return ret;
891 }
892 
893 static int
894 r535_dp_mst_id_put(struct nvkm_outp *outp, u32 id)
895 {
896 	NV0073_CTRL_CMD_DP_TOPOLOGY_FREE_DISPLAYID_PARAMS *ctrl;
897 	struct nvkm_disp *disp = outp->disp;
898 
899 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
900 				    NV0073_CTRL_CMD_DP_TOPOLOGY_FREE_DISPLAYID, sizeof(*ctrl));
901 	if (IS_ERR(ctrl))
902 		return PTR_ERR(ctrl);
903 
904 	ctrl->subDeviceInstance = 0;
905 	ctrl->displayId = id;
906 	return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
907 }
908 
909 static int
910 r535_dp_mst_id_get(struct nvkm_outp *outp, u32 *pid)
911 {
912 	NV0073_CTRL_CMD_DP_TOPOLOGY_ALLOCATE_DISPLAYID_PARAMS *ctrl;
913 	struct nvkm_disp *disp = outp->disp;
914 	int ret;
915 
916 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
917 				    NV0073_CTRL_CMD_DP_TOPOLOGY_ALLOCATE_DISPLAYID,
918 				    sizeof(*ctrl));
919 	if (IS_ERR(ctrl))
920 		return PTR_ERR(ctrl);
921 
922 	ctrl->subDeviceInstance = 0;
923 	ctrl->displayId = BIT(outp->index);
924 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
925 	if (ret) {
926 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
927 		return ret;
928 	}
929 
930 	*pid = ctrl->displayIdAssigned;
931 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
932 	return 0;
933 }
934 
935 static int
936 r535_dp_drive(struct nvkm_outp *outp, u8 lanes, u8 pe[4], u8 vs[4])
937 {
938 	NV0073_CTRL_DP_LANE_DATA_PARAMS *ctrl;
939 	struct nvkm_disp *disp = outp->disp;
940 
941 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
942 				    NV0073_CTRL_CMD_DP_SET_LANE_DATA, sizeof(*ctrl));
943 	if (IS_ERR(ctrl))
944 		return PTR_ERR(ctrl);
945 
946 	ctrl->displayId = BIT(outp->index);
947 	ctrl->numLanes = lanes;
948 	for (int i = 0; i < lanes; i++)
949 		ctrl->data[i] = NVVAL(NV0073_CTRL, DP_LANE_DATA,  PREEMPHASIS, pe[i]) |
950 				NVVAL(NV0073_CTRL, DP_LANE_DATA, DRIVECURRENT, vs[i]);
951 
952 	return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
953 }
954 
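/* Request link training from GSP-RM for one target (sink or LTTPR); on
 * -EAGAIN/-EBUSY with a non-zero retryTimeMs, wait as instructed and retry.
 */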
955 static int
956 r535_dp_train_target(struct nvkm_outp *outp, u8 target, bool mst, u8 link_nr, u8 link_bw)
957 {
958 	struct nvkm_disp *disp = outp->disp;
959 	NV0073_CTRL_DP_CTRL_PARAMS *ctrl;
960 	int ret, retries;
961 	u32 cmd, data;
962 
963 	cmd = NVDEF(NV0073_CTRL, DP_CMD, SET_LANE_COUNT, TRUE) |
964 	      NVDEF(NV0073_CTRL, DP_CMD, SET_LINK_BW, TRUE) |
965 	      NVDEF(NV0073_CTRL, DP_CMD, TRAIN_PHY_REPEATER, YES);
966 	data = NVVAL(NV0073_CTRL, DP_DATA, SET_LANE_COUNT, link_nr) |
967 	       NVVAL(NV0073_CTRL, DP_DATA, SET_LINK_BW, link_bw) |
968 	       NVVAL(NV0073_CTRL, DP_DATA, TARGET, target);
969 
970 	if (mst)
971 		cmd |= NVDEF(NV0073_CTRL, DP_CMD, SET_FORMAT_MODE, MULTI_STREAM);
972 
973 	if (outp->dp.dpcd[DPCD_RC02] & DPCD_RC02_ENHANCED_FRAME_CAP)
974 		cmd |= NVDEF(NV0073_CTRL, DP_CMD, SET_ENHANCED_FRAMING, TRUE);
975 
976 	if (target == 0 &&
977 	     (outp->dp.dpcd[DPCD_RC02] & 0x20) &&
978 	    !(outp->dp.dpcd[DPCD_RC03] & DPCD_RC03_TPS4_SUPPORTED))
979 		cmd |= NVDEF(NV0073_CTRL, DP_CMD, POST_LT_ADJ_REQ_GRANTED, YES);
980 
981 	/* We should retry up to 3 times, but only if GSP asks politely */
982 	for (retries = 0; retries < 3; ++retries) {
983 		ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom, NV0073_CTRL_CMD_DP_CTRL,
984 					    sizeof(*ctrl));
985 		if (IS_ERR(ctrl))
986 			return PTR_ERR(ctrl);
987 
988 		ctrl->subDeviceInstance = 0;
989 		ctrl->displayId = BIT(outp->index);
990 		ctrl->retryTimeMs = 0;
991 		ctrl->cmd = cmd;
992 		ctrl->data = data;
993 
994 		ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
995 		if ((ret == -EAGAIN || ret == -EBUSY) && ctrl->retryTimeMs) {
996 			/*
997 			 * Device (likely an eDP panel) isn't ready yet, wait for the time specified
998 			 * by GSP before retrying again
999 			 */
1000 			nvkm_debug(&disp->engine.subdev,
1001 				   "Waiting %dms for GSP LT panel delay before retrying\n",
1002 				   ctrl->retryTimeMs);
1003 			msleep(ctrl->retryTimeMs);
1004 			nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1005 		} else {
1006 			/* GSP didn't say to retry, or we were successful */
1007 			if (ctrl->err)
1008 				ret = -EIO;
1009 			nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1010 			break;
1011 		}
1012 	}
1013 
1014 	return ret;
1015 }
1016 
1017 static int
1018 r535_dp_train(struct nvkm_outp *outp, bool retrain)
1019 {
1020 	for (int target = outp->dp.lttprs; target >= 0; target--) {
1021 		int ret = r535_dp_train_target(outp, target, outp->dp.lt.mst,
1022 							     outp->dp.lt.nr,
1023 							     outp->dp.lt.bw);
1024 		if (ret)
1025 			return ret;
1026 	}
1027 
1028 	return 0;
1029 }
1030 
1031 static int
1032 r535_dp_rates(struct nvkm_outp *outp)
1033 {
1034 	NV0073_CTRL_CMD_DP_CONFIG_INDEXED_LINK_RATES_PARAMS *ctrl;
1035 	struct nvkm_disp *disp = outp->disp;
1036 
1037 	if (outp->conn->info.type != DCB_CONNECTOR_eDP ||
1038 	    !outp->dp.rates || outp->dp.rate[0].dpcd < 0)
1039 		return 0;
1040 
1041 	if (WARN_ON(outp->dp.rates > ARRAY_SIZE(ctrl->linkRateTbl)))
1042 		return -EINVAL;
1043 
1044 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1045 				    NV0073_CTRL_CMD_DP_CONFIG_INDEXED_LINK_RATES, sizeof(*ctrl));
1046 	if (IS_ERR(ctrl))
1047 		return PTR_ERR(ctrl);
1048 
1049 	ctrl->displayId = BIT(outp->index);
1050 	for (int i = 0; i < outp->dp.rates; i++)
1051 		ctrl->linkRateTbl[outp->dp.rate[i].dpcd] = outp->dp.rate[i].rate * 10 / 200;
1052 
1053 	return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
1054 }
1055 
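/* Perform a DP AUX transaction via GSP-RM, honouring the same panel-ready
 * retry delay as link training; returns the AUX reply type on success.
 */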
1056 static int
1057 r535_dp_aux_xfer(struct nvkm_outp *outp, u8 type, u32 addr, u8 *data, u8 *psize)
1058 {
1059 	struct nvkm_disp *disp = outp->disp;
1060 	NV0073_CTRL_DP_AUXCH_CTRL_PARAMS *ctrl;
1061 	u8 size = *psize;
1062 	int ret;
1063 	int retries;
1064 
1065 	for (retries = 0; retries < 3; ++retries) {
1066 		ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom, NV0073_CTRL_CMD_DP_AUXCH_CTRL, sizeof(*ctrl));
1067 		if (IS_ERR(ctrl))
1068 			return PTR_ERR(ctrl);
1069 
1070 		ctrl->subDeviceInstance = 0;
1071 		ctrl->displayId = BIT(outp->index);
1072 		ctrl->bAddrOnly = !size;
1073 		ctrl->cmd = type;
1074 		if (ctrl->bAddrOnly) {
1075 			ctrl->cmd = NVDEF_SET(ctrl->cmd, NV0073_CTRL, DP_AUXCH_CMD, REQ_TYPE, WRITE);
1076 			ctrl->cmd = NVDEF_SET(ctrl->cmd, NV0073_CTRL, DP_AUXCH_CMD,  I2C_MOT, FALSE);
1077 		}
1078 		ctrl->addr = addr;
1079 		ctrl->size = !ctrl->bAddrOnly ? (size - 1) : 0;
1080 		memcpy(ctrl->data, data, size);
1081 
1082 		ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
1083 		if ((ret == -EAGAIN || ret == -EBUSY) && ctrl->retryTimeMs) {
1084 			/*
1085 			 * Device (likely an eDP panel) isn't ready yet, wait for the time specified
1086 			 * by GSP before retrying again
1087 			 */
1088 			nvkm_debug(&disp->engine.subdev,
1089 				   "Waiting %dms for GSP LT panel delay before retrying in AUX\n",
1090 				   ctrl->retryTimeMs);
1091 			msleep(ctrl->retryTimeMs);
1092 			nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1093 		} else {
1094 			memcpy(data, ctrl->data, size);
1095 			*psize = ctrl->size;
1096 			ret = ctrl->replyType;
1097 			nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1098 			break;
1099 		}
1100 	}
1101 	return ret;
1102 }
1103 
1104 static int
1105 r535_dp_aux_pwr(struct nvkm_outp *outp, bool pu)
1106 {
1107 	return 0;
1108 }
1109 
1110 static void
1111 r535_dp_release(struct nvkm_outp *outp)
1112 {
1113 	if (!outp->dp.lt.bw) {
1114 		if (!WARN_ON(!outp->dp.rates))
1115 			outp->dp.lt.bw = outp->dp.rate[0].rate / 27000;
1116 		else
1117 			outp->dp.lt.bw = 0x06;
1118 	}
1119 
1120 	outp->dp.lt.nr = 0;
1121 
1122 	r535_dp_train_target(outp, 0, outp->dp.lt.mst, outp->dp.lt.nr, outp->dp.lt.bw);
1123 	r535_outp_release(outp);
1124 }
1125 
1126 static int
1127 r535_dp_acquire(struct nvkm_outp *outp, bool hda)
1128 {
1129 	int ret;
1130 
1131 	ret = r535_outp_acquire(outp, hda);
1132 	if (ret)
1133 		return ret;
1134 
1135 	return 0;
1136 }
1137 
1138 static const struct nvkm_outp_func
1139 r535_dp = {
1140 	.detect = r535_outp_detect,
1141 	.inherit = r535_outp_inherit,
1142 	.acquire = r535_dp_acquire,
1143 	.release = r535_dp_release,
1144 	.dp.aux_pwr = r535_dp_aux_pwr,
1145 	.dp.aux_xfer = r535_dp_aux_xfer,
1146 	.dp.mst_id_get = r535_dp_mst_id_get,
1147 	.dp.mst_id_put = r535_dp_mst_id_put,
1148 	.dp.rates = r535_dp_rates,
1149 	.dp.train = r535_dp_train,
1150 	.dp.drive = r535_dp_drive,
1151 };
1152 
1153 static int
1154 r535_tmds_edid_get(struct nvkm_outp *outp, u8 *data, u16 *psize)
1155 {
1156 	NV0073_CTRL_SPECIFIC_GET_EDID_V2_PARAMS *ctrl;
1157 	struct nvkm_disp *disp = outp->disp;
1158 	int ret = -E2BIG;
1159 
1160 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1161 				    NV0073_CTRL_CMD_SPECIFIC_GET_EDID_V2, sizeof(*ctrl));
1162 	if (IS_ERR(ctrl))
1163 		return PTR_ERR(ctrl);
1164 
1165 	ctrl->subDeviceInstance = 0;
1166 	ctrl->displayId = BIT(outp->index);
1167 
1168 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
1169 	if (ret) {
1170 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1171 		return ret;
1172 	}
1173 
1174 	ret = -E2BIG;
1175 	if (ctrl->bufferSize <= *psize) {
1176 		memcpy(data, ctrl->edidBuffer, ctrl->bufferSize);
1177 		*psize = ctrl->bufferSize;
1178 		ret = 0;
1179 	}
1180 
1181 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1182 	return ret;
1183 }
1184 
1185 static const struct nvkm_outp_func
1186 r535_tmds = {
1187 	.detect = r535_outp_detect,
1188 	.inherit = r535_outp_inherit,
1189 	.acquire = r535_outp_acquire,
1190 	.release = r535_outp_release,
1191 	.edid_get = r535_tmds_edid_get,
1192 };
1193 
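/* Create an nvkm_outp for one display ID reported by GSP-RM: query the OR
 * type/protocol, look up its connector, and register it as TMDS or DP.
 */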
1194 static int
1195 r535_outp_new(struct nvkm_disp *disp, u32 id)
1196 {
1197 	NV0073_CTRL_SPECIFIC_OR_GET_INFO_PARAMS *ctrl;
1198 	enum nvkm_ior_proto proto;
1199 	struct dcb_output dcbE = {};
1200 	struct nvkm_conn *conn;
1201 	struct nvkm_outp *outp;
1202 	u8 locn, link = 0;
1203 	int ret;
1204 
1205 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1206 				    NV0073_CTRL_CMD_SPECIFIC_OR_GET_INFO, sizeof(*ctrl));
1207 	if (IS_ERR(ctrl))
1208 		return PTR_ERR(ctrl);
1209 
1210 	ctrl->subDeviceInstance = 0;
1211 	ctrl->displayId = BIT(id);
1212 
1213 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
1214 	if (ret) {
1215 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1216 		return ret;
1217 	}
1218 
1219 	switch (ctrl->type) {
1220 	case NV0073_CTRL_SPECIFIC_OR_TYPE_NONE:
1221 		return 0;
1222 	case NV0073_CTRL_SPECIFIC_OR_TYPE_SOR:
1223 		switch (ctrl->protocol) {
1224 		case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_SINGLE_TMDS_A:
1225 			proto = TMDS;
1226 			link = 1;
1227 			break;
1228 		case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_SINGLE_TMDS_B:
1229 			proto = TMDS;
1230 			link = 2;
1231 			break;
1232 		case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DUAL_TMDS:
1233 			proto = TMDS;
1234 			link = 3;
1235 			break;
1236 		case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DP_A:
1237 			proto = DP;
1238 			link = 1;
1239 			break;
1240 		case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DP_B:
1241 			proto = DP;
1242 			link = 2;
1243 			break;
1244 		default:
1245 			WARN_ON(1);
1246 			return -EINVAL;
1247 		}
1248 
1249 		break;
1250 	default:
1251 		WARN_ON(1);
1252 		return -EINVAL;
1253 	}
1254 
1255 	locn = ctrl->location;
1256 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1257 
1258 	conn = r535_conn_new(disp, id);
1259 	if (IS_ERR(conn))
1260 		return PTR_ERR(conn);
1261 
1262 	switch (proto) {
1263 	case TMDS: dcbE.type = DCB_OUTPUT_TMDS; break;
1264 	case   DP: dcbE.type = DCB_OUTPUT_DP; break;
1265 	default:
1266 		WARN_ON(1);
1267 		return -EINVAL;
1268 	}
1269 
1270 	dcbE.location = locn;
1271 	dcbE.connector = conn->index;
1272 	dcbE.heads = disp->head.mask;
1273 	dcbE.i2c_index = 0xff;
1274 	dcbE.link = dcbE.sorconf.link = link;
1275 
1276 	if (proto == TMDS) {
1277 		ret = nvkm_outp_new_(&r535_tmds, disp, id, &dcbE, &outp);
1278 		if (ret)
1279 			return ret;
1280 	} else {
1281 		NV0073_CTRL_CMD_DP_GET_CAPS_PARAMS *ctrl;
1282 		bool mst, wm;
1283 
1284 		ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1285 					    NV0073_CTRL_CMD_DP_GET_CAPS, sizeof(*ctrl));
1286 		if (IS_ERR(ctrl))
1287 			return PTR_ERR(ctrl);
1288 
1289 		ctrl->sorIndex = ~0;
1290 
1291 		ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
1292 		if (ret) {
1293 			nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1294 			return ret;
1295 		}
1296 
1297 		switch (NVVAL_GET(ctrl->maxLinkRate, NV0073_CTRL_CMD, DP_GET_CAPS, MAX_LINK_RATE)) {
1298 		case NV0073_CTRL_CMD_DP_GET_CAPS_MAX_LINK_RATE_1_62:
1299 			dcbE.dpconf.link_bw = 0x06;
1300 			break;
1301 		case NV0073_CTRL_CMD_DP_GET_CAPS_MAX_LINK_RATE_2_70:
1302 			dcbE.dpconf.link_bw = 0x0a;
1303 			break;
1304 		case NV0073_CTRL_CMD_DP_GET_CAPS_MAX_LINK_RATE_5_40:
1305 			dcbE.dpconf.link_bw = 0x14;
1306 			break;
1307 		case NV0073_CTRL_CMD_DP_GET_CAPS_MAX_LINK_RATE_8_10:
1308 			dcbE.dpconf.link_bw = 0x1e;
1309 			break;
1310 		default:
1311 			dcbE.dpconf.link_bw = 0x00;
1312 			break;
1313 		}
1314 
1315 		mst = ctrl->bIsMultistreamSupported;
1316 		wm = ctrl->bHasIncreasedWatermarkLimits;
1317 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1318 
1319 		if (WARN_ON(!dcbE.dpconf.link_bw))
1320 			return -EINVAL;
1321 
1322 		dcbE.dpconf.link_nr = 4;
1323 
1324 		ret = nvkm_outp_new_(&r535_dp, disp, id, &dcbE, &outp);
1325 		if (ret)
1326 			return ret;
1327 
1328 		outp->dp.mst = mst;
1329 		outp->dp.increased_wm = wm;
1330 	}
1331 
1332 
1333 	outp->conn = conn;
1334 	list_add_tail(&outp->head, &disp->outps);
1335 	return 0;
1336 }
1337 
1338 static void
1339 r535_disp_irq(struct nvkm_gsp_event *event, void *repv, u32 repc)
1340 {
1341 	struct nvkm_disp *disp = container_of(event, typeof(*disp), rm.irq);
1342 	Nv2080DpIrqNotification *irq = repv;
1343 
1344 	if (WARN_ON(repc < sizeof(*irq)))
1345 		return;
1346 
1347 	nvkm_debug(&disp->engine.subdev, "event: dp irq displayId %08x\n", irq->displayId);
1348 
1349 	if (irq->displayId)
1350 		nvkm_event_ntfy(&disp->rm.event, fls(irq->displayId) - 1, NVKM_DPYID_IRQ);
1351 }
1352 
1353 static void
1354 r535_disp_hpd(struct nvkm_gsp_event *event, void *repv, u32 repc)
1355 {
1356 	struct nvkm_disp *disp = container_of(event, typeof(*disp), rm.hpd);
1357 	Nv2080HotplugNotification *hpd = repv;
1358 
1359 	if (WARN_ON(repc < sizeof(*hpd)))
1360 		return;
1361 
1362 	nvkm_debug(&disp->engine.subdev, "event: hpd plug %08x unplug %08x\n",
1363 		   hpd->plugDisplayMask, hpd->unplugDisplayMask);
1364 
1365 	for (int i = 0; i < 31; i++) {
1366 		u32 mask = 0;
1367 
1368 		if (hpd->plugDisplayMask & BIT(i))
1369 			mask |= NVKM_DPYID_PLUG;
1370 		if (hpd->unplugDisplayMask & BIT(i))
1371 			mask |= NVKM_DPYID_UNPLUG;
1372 
1373 		if (mask)
1374 			nvkm_event_ntfy(&disp->rm.event, i, mask);
1375 	}
1376 }
1377 
1378 static const struct nvkm_event_func
1379 r535_disp_event = {
1380 };
1381 
1382 static void
1383 r535_disp_intr_head_timing(struct nvkm_disp *disp, int head)
1384 {
1385 	struct nvkm_subdev *subdev = &disp->engine.subdev;
1386 	struct nvkm_device *device = subdev->device;
1387 	u32 stat = nvkm_rd32(device, 0x611c00 + (head * 0x04));
1388 
1389 	if (stat & 0x00000002) {
1390 		nvkm_disp_vblank(disp, head);
1391 
1392 		nvkm_wr32(device, 0x611800 + (head * 0x04), 0x00000002);
1393 	}
1394 }
1395 
1396 static irqreturn_t
1397 r535_disp_intr(struct nvkm_inth *inth)
1398 {
1399 	struct nvkm_disp *disp = container_of(inth, typeof(*disp), engine.subdev.inth);
1400 	struct nvkm_subdev *subdev = &disp->engine.subdev;
1401 	struct nvkm_device *device = subdev->device;
1402 	unsigned long mask = nvkm_rd32(device, 0x611ec0) & 0x000000ff;
1403 	int head;
1404 
1405 	for_each_set_bit(head, &mask, 8)
1406 		r535_disp_intr_head_timing(disp, head);
1407 
1408 	return IRQ_HANDLED;
1409 }
1410 
1411 static void
1412 r535_disp_fini(struct nvkm_disp *disp, bool suspend)
1413 {
1414 	if (!disp->engine.subdev.use.enabled)
1415 		return;
1416 
1417 	nvkm_gsp_rm_free(&disp->rm.object);
1418 
1419 	if (!suspend) {
1420 		nvkm_gsp_event_dtor(&disp->rm.irq);
1421 		nvkm_gsp_event_dtor(&disp->rm.hpd);
1422 		nvkm_event_fini(&disp->rm.event);
1423 
1424 		nvkm_gsp_rm_free(&disp->rm.objcom);
1425 		nvkm_gsp_device_dtor(&disp->rm.device);
1426 		nvkm_gsp_client_dtor(&disp->rm.client);
1427 	}
1428 }
1429 
1430 static int
1431 r535_disp_init(struct nvkm_disp *disp)
1432 {
1433 	int ret;
1434 
1435 	ret = nvkm_gsp_rm_alloc(&disp->rm.device.object, disp->func->root.oclass << 16,
1436 				disp->func->root.oclass, 0, &disp->rm.object);
1437 	if (ret)
1438 		return ret;
1439 
1440 	return 0;
1441 }
1442 
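/* One-time setup: allocate display instance memory and hand it to GSP-RM,
 * create the RM client/device and NV04_DISPLAY_COMMON objects, then enumerate
 * heads, SORs and outputs and hook up HPD/DP-IRQ events and the interrupt.
 */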
1443 static int
1444 r535_disp_oneinit(struct nvkm_disp *disp)
1445 {
1446 	struct nvkm_device *device = disp->engine.subdev.device;
1447 	struct nvkm_gsp *gsp = device->gsp;
1448 	NV2080_CTRL_INTERNAL_DISPLAY_WRITE_INST_MEM_PARAMS *ctrl;
1449 	int ret, i;
1450 
1451 	/* RAMIN. */
1452 	ret = nvkm_gpuobj_new(device, 0x10000, 0x10000, false, NULL, &disp->inst);
1453 	if (ret)
1454 		return ret;
1455 
1456 	if (WARN_ON(nvkm_memory_target(disp->inst->memory) != NVKM_MEM_TARGET_VRAM))
1457 		return -EINVAL;
1458 
1459 	ctrl = nvkm_gsp_rm_ctrl_get(&gsp->internal.device.subdevice,
1460 				    NV2080_CTRL_CMD_INTERNAL_DISPLAY_WRITE_INST_MEM,
1461 				    sizeof(*ctrl));
1462 	if (IS_ERR(ctrl))
1463 		return PTR_ERR(ctrl);
1464 
1465 	ctrl->instMemPhysAddr = nvkm_memory_addr(disp->inst->memory);
1466 	ctrl->instMemSize = nvkm_memory_size(disp->inst->memory);
1467 	ctrl->instMemAddrSpace = ADDR_FBMEM;
1468 	ctrl->instMemCpuCacheAttr = NV_MEMORY_WRITECOMBINED;
1469 
1470 	ret = nvkm_gsp_rm_ctrl_wr(&gsp->internal.device.subdevice, ctrl);
1471 	if (ret)
1472 		return ret;
1473 
1474 	/* OBJs. */
1475 	ret = nvkm_gsp_client_device_ctor(gsp, &disp->rm.client, &disp->rm.device);
1476 	if (ret)
1477 		return ret;
1478 
1479 	ret = nvkm_gsp_rm_alloc(&disp->rm.device.object, 0x00730000, NV04_DISPLAY_COMMON, 0,
1480 				&disp->rm.objcom);
1481 	if (ret)
1482 		return ret;
1483 
1484 	{
1485 		NV2080_CTRL_INTERNAL_DISPLAY_GET_STATIC_INFO_PARAMS *ctrl;
1486 
1487 		ctrl = nvkm_gsp_rm_ctrl_rd(&gsp->internal.device.subdevice,
1488 					   NV2080_CTRL_CMD_INTERNAL_DISPLAY_GET_STATIC_INFO,
1489 					   sizeof(*ctrl));
1490 		if (IS_ERR(ctrl))
1491 			return PTR_ERR(ctrl);
1492 
1493 		disp->wndw.mask = ctrl->windowPresentMask;
1494 		disp->wndw.nr = fls(disp->wndw.mask);
1495 		nvkm_gsp_rm_ctrl_done(&gsp->internal.device.subdevice, ctrl);
1496 	}
1497 
1498 	/* */
1499 	{
1500 #if defined(CONFIG_ACPI) && defined(CONFIG_X86)
1501 		NV2080_CTRL_INTERNAL_INIT_BRIGHTC_STATE_LOAD_PARAMS *ctrl;
1502 		struct nvkm_gsp_object *subdevice = &disp->rm.client.gsp->internal.device.subdevice;
1503 
1504 		ctrl = nvkm_gsp_rm_ctrl_get(subdevice,
1505 					    NV2080_CTRL_CMD_INTERNAL_INIT_BRIGHTC_STATE_LOAD,
1506 					    sizeof(*ctrl));
1507 		if (IS_ERR(ctrl))
1508 			return PTR_ERR(ctrl);
1509 
1510 		ctrl->status = 0x56; /* NV_ERR_NOT_SUPPORTED */
1511 
1512 		{
1513 			const guid_t NBCI_DSM_GUID =
1514 				GUID_INIT(0xD4A50B75, 0x65C7, 0x46F7,
1515 					  0xBF, 0xB7, 0x41, 0x51, 0x4C, 0xEA, 0x02, 0x44);
1516 			u64 NBCI_DSM_REV = 0x00000102;
1517 			const guid_t NVHG_DSM_GUID =
1518 				GUID_INIT(0x9D95A0A0, 0x0060, 0x4D48,
1519 					  0xB3, 0x4D, 0x7E, 0x5F, 0xEA, 0x12, 0x9F, 0xD4);
1520 			u64 NVHG_DSM_REV = 0x00000102;
1521 			acpi_handle handle = ACPI_HANDLE(device->dev);
1522 
1523 			if (handle && acpi_has_method(handle, "_DSM")) {
1524 				bool nbci = acpi_check_dsm(handle, &NBCI_DSM_GUID, NBCI_DSM_REV,
1525 						           1ULL << 0x00000014);
1526 				bool nvhg = acpi_check_dsm(handle, &NVHG_DSM_GUID, NVHG_DSM_REV,
1527 						           1ULL << 0x00000014);
1528 
1529 				if (nbci || nvhg) {
1530 					union acpi_object argv4 = {
1531 						.buffer.type    = ACPI_TYPE_BUFFER,
1532 						.buffer.length  = sizeof(ctrl->backLightData),
1533 						.buffer.pointer = kmalloc(argv4.buffer.length, GFP_KERNEL),
1534 					}, *obj;
1535 
1536 					obj = acpi_evaluate_dsm(handle, nbci ? &NBCI_DSM_GUID : &NVHG_DSM_GUID,
1537 								0x00000102, 0x14, &argv4);
1538 					if (!obj) {
1539 						acpi_handle_info(handle, "failed to evaluate _DSM\n");
1540 					} else {
1541 						for (int i = 0; i < obj->package.count; i++) {
1542 							union acpi_object *elt = &obj->package.elements[i];
1543 							u32 size;
1544 
1545 							if (elt->integer.value & ~0xffffffffULL)
1546 								size = 8;
1547 							else
1548 								size = 4;
1549 
1550 							memcpy(&ctrl->backLightData[ctrl->backLightDataSize], &elt->integer.value, size);
1551 							ctrl->backLightDataSize += size;
1552 						}
1553 
1554 						ctrl->status = 0;
1555 						ACPI_FREE(obj);
1556 					}
1557 
1558 					kfree(argv4.buffer.pointer);
1559 				}
1560 			}
1561 		}
1562 
1563 		ret = nvkm_gsp_rm_ctrl_wr(subdevice, ctrl);
1564 		if (ret)
1565 			return ret;
1566 #endif
1567 	}
1568 
1569 	/* */
1570 	{
1571 		NV0073_CTRL_CMD_DP_SET_MANUAL_DISPLAYPORT_PARAMS *ctrl;
1572 
1573 		ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1574 					    NV0073_CTRL_CMD_DP_SET_MANUAL_DISPLAYPORT,
1575 					    sizeof(*ctrl));
1576 		if (IS_ERR(ctrl))
1577 			return PTR_ERR(ctrl);
1578 
1579 		ret = nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
1580 		if (ret)
1581 			return ret;
1582 	}
1583 
1584 	/* */
1585 	{
1586 		NV0073_CTRL_SYSTEM_GET_NUM_HEADS_PARAMS *ctrl;
1587 
1588 		ctrl = nvkm_gsp_rm_ctrl_rd(&disp->rm.objcom,
1589 					   NV0073_CTRL_CMD_SYSTEM_GET_NUM_HEADS, sizeof(*ctrl));
1590 		if (IS_ERR(ctrl))
1591 			return PTR_ERR(ctrl);
1592 
1593 		disp->head.nr = ctrl->numHeads;
1594 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1595 	}
1596 
1597 	/* */
1598 	{
1599 		NV0073_CTRL_SPECIFIC_GET_ALL_HEAD_MASK_PARAMS *ctrl;
1600 
1601 		ctrl = nvkm_gsp_rm_ctrl_rd(&disp->rm.objcom,
1602 					   NV0073_CTRL_CMD_SPECIFIC_GET_ALL_HEAD_MASK,
1603 					   sizeof(*ctrl));
1604 		if (IS_ERR(ctrl))
1605 			return PTR_ERR(ctrl);
1606 
1607 		disp->head.mask = ctrl->headMask;
1608 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1609 
1610 		for_each_set_bit(i, &disp->head.mask, disp->head.nr) {
1611 			ret = nvkm_head_new_(&r535_head, disp, i);
1612 			if (ret)
1613 				return ret;
1614 		}
1615 	}
1616 
1617 	disp->sor.nr = disp->func->sor.cnt(disp, &disp->sor.mask);
1618 	nvkm_debug(&disp->engine.subdev, "   SOR(s): %d (%02lx)\n", disp->sor.nr, disp->sor.mask);
1619 	for_each_set_bit(i, &disp->sor.mask, disp->sor.nr) {
1620 		ret = disp->func->sor.new(disp, i);
1621 		if (ret)
1622 			return ret;
1623 	}
1624 
1625 	/* */
1626 	{
1627 		NV0073_CTRL_SYSTEM_GET_SUPPORTED_PARAMS *ctrl;
1628 		unsigned long mask;
1629 		int i;
1630 
1631 		ctrl = nvkm_gsp_rm_ctrl_rd(&disp->rm.objcom,
1632 					   NV0073_CTRL_CMD_SYSTEM_GET_SUPPORTED, sizeof(*ctrl));
1633 		if (IS_ERR(ctrl))
1634 			return PTR_ERR(ctrl);
1635 
1636 		mask = ctrl->displayMask;
1637 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1638 
1639 		for_each_set_bit(i, &mask, 32) {
1640 			ret = r535_outp_new(disp, i);
1641 			if (ret)
1642 				return ret;
1643 		}
1644 	}
1645 
1646 	ret = nvkm_event_init(&r535_disp_event, &gsp->subdev, 3, 32, &disp->rm.event);
1647 	if (WARN_ON(ret))
1648 		return ret;
1649 
1650 	ret = nvkm_gsp_device_event_ctor(&disp->rm.device, 0x007e0000, NV2080_NOTIFIERS_HOTPLUG,
1651 					 r535_disp_hpd, &disp->rm.hpd);
1652 	if (ret)
1653 		return ret;
1654 
1655 	ret = nvkm_gsp_device_event_ctor(&disp->rm.device, 0x007e0001, NV2080_NOTIFIERS_DP_IRQ,
1656 					 r535_disp_irq, &disp->rm.irq);
1657 	if (ret)
1658 		return ret;
1659 
1660 	/* RAMHT. */
1661 	ret = nvkm_ramht_new(device, disp->func->ramht_size ? disp->func->ramht_size :
1662 			     0x1000, 0, disp->inst, &disp->ramht);
1663 	if (ret)
1664 		return ret;
1665 
1666 	ret = nvkm_gsp_intr_stall(gsp, disp->engine.subdev.type, disp->engine.subdev.inst);
1667 	if (ret < 0)
1668 		return ret;
1669 
1670 	ret = nvkm_inth_add(&device->vfn->intr, ret, NVKM_INTR_PRIO_NORMAL, &disp->engine.subdev,
1671 			    r535_disp_intr, &disp->engine.subdev.inth);
1672 	if (ret)
1673 		return ret;
1674 
1675 	nvkm_inth_allow(&disp->engine.subdev.inth);
1676 	return 0;
1677 }
1678 
1679 static void
1680 r535_disp_dtor(struct nvkm_disp *disp)
1681 {
1682 	kfree(disp->func);
1683 }
1684 
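/* Build an RM-backed nvkm_disp_func from the hardware one, swapping in the
 * GSP-RM channel implementations defined above.
 */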
1685 int
1686 r535_disp_new(const struct nvkm_disp_func *hw, struct nvkm_device *device,
1687 	      enum nvkm_subdev_type type, int inst, struct nvkm_disp **pdisp)
1688 {
1689 	struct nvkm_disp_func *rm;
1690 	int ret;
1691 
1692 	if (!(rm = kzalloc(sizeof(*rm) + 6 * sizeof(rm->user[0]), GFP_KERNEL)))
1693 		return -ENOMEM;
1694 
1695 	rm->dtor = r535_disp_dtor;
1696 	rm->oneinit = r535_disp_oneinit;
1697 	rm->init = r535_disp_init;
1698 	rm->fini = r535_disp_fini;
1699 	rm->uevent = hw->uevent;
1700 	rm->sor.cnt = r535_sor_cnt;
1701 	rm->sor.new = r535_sor_new;
1702 	rm->ramht_size = hw->ramht_size;
1703 
1704 	rm->root = hw->root;
1705 
1706 	for (int i = 0; hw->user[i].ctor; i++) {
1707 		switch (hw->user[i].base.oclass & 0xff) {
1708 		case 0x73: rm->user[i] = hw->user[i]; break;
1709 		case 0x7d: rm->user[i] = hw->user[i]; rm->user[i].chan = &r535_core; break;
1710 		case 0x7e: rm->user[i] = hw->user[i]; rm->user[i].chan = &r535_wndw; break;
1711 		case 0x7b: rm->user[i] = hw->user[i]; rm->user[i].chan = &r535_wimm; break;
1712 		case 0x7a: rm->user[i] = hw->user[i]; rm->user[i].chan = &r535_curs; break;
1713 		default:
1714 			WARN_ON(1);
1715 			continue;
1716 		}
1717 	}
1718 
1719 	ret = nvkm_disp_new_(rm, device, type, inst, pdisp);
1720 	if (ret)
1721 		kfree(rm);
1722 
1723 	mutex_init(&(*pdisp)->super.mutex); //XXX
1724 	return ret;
1725 }
1726