1 /*
2 * Copyright 2023 Red Hat Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 */
22 #include "priv.h"
23 #include "chan.h"
24 #include "conn.h"
25 #include "dp.h"
26 #include "head.h"
27 #include "ior.h"
28 #include "outp.h"
29
30 #include <core/ramht.h>
31 #include <subdev/bios.h>
32 #include <subdev/bios/conn.h>
33 #include <subdev/gsp.h>
34 #include <subdev/mmu.h>
35 #include <subdev/vfn.h>
36
37 #include <nvhw/drf.h>
38
39 #include <nvrm/nvtypes.h>
40 #include <nvrm/535.113.01/common/sdk/nvidia/inc/class/cl2080_notification.h>
41 #include <nvrm/535.113.01/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073dfp.h>
42 #include <nvrm/535.113.01/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073dp.h>
43 #include <nvrm/535.113.01/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073specific.h>
44 #include <nvrm/535.113.01/common/sdk/nvidia/inc/ctrl/ctrl0073/ctrl0073system.h>
45 #include <nvrm/535.113.01/common/sdk/nvidia/inc/ctrl/ctrl2080/ctrl2080internal.h>
46 #include <nvrm/535.113.01/common/sdk/nvidia/inc/nvos.h>
47 #include <nvrm/535.113.01/nvidia/generated/g_allclasses.h>
48 #include <nvrm/535.113.01/nvidia/generated/g_mem_desc_nvoc.h>
49 #include <nvrm/535.113.01/nvidia/inc/kernel/os/nv_memory_type.h>
50
51 #include <linux/acpi.h>
52
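/* Map a display channel class to the PRI (BAR0) offset/size of its user-area
 * registers: core (0x7d), window (0x7e), window-immediate (0x7b) and cursor
 * (0x7a) channels each have a fixed base, with a per-head stride for the
 * non-core classes.
 */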
53 static u64
r535_chan_user(struct nvkm_disp_chan *chan, u64 *psize)
55 {
56 switch (chan->object.oclass & 0xff) {
57 case 0x7d: *psize = 0x10000; return 0x680000;
58 case 0x7e: *psize = 0x01000; return 0x690000 + (chan->head * *psize);
59 case 0x7b: *psize = 0x01000; return 0x6b0000 + (chan->head * *psize);
60 case 0x7a: *psize = 0x01000; return 0x6d8000 + (chan->head * *psize);
61 default:
62 BUG_ON(1);
63 break;
64 }
65
66 return 0ULL;
67 }
68
69 static void
r535_chan_intr(struct nvkm_disp_chan *chan, bool en)
71 {
72 }
73
74 static void
r535_chan_fini(struct nvkm_disp_chan *chan)
76 {
77 nvkm_gsp_rm_free(&chan->rm.object);
78 }
79
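/* Describe the channel's pushbuffer (aperture, snooping, address, size) to
 * GSP-RM.  Cursor channels (class 0x7a) have no pushbuffer, so they are
 * registered with valid=0.
 */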
80 static int
r535_chan_push(struct nvkm_disp_chan *chan)
82 {
83 struct nvkm_gsp *gsp = chan->disp->engine.subdev.device->gsp;
84 NV2080_CTRL_INTERNAL_DISPLAY_CHANNEL_PUSHBUFFER_PARAMS *ctrl;
85
86 ctrl = nvkm_gsp_rm_ctrl_get(&gsp->internal.device.subdevice,
87 NV2080_CTRL_CMD_INTERNAL_DISPLAY_CHANNEL_PUSHBUFFER,
88 sizeof(*ctrl));
89 if (IS_ERR(ctrl))
90 return PTR_ERR(ctrl);
91
92 if (chan->memory) {
93 switch (nvkm_memory_target(chan->memory)) {
94 case NVKM_MEM_TARGET_NCOH:
95 ctrl->addressSpace = ADDR_SYSMEM;
96 ctrl->cacheSnoop = 0;
97 break;
98 case NVKM_MEM_TARGET_HOST:
99 ctrl->addressSpace = ADDR_SYSMEM;
100 ctrl->cacheSnoop = 1;
101 break;
102 case NVKM_MEM_TARGET_VRAM:
103 ctrl->addressSpace = ADDR_FBMEM;
104 break;
105 default:
106 WARN_ON(1);
107 return -EINVAL;
108 }
109
110 ctrl->physicalAddr = nvkm_memory_addr(chan->memory);
111 ctrl->limit = nvkm_memory_size(chan->memory) - 1;
112 }
113
114 ctrl->hclass = chan->object.oclass;
115 ctrl->channelInstance = chan->head;
116 ctrl->valid = ((chan->object.oclass & 0xff) != 0x7a) ? 1 : 0;
117
118 return nvkm_gsp_rm_ctrl_wr(&gsp->internal.device.subdevice, ctrl);
119 }
120
121 static int
r535_curs_init(struct nvkm_disp_chan *chan)
123 {
124 NV50VAIO_CHANNELPIO_ALLOCATION_PARAMETERS *args;
125 int ret;
126
127 ret = r535_chan_push(chan);
128 if (ret)
129 return ret;
130
131 args = nvkm_gsp_rm_alloc_get(&chan->disp->rm.object,
132 (chan->object.oclass << 16) | chan->head,
133 chan->object.oclass, sizeof(*args), &chan->rm.object);
134 if (IS_ERR(args))
135 return PTR_ERR(args);
136
137 args->channelInstance = chan->head;
138
139 return nvkm_gsp_rm_alloc_wr(&chan->rm.object, args);
140 }
141
142 static const struct nvkm_disp_chan_func
143 r535_curs_func = {
144 .init = r535_curs_init,
145 .fini = r535_chan_fini,
146 .intr = r535_chan_intr,
147 .user = r535_chan_user,
148 };
149
150 static const struct nvkm_disp_chan_user
151 r535_curs = {
152 .func = &r535_curs_func,
153 .user = 73,
154 };
155
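/* Context DMA objects are still bound through the host-managed RAMHT rather
 * than via GSP-RM; the RAMHT data tags each entry with the channel ID and
 * the RM client handle.
 */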
156 static int
r535_dmac_bind(struct nvkm_disp_chan *chan, struct nvkm_object *object, u32 handle)
158 {
159 return nvkm_ramht_insert(chan->disp->ramht, object, chan->chid.user, -9, handle,
160 chan->chid.user << 25 |
161 (chan->disp->rm.client.object.handle & 0x3fff));
162 }
163
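/* Save the channel's current PUT offset before destroying the RM object, so
 * r535_dmac_init() can hand it back to GSP-RM when resuming from suspend.
 */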
164 static void
r535_dmac_fini(struct nvkm_disp_chan *chan)
166 {
167 struct nvkm_device *device = chan->disp->engine.subdev.device;
168 const u32 uoff = (chan->chid.user - 1) * 0x1000;
169
170 chan->suspend_put = nvkm_rd32(device, 0x690000 + uoff);
171 r535_chan_fini(chan);
172 }
173
174 static int
r535_dmac_init(struct nvkm_disp_chan *chan)
176 {
177 NV50VAIO_CHANNELDMA_ALLOCATION_PARAMETERS *args;
178 int ret;
179
180 ret = r535_chan_push(chan);
181 if (ret)
182 return ret;
183
184 args = nvkm_gsp_rm_alloc_get(&chan->disp->rm.object,
185 (chan->object.oclass << 16) | chan->head,
186 chan->object.oclass, sizeof(*args), &chan->rm.object);
187 if (IS_ERR(args))
188 return PTR_ERR(args);
189
190 args->channelInstance = chan->head;
191 args->offset = chan->suspend_put;
192
193 return nvkm_gsp_rm_alloc_wr(&chan->rm.object, args);
194 }
195
196 static int
r535_dmac_push(struct nvkm_disp_chan *chan, u64 memory)
198 {
199 chan->memory = nvkm_umem_search(chan->object.client, memory);
200 if (IS_ERR(chan->memory))
201 return PTR_ERR(chan->memory);
202
203 return 0;
204 }
205
206 static const struct nvkm_disp_chan_func
207 r535_dmac_func = {
208 .push = r535_dmac_push,
209 .init = r535_dmac_init,
210 .fini = r535_dmac_fini,
211 .intr = r535_chan_intr,
212 .user = r535_chan_user,
213 .bind = r535_dmac_bind,
214 };
215
216 static const struct nvkm_disp_chan_func
217 r535_wimm_func = {
218 .push = r535_dmac_push,
219 .init = r535_dmac_init,
220 .fini = r535_dmac_fini,
221 .intr = r535_chan_intr,
222 .user = r535_chan_user,
223 };
224
225 static const struct nvkm_disp_chan_user
226 r535_wimm = {
227 .func = &r535_wimm_func,
228 .user = 33,
229 };
230
231 static const struct nvkm_disp_chan_user
232 r535_wndw = {
233 .func = &r535_dmac_func,
234 .user = 1,
235 };
236
237 static void
r535_core_fini(struct nvkm_disp_chan *chan)
239 {
240 struct nvkm_device *device = chan->disp->engine.subdev.device;
241
242 chan->suspend_put = nvkm_rd32(device, 0x680000);
243 r535_chan_fini(chan);
244 }
245
246 static const struct nvkm_disp_chan_func
247 r535_core_func = {
248 .push = r535_dmac_push,
249 .init = r535_dmac_init,
250 .fini = r535_core_fini,
251 .intr = r535_chan_intr,
252 .user = r535_chan_user,
253 .bind = r535_dmac_bind,
254 };
255
256 static const struct nvkm_disp_chan_user
257 r535_core = {
258 .func = &r535_core_func,
259 .user = 0,
260 };
261
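/* Backlight brightness is proxied through GSP-RM's
 * SPECIFIC_{GET,SET}_BACKLIGHT_BRIGHTNESS controls.
 */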
262 static int
r535_sor_bl_set(struct nvkm_ior *sor, int lvl)
264 {
265 struct nvkm_disp *disp = sor->disp;
266 NV0073_CTRL_SPECIFIC_BACKLIGHT_BRIGHTNESS_PARAMS *ctrl;
267
268 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
269 NV0073_CTRL_CMD_SPECIFIC_SET_BACKLIGHT_BRIGHTNESS,
270 sizeof(*ctrl));
271 if (IS_ERR(ctrl))
272 return PTR_ERR(ctrl);
273
274 ctrl->displayId = BIT(sor->asy.outp->index);
275 ctrl->brightness = lvl;
276
277 return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
278 }
279
280 static int
r535_sor_bl_get(struct nvkm_ior *sor)
282 {
283 struct nvkm_disp *disp = sor->disp;
284 NV0073_CTRL_SPECIFIC_BACKLIGHT_BRIGHTNESS_PARAMS *ctrl;
285 int ret, lvl;
286
287 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
288 NV0073_CTRL_CMD_SPECIFIC_GET_BACKLIGHT_BRIGHTNESS,
289 sizeof(*ctrl));
290 if (IS_ERR(ctrl))
291 return PTR_ERR(ctrl);
292
293 ctrl->displayId = BIT(sor->asy.outp->index);
294
295 ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
296 if (ret) {
297 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
298 return ret;
299 }
300
301 lvl = ctrl->brightness;
302 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
303 return lvl;
304 }
305
306 static const struct nvkm_ior_func_bl
307 r535_sor_bl = {
308 .get = r535_sor_bl_get,
309 .set = r535_sor_bl_set,
310 };
311
312 static void
r535_sor_hda_eld(struct nvkm_ior *sor, int head, u8 *data, u8 size)
314 {
315 struct nvkm_disp *disp = sor->disp;
316 NV0073_CTRL_DFP_SET_ELD_AUDIO_CAP_PARAMS *ctrl;
317
318 if (WARN_ON(size > sizeof(ctrl->bufferELD)))
319 return;
320
321 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
322 NV0073_CTRL_CMD_DFP_SET_ELD_AUDIO_CAPS, sizeof(*ctrl));
323 if (WARN_ON(IS_ERR(ctrl)))
324 return;
325
326 ctrl->displayId = BIT(sor->asy.outp->index);
327 ctrl->numELDSize = size;
328 memcpy(ctrl->bufferELD, data, size);
329 ctrl->maxFreqSupported = 0; //XXX
330 ctrl->ctrl = NVDEF(NV0073, CTRL_DFP_ELD_AUDIO_CAPS_CTRL, PD, TRUE);
331 ctrl->ctrl |= NVDEF(NV0073, CTRL_DFP_ELD_AUDIO_CAPS_CTRL, ELDV, TRUE);
332 ctrl->deviceEntry = head;
333
334 WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
335 }
336
337 static void
r535_sor_hda_hpd(struct nvkm_ior *sor, int head, bool present)
339 {
340 struct nvkm_disp *disp = sor->disp;
341 NV0073_CTRL_DFP_SET_ELD_AUDIO_CAP_PARAMS *ctrl;
342
343 if (present)
344 return;
345
346 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
347 NV0073_CTRL_CMD_DFP_SET_ELD_AUDIO_CAPS, sizeof(*ctrl));
348 if (WARN_ON(IS_ERR(ctrl)))
349 return;
350
351 ctrl->displayId = BIT(sor->asy.outp->index);
352 ctrl->deviceEntry = head;
353
354 WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
355 }
356
357 static const struct nvkm_ior_func_hda
358 r535_sor_hda = {
359 .hpd = r535_sor_hda_hpd,
360 .eld = r535_sor_hda_eld,
361 };
362
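/* DP audio has separate RM controls for muting the stream and for enabling
 * audio on the DFP; r535_sor_dp_audio() mutes before disabling and unmutes
 * after enabling.
 */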
363 static void
r535_sor_dp_audio_mute(struct nvkm_ior *sor, bool mute)
365 {
366 struct nvkm_disp *disp = sor->disp;
367 NV0073_CTRL_DP_SET_AUDIO_MUTESTREAM_PARAMS *ctrl;
368
369 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
370 NV0073_CTRL_CMD_DP_SET_AUDIO_MUTESTREAM, sizeof(*ctrl));
371 if (WARN_ON(IS_ERR(ctrl)))
372 return;
373
374 ctrl->displayId = BIT(sor->asy.outp->index);
375 ctrl->mute = mute;
376 WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
377 }
378
379 static void
r535_sor_dp_audio(struct nvkm_ior *sor, int head, bool enable)
381 {
382 struct nvkm_disp *disp = sor->disp;
383 NV0073_CTRL_DFP_SET_AUDIO_ENABLE_PARAMS *ctrl;
384
385 if (!enable)
386 r535_sor_dp_audio_mute(sor, true);
387
388 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
389 NV0073_CTRL_CMD_DFP_SET_AUDIO_ENABLE, sizeof(*ctrl));
390 if (WARN_ON(IS_ERR(ctrl)))
391 return;
392
393 ctrl->displayId = BIT(sor->asy.outp->index);
394 ctrl->enable = enable;
395 WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
396
397 if (enable)
398 r535_sor_dp_audio_mute(sor, false);
399 }
400
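/* Program MST time-slot (VCPI) allocation for a head via DP_CONFIG_STREAM. */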
401 static void
r535_sor_dp_vcpi(struct nvkm_ior *sor, int head, u8 slot, u8 slot_nr, u16 pbn, u16 aligned_pbn)
403 {
404 struct nvkm_disp *disp = sor->disp;
405 struct NV0073_CTRL_CMD_DP_CONFIG_STREAM_PARAMS *ctrl;
406
407 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
408 NV0073_CTRL_CMD_DP_CONFIG_STREAM, sizeof(*ctrl));
409 if (WARN_ON(IS_ERR(ctrl)))
410 return;
411
412 ctrl->subDeviceInstance = 0;
413 ctrl->head = head;
414 ctrl->sorIndex = sor->id;
415 ctrl->dpLink = sor->asy.link == 2;
416 ctrl->bEnableOverride = 1;
417 ctrl->bMST = 1;
418 ctrl->hBlankSym = 0;
419 ctrl->vBlankSym = 0;
420 ctrl->colorFormat = 0;
421 ctrl->bEnableTwoHeadOneOr = 0;
422 ctrl->singleHeadMultistreamMode = 0;
423 ctrl->MST.slotStart = slot;
424 ctrl->MST.slotEnd = slot + slot_nr - 1;
425 ctrl->MST.PBN = pbn;
426 ctrl->MST.Timeslice = aligned_pbn;
427 ctrl->MST.sendACT = 0;
428 ctrl->MST.singleHeadMSTPipeline = 0;
429 ctrl->MST.bEnableAudioOverRightPanel = 0;
430 WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
431 }
432
433 static int
r535_sor_dp_sst(struct nvkm_ior *sor, int head, bool ef,
		u32 watermark, u32 hblanksym, u32 vblanksym)
436 {
437 struct nvkm_disp *disp = sor->disp;
438 struct NV0073_CTRL_CMD_DP_CONFIG_STREAM_PARAMS *ctrl;
439
440 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
441 NV0073_CTRL_CMD_DP_CONFIG_STREAM, sizeof(*ctrl));
442 if (IS_ERR(ctrl))
443 return PTR_ERR(ctrl);
444
445 ctrl->subDeviceInstance = 0;
446 ctrl->head = head;
447 ctrl->sorIndex = sor->id;
448 ctrl->dpLink = sor->asy.link == 2;
449 ctrl->bEnableOverride = 1;
450 ctrl->bMST = 0;
451 ctrl->hBlankSym = hblanksym;
452 ctrl->vBlankSym = vblanksym;
453 ctrl->colorFormat = 0;
454 ctrl->bEnableTwoHeadOneOr = 0;
455 ctrl->SST.bEnhancedFraming = ef;
456 ctrl->SST.tuSize = 64;
457 ctrl->SST.waterMark = watermark;
458 ctrl->SST.bEnableAudioOverRightPanel = 0;
459 return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
460 }
461
462 static const struct nvkm_ior_func_dp
463 r535_sor_dp = {
464 .sst = r535_sor_dp_sst,
465 .vcpi = r535_sor_dp_vcpi,
466 .audio = r535_sor_dp_audio,
467 };
468
469 static void
r535_sor_hdmi_scdc(struct nvkm_ior *sor, u32 khz, bool support, bool scrambling,
		   bool scrambling_low_rates)
472 {
473 struct nvkm_outp *outp = sor->asy.outp;
474 struct nvkm_disp *disp = outp->disp;
475 NV0073_CTRL_SPECIFIC_SET_HDMI_SINK_CAPS_PARAMS *ctrl;
476
477 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
478 NV0073_CTRL_CMD_SPECIFIC_SET_HDMI_SINK_CAPS, sizeof(*ctrl));
479 if (WARN_ON(IS_ERR(ctrl)))
480 return;
481
482 ctrl->displayId = BIT(outp->index);
483 ctrl->caps = 0;
484 if (support)
485 ctrl->caps |= NVDEF(NV0073_CTRL_CMD_SPECIFIC, SET_HDMI_SINK_CAPS, SCDC_SUPPORTED, TRUE);
486 if (scrambling)
487 ctrl->caps |= NVDEF(NV0073_CTRL_CMD_SPECIFIC, SET_HDMI_SINK_CAPS, GT_340MHZ_CLOCK_SUPPORTED, TRUE);
488 if (scrambling_low_rates)
489 ctrl->caps |= NVDEF(NV0073_CTRL_CMD_SPECIFIC, SET_HDMI_SINK_CAPS, LTE_340MHZ_SCRAMBLING_SUPPORTED, TRUE);
490
491 WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
492 }
493
494 static void
r535_sor_hdmi_ctrl_audio_mute(struct nvkm_outp *outp, bool mute)
496 {
497 struct nvkm_disp *disp = outp->disp;
498 NV0073_CTRL_CMD_SPECIFIC_SET_HDMI_AUDIO_MUTESTREAM_PARAMS *ctrl;
499
500 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
501 NV0073_CTRL_CMD_SPECIFIC_SET_HDMI_AUDIO_MUTESTREAM, sizeof(*ctrl));
502 if (WARN_ON(IS_ERR(ctrl)))
503 return;
504
505 ctrl->displayId = BIT(outp->index);
506 ctrl->mute = mute;
507 WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
508 }
509
510 static void
r535_sor_hdmi_ctrl_audio(struct nvkm_outp *outp, bool enable)
512 {
513 struct nvkm_disp *disp = outp->disp;
514 NV0073_CTRL_SPECIFIC_SET_OD_PACKET_PARAMS *ctrl;
515
516 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
517 NV0073_CTRL_CMD_SPECIFIC_SET_OD_PACKET, sizeof(*ctrl));
518 if (WARN_ON(IS_ERR(ctrl)))
519 return;
520
521 ctrl->displayId = BIT(outp->index);
522 ctrl->transmitControl =
523 NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, ENABLE, YES) |
524 NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, OTHER_FRAME, DISABLE) |
525 NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, SINGLE_FRAME, DISABLE) |
526 NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, ON_HBLANK, DISABLE) |
527 NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, VIDEO_FMT, SW_CONTROLLED) |
528 NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, RESERVED_LEGACY_MODE, NO);
529 ctrl->packetSize = 10;
530 ctrl->aPacket[0] = 0x03;
531 ctrl->aPacket[1] = 0x00;
532 ctrl->aPacket[2] = 0x00;
533 ctrl->aPacket[3] = enable ? 0x10 : 0x01;
534 ctrl->aPacket[4] = 0x00;
535 ctrl->aPacket[5] = 0x00;
536 ctrl->aPacket[6] = 0x00;
537 ctrl->aPacket[7] = 0x00;
538 ctrl->aPacket[8] = 0x00;
539 ctrl->aPacket[9] = 0x00;
540 WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
541 }
542
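/* HDMI audio enable: the packet and mute controls go through GSP-RM, but the
 * General Control Packet (GCP) registers are still written directly.
 */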
543 static void
r535_sor_hdmi_audio(struct nvkm_ior *sor, int head, bool enable)
545 {
546 struct nvkm_device *device = sor->disp->engine.subdev.device;
547 const u32 hdmi = head * 0x400;
548
549 r535_sor_hdmi_ctrl_audio(sor->asy.outp, enable);
550 r535_sor_hdmi_ctrl_audio_mute(sor->asy.outp, !enable);
551
552 /* General Control (GCP). */
553 nvkm_mask(device, 0x6f00c0 + hdmi, 0x00000001, 0x00000000);
554 nvkm_wr32(device, 0x6f00cc + hdmi, !enable ? 0x00000001 : 0x00000010);
555 nvkm_mask(device, 0x6f00c0 + hdmi, 0x00000001, 0x00000001);
556 }
557
558 static void
r535_sor_hdmi_ctrl(struct nvkm_ior *sor, int head, bool enable, u8 max_ac_packet, u8 rekey)
560 {
561 struct nvkm_disp *disp = sor->disp;
562 NV0073_CTRL_SPECIFIC_SET_HDMI_ENABLE_PARAMS *ctrl;
563
564 if (!enable)
565 return;
566
567 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
568 NV0073_CTRL_CMD_SPECIFIC_SET_HDMI_ENABLE, sizeof(*ctrl));
569 if (WARN_ON(IS_ERR(ctrl)))
570 return;
571
572 ctrl->displayId = BIT(sor->asy.outp->index);
573 ctrl->enable = enable;
574
575 WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
576 }
577
578 static const struct nvkm_ior_func_hdmi
579 r535_sor_hdmi = {
580 .ctrl = r535_sor_hdmi_ctrl,
581 .scdc = r535_sor_hdmi_scdc,
582 /*TODO: SF_USER -> KMS. */
583 .infoframe_avi = gv100_sor_hdmi_infoframe_avi,
584 .infoframe_vsi = gv100_sor_hdmi_infoframe_vsi,
585 .audio = r535_sor_hdmi_audio,
586 };
587
588 static const struct nvkm_ior_func
589 r535_sor = {
590 .hdmi = &r535_sor_hdmi,
591 .dp = &r535_sor_dp,
592 .hda = &r535_sor_hda,
593 .bl = &r535_sor_bl,
594 };
595
596 static int
r535_sor_new(struct nvkm_disp *disp, int id)
598 {
599 return nvkm_ior_new_(&r535_sor, disp, SOR, id, true/*XXX: hda cap*/);
600 }
601
602 static int
r535_sor_cnt(struct nvkm_disp *disp, unsigned long *pmask)
604 {
605 *pmask = 0xf;
606 return 4;
607 }
608
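/* Vblank notification still uses direct register access: 0x611d80 appears to
 * be the per-head timing interrupt enable and 0x611800 the acknowledge
 * register (inferred from their use in r535_disp_intr() below).
 */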
609 static void
r535_head_vblank_put(struct nvkm_head *head)
611 {
612 struct nvkm_device *device = head->disp->engine.subdev.device;
613
614 nvkm_mask(device, 0x611d80 + (head->id * 4), 0x00000002, 0x00000000);
615 }
616
617 static void
r535_head_vblank_get(struct nvkm_head *head)
619 {
620 struct nvkm_device *device = head->disp->engine.subdev.device;
621
622 nvkm_wr32(device, 0x611800 + (head->id * 4), 0x00000002);
623 nvkm_mask(device, 0x611d80 + (head->id * 4), 0x00000002, 0x00000002);
624 }
625
626 static void
r535_head_state(struct nvkm_head *head, struct nvkm_head_state *state)
628 {
629 }
630
631 static const struct nvkm_head_func
632 r535_head = {
633 .state = r535_head_state,
634 .vblank_get = r535_head_vblank_get,
635 .vblank_put = r535_head_vblank_put,
636 };
637
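/* Query connector data for a displayId from GSP-RM and create (or reuse) the
 * matching nvkm_conn, instead of parsing the VBIOS DCB connector table.
 */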
638 static struct nvkm_conn *
r535_conn_new(struct nvkm_disp *disp, u32 id)
640 {
641 NV0073_CTRL_SPECIFIC_GET_CONNECTOR_DATA_PARAMS *ctrl;
642 struct nvbios_connE dcbE = {};
643 struct nvkm_conn *conn;
644 int ret, index;
645
646 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
647 NV0073_CTRL_CMD_SPECIFIC_GET_CONNECTOR_DATA, sizeof(*ctrl));
648 if (IS_ERR(ctrl))
649 return (void *)ctrl;
650
651 ctrl->subDeviceInstance = 0;
652 ctrl->displayId = BIT(id);
653
654 ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
655 if (ret) {
656 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
657 return ERR_PTR(ret);
658 }
659
660 list_for_each_entry(conn, &disp->conns, head) {
661 if (conn->index == ctrl->data[0].index) {
662 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
663 return conn;
664 }
665 }
666
667 dcbE.type = ctrl->data[0].type;
668 index = ctrl->data[0].index;
669 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
670
671 ret = nvkm_conn_new(disp, index, &dcbE, &conn);
672 if (ret)
673 return ERR_PTR(ret);
674
675 list_add_tail(&conn->head, &disp->conns);
676 return conn;
677 }
678
679 static void
r535_outp_release(struct nvkm_outp *outp)
681 {
682 outp->disp->rm.assigned_sors &= ~BIT(outp->ior->id);
683 outp->ior->asy.outp = NULL;
684 outp->ior = NULL;
685 }
686
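/* SOR assignment is owned by GSP-RM: request one via DFP_ASSIGN_SOR, excluding
 * SORs already claimed, then record the assignment in rm.assigned_sors.
 */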
687 static int
r535_outp_acquire(struct nvkm_outp *outp, bool hda)
689 {
690 struct nvkm_disp *disp = outp->disp;
691 struct nvkm_ior *ior;
692 NV0073_CTRL_DFP_ASSIGN_SOR_PARAMS *ctrl;
693 int ret, or;
694
695 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
696 NV0073_CTRL_CMD_DFP_ASSIGN_SOR, sizeof(*ctrl));
697 if (IS_ERR(ctrl))
698 return PTR_ERR(ctrl);
699
700 ctrl->subDeviceInstance = 0;
701 ctrl->displayId = BIT(outp->index);
702 ctrl->sorExcludeMask = disp->rm.assigned_sors;
703 if (hda)
704 ctrl->flags |= NVDEF(NV0073_CTRL, DFP_ASSIGN_SOR_FLAGS, AUDIO, OPTIMAL);
705
706 ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
707 if (ret) {
708 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
709 return ret;
710 }
711
712 for (or = 0; or < ARRAY_SIZE(ctrl->sorAssignListWithTag); or++) {
713 if (ctrl->sorAssignListWithTag[or].displayMask & BIT(outp->index)) {
714 disp->rm.assigned_sors |= BIT(or);
715 break;
716 }
717 }
718
719 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
720
721 if (WARN_ON(or == ARRAY_SIZE(ctrl->sorAssignListWithTag)))
722 return -EINVAL;
723
724 ior = nvkm_ior_find(disp, SOR, or);
725 if (WARN_ON(!ior))
726 return -EINVAL;
727
728 nvkm_outp_acquire_ior(outp, NVKM_OUTP_USER, ior);
729 return 0;
730 }
731
732 static int
r535_disp_head_displayid(struct nvkm_disp *disp, int head, u32 *displayid)
734 {
735 NV0073_CTRL_SYSTEM_GET_ACTIVE_PARAMS *ctrl;
736 int ret;
737
738 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
739 NV0073_CTRL_CMD_SYSTEM_GET_ACTIVE, sizeof(*ctrl));
740 if (IS_ERR(ctrl))
741 return PTR_ERR(ctrl);
742
743 ctrl->subDeviceInstance = 0;
744 ctrl->head = head;
745
746 ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
747 if (ret) {
748 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
749 return ret;
750 }
751
752 *displayid = ctrl->displayId;
753 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
754 return 0;
755 }
756
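/* Work out whether the boot-time display state programmed by GSP already
 * drives this output, and if so reconstruct the SOR's armed protocol/link so
 * the existing configuration can be inherited rather than torn down.
 */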
757 static struct nvkm_ior *
r535_outp_inherit(struct nvkm_outp *outp)
759 {
760 struct nvkm_disp *disp = outp->disp;
761 struct nvkm_head *head;
762 u32 displayid;
763 int ret;
764
765 list_for_each_entry(head, &disp->heads, head) {
766 ret = r535_disp_head_displayid(disp, head->id, &displayid);
767 if (WARN_ON(ret))
768 return NULL;
769
770 if (displayid == BIT(outp->index)) {
771 NV0073_CTRL_SPECIFIC_OR_GET_INFO_PARAMS *ctrl;
772 u32 id, proto;
773 struct nvkm_ior *ior;
774
775 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
776 NV0073_CTRL_CMD_SPECIFIC_OR_GET_INFO,
777 sizeof(*ctrl));
778 if (IS_ERR(ctrl))
779 return NULL;
780
781 ctrl->subDeviceInstance = 0;
782 ctrl->displayId = displayid;
783
784 ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
785 if (ret) {
786 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
787 return NULL;
788 }
789
790 id = ctrl->index;
791 proto = ctrl->protocol;
792 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
793
794 ior = nvkm_ior_find(disp, SOR, id);
795 if (WARN_ON(!ior))
796 return NULL;
797
798 switch (proto) {
799 case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_SINGLE_TMDS_A:
800 ior->arm.proto = TMDS;
801 ior->arm.link = 1;
802 break;
803 case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_SINGLE_TMDS_B:
804 ior->arm.proto = TMDS;
805 ior->arm.link = 2;
806 break;
807 case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DUAL_TMDS:
808 ior->arm.proto = TMDS;
809 ior->arm.link = 3;
810 break;
811 case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DP_A:
812 ior->arm.proto = DP;
813 ior->arm.link = 1;
814 break;
815 case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DP_B:
816 ior->arm.proto = DP;
817 ior->arm.link = 2;
818 break;
819 default:
820 WARN_ON(1);
821 return NULL;
822 }
823
824 ior->arm.proto_evo = proto;
825 ior->arm.head = BIT(head->id);
826 disp->rm.assigned_sors |= BIT(ior->id);
827 return ior;
828 }
829 }
830
831 return NULL;
832 }
833
834 static int
r535_outp_dfp_get_info(struct nvkm_outp *outp)
836 {
837 NV0073_CTRL_DFP_GET_INFO_PARAMS *ctrl;
838 struct nvkm_disp *disp = outp->disp;
839 int ret;
840
841 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom, NV0073_CTRL_CMD_DFP_GET_INFO, sizeof(*ctrl));
842 if (IS_ERR(ctrl))
843 return PTR_ERR(ctrl);
844
845 ctrl->displayId = BIT(outp->index);
846
847 ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
848 if (ret) {
849 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
850 return ret;
851 }
852
853 nvkm_debug(&disp->engine.subdev, "DFP %08x: flags:%08x flags2:%08x\n",
854 ctrl->displayId, ctrl->flags, ctrl->flags2);
855
856 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
857 return 0;
858 }
859
860 static int
r535_outp_detect(struct nvkm_outp *outp)
862 {
863 NV0073_CTRL_SYSTEM_GET_CONNECT_STATE_PARAMS *ctrl;
864 struct nvkm_disp *disp = outp->disp;
865 int ret;
866
867 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
868 NV0073_CTRL_CMD_SYSTEM_GET_CONNECT_STATE, sizeof(*ctrl));
869 if (IS_ERR(ctrl))
870 return PTR_ERR(ctrl);
871
872 ctrl->subDeviceInstance = 0;
873 ctrl->displayMask = BIT(outp->index);
874
875 ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
876 if (ret) {
877 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
878 return ret;
879 }
880
881 if (ctrl->displayMask & BIT(outp->index)) {
882 ret = r535_outp_dfp_get_info(outp);
883 if (ret == 0)
884 ret = 1;
885 } else {
886 ret = 0;
887 }
888
889 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
890 return ret;
891 }
892
893 static int
r535_dp_mst_id_put(struct nvkm_outp *outp, u32 id)
895 {
896 NV0073_CTRL_CMD_DP_TOPOLOGY_FREE_DISPLAYID_PARAMS *ctrl;
897 struct nvkm_disp *disp = outp->disp;
898
899 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
900 NV0073_CTRL_CMD_DP_TOPOLOGY_FREE_DISPLAYID, sizeof(*ctrl));
901 if (IS_ERR(ctrl))
902 return PTR_ERR(ctrl);
903
904 ctrl->subDeviceInstance = 0;
905 ctrl->displayId = id;
906 return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
907 }
908
909 static int
r535_dp_mst_id_get(struct nvkm_outp *outp, u32 *pid)
911 {
912 NV0073_CTRL_CMD_DP_TOPOLOGY_ALLOCATE_DISPLAYID_PARAMS *ctrl;
913 struct nvkm_disp *disp = outp->disp;
914 int ret;
915
916 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
917 NV0073_CTRL_CMD_DP_TOPOLOGY_ALLOCATE_DISPLAYID,
918 sizeof(*ctrl));
919 if (IS_ERR(ctrl))
920 return PTR_ERR(ctrl);
921
922 ctrl->subDeviceInstance = 0;
923 ctrl->displayId = BIT(outp->index);
924 ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
925 if (ret) {
926 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
927 return ret;
928 }
929
930 *pid = ctrl->displayIdAssigned;
931 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
932 return 0;
933 }
934
935 static int
r535_dp_drive(struct nvkm_outp *outp, u8 lanes, u8 pe[4], u8 vs[4])
937 {
938 NV0073_CTRL_DP_LANE_DATA_PARAMS *ctrl;
939 struct nvkm_disp *disp = outp->disp;
940
941 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
942 NV0073_CTRL_CMD_DP_SET_LANE_DATA, sizeof(*ctrl));
943 if (IS_ERR(ctrl))
944 return PTR_ERR(ctrl);
945
946 ctrl->displayId = BIT(outp->index);
947 ctrl->numLanes = lanes;
948 for (int i = 0; i < lanes; i++)
949 ctrl->data[i] = NVVAL(NV0073_CTRL, DP_LANE_DATA, PREEMPHASIS, pe[i]) |
950 NVVAL(NV0073_CTRL, DP_LANE_DATA, DRIVECURRENT, vs[i]);
951
952 return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
953 }
954
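/* Link training is performed by GSP-RM via NV0073_CTRL_CMD_DP_CTRL.  RM may
 * ask for a retry (-EAGAIN plus a delay), e.g. for an eDP panel that has not
 * powered up yet, in which case the delay is honoured before trying again.
 */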
955 static int
r535_dp_train_target(struct nvkm_outp *outp, u8 target, bool mst, u8 link_nr, u8 link_bw)
957 {
958 struct nvkm_disp *disp = outp->disp;
959 NV0073_CTRL_DP_CTRL_PARAMS *ctrl;
960 int ret, retries;
961 u32 cmd, data;
962
963 cmd = NVDEF(NV0073_CTRL, DP_CMD, SET_LANE_COUNT, TRUE) |
964 NVDEF(NV0073_CTRL, DP_CMD, SET_LINK_BW, TRUE) |
965 NVDEF(NV0073_CTRL, DP_CMD, TRAIN_PHY_REPEATER, YES);
966 data = NVVAL(NV0073_CTRL, DP_DATA, SET_LANE_COUNT, link_nr) |
967 NVVAL(NV0073_CTRL, DP_DATA, SET_LINK_BW, link_bw) |
968 NVVAL(NV0073_CTRL, DP_DATA, TARGET, target);
969
970 if (mst)
971 cmd |= NVDEF(NV0073_CTRL, DP_CMD, SET_FORMAT_MODE, MULTI_STREAM);
972
973 if (outp->dp.dpcd[DPCD_RC02] & DPCD_RC02_ENHANCED_FRAME_CAP)
974 cmd |= NVDEF(NV0073_CTRL, DP_CMD, SET_ENHANCED_FRAMING, TRUE);
975
976 if (target == 0 &&
977 (outp->dp.dpcd[DPCD_RC02] & 0x20) &&
978 !(outp->dp.dpcd[DPCD_RC03] & DPCD_RC03_TPS4_SUPPORTED))
979 cmd |= NVDEF(NV0073_CTRL, DP_CMD, POST_LT_ADJ_REQ_GRANTED, YES);
980
981 /* We should retry up to 3 times, but only if GSP asks politely */
982 for (retries = 0; retries < 3; ++retries) {
983 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom, NV0073_CTRL_CMD_DP_CTRL,
984 sizeof(*ctrl));
985 if (IS_ERR(ctrl))
986 return PTR_ERR(ctrl);
987
988 ctrl->subDeviceInstance = 0;
989 ctrl->displayId = BIT(outp->index);
990 ctrl->retryTimeMs = 0;
991 ctrl->cmd = cmd;
992 ctrl->data = data;
993
994 ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
995 if (ret == -EAGAIN && ctrl->retryTimeMs) {
996 /*
997 * Device (likely an eDP panel) isn't ready yet, wait for the time specified
998 * by GSP before retrying again
999 */
1000 nvkm_debug(&disp->engine.subdev,
1001 "Waiting %dms for GSP LT panel delay before retrying\n",
1002 ctrl->retryTimeMs);
1003 msleep(ctrl->retryTimeMs);
1004 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1005 } else {
1006 /* GSP didn't say to retry, or we were successful */
1007 if (ctrl->err)
1008 ret = -EIO;
1009 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1010 break;
1011 }
1012 }
1013
1014 return ret;
1015 }
1016
1017 static int
r535_dp_train(struct nvkm_outp *outp, bool retrain)
1019 {
1020 for (int target = outp->dp.lttprs; target >= 0; target--) {
1021 int ret = r535_dp_train_target(outp, target, outp->dp.lt.mst,
1022 outp->dp.lt.nr,
1023 outp->dp.lt.bw);
1024 if (ret)
1025 return ret;
1026 }
1027
1028 return 0;
1029 }
1030
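/* Pass the sink's indexed link-rate table (eDP) to GSP-RM so training uses
 * the sink-reported rates rather than the standard ones.
 */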
1031 static int
r535_dp_rates(struct nvkm_outp *outp)
1033 {
1034 NV0073_CTRL_CMD_DP_CONFIG_INDEXED_LINK_RATES_PARAMS *ctrl;
1035 struct nvkm_disp *disp = outp->disp;
1036
1037 if (outp->conn->info.type != DCB_CONNECTOR_eDP ||
1038 !outp->dp.rates || outp->dp.rate[0].dpcd < 0)
1039 return 0;
1040
1041 if (WARN_ON(outp->dp.rates > ARRAY_SIZE(ctrl->linkRateTbl)))
1042 return -EINVAL;
1043
1044 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1045 NV0073_CTRL_CMD_DP_CONFIG_INDEXED_LINK_RATES, sizeof(*ctrl));
1046 if (IS_ERR(ctrl))
1047 return PTR_ERR(ctrl);
1048
1049 ctrl->displayId = BIT(outp->index);
1050 for (int i = 0; i < outp->dp.rates; i++)
1051 ctrl->linkRateTbl[outp->dp.rate[i].dpcd] = outp->dp.rate[i].rate * 10 / 200;
1052
1053 return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
1054 }
1055
1056 static int
r535_dp_aux_xfer(struct nvkm_outp *outp, u8 type, u32 addr, u8 *data, u8 *psize)
1058 {
1059 struct nvkm_disp *disp = outp->disp;
1060 NV0073_CTRL_DP_AUXCH_CTRL_PARAMS *ctrl;
1061 u8 size = *psize;
1062 int ret;
1063
1064 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom, NV0073_CTRL_CMD_DP_AUXCH_CTRL, sizeof(*ctrl));
1065 if (IS_ERR(ctrl))
1066 return PTR_ERR(ctrl);
1067
1068 ctrl->subDeviceInstance = 0;
1069 ctrl->displayId = BIT(outp->index);
1070 ctrl->bAddrOnly = !size;
1071 ctrl->cmd = type;
1072 if (ctrl->bAddrOnly) {
1073 ctrl->cmd = NVDEF_SET(ctrl->cmd, NV0073_CTRL, DP_AUXCH_CMD, REQ_TYPE, WRITE);
1074 ctrl->cmd = NVDEF_SET(ctrl->cmd, NV0073_CTRL, DP_AUXCH_CMD, I2C_MOT, FALSE);
1075 }
1076 ctrl->addr = addr;
1077 ctrl->size = !ctrl->bAddrOnly ? (size - 1) : 0;
1078 memcpy(ctrl->data, data, size);
1079
1080 ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
1081 if (ret) {
1082 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1083 return ret;
1084 }
1085
1086 memcpy(data, ctrl->data, size);
1087 *psize = ctrl->size;
1088 ret = ctrl->replyType;
1089 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1090 return ret;
1091 }
1092
1093 static int
r535_dp_aux_pwr(struct nvkm_outp *outp, bool pu)
1095 {
1096 return 0;
1097 }
1098
1099 static void
r535_dp_release(struct nvkm_outp *outp)
1101 {
1102 if (!outp->dp.lt.bw) {
1103 if (!WARN_ON(!outp->dp.rates))
1104 outp->dp.lt.bw = outp->dp.rate[0].rate / 27000;
1105 else
1106 outp->dp.lt.bw = 0x06;
1107 }
1108
1109 outp->dp.lt.nr = 0;
1110
1111 r535_dp_train_target(outp, 0, outp->dp.lt.mst, outp->dp.lt.nr, outp->dp.lt.bw);
1112 r535_outp_release(outp);
1113 }
1114
1115 static int
r535_dp_acquire(struct nvkm_outp *outp, bool hda)
1117 {
1118 int ret;
1119
1120 ret = r535_outp_acquire(outp, hda);
1121 if (ret)
1122 return ret;
1123
1124 return 0;
1125 }
1126
1127 static const struct nvkm_outp_func
1128 r535_dp = {
1129 .detect = r535_outp_detect,
1130 .inherit = r535_outp_inherit,
1131 .acquire = r535_dp_acquire,
1132 .release = r535_dp_release,
1133 .dp.aux_pwr = r535_dp_aux_pwr,
1134 .dp.aux_xfer = r535_dp_aux_xfer,
1135 .dp.mst_id_get = r535_dp_mst_id_get,
1136 .dp.mst_id_put = r535_dp_mst_id_put,
1137 .dp.rates = r535_dp_rates,
1138 .dp.train = r535_dp_train,
1139 .dp.drive = r535_dp_drive,
1140 };
1141
1142 static int
r535_tmds_edid_get(struct nvkm_outp *outp, u8 *data, u16 *psize)
1144 {
1145 NV0073_CTRL_SPECIFIC_GET_EDID_V2_PARAMS *ctrl;
1146 struct nvkm_disp *disp = outp->disp;
1147 int ret = -E2BIG;
1148
1149 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1150 NV0073_CTRL_CMD_SPECIFIC_GET_EDID_V2, sizeof(*ctrl));
1151 if (IS_ERR(ctrl))
1152 return PTR_ERR(ctrl);
1153
1154 ctrl->subDeviceInstance = 0;
1155 ctrl->displayId = BIT(outp->index);
1156
1157 ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
1158 if (ret) {
1159 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1160 return ret;
1161 }
1162
1163 ret = -E2BIG;
1164 if (ctrl->bufferSize <= *psize) {
1165 memcpy(data, ctrl->edidBuffer, ctrl->bufferSize);
1166 *psize = ctrl->bufferSize;
1167 ret = 0;
1168 }
1169
1170 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1171 return ret;
1172 }
1173
1174 static const struct nvkm_outp_func
1175 r535_tmds = {
1176 .detect = r535_outp_detect,
1177 .inherit = r535_outp_inherit,
1178 .acquire = r535_outp_acquire,
1179 .release = r535_outp_release,
1180 .edid_get = r535_tmds_edid_get,
1181 };
1182
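/* Create an nvkm_outp for each displayId reported by GSP-RM, translating the
 * RM OR type/protocol into a DCB-style TMDS or DP entry and, for DP, querying
 * link capabilities (max rate, MST, watermark limits).
 */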
1183 static int
r535_outp_new(struct nvkm_disp *disp, u32 id)
1185 {
1186 NV0073_CTRL_SPECIFIC_OR_GET_INFO_PARAMS *ctrl;
1187 enum nvkm_ior_proto proto;
1188 struct dcb_output dcbE = {};
1189 struct nvkm_conn *conn;
1190 struct nvkm_outp *outp;
1191 u8 locn, link = 0;
1192 int ret;
1193
1194 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1195 NV0073_CTRL_CMD_SPECIFIC_OR_GET_INFO, sizeof(*ctrl));
1196 if (IS_ERR(ctrl))
1197 return PTR_ERR(ctrl);
1198
1199 ctrl->subDeviceInstance = 0;
1200 ctrl->displayId = BIT(id);
1201
1202 ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
1203 if (ret) {
1204 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1205 return ret;
1206 }
1207
1208 switch (ctrl->type) {
1209 case NV0073_CTRL_SPECIFIC_OR_TYPE_NONE:
1210 return 0;
1211 case NV0073_CTRL_SPECIFIC_OR_TYPE_SOR:
1212 switch (ctrl->protocol) {
1213 case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_SINGLE_TMDS_A:
1214 proto = TMDS;
1215 link = 1;
1216 break;
1217 case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_SINGLE_TMDS_B:
1218 proto = TMDS;
1219 link = 2;
1220 break;
1221 case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DUAL_TMDS:
1222 proto = TMDS;
1223 link = 3;
1224 break;
1225 case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DP_A:
1226 proto = DP;
1227 link = 1;
1228 break;
1229 case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DP_B:
1230 proto = DP;
1231 link = 2;
1232 break;
1233 default:
1234 WARN_ON(1);
1235 return -EINVAL;
1236 }
1237
1238 break;
1239 default:
1240 WARN_ON(1);
1241 return -EINVAL;
1242 }
1243
1244 locn = ctrl->location;
1245 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1246
1247 conn = r535_conn_new(disp, id);
1248 if (IS_ERR(conn))
1249 return PTR_ERR(conn);
1250
1251 switch (proto) {
1252 case TMDS: dcbE.type = DCB_OUTPUT_TMDS; break;
1253 case DP: dcbE.type = DCB_OUTPUT_DP; break;
1254 default:
1255 WARN_ON(1);
1256 return -EINVAL;
1257 }
1258
1259 dcbE.location = locn;
1260 dcbE.connector = conn->index;
1261 dcbE.heads = disp->head.mask;
1262 dcbE.i2c_index = 0xff;
1263 dcbE.link = dcbE.sorconf.link = link;
1264
1265 if (proto == TMDS) {
1266 ret = nvkm_outp_new_(&r535_tmds, disp, id, &dcbE, &outp);
1267 if (ret)
1268 return ret;
1269 } else {
1270 NV0073_CTRL_CMD_DP_GET_CAPS_PARAMS *ctrl;
1271 bool mst, wm;
1272
1273 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1274 NV0073_CTRL_CMD_DP_GET_CAPS, sizeof(*ctrl));
1275 if (IS_ERR(ctrl))
1276 return PTR_ERR(ctrl);
1277
1278 ctrl->sorIndex = ~0;
1279
1280 ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
1281 if (ret) {
1282 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1283 return ret;
1284 }
1285
1286 switch (NVVAL_GET(ctrl->maxLinkRate, NV0073_CTRL_CMD, DP_GET_CAPS, MAX_LINK_RATE)) {
1287 case NV0073_CTRL_CMD_DP_GET_CAPS_MAX_LINK_RATE_1_62:
1288 dcbE.dpconf.link_bw = 0x06;
1289 break;
1290 case NV0073_CTRL_CMD_DP_GET_CAPS_MAX_LINK_RATE_2_70:
1291 dcbE.dpconf.link_bw = 0x0a;
1292 break;
1293 case NV0073_CTRL_CMD_DP_GET_CAPS_MAX_LINK_RATE_5_40:
1294 dcbE.dpconf.link_bw = 0x14;
1295 break;
1296 case NV0073_CTRL_CMD_DP_GET_CAPS_MAX_LINK_RATE_8_10:
1297 dcbE.dpconf.link_bw = 0x1e;
1298 break;
1299 default:
1300 dcbE.dpconf.link_bw = 0x00;
1301 break;
1302 }
1303
1304 mst = ctrl->bIsMultistreamSupported;
1305 wm = ctrl->bHasIncreasedWatermarkLimits;
1306 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1307
1308 if (WARN_ON(!dcbE.dpconf.link_bw))
1309 return -EINVAL;
1310
1311 dcbE.dpconf.link_nr = 4;
1312
1313 ret = nvkm_outp_new_(&r535_dp, disp, id, &dcbE, &outp);
1314 if (ret)
1315 return ret;
1316
1317 outp->dp.mst = mst;
1318 outp->dp.increased_wm = wm;
1319 }
1320
1321
1322 outp->conn = conn;
1323 list_add_tail(&outp->head, &disp->outps);
1324 return 0;
1325 }
1326
1327 static void
r535_disp_irq(struct nvkm_gsp_event *event, void *repv, u32 repc)
1329 {
1330 struct nvkm_disp *disp = container_of(event, typeof(*disp), rm.irq);
1331 Nv2080DpIrqNotification *irq = repv;
1332
1333 if (WARN_ON(repc < sizeof(*irq)))
1334 return;
1335
1336 nvkm_debug(&disp->engine.subdev, "event: dp irq displayId %08x\n", irq->displayId);
1337
1338 if (irq->displayId)
1339 nvkm_event_ntfy(&disp->rm.event, fls(irq->displayId) - 1, NVKM_DPYID_IRQ);
1340 }
1341
1342 static void
r535_disp_hpd(struct nvkm_gsp_event *event, void *repv, u32 repc)
1344 {
1345 struct nvkm_disp *disp = container_of(event, typeof(*disp), rm.hpd);
1346 Nv2080HotplugNotification *hpd = repv;
1347
1348 if (WARN_ON(repc < sizeof(*hpd)))
1349 return;
1350
1351 nvkm_debug(&disp->engine.subdev, "event: hpd plug %08x unplug %08x\n",
1352 hpd->plugDisplayMask, hpd->unplugDisplayMask);
1353
1354 for (int i = 0; i < 31; i++) {
1355 u32 mask = 0;
1356
1357 if (hpd->plugDisplayMask & BIT(i))
1358 mask |= NVKM_DPYID_PLUG;
1359 if (hpd->unplugDisplayMask & BIT(i))
1360 mask |= NVKM_DPYID_UNPLUG;
1361
1362 if (mask)
1363 nvkm_event_ntfy(&disp->rm.event, i, mask);
1364 }
1365 }
1366
1367 static const struct nvkm_event_func
1368 r535_disp_event = {
1369 };
1370
1371 static void
r535_disp_intr_head_timing(struct nvkm_disp *disp, int head)
1373 {
1374 struct nvkm_subdev *subdev = &disp->engine.subdev;
1375 struct nvkm_device *device = subdev->device;
1376 u32 stat = nvkm_rd32(device, 0x611c00 + (head * 0x04));
1377
1378 if (stat & 0x00000002) {
1379 nvkm_disp_vblank(disp, head);
1380
1381 nvkm_wr32(device, 0x611800 + (head * 0x04), 0x00000002);
1382 }
1383 }
1384
1385 static irqreturn_t
r535_disp_intr(struct nvkm_inth *inth)
1387 {
1388 struct nvkm_disp *disp = container_of(inth, typeof(*disp), engine.subdev.inth);
1389 struct nvkm_subdev *subdev = &disp->engine.subdev;
1390 struct nvkm_device *device = subdev->device;
1391 unsigned long mask = nvkm_rd32(device, 0x611ec0) & 0x000000ff;
1392 int head;
1393
1394 for_each_set_bit(head, &mask, 8)
1395 r535_disp_intr_head_timing(disp, head);
1396
1397 return IRQ_HANDLED;
1398 }
1399
1400 static void
r535_disp_fini(struct nvkm_disp *disp, bool suspend)
1402 {
1403 if (!disp->engine.subdev.use.enabled)
1404 return;
1405
1406 nvkm_gsp_rm_free(&disp->rm.object);
1407
1408 if (!suspend) {
1409 nvkm_gsp_event_dtor(&disp->rm.irq);
1410 nvkm_gsp_event_dtor(&disp->rm.hpd);
1411 nvkm_event_fini(&disp->rm.event);
1412
1413 nvkm_gsp_rm_free(&disp->rm.objcom);
1414 nvkm_gsp_device_dtor(&disp->rm.device);
1415 nvkm_gsp_client_dtor(&disp->rm.client);
1416 }
1417 }
1418
1419 static int
r535_disp_init(struct nvkm_disp *disp)
1421 {
1422 int ret;
1423
1424 ret = nvkm_gsp_rm_alloc(&disp->rm.device.object, disp->func->root.oclass << 16,
1425 disp->func->root.oclass, 0, &disp->rm.object);
1426 if (ret)
1427 return ret;
1428
1429 return 0;
1430 }
1431
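/* One-time setup: register display instance memory with GSP-RM, create the RM
 * client/device/display objects, enumerate heads/SORs/outputs from RM, and
 * hook up hotplug and DP-IRQ notifications plus the vblank interrupt handler.
 */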
1432 static int
r535_disp_oneinit(struct nvkm_disp *disp)
1434 {
1435 struct nvkm_device *device = disp->engine.subdev.device;
1436 struct nvkm_gsp *gsp = device->gsp;
1437 NV2080_CTRL_INTERNAL_DISPLAY_WRITE_INST_MEM_PARAMS *ctrl;
1438 int ret, i;
1439
1440 /* RAMIN. */
1441 ret = nvkm_gpuobj_new(device, 0x10000, 0x10000, false, NULL, &disp->inst);
1442 if (ret)
1443 return ret;
1444
1445 if (WARN_ON(nvkm_memory_target(disp->inst->memory) != NVKM_MEM_TARGET_VRAM))
1446 return -EINVAL;
1447
1448 ctrl = nvkm_gsp_rm_ctrl_get(&gsp->internal.device.subdevice,
1449 NV2080_CTRL_CMD_INTERNAL_DISPLAY_WRITE_INST_MEM,
1450 sizeof(*ctrl));
1451 if (IS_ERR(ctrl))
1452 return PTR_ERR(ctrl);
1453
1454 ctrl->instMemPhysAddr = nvkm_memory_addr(disp->inst->memory);
1455 ctrl->instMemSize = nvkm_memory_size(disp->inst->memory);
1456 ctrl->instMemAddrSpace = ADDR_FBMEM;
1457 ctrl->instMemCpuCacheAttr = NV_MEMORY_WRITECOMBINED;
1458
1459 ret = nvkm_gsp_rm_ctrl_wr(&gsp->internal.device.subdevice, ctrl);
1460 if (ret)
1461 return ret;
1462
1463 /* OBJs. */
1464 ret = nvkm_gsp_client_device_ctor(gsp, &disp->rm.client, &disp->rm.device);
1465 if (ret)
1466 return ret;
1467
1468 ret = nvkm_gsp_rm_alloc(&disp->rm.device.object, 0x00730000, NV04_DISPLAY_COMMON, 0,
1469 &disp->rm.objcom);
1470 if (ret)
1471 return ret;
1472
1473 {
1474 NV2080_CTRL_INTERNAL_DISPLAY_GET_STATIC_INFO_PARAMS *ctrl;
1475
1476 ctrl = nvkm_gsp_rm_ctrl_rd(&gsp->internal.device.subdevice,
1477 NV2080_CTRL_CMD_INTERNAL_DISPLAY_GET_STATIC_INFO,
1478 sizeof(*ctrl));
1479 if (IS_ERR(ctrl))
1480 return PTR_ERR(ctrl);
1481
1482 disp->wndw.mask = ctrl->windowPresentMask;
1483 disp->wndw.nr = fls(disp->wndw.mask);
1484 nvkm_gsp_rm_ctrl_done(&gsp->internal.device.subdevice, ctrl);
1485 }
1486
1487 /* */
1488 {
1489 #if defined(CONFIG_ACPI) && defined(CONFIG_X86)
1490 NV2080_CTRL_INTERNAL_INIT_BRIGHTC_STATE_LOAD_PARAMS *ctrl;
1491 struct nvkm_gsp_object *subdevice = &disp->rm.client.gsp->internal.device.subdevice;
1492
1493 ctrl = nvkm_gsp_rm_ctrl_get(subdevice,
1494 NV2080_CTRL_CMD_INTERNAL_INIT_BRIGHTC_STATE_LOAD,
1495 sizeof(*ctrl));
1496 if (IS_ERR(ctrl))
1497 return PTR_ERR(ctrl);
1498
1499 ctrl->status = 0x56; /* NV_ERR_NOT_SUPPORTED */
1500
1501 {
1502 const guid_t NBCI_DSM_GUID =
1503 GUID_INIT(0xD4A50B75, 0x65C7, 0x46F7,
1504 0xBF, 0xB7, 0x41, 0x51, 0x4C, 0xEA, 0x02, 0x44);
1505 u64 NBCI_DSM_REV = 0x00000102;
1506 const guid_t NVHG_DSM_GUID =
1507 GUID_INIT(0x9D95A0A0, 0x0060, 0x4D48,
1508 0xB3, 0x4D, 0x7E, 0x5F, 0xEA, 0x12, 0x9F, 0xD4);
1509 u64 NVHG_DSM_REV = 0x00000102;
1510 acpi_handle handle = ACPI_HANDLE(device->dev);
1511
1512 if (handle && acpi_has_method(handle, "_DSM")) {
1513 bool nbci = acpi_check_dsm(handle, &NBCI_DSM_GUID, NBCI_DSM_REV,
1514 1ULL << 0x00000014);
1515 bool nvhg = acpi_check_dsm(handle, &NVHG_DSM_GUID, NVHG_DSM_REV,
1516 1ULL << 0x00000014);
1517
1518 if (nbci || nvhg) {
1519 union acpi_object argv4 = {
1520 .buffer.type = ACPI_TYPE_BUFFER,
1521 .buffer.length = sizeof(ctrl->backLightData),
1522 .buffer.pointer = kmalloc(argv4.buffer.length, GFP_KERNEL),
1523 }, *obj;
1524
1525 obj = acpi_evaluate_dsm(handle, nbci ? &NBCI_DSM_GUID : &NVHG_DSM_GUID,
1526 0x00000102, 0x14, &argv4);
1527 if (!obj) {
1528 acpi_handle_info(handle, "failed to evaluate _DSM\n");
1529 } else {
1530 for (int i = 0; i < obj->package.count; i++) {
1531 union acpi_object *elt = &obj->package.elements[i];
1532 u32 size;
1533
1534 if (elt->integer.value & ~0xffffffffULL)
1535 size = 8;
1536 else
1537 size = 4;
1538
1539 memcpy(&ctrl->backLightData[ctrl->backLightDataSize], &elt->integer.value, size);
1540 ctrl->backLightDataSize += size;
1541 }
1542
1543 ctrl->status = 0;
1544 ACPI_FREE(obj);
1545 }
1546
1547 kfree(argv4.buffer.pointer);
1548 }
1549 }
1550 }
1551
1552 ret = nvkm_gsp_rm_ctrl_wr(subdevice, ctrl);
1553 if (ret)
1554 return ret;
1555 #endif
1556 }
1557
1558 /* */
1559 {
1560 NV0073_CTRL_CMD_DP_SET_MANUAL_DISPLAYPORT_PARAMS *ctrl;
1561
1562 ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1563 NV0073_CTRL_CMD_DP_SET_MANUAL_DISPLAYPORT,
1564 sizeof(*ctrl));
1565 if (IS_ERR(ctrl))
1566 return PTR_ERR(ctrl);
1567
1568 ret = nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
1569 if (ret)
1570 return ret;
1571 }
1572
1573 /* */
1574 {
1575 NV0073_CTRL_SYSTEM_GET_NUM_HEADS_PARAMS *ctrl;
1576
1577 ctrl = nvkm_gsp_rm_ctrl_rd(&disp->rm.objcom,
1578 NV0073_CTRL_CMD_SYSTEM_GET_NUM_HEADS, sizeof(*ctrl));
1579 if (IS_ERR(ctrl))
1580 return PTR_ERR(ctrl);
1581
1582 disp->head.nr = ctrl->numHeads;
1583 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1584 }
1585
1586 /* */
1587 {
1588 NV0073_CTRL_SPECIFIC_GET_ALL_HEAD_MASK_PARAMS *ctrl;
1589
1590 ctrl = nvkm_gsp_rm_ctrl_rd(&disp->rm.objcom,
1591 NV0073_CTRL_CMD_SPECIFIC_GET_ALL_HEAD_MASK,
1592 sizeof(*ctrl));
1593 if (IS_ERR(ctrl))
1594 return PTR_ERR(ctrl);
1595
1596 disp->head.mask = ctrl->headMask;
1597 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1598
1599 for_each_set_bit(i, &disp->head.mask, disp->head.nr) {
1600 ret = nvkm_head_new_(&r535_head, disp, i);
1601 if (ret)
1602 return ret;
1603 }
1604 }
1605
1606 disp->sor.nr = disp->func->sor.cnt(disp, &disp->sor.mask);
1607 nvkm_debug(&disp->engine.subdev, " SOR(s): %d (%02lx)\n", disp->sor.nr, disp->sor.mask);
1608 for_each_set_bit(i, &disp->sor.mask, disp->sor.nr) {
1609 ret = disp->func->sor.new(disp, i);
1610 if (ret)
1611 return ret;
1612 }
1613
1614 /* */
1615 {
1616 NV0073_CTRL_SYSTEM_GET_SUPPORTED_PARAMS *ctrl;
1617 unsigned long mask;
1618 int i;
1619
1620 ctrl = nvkm_gsp_rm_ctrl_rd(&disp->rm.objcom,
1621 NV0073_CTRL_CMD_SYSTEM_GET_SUPPORTED, sizeof(*ctrl));
1622 if (IS_ERR(ctrl))
1623 return PTR_ERR(ctrl);
1624
1625 mask = ctrl->displayMask;
1626 nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1627
1628 for_each_set_bit(i, &mask, 32) {
1629 ret = r535_outp_new(disp, i);
1630 if (ret)
1631 return ret;
1632 }
1633 }
1634
1635 ret = nvkm_event_init(&r535_disp_event, &gsp->subdev, 3, 32, &disp->rm.event);
1636 if (WARN_ON(ret))
1637 return ret;
1638
1639 ret = nvkm_gsp_device_event_ctor(&disp->rm.device, 0x007e0000, NV2080_NOTIFIERS_HOTPLUG,
1640 r535_disp_hpd, &disp->rm.hpd);
1641 if (ret)
1642 return ret;
1643
1644 ret = nvkm_gsp_device_event_ctor(&disp->rm.device, 0x007e0001, NV2080_NOTIFIERS_DP_IRQ,
1645 r535_disp_irq, &disp->rm.irq);
1646 if (ret)
1647 return ret;
1648
1649 /* RAMHT. */
1650 ret = nvkm_ramht_new(device, disp->func->ramht_size ? disp->func->ramht_size :
1651 0x1000, 0, disp->inst, &disp->ramht);
1652 if (ret)
1653 return ret;
1654
1655 ret = nvkm_gsp_intr_stall(gsp, disp->engine.subdev.type, disp->engine.subdev.inst);
1656 if (ret < 0)
1657 return ret;
1658
1659 ret = nvkm_inth_add(&device->vfn->intr, ret, NVKM_INTR_PRIO_NORMAL, &disp->engine.subdev,
1660 r535_disp_intr, &disp->engine.subdev.inth);
1661 if (ret)
1662 return ret;
1663
1664 nvkm_inth_allow(&disp->engine.subdev.inth);
1665 return 0;
1666 }
1667
1668 static void
r535_disp_dtor(struct nvkm_disp *disp)
1670 {
1671 kfree(disp->func);
1672 }
1673
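/* Build an RM-backed nvkm_disp_func by copying the hardware-specific function
 * table and overriding the entry points (and channel implementations) that
 * must go through GSP-RM instead of direct register programming.
 */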
1674 int
r535_disp_new(const struct nvkm_disp_func *hw, struct nvkm_device *device,
	      enum nvkm_subdev_type type, int inst, struct nvkm_disp **pdisp)
1677 {
1678 struct nvkm_disp_func *rm;
1679 int ret;
1680
1681 if (!(rm = kzalloc(sizeof(*rm) + 6 * sizeof(rm->user[0]), GFP_KERNEL)))
1682 return -ENOMEM;
1683
1684 rm->dtor = r535_disp_dtor;
1685 rm->oneinit = r535_disp_oneinit;
1686 rm->init = r535_disp_init;
1687 rm->fini = r535_disp_fini;
1688 rm->uevent = hw->uevent;
1689 rm->sor.cnt = r535_sor_cnt;
1690 rm->sor.new = r535_sor_new;
1691 rm->ramht_size = hw->ramht_size;
1692
1693 rm->root = hw->root;
1694
1695 for (int i = 0; hw->user[i].ctor; i++) {
1696 switch (hw->user[i].base.oclass & 0xff) {
1697 case 0x73: rm->user[i] = hw->user[i]; break;
1698 case 0x7d: rm->user[i] = hw->user[i]; rm->user[i].chan = &r535_core; break;
1699 case 0x7e: rm->user[i] = hw->user[i]; rm->user[i].chan = &r535_wndw; break;
1700 case 0x7b: rm->user[i] = hw->user[i]; rm->user[i].chan = &r535_wimm; break;
1701 case 0x7a: rm->user[i] = hw->user[i]; rm->user[i].chan = &r535_curs; break;
1702 default:
1703 WARN_ON(1);
1704 continue;
1705 }
1706 }
1707
1708 ret = nvkm_disp_new_(rm, device, type, inst, pdisp);
1709 if (ret)
1710 kfree(rm);
1711
1712 mutex_init(&(*pdisp)->super.mutex); //XXX
1713 return ret;
1714 }
1715