xref: /linux/drivers/gpu/drm/nouveau/nouveau_display.c (revision 74ba587f402d5501af2c85e50cf1e4044263b6ca)
1 /*
2  * Copyright (C) 2008 Maarten Maathuis.
3  * All Rights Reserved.
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining
6  * a copy of this software and associated documentation files (the
7  * "Software"), to deal in the Software without restriction, including
8  * without limitation the rights to use, copy, modify, merge, publish,
9  * distribute, sublicense, and/or sell copies of the Software, and to
10  * permit persons to whom the Software is furnished to do so, subject to
11  * the following conditions:
12  *
13  * The above copyright notice and this permission notice (including the
14  * next paragraph) shall be included in all copies or substantial
15  * portions of the Software.
16  *
17  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
18  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
19  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
20  * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
21  * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
22  * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
23  * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
24  *
25  */
26 
27 #include <acpi/video.h>
28 
29 #include <drm/drm_atomic.h>
30 #include <drm/drm_atomic_helper.h>
31 #include <drm/drm_client_event.h>
32 #include <drm/drm_crtc_helper.h>
33 #include <drm/drm_dumb_buffers.h>
34 #include <drm/drm_fourcc.h>
35 #include <drm/drm_gem_framebuffer_helper.h>
36 #include <drm/drm_probe_helper.h>
37 #include <drm/drm_vblank.h>
38 
39 #include "nouveau_crtc.h"
40 #include "nouveau_gem.h"
41 #include "nouveau_connector.h"
42 #include "nv50_display.h"
43 
44 #include <nvif/class.h>
45 #include <nvif/if0011.h>
46 #include <nvif/if0013.h>
47 #include <dispnv50/crc.h>
48 #include <dispnv50/tile.h>
49 
50 int
51 nouveau_display_vblank_enable(struct drm_crtc *crtc)
52 {
53 	struct nouveau_crtc *nv_crtc;
54 
55 	nv_crtc = nouveau_crtc(crtc);
56 	nvif_event_allow(&nv_crtc->vblank);
57 
58 	return 0;
59 }
60 
61 void
62 nouveau_display_vblank_disable(struct drm_crtc *crtc)
63 {
64 	struct nouveau_crtc *nv_crtc;
65 
66 	nv_crtc = nouveau_crtc(crtc);
67 	nvif_event_block(&nv_crtc->vblank);
68 }
69 
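/*
 * Translate a raw scanline number into a position relative to the start of
 * the active display area: during active scanout the result counts up from
 * 0, inside the vblank interval it is negative (the number of lines left
 * until active scanout begins again).  With illustrative timings of
 * total=1125, blanks=1084, blanke=1124, line 500 maps to 500 (active) and
 * line 1100 maps to -25 (25 lines before the next frame starts).  The
 * blanke < blanks branch handles vblank intervals that wrap across the
 * frame boundary, where the first active line is blanke + 1.
 */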
70 static inline int
71 calc(int blanks, int blanke, int total, int line)
72 {
73 	if (blanke >= blanks) {
74 		if (line >= blanks)
75 			line -= total;
76 	} else {
77 		if (line >= blanks)
78 			line -= total;
79 		line -= blanke + 1;
80 	}
81 	return line;
82 }
83 
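/*
 * Sample the current scanout position of a head via the
 * NVIF_HEAD_V0_SCANOUTPOS method.  A reported vline of 0 is treated as an
 * invalid sample and retried (up to 20 times, waiting roughly one scanline
 * between attempts).  On success *hpos/*vpos hold the sampled position and
 * *stime/*etime the timestamps taken before and after the hardware read,
 * so callers can bound when the sample was taken.
 */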
84 static bool
85 nouveau_display_scanoutpos_head(struct drm_crtc *crtc, int *vpos, int *hpos,
86 				ktime_t *stime, ktime_t *etime)
87 {
88 	struct drm_vblank_crtc *vblank = drm_crtc_vblank_crtc(crtc);
89 	struct nvif_head *head = &nouveau_crtc(crtc)->head;
90 	struct nvif_head_scanoutpos_v0 args;
91 	int retry = 20;
92 	bool ret = false;
93 
94 	args.version = 0;
95 
96 	do {
97 		ret = nvif_mthd(&head->object, NVIF_HEAD_V0_SCANOUTPOS, &args, sizeof(args));
98 		if (ret != 0)
99 			return false;
100 
101 		if (args.vline) {
102 			ret = true;
103 			break;
104 		}
105 
106 		if (retry) ndelay(vblank->linedur_ns);
107 	} while (retry--);
108 
109 	*hpos = args.hline;
110 	*vpos = calc(args.vblanks, args.vblanke, args.vtotal, args.vline);
111 	if (stime) *stime = ns_to_ktime(args.time[0]);
112 	if (etime) *etime = ns_to_ktime(args.time[1]);
113 
114 	return ret;
115 }
116 
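/*
 * drm_crtc_helper_funcs.get_scanout_position hook used for vblank
 * timestamping; the mode and in_vblank_irq arguments are unused since the
 * position is read back directly from the hardware.
 */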
117 bool
118 nouveau_display_scanoutpos(struct drm_crtc *crtc,
119 			   bool in_vblank_irq, int *vpos, int *hpos,
120 			   ktime_t *stime, ktime_t *etime,
121 			   const struct drm_display_mode *mode)
122 {
123 	return nouveau_display_scanoutpos_head(crtc, vpos, hpos,
124 					       stime, etime);
125 }
126 
127 static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
128 	.destroy = drm_gem_fb_destroy,
129 	.create_handle = drm_gem_fb_create_handle,
130 };
131 
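/*
 * Decode a framebuffer modifier into the tile mode and memory kind used by
 * the rest of the driver.  DRM_FORMAT_MOD_LINEAR decodes to 0/0.  For the
 * NVIDIA block-linear modifiers the low nibble holds log2 of the block
 * height in GOBs and bits 19:12 hold the memory kind (see
 * DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D() in drm_fourcc.h); legacy
 * modifiers carry no kind, so the kind from the first modifier advertised
 * by the display engine is substituted.  On Fermi (0xc0) and newer the
 * block height is shifted up to match the tile-mode encoding used by the
 * buffer code.
 */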
132 static void
133 nouveau_decode_mod(struct nouveau_drm *drm,
134 		   uint64_t modifier,
135 		   uint32_t *tile_mode,
136 		   uint8_t *kind)
137 {
138 	struct nouveau_display *disp = nouveau_display(drm->dev);
139 	BUG_ON(!tile_mode || !kind);
140 
141 	if (modifier == DRM_FORMAT_MOD_LINEAR) {
142 		/* tile_mode will not be used in this case */
143 		*tile_mode = 0;
144 		*kind = 0;
145 	} else {
146 		/*
147 		 * Extract the block height and kind from the corresponding
148 		 * modifier fields.  See drm_fourcc.h for details.
149 		 */
150 
151 		if ((modifier & (0xffull << 12)) == 0ull) {
152 			/* Legacy modifier.  Translate to this dev's 'kind.' */
153 			modifier |= disp->format_modifiers[0] & (0xffull << 12);
154 		}
155 
156 		*tile_mode = (uint32_t)(modifier & 0xF);
157 		*kind = (uint8_t)((modifier >> 12) & 0xFF);
158 
159 		if (drm->client.device.info.chipset >= 0xc0)
160 			*tile_mode <<= 4;
161 	}
162 }
163 
164 void
165 nouveau_framebuffer_get_layout(struct drm_framebuffer *fb,
166 			       uint32_t *tile_mode,
167 			       uint8_t *kind)
168 {
169 	if (fb->flags & DRM_MODE_FB_MODIFIERS) {
170 		struct nouveau_drm *drm = nouveau_drm(fb->dev);
171 
172 		nouveau_decode_mod(drm, fb->modifier, tile_mode, kind);
173 	} else {
174 		const struct nouveau_bo *nvbo = nouveau_gem_object(fb->obj[0]);
175 
176 		*tile_mode = nvbo->mode;
177 		*kind = nvbo->kind;
178 	}
179 }
180 
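/*
 * Older 16BX2_BLOCK(v) modifiers encode only the block height.  They are
 * still accepted for compatibility and get the device's memory kind filled
 * in by nouveau_decode_mod().
 */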
181 static const u64 legacy_modifiers[] = {
182 	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(0),
183 	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(1),
184 	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(2),
185 	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(3),
186 	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(4),
187 	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(5),
188 	DRM_FORMAT_MOD_INVALID
189 };
190 
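/*
 * Validate a modifier before decoding it: modifiers are only supported on
 * NV50/Tesla and newer, and the modifier must appear either in the list
 * advertised by the display engine or in the legacy list above.
 */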
191 static int
192 nouveau_validate_decode_mod(struct nouveau_drm *drm,
193 			    uint64_t modifier,
194 			    uint32_t *tile_mode,
195 			    uint8_t *kind)
196 {
197 	struct nouveau_display *disp = nouveau_display(drm->dev);
198 	int mod;
199 
200 	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA) {
201 		return -EINVAL;
202 	}
203 
204 	BUG_ON(!disp->format_modifiers);
205 
206 	for (mod = 0;
207 	     (disp->format_modifiers[mod] != DRM_FORMAT_MOD_INVALID) &&
208 	     (disp->format_modifiers[mod] != modifier);
209 	     mod++);
210 
211 	if (disp->format_modifiers[mod] == DRM_FORMAT_MOD_INVALID) {
212 		for (mod = 0;
213 		     (legacy_modifiers[mod] != DRM_FORMAT_MOD_INVALID) &&
214 		     (legacy_modifiers[mod] != modifier);
215 		     mod++);
216 		if (legacy_modifiers[mod] == DRM_FORMAT_MOD_INVALID)
217 			return -EINVAL;
218 	}
219 
220 	nouveau_decode_mod(drm, modifier, tile_mode, kind);
221 
222 	return 0;
223 }
224 
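/*
 * Check that a block-linear plane fits within its backing object: compute
 * the plane size from the number of blocks covering stride x height, the
 * GOBs per block for this tile mode and the GOB size for the chip family,
 * then verify offset + size against the BO size.
 */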
225 static int
226 nouveau_check_bl_size(struct nouveau_drm *drm, struct nouveau_bo *nvbo,
227 		      uint32_t offset, uint32_t stride, uint32_t h,
228 		      uint32_t tile_mode)
229 {
230 	uint32_t gob_size, bw, bh, gobs_in_block;
231 	uint64_t bl_size;
232 
233 	BUG_ON(drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA);
234 
235 	if (nouveau_check_tile_mode(tile_mode, drm->client.device.info.chipset))
236 		return -EINVAL;
237 
238 	gobs_in_block = nouveau_get_gobs_in_block(tile_mode, drm->client.device.info.chipset);
239 	bw = nouveau_get_width_in_blocks(stride);
240 	bh = nouveau_get_height_in_blocks(h, gobs_in_block, drm->client.device.info.family);
241 	gob_size = nouveau_get_gob_size(drm->client.device.info.family);
242 
243 	bl_size = bw * bh * gobs_in_block * gob_size;
244 
245 	DRM_DEBUG_KMS("offset=%u stride=%u h=%u gobs_in_block=%u bw=%u bh=%u gob_size=%u bl_size=%llu size=%zu\n",
246 		      offset, stride, h, gobs_in_block, bw, bh, gob_size,
247 		      bl_size, nvbo->bo.base.size);
248 
249 	if (bl_size + offset > nvbo->bo.base.size)
250 		return -ERANGE;
251 
252 	return 0;
253 }
254 
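/*
 * Validate a framebuffer request and wrap the GEM object in a
 * drm_framebuffer: enforce the pre-NV50 pitch rules for YUV overlays,
 * resolve the tile mode and kind either from a validated modifier or from
 * the BO's own tiling state, check that every plane fits inside the BO,
 * then allocate and register the framebuffer.
 */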
255 int
256 nouveau_framebuffer_new(struct drm_device *dev,
257 			const struct drm_format_info *info,
258 			const struct drm_mode_fb_cmd2 *mode_cmd,
259 			struct drm_gem_object *gem,
260 			struct drm_framebuffer **pfb)
261 {
262 	struct nouveau_drm *drm = nouveau_drm(dev);
263 	struct nouveau_bo *nvbo = nouveau_gem_object(gem);
264 	struct drm_framebuffer *fb;
265 	unsigned int height, i;
266 	uint32_t tile_mode;
267 	uint8_t kind;
268 	int ret;
269 
270         /* YUV overlays have special requirements pre-NV50 */
271 	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA &&
272 
273 	    (mode_cmd->pixel_format == DRM_FORMAT_YUYV ||
274 	     mode_cmd->pixel_format == DRM_FORMAT_UYVY ||
275 	     mode_cmd->pixel_format == DRM_FORMAT_NV12 ||
276 	     mode_cmd->pixel_format == DRM_FORMAT_NV21) &&
277 	    (mode_cmd->pitches[0] & 0x3f || /* align 64 */
278 	     mode_cmd->pitches[0] >= 0x10000 || /* at most 64k pitch */
279 	     (mode_cmd->pitches[1] && /* pitches for planes must match */
280 	      mode_cmd->pitches[0] != mode_cmd->pitches[1]))) {
281 		DRM_DEBUG_KMS("Unsuitable framebuffer: format: %p4cc; pitches: 0x%x 0x%x\n",
282 			      &mode_cmd->pixel_format,
283 			      mode_cmd->pitches[0], mode_cmd->pitches[1]);
284 		return -EINVAL;
285 	}
286 
287 	if (mode_cmd->flags & DRM_MODE_FB_MODIFIERS) {
288 		if (nouveau_validate_decode_mod(drm, mode_cmd->modifier[0],
289 						&tile_mode, &kind)) {
290 			DRM_DEBUG_KMS("Unsupported modifier: 0x%llx\n",
291 				      mode_cmd->modifier[0]);
292 			return -EINVAL;
293 		}
294 	} else {
295 		tile_mode = nvbo->mode;
296 		kind = nvbo->kind;
297 	}
298 
299 	for (i = 0; i < info->num_planes; i++) {
300 		height = drm_format_info_plane_height(info,
301 						      mode_cmd->height,
302 						      i);
303 
304 		if (kind) {
305 			ret = nouveau_check_bl_size(drm, nvbo,
306 						    mode_cmd->offsets[i],
307 						    mode_cmd->pitches[i],
308 						    height, tile_mode);
309 			if (ret)
310 				return ret;
311 		} else {
312 			uint32_t size = mode_cmd->pitches[i] * height;
313 
314 			if (size + mode_cmd->offsets[i] > nvbo->bo.base.size)
315 				return -ERANGE;
316 		}
317 	}
318 
319 	if (!(fb = *pfb = kzalloc(sizeof(*fb), GFP_KERNEL)))
320 		return -ENOMEM;
321 
322 	drm_helper_mode_fill_fb_struct(dev, fb, info, mode_cmd);
323 	fb->obj[0] = gem;
324 
325 	ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
326 	if (ret)
327 		kfree(fb);
328 	return ret;
329 }
330 
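/*
 * .fb_create hook: look up the GEM handle and hand it to
 * nouveau_framebuffer_new().  On success the framebuffer keeps the
 * reference; on failure it is dropped again here.
 */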
331 struct drm_framebuffer *
332 nouveau_user_framebuffer_create(struct drm_device *dev,
333 				struct drm_file *file_priv,
334 				const struct drm_format_info *info,
335 				const struct drm_mode_fb_cmd2 *mode_cmd)
336 {
337 	struct drm_framebuffer *fb;
338 	struct drm_gem_object *gem;
339 	int ret;
340 
341 	gem = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);
342 	if (!gem)
343 		return ERR_PTR(-ENOENT);
344 
345 	ret = nouveau_framebuffer_new(dev, info, mode_cmd, gem, &fb);
346 	if (ret == 0)
347 		return fb;
348 
349 	drm_gem_object_put(gem);
350 	return ERR_PTR(ret);
351 }
352 
353 static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
354 	.fb_create = nouveau_user_framebuffer_create,
355 };
356 
357 
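/*
 * Connector property tables.  gen_mask is a bitmask of the display
 * generations an entry applies to, using the numbering from
 * nouveau_display_create_properties(): bit 0 = pre-NV50, bit 1 = NV50
 * family, bit 2 = GF110 and newer (so 7 = all, 6 = NV50 and newer,
 * 4 = GF110+ only, 1 = pre-NV50 only).
 */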
358 struct nouveau_drm_prop_enum_list {
359 	u8 gen_mask;
360 	int type;
361 	char *name;
362 };
363 
364 static struct nouveau_drm_prop_enum_list underscan[] = {
365 	{ 6, UNDERSCAN_AUTO, "auto" },
366 	{ 6, UNDERSCAN_OFF, "off" },
367 	{ 6, UNDERSCAN_ON, "on" },
368 	{}
369 };
370 
371 static struct nouveau_drm_prop_enum_list dither_mode[] = {
372 	{ 7, DITHERING_MODE_AUTO, "auto" },
373 	{ 7, DITHERING_MODE_OFF, "off" },
374 	{ 1, DITHERING_MODE_ON, "on" },
375 	{ 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
376 	{ 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
377 	{ 4, DITHERING_MODE_TEMPORAL, "temporal" },
378 	{}
379 };
380 
381 static struct nouveau_drm_prop_enum_list dither_depth[] = {
382 	{ 6, DITHERING_DEPTH_AUTO, "auto" },
383 	{ 6, DITHERING_DEPTH_6BPC, "6 bpc" },
384 	{ 6, DITHERING_DEPTH_8BPC, "8 bpc" },
385 	{}
386 };
387 
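/*
 * Create an enum property containing only the entries whose gen_mask
 * includes the current display generation: count the matching entries
 * first, then create the property and add each matching name/value pair.
 */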
388 #define PROP_ENUM(p,gen,n,list) do {                                           \
389 	struct nouveau_drm_prop_enum_list *l = (list);                         \
390 	int c = 0;                                                             \
391 	while (l->gen_mask) {                                                  \
392 		if (l->gen_mask & (1 << (gen)))                                \
393 			c++;                                                   \
394 		l++;                                                           \
395 	}                                                                      \
396 	if (c) {                                                               \
397 		p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c);        \
398 		l = (list);                                                    \
399 		while (p && l->gen_mask) {                                     \
400 			if (l->gen_mask & (1 << (gen))) {                      \
401 				drm_property_add_enum(p, l->type, l->name);    \
402 			}                                                      \
403 			l++;                                                   \
404 		}                                                              \
405 	}                                                                      \
406 } while (0)
407 
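/*
 * Mark every connector as having a pending hotplug event and kick the HPD
 * worker so that connector state is re-probed, e.g. after the display has
 * been reinitialised on resume.  Does nothing on headless devices.
 */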
408 void
409 nouveau_display_hpd_resume(struct nouveau_drm *drm)
410 {
411 	if (drm->headless)
412 		return;
413 
414 	spin_lock_irq(&drm->hpd_lock);
415 	drm->hpd_pending = ~0;
416 	spin_unlock_irq(&drm->hpd_lock);
417 
418 	schedule_work(&drm->hpd_work);
419 }
420 
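/*
 * HPD bottom half: snapshot and clear the pending-connector mask under
 * hpd_lock, re-detect each affected non-MST connector (skipping DP
 * sink-IRQs that the link check handled entirely), and send a hotplug
 * uevent - a connector-specific one when exactly one connector changed.
 * A runtime PM reference keeps the device awake for the duration; the
 * last-busy timestamp is only updated when there was work to do.
 */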
421 static void
422 nouveau_display_hpd_work(struct work_struct *work)
423 {
424 	struct nouveau_drm *drm = container_of(work, typeof(*drm), hpd_work);
425 	struct drm_device *dev = drm->dev;
426 	struct drm_connector *connector;
427 	struct drm_connector_list_iter conn_iter;
428 	u32 pending;
429 	int changed = 0;
430 	struct drm_connector *first_changed_connector = NULL;
431 
432 	pm_runtime_get_sync(dev->dev);
433 
434 	spin_lock_irq(&drm->hpd_lock);
435 	pending = drm->hpd_pending;
436 	drm->hpd_pending = 0;
437 	spin_unlock_irq(&drm->hpd_lock);
438 
439 	/* Nothing to do, exit early without updating the last busy counter */
440 	if (!pending)
441 		goto noop;
442 
443 	mutex_lock(&dev->mode_config.mutex);
444 	drm_connector_list_iter_begin(dev, &conn_iter);
445 
446 	nouveau_for_each_non_mst_connector_iter(connector, &conn_iter) {
447 		struct nouveau_connector *nv_connector = nouveau_connector(connector);
448 		enum drm_connector_status old_status = connector->status;
449 		u64 bits, old_epoch_counter = connector->epoch_counter;
450 
451 		if (!(pending & drm_connector_mask(connector)))
452 			continue;
453 
454 		spin_lock_irq(&drm->hpd_lock);
455 		bits = nv_connector->hpd_pending;
456 		nv_connector->hpd_pending = 0;
457 		spin_unlock_irq(&drm->hpd_lock);
458 
459 		drm_dbg_kms(dev, "[CONNECTOR:%d:%s] plug:%d unplug:%d irq:%d\n",
460 			    connector->base.id, connector->name,
461 			    !!(bits & NVIF_CONN_EVENT_V0_PLUG),
462 			    !!(bits & NVIF_CONN_EVENT_V0_UNPLUG),
463 			    !!(bits & NVIF_CONN_EVENT_V0_IRQ));
464 
465 		if (bits & NVIF_CONN_EVENT_V0_IRQ) {
466 			if (nouveau_dp_link_check(nv_connector))
467 				continue;
468 		}
469 
470 		connector->status = drm_helper_probe_detect(connector, NULL, false);
471 		if (old_epoch_counter == connector->epoch_counter)
472 			continue;
473 
474 		changed++;
475 		if (!first_changed_connector) {
476 			drm_connector_get(connector);
477 			first_changed_connector = connector;
478 		}
479 
480 		drm_dbg_kms(dev, "[CONNECTOR:%d:%s] status updated from %s to %s (epoch counter %llu->%llu)\n",
481 			    connector->base.id, connector->name,
482 			    drm_get_connector_status_name(old_status),
483 			    drm_get_connector_status_name(connector->status),
484 			    old_epoch_counter, connector->epoch_counter);
485 	}
486 
487 	drm_connector_list_iter_end(&conn_iter);
488 	mutex_unlock(&dev->mode_config.mutex);
489 
490 	if (changed == 1)
491 		drm_kms_helper_connector_hotplug_event(first_changed_connector);
492 	else if (changed > 0)
493 		drm_kms_helper_hotplug_event(dev);
494 
495 	if (first_changed_connector)
496 		drm_connector_put(first_changed_connector);
497 
498 	pm_runtime_mark_last_busy(dev->dev);
499 noop:
500 	pm_runtime_put_autosuspend(dev->dev);
501 }
502 
503 #ifdef CONFIG_ACPI
504 
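/*
 * ACPI video notifier.  On ACPI_VIDEO_NOTIFY_PROBE, try to take a runtime
 * PM reference: if the GPU is already awake (or cannot be woken) it will
 * see the hotplug itself, and if a resume is already in flight the resume
 * path reprobes the connectors; otherwise the event is dropped with a
 * warning.  Returning NOTIFY_BAD stops acpi-video from generating a key
 * press for the event.
 */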
505 static int
506 nouveau_display_acpi_ntfy(struct notifier_block *nb, unsigned long val,
507 			  void *data)
508 {
509 	struct nouveau_drm *drm = container_of(nb, typeof(*drm), acpi_nb);
510 	struct acpi_bus_event *info = data;
511 	int ret;
512 
513 	if (!strcmp(info->device_class, ACPI_VIDEO_CLASS)) {
514 		if (info->type == ACPI_VIDEO_NOTIFY_PROBE) {
515 			ret = pm_runtime_get(drm->dev->dev);
516 			if (ret == 1 || ret == -EACCES) {
517 				/* If the GPU is already awake, or in a state
518 				 * where we can't wake it up, it can handle
519 				 * its own hotplug events.
520 				 */
521 				pm_runtime_put_autosuspend(drm->dev->dev);
522 			} else if (ret == 0 || ret == -EINPROGRESS) {
523 				/* We've started resuming the GPU already, so
524 				 * it will handle scheduling a full reprobe
525 				 * itself
526 				 */
527 				NV_DEBUG(drm, "ACPI requested connector reprobe\n");
528 				pm_runtime_put_noidle(drm->dev->dev);
529 			} else {
530 				NV_WARN(drm, "Dropped ACPI reprobe event due to RPM error: %d\n",
531 					ret);
532 			}
533 
534 			/* acpi-video should not generate keypresses for this */
535 			return NOTIFY_BAD;
536 		}
537 	}
538 
539 	return NOTIFY_DONE;
540 }
541 #endif
542 
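/*
 * Bring the display up: unblock HPD and sink-IRQ events on all non-MST
 * connectors first (MST needs them during init), run the backend's init
 * hook, then enable polling for connectors without working HPD.
 */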
543 int
544 nouveau_display_init(struct drm_device *dev, bool resume, bool runtime)
545 {
546 	struct nouveau_display *disp = nouveau_display(dev);
547 	struct drm_connector *connector;
548 	struct drm_connector_list_iter conn_iter;
549 	int ret;
550 
551 	/*
552 	 * Enable hotplug interrupts (done as early as possible, since we need
553 	 * them for MST)
554 	 */
555 	drm_connector_list_iter_begin(dev, &conn_iter);
556 	nouveau_for_each_non_mst_connector_iter(connector, &conn_iter) {
557 		struct nouveau_connector *conn = nouveau_connector(connector);
558 		nvif_event_allow(&conn->hpd);
559 		nvif_event_allow(&conn->irq);
560 	}
561 	drm_connector_list_iter_end(&conn_iter);
562 
563 	ret = disp->init(dev, resume, runtime);
564 	if (ret)
565 		return ret;
566 
567 	/* enable connector detection and polling for connectors without HPD
568 	 * support
569 	 */
570 	drm_kms_helper_poll_enable(dev);
571 
572 	return ret;
573 }
574 
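/*
 * Tear the display down: on a final (non-suspend) shutdown disable all
 * CRTCs through the atomic or legacy helpers, block HPD and sink-IRQ
 * events, cancel the HPD worker unless this is a runtime suspend or the
 * device is headless, stop polling and call the backend's fini hook.
 */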
575 void
576 nouveau_display_fini(struct drm_device *dev, bool suspend, bool runtime)
577 {
578 	struct nouveau_display *disp = nouveau_display(dev);
579 	struct nouveau_drm *drm = nouveau_drm(dev);
580 	struct drm_connector *connector;
581 	struct drm_connector_list_iter conn_iter;
582 
583 	if (!suspend) {
584 		if (drm_drv_uses_atomic_modeset(dev))
585 			drm_atomic_helper_shutdown(dev);
586 		else
587 			drm_helper_force_disable_all(dev);
588 	}
589 
590 	/* disable hotplug interrupts */
591 	drm_connector_list_iter_begin(dev, &conn_iter);
592 	nouveau_for_each_non_mst_connector_iter(connector, &conn_iter) {
593 		struct nouveau_connector *conn = nouveau_connector(connector);
594 		nvif_event_block(&conn->irq);
595 		nvif_event_block(&conn->hpd);
596 	}
597 	drm_connector_list_iter_end(&conn_iter);
598 
599 	if (!runtime && !drm->headless)
600 		cancel_work_sync(&drm->hpd_work);
601 
602 	drm_kms_helper_poll_disable(dev);
603 	disp->fini(dev, runtime, suspend);
604 }
605 
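/*
 * Map the display class to a property generation (pre-NV50 = 0, NV50
 * family = 1, GF110 and newer = 2) and create the dithering, underscan and
 * (NV50+) vibrance/hue properties that apply to it.
 */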
606 static void
607 nouveau_display_create_properties(struct drm_device *dev)
608 {
609 	struct nouveau_display *disp = nouveau_display(dev);
610 	int gen;
611 
612 	if (disp->disp.object.oclass < NV50_DISP)
613 		gen = 0;
614 	else
615 	if (disp->disp.object.oclass < GF110_DISP)
616 		gen = 1;
617 	else
618 		gen = 2;
619 
620 	PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
621 	PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
622 	PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);
623 
624 	disp->underscan_hborder_property =
625 		drm_property_create_range(dev, 0, "underscan hborder", 0, 128);
626 
627 	disp->underscan_vborder_property =
628 		drm_property_create_range(dev, 0, "underscan vborder", 0, 128);
629 
630 	if (gen < 1)
631 		return;
632 
633 	/* -90..+90 */
634 	disp->vibrant_hue_property =
635 		drm_property_create_range(dev, 0, "vibrant hue", 0, 180);
636 
637 	/* -100..+100 */
638 	disp->color_vibrance_property =
639 		drm_property_create_range(dev, 0, "color vibrance", 0, 200);
640 }
641 
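/*
 * Set up KMS for the device: allocate the display state, initialise
 * mode_config (resolution limits scale with the chip family, async page
 * flips are only advertised on NV11 and newer), create the core display
 * object and hand over to the NV04 or NV50 backend, then set up vblank
 * handling, CRC reporting (NV50+), the HPD worker and the ACPI notifier.
 * A missing display engine is not an error; the device becomes headless.
 */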
642 int
643 nouveau_display_create(struct drm_device *dev)
644 {
645 	struct nouveau_drm *drm = nouveau_drm(dev);
646 	struct nouveau_display *disp;
647 	int ret;
648 
649 	disp = drm->display = kzalloc(sizeof(*disp), GFP_KERNEL);
650 	if (!disp)
651 		return -ENOMEM;
652 
653 	drm_mode_config_init(dev);
654 	drm_mode_create_scaling_mode_property(dev);
655 	drm_mode_create_dvi_i_properties(dev);
656 
657 	dev->mode_config.funcs = &nouveau_mode_config_funcs;
658 
659 	dev->mode_config.min_width = 0;
660 	dev->mode_config.min_height = 0;
661 	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_CELSIUS) {
662 		dev->mode_config.max_width = 2048;
663 		dev->mode_config.max_height = 2048;
664 	} else
665 	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA) {
666 		dev->mode_config.max_width = 4096;
667 		dev->mode_config.max_height = 4096;
668 	} else
669 	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_FERMI) {
670 		dev->mode_config.max_width = 8192;
671 		dev->mode_config.max_height = 8192;
672 	} else {
673 		dev->mode_config.max_width = 16384;
674 		dev->mode_config.max_height = 16384;
675 	}
676 
677 	dev->mode_config.preferred_depth = 24;
678 	dev->mode_config.prefer_shadow = 1;
679 
680 	if (drm->client.device.info.chipset < 0x11)
681 		dev->mode_config.async_page_flip = false;
682 	else
683 		dev->mode_config.async_page_flip = true;
684 
685 	drm_kms_helper_poll_init(dev);
686 	drm_kms_helper_poll_disable(dev);
687 
688 	if (nouveau_modeset != 2) {
689 		ret = nvif_disp_ctor(&drm->client.device, "kmsDisp", 0, &disp->disp);
690 		/* no display hw */
691 		if (ret == -ENODEV) {
692 			ret = 0;
693 			drm->headless = true;
694 			goto disp_create_err;
695 		}
696 
697 		if (!ret && (disp->disp.outp_mask || drm->vbios.dcb.entries)) {
698 			nouveau_display_create_properties(dev);
699 			if (disp->disp.object.oclass < NV50_DISP) {
700 				dev->mode_config.fb_modifiers_not_supported = true;
701 				ret = nv04_display_create(dev);
702 			} else {
703 				ret = nv50_display_create(dev);
704 			}
705 		}
706 	} else {
707 		ret = 0;
708 	}
709 
710 	if (ret)
711 		goto disp_create_err;
712 
713 	drm_mode_config_reset(dev);
714 
715 	if (dev->mode_config.num_crtc) {
716 		ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
717 		if (ret)
718 			goto vblank_err;
719 
720 		if (disp->disp.object.oclass >= NV50_DISP)
721 			nv50_crc_init(dev);
722 	}
723 
724 	INIT_WORK(&drm->hpd_work, nouveau_display_hpd_work);
725 	spin_lock_init(&drm->hpd_lock);
726 #ifdef CONFIG_ACPI
727 	drm->acpi_nb.notifier_call = nouveau_display_acpi_ntfy;
728 	register_acpi_notifier(&drm->acpi_nb);
729 #endif
730 
731 	return 0;
732 
733 vblank_err:
734 	disp->dtor(dev);
735 disp_create_err:
736 	drm_kms_helper_poll_fini(dev);
737 	drm_mode_config_cleanup(dev);
738 	return ret;
739 }
740 
741 void
742 nouveau_display_destroy(struct drm_device *dev)
743 {
744 	struct nouveau_display *disp = nouveau_display(dev);
745 	struct nouveau_drm *drm = nouveau_drm(dev);
746 
747 #ifdef CONFIG_ACPI
748 	unregister_acpi_notifier(&drm->acpi_nb);
749 #endif
750 
751 	drm_kms_helper_poll_fini(dev);
752 	drm_mode_config_cleanup(dev);
753 
754 	if (disp->dtor)
755 		disp->dtor(dev);
756 
757 	nvif_disp_dtor(&disp->disp);
758 
759 	drm->display = NULL;
760 	kfree(disp);
761 }
762 
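/*
 * Suspend path: suspend in-kernel DRM clients and, for a full (non-runtime)
 * suspend of an atomic driver, stash the current atomic state so that
 * nouveau_display_resume() can restore it, then run the common fini.
 */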
763 int
764 nouveau_display_suspend(struct drm_device *dev, bool runtime)
765 {
766 	struct nouveau_display *disp = nouveau_display(dev);
767 
768 	drm_client_dev_suspend(dev);
769 
770 	if (drm_drv_uses_atomic_modeset(dev)) {
771 		if (!runtime) {
772 			disp->suspend = drm_atomic_helper_suspend(dev);
773 			if (IS_ERR(disp->suspend)) {
774 				int ret = PTR_ERR(disp->suspend);
775 				disp->suspend = NULL;
776 				return ret;
777 			}
778 		}
779 	}
780 
781 	nouveau_display_fini(dev, true, runtime);
782 	return 0;
783 }
784 
785 void
786 nouveau_display_resume(struct drm_device *dev, bool runtime)
787 {
788 	struct nouveau_display *disp = nouveau_display(dev);
789 
790 	nouveau_display_init(dev, true, runtime);
791 
792 	if (drm_drv_uses_atomic_modeset(dev)) {
793 		if (disp->suspend) {
794 			drm_atomic_helper_resume(dev, disp->suspend);
795 			disp->suspend = NULL;
796 		}
797 	}
798 
799 	drm_client_dev_resume(dev);
800 }
801 
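/*
 * Dumb buffer allocation.  drm_mode_size_dumb() is expected to fill in
 * args->pitch and args->size from width/height/bpp (with a 256-byte pitch
 * alignment here); the BO is placed in VRAM when the device has any and in
 * GART otherwise, and only the GEM handle is returned to userspace.
 */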
802 int
803 nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
804 			    struct drm_mode_create_dumb *args)
805 {
806 	struct nouveau_cli *cli = nouveau_cli(file_priv);
807 	struct nouveau_bo *bo;
808 	uint32_t domain;
809 	int ret;
810 
811 	ret = drm_mode_size_dumb(dev, args, SZ_256, 0);
812 	if (ret)
813 		return ret;
814 
815 	/* Use VRAM if there is any; otherwise fall back to system memory */
816 	if (nouveau_drm(dev)->client.device.info.ram_size != 0)
817 		domain = NOUVEAU_GEM_DOMAIN_VRAM;
818 	else
819 		domain = NOUVEAU_GEM_DOMAIN_GART;
820 
821 	ret = nouveau_gem_new(cli, args->size, 0, domain, 0, 0, &bo);
822 	if (ret)
823 		return ret;
824 
825 	ret = drm_gem_handle_create(file_priv, &bo->bo.base, &args->handle);
826 	drm_gem_object_put(&bo->bo.base);
827 	return ret;
828 }
829