xref: /linux/drivers/gpu/drm/drm_edid.c (revision 7211a1bae6eac26539eb2d77a26fcd1bccef8137)
1 /*
2  * Copyright (c) 2006 Luc Verhaegen (quirks list)
3  * Copyright (c) 2007-2008 Intel Corporation
4  *   Jesse Barnes <jesse.barnes@intel.com>
5  * Copyright 2010 Red Hat, Inc.
6  *
7  * DDC probing routines (drm_ddc_read & drm_do_probe_ddc_edid) originally from
8  * FB layer.
9  *   Copyright (C) 2006 Dennis Munsie <dmunsie@cecropia.com>
10  *
11  * Permission is hereby granted, free of charge, to any person obtaining a
12  * copy of this software and associated documentation files (the "Software"),
13  * to deal in the Software without restriction, including without limitation
14  * the rights to use, copy, modify, merge, publish, distribute, sub license,
15  * and/or sell copies of the Software, and to permit persons to whom the
16  * Software is furnished to do so, subject to the following conditions:
17  *
18  * The above copyright notice and this permission notice (including the
19  * next paragraph) shall be included in all copies or substantial portions
20  * of the Software.
21  *
22  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
23  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
24  * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
25  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
26  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
27  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
28  * DEALINGS IN THE SOFTWARE.
29  */
30 #include <linux/kernel.h>
31 #include <linux/slab.h>
32 #include <linux/i2c.h>
33 #include "drmP.h"
34 #include "drm_edid.h"
35 #include "drm_edid_modes.h"
36 
37 #define version_greater(edid, maj, min) \
38 	(((edid)->version > (maj)) || \
39 	 ((edid)->version == (maj) && (edid)->revision > (min)))
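/*
 * Note: the comparison is strict, so e.g. version_greater(edid, 1, 3)
 * matches EDID 1.4 and later but not an exact 1.3 block.
 */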
40 
41 #define EDID_EST_TIMINGS 16
42 #define EDID_STD_TIMINGS 8
43 #define EDID_DETAILED_TIMINGS 4
44 
45 /*
46  * EDID blocks out in the wild have a variety of bugs; try to collect
47  * them here (note that userspace may work around broken monitors first,
48  * but fixes should make their way here so that the kernel "just works"
49  * on as many displays as possible).
50  */
51 
52 /* First detailed mode wrong, use largest 60Hz mode */
53 #define EDID_QUIRK_PREFER_LARGE_60		(1 << 0)
54 /* Reported 135MHz pixel clock is too high, needs adjustment */
55 #define EDID_QUIRK_135_CLOCK_TOO_HIGH		(1 << 1)
56 /* Prefer the largest mode at 75 Hz */
57 #define EDID_QUIRK_PREFER_LARGE_75		(1 << 2)
58 /* Detail timing is in cm not mm */
59 #define EDID_QUIRK_DETAILED_IN_CM		(1 << 3)
60 /* Detailed timing descriptors have bogus size values, so just take the
61  * maximum size and use that.
62  */
63 #define EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE	(1 << 4)
64 /* Monitor forgot to set the first detailed is preferred bit. */
65 #define EDID_QUIRK_FIRST_DETAILED_PREFERRED	(1 << 5)
66 /* use +hsync +vsync for detailed mode */
67 #define EDID_QUIRK_DETAILED_SYNC_PP		(1 << 6)
68 
69 struct detailed_mode_closure {
70 	struct drm_connector *connector;
71 	struct edid *edid;
72 	bool preferred;
73 	u32 quirks;
74 	int modes;
75 };
76 
77 #define LEVEL_DMT	0
78 #define LEVEL_GTF	1
79 #define LEVEL_GTF2	2
80 #define LEVEL_CVT	3
81 
82 static struct edid_quirk {
83 	char *vendor;
84 	int product_id;
85 	u32 quirks;
86 } edid_quirk_list[] = {
87 	/* Acer AL1706 */
88 	{ "ACR", 44358, EDID_QUIRK_PREFER_LARGE_60 },
89 	/* Acer F51 */
90 	{ "API", 0x7602, EDID_QUIRK_PREFER_LARGE_60 },
91 	/* Unknown Acer */
92 	{ "ACR", 2423, EDID_QUIRK_FIRST_DETAILED_PREFERRED },
93 
94 	/* Belinea 10 15 55 */
95 	{ "MAX", 1516, EDID_QUIRK_PREFER_LARGE_60 },
96 	{ "MAX", 0x77e, EDID_QUIRK_PREFER_LARGE_60 },
97 
98 	/* Envision Peripherals, Inc. EN-7100e */
99 	{ "EPI", 59264, EDID_QUIRK_135_CLOCK_TOO_HIGH },
100 	/* Envision EN2028 */
101 	{ "EPI", 8232, EDID_QUIRK_PREFER_LARGE_60 },
102 
103 	/* Funai Electronics PM36B */
104 	{ "FCM", 13600, EDID_QUIRK_PREFER_LARGE_75 |
105 	  EDID_QUIRK_DETAILED_IN_CM },
106 
107 	/* LG Philips LCD LP154W01-A5 */
108 	{ "LPL", 0, EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE },
109 	{ "LPL", 0x2a00, EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE },
110 
111 	/* Philips 107p5 CRT */
112 	{ "PHL", 57364, EDID_QUIRK_FIRST_DETAILED_PREFERRED },
113 
114 	/* Proview AY765C */
115 	{ "PTS", 765, EDID_QUIRK_FIRST_DETAILED_PREFERRED },
116 
117 	/* Samsung SyncMaster 205BW.  Note: irony */
118 	{ "SAM", 541, EDID_QUIRK_DETAILED_SYNC_PP },
119 	/* Samsung SyncMaster 22[5-6]BW */
120 	{ "SAM", 596, EDID_QUIRK_PREFER_LARGE_60 },
121 	{ "SAM", 638, EDID_QUIRK_PREFER_LARGE_60 },
122 };
123 
124 /*** DDC fetch and block validation ***/
125 
126 static const u8 edid_header[] = {
127 	0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00
128 };
129 
130 /*
131  * Sanity check the EDID block (base or extension).  Return 0 if the block
132  * doesn't check out, or 1 if it's valid.
133  */
134 static bool
135 drm_edid_block_valid(u8 *raw_edid)
136 {
137 	int i;
138 	u8 csum = 0;
139 	struct edid *edid = (struct edid *)raw_edid;
140 
141 	if (raw_edid[0] == 0x00) {
142 		int score = 0;
143 
144 		for (i = 0; i < sizeof(edid_header); i++)
145 			if (raw_edid[i] == edid_header[i])
146 				score++;
147 
148 		if (score == 8) ;
149 		else if (score >= 6) {
150 			DRM_DEBUG("Fixing EDID header, your hardware may be failing\n");
151 			memcpy(raw_edid, edid_header, sizeof(edid_header));
152 		} else {
153 			goto bad;
154 		}
155 	}
156 
157 	for (i = 0; i < EDID_LENGTH; i++)
158 		csum += raw_edid[i];
159 	if (csum) {
160 		DRM_ERROR("EDID checksum is invalid, remainder is %d\n", csum);
161 
162 		/* allow CEA to slide through, switches mangle this */
163 		if (raw_edid[0] != 0x02)
164 			goto bad;
165 	}
166 
167 	/* per-block-type checks */
168 	switch (raw_edid[0]) {
169 	case 0: /* base */
170 		if (edid->version != 1) {
171 			DRM_ERROR("EDID has major version %d, instead of 1\n", edid->version);
172 			goto bad;
173 		}
174 
175 		if (edid->revision > 4)
176 			DRM_DEBUG("EDID minor > 4, assuming backward compatibility\n");
177 		break;
178 
179 	default:
180 		break;
181 	}
182 
183 	return 1;
184 
185 bad:
186 	if (raw_edid) {
187 		DRM_ERROR("Raw EDID:\n");
188 		print_hex_dump_bytes(KERN_ERR, DUMP_PREFIX_NONE, raw_edid, EDID_LENGTH);
189 		printk("\n");
190 	}
191 	return 0;
192 }
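/*
 * Illustrative checksum example: every 128-byte block must sum to 0 mod 256,
 * so if the first 127 bytes of a block sum to 0x73 (mod 256), the final
 * checksum byte must be 0x100 - 0x73 = 0x8d for the block to pass the check
 * above.
 */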
193 
194 /**
195  * drm_edid_is_valid - sanity check EDID data
196  * @edid: EDID data
197  *
198  * Sanity-check an entire EDID record (including extensions)
199  */
200 bool drm_edid_is_valid(struct edid *edid)
201 {
202 	int i;
203 	u8 *raw = (u8 *)edid;
204 
205 	if (!edid)
206 		return false;
207 
208 	for (i = 0; i <= edid->extensions; i++)
209 		if (!drm_edid_block_valid(raw + i * EDID_LENGTH))
210 			return false;
211 
212 	return true;
213 }
214 EXPORT_SYMBOL(drm_edid_is_valid);
215 
216 #define DDC_ADDR 0x50
217 #define DDC_SEGMENT_ADDR 0x30
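/*
 * DDC_ADDR is the standard EDID EEPROM address (7-bit 0x50); DDC_SEGMENT_ADDR
 * is the E-DDC segment pointer (7-bit 0x30).  Note that the probe routine
 * below never writes the segment pointer and uses a single-byte offset, so
 * only the first two 128-byte blocks (segment 0) are actually reachable.
 */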
218 /**
219  * Get EDID information via I2C.
220  *
221  * \param adapter : i2c device adaptor
222  * \param buf     : EDID data buffer to be filled
223  * \param len     : EDID data buffer length
224  * \return 0 on success or -1 on failure.
225  *
226  * Try to fetch EDID information by calling i2c driver function.
227  */
228 static int
229 drm_do_probe_ddc_edid(struct i2c_adapter *adapter, unsigned char *buf,
230 		      int block, int len)
231 {
232 	unsigned char start = block * EDID_LENGTH;
233 	struct i2c_msg msgs[] = {
234 		{
235 			.addr	= DDC_ADDR,
236 			.flags	= 0,
237 			.len	= 1,
238 			.buf	= &start,
239 		}, {
240 			.addr	= DDC_ADDR,
241 			.flags	= I2C_M_RD,
242 			.len	= len,
243 			.buf	= buf + start,
244 		}
245 	};
246 
247 	if (i2c_transfer(adapter, msgs, 2) == 2)
248 		return 0;
249 
250 	return -1;
251 }
252 
253 static u8 *
254 drm_do_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter)
255 {
256 	int i, j = 0;
257 	u8 *block, *new;
258 
259 	if ((block = kmalloc(EDID_LENGTH, GFP_KERNEL)) == NULL)
260 		return NULL;
261 
262 	/* base block fetch */
263 	for (i = 0; i < 4; i++) {
264 		if (drm_do_probe_ddc_edid(adapter, block, 0, EDID_LENGTH))
265 			goto out;
266 		if (drm_edid_block_valid(block))
267 			break;
268 	}
269 	if (i == 4)
270 		goto carp;
271 
272 	/* if there's no extensions, we're done */
273 	if (block[0x7e] == 0)
274 		return block;
275 
276 	new = krealloc(block, (block[0x7e] + 1) * EDID_LENGTH, GFP_KERNEL);
277 	if (!new)
278 		goto out;
279 	block = new;
280 
281 	for (j = 1; j <= block[0x7e]; j++) {
282 		for (i = 0; i < 4; i++) {
283 			if (drm_do_probe_ddc_edid(adapter, block, j,
284 						  EDID_LENGTH))
285 				goto out;
286 			if (drm_edid_block_valid(block + j * EDID_LENGTH))
287 				break;
288 		}
289 		if (i == 4)
290 			goto carp;
291 	}
292 
293 	return block;
294 
295 carp:
296 	dev_warn(connector->dev->dev, "%s: EDID block %d invalid.\n",
297 		 drm_get_connector_name(connector), j);
298 
299 out:
300 	kfree(block);
301 	return NULL;
302 }
303 
304 /**
305  * Probe DDC presence.
306  *
307  * \param adapter : i2c device adaptor
308  * \return 1 on success
309  */
310 static bool
311 drm_probe_ddc(struct i2c_adapter *adapter)
312 {
313 	unsigned char out;
314 
315 	return (drm_do_probe_ddc_edid(adapter, &out, 0, 1) == 0);
316 }
317 
318 /**
319  * drm_get_edid - get EDID data, if available
320  * @connector: connector we're probing
321  * @adapter: i2c adapter to use for DDC
322  *
323  * Poke the given i2c channel to grab EDID data if possible.  If found,
324  * attach it to the connector.
325  *
326  * Return edid data or NULL if we couldn't find any.
327  */
328 struct edid *drm_get_edid(struct drm_connector *connector,
329 			  struct i2c_adapter *adapter)
330 {
331 	struct edid *edid = NULL;
332 
333 	if (drm_probe_ddc(adapter))
334 		edid = (struct edid *)drm_do_get_edid(connector, adapter);
335 
336 	connector->display_info.raw_edid = (char *)edid;
337 
338 	return edid;
339 
340 }
341 EXPORT_SYMBOL(drm_get_edid);
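/*
 * Sketch of typical use from a driver's ->get_modes() hook (illustrative
 * only; "ddc_adapter" is a placeholder for wherever the driver keeps its
 * DDC i2c_adapter):
 *
 *	struct edid *edid = drm_get_edid(connector, ddc_adapter);
 *	int count = 0;
 *
 *	if (edid) {
 *		count = drm_add_edid_modes(connector, edid);
 *		connector->display_info.raw_edid = NULL;
 *		kfree(edid);
 *	}
 *	return count;
 */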
342 
343 /*** EDID parsing ***/
344 
345 /**
346  * edid_vendor - match a string against EDID's obfuscated vendor field
347  * @edid: EDID to match
348  * @vendor: vendor string
349  *
350  * Returns true if @vendor is in @edid, false otherwise
351  */
352 static bool edid_vendor(struct edid *edid, char *vendor)
353 {
354 	char edid_vendor[3];
355 
356 	edid_vendor[0] = ((edid->mfg_id[0] & 0x7c) >> 2) + '@';
357 	edid_vendor[1] = (((edid->mfg_id[0] & 0x3) << 3) |
358 			  ((edid->mfg_id[1] & 0xe0) >> 5)) + '@';
359 	edid_vendor[2] = (edid->mfg_id[1] & 0x1f) + '@';
360 
361 	return !strncmp(edid_vendor, vendor, 3);
362 }
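/*
 * Example: mfg_id bytes { 0x4c, 0x2d } unpack to the 5-bit letters 19, 1, 13;
 * adding '@' (0x40) maps 1..26 onto 'A'..'Z', giving "SAM".
 */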
363 
364 /**
365  * edid_get_quirks - return quirk flags for a given EDID
366  * @edid: EDID to process
367  *
368  * This tells subsequent routines what fixes they need to apply.
369  */
370 static u32 edid_get_quirks(struct edid *edid)
371 {
372 	struct edid_quirk *quirk;
373 	int i;
374 
375 	for (i = 0; i < ARRAY_SIZE(edid_quirk_list); i++) {
376 		quirk = &edid_quirk_list[i];
377 
378 		if (edid_vendor(edid, quirk->vendor) &&
379 		    (EDID_PRODUCT_ID(edid) == quirk->product_id))
380 			return quirk->quirks;
381 	}
382 
383 	return 0;
384 }
385 
386 #define MODE_SIZE(m) ((m)->hdisplay * (m)->vdisplay)
387 #define MODE_REFRESH_DIFF(m, r) (abs((m)->vrefresh - (r)))
388 
389 /**
390  * edid_fixup_preferred - set preferred modes based on quirk list
391  * @connector: has mode list to fix up
392  * @quirks: quirks list
393  *
394  * Walk the mode list for @connector, clearing the preferred status
395  * on existing modes and setting it anew for the right mode ala @quirks.
396  */
397 static void edid_fixup_preferred(struct drm_connector *connector,
398 				 u32 quirks)
399 {
400 	struct drm_display_mode *t, *cur_mode, *preferred_mode;
401 	int target_refresh = 0;
402 
403 	if (list_empty(&connector->probed_modes))
404 		return;
405 
406 	if (quirks & EDID_QUIRK_PREFER_LARGE_60)
407 		target_refresh = 60;
408 	if (quirks & EDID_QUIRK_PREFER_LARGE_75)
409 		target_refresh = 75;
410 
411 	preferred_mode = list_first_entry(&connector->probed_modes,
412 					  struct drm_display_mode, head);
413 
414 	list_for_each_entry_safe(cur_mode, t, &connector->probed_modes, head) {
415 		cur_mode->type &= ~DRM_MODE_TYPE_PREFERRED;
416 
417 		if (cur_mode == preferred_mode)
418 			continue;
419 
420 		/* Largest mode is preferred */
421 		if (MODE_SIZE(cur_mode) > MODE_SIZE(preferred_mode))
422 			preferred_mode = cur_mode;
423 
424 		/* At a given size, try to get closest to target refresh */
425 		if ((MODE_SIZE(cur_mode) == MODE_SIZE(preferred_mode)) &&
426 		    MODE_REFRESH_DIFF(cur_mode, target_refresh) <
427 		    MODE_REFRESH_DIFF(preferred_mode, target_refresh)) {
428 			preferred_mode = cur_mode;
429 		}
430 	}
431 
432 	preferred_mode->type |= DRM_MODE_TYPE_PREFERRED;
433 }
434 
435 struct drm_display_mode *drm_mode_find_dmt(struct drm_device *dev,
436 					   int hsize, int vsize, int fresh)
437 {
438 	int i;
439 	struct drm_display_mode *ptr, *mode;
440 
441 	mode = NULL;
442 	for (i = 0; i < drm_num_dmt_modes; i++) {
443 		ptr = &drm_dmt_modes[i];
444 		if (hsize == ptr->hdisplay &&
445 			vsize == ptr->vdisplay &&
446 			fresh == drm_mode_vrefresh(ptr)) {
447 			/* get the expected default mode */
448 			mode = drm_mode_duplicate(dev, ptr);
449 			break;
450 		}
451 	}
452 	return mode;
453 }
454 EXPORT_SYMBOL(drm_mode_find_dmt);
455 
456 typedef void detailed_cb(struct detailed_timing *timing, void *closure);
457 
458 static void
459 cea_for_each_detailed_block(u8 *ext, detailed_cb *cb, void *closure)
460 {
461 	int i, n = 0;
462 	u8 rev = ext[0x01], d = ext[0x02];
463 	u8 *det_base = ext + d;
464 
465 	switch (rev) {
466 	case 0:
467 		/* can't happen */
468 		return;
469 	case 1:
470 		/* have to infer how many blocks we have, check pixel clock */
471 		for (i = 0; i < 6; i++)
472 			if (det_base[18*i] || det_base[18*i+1])
473 				n++;
474 		break;
475 	default:
476 		/* explicit count */
477 		n = min(ext[0x03] & 0x0f, 6);
478 		break;
479 	}
480 
481 	for (i = 0; i < n; i++)
482 		cb((struct detailed_timing *)(det_base + 18 * i), closure);
483 }
484 
485 static void
486 vtb_for_each_detailed_block(u8 *ext, detailed_cb *cb, void *closure)
487 {
488 	unsigned int i, n = min((int)ext[0x02], 6);
489 	u8 *det_base = ext + 5;
490 
491 	if (ext[0x01] != 1)
492 		return; /* unknown version */
493 
494 	for (i = 0; i < n; i++)
495 		cb((struct detailed_timing *)(det_base + 18 * i), closure);
496 }
497 
498 static void
499 drm_for_each_detailed_block(u8 *raw_edid, detailed_cb *cb, void *closure)
500 {
501 	int i;
502 	struct edid *edid = (struct edid *)raw_edid;
503 
504 	if (edid == NULL)
505 		return;
506 
507 	for (i = 0; i < EDID_DETAILED_TIMINGS; i++)
508 		cb(&(edid->detailed_timings[i]), closure);
509 
510 	for (i = 1; i <= raw_edid[0x7e]; i++) {
511 		u8 *ext = raw_edid + (i * EDID_LENGTH);
512 		switch (*ext) {
513 		case CEA_EXT:
514 			cea_for_each_detailed_block(ext, cb, closure);
515 			break;
516 		case VTB_EXT:
517 			vtb_for_each_detailed_block(ext, cb, closure);
518 			break;
519 		default:
520 			break;
521 		}
522 	}
523 }
524 
525 static void
526 is_rb(struct detailed_timing *t, void *data)
527 {
528 	u8 *r = (u8 *)t;
529 	if (r[3] == EDID_DETAIL_MONITOR_RANGE)
530 		if (r[15] & 0x10)
531 			*(bool *)data = true;
532 }
533 
534 /* EDID 1.4 defines this explicitly.  For EDID 1.3, we guess, badly. */
535 static bool
536 drm_monitor_supports_rb(struct edid *edid)
537 {
538 	if (edid->revision >= 4) {
539 		bool ret = false;
540 		drm_for_each_detailed_block((u8 *)edid, is_rb, &ret);
541 		return ret;
542 	}
543 
544 	return ((edid->input & DRM_EDID_INPUT_DIGITAL) != 0);
545 }
546 
547 static void
548 find_gtf2(struct detailed_timing *t, void *data)
549 {
550 	u8 *r = (u8 *)t;
551 	if (r[3] == EDID_DETAIL_MONITOR_RANGE && r[10] == 0x02)
552 		*(u8 **)data = r;
553 }
554 
555 /* Secondary GTF curve kicks in above some break frequency */
556 static int
557 drm_gtf2_hbreak(struct edid *edid)
558 {
559 	u8 *r = NULL;
560 	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
561 	return r ? (r[12] * 2) : 0;
562 }
563 
564 static int
565 drm_gtf2_2c(struct edid *edid)
566 {
567 	u8 *r = NULL;
568 	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
569 	return r ? r[13] : 0;
570 }
571 
572 static int
573 drm_gtf2_m(struct edid *edid)
574 {
575 	u8 *r = NULL;
576 	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
577 	return r ? (r[15] << 8) + r[14] : 0;
578 }
579 
580 static int
581 drm_gtf2_k(struct edid *edid)
582 {
583 	u8 *r = NULL;
584 	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
585 	return r ? r[16] : 0;
586 }
587 
588 static int
589 drm_gtf2_2j(struct edid *edid)
590 {
591 	u8 *r = NULL;
592 	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
593 	return r ? r[17] : 0;
594 }
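/*
 * The accessors above pull the secondary GTF parameters out of a monitor
 * range descriptor whose byte 10 is 0x02: byte 12 is the start break
 * frequency in 2 kHz units, byte 13 is C*2, bytes 14-15 are M (LSB first),
 * byte 16 is K and byte 17 is J*2.
 */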
595 
596 /**
597  * standard_timing_level - get std. timing level (CVT/GTF/DMT)
598  * @edid: EDID block to scan
599  */
600 static int standard_timing_level(struct edid *edid)
601 {
602 	if (edid->revision >= 2) {
603 		if (edid->revision >= 4 && (edid->features & DRM_EDID_FEATURE_DEFAULT_GTF))
604 			return LEVEL_CVT;
605 		if (drm_gtf2_hbreak(edid))
606 			return LEVEL_GTF2;
607 		return LEVEL_GTF;
608 	}
609 	return LEVEL_DMT;
610 }
611 
612 /*
613  * 0 is reserved.  The spec says 0x01 fill for unused timings.  Some old
614  * monitors fill with ascii space (0x20) instead.
615  */
616 static int
617 bad_std_timing(u8 a, u8 b)
618 {
619 	return (a == 0x00 && b == 0x00) ||
620 	       (a == 0x01 && b == 0x01) ||
621 	       (a == 0x20 && b == 0x20);
622 }
623 
624 /**
625  * drm_mode_std - convert standard mode info (width, height, refresh) into mode
626  * @connector: connector corresponding to this EDID
627  * @edid: EDID block the standard timing came from
628  * @t: standard timing params
629  * @revision: EDID revision of the source block
630  * Convert the standard timing params into a real mode using CVT/GTF/DMT.
631  */
632 static struct drm_display_mode *
633 drm_mode_std(struct drm_connector *connector, struct edid *edid,
634 	     struct std_timing *t, int revision)
635 {
636 	struct drm_device *dev = connector->dev;
637 	struct drm_display_mode *m, *mode = NULL;
638 	int hsize, vsize;
639 	int vrefresh_rate;
640 	unsigned aspect_ratio = (t->vfreq_aspect & EDID_TIMING_ASPECT_MASK)
641 		>> EDID_TIMING_ASPECT_SHIFT;
642 	unsigned vfreq = (t->vfreq_aspect & EDID_TIMING_VFREQ_MASK)
643 		>> EDID_TIMING_VFREQ_SHIFT;
644 	int timing_level = standard_timing_level(edid);
645 
646 	if (bad_std_timing(t->hsize, t->vfreq_aspect))
647 		return NULL;
648 
649 	/* According to the EDID spec, the hdisplay = hsize * 8 + 248 */
650 	hsize = t->hsize * 8 + 248;
651 	/* vrefresh_rate = vfreq + 60 */
652 	vrefresh_rate = vfreq + 60;
653 	/* the vdisplay is calculated based on the aspect ratio */
654 	if (aspect_ratio == 0) {
655 		if (revision < 3)
656 			vsize = hsize;
657 		else
658 			vsize = (hsize * 10) / 16;
659 	} else if (aspect_ratio == 1)
660 		vsize = (hsize * 3) / 4;
661 	else if (aspect_ratio == 2)
662 		vsize = (hsize * 4) / 5;
663 	else
664 		vsize = (hsize * 9) / 16;
665 
666 	/* HDTV hack, part 1 */
667 	if (vrefresh_rate == 60 &&
668 	    ((hsize == 1360 && vsize == 765) ||
669 	     (hsize == 1368 && vsize == 769))) {
670 		hsize = 1366;
671 		vsize = 768;
672 	}
673 
674 	/*
675 	 * If this connector already has a mode for this size and refresh
676 	 * rate (because it came from detailed or CVT info), use that
677 	 * instead.  This way we don't have to guess at interlace or
678 	 * reduced blanking.
679 	 */
680 	list_for_each_entry(m, &connector->probed_modes, head)
681 		if (m->hdisplay == hsize && m->vdisplay == vsize &&
682 		    drm_mode_vrefresh(m) == vrefresh_rate)
683 			return NULL;
684 
685 	/* HDTV hack, part 2 */
686 	if (hsize == 1366 && vsize == 768 && vrefresh_rate == 60) {
687 		mode = drm_cvt_mode(dev, 1366, 768, vrefresh_rate, 0, 0,
688 				    false);
689 		mode->hdisplay = 1366;
690 		mode->hsync_start = mode->hsync_start - 1;
691 		mode->hsync_end = mode->hsync_end - 1;
692 		return mode;
693 	}
694 
695 	/* check whether it can be found in default mode table */
696 	mode = drm_mode_find_dmt(dev, hsize, vsize, vrefresh_rate);
697 	if (mode)
698 		return mode;
699 
700 	switch (timing_level) {
701 	case LEVEL_DMT:
702 		break;
703 	case LEVEL_GTF:
704 		mode = drm_gtf_mode(dev, hsize, vsize, vrefresh_rate, 0, 0);
705 		break;
706 	case LEVEL_GTF2:
707 		/*
708 		 * This is potentially wrong if there's ever a monitor with
709 		 * more than one ranges section, each claiming a different
710 		 * secondary GTF curve.  Please don't do that.
711 		 */
712 		mode = drm_gtf_mode(dev, hsize, vsize, vrefresh_rate, 0, 0);
713 		if (drm_mode_hsync(mode) > drm_gtf2_hbreak(edid)) {
714 			kfree(mode);
715 			mode = drm_gtf_mode_complex(dev, hsize, vsize,
716 						    vrefresh_rate, 0, 0,
717 						    drm_gtf2_m(edid),
718 						    drm_gtf2_2c(edid),
719 						    drm_gtf2_k(edid),
720 						    drm_gtf2_2j(edid));
721 		}
722 		break;
723 	case LEVEL_CVT:
724 		mode = drm_cvt_mode(dev, hsize, vsize, vrefresh_rate, 0, 0,
725 				    false);
726 		break;
727 	}
728 	return mode;
729 }
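/*
 * Worked example of the decode above: the standard timing byte pair
 * (0x81, 0x40) gives hsize = 0x81 * 8 + 248 = 1280, aspect code 1 (4:3)
 * so vsize = 960, and a vfreq field of 0 so 60 Hz, i.e. 1280x960@60.
 */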
730 
731 /*
732  * EDID is delightfully ambiguous about how interlaced modes are to be
733  * encoded.  Our internal representation is of frame height, but some
734  * HDTV detailed timings are encoded as field height.
735  *
736  * The format list here is from CEA, in frame size.  Technically we
737  * should be checking refresh rate too.  Whatever.
738  */
739 static void
740 drm_mode_do_interlace_quirk(struct drm_display_mode *mode,
741 			    struct detailed_pixel_timing *pt)
742 {
743 	int i;
744 	static const struct {
745 		int w, h;
746 	} cea_interlaced[] = {
747 		{ 1920, 1080 },
748 		{  720,  480 },
749 		{ 1440,  480 },
750 		{ 2880,  480 },
751 		{  720,  576 },
752 		{ 1440,  576 },
753 		{ 2880,  576 },
754 	};
755 
756 	if (!(pt->misc & DRM_EDID_PT_INTERLACED))
757 		return;
758 
759 	for (i = 0; i < ARRAY_SIZE(cea_interlaced); i++) {
760 		if ((mode->hdisplay == cea_interlaced[i].w) &&
761 		    (mode->vdisplay == cea_interlaced[i].h / 2)) {
762 			mode->vdisplay *= 2;
763 			mode->vsync_start *= 2;
764 			mode->vsync_end *= 2;
765 			mode->vtotal *= 2;
766 			mode->vtotal |= 1;
767 		}
768 	}
769 
770 	mode->flags |= DRM_MODE_FLAG_INTERLACE;
771 }
772 
773 /**
774  * drm_mode_detailed - create a new mode from an EDID detailed timing section
775  * @dev: DRM device (needed to create new mode)
776  * @edid: EDID block
777  * @timing: EDID detailed timing info
778  * @quirks: quirks to apply
779  *
780  * An EDID detailed timing block contains enough info for us to create and
781  * return a new struct drm_display_mode.
782  */
783 static struct drm_display_mode *drm_mode_detailed(struct drm_device *dev,
784 						  struct edid *edid,
785 						  struct detailed_timing *timing,
786 						  u32 quirks)
787 {
788 	struct drm_display_mode *mode;
789 	struct detailed_pixel_timing *pt = &timing->data.pixel_data;
790 	unsigned hactive = (pt->hactive_hblank_hi & 0xf0) << 4 | pt->hactive_lo;
791 	unsigned vactive = (pt->vactive_vblank_hi & 0xf0) << 4 | pt->vactive_lo;
792 	unsigned hblank = (pt->hactive_hblank_hi & 0xf) << 8 | pt->hblank_lo;
793 	unsigned vblank = (pt->vactive_vblank_hi & 0xf) << 8 | pt->vblank_lo;
794 	unsigned hsync_offset = (pt->hsync_vsync_offset_pulse_width_hi & 0xc0) << 2 | pt->hsync_offset_lo;
795 	unsigned hsync_pulse_width = (pt->hsync_vsync_offset_pulse_width_hi & 0x30) << 4 | pt->hsync_pulse_width_lo;
796 	unsigned vsync_offset = (pt->hsync_vsync_offset_pulse_width_hi & 0xc) >> 2 | pt->vsync_offset_pulse_width_lo >> 4;
797 	unsigned vsync_pulse_width = (pt->hsync_vsync_offset_pulse_width_hi & 0x3) << 4 | (pt->vsync_offset_pulse_width_lo & 0xf);
798 
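	/*
	 * Example of the packing above: a 1920-wide mode with a 280-pixel
	 * horizontal blank stores hactive_lo = 0x80, hblank_lo = 0x18 and
	 * hactive_hblank_hi = 0x71 (the two high nibbles share one byte).
	 */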
799 	/* ignore tiny modes */
800 	if (hactive < 64 || vactive < 64)
801 		return NULL;
802 
803 	if (pt->misc & DRM_EDID_PT_STEREO) {
804 		printk(KERN_WARNING "stereo mode not supported\n");
805 		return NULL;
806 	}
807 	if (!(pt->misc & DRM_EDID_PT_SEPARATE_SYNC)) {
808 		printk(KERN_WARNING "composite sync not supported\n");
809 	}
810 
811 	/* it is incorrect if hsync/vsync width is zero */
812 	if (!hsync_pulse_width || !vsync_pulse_width) {
813 		DRM_DEBUG_KMS("Incorrect Detailed timing. "
814 				"Wrong Hsync/Vsync pulse width\n");
815 		return NULL;
816 	}
817 	mode = drm_mode_create(dev);
818 	if (!mode)
819 		return NULL;
820 
821 	mode->type = DRM_MODE_TYPE_DRIVER;
822 
823 	if (quirks & EDID_QUIRK_135_CLOCK_TOO_HIGH)
824 		timing->pixel_clock = cpu_to_le16(1088);
825 
826 	mode->clock = le16_to_cpu(timing->pixel_clock) * 10;
827 
828 	mode->hdisplay = hactive;
829 	mode->hsync_start = mode->hdisplay + hsync_offset;
830 	mode->hsync_end = mode->hsync_start + hsync_pulse_width;
831 	mode->htotal = mode->hdisplay + hblank;
832 
833 	mode->vdisplay = vactive;
834 	mode->vsync_start = mode->vdisplay + vsync_offset;
835 	mode->vsync_end = mode->vsync_start + vsync_pulse_width;
836 	mode->vtotal = mode->vdisplay + vblank;
837 
838 	/* Some EDIDs have bogus h/vtotal values */
839 	if (mode->hsync_end > mode->htotal)
840 		mode->htotal = mode->hsync_end + 1;
841 	if (mode->vsync_end > mode->vtotal)
842 		mode->vtotal = mode->vsync_end + 1;
843 
844 	drm_mode_do_interlace_quirk(mode, pt);
845 
846 	drm_mode_set_name(mode);
847 
848 	if (quirks & EDID_QUIRK_DETAILED_SYNC_PP) {
849 		pt->misc |= DRM_EDID_PT_HSYNC_POSITIVE | DRM_EDID_PT_VSYNC_POSITIVE;
850 	}
851 
852 	mode->flags |= (pt->misc & DRM_EDID_PT_HSYNC_POSITIVE) ?
853 		DRM_MODE_FLAG_PHSYNC : DRM_MODE_FLAG_NHSYNC;
854 	mode->flags |= (pt->misc & DRM_EDID_PT_VSYNC_POSITIVE) ?
855 		DRM_MODE_FLAG_PVSYNC : DRM_MODE_FLAG_NVSYNC;
856 
857 	mode->width_mm = pt->width_mm_lo | (pt->width_height_mm_hi & 0xf0) << 4;
858 	mode->height_mm = pt->height_mm_lo | (pt->width_height_mm_hi & 0xf) << 8;
859 
860 	if (quirks & EDID_QUIRK_DETAILED_IN_CM) {
861 		mode->width_mm *= 10;
862 		mode->height_mm *= 10;
863 	}
864 
865 	if (quirks & EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE) {
866 		mode->width_mm = edid->width_cm * 10;
867 		mode->height_mm = edid->height_cm * 10;
868 	}
869 
870 	return mode;
871 }
872 
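/*
 * Heuristic match for CVT reduced-blanking timings (v1): a fixed 160-pixel
 * horizontal blank, a 32-pixel hsync pulse and a 3-line vertical front porch.
 */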
873 static bool
874 mode_is_rb(struct drm_display_mode *mode)
875 {
876 	return (mode->htotal - mode->hdisplay == 160) &&
877 	       (mode->hsync_end - mode->hdisplay == 80) &&
878 	       (mode->hsync_end - mode->hsync_start == 32) &&
879 	       (mode->vsync_start - mode->vdisplay == 3);
880 }
881 
882 static bool
883 mode_in_hsync_range(struct drm_display_mode *mode, struct edid *edid, u8 *t)
884 {
885 	int hsync, hmin, hmax;
886 
887 	hmin = t[7];
888 	if (edid->revision >= 4)
889 	    hmin += ((t[4] & 0x04) ? 255 : 0);
890 	hmax = t[8];
891 	if (edid->revision >= 4)
892 	    hmax += ((t[4] & 0x08) ? 255 : 0);
893 	hsync = drm_mode_hsync(mode);
894 
895 	return (hsync <= hmax && hsync >= hmin);
896 }
897 
898 static bool
899 mode_in_vsync_range(struct drm_display_mode *mode, struct edid *edid, u8 *t)
900 {
901 	int vsync, vmin, vmax;
902 
903 	vmin = t[5];
904 	if (edid->revision >= 4)
905 	    vmin += ((t[4] & 0x01) ? 255 : 0);
906 	vmax = t[6];
907 	if (edid->revision >= 4)
908 	    vmax += ((t[4] & 0x02) ? 255 : 0);
909 	vsync = drm_mode_vrefresh(mode);
910 
911 	return (vsync <= vmax && vsync >= vmin);
912 }
913 
914 static u32
915 range_pixel_clock(struct edid *edid, u8 *t)
916 {
917 	/* unspecified */
918 	if (t[9] == 0 || t[9] == 255)
919 		return 0;
920 
921 	/* 1.4 with CVT support gives us real precision, yay */
922 	if (edid->revision >= 4 && t[10] == 0x04)
923 		return (t[9] * 10000) - ((t[12] >> 2) * 250);
924 
925 	/* 1.3 is pathetic, so fuzz up a bit */
926 	return t[9] * 10000 + 5001;
927 }
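/*
 * Example: a stored max-clock byte of 11 means 11 x 10 MHz.  The EDID 1.3
 * path above returns 110000 + 5001 = 115001 kHz, so modes just past the
 * coarse 10 MHz granularity are not rejected; the 1.4 CVT path instead
 * subtracts a precision offset of (t[12] >> 2) * 250 kHz.
 */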
928 
929 static bool
930 mode_in_range(struct drm_display_mode *mode, struct edid *edid,
931 	      struct detailed_timing *timing)
932 {
933 	u32 max_clock;
934 	u8 *t = (u8 *)timing;
935 
936 	if (!mode_in_hsync_range(mode, edid, t))
937 		return false;
938 
939 	if (!mode_in_vsync_range(mode, edid, t))
940 		return false;
941 
942 	if ((max_clock = range_pixel_clock(edid, t)))
943 		if (mode->clock > max_clock)
944 			return false;
945 
946 	/* 1.4 max horizontal check */
947 	if (edid->revision >= 4 && t[10] == 0x04)
948 		if (t[13] && mode->hdisplay > 8 * (t[13] + (256 * (t[12]&0x3))))
949 			return false;
950 
951 	if (mode_is_rb(mode) && !drm_monitor_supports_rb(edid))
952 		return false;
953 
954 	return true;
955 }
956 
957 /*
958  * XXX If drm_dmt_modes ever regrows the CVT-R modes (and it will) this will
959  * need to account for them.
960  */
961 static int
962 drm_gtf_modes_for_range(struct drm_connector *connector, struct edid *edid,
963 			struct detailed_timing *timing)
964 {
965 	int i, modes = 0;
966 	struct drm_display_mode *newmode;
967 	struct drm_device *dev = connector->dev;
968 
969 	for (i = 0; i < drm_num_dmt_modes; i++) {
970 		if (mode_in_range(drm_dmt_modes + i, edid, timing)) {
971 			newmode = drm_mode_duplicate(dev, &drm_dmt_modes[i]);
972 			if (newmode) {
973 				drm_mode_probed_add(connector, newmode);
974 				modes++;
975 			}
976 		}
977 	}
978 
979 	return modes;
980 }
981 
982 static void
983 do_inferred_modes(struct detailed_timing *timing, void *c)
984 {
985 	struct detailed_mode_closure *closure = c;
986 	struct detailed_non_pixel *data = &timing->data.other_data;
987 	int gtf = (closure->edid->features & DRM_EDID_FEATURE_DEFAULT_GTF);
988 
989 	if (gtf && data->type == EDID_DETAIL_MONITOR_RANGE)
990 		closure->modes += drm_gtf_modes_for_range(closure->connector,
991 							  closure->edid,
992 							  timing);
993 }
994 
995 static int
996 add_inferred_modes(struct drm_connector *connector, struct edid *edid)
997 {
998 	struct detailed_mode_closure closure = {
999 		connector, edid, 0, 0, 0
1000 	};
1001 
1002 	if (version_greater(edid, 1, 0))
1003 		drm_for_each_detailed_block((u8 *)edid, do_inferred_modes,
1004 					    &closure);
1005 
1006 	return closure.modes;
1007 }
1008 
1009 static int
1010 drm_est3_modes(struct drm_connector *connector, struct detailed_timing *timing)
1011 {
1012 	int i, j, m, modes = 0;
1013 	struct drm_display_mode *mode;
1014 	u8 *est = ((u8 *)timing) + 5;
1015 
1016 	for (i = 0; i < 6; i++) {
1017 		for (j = 7; j > 0; j--) {
1018 			m = (i * 8) + (7 - j);
1019 			if (m >= ARRAY_SIZE(est3_modes))
1020 				break;
1021 			if (est[i] & (1 << j)) {
1022 				mode = drm_mode_find_dmt(connector->dev,
1023 							 est3_modes[m].w,
1024 							 est3_modes[m].h,
1025 							 est3_modes[m].r
1026 							 /*, est3_modes[m].rb */);
1027 				if (mode) {
1028 					drm_mode_probed_add(connector, mode);
1029 					modes++;
1030 				}
1031 			}
1032 		}
1033 	}
1034 
1035 	return modes;
1036 }
1037 
1038 static void
1039 do_established_modes(struct detailed_timing *timing, void *c)
1040 {
1041 	struct detailed_mode_closure *closure = c;
1042 	struct detailed_non_pixel *data = &timing->data.other_data;
1043 
1044 	if (data->type == EDID_DETAIL_EST_TIMINGS)
1045 		closure->modes += drm_est3_modes(closure->connector, timing);
1046 }
1047 
1048 /**
1049  * add_established_modes - get est. modes from EDID and add them
1050  * @connector: connector to add the modes to
1051  * @edid: EDID block to scan
1052  * Each EDID block contains a bitmap of the supported "established modes" list
1053  * (defined above).  Tease them out and add them to the connector's mode list.
1054  */
1055 static int
1056 add_established_modes(struct drm_connector *connector, struct edid *edid)
1057 {
1058 	struct drm_device *dev = connector->dev;
1059 	unsigned long est_bits = edid->established_timings.t1 |
1060 		(edid->established_timings.t2 << 8) |
1061 		((edid->established_timings.mfg_rsvd & 0x80) << 9);
1062 	int i, modes = 0;
1063 	struct detailed_mode_closure closure = {
1064 		connector, edid, 0, 0, 0
1065 	};
1066 
1067 	for (i = 0; i <= EDID_EST_TIMINGS; i++) {
1068 		if (est_bits & (1<<i)) {
1069 			struct drm_display_mode *newmode;
1070 			newmode = drm_mode_duplicate(dev, &edid_est_modes[i]);
1071 			if (newmode) {
1072 				drm_mode_probed_add(connector, newmode);
1073 				modes++;
1074 			}
1075 		}
1076 	}
1077 
1078 	if (version_greater(edid, 1, 0))
1079 		    drm_for_each_detailed_block((u8 *)edid,
1080 						do_established_modes, &closure);
1081 
1082 	return modes + closure.modes;
1083 }
1084 
1085 static void
1086 do_standard_modes(struct detailed_timing *timing, void *c)
1087 {
1088 	struct detailed_mode_closure *closure = c;
1089 	struct detailed_non_pixel *data = &timing->data.other_data;
1090 	struct drm_connector *connector = closure->connector;
1091 	struct edid *edid = closure->edid;
1092 
1093 	if (data->type == EDID_DETAIL_STD_MODES) {
1094 		int i;
1095 		for (i = 0; i < 6; i++) {
1096 			struct std_timing *std;
1097 			struct drm_display_mode *newmode;
1098 
1099 			std = &data->data.timings[i];
1100 			newmode = drm_mode_std(connector, edid, std,
1101 					       edid->revision);
1102 			if (newmode) {
1103 				drm_mode_probed_add(connector, newmode);
1104 				closure->modes++;
1105 			}
1106 		}
1107 	}
1108 }
1109 
1110 /**
1111  * add_standard_modes - get std. modes from EDID and add them
1112  * @connector: connector to add the modes to
1113  * @edid: EDID block to scan
1114  * Standard modes can be calculated using the appropriate standard (DMT,
1115  * GTF or CVT).  Grab them from @edid and add them to the list.
1116  */
1117 static int
1118 add_standard_modes(struct drm_connector *connector, struct edid *edid)
1119 {
1120 	int i, modes = 0;
1121 	struct detailed_mode_closure closure = {
1122 		connector, edid, 0, 0, 0
1123 	};
1124 
1125 	for (i = 0; i < EDID_STD_TIMINGS; i++) {
1126 		struct drm_display_mode *newmode;
1127 
1128 		newmode = drm_mode_std(connector, edid,
1129 				       &edid->standard_timings[i],
1130 				       edid->revision);
1131 		if (newmode) {
1132 			drm_mode_probed_add(connector, newmode);
1133 			modes++;
1134 		}
1135 	}
1136 
1137 	if (version_greater(edid, 1, 0))
1138 		drm_for_each_detailed_block((u8 *)edid, do_standard_modes,
1139 					    &closure);
1140 
1141 	/* XXX should also look for standard codes in VTB blocks */
1142 
1143 	return modes + closure.modes;
1144 }
1145 
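/*
 * CVT 3-byte codes, as decoded below: code[0] plus the high nibble of code[1]
 * form ((vertical lines / 2) - 1), bits 3:2 of code[1] select the aspect
 * ratio, and code[2] is a bitmask of supported refresh rates.  For example
 * { 0x1b, 0x24, ... } decodes to 1080 lines at 16:9, i.e. 1920x1080.
 */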
1146 static int drm_cvt_modes(struct drm_connector *connector,
1147 			 struct detailed_timing *timing)
1148 {
1149 	int i, j, modes = 0;
1150 	struct drm_display_mode *newmode;
1151 	struct drm_device *dev = connector->dev;
1152 	struct cvt_timing *cvt;
1153 	const int rates[] = { 60, 85, 75, 60, 50 };
1154 	const u8 empty[3] = { 0, 0, 0 };
1155 
1156 	for (i = 0; i < 4; i++) {
1157 		int uninitialized_var(width), height;
1158 		cvt = &(timing->data.other_data.data.cvt[i]);
1159 
1160 		if (!memcmp(cvt->code, empty, 3))
1161 			continue;
1162 
1163 		height = (cvt->code[0] + ((cvt->code[1] & 0xf0) << 4) + 1) * 2;
1164 		switch (cvt->code[1] & 0x0c) {
1165 		case 0x00:
1166 			width = height * 4 / 3;
1167 			break;
1168 		case 0x04:
1169 			width = height * 16 / 9;
1170 			break;
1171 		case 0x08:
1172 			width = height * 16 / 10;
1173 			break;
1174 		case 0x0c:
1175 			width = height * 15 / 9;
1176 			break;
1177 		}
1178 
1179 		for (j = 1; j < 5; j++) {
1180 			if (cvt->code[2] & (1 << j)) {
1181 				newmode = drm_cvt_mode(dev, width, height,
1182 						       rates[j], j == 0,
1183 						       false, false);
1184 				if (newmode) {
1185 					drm_mode_probed_add(connector, newmode);
1186 					modes++;
1187 				}
1188 			}
1189 		}
1190 	}
1191 
1192 	return modes;
1193 }
1194 
1195 static void
1196 do_cvt_mode(struct detailed_timing *timing, void *c)
1197 {
1198 	struct detailed_mode_closure *closure = c;
1199 	struct detailed_non_pixel *data = &timing->data.other_data;
1200 
1201 	if (data->type == EDID_DETAIL_CVT_3BYTE)
1202 		closure->modes += drm_cvt_modes(closure->connector, timing);
1203 }
1204 
1205 static int
1206 add_cvt_modes(struct drm_connector *connector, struct edid *edid)
1207 {
1208 	struct detailed_mode_closure closure = {
1209 		connector, edid, 0, 0, 0
1210 	};
1211 
1212 	if (version_greater(edid, 1, 2))
1213 		drm_for_each_detailed_block((u8 *)edid, do_cvt_mode, &closure);
1214 
1215 	/* XXX should also look for CVT codes in VTB blocks */
1216 
1217 	return closure.modes;
1218 }
1219 
1220 static void
1221 do_detailed_mode(struct detailed_timing *timing, void *c)
1222 {
1223 	struct detailed_mode_closure *closure = c;
1224 	struct drm_display_mode *newmode;
1225 
1226 	if (timing->pixel_clock) {
1227 		newmode = drm_mode_detailed(closure->connector->dev,
1228 					    closure->edid, timing,
1229 					    closure->quirks);
1230 		if (!newmode)
1231 			return;
1232 
1233 		if (closure->preferred)
1234 			newmode->type |= DRM_MODE_TYPE_PREFERRED;
1235 
1236 		drm_mode_probed_add(closure->connector, newmode);
1237 		closure->modes++;
1238 		closure->preferred = 0;
1239 	}
1240 }
1241 
1242 /*
1243  * add_detailed_modes - Add modes from detailed timings
1244  * @connector: attached connector
1245  * @edid: EDID block to scan
1246  * @quirks: quirks to apply
1247  */
1248 static int
1249 add_detailed_modes(struct drm_connector *connector, struct edid *edid,
1250 		   u32 quirks)
1251 {
1252 	struct detailed_mode_closure closure = {
1253 		connector,
1254 		edid,
1255 		1,
1256 		quirks,
1257 		0
1258 	};
1259 
1260 	if (closure.preferred && !version_greater(edid, 1, 3))
1261 		closure.preferred =
1262 		    (edid->features & DRM_EDID_FEATURE_PREFERRED_TIMING);
1263 
1264 	drm_for_each_detailed_block((u8 *)edid, do_detailed_mode, &closure);
1265 
1266 	return closure.modes;
1267 }
1268 
1269 #define HDMI_IDENTIFIER 0x000C03
1270 #define AUDIO_BLOCK	0x01
1271 #define VENDOR_BLOCK    0x03
1272 #define EDID_BASIC_AUDIO	(1 << 6)
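/*
 * CEA-861 data blocks start with a header byte holding the tag code in bits
 * 7:5 and the payload length in bits 4:0, which is how the loops below walk
 * the data block collection.  HDMI_IDENTIFIER is the HDMI vendor IEEE OUI
 * (0x000C03), stored little-endian in the Vendor Specific Data Block.
 */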
1273 
1274 /**
1275  * Search EDID for CEA extension block.
1276  */
1277 static u8 *drm_find_cea_extension(struct edid *edid)
1278 {
1279 	u8 *edid_ext = NULL;
1280 	int i;
1281 
1282 	/* No EDID or EDID extensions */
1283 	if (edid == NULL || edid->extensions == 0)
1284 		return NULL;
1285 
1286 	/* Find CEA extension */
1287 	for (i = 0; i < edid->extensions; i++) {
1288 		edid_ext = (u8 *)edid + EDID_LENGTH * (i + 1);
1289 		if (edid_ext[0] == CEA_EXT)
1290 			break;
1291 	}
1292 
1293 	if (i == edid->extensions)
1294 		return NULL;
1295 
1296 	return edid_ext;
1297 }
1298 
1299 /**
1300  * drm_detect_hdmi_monitor - detect whether monitor is HDMI.
1301  * @edid: monitor EDID information
1302  *
1303  * Parse the CEA extension according to CEA-861-B.
1304  * Return true if HDMI, false if not or unknown.
1305  */
1306 bool drm_detect_hdmi_monitor(struct edid *edid)
1307 {
1308 	u8 *edid_ext;
1309 	int i, hdmi_id;
1310 	int start_offset, end_offset;
1311 	bool is_hdmi = false;
1312 
1313 	edid_ext = drm_find_cea_extension(edid);
1314 	if (!edid_ext)
1315 		goto end;
1316 
1317 	/* Data block offset in CEA extension block */
1318 	start_offset = 4;
1319 	end_offset = edid_ext[2];
1320 
1321 	/*
1322 	 * Because the HDMI identifier is in the Vendor Specific Block,
1323 	 * search for it in all data blocks of the CEA extension.
1324 	 */
1325 	for (i = start_offset; i < end_offset;
1326 		/* Increased by data block len */
1327 		i += ((edid_ext[i] & 0x1f) + 1)) {
1328 		/* Find vendor specific block */
1329 		if ((edid_ext[i] >> 5) == VENDOR_BLOCK) {
1330 			hdmi_id = edid_ext[i + 1] | (edid_ext[i + 2] << 8) |
1331 				  edid_ext[i + 3] << 16;
1332 			/* Find HDMI identifier */
1333 			if (hdmi_id == HDMI_IDENTIFIER)
1334 				is_hdmi = true;
1335 			break;
1336 		}
1337 	}
1338 
1339 end:
1340 	return is_hdmi;
1341 }
1342 EXPORT_SYMBOL(drm_detect_hdmi_monitor);
1343 
1344 /**
1345  * drm_detect_monitor_audio - check monitor audio capability
1346  * @edid: monitor EDID information
1347  *
1348  * The monitor must have a CEA extension block.
1349  * If the monitor has 'basic audio' but no CEA audio blocks, it's 'basic
1350  * audio' only.  If there is any audio extension block and supported
1351  * audio format, assume at least 'basic audio' support, even if 'basic
1352  * audio' is not defined in the EDID.
1353  */
1354 bool drm_detect_monitor_audio(struct edid *edid)
1355 {
1356 	u8 *edid_ext;
1357 	int i, j;
1358 	bool has_audio = false;
1359 	int start_offset, end_offset;
1360 
1361 	edid_ext = drm_find_cea_extension(edid);
1362 	if (!edid_ext)
1363 		goto end;
1364 
1365 	has_audio = ((edid_ext[3] & EDID_BASIC_AUDIO) != 0);
1366 
1367 	if (has_audio) {
1368 		DRM_DEBUG_KMS("Monitor has basic audio support\n");
1369 		goto end;
1370 	}
1371 
1372 	/* Data block offset in CEA extension block */
1373 	start_offset = 4;
1374 	end_offset = edid_ext[2];
1375 
1376 	for (i = start_offset; i < end_offset;
1377 			i += ((edid_ext[i] & 0x1f) + 1)) {
1378 		if ((edid_ext[i] >> 5) == AUDIO_BLOCK) {
1379 			has_audio = true;
1380 			for (j = 1; j < (edid_ext[i] & 0x1f); j += 3)
1381 				DRM_DEBUG_KMS("CEA audio format %d\n",
1382 					      (edid_ext[i + j] >> 3) & 0xf);
1383 			goto end;
1384 		}
1385 	}
1386 end:
1387 	return has_audio;
1388 }
1389 EXPORT_SYMBOL(drm_detect_monitor_audio);
1390 
1391 /**
1392  * drm_add_edid_modes - add modes from EDID data, if available
1393  * @connector: connector we're probing
1394  * @edid: edid data
1395  *
1396  * Add the specified modes to the connector's mode list.
1397  *
1398  * Return number of modes added or 0 if we couldn't find any.
1399  */
1400 int drm_add_edid_modes(struct drm_connector *connector, struct edid *edid)
1401 {
1402 	int num_modes = 0;
1403 	u32 quirks;
1404 
1405 	if (edid == NULL) {
1406 		return 0;
1407 	}
1408 	if (!drm_edid_is_valid(edid)) {
1409 		dev_warn(connector->dev->dev, "%s: EDID invalid.\n",
1410 			 drm_get_connector_name(connector));
1411 		return 0;
1412 	}
1413 
1414 	quirks = edid_get_quirks(edid);
1415 
1416 	/*
1417 	 * EDID spec says modes should be preferred in this order:
1418 	 * - preferred detailed mode
1419 	 * - other detailed modes from base block
1420 	 * - detailed modes from extension blocks
1421 	 * - CVT 3-byte code modes
1422 	 * - standard timing codes
1423 	 * - established timing codes
1424 	 * - modes inferred from GTF or CVT range information
1425 	 *
1426 	 * We get this pretty much right.
1427 	 *
1428 	 * XXX order for additional mode types in extension blocks?
1429 	 */
1430 	num_modes += add_detailed_modes(connector, edid, quirks);
1431 	num_modes += add_cvt_modes(connector, edid);
1432 	num_modes += add_standard_modes(connector, edid);
1433 	num_modes += add_established_modes(connector, edid);
1434 	num_modes += add_inferred_modes(connector, edid);
1435 
1436 	if (quirks & (EDID_QUIRK_PREFER_LARGE_60 | EDID_QUIRK_PREFER_LARGE_75))
1437 		edid_fixup_preferred(connector, quirks);
1438 
1439 	connector->display_info.width_mm = edid->width_cm * 10;
1440 	connector->display_info.height_mm = edid->height_cm * 10;
1441 
1442 	return num_modes;
1443 }
1444 EXPORT_SYMBOL(drm_add_edid_modes);
1445 
1446 /**
1447  * drm_add_modes_noedid - add modes for the connectors without EDID
1448  * @connector: connector we're probing
1449  * @hdisplay: the horizontal display limit
1450  * @vdisplay: the vertical display limit
1451  *
1452  * Add the specified modes to the connector's mode list.  A mode is added
1453  * only when its hdisplay/vdisplay does not exceed the given limits.
1454  *
1455  * Return number of modes added or 0 if we couldn't find any.
1456  */
1457 int drm_add_modes_noedid(struct drm_connector *connector,
1458 			int hdisplay, int vdisplay)
1459 {
1460 	int i, count, num_modes = 0;
1461 	struct drm_display_mode *mode, *ptr;
1462 	struct drm_device *dev = connector->dev;
1463 
1464 	count = sizeof(drm_dmt_modes) / sizeof(struct drm_display_mode);
1465 	if (hdisplay < 0)
1466 		hdisplay = 0;
1467 	if (vdisplay < 0)
1468 		vdisplay = 0;
1469 
1470 	for (i = 0; i < count; i++) {
1471 		ptr = &drm_dmt_modes[i];
1472 		if (hdisplay && vdisplay) {
1473 			/*
1474 			 * Only when two are valid, they will be used to check
1475 			 * whether the mode should be added to the mode list of
1476 			 * the connector.
1477 			 */
1478 			if (ptr->hdisplay > hdisplay ||
1479 					ptr->vdisplay > vdisplay)
1480 				continue;
1481 		}
1482 		if (drm_mode_vrefresh(ptr) > 61)
1483 			continue;
1484 		mode = drm_mode_duplicate(dev, ptr);
1485 		if (mode) {
1486 			drm_mode_probed_add(connector, mode);
1487 			num_modes++;
1488 		}
1489 	}
1490 	return num_modes;
1491 }
1492 EXPORT_SYMBOL(drm_add_modes_noedid);
1493