1 /*
2  * Copyright (c) 2006 Luc Verhaegen (quirks list)
3  * Copyright (c) 2007-2008 Intel Corporation
4  *   Jesse Barnes <jesse.barnes@intel.com>
5  * Copyright 2010 Red Hat, Inc.
6  *
7  * DDC probing routines (drm_ddc_read & drm_do_probe_ddc_edid) originally from
8  * FB layer.
9  *   Copyright (C) 2006 Dennis Munsie <dmunsie@cecropia.com>
10  *
11  * Permission is hereby granted, free of charge, to any person obtaining a
12  * copy of this software and associated documentation files (the "Software"),
13  * to deal in the Software without restriction, including without limitation
14  * the rights to use, copy, modify, merge, publish, distribute, sub license,
15  * and/or sell copies of the Software, and to permit persons to whom the
16  * Software is furnished to do so, subject to the following conditions:
17  *
18  * The above copyright notice and this permission notice (including the
19  * next paragraph) shall be included in all copies or substantial portions
20  * of the Software.
21  *
22  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
23  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
24  * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
25  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
26  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
27  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
28  * DEALINGS IN THE SOFTWARE.
29  */
30 #include <linux/kernel.h>
31 #include <linux/slab.h>
32 #include <linux/i2c.h>
33 #include "drmP.h"
34 #include "drm_edid.h"
35 #include "drm_edid_modes.h"
36 
37 #define version_greater(edid, maj, min) \
38 	(((edid)->version > (maj)) || \
39 	 ((edid)->version == (maj) && (edid)->revision > (min)))
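/* e.g. version_greater(edid, 1, 3) is true only for EDID 1.4 and later */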
40 
41 #define EDID_EST_TIMINGS 16
42 #define EDID_STD_TIMINGS 8
43 #define EDID_DETAILED_TIMINGS 4
44 
45 /*
46  * EDID blocks out in the wild have a variety of bugs; try to collect
47  * them here (note that userspace may work around broken monitors first,
48  * but fixes should make their way here so that the kernel "just works"
49  * on as many displays as possible).
50  */
51 
52 /* First detailed mode wrong, use largest 60Hz mode */
53 #define EDID_QUIRK_PREFER_LARGE_60		(1 << 0)
54 /* Reported 135MHz pixel clock is too high, needs adjustment */
55 #define EDID_QUIRK_135_CLOCK_TOO_HIGH		(1 << 1)
56 /* Prefer the largest mode at 75 Hz */
57 #define EDID_QUIRK_PREFER_LARGE_75		(1 << 2)
58 /* Detail timing is in cm not mm */
59 #define EDID_QUIRK_DETAILED_IN_CM		(1 << 3)
60 /* Detailed timing descriptors have bogus size values, so just take the
61  * maximum size and use that.
62  */
63 #define EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE	(1 << 4)
64 /* Monitor forgot to set the first detailed is preferred bit. */
65 #define EDID_QUIRK_FIRST_DETAILED_PREFERRED	(1 << 5)
66 /* use +hsync +vsync for detailed mode */
67 #define EDID_QUIRK_DETAILED_SYNC_PP		(1 << 6)
68 
69 struct detailed_mode_closure {
70 	struct drm_connector *connector;
71 	struct edid *edid;
72 	bool preferred;
73 	u32 quirks;
74 	int modes;
75 };
76 
77 #define LEVEL_DMT	0
78 #define LEVEL_GTF	1
79 #define LEVEL_GTF2	2
80 #define LEVEL_CVT	3
81 
82 static struct edid_quirk {
83 	char *vendor;
84 	int product_id;
85 	u32 quirks;
86 } edid_quirk_list[] = {
87 	/* Acer AL1706 */
88 	{ "ACR", 44358, EDID_QUIRK_PREFER_LARGE_60 },
89 	/* Acer F51 */
90 	{ "API", 0x7602, EDID_QUIRK_PREFER_LARGE_60 },
91 	/* Unknown Acer */
92 	{ "ACR", 2423, EDID_QUIRK_FIRST_DETAILED_PREFERRED },
93 
94 	/* Belinea 10 15 55 */
95 	{ "MAX", 1516, EDID_QUIRK_PREFER_LARGE_60 },
96 	{ "MAX", 0x77e, EDID_QUIRK_PREFER_LARGE_60 },
97 
98 	/* Envision Peripherals, Inc. EN-7100e */
99 	{ "EPI", 59264, EDID_QUIRK_135_CLOCK_TOO_HIGH },
100 	/* Envision EN2028 */
101 	{ "EPI", 8232, EDID_QUIRK_PREFER_LARGE_60 },
102 
103 	/* Funai Electronics PM36B */
104 	{ "FCM", 13600, EDID_QUIRK_PREFER_LARGE_75 |
105 	  EDID_QUIRK_DETAILED_IN_CM },
106 
107 	/* LG Philips LCD LP154W01-A5 */
108 	{ "LPL", 0, EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE },
109 	{ "LPL", 0x2a00, EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE },
110 
111 	/* Philips 107p5 CRT */
112 	{ "PHL", 57364, EDID_QUIRK_FIRST_DETAILED_PREFERRED },
113 
114 	/* Proview AY765C */
115 	{ "PTS", 765, EDID_QUIRK_FIRST_DETAILED_PREFERRED },
116 
117 	/* Samsung SyncMaster 205BW.  Note: irony */
118 	{ "SAM", 541, EDID_QUIRK_DETAILED_SYNC_PP },
119 	/* Samsung SyncMaster 22[5-6]BW */
120 	{ "SAM", 596, EDID_QUIRK_PREFER_LARGE_60 },
121 	{ "SAM", 638, EDID_QUIRK_PREFER_LARGE_60 },
122 };
123 
124 /*** DDC fetch and block validation ***/
125 
126 static const u8 edid_header[] = {
127 	0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00
128 };
129 
130 /*
131  * Sanity check the EDID block (base or extension).  Return 0 if the block
132  * doesn't check out, or 1 if it's valid.
133  */
134 static bool
135 drm_edid_block_valid(u8 *raw_edid)
136 {
137 	int i;
138 	u8 csum = 0;
139 	struct edid *edid = (struct edid *)raw_edid;
140 
141 	if (raw_edid[0] == 0x00) {
142 		int score = 0;
143 
144 		for (i = 0; i < sizeof(edid_header); i++)
145 			if (raw_edid[i] == edid_header[i])
146 				score++;
147 
148 		if (score == 8) ;
149 		else if (score >= 6) {
150 			DRM_DEBUG("Fixing EDID header, your hardware may be failing\n");
151 			memcpy(raw_edid, edid_header, sizeof(edid_header));
152 		} else {
153 			goto bad;
154 		}
155 	}
156 
157 	for (i = 0; i < EDID_LENGTH; i++)
158 		csum += raw_edid[i];
159 	if (csum) {
160 		DRM_ERROR("EDID checksum is invalid, remainder is %d\n", csum);
161 
162 		/* allow CEA to slide through, switches mangle this */
163 		if (raw_edid[0] != 0x02)
164 			goto bad;
165 	}
166 
167 	/* per-block-type checks */
168 	switch (raw_edid[0]) {
169 	case 0: /* base */
170 		if (edid->version != 1) {
171 			DRM_ERROR("EDID has major version %d, instead of 1\n", edid->version);
172 			goto bad;
173 		}
174 
175 		if (edid->revision > 4)
176 			DRM_DEBUG("EDID minor > 4, assuming backward compatibility\n");
177 		break;
178 
179 	default:
180 		break;
181 	}
182 
183 	return 1;
184 
185 bad:
186 	if (raw_edid) {
187 		DRM_ERROR("Raw EDID:\n");
188 		print_hex_dump_bytes(KERN_ERR, DUMP_PREFIX_NONE, raw_edid, EDID_LENGTH);
189 		printk("\n");
190 	}
191 	return 0;
192 }
193 
194 /**
195  * drm_edid_is_valid - sanity check EDID data
196  * @edid: EDID data
197  *
198  * Sanity-check an entire EDID record (including extensions)
199  */
200 bool drm_edid_is_valid(struct edid *edid)
201 {
202 	int i;
203 	u8 *raw = (u8 *)edid;
204 
205 	if (!edid)
206 		return false;
207 
208 	for (i = 0; i <= edid->extensions; i++)
209 		if (!drm_edid_block_valid(raw + i * EDID_LENGTH))
210 			return false;
211 
212 	return true;
213 }
214 EXPORT_SYMBOL(drm_edid_is_valid);
215 
216 #define DDC_ADDR 0x50
217 #define DDC_SEGMENT_ADDR 0x30
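/*
 * DDC_SEGMENT_ADDR is the E-DDC segment pointer address: EDID blocks 2 and
 * up live in higher 256-byte segments (segment = block >> 1) and would need
 * a segment write to this address first.  The simple transfer below only
 * reaches segment 0, i.e. blocks 0 and 1.
 */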
218 /**
219  * Get EDID information via I2C.
220  *
221  * \param adapter : i2c device adaptor
222  * \param buf     : EDID data buffer to be filled
223  * \param len     : EDID data buffer length
224  * \return 0 on success or -1 on failure.
225  *
226  * Try to fetch EDID information by calling i2c driver function.
227  */
228 static int
229 drm_do_probe_ddc_edid(struct i2c_adapter *adapter, unsigned char *buf,
230 		      int block, int len)
231 {
232 	unsigned char start = block * EDID_LENGTH;
233 	int ret, retries = 5;
234 
235 	/* The core i2c driver will automatically retry the transfer if the
236 	 * adapter reports EAGAIN. However, we find that bit-banging transfers
237 	 * are susceptible to errors under a heavily loaded machine and
238 	 * generate spurious NAKs and timeouts. Retrying the transfer
239 	 * of the individual block a few times seems to overcome this.
240 	 */
241 	do {
242 		struct i2c_msg msgs[] = {
243 			{
244 				.addr	= DDC_ADDR,
245 				.flags	= 0,
246 				.len	= 1,
247 				.buf	= &start,
248 			}, {
249 				.addr	= DDC_ADDR,
250 				.flags	= I2C_M_RD,
251 				.len	= len,
252 				.buf	= buf,
253 			}
254 		};
255 		ret = i2c_transfer(adapter, msgs, 2);
256 	} while (ret != 2 && --retries);
257 
258 	return ret == 2 ? 0 : -1;
259 }
260 
261 static u8 *
262 drm_do_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter)
263 {
264 	int i, j = 0, valid_extensions = 0;
265 	u8 *block, *new;
266 
267 	if ((block = kmalloc(EDID_LENGTH, GFP_KERNEL)) == NULL)
268 		return NULL;
269 
270 	/* base block fetch */
271 	for (i = 0; i < 4; i++) {
272 		if (drm_do_probe_ddc_edid(adapter, block, 0, EDID_LENGTH))
273 			goto out;
274 		if (drm_edid_block_valid(block))
275 			break;
276 	}
277 	if (i == 4)
278 		goto carp;
279 
280 	/* if there's no extensions, we're done */
281 	if (block[0x7e] == 0)
282 		return block;
283 
284 	new = krealloc(block, (block[0x7e] + 1) * EDID_LENGTH, GFP_KERNEL);
285 	if (!new)
286 		goto out;
287 	block = new;
288 
289 	for (j = 1; j <= block[0x7e]; j++) {
290 		for (i = 0; i < 4; i++) {
291 			if (drm_do_probe_ddc_edid(adapter,
292 				  block + (valid_extensions + 1) * EDID_LENGTH,
293 				  j, EDID_LENGTH))
294 				goto out;
295 			if (drm_edid_block_valid(block + (valid_extensions + 1) * EDID_LENGTH)) {
296 				valid_extensions++;
297 				break;
298 			}
299 		}
300 		if (i == 4)
301 			dev_warn(connector->dev->dev,
302 			 "%s: Ignoring invalid EDID block %d.\n",
303 			 drm_get_connector_name(connector), j);
304 	}
305 
306 	if (valid_extensions != block[0x7e]) {
307 		block[EDID_LENGTH-1] += block[0x7e] - valid_extensions;
308 		block[0x7e] = valid_extensions;
309 		new = krealloc(block, (valid_extensions + 1) * EDID_LENGTH, GFP_KERNEL);
310 		if (!new)
311 			goto out;
312 		block = new;
313 	}
314 
315 	return block;
316 
317 carp:
318 	dev_warn(connector->dev->dev, "%s: EDID block %d invalid.\n",
319 		 drm_get_connector_name(connector), j);
320 
321 out:
322 	kfree(block);
323 	return NULL;
324 }
325 
326 /**
327  * Probe DDC presence.
328  *
329  * \param adapter : i2c device adaptor
330  * \return 1 on success
331  */
332 static bool
333 drm_probe_ddc(struct i2c_adapter *adapter)
334 {
335 	unsigned char out;
336 
337 	return (drm_do_probe_ddc_edid(adapter, &out, 0, 1) == 0);
338 }
339 
340 /**
341  * drm_get_edid - get EDID data, if available
342  * @connector: connector we're probing
343  * @adapter: i2c adapter to use for DDC
344  *
345  * Poke the given i2c channel to grab EDID data if possible.  If found,
346  * attach it to the connector.
347  *
348  * Return edid data or NULL if we couldn't find any.
349  */
350 struct edid *drm_get_edid(struct drm_connector *connector,
351 			  struct i2c_adapter *adapter)
352 {
353 	struct edid *edid = NULL;
354 
355 	if (drm_probe_ddc(adapter))
356 		edid = (struct edid *)drm_do_get_edid(connector, adapter);
357 
358 	connector->display_info.raw_edid = (char *)edid;
359 
360 	return edid;
361 
362 }
363 EXPORT_SYMBOL(drm_get_edid);
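/*
 * Illustrative sketch (not part of this file): a connector's ->get_modes()
 * hook typically pairs this with drm_add_edid_modes(), e.g.
 *
 *	edid = drm_get_edid(connector, &dev_priv->ddc_adapter);
 *	if (edid) {
 *		count = drm_add_edid_modes(connector, edid);
 *		kfree(edid);
 *	}
 *
 * where "ddc_adapter" stands in for whatever i2c_adapter the driver owns.
 */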
364 
365 /*** EDID parsing ***/
366 
367 /**
368  * edid_vendor - match a string against EDID's obfuscated vendor field
369  * @edid: EDID to match
370  * @vendor: vendor string
371  *
372  * Returns true if @vendor is in @edid, false otherwise
373  */
374 static bool edid_vendor(struct edid *edid, char *vendor)
375 {
376 	char edid_vendor[3];
377 
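	/*
	 * mfg_id packs three 5-bit letters ('A' = 1) into two big-endian
	 * bytes; for example, bytes 0x04 0x72 decode to "ACR".
	 */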
378 	edid_vendor[0] = ((edid->mfg_id[0] & 0x7c) >> 2) + '@';
379 	edid_vendor[1] = (((edid->mfg_id[0] & 0x3) << 3) |
380 			  ((edid->mfg_id[1] & 0xe0) >> 5)) + '@';
381 	edid_vendor[2] = (edid->mfg_id[1] & 0x1f) + '@';
382 
383 	return !strncmp(edid_vendor, vendor, 3);
384 }
385 
386 /**
387  * edid_get_quirks - return quirk flags for a given EDID
388  * @edid: EDID to process
389  *
390  * This tells subsequent routines what fixes they need to apply.
391  */
392 static u32 edid_get_quirks(struct edid *edid)
393 {
394 	struct edid_quirk *quirk;
395 	int i;
396 
397 	for (i = 0; i < ARRAY_SIZE(edid_quirk_list); i++) {
398 		quirk = &edid_quirk_list[i];
399 
400 		if (edid_vendor(edid, quirk->vendor) &&
401 		    (EDID_PRODUCT_ID(edid) == quirk->product_id))
402 			return quirk->quirks;
403 	}
404 
405 	return 0;
406 }
407 
408 #define MODE_SIZE(m) ((m)->hdisplay * (m)->vdisplay)
409 #define MODE_REFRESH_DIFF(m,r) (abs((m)->vrefresh - (r)))
410 
411 /**
412  * edid_fixup_preferred - set preferred modes based on quirk list
413  * @connector: has mode list to fix up
414  * @quirks: quirks list
415  *
416  * Walk the mode list for @connector, clearing the preferred status
417  * on existing modes and setting it anew for the right mode ala @quirks.
418  */
419 static void edid_fixup_preferred(struct drm_connector *connector,
420 				 u32 quirks)
421 {
422 	struct drm_display_mode *t, *cur_mode, *preferred_mode;
423 	int target_refresh = 0;
424 
425 	if (list_empty(&connector->probed_modes))
426 		return;
427 
428 	if (quirks & EDID_QUIRK_PREFER_LARGE_60)
429 		target_refresh = 60;
430 	if (quirks & EDID_QUIRK_PREFER_LARGE_75)
431 		target_refresh = 75;
432 
433 	preferred_mode = list_first_entry(&connector->probed_modes,
434 					  struct drm_display_mode, head);
435 
436 	list_for_each_entry_safe(cur_mode, t, &connector->probed_modes, head) {
437 		cur_mode->type &= ~DRM_MODE_TYPE_PREFERRED;
438 
439 		if (cur_mode == preferred_mode)
440 			continue;
441 
442 		/* Largest mode is preferred */
443 		if (MODE_SIZE(cur_mode) > MODE_SIZE(preferred_mode))
444 			preferred_mode = cur_mode;
445 
446 		/* At a given size, try to get closest to target refresh */
447 		if ((MODE_SIZE(cur_mode) == MODE_SIZE(preferred_mode)) &&
448 		    MODE_REFRESH_DIFF(cur_mode, target_refresh) <
449 		    MODE_REFRESH_DIFF(preferred_mode, target_refresh)) {
450 			preferred_mode = cur_mode;
451 		}
452 	}
453 
454 	preferred_mode->type |= DRM_MODE_TYPE_PREFERRED;
455 }
456 
457 struct drm_display_mode *drm_mode_find_dmt(struct drm_device *dev,
458 					   int hsize, int vsize, int fresh)
459 {
460 	struct drm_display_mode *mode = NULL;
461 	int i;
462 
463 	for (i = 0; i < drm_num_dmt_modes; i++) {
464 		const struct drm_display_mode *ptr = &drm_dmt_modes[i];
465 		if (hsize == ptr->hdisplay &&
466 			vsize == ptr->vdisplay &&
467 			fresh == drm_mode_vrefresh(ptr)) {
468 			/* get the expected default mode */
469 			mode = drm_mode_duplicate(dev, ptr);
470 			break;
471 		}
472 	}
473 	return mode;
474 }
475 EXPORT_SYMBOL(drm_mode_find_dmt);
476 
477 typedef void detailed_cb(struct detailed_timing *timing, void *closure);
478 
479 static void
480 cea_for_each_detailed_block(u8 *ext, detailed_cb *cb, void *closure)
481 {
482 	int i, n = 0;
483 	u8 rev = ext[0x01], d = ext[0x02];
484 	u8 *det_base = ext + d;
485 
486 	switch (rev) {
487 	case 0:
488 		/* can't happen */
489 		return;
490 	case 1:
491 		/* have to infer how many blocks we have, check pixel clock */
492 		for (i = 0; i < 6; i++)
493 			if (det_base[18*i] || det_base[18*i+1])
494 				n++;
495 		break;
496 	default:
497 		/* explicit count */
498 		n = min(ext[0x03] & 0x0f, 6);
499 		break;
500 	}
501 
502 	for (i = 0; i < n; i++)
503 		cb((struct detailed_timing *)(det_base + 18 * i), closure);
504 }
505 
506 static void
507 vtb_for_each_detailed_block(u8 *ext, detailed_cb *cb, void *closure)
508 {
509 	unsigned int i, n = min((int)ext[0x02], 6);
510 	u8 *det_base = ext + 5;
511 
512 	if (ext[0x01] != 1)
513 		return; /* unknown version */
514 
515 	for (i = 0; i < n; i++)
516 		cb((struct detailed_timing *)(det_base + 18 * i), closure);
517 }
518 
519 static void
520 drm_for_each_detailed_block(u8 *raw_edid, detailed_cb *cb, void *closure)
521 {
522 	int i;
523 	struct edid *edid = (struct edid *)raw_edid;
524 
525 	if (edid == NULL)
526 		return;
527 
528 	for (i = 0; i < EDID_DETAILED_TIMINGS; i++)
529 		cb(&(edid->detailed_timings[i]), closure);
530 
531 	for (i = 1; i <= raw_edid[0x7e]; i++) {
532 		u8 *ext = raw_edid + (i * EDID_LENGTH);
533 		switch (*ext) {
534 		case CEA_EXT:
535 			cea_for_each_detailed_block(ext, cb, closure);
536 			break;
537 		case VTB_EXT:
538 			vtb_for_each_detailed_block(ext, cb, closure);
539 			break;
540 		default:
541 			break;
542 		}
543 	}
544 }
545 
546 static void
547 is_rb(struct detailed_timing *t, void *data)
548 {
549 	u8 *r = (u8 *)t;
550 	if (r[3] == EDID_DETAIL_MONITOR_RANGE)
551 		if (r[15] & 0x10)
552 			*(bool *)data = true;
553 }
554 
555 /* EDID 1.4 defines this explicitly.  For EDID 1.3, we guess, badly. */
556 static bool
557 drm_monitor_supports_rb(struct edid *edid)
558 {
559 	if (edid->revision >= 4) {
560 		bool ret = false;
561 		drm_for_each_detailed_block((u8 *)edid, is_rb, &ret);
562 		return ret;
563 	}
564 
565 	return ((edid->input & DRM_EDID_INPUT_DIGITAL) != 0);
566 }
567 
568 static void
569 find_gtf2(struct detailed_timing *t, void *data)
570 {
571 	u8 *r = (u8 *)t;
572 	if (r[3] == EDID_DETAIL_MONITOR_RANGE && r[10] == 0x02)
573 		*(u8 **)data = r;
574 }
575 
576 /* Secondary GTF curve kicks in above some break frequency */
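/*
 * In the range descriptor the secondary curve parameters live at byte 12
 * (start break frequency / 2, in kHz), 13 (2*C), 14-15 (M, LSB first),
 * 16 (K) and 17 (2*J); the helpers below read them back in that order.
 */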
577 static int
578 drm_gtf2_hbreak(struct edid *edid)
579 {
580 	u8 *r = NULL;
581 	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
582 	return r ? (r[12] * 2) : 0;
583 }
584 
585 static int
586 drm_gtf2_2c(struct edid *edid)
587 {
588 	u8 *r = NULL;
589 	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
590 	return r ? r[13] : 0;
591 }
592 
593 static int
594 drm_gtf2_m(struct edid *edid)
595 {
596 	u8 *r = NULL;
597 	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
598 	return r ? (r[15] << 8) + r[14] : 0;
599 }
600 
601 static int
602 drm_gtf2_k(struct edid *edid)
603 {
604 	u8 *r = NULL;
605 	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
606 	return r ? r[16] : 0;
607 }
608 
609 static int
610 drm_gtf2_2j(struct edid *edid)
611 {
612 	u8 *r = NULL;
613 	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
614 	return r ? r[17] : 0;
615 }
616 
617 /**
618  * standard_timing_level - get std. timing level (CVT/GTF/DMT)
619  * @edid: EDID block to scan
620  */
621 static int standard_timing_level(struct edid *edid)
622 {
623 	if (edid->revision >= 2) {
624 		if (edid->revision >= 4 && (edid->features & DRM_EDID_FEATURE_DEFAULT_GTF))
625 			return LEVEL_CVT;
626 		if (drm_gtf2_hbreak(edid))
627 			return LEVEL_GTF2;
628 		return LEVEL_GTF;
629 	}
630 	return LEVEL_DMT;
631 }
632 
633 /*
634  * 0 is reserved.  The spec says 0x01 fill for unused timings.  Some old
635  * monitors fill with ascii space (0x20) instead.
636  */
637 static int
638 bad_std_timing(u8 a, u8 b)
639 {
640 	return (a == 0x00 && b == 0x00) ||
641 	       (a == 0x01 && b == 0x01) ||
642 	       (a == 0x20 && b == 0x20);
643 }
644 
645 /**
646  * drm_mode_std - convert standard mode info (width, height, refresh) into mode
647  * @t: standard timing params
648  * @timing_level: standard timing level
649  *
650  * Take the standard timing params (in this case width, aspect, and refresh)
651  * and convert them into a real mode using CVT/GTF/DMT.
652  */
653 static struct drm_display_mode *
654 drm_mode_std(struct drm_connector *connector, struct edid *edid,
655 	     struct std_timing *t, int revision)
656 {
657 	struct drm_device *dev = connector->dev;
658 	struct drm_display_mode *m, *mode = NULL;
659 	int hsize, vsize;
660 	int vrefresh_rate;
661 	unsigned aspect_ratio = (t->vfreq_aspect & EDID_TIMING_ASPECT_MASK)
662 		>> EDID_TIMING_ASPECT_SHIFT;
663 	unsigned vfreq = (t->vfreq_aspect & EDID_TIMING_VFREQ_MASK)
664 		>> EDID_TIMING_VFREQ_SHIFT;
665 	int timing_level = standard_timing_level(edid);
666 
667 	if (bad_std_timing(t->hsize, t->vfreq_aspect))
668 		return NULL;
669 
670 	/* According to the EDID spec, the hdisplay = hsize * 8 + 248 */
671 	hsize = t->hsize * 8 + 248;
672 	/* vrefresh_rate = vfreq + 60 */
673 	vrefresh_rate = vfreq + 60;
674 	/* the vdisplay is calculated based on the aspect ratio */
675 	if (aspect_ratio == 0) {
676 		if (revision < 3)
677 			vsize = hsize;
678 		else
679 			vsize = (hsize * 10) / 16;
680 	} else if (aspect_ratio == 1)
681 		vsize = (hsize * 3) / 4;
682 	else if (aspect_ratio == 2)
683 		vsize = (hsize * 4) / 5;
684 	else
685 		vsize = (hsize * 9) / 16;
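	/* e.g. a standard timing of 0x81 0xc0 decodes to 1280x720@60 (16:9) */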
686 
687 	/* HDTV hack, part 1 */
688 	if (vrefresh_rate == 60 &&
689 	    ((hsize == 1360 && vsize == 765) ||
690 	     (hsize == 1368 && vsize == 769))) {
691 		hsize = 1366;
692 		vsize = 768;
693 	}
694 
695 	/*
696 	 * If this connector already has a mode for this size and refresh
697 	 * rate (because it came from detailed or CVT info), use that
698 	 * instead.  This way we don't have to guess at interlace or
699 	 * reduced blanking.
700 	 */
701 	list_for_each_entry(m, &connector->probed_modes, head)
702 		if (m->hdisplay == hsize && m->vdisplay == vsize &&
703 		    drm_mode_vrefresh(m) == vrefresh_rate)
704 			return NULL;
705 
706 	/* HDTV hack, part 2 */
707 	if (hsize == 1366 && vsize == 768 && vrefresh_rate == 60) {
708 		mode = drm_cvt_mode(dev, 1366, 768, vrefresh_rate, 0, 0,
709 				    false);
710 		mode->hdisplay = 1366;
711 		mode->hsync_start = mode->hsync_start - 1;
712 		mode->hsync_end = mode->hsync_end - 1;
713 		return mode;
714 	}
715 
716 	/* check whether it can be found in default mode table */
717 	mode = drm_mode_find_dmt(dev, hsize, vsize, vrefresh_rate);
718 	if (mode)
719 		return mode;
720 
721 	switch (timing_level) {
722 	case LEVEL_DMT:
723 		break;
724 	case LEVEL_GTF:
725 		mode = drm_gtf_mode(dev, hsize, vsize, vrefresh_rate, 0, 0);
726 		break;
727 	case LEVEL_GTF2:
728 		/*
729 		 * This is potentially wrong if there's ever a monitor with
730 		 * more than one ranges section, each claiming a different
731 		 * secondary GTF curve.  Please don't do that.
732 		 */
733 		mode = drm_gtf_mode(dev, hsize, vsize, vrefresh_rate, 0, 0);
734 		if (drm_mode_hsync(mode) > drm_gtf2_hbreak(edid)) {
735 			kfree(mode);
736 			mode = drm_gtf_mode_complex(dev, hsize, vsize,
737 						    vrefresh_rate, 0, 0,
738 						    drm_gtf2_m(edid),
739 						    drm_gtf2_2c(edid),
740 						    drm_gtf2_k(edid),
741 						    drm_gtf2_2j(edid));
742 		}
743 		break;
744 	case LEVEL_CVT:
745 		mode = drm_cvt_mode(dev, hsize, vsize, vrefresh_rate, 0, 0,
746 				    false);
747 		break;
748 	}
749 	return mode;
750 }
751 
752 /*
753  * EDID is delightfully ambiguous about how interlaced modes are to be
754  * encoded.  Our internal representation is of frame height, but some
755  * HDTV detailed timings are encoded as field height.
756  *
757  * The format list here is from CEA, in frame size.  Technically we
758  * should be checking refresh rate too.  Whatever.
759  */
760 static void
761 drm_mode_do_interlace_quirk(struct drm_display_mode *mode,
762 			    struct detailed_pixel_timing *pt)
763 {
764 	int i;
765 	static const struct {
766 		int w, h;
767 	} cea_interlaced[] = {
768 		{ 1920, 1080 },
769 		{  720,  480 },
770 		{ 1440,  480 },
771 		{ 2880,  480 },
772 		{  720,  576 },
773 		{ 1440,  576 },
774 		{ 2880,  576 },
775 	};
776 
777 	if (!(pt->misc & DRM_EDID_PT_INTERLACED))
778 		return;
779 
780 	for (i = 0; i < ARRAY_SIZE(cea_interlaced); i++) {
781 		if ((mode->hdisplay == cea_interlaced[i].w) &&
782 		    (mode->vdisplay == cea_interlaced[i].h / 2)) {
783 			mode->vdisplay *= 2;
784 			mode->vsync_start *= 2;
785 			mode->vsync_end *= 2;
786 			mode->vtotal *= 2;
787 			mode->vtotal |= 1;
788 		}
789 	}
790 
791 	mode->flags |= DRM_MODE_FLAG_INTERLACE;
792 }
793 
794 /**
795  * drm_mode_detailed - create a new mode from an EDID detailed timing section
796  * @dev: DRM device (needed to create new mode)
797  * @edid: EDID block
798  * @timing: EDID detailed timing info
799  * @quirks: quirks to apply
800  *
801  * An EDID detailed timing block contains enough info for us to create and
802  * return a new struct drm_display_mode.
803  */
804 static struct drm_display_mode *drm_mode_detailed(struct drm_device *dev,
805 						  struct edid *edid,
806 						  struct detailed_timing *timing,
807 						  u32 quirks)
808 {
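	/*
	 * The 18-byte descriptor stores 12-bit active/blanking values and
	 * 10/6-bit sync offset/width values as a low byte plus bits from a
	 * shared high byte, unpacked below; the pixel clock field is in
	 * 10 kHz units.
	 */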
809 	struct drm_display_mode *mode;
810 	struct detailed_pixel_timing *pt = &timing->data.pixel_data;
811 	unsigned hactive = (pt->hactive_hblank_hi & 0xf0) << 4 | pt->hactive_lo;
812 	unsigned vactive = (pt->vactive_vblank_hi & 0xf0) << 4 | pt->vactive_lo;
813 	unsigned hblank = (pt->hactive_hblank_hi & 0xf) << 8 | pt->hblank_lo;
814 	unsigned vblank = (pt->vactive_vblank_hi & 0xf) << 8 | pt->vblank_lo;
815 	unsigned hsync_offset = (pt->hsync_vsync_offset_pulse_width_hi & 0xc0) << 2 | pt->hsync_offset_lo;
816 	unsigned hsync_pulse_width = (pt->hsync_vsync_offset_pulse_width_hi & 0x30) << 4 | pt->hsync_pulse_width_lo;
817 	unsigned vsync_offset = (pt->hsync_vsync_offset_pulse_width_hi & 0xc) >> 2 | pt->vsync_offset_pulse_width_lo >> 4;
818 	unsigned vsync_pulse_width = (pt->hsync_vsync_offset_pulse_width_hi & 0x3) << 4 | (pt->vsync_offset_pulse_width_lo & 0xf);
819 
820 	/* ignore tiny modes */
821 	if (hactive < 64 || vactive < 64)
822 		return NULL;
823 
824 	if (pt->misc & DRM_EDID_PT_STEREO) {
825 		printk(KERN_WARNING "stereo mode not supported\n");
826 		return NULL;
827 	}
828 	if (!(pt->misc & DRM_EDID_PT_SEPARATE_SYNC)) {
829 		printk(KERN_WARNING "composite sync not supported\n");
830 	}
831 
832 	/* it is incorrect if hsync/vsync width is zero */
833 	if (!hsync_pulse_width || !vsync_pulse_width) {
834 		DRM_DEBUG_KMS("Incorrect Detailed timing. "
835 				"Wrong Hsync/Vsync pulse width\n");
836 		return NULL;
837 	}
838 	mode = drm_mode_create(dev);
839 	if (!mode)
840 		return NULL;
841 
842 	mode->type = DRM_MODE_TYPE_DRIVER;
843 
844 	if (quirks & EDID_QUIRK_135_CLOCK_TOO_HIGH)
845 		timing->pixel_clock = cpu_to_le16(1088);
846 
847 	mode->clock = le16_to_cpu(timing->pixel_clock) * 10;
848 
849 	mode->hdisplay = hactive;
850 	mode->hsync_start = mode->hdisplay + hsync_offset;
851 	mode->hsync_end = mode->hsync_start + hsync_pulse_width;
852 	mode->htotal = mode->hdisplay + hblank;
853 
854 	mode->vdisplay = vactive;
855 	mode->vsync_start = mode->vdisplay + vsync_offset;
856 	mode->vsync_end = mode->vsync_start + vsync_pulse_width;
857 	mode->vtotal = mode->vdisplay + vblank;
858 
859 	/* Some EDIDs have bogus h/vtotal values */
860 	if (mode->hsync_end > mode->htotal)
861 		mode->htotal = mode->hsync_end + 1;
862 	if (mode->vsync_end > mode->vtotal)
863 		mode->vtotal = mode->vsync_end + 1;
864 
865 	drm_mode_do_interlace_quirk(mode, pt);
866 
867 	drm_mode_set_name(mode);
868 
869 	if (quirks & EDID_QUIRK_DETAILED_SYNC_PP) {
870 		pt->misc |= DRM_EDID_PT_HSYNC_POSITIVE | DRM_EDID_PT_VSYNC_POSITIVE;
871 	}
872 
873 	mode->flags |= (pt->misc & DRM_EDID_PT_HSYNC_POSITIVE) ?
874 		DRM_MODE_FLAG_PHSYNC : DRM_MODE_FLAG_NHSYNC;
875 	mode->flags |= (pt->misc & DRM_EDID_PT_VSYNC_POSITIVE) ?
876 		DRM_MODE_FLAG_PVSYNC : DRM_MODE_FLAG_NVSYNC;
877 
878 	mode->width_mm = pt->width_mm_lo | (pt->width_height_mm_hi & 0xf0) << 4;
879 	mode->height_mm = pt->height_mm_lo | (pt->width_height_mm_hi & 0xf) << 8;
880 
881 	if (quirks & EDID_QUIRK_DETAILED_IN_CM) {
882 		mode->width_mm *= 10;
883 		mode->height_mm *= 10;
884 	}
885 
886 	if (quirks & EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE) {
887 		mode->width_mm = edid->width_cm * 10;
888 		mode->height_mm = edid->height_cm * 10;
889 	}
890 
891 	return mode;
892 }
893 
894 static bool
895 mode_is_rb(const struct drm_display_mode *mode)
896 {
897 	return (mode->htotal - mode->hdisplay == 160) &&
898 	       (mode->hsync_end - mode->hdisplay == 80) &&
899 	       (mode->hsync_end - mode->hsync_start == 32) &&
900 	       (mode->vsync_start - mode->vdisplay == 3);
901 }
902 
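/*
 * Monitor range descriptor layout, as consumed below: byte 4 holds the
 * EDID 1.4 rate-offset flags, bytes 5-6 are min/max vertical rate (Hz),
 * bytes 7-8 min/max horizontal rate (kHz), byte 9 is the max pixel clock
 * in 10 MHz units, and byte 10 selects the secondary timing formula.
 */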
903 static bool
904 mode_in_hsync_range(const struct drm_display_mode *mode,
905 		    struct edid *edid, u8 *t)
906 {
907 	int hsync, hmin, hmax;
908 
909 	hmin = t[7];
910 	if (edid->revision >= 4)
911 	    hmin += ((t[4] & 0x04) ? 255 : 0);
912 	hmax = t[8];
913 	if (edid->revision >= 4)
914 	    hmax += ((t[4] & 0x08) ? 255 : 0);
915 	hsync = drm_mode_hsync(mode);
916 
917 	return (hsync <= hmax && hsync >= hmin);
918 }
919 
920 static bool
921 mode_in_vsync_range(const struct drm_display_mode *mode,
922 		    struct edid *edid, u8 *t)
923 {
924 	int vsync, vmin, vmax;
925 
926 	vmin = t[5];
927 	if (edid->revision >= 4)
928 	    vmin += ((t[4] & 0x01) ? 255 : 0);
929 	vmax = t[6];
930 	if (edid->revision >= 4)
931 	    vmax += ((t[4] & 0x02) ? 255 : 0);
932 	vsync = drm_mode_vrefresh(mode);
933 
934 	return (vsync <= vmax && vsync >= vmin);
935 }
936 
937 static u32
938 range_pixel_clock(struct edid *edid, u8 *t)
939 {
940 	/* unspecified */
941 	if (t[9] == 0 || t[9] == 255)
942 		return 0;
943 
944 	/* 1.4 with CVT support gives us real precision, yay */
945 	if (edid->revision >= 4 && t[10] == 0x04)
946 		return (t[9] * 10000) - ((t[12] >> 2) * 250);
947 
948 	/* 1.3 is pathetic, so fuzz up a bit */
949 	return t[9] * 10000 + 5001;
950 }
951 
952 static bool
953 mode_in_range(const struct drm_display_mode *mode, struct edid *edid,
954 	      struct detailed_timing *timing)
955 {
956 	u32 max_clock;
957 	u8 *t = (u8 *)timing;
958 
959 	if (!mode_in_hsync_range(mode, edid, t))
960 		return false;
961 
962 	if (!mode_in_vsync_range(mode, edid, t))
963 		return false;
964 
965 	if ((max_clock = range_pixel_clock(edid, t)))
966 		if (mode->clock > max_clock)
967 			return false;
968 
969 	/* 1.4 max horizontal check */
970 	if (edid->revision >= 4 && t[10] == 0x04)
971 		if (t[13] && mode->hdisplay > 8 * (t[13] + (256 * (t[12]&0x3))))
972 			return false;
973 
974 	if (mode_is_rb(mode) && !drm_monitor_supports_rb(edid))
975 		return false;
976 
977 	return true;
978 }
979 
980 /*
981  * XXX If drm_dmt_modes ever regrows the CVT-R modes (and it will) this will
982  * need to account for them.
983  */
984 static int
985 drm_gtf_modes_for_range(struct drm_connector *connector, struct edid *edid,
986 			struct detailed_timing *timing)
987 {
988 	int i, modes = 0;
989 	struct drm_display_mode *newmode;
990 	struct drm_device *dev = connector->dev;
991 
992 	for (i = 0; i < drm_num_dmt_modes; i++) {
993 		if (mode_in_range(drm_dmt_modes + i, edid, timing)) {
994 			newmode = drm_mode_duplicate(dev, &drm_dmt_modes[i]);
995 			if (newmode) {
996 				drm_mode_probed_add(connector, newmode);
997 				modes++;
998 			}
999 		}
1000 	}
1001 
1002 	return modes;
1003 }
1004 
1005 static void
1006 do_inferred_modes(struct detailed_timing *timing, void *c)
1007 {
1008 	struct detailed_mode_closure *closure = c;
1009 	struct detailed_non_pixel *data = &timing->data.other_data;
1010 	int gtf = (closure->edid->features & DRM_EDID_FEATURE_DEFAULT_GTF);
1011 
1012 	if (gtf && data->type == EDID_DETAIL_MONITOR_RANGE)
1013 		closure->modes += drm_gtf_modes_for_range(closure->connector,
1014 							  closure->edid,
1015 							  timing);
1016 }
1017 
1018 static int
1019 add_inferred_modes(struct drm_connector *connector, struct edid *edid)
1020 {
1021 	struct detailed_mode_closure closure = {
1022 		connector, edid, 0, 0, 0
1023 	};
1024 
1025 	if (version_greater(edid, 1, 0))
1026 		drm_for_each_detailed_block((u8 *)edid, do_inferred_modes,
1027 					    &closure);
1028 
1029 	return closure.modes;
1030 }
1031 
1032 static int
1033 drm_est3_modes(struct drm_connector *connector, struct detailed_timing *timing)
1034 {
1035 	int i, j, m, modes = 0;
1036 	struct drm_display_mode *mode;
1037 	u8 *est = ((u8 *)timing) + 5;
1038 
1039 	for (i = 0; i < 6; i++) {
1040 		for (j = 7; j > 0; j--) {
1041 			m = (i * 8) + (7 - j);
1042 			if (m >= ARRAY_SIZE(est3_modes))
1043 				break;
1044 			if (est[i] & (1 << j)) {
1045 				mode = drm_mode_find_dmt(connector->dev,
1046 							 est3_modes[m].w,
1047 							 est3_modes[m].h,
1048 							 est3_modes[m].r
1049 							 /*, est3_modes[m].rb */);
1050 				if (mode) {
1051 					drm_mode_probed_add(connector, mode);
1052 					modes++;
1053 				}
1054 			}
1055 		}
1056 	}
1057 
1058 	return modes;
1059 }
1060 
1061 static void
1062 do_established_modes(struct detailed_timing *timing, void *c)
1063 {
1064 	struct detailed_mode_closure *closure = c;
1065 	struct detailed_non_pixel *data = &timing->data.other_data;
1066 
1067 	if (data->type == EDID_DETAIL_EST_TIMINGS)
1068 		closure->modes += drm_est3_modes(closure->connector, timing);
1069 }
1070 
1071 /**
1072  * add_established_modes - get est. modes from EDID and add them
1073  * @edid: EDID block to scan
1074  *
1075  * Each EDID block contains a bitmap of the supported "established modes" list
1076  * (defined above).  Tease them out and add them to the global modes list.
1077  */
1078 static int
1079 add_established_modes(struct drm_connector *connector, struct edid *edid)
1080 {
1081 	struct drm_device *dev = connector->dev;
1082 	unsigned long est_bits = edid->established_timings.t1 |
1083 		(edid->established_timings.t2 << 8) |
1084 		((edid->established_timings.mfg_rsvd & 0x80) << 9);
1085 	int i, modes = 0;
1086 	struct detailed_mode_closure closure = {
1087 		connector, edid, 0, 0, 0
1088 	};
1089 
1090 	for (i = 0; i <= EDID_EST_TIMINGS; i++) {
1091 		if (est_bits & (1<<i)) {
1092 			struct drm_display_mode *newmode;
1093 			newmode = drm_mode_duplicate(dev, &edid_est_modes[i]);
1094 			if (newmode) {
1095 				drm_mode_probed_add(connector, newmode);
1096 				modes++;
1097 			}
1098 		}
1099 	}
1100 
1101 	if (version_greater(edid, 1, 0))
1102 		    drm_for_each_detailed_block((u8 *)edid,
1103 						do_established_modes, &closure);
1104 
1105 	return modes + closure.modes;
1106 }
1107 
1108 static void
1109 do_standard_modes(struct detailed_timing *timing, void *c)
1110 {
1111 	struct detailed_mode_closure *closure = c;
1112 	struct detailed_non_pixel *data = &timing->data.other_data;
1113 	struct drm_connector *connector = closure->connector;
1114 	struct edid *edid = closure->edid;
1115 
1116 	if (data->type == EDID_DETAIL_STD_MODES) {
1117 		int i;
1118 		for (i = 0; i < 6; i++) {
1119 			struct std_timing *std;
1120 			struct drm_display_mode *newmode;
1121 
1122 			std = &data->data.timings[i];
1123 			newmode = drm_mode_std(connector, edid, std,
1124 					       edid->revision);
1125 			if (newmode) {
1126 				drm_mode_probed_add(connector, newmode);
1127 				closure->modes++;
1128 			}
1129 		}
1130 	}
1131 }
1132 
1133 /**
1134  * add_standard_modes - get std. modes from EDID and add them
1135  * @edid: EDID block to scan
1136  *
1137  * Standard modes can be calculated using the appropriate standard (DMT,
1138  * GTF or CVT). Grab them from @edid and add them to the list.
1139  */
1140 static int
1141 add_standard_modes(struct drm_connector *connector, struct edid *edid)
1142 {
1143 	int i, modes = 0;
1144 	struct detailed_mode_closure closure = {
1145 		connector, edid, 0, 0, 0
1146 	};
1147 
1148 	for (i = 0; i < EDID_STD_TIMINGS; i++) {
1149 		struct drm_display_mode *newmode;
1150 
1151 		newmode = drm_mode_std(connector, edid,
1152 				       &edid->standard_timings[i],
1153 				       edid->revision);
1154 		if (newmode) {
1155 			drm_mode_probed_add(connector, newmode);
1156 			modes++;
1157 		}
1158 	}
1159 
1160 	if (version_greater(edid, 1, 0))
1161 		drm_for_each_detailed_block((u8 *)edid, do_standard_modes,
1162 					    &closure);
1163 
1164 	/* XXX should also look for standard codes in VTB blocks */
1165 
1166 	return modes + closure.modes;
1167 }
1168 
1169 static int drm_cvt_modes(struct drm_connector *connector,
1170 			 struct detailed_timing *timing)
1171 {
1172 	int i, j, modes = 0;
1173 	struct drm_display_mode *newmode;
1174 	struct drm_device *dev = connector->dev;
1175 	struct cvt_timing *cvt;
1176 	const int rates[] = { 60, 85, 75, 60, 50 };
1177 	const u8 empty[3] = { 0, 0, 0 };
1178 
1179 	for (i = 0; i < 4; i++) {
1180 		int uninitialized_var(width), height;
1181 		cvt = &(timing->data.other_data.data.cvt[i]);
1182 
1183 		if (!memcmp(cvt->code, empty, 3))
1184 			continue;
1185 
1186 		height = (cvt->code[0] + ((cvt->code[1] & 0xf0) << 4) + 1) * 2;
1187 		switch (cvt->code[1] & 0x0c) {
1188 		case 0x00:
1189 			width = height * 4 / 3;
1190 			break;
1191 		case 0x04:
1192 			width = height * 16 / 9;
1193 			break;
1194 		case 0x08:
1195 			width = height * 16 / 10;
1196 			break;
1197 		case 0x0c:
1198 			width = height * 15 / 9;
1199 			break;
1200 		}
1201 
1202 		for (j = 1; j < 5; j++) {
1203 			if (cvt->code[2] & (1 << j)) {
1204 				newmode = drm_cvt_mode(dev, width, height,
1205 						       rates[j], j == 0,
1206 						       false, false);
1207 				if (newmode) {
1208 					drm_mode_probed_add(connector, newmode);
1209 					modes++;
1210 				}
1211 			}
1212 		}
1213 	}
1214 
1215 	return modes;
1216 }
1217 
1218 static void
1219 do_cvt_mode(struct detailed_timing *timing, void *c)
1220 {
1221 	struct detailed_mode_closure *closure = c;
1222 	struct detailed_non_pixel *data = &timing->data.other_data;
1223 
1224 	if (data->type == EDID_DETAIL_CVT_3BYTE)
1225 		closure->modes += drm_cvt_modes(closure->connector, timing);
1226 }
1227 
1228 static int
1229 add_cvt_modes(struct drm_connector *connector, struct edid *edid)
1230 {
1231 	struct detailed_mode_closure closure = {
1232 		connector, edid, 0, 0, 0
1233 	};
1234 
1235 	if (version_greater(edid, 1, 2))
1236 		drm_for_each_detailed_block((u8 *)edid, do_cvt_mode, &closure);
1237 
1238 	/* XXX should also look for CVT codes in VTB blocks */
1239 
1240 	return closure.modes;
1241 }
1242 
1243 static void
1244 do_detailed_mode(struct detailed_timing *timing, void *c)
1245 {
1246 	struct detailed_mode_closure *closure = c;
1247 	struct drm_display_mode *newmode;
1248 
1249 	if (timing->pixel_clock) {
1250 		newmode = drm_mode_detailed(closure->connector->dev,
1251 					    closure->edid, timing,
1252 					    closure->quirks);
1253 		if (!newmode)
1254 			return;
1255 
1256 		if (closure->preferred)
1257 			newmode->type |= DRM_MODE_TYPE_PREFERRED;
1258 
1259 		drm_mode_probed_add(closure->connector, newmode);
1260 		closure->modes++;
1261 		closure->preferred = 0;
1262 	}
1263 }
1264 
1265 /*
1266  * add_detailed_modes - Add modes from detailed timings
1267  * @connector: attached connector
1268  * @edid: EDID block to scan
1269  * @quirks: quirks to apply
1270  */
1271 static int
1272 add_detailed_modes(struct drm_connector *connector, struct edid *edid,
1273 		   u32 quirks)
1274 {
1275 	struct detailed_mode_closure closure = {
1276 		connector,
1277 		edid,
1278 		1,
1279 		quirks,
1280 		0
1281 	};
1282 
1283 	if (closure.preferred && !version_greater(edid, 1, 3))
1284 		closure.preferred =
1285 		    (edid->features & DRM_EDID_FEATURE_PREFERRED_TIMING);
1286 
1287 	drm_for_each_detailed_block((u8 *)edid, do_detailed_mode, &closure);
1288 
1289 	return closure.modes;
1290 }
1291 
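/*
 * 0x000C03 is the IEEE OUI registered to HDMI Licensing; it appears LSB
 * first in the CEA Vendor Specific Data Block, hence the byte order used
 * when matching it below.
 */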
1292 #define HDMI_IDENTIFIER 0x000C03
1293 #define AUDIO_BLOCK	0x01
1294 #define VENDOR_BLOCK    0x03
1295 #define EDID_BASIC_AUDIO	(1 << 6)
1296 
1297 /**
1298  * Search EDID for CEA extension block.
1299  */
1300 u8 *drm_find_cea_extension(struct edid *edid)
1301 {
1302 	u8 *edid_ext = NULL;
1303 	int i;
1304 
1305 	/* No EDID or EDID extensions */
1306 	if (edid == NULL || edid->extensions == 0)
1307 		return NULL;
1308 
1309 	/* Find CEA extension */
1310 	for (i = 0; i < edid->extensions; i++) {
1311 		edid_ext = (u8 *)edid + EDID_LENGTH * (i + 1);
1312 		if (edid_ext[0] == CEA_EXT)
1313 			break;
1314 	}
1315 
1316 	if (i == edid->extensions)
1317 		return NULL;
1318 
1319 	return edid_ext;
1320 }
1321 EXPORT_SYMBOL(drm_find_cea_extension);
1322 
1323 /**
1324  * drm_detect_hdmi_monitor - detect whether monitor is hdmi.
1325  * @edid: monitor EDID information
1326  *
1327  * Parse the CEA extension according to CEA-861-B.
1328  * Return true if HDMI, false if not or unknown.
1329  */
1330 bool drm_detect_hdmi_monitor(struct edid *edid)
1331 {
1332 	u8 *edid_ext;
1333 	int i, hdmi_id;
1334 	int start_offset, end_offset;
1335 	bool is_hdmi = false;
1336 
1337 	edid_ext = drm_find_cea_extension(edid);
1338 	if (!edid_ext)
1339 		goto end;
1340 
1341 	/* Data block offset in CEA extension block */
1342 	start_offset = 4;
1343 	end_offset = edid_ext[2];
1344 
1345 	/*
1346 	 * Because the HDMI identifier lives in the Vendor Specific Data Block,
1347 	 * search for it in all data blocks of the CEA extension.
1348 	 */
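	/* each data block header: tag in bits 7-5, payload length in bits 4-0 */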
1349 	for (i = start_offset; i < end_offset;
1350 		/* Increased by data block len */
1351 		i += ((edid_ext[i] & 0x1f) + 1)) {
1352 		/* Find vendor specific block */
1353 		if ((edid_ext[i] >> 5) == VENDOR_BLOCK) {
1354 			hdmi_id = edid_ext[i + 1] | (edid_ext[i + 2] << 8) |
1355 				  edid_ext[i + 3] << 16;
1356 			/* Find HDMI identifier */
1357 			if (hdmi_id == HDMI_IDENTIFIER)
1358 				is_hdmi = true;
1359 			break;
1360 		}
1361 	}
1362 
1363 end:
1364 	return is_hdmi;
1365 }
1366 EXPORT_SYMBOL(drm_detect_hdmi_monitor);
1367 
1368 /**
1369  * drm_detect_monitor_audio - check monitor audio capability
1370  * @edid: monitor EDID information
1371  * Monitor should have CEA extension block.
1372  * If monitor has 'basic audio', but no CEA audio blocks, it's 'basic
1373  * audio' only. If there is any audio extension block and supported
1374  * audio format, assume at least 'basic audio' support, even if 'basic
1375  * audio' is not defined in EDID.
1376  *
1377  */
1378 bool drm_detect_monitor_audio(struct edid *edid)
1379 {
1380 	u8 *edid_ext;
1381 	int i, j;
1382 	bool has_audio = false;
1383 	int start_offset, end_offset;
1384 
1385 	edid_ext = drm_find_cea_extension(edid);
1386 	if (!edid_ext)
1387 		goto end;
1388 
1389 	has_audio = ((edid_ext[3] & EDID_BASIC_AUDIO) != 0);
1390 
1391 	if (has_audio) {
1392 		DRM_DEBUG_KMS("Monitor has basic audio support\n");
1393 		goto end;
1394 	}
1395 
1396 	/* Data block offset in CEA extension block */
1397 	start_offset = 4;
1398 	end_offset = edid_ext[2];
1399 
1400 	for (i = start_offset; i < end_offset;
1401 			i += ((edid_ext[i] & 0x1f) + 1)) {
1402 		if ((edid_ext[i] >> 5) == AUDIO_BLOCK) {
1403 			has_audio = true;
1404 			for (j = 1; j < (edid_ext[i] & 0x1f); j += 3)
1405 				DRM_DEBUG_KMS("CEA audio format %d\n",
1406 					      (edid_ext[i + j] >> 3) & 0xf);
1407 			goto end;
1408 		}
1409 	}
1410 end:
1411 	return has_audio;
1412 }
1413 EXPORT_SYMBOL(drm_detect_monitor_audio);
1414 
1415 /**
1416  * drm_add_edid_modes - add modes from EDID data, if available
1417  * @connector: connector we're probing
1418  * @edid: edid data
1419  *
1420  * Add the specified modes to the connector's mode list.
1421  *
1422  * Return number of modes added or 0 if we couldn't find any.
1423  */
1424 int drm_add_edid_modes(struct drm_connector *connector, struct edid *edid)
1425 {
1426 	int num_modes = 0;
1427 	u32 quirks;
1428 
1429 	if (edid == NULL) {
1430 		return 0;
1431 	}
1432 	if (!drm_edid_is_valid(edid)) {
1433 		dev_warn(connector->dev->dev, "%s: EDID invalid.\n",
1434 			 drm_get_connector_name(connector));
1435 		return 0;
1436 	}
1437 
1438 	quirks = edid_get_quirks(edid);
1439 
1440 	/*
1441 	 * EDID spec says modes should be preferred in this order:
1442 	 * - preferred detailed mode
1443 	 * - other detailed modes from base block
1444 	 * - detailed modes from extension blocks
1445 	 * - CVT 3-byte code modes
1446 	 * - standard timing codes
1447 	 * - established timing codes
1448 	 * - modes inferred from GTF or CVT range information
1449 	 *
1450 	 * We get this pretty much right.
1451 	 *
1452 	 * XXX order for additional mode types in extension blocks?
1453 	 */
1454 	num_modes += add_detailed_modes(connector, edid, quirks);
1455 	num_modes += add_cvt_modes(connector, edid);
1456 	num_modes += add_standard_modes(connector, edid);
1457 	num_modes += add_established_modes(connector, edid);
1458 	num_modes += add_inferred_modes(connector, edid);
1459 
1460 	if (quirks & (EDID_QUIRK_PREFER_LARGE_60 | EDID_QUIRK_PREFER_LARGE_75))
1461 		edid_fixup_preferred(connector, quirks);
1462 
1463 	connector->display_info.width_mm = edid->width_cm * 10;
1464 	connector->display_info.height_mm = edid->height_cm * 10;
1465 
1466 	return num_modes;
1467 }
1468 EXPORT_SYMBOL(drm_add_edid_modes);
1469 
1470 /**
1471  * drm_add_modes_noedid - add modes for the connectors without EDID
1472  * @connector: connector we're probing
1473  * @hdisplay: the horizontal display limit
1474  * @vdisplay: the vertical display limit
1475  *
1476  * Add the specified modes to the connector's mode list. A mode is only
1477  * added when its hdisplay/vdisplay do not exceed the given limits.
1478  *
1479  * Return number of modes added or 0 if we couldn't find any.
1480  */
1481 int drm_add_modes_noedid(struct drm_connector *connector,
1482 			int hdisplay, int vdisplay)
1483 {
1484 	int i, count, num_modes = 0;
1485 	struct drm_display_mode *mode;
1486 	struct drm_device *dev = connector->dev;
1487 
1488 	count = sizeof(drm_dmt_modes) / sizeof(struct drm_display_mode);
1489 	if (hdisplay < 0)
1490 		hdisplay = 0;
1491 	if (vdisplay < 0)
1492 		vdisplay = 0;
1493 
1494 	for (i = 0; i < count; i++) {
1495 		const struct drm_display_mode *ptr = &drm_dmt_modes[i];
1496 		if (hdisplay && vdisplay) {
1497 			/*
1498 			 * Only when both limits are valid are they used to
1499 			 * decide whether the mode should be added to the
1500 			 * connector's mode list.
1501 			 */
1502 			if (ptr->hdisplay > hdisplay ||
1503 					ptr->vdisplay > vdisplay)
1504 				continue;
1505 		}
1506 		if (drm_mode_vrefresh(ptr) > 61)
1507 			continue;
1508 		mode = drm_mode_duplicate(dev, ptr);
1509 		if (mode) {
1510 			drm_mode_probed_add(connector, mode);
1511 			num_modes++;
1512 		}
1513 	}
1514 	return num_modes;
1515 }
1516 EXPORT_SYMBOL(drm_add_modes_noedid);
1517