1 /*
2 * Copyright © 2012 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 * Keith Packard <keithp@keithp.com>
25 *
26 */
27
28 #include <linux/i2c.h>
29 #include <linux/module.h>
30 #include <linux/slab.h>
31
32 #include <drm/display/drm_dp_helper.h>
33 #include <drm/drm_crtc.h>
34 #include <drm/drm_crtc_helper.h>
35 #include <drm/drm_edid.h>
36 #include <drm/drm_simple_kms_helper.h>
37
38 #include "gma_display.h"
39 #include "psb_drv.h"
40 #include "psb_intel_drv.h"
41 #include "psb_intel_reg.h"
42
43 /**
44 * struct i2c_algo_dp_aux_data - driver interface structure for i2c over dp
45 * aux algorithm
46 * @running: set by the algo to indicate whether an i2c transfer is ongoing
47 * or whether the i2c bus is quiescent
48 * @address: i2c target address for the currently ongoing transfer
49 * @aux_ch: driver callback to transfer a single byte of the i2c payload
50 */
51 struct i2c_algo_dp_aux_data {
52 bool running;
53 u16 address;
54 int (*aux_ch) (struct i2c_adapter *adapter,
55 int mode, uint8_t write_byte,
56 uint8_t *read_byte);
57 };
58
59 /* Run a single AUX_CH I2C transaction, writing/reading data as necessary */
60 static int
61 i2c_algo_dp_aux_transaction(struct i2c_adapter *adapter, int mode,
62 uint8_t write_byte, uint8_t *read_byte)
63 {
64 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
65 int ret;
66
67 ret = (*algo_data->aux_ch)(adapter, mode,
68 write_byte, read_byte);
69 return ret;
70 }
71
72 /*
73 * I2C over AUX CH
74 */
75
76 /*
77 * Send the address. If the I2C link is running, this 'restarts'
78 * the connection with the new address; this is used for doing
79 * a write followed by a read (as needed for DDC)
80 */
81 static int
82 i2c_algo_dp_aux_address(struct i2c_adapter *adapter, u16 address, bool reading)
83 {
84 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
85 int mode = MODE_I2C_START;
86
87 if (reading)
88 mode |= MODE_I2C_READ;
89 else
90 mode |= MODE_I2C_WRITE;
91 algo_data->address = address;
92 algo_data->running = true;
93 return i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
94 }
95
96 /*
97 * Stop the I2C transaction. This closes out the link, sending
98 * a bare address packet with the MOT bit turned off
99 */
100 static void
101 i2c_algo_dp_aux_stop(struct i2c_adapter *adapter, bool reading)
102 {
103 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
104 int mode = MODE_I2C_STOP;
105
106 if (reading)
107 mode |= MODE_I2C_READ;
108 else
109 mode |= MODE_I2C_WRITE;
110 if (algo_data->running) {
111 (void) i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
112 algo_data->running = false;
113 }
114 }
115
116 /*
117 * Write a single byte to the current I2C address. The
118 * I2C link must be running or this returns -EIO
119 */
120 static int
121 i2c_algo_dp_aux_put_byte(struct i2c_adapter *adapter, u8 byte)
122 {
123 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
124
125 if (!algo_data->running)
126 return -EIO;
127
128 return i2c_algo_dp_aux_transaction(adapter, MODE_I2C_WRITE, byte, NULL);
129 }
130
131 /*
132 * Read a single byte from the current I2C address. The
133 * I2C link must be running or this returns -EIO
134 */
135 static int
136 i2c_algo_dp_aux_get_byte(struct i2c_adapter *adapter, u8 *byte_ret)
137 {
138 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
139
140 if (!algo_data->running)
141 return -EIO;
142
143 return i2c_algo_dp_aux_transaction(adapter, MODE_I2C_READ, 0, byte_ret);
144 }
145
146 static int
147 i2c_algo_dp_aux_xfer(struct i2c_adapter *adapter,
148 struct i2c_msg *msgs,
149 int num)
150 {
151 int ret = 0;
152 bool reading = false;
153 int m;
154 int b;
155
156 for (m = 0; m < num; m++) {
157 u16 len = msgs[m].len;
158 u8 *buf = msgs[m].buf;
159 reading = (msgs[m].flags & I2C_M_RD) != 0;
160 ret = i2c_algo_dp_aux_address(adapter, msgs[m].addr, reading);
161 if (ret < 0)
162 break;
163 if (reading) {
164 for (b = 0; b < len; b++) {
165 ret = i2c_algo_dp_aux_get_byte(adapter, &buf[b]);
166 if (ret < 0)
167 break;
168 }
169 } else {
170 for (b = 0; b < len; b++) {
171 ret = i2c_algo_dp_aux_put_byte(adapter, buf[b]);
172 if (ret < 0)
173 break;
174 }
175 }
176 if (ret < 0)
177 break;
178 }
179 if (ret >= 0)
180 ret = num;
181 i2c_algo_dp_aux_stop(adapter, reading);
182 DRM_DEBUG_KMS("dp_aux_xfer return %d\n", ret);
183 return ret;
184 }
185
186 static u32
187 i2c_algo_dp_aux_functionality(struct i2c_adapter *adapter)
188 {
189 return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL |
190 I2C_FUNC_SMBUS_READ_BLOCK_DATA |
191 I2C_FUNC_SMBUS_BLOCK_PROC_CALL |
192 I2C_FUNC_10BIT_ADDR;
193 }
194
195 static const struct i2c_algorithm i2c_dp_aux_algo = {
196 .master_xfer = i2c_algo_dp_aux_xfer,
197 .functionality = i2c_algo_dp_aux_functionality,
198 };
199
200 static void
201 i2c_dp_aux_reset_bus(struct i2c_adapter *adapter)
202 {
203 (void) i2c_algo_dp_aux_address(adapter, 0, false);
204 (void) i2c_algo_dp_aux_stop(adapter, false);
205 }
206
207 static int
208 i2c_dp_aux_prepare_bus(struct i2c_adapter *adapter)
209 {
210 adapter->algo = &i2c_dp_aux_algo;
211 adapter->retries = 3;
212 i2c_dp_aux_reset_bus(adapter);
213 return 0;
214 }
215
216 /*
217 * FIXME: This is the old dp aux helper, gma500 is the last driver that needs to
218 * be ported over to the new helper code in drm_dp_helper.c like i915 or radeon.
219 */
220 static int
221 i2c_dp_aux_add_bus(struct i2c_adapter *adapter)
222 {
223 int error;
224
225 error = i2c_dp_aux_prepare_bus(adapter);
226 if (error)
227 return error;
228 error = i2c_add_adapter(adapter);
229 return error;
230 }
231
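/*
 * Poll COND until it is true or MS milliseconds have passed, sleeping W
 * milliseconds between polls (the sleep is skipped while a kernel debugger
 * holds the CPU). Evaluates to 0 on success or -ETIMEDOUT on timeout.
 */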
232 #define _wait_for(COND, MS, W) ({ \
233 unsigned long timeout__ = jiffies + msecs_to_jiffies(MS); \
234 int ret__ = 0; \
235 while (! (COND)) { \
236 if (time_after(jiffies, timeout__)) { \
237 ret__ = -ETIMEDOUT; \
238 break; \
239 } \
240 if (W && !in_dbg_master()) msleep(W); \
241 } \
242 ret__; \
243 })
244
245 #define wait_for(COND, MS) _wait_for(COND, MS, 1)
246
247 #define DP_LINK_CHECK_TIMEOUT (10 * 1000)
248
249 #define DP_LINK_CONFIGURATION_SIZE 9
250
251 #define CDV_FAST_LINK_TRAIN 1
252
253 struct cdv_intel_dp {
254 uint32_t output_reg;
255 uint32_t DP;
256 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
257 bool has_audio;
258 int force_audio;
259 uint32_t color_range;
260 uint8_t link_bw;
261 uint8_t lane_count;
262 uint8_t dpcd[4];
263 struct gma_encoder *encoder;
264 struct i2c_adapter adapter;
265 struct i2c_algo_dp_aux_data algo;
266 uint8_t train_set[4];
267 uint8_t link_status[DP_LINK_STATUS_SIZE];
268 int panel_power_up_delay;
269 int panel_power_down_delay;
270 int panel_power_cycle_delay;
271 int backlight_on_delay;
272 int backlight_off_delay;
273 struct drm_display_mode *panel_fixed_mode; /* for eDP */
274 bool panel_on;
275 };
276
277 struct ddi_regoff {
278 uint32_t PreEmph1;
279 uint32_t PreEmph2;
280 uint32_t VSwing1;
281 uint32_t VSwing2;
282 uint32_t VSwing3;
283 uint32_t VSwing4;
284 uint32_t VSwing5;
285 };
286
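/*
 * Sideband (DPIO) register offsets used during link training; entry 0 is
 * used for the DP_B port and entry 1 for the other DP port.
 */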
287 static struct ddi_regoff ddi_DP_train_table[] = {
288 {.PreEmph1 = 0x812c, .PreEmph2 = 0x8124, .VSwing1 = 0x8154,
289 .VSwing2 = 0x8148, .VSwing3 = 0x814C, .VSwing4 = 0x8150,
290 .VSwing5 = 0x8158,},
291 {.PreEmph1 = 0x822c, .PreEmph2 = 0x8224, .VSwing1 = 0x8254,
292 .VSwing2 = 0x8248, .VSwing3 = 0x824C, .VSwing4 = 0x8250,
293 .VSwing5 = 0x8258,},
294 };
295
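/*
 * Swing/pre-emphasis values written to the VSwing2 (even indices) and
 * PreEmph2 (odd indices) sideband registers; the index is derived from the
 * requested vswing/pre-emphasis level in cdv_intel_dp_set_vswing_premph().
 */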
296 static uint32_t dp_vswing_premph_table[] = {
297 0x55338954, 0x4000,
298 0x554d8954, 0x2000,
299 0x55668954, 0,
300 0x559ac0d4, 0x6000,
301 };
302 /**
303 * is_edp - is the given port attached to an eDP panel
304 * @encoder: GMA encoder struct
305 *
306 * Returns true if the given DP output is attached to an eDP panel,
307 * false otherwise.
308 */
309 static bool is_edp(struct gma_encoder *encoder)
310 {
311 return encoder->type == INTEL_OUTPUT_EDP;
312 }
313
314
315 static void cdv_intel_dp_start_link_train(struct gma_encoder *encoder);
316 static void cdv_intel_dp_complete_link_train(struct gma_encoder *encoder);
317 static void cdv_intel_dp_link_down(struct gma_encoder *encoder);
318
319 static int
320 cdv_intel_dp_max_lane_count(struct gma_encoder *encoder)
321 {
322 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
323 int max_lane_count = 4;
324
325 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
326 max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
327 switch (max_lane_count) {
328 case 1: case 2: case 4:
329 break;
330 default:
331 max_lane_count = 4;
332 }
333 }
334 return max_lane_count;
335 }
336
337 static int
338 cdv_intel_dp_max_link_bw(struct gma_encoder *encoder)
339 {
340 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
341 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
342
343 switch (max_link_bw) {
344 case DP_LINK_BW_1_62:
345 case DP_LINK_BW_2_7:
346 break;
347 default:
348 max_link_bw = DP_LINK_BW_1_62;
349 break;
350 }
351 return max_link_bw;
352 }
353
354 static int
355 cdv_intel_dp_link_clock(uint8_t link_bw)
356 {
357 if (link_bw == DP_LINK_BW_2_7)
358 return 270000;
359 else
360 return 162000;
361 }
362
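/*
 * Bandwidth needed by a mode: pixel_clock is in kHz and bpp in bits, so
 * this is the required payload rate in kilobytes per second, rounded up.
 */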
363 static int
364 cdv_intel_dp_link_required(int pixel_clock, int bpp)
365 {
366 return (pixel_clock * bpp + 7) / 8;
367 }
368
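/*
 * Usable payload rate of the link: link clock (kHz) times lane count gives
 * kilobytes per second after 8b/10b coding, derated by 19/20 (presumably
 * to leave a little margin for link overhead).
 */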
369 static int
370 cdv_intel_dp_max_data_rate(int max_link_clock, int max_lanes)
371 {
372 return (max_link_clock * max_lanes * 19) / 20;
373 }
374
375 static void cdv_intel_edp_panel_vdd_on(struct gma_encoder *intel_encoder)
376 {
377 struct drm_device *dev = intel_encoder->base.dev;
378 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
379 u32 pp;
380
381 if (intel_dp->panel_on) {
382 DRM_DEBUG_KMS("Skip VDD on because of panel on\n");
383 return;
384 }
385 DRM_DEBUG_KMS("\n");
386
387 pp = REG_READ(PP_CONTROL);
388
389 pp |= EDP_FORCE_VDD;
390 REG_WRITE(PP_CONTROL, pp);
391 REG_READ(PP_CONTROL);
392 msleep(intel_dp->panel_power_up_delay);
393 }
394
395 static void cdv_intel_edp_panel_vdd_off(struct gma_encoder *intel_encoder)
396 {
397 struct drm_device *dev = intel_encoder->base.dev;
398 u32 pp;
399
400 DRM_DEBUG_KMS("\n");
401 pp = REG_READ(PP_CONTROL);
402
403 pp &= ~EDP_FORCE_VDD;
404 REG_WRITE(PP_CONTROL, pp);
405 REG_READ(PP_CONTROL);
406
407 }
408
409 /* Returns true if the panel was already on when called */
410 static bool cdv_intel_edp_panel_on(struct gma_encoder *intel_encoder)
411 {
412 struct drm_device *dev = intel_encoder->base.dev;
413 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
414 u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_NONE;
415
416 if (intel_dp->panel_on)
417 return true;
418
419 DRM_DEBUG_KMS("\n");
420 pp = REG_READ(PP_CONTROL);
421 pp &= ~PANEL_UNLOCK_MASK;
422
423 pp |= (PANEL_UNLOCK_REGS | POWER_TARGET_ON);
424 REG_WRITE(PP_CONTROL, pp);
425 REG_READ(PP_CONTROL);
426
427 if (wait_for(((REG_READ(PP_STATUS) & idle_on_mask) == idle_on_mask), 1000)) {
428 DRM_DEBUG_KMS("Error in Powering up eDP panel, status %x\n", REG_READ(PP_STATUS));
429 intel_dp->panel_on = false;
430 } else
431 intel_dp->panel_on = true;
432 msleep(intel_dp->panel_power_up_delay);
433
434 return false;
435 }
436
437 static void cdv_intel_edp_panel_off(struct gma_encoder *intel_encoder)
438 {
439 struct drm_device *dev = intel_encoder->base.dev;
440 u32 pp, idle_off_mask = PP_ON;
441 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
442
443 DRM_DEBUG_KMS("\n");
444
445 pp = REG_READ(PP_CONTROL);
446
447 if ((pp & POWER_TARGET_ON) == 0)
448 return;
449
450 intel_dp->panel_on = false;
451 pp &= ~PANEL_UNLOCK_MASK;
452 /* ILK workaround: disable reset around power sequence */
453
454 pp &= ~POWER_TARGET_ON;
455 pp &= ~EDP_FORCE_VDD;
456 pp &= ~EDP_BLC_ENABLE;
457 REG_WRITE(PP_CONTROL, pp);
458 REG_READ(PP_CONTROL);
459 DRM_DEBUG_KMS("PP_STATUS %x\n", REG_READ(PP_STATUS));
460
461 if (wait_for((REG_READ(PP_STATUS) & idle_off_mask) == 0, 1000)) {
462 DRM_DEBUG_KMS("Error in turning off Panel\n");
463 }
464
465 msleep(intel_dp->panel_power_cycle_delay);
466 DRM_DEBUG_KMS("Over\n");
467 }
468
469 static void cdv_intel_edp_backlight_on(struct gma_encoder *intel_encoder)
470 {
471 struct drm_device *dev = intel_encoder->base.dev;
472 u32 pp;
473
474 DRM_DEBUG_KMS("\n");
475 /*
476 * If we enable the backlight right away following a panel power
477 * on, we may see slight flicker as the panel syncs with the eDP
478 * link. So delay a bit to make sure the image is solid before
479 * allowing it to appear.
480 */
481 msleep(300);
482 pp = REG_READ(PP_CONTROL);
483
484 pp |= EDP_BLC_ENABLE;
485 REG_WRITE(PP_CONTROL, pp);
486 gma_backlight_enable(dev);
487 }
488
489 static void cdv_intel_edp_backlight_off(struct gma_encoder *intel_encoder)
490 {
491 struct drm_device *dev = intel_encoder->base.dev;
492 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
493 u32 pp;
494
495 DRM_DEBUG_KMS("\n");
496 gma_backlight_disable(dev);
497 msleep(10);
498 pp = REG_READ(PP_CONTROL);
499
500 pp &= ~EDP_BLC_ENABLE;
501 REG_WRITE(PP_CONTROL, pp);
502 msleep(intel_dp->backlight_off_delay);
503 }
504
505 static enum drm_mode_status
506 cdv_intel_dp_mode_valid(struct drm_connector *connector,
507 struct drm_display_mode *mode)
508 {
509 struct gma_encoder *encoder = gma_attached_encoder(connector);
510 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
511 int max_link_clock = cdv_intel_dp_link_clock(cdv_intel_dp_max_link_bw(encoder));
512 int max_lanes = cdv_intel_dp_max_lane_count(encoder);
513 struct drm_psb_private *dev_priv = to_drm_psb_private(connector->dev);
514
515 if (is_edp(encoder) && intel_dp->panel_fixed_mode) {
516 if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
517 return MODE_PANEL;
518 if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
519 return MODE_PANEL;
520 }
521
522 /* Only refuse the mode on non-eDP outputs, since we have seen some weird
523    eDP panels which are outside spec tolerances but somehow work by magic */
524 if (!is_edp(encoder) &&
525 (cdv_intel_dp_link_required(mode->clock, dev_priv->edp.bpp)
526 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes)))
527 return MODE_CLOCK_HIGH;
528
529 if (is_edp(encoder)) {
530 if (cdv_intel_dp_link_required(mode->clock, 24)
531 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes))
532 return MODE_CLOCK_HIGH;
533
534 }
535 if (mode->clock < 10000)
536 return MODE_CLOCK_LOW;
537
538 return MODE_OK;
539 }
540
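/*
 * The AUX channel data registers each hold up to four message bytes packed
 * big-endian into one 32-bit word; pack_aux()/unpack_aux() convert between
 * the byte stream and that register layout.
 */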
541 static uint32_t
542 pack_aux(uint8_t *src, int src_bytes)
543 {
544 int i;
545 uint32_t v = 0;
546
547 if (src_bytes > 4)
548 src_bytes = 4;
549 for (i = 0; i < src_bytes; i++)
550 v |= ((uint32_t) src[i]) << ((3-i) * 8);
551 return v;
552 }
553
554 static void
555 unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
556 {
557 int i;
558 if (dst_bytes > 4)
559 dst_bytes = 4;
560 for (i = 0; i < dst_bytes; i++)
561 dst[i] = src >> ((3-i) * 8);
562 }
563
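/*
 * Run one raw AUX channel transaction: load the request into the data
 * registers, start the transfer, poll for completion and copy back any
 * reply. Returns the number of reply bytes or a negative errno.
 */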
564 static int
565 cdv_intel_dp_aux_ch(struct gma_encoder *encoder,
566 uint8_t *send, int send_bytes,
567 uint8_t *recv, int recv_size)
568 {
569 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
570 uint32_t output_reg = intel_dp->output_reg;
571 struct drm_device *dev = encoder->base.dev;
572 uint32_t ch_ctl = output_reg + 0x10;
573 uint32_t ch_data = ch_ctl + 4;
574 int i;
575 int recv_bytes;
576 uint32_t status;
577 uint32_t aux_clock_divider;
578 int try, precharge;
579
580 /* The clock divider is based off the hrawclk, and the
581 * AUX channel wants to run at 2MHz. So take the hrawclk
582 * value (200MHz on the CDV platform) and divide it by
583 * 2MHz to get the divider.
584 *
585 */
586 aux_clock_divider = 200 / 2;
587
588 precharge = 4;
589 if (is_edp(encoder))
590 precharge = 10;
591
592 if (REG_READ(ch_ctl) & DP_AUX_CH_CTL_SEND_BUSY) {
593 DRM_ERROR("dp_aux_ch not started status 0x%08x\n",
594 REG_READ(ch_ctl));
595 return -EBUSY;
596 }
597
598 /* Must try at least 3 times according to DP spec */
599 for (try = 0; try < 5; try++) {
600 /* Load the send data into the aux channel data registers */
601 for (i = 0; i < send_bytes; i += 4)
602 REG_WRITE(ch_data + i,
603 pack_aux(send + i, send_bytes - i));
604
605 /* Send the command and wait for it to complete */
606 REG_WRITE(ch_ctl,
607 DP_AUX_CH_CTL_SEND_BUSY |
608 DP_AUX_CH_CTL_TIME_OUT_400us |
609 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
610 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
611 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
612 DP_AUX_CH_CTL_DONE |
613 DP_AUX_CH_CTL_TIME_OUT_ERROR |
614 DP_AUX_CH_CTL_RECEIVE_ERROR);
615 for (;;) {
616 status = REG_READ(ch_ctl);
617 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
618 break;
619 udelay(100);
620 }
621
622 /* Clear done status and any errors */
623 REG_WRITE(ch_ctl,
624 status |
625 DP_AUX_CH_CTL_DONE |
626 DP_AUX_CH_CTL_TIME_OUT_ERROR |
627 DP_AUX_CH_CTL_RECEIVE_ERROR);
628 if (status & DP_AUX_CH_CTL_DONE)
629 break;
630 }
631
632 if ((status & DP_AUX_CH_CTL_DONE) == 0) {
633 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
634 return -EBUSY;
635 }
636
637 /* Check for timeout or receive error.
638 * Timeouts occur when the sink is not connected
639 */
640 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
641 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
642 return -EIO;
643 }
644
645 /* Timeouts occur when the device isn't connected, so they're
646 * "normal" -- don't fill the kernel log with these */
647 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
648 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
649 return -ETIMEDOUT;
650 }
651
652 /* Unload any bytes sent back from the other side */
653 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
654 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
655 if (recv_bytes > recv_size)
656 recv_bytes = recv_size;
657
658 for (i = 0; i < recv_bytes; i += 4)
659 unpack_aux(REG_READ(ch_data + i),
660 recv + i, recv_bytes - i);
661
662 return recv_bytes;
663 }
664
665 /* Write data to the aux channel in native mode */
666 static int
667 cdv_intel_dp_aux_native_write(struct gma_encoder *encoder,
668 uint16_t address, uint8_t *send, int send_bytes)
669 {
670 int ret;
671 uint8_t msg[20];
672 int msg_bytes;
673 uint8_t ack;
674
675 if (send_bytes > 16)
676 return -1;
677 msg[0] = DP_AUX_NATIVE_WRITE << 4;
678 msg[1] = address >> 8;
679 msg[2] = address & 0xff;
680 msg[3] = send_bytes - 1;
681 memcpy(&msg[4], send, send_bytes);
682 msg_bytes = send_bytes + 4;
683 for (;;) {
684 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes, &ack, 1);
685 if (ret < 0)
686 return ret;
687 ack >>= 4;
688 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK)
689 break;
690 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
691 udelay(100);
692 else
693 return -EIO;
694 }
695 return send_bytes;
696 }
697
698 /* Write a single byte to the aux channel in native mode */
699 static int
700 cdv_intel_dp_aux_native_write_1(struct gma_encoder *encoder,
701 uint16_t address, uint8_t byte)
702 {
703 return cdv_intel_dp_aux_native_write(encoder, address, &byte, 1);
704 }
705
706 /* read bytes from a native aux channel */
707 static int
708 cdv_intel_dp_aux_native_read(struct gma_encoder *encoder,
709 uint16_t address, uint8_t *recv, int recv_bytes)
710 {
711 uint8_t msg[4];
712 int msg_bytes;
713 uint8_t reply[20];
714 int reply_bytes;
715 uint8_t ack;
716 int ret;
717
718 msg[0] = DP_AUX_NATIVE_READ << 4;
719 msg[1] = address >> 8;
720 msg[2] = address & 0xff;
721 msg[3] = recv_bytes - 1;
722
723 msg_bytes = 4;
724 reply_bytes = recv_bytes + 1;
725
726 for (;;) {
727 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes,
728 reply, reply_bytes);
729 if (ret == 0)
730 return -EPROTO;
731 if (ret < 0)
732 return ret;
733 ack = reply[0] >> 4;
734 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) {
735 memcpy(recv, reply + 1, ret - 1);
736 return ret - 1;
737 }
738 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
739 udelay(100);
740 else
741 return -EIO;
742 }
743 }
744
745 static int
746 cdv_intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
747 uint8_t write_byte, uint8_t *read_byte)
748 {
749 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
750 struct cdv_intel_dp *intel_dp = container_of(adapter,
751 struct cdv_intel_dp,
752 adapter);
753 struct gma_encoder *encoder = intel_dp->encoder;
754 uint16_t address = algo_data->address;
755 uint8_t msg[5];
756 uint8_t reply[2];
757 unsigned retry;
758 int msg_bytes;
759 int reply_bytes;
760 int ret;
761
762 /* Set up the command byte */
763 if (mode & MODE_I2C_READ)
764 msg[0] = DP_AUX_I2C_READ << 4;
765 else
766 msg[0] = DP_AUX_I2C_WRITE << 4;
767
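/* The MOT (Middle-Of-Transaction) bit keeps the I2C transfer open between
 * AUX messages; it is left clear on the final STOP so the sink can end
 * the transfer. */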
768 if (!(mode & MODE_I2C_STOP))
769 msg[0] |= DP_AUX_I2C_MOT << 4;
770
771 msg[1] = address >> 8;
772 msg[2] = address;
773
774 switch (mode) {
775 case MODE_I2C_WRITE:
776 msg[3] = 0;
777 msg[4] = write_byte;
778 msg_bytes = 5;
779 reply_bytes = 1;
780 break;
781 case MODE_I2C_READ:
782 msg[3] = 0;
783 msg_bytes = 4;
784 reply_bytes = 2;
785 break;
786 default:
787 msg_bytes = 3;
788 reply_bytes = 1;
789 break;
790 }
791
792 for (retry = 0; retry < 5; retry++) {
793 ret = cdv_intel_dp_aux_ch(encoder,
794 msg, msg_bytes,
795 reply, reply_bytes);
796 if (ret < 0) {
797 DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
798 return ret;
799 }
800
801 switch ((reply[0] >> 4) & DP_AUX_NATIVE_REPLY_MASK) {
802 case DP_AUX_NATIVE_REPLY_ACK:
803 /* I2C-over-AUX Reply field is only valid
804 * when paired with AUX ACK.
805 */
806 break;
807 case DP_AUX_NATIVE_REPLY_NACK:
808 DRM_DEBUG_KMS("aux_ch native nack\n");
809 return -EREMOTEIO;
810 case DP_AUX_NATIVE_REPLY_DEFER:
811 udelay(100);
812 continue;
813 default:
814 DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
815 reply[0]);
816 return -EREMOTEIO;
817 }
818
819 switch ((reply[0] >> 4) & DP_AUX_I2C_REPLY_MASK) {
820 case DP_AUX_I2C_REPLY_ACK:
821 if (mode == MODE_I2C_READ) {
822 *read_byte = reply[1];
823 }
824 return reply_bytes - 1;
825 case DP_AUX_I2C_REPLY_NACK:
826 DRM_DEBUG_KMS("aux_i2c nack\n");
827 return -EREMOTEIO;
828 case DP_AUX_I2C_REPLY_DEFER:
829 DRM_DEBUG_KMS("aux_i2c defer\n");
830 udelay(100);
831 break;
832 default:
833 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
834 return -EREMOTEIO;
835 }
836 }
837
838 DRM_ERROR("too many retries, giving up\n");
839 return -EREMOTEIO;
840 }
841
842 static int
843 cdv_intel_dp_i2c_init(struct gma_connector *connector,
844 struct gma_encoder *encoder, const char *name)
845 {
846 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
847 int ret;
848
849 DRM_DEBUG_KMS("i2c_init %s\n", name);
850
851 intel_dp->algo.running = false;
852 intel_dp->algo.address = 0;
853 intel_dp->algo.aux_ch = cdv_intel_dp_i2c_aux_ch;
854
855 memset(&intel_dp->adapter, '\0', sizeof (intel_dp->adapter));
856 intel_dp->adapter.owner = THIS_MODULE;
857 intel_dp->adapter.class = I2C_CLASS_DDC;
858 strncpy (intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
859 intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
860 intel_dp->adapter.algo_data = &intel_dp->algo;
861 intel_dp->adapter.dev.parent = connector->base.kdev;
862
863 if (is_edp(encoder))
864 cdv_intel_edp_panel_vdd_on(encoder);
865 ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
866 if (is_edp(encoder))
867 cdv_intel_edp_panel_vdd_off(encoder);
868
869 return ret;
870 }
871
872 static void cdv_intel_fixed_panel_mode(struct drm_display_mode *fixed_mode,
873 struct drm_display_mode *adjusted_mode)
874 {
875 adjusted_mode->hdisplay = fixed_mode->hdisplay;
876 adjusted_mode->hsync_start = fixed_mode->hsync_start;
877 adjusted_mode->hsync_end = fixed_mode->hsync_end;
878 adjusted_mode->htotal = fixed_mode->htotal;
879
880 adjusted_mode->vdisplay = fixed_mode->vdisplay;
881 adjusted_mode->vsync_start = fixed_mode->vsync_start;
882 adjusted_mode->vsync_end = fixed_mode->vsync_end;
883 adjusted_mode->vtotal = fixed_mode->vtotal;
884
885 adjusted_mode->clock = fixed_mode->clock;
886
887 drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
888 }
889
890 static bool
891 cdv_intel_dp_mode_fixup(struct drm_encoder *encoder, const struct drm_display_mode *mode,
892 struct drm_display_mode *adjusted_mode)
893 {
894 struct drm_psb_private *dev_priv = to_drm_psb_private(encoder->dev);
895 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
896 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
897 int lane_count, clock;
898 int max_lane_count = cdv_intel_dp_max_lane_count(intel_encoder);
899 int max_clock = cdv_intel_dp_max_link_bw(intel_encoder) == DP_LINK_BW_2_7 ? 1 : 0;
900 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
901 int refclock = mode->clock;
902 int bpp = 24;
903
904 if (is_edp(intel_encoder) && intel_dp->panel_fixed_mode) {
905 cdv_intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
906 refclock = intel_dp->panel_fixed_mode->clock;
907 bpp = dev_priv->edp.bpp;
908 }
909
910 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
911 for (clock = max_clock; clock >= 0; clock--) {
912 int link_avail = cdv_intel_dp_max_data_rate(cdv_intel_dp_link_clock(bws[clock]), lane_count);
913
914 if (cdv_intel_dp_link_required(refclock, bpp) <= link_avail) {
915 intel_dp->link_bw = bws[clock];
916 intel_dp->lane_count = lane_count;
917 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
918 DRM_DEBUG_KMS("Display port link bw %02x lane "
919 "count %d clock %d\n",
920 intel_dp->link_bw, intel_dp->lane_count,
921 adjusted_mode->clock);
922 return true;
923 }
924 }
925 }
926 if (is_edp(intel_encoder)) {
927 /* okay we failed just pick the highest */
928 intel_dp->lane_count = max_lane_count;
929 intel_dp->link_bw = bws[max_clock];
930 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
931 DRM_DEBUG_KMS("Force picking display port link bw %02x lane "
932 "count %d clock %d\n",
933 intel_dp->link_bw, intel_dp->lane_count,
934 adjusted_mode->clock);
935
936 return true;
937 }
938 return false;
939 }
940
941 struct cdv_intel_dp_m_n {
942 uint32_t tu;
943 uint32_t gmch_m;
944 uint32_t gmch_n;
945 uint32_t link_m;
946 uint32_t link_n;
947 };
948
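/*
 * Scale the num/den ratio so that the denominator becomes a fixed 0x800000;
 * the M/N registers then carry the same ratio with a known denominator.
 */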
949 static void
950 cdv_intel_reduce_ratio(uint32_t *num, uint32_t *den)
951 {
952 /*
953 while (*num > 0xffffff || *den > 0xffffff) {
954 *num >>= 1;
955 *den >>= 1;
956 }*/
957 uint64_t value, m;
958 m = *num;
959 value = m * (0x800000);
960 m = do_div(value, *den);
961 *num = value;
962 *den = 0x800000;
963 }
964
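/*
 * GMCH M/N is the ratio of pixel payload (pixel clock * bpp / 8) to total
 * link bandwidth (link clock * lane count); link M/N is the plain pixel
 * clock to link clock ratio. Both are programmed per pipe in
 * cdv_intel_dp_set_m_n().
 */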
965 static void
966 cdv_intel_dp_compute_m_n(int bpp,
967 int nlanes,
968 int pixel_clock,
969 int link_clock,
970 struct cdv_intel_dp_m_n *m_n)
971 {
972 m_n->tu = 64;
973 m_n->gmch_m = (pixel_clock * bpp + 7) >> 3;
974 m_n->gmch_n = link_clock * nlanes;
975 cdv_intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
976 m_n->link_m = pixel_clock;
977 m_n->link_n = link_clock;
978 cdv_intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
979 }
980
981 void
982 cdv_intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
983 struct drm_display_mode *adjusted_mode)
984 {
985 struct drm_device *dev = crtc->dev;
986 struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
987 struct drm_mode_config *mode_config = &dev->mode_config;
988 struct drm_encoder *encoder;
989 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
990 int lane_count = 4, bpp = 24;
991 struct cdv_intel_dp_m_n m_n;
992 int pipe = gma_crtc->pipe;
993
994 /*
995 * Find the lane count in the intel_encoder private
996 */
997 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
998 struct gma_encoder *intel_encoder;
999 struct cdv_intel_dp *intel_dp;
1000
1001 if (encoder->crtc != crtc)
1002 continue;
1003
1004 intel_encoder = to_gma_encoder(encoder);
1005 intel_dp = intel_encoder->dev_priv;
1006 if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT) {
1007 lane_count = intel_dp->lane_count;
1008 break;
1009 } else if (is_edp(intel_encoder)) {
1010 lane_count = intel_dp->lane_count;
1011 bpp = dev_priv->edp.bpp;
1012 break;
1013 }
1014 }
1015
1016 /*
1017 * Compute the GMCH and Link ratios. bpp is the number of
1018 * bits per pixel post-LUT, which we set up for 8 bits each
1019 * of R/G/B (24 bpp) unless the eDP VBT specifies otherwise.
1020 */
1021 cdv_intel_dp_compute_m_n(bpp, lane_count,
1022 mode->clock, adjusted_mode->clock, &m_n);
1023
1024 {
1025 REG_WRITE(PIPE_GMCH_DATA_M(pipe),
1026 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
1027 m_n.gmch_m);
1028 REG_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
1029 REG_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
1030 REG_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
1031 }
1032 }
1033
1034 static void
1035 cdv_intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1036 struct drm_display_mode *adjusted_mode)
1037 {
1038 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1039 struct drm_crtc *crtc = encoder->crtc;
1040 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
1041 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1042 struct drm_device *dev = encoder->dev;
1043
1044 intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
1045 intel_dp->DP |= intel_dp->color_range;
1046
1047 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
1048 intel_dp->DP |= DP_SYNC_HS_HIGH;
1049 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
1050 intel_dp->DP |= DP_SYNC_VS_HIGH;
1051
1052 intel_dp->DP |= DP_LINK_TRAIN_OFF;
1053
1054 switch (intel_dp->lane_count) {
1055 case 1:
1056 intel_dp->DP |= DP_PORT_WIDTH_1;
1057 break;
1058 case 2:
1059 intel_dp->DP |= DP_PORT_WIDTH_2;
1060 break;
1061 case 4:
1062 intel_dp->DP |= DP_PORT_WIDTH_4;
1063 break;
1064 }
1065 if (intel_dp->has_audio)
1066 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
1067
1068 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
1069 intel_dp->link_configuration[0] = intel_dp->link_bw;
1070 intel_dp->link_configuration[1] = intel_dp->lane_count;
1071
1072 /*
1073 * Check for DPCD version > 1.1 and enhanced framing support
1074 */
1075 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
1076 (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
1077 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
1078 intel_dp->DP |= DP_ENHANCED_FRAMING;
1079 }
1080
1081 /* CPT DP's pipe select is decided in TRANS_DP_CTL */
1082 if (gma_crtc->pipe == 1)
1083 intel_dp->DP |= DP_PIPEB_SELECT;
1084
1085 REG_WRITE(intel_dp->output_reg, (intel_dp->DP | DP_PORT_EN));
1086 DRM_DEBUG_KMS("DP expected reg is %x\n", intel_dp->DP);
1087 if (is_edp(intel_encoder)) {
1088 uint32_t pfit_control;
1089 cdv_intel_edp_panel_on(intel_encoder);
1090
1091 if (mode->hdisplay != adjusted_mode->hdisplay ||
1092 mode->vdisplay != adjusted_mode->vdisplay)
1093 pfit_control = PFIT_ENABLE;
1094 else
1095 pfit_control = 0;
1096
1097 pfit_control |= gma_crtc->pipe << PFIT_PIPE_SHIFT;
1098
1099 REG_WRITE(PFIT_CONTROL, pfit_control);
1100 }
1101 }
1102
1103
1104 /* If the sink supports it, try to set the power state appropriately */
1105 static void cdv_intel_dp_sink_dpms(struct gma_encoder *encoder, int mode)
1106 {
1107 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1108 int ret, i;
1109
1110 /* Should have a valid DPCD by this point */
1111 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
1112 return;
1113
1114 if (mode != DRM_MODE_DPMS_ON) {
1115 ret = cdv_intel_dp_aux_native_write_1(encoder, DP_SET_POWER,
1116 DP_SET_POWER_D3);
1117 if (ret != 1)
1118 DRM_DEBUG_DRIVER("failed to write sink power state\n");
1119 } else {
1120 /*
1121 * When turning on, we need to retry for 1ms to give the sink
1122 * time to wake up.
1123 */
1124 for (i = 0; i < 3; i++) {
1125 ret = cdv_intel_dp_aux_native_write_1(encoder,
1126 DP_SET_POWER,
1127 DP_SET_POWER_D0);
1128 if (ret == 1)
1129 break;
1130 udelay(1000);
1131 }
1132 }
1133 }
1134
1135 static void cdv_intel_dp_prepare(struct drm_encoder *encoder)
1136 {
1137 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1138 int edp = is_edp(intel_encoder);
1139
1140 if (edp) {
1141 cdv_intel_edp_backlight_off(intel_encoder);
1142 cdv_intel_edp_panel_off(intel_encoder);
1143 cdv_intel_edp_panel_vdd_on(intel_encoder);
1144 }
1145 /* Wake up the sink first */
1146 cdv_intel_dp_sink_dpms(intel_encoder, DRM_MODE_DPMS_ON);
1147 cdv_intel_dp_link_down(intel_encoder);
1148 if (edp)
1149 cdv_intel_edp_panel_vdd_off(intel_encoder);
1150 }
1151
1152 static void cdv_intel_dp_commit(struct drm_encoder *encoder)
1153 {
1154 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1155 int edp = is_edp(intel_encoder);
1156
1157 if (edp)
1158 cdv_intel_edp_panel_on(intel_encoder);
1159 cdv_intel_dp_start_link_train(intel_encoder);
1160 cdv_intel_dp_complete_link_train(intel_encoder);
1161 if (edp)
1162 cdv_intel_edp_backlight_on(intel_encoder);
1163 }
1164
1165 static void
1166 cdv_intel_dp_dpms(struct drm_encoder *encoder, int mode)
1167 {
1168 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1169 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1170 struct drm_device *dev = encoder->dev;
1171 uint32_t dp_reg = REG_READ(intel_dp->output_reg);
1172 int edp = is_edp(intel_encoder);
1173
1174 if (mode != DRM_MODE_DPMS_ON) {
1175 if (edp) {
1176 cdv_intel_edp_backlight_off(intel_encoder);
1177 cdv_intel_edp_panel_vdd_on(intel_encoder);
1178 }
1179 cdv_intel_dp_sink_dpms(intel_encoder, mode);
1180 cdv_intel_dp_link_down(intel_encoder);
1181 if (edp) {
1182 cdv_intel_edp_panel_vdd_off(intel_encoder);
1183 cdv_intel_edp_panel_off(intel_encoder);
1184 }
1185 } else {
1186 if (edp)
1187 cdv_intel_edp_panel_on(intel_encoder);
1188 cdv_intel_dp_sink_dpms(intel_encoder, mode);
1189 if (!(dp_reg & DP_PORT_EN)) {
1190 cdv_intel_dp_start_link_train(intel_encoder);
1191 cdv_intel_dp_complete_link_train(intel_encoder);
1192 }
1193 if (edp)
1194 cdv_intel_edp_backlight_on(intel_encoder);
1195 }
1196 }
1197
1198 /*
1199 * Native read with retry for link status and receiver capability reads for
1200 * cases where the sink may still be asleep.
1201 */
1202 static bool
1203 cdv_intel_dp_aux_native_read_retry(struct gma_encoder *encoder, uint16_t address,
1204 uint8_t *recv, int recv_bytes)
1205 {
1206 int ret, i;
1207
1208 /*
1209 * Sinks are *supposed* to come up within 1ms from an off state,
1210 * but we're also supposed to retry 3 times per the spec.
1211 */
1212 for (i = 0; i < 3; i++) {
1213 ret = cdv_intel_dp_aux_native_read(encoder, address, recv,
1214 recv_bytes);
1215 if (ret == recv_bytes)
1216 return true;
1217 udelay(1000);
1218 }
1219
1220 return false;
1221 }
1222
1223 /*
1224 * Fetch AUX CH registers 0x202 - 0x207 which contain
1225 * link status information
1226 */
1227 static bool
1228 cdv_intel_dp_get_link_status(struct gma_encoder *encoder)
1229 {
1230 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1231 return cdv_intel_dp_aux_native_read_retry(encoder,
1232 DP_LANE0_1_STATUS,
1233 intel_dp->link_status,
1234 DP_LINK_STATUS_SIZE);
1235 }
1236
1237 static uint8_t
1238 cdv_intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1239 int r)
1240 {
1241 return link_status[r - DP_LANE0_1_STATUS];
1242 }
1243
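/*
 * Each DP_ADJUST_REQUEST_LANEx_y byte carries the requests for two lanes,
 * two bits of voltage swing and two bits of pre-emphasis per lane; the
 * helpers below extract one lane's request and shift it into the
 * DP_TRAIN_* positions used for the TRAINING_LANEx_SET writes.
 */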
1244 static uint8_t
1245 cdv_intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
1246 int lane)
1247 {
1248 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1249 int s = ((lane & 1) ?
1250 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
1251 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
1252 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1253
1254 return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
1255 }
1256
1257 static uint8_t
1258 cdv_intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
1259 int lane)
1260 {
1261 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1262 int s = ((lane & 1) ?
1263 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
1264 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
1265 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1266
1267 return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
1268 }
1269
1270 #define CDV_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_LEVEL_3
1271
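/*
 * Pick the highest voltage swing and pre-emphasis requested by any lane
 * and use that value for all lanes, flagging the MAX_*_REACHED bits once
 * the ceiling is hit.
 */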
1272 static void
1273 cdv_intel_get_adjust_train(struct gma_encoder *encoder)
1274 {
1275 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1276 uint8_t v = 0;
1277 uint8_t p = 0;
1278 int lane;
1279
1280 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1281 uint8_t this_v = cdv_intel_get_adjust_request_voltage(intel_dp->link_status, lane);
1282 uint8_t this_p = cdv_intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane);
1283
1284 if (this_v > v)
1285 v = this_v;
1286 if (this_p > p)
1287 p = this_p;
1288 }
1289
1290 if (v >= CDV_DP_VOLTAGE_MAX)
1291 v = CDV_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
1292
1293 if (p == DP_TRAIN_PRE_EMPHASIS_MASK)
1294 p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
1295
1296 for (lane = 0; lane < 4; lane++)
1297 intel_dp->train_set[lane] = v | p;
1298 }
1299
1300
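/* Each DP_LANE0_1_STATUS style byte holds two lanes' 4-bit status nibbles */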
1301 static uint8_t
1302 cdv_intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1303 int lane)
1304 {
1305 int i = DP_LANE0_1_STATUS + (lane >> 1);
1306 int s = (lane & 1) * 4;
1307 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1308
1309 return (l >> s) & 0xf;
1310 }
1311
1312 /* Check for clock recovery is done on all channels */
1313 static bool
1314 cdv_intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
1315 {
1316 int lane;
1317 uint8_t lane_status;
1318
1319 for (lane = 0; lane < lane_count; lane++) {
1320 lane_status = cdv_intel_get_lane_status(link_status, lane);
1321 if ((lane_status & DP_LANE_CR_DONE) == 0)
1322 return false;
1323 }
1324 return true;
1325 }
1326
1327 /* Check to see if channel eq is done on all channels */
1328 #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
1329 DP_LANE_CHANNEL_EQ_DONE|\
1330 DP_LANE_SYMBOL_LOCKED)
1331 static bool
1332 cdv_intel_channel_eq_ok(struct gma_encoder *encoder)
1333 {
1334 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1335 uint8_t lane_align;
1336 uint8_t lane_status;
1337 int lane;
1338
1339 lane_align = cdv_intel_dp_link_status(intel_dp->link_status,
1340 DP_LANE_ALIGN_STATUS_UPDATED);
1341 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
1342 return false;
1343 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1344 lane_status = cdv_intel_get_lane_status(intel_dp->link_status, lane);
1345 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
1346 return false;
1347 }
1348 return true;
1349 }
1350
1351 static bool
1352 cdv_intel_dp_set_link_train(struct gma_encoder *encoder,
1353 uint32_t dp_reg_value,
1354 uint8_t dp_train_pat)
1355 {
1356 struct drm_device *dev = encoder->base.dev;
1357 int ret;
1358 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1359
1360 REG_WRITE(intel_dp->output_reg, dp_reg_value);
1361 REG_READ(intel_dp->output_reg);
1362
1363 ret = cdv_intel_dp_aux_native_write_1(encoder,
1364 DP_TRAINING_PATTERN_SET,
1365 dp_train_pat);
1366
1367 if (ret != 1) {
1368 DRM_DEBUG_KMS("Failure in setting link pattern %x\n",
1369 dp_train_pat);
1370 return false;
1371 }
1372
1373 return true;
1374 }
1375
1376
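/*
 * Push the per-lane voltage swing / pre-emphasis values in train_set[] to
 * the sink's TRAINING_LANEx_SET registers.
 */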
1377 static bool
1378 cdv_intel_dplink_set_level(struct gma_encoder *encoder,
1379 uint8_t dp_train_pat)
1380 {
1381 int ret;
1382 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1383
1384 ret = cdv_intel_dp_aux_native_write(encoder,
1385 DP_TRAINING_LANE0_SET,
1386 intel_dp->train_set,
1387 intel_dp->lane_count);
1388
1389 if (ret != intel_dp->lane_count) {
1390 DRM_DEBUG_KMS("Failure in setting level %d, lane_cnt= %d\n",
1391 intel_dp->train_set[0], intel_dp->lane_count);
1392 return false;
1393 }
1394 return true;
1395 }
1396
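/*
 * Program the source-side voltage swing and pre-emphasis via the sideband
 * (DPIO) registers. With CDV_FAST_LINK_TRAIN defined this returns early
 * and the hardware defaults are kept.
 */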
1397 static void
1398 cdv_intel_dp_set_vswing_premph(struct gma_encoder *encoder, uint8_t signal_level)
1399 {
1400 struct drm_device *dev = encoder->base.dev;
1401 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1402 struct ddi_regoff *ddi_reg;
1403 int vswing, premph, index;
1404
1405 if (intel_dp->output_reg == DP_B)
1406 ddi_reg = &ddi_DP_train_table[0];
1407 else
1408 ddi_reg = &ddi_DP_train_table[1];
1409
1410 vswing = (signal_level & DP_TRAIN_VOLTAGE_SWING_MASK);
1411 premph = ((signal_level & DP_TRAIN_PRE_EMPHASIS_MASK)) >>
1412 DP_TRAIN_PRE_EMPHASIS_SHIFT;
1413
1414 if (vswing + premph > 3)
1415 return;
1416 #ifdef CDV_FAST_LINK_TRAIN
1417 return;
1418 #endif
1419 DRM_DEBUG_KMS("Test2\n");
1420 //return ;
1421 cdv_sb_reset(dev);
1422 /* ;Swing voltage programming
1423 ;gfx_dpio_set_reg(0xc058, 0x0505313A) */
1424 cdv_sb_write(dev, ddi_reg->VSwing5, 0x0505313A);
1425
1426 /* ;gfx_dpio_set_reg(0x8154, 0x43406055) */
1427 cdv_sb_write(dev, ddi_reg->VSwing1, 0x43406055);
1428
1429 /* ;gfx_dpio_set_reg(0x8148, 0x55338954)
1430 * The VSwing_PreEmph table is also consulted based on the vswing/premph level
1431 */
1432 index = (vswing + premph) * 2;
1433 if (premph == 1 && vswing == 1) {
1434 cdv_sb_write(dev, ddi_reg->VSwing2, 0x055738954);
1435 } else
1436 cdv_sb_write(dev, ddi_reg->VSwing2, dp_vswing_premph_table[index]);
1437
1438 /* ;gfx_dpio_set_reg(0x814c, 0x40802040) */
1439 if ((vswing + premph) == DP_TRAIN_VOLTAGE_SWING_LEVEL_3)
1440 cdv_sb_write(dev, ddi_reg->VSwing3, 0x70802040);
1441 else
1442 cdv_sb_write(dev, ddi_reg->VSwing3, 0x40802040);
1443
1444 /* ;gfx_dpio_set_reg(0x8150, 0x2b405555) */
1445 /* cdv_sb_write(dev, ddi_reg->VSwing4, 0x2b405555); */
1446
1447 /* ;gfx_dpio_set_reg(0x8154, 0xc3406055) */
1448 cdv_sb_write(dev, ddi_reg->VSwing1, 0xc3406055);
1449
1450 /* ;Pre emphasis programming
1451 * ;gfx_dpio_set_reg(0xc02c, 0x1f030040)
1452 */
1453 cdv_sb_write(dev, ddi_reg->PreEmph1, 0x1f030040);
1454
1455 /* ;gfx_dpio_set_reg(0x8124, 0x00004000) */
1456 index = 2 * premph + 1;
1457 cdv_sb_write(dev, ddi_reg->PreEmph2, dp_vswing_premph_table[index]);
1458 return;
1459 }
1460
1461
1462 /* Enable corresponding port and start training pattern 1 */
1463 static void
1464 cdv_intel_dp_start_link_train(struct gma_encoder *encoder)
1465 {
1466 struct drm_device *dev = encoder->base.dev;
1467 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1468 int i;
1469 uint8_t voltage;
1470 bool clock_recovery = false;
1471 int tries;
1472 u32 reg;
1473 uint32_t DP = intel_dp->DP;
1474
1475 DP |= DP_PORT_EN;
1476 DP &= ~DP_LINK_TRAIN_MASK;
1477
1478 reg = DP;
1479 reg |= DP_LINK_TRAIN_PAT_1;
1480 /* Enable output, wait for it to become active */
1481 REG_WRITE(intel_dp->output_reg, reg);
1482 REG_READ(intel_dp->output_reg);
1483 gma_wait_for_vblank(dev);
1484
1485 DRM_DEBUG_KMS("Link config\n");
1486 /* Write the link configuration data */
1487 cdv_intel_dp_aux_native_write(encoder, DP_LINK_BW_SET,
1488 intel_dp->link_configuration,
1489 2);
1490
1491 memset(intel_dp->train_set, 0, 4);
1492 voltage = 0;
1493 tries = 0;
1494 clock_recovery = false;
1495
1496 DRM_DEBUG_KMS("Start train\n");
1497 reg = DP | DP_LINK_TRAIN_PAT_1;
1498
1499 for (;;) {
1500 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1501 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
1502 intel_dp->train_set[0],
1503 intel_dp->link_configuration[0],
1504 intel_dp->link_configuration[1]);
1505
1506 if (!cdv_intel_dp_set_link_train(encoder, reg, DP_TRAINING_PATTERN_1)) {
1507 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 1\n");
1508 }
1509 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
1510 /* Set training pattern 1 */
1511
1512 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_1);
1513
1514 udelay(200);
1515 if (!cdv_intel_dp_get_link_status(encoder))
1516 break;
1517
1518 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
1519 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
1520 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
1521
1522 if (cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1523 DRM_DEBUG_KMS("PT1 train is done\n");
1524 clock_recovery = true;
1525 break;
1526 }
1527
1528 /* Check to see if we've tried the max voltage */
1529 for (i = 0; i < intel_dp->lane_count; i++)
1530 if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
1531 break;
1532 if (i == intel_dp->lane_count)
1533 break;
1534
1535 /* Check to see if we've tried the same voltage 5 times */
1536 if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
1537 ++tries;
1538 if (tries == 5)
1539 break;
1540 } else
1541 tries = 0;
1542 voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
1543
1544 /* Compute new intel_dp->train_set as requested by target */
1545 cdv_intel_get_adjust_train(encoder);
1546
1547 }
1548
1549 if (!clock_recovery) {
1550 DRM_DEBUG_KMS("failure in DP patter 1 training, train set %x\n", intel_dp->train_set[0]);
1551 }
1552
1553 intel_dp->DP = DP;
1554 }
1555
1556 static void
1557 cdv_intel_dp_complete_link_train(struct gma_encoder *encoder)
1558 {
1559 struct drm_device *dev = encoder->base.dev;
1560 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1561 int tries, cr_tries;
1562 u32 reg;
1563 uint32_t DP = intel_dp->DP;
1564
1565 /* channel equalization */
1566 tries = 0;
1567 cr_tries = 0;
1568
1569 DRM_DEBUG_KMS("\n");
1570 reg = DP | DP_LINK_TRAIN_PAT_2;
1571
1572 for (;;) {
1573
1574 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
1575 intel_dp->train_set[0],
1576 intel_dp->link_configuration[0],
1577 intel_dp->link_configuration[1]);
1578 /* channel eq pattern */
1579
1580 if (!cdv_intel_dp_set_link_train(encoder, reg,
1581 DP_TRAINING_PATTERN_2)) {
1582 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 2\n");
1583 }
1584 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1585
1586 if (cr_tries > 5) {
1587 DRM_ERROR("failed to train DP, aborting\n");
1588 cdv_intel_dp_link_down(encoder);
1589 break;
1590 }
1591
1592 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
1593
1594 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_2);
1595
1596 udelay(1000);
1597 if (!cdv_intel_dp_get_link_status(encoder))
1598 break;
1599
1600 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
1601 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
1602 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
1603
1604 /* Make sure clock is still ok */
1605 if (!cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1606 cdv_intel_dp_start_link_train(encoder);
1607 cr_tries++;
1608 continue;
1609 }
1610
1611 if (cdv_intel_channel_eq_ok(encoder)) {
1612 DRM_DEBUG_KMS("PT2 train is done\n");
1613 break;
1614 }
1615
1616 /* Try 5 times, then try clock recovery if that fails */
1617 if (tries > 5) {
1618 cdv_intel_dp_link_down(encoder);
1619 cdv_intel_dp_start_link_train(encoder);
1620 tries = 0;
1621 cr_tries++;
1622 continue;
1623 }
1624
1625 /* Compute new intel_dp->train_set as requested by target */
1626 cdv_intel_get_adjust_train(encoder);
1627 ++tries;
1628
1629 }
1630
1631 reg = DP | DP_LINK_TRAIN_OFF;
1632
1633 REG_WRITE(intel_dp->output_reg, reg);
1634 REG_READ(intel_dp->output_reg);
1635 cdv_intel_dp_aux_native_write_1(encoder,
1636 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
1637 }
1638
1639 static void
1640 cdv_intel_dp_link_down(struct gma_encoder *encoder)
1641 {
1642 struct drm_device *dev = encoder->base.dev;
1643 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1644 uint32_t DP = intel_dp->DP;
1645
1646 if ((REG_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
1647 return;
1648
1649 DRM_DEBUG_KMS("\n");
1650
1651
1652 {
1653 DP &= ~DP_LINK_TRAIN_MASK;
1654 REG_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
1655 }
1656 REG_READ(intel_dp->output_reg);
1657
1658 msleep(17);
1659
1660 REG_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
1661 REG_READ(intel_dp->output_reg);
1662 }
1663
1664 static enum drm_connector_status cdv_dp_detect(struct gma_encoder *encoder)
1665 {
1666 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1667 enum drm_connector_status status;
1668
1669 status = connector_status_disconnected;
1670 if (cdv_intel_dp_aux_native_read(encoder, 0x000, intel_dp->dpcd,
1671 sizeof (intel_dp->dpcd)) == sizeof (intel_dp->dpcd))
1672 {
1673 if (intel_dp->dpcd[DP_DPCD_REV] != 0)
1674 status = connector_status_connected;
1675 }
1676 if (status == connector_status_connected)
1677 DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
1678 intel_dp->dpcd[0], intel_dp->dpcd[1],
1679 intel_dp->dpcd[2], intel_dp->dpcd[3]);
1680 return status;
1681 }
1682
1683 /*
1684 * Detect a DP connection by reading the sink's DPCD over the AUX channel.
1685 *
1686 * \return connector_status_connected if a sink with a valid DPCD responds.
1687 * \return connector_status_disconnected otherwise.
1688 */
1689 static enum drm_connector_status
1690 cdv_intel_dp_detect(struct drm_connector *connector, bool force)
1691 {
1692 struct gma_encoder *encoder = gma_attached_encoder(connector);
1693 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1694 enum drm_connector_status status;
1695 struct edid *edid = NULL;
1696 int edp = is_edp(encoder);
1697
1698 intel_dp->has_audio = false;
1699
1700 if (edp)
1701 cdv_intel_edp_panel_vdd_on(encoder);
1702 status = cdv_dp_detect(encoder);
1703 if (status != connector_status_connected) {
1704 if (edp)
1705 cdv_intel_edp_panel_vdd_off(encoder);
1706 return status;
1707 }
1708
1709 if (intel_dp->force_audio) {
1710 intel_dp->has_audio = intel_dp->force_audio > 0;
1711 } else {
1712 edid = drm_get_edid(connector, &intel_dp->adapter);
1713 if (edid) {
1714 intel_dp->has_audio = drm_detect_monitor_audio(edid);
1715 kfree(edid);
1716 }
1717 }
1718 if (edp)
1719 cdv_intel_edp_panel_vdd_off(encoder);
1720
1721 return connector_status_connected;
1722 }
1723
1724 static int cdv_intel_dp_get_modes(struct drm_connector *connector)
1725 {
1726 struct gma_encoder *intel_encoder = gma_attached_encoder(connector);
1727 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1728 struct edid *edid = NULL;
1729 int ret = 0;
1730 int edp = is_edp(intel_encoder);
1731
1732
1733 edid = drm_get_edid(connector, &intel_dp->adapter);
1734 if (edid) {
1735 drm_connector_update_edid_property(connector, edid);
1736 ret = drm_add_edid_modes(connector, edid);
1737 kfree(edid);
1738 }
1739
1740 if (is_edp(intel_encoder)) {
1741 struct drm_device *dev = connector->dev;
1742 struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
1743
1744 cdv_intel_edp_panel_vdd_off(intel_encoder);
1745 if (ret) {
1746 if (edp && !intel_dp->panel_fixed_mode) {
1747 struct drm_display_mode *newmode;
1748 list_for_each_entry(newmode, &connector->probed_modes,
1749 head) {
1750 if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
1751 intel_dp->panel_fixed_mode =
1752 drm_mode_duplicate(dev, newmode);
1753 break;
1754 }
1755 }
1756 }
1757
1758 return ret;
1759 }
1760 if (!intel_dp->panel_fixed_mode && dev_priv->lfp_lvds_vbt_mode) {
1761 intel_dp->panel_fixed_mode =
1762 drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
1763 if (intel_dp->panel_fixed_mode) {
1764 intel_dp->panel_fixed_mode->type |=
1765 DRM_MODE_TYPE_PREFERRED;
1766 }
1767 }
1768 if (intel_dp->panel_fixed_mode != NULL) {
1769 struct drm_display_mode *mode;
1770 mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
1771 drm_mode_probed_add(connector, mode);
1772 return 1;
1773 }
1774 }
1775
1776 return ret;
1777 }
1778
1779 static bool
1780 cdv_intel_dp_detect_audio(struct drm_connector *connector)
1781 {
1782 struct gma_encoder *encoder = gma_attached_encoder(connector);
1783 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1784 struct edid *edid;
1785 bool has_audio = false;
1786 int edp = is_edp(encoder);
1787
1788 if (edp)
1789 cdv_intel_edp_panel_vdd_on(encoder);
1790
1791 edid = drm_get_edid(connector, &intel_dp->adapter);
1792 if (edid) {
1793 has_audio = drm_detect_monitor_audio(edid);
1794 kfree(edid);
1795 }
1796 if (edp)
1797 cdv_intel_edp_panel_vdd_off(encoder);
1798
1799 return has_audio;
1800 }
1801
1802 static int
1803 cdv_intel_dp_set_property(struct drm_connector *connector,
1804 struct drm_property *property,
1805 uint64_t val)
1806 {
1807 struct drm_psb_private *dev_priv = to_drm_psb_private(connector->dev);
1808 struct gma_encoder *encoder = gma_attached_encoder(connector);
1809 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1810 int ret;
1811
1812 ret = drm_object_property_set_value(&connector->base, property, val);
1813 if (ret)
1814 return ret;
1815
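	/* force_audio: a negative value forces audio off, 0 means auto-detect
	 * from the EDID, and a positive value forces audio on. */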
1816 if (property == dev_priv->force_audio_property) {
1817 int i = val;
1818 bool has_audio;
1819
1820 if (i == intel_dp->force_audio)
1821 return 0;
1822
1823 intel_dp->force_audio = i;
1824
1825 if (i == 0)
1826 has_audio = cdv_intel_dp_detect_audio(connector);
1827 else
1828 has_audio = i > 0;
1829
1830 if (has_audio == intel_dp->has_audio)
1831 return 0;
1832
1833 intel_dp->has_audio = has_audio;
1834 goto done;
1835 }
1836
1837 if (property == dev_priv->broadcast_rgb_property) {
1838 if (val == !!intel_dp->color_range)
1839 return 0;
1840
1841 intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
1842 goto done;
1843 }
1844
1845 return -EINVAL;
1846
1847 done:
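	/* Re-run the full mode set so the new audio/colour-range selection is
	 * actually programmed into the pipe. */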
1848 if (encoder->base.crtc) {
1849 struct drm_crtc *crtc = encoder->base.crtc;
1850 drm_crtc_helper_set_mode(crtc, &crtc->mode,
1851 crtc->x, crtc->y,
1852 crtc->primary->fb);
1853 }
1854
1855 return 0;
1856 }
1857
1858 static void
1859 cdv_intel_dp_destroy(struct drm_connector *connector)
1860 {
1861 struct gma_connector *gma_connector = to_gma_connector(connector);
1862 struct gma_encoder *gma_encoder = gma_attached_encoder(connector);
1863 struct cdv_intel_dp *intel_dp = gma_encoder->dev_priv;
1864
1865 if (is_edp(gma_encoder)) {
1866 /* cdv_intel_panel_destroy_backlight(connector->dev); */
1867 kfree(intel_dp->panel_fixed_mode);
1868 intel_dp->panel_fixed_mode = NULL;
1869 }
1870 i2c_del_adapter(&intel_dp->adapter);
1871 drm_connector_cleanup(connector);
1872 kfree(gma_connector);
1873 }
1874
1875 static const struct drm_encoder_helper_funcs cdv_intel_dp_helper_funcs = {
1876 .dpms = cdv_intel_dp_dpms,
1877 .mode_fixup = cdv_intel_dp_mode_fixup,
1878 .prepare = cdv_intel_dp_prepare,
1879 .mode_set = cdv_intel_dp_mode_set,
1880 .commit = cdv_intel_dp_commit,
1881 };
1882
1883 static const struct drm_connector_funcs cdv_intel_dp_connector_funcs = {
1884 .dpms = drm_helper_connector_dpms,
1885 .detect = cdv_intel_dp_detect,
1886 .fill_modes = drm_helper_probe_single_connector_modes,
1887 .set_property = cdv_intel_dp_set_property,
1888 .destroy = cdv_intel_dp_destroy,
1889 };
1890
1891 static const struct drm_connector_helper_funcs cdv_intel_dp_connector_helper_funcs = {
1892 .get_modes = cdv_intel_dp_get_modes,
1893 .mode_valid = cdv_intel_dp_mode_valid,
1894 .best_encoder = gma_best_encoder,
1895 };
1896
1897 static void cdv_intel_dp_add_properties(struct drm_connector *connector)
1898 {
1899 cdv_intel_attach_force_audio_property(connector);
1900 cdv_intel_attach_broadcast_rgb_property(connector);
1901 }
1902
1903 /* Check the VBT to see whether the eDP panel is wired to the DP-C port */
1904 static bool cdv_intel_dpc_is_edp(struct drm_device *dev)
1905 {
1906 struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
1907 struct child_device_config *p_child;
1908 int i;
1909
1910 if (!dev_priv->child_dev_num)
1911 return false;
1912
1913 for (i = 0; i < dev_priv->child_dev_num; i++) {
1914 p_child = dev_priv->child_dev + i;
1915
1916 if (p_child->dvo_port == PORT_IDPC &&
1917 p_child->device_type == DEVICE_TYPE_eDP)
1918 return true;
1919 }
1920 return false;
1921 }
1922
1923 /* Cedarview display clock gating
1924  *
1925  * We need to disable clock gating to get correct behaviour while enabling
1926  * DP/eDP. TODO - investigate whether it can be re-enabled once the output
1927  * is up. */
1928 static void cdv_disable_intel_clock_gating(struct drm_device *dev)
1929 {
1930 u32 reg_value;
1931 
1932 reg_value = REG_READ(DSPCLK_GATE_D);
1933 reg_value |= (DPUNIT_PIPEB_GATE_DISABLE |
1934 DPUNIT_PIPEA_GATE_DISABLE |
1935 DPCUNIT_CLOCK_GATE_DISABLE |
1936 DPLSUNIT_CLOCK_GATE_DISABLE |
1937 DPOUNIT_CLOCK_GATE_DISABLE |
1938 DPIOUNIT_CLOCK_GATE_DISABLE);
1939
1940 REG_WRITE(DSPCLK_GATE_D, reg_value);
1941
1942 udelay(500);
1943 }
1944
1945 void
1946 cdv_intel_dp_init(struct drm_device *dev, struct psb_intel_mode_device *mode_dev, int output_reg)
1947 {
1948 struct gma_encoder *gma_encoder;
1949 struct gma_connector *gma_connector;
1950 struct drm_connector *connector;
1951 struct drm_encoder *encoder;
1952 struct cdv_intel_dp *intel_dp;
1953 const char *name = NULL;
1954 int type = DRM_MODE_CONNECTOR_DisplayPort;
1955
1956 gma_encoder = kzalloc(sizeof(struct gma_encoder), GFP_KERNEL);
1957 if (!gma_encoder)
1958 return;
1959 gma_connector = kzalloc(sizeof(struct gma_connector), GFP_KERNEL);
1960 if (!gma_connector)
1961 goto err_connector;
1962 intel_dp = kzalloc(sizeof(struct cdv_intel_dp), GFP_KERNEL);
1963 if (!intel_dp)
1964 goto err_priv;
1965
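	/* DP-C can be strapped as an eDP panel; let the VBT decide the
	 * connector type. */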
1966 if ((output_reg == DP_C) && cdv_intel_dpc_is_edp(dev))
1967 type = DRM_MODE_CONNECTOR_eDP;
1968
1969 connector = &gma_connector->base;
1970 encoder = &gma_encoder->base;
1971
1972 drm_connector_init(dev, connector, &cdv_intel_dp_connector_funcs, type);
1973 drm_simple_encoder_init(dev, encoder, DRM_MODE_ENCODER_TMDS);
1974
1975 gma_connector_attach_encoder(gma_connector, gma_encoder);
1976
1977 if (type == DRM_MODE_CONNECTOR_DisplayPort)
1978 gma_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
1979 else
1980 gma_encoder->type = INTEL_OUTPUT_EDP;
1981
1982
1983 gma_encoder->dev_priv = intel_dp;
1984 intel_dp->encoder = gma_encoder;
1985 intel_dp->output_reg = output_reg;
1986
1987 drm_encoder_helper_add(encoder, &cdv_intel_dp_helper_funcs);
1988 drm_connector_helper_add(connector, &cdv_intel_dp_connector_helper_funcs);
1989
1990 connector->polled = DRM_CONNECTOR_POLL_HPD;
1991 connector->interlace_allowed = false;
1992 connector->doublescan_allowed = false;
1993
1994 /* Set up the DDC bus. */
1995 switch (output_reg) {
1996 case DP_B:
1997 name = "DPDDC-B";
1998 gma_encoder->ddi_select = (DP_MASK | DDI0_SELECT);
1999 break;
2000 case DP_C:
2001 name = "DPDDC-C";
2002 gma_encoder->ddi_select = (DP_MASK | DDI1_SELECT);
2003 break;
2004 }
2005
2006 cdv_disable_intel_clock_gating(dev);
2007
2008 cdv_intel_dp_i2c_init(gma_connector, gma_encoder, name);
2009 /* FIXME: check the return value and bail out on failure */
2010 cdv_intel_dp_add_properties(connector);
2011
2012 if (is_edp(gma_encoder)) {
2013 int ret;
2014 struct edp_power_seq cur;
2015 u32 pp_on, pp_off, pp_div;
2016 u32 pwm_ctrl;
2017
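		/* Unlock the panel power sequencer registers before reading and
		 * (later) writing the power sequencing delays. */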
2018 pp_on = REG_READ(PP_CONTROL);
2019 pp_on &= ~PANEL_UNLOCK_MASK;
2020 pp_on |= PANEL_UNLOCK_REGS;
2021
2022 REG_WRITE(PP_CONTROL, pp_on);
2023
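		/* Route the backlight PWM to pipe B, the pipe expected to drive
		 * the eDP panel. */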
2024 pwm_ctrl = REG_READ(BLC_PWM_CTL2);
2025 pwm_ctrl |= PWM_PIPE_B;
2026 REG_WRITE(BLC_PWM_CTL2, pwm_ctrl);
2027
2028 pp_on = REG_READ(PP_ON_DELAYS);
2029 pp_off = REG_READ(PP_OFF_DELAYS);
2030 pp_div = REG_READ(PP_DIVISOR);
2031
2032 /* Pull timing values out of registers */
2033 cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
2034 PANEL_POWER_UP_DELAY_SHIFT;
2035
2036 cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
2037 PANEL_LIGHT_ON_DELAY_SHIFT;
2038
2039 cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
2040 PANEL_LIGHT_OFF_DELAY_SHIFT;
2041
2042 cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
2043 PANEL_POWER_DOWN_DELAY_SHIFT;
2044
2045 cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
2046 PANEL_POWER_CYCLE_DELAY_SHIFT);
2047
2048 DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
2049 cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);
2050
2051
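		/* The delay fields count 100us units, so dividing by 10 gives
		 * milliseconds; the power cycle field counts 100ms units and is
		 * stored with a +1 offset. */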
2052 intel_dp->panel_power_up_delay = cur.t1_t3 / 10;
2053 intel_dp->backlight_on_delay = cur.t8 / 10;
2054 intel_dp->backlight_off_delay = cur.t9 / 10;
2055 intel_dp->panel_power_down_delay = cur.t10 / 10;
2056 intel_dp->panel_power_cycle_delay = (cur.t11_t12 - 1) * 100;
2057
2058 DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
2059 intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
2060 intel_dp->panel_power_cycle_delay);
2061
2062 DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
2063 intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
2064
2065
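		/* Read the first DPCD bytes to make sure a sink really answers
		 * on AUX; otherwise the VBT advertised a ghost eDP panel. */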
2066 cdv_intel_edp_panel_vdd_on(gma_encoder);
2067 ret = cdv_intel_dp_aux_native_read(gma_encoder, DP_DPCD_REV,
2068 intel_dp->dpcd,
2069 sizeof(intel_dp->dpcd));
2070 cdv_intel_edp_panel_vdd_off(gma_encoder);
2071 if (ret <= 0) {
2072 /* if this fails, presume the device is a ghost */
2073 DRM_INFO("failed to retrieve link info, disabling eDP\n");
2074 drm_encoder_cleanup(encoder);
2075 cdv_intel_dp_destroy(connector);
2076 goto err_connector;
2077 } else {
2078 DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
2079 intel_dp->dpcd[0], intel_dp->dpcd[1],
2080 intel_dp->dpcd[2], intel_dp->dpcd[3]);
2081
2082 }
2083 /* The CDV reference driver moves panel backlight setup into the displays that
2084    have a backlight: this is a good idea and one we should probably adopt, but
2085    we need to migrate all the drivers before we can do that. */
2086 /*cdv_intel_panel_setup_backlight(dev); */
2087 }
2088 return;
2089
2090 err_priv:
2091 kfree(gma_connector);
2092 err_connector:
2093 kfree(gma_encoder);
2094 }
2095