1 /*
2 * Copyright © 2008 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 * Keith Packard <keithp@keithp.com>
25 *
26 */
27
28 #include <linux/export.h>
29 #include <linux/i2c.h>
30 #include <linux/notifier.h>
31 #include <linux/slab.h>
32 #include <linux/types.h>
33
34 #include <asm/byteorder.h>
35
36 #include <drm/drm_atomic_helper.h>
37 #include <drm/drm_crtc.h>
38 #include <drm/drm_dp_helper.h>
39 #include <drm/drm_edid.h>
40 #include <drm/drm_probe_helper.h>
41
42 #include "g4x_dp.h"
43 #include "i915_debugfs.h"
44 #include "i915_drv.h"
45 #include "intel_atomic.h"
46 #include "intel_audio.h"
47 #include "intel_connector.h"
48 #include "intel_ddi.h"
49 #include "intel_de.h"
50 #include "intel_display_types.h"
51 #include "intel_dp.h"
52 #include "intel_dp_aux.h"
53 #include "intel_dp_hdcp.h"
54 #include "intel_dp_link_training.h"
55 #include "intel_dp_mst.h"
56 #include "intel_dpio_phy.h"
57 #include "intel_dpll.h"
58 #include "intel_fifo_underrun.h"
59 #include "intel_hdcp.h"
60 #include "intel_hdmi.h"
61 #include "intel_hotplug.h"
62 #include "intel_lspcon.h"
63 #include "intel_lvds.h"
64 #include "intel_panel.h"
65 #include "intel_pps.h"
66 #include "intel_psr.h"
67 #include "intel_sideband.h"
68 #include "intel_tc.h"
69 #include "intel_vdsc.h"
70 #include "intel_vrr.h"
71
#define DP_DPRX_ESI_LEN 14

/* DP DSC throughput values used for slice count calculations KPixels/s */
#define DP_DSC_PEAK_PIXEL_RATE			2720000
#define DP_DSC_MAX_ENC_THROUGHPUT_0		340000
#define DP_DSC_MAX_ENC_THROUGHPUT_1		400000

/* DP DSC FEC Overhead factor = 1/(0.972261) */
#define DP_DSC_FEC_OVERHEAD_FACTOR		972261

/* Compliance test status bits */
#define INTEL_DP_RESOLUTION_SHIFT_MASK	0
#define INTEL_DP_RESOLUTION_PREFERRED	(1 << INTEL_DP_RESOLUTION_SHIFT_MASK)
#define INTEL_DP_RESOLUTION_STANDARD	(2 << INTEL_DP_RESOLUTION_SHIFT_MASK)
#define INTEL_DP_RESOLUTION_FAILSAFE	(3 << INTEL_DP_RESOLUTION_SHIFT_MASK)


/* Constants for DP DSC configurations */
/* Valid DSC bits-per-pixel values, in increasing order (VESA DSC). */
static const u8 valid_dsc_bpp[] = {6, 8, 10, 12, 15};

/* With Single pipe configuration, HW is capable of supporting maximum
 * of 4 slices per line.
 */
static const u8 valid_dsc_slicecount[] = {1, 2, 4};
96
97 /**
98 * intel_dp_is_edp - is the given port attached to an eDP panel (either CPU or PCH)
99 * @intel_dp: DP struct
100 *
101 * If a CPU or PCH DP output is attached to an eDP panel, this function
102 * will return true, and false otherwise.
103 */
intel_dp_is_edp(struct intel_dp * intel_dp)104 bool intel_dp_is_edp(struct intel_dp *intel_dp)
105 {
106 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
107
108 return dig_port->base.type == INTEL_OUTPUT_EDP;
109 }
110
111 static void intel_dp_unset_edid(struct intel_dp *intel_dp);
112 static int intel_dp_dsc_compute_bpp(struct intel_dp *intel_dp, u8 dsc_max_bpc);
113
114 /* update sink rates from dpcd */
static void intel_dp_set_sink_rates(struct intel_dp *intel_dp)
{
	/* Standard DP link rates in kHz, in increasing order. */
	static const int dp_rates[] = {
		162000, 270000, 540000, 810000
	};
	int i, max_rate;
	int max_lttpr_rate;

	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_CAN_DO_MAX_LINK_RATE_3_24_GBPS)) {
		/* Needed, e.g., for Apple MBP 2017, 15 inch eDP Retina panel */
		static const int quirk_rates[] = { 162000, 270000, 324000 };

		memcpy(intel_dp->sink_rates, quirk_rates, sizeof(quirk_rates));
		intel_dp->num_sink_rates = ARRAY_SIZE(quirk_rates);

		return;
	}

	/*
	 * Max rate advertised by the sink, further clamped by any LTTPRs in
	 * the link, which may support a lower max rate than the sink itself.
	 */
	max_rate = drm_dp_bw_code_to_link_rate(intel_dp->dpcd[DP_MAX_LINK_RATE]);
	max_lttpr_rate = drm_dp_lttpr_max_link_rate(intel_dp->lttpr_common_caps);
	if (max_lttpr_rate)
		max_rate = min(max_rate, max_lttpr_rate);

	/* Keep the standard rates not exceeding max_rate (arrays sorted). */
	for (i = 0; i < ARRAY_SIZE(dp_rates); i++) {
		if (dp_rates[i] > max_rate)
			break;
		intel_dp->sink_rates[i] = dp_rates[i];
	}

	intel_dp->num_sink_rates = i;
}
146
147 /* Get length of rates array potentially limited by max_rate. */
static int intel_dp_rate_limit_len(const int *rates, int len, int max_rate)
{
	int n;

	/*
	 * rates[] is sorted in increasing order; return the length of the
	 * longest prefix whose last entry does not exceed max_rate.
	 */
	for (n = len; n > 0; n--) {
		if (rates[n - 1] <= max_rate)
			return n;
	}

	return 0;
}
160
161 /* Get length of common rates array potentially limited by max_rate. */
static int intel_dp_common_len_rate_limit(const struct intel_dp *intel_dp,
					  int max_rate)
{
	/* common_rates[] is sorted; count the entries not exceeding max_rate. */
	return intel_dp_rate_limit_len(intel_dp->common_rates,
				       intel_dp->num_common_rates, max_rate);
}
168
169 /* Theoretical max between source and sink */
static int intel_dp_max_common_rate(struct intel_dp *intel_dp)
{
	/* common_rates[] is sorted in increasing order; last entry is the max. */
	return intel_dp->common_rates[intel_dp->num_common_rates - 1];
}
174
175 /* Theoretical max between source and sink */
static int intel_dp_max_common_lane_count(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	int source_max = dig_port->max_lanes;
	int sink_max = drm_dp_max_lane_count(intel_dp->dpcd);
	int fia_max = intel_tc_port_fia_max_lane_count(dig_port);
	int lttpr_max = drm_dp_lttpr_max_lane_count(intel_dp->lttpr_common_caps);

	/* An LTTPR in the link may support fewer lanes than the sink. */
	if (lttpr_max)
		sink_max = min(sink_max, lttpr_max);

	/* Also bounded by the Type-C FIA lane limit. */
	return min3(source_max, sink_max, fia_max);
}
189
int intel_dp_max_lane_count(struct intel_dp *intel_dp)
{
	/* Current max, possibly reduced by link training fallback. */
	return intel_dp->max_link_lane_count;
}
194
/* Data rate (kBytes/s) required for the given pixel clock and bpp. */
int
intel_dp_link_required(int pixel_clock, int bpp)
{
	/* pixel_clock is in kHz, divide bpp by 8 for bit to Byte conversion */
	return DIV_ROUND_UP(pixel_clock * bpp, 8);
}
201
/* Max data rate (kBytes/s) the link can carry at the given clock/lanes. */
int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
	/*
	 * max_link_clock is the link symbol clock (LS_Clk) in kHz, not the
	 * link rate generally expressed in Gbps. Each lane carries 8 bits
	 * of data per LS_Clk, so no extra accounting is needed here for the
	 * channel encoding done in the PHY layer.
	 */
	int total_rate = max_link_clock * max_lanes;

	return total_rate;
}
213
bool intel_dp_can_bigjoiner(struct intel_dp *intel_dp)
{
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &intel_dig_port->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	/* Bigjoiner: display ver 12+, or ver 11 on any port except A. */
	return DISPLAY_VER(dev_priv) >= 12 ||
		(DISPLAY_VER(dev_priv) == 11 &&
		 encoder->port != PORT_A);
}
224
static int icl_max_source_rate(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, dig_port->base.port);

	/* Combo PHY non-eDP ports top out at HBR2 (540000 kHz). */
	if (intel_phy_is_combo(dev_priv, phy) &&
	    !intel_dp_is_edp(intel_dp))
		return 540000;

	/* Otherwise HBR3. */
	return 810000;
}
237
static int ehl_max_source_rate(struct intel_dp *intel_dp)
{
	/* eDP tops out at HBR2 (540000 kHz), external DP at HBR3. */
	return intel_dp_is_edp(intel_dp) ? 540000 : 810000;
}
245
/* Select the source link rate table based on platform and VBT limits. */
static void
intel_dp_set_source_rates(struct intel_dp *intel_dp)
{
	/* The values must be in increasing order */
	static const int icl_rates[] = {
		162000, 216000, 270000, 324000, 432000, 540000, 648000, 810000
	};
	static const int bxt_rates[] = {
		162000, 216000, 243000, 270000, 324000, 432000, 540000
	};
	static const int skl_rates[] = {
		162000, 216000, 270000, 324000, 432000, 540000
	};
	static const int hsw_rates[] = {
		162000, 270000, 540000
	};
	static const int g4x_rates[] = {
		162000, 270000
	};
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &dig_port->base;
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	const int *source_rates;
	int size, max_rate = 0, vbt_max_rate;

	/* This should only be done once */
	drm_WARN_ON(&dev_priv->drm,
		    intel_dp->source_rates || intel_dp->num_source_rates);

	if (DISPLAY_VER(dev_priv) >= 11) {
		source_rates = icl_rates;
		size = ARRAY_SIZE(icl_rates);
		/* ICL/EHL further cap the max rate per PHY/port type. */
		if (IS_JSL_EHL(dev_priv))
			max_rate = ehl_max_source_rate(intel_dp);
		else
			max_rate = icl_max_source_rate(intel_dp);
	} else if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) {
		source_rates = bxt_rates;
		size = ARRAY_SIZE(bxt_rates);
	} else if (DISPLAY_VER(dev_priv) == 9) {
		source_rates = skl_rates;
		size = ARRAY_SIZE(skl_rates);
	} else if ((IS_HASWELL(dev_priv) && !IS_HSW_ULX(dev_priv)) ||
		   IS_BROADWELL(dev_priv)) {
		source_rates = hsw_rates;
		size = ARRAY_SIZE(hsw_rates);
	} else {
		source_rates = g4x_rates;
		size = ARRAY_SIZE(g4x_rates);
	}

	/* VBT may further limit the max rate; combine with the platform cap. */
	vbt_max_rate = intel_bios_dp_max_link_rate(encoder);
	if (max_rate && vbt_max_rate)
		max_rate = min(max_rate, vbt_max_rate);
	else if (vbt_max_rate)
		max_rate = vbt_max_rate;

	if (max_rate)
		size = intel_dp_rate_limit_len(source_rates, size, max_rate);

	intel_dp->source_rates = source_rates;
	intel_dp->num_source_rates = size;
}
309
/*
 * Merge-style intersection of two sorted (increasing) rate arrays.
 * Writes the common rates to common_rates[] and returns their count.
 */
static int intersect_rates(const int *source_rates, int source_len,
			   const int *sink_rates, int sink_len,
			   int *common_rates)
{
	int i = 0, j = 0, k = 0;

	while (i < source_len && j < sink_len) {
		if (source_rates[i] == sink_rates[j]) {
			/* Never overflow the fixed-size output array. */
			if (WARN_ON(k >= DP_MAX_SUPPORTED_RATES))
				return k;
			common_rates[k] = source_rates[i];
			++k;
			++i;
			++j;
		} else if (source_rates[i] < sink_rates[j]) {
			++i;
		} else {
			++j;
		}
	}
	return k;
}
332
333 /* return index of rate in rates array, or -1 if not found */
static int intel_dp_rate_index(const int *rates, int len, int rate)
{
	int idx;

	/* Linear scan; returns the index of rate, or -1 if absent. */
	for (idx = 0; idx < len; idx++) {
		if (rates[idx] == rate)
			return idx;
	}

	return -1;
}
344
/* Compute the intersection of source and sink rates into common_rates[]. */
static void intel_dp_set_common_rates(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	/* Both rate tables must have been populated before this point. */
	drm_WARN_ON(&i915->drm,
		    !intel_dp->num_source_rates || !intel_dp->num_sink_rates);

	intel_dp->num_common_rates = intersect_rates(intel_dp->source_rates,
						     intel_dp->num_source_rates,
						     intel_dp->sink_rates,
						     intel_dp->num_sink_rates,
						     intel_dp->common_rates);

	/* Paranoia, there should always be something in common. */
	if (drm_WARN_ON(&i915->drm, intel_dp->num_common_rates == 0)) {
		/* Fall back to RBR, which every DP device must support. */
		intel_dp->common_rates[0] = 162000;
		intel_dp->num_common_rates = 1;
	}
}
364
intel_dp_link_params_valid(struct intel_dp * intel_dp,int link_rate,u8 lane_count)365 static bool intel_dp_link_params_valid(struct intel_dp *intel_dp, int link_rate,
366 u8 lane_count)
367 {
368 /*
369 * FIXME: we need to synchronize the current link parameters with
370 * hardware readout. Currently fast link training doesn't work on
371 * boot-up.
372 */
373 if (link_rate == 0 ||
374 link_rate > intel_dp->max_link_rate)
375 return false;
376
377 if (lane_count == 0 ||
378 lane_count > intel_dp_max_lane_count(intel_dp))
379 return false;
380
381 return true;
382 }
383
intel_dp_can_link_train_fallback_for_edp(struct intel_dp * intel_dp,int link_rate,u8 lane_count)384 static bool intel_dp_can_link_train_fallback_for_edp(struct intel_dp *intel_dp,
385 int link_rate,
386 u8 lane_count)
387 {
388 const struct drm_display_mode *fixed_mode =
389 intel_dp->attached_connector->panel.fixed_mode;
390 int mode_rate, max_rate;
391
392 mode_rate = intel_dp_link_required(fixed_mode->clock, 18);
393 max_rate = intel_dp_max_data_rate(link_rate, lane_count);
394 if (mode_rate > max_rate)
395 return false;
396
397 return true;
398 }
399
/*
 * Pick reduced link parameters after a failed link training.
 * Fallback order: retry eDP with max params first, then step down the
 * link rate, then halve the lane count. Returns 0 if a retry should be
 * attempted, -1 if no further fallback is possible.
 */
int intel_dp_get_link_train_fallback_values(struct intel_dp *intel_dp,
					    int link_rate, u8 lane_count)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int index;

	/*
	 * TODO: Enable fallback on MST links once MST link compute can handle
	 * the fallback params.
	 */
	if (intel_dp->is_mst) {
		drm_err(&i915->drm, "Link Training Unsuccessful\n");
		return -1;
	}

	if (intel_dp_is_edp(intel_dp) && !intel_dp->use_max_params) {
		drm_dbg_kms(&i915->drm,
			    "Retrying Link training for eDP with max parameters\n");
		intel_dp->use_max_params = true;
		return 0;
	}

	index = intel_dp_rate_index(intel_dp->common_rates,
				    intel_dp->num_common_rates,
				    link_rate);
	if (index > 0) {
		/* Try the next lower common rate at the same lane count. */
		if (intel_dp_is_edp(intel_dp) &&
		    !intel_dp_can_link_train_fallback_for_edp(intel_dp,
							      intel_dp->common_rates[index - 1],
							      lane_count)) {
			drm_dbg_kms(&i915->drm,
				    "Retrying Link training for eDP with same parameters\n");
			return 0;
		}
		intel_dp->max_link_rate = intel_dp->common_rates[index - 1];
		intel_dp->max_link_lane_count = lane_count;
	} else if (lane_count > 1) {
		/* Already at the lowest rate: halve lanes, reset to max rate. */
		if (intel_dp_is_edp(intel_dp) &&
		    !intel_dp_can_link_train_fallback_for_edp(intel_dp,
							      intel_dp_max_common_rate(intel_dp),
							      lane_count >> 1)) {
			drm_dbg_kms(&i915->drm,
				    "Retrying Link training for eDP with same parameters\n");
			return 0;
		}
		intel_dp->max_link_rate = intel_dp_max_common_rate(intel_dp);
		intel_dp->max_link_lane_count = lane_count >> 1;
	} else {
		/* Lowest rate and a single lane: nothing left to try. */
		drm_err(&i915->drm, "Link Training Unsuccessful\n");
		return -1;
	}

	return 0;
}
454
/* Scale a mode clock up by the FEC bandwidth overhead (~2.4%). */
u32 intel_dp_mode_to_fec_clock(u32 mode_clock)
{
	/* 64-bit multiply to avoid overflow before the divide. */
	return div_u64(mul_u32_u32(mode_clock, 1000000U),
		       DP_DSC_FEC_OVERHEAD_FACTOR);
}
460
/* Small joiner RAM size in bits: 7680 bytes on display ver 11+, else 6144. */
static int
small_joiner_ram_size_bits(struct drm_i915_private *i915)
{
	return (DISPLAY_VER(i915) >= 11 ? 7680 : 6144) * 8;
}
469
intel_dp_dsc_get_output_bpp(struct drm_i915_private * i915,u32 link_clock,u32 lane_count,u32 mode_clock,u32 mode_hdisplay,bool bigjoiner,u32 pipe_bpp)470 static u16 intel_dp_dsc_get_output_bpp(struct drm_i915_private *i915,
471 u32 link_clock, u32 lane_count,
472 u32 mode_clock, u32 mode_hdisplay,
473 bool bigjoiner,
474 u32 pipe_bpp)
475 {
476 u32 bits_per_pixel, max_bpp_small_joiner_ram;
477 int i;
478
479 /*
480 * Available Link Bandwidth(Kbits/sec) = (NumberOfLanes)*
481 * (LinkSymbolClock)* 8 * (TimeSlotsPerMTP)
482 * for SST -> TimeSlotsPerMTP is 1,
483 * for MST -> TimeSlotsPerMTP has to be calculated
484 */
485 bits_per_pixel = (link_clock * lane_count * 8) /
486 intel_dp_mode_to_fec_clock(mode_clock);
487 drm_dbg_kms(&i915->drm, "Max link bpp: %u\n", bits_per_pixel);
488
489 /* Small Joiner Check: output bpp <= joiner RAM (bits) / Horiz. width */
490 max_bpp_small_joiner_ram = small_joiner_ram_size_bits(i915) /
491 mode_hdisplay;
492
493 if (bigjoiner)
494 max_bpp_small_joiner_ram *= 2;
495
496 drm_dbg_kms(&i915->drm, "Max small joiner bpp: %u\n",
497 max_bpp_small_joiner_ram);
498
499 /*
500 * Greatest allowed DSC BPP = MIN (output BPP from available Link BW
501 * check, output bpp from small joiner RAM check)
502 */
503 bits_per_pixel = min(bits_per_pixel, max_bpp_small_joiner_ram);
504
505 if (bigjoiner) {
506 u32 max_bpp_bigjoiner =
507 i915->max_cdclk_freq * 48 /
508 intel_dp_mode_to_fec_clock(mode_clock);
509
510 DRM_DEBUG_KMS("Max big joiner bpp: %u\n", max_bpp_bigjoiner);
511 bits_per_pixel = min(bits_per_pixel, max_bpp_bigjoiner);
512 }
513
514 /* Error out if the max bpp is less than smallest allowed valid bpp */
515 if (bits_per_pixel < valid_dsc_bpp[0]) {
516 drm_dbg_kms(&i915->drm, "Unsupported BPP %u, min %u\n",
517 bits_per_pixel, valid_dsc_bpp[0]);
518 return 0;
519 }
520
521 /* From XE_LPD onwards we support from bpc upto uncompressed bpp-1 BPPs */
522 if (DISPLAY_VER(i915) >= 13) {
523 bits_per_pixel = min(bits_per_pixel, pipe_bpp - 1);
524 } else {
525 /* Find the nearest match in the array of known BPPs from VESA */
526 for (i = 0; i < ARRAY_SIZE(valid_dsc_bpp) - 1; i++) {
527 if (bits_per_pixel < valid_dsc_bpp[i + 1])
528 break;
529 }
530 bits_per_pixel = valid_dsc_bpp[i];
531 }
532
533 /*
534 * Compressed BPP in U6.4 format so multiply by 16, for Gen 11,
535 * fractional part is 0
536 */
537 return bits_per_pixel << 4;
538 }
539
/*
 * Pick the smallest valid DSC slice count that satisfies both the encoder
 * throughput limit and the sink's max slice width. Returns 0 on failure.
 */
static u8 intel_dp_dsc_get_slice_count(struct intel_dp *intel_dp,
				       int mode_clock, int mode_hdisplay,
				       bool bigjoiner)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 min_slice_count, i;
	int max_slice_width;

	/* Minimum slices so each slice stays within encoder throughput. */
	if (mode_clock <= DP_DSC_PEAK_PIXEL_RATE)
		min_slice_count = DIV_ROUND_UP(mode_clock,
					       DP_DSC_MAX_ENC_THROUGHPUT_0);
	else
		min_slice_count = DIV_ROUND_UP(mode_clock,
					       DP_DSC_MAX_ENC_THROUGHPUT_1);

	max_slice_width = drm_dp_dsc_sink_max_slice_width(intel_dp->dsc_dpcd);
	if (max_slice_width < DP_DSC_MIN_SLICE_WIDTH_VALUE) {
		drm_dbg_kms(&i915->drm,
			    "Unsupported slice width %d by DP DSC Sink device\n",
			    max_slice_width);
		return 0;
	}
	/* Also take into account max slice width */
	min_slice_count = max_t(u8, min_slice_count,
				DIV_ROUND_UP(mode_hdisplay,
					     max_slice_width));

	/* Find the closest match to the valid slice count values */
	for (i = 0; i < ARRAY_SIZE(valid_dsc_slicecount); i++) {
		/* Big joiner doubles the effective slice count. */
		u8 test_slice_count = valid_dsc_slicecount[i] << bigjoiner;

		if (test_slice_count >
		    drm_dp_dsc_sink_max_slice_count(intel_dp->dsc_dpcd, false))
			break;

		/* big joiner needs small joiner to be enabled */
		if (bigjoiner && test_slice_count < 4)
			continue;

		if (min_slice_count <= test_slice_count)
			return test_slice_count;
	}

	drm_dbg_kms(&i915->drm, "Unsupported Slice Count %d\n",
		    min_slice_count);
	return 0;
}
587
/*
 * Choose the output format for a mode: RGB unless the mode is 4:2:0-only,
 * in which case pick the earliest point where conversion can happen.
 */
static enum intel_output_format
intel_dp_output_format(struct drm_connector *connector,
		       const struct drm_display_mode *mode)
{
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
	const struct drm_display_info *info = &connector->display_info;

	if (!connector->ycbcr_420_allowed ||
	    !drm_mode_is_420_only(info, mode))
		return INTEL_OUTPUT_FORMAT_RGB;

	/* DFP can convert RGB->YCbCr and 4:4:4->4:2:0: output RGB. */
	if (intel_dp->dfp.rgb_to_ycbcr &&
	    intel_dp->dfp.ycbcr_444_to_420)
		return INTEL_OUTPUT_FORMAT_RGB;

	/* Otherwise output 4:4:4 if the DFP downsamples, else native 4:2:0. */
	if (intel_dp->dfp.ycbcr_444_to_420)
		return INTEL_OUTPUT_FORMAT_YCBCR444;
	else
		return INTEL_OUTPUT_FORMAT_YCBCR420;
}
608
intel_dp_min_bpp(enum intel_output_format output_format)609 int intel_dp_min_bpp(enum intel_output_format output_format)
610 {
611 if (output_format == INTEL_OUTPUT_FORMAT_RGB)
612 return 6 * 3;
613 else
614 return 8 * 3;
615 }
616
intel_dp_output_bpp(enum intel_output_format output_format,int bpp)617 static int intel_dp_output_bpp(enum intel_output_format output_format, int bpp)
618 {
619 /*
620 * bpp value was assumed to RGB format. And YCbCr 4:2:0 output
621 * format of the number of bytes per pixel will be half the number
622 * of bytes of RGB pixel.
623 */
624 if (output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
625 bpp /= 2;
626
627 return bpp;
628 }
629
630 static int
intel_dp_mode_min_output_bpp(struct drm_connector * connector,const struct drm_display_mode * mode)631 intel_dp_mode_min_output_bpp(struct drm_connector *connector,
632 const struct drm_display_mode *mode)
633 {
634 enum intel_output_format output_format =
635 intel_dp_output_format(connector, mode);
636
637 return intel_dp_output_bpp(output_format, intel_dp_min_bpp(output_format));
638 }
639
/* Reject hdisplay values known to misbehave on pre-DDI hardware. */
static bool intel_dp_hdisplay_bad(struct drm_i915_private *dev_priv,
				  int hdisplay)
{
	/*
	 * Older platforms don't like hdisplay==4096 with DP.
	 *
	 * On ILK/SNB/IVB the pipe seems to be somewhat running (scanline
	 * and frame counter increment), but we don't get vblank interrupts,
	 * and the pipe underruns immediately. The link also doesn't seem
	 * to get trained properly.
	 *
	 * On CHV the vblank interrupts don't seem to disappear but
	 * otherwise the symptoms are similar.
	 *
	 * TODO: confirm the behaviour on HSW+
	 */
	return hdisplay == 4096 && !HAS_DDI(dev_priv);
}
658
/* Validate a mode against downstream facing port (DFP) bandwidth limits. */
static enum drm_mode_status
intel_dp_mode_valid_downstream(struct intel_connector *connector,
			       const struct drm_display_mode *mode,
			       int target_clock)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	const struct drm_display_info *info = &connector->base.display_info;
	int tmds_clock;

	/* If PCON supports FRL MODE, check FRL bandwidth constraints */
	if (intel_dp->dfp.pcon_max_frl_bw) {
		int target_bw;
		int max_frl_bw;
		int bpp = intel_dp_mode_min_output_bpp(&connector->base, mode);

		target_bw = bpp * target_clock;

		max_frl_bw = intel_dp->dfp.pcon_max_frl_bw;

		/* converting bw from Gbps to Kbps*/
		max_frl_bw = max_frl_bw * 1000000;

		if (target_bw > max_frl_bw)
			return MODE_CLOCK_HIGH;

		/* FRL mode: the TMDS limits below don't apply. */
		return MODE_OK;
	}

	if (intel_dp->dfp.max_dotclock &&
	    target_clock > intel_dp->dfp.max_dotclock)
		return MODE_CLOCK_HIGH;

	/* Assume 8bpc for the DP++/HDMI/DVI TMDS clock check */
	tmds_clock = target_clock;
	if (drm_mode_is_420_only(info, mode))
		tmds_clock /= 2;

	/* A zero limit means the DFP advertised no constraint. */
	if (intel_dp->dfp.min_tmds_clock &&
	    tmds_clock < intel_dp->dfp.min_tmds_clock)
		return MODE_CLOCK_LOW;
	if (intel_dp->dfp.max_tmds_clock &&
	    tmds_clock > intel_dp->dfp.max_tmds_clock)
		return MODE_CLOCK_HIGH;

	return MODE_OK;
}
705
/*
 * Connector .mode_valid() hook: check a mode against panel fixed mode,
 * dotclock/link bandwidth (with optional bigjoiner and DSC), and
 * downstream port limits.
 */
static enum drm_mode_status
intel_dp_mode_valid(struct drm_connector *connector,
		    struct drm_display_mode *mode)
{
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
	struct intel_connector *intel_connector = to_intel_connector(connector);
	struct drm_display_mode *fixed_mode = intel_connector->panel.fixed_mode;
	struct drm_i915_private *dev_priv = to_i915(connector->dev);
	int target_clock = mode->clock;
	int max_rate, mode_rate, max_lanes, max_link_clock;
	int max_dotclk = dev_priv->max_dotclk_freq;
	u16 dsc_max_output_bpp = 0;
	u8 dsc_slice_count = 0;
	enum drm_mode_status status;
	bool dsc = false, bigjoiner = false;

	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
		return MODE_NO_DBLESCAN;

	if (mode->flags & DRM_MODE_FLAG_DBLCLK)
		return MODE_H_ILLEGAL;

	/* eDP panels only support their fixed mode's resolution. */
	if (intel_dp_is_edp(intel_dp) && fixed_mode) {
		if (mode->hdisplay != fixed_mode->hdisplay)
			return MODE_PANEL;

		if (mode->vdisplay != fixed_mode->vdisplay)
			return MODE_PANEL;

		target_clock = fixed_mode->clock;
	}

	if (mode->clock < 10000)
		return MODE_CLOCK_LOW;

	/* Bigjoiner doubles the usable dotclock when available. */
	if ((target_clock > max_dotclk || mode->hdisplay > 5120) &&
	    intel_dp_can_bigjoiner(intel_dp)) {
		bigjoiner = true;
		max_dotclk *= 2;
	}
	if (target_clock > max_dotclk)
		return MODE_CLOCK_HIGH;

	max_link_clock = intel_dp_max_link_rate(intel_dp);
	max_lanes = intel_dp_max_lane_count(intel_dp);

	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);
	mode_rate = intel_dp_link_required(target_clock,
					   intel_dp_mode_min_output_bpp(connector, mode));

	if (intel_dp_hdisplay_bad(dev_priv, mode->hdisplay))
		return MODE_H_ILLEGAL;

	/*
	 * Output bpp is stored in 6.4 format so right shift by 4 to get the
	 * integer value since we support only integer values of bpp.
	 */
	if (DISPLAY_VER(dev_priv) >= 10 &&
	    drm_dp_sink_supports_dsc(intel_dp->dsc_dpcd)) {
		/*
		 * TBD pass the connector BPC,
		 * for now U8_MAX so that max BPC on that platform would be picked
		 */
		int pipe_bpp = intel_dp_dsc_compute_bpp(intel_dp, U8_MAX);

		if (intel_dp_is_edp(intel_dp)) {
			dsc_max_output_bpp =
				drm_edp_dsc_sink_output_bpp(intel_dp->dsc_dpcd) >> 4;
			dsc_slice_count =
				drm_dp_dsc_sink_max_slice_count(intel_dp->dsc_dpcd,
								true);
		} else if (drm_dp_sink_supports_fec(intel_dp->fec_capable)) {
			dsc_max_output_bpp =
				intel_dp_dsc_get_output_bpp(dev_priv,
							    max_link_clock,
							    max_lanes,
							    target_clock,
							    mode->hdisplay,
							    bigjoiner,
							    pipe_bpp) >> 4;
			dsc_slice_count =
				intel_dp_dsc_get_slice_count(intel_dp,
							     target_clock,
							     mode->hdisplay,
							     bigjoiner);
		}

		/* DSC is usable only if both a bpp and a slice count were found. */
		dsc = dsc_max_output_bpp && dsc_slice_count;
	}

	/*
	 * Big joiner configuration needs DSC for TGL which is not true for
	 * XE_LPD where uncompressed joiner is supported.
	 */
	if (DISPLAY_VER(dev_priv) < 13 && bigjoiner && !dsc)
		return MODE_CLOCK_HIGH;

	if (mode_rate > max_rate && !dsc)
		return MODE_CLOCK_HIGH;

	status = intel_dp_mode_valid_downstream(intel_connector,
						mode, target_clock);
	if (status != MODE_OK)
		return status;

	return intel_mode_valid_max_plane_size(dev_priv, mode, bigjoiner);
}
813
intel_dp_source_supports_hbr2(struct intel_dp * intel_dp)814 bool intel_dp_source_supports_hbr2(struct intel_dp *intel_dp)
815 {
816 int max_rate = intel_dp->source_rates[intel_dp->num_source_rates - 1];
817
818 return max_rate >= 540000;
819 }
820
intel_dp_source_supports_hbr3(struct intel_dp * intel_dp)821 bool intel_dp_source_supports_hbr3(struct intel_dp *intel_dp)
822 {
823 int max_rate = intel_dp->source_rates[intel_dp->num_source_rates - 1];
824
825 return max_rate >= 810000;
826 }
827
static void snprintf_int_array(char *str, size_t len,
			       const int *array, int nelem)
{
	int idx;

	str[0] = '\0';

	/* Append "a, b, c" style entries until the buffer runs out. */
	for (idx = 0; idx < nelem; idx++) {
		const char *sep = idx ? ", " : "";
		int written = snprintf(str, len, "%s%d", sep, array[idx]);

		/* Stop once the output no longer fits (truncated). */
		if (written >= len)
			return;
		str += written;
		len -= written;
	}
}
843
/* Dump the source/sink/common rate tables to the KMS debug log. */
static void intel_dp_print_rates(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	char str[128]; /* FIXME: too big for stack? */

	/* Skip the formatting work entirely if KMS debugging is off. */
	if (!drm_debug_enabled(DRM_UT_KMS))
		return;

	snprintf_int_array(str, sizeof(str),
			   intel_dp->source_rates, intel_dp->num_source_rates);
	drm_dbg_kms(&i915->drm, "source rates: %s\n", str);

	snprintf_int_array(str, sizeof(str),
			   intel_dp->sink_rates, intel_dp->num_sink_rates);
	drm_dbg_kms(&i915->drm, "sink rates: %s\n", str);

	snprintf_int_array(str, sizeof(str),
			   intel_dp->common_rates, intel_dp->num_common_rates);
	drm_dbg_kms(&i915->drm, "common rates: %s\n", str);
}
864
/* Highest common rate not exceeding the current max_link_rate limit. */
int
intel_dp_max_link_rate(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int len;

	len = intel_dp_common_len_rate_limit(intel_dp, intel_dp->max_link_rate);
	if (drm_WARN_ON(&i915->drm, len <= 0))
		return 162000; /* fall back to RBR */

	return intel_dp->common_rates[len - 1];
}
877
/* Index of rate in the sink rate table, for the eDP 1.4 rate select method. */
int intel_dp_rate_select(struct intel_dp *intel_dp, int rate)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int i = intel_dp_rate_index(intel_dp->sink_rates,
				    intel_dp->num_sink_rates, rate);

	/* The rate should always come from the sink table; 0 is a safe fallback. */
	if (drm_WARN_ON(&i915->drm, i < 0))
		i = 0;

	return i;
}
889
/*
 * Translate a port clock into the DPCD programming values: either the
 * eDP 1.4 rate-select index, or the standard link bandwidth code.
 */
void intel_dp_compute_rate(struct intel_dp *intel_dp, int port_clock,
			   u8 *link_bw, u8 *rate_select)
{
	/* eDP 1.4 rate select method. */
	if (intel_dp->use_rate_select) {
		*link_bw = 0;
		*rate_select =
			intel_dp_rate_select(intel_dp, port_clock);
	} else {
		*link_bw = drm_dp_link_rate_to_bw_code(port_clock);
		*rate_select = 0;
	}
}
903
static bool intel_dp_source_supports_fec(struct intel_dp *intel_dp,
					 const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/* On TGL, FEC is supported on all Pipes */
	if (DISPLAY_VER(dev_priv) >= 12)
		return true;

	/* On display ver 11, FEC works on every transcoder except A. */
	if (DISPLAY_VER(dev_priv) == 11 && pipe_config->cpu_transcoder != TRANSCODER_A)
		return true;

	return false;
}
918
intel_dp_supports_fec(struct intel_dp * intel_dp,const struct intel_crtc_state * pipe_config)919 static bool intel_dp_supports_fec(struct intel_dp *intel_dp,
920 const struct intel_crtc_state *pipe_config)
921 {
922 return intel_dp_source_supports_fec(intel_dp, pipe_config) &&
923 drm_dp_sink_supports_fec(intel_dp->fec_capable);
924 }
925
intel_dp_supports_dsc(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)926 static bool intel_dp_supports_dsc(struct intel_dp *intel_dp,
927 const struct intel_crtc_state *crtc_state)
928 {
929 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP) && !crtc_state->fec_enable)
930 return false;
931
932 return intel_dsc_source_support(crtc_state) &&
933 drm_dp_sink_supports_dsc(intel_dp->dsc_dpcd);
934 }
935
intel_dp_hdmi_ycbcr420(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)936 static bool intel_dp_hdmi_ycbcr420(struct intel_dp *intel_dp,
937 const struct intel_crtc_state *crtc_state)
938 {
939 return crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 ||
940 (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444 &&
941 intel_dp->dfp.ycbcr_444_to_420);
942 }
943
intel_dp_hdmi_tmds_clock(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state,int bpc)944 static int intel_dp_hdmi_tmds_clock(struct intel_dp *intel_dp,
945 const struct intel_crtc_state *crtc_state, int bpc)
946 {
947 int clock = crtc_state->hw.adjusted_mode.crtc_clock * bpc / 8;
948
949 if (intel_dp_hdmi_ycbcr420(intel_dp, crtc_state))
950 clock /= 2;
951
952 return clock;
953 }
954
intel_dp_hdmi_tmds_clock_valid(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state,int bpc)955 static bool intel_dp_hdmi_tmds_clock_valid(struct intel_dp *intel_dp,
956 const struct intel_crtc_state *crtc_state, int bpc)
957 {
958 int tmds_clock = intel_dp_hdmi_tmds_clock(intel_dp, crtc_state, bpc);
959
960 if (intel_dp->dfp.min_tmds_clock &&
961 tmds_clock < intel_dp->dfp.min_tmds_clock)
962 return false;
963
964 if (intel_dp->dfp.max_tmds_clock &&
965 tmds_clock > intel_dp->dfp.max_tmds_clock)
966 return false;
967
968 return true;
969 }
970
intel_dp_hdmi_deep_color_possible(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state,int bpc)971 static bool intel_dp_hdmi_deep_color_possible(struct intel_dp *intel_dp,
972 const struct intel_crtc_state *crtc_state,
973 int bpc)
974 {
975
976 return intel_hdmi_deep_color_possible(crtc_state, bpc,
977 intel_dp->has_hdmi_sink,
978 intel_dp_hdmi_ycbcr420(intel_dp, crtc_state)) &&
979 intel_dp_hdmi_tmds_clock_valid(intel_dp, crtc_state, bpc);
980 }
981
intel_dp_max_bpp(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)982 static int intel_dp_max_bpp(struct intel_dp *intel_dp,
983 const struct intel_crtc_state *crtc_state)
984 {
985 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
986 struct intel_connector *intel_connector = intel_dp->attached_connector;
987 int bpp, bpc;
988
989 bpc = crtc_state->pipe_bpp / 3;
990
991 if (intel_dp->dfp.max_bpc)
992 bpc = min_t(int, bpc, intel_dp->dfp.max_bpc);
993
994 if (intel_dp->dfp.min_tmds_clock) {
995 for (; bpc >= 10; bpc -= 2) {
996 if (intel_dp_hdmi_deep_color_possible(intel_dp, crtc_state, bpc))
997 break;
998 }
999 }
1000
1001 bpp = bpc * 3;
1002 if (intel_dp_is_edp(intel_dp)) {
1003 /* Get bpp from vbt only for panels that dont have bpp in edid */
1004 if (intel_connector->base.display_info.bpc == 0 &&
1005 dev_priv->vbt.edp.bpp && dev_priv->vbt.edp.bpp < bpp) {
1006 drm_dbg_kms(&dev_priv->drm,
1007 "clamping bpp for eDP panel to BIOS-provided %i\n",
1008 dev_priv->vbt.edp.bpp);
1009 bpp = dev_priv->vbt.edp.bpp;
1010 }
1011 }
1012
1013 return bpp;
1014 }
1015
/* Adjust link config limits based on compliance test requests. */
void
intel_dp_adjust_compliance_config(struct intel_dp *intel_dp,
				  struct intel_crtc_state *pipe_config,
				  struct link_config_limits *limits)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	/* For DP Compliance we override the computed bpp for the pipe */
	if (intel_dp->compliance.test_data.bpc != 0) {
		int bpp = 3 * intel_dp->compliance.test_data.bpc;

		/* Pin bpp to exactly the requested value */
		limits->min_bpp = limits->max_bpp = bpp;
		/* 6 bpc (18 bpp) output must not be dithered for the test */
		pipe_config->dither_force_disable = bpp == 6 * 3;

		drm_dbg_kms(&i915->drm, "Setting pipe_bpp to %d\n", bpp);
	}

	/* Use values requested by Compliance Test Request */
	if (intel_dp->compliance.test_type == DP_TEST_LINK_TRAINING) {
		int index;

		/* Validate the compliance test data since max values
		 * might have changed due to link train fallback.
		 */
		if (intel_dp_link_params_valid(intel_dp, intel_dp->compliance.test_link_rate,
					       intel_dp->compliance.test_lane_count)) {
			index = intel_dp_rate_index(intel_dp->common_rates,
						    intel_dp->num_common_rates,
						    intel_dp->compliance.test_link_rate);
			/* Pin clock index and lane count to the requested values */
			if (index >= 0)
				limits->min_clock = limits->max_clock = index;
			limits->min_lane_count = limits->max_lane_count =
				intel_dp->compliance.test_lane_count;
		}
	}
}
1053
/* Optimize link config in order: max bpp, min clock, min lanes */
static int
intel_dp_compute_link_config_wide(struct intel_dp *intel_dp,
				  struct intel_crtc_state *pipe_config,
				  const struct link_config_limits *limits)
{
	struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
	int bpp, clock, lane_count;
	int mode_rate, link_clock, link_avail;

	/*
	 * The loop nesting implements the "slow and wide" preference:
	 * highest bpp first, then lowest link clock, then fewest lanes
	 * (lane counts are powers of two, hence the <<= 1).  The first
	 * combination with enough bandwidth wins.
	 */
	for (bpp = limits->max_bpp; bpp >= limits->min_bpp; bpp -= 2 * 3) {
		int output_bpp = intel_dp_output_bpp(pipe_config->output_format, bpp);

		mode_rate = intel_dp_link_required(adjusted_mode->crtc_clock,
						   output_bpp);

		for (clock = limits->min_clock; clock <= limits->max_clock; clock++) {
			for (lane_count = limits->min_lane_count;
			     lane_count <= limits->max_lane_count;
			     lane_count <<= 1) {
				link_clock = intel_dp->common_rates[clock];
				link_avail = intel_dp_max_data_rate(link_clock,
								    lane_count);

				if (mode_rate <= link_avail) {
					/* First fit wins */
					pipe_config->lane_count = lane_count;
					pipe_config->pipe_bpp = bpp;
					pipe_config->port_clock = link_clock;

					return 0;
				}
			}
		}
	}

	/* No configuration within the limits can carry this mode */
	return -EINVAL;
}
1091
/*
 * Pick a sink-supported DSC input bpc that does not exceed either the
 * hardware limit (10 on ICL, 12 on TGL+) or the requested max bpc, and
 * return it as a pipe bpp (bpc * 3); returns 0 when none fits.
 */
static int intel_dp_dsc_compute_bpp(struct intel_dp *intel_dp, u8 max_req_bpc)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 dsc_bpc[3] = {0};
	u8 dsc_max_bpc;
	int num_bpc, i;

	/* Max DSC Input BPC for ICL is 10 and for TGL+ is 12 */
	dsc_max_bpc = min_t(u8, DISPLAY_VER(i915) >= 12 ? 12 : 10, max_req_bpc);

	num_bpc = drm_dp_dsc_sink_supported_input_bpcs(intel_dp->dsc_dpcd,
						       dsc_bpc);
	/* Take the first supported bpc that fits under the cap */
	for (i = 0; i < num_bpc; i++) {
		if (dsc_bpc[i] <= dsc_max_bpc)
			return dsc_bpc[i] * 3;
	}

	return 0;
}
1114
1115 #define DSC_SUPPORTED_VERSION_MIN 1
1116
/*
 * Fill in the DSC configuration (struct drm_dsc_config) for the crtc
 * state, combining source-side parameters with the sink's DSC DPCD
 * capabilities, then compute the final rate-control parameters.
 *
 * Returns 0 on success or a negative error code.
 */
static int intel_dp_dsc_compute_params(struct intel_encoder *encoder,
				       struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	u8 line_buf_depth;
	int ret;

	/*
	 * RC_MODEL_SIZE is currently a constant across all configurations.
	 *
	 * FIXME: Look into using sink defined DPCD DP_DSC_RC_BUF_BLK_SIZE and
	 * DP_DSC_RC_BUF_SIZE for this.
	 */
	vdsc_cfg->rc_model_size = DSC_RC_MODEL_SIZE_CONST;

	/*
	 * Slice Height of 8 works for all currently available panels. So start
	 * with that if pic_height is an integral multiple of 8. Eventually add
	 * logic to try multiple slice heights.
	 */
	if (vdsc_cfg->pic_height % 8 == 0)
		vdsc_cfg->slice_height = 8;
	else if (vdsc_cfg->pic_height % 4 == 0)
		vdsc_cfg->slice_height = 4;
	else
		vdsc_cfg->slice_height = 2;

	ret = intel_dsc_compute_params(encoder, crtc_state);
	if (ret)
		return ret;

	/* DSC version: major from the sink; minor capped by DSC_SUPPORTED_VERSION_MIN */
	vdsc_cfg->dsc_version_major =
		(intel_dp->dsc_dpcd[DP_DSC_REV - DP_DSC_SUPPORT] &
		 DP_DSC_MAJOR_MASK) >> DP_DSC_MAJOR_SHIFT;
	vdsc_cfg->dsc_version_minor =
		min(DSC_SUPPORTED_VERSION_MIN,
		    (intel_dp->dsc_dpcd[DP_DSC_REV - DP_DSC_SUPPORT] &
		     DP_DSC_MINOR_MASK) >> DP_DSC_MINOR_SHIFT);

	vdsc_cfg->convert_rgb = intel_dp->dsc_dpcd[DP_DSC_DEC_COLOR_FORMAT_CAP - DP_DSC_SUPPORT] &
		DP_DSC_RGB;

	line_buf_depth = drm_dp_dsc_sink_line_buf_depth(intel_dp->dsc_dpcd);
	if (!line_buf_depth) {
		drm_dbg_kms(&i915->drm,
			    "DSC Sink Line Buffer Depth invalid\n");
		return -EINVAL;
	}

	/* Clamp/translate the line buffer depth per the negotiated DSC minor version */
	if (vdsc_cfg->dsc_version_minor == 2)
		vdsc_cfg->line_buf_depth = (line_buf_depth == DSC_1_2_MAX_LINEBUF_DEPTH_BITS) ?
			DSC_1_2_MAX_LINEBUF_DEPTH_VAL : line_buf_depth;
	else
		vdsc_cfg->line_buf_depth = (line_buf_depth > DSC_1_1_MAX_LINEBUF_DEPTH_BITS) ?
			DSC_1_1_MAX_LINEBUF_DEPTH_BITS : line_buf_depth;

	vdsc_cfg->block_pred_enable =
		intel_dp->dsc_dpcd[DP_DSC_BLK_PREDICTION_SUPPORT - DP_DSC_SUPPORT] &
		DP_DSC_BLK_PREDICTION_IS_SUPPORTED;

	return drm_dsc_compute_rc_parameters(vdsc_cfg);
}
1181
/*
 * Compute a DSC-compressed link configuration: select the pipe bpp,
 * link rate / lane count (currently the maximums from @limits), the
 * compressed bpp and slice count, then fill in the DSC parameters.
 *
 * Returns 0 on success, -EINVAL when DSC can't be used for this state,
 * or the error from intel_dp_dsc_compute_params().
 */
static int intel_dp_dsc_compute_config(struct intel_dp *intel_dp,
				       struct intel_crtc_state *pipe_config,
				       struct drm_connector_state *conn_state,
				       struct link_config_limits *limits)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	int pipe_bpp;
	int ret;

	/* FEC is enabled for DSC on DP (but not eDP) links */
	pipe_config->fec_enable = !intel_dp_is_edp(intel_dp) &&
		intel_dp_supports_fec(intel_dp, pipe_config);

	if (!intel_dp_supports_dsc(intel_dp, pipe_config))
		return -EINVAL;

	pipe_bpp = intel_dp_dsc_compute_bpp(intel_dp, conn_state->max_requested_bpc);

	/* Min Input BPC for ICL+ is 8 */
	if (pipe_bpp < 8 * 3) {
		drm_dbg_kms(&dev_priv->drm,
			    "No DSC support for less than 8bpc\n");
		return -EINVAL;
	}

	/*
	 * For now enable DSC for max bpp, max link rate, max lane count.
	 * Optimize this later for the minimum possible link rate/lane count
	 * with DSC enabled for the requested mode.
	 */
	pipe_config->pipe_bpp = pipe_bpp;
	pipe_config->port_clock = intel_dp->common_rates[limits->max_clock];
	pipe_config->lane_count = limits->max_lane_count;

	if (intel_dp_is_edp(intel_dp)) {
		/* eDP sink reports its max output bpp directly; >> 4 drops the fractional bits */
		pipe_config->dsc.compressed_bpp =
			min_t(u16, drm_edp_dsc_sink_output_bpp(intel_dp->dsc_dpcd) >> 4,
			      pipe_config->pipe_bpp);
		pipe_config->dsc.slice_count =
			drm_dp_dsc_sink_max_slice_count(intel_dp->dsc_dpcd,
							true);
	} else {
		u16 dsc_max_output_bpp;
		u8 dsc_dp_slice_count;

		/* DP: derive max compressed bpp / slice count from link and mode */
		dsc_max_output_bpp =
			intel_dp_dsc_get_output_bpp(dev_priv,
						    pipe_config->port_clock,
						    pipe_config->lane_count,
						    adjusted_mode->crtc_clock,
						    adjusted_mode->crtc_hdisplay,
						    pipe_config->bigjoiner,
						    pipe_bpp);
		dsc_dp_slice_count =
			intel_dp_dsc_get_slice_count(intel_dp,
						     adjusted_mode->crtc_clock,
						     adjusted_mode->crtc_hdisplay,
						     pipe_config->bigjoiner);
		if (!dsc_max_output_bpp || !dsc_dp_slice_count) {
			drm_dbg_kms(&dev_priv->drm,
				    "Compressed BPP/Slice Count not supported\n");
			return -EINVAL;
		}
		pipe_config->dsc.compressed_bpp = min_t(u16,
							dsc_max_output_bpp >> 4,
							pipe_config->pipe_bpp);
		pipe_config->dsc.slice_count = dsc_dp_slice_count;
	}

	/* As of today we support DSC for only RGB */
	if (intel_dp->force_dsc_bpp) {
		/* Apply the externally forced compressed bpp, if it's in range */
		if (intel_dp->force_dsc_bpp >= 8 &&
		    intel_dp->force_dsc_bpp < pipe_bpp) {
			drm_dbg_kms(&dev_priv->drm,
				    "DSC BPP forced to %d",
				    intel_dp->force_dsc_bpp);
			pipe_config->dsc.compressed_bpp =
				intel_dp->force_dsc_bpp;
		} else {
			drm_dbg_kms(&dev_priv->drm,
				    "Invalid DSC BPP %d",
				    intel_dp->force_dsc_bpp);
		}
	}

	/*
	 * VDSC engine operates at 1 Pixel per clock, so if peak pixel rate
	 * is greater than the maximum Cdclock and if slice count is even
	 * then we need to use 2 VDSC instances.
	 */
	if (adjusted_mode->crtc_clock > dev_priv->max_cdclk_freq ||
	    pipe_config->bigjoiner) {
		if (pipe_config->dsc.slice_count < 2) {
			drm_dbg_kms(&dev_priv->drm,
				    "Cannot split stream to use 2 VDSC instances\n");
			return -EINVAL;
		}

		pipe_config->dsc.dsc_split = true;
	}

	ret = intel_dp_dsc_compute_params(&dig_port->base, pipe_config);
	if (ret < 0) {
		drm_dbg_kms(&dev_priv->drm,
			    "Cannot compute valid DSC parameters for Input Bpp = %d "
			    "Compressed BPP = %d\n",
			    pipe_config->pipe_bpp,
			    pipe_config->dsc.compressed_bpp);
		return ret;
	}

	pipe_config->dsc.compression_enable = true;
	drm_dbg_kms(&dev_priv->drm, "DP DSC computed with Input Bpp = %d "
		    "Compressed Bpp = %d Slice Count = %d\n",
		    pipe_config->pipe_bpp,
		    pipe_config->dsc.compressed_bpp,
		    pipe_config->dsc.slice_count);

	return 0;
}
1304
/*
 * Compute the DP link configuration (link rate, lane count, bpp) for
 * the crtc state: build the limits, try an uncompressed "slow and wide"
 * config first, then fall back to DSC when that fails, when DSC is
 * forced, or when a big joiner is needed on pre-display-13 hardware.
 *
 * Returns 0 on success or a negative error code.
 */
static int
intel_dp_compute_link_config(struct intel_encoder *encoder,
			     struct intel_crtc_state *pipe_config,
			     struct drm_connector_state *conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct link_config_limits limits;
	int common_len;
	int ret;

	common_len = intel_dp_common_len_rate_limit(intel_dp,
						    intel_dp->max_link_rate);

	/* No common link rates between source and sink */
	drm_WARN_ON(encoder->base.dev, common_len <= 0);

	/* min/max_clock are indices into intel_dp->common_rates[] */
	limits.min_clock = 0;
	limits.max_clock = common_len - 1;

	limits.min_lane_count = 1;
	limits.max_lane_count = intel_dp_max_lane_count(intel_dp);

	limits.min_bpp = intel_dp_min_bpp(pipe_config->output_format);
	limits.max_bpp = intel_dp_max_bpp(intel_dp, pipe_config);

	if (intel_dp->use_max_params) {
		/*
		 * Use the maximum clock and number of lanes the eDP panel
		 * advertizes being capable of in case the initial fast
		 * optimal params failed us. The panels are generally
		 * designed to support only a single clock and lane
		 * configuration, and typically on older panels these
		 * values correspond to the native resolution of the panel.
		 */
		limits.min_lane_count = limits.max_lane_count;
		limits.min_clock = limits.max_clock;
	}

	intel_dp_adjust_compliance_config(intel_dp, pipe_config, &limits);

	drm_dbg_kms(&i915->drm, "DP link computation with max lane count %i "
		    "max rate %d max bpp %d pixel clock %iKHz\n",
		    limits.max_lane_count,
		    intel_dp->common_rates[limits.max_clock],
		    limits.max_bpp, adjusted_mode->crtc_clock);

	/* Modes beyond the max dotclock or wider than 5120 need the big joiner */
	if ((adjusted_mode->crtc_clock > i915->max_dotclk_freq ||
	     adjusted_mode->crtc_hdisplay > 5120) &&
	    intel_dp_can_bigjoiner(intel_dp))
		pipe_config->bigjoiner = true;

	/*
	 * Optimize for slow and wide for everything, because there are some
	 * eDP 1.3 and 1.4 panels don't work well with fast and narrow.
	 */
	ret = intel_dp_compute_link_config_wide(intel_dp, pipe_config, &limits);

	/*
	 * Pipe joiner needs compression upto display12 due to BW limitation. DG2
	 * onwards pipe joiner can be enabled without compression.
	 */
	drm_dbg_kms(&i915->drm, "Force DSC en = %d\n", intel_dp->force_dsc_en);
	if (ret || intel_dp->force_dsc_en || (DISPLAY_VER(i915) < 13 &&
					      pipe_config->bigjoiner)) {
		ret = intel_dp_dsc_compute_config(intel_dp, pipe_config,
						  conn_state, &limits);
		if (ret < 0)
			return ret;
	}

	if (pipe_config->dsc.compression_enable) {
		drm_dbg_kms(&i915->drm,
			    "DP lane count %d clock %d Input bpp %d Compressed bpp %d\n",
			    pipe_config->lane_count, pipe_config->port_clock,
			    pipe_config->pipe_bpp,
			    pipe_config->dsc.compressed_bpp);

		drm_dbg_kms(&i915->drm,
			    "DP link rate required %i available %i\n",
			    intel_dp_link_required(adjusted_mode->crtc_clock,
						   pipe_config->dsc.compressed_bpp),
			    intel_dp_max_data_rate(pipe_config->port_clock,
						   pipe_config->lane_count));
	} else {
		drm_dbg_kms(&i915->drm, "DP lane count %d clock %d bpp %d\n",
			    pipe_config->lane_count, pipe_config->port_clock,
			    pipe_config->pipe_bpp);

		drm_dbg_kms(&i915->drm,
			    "DP link rate required %i available %i\n",
			    intel_dp_link_required(adjusted_mode->crtc_clock,
						   pipe_config->pipe_bpp),
			    intel_dp_max_data_rate(pipe_config->port_clock,
						   pipe_config->lane_count));
	}
	return 0;
}
1405
intel_dp_limited_color_range(const struct intel_crtc_state * crtc_state,const struct drm_connector_state * conn_state)1406 bool intel_dp_limited_color_range(const struct intel_crtc_state *crtc_state,
1407 const struct drm_connector_state *conn_state)
1408 {
1409 const struct intel_digital_connector_state *intel_conn_state =
1410 to_intel_digital_connector_state(conn_state);
1411 const struct drm_display_mode *adjusted_mode =
1412 &crtc_state->hw.adjusted_mode;
1413
1414 /*
1415 * Our YCbCr output is always limited range.
1416 * crtc_state->limited_color_range only applies to RGB,
1417 * and it must never be set for YCbCr or we risk setting
1418 * some conflicting bits in PIPECONF which will mess up
1419 * the colors on the monitor.
1420 */
1421 if (crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB)
1422 return false;
1423
1424 if (intel_conn_state->broadcast_rgb == INTEL_BROADCAST_RGB_AUTO) {
1425 /*
1426 * See:
1427 * CEA-861-E - 5.1 Default Encoding Parameters
1428 * VESA DisplayPort Ver.1.2a - 5.1.1.1 Video Colorimetry
1429 */
1430 return crtc_state->pipe_bpp != 18 &&
1431 drm_default_rgb_quant_range(adjusted_mode) ==
1432 HDMI_QUANTIZATION_RANGE_LIMITED;
1433 } else {
1434 return intel_conn_state->broadcast_rgb ==
1435 INTEL_BROADCAST_RGB_LIMITED;
1436 }
1437 }
1438
intel_dp_port_has_audio(struct drm_i915_private * dev_priv,enum port port)1439 static bool intel_dp_port_has_audio(struct drm_i915_private *dev_priv,
1440 enum port port)
1441 {
1442 if (IS_G4X(dev_priv))
1443 return false;
1444 if (DISPLAY_VER(dev_priv) < 12 && port == PORT_A)
1445 return false;
1446
1447 return true;
1448 }
1449
/*
 * Fill in the pixel format, colorimetry, bpc, dynamic range and content
 * type of a revision 0x5 VSC SDP from the crtc and connector state,
 * per DP 1.4a spec Tables 2-118 and 2-120.
 */
static void intel_dp_compute_vsc_colorimetry(const struct intel_crtc_state *crtc_state,
					     const struct drm_connector_state *conn_state,
					     struct drm_dp_vsc_sdp *vsc)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	/*
	 * Prepare VSC Header for SU as per DP 1.4 spec, Table 2-118
	 * VSC SDP supporting 3D stereo, PSR2, and Pixel Encoding/
	 * Colorimetry Format indication.
	 */
	vsc->revision = 0x5;
	vsc->length = 0x13;

	/* DP 1.4a spec, Table 2-120 */
	switch (crtc_state->output_format) {
	case INTEL_OUTPUT_FORMAT_YCBCR444:
		vsc->pixelformat = DP_PIXELFORMAT_YUV444;
		break;
	case INTEL_OUTPUT_FORMAT_YCBCR420:
		vsc->pixelformat = DP_PIXELFORMAT_YUV420;
		break;
	case INTEL_OUTPUT_FORMAT_RGB:
	default:
		vsc->pixelformat = DP_PIXELFORMAT_RGB;
	}

	/* Map the connector's colorspace property to DP VSC colorimetry */
	switch (conn_state->colorspace) {
	case DRM_MODE_COLORIMETRY_BT709_YCC:
		vsc->colorimetry = DP_COLORIMETRY_BT709_YCC;
		break;
	case DRM_MODE_COLORIMETRY_XVYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_XVYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_XVYCC_709:
		vsc->colorimetry = DP_COLORIMETRY_XVYCC_709;
		break;
	case DRM_MODE_COLORIMETRY_SYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_SYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_OPYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_OPYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_CYCC:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_CYCC;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_RGB:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_RGB;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_YCC:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_YCC;
		break;
	case DRM_MODE_COLORIMETRY_DCI_P3_RGB_D65:
	case DRM_MODE_COLORIMETRY_DCI_P3_RGB_THEATER:
		vsc->colorimetry = DP_COLORIMETRY_DCI_P3_RGB;
		break;
	default:
		/*
		 * RGB->YCBCR color conversion uses the BT.709
		 * color space.
		 */
		if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
			vsc->colorimetry = DP_COLORIMETRY_BT709_YCC;
		else
			vsc->colorimetry = DP_COLORIMETRY_DEFAULT;
		break;
	}

	vsc->bpc = crtc_state->pipe_bpp / 3;

	/* only RGB pixelformat supports 6 bpc */
	drm_WARN_ON(&dev_priv->drm,
		    vsc->bpc == 6 && vsc->pixelformat != DP_PIXELFORMAT_RGB);

	/* all YCbCr are always limited range */
	vsc->dynamic_range = DP_DYNAMIC_RANGE_CTA;
	vsc->content_type = DP_CONTENT_TYPE_NOT_DEFINED;
}
1529
intel_dp_compute_vsc_sdp(struct intel_dp * intel_dp,struct intel_crtc_state * crtc_state,const struct drm_connector_state * conn_state)1530 static void intel_dp_compute_vsc_sdp(struct intel_dp *intel_dp,
1531 struct intel_crtc_state *crtc_state,
1532 const struct drm_connector_state *conn_state)
1533 {
1534 struct drm_dp_vsc_sdp *vsc = &crtc_state->infoframes.vsc;
1535
1536 /* When a crtc state has PSR, VSC SDP will be handled by PSR routine */
1537 if (crtc_state->has_psr)
1538 return;
1539
1540 if (!intel_dp_needs_vsc_sdp(crtc_state, conn_state))
1541 return;
1542
1543 crtc_state->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
1544 vsc->sdp_type = DP_SDP_VSC;
1545 intel_dp_compute_vsc_colorimetry(crtc_state, conn_state,
1546 &crtc_state->infoframes.vsc);
1547 }
1548
intel_dp_compute_psr_vsc_sdp(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state,const struct drm_connector_state * conn_state,struct drm_dp_vsc_sdp * vsc)1549 void intel_dp_compute_psr_vsc_sdp(struct intel_dp *intel_dp,
1550 const struct intel_crtc_state *crtc_state,
1551 const struct drm_connector_state *conn_state,
1552 struct drm_dp_vsc_sdp *vsc)
1553 {
1554 vsc->sdp_type = DP_SDP_VSC;
1555
1556 if (intel_dp->psr.psr2_enabled) {
1557 if (intel_dp->psr.colorimetry_support &&
1558 intel_dp_needs_vsc_sdp(crtc_state, conn_state)) {
1559 /* [PSR2, +Colorimetry] */
1560 intel_dp_compute_vsc_colorimetry(crtc_state, conn_state,
1561 vsc);
1562 } else {
1563 /*
1564 * [PSR2, -Colorimetry]
1565 * Prepare VSC Header for SU as per eDP 1.4 spec, Table 6-11
1566 * 3D stereo + PSR/PSR2 + Y-coordinate.
1567 */
1568 vsc->revision = 0x4;
1569 vsc->length = 0xe;
1570 }
1571 } else {
1572 /*
1573 * [PSR1]
1574 * Prepare VSC Header for SU as per DP 1.4 spec, Table 2-118
1575 * VSC SDP supporting 3D stereo + PSR (applies to eDP v1.3 or
1576 * higher).
1577 */
1578 vsc->revision = 0x2;
1579 vsc->length = 0x8;
1580 }
1581 }
1582
1583 static void
intel_dp_compute_hdr_metadata_infoframe_sdp(struct intel_dp * intel_dp,struct intel_crtc_state * crtc_state,const struct drm_connector_state * conn_state)1584 intel_dp_compute_hdr_metadata_infoframe_sdp(struct intel_dp *intel_dp,
1585 struct intel_crtc_state *crtc_state,
1586 const struct drm_connector_state *conn_state)
1587 {
1588 int ret;
1589 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1590 struct hdmi_drm_infoframe *drm_infoframe = &crtc_state->infoframes.drm.drm;
1591
1592 if (!conn_state->hdr_output_metadata)
1593 return;
1594
1595 ret = drm_hdmi_infoframe_set_hdr_metadata(drm_infoframe, conn_state);
1596
1597 if (ret) {
1598 drm_dbg_kms(&dev_priv->drm, "couldn't set HDR metadata in infoframe\n");
1599 return;
1600 }
1601
1602 crtc_state->infoframes.enable |=
1603 intel_hdmi_infoframe_enable(HDMI_PACKET_TYPE_GAMUT_METADATA);
1604 }
1605
/*
 * Compute DRRS (seamless refresh rate switching) state and the M2/N2
 * link values for the panel's downclock mode.  Skipped when VRR or PSR
 * is active, or when the panel/platform lacks seamless DRRS support.
 */
static void
intel_dp_drrs_compute_config(struct intel_dp *intel_dp,
			     struct intel_crtc_state *pipe_config,
			     int output_bpp, bool constant_n)
{
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	int pixel_clock;

	/* No DRRS when VRR is enabled */
	if (pipe_config->vrr.enable)
		return;

	/*
	 * DRRS and PSR can't be enable together, so giving preference to PSR
	 * as it allows more power-savings by complete shutting down display,
	 * so to guarantee this, intel_dp_drrs_compute_config() must be called
	 * after intel_psr_compute_config().
	 */
	if (pipe_config->has_psr)
		return;

	if (!intel_connector->panel.downclock_mode ||
	    dev_priv->drrs.type != SEAMLESS_DRRS_SUPPORT)
		return;

	pipe_config->has_drrs = true;

	pixel_clock = intel_connector->panel.downclock_mode->clock;
	/* With an MSO splitter, each link carries a fraction of the pixels */
	if (pipe_config->splitter.enable)
		pixel_clock /= pipe_config->splitter.link_count;

	intel_link_compute_m_n(output_bpp, pipe_config->lane_count, pixel_clock,
			       pipe_config->port_clock, &pipe_config->dp_m2_n2,
			       constant_n, pipe_config->fec_enable);

	/* FIXME: abstract this better */
	if (pipe_config->splitter.enable)
		pipe_config->dp_m2_n2.gmch_m *= pipe_config->splitter.link_count;
}
1645
/*
 * Main ->compute_config() hook for DP encoders: validates the adjusted
 * mode, computes the link configuration, color range, MSO splitter
 * timings, M/N values and the various SDPs (VSC, HDR metadata).
 *
 * Returns 0 on success or a negative error code.
 */
int
intel_dp_compute_config(struct intel_encoder *encoder,
			struct intel_crtc_state *pipe_config,
			struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct intel_digital_connector_state *intel_conn_state =
		to_intel_digital_connector_state(conn_state);
	bool constant_n = drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_CONSTANT_N);
	int ret = 0, output_bpp;

	if (HAS_PCH_SPLIT(dev_priv) && !HAS_DDI(dev_priv) && port != PORT_A)
		pipe_config->has_pch_encoder = true;

	pipe_config->output_format = intel_dp_output_format(&intel_connector->base,
							    adjusted_mode);

	if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420) {
		ret = intel_pch_panel_fitting(pipe_config, conn_state);
		if (ret)
			return ret;
	}

	/* Resolve audio: platform capability, then user override, then sink */
	if (!intel_dp_port_has_audio(dev_priv, port))
		pipe_config->has_audio = false;
	else if (intel_conn_state->force_audio == HDMI_AUDIO_AUTO)
		pipe_config->has_audio = intel_dp->has_audio;
	else
		pipe_config->has_audio = intel_conn_state->force_audio == HDMI_AUDIO_ON;

	if (intel_dp_is_edp(intel_dp) && intel_connector->panel.fixed_mode) {
		/* eDP always scans out the panel's fixed mode; panel fitter scales */
		intel_fixed_panel_mode(intel_connector->panel.fixed_mode,
				       adjusted_mode);

		if (HAS_GMCH(dev_priv))
			ret = intel_gmch_panel_fitting(pipe_config, conn_state);
		else
			ret = intel_pch_panel_fitting(pipe_config, conn_state);
		if (ret)
			return ret;
	}

	/* Reject mode flags this output can't handle */
	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
		return -EINVAL;

	if (HAS_GMCH(dev_priv) &&
	    adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return -EINVAL;

	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLCLK)
		return -EINVAL;

	if (intel_dp_hdisplay_bad(dev_priv, adjusted_mode->crtc_hdisplay))
		return -EINVAL;

	ret = intel_dp_compute_link_config(encoder, pipe_config, conn_state);
	if (ret < 0)
		return ret;

	pipe_config->limited_color_range =
		intel_dp_limited_color_range(pipe_config, conn_state);

	/* The bpp actually carried over the link (compressed when DSC is on) */
	if (pipe_config->dsc.compression_enable)
		output_bpp = pipe_config->dsc.compressed_bpp;
	else
		output_bpp = intel_dp_output_bpp(pipe_config->output_format,
						 pipe_config->pipe_bpp);

	if (intel_dp->mso_link_count) {
		int n = intel_dp->mso_link_count;
		int overlap = intel_dp->mso_pixel_overlap;

		pipe_config->splitter.enable = true;
		pipe_config->splitter.link_count = n;
		pipe_config->splitter.pixel_overlap = overlap;

		drm_dbg_kms(&dev_priv->drm, "MSO link count %d, pixel overlap %d\n",
			    n, overlap);

		/* Each MSO link carries 1/n of the horizontal timings plus the overlap */
		adjusted_mode->crtc_hdisplay = adjusted_mode->crtc_hdisplay / n + overlap;
		adjusted_mode->crtc_hblank_start = adjusted_mode->crtc_hblank_start / n + overlap;
		adjusted_mode->crtc_hblank_end = adjusted_mode->crtc_hblank_end / n + overlap;
		adjusted_mode->crtc_hsync_start = adjusted_mode->crtc_hsync_start / n + overlap;
		adjusted_mode->crtc_hsync_end = adjusted_mode->crtc_hsync_end / n + overlap;
		adjusted_mode->crtc_htotal = adjusted_mode->crtc_htotal / n + overlap;
		adjusted_mode->crtc_clock /= n;
	}

	intel_link_compute_m_n(output_bpp,
			       pipe_config->lane_count,
			       adjusted_mode->crtc_clock,
			       pipe_config->port_clock,
			       &pipe_config->dp_m_n,
			       constant_n, pipe_config->fec_enable);

	/* FIXME: abstract this better */
	if (pipe_config->splitter.enable)
		pipe_config->dp_m_n.gmch_m *= pipe_config->splitter.link_count;

	if (!HAS_DDI(dev_priv))
		g4x_dp_set_clock(encoder, pipe_config);

	/* NOTE: DRRS must be computed after PSR (see comment in that function) */
	intel_vrr_compute_config(pipe_config, conn_state);
	intel_psr_compute_config(intel_dp, pipe_config);
	intel_dp_drrs_compute_config(intel_dp, pipe_config, output_bpp,
				     constant_n);
	intel_dp_compute_vsc_sdp(intel_dp, pipe_config, conn_state);
	intel_dp_compute_hdr_metadata_infoframe_sdp(intel_dp, pipe_config, conn_state);

	return 0;
}
1761
intel_dp_set_link_params(struct intel_dp * intel_dp,int link_rate,int lane_count)1762 void intel_dp_set_link_params(struct intel_dp *intel_dp,
1763 int link_rate, int lane_count)
1764 {
1765 intel_dp->link_trained = false;
1766 intel_dp->link_rate = link_rate;
1767 intel_dp->lane_count = lane_count;
1768 }
1769
1770 /* Enable backlight PWM and backlight PP control. */
intel_edp_backlight_on(const struct intel_crtc_state * crtc_state,const struct drm_connector_state * conn_state)1771 void intel_edp_backlight_on(const struct intel_crtc_state *crtc_state,
1772 const struct drm_connector_state *conn_state)
1773 {
1774 struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(conn_state->best_encoder));
1775 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1776
1777 if (!intel_dp_is_edp(intel_dp))
1778 return;
1779
1780 drm_dbg_kms(&i915->drm, "\n");
1781
1782 intel_panel_enable_backlight(crtc_state, conn_state);
1783 intel_pps_backlight_on(intel_dp);
1784 }
1785
1786 /* Disable backlight PP control and backlight PWM. */
intel_edp_backlight_off(const struct drm_connector_state * old_conn_state)1787 void intel_edp_backlight_off(const struct drm_connector_state *old_conn_state)
1788 {
1789 struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(old_conn_state->best_encoder));
1790 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1791
1792 if (!intel_dp_is_edp(intel_dp))
1793 return;
1794
1795 drm_dbg_kms(&i915->drm, "\n");
1796
1797 intel_pps_backlight_off(intel_dp);
1798 intel_panel_disable_backlight(old_conn_state);
1799 }
1800
downstream_hpd_needs_d0(struct intel_dp * intel_dp)1801 static bool downstream_hpd_needs_d0(struct intel_dp *intel_dp)
1802 {
1803 /*
1804 * DPCD 1.2+ should support BRANCH_DEVICE_CTRL, and thus
1805 * be capable of signalling downstream hpd with a long pulse.
1806 * Whether or not that means D3 is safe to use is not clear,
1807 * but let's assume so until proven otherwise.
1808 *
1809 * FIXME should really check all downstream ports...
1810 */
1811 return intel_dp->dpcd[DP_DPCD_REV] == 0x11 &&
1812 drm_dp_is_branch(intel_dp->dpcd) &&
1813 intel_dp->downstream_ports[0] & DP_DS_PORT_HPD;
1814 }
1815
/*
 * Tell the sink to enable/disable DSC decompression by writing
 * DP_DSC_ENABLE over AUX.  No-op when this crtc state doesn't use
 * compression; a failed AUX write is only logged.
 */
void intel_dp_sink_set_decompression_state(struct intel_dp *intel_dp,
					   const struct intel_crtc_state *crtc_state,
					   bool enable)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int ret;

	if (!crtc_state->dsc.compression_enable)
		return;

	ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_DSC_ENABLE,
				 enable ? DP_DECOMPRESSION_EN : 0);
	if (ret < 0)
		drm_dbg_kms(&i915->drm,
			    "Failed to %s sink decompression state\n",
			    enabledisable(enable));
}
1833
/*
 * Program the Intel source OUI into the eDP sink so Intel-specific AUX
 * services (e.g. HDR backlight control) become available.
 *
 * @careful: when true (driver init path), skip the write if the sink already
 * reports our OUI, to avoid clearing sink-side state by accident.
 */
static void
intel_edp_init_source_oui(struct intel_dp *intel_dp, bool careful)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 oui[] = { 0x00, 0xaa, 0x01 };
	u8 buf[3] = { 0 };

	/*
	 * During driver init, we want to be careful and avoid changing the source OUI if it's
	 * already set to what we want, so as to avoid clearing any state by accident
	 */
	if (careful) {
		if (drm_dp_dpcd_read(&intel_dp->aux, DP_SOURCE_OUI, buf, sizeof(buf)) < 0)
			drm_err(&i915->drm, "Failed to read source OUI\n");

		/* Already programmed; leave the sink alone. */
		if (memcmp(oui, buf, sizeof(oui)) == 0)
			return;
	}

	if (drm_dp_dpcd_write(&intel_dp->aux, DP_SOURCE_OUI, oui, sizeof(oui)) < 0)
		drm_err(&i915->drm, "Failed to write source OUI\n");
}
1856
/* If the device supports it, try to set the power state appropriately */
void intel_dp_set_power(struct intel_dp *intel_dp, u8 mode)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	int ret, i;

	/* Should have a valid DPCD by this point */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (mode != DP_SET_POWER_D0) {
		/*
		 * Keep the sink in D0 if a downstream port relies on it for
		 * long-pulse HPD signalling (see downstream_hpd_needs_d0()).
		 */
		if (downstream_hpd_needs_d0(intel_dp))
			return;

		ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, mode);
	} else {
		struct intel_lspcon *lspcon = dp_to_lspcon(intel_dp);

		/* Bring any LSPCON adapter back up before touching DPCD. */
		lspcon_resume(dp_to_dig_port(intel_dp));

		/* Write the source OUI as early as possible */
		if (intel_dp_is_edp(intel_dp))
			intel_edp_init_source_oui(intel_dp, false);

		/*
		 * When turning on, we need to retry for 1ms to give the sink
		 * time to wake up.
		 */
		for (i = 0; i < 3; i++) {
			ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, mode);
			if (ret == 1)
				break;
			msleep(1);
		}

		/* The LSPCON may need time to settle back into PCON mode. */
		if (ret == 1 && lspcon->active)
			lspcon_wait_pcon_mode(lspcon);
	}

	/* ret == 1 means the single-byte DPCD write succeeded. */
	if (ret != 1)
		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] Set power to %s failed\n",
			    encoder->base.base.id, encoder->base.name,
			    mode == DP_SET_POWER_D0 ? "D0" : "D3");
}
1902
1903 static bool
1904 intel_dp_get_dpcd(struct intel_dp *intel_dp);
1905
/**
 * intel_dp_sync_state - sync the encoder state during init/resume
 * @encoder: intel encoder to sync
 * @crtc_state: state for the CRTC connected to the encoder
 *
 * Sync any state stored in the encoder wrt. HW state during driver init
 * and system resume.
 */
void intel_dp_sync_state(struct intel_encoder *encoder,
			 const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	/* Nothing to sync when the encoder has no active CRTC. */
	if (!crtc_state)
		return;

	/*
	 * Don't clobber DPCD if it's been already read out during output
	 * setup (eDP) or detect.
	 */
	if (intel_dp->dpcd[DP_DPCD_REV] == 0)
		intel_dp_get_dpcd(intel_dp);

	/* Re-derive the max link parameters from the current DPCD. */
	intel_dp->max_link_lane_count = intel_dp_max_common_lane_count(intel_dp);
	intel_dp->max_link_rate = intel_dp_max_common_rate(intel_dp);
}
1932
/*
 * Check whether the BIOS-programmed state allows a fastset during
 * init/resume. Returns false — after flagging the appropriate uapi state
 * change — whenever a full modeset must be forced instead.
 */
bool intel_dp_initial_fastset_check(struct intel_encoder *encoder,
				    struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	/*
	 * If BIOS has set an unsupported or non-standard link rate for some
	 * reason force an encoder recompute and full modeset.
	 */
	if (intel_dp_rate_index(intel_dp->source_rates, intel_dp->num_source_rates,
				crtc_state->port_clock) < 0) {
		drm_dbg_kms(&i915->drm, "Forcing full modeset due to unsupported link rate\n");
		crtc_state->uapi.connectors_changed = true;
		return false;
	}

	/*
	 * FIXME hack to force full modeset when DSC is being used.
	 *
	 * As long as we do not have full state readout and config comparison
	 * of crtc_state->dsc, we have no way to ensure reliable fastset.
	 * Remove once we have readout for DSC.
	 */
	if (crtc_state->dsc.compression_enable) {
		drm_dbg_kms(&i915->drm, "Forcing full modeset due to DSC being enabled\n");
		crtc_state->uapi.mode_changed = true;
		return false;
	}

	/* PSR state has no readout either; recompute it via a full modeset. */
	if (CAN_PSR(intel_dp)) {
		drm_dbg_kms(&i915->drm, "Forcing full modeset to compute PSR state\n");
		crtc_state->uapi.mode_changed = true;
		return false;
	}

	return true;
}
1971
intel_dp_get_pcon_dsc_cap(struct intel_dp * intel_dp)1972 static void intel_dp_get_pcon_dsc_cap(struct intel_dp *intel_dp)
1973 {
1974 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1975
1976 /* Clear the cached register set to avoid using stale values */
1977
1978 memset(intel_dp->pcon_dsc_dpcd, 0, sizeof(intel_dp->pcon_dsc_dpcd));
1979
1980 if (drm_dp_dpcd_read(&intel_dp->aux, DP_PCON_DSC_ENCODER,
1981 intel_dp->pcon_dsc_dpcd,
1982 sizeof(intel_dp->pcon_dsc_dpcd)) < 0)
1983 drm_err(&i915->drm, "Failed to read DPCD register 0x%x\n",
1984 DP_PCON_DSC_ENCODER);
1985
1986 drm_dbg_kms(&i915->drm, "PCON ENCODER DSC DPCD: %*ph\n",
1987 (int)sizeof(intel_dp->pcon_dsc_dpcd), intel_dp->pcon_dsc_dpcd);
1988 }
1989
/*
 * Translate an FRL bandwidth bit mask (DPCD layout, bit i == bw_gbps[i])
 * into the highest bandwidth it encodes, in Gbps. Returns 0 when no bit
 * in the mask is set.
 */
static int intel_dp_pcon_get_frl_mask(u8 frl_bw_mask)
{
	/* static const: the table is read-only; don't rebuild it on the stack. */
	static const int bw_gbps[] = {9, 18, 24, 32, 40, 48};
	int i;

	/* Scan from the highest bit down so the max trained rate wins. */
	for (i = ARRAY_SIZE(bw_gbps) - 1; i >= 0; i--) {
		if (frl_bw_mask & (1 << i))
			return bw_gbps[i];
	}
	return 0;
}
2001
intel_dp_pcon_set_frl_mask(int max_frl)2002 static int intel_dp_pcon_set_frl_mask(int max_frl)
2003 {
2004 switch (max_frl) {
2005 case 48:
2006 return DP_PCON_FRL_BW_MASK_48GBPS;
2007 case 40:
2008 return DP_PCON_FRL_BW_MASK_40GBPS;
2009 case 32:
2010 return DP_PCON_FRL_BW_MASK_32GBPS;
2011 case 24:
2012 return DP_PCON_FRL_BW_MASK_24GBPS;
2013 case 18:
2014 return DP_PCON_FRL_BW_MASK_18GBPS;
2015 case 9:
2016 return DP_PCON_FRL_BW_MASK_9GBPS;
2017 }
2018
2019 return 0;
2020 }
2021
intel_dp_hdmi_sink_max_frl(struct intel_dp * intel_dp)2022 static int intel_dp_hdmi_sink_max_frl(struct intel_dp *intel_dp)
2023 {
2024 struct intel_connector *intel_connector = intel_dp->attached_connector;
2025 struct drm_connector *connector = &intel_connector->base;
2026 int max_frl_rate;
2027 int max_lanes, rate_per_lane;
2028 int max_dsc_lanes, dsc_rate_per_lane;
2029
2030 max_lanes = connector->display_info.hdmi.max_lanes;
2031 rate_per_lane = connector->display_info.hdmi.max_frl_rate_per_lane;
2032 max_frl_rate = max_lanes * rate_per_lane;
2033
2034 if (connector->display_info.hdmi.dsc_cap.v_1p2) {
2035 max_dsc_lanes = connector->display_info.hdmi.dsc_cap.max_lanes;
2036 dsc_rate_per_lane = connector->display_info.hdmi.dsc_cap.max_frl_rate_per_lane;
2037 if (max_dsc_lanes && dsc_rate_per_lane)
2038 max_frl_rate = min(max_frl_rate, max_dsc_lanes * dsc_rate_per_lane);
2039 }
2040
2041 return max_frl_rate;
2042 }
2043
/*
 * Train the PCON's HDMI FRL link: reset any previous FRL configuration,
 * pick the highest bandwidth supported by both the PCON and the HDMI sink,
 * run the sequential FRL link training, and verify the HDMI link came up in
 * FRL mode. On success the trained rate is cached in intel_dp->frl.
 *
 * Returns 0 on success, or a negative error (-EINVAL, -ETIMEDOUT, or an
 * AUX error code) on failure.
 */
static int intel_dp_pcon_start_frl_training(struct intel_dp *intel_dp)
{
#define TIMEOUT_FRL_READY_MS 500
#define TIMEOUT_HDMI_LINK_ACTIVE_MS 1000

	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int max_frl_bw, max_pcon_frl_bw, max_edid_frl_bw, ret;
	u8 max_frl_bw_mask = 0, frl_trained_mask;
	bool is_active;

	/* Start from a clean FRL configuration on the PCON. */
	ret = drm_dp_pcon_reset_frl_config(&intel_dp->aux);
	if (ret < 0)
		return ret;

	max_pcon_frl_bw = intel_dp->dfp.pcon_max_frl_bw;
	drm_dbg(&i915->drm, "PCON max rate = %d Gbps\n", max_pcon_frl_bw);

	max_edid_frl_bw = intel_dp_hdmi_sink_max_frl(intel_dp);
	drm_dbg(&i915->drm, "Sink max rate from EDID = %d Gbps\n", max_edid_frl_bw);

	/* Train at the highest rate both sides support. */
	max_frl_bw = min(max_edid_frl_bw, max_pcon_frl_bw);

	if (max_frl_bw <= 0)
		return -EINVAL;

	ret = drm_dp_pcon_frl_prepare(&intel_dp->aux, false);
	if (ret < 0)
		return ret;
	/* Wait for PCON to be FRL Ready */
	wait_for(is_active = drm_dp_pcon_is_frl_ready(&intel_dp->aux) == true, TIMEOUT_FRL_READY_MS);

	if (!is_active)
		return -ETIMEDOUT;

	max_frl_bw_mask = intel_dp_pcon_set_frl_mask(max_frl_bw);
	ret = drm_dp_pcon_frl_configure_1(&intel_dp->aux, max_frl_bw,
					  DP_PCON_ENABLE_SEQUENTIAL_LINK);
	if (ret < 0)
		return ret;
	ret = drm_dp_pcon_frl_configure_2(&intel_dp->aux, max_frl_bw_mask,
					  DP_PCON_FRL_LINK_TRAIN_NORMAL);
	if (ret < 0)
		return ret;
	ret = drm_dp_pcon_frl_enable(&intel_dp->aux);
	if (ret < 0)
		return ret;
	/*
	 * Wait for FRL to be completed
	 * Check if the HDMI Link is up and active.
	 */
	wait_for(is_active = drm_dp_pcon_hdmi_link_active(&intel_dp->aux) == true, TIMEOUT_HDMI_LINK_ACTIVE_MS);

	if (!is_active)
		return -ETIMEDOUT;

	/* Verify HDMI Link configuration shows FRL Mode */
	if (drm_dp_pcon_hdmi_link_mode(&intel_dp->aux, &frl_trained_mask) !=
	    DP_PCON_HDMI_MODE_FRL) {
		drm_dbg(&i915->drm, "HDMI couldn't be trained in FRL Mode\n");
		return -EINVAL;
	}
	drm_dbg(&i915->drm, "MAX_FRL_MASK = %u, FRL_TRAINED_MASK = %u\n", max_frl_bw_mask, frl_trained_mask);

	/* Cache the trained rate so we don't retrain needlessly. */
	intel_dp->frl.trained_rate_gbps = intel_dp_pcon_get_frl_mask(frl_trained_mask);
	intel_dp->frl.is_trained = true;
	drm_dbg(&i915->drm, "FRL trained with : %d Gbps\n", intel_dp->frl.trained_rate_gbps);

	return 0;
}
2113
intel_dp_is_hdmi_2_1_sink(struct intel_dp * intel_dp)2114 static bool intel_dp_is_hdmi_2_1_sink(struct intel_dp *intel_dp)
2115 {
2116 if (drm_dp_is_branch(intel_dp->dpcd) &&
2117 intel_dp->has_hdmi_sink &&
2118 intel_dp_hdmi_sink_max_frl(intel_dp) > 0)
2119 return true;
2120
2121 return false;
2122 }
2123
/*
 * Attempt PCON FRL training when the PCON supports source-control mode and
 * the downstream sink is HDMI 2.1; on failure, fall back to TMDS mode.
 */
void intel_dp_check_frl_training(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/*
	 * Always go for FRL training if:
	 * -PCON supports SRC_CTL_MODE (VESA DP2.0-HDMI2.1 PCON Spec Draft-1 Sec-7)
	 * -sink is HDMI2.1
	 */
	if (!(intel_dp->downstream_ports[2] & DP_PCON_SOURCE_CTL_MODE) ||
	    !intel_dp_is_hdmi_2_1_sink(intel_dp) ||
	    intel_dp->frl.is_trained)
		return;

	if (intel_dp_pcon_start_frl_training(intel_dp) < 0) {
		int ret, mode;

		drm_dbg(&dev_priv->drm, "Couldn't set FRL mode, continuing with TMDS mode\n");
		/* Reset FRL config and confirm the PCON fell back to TMDS. */
		ret = drm_dp_pcon_reset_frl_config(&intel_dp->aux);
		mode = drm_dp_pcon_hdmi_link_mode(&intel_dp->aux, NULL);

		if (ret < 0 || mode != DP_PCON_HDMI_MODE_TMDS)
			drm_dbg(&dev_priv->drm, "Issue with PCON, cannot set TMDS mode\n");
	} else {
		drm_dbg(&dev_priv->drm, "FRL training Completed\n");
	}
}
2151
2152 static int
intel_dp_pcon_dsc_enc_slice_height(const struct intel_crtc_state * crtc_state)2153 intel_dp_pcon_dsc_enc_slice_height(const struct intel_crtc_state *crtc_state)
2154 {
2155 int vactive = crtc_state->hw.adjusted_mode.vdisplay;
2156
2157 return intel_hdmi_dsc_get_slice_height(vactive);
2158 }
2159
2160 static int
intel_dp_pcon_dsc_enc_slices(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)2161 intel_dp_pcon_dsc_enc_slices(struct intel_dp *intel_dp,
2162 const struct intel_crtc_state *crtc_state)
2163 {
2164 struct intel_connector *intel_connector = intel_dp->attached_connector;
2165 struct drm_connector *connector = &intel_connector->base;
2166 int hdmi_throughput = connector->display_info.hdmi.dsc_cap.clk_per_slice;
2167 int hdmi_max_slices = connector->display_info.hdmi.dsc_cap.max_slices;
2168 int pcon_max_slices = drm_dp_pcon_dsc_max_slices(intel_dp->pcon_dsc_dpcd);
2169 int pcon_max_slice_width = drm_dp_pcon_dsc_max_slice_width(intel_dp->pcon_dsc_dpcd);
2170
2171 return intel_hdmi_dsc_get_num_slices(crtc_state, pcon_max_slices,
2172 pcon_max_slice_width,
2173 hdmi_max_slices, hdmi_throughput);
2174 }
2175
2176 static int
intel_dp_pcon_dsc_enc_bpp(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state,int num_slices,int slice_width)2177 intel_dp_pcon_dsc_enc_bpp(struct intel_dp *intel_dp,
2178 const struct intel_crtc_state *crtc_state,
2179 int num_slices, int slice_width)
2180 {
2181 struct intel_connector *intel_connector = intel_dp->attached_connector;
2182 struct drm_connector *connector = &intel_connector->base;
2183 int output_format = crtc_state->output_format;
2184 bool hdmi_all_bpp = connector->display_info.hdmi.dsc_cap.all_bpp;
2185 int pcon_fractional_bpp = drm_dp_pcon_dsc_bpp_incr(intel_dp->pcon_dsc_dpcd);
2186 int hdmi_max_chunk_bytes =
2187 connector->display_info.hdmi.dsc_cap.total_chunk_kbytes * 1024;
2188
2189 return intel_hdmi_dsc_get_bpp(pcon_fractional_bpp, slice_width,
2190 num_slices, output_format, hdmi_all_bpp,
2191 hdmi_max_chunk_bytes);
2192 }
2193
/*
 * Configure the PCON's DSC encoder by overriding its PPS parameters
 * (slice height, slice width, bpp) for the current mode. Bails out
 * silently when the sink is not HDMI 2.1, when either side lacks DSC 1.2
 * support, or when any derived parameter comes out zero.
 */
void
intel_dp_pcon_dsc_configure(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *crtc_state)
{
	u8 pps_param[6];
	int slice_height;
	int slice_width;
	int num_slices;
	int bits_per_pixel;
	int ret;
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct drm_connector *connector;
	bool hdmi_is_dsc_1_2;

	if (!intel_dp_is_hdmi_2_1_sink(intel_dp))
		return;

	if (!intel_connector)
		return;
	connector = &intel_connector->base;
	hdmi_is_dsc_1_2 = connector->display_info.hdmi.dsc_cap.v_1p2;

	/* Both the PCON encoder and the HDMI sink must support DSC 1.2. */
	if (!drm_dp_pcon_enc_is_dsc_1_2(intel_dp->pcon_dsc_dpcd) ||
	    !hdmi_is_dsc_1_2)
		return;

	slice_height = intel_dp_pcon_dsc_enc_slice_height(crtc_state);
	if (!slice_height)
		return;

	num_slices = intel_dp_pcon_dsc_enc_slices(intel_dp, crtc_state);
	if (!num_slices)
		return;

	/* Round up so num_slices * slice_width covers the full active width. */
	slice_width = DIV_ROUND_UP(crtc_state->hw.adjusted_mode.hdisplay,
				   num_slices);

	bits_per_pixel = intel_dp_pcon_dsc_enc_bpp(intel_dp, crtc_state,
						   num_slices, slice_width);
	if (!bits_per_pixel)
		return;

	/* Pack the PPS override: 16-bit height/width LE, 10-bit bpp. */
	pps_param[0] = slice_height & 0xFF;
	pps_param[1] = slice_height >> 8;
	pps_param[2] = slice_width & 0xFF;
	pps_param[3] = slice_width >> 8;
	pps_param[4] = bits_per_pixel & 0xFF;
	pps_param[5] = (bits_per_pixel >> 8) & 0x3;

	ret = drm_dp_pcon_pps_override_param(&intel_dp->aux, pps_param);
	if (ret < 0)
		drm_dbg_kms(&i915->drm, "Failed to set pcon DSC\n");
}
2248
/*
 * Program a DP->HDMI/DVI protocol converter (branch device): select
 * HDMI vs DVI output mode, enable 4:4:4->4:2:0 conversion when the DFP
 * does it for us, and pick an RGB->YCbCr conversion colorspace matching
 * the VSC colorimetry. Requires DPCD 1.3+ and a branch device.
 */
void intel_dp_configure_protocol_converter(struct intel_dp *intel_dp,
					   const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 tmp;

	/* PROTOCOL_CONVERTER_CONTROL_* registers exist only on DPCD 1.3+ */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x13)
		return;

	if (!drm_dp_is_branch(intel_dp->dpcd))
		return;

	/* HDMI mode when the sink is HDMI, DVI mode otherwise. */
	tmp = intel_dp->has_hdmi_sink ?
		DP_HDMI_DVI_OUTPUT_CONFIG : 0;

	if (drm_dp_dpcd_writeb(&intel_dp->aux,
			       DP_PROTOCOL_CONVERTER_CONTROL_0, tmp) != 1)
		drm_dbg_kms(&i915->drm, "Failed to %s protocol converter HDMI mode\n",
			    enabledisable(intel_dp->has_hdmi_sink));

	/* Let the DFP convert 4:4:4 to 4:2:0 when we output 4:4:4 to it. */
	tmp = crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444 &&
		intel_dp->dfp.ycbcr_444_to_420 ? DP_CONVERSION_TO_YCBCR420_ENABLE : 0;

	if (drm_dp_dpcd_writeb(&intel_dp->aux,
			       DP_PROTOCOL_CONVERTER_CONTROL_1, tmp) != 1)
		drm_dbg_kms(&i915->drm,
			    "Failed to %s protocol converter YCbCr 4:2:0 conversion mode\n",
			    enabledisable(intel_dp->dfp.ycbcr_444_to_420));

	tmp = 0;
	if (intel_dp->dfp.rgb_to_ycbcr) {
		bool bt2020, bt709;

		/*
		 * FIXME: Currently if userspace selects BT2020 or BT709, but PCON supports only
		 * RGB->YCbCr for BT601 colorspace, we go ahead with BT601, as default.
		 *
		 */
		tmp = DP_CONVERSION_BT601_RGB_YCBCR_ENABLE;

		bt2020 = drm_dp_downstream_rgb_to_ycbcr_conversion(intel_dp->dpcd,
								   intel_dp->downstream_ports,
								   DP_DS_HDMI_BT2020_RGB_YCBCR_CONV);
		bt709 = drm_dp_downstream_rgb_to_ycbcr_conversion(intel_dp->dpcd,
								  intel_dp->downstream_ports,
								  DP_DS_HDMI_BT709_RGB_YCBCR_CONV);
		/* Upgrade from the BT601 default if the DFP supports it. */
		switch (crtc_state->infoframes.vsc.colorimetry) {
		case DP_COLORIMETRY_BT2020_RGB:
		case DP_COLORIMETRY_BT2020_YCC:
			if (bt2020)
				tmp = DP_CONVERSION_BT2020_RGB_YCBCR_ENABLE;
			break;
		case DP_COLORIMETRY_BT709_YCC:
		case DP_COLORIMETRY_XVYCC_709:
			if (bt709)
				tmp = DP_CONVERSION_BT709_RGB_YCBCR_ENABLE;
			break;
		default:
			break;
		}
	}

	if (drm_dp_pcon_convert_rgb_to_ycbcr(&intel_dp->aux, tmp) < 0)
		drm_dbg_kms(&i915->drm,
			    "Failed to %s protocol converter RGB->YCbCr conversion mode\n",
			    enabledisable(tmp));
}
2316
2317
intel_dp_get_colorimetry_status(struct intel_dp * intel_dp)2318 bool intel_dp_get_colorimetry_status(struct intel_dp *intel_dp)
2319 {
2320 u8 dprx = 0;
2321
2322 if (drm_dp_dpcd_readb(&intel_dp->aux, DP_DPRX_FEATURE_ENUMERATION_LIST,
2323 &dprx) != 1)
2324 return false;
2325 return dprx & DP_VSC_SDP_EXT_FOR_COLORIMETRY_SUPPORTED;
2326 }
2327
/*
 * Read and cache the sink's DSC capability registers, and its FEC
 * capability (DP only), clearing any stale cached values first.
 */
static void intel_dp_get_dsc_sink_cap(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	/*
	 * Clear the cached register set to avoid using stale values
	 * for the sinks that do not support DSC.
	 */
	memset(intel_dp->dsc_dpcd, 0, sizeof(intel_dp->dsc_dpcd));

	/* Clear fec_capable to avoid using stale values */
	intel_dp->fec_capable = 0;

	/* Cache the DSC DPCD if eDP or DP rev >= 1.4 */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x14 ||
	    intel_dp->edp_dpcd[0] >= DP_EDP_14) {
		if (drm_dp_dpcd_read(&intel_dp->aux, DP_DSC_SUPPORT,
				     intel_dp->dsc_dpcd,
				     sizeof(intel_dp->dsc_dpcd)) < 0)
			drm_err(&i915->drm,
				"Failed to read DPCD register 0x%x\n",
				DP_DSC_SUPPORT);

		drm_dbg_kms(&i915->drm, "DSC DPCD: %*ph\n",
			    (int)sizeof(intel_dp->dsc_dpcd),
			    intel_dp->dsc_dpcd);

		/* FEC is supported only on DP 1.4 */
		if (!intel_dp_is_edp(intel_dp) &&
		    drm_dp_dpcd_readb(&intel_dp->aux, DP_FEC_CAPABILITY,
				      &intel_dp->fec_capable) < 0)
			drm_err(&i915->drm,
				"Failed to read FEC DPCD register\n");

		drm_dbg_kms(&i915->drm, "FEC CAPABILITY: %x\n",
			    intel_dp->fec_capable);
	}
}
2366
intel_edp_mso_mode_fixup(struct intel_connector * connector,struct drm_display_mode * mode)2367 static void intel_edp_mso_mode_fixup(struct intel_connector *connector,
2368 struct drm_display_mode *mode)
2369 {
2370 struct intel_dp *intel_dp = intel_attached_dp(connector);
2371 struct drm_i915_private *i915 = to_i915(connector->base.dev);
2372 int n = intel_dp->mso_link_count;
2373 int overlap = intel_dp->mso_pixel_overlap;
2374
2375 if (!mode || !n)
2376 return;
2377
2378 mode->hdisplay = (mode->hdisplay - overlap) * n;
2379 mode->hsync_start = (mode->hsync_start - overlap) * n;
2380 mode->hsync_end = (mode->hsync_end - overlap) * n;
2381 mode->htotal = (mode->htotal - overlap) * n;
2382 mode->clock *= n;
2383
2384 drm_mode_set_name(mode);
2385
2386 drm_dbg_kms(&i915->drm,
2387 "[CONNECTOR:%d:%s] using generated MSO mode: ",
2388 connector->base.base.id, connector->base.name);
2389 drm_mode_debug_printmodeline(mode);
2390 }
2391
intel_edp_mso_init(struct intel_dp * intel_dp)2392 static void intel_edp_mso_init(struct intel_dp *intel_dp)
2393 {
2394 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2395 u8 mso;
2396
2397 if (intel_dp->edp_dpcd[0] < DP_EDP_14)
2398 return;
2399
2400 if (drm_dp_dpcd_readb(&intel_dp->aux, DP_EDP_MSO_LINK_CAPABILITIES, &mso) != 1) {
2401 drm_err(&i915->drm, "Failed to read MSO cap\n");
2402 return;
2403 }
2404
2405 /* Valid configurations are SST or MSO 2x1, 2x2, 4x1 */
2406 mso &= DP_EDP_MSO_NUMBER_OF_LINKS_MASK;
2407 if (mso % 2 || mso > drm_dp_max_lane_count(intel_dp->dpcd)) {
2408 drm_err(&i915->drm, "Invalid MSO link count cap %u\n", mso);
2409 mso = 0;
2410 }
2411
2412 if (mso) {
2413 drm_dbg_kms(&i915->drm, "Sink MSO %ux%u configuration\n",
2414 mso, drm_dp_max_lane_count(intel_dp->dpcd) / mso);
2415 if (!HAS_MSO(i915)) {
2416 drm_err(&i915->drm, "No source MSO support, disabling\n");
2417 mso = 0;
2418 }
2419 }
2420
2421 intel_dp->mso_link_count = mso;
2422 intel_dp->mso_pixel_overlap = 0; /* FIXME: read from DisplayID v2.0 */
2423 }
2424
/*
 * One-time DPCD initialization for eDP panels: read the base caps, the
 * eDP display-control registers, PSR caps, the eDP 1.4+ link-rate table,
 * DSC caps, the source OUI, and MSO configuration.
 *
 * Returns false only when the base DPCD read fails.
 */
static bool
intel_edp_init_dpcd(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv =
		to_i915(dp_to_dig_port(intel_dp)->base.base.dev);

	/* this function is meant to be called only once */
	drm_WARN_ON(&dev_priv->drm, intel_dp->dpcd[DP_DPCD_REV] != 0);

	if (drm_dp_read_dpcd_caps(&intel_dp->aux, intel_dp->dpcd) != 0)
		return false;

	drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc,
			 drm_dp_is_branch(intel_dp->dpcd));

	/*
	 * Read the eDP display control registers.
	 *
	 * Do this independent of DP_DPCD_DISPLAY_CONTROL_CAPABLE bit in
	 * DP_EDP_CONFIGURATION_CAP, because some buggy displays do not have it
	 * set, but require eDP 1.4+ detection (e.g. for supported link rates
	 * method). The display control registers should read zero if they're
	 * not supported anyway.
	 */
	if (drm_dp_dpcd_read(&intel_dp->aux, DP_EDP_DPCD_REV,
			     intel_dp->edp_dpcd, sizeof(intel_dp->edp_dpcd)) ==
	    sizeof(intel_dp->edp_dpcd)) {
		drm_dbg_kms(&dev_priv->drm, "eDP DPCD: %*ph\n",
			    (int)sizeof(intel_dp->edp_dpcd),
			    intel_dp->edp_dpcd);

		/* Pre-eDP-1.4 panels train only at max params. */
		intel_dp->use_max_params = intel_dp->edp_dpcd[0] < DP_EDP_14;
	}

	/*
	 * This has to be called after intel_dp->edp_dpcd is filled, PSR checks
	 * for SET_POWER_CAPABLE bit in intel_dp->edp_dpcd[1]
	 */
	intel_psr_init_dpcd(intel_dp);

	/* Read the eDP 1.4+ supported link rates. */
	if (intel_dp->edp_dpcd[0] >= DP_EDP_14) {
		__le16 sink_rates[DP_MAX_SUPPORTED_RATES];
		int i;

		drm_dp_dpcd_read(&intel_dp->aux, DP_SUPPORTED_LINK_RATES,
				 sink_rates, sizeof(sink_rates));

		/* The table is zero-terminated; stop at the first empty slot. */
		for (i = 0; i < ARRAY_SIZE(sink_rates); i++) {
			int val = le16_to_cpu(sink_rates[i]);

			if (val == 0)
				break;

			/* Value read multiplied by 200kHz gives the per-lane
			 * link rate in kHz. The source rates are, however,
			 * stored in terms of LS_Clk kHz. The full conversion
			 * back to symbols is
			 * (val * 200kHz)*(8/10 ch. encoding)*(1/8 bit to Byte)
			 */
			intel_dp->sink_rates[i] = (val * 200) / 10;
		}
		intel_dp->num_sink_rates = i;
	}

	/*
	 * Use DP_LINK_RATE_SET if DP_SUPPORTED_LINK_RATES are available,
	 * default to DP_MAX_LINK_RATE and DP_LINK_BW_SET otherwise.
	 */
	if (intel_dp->num_sink_rates)
		intel_dp->use_rate_select = true;
	else
		intel_dp_set_sink_rates(intel_dp);

	intel_dp_set_common_rates(intel_dp);

	/* Read the eDP DSC DPCD registers */
	if (DISPLAY_VER(dev_priv) >= 10)
		intel_dp_get_dsc_sink_cap(intel_dp);

	/*
	 * If needed, program our source OUI so we can make various Intel-specific AUX services
	 * available (such as HDR backlight controls)
	 */
	intel_edp_init_source_oui(intel_dp, true);

	intel_edp_mso_init(intel_dp);

	return true;
}
2515
2516 static bool
intel_dp_has_sink_count(struct intel_dp * intel_dp)2517 intel_dp_has_sink_count(struct intel_dp *intel_dp)
2518 {
2519 if (!intel_dp->attached_connector)
2520 return false;
2521
2522 return drm_dp_read_sink_count_cap(&intel_dp->attached_connector->base,
2523 intel_dp->dpcd,
2524 &intel_dp->desc);
2525 }
2526
2527 static bool
intel_dp_get_dpcd(struct intel_dp * intel_dp)2528 intel_dp_get_dpcd(struct intel_dp *intel_dp)
2529 {
2530 int ret;
2531
2532 if (intel_dp_init_lttpr_and_dprx_caps(intel_dp) < 0)
2533 return false;
2534
2535 /*
2536 * Don't clobber cached eDP rates. Also skip re-reading
2537 * the OUI/ID since we know it won't change.
2538 */
2539 if (!intel_dp_is_edp(intel_dp)) {
2540 drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc,
2541 drm_dp_is_branch(intel_dp->dpcd));
2542
2543 intel_dp_set_sink_rates(intel_dp);
2544 intel_dp_set_common_rates(intel_dp);
2545 }
2546
2547 if (intel_dp_has_sink_count(intel_dp)) {
2548 ret = drm_dp_read_sink_count(&intel_dp->aux);
2549 if (ret < 0)
2550 return false;
2551
2552 /*
2553 * Sink count can change between short pulse hpd hence
2554 * a member variable in intel_dp will track any changes
2555 * between short pulse interrupts.
2556 */
2557 intel_dp->sink_count = ret;
2558
2559 /*
2560 * SINK_COUNT == 0 and DOWNSTREAM_PORT_PRESENT == 1 implies that
2561 * a dongle is present but no display. Unless we require to know
2562 * if a dongle is present or not, we don't need to update
2563 * downstream port information. So, an early return here saves
2564 * time from performing other operations which are not required.
2565 */
2566 if (!intel_dp->sink_count)
2567 return false;
2568 }
2569
2570 return drm_dp_read_downstream_info(&intel_dp->aux, intel_dp->dpcd,
2571 intel_dp->downstream_ports) == 0;
2572 }
2573
2574 static bool
intel_dp_can_mst(struct intel_dp * intel_dp)2575 intel_dp_can_mst(struct intel_dp *intel_dp)
2576 {
2577 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2578
2579 return i915->params.enable_dp_mst &&
2580 intel_dp->can_mst &&
2581 drm_dp_read_mst_cap(&intel_dp->aux, intel_dp->dpcd);
2582 }
2583
/*
 * Decide whether to run this port in MST mode (source capable, sink
 * capable, and module parameter enabled) and program the topology
 * manager accordingly.
 */
static void
intel_dp_configure_mst(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct intel_encoder *encoder =
		&dp_to_dig_port(intel_dp)->base;
	bool sink_can_mst = drm_dp_read_mst_cap(&intel_dp->aux, intel_dp->dpcd);

	drm_dbg_kms(&i915->drm,
		    "[ENCODER:%d:%s] MST support: port: %s, sink: %s, modparam: %s\n",
		    encoder->base.base.id, encoder->base.name,
		    yesno(intel_dp->can_mst), yesno(sink_can_mst),
		    yesno(i915->params.enable_dp_mst));

	/* Source (port) doesn't support MST: nothing to configure. */
	if (!intel_dp->can_mst)
		return;

	intel_dp->is_mst = sink_can_mst &&
		i915->params.enable_dp_mst;

	drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
					intel_dp->is_mst);
}
2607
2608 static bool
intel_dp_get_sink_irq_esi(struct intel_dp * intel_dp,u8 * sink_irq_vector)2609 intel_dp_get_sink_irq_esi(struct intel_dp *intel_dp, u8 *sink_irq_vector)
2610 {
2611 return drm_dp_dpcd_read(&intel_dp->aux, DP_SINK_COUNT_ESI,
2612 sink_irq_vector, DP_DPRX_ESI_LEN) ==
2613 DP_DPRX_ESI_LEN;
2614 }
2615
2616 bool
intel_dp_needs_vsc_sdp(const struct intel_crtc_state * crtc_state,const struct drm_connector_state * conn_state)2617 intel_dp_needs_vsc_sdp(const struct intel_crtc_state *crtc_state,
2618 const struct drm_connector_state *conn_state)
2619 {
2620 /*
2621 * As per DP 1.4a spec section 2.2.4.3 [MSA Field for Indication
2622 * of Color Encoding Format and Content Color Gamut], in order to
2623 * sending YCBCR 420 or HDR BT.2020 signals we should use DP VSC SDP.
2624 */
2625 if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
2626 return true;
2627
2628 switch (conn_state->colorspace) {
2629 case DRM_MODE_COLORIMETRY_SYCC_601:
2630 case DRM_MODE_COLORIMETRY_OPYCC_601:
2631 case DRM_MODE_COLORIMETRY_BT2020_YCC:
2632 case DRM_MODE_COLORIMETRY_BT2020_RGB:
2633 case DRM_MODE_COLORIMETRY_BT2020_CYCC:
2634 return true;
2635 default:
2636 break;
2637 }
2638
2639 return false;
2640 }
2641
/*
 * Pack a drm_dp_vsc_sdp into the wire-format dp_sdp buffer.
 *
 * Returns the SDP length on success, -ENOSPC if @size is too small.
 * Only VSC revision 0x5 carries the pixel encoding / colorimetry payload
 * (DB16-DB18); other revisions get just the header.
 */
static ssize_t intel_dp_vsc_sdp_pack(const struct drm_dp_vsc_sdp *vsc,
				     struct dp_sdp *sdp, size_t size)
{
	size_t length = sizeof(struct dp_sdp);

	if (size < length)
		return -ENOSPC;

	memset(sdp, 0, size);

	/*
	 * Prepare VSC Header for SU as per DP 1.4a spec, Table 2-119
	 * VSC SDP Header Bytes
	 */
	sdp->sdp_header.HB0 = 0; /* Secondary-Data Packet ID = 0 */
	sdp->sdp_header.HB1 = vsc->sdp_type; /* Secondary-data Packet Type */
	sdp->sdp_header.HB2 = vsc->revision; /* Revision Number */
	sdp->sdp_header.HB3 = vsc->length; /* Number of Valid Data Bytes */

	/*
	 * Only revision 0x5 supports Pixel Encoding/Colorimetry Format as
	 * per DP 1.4a spec.
	 */
	if (vsc->revision != 0x5)
		goto out;

	/* VSC SDP Payload for DB16 through DB18 */
	/* Pixel Encoding and Colorimetry Formats */
	sdp->db[16] = (vsc->pixelformat & 0xf) << 4; /* DB16[7:4] */
	sdp->db[16] |= vsc->colorimetry & 0xf; /* DB16[3:0] */

	/* Component bit depth encoding per DP 1.4a Table 2-120, DB17[3:0]. */
	switch (vsc->bpc) {
	case 6:
		/* 6bpc: 0x0 */
		break;
	case 8:
		sdp->db[17] = 0x1; /* DB17[3:0] */
		break;
	case 10:
		sdp->db[17] = 0x2;
		break;
	case 12:
		sdp->db[17] = 0x3;
		break;
	case 16:
		sdp->db[17] = 0x4;
		break;
	default:
		MISSING_CASE(vsc->bpc);
		break;
	}
	/* Dynamic Range and Component Bit Depth */
	if (vsc->dynamic_range == DP_DYNAMIC_RANGE_CTA)
		sdp->db[17] |= 0x80; /* DB17[7] */

	/* Content Type */
	sdp->db[18] = vsc->content_type & 0x7;

out:
	return length;
}
2703
/*
 * Pack an HDMI DRM (HDR static metadata) infoframe into a DP SDP buffer.
 *
 * Returns the number of payload bytes to write (header + 2 CTA header
 * bytes + HDMI_DRM_INFOFRAME_SIZE), or -ENOSPC when @size is too small
 * or the packed infoframe has an unexpected length.
 */
static ssize_t
intel_dp_hdr_metadata_infoframe_sdp_pack(const struct hdmi_drm_infoframe *drm_infoframe,
					 struct dp_sdp *sdp,
					 size_t size)
{
	size_t length = sizeof(struct dp_sdp);
	const int infoframe_size = HDMI_INFOFRAME_HEADER_SIZE + HDMI_DRM_INFOFRAME_SIZE;
	unsigned char buf[HDMI_INFOFRAME_HEADER_SIZE + HDMI_DRM_INFOFRAME_SIZE];
	ssize_t len;

	if (size < length)
		return -ENOSPC;

	memset(sdp, 0, size);

	/* Pack the infoframe into CTA wire format first. */
	len = hdmi_drm_infoframe_pack_only(drm_infoframe, buf, sizeof(buf));
	if (len < 0) {
		DRM_DEBUG_KMS("buffer size is smaller than hdr metadata infoframe\n");
		return -ENOSPC;
	}

	if (len != infoframe_size) {
		DRM_DEBUG_KMS("wrong static hdr metadata size\n");
		return -ENOSPC;
	}

	/*
	 * Set up the infoframe sdp packet for HDR static metadata.
	 * Prepare VSC Header for SU as per DP 1.4a spec,
	 * Table 2-100 and Table 2-101
	 */

	/* Secondary-Data Packet ID, 00h for non-Audio INFOFRAME */
	sdp->sdp_header.HB0 = 0;
	/*
	 * Packet Type 80h + Non-audio INFOFRAME Type value
	 * HDMI_INFOFRAME_TYPE_DRM: 0x87
	 * - 80h + Non-audio INFOFRAME Type value
	 * - InfoFrame Type: 0x07
	 * [CTA-861-G Table-42 Dynamic Range and Mastering InfoFrame]
	 */
	sdp->sdp_header.HB1 = drm_infoframe->type;
	/*
	 * Least Significant Eight Bits of (Data Byte Count - 1)
	 * infoframe_size - 1 = 30 - 1 = 29 = 0x1D
	 */
	sdp->sdp_header.HB2 = 0x1D;
	/* INFOFRAME SDP Version Number */
	sdp->sdp_header.HB3 = (0x13 << 2);
	/* CTA Header Byte 2 (INFOFRAME Version Number) */
	sdp->db[0] = drm_infoframe->version;
	/* CTA Header Byte 3 (Length of INFOFRAME): HDMI_DRM_INFOFRAME_SIZE */
	sdp->db[1] = drm_infoframe->length;
	/*
	 * Copy HDMI_DRM_INFOFRAME_SIZE size from a buffer after
	 * HDMI_INFOFRAME_HEADER_SIZE
	 */
	BUILD_BUG_ON(sizeof(sdp->db) < HDMI_DRM_INFOFRAME_SIZE + 2);
	memcpy(&sdp->db[2], &buf[HDMI_INFOFRAME_HEADER_SIZE],
	       HDMI_DRM_INFOFRAME_SIZE);

	/*
	 * Size of DP infoframe sdp packet for HDR static metadata consists of
	 * - DP SDP Header(struct dp_sdp_header): 4 bytes
	 * - Two Data Blocks: 2 bytes
	 *    CTA Header Byte2 (INFOFRAME Version Number)
	 *    CTA Header Byte3 (Length of INFOFRAME)
	 * - HDMI_DRM_INFOFRAME_SIZE: 26 bytes
	 *
	 * Prior to GEN11's GMP register size is identical to DP HDR static metadata
	 * infoframe size. But GEN11+ has larger than that size, write_infoframe
	 * will pad rest of the size.
	 */
	return sizeof(struct dp_sdp_header) + 2 + HDMI_DRM_INFOFRAME_SIZE;
}
2779
intel_write_dp_sdp(struct intel_encoder * encoder,const struct intel_crtc_state * crtc_state,unsigned int type)2780 static void intel_write_dp_sdp(struct intel_encoder *encoder,
2781 const struct intel_crtc_state *crtc_state,
2782 unsigned int type)
2783 {
2784 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
2785 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
2786 struct dp_sdp sdp = {};
2787 ssize_t len;
2788
2789 if ((crtc_state->infoframes.enable &
2790 intel_hdmi_infoframe_enable(type)) == 0)
2791 return;
2792
2793 switch (type) {
2794 case DP_SDP_VSC:
2795 len = intel_dp_vsc_sdp_pack(&crtc_state->infoframes.vsc, &sdp,
2796 sizeof(sdp));
2797 break;
2798 case HDMI_PACKET_TYPE_GAMUT_METADATA:
2799 len = intel_dp_hdr_metadata_infoframe_sdp_pack(&crtc_state->infoframes.drm.drm,
2800 &sdp, sizeof(sdp));
2801 break;
2802 default:
2803 MISSING_CASE(type);
2804 return;
2805 }
2806
2807 if (drm_WARN_ON(&dev_priv->drm, len < 0))
2808 return;
2809
2810 dig_port->write_infoframe(encoder, crtc_state, type, &sdp, len);
2811 }
2812
intel_write_dp_vsc_sdp(struct intel_encoder * encoder,const struct intel_crtc_state * crtc_state,struct drm_dp_vsc_sdp * vsc)2813 void intel_write_dp_vsc_sdp(struct intel_encoder *encoder,
2814 const struct intel_crtc_state *crtc_state,
2815 struct drm_dp_vsc_sdp *vsc)
2816 {
2817 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
2818 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
2819 struct dp_sdp sdp = {};
2820 ssize_t len;
2821
2822 len = intel_dp_vsc_sdp_pack(vsc, &sdp, sizeof(sdp));
2823
2824 if (drm_WARN_ON(&dev_priv->drm, len < 0))
2825 return;
2826
2827 dig_port->write_infoframe(encoder, crtc_state, DP_SDP_VSC,
2828 &sdp, len);
2829 }
2830
/*
 * Disable all DIP/SDP transmission on this transcoder, then (when @enable)
 * re-emit the VSC SDP and HDR gamut metadata SDP from @crtc_state.
 * The VSC DIP is left alone while PSR is active, since the PSR code owns it.
 */
void intel_dp_set_infoframes(struct intel_encoder *encoder,
			     bool enable,
			     const struct intel_crtc_state *crtc_state,
			     const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	i915_reg_t reg = HSW_TVIDEO_DIP_CTL(crtc_state->cpu_transcoder);
	/* Every DIP enable bit this routine manages, minus VSC (see below). */
	u32 dip_enable = VIDEO_DIP_ENABLE_AVI_HSW | VIDEO_DIP_ENABLE_GCP_HSW |
			 VIDEO_DIP_ENABLE_VS_HSW | VIDEO_DIP_ENABLE_GMP_HSW |
			 VIDEO_DIP_ENABLE_SPD_HSW | VIDEO_DIP_ENABLE_DRM_GLK;
	u32 val = intel_de_read(dev_priv, reg) & ~dip_enable;

	/* TODO: Add DSC case (DIP_ENABLE_PPS) */
	/* When PSR is enabled, this routine doesn't disable VSC DIP */
	if (!crtc_state->has_psr)
		val &= ~VIDEO_DIP_ENABLE_VSC_HSW;

	intel_de_write(dev_priv, reg, val);
	/* Posting read to make sure the disable lands before re-enabling. */
	intel_de_posting_read(dev_priv, reg);

	if (!enable)
		return;

	/* When PSR is enabled, VSC SDP is handled by PSR routine */
	if (!crtc_state->has_psr)
		intel_write_dp_sdp(encoder, crtc_state, DP_SDP_VSC);

	intel_write_dp_sdp(encoder, crtc_state, HDMI_PACKET_TYPE_GAMUT_METADATA);
}
2860
/*
 * Parse a raw DP SDP read back from the hardware into @vsc.
 *
 * Only the VSC SDP revisions this driver itself emits are accepted:
 * rev 2/len 8 (3D stereo + PSR), rev 4/len 0xe (3D stereo + PSR2) and
 * rev 5/len 0x13 (adds Pixel Encoding/Colorimetry Format). Returns 0 on
 * success, -EINVAL for a short buffer or an unrecognized header.
 */
static int intel_dp_vsc_sdp_unpack(struct drm_dp_vsc_sdp *vsc,
				   const void *buffer, size_t size)
{
	const struct dp_sdp *sdp = buffer;

	if (size < sizeof(struct dp_sdp))
		return -EINVAL;

	memset(vsc, 0, sizeof(*vsc));

	/* Secondary-Data Packet ID must be 0. */
	if (sdp->sdp_header.HB0 != 0)
		return -EINVAL;

	/* Packet type must be VSC. */
	if (sdp->sdp_header.HB1 != DP_SDP_VSC)
		return -EINVAL;

	vsc->sdp_type = sdp->sdp_header.HB1;
	vsc->revision = sdp->sdp_header.HB2;
	vsc->length = sdp->sdp_header.HB3;

	if ((sdp->sdp_header.HB2 == 0x2 && sdp->sdp_header.HB3 == 0x8) ||
	    (sdp->sdp_header.HB2 == 0x4 && sdp->sdp_header.HB3 == 0xe)) {
		/*
		 * - HB2 = 0x2, HB3 = 0x8
		 *   VSC SDP supporting 3D stereo + PSR
		 * - HB2 = 0x4, HB3 = 0xe
		 *   VSC SDP supporting 3D stereo + PSR2 with Y-coordinate of
		 *   first scan line of the SU region (applies to eDP v1.4b
		 *   and higher).
		 */
		return 0;
	} else if (sdp->sdp_header.HB2 == 0x5 && sdp->sdp_header.HB3 == 0x13) {
		/*
		 * - HB2 = 0x5, HB3 = 0x13
		 *   VSC SDP supporting 3D stereo + PSR2 + Pixel Encoding/Colorimetry
		 *   Format.
		 */
		vsc->pixelformat = (sdp->db[16] >> 4) & 0xf;
		vsc->colorimetry = sdp->db[16] & 0xf;
		vsc->dynamic_range = (sdp->db[17] >> 7) & 0x1;

		/* DB17[2:0]: component bit depth code -> bits per component */
		switch (sdp->db[17] & 0x7) {
		case 0x0:
			vsc->bpc = 6;
			break;
		case 0x1:
			vsc->bpc = 8;
			break;
		case 0x2:
			vsc->bpc = 10;
			break;
		case 0x3:
			vsc->bpc = 12;
			break;
		case 0x4:
			vsc->bpc = 16;
			break;
		default:
			MISSING_CASE(sdp->db[17] & 0x7);
			return -EINVAL;
		}

		vsc->content_type = sdp->db[18] & 0x7;
	} else {
		return -EINVAL;
	}

	return 0;
}
2930
2931 static int
intel_dp_hdr_metadata_infoframe_sdp_unpack(struct hdmi_drm_infoframe * drm_infoframe,const void * buffer,size_t size)2932 intel_dp_hdr_metadata_infoframe_sdp_unpack(struct hdmi_drm_infoframe *drm_infoframe,
2933 const void *buffer, size_t size)
2934 {
2935 int ret;
2936
2937 const struct dp_sdp *sdp = buffer;
2938
2939 if (size < sizeof(struct dp_sdp))
2940 return -EINVAL;
2941
2942 if (sdp->sdp_header.HB0 != 0)
2943 return -EINVAL;
2944
2945 if (sdp->sdp_header.HB1 != HDMI_INFOFRAME_TYPE_DRM)
2946 return -EINVAL;
2947
2948 /*
2949 * Least Significant Eight Bits of (Data Byte Count – 1)
2950 * 1Dh (i.e., Data Byte Count = 30 bytes).
2951 */
2952 if (sdp->sdp_header.HB2 != 0x1D)
2953 return -EINVAL;
2954
2955 /* Most Significant Two Bits of (Data Byte Count – 1), Clear to 00b. */
2956 if ((sdp->sdp_header.HB3 & 0x3) != 0)
2957 return -EINVAL;
2958
2959 /* INFOFRAME SDP Version Number */
2960 if (((sdp->sdp_header.HB3 >> 2) & 0x3f) != 0x13)
2961 return -EINVAL;
2962
2963 /* CTA Header Byte 2 (INFOFRAME Version Number) */
2964 if (sdp->db[0] != 1)
2965 return -EINVAL;
2966
2967 /* CTA Header Byte 3 (Length of INFOFRAME): HDMI_DRM_INFOFRAME_SIZE */
2968 if (sdp->db[1] != HDMI_DRM_INFOFRAME_SIZE)
2969 return -EINVAL;
2970
2971 ret = hdmi_drm_infoframe_unpack_only(drm_infoframe, &sdp->db[2],
2972 HDMI_DRM_INFOFRAME_SIZE);
2973
2974 return ret;
2975 }
2976
intel_read_dp_vsc_sdp(struct intel_encoder * encoder,struct intel_crtc_state * crtc_state,struct drm_dp_vsc_sdp * vsc)2977 static void intel_read_dp_vsc_sdp(struct intel_encoder *encoder,
2978 struct intel_crtc_state *crtc_state,
2979 struct drm_dp_vsc_sdp *vsc)
2980 {
2981 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
2982 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
2983 unsigned int type = DP_SDP_VSC;
2984 struct dp_sdp sdp = {};
2985 int ret;
2986
2987 /* When PSR is enabled, VSC SDP is handled by PSR routine */
2988 if (crtc_state->has_psr)
2989 return;
2990
2991 if ((crtc_state->infoframes.enable &
2992 intel_hdmi_infoframe_enable(type)) == 0)
2993 return;
2994
2995 dig_port->read_infoframe(encoder, crtc_state, type, &sdp, sizeof(sdp));
2996
2997 ret = intel_dp_vsc_sdp_unpack(vsc, &sdp, sizeof(sdp));
2998
2999 if (ret)
3000 drm_dbg_kms(&dev_priv->drm, "Failed to unpack DP VSC SDP\n");
3001 }
3002
intel_read_dp_hdr_metadata_infoframe_sdp(struct intel_encoder * encoder,struct intel_crtc_state * crtc_state,struct hdmi_drm_infoframe * drm_infoframe)3003 static void intel_read_dp_hdr_metadata_infoframe_sdp(struct intel_encoder *encoder,
3004 struct intel_crtc_state *crtc_state,
3005 struct hdmi_drm_infoframe *drm_infoframe)
3006 {
3007 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
3008 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
3009 unsigned int type = HDMI_PACKET_TYPE_GAMUT_METADATA;
3010 struct dp_sdp sdp = {};
3011 int ret;
3012
3013 if ((crtc_state->infoframes.enable &
3014 intel_hdmi_infoframe_enable(type)) == 0)
3015 return;
3016
3017 dig_port->read_infoframe(encoder, crtc_state, type, &sdp,
3018 sizeof(sdp));
3019
3020 ret = intel_dp_hdr_metadata_infoframe_sdp_unpack(drm_infoframe, &sdp,
3021 sizeof(sdp));
3022
3023 if (ret)
3024 drm_dbg_kms(&dev_priv->drm,
3025 "Failed to unpack DP HDR Metadata Infoframe SDP\n");
3026 }
3027
intel_read_dp_sdp(struct intel_encoder * encoder,struct intel_crtc_state * crtc_state,unsigned int type)3028 void intel_read_dp_sdp(struct intel_encoder *encoder,
3029 struct intel_crtc_state *crtc_state,
3030 unsigned int type)
3031 {
3032 switch (type) {
3033 case DP_SDP_VSC:
3034 intel_read_dp_vsc_sdp(encoder, crtc_state,
3035 &crtc_state->infoframes.vsc);
3036 break;
3037 case HDMI_PACKET_TYPE_GAMUT_METADATA:
3038 intel_read_dp_hdr_metadata_infoframe_sdp(encoder, crtc_state,
3039 &crtc_state->infoframes.drm.drm);
3040 break;
3041 default:
3042 MISSING_CASE(type);
3043 break;
3044 }
3045 }
3046
/*
 * Handle a TEST_LINK_TRAINING automated test request (DP CTS 1.2, 4.3.1.11).
 *
 * Reads the requested lane count and link rate from the sink's DPCD,
 * validates them against this source's capabilities and caches them in
 * intel_dp->compliance for the subsequent retrain.
 *
 * Returns DP_TEST_ACK on success, DP_TEST_NAK on AUX failure or an
 * unsupported link parameter.
 */
static u8 intel_dp_autotest_link_training(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int status = 0;
	int test_link_rate;
	u8 test_lane_count, test_link_bw;
	/* (DP CTS 1.2)
	 * 4.3.1.11
	 */
	/* Read the TEST_LANE_COUNT and TEST_LINK_RATE fields (DP CTS 3.1.4) */
	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_LANE_COUNT,
				   &test_lane_count);

	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "Lane count read failed\n");
		return DP_TEST_NAK;
	}
	test_lane_count &= DP_MAX_LANE_COUNT_MASK;

	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_LINK_RATE,
				   &test_link_bw);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "Link Rate read failed\n");
		return DP_TEST_NAK;
	}
	/* Convert the DPCD bandwidth code into a link rate in kHz. */
	test_link_rate = drm_dp_bw_code_to_link_rate(test_link_bw);

	/* Validate the requested link rate and lane count */
	if (!intel_dp_link_params_valid(intel_dp, test_link_rate,
					test_lane_count))
		return DP_TEST_NAK;

	intel_dp->compliance.test_lane_count = test_lane_count;
	intel_dp->compliance.test_link_rate = test_link_rate;

	return DP_TEST_ACK;
}
3084
/*
 * Handle a TEST_PATTERN automated test request (DP CTS 3.1.5).
 *
 * Reads the requested pattern, geometry and color format from the sink's
 * DPCD and caches them in intel_dp->compliance.test_data. Only the color
 * ramp pattern with RGB/VESA range at 6 or 8 bpc is accepted; anything
 * else is NAKed.
 */
static u8 intel_dp_autotest_video_pattern(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 test_pattern;
	u8 test_misc;
	__be16 h_width, v_height;
	int status = 0;

	/* Read the TEST_PATTERN (DP CTS 3.1.5) */
	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_PATTERN,
				   &test_pattern);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "Test pattern read failed\n");
		return DP_TEST_NAK;
	}
	/* Only the color ramp pattern is supported. */
	if (test_pattern != DP_COLOR_RAMP)
		return DP_TEST_NAK;

	status = drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_H_WIDTH_HI,
				  &h_width, 2);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "H Width read failed\n");
		return DP_TEST_NAK;
	}

	status = drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_V_HEIGHT_HI,
				  &v_height, 2);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "V Height read failed\n");
		return DP_TEST_NAK;
	}

	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_MISC0,
				   &test_misc);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm, "TEST MISC read failed\n");
		return DP_TEST_NAK;
	}
	/* Only RGB with VESA (full) range is supported. */
	if ((test_misc & DP_TEST_COLOR_FORMAT_MASK) != DP_COLOR_FORMAT_RGB)
		return DP_TEST_NAK;
	if (test_misc & DP_TEST_DYNAMIC_RANGE_CEA)
		return DP_TEST_NAK;
	switch (test_misc & DP_TEST_BIT_DEPTH_MASK) {
	case DP_TEST_BIT_DEPTH_6:
		intel_dp->compliance.test_data.bpc = 6;
		break;
	case DP_TEST_BIT_DEPTH_8:
		intel_dp->compliance.test_data.bpc = 8;
		break;
	default:
		return DP_TEST_NAK;
	}

	intel_dp->compliance.test_data.video_pattern = test_pattern;
	/* Geometry arrives big-endian over AUX; convert to CPU order. */
	intel_dp->compliance.test_data.hdisplay = be16_to_cpu(h_width);
	intel_dp->compliance.test_data.vdisplay = be16_to_cpu(v_height);
	/* Set test active flag here so userspace doesn't interrupt things */
	intel_dp->compliance.test_active = true;

	return DP_TEST_ACK;
}
3146
/*
 * Handle an EDID_READ automated test request (DP CTS 1.2 Core r1.1,
 * 4.2.2.4-4.2.2.6).
 *
 * On a failed/corrupt EDID read, select failsafe resolution mode;
 * otherwise write the last EDID block's checksum back to the sink and
 * select the preferred resolution.
 */
static u8 intel_dp_autotest_edid(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 test_result = DP_TEST_ACK;
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct drm_connector *connector = &intel_connector->base;

	if (intel_connector->detect_edid == NULL ||
	    connector->edid_corrupt ||
	    intel_dp->aux.i2c_defer_count > 6) {
		/* Check EDID read for NACKs, DEFERs and corruption
		 * (DP CTS 1.2 Core r1.1)
		 *    4.2.2.4 : Failed EDID read, I2C_NAK
		 *    4.2.2.5 : Failed EDID read, I2C_DEFER
		 *    4.2.2.6 : EDID corruption detected
		 * Use failsafe mode for all cases
		 */
		if (intel_dp->aux.i2c_nack_count > 0 ||
		    intel_dp->aux.i2c_defer_count > 0)
			drm_dbg_kms(&i915->drm,
				    "EDID read had %d NACKs, %d DEFERs\n",
				    intel_dp->aux.i2c_nack_count,
				    intel_dp->aux.i2c_defer_count);
		intel_dp->compliance.test_data.edid = INTEL_DP_RESOLUTION_FAILSAFE;
	} else {
		struct edid *block = intel_connector->detect_edid;

		/* We have to write the checksum
		 * of the last block read
		 */
		block += intel_connector->detect_edid->extensions;

		if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_EDID_CHECKSUM,
				       block->checksum) <= 0)
			drm_dbg_kms(&i915->drm,
				    "Failed to write EDID checksum\n");

		test_result = DP_TEST_ACK | DP_TEST_EDID_CHECKSUM_WRITE;
		intel_dp->compliance.test_data.edid = INTEL_DP_RESOLUTION_PREFERRED;
	}

	/* Set test active flag here so userspace doesn't interrupt things */
	intel_dp->compliance.test_active = true;

	return test_result;
}
3193
intel_dp_phy_pattern_update(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)3194 static void intel_dp_phy_pattern_update(struct intel_dp *intel_dp,
3195 const struct intel_crtc_state *crtc_state)
3196 {
3197 struct drm_i915_private *dev_priv =
3198 to_i915(dp_to_dig_port(intel_dp)->base.base.dev);
3199 struct drm_dp_phy_test_params *data =
3200 &intel_dp->compliance.test_data.phytest;
3201 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
3202 enum pipe pipe = crtc->pipe;
3203 u32 pattern_val;
3204
3205 switch (data->phy_pattern) {
3206 case DP_PHY_TEST_PATTERN_NONE:
3207 DRM_DEBUG_KMS("Disable Phy Test Pattern\n");
3208 intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe), 0x0);
3209 break;
3210 case DP_PHY_TEST_PATTERN_D10_2:
3211 DRM_DEBUG_KMS("Set D10.2 Phy Test Pattern\n");
3212 intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
3213 DDI_DP_COMP_CTL_ENABLE | DDI_DP_COMP_CTL_D10_2);
3214 break;
3215 case DP_PHY_TEST_PATTERN_ERROR_COUNT:
3216 DRM_DEBUG_KMS("Set Error Count Phy Test Pattern\n");
3217 intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
3218 DDI_DP_COMP_CTL_ENABLE |
3219 DDI_DP_COMP_CTL_SCRAMBLED_0);
3220 break;
3221 case DP_PHY_TEST_PATTERN_PRBS7:
3222 DRM_DEBUG_KMS("Set PRBS7 Phy Test Pattern\n");
3223 intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
3224 DDI_DP_COMP_CTL_ENABLE | DDI_DP_COMP_CTL_PRBS7);
3225 break;
3226 case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
3227 /*
3228 * FIXME: Ideally pattern should come from DPCD 0x250. As
3229 * current firmware of DPR-100 could not set it, so hardcoding
3230 * now for complaince test.
3231 */
3232 DRM_DEBUG_KMS("Set 80Bit Custom Phy Test Pattern 0x3e0f83e0 0x0f83e0f8 0x0000f83e\n");
3233 pattern_val = 0x3e0f83e0;
3234 intel_de_write(dev_priv, DDI_DP_COMP_PAT(pipe, 0), pattern_val);
3235 pattern_val = 0x0f83e0f8;
3236 intel_de_write(dev_priv, DDI_DP_COMP_PAT(pipe, 1), pattern_val);
3237 pattern_val = 0x0000f83e;
3238 intel_de_write(dev_priv, DDI_DP_COMP_PAT(pipe, 2), pattern_val);
3239 intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
3240 DDI_DP_COMP_CTL_ENABLE |
3241 DDI_DP_COMP_CTL_CUSTOM80);
3242 break;
3243 case DP_PHY_TEST_PATTERN_CP2520:
3244 /*
3245 * FIXME: Ideally pattern should come from DPCD 0x24A. As
3246 * current firmware of DPR-100 could not set it, so hardcoding
3247 * now for complaince test.
3248 */
3249 DRM_DEBUG_KMS("Set HBR2 compliance Phy Test Pattern\n");
3250 pattern_val = 0xFB;
3251 intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
3252 DDI_DP_COMP_CTL_ENABLE | DDI_DP_COMP_CTL_HBR2 |
3253 pattern_val);
3254 break;
3255 default:
3256 WARN(1, "Invalid Phy Test Pattern\n");
3257 }
3258 }
3259
/*
 * Temporarily disable the transcoder, pipe and DP transport so the PHY
 * compliance pattern can be reprogrammed. Counterpart of
 * intel_dp_autotest_phy_ddi_enable(); write order (pipe, DDI func, DP_TP)
 * is deliberate.
 */
static void
intel_dp_autotest_phy_ddi_disable(struct intel_dp *intel_dp,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc *crtc = to_intel_crtc(dig_port->base.base.crtc);
	enum pipe pipe = crtc->pipe;
	u32 trans_ddi_func_ctl_value, trans_conf_value, dp_tp_ctl_value;

	trans_ddi_func_ctl_value = intel_de_read(dev_priv,
						 TRANS_DDI_FUNC_CTL(pipe));
	trans_conf_value = intel_de_read(dev_priv, PIPECONF(pipe));
	dp_tp_ctl_value = intel_de_read(dev_priv, TGL_DP_TP_CTL(pipe));

	/* Clear the enable bits; also detach the port from the transcoder. */
	trans_ddi_func_ctl_value &= ~(TRANS_DDI_FUNC_ENABLE |
				      TGL_TRANS_DDI_PORT_MASK);
	trans_conf_value &= ~PIPECONF_ENABLE;
	dp_tp_ctl_value &= ~DP_TP_CTL_ENABLE;

	intel_de_write(dev_priv, PIPECONF(pipe), trans_conf_value);
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(pipe),
		       trans_ddi_func_ctl_value);
	intel_de_write(dev_priv, TGL_DP_TP_CTL(pipe), dp_tp_ctl_value);
}
3286
/*
 * Re-enable the pipe, DP transport and transcoder after the PHY
 * compliance pattern has been programmed. Counterpart of
 * intel_dp_autotest_phy_ddi_disable(); note the write order here is
 * pipe, DP_TP, then DDI func — the reverse of the disable path.
 */
static void
intel_dp_autotest_phy_ddi_enable(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum port port = dig_port->base.port;
	struct intel_crtc *crtc = to_intel_crtc(dig_port->base.base.crtc);
	enum pipe pipe = crtc->pipe;
	u32 trans_ddi_func_ctl_value, trans_conf_value, dp_tp_ctl_value;

	trans_ddi_func_ctl_value = intel_de_read(dev_priv,
						 TRANS_DDI_FUNC_CTL(pipe));
	trans_conf_value = intel_de_read(dev_priv, PIPECONF(pipe));
	dp_tp_ctl_value = intel_de_read(dev_priv, TGL_DP_TP_CTL(pipe));

	/* Re-select this port on the transcoder and set all enable bits. */
	trans_ddi_func_ctl_value |= TRANS_DDI_FUNC_ENABLE |
				    TGL_TRANS_DDI_SELECT_PORT(port);
	trans_conf_value |= PIPECONF_ENABLE;
	dp_tp_ctl_value |= DP_TP_CTL_ENABLE;

	intel_de_write(dev_priv, PIPECONF(pipe), trans_conf_value);
	intel_de_write(dev_priv, TGL_DP_TP_CTL(pipe), dp_tp_ctl_value);
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(pipe),
		       trans_ddi_func_ctl_value);
}
3314
intel_dp_process_phy_request(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)3315 static void intel_dp_process_phy_request(struct intel_dp *intel_dp,
3316 const struct intel_crtc_state *crtc_state)
3317 {
3318 struct drm_dp_phy_test_params *data =
3319 &intel_dp->compliance.test_data.phytest;
3320 u8 link_status[DP_LINK_STATUS_SIZE];
3321
3322 if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, DP_PHY_DPRX,
3323 link_status) < 0) {
3324 DRM_DEBUG_KMS("failed to get link status\n");
3325 return;
3326 }
3327
3328 /* retrieve vswing & pre-emphasis setting */
3329 intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX,
3330 link_status);
3331
3332 intel_dp_autotest_phy_ddi_disable(intel_dp, crtc_state);
3333
3334 intel_dp_set_signal_levels(intel_dp, crtc_state, DP_PHY_DPRX);
3335
3336 intel_dp_phy_pattern_update(intel_dp, crtc_state);
3337
3338 intel_dp_autotest_phy_ddi_enable(intel_dp, crtc_state);
3339
3340 drm_dp_dpcd_write(&intel_dp->aux, DP_TRAINING_LANE0_SET,
3341 intel_dp->train_set, crtc_state->lane_count);
3342
3343 drm_dp_set_phy_test_pattern(&intel_dp->aux, data,
3344 link_status[DP_DPCD_REV]);
3345 }
3346
intel_dp_autotest_phy_pattern(struct intel_dp * intel_dp)3347 static u8 intel_dp_autotest_phy_pattern(struct intel_dp *intel_dp)
3348 {
3349 struct drm_dp_phy_test_params *data =
3350 &intel_dp->compliance.test_data.phytest;
3351
3352 if (drm_dp_get_phy_test_pattern(&intel_dp->aux, data)) {
3353 DRM_DEBUG_KMS("DP Phy Test pattern AUX read failure\n");
3354 return DP_TEST_NAK;
3355 }
3356
3357 /* Set test active flag here so userspace doesn't interrupt things */
3358 intel_dp->compliance.test_active = true;
3359
3360 return DP_TEST_ACK;
3361 }
3362
/*
 * Service an automated test request raised by the sink: read
 * DP_TEST_REQUEST, dispatch to the matching autotest handler and write
 * the resulting ACK/NAK back to DP_TEST_RESPONSE.
 */
static void intel_dp_handle_test_request(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 response = DP_TEST_NAK;
	u8 request = 0;
	int status;

	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_REQUEST, &request);
	if (status <= 0) {
		drm_dbg_kms(&i915->drm,
			    "Could not read test request from sink\n");
		goto update_status;
	}

	switch (request) {
	case DP_TEST_LINK_TRAINING:
		drm_dbg_kms(&i915->drm, "LINK_TRAINING test requested\n");
		response = intel_dp_autotest_link_training(intel_dp);
		break;
	case DP_TEST_LINK_VIDEO_PATTERN:
		drm_dbg_kms(&i915->drm, "TEST_PATTERN test requested\n");
		response = intel_dp_autotest_video_pattern(intel_dp);
		break;
	case DP_TEST_LINK_EDID_READ:
		drm_dbg_kms(&i915->drm, "EDID test requested\n");
		response = intel_dp_autotest_edid(intel_dp);
		break;
	case DP_TEST_LINK_PHY_TEST_PATTERN:
		drm_dbg_kms(&i915->drm, "PHY_PATTERN test requested\n");
		response = intel_dp_autotest_phy_pattern(intel_dp);
		break;
	default:
		drm_dbg_kms(&i915->drm, "Invalid test request '%02x'\n",
			    request);
		break;
	}

	/* Remember the accepted test type so later stages can act on it. */
	if (response & DP_TEST_ACK)
		intel_dp->compliance.test_type = request;

update_status:
	/* Always report back, even when the request itself couldn't be read. */
	status = drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_RESPONSE, response);
	if (status <= 0)
		drm_dbg_kms(&i915->drm,
			    "Could not write test response to sink\n");
}
3409
3410 static void
intel_dp_mst_hpd_irq(struct intel_dp * intel_dp,u8 * esi,bool * handled)3411 intel_dp_mst_hpd_irq(struct intel_dp *intel_dp, u8 *esi, bool *handled)
3412 {
3413 drm_dp_mst_hpd_irq(&intel_dp->mst_mgr, esi, handled);
3414
3415 if (esi[1] & DP_CP_IRQ) {
3416 intel_hdcp_handle_cp_irq(intel_dp->attached_connector);
3417 *handled = true;
3418 }
3419 }
3420
/**
 * intel_dp_check_mst_status - service any pending MST interrupts, check link status
 * @intel_dp: Intel DP struct
 *
 * Read any pending MST interrupts, call MST core to handle these and ack the
 * interrupts. Check if the main and AUX link state is ok.
 *
 * Returns:
 * - %true if pending interrupts were serviced (or no interrupts were
 *   pending) w/o detecting an error condition.
 * - %false if an error condition - like AUX failure or a loss of link - is
 *   detected, which needs servicing from the hotplug work.
 */
static bool
intel_dp_check_mst_status(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	bool link_ok = true;

	drm_WARN_ON_ONCE(&i915->drm, intel_dp->active_mst_links < 0);

	/* Keep draining ESI events until the MST core reports none handled. */
	for (;;) {
		/*
		 * The +2 is because DP_DPRX_ESI_LEN is 14, but we then
		 * pass in "esi+10" to drm_dp_channel_eq_ok(), which
		 * takes a 6-byte array. So we actually need 16 bytes
		 * here.
		 *
		 * Somebody who knows what the limits actually are
		 * should check this, but for now this is at least
		 * harmless and avoids a valid compiler warning about
		 * using more of the array than we have allocated.
		 */
		u8 esi[DP_DPRX_ESI_LEN+2] = {};
		bool handled;
		int retry;

		if (!intel_dp_get_sink_irq_esi(intel_dp, esi)) {
			drm_dbg_kms(&i915->drm,
				    "failed to get ESI - device may have failed\n");
			link_ok = false;

			break;
		}

		/* check link status - esi[10] = 0x200c */
		if (intel_dp->active_mst_links > 0 && link_ok &&
		    !drm_dp_channel_eq_ok(&esi[10], intel_dp->lane_count)) {
			drm_dbg_kms(&i915->drm,
				    "channel EQ not ok, retraining\n");
			link_ok = false;
		}

		drm_dbg_kms(&i915->drm, "got esi %3ph\n", esi);

		intel_dp_mst_hpd_irq(intel_dp, esi, &handled);

		/* Nothing was serviced this round: all events drained. */
		if (!handled)
			break;

		/* Ack the serviced events; retry the 3-byte write a few times. */
		for (retry = 0; retry < 3; retry++) {
			int wret;

			wret = drm_dp_dpcd_write(&intel_dp->aux,
						 DP_SINK_COUNT_ESI+1,
						 &esi[1], 3);
			if (wret == 3)
				break;
		}
	}

	return link_ok;
}
3494
3495 static void
intel_dp_handle_hdmi_link_status_change(struct intel_dp * intel_dp)3496 intel_dp_handle_hdmi_link_status_change(struct intel_dp *intel_dp)
3497 {
3498 bool is_active;
3499 u8 buf = 0;
3500
3501 is_active = drm_dp_pcon_hdmi_link_active(&intel_dp->aux);
3502 if (intel_dp->frl.is_trained && !is_active) {
3503 if (drm_dp_dpcd_readb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, &buf) < 0)
3504 return;
3505
3506 buf &= ~DP_PCON_ENABLE_HDMI_LINK;
3507 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf) < 0)
3508 return;
3509
3510 drm_dp_pcon_hdmi_frl_link_error_count(&intel_dp->aux, &intel_dp->attached_connector->base);
3511
3512 /* Restart FRL training or fall back to TMDS mode */
3513 intel_dp_check_frl_training(intel_dp);
3514 }
3515 }
3516
/*
 * Decide whether the DP link needs retraining.
 *
 * Returns true only when the link was previously trained, PSR isn't
 * managing the main link, the cached link parameters are still valid,
 * and the DPRX reports clock recovery or channel EQ as not ok.
 */
static bool
intel_dp_needs_link_retrain(struct intel_dp *intel_dp)
{
	u8 link_status[DP_LINK_STATUS_SIZE];

	/* Never-trained links have nothing to retrain. */
	if (!intel_dp->link_trained)
		return false;

	/*
	 * While PSR source HW is enabled, it will control main-link sending
	 * frames, enabling and disabling it so trying to do a retrain will fail
	 * as the link would or not be on or it could mix training patterns
	 * and frame data at the same time causing retrain to fail.
	 * Also when exiting PSR, HW will retrain the link anyways fixing
	 * any link status error.
	 */
	if (intel_psr_enabled(intel_dp))
		return false;

	if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, DP_PHY_DPRX,
					     link_status) < 0)
		return false;

	/*
	 * Validate the cached values of intel_dp->link_rate and
	 * intel_dp->lane_count before attempting to retrain.
	 *
	 * FIXME would be nice to user the crtc state here, but since
	 * we need to call this from the short HPD handler that seems
	 * a bit hard.
	 */
	if (!intel_dp_link_params_valid(intel_dp, intel_dp->link_rate,
					intel_dp->lane_count))
		return false;

	/* Retrain if Channel EQ or CR not ok */
	return !drm_dp_channel_eq_ok(link_status, intel_dp->lane_count);
}
3555
intel_dp_has_connector(struct intel_dp * intel_dp,const struct drm_connector_state * conn_state)3556 static bool intel_dp_has_connector(struct intel_dp *intel_dp,
3557 const struct drm_connector_state *conn_state)
3558 {
3559 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3560 struct intel_encoder *encoder;
3561 enum pipe pipe;
3562
3563 if (!conn_state->best_encoder)
3564 return false;
3565
3566 /* SST */
3567 encoder = &dp_to_dig_port(intel_dp)->base;
3568 if (conn_state->best_encoder == &encoder->base)
3569 return true;
3570
3571 /* MST */
3572 for_each_pipe(i915, pipe) {
3573 encoder = &intel_dp->mst_encoders[pipe]->base;
3574 if (conn_state->best_encoder == &encoder->base)
3575 return true;
3576 }
3577
3578 return false;
3579 }
3580
/*
 * Collect in @crtc_mask the CRTCs affected by a retrain of @intel_dp,
 * taking the required per-CRTC modeset locks via @ctx.
 *
 * A CRTC is included only when it is active, currently driven by this
 * port (SST or one of its MST streams) and has no commit still pending.
 * Returns 0 on success or a drm_modeset_lock() error (e.g. -EDEADLK,
 * in which case the caller must back off and retry).
 */
static int intel_dp_prep_link_retrain(struct intel_dp *intel_dp,
				      struct drm_modeset_acquire_ctx *ctx,
				      u32 *crtc_mask)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct drm_connector_list_iter conn_iter;
	struct intel_connector *connector;
	int ret = 0;

	*crtc_mask = 0;

	if (!intel_dp_needs_link_retrain(intel_dp))
		return 0;

	drm_connector_list_iter_begin(&i915->drm, &conn_iter);
	for_each_intel_connector_iter(connector, &conn_iter) {
		struct drm_connector_state *conn_state =
			connector->base.state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (!intel_dp_has_connector(intel_dp, conn_state))
			continue;

		crtc = to_intel_crtc(conn_state->crtc);
		if (!crtc)
			continue;

		/* Lock the CRTC so its state can't change under us. */
		ret = drm_modeset_lock(&crtc->base.mutex, ctx);
		if (ret)
			break;

		crtc_state = to_intel_crtc_state(crtc->base.state);

		drm_WARN_ON(&i915->drm, !intel_crtc_has_dp_encoder(crtc_state));

		if (!crtc_state->hw.active)
			continue;

		/* Skip CRTCs whose commit hasn't completed in hardware yet. */
		if (conn_state->commit &&
		    !try_wait_for_completion(&conn_state->commit->hw_done))
			continue;

		*crtc_mask |= drm_crtc_mask(&crtc->base);
	}
	drm_connector_list_iter_end(&conn_iter);

	/* The link may have recovered while we were iterating; recheck. */
	if (!intel_dp_needs_link_retrain(intel_dp))
		*crtc_mask = 0;

	return ret;
}
3633
intel_dp_is_connected(struct intel_dp * intel_dp)3634 static bool intel_dp_is_connected(struct intel_dp *intel_dp)
3635 {
3636 struct intel_connector *connector = intel_dp->attached_connector;
3637
3638 return connector->base.status == connector_status_connected ||
3639 intel_dp->is_mst;
3640 }
3641
/*
 * Retrain the DP link on @encoder if needed.
 *
 * Figures out the affected CRTCs (taking modeset locks via @ctx), then
 * runs three passes over them: suppress FIFO underrun reporting, retrain
 * the link once (on the MST master transcoder for gen12+ MST), and after
 * a vblank re-enable underrun reporting. Returns 0 or a modeset-lock
 * error such as -EDEADLK (caller must back off and retry).
 */
int intel_dp_retrain_link(struct intel_encoder *encoder,
			  struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc;
	u32 crtc_mask;
	int ret;

	if (!intel_dp_is_connected(intel_dp))
		return 0;

	ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex,
			       ctx);
	if (ret)
		return ret;

	ret = intel_dp_prep_link_retrain(intel_dp, ctx, &crtc_mask);
	if (ret)
		return ret;

	/* No affected CRTC (or the link recovered): nothing to do. */
	if (crtc_mask == 0)
		return 0;

	drm_dbg_kms(&dev_priv->drm, "[ENCODER:%d:%s] retraining link\n",
		    encoder->base.base.id, encoder->base.name);

	for_each_intel_crtc_mask(&dev_priv->drm, crtc, crtc_mask) {
		const struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		/* Suppress underruns caused by re-training */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, crtc->pipe, false);
		if (crtc_state->has_pch_encoder)
			intel_set_pch_fifo_underrun_reporting(dev_priv,
							      intel_crtc_pch_transcoder(crtc), false);
	}

	for_each_intel_crtc_mask(&dev_priv->drm, crtc, crtc_mask) {
		const struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		/* retrain on the MST master transcoder */
		if (DISPLAY_VER(dev_priv) >= 12 &&
		    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST) &&
		    !intel_dp_mst_is_master_trans(crtc_state))
			continue;

		intel_dp_check_frl_training(intel_dp);
		intel_dp_pcon_dsc_configure(intel_dp, crtc_state);
		intel_dp_start_link_train(intel_dp, crtc_state);
		intel_dp_stop_link_train(intel_dp, crtc_state);
		/* The link is shared: training once covers every stream. */
		break;
	}

	for_each_intel_crtc_mask(&dev_priv->drm, crtc, crtc_mask) {
		const struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		/* Keep underrun reporting disabled until things are stable */
		intel_wait_for_vblank(dev_priv, crtc->pipe);

		intel_set_cpu_fifo_underrun_reporting(dev_priv, crtc->pipe, true);
		if (crtc_state->has_pch_encoder)
			intel_set_pch_fifo_underrun_reporting(dev_priv,
							      intel_crtc_pch_transcoder(crtc), true);
	}

	return 0;
}
3712
/*
 * Build a mask of active crtcs driven by @intel_dp that a PHY compliance
 * test can run on, locking each crtc through @ctx.
 *
 * Crtcs with a pending commit whose hw_done hasn't completed are skipped
 * rather than waited on. Returns 0 or a (possibly -EDEADLK) locking error.
 */
static int intel_dp_prep_phy_test(struct intel_dp *intel_dp,
				  struct drm_modeset_acquire_ctx *ctx,
				  u32 *crtc_mask)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct drm_connector_list_iter conn_iter;
	struct intel_connector *connector;
	int ret = 0;

	*crtc_mask = 0;

	drm_connector_list_iter_begin(&i915->drm, &conn_iter);
	for_each_intel_connector_iter(connector, &conn_iter) {
		struct drm_connector_state *conn_state =
			connector->base.state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (!intel_dp_has_connector(intel_dp, conn_state))
			continue;

		crtc = to_intel_crtc(conn_state->crtc);
		if (!crtc)
			continue;

		ret = drm_modeset_lock(&crtc->base.mutex, ctx);
		if (ret)
			break;

		crtc_state = to_intel_crtc_state(crtc->base.state);

		drm_WARN_ON(&i915->drm, !intel_crtc_has_dp_encoder(crtc_state));

		if (!crtc_state->hw.active)
			continue;

		/* Skip crtcs whose last commit hasn't reached the hardware yet */
		if (conn_state->commit &&
		    !try_wait_for_completion(&conn_state->commit->hw_done))
			continue;

		*crtc_mask |= drm_crtc_mask(&crtc->base);
	}
	drm_connector_list_iter_end(&conn_iter);

	return ret;
}
3759
/*
 * Run a requested DP PHY compliance test pattern on one crtc driven by
 * this encoder. Locking mirrors intel_dp_retrain_link(); the caller
 * handles -EDEADLK backoff.
 */
static int intel_dp_do_phy_test(struct intel_encoder *encoder,
				struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc;
	u32 crtc_mask;
	int ret;

	ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex,
			       ctx);
	if (ret)
		return ret;

	ret = intel_dp_prep_phy_test(intel_dp, ctx, &crtc_mask);
	if (ret)
		return ret;

	if (crtc_mask == 0)
		return 0;

	drm_dbg_kms(&dev_priv->drm, "[ENCODER:%d:%s] PHY test\n",
		    encoder->base.base.id, encoder->base.name);

	for_each_intel_crtc_mask(&dev_priv->drm, crtc, crtc_mask) {
		const struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		/* test on the MST master transcoder */
		if (DISPLAY_VER(dev_priv) >= 12 &&
		    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST) &&
		    !intel_dp_mst_is_master_trans(crtc_state))
			continue;

		/* One shared link: process the request once and stop */
		intel_dp_process_phy_request(intel_dp, crtc_state);
		break;
	}

	return 0;
}
3800
/*
 * Entry point for handling a pending PHY compliance test request:
 * wraps intel_dp_do_phy_test() in the standard drm modeset-lock
 * acquire/backoff/retry loop.
 */
void intel_dp_phy_test(struct intel_encoder *encoder)
{
	struct drm_modeset_acquire_ctx ctx;
	int ret;

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		ret = intel_dp_do_phy_test(encoder, &ctx);

		/* Deadlock with another locker: drop everything and retry */
		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);
}
3824
/*
 * Read, acknowledge and dispatch DP_DEVICE_SERVICE_IRQ_VECTOR events
 * (automated test requests, HDCP CP_IRQ, sink-specific IRQs).
 * DPCD 1.0 sinks don't have this register, so bail early for them.
 */
static void intel_dp_check_device_service_irq(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 val;

	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (drm_dp_dpcd_readb(&intel_dp->aux,
			      DP_DEVICE_SERVICE_IRQ_VECTOR, &val) != 1 || !val)
		return;

	/* Ack the IRQ bits before handling them (write-1-to-clear) */
	drm_dp_dpcd_writeb(&intel_dp->aux, DP_DEVICE_SERVICE_IRQ_VECTOR, val);

	if (val & DP_AUTOMATED_TEST_REQUEST)
		intel_dp_handle_test_request(intel_dp);

	if (val & DP_CP_IRQ)
		intel_hdcp_handle_cp_irq(intel_dp->attached_connector);

	if (val & DP_SINK_SPECIFIC_IRQ)
		drm_dbg_kms(&i915->drm, "Sink specific irq unhandled\n");
}
3848
/*
 * Read, acknowledge and dispatch DP_LINK_SERVICE_IRQ_VECTOR_ESI0 events.
 * Currently only HDMI_LINK_STATUS_CHANGED (PCON FRL link) is handled.
 */
static void intel_dp_check_link_service_irq(struct intel_dp *intel_dp)
{
	u8 val;

	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (drm_dp_dpcd_readb(&intel_dp->aux,
			      DP_LINK_SERVICE_IRQ_VECTOR_ESI0, &val) != 1 || !val)
		return;

	/* Only dispatch if the ack write succeeded */
	if (drm_dp_dpcd_writeb(&intel_dp->aux,
			       DP_LINK_SERVICE_IRQ_VECTOR_ESI0, val) != 1)
		return;

	if (val & HDMI_LINK_STATUS_CHANGED)
		intel_dp_handle_hdmi_link_status_change(intel_dp);
}
3867
3868 /*
3869 * According to DP spec
3870 * 5.1.2:
3871 * 1. Read DPCD
3872 * 2. Configure link according to Receiver Capabilities
3873 * 3. Use Link Training from 2.5.3.3 and 3.5.1.3
3874 * 4. Check link status on receipt of hot-plug interrupt
3875 *
3876 * intel_dp_short_pulse - handles short pulse interrupts
3877 * when full detection is not required.
3878 * Returns %true if short pulse is handled and full detection
3879 * is NOT required and %false otherwise.
3880 */
3881 static bool
intel_dp_short_pulse(struct intel_dp * intel_dp)3882 intel_dp_short_pulse(struct intel_dp *intel_dp)
3883 {
3884 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
3885 u8 old_sink_count = intel_dp->sink_count;
3886 bool ret;
3887
3888 /*
3889 * Clearing compliance test variables to allow capturing
3890 * of values for next automated test request.
3891 */
3892 memset(&intel_dp->compliance, 0, sizeof(intel_dp->compliance));
3893
3894 /*
3895 * Now read the DPCD to see if it's actually running
3896 * If the current value of sink count doesn't match with
3897 * the value that was stored earlier or dpcd read failed
3898 * we need to do full detection
3899 */
3900 ret = intel_dp_get_dpcd(intel_dp);
3901
3902 if ((old_sink_count != intel_dp->sink_count) || !ret) {
3903 /* No need to proceed if we are going to do full detect */
3904 return false;
3905 }
3906
3907 intel_dp_check_device_service_irq(intel_dp);
3908 intel_dp_check_link_service_irq(intel_dp);
3909
3910 /* Handle CEC interrupts, if any */
3911 drm_dp_cec_irq(&intel_dp->aux);
3912
3913 /* defer to the hotplug work for link retraining if needed */
3914 if (intel_dp_needs_link_retrain(intel_dp))
3915 return false;
3916
3917 intel_psr_short_pulse(intel_dp);
3918
3919 switch (intel_dp->compliance.test_type) {
3920 case DP_TEST_LINK_TRAINING:
3921 drm_dbg_kms(&dev_priv->drm,
3922 "Link Training Compliance Test requested\n");
3923 /* Send a Hotplug Uevent to userspace to start modeset */
3924 drm_kms_helper_hotplug_event(&dev_priv->drm);
3925 break;
3926 case DP_TEST_LINK_PHY_TEST_PATTERN:
3927 drm_dbg_kms(&dev_priv->drm,
3928 "PHY test pattern Compliance Test requested\n");
3929 /*
3930 * Schedule long hpd to do the test
3931 *
3932 * FIXME get rid of the ad-hoc phy test modeset code
3933 * and properly incorporate it into the normal modeset.
3934 */
3935 return false;
3936 }
3937
3938 return true;
3939 }
3940
/*
 * Determine connection status from the DPCD, including branch-device
 * (dongle) downstream port handling.
 *
 * XXX this is probably wrong for multiple downstream ports
 */
static enum drm_connector_status
intel_dp_detect_dpcd(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	u8 *dpcd = intel_dp->dpcd;
	u8 type;

	/* eDP has its own detect path; we should never get here for it */
	if (drm_WARN_ON(&i915->drm, intel_dp_is_edp(intel_dp)))
		return connector_status_connected;

	lspcon_resume(dig_port);

	if (!intel_dp_get_dpcd(intel_dp))
		return connector_status_disconnected;

	/* if there's no downstream port, we're done */
	if (!drm_dp_is_branch(dpcd))
		return connector_status_connected;

	/* If we're HPD-aware, SINK_COUNT changes dynamically */
	if (intel_dp_has_sink_count(intel_dp) &&
	    intel_dp->downstream_ports[0] & DP_DS_PORT_HPD) {
		return intel_dp->sink_count ?
			connector_status_connected : connector_status_disconnected;
	}

	if (intel_dp_can_mst(intel_dp))
		return connector_status_connected;

	/* If no HPD, poke DDC gently */
	if (drm_probe_ddc(&intel_dp->aux.ddc))
		return connector_status_connected;

	/* Well we tried, say unknown for unreliable port types */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
		type = intel_dp->downstream_ports[0] & DP_DS_PORT_TYPE_MASK;
		if (type == DP_DS_PORT_TYPE_VGA ||
		    type == DP_DS_PORT_TYPE_NON_EDID)
			return connector_status_unknown;
	} else {
		/* DPCD 1.0: only coarse port-type info is available */
		type = intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
			DP_DWN_STRM_PORT_TYPE_MASK;
		if (type == DP_DWN_STRM_PORT_TYPE_ANALOG ||
		    type == DP_DWN_STRM_PORT_TYPE_OTHER)
			return connector_status_unknown;
	}

	/* Anything else is out of spec, warn and ignore */
	drm_dbg_kms(&i915->drm, "Broken DP branch device, ignoring\n");
	return connector_status_disconnected;
}
3994
/* eDP panels are fixed and can never be disconnected. */
static enum drm_connector_status
edp_detect(struct intel_dp *intel_dp)
{
	return connector_status_connected;
}
4000
4001 /*
4002 * intel_digital_port_connected - is the specified port connected?
4003 * @encoder: intel_encoder
4004 *
4005 * In cases where there's a connector physically connected but it can't be used
4006 * by our hardware we also return false, since the rest of the driver should
4007 * pretty much treat the port as disconnected. This is relevant for type-C
4008 * (starting on ICL) where there's ownership involved.
4009 *
4010 * Return %true if port is connected, %false otherwise.
4011 */
intel_digital_port_connected(struct intel_encoder * encoder)4012 bool intel_digital_port_connected(struct intel_encoder *encoder)
4013 {
4014 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
4015 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
4016 bool is_connected = false;
4017 intel_wakeref_t wakeref;
4018
4019 with_intel_display_power(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref)
4020 is_connected = dig_port->connected(encoder);
4021
4022 return is_connected;
4023 }
4024
4025 static struct edid *
intel_dp_get_edid(struct intel_dp * intel_dp)4026 intel_dp_get_edid(struct intel_dp *intel_dp)
4027 {
4028 struct intel_connector *intel_connector = intel_dp->attached_connector;
4029
4030 /* use cached edid if we have one */
4031 if (intel_connector->edid) {
4032 /* invalid edid */
4033 if (IS_ERR(intel_connector->edid))
4034 return NULL;
4035
4036 return drm_edid_duplicate(intel_connector->edid);
4037 } else
4038 return drm_get_edid(&intel_connector->base,
4039 &intel_dp->aux.ddc);
4040 }
4041
/*
 * Cache the downstream facing port (DFP) capabilities - max bpc, dotclock,
 * TMDS clock range and PCON FRL bandwidth - derived from the DPCD and the
 * sink's EDID, for use during mode validation and state computation.
 */
static void
intel_dp_update_dfp(struct intel_dp *intel_dp,
		    const struct edid *edid)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;

	intel_dp->dfp.max_bpc =
		drm_dp_downstream_max_bpc(intel_dp->dpcd,
					  intel_dp->downstream_ports, edid);

	intel_dp->dfp.max_dotclock =
		drm_dp_downstream_max_dotclock(intel_dp->dpcd,
					       intel_dp->downstream_ports);

	intel_dp->dfp.min_tmds_clock =
		drm_dp_downstream_min_tmds_clock(intel_dp->dpcd,
						 intel_dp->downstream_ports,
						 edid);
	intel_dp->dfp.max_tmds_clock =
		drm_dp_downstream_max_tmds_clock(intel_dp->dpcd,
						 intel_dp->downstream_ports,
						 edid);

	intel_dp->dfp.pcon_max_frl_bw =
		drm_dp_get_pcon_max_frl_bw(intel_dp->dpcd,
					   intel_dp->downstream_ports);

	drm_dbg_kms(&i915->drm,
		    "[CONNECTOR:%d:%s] DFP max bpc %d, max dotclock %d, TMDS clock %d-%d, PCON Max FRL BW %dGbps\n",
		    connector->base.base.id, connector->base.name,
		    intel_dp->dfp.max_bpc,
		    intel_dp->dfp.max_dotclock,
		    intel_dp->dfp.min_tmds_clock,
		    intel_dp->dfp.max_tmds_clock,
		    intel_dp->dfp.pcon_max_frl_bw);

	intel_dp_get_pcon_dsc_cap(intel_dp);
}
4081
/*
 * Work out which YCbCr 4:2:0 strategies are available (passthrough,
 * 4:4:4->4:2:0 conversion, RGB->YCbCr conversion in the DFP/PCON) and
 * set connector->base.ycbcr_420_allowed accordingly.
 */
static void
intel_dp_update_420(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	bool is_branch, ycbcr_420_passthrough, ycbcr_444_to_420, rgb_to_ycbcr;

	/* No YCbCr output support on gmch platforms */
	if (HAS_GMCH(i915))
		return;

	/*
	 * ILK doesn't seem capable of DP YCbCr output. The
	 * displayed image is severly corrupted. SNB+ is fine.
	 */
	if (IS_IRONLAKE(i915))
		return;

	is_branch = drm_dp_is_branch(intel_dp->dpcd);
	ycbcr_420_passthrough =
		drm_dp_downstream_420_passthrough(intel_dp->dpcd,
						  intel_dp->downstream_ports);
	/* on-board LSPCON always assumed to support 4:4:4->4:2:0 conversion */
	ycbcr_444_to_420 =
		dp_to_dig_port(intel_dp)->lspcon.active ||
		drm_dp_downstream_444_to_420_conversion(intel_dp->dpcd,
							intel_dp->downstream_ports);
	rgb_to_ycbcr = drm_dp_downstream_rgb_to_ycbcr_conversion(intel_dp->dpcd,
								 intel_dp->downstream_ports,
								 DP_DS_HDMI_BT601_RGB_YCBCR_CONV |
								 DP_DS_HDMI_BT709_RGB_YCBCR_CONV |
								 DP_DS_HDMI_BT2020_RGB_YCBCR_CONV);

	if (DISPLAY_VER(i915) >= 11) {
		/* Let PCON convert from RGB->YCbCr if possible */
		if (is_branch && rgb_to_ycbcr && ycbcr_444_to_420) {
			intel_dp->dfp.rgb_to_ycbcr = true;
			intel_dp->dfp.ycbcr_444_to_420 = true;
			connector->base.ycbcr_420_allowed = true;
		} else {
			/* Prefer 4:2:0 passthrough over 4:4:4->4:2:0 conversion */
			intel_dp->dfp.ycbcr_444_to_420 =
				ycbcr_444_to_420 && !ycbcr_420_passthrough;

			connector->base.ycbcr_420_allowed =
				!is_branch || ycbcr_444_to_420 || ycbcr_420_passthrough;
		}
	} else {
		/* 4:4:4->4:2:0 conversion is the only way */
		intel_dp->dfp.ycbcr_444_to_420 = ycbcr_444_to_420;

		connector->base.ycbcr_420_allowed = ycbcr_444_to_420;
	}

	drm_dbg_kms(&i915->drm,
		    "[CONNECTOR:%d:%s] RGB->YcbCr conversion? %s, YCbCr 4:2:0 allowed? %s, YCbCr 4:4:4->4:2:0 conversion? %s\n",
		    connector->base.base.id, connector->base.name,
		    yesno(intel_dp->dfp.rgb_to_ycbcr),
		    yesno(connector->base.ycbcr_420_allowed),
		    yesno(intel_dp->dfp.ycbcr_444_to_420));
}
4143
/*
 * Read and cache the sink's EDID, then refresh all EDID-derived state
 * (DFP caps, 4:2:0 support, HDMI sink/audio flags, CEC).
 */
static void
intel_dp_set_edid(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	struct edid *edid;

	/* Drop any stale state from a previous detect cycle first */
	intel_dp_unset_edid(intel_dp);
	edid = intel_dp_get_edid(intel_dp);
	connector->detect_edid = edid;

	intel_dp_update_dfp(intel_dp, edid);
	intel_dp_update_420(intel_dp);

	if (edid && edid->input & DRM_EDID_INPUT_DIGITAL) {
		intel_dp->has_hdmi_sink = drm_detect_hdmi_monitor(edid);
		intel_dp->has_audio = drm_detect_monitor_audio(edid);
	}

	drm_dp_cec_set_edid(&intel_dp->aux, edid);
}
4164
4165 static void
intel_dp_unset_edid(struct intel_dp * intel_dp)4166 intel_dp_unset_edid(struct intel_dp *intel_dp)
4167 {
4168 struct intel_connector *connector = intel_dp->attached_connector;
4169
4170 drm_dp_cec_unset_edid(&intel_dp->aux);
4171 kfree(connector->detect_edid);
4172 connector->detect_edid = NULL;
4173
4174 intel_dp->has_hdmi_sink = false;
4175 intel_dp->has_audio = false;
4176
4177 intel_dp->dfp.max_bpc = 0;
4178 intel_dp->dfp.max_dotclock = 0;
4179 intel_dp->dfp.min_tmds_clock = 0;
4180 intel_dp->dfp.max_tmds_clock = 0;
4181
4182 intel_dp->dfp.pcon_max_frl_bw = 0;
4183
4184 intel_dp->dfp.ycbcr_444_to_420 = false;
4185 connector->base.ycbcr_420_allowed = false;
4186 }
4187
/*
 * Connector ->detect_ctx hook: determine connection status, refresh the
 * cached DPCD/EDID state, reconfigure MST, and retrain the link if needed.
 *
 * Returns a drm_connector_status value, or a negative error code
 * (e.g. -EDEADLK from the retrain path) for the probe helper to handle.
 */
static int
intel_dp_detect(struct drm_connector *connector,
		struct drm_modeset_acquire_ctx *ctx,
		bool force)
{
	struct drm_i915_private *dev_priv = to_i915(connector->dev);
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &dig_port->base;
	enum drm_connector_status status;

	drm_dbg_kms(&dev_priv->drm, "[CONNECTOR:%d:%s]\n",
		    connector->base.id, connector->name);
	drm_WARN_ON(&dev_priv->drm,
		    !drm_modeset_is_locked(&dev_priv->drm.mode_config.connection_mutex));

	if (!INTEL_DISPLAY_ENABLED(dev_priv))
		return connector_status_disconnected;

	/* Can't disconnect eDP */
	if (intel_dp_is_edp(intel_dp))
		status = edp_detect(intel_dp);
	else if (intel_digital_port_connected(encoder))
		status = intel_dp_detect_dpcd(intel_dp);
	else
		status = connector_status_disconnected;

	if (status == connector_status_disconnected) {
		/* Reset per-sink state so a new sink starts clean */
		memset(&intel_dp->compliance, 0, sizeof(intel_dp->compliance));
		memset(intel_dp->dsc_dpcd, 0, sizeof(intel_dp->dsc_dpcd));

		if (intel_dp->is_mst) {
			drm_dbg_kms(&dev_priv->drm,
				    "MST device may have disappeared %d vs %d\n",
				    intel_dp->is_mst,
				    intel_dp->mst_mgr.mst_state);
			intel_dp->is_mst = false;
			drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
							intel_dp->is_mst);
		}

		goto out;
	}

	/* Read DP Sink DSC Cap DPCD regs for DP v1.4 */
	if (DISPLAY_VER(dev_priv) >= 11)
		intel_dp_get_dsc_sink_cap(intel_dp);

	intel_dp_configure_mst(intel_dp);

	/*
	 * TODO: Reset link params when switching to MST mode, until MST
	 * supports link training fallback params.
	 */
	if (intel_dp->reset_link_params || intel_dp->is_mst) {
		/* Initial max link lane count */
		intel_dp->max_link_lane_count = intel_dp_max_common_lane_count(intel_dp);

		/* Initial max link rate */
		intel_dp->max_link_rate = intel_dp_max_common_rate(intel_dp);

		intel_dp->reset_link_params = false;
	}

	intel_dp_print_rates(intel_dp);

	if (intel_dp->is_mst) {
		/*
		 * If we are in MST mode then this connector
		 * won't appear connected or have anything
		 * with EDID on it
		 */
		status = connector_status_disconnected;
		goto out;
	}

	/*
	 * Some external monitors do not signal loss of link synchronization
	 * with an IRQ_HPD, so force a link status check.
	 */
	if (!intel_dp_is_edp(intel_dp)) {
		int ret;

		/* May return -EDEADLK; the probe helper will retry us */
		ret = intel_dp_retrain_link(encoder, ctx);
		if (ret)
			return ret;
	}

	/*
	 * Clearing NACK and defer counts to get their exact values
	 * while reading EDID which are required by Compliance tests
	 * 4.2.2.4 and 4.2.2.5
	 */
	intel_dp->aux.i2c_nack_count = 0;
	intel_dp->aux.i2c_defer_count = 0;

	intel_dp_set_edid(intel_dp);
	if (intel_dp_is_edp(intel_dp) ||
	    to_intel_connector(connector)->detect_edid)
		status = connector_status_connected;

	intel_dp_check_device_service_irq(intel_dp);

out:
	if (status != connector_status_connected && !intel_dp->is_mst)
		intel_dp_unset_edid(intel_dp);

	/*
	 * Make sure the refs for power wells enabled during detect are
	 * dropped to avoid a new detect cycle triggered by HPD polling.
	 */
	intel_display_power_flush_work(dev_priv);

	if (!intel_dp_is_edp(intel_dp))
		drm_dp_set_subconnector_property(connector,
						 status,
						 intel_dp->dpcd,
						 intel_dp->downstream_ports);
	return status;
}
4308
/*
 * Connector ->force hook: refresh the EDID for a connector the user has
 * forced to a given status, grabbing the AUX power domain manually since
 * the normal detect path isn't run.
 */
static void
intel_dp_force(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *intel_encoder = &dig_port->base;
	struct drm_i915_private *dev_priv = to_i915(intel_encoder->base.dev);
	enum intel_display_power_domain aux_domain =
		intel_aux_power_domain(dig_port);
	intel_wakeref_t wakeref;

	drm_dbg_kms(&dev_priv->drm, "[CONNECTOR:%d:%s]\n",
		    connector->base.id, connector->name);
	intel_dp_unset_edid(intel_dp);

	if (connector->status != connector_status_connected)
		return;

	wakeref = intel_display_power_get(dev_priv, aux_domain);

	intel_dp_set_edid(intel_dp);

	intel_display_power_put(dev_priv, aux_domain, wakeref);
}
4333
intel_dp_get_modes(struct drm_connector * connector)4334 static int intel_dp_get_modes(struct drm_connector *connector)
4335 {
4336 struct intel_connector *intel_connector = to_intel_connector(connector);
4337 struct edid *edid;
4338 int num_modes = 0;
4339
4340 edid = intel_connector->detect_edid;
4341 if (edid) {
4342 num_modes = intel_connector_update_modes(connector, edid);
4343
4344 if (intel_vrr_is_capable(connector))
4345 drm_connector_set_vrr_capable_property(connector,
4346 true);
4347 }
4348
4349 /* Also add fixed mode, which may or may not be present in EDID */
4350 if (intel_dp_is_edp(intel_attached_dp(intel_connector)) &&
4351 intel_connector->panel.fixed_mode) {
4352 struct drm_display_mode *mode;
4353
4354 mode = drm_mode_duplicate(connector->dev,
4355 intel_connector->panel.fixed_mode);
4356 if (mode) {
4357 drm_mode_probed_add(connector, mode);
4358 num_modes++;
4359 }
4360 }
4361
4362 if (num_modes)
4363 return num_modes;
4364
4365 if (!edid) {
4366 struct intel_dp *intel_dp = intel_attached_dp(intel_connector);
4367 struct drm_display_mode *mode;
4368
4369 mode = drm_dp_downstream_mode(connector->dev,
4370 intel_dp->dpcd,
4371 intel_dp->downstream_ports);
4372 if (mode) {
4373 drm_mode_probed_add(connector, mode);
4374 num_modes++;
4375 }
4376 }
4377
4378 return num_modes;
4379 }
4380
/*
 * Connector ->late_register hook: register the AUX channel and CEC with
 * userspace-visible infrastructure, and initialize the LSPCON (including
 * its HDR metadata property) when the VBT says one is present.
 */
static int
intel_dp_connector_register(struct drm_connector *connector)
{
	struct drm_i915_private *i915 = to_i915(connector->dev);
	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_lspcon *lspcon = &dig_port->lspcon;
	int ret;

	ret = intel_connector_register(connector);
	if (ret)
		return ret;

	drm_dbg_kms(&i915->drm, "registering %s bus for %s\n",
		    intel_dp->aux.name, connector->kdev->kobj.name);

	intel_dp->aux.dev = connector->kdev;
	ret = drm_dp_aux_register(&intel_dp->aux);
	if (!ret)
		drm_dp_cec_register_connector(&intel_dp->aux, connector);

	if (!intel_bios_is_lspcon_present(i915, dig_port->base.port))
		return ret;

	/*
	 * ToDo: Clean this up to handle lspcon init and resume more
	 * efficiently and streamlined.
	 */
	if (lspcon_init(dig_port)) {
		lspcon_detect_hdr_capability(lspcon);
		if (lspcon->hdr_supported)
			drm_object_attach_property(&connector->base,
						   connector->dev->mode_config.hdr_output_metadata_property,
						   0);
	}

	return ret;
}
4419
4420 static void
intel_dp_connector_unregister(struct drm_connector * connector)4421 intel_dp_connector_unregister(struct drm_connector *connector)
4422 {
4423 struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
4424
4425 drm_dp_cec_unregister_connector(&intel_dp->aux);
4426 drm_dp_aux_unregister(&intel_dp->aux);
4427 intel_connector_unregister(connector);
4428 }
4429
intel_dp_encoder_flush_work(struct drm_encoder * encoder)4430 void intel_dp_encoder_flush_work(struct drm_encoder *encoder)
4431 {
4432 struct intel_digital_port *dig_port = enc_to_dig_port(to_intel_encoder(encoder));
4433 struct intel_dp *intel_dp = &dig_port->dp;
4434
4435 intel_dp_mst_encoder_cleanup(dig_port);
4436
4437 intel_pps_vdd_off_sync(intel_dp);
4438
4439 intel_dp_aux_fini(intel_dp);
4440 }
4441
intel_dp_encoder_suspend(struct intel_encoder * intel_encoder)4442 void intel_dp_encoder_suspend(struct intel_encoder *intel_encoder)
4443 {
4444 struct intel_dp *intel_dp = enc_to_intel_dp(intel_encoder);
4445
4446 intel_pps_vdd_off_sync(intel_dp);
4447 }
4448
intel_dp_encoder_shutdown(struct intel_encoder * intel_encoder)4449 void intel_dp_encoder_shutdown(struct intel_encoder *intel_encoder)
4450 {
4451 struct intel_dp *intel_dp = enc_to_intel_dp(intel_encoder);
4452
4453 intel_pps_wait_power_cycle(intel_dp);
4454 }
4455
intel_modeset_tile_group(struct intel_atomic_state * state,int tile_group_id)4456 static int intel_modeset_tile_group(struct intel_atomic_state *state,
4457 int tile_group_id)
4458 {
4459 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
4460 struct drm_connector_list_iter conn_iter;
4461 struct drm_connector *connector;
4462 int ret = 0;
4463
4464 drm_connector_list_iter_begin(&dev_priv->drm, &conn_iter);
4465 drm_for_each_connector_iter(connector, &conn_iter) {
4466 struct drm_connector_state *conn_state;
4467 struct intel_crtc_state *crtc_state;
4468 struct intel_crtc *crtc;
4469
4470 if (!connector->has_tile ||
4471 connector->tile_group->id != tile_group_id)
4472 continue;
4473
4474 conn_state = drm_atomic_get_connector_state(&state->base,
4475 connector);
4476 if (IS_ERR(conn_state)) {
4477 ret = PTR_ERR(conn_state);
4478 break;
4479 }
4480
4481 crtc = to_intel_crtc(conn_state->crtc);
4482
4483 if (!crtc)
4484 continue;
4485
4486 crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
4487 crtc_state->uapi.mode_changed = true;
4488
4489 ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
4490 if (ret)
4491 break;
4492 }
4493 drm_connector_list_iter_end(&conn_iter);
4494
4495 return ret;
4496 }
4497
intel_modeset_affected_transcoders(struct intel_atomic_state * state,u8 transcoders)4498 static int intel_modeset_affected_transcoders(struct intel_atomic_state *state, u8 transcoders)
4499 {
4500 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
4501 struct intel_crtc *crtc;
4502
4503 if (transcoders == 0)
4504 return 0;
4505
4506 for_each_intel_crtc(&dev_priv->drm, crtc) {
4507 struct intel_crtc_state *crtc_state;
4508 int ret;
4509
4510 crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
4511 if (IS_ERR(crtc_state))
4512 return PTR_ERR(crtc_state);
4513
4514 if (!crtc_state->hw.enable)
4515 continue;
4516
4517 if (!(transcoders & BIT(crtc_state->cpu_transcoder)))
4518 continue;
4519
4520 crtc_state->uapi.mode_changed = true;
4521
4522 ret = drm_atomic_add_affected_connectors(&state->base, &crtc->base);
4523 if (ret)
4524 return ret;
4525
4526 ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
4527 if (ret)
4528 return ret;
4529
4530 transcoders &= ~BIT(crtc_state->cpu_transcoder);
4531 }
4532
4533 drm_WARN_ON(&dev_priv->drm, transcoders != 0);
4534
4535 return 0;
4536 }
4537
intel_modeset_synced_crtcs(struct intel_atomic_state * state,struct drm_connector * connector)4538 static int intel_modeset_synced_crtcs(struct intel_atomic_state *state,
4539 struct drm_connector *connector)
4540 {
4541 const struct drm_connector_state *old_conn_state =
4542 drm_atomic_get_old_connector_state(&state->base, connector);
4543 const struct intel_crtc_state *old_crtc_state;
4544 struct intel_crtc *crtc;
4545 u8 transcoders;
4546
4547 crtc = to_intel_crtc(old_conn_state->crtc);
4548 if (!crtc)
4549 return 0;
4550
4551 old_crtc_state = intel_atomic_get_old_crtc_state(state, crtc);
4552
4553 if (!old_crtc_state->hw.active)
4554 return 0;
4555
4556 transcoders = old_crtc_state->sync_mode_slaves_mask;
4557 if (old_crtc_state->master_transcoder != INVALID_TRANSCODER)
4558 transcoders |= BIT(old_crtc_state->master_transcoder);
4559
4560 return intel_modeset_affected_transcoders(state,
4561 transcoders);
4562 }
4563
intel_dp_connector_atomic_check(struct drm_connector * conn,struct drm_atomic_state * _state)4564 static int intel_dp_connector_atomic_check(struct drm_connector *conn,
4565 struct drm_atomic_state *_state)
4566 {
4567 struct drm_i915_private *dev_priv = to_i915(conn->dev);
4568 struct intel_atomic_state *state = to_intel_atomic_state(_state);
4569 int ret;
4570
4571 ret = intel_digital_connector_atomic_check(conn, &state->base);
4572 if (ret)
4573 return ret;
4574
4575 /*
4576 * We don't enable port sync on BDW due to missing w/as and
4577 * due to not having adjusted the modeset sequence appropriately.
4578 */
4579 if (DISPLAY_VER(dev_priv) < 9)
4580 return 0;
4581
4582 if (!intel_connector_needs_modeset(state, conn))
4583 return 0;
4584
4585 if (conn->has_tile) {
4586 ret = intel_modeset_tile_group(state, conn->tile_group->id);
4587 if (ret)
4588 return ret;
4589 }
4590
4591 return intel_modeset_synced_crtcs(state, conn);
4592 }
4593
/* DP connector vfuncs; detection is context-aware and lives in the
 * helper funcs' .detect_ctx instead of .detect here. */
static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.force = intel_dp_force,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.late_register = intel_dp_connector_register,
	.early_unregister = intel_dp_connector_unregister,
	.destroy = intel_connector_destroy,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
};
4605
/* Probe/atomic helper vfuncs for DP connectors. */
static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.detect_ctx = intel_dp_detect,
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.atomic_check = intel_dp_connector_atomic_check,
};
4612
/*
 * HPD pulse handler for DP ports. Returns IRQ_HANDLED when the pulse was
 * fully dealt with here, IRQ_NONE when the caller should schedule a full
 * (long-HPD style) detect cycle.
 */
enum irqreturn
intel_dp_hpd_pulse(struct intel_digital_port *dig_port, bool long_hpd)
{
	struct drm_i915_private *i915 = to_i915(dig_port->base.base.dev);
	struct intel_dp *intel_dp = &dig_port->dp;

	if (dig_port->base.type == INTEL_OUTPUT_EDP &&
	    (long_hpd || !intel_pps_have_power(intel_dp))) {
		/*
		 * vdd off can generate a long/short pulse on eDP which
		 * would require vdd on to handle it, and thus we
		 * would end up in an endless cycle of
		 * "vdd off -> long/short hpd -> vdd on -> detect -> vdd off -> ..."
		 */
		drm_dbg_kms(&i915->drm,
			    "ignoring %s hpd on eDP [ENCODER:%d:%s]\n",
			    long_hpd ? "long" : "short",
			    dig_port->base.base.base.id,
			    dig_port->base.base.name);
		return IRQ_HANDLED;
	}

	drm_dbg_kms(&i915->drm, "got hpd irq on [ENCODER:%d:%s] - %s\n",
		    dig_port->base.base.base.id,
		    dig_port->base.base.name,
		    long_hpd ? "long" : "short");

	/* Long pulse: sink (re)plugged, start link params from scratch */
	if (long_hpd) {
		intel_dp->reset_link_params = true;
		return IRQ_NONE;
	}

	if (intel_dp->is_mst) {
		if (!intel_dp_check_mst_status(intel_dp))
			return IRQ_NONE;
	} else if (!intel_dp_short_pulse(intel_dp)) {
		return IRQ_NONE;
	}

	return IRQ_HANDLED;
}
4654
4655 /* check the VBT to see whether the eDP is on another port */
intel_dp_is_port_edp(struct drm_i915_private * dev_priv,enum port port)4656 bool intel_dp_is_port_edp(struct drm_i915_private *dev_priv, enum port port)
4657 {
4658 /*
4659 * eDP not supported on g4x. so bail out early just
4660 * for a bit extra safety in case the VBT is bonkers.
4661 */
4662 if (DISPLAY_VER(dev_priv) < 5)
4663 return false;
4664
4665 if (DISPLAY_VER(dev_priv) < 9 && port == PORT_A)
4666 return true;
4667
4668 return intel_bios_is_port_edp(dev_priv, port);
4669 }
4670
/*
 * Attach all drm properties this connector supports. Note that the order
 * of attachment is userspace-visible, so it is preserved as-is.
 */
static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
	struct drm_i915_private *dev_priv = to_i915(connector->dev);
	enum port port = dp_to_dig_port(intel_dp)->base.port;

	/* The subconnector property only makes sense for external DP. */
	if (!intel_dp_is_edp(intel_dp))
		drm_connector_attach_dp_subconnector_property(connector);

	if (!IS_G4X(dev_priv) && port != PORT_A)
		intel_attach_force_audio_property(connector);

	intel_attach_broadcast_rgb_property(connector);
	/* GMCH platforms top out at 10 bpc, newer ones at 12 bpc. */
	if (HAS_GMCH(dev_priv))
		drm_connector_attach_max_bpc_property(connector, 6, 10);
	else if (DISPLAY_VER(dev_priv) >= 5)
		drm_connector_attach_max_bpc_property(connector, 6, 12);

	/* Register HDMI colorspace for case of lspcon */
	if (intel_bios_is_lspcon_present(dev_priv, port)) {
		drm_connector_attach_content_type_property(connector);
		intel_attach_hdmi_colorspace_property(connector);
	} else {
		intel_attach_dp_colorspace_property(connector);
	}

	if (IS_GEMINILAKE(dev_priv) || DISPLAY_VER(dev_priv) >= 11)
		drm_object_attach_property(&connector->base,
					   connector->dev->mode_config.hdr_output_metadata_property,
					   0);

	if (intel_dp_is_edp(intel_dp)) {
		u32 allowed_scalers;

		allowed_scalers = BIT(DRM_MODE_SCALE_ASPECT) | BIT(DRM_MODE_SCALE_FULLSCREEN);
		if (!HAS_GMCH(dev_priv))
			allowed_scalers |= BIT(DRM_MODE_SCALE_CENTER);

		drm_connector_attach_scaling_mode_property(connector, allowed_scalers);

		/* Default the panel fitter to aspect-preserving scaling. */
		connector->state->scaling_mode = DRM_MODE_SCALE_ASPECT;

	}

	if (HAS_VRR(dev_priv))
		drm_connector_attach_vrr_capable_property(connector);
}
4718
4719 /**
4720 * intel_dp_set_drrs_state - program registers for RR switch to take effect
4721 * @dev_priv: i915 device
4722 * @crtc_state: a pointer to the active intel_crtc_state
4723 * @refresh_rate: RR to be programmed
4724 *
4725 * This function gets called when refresh rate (RR) has to be changed from
4726 * one frequency to another. Switches can be between high and low RR
4727 * supported by the panel or to any other RR based on media playback (in
4728 * this case, RR value needs to be passed from user space).
4729 *
 * The caller of this function needs to hold dev_priv->drrs.mutex.
4731 */
static void intel_dp_set_drrs_state(struct drm_i915_private *dev_priv,
				    const struct intel_crtc_state *crtc_state,
				    int refresh_rate)
{
	struct intel_dp *intel_dp = dev_priv->drrs.dp;
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	/* Default to the high (fixed-mode) refresh rate. */
	enum drrs_refresh_rate_type index = DRRS_HIGH_RR;

	if (refresh_rate <= 0) {
		drm_dbg_kms(&dev_priv->drm,
			    "Refresh rate should be positive non-zero.\n");
		return;
	}

	if (intel_dp == NULL) {
		drm_dbg_kms(&dev_priv->drm, "DRRS not supported.\n");
		return;
	}

	if (!crtc) {
		drm_dbg_kms(&dev_priv->drm,
			    "DRRS: intel_crtc not initialized\n");
		return;
	}

	if (dev_priv->drrs.type < SEAMLESS_DRRS_SUPPORT) {
		drm_dbg_kms(&dev_priv->drm, "Only Seamless DRRS supported.\n");
		return;
	}

	/* Requesting exactly the panel's downclock rate selects low RR. */
	if (drm_mode_vrefresh(intel_dp->attached_connector->panel.downclock_mode) ==
	    refresh_rate)
		index = DRRS_LOW_RR;

	if (index == dev_priv->drrs.refresh_rate_type) {
		drm_dbg_kms(&dev_priv->drm,
			    "DRRS requested for previously set RR...ignoring\n");
		return;
	}

	if (!crtc_state->hw.active) {
		drm_dbg_kms(&dev_priv->drm,
			    "eDP encoder disabled. CRTC not Active\n");
		return;
	}

	if (DISPLAY_VER(dev_priv) >= 8 && !IS_CHERRYVIEW(dev_priv)) {
		/* Gen8+ (excluding CHV): switch RR via the M/N divider sets. */
		switch (index) {
		case DRRS_HIGH_RR:
			intel_dp_set_m_n(crtc_state, M1_N1);
			break;
		case DRRS_LOW_RR:
			intel_dp_set_m_n(crtc_state, M2_N2);
			break;
		case DRRS_MAX_RR:
		default:
			drm_err(&dev_priv->drm,
				"Unsupported refreshrate type\n");
		}
	} else if (DISPLAY_VER(dev_priv) > 6) {
		/* Gen7 and CHV: toggle the RR switch bit in PIPECONF instead. */
		i915_reg_t reg = PIPECONF(crtc_state->cpu_transcoder);
		u32 val;

		val = intel_de_read(dev_priv, reg);
		if (index > DRRS_HIGH_RR) {
			/* VLV/CHV use a different bit position for the switch. */
			if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
				val |= PIPECONF_EDP_RR_MODE_SWITCH_VLV;
			else
				val |= PIPECONF_EDP_RR_MODE_SWITCH;
		} else {
			if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
				val &= ~PIPECONF_EDP_RR_MODE_SWITCH_VLV;
			else
				val &= ~PIPECONF_EDP_RR_MODE_SWITCH;
		}
		intel_de_write(dev_priv, reg, val);
	}

	dev_priv->drrs.refresh_rate_type = index;

	drm_dbg_kms(&dev_priv->drm, "eDP Refresh Rate set to : %dHz\n",
		    refresh_rate);
}
4815
4816 static void
intel_edp_drrs_enable_locked(struct intel_dp * intel_dp)4817 intel_edp_drrs_enable_locked(struct intel_dp *intel_dp)
4818 {
4819 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
4820
4821 dev_priv->drrs.busy_frontbuffer_bits = 0;
4822 dev_priv->drrs.dp = intel_dp;
4823 }
4824
4825 /**
4826 * intel_edp_drrs_enable - init drrs struct if supported
4827 * @intel_dp: DP struct
4828 * @crtc_state: A pointer to the active crtc state.
4829 *
4830 * Initializes frontbuffer_bits and drrs.dp
4831 */
intel_edp_drrs_enable(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)4832 void intel_edp_drrs_enable(struct intel_dp *intel_dp,
4833 const struct intel_crtc_state *crtc_state)
4834 {
4835 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
4836
4837 if (!crtc_state->has_drrs)
4838 return;
4839
4840 drm_dbg_kms(&dev_priv->drm, "Enabling DRRS\n");
4841
4842 mutex_lock(&dev_priv->drrs.mutex);
4843
4844 if (dev_priv->drrs.dp) {
4845 drm_warn(&dev_priv->drm, "DRRS already enabled\n");
4846 goto unlock;
4847 }
4848
4849 intel_edp_drrs_enable_locked(intel_dp);
4850
4851 unlock:
4852 mutex_unlock(&dev_priv->drrs.mutex);
4853 }
4854
4855 static void
intel_edp_drrs_disable_locked(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)4856 intel_edp_drrs_disable_locked(struct intel_dp *intel_dp,
4857 const struct intel_crtc_state *crtc_state)
4858 {
4859 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
4860
4861 if (dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR) {
4862 int refresh;
4863
4864 refresh = drm_mode_vrefresh(intel_dp->attached_connector->panel.fixed_mode);
4865 intel_dp_set_drrs_state(dev_priv, crtc_state, refresh);
4866 }
4867
4868 dev_priv->drrs.dp = NULL;
4869 }
4870
4871 /**
4872 * intel_edp_drrs_disable - Disable DRRS
4873 * @intel_dp: DP struct
4874 * @old_crtc_state: Pointer to old crtc_state.
4875 *
4876 */
intel_edp_drrs_disable(struct intel_dp * intel_dp,const struct intel_crtc_state * old_crtc_state)4877 void intel_edp_drrs_disable(struct intel_dp *intel_dp,
4878 const struct intel_crtc_state *old_crtc_state)
4879 {
4880 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
4881
4882 if (!old_crtc_state->has_drrs)
4883 return;
4884
4885 mutex_lock(&dev_priv->drrs.mutex);
4886 if (!dev_priv->drrs.dp) {
4887 mutex_unlock(&dev_priv->drrs.mutex);
4888 return;
4889 }
4890
4891 intel_edp_drrs_disable_locked(intel_dp, old_crtc_state);
4892 mutex_unlock(&dev_priv->drrs.mutex);
4893
4894 cancel_delayed_work_sync(&dev_priv->drrs.work);
4895 }
4896
4897 /**
4898 * intel_edp_drrs_update - Update DRRS state
4899 * @intel_dp: Intel DP
4900 * @crtc_state: new CRTC state
4901 *
4902 * This function will update DRRS states, disabling or enabling DRRS when
4903 * executing fastsets. For full modeset, intel_edp_drrs_disable() and
4904 * intel_edp_drrs_enable() should be called instead.
4905 */
4906 void
intel_edp_drrs_update(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)4907 intel_edp_drrs_update(struct intel_dp *intel_dp,
4908 const struct intel_crtc_state *crtc_state)
4909 {
4910 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
4911
4912 if (dev_priv->drrs.type != SEAMLESS_DRRS_SUPPORT)
4913 return;
4914
4915 mutex_lock(&dev_priv->drrs.mutex);
4916
4917 /* New state matches current one? */
4918 if (crtc_state->has_drrs == !!dev_priv->drrs.dp)
4919 goto unlock;
4920
4921 if (crtc_state->has_drrs)
4922 intel_edp_drrs_enable_locked(intel_dp);
4923 else
4924 intel_edp_drrs_disable_locked(intel_dp, crtc_state);
4925
4926 unlock:
4927 mutex_unlock(&dev_priv->drrs.mutex);
4928 }
4929
/* Delayed work: drop to the low refresh rate after a period of idleness. */
static void intel_edp_drrs_downclock_work(struct work_struct *work)
{
	struct drm_i915_private *dev_priv =
		container_of(work, typeof(*dev_priv), drrs.work.work);
	struct intel_dp *intel_dp;

	mutex_lock(&dev_priv->drrs.mutex);

	intel_dp = dev_priv->drrs.dp;

	/* DRRS may have been disabled while this work was queued. */
	if (!intel_dp)
		goto unlock;

	/*
	 * The delayed work can race with an invalidate hence we need to
	 * recheck.
	 */

	if (dev_priv->drrs.busy_frontbuffer_bits)
		goto unlock;

	/* Screen is idle: switch to the panel's downclock refresh rate. */
	if (dev_priv->drrs.refresh_rate_type != DRRS_LOW_RR) {
		struct drm_crtc *crtc = dp_to_dig_port(intel_dp)->base.base.crtc;

		intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
					drm_mode_vrefresh(intel_dp->attached_connector->panel.downclock_mode));
	}

unlock:
	mutex_unlock(&dev_priv->drrs.mutex);
}
4961
4962 /**
4963 * intel_edp_drrs_invalidate - Disable Idleness DRRS
4964 * @dev_priv: i915 device
4965 * @frontbuffer_bits: frontbuffer plane tracking bits
4966 *
 * This function gets called every time rendering on the given planes starts.
4968 * Hence DRRS needs to be Upclocked, i.e. (LOW_RR -> HIGH_RR).
4969 *
4970 * Dirty frontbuffers relevant to DRRS are tracked in busy_frontbuffer_bits.
4971 */
intel_edp_drrs_invalidate(struct drm_i915_private * dev_priv,unsigned int frontbuffer_bits)4972 void intel_edp_drrs_invalidate(struct drm_i915_private *dev_priv,
4973 unsigned int frontbuffer_bits)
4974 {
4975 struct intel_dp *intel_dp;
4976 struct drm_crtc *crtc;
4977 enum pipe pipe;
4978
4979 if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
4980 return;
4981
4982 cancel_delayed_work(&dev_priv->drrs.work);
4983
4984 mutex_lock(&dev_priv->drrs.mutex);
4985
4986 intel_dp = dev_priv->drrs.dp;
4987 if (!intel_dp) {
4988 mutex_unlock(&dev_priv->drrs.mutex);
4989 return;
4990 }
4991
4992 crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
4993 pipe = to_intel_crtc(crtc)->pipe;
4994
4995 frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
4996 dev_priv->drrs.busy_frontbuffer_bits |= frontbuffer_bits;
4997
4998 /* invalidate means busy screen hence upclock */
4999 if (frontbuffer_bits && dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
5000 intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
5001 drm_mode_vrefresh(intel_dp->attached_connector->panel.fixed_mode));
5002
5003 mutex_unlock(&dev_priv->drrs.mutex);
5004 }
5005
5006 /**
5007 * intel_edp_drrs_flush - Restart Idleness DRRS
5008 * @dev_priv: i915 device
5009 * @frontbuffer_bits: frontbuffer plane tracking bits
5010 *
5011 * This function gets called every time rendering on the given planes has
5012 * completed or flip on a crtc is completed. So DRRS should be upclocked
5013 * (LOW_RR -> HIGH_RR). And also Idleness detection should be started again,
5014 * if no other planes are dirty.
5015 *
5016 * Dirty frontbuffers relevant to DRRS are tracked in busy_frontbuffer_bits.
5017 */
void intel_edp_drrs_flush(struct drm_i915_private *dev_priv,
			  unsigned int frontbuffer_bits)
{
	struct intel_dp *intel_dp;
	struct drm_crtc *crtc;
	enum pipe pipe;

	if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
		return;

	/* Kill any pending downclock; it will be rescheduled below if idle. */
	cancel_delayed_work(&dev_priv->drrs.work);

	mutex_lock(&dev_priv->drrs.mutex);

	intel_dp = dev_priv->drrs.dp;
	if (!intel_dp) {
		mutex_unlock(&dev_priv->drrs.mutex);
		return;
	}

	crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
	pipe = to_intel_crtc(crtc)->pipe;

	/* Only bits for the DRRS pipe matter; clear them from the busy set. */
	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
	dev_priv->drrs.busy_frontbuffer_bits &= ~frontbuffer_bits;

	/* flush means busy screen hence upclock */
	if (frontbuffer_bits && dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
		intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
					drm_mode_vrefresh(intel_dp->attached_connector->panel.fixed_mode));

	/*
	 * flush also means no more activity hence schedule downclock, if all
	 * other fbs are quiescent too
	 */
	if (!dev_priv->drrs.busy_frontbuffer_bits)
		schedule_delayed_work(&dev_priv->drrs.work,
				      msecs_to_jiffies(1000));
	mutex_unlock(&dev_priv->drrs.mutex);
}
5058
5059 /**
5060 * DOC: Display Refresh Rate Switching (DRRS)
5061 *
5062 * Display Refresh Rate Switching (DRRS) is a power conservation feature
 * which enables switching between low and high refresh rates,
5064 * dynamically, based on the usage scenario. This feature is applicable
5065 * for internal panels.
5066 *
5067 * Indication that the panel supports DRRS is given by the panel EDID, which
5068 * would list multiple refresh rates for one resolution.
5069 *
5070 * DRRS is of 2 types - static and seamless.
5071 * Static DRRS involves changing refresh rate (RR) by doing a full modeset
5072 * (may appear as a blink on screen) and is used in dock-undock scenario.
5073 * Seamless DRRS involves changing RR without any visual effect to the user
5074 * and can be used during normal system usage. This is done by programming
5075 * certain registers.
5076 *
5077 * Support for static/seamless DRRS may be indicated in the VBT based on
5078 * inputs from the panel spec.
5079 *
5080 * DRRS saves power by switching to low RR based on usage scenarios.
5081 *
5082 * The implementation is based on frontbuffer tracking implementation. When
5083 * there is a disturbance on the screen triggered by user activity or a periodic
5084 * system activity, DRRS is disabled (RR is changed to high RR). When there is
5085 * no movement on screen, after a timeout of 1 second, a switch to low RR is
5086 * made.
5087 *
5088 * For integration with frontbuffer tracking code, intel_edp_drrs_invalidate()
5089 * and intel_edp_drrs_flush() are called.
5090 *
5091 * DRRS can be further extended to support other internal panels and also
5092 * the scenario of video playback wherein RR is set based on the rate
5093 * requested by userspace.
5094 */
5095
5096 /**
5097 * intel_dp_drrs_init - Init basic DRRS work and mutex.
5098 * @connector: eDP connector
5099 * @fixed_mode: preferred mode of panel
5100 *
5101 * This function is called only once at driver load to initialize basic
5102 * DRRS stuff.
5103 *
5104 * Returns:
5105 * Downclock mode if panel supports it, else return NULL.
5106 * DRRS support is determined by the presence of downclock mode (apart
5107 * from VBT setting).
5108 */
static struct drm_display_mode *
intel_dp_drrs_init(struct intel_connector *connector,
		   struct drm_display_mode *fixed_mode)
{
	struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
	struct drm_display_mode *downclock_mode = NULL;

	/* Work/mutex are set up unconditionally so teardown is always safe. */
	INIT_DELAYED_WORK(&dev_priv->drrs.work, intel_edp_drrs_downclock_work);
	mutex_init(&dev_priv->drrs.mutex);

	/* Seamless DRRS requires display version 7 or newer. */
	if (DISPLAY_VER(dev_priv) <= 6) {
		drm_dbg_kms(&dev_priv->drm,
			    "DRRS supported for Gen7 and above\n");
		return NULL;
	}

	if (dev_priv->vbt.drrs_type != SEAMLESS_DRRS_SUPPORT) {
		drm_dbg_kms(&dev_priv->drm, "VBT doesn't support DRRS\n");
		return NULL;
	}

	/* A second, lower refresh rate in the EDID is what enables DRRS. */
	downclock_mode = intel_panel_edid_downclock_mode(connector, fixed_mode);
	if (!downclock_mode) {
		drm_dbg_kms(&dev_priv->drm,
			    "Downclock mode is not found. DRRS not supported\n");
		return NULL;
	}

	dev_priv->drrs.type = dev_priv->vbt.drrs_type;

	dev_priv->drrs.refresh_rate_type = DRRS_HIGH_RR;
	drm_dbg_kms(&dev_priv->drm,
		    "seamless DRRS supported for eDP panel.\n");
	return downclock_mode;
}
5144
/*
 * eDP-specific connector setup: power sequencer, cached DPCD/EDID,
 * fixed/downclock modes, backlight and panel orientation.
 *
 * Returns true on success (or for non-eDP ports, where it is a no-op),
 * false when the eDP panel could not be initialized.
 */
static bool intel_edp_init_connector(struct intel_dp *intel_dp,
				     struct intel_connector *intel_connector)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct drm_device *dev = &dev_priv->drm;
	struct drm_connector *connector = &intel_connector->base;
	struct drm_display_mode *fixed_mode = NULL;
	struct drm_display_mode *downclock_mode = NULL;
	bool has_dpcd;
	enum pipe pipe = INVALID_PIPE;
	struct edid *edid;

	/* Nothing to do for external DP; report success. */
	if (!intel_dp_is_edp(intel_dp))
		return true;

	/*
	 * On IBX/CPT we may get here with LVDS already registered. Since the
	 * driver uses the only internal power sequencer available for both
	 * eDP and LVDS bail out early in this case to prevent interfering
	 * with an already powered-on LVDS power sequencer.
	 */
	if (intel_get_lvds_encoder(dev_priv)) {
		drm_WARN_ON(dev,
			    !(HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv)));
		drm_info(&dev_priv->drm,
			 "LVDS was detected, not registering eDP\n");

		return false;
	}

	intel_pps_init(intel_dp);

	/* Cache DPCD and EDID for edp. */
	has_dpcd = intel_edp_init_dpcd(intel_dp);

	if (!has_dpcd) {
		/* if this fails, presume the device is a ghost */
		drm_info(&dev_priv->drm,
			 "failed to retrieve link info, disabling eDP\n");
		goto out_vdd_off;
	}

	mutex_lock(&dev->mode_config.mutex);
	edid = drm_get_edid(connector, &intel_dp->aux.ddc);
	if (edid) {
		if (drm_add_edid_modes(connector, edid)) {
			drm_connector_update_edid_property(connector, edid);
		} else {
			/* EDID read ok but yielded no usable modes. */
			kfree(edid);
			edid = ERR_PTR(-EINVAL);
		}
	} else {
		edid = ERR_PTR(-ENOENT);
	}
	/* The cached value is either a valid EDID or an ERR_PTR sentinel. */
	intel_connector->edid = edid;

	fixed_mode = intel_panel_edid_fixed_mode(intel_connector);
	if (fixed_mode)
		downclock_mode = intel_dp_drrs_init(intel_connector, fixed_mode);

	/* multiply the mode clock and horizontal timings for MSO */
	intel_edp_mso_mode_fixup(intel_connector, fixed_mode);
	intel_edp_mso_mode_fixup(intel_connector, downclock_mode);

	/* fallback to VBT if available for eDP */
	if (!fixed_mode)
		fixed_mode = intel_panel_vbt_fixed_mode(intel_connector);
	mutex_unlock(&dev->mode_config.mutex);

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		/*
		 * Figure out the current pipe for the initial backlight setup.
		 * If the current pipe isn't valid, try the PPS pipe, and if that
		 * fails just assume pipe A.
		 */
		pipe = vlv_active_pipe(intel_dp);

		if (pipe != PIPE_A && pipe != PIPE_B)
			pipe = intel_dp->pps.pps_pipe;

		if (pipe != PIPE_A && pipe != PIPE_B)
			pipe = PIPE_A;

		drm_dbg_kms(&dev_priv->drm,
			    "using pipe %c for initial backlight setup\n",
			    pipe_name(pipe));
	}

	intel_panel_init(&intel_connector->panel, fixed_mode, downclock_mode);
	if (!(dev_priv->quirks & QUIRK_NO_PPS_BACKLIGHT_POWER_HOOK))
		intel_connector->panel.backlight.power = intel_pps_backlight_power;
	intel_panel_setup_backlight(connector, pipe);

	if (fixed_mode) {
		drm_connector_set_panel_orientation_with_quirk(connector,
							       dev_priv->vbt.orientation,
							       fixed_mode->hdisplay, fixed_mode->vdisplay);
	}

	return true;

out_vdd_off:
	intel_pps_vdd_off_sync(intel_dp);

	return false;
}
5251
intel_dp_modeset_retry_work_fn(struct work_struct * work)5252 static void intel_dp_modeset_retry_work_fn(struct work_struct *work)
5253 {
5254 struct intel_connector *intel_connector;
5255 struct drm_connector *connector;
5256
5257 intel_connector = container_of(work, typeof(*intel_connector),
5258 modeset_retry_work);
5259 connector = &intel_connector->base;
5260 DRM_DEBUG_KMS("[CONNECTOR:%d:%s]\n", connector->base.id,
5261 connector->name);
5262
5263 /* Grab the locks before changing connector property*/
5264 mutex_lock(&connector->dev->mode_config.mutex);
5265 /* Set connector link status to BAD and send a Uevent to notify
5266 * userspace to do a modeset.
5267 */
5268 drm_connector_set_link_status_property(connector,
5269 DRM_MODE_LINK_STATUS_BAD);
5270 mutex_unlock(&connector->dev->mode_config.mutex);
5271 /* Send Hotplug uevent so userspace can reprobe */
5272 drm_kms_helper_hotplug_event(connector->dev);
5273 }
5274
/*
 * Create and wire up the DP/eDP connector for @dig_port: hw state capture,
 * connector registration, AUX/MST/eDP init, properties, HDCP and PSR.
 *
 * Returns true on success, false if the connector could not be created.
 */
bool
intel_dp_init_connector(struct intel_digital_port *dig_port,
			struct intel_connector *intel_connector)
{
	struct drm_connector *connector = &intel_connector->base;
	struct intel_dp *intel_dp = &dig_port->dp;
	struct intel_encoder *intel_encoder = &dig_port->base;
	struct drm_device *dev = intel_encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum port port = intel_encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	int type;

	/* Initialize the work for modeset in case of link train failure */
	INIT_WORK(&intel_connector->modeset_retry_work,
		  intel_dp_modeset_retry_work_fn);

	if (drm_WARN(dev, dig_port->max_lanes < 1,
		     "Not enough lanes (%d) for DP on [ENCODER:%d:%s]\n",
		     dig_port->max_lanes, intel_encoder->base.base.id,
		     intel_encoder->base.name))
		return false;

	intel_dp_set_source_rates(intel_dp);

	intel_dp->reset_link_params = true;
	intel_dp->pps.pps_pipe = INVALID_PIPE;
	intel_dp->pps.active_pipe = INVALID_PIPE;

	/* Preserve the current hw state. */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);
	intel_dp->attached_connector = intel_connector;

	if (intel_dp_is_port_edp(dev_priv, port)) {
		/*
		 * Currently we don't support eDP on TypeC ports, although in
		 * theory it could work on TypeC legacy ports.
		 */
		drm_WARN_ON(dev, intel_phy_is_tc(dev_priv, phy));
		type = DRM_MODE_CONNECTOR_eDP;
	} else {
		type = DRM_MODE_CONNECTOR_DisplayPort;
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);

	/*
	 * For eDP we always set the encoder type to INTEL_OUTPUT_EDP, but
	 * for DP the encoder type can be set by the caller to
	 * INTEL_OUTPUT_UNKNOWN for DDI, so don't rewrite it.
	 */
	if (type == DRM_MODE_CONNECTOR_eDP)
		intel_encoder->type = INTEL_OUTPUT_EDP;

	/* eDP only on port B and/or C on vlv/chv */
	if (drm_WARN_ON(dev, (IS_VALLEYVIEW(dev_priv) ||
			      IS_CHERRYVIEW(dev_priv)) &&
			intel_dp_is_edp(intel_dp) &&
			port != PORT_B && port != PORT_C))
		return false;

	drm_dbg_kms(&dev_priv->drm,
		    "Adding %s connector on [ENCODER:%d:%s]\n",
		    type == DRM_MODE_CONNECTOR_eDP ? "eDP" : "DP",
		    intel_encoder->base.base.id, intel_encoder->base.name);

	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	if (!HAS_GMCH(dev_priv))
		connector->interlace_allowed = true;
	connector->doublescan_allowed = 0;

	intel_connector->polled = DRM_CONNECTOR_POLL_HPD;

	intel_dp_aux_init(intel_dp);

	intel_connector_attach_encoder(intel_connector, intel_encoder);

	if (HAS_DDI(dev_priv))
		intel_connector->get_hw_state = intel_ddi_connector_get_hw_state;
	else
		intel_connector->get_hw_state = intel_connector_get_hw_state;

	/* init MST on ports that can support it */
	intel_dp_mst_encoder_init(dig_port,
				  intel_connector->base.base.id);

	/* eDP panel setup; on failure unwind the AUX and MST init above. */
	if (!intel_edp_init_connector(intel_dp, intel_connector)) {
		intel_dp_aux_fini(intel_dp);
		intel_dp_mst_encoder_cleanup(dig_port);
		goto fail;
	}

	intel_dp_add_properties(intel_dp, connector);

	if (is_hdcp_supported(dev_priv, port) && !intel_dp_is_edp(intel_dp)) {
		/* HDCP init failure is non-fatal; the connector still works. */
		int ret = intel_dp_hdcp_init(dig_port, intel_connector);
		if (ret)
			drm_dbg_kms(&dev_priv->drm,
				    "HDCP init failed, skipping.\n");
	}

	/* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
	 * 0xd. Failure to do so will result in spurious interrupts being
	 * generated on the port when a cable is not attached.
	 */
	if (IS_G45(dev_priv)) {
		u32 temp = intel_de_read(dev_priv, PEG_BAND_GAP_DATA);
		intel_de_write(dev_priv, PEG_BAND_GAP_DATA,
			       (temp & ~0xf) | 0xd);
	}

	intel_dp->frl.is_trained = false;
	intel_dp->frl.trained_rate_gbps = 0;

	intel_psr_init(intel_dp);

	return true;

fail:
	drm_connector_cleanup(connector);

	return false;
}
5401
intel_dp_mst_suspend(struct drm_i915_private * dev_priv)5402 void intel_dp_mst_suspend(struct drm_i915_private *dev_priv)
5403 {
5404 struct intel_encoder *encoder;
5405
5406 if (!HAS_DISPLAY(dev_priv))
5407 return;
5408
5409 for_each_intel_encoder(&dev_priv->drm, encoder) {
5410 struct intel_dp *intel_dp;
5411
5412 if (encoder->type != INTEL_OUTPUT_DDI)
5413 continue;
5414
5415 intel_dp = enc_to_intel_dp(encoder);
5416
5417 if (!intel_dp->can_mst)
5418 continue;
5419
5420 if (intel_dp->is_mst)
5421 drm_dp_mst_topology_mgr_suspend(&intel_dp->mst_mgr);
5422 }
5423 }
5424
intel_dp_mst_resume(struct drm_i915_private * dev_priv)5425 void intel_dp_mst_resume(struct drm_i915_private *dev_priv)
5426 {
5427 struct intel_encoder *encoder;
5428
5429 if (!HAS_DISPLAY(dev_priv))
5430 return;
5431
5432 for_each_intel_encoder(&dev_priv->drm, encoder) {
5433 struct intel_dp *intel_dp;
5434 int ret;
5435
5436 if (encoder->type != INTEL_OUTPUT_DDI)
5437 continue;
5438
5439 intel_dp = enc_to_intel_dp(encoder);
5440
5441 if (!intel_dp->can_mst)
5442 continue;
5443
5444 ret = drm_dp_mst_topology_mgr_resume(&intel_dp->mst_mgr,
5445 true);
5446 if (ret) {
5447 intel_dp->is_mst = false;
5448 drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
5449 false);
5450 }
5451 }
5452 }
5453