1 /*
2 * Copyright © 2006-2017 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
22 */
23
24 #include "intel_cdclk.h"
25 #include "intel_display_types.h"
26 #include "intel_sideband.h"
27
28 /**
29 * DOC: CDCLK / RAWCLK
30 *
31 * The display engine uses several different clocks to do its work. There
32 * are two main clocks involved that aren't directly related to the actual
33 * pixel clock or any symbol/bit clock of the actual output port. These
34 * are the core display clock (CDCLK) and RAWCLK.
35 *
36 * CDCLK clocks most of the display pipe logic, and thus its frequency
37 * must be high enough to support the rate at which pixels are flowing
38 * through the pipes. Downscaling must also be accounted for, as that increases
39 * the effective pixel rate.
40 *
41 * On several platforms the CDCLK frequency can be changed dynamically
42 * to minimize power consumption for a given display configuration.
43 * Typically changes to the CDCLK frequency require all the display pipes
44 * to be shut down while the frequency is being changed.
45 *
46 * On SKL+ the DMC will toggle the CDCLK off/on during DC5/6 entry/exit.
47 * DMC will not change the active CDCLK frequency however, so that part
48 * will still be performed by the driver directly.
49 *
50 * RAWCLK is a fixed frequency clock, often used by various auxiliary
51 * blocks such as AUX CH or backlight PWM. Hence the only thing we
52 * really need to know about RAWCLK is its frequency so that various
53 * dividers can be programmed correctly.
54 */
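
/*
 * Purely illustrative sketch (a hypothetical helper, not called anywhere
 * in the driver): the minimum CDCLK a pipe needs is essentially its
 * effective pixel rate, i.e. the pixel clock scaled up by any downscaling
 * factor. Here the downscale factor is assumed to be in 16.16 fixed point
 * and both clocks are in kHz. The driver's real per-pipe calculation also
 * applies platform specific guardbands and other constraints.
 */
static int __maybe_unused example_min_cdclk(int pixel_rate, u32 downscale_16_16)
{
	/* effective pixel rate = pixel rate * downscale factor */
	return DIV_ROUND_UP_ULL(mul_u32_u32(pixel_rate, downscale_16_16),
				1 << 16);
}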
55
56 static void fixed_133mhz_get_cdclk(struct drm_i915_private *dev_priv,
57 struct intel_cdclk_state *cdclk_state)
58 {
59 cdclk_state->cdclk = 133333;
60 }
61
62 static void fixed_200mhz_get_cdclk(struct drm_i915_private *dev_priv,
63 struct intel_cdclk_state *cdclk_state)
64 {
65 cdclk_state->cdclk = 200000;
66 }
67
68 static void fixed_266mhz_get_cdclk(struct drm_i915_private *dev_priv,
69 struct intel_cdclk_state *cdclk_state)
70 {
71 cdclk_state->cdclk = 266667;
72 }
73
74 static void fixed_333mhz_get_cdclk(struct drm_i915_private *dev_priv,
75 struct intel_cdclk_state *cdclk_state)
76 {
77 cdclk_state->cdclk = 333333;
78 }
79
80 static void fixed_400mhz_get_cdclk(struct drm_i915_private *dev_priv,
81 struct intel_cdclk_state *cdclk_state)
82 {
83 cdclk_state->cdclk = 400000;
84 }
85
86 static void fixed_450mhz_get_cdclk(struct drm_i915_private *dev_priv,
87 struct intel_cdclk_state *cdclk_state)
88 {
89 cdclk_state->cdclk = 450000;
90 }
91
92 static void i85x_get_cdclk(struct drm_i915_private *dev_priv,
93 struct intel_cdclk_state *cdclk_state)
94 {
95 struct pci_dev *pdev = dev_priv->drm.pdev;
96 u16 hpllcc = 0;
97
98 /*
99 * 852GM/852GMV only supports 133 MHz and the HPLLCC
100 * encoding is different :(
101 * FIXME is this the right way to detect 852GM/852GMV?
102 */
103 if (pdev->revision == 0x1) {
104 cdclk_state->cdclk = 133333;
105 return;
106 }
107
108 pci_bus_read_config_word(pdev->bus,
109 PCI_DEVFN(0, 3), HPLLCC, &hpllcc);
110
111 /* Assume that the hardware is in the high speed state. This
112 * should be the default.
113 */
114 switch (hpllcc & GC_CLOCK_CONTROL_MASK) {
115 case GC_CLOCK_133_200:
116 case GC_CLOCK_133_200_2:
117 case GC_CLOCK_100_200:
118 cdclk_state->cdclk = 200000;
119 break;
120 case GC_CLOCK_166_250:
121 cdclk_state->cdclk = 250000;
122 break;
123 case GC_CLOCK_100_133:
124 cdclk_state->cdclk = 133333;
125 break;
126 case GC_CLOCK_133_266:
127 case GC_CLOCK_133_266_2:
128 case GC_CLOCK_166_266:
129 cdclk_state->cdclk = 266667;
130 break;
131 }
132 }
133
134 static void i915gm_get_cdclk(struct drm_i915_private *dev_priv,
135 struct intel_cdclk_state *cdclk_state)
136 {
137 struct pci_dev *pdev = dev_priv->drm.pdev;
138 u16 gcfgc = 0;
139
140 pci_read_config_word(pdev, GCFGC, &gcfgc);
141
142 if (gcfgc & GC_LOW_FREQUENCY_ENABLE) {
143 cdclk_state->cdclk = 133333;
144 return;
145 }
146
147 switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
148 case GC_DISPLAY_CLOCK_333_320_MHZ:
149 cdclk_state->cdclk = 333333;
150 break;
151 default:
152 case GC_DISPLAY_CLOCK_190_200_MHZ:
153 cdclk_state->cdclk = 190000;
154 break;
155 }
156 }
157
158 static void i945gm_get_cdclk(struct drm_i915_private *dev_priv,
159 struct intel_cdclk_state *cdclk_state)
160 {
161 struct pci_dev *pdev = dev_priv->drm.pdev;
162 u16 gcfgc = 0;
163
164 pci_read_config_word(pdev, GCFGC, &gcfgc);
165
166 if (gcfgc & GC_LOW_FREQUENCY_ENABLE) {
167 cdclk_state->cdclk = 133333;
168 return;
169 }
170
171 switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
172 case GC_DISPLAY_CLOCK_333_320_MHZ:
173 cdclk_state->cdclk = 320000;
174 break;
175 default:
176 case GC_DISPLAY_CLOCK_190_200_MHZ:
177 cdclk_state->cdclk = 200000;
178 break;
179 }
180 }
181
182 static unsigned int intel_hpll_vco(struct drm_i915_private *dev_priv)
183 {
184 static const unsigned int blb_vco[8] = {
185 [0] = 3200000,
186 [1] = 4000000,
187 [2] = 5333333,
188 [3] = 4800000,
189 [4] = 6400000,
190 };
191 static const unsigned int pnv_vco[8] = {
192 [0] = 3200000,
193 [1] = 4000000,
194 [2] = 5333333,
195 [3] = 4800000,
196 [4] = 2666667,
197 };
198 static const unsigned int cl_vco[8] = {
199 [0] = 3200000,
200 [1] = 4000000,
201 [2] = 5333333,
202 [3] = 6400000,
203 [4] = 3333333,
204 [5] = 3566667,
205 [6] = 4266667,
206 };
207 static const unsigned int elk_vco[8] = {
208 [0] = 3200000,
209 [1] = 4000000,
210 [2] = 5333333,
211 [3] = 4800000,
212 };
213 static const unsigned int ctg_vco[8] = {
214 [0] = 3200000,
215 [1] = 4000000,
216 [2] = 5333333,
217 [3] = 6400000,
218 [4] = 2666667,
219 [5] = 4266667,
220 };
221 const unsigned int *vco_table;
222 unsigned int vco;
223 u8 tmp = 0;
224
225 /* FIXME other chipsets? */
226 if (IS_GM45(dev_priv))
227 vco_table = ctg_vco;
228 else if (IS_G45(dev_priv))
229 vco_table = elk_vco;
230 else if (IS_I965GM(dev_priv))
231 vco_table = cl_vco;
232 else if (IS_PINEVIEW(dev_priv))
233 vco_table = pnv_vco;
234 else if (IS_G33(dev_priv))
235 vco_table = blb_vco;
236 else
237 return 0;
238
239 tmp = I915_READ(IS_PINEVIEW(dev_priv) || IS_MOBILE(dev_priv) ?
240 HPLLVCO_MOBILE : HPLLVCO);
241
242 vco = vco_table[tmp & 0x7];
243 if (vco == 0)
244 DRM_ERROR("Bad HPLL VCO (HPLLVCO=0x%02x)\n", tmp);
245 else
246 DRM_DEBUG_KMS("HPLL VCO %u kHz\n", vco);
247
248 return vco;
249 }
250
251 static void g33_get_cdclk(struct drm_i915_private *dev_priv,
252 struct intel_cdclk_state *cdclk_state)
253 {
254 struct pci_dev *pdev = dev_priv->drm.pdev;
255 static const u8 div_3200[] = { 12, 10, 8, 7, 5, 16 };
256 static const u8 div_4000[] = { 14, 12, 10, 8, 6, 20 };
257 static const u8 div_4800[] = { 20, 14, 12, 10, 8, 24 };
258 static const u8 div_5333[] = { 20, 16, 12, 12, 8, 28 };
259 const u8 *div_table;
260 unsigned int cdclk_sel;
261 u16 tmp = 0;
262
263 cdclk_state->vco = intel_hpll_vco(dev_priv);
264
265 pci_read_config_word(pdev, GCFGC, &tmp);
266
267 cdclk_sel = (tmp >> 4) & 0x7;
268
269 if (cdclk_sel >= ARRAY_SIZE(div_3200))
270 goto fail;
271
272 switch (cdclk_state->vco) {
273 case 3200000:
274 div_table = div_3200;
275 break;
276 case 4000000:
277 div_table = div_4000;
278 break;
279 case 4800000:
280 div_table = div_4800;
281 break;
282 case 5333333:
283 div_table = div_5333;
284 break;
285 default:
286 goto fail;
287 }
288
289 cdclk_state->cdclk = DIV_ROUND_CLOSEST(cdclk_state->vco,
290 div_table[cdclk_sel]);
291 return;
292
293 fail:
294 DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u kHz, CFGC=0x%08x\n",
295 cdclk_state->vco, tmp);
296 cdclk_state->cdclk = 190476;
297 }
298
299 static void pnv_get_cdclk(struct drm_i915_private *dev_priv,
300 struct intel_cdclk_state *cdclk_state)
301 {
302 struct pci_dev *pdev = dev_priv->drm.pdev;
303 u16 gcfgc = 0;
304
305 pci_read_config_word(pdev, GCFGC, &gcfgc);
306
307 switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
308 case GC_DISPLAY_CLOCK_267_MHZ_PNV:
309 cdclk_state->cdclk = 266667;
310 break;
311 case GC_DISPLAY_CLOCK_333_MHZ_PNV:
312 cdclk_state->cdclk = 333333;
313 break;
314 case GC_DISPLAY_CLOCK_444_MHZ_PNV:
315 cdclk_state->cdclk = 444444;
316 break;
317 case GC_DISPLAY_CLOCK_200_MHZ_PNV:
318 cdclk_state->cdclk = 200000;
319 break;
320 default:
321 DRM_ERROR("Unknown pnv display core clock 0x%04x\n", gcfgc);
322 /* fall through */
323 case GC_DISPLAY_CLOCK_133_MHZ_PNV:
324 cdclk_state->cdclk = 133333;
325 break;
326 case GC_DISPLAY_CLOCK_167_MHZ_PNV:
327 cdclk_state->cdclk = 166667;
328 break;
329 }
330 }
331
332 static void i965gm_get_cdclk(struct drm_i915_private *dev_priv,
333 struct intel_cdclk_state *cdclk_state)
334 {
335 struct pci_dev *pdev = dev_priv->drm.pdev;
336 static const u8 div_3200[] = { 16, 10, 8 };
337 static const u8 div_4000[] = { 20, 12, 10 };
338 static const u8 div_5333[] = { 24, 16, 14 };
339 const u8 *div_table;
340 unsigned int cdclk_sel;
341 u16 tmp = 0;
342
343 cdclk_state->vco = intel_hpll_vco(dev_priv);
344
345 pci_read_config_word(pdev, GCFGC, &tmp);
346
347 cdclk_sel = ((tmp >> 8) & 0x1f) - 1;
348
349 if (cdclk_sel >= ARRAY_SIZE(div_3200))
350 goto fail;
351
352 switch (cdclk_state->vco) {
353 case 3200000:
354 div_table = div_3200;
355 break;
356 case 4000000:
357 div_table = div_4000;
358 break;
359 case 5333333:
360 div_table = div_5333;
361 break;
362 default:
363 goto fail;
364 }
365
366 cdclk_state->cdclk = DIV_ROUND_CLOSEST(cdclk_state->vco,
367 div_table[cdclk_sel]);
368 return;
369
370 fail:
371 DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u kHz, CFGC=0x%04x\n",
372 cdclk_state->vco, tmp);
373 cdclk_state->cdclk = 200000;
374 }
375
376 static void gm45_get_cdclk(struct drm_i915_private *dev_priv,
377 struct intel_cdclk_state *cdclk_state)
378 {
379 struct pci_dev *pdev = dev_priv->drm.pdev;
380 unsigned int cdclk_sel;
381 u16 tmp = 0;
382
383 cdclk_state->vco = intel_hpll_vco(dev_priv);
384
385 pci_read_config_word(pdev, GCFGC, &tmp);
386
387 cdclk_sel = (tmp >> 12) & 0x1;
388
389 switch (cdclk_state->vco) {
390 case 2666667:
391 case 4000000:
392 case 5333333:
393 cdclk_state->cdclk = cdclk_sel ? 333333 : 222222;
394 break;
395 case 3200000:
396 cdclk_state->cdclk = cdclk_sel ? 320000 : 228571;
397 break;
398 default:
399 DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u, CFGC=0x%04x\n",
400 cdclk_state->vco, tmp);
401 cdclk_state->cdclk = 222222;
402 break;
403 }
404 }
405
406 static void hsw_get_cdclk(struct drm_i915_private *dev_priv,
407 struct intel_cdclk_state *cdclk_state)
408 {
409 u32 lcpll = I915_READ(LCPLL_CTL);
410 u32 freq = lcpll & LCPLL_CLK_FREQ_MASK;
411
412 if (lcpll & LCPLL_CD_SOURCE_FCLK)
413 cdclk_state->cdclk = 800000;
414 else if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
415 cdclk_state->cdclk = 450000;
416 else if (freq == LCPLL_CLK_FREQ_450)
417 cdclk_state->cdclk = 450000;
418 else if (IS_HSW_ULT(dev_priv))
419 cdclk_state->cdclk = 337500;
420 else
421 cdclk_state->cdclk = 540000;
422 }
423
424 static int vlv_calc_cdclk(struct drm_i915_private *dev_priv, int min_cdclk)
425 {
426 int freq_320 = (dev_priv->hpll_freq << 1) % 320000 != 0 ?
427 333333 : 320000;
428
429 /*
430 * We seem to get an unstable or solid color picture at 200MHz.
431 * Not sure what's wrong. For now use 200MHz only when all pipes
432 * are off.
433 */
434 if (IS_VALLEYVIEW(dev_priv) && min_cdclk > freq_320)
435 return 400000;
436 else if (min_cdclk > 266667)
437 return freq_320;
438 else if (min_cdclk > 0)
439 return 266667;
440 else
441 return 200000;
442 }
443
444 static u8 vlv_calc_voltage_level(struct drm_i915_private *dev_priv, int cdclk)
445 {
446 if (IS_VALLEYVIEW(dev_priv)) {
447 if (cdclk >= 320000) /* jump to highest voltage for 400MHz too */
448 return 2;
449 else if (cdclk >= 266667)
450 return 1;
451 else
452 return 0;
453 } else {
454 /*
455 * Specs are full of misinformation, but testing on actual
456 * hardware has shown that we just need to write the desired
457 * CCK divider into the Punit register.
458 */
459 return DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1, cdclk) - 1;
460 }
461 }
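
/*
 * Worked example for the CHV case above (assuming hpll_freq is in kHz):
 * with a 1600 MHz HPLL and a 320 MHz CDCLK the CCK divider field becomes
 * DIV_ROUND_CLOSEST(2 * 1600000, 320000) - 1 = 9, i.e. CDCLK = 2 * HPLL / 10.
 */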
462
463 static void vlv_get_cdclk(struct drm_i915_private *dev_priv,
464 struct intel_cdclk_state *cdclk_state)
465 {
466 u32 val;
467
468 vlv_iosf_sb_get(dev_priv,
469 BIT(VLV_IOSF_SB_CCK) | BIT(VLV_IOSF_SB_PUNIT));
470
471 cdclk_state->vco = vlv_get_hpll_vco(dev_priv);
472 cdclk_state->cdclk = vlv_get_cck_clock(dev_priv, "cdclk",
473 CCK_DISPLAY_CLOCK_CONTROL,
474 cdclk_state->vco);
475
476 val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
477
478 vlv_iosf_sb_put(dev_priv,
479 BIT(VLV_IOSF_SB_CCK) | BIT(VLV_IOSF_SB_PUNIT));
480
481 if (IS_VALLEYVIEW(dev_priv))
482 cdclk_state->voltage_level = (val & DSPFREQGUAR_MASK) >>
483 DSPFREQGUAR_SHIFT;
484 else
485 cdclk_state->voltage_level = (val & DSPFREQGUAR_MASK_CHV) >>
486 DSPFREQGUAR_SHIFT_CHV;
487 }
488
489 static void vlv_program_pfi_credits(struct drm_i915_private *dev_priv)
490 {
491 unsigned int credits, default_credits;
492
493 if (IS_CHERRYVIEW(dev_priv))
494 default_credits = PFI_CREDIT(12);
495 else
496 default_credits = PFI_CREDIT(8);
497
498 if (dev_priv->cdclk.hw.cdclk >= dev_priv->czclk_freq) {
499 /* CHV suggested value is 31 or 63 */
500 if (IS_CHERRYVIEW(dev_priv))
501 credits = PFI_CREDIT_63;
502 else
503 credits = PFI_CREDIT(15);
504 } else {
505 credits = default_credits;
506 }
507
508 /*
509 * WA - write default credits before re-programming
510 * FIXME: should we also set the resend bit here?
511 */
512 I915_WRITE(GCI_CONTROL, VGA_FAST_MODE_DISABLE |
513 default_credits);
514
515 I915_WRITE(GCI_CONTROL, VGA_FAST_MODE_DISABLE |
516 credits | PFI_CREDIT_RESEND);
517
518 /*
519 * FIXME is this guaranteed to clear
520 * immediately or should we poll for it?
521 */
522 WARN_ON(I915_READ(GCI_CONTROL) & PFI_CREDIT_RESEND);
523 }
524
525 static void vlv_set_cdclk(struct drm_i915_private *dev_priv,
526 const struct intel_cdclk_state *cdclk_state,
527 enum pipe pipe)
528 {
529 int cdclk = cdclk_state->cdclk;
530 u32 val, cmd = cdclk_state->voltage_level;
531 intel_wakeref_t wakeref;
532
533 switch (cdclk) {
534 case 400000:
535 case 333333:
536 case 320000:
537 case 266667:
538 case 200000:
539 break;
540 default:
541 MISSING_CASE(cdclk);
542 return;
543 }
544
545 /* There are cases where we can end up here with power domains
546 * off and a CDCLK frequency other than the minimum, like when
547 * issuing a modeset without actually changing any display after
548 * a system suspend. So grab the display core domain, which covers
549 * the HW blocks needed for the following programming.
550 */
551 wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_DISPLAY_CORE);
552
553 vlv_iosf_sb_get(dev_priv,
554 BIT(VLV_IOSF_SB_CCK) |
555 BIT(VLV_IOSF_SB_BUNIT) |
556 BIT(VLV_IOSF_SB_PUNIT));
557
558 val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
559 val &= ~DSPFREQGUAR_MASK;
560 val |= (cmd << DSPFREQGUAR_SHIFT);
561 vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);
562 if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) &
563 DSPFREQSTAT_MASK) == (cmd << DSPFREQSTAT_SHIFT),
564 50)) {
565 DRM_ERROR("timed out waiting for CDclk change\n");
566 }
567
568 if (cdclk == 400000) {
569 u32 divider;
570
571 divider = DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1,
572 cdclk) - 1;
573
574 /* adjust cdclk divider */
575 val = vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL);
576 val &= ~CCK_FREQUENCY_VALUES;
577 val |= divider;
578 vlv_cck_write(dev_priv, CCK_DISPLAY_CLOCK_CONTROL, val);
579
580 if (wait_for((vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL) &
581 CCK_FREQUENCY_STATUS) == (divider << CCK_FREQUENCY_STATUS_SHIFT),
582 50))
583 DRM_ERROR("timed out waiting for CDclk change\n");
584 }
585
586 /* adjust self-refresh exit latency value */
587 val = vlv_bunit_read(dev_priv, BUNIT_REG_BISOC);
588 val &= ~0x7f;
589
590 /*
591 * For high bandwidth configs, we set a higher latency in the bunit
592 * so that the core display fetch happens in time to avoid underruns.
593 */
594 if (cdclk == 400000)
595 val |= 4500 / 250; /* 4.5 usec */
596 else
597 val |= 3000 / 250; /* 3.0 usec */
598 vlv_bunit_write(dev_priv, BUNIT_REG_BISOC, val);
599
600 vlv_iosf_sb_put(dev_priv,
601 BIT(VLV_IOSF_SB_CCK) |
602 BIT(VLV_IOSF_SB_BUNIT) |
603 BIT(VLV_IOSF_SB_PUNIT));
604
605 intel_update_cdclk(dev_priv);
606
607 vlv_program_pfi_credits(dev_priv);
608
609 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
610 }
611
612 static void chv_set_cdclk(struct drm_i915_private *dev_priv,
613 const struct intel_cdclk_state *cdclk_state,
614 enum pipe pipe)
615 {
616 int cdclk = cdclk_state->cdclk;
617 u32 val, cmd = cdclk_state->voltage_level;
618 intel_wakeref_t wakeref;
619
620 switch (cdclk) {
621 case 333333:
622 case 320000:
623 case 266667:
624 case 200000:
625 break;
626 default:
627 MISSING_CASE(cdclk);
628 return;
629 }
630
631 /* There are cases where we can end up here with power domains
632 * off and a CDCLK frequency other than the minimum, like when
633 * issuing a modeset without actually changing any display after
634 * a system suspend. So grab the display core domain, which covers
635 * the HW blocks needed for the following programming.
636 */
637 wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_DISPLAY_CORE);
638
639 vlv_punit_get(dev_priv);
640 val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
641 val &= ~DSPFREQGUAR_MASK_CHV;
642 val |= (cmd << DSPFREQGUAR_SHIFT_CHV);
643 vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);
644 if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) &
645 DSPFREQSTAT_MASK_CHV) == (cmd << DSPFREQSTAT_SHIFT_CHV),
646 50)) {
647 DRM_ERROR("timed out waiting for CDclk change\n");
648 }
649
650 vlv_punit_put(dev_priv);
651
652 intel_update_cdclk(dev_priv);
653
654 vlv_program_pfi_credits(dev_priv);
655
656 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
657 }
658
659 static int bdw_calc_cdclk(int min_cdclk)
660 {
661 if (min_cdclk > 540000)
662 return 675000;
663 else if (min_cdclk > 450000)
664 return 540000;
665 else if (min_cdclk > 337500)
666 return 450000;
667 else
668 return 337500;
669 }
670
671 static u8 bdw_calc_voltage_level(int cdclk)
672 {
673 switch (cdclk) {
674 default:
675 case 337500:
676 return 2;
677 case 450000:
678 return 0;
679 case 540000:
680 return 1;
681 case 675000:
682 return 3;
683 }
684 }
685
686 static void bdw_get_cdclk(struct drm_i915_private *dev_priv,
687 struct intel_cdclk_state *cdclk_state)
688 {
689 u32 lcpll = I915_READ(LCPLL_CTL);
690 u32 freq = lcpll & LCPLL_CLK_FREQ_MASK;
691
692 if (lcpll & LCPLL_CD_SOURCE_FCLK)
693 cdclk_state->cdclk = 800000;
694 else if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
695 cdclk_state->cdclk = 450000;
696 else if (freq == LCPLL_CLK_FREQ_450)
697 cdclk_state->cdclk = 450000;
698 else if (freq == LCPLL_CLK_FREQ_54O_BDW)
699 cdclk_state->cdclk = 540000;
700 else if (freq == LCPLL_CLK_FREQ_337_5_BDW)
701 cdclk_state->cdclk = 337500;
702 else
703 cdclk_state->cdclk = 675000;
704
705 /*
706 * Can't read this out :( Let's assume it's
707 * at least what the CDCLK frequency requires.
708 */
709 cdclk_state->voltage_level =
710 bdw_calc_voltage_level(cdclk_state->cdclk);
711 }
712
713 static void bdw_set_cdclk(struct drm_i915_private *dev_priv,
714 const struct intel_cdclk_state *cdclk_state,
715 enum pipe pipe)
716 {
717 int cdclk = cdclk_state->cdclk;
718 u32 val;
719 int ret;
720
721 if (WARN((I915_READ(LCPLL_CTL) &
722 (LCPLL_PLL_DISABLE | LCPLL_PLL_LOCK |
723 LCPLL_CD_CLOCK_DISABLE | LCPLL_ROOT_CD_CLOCK_DISABLE |
724 LCPLL_CD2X_CLOCK_DISABLE | LCPLL_POWER_DOWN_ALLOW |
725 LCPLL_CD_SOURCE_FCLK)) != LCPLL_PLL_LOCK,
726 "trying to change cdclk frequency with cdclk not enabled\n"))
727 return;
728
729 ret = sandybridge_pcode_write(dev_priv,
730 BDW_PCODE_DISPLAY_FREQ_CHANGE_REQ, 0x0);
731 if (ret) {
732 DRM_ERROR("failed to inform pcode about cdclk change\n");
733 return;
734 }
735
736 val = I915_READ(LCPLL_CTL);
737 val |= LCPLL_CD_SOURCE_FCLK;
738 I915_WRITE(LCPLL_CTL, val);
739
740 /*
741 * According to the spec, it should be enough to poll for this 1 us.
742 * However, extensive testing shows that this can take longer.
743 */
744 if (wait_for_us(I915_READ(LCPLL_CTL) &
745 LCPLL_CD_SOURCE_FCLK_DONE, 100))
746 DRM_ERROR("Switching to FCLK failed\n");
747
748 val = I915_READ(LCPLL_CTL);
749 val &= ~LCPLL_CLK_FREQ_MASK;
750
751 switch (cdclk) {
752 default:
753 MISSING_CASE(cdclk);
754 /* fall through */
755 case 337500:
756 val |= LCPLL_CLK_FREQ_337_5_BDW;
757 break;
758 case 450000:
759 val |= LCPLL_CLK_FREQ_450;
760 break;
761 case 540000:
762 val |= LCPLL_CLK_FREQ_54O_BDW;
763 break;
764 case 675000:
765 val |= LCPLL_CLK_FREQ_675_BDW;
766 break;
767 }
768
769 I915_WRITE(LCPLL_CTL, val);
770
771 val = I915_READ(LCPLL_CTL);
772 val &= ~LCPLL_CD_SOURCE_FCLK;
773 I915_WRITE(LCPLL_CTL, val);
774
775 if (wait_for_us((I915_READ(LCPLL_CTL) &
776 LCPLL_CD_SOURCE_FCLK_DONE) == 0, 1))
777 DRM_ERROR("Switching back to LCPLL failed\n");
778
779 sandybridge_pcode_write(dev_priv, HSW_PCODE_DE_WRITE_FREQ_REQ,
780 cdclk_state->voltage_level);
781
782 I915_WRITE(CDCLK_FREQ, DIV_ROUND_CLOSEST(cdclk, 1000) - 1);
783
784 intel_update_cdclk(dev_priv);
785 }
786
787 static int skl_calc_cdclk(int min_cdclk, int vco)
788 {
789 if (vco == 8640000) {
790 if (min_cdclk > 540000)
791 return 617143;
792 else if (min_cdclk > 432000)
793 return 540000;
794 else if (min_cdclk > 308571)
795 return 432000;
796 else
797 return 308571;
798 } else {
799 if (min_cdclk > 540000)
800 return 675000;
801 else if (min_cdclk > 450000)
802 return 540000;
803 else if (min_cdclk > 337500)
804 return 450000;
805 else
806 return 337500;
807 }
808 }
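
/*
 * Note that each of the CDCLK frequencies above is simply the DPLL0 VCO
 * divided by an integer divider: 8640 MHz / {14, 16, 20, 28} gives
 * 617143, 540000, 432000 and 308571 kHz, while 8100 MHz / {12, 15, 18, 24}
 * gives 675000, 540000, 450000 and 337500 kHz.
 */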
809
810 static u8 skl_calc_voltage_level(int cdclk)
811 {
812 if (cdclk > 540000)
813 return 3;
814 else if (cdclk > 450000)
815 return 2;
816 else if (cdclk > 337500)
817 return 1;
818 else
819 return 0;
820 }
821
822 static void skl_dpll0_update(struct drm_i915_private *dev_priv,
823 struct intel_cdclk_state *cdclk_state)
824 {
825 u32 val;
826
827 cdclk_state->ref = 24000;
828 cdclk_state->vco = 0;
829
830 val = I915_READ(LCPLL1_CTL);
831 if ((val & LCPLL_PLL_ENABLE) == 0)
832 return;
833
834 if (WARN_ON((val & LCPLL_PLL_LOCK) == 0))
835 return;
836
837 val = I915_READ(DPLL_CTRL1);
838
839 if (WARN_ON((val & (DPLL_CTRL1_HDMI_MODE(SKL_DPLL0) |
840 DPLL_CTRL1_SSC(SKL_DPLL0) |
841 DPLL_CTRL1_OVERRIDE(SKL_DPLL0))) !=
842 DPLL_CTRL1_OVERRIDE(SKL_DPLL0)))
843 return;
844
845 switch (val & DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0)) {
846 case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810, SKL_DPLL0):
847 case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1350, SKL_DPLL0):
848 case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1620, SKL_DPLL0):
849 case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2700, SKL_DPLL0):
850 cdclk_state->vco = 8100000;
851 break;
852 case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080, SKL_DPLL0):
853 case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2160, SKL_DPLL0):
854 cdclk_state->vco = 8640000;
855 break;
856 default:
857 MISSING_CASE(val & DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0));
858 break;
859 }
860 }
861
862 static void skl_get_cdclk(struct drm_i915_private *dev_priv,
863 struct intel_cdclk_state *cdclk_state)
864 {
865 u32 cdctl;
866
867 skl_dpll0_update(dev_priv, cdclk_state);
868
869 cdclk_state->cdclk = cdclk_state->bypass = cdclk_state->ref;
870
871 if (cdclk_state->vco == 0)
872 goto out;
873
874 cdctl = I915_READ(CDCLK_CTL);
875
876 if (cdclk_state->vco == 8640000) {
877 switch (cdctl & CDCLK_FREQ_SEL_MASK) {
878 case CDCLK_FREQ_450_432:
879 cdclk_state->cdclk = 432000;
880 break;
881 case CDCLK_FREQ_337_308:
882 cdclk_state->cdclk = 308571;
883 break;
884 case CDCLK_FREQ_540:
885 cdclk_state->cdclk = 540000;
886 break;
887 case CDCLK_FREQ_675_617:
888 cdclk_state->cdclk = 617143;
889 break;
890 default:
891 MISSING_CASE(cdctl & CDCLK_FREQ_SEL_MASK);
892 break;
893 }
894 } else {
895 switch (cdctl & CDCLK_FREQ_SEL_MASK) {
896 case CDCLK_FREQ_450_432:
897 cdclk_state->cdclk = 450000;
898 break;
899 case CDCLK_FREQ_337_308:
900 cdclk_state->cdclk = 337500;
901 break;
902 case CDCLK_FREQ_540:
903 cdclk_state->cdclk = 540000;
904 break;
905 case CDCLK_FREQ_675_617:
906 cdclk_state->cdclk = 675000;
907 break;
908 default:
909 MISSING_CASE(cdctl & CDCLK_FREQ_SEL_MASK);
910 break;
911 }
912 }
913
914 out:
915 /*
916 * Can't read this out :( Let's assume it's
917 * at least what the CDCLK frequency requires.
918 */
919 cdclk_state->voltage_level =
920 skl_calc_voltage_level(cdclk_state->cdclk);
921 }
922
923 /* convert from kHz to .1 fixpoint MHz with -1MHz offset */
924 static int skl_cdclk_decimal(int cdclk)
925 {
926 return DIV_ROUND_CLOSEST(cdclk - 1000, 500);
927 }
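
/*
 * For example, 337500 kHz yields DIV_ROUND_CLOSEST(336500, 500) = 673,
 * i.e. 336.5 MHz (337.5 - 1) encoded with one fractional bit.
 */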
928
929 static void skl_set_preferred_cdclk_vco(struct drm_i915_private *dev_priv,
930 int vco)
931 {
932 bool changed = dev_priv->skl_preferred_vco_freq != vco;
933
934 dev_priv->skl_preferred_vco_freq = vco;
935
936 if (changed)
937 intel_update_max_cdclk(dev_priv);
938 }
939
940 static void skl_dpll0_enable(struct drm_i915_private *dev_priv, int vco)
941 {
942 u32 val;
943
944 WARN_ON(vco != 8100000 && vco != 8640000);
945
946 /*
947 * We always enable DPLL0 with the lowest link rate possible, but still
948 * taking into account the VCO required to operate the eDP panel at the
949 * desired frequency. The usual DP link rates operate with a VCO of
950 * 8100 MHz while the eDP 1.4 alternate link rates need a VCO of 8640 MHz.
951 * The modeset code is responsible for the selection of the exact link
952 * rate later on, with the constraint of choosing a frequency that
953 * works with vco.
954 */
955 val = I915_READ(DPLL_CTRL1);
956
957 val &= ~(DPLL_CTRL1_HDMI_MODE(SKL_DPLL0) | DPLL_CTRL1_SSC(SKL_DPLL0) |
958 DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0));
959 val |= DPLL_CTRL1_OVERRIDE(SKL_DPLL0);
960 if (vco == 8640000)
961 val |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080,
962 SKL_DPLL0);
963 else
964 val |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810,
965 SKL_DPLL0);
966
967 I915_WRITE(DPLL_CTRL1, val);
968 POSTING_READ(DPLL_CTRL1);
969
970 I915_WRITE(LCPLL1_CTL, I915_READ(LCPLL1_CTL) | LCPLL_PLL_ENABLE);
971
972 if (intel_de_wait_for_set(dev_priv, LCPLL1_CTL, LCPLL_PLL_LOCK, 5))
973 DRM_ERROR("DPLL0 not locked\n");
974
975 dev_priv->cdclk.hw.vco = vco;
976
977 /* We'll want to keep using the current vco from now on. */
978 skl_set_preferred_cdclk_vco(dev_priv, vco);
979 }
980
981 static void skl_dpll0_disable(struct drm_i915_private *dev_priv)
982 {
983 I915_WRITE(LCPLL1_CTL, I915_READ(LCPLL1_CTL) & ~LCPLL_PLL_ENABLE);
984 if (intel_de_wait_for_clear(dev_priv, LCPLL1_CTL, LCPLL_PLL_LOCK, 1))
985 DRM_ERROR("Couldn't disable DPLL0\n");
986
987 dev_priv->cdclk.hw.vco = 0;
988 }
989
990 static void skl_set_cdclk(struct drm_i915_private *dev_priv,
991 const struct intel_cdclk_state *cdclk_state,
992 enum pipe pipe)
993 {
994 int cdclk = cdclk_state->cdclk;
995 int vco = cdclk_state->vco;
996 u32 freq_select, cdclk_ctl;
997 int ret;
998
999 /*
1000 * Based on WA#1183, the 308 and 617 MHz CDCLK rates are
1001 * unsupported on SKL. In theory this should never happen since only
1002 * the eDP1.4 2.16 and 4.32Gbps rates require it, but eDP1.4 is not
1003 * supported on SKL either, see the above WA. WARN whenever trying to
1004 * use the corresponding VCO freq as that always leads to using the
1005 * minimum 308MHz CDCLK.
1006 */
1007 WARN_ON_ONCE(IS_SKYLAKE(dev_priv) && vco == 8640000);
1008
1009 ret = skl_pcode_request(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1010 SKL_CDCLK_PREPARE_FOR_CHANGE,
1011 SKL_CDCLK_READY_FOR_CHANGE,
1012 SKL_CDCLK_READY_FOR_CHANGE, 3);
1013 if (ret) {
1014 DRM_ERROR("Failed to inform PCU about cdclk change (%d)\n",
1015 ret);
1016 return;
1017 }
1018
1019 /* Choose frequency for this cdclk */
1020 switch (cdclk) {
1021 default:
1022 WARN_ON(cdclk != dev_priv->cdclk.hw.bypass);
1023 WARN_ON(vco != 0);
1024 /* fall through */
1025 case 308571:
1026 case 337500:
1027 freq_select = CDCLK_FREQ_337_308;
1028 break;
1029 case 450000:
1030 case 432000:
1031 freq_select = CDCLK_FREQ_450_432;
1032 break;
1033 case 540000:
1034 freq_select = CDCLK_FREQ_540;
1035 break;
1036 case 617143:
1037 case 675000:
1038 freq_select = CDCLK_FREQ_675_617;
1039 break;
1040 }
1041
1042 if (dev_priv->cdclk.hw.vco != 0 &&
1043 dev_priv->cdclk.hw.vco != vco)
1044 skl_dpll0_disable(dev_priv);
1045
1046 cdclk_ctl = I915_READ(CDCLK_CTL);
1047
1048 if (dev_priv->cdclk.hw.vco != vco) {
1049 /* Wa Display #1183: skl,kbl,cfl */
1050 cdclk_ctl &= ~(CDCLK_FREQ_SEL_MASK | CDCLK_FREQ_DECIMAL_MASK);
1051 cdclk_ctl |= freq_select | skl_cdclk_decimal(cdclk);
1052 I915_WRITE(CDCLK_CTL, cdclk_ctl);
1053 }
1054
1055 /* Wa Display #1183: skl,kbl,cfl */
1056 cdclk_ctl |= CDCLK_DIVMUX_CD_OVERRIDE;
1057 I915_WRITE(CDCLK_CTL, cdclk_ctl);
1058 POSTING_READ(CDCLK_CTL);
1059
1060 if (dev_priv->cdclk.hw.vco != vco)
1061 skl_dpll0_enable(dev_priv, vco);
1062
1063 /* Wa Display #1183: skl,kbl,cfl */
1064 cdclk_ctl &= ~(CDCLK_FREQ_SEL_MASK | CDCLK_FREQ_DECIMAL_MASK);
1065 I915_WRITE(CDCLK_CTL, cdclk_ctl);
1066
1067 cdclk_ctl |= freq_select | skl_cdclk_decimal(cdclk);
1068 I915_WRITE(CDCLK_CTL, cdclk_ctl);
1069
1070 /* Wa Display #1183: skl,kbl,cfl */
1071 cdclk_ctl &= ~CDCLK_DIVMUX_CD_OVERRIDE;
1072 I915_WRITE(CDCLK_CTL, cdclk_ctl);
1073 POSTING_READ(CDCLK_CTL);
1074
1075 /* inform PCU of the change */
1076 sandybridge_pcode_write(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1077 cdclk_state->voltage_level);
1078
1079 intel_update_cdclk(dev_priv);
1080 }
1081
1082 static void skl_sanitize_cdclk(struct drm_i915_private *dev_priv)
1083 {
1084 u32 cdctl, expected;
1085
1086 /*
1087 * Check whether the pre-os initialized the display.
1088 * There is an SWF18 scratchpad register defined which is set by the
1089 * pre-os and which the OS driver can use to check that status.
1090 */
1091 if ((I915_READ(SWF_ILK(0x18)) & 0x00FFFFFF) == 0)
1092 goto sanitize;
1093
1094 intel_update_cdclk(dev_priv);
1095 intel_dump_cdclk_state(&dev_priv->cdclk.hw, "Current CDCLK");
1096
1097 /* Is PLL enabled and locked ? */
1098 if (dev_priv->cdclk.hw.vco == 0 ||
1099 dev_priv->cdclk.hw.cdclk == dev_priv->cdclk.hw.bypass)
1100 goto sanitize;
1101
1102 /* DPLL okay; verify the cdclock
1103 *
1104 * In some instances the frequency selection is correct but the
1105 * decimal part is programmed wrong by the BIOS when the pre-os does
1106 * not enable the display. Verify that as well.
1107 */
1108 cdctl = I915_READ(CDCLK_CTL);
1109 expected = (cdctl & CDCLK_FREQ_SEL_MASK) |
1110 skl_cdclk_decimal(dev_priv->cdclk.hw.cdclk);
1111 if (cdctl == expected)
1112 /* All well; nothing to sanitize */
1113 return;
1114
1115 sanitize:
1116 DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");
1117
1118 /* force cdclk programming */
1119 dev_priv->cdclk.hw.cdclk = 0;
1120 /* force full PLL disable + enable */
1121 dev_priv->cdclk.hw.vco = -1;
1122 }
1123
1124 static void skl_init_cdclk(struct drm_i915_private *dev_priv)
1125 {
1126 struct intel_cdclk_state cdclk_state;
1127
1128 skl_sanitize_cdclk(dev_priv);
1129
1130 if (dev_priv->cdclk.hw.cdclk != 0 &&
1131 dev_priv->cdclk.hw.vco != 0) {
1132 /*
1133 * Use the current vco as our initial
1134 * guess as to what the preferred vco is.
1135 */
1136 if (dev_priv->skl_preferred_vco_freq == 0)
1137 skl_set_preferred_cdclk_vco(dev_priv,
1138 dev_priv->cdclk.hw.vco);
1139 return;
1140 }
1141
1142 cdclk_state = dev_priv->cdclk.hw;
1143
1144 cdclk_state.vco = dev_priv->skl_preferred_vco_freq;
1145 if (cdclk_state.vco == 0)
1146 cdclk_state.vco = 8100000;
1147 cdclk_state.cdclk = skl_calc_cdclk(0, cdclk_state.vco);
1148 cdclk_state.voltage_level = skl_calc_voltage_level(cdclk_state.cdclk);
1149
1150 skl_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1151 }
1152
1153 static void skl_uninit_cdclk(struct drm_i915_private *dev_priv)
1154 {
1155 struct intel_cdclk_state cdclk_state = dev_priv->cdclk.hw;
1156
1157 cdclk_state.cdclk = cdclk_state.bypass;
1158 cdclk_state.vco = 0;
1159 cdclk_state.voltage_level = skl_calc_voltage_level(cdclk_state.cdclk);
1160
1161 skl_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1162 }
1163
1164 static int bxt_calc_cdclk(int min_cdclk)
1165 {
1166 if (min_cdclk > 576000)
1167 return 624000;
1168 else if (min_cdclk > 384000)
1169 return 576000;
1170 else if (min_cdclk > 288000)
1171 return 384000;
1172 else if (min_cdclk > 144000)
1173 return 288000;
1174 else
1175 return 144000;
1176 }
1177
1178 static int glk_calc_cdclk(int min_cdclk)
1179 {
1180 if (min_cdclk > 158400)
1181 return 316800;
1182 else if (min_cdclk > 79200)
1183 return 158400;
1184 else
1185 return 79200;
1186 }
1187
1188 static u8 bxt_calc_voltage_level(int cdclk)
1189 {
1190 return DIV_ROUND_UP(cdclk, 25000);
1191 }
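
/*
 * The voltage level is simply the CDCLK frequency expressed in 25 MHz
 * units, rounded up: e.g. 624000 kHz -> DIV_ROUND_UP(624000, 25000) = 25.
 */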
1192
1193 static int bxt_de_pll_vco(struct drm_i915_private *dev_priv, int cdclk)
1194 {
1195 int ratio;
1196
1197 if (cdclk == dev_priv->cdclk.hw.bypass)
1198 return 0;
1199
1200 switch (cdclk) {
1201 default:
1202 MISSING_CASE(cdclk);
1203 /* fall through */
1204 case 144000:
1205 case 288000:
1206 case 384000:
1207 case 576000:
1208 ratio = 60;
1209 break;
1210 case 624000:
1211 ratio = 65;
1212 break;
1213 }
1214
1215 return dev_priv->cdclk.hw.ref * ratio;
1216 }
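
/*
 * E.g. with the 19200 kHz reference, a ratio of 60 gives a 1152000 kHz
 * VCO, which the CD2X divider turns into 576000/384000/288000/144000 kHz,
 * while a ratio of 65 gives 1248000 kHz for the 624000 kHz CDCLK.
 */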
1217
1218 static int glk_de_pll_vco(struct drm_i915_private *dev_priv, int cdclk)
1219 {
1220 int ratio;
1221
1222 if (cdclk == dev_priv->cdclk.hw.bypass)
1223 return 0;
1224
1225 switch (cdclk) {
1226 default:
1227 MISSING_CASE(cdclk);
1228 /* fall through */
1229 case 79200:
1230 case 158400:
1231 case 316800:
1232 ratio = 33;
1233 break;
1234 }
1235
1236 return dev_priv->cdclk.hw.ref * ratio;
1237 }
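
/*
 * E.g. 19200 kHz * 33 = 633600 kHz VCO, which the CD2X divider turns into
 * 316800/158400/79200 kHz.
 */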
1238
1239 static void bxt_de_pll_update(struct drm_i915_private *dev_priv,
1240 struct intel_cdclk_state *cdclk_state)
1241 {
1242 u32 val;
1243
1244 cdclk_state->ref = 19200;
1245 cdclk_state->vco = 0;
1246
1247 val = I915_READ(BXT_DE_PLL_ENABLE);
1248 if ((val & BXT_DE_PLL_PLL_ENABLE) == 0)
1249 return;
1250
1251 if (WARN_ON((val & BXT_DE_PLL_LOCK) == 0))
1252 return;
1253
1254 val = I915_READ(BXT_DE_PLL_CTL);
1255 cdclk_state->vco = (val & BXT_DE_PLL_RATIO_MASK) * cdclk_state->ref;
1256 }
1257
1258 static void bxt_get_cdclk(struct drm_i915_private *dev_priv,
1259 struct intel_cdclk_state *cdclk_state)
1260 {
1261 u32 divider;
1262 int div;
1263
1264 bxt_de_pll_update(dev_priv, cdclk_state);
1265
1266 cdclk_state->cdclk = cdclk_state->bypass = cdclk_state->ref;
1267
1268 if (cdclk_state->vco == 0)
1269 goto out;
1270
1271 divider = I915_READ(CDCLK_CTL) & BXT_CDCLK_CD2X_DIV_SEL_MASK;
1272
1273 switch (divider) {
1274 case BXT_CDCLK_CD2X_DIV_SEL_1:
1275 div = 2;
1276 break;
1277 case BXT_CDCLK_CD2X_DIV_SEL_1_5:
1278 WARN(IS_GEMINILAKE(dev_priv), "Unsupported divider\n");
1279 div = 3;
1280 break;
1281 case BXT_CDCLK_CD2X_DIV_SEL_2:
1282 div = 4;
1283 break;
1284 case BXT_CDCLK_CD2X_DIV_SEL_4:
1285 div = 8;
1286 break;
1287 default:
1288 MISSING_CASE(divider);
1289 return;
1290 }
1291
1292 cdclk_state->cdclk = DIV_ROUND_CLOSEST(cdclk_state->vco, div);
1293
1294 out:
1295 /*
1296 * Can't read this out :( Let's assume it's
1297 * at least what the CDCLK frequency requires.
1298 */
1299 cdclk_state->voltage_level =
1300 bxt_calc_voltage_level(cdclk_state->cdclk);
1301 }
1302
1303 static void bxt_de_pll_disable(struct drm_i915_private *dev_priv)
1304 {
1305 I915_WRITE(BXT_DE_PLL_ENABLE, 0);
1306
1307 /* Timeout 200us */
1308 if (intel_de_wait_for_clear(dev_priv,
1309 BXT_DE_PLL_ENABLE, BXT_DE_PLL_LOCK, 1))
1310 DRM_ERROR("timeout waiting for DE PLL unlock\n");
1311
1312 dev_priv->cdclk.hw.vco = 0;
1313 }
1314
1315 static void bxt_de_pll_enable(struct drm_i915_private *dev_priv, int vco)
1316 {
1317 int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->cdclk.hw.ref);
1318 u32 val;
1319
1320 val = I915_READ(BXT_DE_PLL_CTL);
1321 val &= ~BXT_DE_PLL_RATIO_MASK;
1322 val |= BXT_DE_PLL_RATIO(ratio);
1323 I915_WRITE(BXT_DE_PLL_CTL, val);
1324
1325 I915_WRITE(BXT_DE_PLL_ENABLE, BXT_DE_PLL_PLL_ENABLE);
1326
1327 /* Timeout 200us */
1328 if (intel_de_wait_for_set(dev_priv,
1329 BXT_DE_PLL_ENABLE, BXT_DE_PLL_LOCK, 1))
1330 DRM_ERROR("timeout waiting for DE PLL lock\n");
1331
1332 dev_priv->cdclk.hw.vco = vco;
1333 }
1334
1335 static void bxt_set_cdclk(struct drm_i915_private *dev_priv,
1336 const struct intel_cdclk_state *cdclk_state,
1337 enum pipe pipe)
1338 {
1339 int cdclk = cdclk_state->cdclk;
1340 int vco = cdclk_state->vco;
1341 u32 val, divider;
1342 int ret;
1343
1344 /* cdclk = vco / 2 / div{1,1.5,2,4} */
1345 switch (DIV_ROUND_CLOSEST(vco, cdclk)) {
1346 default:
1347 WARN_ON(cdclk != dev_priv->cdclk.hw.bypass);
1348 WARN_ON(vco != 0);
1349 /* fall through */
1350 case 2:
1351 divider = BXT_CDCLK_CD2X_DIV_SEL_1;
1352 break;
1353 case 3:
1354 WARN(IS_GEMINILAKE(dev_priv), "Unsupported divider\n");
1355 divider = BXT_CDCLK_CD2X_DIV_SEL_1_5;
1356 break;
1357 case 4:
1358 divider = BXT_CDCLK_CD2X_DIV_SEL_2;
1359 break;
1360 case 8:
1361 divider = BXT_CDCLK_CD2X_DIV_SEL_4;
1362 break;
1363 }
1364
1365 /*
1366 * Inform power controller of upcoming frequency change. BSpec
1367 * requires us to wait up to 150usec, but that leads to timeouts;
1368 * the 2ms used here is based on experiment.
1369 */
1370 ret = sandybridge_pcode_write_timeout(dev_priv,
1371 HSW_PCODE_DE_WRITE_FREQ_REQ,
1372 0x80000000, 150, 2);
1373 if (ret) {
1374 DRM_ERROR("PCode CDCLK freq change notify failed (err %d, freq %d)\n",
1375 ret, cdclk);
1376 return;
1377 }
1378
1379 if (dev_priv->cdclk.hw.vco != 0 &&
1380 dev_priv->cdclk.hw.vco != vco)
1381 bxt_de_pll_disable(dev_priv);
1382
1383 if (dev_priv->cdclk.hw.vco != vco)
1384 bxt_de_pll_enable(dev_priv, vco);
1385
1386 val = divider | skl_cdclk_decimal(cdclk);
1387 if (pipe == INVALID_PIPE)
1388 val |= BXT_CDCLK_CD2X_PIPE_NONE;
1389 else
1390 val |= BXT_CDCLK_CD2X_PIPE(pipe);
1391 /*
1392 * Disable SSA Precharge when CD clock frequency < 500 MHz,
1393 * enable otherwise.
1394 */
1395 if (cdclk >= 500000)
1396 val |= BXT_CDCLK_SSA_PRECHARGE_ENABLE;
1397 I915_WRITE(CDCLK_CTL, val);
1398
1399 if (pipe != INVALID_PIPE)
1400 intel_wait_for_vblank(dev_priv, pipe);
1401
1402 /*
1403 * The timeout isn't specified, the 2ms used here is based on
1404 * experiment.
1405 * FIXME: Waiting for the request completion could be delayed until
1406 * the next PCODE request based on BSpec.
1407 */
1408 ret = sandybridge_pcode_write_timeout(dev_priv,
1409 HSW_PCODE_DE_WRITE_FREQ_REQ,
1410 cdclk_state->voltage_level, 150, 2);
1411 if (ret) {
1412 DRM_ERROR("PCode CDCLK freq set failed, (err %d, freq %d)\n",
1413 ret, cdclk);
1414 return;
1415 }
1416
1417 intel_update_cdclk(dev_priv);
1418 }
1419
1420 static void bxt_sanitize_cdclk(struct drm_i915_private *dev_priv)
1421 {
1422 u32 cdctl, expected;
1423
1424 intel_update_cdclk(dev_priv);
1425 intel_dump_cdclk_state(&dev_priv->cdclk.hw, "Current CDCLK");
1426
1427 if (dev_priv->cdclk.hw.vco == 0 ||
1428 dev_priv->cdclk.hw.cdclk == dev_priv->cdclk.hw.bypass)
1429 goto sanitize;
1430
1431 /* DPLL okay; verify the cdclock
1432 *
1433 * Some BIOS versions leave an incorrect decimal frequency value and
1434 * set reserved MBZ bits in CDCLK_CTL at least during exiting from S4,
1435 * so sanitize this register.
1436 */
1437 cdctl = I915_READ(CDCLK_CTL);
1438 /*
1439 * Let's ignore the pipe field, since BIOS could have configured the
1440 * dividers either synced to an active pipe or asynchronously
1441 * (PIPE_NONE).
1442 */
1443 cdctl &= ~BXT_CDCLK_CD2X_PIPE_NONE;
1444
1445 expected = (cdctl & BXT_CDCLK_CD2X_DIV_SEL_MASK) |
1446 skl_cdclk_decimal(dev_priv->cdclk.hw.cdclk);
1447 /*
1448 * Disable SSA Precharge when CD clock frequency < 500 MHz,
1449 * enable otherwise.
1450 */
1451 if (dev_priv->cdclk.hw.cdclk >= 500000)
1452 expected |= BXT_CDCLK_SSA_PRECHARGE_ENABLE;
1453
1454 if (cdctl == expected)
1455 /* All well; nothing to sanitize */
1456 return;
1457
1458 sanitize:
1459 DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");
1460
1461 /* force cdclk programming */
1462 dev_priv->cdclk.hw.cdclk = 0;
1463
1464 /* force full PLL disable + enable */
1465 dev_priv->cdclk.hw.vco = -1;
1466 }
1467
1468 static void bxt_init_cdclk(struct drm_i915_private *dev_priv)
1469 {
1470 struct intel_cdclk_state cdclk_state;
1471
1472 bxt_sanitize_cdclk(dev_priv);
1473
1474 if (dev_priv->cdclk.hw.cdclk != 0 &&
1475 dev_priv->cdclk.hw.vco != 0)
1476 return;
1477
1478 cdclk_state = dev_priv->cdclk.hw;
1479
1480 /*
1481 * FIXME:
1482 * - The initial CDCLK needs to be read from VBT.
1483 * Need to make this change after VBT has changes for BXT.
1484 */
1485 if (IS_GEMINILAKE(dev_priv)) {
1486 cdclk_state.cdclk = glk_calc_cdclk(0);
1487 cdclk_state.vco = glk_de_pll_vco(dev_priv, cdclk_state.cdclk);
1488 } else {
1489 cdclk_state.cdclk = bxt_calc_cdclk(0);
1490 cdclk_state.vco = bxt_de_pll_vco(dev_priv, cdclk_state.cdclk);
1491 }
1492 cdclk_state.voltage_level = bxt_calc_voltage_level(cdclk_state.cdclk);
1493
1494 bxt_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1495 }
1496
1497 static void bxt_uninit_cdclk(struct drm_i915_private *dev_priv)
1498 {
1499 struct intel_cdclk_state cdclk_state = dev_priv->cdclk.hw;
1500
1501 cdclk_state.cdclk = cdclk_state.bypass;
1502 cdclk_state.vco = 0;
1503 cdclk_state.voltage_level = bxt_calc_voltage_level(cdclk_state.cdclk);
1504
1505 bxt_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1506 }
1507
1508 static int cnl_calc_cdclk(int min_cdclk)
1509 {
1510 if (min_cdclk > 336000)
1511 return 528000;
1512 else if (min_cdclk > 168000)
1513 return 336000;
1514 else
1515 return 168000;
1516 }
1517
1518 static u8 cnl_calc_voltage_level(int cdclk)
1519 {
1520 if (cdclk > 336000)
1521 return 2;
1522 else if (cdclk > 168000)
1523 return 1;
1524 else
1525 return 0;
1526 }
1527
1528 static void cnl_cdclk_pll_update(struct drm_i915_private *dev_priv,
1529 struct intel_cdclk_state *cdclk_state)
1530 {
1531 u32 val;
1532
1533 if (I915_READ(SKL_DSSM) & CNL_DSSM_CDCLK_PLL_REFCLK_24MHz)
1534 cdclk_state->ref = 24000;
1535 else
1536 cdclk_state->ref = 19200;
1537
1538 cdclk_state->vco = 0;
1539
1540 val = I915_READ(BXT_DE_PLL_ENABLE);
1541 if ((val & BXT_DE_PLL_PLL_ENABLE) == 0)
1542 return;
1543
1544 if (WARN_ON((val & BXT_DE_PLL_LOCK) == 0))
1545 return;
1546
1547 cdclk_state->vco = (val & CNL_CDCLK_PLL_RATIO_MASK) * cdclk_state->ref;
1548 }
1549
1550 static void cnl_get_cdclk(struct drm_i915_private *dev_priv,
1551 struct intel_cdclk_state *cdclk_state)
1552 {
1553 u32 divider;
1554 int div;
1555
1556 cnl_cdclk_pll_update(dev_priv, cdclk_state);
1557
1558 cdclk_state->cdclk = cdclk_state->bypass = cdclk_state->ref;
1559
1560 if (cdclk_state->vco == 0)
1561 goto out;
1562
1563 divider = I915_READ(CDCLK_CTL) & BXT_CDCLK_CD2X_DIV_SEL_MASK;
1564
1565 switch (divider) {
1566 case BXT_CDCLK_CD2X_DIV_SEL_1:
1567 div = 2;
1568 break;
1569 case BXT_CDCLK_CD2X_DIV_SEL_2:
1570 div = 4;
1571 break;
1572 default:
1573 MISSING_CASE(divider);
1574 return;
1575 }
1576
1577 cdclk_state->cdclk = DIV_ROUND_CLOSEST(cdclk_state->vco, div);
1578
1579 out:
1580 /*
1581 * Can't read this out :( Let's assume it's
1582 * at least what the CDCLK frequency requires.
1583 */
1584 cdclk_state->voltage_level =
1585 cnl_calc_voltage_level(cdclk_state->cdclk);
1586 }
1587
1588 static void cnl_cdclk_pll_disable(struct drm_i915_private *dev_priv)
1589 {
1590 u32 val;
1591
1592 val = I915_READ(BXT_DE_PLL_ENABLE);
1593 val &= ~BXT_DE_PLL_PLL_ENABLE;
1594 I915_WRITE(BXT_DE_PLL_ENABLE, val);
1595
1596 /* Timeout 200us */
1597 if (wait_for((I915_READ(BXT_DE_PLL_ENABLE) & BXT_DE_PLL_LOCK) == 0, 1))
1598 DRM_ERROR("timeout waiting for CDCLK PLL unlock\n");
1599
1600 dev_priv->cdclk.hw.vco = 0;
1601 }
1602
1603 static void cnl_cdclk_pll_enable(struct drm_i915_private *dev_priv, int vco)
1604 {
1605 int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->cdclk.hw.ref);
1606 u32 val;
1607
1608 val = CNL_CDCLK_PLL_RATIO(ratio);
1609 I915_WRITE(BXT_DE_PLL_ENABLE, val);
1610
1611 val |= BXT_DE_PLL_PLL_ENABLE;
1612 I915_WRITE(BXT_DE_PLL_ENABLE, val);
1613
1614 /* Timeout 200us */
1615 if (wait_for((I915_READ(BXT_DE_PLL_ENABLE) & BXT_DE_PLL_LOCK) != 0, 1))
1616 DRM_ERROR("timeout waiting for CDCLK PLL lock\n");
1617
1618 dev_priv->cdclk.hw.vco = vco;
1619 }
1620
1621 static void cnl_set_cdclk(struct drm_i915_private *dev_priv,
1622 const struct intel_cdclk_state *cdclk_state,
1623 enum pipe pipe)
1624 {
1625 int cdclk = cdclk_state->cdclk;
1626 int vco = cdclk_state->vco;
1627 u32 val, divider;
1628 int ret;
1629
1630 ret = skl_pcode_request(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1631 SKL_CDCLK_PREPARE_FOR_CHANGE,
1632 SKL_CDCLK_READY_FOR_CHANGE,
1633 SKL_CDCLK_READY_FOR_CHANGE, 3);
1634 if (ret) {
1635 DRM_ERROR("Failed to inform PCU about cdclk change (%d)\n",
1636 ret);
1637 return;
1638 }
1639
1640 /* cdclk = vco / 2 / div{1,2} */
1641 switch (DIV_ROUND_CLOSEST(vco, cdclk)) {
1642 default:
1643 WARN_ON(cdclk != dev_priv->cdclk.hw.bypass);
1644 WARN_ON(vco != 0);
1645 /* fall through */
1646 case 2:
1647 divider = BXT_CDCLK_CD2X_DIV_SEL_1;
1648 break;
1649 case 4:
1650 divider = BXT_CDCLK_CD2X_DIV_SEL_2;
1651 break;
1652 }
1653
1654 if (dev_priv->cdclk.hw.vco != 0 &&
1655 dev_priv->cdclk.hw.vco != vco)
1656 cnl_cdclk_pll_disable(dev_priv);
1657
1658 if (dev_priv->cdclk.hw.vco != vco)
1659 cnl_cdclk_pll_enable(dev_priv, vco);
1660
1661 val = divider | skl_cdclk_decimal(cdclk);
1662 if (pipe == INVALID_PIPE)
1663 val |= BXT_CDCLK_CD2X_PIPE_NONE;
1664 else
1665 val |= BXT_CDCLK_CD2X_PIPE(pipe);
1666 I915_WRITE(CDCLK_CTL, val);
1667
1668 if (pipe != INVALID_PIPE)
1669 intel_wait_for_vblank(dev_priv, pipe);
1670
1671 /* inform PCU of the change */
1672 sandybridge_pcode_write(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1673 cdclk_state->voltage_level);
1674
1675 intel_update_cdclk(dev_priv);
1676
1677 /*
1678 * Can't read out the voltage level :(
1679 * Let's just assume everything is as expected.
1680 */
1681 dev_priv->cdclk.hw.voltage_level = cdclk_state->voltage_level;
1682 }
1683
1684 static int cnl_cdclk_pll_vco(struct drm_i915_private *dev_priv, int cdclk)
1685 {
1686 int ratio;
1687
1688 if (cdclk == dev_priv->cdclk.hw.bypass)
1689 return 0;
1690
1691 switch (cdclk) {
1692 default:
1693 MISSING_CASE(cdclk);
1694 /* fall through */
1695 case 168000:
1696 case 336000:
1697 ratio = dev_priv->cdclk.hw.ref == 19200 ? 35 : 28;
1698 break;
1699 case 528000:
1700 ratio = dev_priv->cdclk.hw.ref == 19200 ? 55 : 44;
1701 break;
1702 }
1703
1704 return dev_priv->cdclk.hw.ref * ratio;
1705 }
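
/*
 * Both reference clocks end up at the same VCO: e.g. for 528000 kHz,
 * 19200 * 55 = 24000 * 44 = 1056000 kHz, and CDCLK = VCO / 2.
 */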
1706
1707 static void cnl_sanitize_cdclk(struct drm_i915_private *dev_priv)
1708 {
1709 u32 cdctl, expected;
1710
1711 intel_update_cdclk(dev_priv);
1712 intel_dump_cdclk_state(&dev_priv->cdclk.hw, "Current CDCLK");
1713
1714 if (dev_priv->cdclk.hw.vco == 0 ||
1715 dev_priv->cdclk.hw.cdclk == dev_priv->cdclk.hw.bypass)
1716 goto sanitize;
1717
1718 /* DPLL okay; verify the cdclock
1719 *
1720 * Some BIOS versions leave an incorrect decimal frequency value and
1721 * set reserved MBZ bits in CDCLK_CTL at least during exiting from S4,
1722 * so sanitize this register.
1723 */
1724 cdctl = I915_READ(CDCLK_CTL);
1725 /*
1726 * Let's ignore the pipe field, since BIOS could have configured the
1727 * dividers either synced to an active pipe or asynchronously
1728 * (PIPE_NONE).
1729 */
1730 cdctl &= ~BXT_CDCLK_CD2X_PIPE_NONE;
1731
1732 expected = (cdctl & BXT_CDCLK_CD2X_DIV_SEL_MASK) |
1733 skl_cdclk_decimal(dev_priv->cdclk.hw.cdclk);
1734
1735 if (cdctl == expected)
1736 /* All well; nothing to sanitize */
1737 return;
1738
1739 sanitize:
1740 DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");
1741
1742 /* force cdclk programming */
1743 dev_priv->cdclk.hw.cdclk = 0;
1744
1745 /* force full PLL disable + enable */
1746 dev_priv->cdclk.hw.vco = -1;
1747 }
1748
1749 static int icl_calc_cdclk(int min_cdclk, unsigned int ref)
1750 {
1751 static const int ranges_24[] = { 180000, 192000, 312000, 552000, 648000 };
1752 static const int ranges_19_38[] = { 172800, 192000, 307200, 556800, 652800 };
1753 const int *ranges;
1754 int len, i;
1755
1756 switch (ref) {
1757 default:
1758 MISSING_CASE(ref);
1759 /* fall through */
1760 case 24000:
1761 ranges = ranges_24;
1762 len = ARRAY_SIZE(ranges_24);
1763 break;
1764 case 19200:
1765 case 38400:
1766 ranges = ranges_19_38;
1767 len = ARRAY_SIZE(ranges_19_38);
1768 break;
1769 }
1770
1771 for (i = 0; i < len; i++) {
1772 if (min_cdclk <= ranges[i])
1773 return ranges[i];
1774 }
1775
1776 WARN_ON(min_cdclk > ranges[len - 1]);
1777 return ranges[len - 1];
1778 }
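
/*
 * This picks the smallest legal CDCLK that still satisfies min_cdclk for
 * the given reference: e.g. min_cdclk = 300000 with a 24 MHz reference
 * selects 312000 kHz.
 */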
1779
1780 static int icl_calc_cdclk_pll_vco(struct drm_i915_private *dev_priv, int cdclk)
1781 {
1782 int ratio;
1783
1784 if (cdclk == dev_priv->cdclk.hw.bypass)
1785 return 0;
1786
1787 switch (cdclk) {
1788 default:
1789 MISSING_CASE(cdclk);
1790 /* fall through */
1791 case 172800:
1792 case 307200:
1793 case 556800:
1794 case 652800:
1795 WARN_ON(dev_priv->cdclk.hw.ref != 19200 &&
1796 dev_priv->cdclk.hw.ref != 38400);
1797 break;
1798 case 180000:
1799 case 312000:
1800 case 552000:
1801 case 648000:
1802 WARN_ON(dev_priv->cdclk.hw.ref != 24000);
1803 break;
1804 case 192000:
1805 WARN_ON(dev_priv->cdclk.hw.ref != 19200 &&
1806 dev_priv->cdclk.hw.ref != 38400 &&
1807 dev_priv->cdclk.hw.ref != 24000);
1808 break;
1809 }
1810
1811 ratio = cdclk / (dev_priv->cdclk.hw.ref / 2);
1812
1813 return dev_priv->cdclk.hw.ref * ratio;
1814 }
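
/*
 * The ratio follows directly from cdclk = vco / 2: e.g. 652800 kHz with a
 * 38400 kHz reference gives ratio = 652800 / 19200 = 34, so
 * vco = 38400 * 34 = 1305600 kHz and cdclk = 1305600 / 2 = 652800 kHz.
 */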
1815
1816 static void icl_set_cdclk(struct drm_i915_private *dev_priv,
1817 const struct intel_cdclk_state *cdclk_state,
1818 enum pipe pipe)
1819 {
1820 unsigned int cdclk = cdclk_state->cdclk;
1821 unsigned int vco = cdclk_state->vco;
1822 int ret;
1823
1824 ret = skl_pcode_request(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1825 SKL_CDCLK_PREPARE_FOR_CHANGE,
1826 SKL_CDCLK_READY_FOR_CHANGE,
1827 SKL_CDCLK_READY_FOR_CHANGE, 3);
1828 if (ret) {
1829 DRM_ERROR("Failed to inform PCU about cdclk change (%d)\n",
1830 ret);
1831 return;
1832 }
1833
1834 if (dev_priv->cdclk.hw.vco != 0 &&
1835 dev_priv->cdclk.hw.vco != vco)
1836 cnl_cdclk_pll_disable(dev_priv);
1837
1838 if (dev_priv->cdclk.hw.vco != vco)
1839 cnl_cdclk_pll_enable(dev_priv, vco);
1840
1841 /*
1842 * On ICL CD2X_DIV can only be 1, so we'll never end up changing the
1843 * divider here synchronized to a pipe while CDCLK is on, nor will we
1844 * need the corresponding vblank wait.
1845 */
1846 I915_WRITE(CDCLK_CTL, ICL_CDCLK_CD2X_PIPE_NONE |
1847 skl_cdclk_decimal(cdclk));
1848
1849 sandybridge_pcode_write(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1850 cdclk_state->voltage_level);
1851
1852 intel_update_cdclk(dev_priv);
1853
1854 /*
1855 * Can't read out the voltage level :(
1856 * Let's just assume everything is as expected.
1857 */
1858 dev_priv->cdclk.hw.voltage_level = cdclk_state->voltage_level;
1859 }
1860
1861 static u8 icl_calc_voltage_level(struct drm_i915_private *dev_priv, int cdclk)
1862 {
1863 if (IS_ELKHARTLAKE(dev_priv)) {
1864 if (cdclk > 312000)
1865 return 2;
1866 else if (cdclk > 180000)
1867 return 1;
1868 else
1869 return 0;
1870 } else {
1871 if (cdclk > 556800)
1872 return 2;
1873 else if (cdclk > 312000)
1874 return 1;
1875 else
1876 return 0;
1877 }
1878 }
1879
1880 static void icl_get_cdclk(struct drm_i915_private *dev_priv,
1881 struct intel_cdclk_state *cdclk_state)
1882 {
1883 u32 val;
1884
1885 cdclk_state->bypass = 50000;
1886
1887 val = I915_READ(SKL_DSSM);
1888 switch (val & ICL_DSSM_CDCLK_PLL_REFCLK_MASK) {
1889 default:
1890 MISSING_CASE(val);
1891 /* fall through */
1892 case ICL_DSSM_CDCLK_PLL_REFCLK_24MHz:
1893 cdclk_state->ref = 24000;
1894 break;
1895 case ICL_DSSM_CDCLK_PLL_REFCLK_19_2MHz:
1896 cdclk_state->ref = 19200;
1897 break;
1898 case ICL_DSSM_CDCLK_PLL_REFCLK_38_4MHz:
1899 cdclk_state->ref = 38400;
1900 break;
1901 }
1902
1903 val = I915_READ(BXT_DE_PLL_ENABLE);
1904 if ((val & BXT_DE_PLL_PLL_ENABLE) == 0 ||
1905 (val & BXT_DE_PLL_LOCK) == 0) {
1906 /*
1907 		 * The CDCLK PLL is disabled, so the VCO/ratio doesn't matter;
1908 		 * set it to zero to signal that.
1909 */
1910 cdclk_state->vco = 0;
1911 cdclk_state->cdclk = cdclk_state->bypass;
1912 goto out;
1913 }
1914
1915 cdclk_state->vco = (val & BXT_DE_PLL_RATIO_MASK) * cdclk_state->ref;
1916
1917 val = I915_READ(CDCLK_CTL);
1918 WARN_ON((val & BXT_CDCLK_CD2X_DIV_SEL_MASK) != 0);
1919
1920 cdclk_state->cdclk = cdclk_state->vco / 2;
1921
1922 out:
1923 /*
1924 * Can't read this out :( Let's assume it's
1925 * at least what the CDCLK frequency requires.
1926 */
1927 cdclk_state->voltage_level =
1928 icl_calc_voltage_level(dev_priv, cdclk_state->cdclk);
1929 }
1930
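/*
 * Sanitize the CDCLK state left behind by the pre-OS firmware: if CDCLK is
 * running off the bypass clock, the CD2X divider is not 1, or the decimal
 * frequency field doesn't match the readout, reprogram everything based on
 * the minimum CDCLK the reference clock allows.
 */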
1931 static void icl_init_cdclk(struct drm_i915_private *dev_priv)
1932 {
1933 struct intel_cdclk_state sanitized_state;
1934 u32 val;
1935
1936 /* This sets dev_priv->cdclk.hw. */
1937 intel_update_cdclk(dev_priv);
1938 intel_dump_cdclk_state(&dev_priv->cdclk.hw, "Current CDCLK");
1939
1940 	/* cdclk == bypass means the CDCLK PLL is disabled. */
1941 if (dev_priv->cdclk.hw.cdclk == dev_priv->cdclk.hw.bypass)
1942 goto sanitize;
1943
1944 val = I915_READ(CDCLK_CTL);
1945
1946 if ((val & BXT_CDCLK_CD2X_DIV_SEL_MASK) != 0)
1947 goto sanitize;
1948
1949 if ((val & CDCLK_FREQ_DECIMAL_MASK) !=
1950 skl_cdclk_decimal(dev_priv->cdclk.hw.cdclk))
1951 goto sanitize;
1952
1953 return;
1954
1955 sanitize:
1956 DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");
1957
1958 sanitized_state.ref = dev_priv->cdclk.hw.ref;
1959 sanitized_state.cdclk = icl_calc_cdclk(0, sanitized_state.ref);
1960 sanitized_state.vco = icl_calc_cdclk_pll_vco(dev_priv,
1961 sanitized_state.cdclk);
1962 sanitized_state.voltage_level =
1963 icl_calc_voltage_level(dev_priv,
1964 sanitized_state.cdclk);
1965
1966 icl_set_cdclk(dev_priv, &sanitized_state, INVALID_PIPE);
1967 }
1968
1969 static void icl_uninit_cdclk(struct drm_i915_private *dev_priv)
1970 {
1971 struct intel_cdclk_state cdclk_state = dev_priv->cdclk.hw;
1972
1973 cdclk_state.cdclk = cdclk_state.bypass;
1974 cdclk_state.vco = 0;
1975 cdclk_state.voltage_level = icl_calc_voltage_level(dev_priv,
1976 cdclk_state.cdclk);
1977
1978 icl_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1979 }
1980
1981 static void cnl_init_cdclk(struct drm_i915_private *dev_priv)
1982 {
1983 struct intel_cdclk_state cdclk_state;
1984
1985 cnl_sanitize_cdclk(dev_priv);
1986
1987 if (dev_priv->cdclk.hw.cdclk != 0 &&
1988 dev_priv->cdclk.hw.vco != 0)
1989 return;
1990
1991 cdclk_state = dev_priv->cdclk.hw;
1992
1993 cdclk_state.cdclk = cnl_calc_cdclk(0);
1994 cdclk_state.vco = cnl_cdclk_pll_vco(dev_priv, cdclk_state.cdclk);
1995 cdclk_state.voltage_level = cnl_calc_voltage_level(cdclk_state.cdclk);
1996
1997 cnl_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1998 }
1999
2000 static void cnl_uninit_cdclk(struct drm_i915_private *dev_priv)
2001 {
2002 struct intel_cdclk_state cdclk_state = dev_priv->cdclk.hw;
2003
2004 cdclk_state.cdclk = cdclk_state.bypass;
2005 cdclk_state.vco = 0;
2006 cdclk_state.voltage_level = cnl_calc_voltage_level(cdclk_state.cdclk);
2007
2008 cnl_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
2009 }
2010
2011 /**
2012 * intel_cdclk_init - Initialize CDCLK
2013 * @i915: i915 device
2014 *
2015 * Initialize CDCLK. This consists mainly of initializing dev_priv->cdclk.hw and
2016 * sanitizing the state of the hardware if needed. This is generally done only
2017 * during the display core initialization sequence, after which the DMC will
2018 * take care of turning CDCLK off/on as needed.
2019 */
2020 void intel_cdclk_init(struct drm_i915_private *i915)
2021 {
2022 if (INTEL_GEN(i915) >= 11)
2023 icl_init_cdclk(i915);
2024 else if (IS_CANNONLAKE(i915))
2025 cnl_init_cdclk(i915);
2026 else if (IS_GEN9_BC(i915))
2027 skl_init_cdclk(i915);
2028 else if (IS_GEN9_LP(i915))
2029 bxt_init_cdclk(i915);
2030 }
2031
2032 /**
2033 * intel_cdclk_uninit - Uninitialize CDCLK
2034 * @i915: i915 device
2035 *
2036 * Uninitialize CDCLK. This is done only during the display core
2037 * uninitialization sequence.
2038 */
2039 void intel_cdclk_uninit(struct drm_i915_private *i915)
2040 {
2041 if (INTEL_GEN(i915) >= 11)
2042 icl_uninit_cdclk(i915);
2043 else if (IS_CANNONLAKE(i915))
2044 cnl_uninit_cdclk(i915);
2045 else if (IS_GEN9_BC(i915))
2046 skl_uninit_cdclk(i915);
2047 else if (IS_GEN9_LP(i915))
2048 bxt_uninit_cdclk(i915);
2049 }
2050
2051 /**
2052 * intel_cdclk_needs_modeset - Determine if two CDCLK states require a modeset on all pipes
2053 * @a: first CDCLK state
2054 * @b: second CDCLK state
2055 *
2056 * Returns:
2057 * True if the CDCLK states require pipes to be off during reprogramming, false if not.
2058 */
2059 bool intel_cdclk_needs_modeset(const struct intel_cdclk_state *a,
2060 const struct intel_cdclk_state *b)
2061 {
2062 return a->cdclk != b->cdclk ||
2063 a->vco != b->vco ||
2064 a->ref != b->ref;
2065 }
2066
2067 /**
2068 * intel_cdclk_needs_cd2x_update - Determine if two CDCLK states require a cd2x divider update
2069  * @dev_priv: i915 device
2070 * @a: first CDCLK state
2071 * @b: second CDCLK state
2072 *
2073 * Returns:
2074 * True if the CDCLK states require just a cd2x divider update, false if not.
2075 */
2076 bool intel_cdclk_needs_cd2x_update(struct drm_i915_private *dev_priv,
2077 const struct intel_cdclk_state *a,
2078 const struct intel_cdclk_state *b)
2079 {
2080 /* Older hw doesn't have the capability */
2081 if (INTEL_GEN(dev_priv) < 10 && !IS_GEN9_LP(dev_priv))
2082 return false;
2083
2084 return a->cdclk != b->cdclk &&
2085 a->vco == b->vco &&
2086 a->ref == b->ref;
2087 }
2088
2089 /**
2090 * intel_cdclk_changed - Determine if two CDCLK states are different
2091 * @a: first CDCLK state
2092 * @b: second CDCLK state
2093 *
2094 * Returns:
2095 * True if the CDCLK states don't match, false if they do.
2096 */
2097 bool intel_cdclk_changed(const struct intel_cdclk_state *a,
2098 const struct intel_cdclk_state *b)
2099 {
2100 return intel_cdclk_needs_modeset(a, b) ||
2101 a->voltage_level != b->voltage_level;
2102 }
2103
2104 /**
2105 * intel_cdclk_swap_state - make atomic CDCLK configuration effective
2106 * @state: atomic state
2107 *
2108 * This is the CDCLK version of drm_atomic_helper_swap_state() since the
2109 * helper does not handle driver-specific global state.
2110 *
2111 * Similarly to the atomic helpers this function does a complete swap,
2112 * i.e. it also puts the old state into @state. This is used by the commit
2113 * code to determine how CDCLK has changed (for instance did it increase or
2114 * decrease).
2115 */
2116 void intel_cdclk_swap_state(struct intel_atomic_state *state)
2117 {
2118 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2119
2120 swap(state->cdclk.logical, dev_priv->cdclk.logical);
2121 swap(state->cdclk.actual, dev_priv->cdclk.actual);
2122 }
2123
2124 void intel_dump_cdclk_state(const struct intel_cdclk_state *cdclk_state,
2125 const char *context)
2126 {
2127 DRM_DEBUG_DRIVER("%s %d kHz, VCO %d kHz, ref %d kHz, bypass %d kHz, voltage level %d\n",
2128 context, cdclk_state->cdclk, cdclk_state->vco,
2129 cdclk_state->ref, cdclk_state->bypass,
2130 cdclk_state->voltage_level);
2131 }
2132
2133 /**
2134 * intel_set_cdclk - Push the CDCLK state to the hardware
2135 * @dev_priv: i915 device
2136 * @cdclk_state: new CDCLK state
2137 * @pipe: pipe with which to synchronize the update
2138 *
2139 * Program the hardware based on the passed in CDCLK state,
2140 * if necessary.
2141 */
2142 static void intel_set_cdclk(struct drm_i915_private *dev_priv,
2143 const struct intel_cdclk_state *cdclk_state,
2144 enum pipe pipe)
2145 {
2146 if (!intel_cdclk_changed(&dev_priv->cdclk.hw, cdclk_state))
2147 return;
2148
2149 if (WARN_ON_ONCE(!dev_priv->display.set_cdclk))
2150 return;
2151
2152 intel_dump_cdclk_state(cdclk_state, "Changing CDCLK to");
2153
2154 dev_priv->display.set_cdclk(dev_priv, cdclk_state, pipe);
2155
2156 if (WARN(intel_cdclk_changed(&dev_priv->cdclk.hw, cdclk_state),
2157 "cdclk state doesn't match!\n")) {
2158 intel_dump_cdclk_state(&dev_priv->cdclk.hw, "[hw state]");
2159 intel_dump_cdclk_state(cdclk_state, "[sw state]");
2160 }
2161 }
2162
2163 /**
2164 * intel_set_cdclk_pre_plane_update - Push the CDCLK state to the hardware
2165 * @dev_priv: i915 device
2166 * @old_state: old CDCLK state
2167 * @new_state: new CDCLK state
2168 * @pipe: pipe with which to synchronize the update
2169 *
2170  * Program the hardware based on the passed in CDCLK state, if necessary,
2171  * before the HW plane state is updated.
2172 */
2173 void
2174 intel_set_cdclk_pre_plane_update(struct drm_i915_private *dev_priv,
2175 const struct intel_cdclk_state *old_state,
2176 const struct intel_cdclk_state *new_state,
2177 enum pipe pipe)
2178 {
2179 if (pipe == INVALID_PIPE || old_state->cdclk <= new_state->cdclk)
2180 intel_set_cdclk(dev_priv, new_state, pipe);
2181 }
2182
2183 /**
2184 * intel_set_cdclk_post_plane_update - Push the CDCLK state to the hardware
2185 * @dev_priv: i915 device
2186 * @old_state: old CDCLK state
2187 * @new_state: new CDCLK state
2188 * @pipe: pipe with which to synchronize the update
2189 *
2190  * Program the hardware based on the passed in CDCLK state, if necessary,
2191  * after the HW plane state has been updated.
2192 */
2193 void
2194 intel_set_cdclk_post_plane_update(struct drm_i915_private *dev_priv,
2195 const struct intel_cdclk_state *old_state,
2196 const struct intel_cdclk_state *new_state,
2197 enum pipe pipe)
2198 {
2199 if (pipe != INVALID_PIPE && old_state->cdclk > new_state->cdclk)
2200 intel_set_cdclk(dev_priv, new_state, pipe);
2201 }
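/*
 * Note on ordering: when the update is synchronized to a pipe, a CDCLK
 * increase is applied before the plane update and a decrease after it,
 * presumably so that CDCLK stays sufficient for whichever configuration is
 * active during the transition. With INVALID_PIPE the new state is applied
 * up front.
 */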
2202
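/*
 * Pixel rate to minimum CDCLK conversion used below: GLK and gen10+ allow a
 * pixel rate of up to 2x CDCLK, HSW/BDW/gen9 up to 1x, CHV up to 95% of
 * CDCLK, and everything else up to 90%. For example (illustrative numbers),
 * a 594000 kHz pixel rate needs at least 297000 kHz of CDCLK on ICL, but
 * 660000 kHz on VLV (594000 * 100 / 90, rounded up).
 */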
2203 static int intel_pixel_rate_to_cdclk(struct drm_i915_private *dev_priv,
2204 int pixel_rate)
2205 {
2206 if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
2207 return DIV_ROUND_UP(pixel_rate, 2);
2208 else if (IS_GEN(dev_priv, 9) ||
2209 IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
2210 return pixel_rate;
2211 else if (IS_CHERRYVIEW(dev_priv))
2212 return DIV_ROUND_UP(pixel_rate * 100, 95);
2213 else
2214 return DIV_ROUND_UP(pixel_rate * 100, 90);
2215 }
2216
2217 int intel_crtc_compute_min_cdclk(const struct intel_crtc_state *crtc_state)
2218 {
2219 struct drm_i915_private *dev_priv =
2220 to_i915(crtc_state->base.crtc->dev);
2221 int min_cdclk;
2222
2223 if (!crtc_state->base.enable)
2224 return 0;
2225
2226 min_cdclk = intel_pixel_rate_to_cdclk(dev_priv, crtc_state->pixel_rate);
2227
2228 /* pixel rate mustn't exceed 95% of cdclk with IPS on BDW */
2229 if (IS_BROADWELL(dev_priv) && hsw_crtc_state_ips_capable(crtc_state))
2230 min_cdclk = DIV_ROUND_UP(min_cdclk * 100, 95);
2231
2232 /* BSpec says "Do not use DisplayPort with CDCLK less than 432 MHz,
2233 * audio enabled, port width x4, and link rate HBR2 (5.4 GHz), or else
2234 * there may be audio corruption or screen corruption." This cdclk
2235 * restriction for GLK is 316.8 MHz.
2236 */
2237 if (intel_crtc_has_dp_encoder(crtc_state) &&
2238 crtc_state->has_audio &&
2239 crtc_state->port_clock >= 540000 &&
2240 crtc_state->lane_count == 4) {
2241 if (IS_CANNONLAKE(dev_priv) || IS_GEMINILAKE(dev_priv)) {
2242 /* Display WA #1145: glk,cnl */
2243 min_cdclk = max(316800, min_cdclk);
2244 } else if (IS_GEN(dev_priv, 9) || IS_BROADWELL(dev_priv)) {
2245 /* Display WA #1144: skl,bxt */
2246 min_cdclk = max(432000, min_cdclk);
2247 }
2248 }
2249
2250 /*
2251 	 * According to BSpec, "The CD clock frequency must be at least twice
2252 	 * the frequency of the Azalia BCLK", and BCLK is 96 MHz by default.
2253 */
2254 if (crtc_state->has_audio && INTEL_GEN(dev_priv) >= 9)
2255 min_cdclk = max(2 * 96000, min_cdclk);
2256
2257 /*
2258 * "For DP audio configuration, cdclk frequency shall be set to
2259 * meet the following requirements:
2260 * DP Link Frequency(MHz) | Cdclk frequency(MHz)
2261 * 270 | 320 or higher
2262 * 162 | 200 or higher"
2263 */
2264 if ((IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) &&
2265 intel_crtc_has_dp_encoder(crtc_state) && crtc_state->has_audio)
2266 min_cdclk = max(crtc_state->port_clock, min_cdclk);
2267
2268 /*
2269 * On Valleyview some DSI panels lose (v|h)sync when the clock is lower
2270 	 * than 320000 kHz.
2271 */
2272 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DSI) &&
2273 IS_VALLEYVIEW(dev_priv))
2274 min_cdclk = max(320000, min_cdclk);
2275
2276 /*
2277 	 * On Geminilake, once CDCLK gets as low as 79200 kHz the picture
2278 	 * becomes unstable, even though the programmed DSI PLL and DE PLL
2279 	 * values are correct.
2280 */
2281 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DSI) &&
2282 IS_GEMINILAKE(dev_priv))
2283 min_cdclk = max(158400, min_cdclk);
2284
2285 if (min_cdclk > dev_priv->max_cdclk_freq) {
2286 DRM_DEBUG_KMS("required cdclk (%d kHz) exceeds max (%d kHz)\n",
2287 min_cdclk, dev_priv->max_cdclk_freq);
2288 return -EINVAL;
2289 }
2290
2291 return min_cdclk;
2292 }
2293
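/*
 * The state-wide minimum CDCLK is the maximum of the per-pipe minimums
 * (recomputed here for the pipes in the atomic state, inherited from
 * dev_priv->min_cdclk for the rest) and the force_min_cdclk override.
 */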
2294 static int intel_compute_min_cdclk(struct intel_atomic_state *state)
2295 {
2296 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2297 struct intel_crtc *crtc;
2298 struct intel_crtc_state *crtc_state;
2299 int min_cdclk, i;
2300 enum pipe pipe;
2301
2302 memcpy(state->min_cdclk, dev_priv->min_cdclk,
2303 sizeof(state->min_cdclk));
2304
2305 for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
2306 min_cdclk = intel_crtc_compute_min_cdclk(crtc_state);
2307 if (min_cdclk < 0)
2308 return min_cdclk;
2309
2310 state->min_cdclk[i] = min_cdclk;
2311 }
2312
2313 min_cdclk = state->cdclk.force_min_cdclk;
2314 for_each_pipe(dev_priv, pipe)
2315 min_cdclk = max(state->min_cdclk[pipe], min_cdclk);
2316
2317 return min_cdclk;
2318 }
2319
2320 /*
2321  * Note that this function assumes that 0 is
2322 * the lowest voltage value, and higher values
2323 * correspond to increasingly higher voltages.
2324 *
2325 * Should that relationship no longer hold on
2326 * future platforms this code will need to be
2327 * adjusted.
2328 */
2329 static u8 cnl_compute_min_voltage_level(struct intel_atomic_state *state)
2330 {
2331 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2332 struct intel_crtc *crtc;
2333 struct intel_crtc_state *crtc_state;
2334 u8 min_voltage_level;
2335 int i;
2336 enum pipe pipe;
2337
2338 memcpy(state->min_voltage_level, dev_priv->min_voltage_level,
2339 sizeof(state->min_voltage_level));
2340
2341 for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
2342 if (crtc_state->base.enable)
2343 state->min_voltage_level[i] =
2344 crtc_state->min_voltage_level;
2345 else
2346 state->min_voltage_level[i] = 0;
2347 }
2348
2349 min_voltage_level = 0;
2350 for_each_pipe(dev_priv, pipe)
2351 min_voltage_level = max(state->min_voltage_level[pipe],
2352 min_voltage_level);
2353
2354 return min_voltage_level;
2355 }
2356
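/*
 * All of the modeset_calc_cdclk() implementations below follow the same
 * pattern: compute the "logical" state from the minimum CDCLK required by
 * the enabled crtcs, and make the "actual" state equal to it, except when
 * no crtcs are active, in which case the actual state can drop down to
 * whatever force_min_cdclk requires.
 */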
2357 static int vlv_modeset_calc_cdclk(struct intel_atomic_state *state)
2358 {
2359 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2360 int min_cdclk, cdclk;
2361
2362 min_cdclk = intel_compute_min_cdclk(state);
2363 if (min_cdclk < 0)
2364 return min_cdclk;
2365
2366 cdclk = vlv_calc_cdclk(dev_priv, min_cdclk);
2367
2368 state->cdclk.logical.cdclk = cdclk;
2369 state->cdclk.logical.voltage_level =
2370 vlv_calc_voltage_level(dev_priv, cdclk);
2371
2372 if (!state->active_crtcs) {
2373 cdclk = vlv_calc_cdclk(dev_priv, state->cdclk.force_min_cdclk);
2374
2375 state->cdclk.actual.cdclk = cdclk;
2376 state->cdclk.actual.voltage_level =
2377 vlv_calc_voltage_level(dev_priv, cdclk);
2378 } else {
2379 state->cdclk.actual = state->cdclk.logical;
2380 }
2381
2382 return 0;
2383 }
2384
2385 static int bdw_modeset_calc_cdclk(struct intel_atomic_state *state)
2386 {
2387 int min_cdclk, cdclk;
2388
2389 min_cdclk = intel_compute_min_cdclk(state);
2390 if (min_cdclk < 0)
2391 return min_cdclk;
2392
2393 /*
2394 * FIXME should also account for plane ratio
2395 * once 64bpp pixel formats are supported.
2396 */
2397 cdclk = bdw_calc_cdclk(min_cdclk);
2398
2399 state->cdclk.logical.cdclk = cdclk;
2400 state->cdclk.logical.voltage_level =
2401 bdw_calc_voltage_level(cdclk);
2402
2403 if (!state->active_crtcs) {
2404 cdclk = bdw_calc_cdclk(state->cdclk.force_min_cdclk);
2405
2406 state->cdclk.actual.cdclk = cdclk;
2407 state->cdclk.actual.voltage_level =
2408 bdw_calc_voltage_level(cdclk);
2409 } else {
2410 state->cdclk.actual = state->cdclk.logical;
2411 }
2412
2413 return 0;
2414 }
2415
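/*
 * Certain eDP link rates (port_clock / 2 of 108000 or 216000 kHz) need the
 * 8640 MHz DPLL0 VCO; everything else uses the 8100 MHz VCO. The chosen VCO
 * then constrains which CDCLK frequencies are available.
 */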
2416 static int skl_dpll0_vco(struct intel_atomic_state *state)
2417 {
2418 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2419 struct intel_crtc *crtc;
2420 struct intel_crtc_state *crtc_state;
2421 int vco, i;
2422
2423 vco = state->cdclk.logical.vco;
2424 if (!vco)
2425 vco = dev_priv->skl_preferred_vco_freq;
2426
2427 for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
2428 if (!crtc_state->base.enable)
2429 continue;
2430
2431 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
2432 continue;
2433
2434 /*
2435 * DPLL0 VCO may need to be adjusted to get the correct
2436 * clock for eDP. This will affect cdclk as well.
2437 */
2438 switch (crtc_state->port_clock / 2) {
2439 case 108000:
2440 case 216000:
2441 vco = 8640000;
2442 break;
2443 default:
2444 vco = 8100000;
2445 break;
2446 }
2447 }
2448
2449 return vco;
2450 }
2451
2452 static int skl_modeset_calc_cdclk(struct intel_atomic_state *state)
2453 {
2454 int min_cdclk, cdclk, vco;
2455
2456 min_cdclk = intel_compute_min_cdclk(state);
2457 if (min_cdclk < 0)
2458 return min_cdclk;
2459
2460 vco = skl_dpll0_vco(state);
2461
2462 /*
2463 * FIXME should also account for plane ratio
2464 * once 64bpp pixel formats are supported.
2465 */
2466 cdclk = skl_calc_cdclk(min_cdclk, vco);
2467
2468 state->cdclk.logical.vco = vco;
2469 state->cdclk.logical.cdclk = cdclk;
2470 state->cdclk.logical.voltage_level =
2471 skl_calc_voltage_level(cdclk);
2472
2473 if (!state->active_crtcs) {
2474 cdclk = skl_calc_cdclk(state->cdclk.force_min_cdclk, vco);
2475
2476 state->cdclk.actual.vco = vco;
2477 state->cdclk.actual.cdclk = cdclk;
2478 state->cdclk.actual.voltage_level =
2479 skl_calc_voltage_level(cdclk);
2480 } else {
2481 state->cdclk.actual = state->cdclk.logical;
2482 }
2483
2484 return 0;
2485 }
2486
2487 static int bxt_modeset_calc_cdclk(struct intel_atomic_state *state)
2488 {
2489 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2490 int min_cdclk, cdclk, vco;
2491
2492 min_cdclk = intel_compute_min_cdclk(state);
2493 if (min_cdclk < 0)
2494 return min_cdclk;
2495
2496 if (IS_GEMINILAKE(dev_priv)) {
2497 cdclk = glk_calc_cdclk(min_cdclk);
2498 vco = glk_de_pll_vco(dev_priv, cdclk);
2499 } else {
2500 cdclk = bxt_calc_cdclk(min_cdclk);
2501 vco = bxt_de_pll_vco(dev_priv, cdclk);
2502 }
2503
2504 state->cdclk.logical.vco = vco;
2505 state->cdclk.logical.cdclk = cdclk;
2506 state->cdclk.logical.voltage_level =
2507 bxt_calc_voltage_level(cdclk);
2508
2509 if (!state->active_crtcs) {
2510 if (IS_GEMINILAKE(dev_priv)) {
2511 cdclk = glk_calc_cdclk(state->cdclk.force_min_cdclk);
2512 vco = glk_de_pll_vco(dev_priv, cdclk);
2513 } else {
2514 cdclk = bxt_calc_cdclk(state->cdclk.force_min_cdclk);
2515 vco = bxt_de_pll_vco(dev_priv, cdclk);
2516 }
2517
2518 state->cdclk.actual.vco = vco;
2519 state->cdclk.actual.cdclk = cdclk;
2520 state->cdclk.actual.voltage_level =
2521 bxt_calc_voltage_level(cdclk);
2522 } else {
2523 state->cdclk.actual = state->cdclk.logical;
2524 }
2525
2526 return 0;
2527 }
2528
2529 static int cnl_modeset_calc_cdclk(struct intel_atomic_state *state)
2530 {
2531 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2532 int min_cdclk, cdclk, vco;
2533
2534 min_cdclk = intel_compute_min_cdclk(state);
2535 if (min_cdclk < 0)
2536 return min_cdclk;
2537
2538 cdclk = cnl_calc_cdclk(min_cdclk);
2539 vco = cnl_cdclk_pll_vco(dev_priv, cdclk);
2540
2541 state->cdclk.logical.vco = vco;
2542 state->cdclk.logical.cdclk = cdclk;
2543 state->cdclk.logical.voltage_level =
2544 max(cnl_calc_voltage_level(cdclk),
2545 cnl_compute_min_voltage_level(state));
2546
2547 if (!state->active_crtcs) {
2548 cdclk = cnl_calc_cdclk(state->cdclk.force_min_cdclk);
2549 vco = cnl_cdclk_pll_vco(dev_priv, cdclk);
2550
2551 state->cdclk.actual.vco = vco;
2552 state->cdclk.actual.cdclk = cdclk;
2553 state->cdclk.actual.voltage_level =
2554 cnl_calc_voltage_level(cdclk);
2555 } else {
2556 state->cdclk.actual = state->cdclk.logical;
2557 }
2558
2559 return 0;
2560 }
2561
2562 static int icl_modeset_calc_cdclk(struct intel_atomic_state *state)
2563 {
2564 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2565 unsigned int ref = state->cdclk.logical.ref;
2566 int min_cdclk, cdclk, vco;
2567
2568 min_cdclk = intel_compute_min_cdclk(state);
2569 if (min_cdclk < 0)
2570 return min_cdclk;
2571
2572 cdclk = icl_calc_cdclk(min_cdclk, ref);
2573 vco = icl_calc_cdclk_pll_vco(dev_priv, cdclk);
2574
2575 state->cdclk.logical.vco = vco;
2576 state->cdclk.logical.cdclk = cdclk;
2577 state->cdclk.logical.voltage_level =
2578 max(icl_calc_voltage_level(dev_priv, cdclk),
2579 cnl_compute_min_voltage_level(state));
2580
2581 if (!state->active_crtcs) {
2582 cdclk = icl_calc_cdclk(state->cdclk.force_min_cdclk, ref);
2583 vco = icl_calc_cdclk_pll_vco(dev_priv, cdclk);
2584
2585 state->cdclk.actual.vco = vco;
2586 state->cdclk.actual.cdclk = cdclk;
2587 state->cdclk.actual.voltage_level =
2588 icl_calc_voltage_level(dev_priv, cdclk);
2589 } else {
2590 state->cdclk.actual = state->cdclk.logical;
2591 }
2592
2593 return 0;
2594 }
2595
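/*
 * Maximum dot clock derived from the maximum CDCLK, using the inverse of the
 * ratios in intel_pixel_rate_to_cdclk(): 2x CDCLK on GLK and gen10+, 1x on
 * HSW/BDW/gen9, 95% on CHV, 2 * 90% on gen3 and older, and 90% elsewhere.
 */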
2596 static int intel_compute_max_dotclk(struct drm_i915_private *dev_priv)
2597 {
2598 int max_cdclk_freq = dev_priv->max_cdclk_freq;
2599
2600 if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
2601 return 2 * max_cdclk_freq;
2602 else if (IS_GEN(dev_priv, 9) ||
2603 IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
2604 return max_cdclk_freq;
2605 else if (IS_CHERRYVIEW(dev_priv))
2606 		return max_cdclk_freq * 95 / 100;
2607 	else if (INTEL_GEN(dev_priv) < 4)
2608 		return 2 * max_cdclk_freq * 90 / 100;
2609 	else
2610 		return max_cdclk_freq * 90 / 100;
2611 }
2612
2613 /**
2614  * intel_update_max_cdclk - Determine the maximum supported CDCLK frequency
2615 * @dev_priv: i915 device
2616 *
2617  * Determine the maximum CDCLK frequency the platform supports, and also
2618  * derive the maximum dot clock frequency that this maximum CDCLK
2619  * frequency allows.
2620 */
2621 void intel_update_max_cdclk(struct drm_i915_private *dev_priv)
2622 {
2623 if (IS_ELKHARTLAKE(dev_priv)) {
2624 if (dev_priv->cdclk.hw.ref == 24000)
2625 dev_priv->max_cdclk_freq = 552000;
2626 else
2627 dev_priv->max_cdclk_freq = 556800;
2628 } else if (INTEL_GEN(dev_priv) >= 11) {
2629 if (dev_priv->cdclk.hw.ref == 24000)
2630 dev_priv->max_cdclk_freq = 648000;
2631 else
2632 dev_priv->max_cdclk_freq = 652800;
2633 } else if (IS_CANNONLAKE(dev_priv)) {
2634 dev_priv->max_cdclk_freq = 528000;
2635 } else if (IS_GEN9_BC(dev_priv)) {
2636 u32 limit = I915_READ(SKL_DFSM) & SKL_DFSM_CDCLK_LIMIT_MASK;
2637 int max_cdclk, vco;
2638
2639 vco = dev_priv->skl_preferred_vco_freq;
2640 WARN_ON(vco != 8100000 && vco != 8640000);
2641
2642 /*
2643 * Use the lower (vco 8640) cdclk values as a
2644 * first guess. skl_calc_cdclk() will correct it
2645 * if the preferred vco is 8100 instead.
2646 */
2647 if (limit == SKL_DFSM_CDCLK_LIMIT_675)
2648 max_cdclk = 617143;
2649 else if (limit == SKL_DFSM_CDCLK_LIMIT_540)
2650 max_cdclk = 540000;
2651 else if (limit == SKL_DFSM_CDCLK_LIMIT_450)
2652 max_cdclk = 432000;
2653 else
2654 max_cdclk = 308571;
2655
2656 dev_priv->max_cdclk_freq = skl_calc_cdclk(max_cdclk, vco);
2657 } else if (IS_GEMINILAKE(dev_priv)) {
2658 dev_priv->max_cdclk_freq = 316800;
2659 } else if (IS_BROXTON(dev_priv)) {
2660 dev_priv->max_cdclk_freq = 624000;
2661 } else if (IS_BROADWELL(dev_priv)) {
2662 /*
2663 * FIXME with extra cooling we can allow
2664 	 * 540 MHz for ULX and 675 MHz for ULT.
2665 * How can we know if extra cooling is
2666 * available? PCI ID, VTB, something else?
2667 */
2668 if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
2669 dev_priv->max_cdclk_freq = 450000;
2670 else if (IS_BDW_ULX(dev_priv))
2671 dev_priv->max_cdclk_freq = 450000;
2672 else if (IS_BDW_ULT(dev_priv))
2673 dev_priv->max_cdclk_freq = 540000;
2674 else
2675 dev_priv->max_cdclk_freq = 675000;
2676 } else if (IS_CHERRYVIEW(dev_priv)) {
2677 dev_priv->max_cdclk_freq = 320000;
2678 } else if (IS_VALLEYVIEW(dev_priv)) {
2679 dev_priv->max_cdclk_freq = 400000;
2680 } else {
2681 /* otherwise assume cdclk is fixed */
2682 dev_priv->max_cdclk_freq = dev_priv->cdclk.hw.cdclk;
2683 }
2684
2685 dev_priv->max_dotclk_freq = intel_compute_max_dotclk(dev_priv);
2686
2687 DRM_DEBUG_DRIVER("Max CD clock rate: %d kHz\n",
2688 dev_priv->max_cdclk_freq);
2689
2690 DRM_DEBUG_DRIVER("Max dotclock rate: %d kHz\n",
2691 dev_priv->max_dotclk_freq);
2692 }
2693
2694 /**
2695 * intel_update_cdclk - Determine the current CDCLK frequency
2696 * @dev_priv: i915 device
2697 *
2698 * Determine the current CDCLK frequency.
2699 */
2700 void intel_update_cdclk(struct drm_i915_private *dev_priv)
2701 {
2702 dev_priv->display.get_cdclk(dev_priv, &dev_priv->cdclk.hw);
2703
2704 /*
2705 * 9:0 CMBUS [sic] CDCLK frequency (cdfreq):
2706 * Programmng [sic] note: bit[9:2] should be programmed to the number
2707 * of cdclk that generates 4MHz reference clock freq which is used to
2708 * generate GMBus clock. This will vary with the cdclk freq.
2709 */
2710 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
2711 I915_WRITE(GMBUSFREQ_VLV,
2712 DIV_ROUND_UP(dev_priv->cdclk.hw.cdclk, 1000));
2713 }
2714
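/*
 * CNP+ rawclk programming, as a worked example (illustrative only): with a
 * 19.2 MHz strap the divider is 19000 kHz and the fraction 200 kHz, so the
 * register gets CNP_RAWCLK_DIV(19), a denominator of
 * DIV_ROUND_CLOSEST(1000, 200) - 1 = 4 and, on ICP+, a numerator of 1,
 * while the returned frequency is 19000 + 200 = 19200 kHz.
 */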
2715 static int cnp_rawclk(struct drm_i915_private *dev_priv)
2716 {
2717 u32 rawclk;
2718 int divider, fraction;
2719
2720 if (I915_READ(SFUSE_STRAP) & SFUSE_STRAP_RAW_FREQUENCY) {
2721 /* 24 MHz */
2722 divider = 24000;
2723 fraction = 0;
2724 } else {
2725 /* 19.2 MHz */
2726 divider = 19000;
2727 fraction = 200;
2728 }
2729
2730 rawclk = CNP_RAWCLK_DIV(divider / 1000);
2731 if (fraction) {
2732 int numerator = 1;
2733
2734 rawclk |= CNP_RAWCLK_DEN(DIV_ROUND_CLOSEST(numerator * 1000,
2735 fraction) - 1);
2736 if (INTEL_PCH_TYPE(dev_priv) >= PCH_ICP)
2737 rawclk |= ICP_RAWCLK_NUM(numerator);
2738 }
2739
2740 I915_WRITE(PCH_RAWCLK_FREQ, rawclk);
2741 return divider + fraction;
2742 }
2743
2744 static int pch_rawclk(struct drm_i915_private *dev_priv)
2745 {
2746 return (I915_READ(PCH_RAWCLK_FREQ) & RAWCLK_FREQ_MASK) * 1000;
2747 }
2748
2749 static int vlv_hrawclk(struct drm_i915_private *dev_priv)
2750 {
2751 /* RAWCLK_FREQ_VLV register updated from power well code */
2752 return vlv_get_cck_clock_hpll(dev_priv, "hrawclk",
2753 CCK_DISPLAY_REF_CLOCK_CONTROL);
2754 }
2755
2756 static int g4x_hrawclk(struct drm_i915_private *dev_priv)
2757 {
2758 u32 clkcfg;
2759
2760 /* hrawclock is 1/4 the FSB frequency */
2761 clkcfg = I915_READ(CLKCFG);
2762 switch (clkcfg & CLKCFG_FSB_MASK) {
2763 case CLKCFG_FSB_400:
2764 return 100000;
2765 case CLKCFG_FSB_533:
2766 return 133333;
2767 case CLKCFG_FSB_667:
2768 return 166667;
2769 case CLKCFG_FSB_800:
2770 return 200000;
2771 case CLKCFG_FSB_1067:
2772 case CLKCFG_FSB_1067_ALT:
2773 return 266667;
2774 case CLKCFG_FSB_1333:
2775 case CLKCFG_FSB_1333_ALT:
2776 return 333333;
2777 default:
2778 return 133333;
2779 }
2780 }
2781
2782 /**
2783 * intel_update_rawclk - Determine the current RAWCLK frequency
2784 * @dev_priv: i915 device
2785 *
2786 * Determine the current RAWCLK frequency. RAWCLK is a fixed
2787  * frequency clock, so this needs to be done only once.
2788 */
2789 void intel_update_rawclk(struct drm_i915_private *dev_priv)
2790 {
2791 if (INTEL_PCH_TYPE(dev_priv) >= PCH_CNP)
2792 dev_priv->rawclk_freq = cnp_rawclk(dev_priv);
2793 else if (HAS_PCH_SPLIT(dev_priv))
2794 dev_priv->rawclk_freq = pch_rawclk(dev_priv);
2795 else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
2796 dev_priv->rawclk_freq = vlv_hrawclk(dev_priv);
2797 else if (IS_G4X(dev_priv) || IS_PINEVIEW(dev_priv))
2798 dev_priv->rawclk_freq = g4x_hrawclk(dev_priv);
2799 else
2800 /* no rawclk on other platforms, or no need to know it */
2801 return;
2802
2803 DRM_DEBUG_DRIVER("rawclk rate: %d kHz\n", dev_priv->rawclk_freq);
2804 }
2805
2806 /**
2807 * intel_init_cdclk_hooks - Initialize CDCLK related modesetting hooks
2808 * @dev_priv: i915 device
2809 */
2810 void intel_init_cdclk_hooks(struct drm_i915_private *dev_priv)
2811 {
2812 if (INTEL_GEN(dev_priv) >= 11) {
2813 dev_priv->display.set_cdclk = icl_set_cdclk;
2814 dev_priv->display.modeset_calc_cdclk = icl_modeset_calc_cdclk;
2815 } else if (IS_CANNONLAKE(dev_priv)) {
2816 dev_priv->display.set_cdclk = cnl_set_cdclk;
2817 dev_priv->display.modeset_calc_cdclk = cnl_modeset_calc_cdclk;
2818 } else if (IS_GEN9_LP(dev_priv)) {
2819 dev_priv->display.set_cdclk = bxt_set_cdclk;
2820 dev_priv->display.modeset_calc_cdclk = bxt_modeset_calc_cdclk;
2821 } else if (IS_GEN9_BC(dev_priv)) {
2822 dev_priv->display.set_cdclk = skl_set_cdclk;
2823 dev_priv->display.modeset_calc_cdclk = skl_modeset_calc_cdclk;
2824 } else if (IS_BROADWELL(dev_priv)) {
2825 dev_priv->display.set_cdclk = bdw_set_cdclk;
2826 dev_priv->display.modeset_calc_cdclk = bdw_modeset_calc_cdclk;
2827 } else if (IS_CHERRYVIEW(dev_priv)) {
2828 dev_priv->display.set_cdclk = chv_set_cdclk;
2829 dev_priv->display.modeset_calc_cdclk = vlv_modeset_calc_cdclk;
2830 } else if (IS_VALLEYVIEW(dev_priv)) {
2831 dev_priv->display.set_cdclk = vlv_set_cdclk;
2832 dev_priv->display.modeset_calc_cdclk = vlv_modeset_calc_cdclk;
2833 }
2834
2835 if (INTEL_GEN(dev_priv) >= 11)
2836 dev_priv->display.get_cdclk = icl_get_cdclk;
2837 else if (IS_CANNONLAKE(dev_priv))
2838 dev_priv->display.get_cdclk = cnl_get_cdclk;
2839 else if (IS_GEN9_LP(dev_priv))
2840 dev_priv->display.get_cdclk = bxt_get_cdclk;
2841 else if (IS_GEN9_BC(dev_priv))
2842 dev_priv->display.get_cdclk = skl_get_cdclk;
2843 else if (IS_BROADWELL(dev_priv))
2844 dev_priv->display.get_cdclk = bdw_get_cdclk;
2845 else if (IS_HASWELL(dev_priv))
2846 dev_priv->display.get_cdclk = hsw_get_cdclk;
2847 else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
2848 dev_priv->display.get_cdclk = vlv_get_cdclk;
2849 else if (IS_GEN(dev_priv, 6) || IS_IVYBRIDGE(dev_priv))
2850 dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
2851 else if (IS_GEN(dev_priv, 5))
2852 dev_priv->display.get_cdclk = fixed_450mhz_get_cdclk;
2853 else if (IS_GM45(dev_priv))
2854 dev_priv->display.get_cdclk = gm45_get_cdclk;
2855 else if (IS_G45(dev_priv))
2856 dev_priv->display.get_cdclk = g33_get_cdclk;
2857 else if (IS_I965GM(dev_priv))
2858 dev_priv->display.get_cdclk = i965gm_get_cdclk;
2859 else if (IS_I965G(dev_priv))
2860 dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
2861 else if (IS_PINEVIEW(dev_priv))
2862 dev_priv->display.get_cdclk = pnv_get_cdclk;
2863 else if (IS_G33(dev_priv))
2864 dev_priv->display.get_cdclk = g33_get_cdclk;
2865 else if (IS_I945GM(dev_priv))
2866 dev_priv->display.get_cdclk = i945gm_get_cdclk;
2867 else if (IS_I945G(dev_priv))
2868 dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
2869 else if (IS_I915GM(dev_priv))
2870 dev_priv->display.get_cdclk = i915gm_get_cdclk;
2871 else if (IS_I915G(dev_priv))
2872 dev_priv->display.get_cdclk = fixed_333mhz_get_cdclk;
2873 else if (IS_I865G(dev_priv))
2874 dev_priv->display.get_cdclk = fixed_266mhz_get_cdclk;
2875 else if (IS_I85X(dev_priv))
2876 dev_priv->display.get_cdclk = i85x_get_cdclk;
2877 else if (IS_I845G(dev_priv))
2878 dev_priv->display.get_cdclk = fixed_200mhz_get_cdclk;
2879 else { /* 830 */
2880 WARN(!IS_I830(dev_priv),
2881 "Unknown platform. Assuming 133 MHz CDCLK\n");
2882 dev_priv->display.get_cdclk = fixed_133mhz_get_cdclk;
2883 }
2884 }
2885