/*
 * Copyright 2014 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <drm/drm_vblank.h>

#include "amdgpu.h"
#include "amdgpu_pm.h"
#include "amdgpu_i2c.h"
#include "atom.h"
#include "amdgpu_pll.h"
#include "amdgpu_connectors.h"
#ifdef CONFIG_DRM_AMDGPU_SI
#include "dce_v6_0.h"
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
#include "dce_v8_0.h"
#endif
#include "dce_v10_0.h"
#include "dce_v11_0.h"
#include "dce_virtual.h"
#include "ivsrcid/ivsrcid_vislands30.h"

#define DCE_VIRTUAL_VBLANK_PERIOD 16666666 /* ~16.67 ms (60 Hz) software vblank period, in ns */


static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index);
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							int crtc,
							enum amdgpu_interrupt_state state);

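/*
 * The virtual DCE has no real display hardware behind it, so the display
 * hooks below are stubs that return fixed values (or do nothing) purely to
 * satisfy the common amdgpu display code.
 */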
static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
{
	return 0;
}

static void dce_virtual_page_flip(struct amdgpu_device *adev,
			      int crtc_id, u64 crtc_base, bool async)
{
	return;
}

static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
					u32 *vbl, u32 *position)
{
	*vbl = 0;
	*position = 0;

	return -EINVAL;
}

static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
			       enum amdgpu_hpd_id hpd)
{
	return true;
}

static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
				      enum amdgpu_hpd_id hpd)
{
	return;
}

static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
{
	return 0;
}

/**
 * dce_virtual_bandwidth_update - program display watermarks
 *
 * @adev: amdgpu_device pointer
 *
 * The virtual DCE has no display watermarks or line buffers to
 * program, so this is a no-op.
 */
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
{
	return;
}

static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
				      u16 *green, u16 *blue, uint32_t size,
				      struct drm_modeset_acquire_ctx *ctx)
{
	return 0;
}

static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(amdgpu_crtc);
}

static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
	.cursor_set2 = NULL,
	.cursor_move = NULL,
	.gamma_set = dce_virtual_crtc_gamma_set,
	.set_config = amdgpu_display_crtc_set_config,
	.destroy = dce_virtual_crtc_destroy,
	.page_flip_target = amdgpu_display_crtc_page_flip_target,
};

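/*
 * DPMS for a virtual CRTC only tracks the enabled state and keeps the
 * software vblank machinery in sync via drm_crtc_vblank_on()/off(); when
 * running as an SR-IOV VF the function returns early and does nothing.
 */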
static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	unsigned type;

	if (amdgpu_sriov_vf(adev))
		return;

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		amdgpu_crtc->enabled = true;
		/* Make sure VBLANK interrupts are still enabled */
		type = amdgpu_display_crtc_idx_to_irq_type(adev,
						amdgpu_crtc->crtc_id);
		amdgpu_irq_update(adev, &adev->crtc_irq, type);
		drm_crtc_vblank_on(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		drm_crtc_vblank_off(crtc);
		amdgpu_crtc->enabled = false;
		break;
	}
}


static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}

static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
}

static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
				  struct drm_display_mode *mode,
				  struct drm_display_mode *adjusted_mode,
				  int x, int y, struct drm_framebuffer *old_fb)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	/* update the hw version for dpm */
	amdgpu_crtc->hw_mode = *adjusted_mode;

	return 0;
}

static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
				     const struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode)
{
	return true;
}


static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				  struct drm_framebuffer *old_fb)
{
	return 0;
}

static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
					 struct drm_framebuffer *fb,
					 int x, int y, enum mode_set_atomic state)
{
	return 0;
}

static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
	.dpms = dce_virtual_crtc_dpms,
	.mode_fixup = dce_virtual_crtc_mode_fixup,
	.mode_set = dce_virtual_crtc_mode_set,
	.mode_set_base = dce_virtual_crtc_set_base,
	.mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
	.prepare = dce_virtual_crtc_prepare,
	.commit = dce_virtual_crtc_commit,
	.disable = dce_virtual_crtc_disable,
};

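/*
 * Create a software-only CRTC: allocate the amdgpu_crtc, register it with
 * DRM and hook up the virtual CRTC callbacks. No PLL or display controller
 * registers are touched.
 */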
static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
{
	struct amdgpu_crtc *amdgpu_crtc;

	amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
			      (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (amdgpu_crtc == NULL)
		return -ENOMEM;

	drm_crtc_init(adev->ddev, &amdgpu_crtc->base, &dce_virtual_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
	amdgpu_crtc->crtc_id = index;
	adev->mode_info.crtcs[index] = amdgpu_crtc;

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
	amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
	drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);

	return 0;
}

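/*
 * amd_ip_funcs callbacks. early_init only installs the display and IRQ
 * function tables and advertises a single HPD/DIG; the number of virtual
 * CRTCs in adev->mode_info.num_crtc is set up elsewhere by the driver.
 */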
static int dce_virtual_early_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	dce_virtual_set_display_funcs(adev);
	dce_virtual_set_irq_funcs(adev);

	adev->mode_info.num_hpd = 1;
	adev->mode_info.num_dig = 1;
	return 0;
}

static struct drm_encoder *
dce_virtual_encoder(struct drm_connector *connector)
{
	struct drm_encoder *encoder;
	int i;

	drm_connector_for_each_possible_encoder(connector, encoder, i) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
			return encoder;
	}

	/* pick the first one */
	drm_connector_for_each_possible_encoder(connector, encoder, i)
		return encoder;

	return NULL;
}

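/*
 * There is no EDID to read, so advertise a fixed list of common modes,
 * each generated as a 60 Hz CVT timing.
 */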
static int dce_virtual_get_modes(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;
	struct drm_display_mode *mode = NULL;
	unsigned i;
	static const struct mode_size {
		int w;
		int h;
	} common_modes[] = {
		{ 640,  480},
		{ 720,  480},
		{ 800,  600},
		{ 848,  480},
		{1024,  768},
		{1152,  768},
		{1280,  720},
		{1280,  800},
		{1280,  854},
		{1280,  960},
		{1280, 1024},
		{1440,  900},
		{1400, 1050},
		{1680, 1050},
		{1600, 1200},
		{1920, 1080},
		{1920, 1200}
	};

	for (i = 0; i < ARRAY_SIZE(common_modes); i++) {
		mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h, 60, false, false, false);
		drm_mode_probed_add(connector, mode);
	}

	return 0;
}

static enum drm_mode_status dce_virtual_mode_valid(struct drm_connector *connector,
				  struct drm_display_mode *mode)
{
	return MODE_OK;
}

static int
dce_virtual_dpms(struct drm_connector *connector, int mode)
{
	return 0;
}

static int
dce_virtual_set_property(struct drm_connector *connector,
			 struct drm_property *property,
			 uint64_t val)
{
	return 0;
}

static void dce_virtual_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}

static void dce_virtual_force(struct drm_connector *connector)
{
	return;
}

static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
	.get_modes = dce_virtual_get_modes,
	.mode_valid = dce_virtual_mode_valid,
	.best_encoder = dce_virtual_encoder,
};

static const struct drm_connector_funcs dce_virtual_connector_funcs = {
	.dpms = dce_virtual_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = dce_virtual_set_property,
	.destroy = dce_virtual_destroy,
	.force = dce_virtual_force,
};

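/*
 * sw_init registers a CRTC interrupt source (vblank itself is emulated by
 * an hrtimer, see below), configures the DRM mode_config limits and then
 * creates one virtual CRTC, encoder and connector per configured CRTC.
 */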
static int dce_virtual_sw_init(void *handle)
{
	int r, i;
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER, &adev->crtc_irq);
	if (r)
		return r;

	adev->ddev->max_vblank_count = 0;

	adev->ddev->mode_config.funcs = &amdgpu_mode_funcs;

	adev->ddev->mode_config.max_width = 16384;
	adev->ddev->mode_config.max_height = 16384;

	adev->ddev->mode_config.preferred_depth = 24;
	adev->ddev->mode_config.prefer_shadow = 1;

	adev->ddev->mode_config.fb_base = adev->gmc.aper_base;

	r = amdgpu_display_modeset_create_props(adev);
	if (r)
		return r;

	adev->ddev->mode_config.max_width = 16384;
	adev->ddev->mode_config.max_height = 16384;

	/* allocate crtcs, encoders, connectors */
	for (i = 0; i < adev->mode_info.num_crtc; i++) {
		r = dce_virtual_crtc_init(adev, i);
		if (r)
			return r;
		r = dce_virtual_connector_encoder_init(adev, i);
		if (r)
			return r;
	}

	drm_kms_helper_poll_init(adev->ddev);

	adev->mode_info.mode_config_initialized = true;
	return 0;
}

static int dce_virtual_sw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	kfree(adev->mode_info.bios_hardcoded_edid);

	drm_kms_helper_poll_fini(adev->ddev);

	drm_mode_config_cleanup(adev->ddev);
	/* clear crtcs pointer to avoid dce irq finish routine accessing freed data */
	memset(adev->mode_info.crtcs, 0, sizeof(adev->mode_info.crtcs[0]) * AMDGPU_MAX_CRTCS);
	adev->mode_info.mode_config_initialized = false;
	return 0;
}

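/*
 * hw_init does not enable anything; on ASICs that do have a physical DCE
 * block it simply turns that block off so it cannot interfere with the
 * virtual display path.
 */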
static int dce_virtual_hw_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	switch (adev->asic_type) {
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
	case CHIP_VERDE:
	case CHIP_OLAND:
		dce_v6_0_disable_dce(adev);
		break;
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
	case CHIP_BONAIRE:
	case CHIP_HAWAII:
	case CHIP_KAVERI:
	case CHIP_KABINI:
	case CHIP_MULLINS:
		dce_v8_0_disable_dce(adev);
		break;
#endif
	case CHIP_FIJI:
	case CHIP_TONGA:
		dce_v10_0_disable_dce(adev);
		break;
	case CHIP_CARRIZO:
	case CHIP_STONEY:
	case CHIP_POLARIS10:
	case CHIP_POLARIS11:
	case CHIP_VEGAM:
		dce_v11_0_disable_dce(adev);
		break;
	case CHIP_TOPAZ:
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_HAINAN:
#endif
		/* no DCE */
		break;
	default:
		break;
	}
	return 0;
}

static int dce_virtual_hw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
	int i = 0;

	for (i = 0; i < adev->mode_info.num_crtc; i++)
		if (adev->mode_info.crtcs[i])
			dce_virtual_set_crtc_vblank_interrupt_state(adev, i, AMDGPU_IRQ_STATE_DISABLE);

	return 0;
}

static int dce_virtual_suspend(void *handle)
{
	return dce_virtual_hw_fini(handle);
}

static int dce_virtual_resume(void *handle)
{
	return dce_virtual_hw_init(handle);
}

static bool dce_virtual_is_idle(void *handle)
{
	return true;
}

static int dce_virtual_wait_for_idle(void *handle)
{
	return 0;
}

static int dce_virtual_soft_reset(void *handle)
{
	return 0;
}

static int dce_virtual_set_clockgating_state(void *handle,
					  enum amd_clockgating_state state)
{
	return 0;
}

static int dce_virtual_set_powergating_state(void *handle,
					  enum amd_powergating_state state)
{
	return 0;
}

static const struct amd_ip_funcs dce_virtual_ip_funcs = {
	.name = "dce_virtual",
	.early_init = dce_virtual_early_init,
	.late_init = NULL,
	.sw_init = dce_virtual_sw_init,
	.sw_fini = dce_virtual_sw_fini,
	.hw_init = dce_virtual_hw_init,
	.hw_fini = dce_virtual_hw_fini,
	.suspend = dce_virtual_suspend,
	.resume = dce_virtual_resume,
	.is_idle = dce_virtual_is_idle,
	.wait_for_idle = dce_virtual_wait_for_idle,
	.soft_reset = dce_virtual_soft_reset,
	.set_clockgating_state = dce_virtual_set_clockgating_state,
	.set_powergating_state = dce_virtual_set_powergating_state,
};

/* these are handled by the primary encoders */
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
	return;
}

static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adjusted_mode)
{
	return;
}

static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
	return;
}

static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
				    const struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
	return true;
}

static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
	.dpms = dce_virtual_encoder_dpms,
	.mode_fixup = dce_virtual_encoder_mode_fixup,
	.prepare = dce_virtual_encoder_prepare,
	.mode_set = dce_virtual_encoder_mode_set,
	.commit = dce_virtual_encoder_commit,
	.disable = dce_virtual_encoder_disable,
};

static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
	.destroy = dce_virtual_encoder_destroy,
};

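/*
 * Create one DRM_MODE_ENCODER_VIRTUAL encoder and one
 * DRM_MODE_CONNECTOR_VIRTUAL connector for the given CRTC index and link
 * them together.
 */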
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index)
{
	struct drm_encoder *encoder;
	struct drm_connector *connector;

	/* add a new encoder */
	encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL);
	if (!encoder)
		return -ENOMEM;
	encoder->possible_crtcs = 1 << index;
	drm_encoder_init(adev->ddev, encoder, &dce_virtual_encoder_funcs,
			 DRM_MODE_ENCODER_VIRTUAL, NULL);
	drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);

	connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL);
	if (!connector) {
		kfree(encoder);
		return -ENOMEM;
	}

	/* add a new connector */
	drm_connector_init(adev->ddev, connector, &dce_virtual_connector_funcs,
			   DRM_MODE_CONNECTOR_VIRTUAL);
	drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs);
	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;
	drm_connector_register(connector);

	/* link them */
	drm_connector_attach_encoder(connector, encoder);

	return 0;
}

static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
	.bandwidth_update = &dce_virtual_bandwidth_update,
	.vblank_get_counter = &dce_virtual_vblank_get_counter,
	.backlight_set_level = NULL,
	.backlight_get_level = NULL,
	.hpd_sense = &dce_virtual_hpd_sense,
	.hpd_set_polarity = &dce_virtual_hpd_set_polarity,
	.hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
	.page_flip = &dce_virtual_page_flip,
	.page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
	.add_encoder = NULL,
	.add_connector = NULL,
};

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
	adev->mode_info.funcs = &dce_virtual_display_funcs;
}

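/*
 * Complete a pending page flip on the given CRTC: send the flip completion
 * event to userspace and release the flip work. Called from the vblank
 * timer handler below in place of a hardware page-flip interrupt.
 */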
static int dce_virtual_pageflip(struct amdgpu_device *adev,
				unsigned crtc_id)
{
	unsigned long flags;
	struct amdgpu_crtc *amdgpu_crtc;
	struct amdgpu_flip_work *works;

	if (crtc_id >= adev->mode_info.num_crtc) {
		DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
		return -EINVAL;
	}

	amdgpu_crtc = adev->mode_info.crtcs[crtc_id];

	/* IRQ could occur when in initial stage */
	if (amdgpu_crtc == NULL)
		return 0;

	spin_lock_irqsave(&adev->ddev->event_lock, flags);
	works = amdgpu_crtc->pflip_works;
	if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
			"AMDGPU_FLIP_SUBMITTED(%d)\n",
			amdgpu_crtc->pflip_status,
			AMDGPU_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
		return 0;
	}

	/* page flip completed. clean up */
	amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
	amdgpu_crtc->pflip_works = NULL;

	/* wake up userspace */
	if (works->event)
		drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);

	spin_unlock_irqrestore(&adev->ddev->event_lock, flags);

	drm_crtc_vblank_put(&amdgpu_crtc->base);
	amdgpu_bo_unref(&works->old_abo);
	kfree(works->shared);
	kfree(works);

	return 0;
}

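/*
 * hrtimer callback that emulates the vblank interrupt: deliver the DRM
 * vblank event, complete any pending page flip and re-arm the timer for
 * the next period.
 */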
static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
	struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
						       struct amdgpu_crtc, vblank_timer);
	struct drm_device *ddev = amdgpu_crtc->base.dev;
	struct amdgpu_device *adev = ddev->dev_private;

	drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
	dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
	hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
		      HRTIMER_MODE_REL);

	return HRTIMER_NORESTART;
}

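/*
 * "Enable" or "disable" vblank interrupts for a virtual CRTC by starting
 * or cancelling its per-CRTC hrtimer.
 */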
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							int crtc,
							enum amdgpu_interrupt_state state)
{
	if (crtc >= adev->mode_info.num_crtc || !adev->mode_info.crtcs[crtc]) {
		DRM_DEBUG("invalid crtc %d\n", crtc);
		return;
	}

	if (state && !adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Enable software vsync timer\n");
		hrtimer_init(&adev->mode_info.crtcs[crtc]->vblank_timer,
			     CLOCK_MONOTONIC, HRTIMER_MODE_REL);
		hrtimer_set_expires(&adev->mode_info.crtcs[crtc]->vblank_timer,
				    DCE_VIRTUAL_VBLANK_PERIOD);
		adev->mode_info.crtcs[crtc]->vblank_timer.function =
			dce_virtual_vblank_timer_handle;
		hrtimer_start(&adev->mode_info.crtcs[crtc]->vblank_timer,
			      DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
	} else if (!state && adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Disable software vsync timer\n");
		hrtimer_cancel(&adev->mode_info.crtcs[crtc]->vblank_timer);
	}

	adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
	DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}


static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
					  struct amdgpu_irq_src *source,
					  unsigned type,
					  enum amdgpu_interrupt_state state)
{
	if (type > AMDGPU_CRTC_IRQ_VBLANK6)
		return -EINVAL;

	dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);

	return 0;
}

static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
	.set = dce_virtual_set_crtc_irq_state,
	.process = NULL,
};

static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
	adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VBLANK6 + 1;
	adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
}

const struct amdgpu_ip_block_version dce_virtual_ip_block =
{
	.type = AMD_IP_BLOCK_TYPE_DCE,
	.major = 1,
	.minor = 0,
	.rev = 0,
	.funcs = &dce_virtual_ip_funcs,
};