// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015-2018 The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include "dpu_encoder_phys.h"
#include "dpu_hw_interrupts.h"
#include "dpu_core_irq.h"
#include "dpu_formats.h"
#include "dpu_trace.h"

#define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)

#define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)

#define to_dpu_encoder_phys_cmd(x) \
	container_of(x, struct dpu_encoder_phys_cmd, base)

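/*
 * Maximum number of consecutive pingpong-done timeouts tolerated before the
 * panel is reported dead (DPU_ENCODER_FRAME_EVENT_PANEL_DEAD).
 */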
#define PP_TIMEOUT_MAX_TRIALS	10

/*
 * Tearcheck sync start and continue thresholds are empirically found
 * based on common panels. In the future, we may want to allow panels to
 * override these default values.
 */
#define DEFAULT_TEARCHECK_SYNC_THRESH_START	4
#define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE	4

#define DPU_ENC_WR_PTR_START_TIMEOUT_US 20000

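/*
 * In a split-display topology only the master physical encoder drives the
 * CTL_START handshake and vblank reporting; slave encoders follow the master.
 */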
static bool dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys *phys_enc)
{
	return phys_enc->split_role != ENC_ROLE_SLAVE;
}

static bool dpu_encoder_phys_cmd_mode_fixup(
		struct dpu_encoder_phys *phys_enc,
		const struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	DPU_DEBUG_CMDENC(to_dpu_encoder_phys_cmd(phys_enc), "\n");
	return true;
}

static void _dpu_encoder_phys_cmd_update_intf_cfg(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;
	struct dpu_hw_intf_cfg intf_cfg = { 0 };

	ctl = phys_enc->hw_ctl;
	if (!ctl->ops.setup_intf_cfg)
		return;

	intf_cfg.intf = phys_enc->intf_idx;
	intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
	intf_cfg.stream_sel = cmd_enc->stream_sel;
	intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
	ctl->ops.setup_intf_cfg(ctl, &intf_cfg);
}

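/*
 * PP_DONE interrupt handler: the pingpong block has finished pushing a frame
 * to the panel, so drop one pending kickoff and wake any waiting commit
 * thread.
 */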
static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	unsigned long lock_flags;
	int new_cnt;
	u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

	if (!phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("pp_done_irq");
	/* notify all synchronous clients first, then asynchronous clients */
	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(phys_enc->parent,
				phys_enc, event);

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
					  phys_enc->hw_pp->idx - PINGPONG_0,
					  new_cnt, event);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("pp_done_irq");
}

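/*
 * RD_PTR interrupt handler: the panel read pointer has passed the programmed
 * rd_ptr_irq line; command-mode encoders use this as the vblank event.
 */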
static void dpu_encoder_phys_cmd_pp_rd_ptr_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_encoder_phys_cmd *cmd_enc;

	if (!phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("rd_ptr_irq");
	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	if (phys_enc->parent_ops->handle_vblank_virt)
		phys_enc->parent_ops->handle_vblank_virt(phys_enc->parent,
			phys_enc);

	atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
	wake_up_all(&cmd_enc->pending_vblank_wq);
	DPU_ATRACE_END("rd_ptr_irq");
}

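/*
 * CTL_START interrupt handler: the hardware has consumed the CTL start
 * trigger, so release anyone waiting on pending_ctlstart_cnt.
 */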
static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;

	DPU_ATRACE_BEGIN("ctl_start_irq");

	atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);

	/* Signal any waiting ctl start interrupt */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("ctl_start_irq");
}

static void dpu_encoder_phys_cmd_underrun_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;

	if (phys_enc->parent_ops->handle_underrun_virt)
		phys_enc->parent_ops->handle_underrun_virt(phys_enc->parent,
			phys_enc);
}

static void _dpu_encoder_phys_cmd_setup_irq_hw_idx(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_irq *irq;

	irq = &phys_enc->irq[INTR_IDX_CTL_START];
	irq->hw_idx = phys_enc->hw_ctl->idx;
	irq->irq_idx = -EINVAL;

	irq = &phys_enc->irq[INTR_IDX_PINGPONG];
	irq->hw_idx = phys_enc->hw_pp->idx;
	irq->irq_idx = -EINVAL;

	irq = &phys_enc->irq[INTR_IDX_RDPTR];
	irq->hw_idx = phys_enc->hw_pp->idx;
	irq->irq_idx = -EINVAL;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	irq->hw_idx = phys_enc->intf_idx;
	irq->irq_idx = -EINVAL;
}

static void dpu_encoder_phys_cmd_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!mode || !adj_mode) {
		DPU_ERROR("invalid args\n");
		return;
	}
	phys_enc->cached_mode = *adj_mode;
	DPU_DEBUG_CMDENC(cmd_enc, "caching mode:\n");
	drm_mode_debug_printmodeline(adj_mode);

	_dpu_encoder_phys_cmd_setup_irq_hw_idx(phys_enc);
}

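/*
 * Handle a pingpong-done timeout: the frame transfer never completed within
 * KICKOFF_TIMEOUT_MS. Log on the first and "panel dead" occurrences only,
 * and request a CTL reset before the next kickoff.
 */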
static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
	bool do_log = false;

	if (!phys_enc->hw_pp)
		return -EINVAL;

	cmd_enc->pp_timeout_report_cnt++;
	if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
		frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
		do_log = true;
	} else if (cmd_enc->pp_timeout_report_cnt == 1) {
		do_log = true;
	}

	trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(phys_enc->parent),
		     phys_enc->hw_pp->idx - PINGPONG_0,
		     cmd_enc->pp_timeout_report_cnt,
		     atomic_read(&phys_enc->pending_kickoff_cnt),
		     frame_event);

	/* to avoid flooding, only log first time, and "dead" time */
	if (do_log) {
		DRM_ERROR("id:%d pp:%d ctl:%d kickoff timeout cnt:%d koff_cnt:%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_pp->idx - PINGPONG_0,
			  phys_enc->hw_ctl->idx - CTL_0,
			  cmd_enc->pp_timeout_report_cnt,
			  atomic_read(&phys_enc->pending_kickoff_cnt));

		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_RDPTR);
	}

	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

	/* request a ctl reset before the next kickoff */
	phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(
				phys_enc->parent, phys_enc, frame_event);

	return -ETIMEDOUT;
}

static int _dpu_encoder_phys_cmd_wait_for_idle(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_PINGPONG,
			&wait_info);
	if (ret == -ETIMEDOUT)
		_dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
	else if (!ret)
		cmd_enc->pp_timeout_report_cnt = 0;

	return ret;
}

static int dpu_encoder_phys_cmd_control_vblank_irq(
		struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	int refcount;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	refcount = atomic_read(&phys_enc->vblank_refcount);

	/* Slave encoders don't report vblank */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		goto end;

	/* protect against negative */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      enable ? "true" : "false", refcount);

	if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
		ret = dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_RDPTR);
	else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
		ret = dpu_encoder_helper_unregister_irq(phys_enc,
				INTR_IDX_RDPTR);

end:
	if (ret) {
		DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_pp->idx - PINGPONG_0, ret,
			  enable ? "true" : "false", refcount);
	}

	return ret;
}

static void dpu_encoder_phys_cmd_irq_control(struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	trace_dpu_enc_phys_cmd_irq_ctrl(DRMID(phys_enc->parent),
			phys_enc->hw_pp->idx - PINGPONG_0,
			enable, atomic_read(&phys_enc->vblank_refcount));

	if (enable) {
		dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_PINGPONG);
		dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_UNDERRUN);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);

		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_encoder_helper_register_irq(phys_enc,
					INTR_IDX_CTL_START);
	} else {
		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_encoder_helper_unregister_irq(phys_enc,
					INTR_IDX_CTL_START);

		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_PINGPONG);
	}
}

static void dpu_encoder_phys_cmd_tearcheck_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_tear_check tc_cfg = { 0 };
	struct drm_display_mode *mode;
	bool tc_enable = true;
	u32 vsync_hz;
	struct dpu_kms *dpu_kms;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	mode = &phys_enc->cached_mode;

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (!phys_enc->hw_pp->ops.setup_tearcheck ||
		!phys_enc->hw_pp->ops.enable_tearcheck) {
		DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
		return;
	}

	dpu_kms = phys_enc->dpu_kms;

	/*
	 * TE default: DSI byte clock calculated based on 70 fps;
	 * around 14 ms to complete a kickoff cycle if TE is disabled;
	 * vclk_line based on 60 fps; write is faster than read;
	 * init == start == rdptr.
	 *
	 * vsync_count is the ratio of the MDP VSYNC clock frequency to the
	 * panel line rate, i.e. the refresh rate multiplied by the number
	 * of rows (lines) in the panel.
	 */
	vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
	if (!vsync_hz || !mode->vtotal || !drm_mode_vrefresh(mode)) {
		DPU_DEBUG_CMDENC(cmd_enc,
				 "invalid params - vsync_hz %u vtotal %d vrefresh %d\n",
				 vsync_hz, mode->vtotal,
				 drm_mode_vrefresh(mode));
		return;
	}

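	/*
	 * vsync_count = vsync clock cycles per panel line. E.g. a 19.2 MHz
	 * vsync clock with vtotal = 2000 lines at 60 fps gives
	 * 19200000 / (2000 * 60) = 160 clock cycles per line.
	 */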
	tc_cfg.vsync_count = vsync_hz /
				(mode->vtotal * drm_mode_vrefresh(mode));

	/* enable external TE after kickoff to avoid premature autorefresh */
	tc_cfg.hw_vsync_mode = 0;

	/*
	 * By setting sync_cfg_height to near the maximum register value, we
	 * essentially disable the DPU HW generated TE signal, since the
	 * external TE will always arrive first. The only caveat is that,
	 * due to an error, we could hit wrap-around.
	 */
	tc_cfg.sync_cfg_height = 0xFFF0;
	tc_cfg.vsync_init_val = mode->vdisplay;
	tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
	tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
	tc_cfg.start_pos = mode->vdisplay;
	tc_cfg.rd_ptr_irq = mode->vdisplay + 1;

	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d vsync_clk_speed_hz %u vtotal %u vrefresh %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, vsync_hz,
		mode->vtotal, drm_mode_vrefresh(mode));
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d enable %u start_pos %u rd_ptr_irq %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, tc_enable, tc_cfg.start_pos,
		tc_cfg.rd_ptr_irq);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.hw_vsync_mode,
		tc_cfg.vsync_count, tc_cfg.vsync_init_val);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d cfgheight %u thresh_start %u thresh_cont %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.sync_cfg_height,
		tc_cfg.sync_threshold_start, tc_cfg.sync_threshold_continue);

	phys_enc->hw_pp->ops.setup_tearcheck(phys_enc->hw_pp, &tc_cfg);
	phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, tc_enable);
}

static void _dpu_encoder_phys_cmd_pingpong_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
		DPU_ERROR("invalid arg(s), hw_pp or setup_intf_cfg missing\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
			phys_enc->hw_pp->idx - PINGPONG_0);
	drm_mode_debug_printmodeline(&phys_enc->cached_mode);

	_dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
	dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
}

static bool dpu_encoder_phys_cmd_needs_single_flush(
		struct dpu_encoder_phys *phys_enc)
{
	/*
	 * we do a separate flush for each CTL and let
	 * CTL_START synchronize them
	 */
	return false;
}

static void dpu_encoder_phys_cmd_enable_helper(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;
	u32 flush_mask = 0;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder, no hw_pp\n");
		return;
	}

	dpu_encoder_helper_split_config(phys_enc, phys_enc->intf_idx);

	_dpu_encoder_phys_cmd_pingpong_config(phys_enc);

	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return;

	ctl = phys_enc->hw_ctl;
	ctl->ops.get_bitmask_intf(ctl, &flush_mask, phys_enc->intf_idx);
	ctl->ops.update_pending_flush(ctl, flush_mask);
}

static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid phys encoder\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (phys_enc->enable_state == DPU_ENC_ENABLED) {
		DPU_ERROR("already enabled\n");
		return;
	}

	dpu_encoder_phys_cmd_enable_helper(phys_enc);
	phys_enc->enable_state = DPU_ENC_ENABLED;
}

static void _dpu_encoder_phys_cmd_connect_te(
		struct dpu_encoder_phys *phys_enc, bool enable)
{
	if (!phys_enc->hw_pp || !phys_enc->hw_pp->ops.connect_external_te)
		return;

	trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
	phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
}

static void dpu_encoder_phys_cmd_prepare_idle_pc(
		struct dpu_encoder_phys *phys_enc)
{
	_dpu_encoder_phys_cmd_connect_te(phys_enc, false);
}

static int dpu_encoder_phys_cmd_get_line_count(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_pingpong *hw_pp;

	if (!phys_enc->hw_pp)
		return -EINVAL;

	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return -EINVAL;

	hw_pp = phys_enc->hw_pp;
	if (!hw_pp->ops.get_line_count)
		return -EINVAL;

	return hw_pp->ops.get_line_count(hw_pp);
}

static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      phys_enc->enable_state);

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
		return;
	}

	if (phys_enc->hw_pp->ops.enable_tearcheck)
		phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, false);
	phys_enc->enable_state = DPU_ENC_DISABLED;
}

static void dpu_encoder_phys_cmd_destroy(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	kfree(cmd_enc);
}

static void dpu_encoder_phys_cmd_get_hw_resources(
		struct dpu_encoder_phys *phys_enc,
		struct dpu_encoder_hw_resources *hw_res)
{
	hw_res->intfs[phys_enc->intf_idx - INTF_0] = INTF_MODE_CMD;
}

static void dpu_encoder_phys_cmd_prepare_for_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	int ret;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(&phys_enc->pending_kickoff_cnt));

	/*
	 * Mark the kickoff request as outstanding. If more than one is
	 * outstanding, we have to wait for the previous kickoff to complete.
	 */
	ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (ret) {
		/* force pending_kickoff_cnt 0 to discard failed kickoff */
		atomic_set(&phys_enc->pending_kickoff_cnt, 0);
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
			  DRMID(phys_enc->parent), ret,
			  phys_enc->hw_pp->idx - PINGPONG_0);
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(&phys_enc->pending_kickoff_cnt));
}

static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_CTL_START,
			&wait_info);
	if (ret == -ETIMEDOUT) {
		DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
		ret = -EINVAL;
	}

	return ret;
}

static int dpu_encoder_phys_cmd_wait_for_tx_complete(
		struct dpu_encoder_phys *phys_enc)
{
	int rc;

	rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (rc) {
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
			  DRMID(phys_enc->parent), rc,
			  phys_enc->intf_idx - INTF_0);
	}

	return rc;
}

static int dpu_encoder_phys_cmd_wait_for_commit_done(
		struct dpu_encoder_phys *phys_enc)
{
	int rc = 0;
	struct dpu_encoder_phys_cmd *cmd_enc;

	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	/* only required for master controller */
	if (dpu_encoder_phys_cmd_is_master(phys_enc))
		rc = _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);

	/* required for both controllers */
	if (!rc && cmd_enc->serialize_wait4pp)
		dpu_encoder_phys_cmd_prepare_for_kickoff(phys_enc);

	return rc;
}

static int dpu_encoder_phys_cmd_wait_for_vblank(
		struct dpu_encoder_phys *phys_enc)
{
	int rc = 0;
	struct dpu_encoder_phys_cmd *cmd_enc;
	struct dpu_encoder_wait_info wait_info;

	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	/* only required for master controller */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return rc;

	wait_info.wq = &cmd_enc->pending_vblank_wq;
	wait_info.atomic_cnt = &cmd_enc->pending_vblank_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	atomic_inc(&cmd_enc->pending_vblank_cnt);

	rc = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_RDPTR,
			&wait_info);

	return rc;
}

static void dpu_encoder_phys_cmd_handle_post_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	/*
	 * re-enable external TE, either for the first time after enabling
	 * or if disabled for Autorefresh
	 */
	_dpu_encoder_phys_cmd_connect_te(phys_enc, true);
}

static void dpu_encoder_phys_cmd_trigger_start(
		struct dpu_encoder_phys *phys_enc)
{
	dpu_encoder_helper_trigger_start(phys_enc);
}

static void dpu_encoder_phys_cmd_init_ops(
		struct dpu_encoder_phys_ops *ops)
{
	ops->is_master = dpu_encoder_phys_cmd_is_master;
	ops->mode_set = dpu_encoder_phys_cmd_mode_set;
	ops->mode_fixup = dpu_encoder_phys_cmd_mode_fixup;
	ops->enable = dpu_encoder_phys_cmd_enable;
	ops->disable = dpu_encoder_phys_cmd_disable;
	ops->destroy = dpu_encoder_phys_cmd_destroy;
	ops->get_hw_resources = dpu_encoder_phys_cmd_get_hw_resources;
	ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
	ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
	ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
	ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
	ops->wait_for_vblank = dpu_encoder_phys_cmd_wait_for_vblank;
	ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
	ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
	ops->irq_control = dpu_encoder_phys_cmd_irq_control;
	ops->restore = dpu_encoder_phys_cmd_enable_helper;
	ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
	ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
	ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
}

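/**
 * dpu_encoder_phys_cmd_init - construct a command-mode physical encoder
 * @p:	pointer to init parameters (interface index, parent encoder, etc.)
 *
 * Return: pointer to the newly created physical encoder, or an ERR_PTR
 * on allocation failure.
 */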
struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(
		struct dpu_enc_phys_init_params *p)
{
	struct dpu_encoder_phys *phys_enc = NULL;
	struct dpu_encoder_phys_cmd *cmd_enc = NULL;
	struct dpu_encoder_irq *irq;
	int i;

	DPU_DEBUG("intf %d\n", p->intf_idx - INTF_0);

	cmd_enc = kzalloc(sizeof(*cmd_enc), GFP_KERNEL);
	if (!cmd_enc) {
		DPU_ERROR("failed to allocate\n");
		return ERR_PTR(-ENOMEM);
	}
	phys_enc = &cmd_enc->base;
	phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;
	phys_enc->intf_idx = p->intf_idx;

	dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
	phys_enc->parent = p->parent;
	phys_enc->parent_ops = p->parent_ops;
	phys_enc->dpu_kms = p->dpu_kms;
	phys_enc->split_role = p->split_role;
	phys_enc->intf_mode = INTF_MODE_CMD;
	phys_enc->enc_spinlock = p->enc_spinlock;
	cmd_enc->stream_sel = 0;
	phys_enc->enable_state = DPU_ENC_DISABLED;
	for (i = 0; i < INTR_IDX_MAX; i++) {
		irq = &phys_enc->irq[i];
		INIT_LIST_HEAD(&irq->cb.list);
		irq->irq_idx = -EINVAL;
		irq->hw_idx = -EINVAL;
		irq->cb.arg = phys_enc;
	}

	irq = &phys_enc->irq[INTR_IDX_CTL_START];
	irq->name = "ctl_start";
	irq->intr_type = DPU_IRQ_TYPE_CTL_START;
	irq->intr_idx = INTR_IDX_CTL_START;
	irq->cb.func = dpu_encoder_phys_cmd_ctl_start_irq;

	irq = &phys_enc->irq[INTR_IDX_PINGPONG];
	irq->name = "pp_done";
	irq->intr_type = DPU_IRQ_TYPE_PING_PONG_COMP;
	irq->intr_idx = INTR_IDX_PINGPONG;
	irq->cb.func = dpu_encoder_phys_cmd_pp_tx_done_irq;

	irq = &phys_enc->irq[INTR_IDX_RDPTR];
	irq->name = "pp_rd_ptr";
	irq->intr_type = DPU_IRQ_TYPE_PING_PONG_RD_PTR;
	irq->intr_idx = INTR_IDX_RDPTR;
	irq->cb.func = dpu_encoder_phys_cmd_pp_rd_ptr_irq;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	irq->name = "underrun";
	irq->intr_type = DPU_IRQ_TYPE_INTF_UNDER_RUN;
	irq->intr_idx = INTR_IDX_UNDERRUN;
	irq->cb.func = dpu_encoder_phys_cmd_underrun_irq;

	atomic_set(&phys_enc->vblank_refcount, 0);
	atomic_set(&phys_enc->pending_kickoff_cnt, 0);
	atomic_set(&phys_enc->pending_ctlstart_cnt, 0);
	atomic_set(&cmd_enc->pending_vblank_cnt, 0);
	init_waitqueue_head(&phys_enc->pending_kickoff_wq);
	init_waitqueue_head(&cmd_enc->pending_vblank_wq);

	DPU_DEBUG_CMDENC(cmd_enc, "created\n");

	return phys_enc;
}