/*
 * Copyright (c) 2015-2018 The Linux Foundation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include "dpu_encoder_phys.h"
#include "dpu_hw_interrupts.h"
#include "dpu_core_irq.h"
#include "dpu_formats.h"
#include "dpu_trace.h"

#define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)

#define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)

#define to_dpu_encoder_phys_cmd(x) \
	container_of(x, struct dpu_encoder_phys_cmd, base)

#define PP_TIMEOUT_MAX_TRIALS	10

/*
 * Tearcheck sync start and continue thresholds are empirically found
 * based on common panels. In the future, we may want to allow panels
 * to override these default values.
 */
#define DEFAULT_TEARCHECK_SYNC_THRESH_START	4
#define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE	4

#define DPU_ENC_WR_PTR_START_TIMEOUT_US 20000

static inline int _dpu_encoder_phys_cmd_get_idle_timeout(
		struct dpu_encoder_phys_cmd *cmd_enc)
{
	return KICKOFF_TIMEOUT_MS;
}

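/*
 * In a split-display topology only the master encoder reports vblank
 * and drives CTL_START; slave encoders skip both (see
 * dpu_encoder_phys_cmd_control_vblank_irq() and
 * dpu_encoder_phys_cmd_irq_control()).
 */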
static inline bool dpu_encoder_phys_cmd_is_master(
		struct dpu_encoder_phys *phys_enc)
{
	return phys_enc->split_role != ENC_ROLE_SLAVE;
}

static bool dpu_encoder_phys_cmd_mode_fixup(
		struct dpu_encoder_phys *phys_enc,
		const struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	if (phys_enc)
		DPU_DEBUG_CMDENC(to_dpu_encoder_phys_cmd(phys_enc), "\n");
	return true;
}

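/*
 * Program the CTL block to drive this interface in command mode,
 * including the stream selection and the 3D blend mode for the
 * current topology.
 */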
static void _dpu_encoder_phys_cmd_update_intf_cfg(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;
	struct dpu_hw_intf_cfg intf_cfg = { 0 };

	if (!phys_enc)
		return;

	ctl = phys_enc->hw_ctl;
	if (!ctl || !ctl->ops.setup_intf_cfg)
		return;

	intf_cfg.intf = phys_enc->intf_idx;
	intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
	intf_cfg.stream_sel = cmd_enc->stream_sel;
	intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
	ctl->ops.setup_intf_cfg(ctl, &intf_cfg);
}

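/*
 * PP_DONE interrupt handler: the ping-pong block finished pushing a
 * frame to the panel. Notify the parent encoder, drop one pending
 * kickoff and wake anyone blocked in
 * _dpu_encoder_phys_cmd_wait_for_idle().
 */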
static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	unsigned long lock_flags;
	int new_cnt;
	u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

	if (!phys_enc || !phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("pp_done_irq");
	/* notify all synchronous clients first, then asynchronous clients */
	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(phys_enc->parent,
				phys_enc, event);

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
					  phys_enc->hw_pp->idx - PINGPONG_0,
					  new_cnt, event);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("pp_done_irq");
}

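/*
 * RD_PTR interrupt handler: fires once per refresh and serves as the
 * vblank event for command mode panels.
 */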
static void dpu_encoder_phys_cmd_pp_rd_ptr_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_encoder_phys_cmd *cmd_enc;

	if (!phys_enc || !phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("rd_ptr_irq");
	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	if (phys_enc->parent_ops->handle_vblank_virt)
		phys_enc->parent_ops->handle_vblank_virt(phys_enc->parent,
				phys_enc);

	atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
	wake_up_all(&cmd_enc->pending_vblank_wq);
	DPU_ATRACE_END("rd_ptr_irq");
}

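/*
 * CTL_START interrupt handler: drop one pending ctl-start count and
 * wake any thread blocked in _dpu_encoder_phys_cmd_wait_for_ctl_start().
 */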
static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_encoder_phys_cmd *cmd_enc;

	if (!phys_enc || !phys_enc->hw_ctl)
		return;

	DPU_ATRACE_BEGIN("ctl_start_irq");
	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);

	/* Signal any thread waiting on the ctl start interrupt */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("ctl_start_irq");
}

static void dpu_encoder_phys_cmd_underrun_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;

	if (!phys_enc)
		return;

	if (phys_enc->parent_ops->handle_underrun_virt)
		phys_enc->parent_ops->handle_underrun_virt(phys_enc->parent,
				phys_enc);
}

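/*
 * Bind each logical interrupt to the hardware block instance (CTL,
 * ping-pong or interface) assigned to this encoder; the actual IRQ
 * indices are looked up later, at registration time.
 */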
static void _dpu_encoder_phys_cmd_setup_irq_hw_idx(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_irq *irq;

	irq = &phys_enc->irq[INTR_IDX_CTL_START];
	irq->hw_idx = phys_enc->hw_ctl->idx;
	irq->irq_idx = -EINVAL;

	irq = &phys_enc->irq[INTR_IDX_PINGPONG];
	irq->hw_idx = phys_enc->hw_pp->idx;
	irq->irq_idx = -EINVAL;

	irq = &phys_enc->irq[INTR_IDX_RDPTR];
	irq->hw_idx = phys_enc->hw_pp->idx;
	irq->irq_idx = -EINVAL;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	irq->hw_idx = phys_enc->intf_idx;
	irq->irq_idx = -EINVAL;
}

static void dpu_encoder_phys_cmd_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_rm *rm;
	struct dpu_rm_hw_iter iter;
	int i, instance;

	if (!phys_enc || !mode || !adj_mode) {
		DPU_ERROR("invalid args\n");
		return;
	}

	/* only dereference phys_enc after the argument check above */
	rm = &phys_enc->dpu_kms->rm;
	phys_enc->cached_mode = *adj_mode;
	DPU_DEBUG_CMDENC(cmd_enc, "caching mode:\n");
	drm_mode_debug_printmodeline(adj_mode);

	instance = phys_enc->split_role == ENC_ROLE_SLAVE ? 1 : 0;

	/* Retrieve previously allocated HW Resources. Shouldn't fail */
	dpu_rm_init_hw_iter(&iter, phys_enc->parent->base.id, DPU_HW_BLK_CTL);
	for (i = 0; i <= instance; i++) {
		if (dpu_rm_get_hw(rm, &iter))
			phys_enc->hw_ctl = (struct dpu_hw_ctl *)iter.hw;
	}

	if (IS_ERR_OR_NULL(phys_enc->hw_ctl)) {
		DPU_ERROR_CMDENC(cmd_enc, "failed to init ctl: %ld\n",
				PTR_ERR(phys_enc->hw_ctl));
		phys_enc->hw_ctl = NULL;
		return;
	}

	_dpu_encoder_phys_cmd_setup_irq_hw_idx(phys_enc);
}

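/*
 * Handle a kickoff that never signalled PP_DONE: log it (only the
 * first occurrence and the "panel dead" threshold, to avoid flooding),
 * drop the pending kickoff, flag the CTL for reset before the next
 * kickoff and report a frame error to the parent encoder.
 */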
static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
	bool do_log = false;

	if (!phys_enc || !phys_enc->hw_pp || !phys_enc->hw_ctl)
		return -EINVAL;

	cmd_enc->pp_timeout_report_cnt++;
	if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
		frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
		do_log = true;
	} else if (cmd_enc->pp_timeout_report_cnt == 1) {
		do_log = true;
	}

	trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(phys_enc->parent),
		     phys_enc->hw_pp->idx - PINGPONG_0,
		     cmd_enc->pp_timeout_report_cnt,
		     atomic_read(&phys_enc->pending_kickoff_cnt),
		     frame_event);

	/* to avoid flooding, only log first time, and "dead" time */
	if (do_log) {
		DRM_ERROR("id:%d pp:%d ctl:%d kickoff timeout cnt:%d koff_cnt:%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_pp->idx - PINGPONG_0,
			  phys_enc->hw_ctl->idx - CTL_0,
			  cmd_enc->pp_timeout_report_cnt,
			  atomic_read(&phys_enc->pending_kickoff_cnt));

		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_RDPTR);
		dpu_dbg_dump(false, __func__, true, true);
	}

	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

	/* request a ctl reset before the next kickoff */
	phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(
				phys_enc->parent, phys_enc, frame_event);

	return -ETIMEDOUT;
}

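/*
 * Block until all outstanding kickoffs have completed, i.e. until
 * pending_kickoff_cnt drops to zero or the PP_DONE wait times out.
 */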
static int _dpu_encoder_phys_cmd_wait_for_idle(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_PINGPONG,
			&wait_info);
	if (ret == -ETIMEDOUT)
		_dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
	else if (!ret)
		cmd_enc->pp_timeout_report_cnt = 0;

	return ret;
}

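/*
 * Reference-counted enable/disable of the RD_PTR (vblank) interrupt.
 * Slave encoders never register it, since they don't report vblank.
 */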
static int dpu_encoder_phys_cmd_control_vblank_irq(
		struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	int refcount;

	if (!phys_enc || !phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	refcount = atomic_read(&phys_enc->vblank_refcount);

	/* Slave encoders don't report vblank */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		goto end;

	/* protect against negative */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      enable ? "true" : "false", refcount);

	if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
		ret = dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_RDPTR);
	else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
		ret = dpu_encoder_helper_unregister_irq(phys_enc,
				INTR_IDX_RDPTR);

end:
	if (ret) {
		DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_pp->idx - PINGPONG_0, ret,
			  enable ? "true" : "false", refcount);
	}

	return ret;
}

static void dpu_encoder_phys_cmd_irq_control(struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	struct dpu_encoder_phys_cmd *cmd_enc;

	if (!phys_enc)
		return;

	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	trace_dpu_enc_phys_cmd_irq_ctrl(DRMID(phys_enc->parent),
			phys_enc->hw_pp->idx - PINGPONG_0,
			enable, atomic_read(&phys_enc->vblank_refcount));

	if (enable) {
		dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_PINGPONG);
		dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_UNDERRUN);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);

		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_encoder_helper_register_irq(phys_enc,
					INTR_IDX_CTL_START);
	} else {
		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_encoder_helper_unregister_irq(phys_enc,
					INTR_IDX_CTL_START);

		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_PINGPONG);
	}
}

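/*
 * Program the ping-pong tear check so writes stay in step with the
 * panel read pointer, using the panel's external TE signal; the
 * hw-generated TE is effectively disabled by parking sync_cfg_height
 * near its maximum.
 */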
static void dpu_encoder_phys_cmd_tearcheck_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_tear_check tc_cfg = { 0 };
	struct drm_display_mode *mode;
	bool tc_enable = true;
	u32 vsync_hz;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;

	if (!phys_enc || !phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	mode = &phys_enc->cached_mode;

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (!phys_enc->hw_pp->ops.setup_tearcheck ||
		!phys_enc->hw_pp->ops.enable_tearcheck) {
		DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
		return;
	}

	dpu_kms = phys_enc->dpu_kms;
	if (!dpu_kms || !dpu_kms->dev || !dpu_kms->dev->dev_private) {
		DPU_ERROR("invalid device\n");
		return;
	}
	priv = dpu_kms->dev->dev_private;

	/*
	 * TE defaults: the DSI byte clock is calculated based on 70 fps;
	 * it takes around 14 ms to complete a kickoff cycle if TE is
	 * disabled; vclk_line is based on 60 fps; write is faster than
	 * read; init == start == rdptr.
	 *
	 * vsync_count is the ratio of the MDP VSYNC clock frequency to
	 * the LCD panel refresh rate, divided by the number of rows
	 * (lines) in the panel.
	 */
	vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
	if (!vsync_hz) {
		DPU_DEBUG_CMDENC(cmd_enc, "invalid - vsync_hz %u\n", vsync_hz);
		return;
	}

	tc_cfg.vsync_count = vsync_hz / (mode->vtotal * mode->vrefresh);

	/* enable external TE after kickoff to avoid premature autorefresh */
	tc_cfg.hw_vsync_mode = 0;

	/*
	 * By setting sync_cfg_height to near max register value, we essentially
	 * disable dpu hw generated TE signal, since hw TE will arrive first.
	 * Only caveat is if due to error, we hit wrap-around.
	 */
	tc_cfg.sync_cfg_height = 0xFFF0;
	tc_cfg.vsync_init_val = mode->vdisplay;
	tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
	tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
	tc_cfg.start_pos = mode->vdisplay;
	tc_cfg.rd_ptr_irq = mode->vdisplay + 1;

	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d vsync_clk_speed_hz %u vtotal %u vrefresh %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, vsync_hz,
		mode->vtotal, mode->vrefresh);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d enable %u start_pos %u rd_ptr_irq %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, tc_enable, tc_cfg.start_pos,
		tc_cfg.rd_ptr_irq);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.hw_vsync_mode,
		tc_cfg.vsync_count, tc_cfg.vsync_init_val);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d cfgheight %u thresh_start %u thresh_cont %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.sync_cfg_height,
		tc_cfg.sync_threshold_start, tc_cfg.sync_threshold_continue);

	phys_enc->hw_pp->ops.setup_tearcheck(phys_enc->hw_pp, &tc_cfg);
	phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, tc_enable);
}

static void _dpu_encoder_phys_cmd_pingpong_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc || !phys_enc->hw_ctl || !phys_enc->hw_pp
			|| !phys_enc->hw_ctl->ops.setup_intf_cfg) {
		DPU_ERROR("invalid arg(s), enc %d\n", phys_enc != NULL);
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
			phys_enc->hw_pp->idx - PINGPONG_0);
	drm_mode_debug_printmodeline(&phys_enc->cached_mode);

	_dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
	dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
}

static bool dpu_encoder_phys_cmd_needs_single_flush(
		struct dpu_encoder_phys *phys_enc)
{
	/*
	 * We do a separate flush for each CTL and let
	 * CTL_START synchronize them.
	 */
	return false;
}

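/*
 * Common enable path, also used as the restore hook after idle power
 * collapse: set up split config, interface and tear check, then stage
 * the interface flush on the master (slaves are synchronized through
 * CTL_START).
 */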
static void dpu_encoder_phys_cmd_enable_helper(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;
	u32 flush_mask = 0;

	if (!phys_enc || !phys_enc->hw_ctl || !phys_enc->hw_pp) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
		return;
	}

	dpu_encoder_helper_split_config(phys_enc, phys_enc->intf_idx);

	_dpu_encoder_phys_cmd_pingpong_config(phys_enc);

	/* the interface flush is staged on the master only */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return;

	ctl = phys_enc->hw_ctl;
	ctl->ops.get_bitmask_intf(ctl, &flush_mask, phys_enc->intf_idx);
	ctl->ops.update_pending_flush(ctl, flush_mask);
}

static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc || !phys_enc->hw_pp) {
		DPU_ERROR("invalid phys encoder\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (phys_enc->enable_state == DPU_ENC_ENABLED) {
		DPU_ERROR("already enabled\n");
		return;
	}

	dpu_encoder_phys_cmd_enable_helper(phys_enc);
	phys_enc->enable_state = DPU_ENC_ENABLED;
}

static void _dpu_encoder_phys_cmd_connect_te(
		struct dpu_encoder_phys *phys_enc, bool enable)
{
	if (!phys_enc || !phys_enc->hw_pp ||
			!phys_enc->hw_pp->ops.connect_external_te)
		return;

	trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
	phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
}

static void dpu_encoder_phys_cmd_prepare_idle_pc(
		struct dpu_encoder_phys *phys_enc)
{
	_dpu_encoder_phys_cmd_connect_te(phys_enc, false);
}

static int dpu_encoder_phys_cmd_get_line_count(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_pingpong *hw_pp;

	if (!phys_enc || !phys_enc->hw_pp)
		return -EINVAL;

	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return -EINVAL;

	hw_pp = phys_enc->hw_pp;
	if (!hw_pp->ops.get_line_count)
		return -EINVAL;

	return hw_pp->ops.get_line_count(hw_pp);
}

static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc || !phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      phys_enc->enable_state);

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
		return;
	}

	if (phys_enc->hw_pp->ops.enable_tearcheck)
		phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, false);
	phys_enc->enable_state = DPU_ENC_DISABLED;
}

static void dpu_encoder_phys_cmd_destroy(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	kfree(cmd_enc);
}

static void dpu_encoder_phys_cmd_get_hw_resources(
		struct dpu_encoder_phys *phys_enc,
		struct dpu_encoder_hw_resources *hw_res,
		struct drm_connector_state *conn_state)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	if ((phys_enc->intf_idx - INTF_0) >= INTF_MAX) {
		DPU_ERROR("invalid intf idx:%d\n", phys_enc->intf_idx);
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "\n");
	hw_res->intfs[phys_enc->intf_idx - INTF_0] = INTF_MODE_CMD;
}

static void dpu_encoder_phys_cmd_prepare_for_kickoff(
		struct dpu_encoder_phys *phys_enc,
		struct dpu_encoder_kickoff_params *params)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	int ret;

	if (!phys_enc || !phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(&phys_enc->pending_kickoff_cnt));

	/*
	 * Mark the kickoff request as outstanding. If more than one is
	 * outstanding, we have to wait for the previous kickoff to complete.
	 */
	ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (ret) {
		/* force pending_kickoff_cnt 0 to discard failed kickoff */
		atomic_set(&phys_enc->pending_kickoff_cnt, 0);
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
			  DRMID(phys_enc->parent), ret,
			  phys_enc->hw_pp->idx - PINGPONG_0);
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(&phys_enc->pending_kickoff_cnt));
}

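/*
 * Block until the CTL_START interrupt confirms that the hardware has
 * consumed the start trigger for the last kickoff.
 */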
static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	if (!phys_enc || !phys_enc->hw_ctl) {
		DPU_ERROR("invalid argument(s)\n");
		return -EINVAL;
	}

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_CTL_START,
			&wait_info);
	if (ret == -ETIMEDOUT) {
		DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
		ret = -EINVAL;
	}

	return ret;
}

static int dpu_encoder_phys_cmd_wait_for_tx_complete(
		struct dpu_encoder_phys *phys_enc)
{
	int rc;
	struct dpu_encoder_phys_cmd *cmd_enc;

	if (!phys_enc)
		return -EINVAL;

	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (rc) {
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
			  DRMID(phys_enc->parent), rc,
			  phys_enc->intf_idx - INTF_0);
	}

	return rc;
}

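/*
 * For command mode, a commit is complete once the master has seen
 * CTL_START; if serialize_wait4pp is set, additionally serialize on
 * PP_DONE via another prepare_for_kickoff() round trip.
 */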
static int dpu_encoder_phys_cmd_wait_for_commit_done(
		struct dpu_encoder_phys *phys_enc)
{
	int rc = 0;
	struct dpu_encoder_phys_cmd *cmd_enc;

	if (!phys_enc)
		return -EINVAL;

	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	/* only required for master controller */
	if (dpu_encoder_phys_cmd_is_master(phys_enc))
		rc = _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);

	/* required for both controllers */
	if (!rc && cmd_enc->serialize_wait4pp)
		dpu_encoder_phys_cmd_prepare_for_kickoff(phys_enc, NULL);

	return rc;
}

static int dpu_encoder_phys_cmd_wait_for_vblank(
		struct dpu_encoder_phys *phys_enc)
{
	int rc = 0;
	struct dpu_encoder_phys_cmd *cmd_enc;
	struct dpu_encoder_wait_info wait_info;

	if (!phys_enc)
		return -EINVAL;

	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	/* only required for master controller */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return rc;

	wait_info.wq = &cmd_enc->pending_vblank_wq;
	wait_info.atomic_cnt = &cmd_enc->pending_vblank_cnt;
	wait_info.timeout_ms = _dpu_encoder_phys_cmd_get_idle_timeout(cmd_enc);

	atomic_inc(&cmd_enc->pending_vblank_cnt);

	rc = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_RDPTR,
			&wait_info);

	return rc;
}

static void dpu_encoder_phys_cmd_handle_post_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	if (!phys_enc)
		return;

	/*
	 * Re-enable external TE, either for the first time after enabling
	 * or if it was disabled for autorefresh.
	 */
	_dpu_encoder_phys_cmd_connect_te(phys_enc, true);
}

static void dpu_encoder_phys_cmd_trigger_start(
		struct dpu_encoder_phys *phys_enc)
{
	if (!phys_enc)
		return;

	dpu_encoder_helper_trigger_start(phys_enc);
}

static void dpu_encoder_phys_cmd_init_ops(
		struct dpu_encoder_phys_ops *ops)
{
	ops->is_master = dpu_encoder_phys_cmd_is_master;
	ops->mode_set = dpu_encoder_phys_cmd_mode_set;
	ops->mode_fixup = dpu_encoder_phys_cmd_mode_fixup;
	ops->enable = dpu_encoder_phys_cmd_enable;
	ops->disable = dpu_encoder_phys_cmd_disable;
	ops->destroy = dpu_encoder_phys_cmd_destroy;
	ops->get_hw_resources = dpu_encoder_phys_cmd_get_hw_resources;
	ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
	ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
	ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
	ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
	ops->wait_for_vblank = dpu_encoder_phys_cmd_wait_for_vblank;
	ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
	ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
	ops->hw_reset = dpu_encoder_helper_hw_reset;
	ops->irq_control = dpu_encoder_phys_cmd_irq_control;
	ops->restore = dpu_encoder_phys_cmd_enable_helper;
	ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
	ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
	ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
}

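/**
 * dpu_encoder_phys_cmd_init - initialize a command mode physical encoder
 * @p:	init parameters, including the interface index, parent encoder
 *	and callback ops
 *
 * Return: pointer to the new dpu_encoder_phys on success, ERR_PTR on
 * failure.
 */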
struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(
		struct dpu_enc_phys_init_params *p)
{
	struct dpu_encoder_phys *phys_enc = NULL;
	struct dpu_encoder_phys_cmd *cmd_enc = NULL;
	struct dpu_hw_mdp *hw_mdp;
	struct dpu_encoder_irq *irq;
	int i, ret = 0;

	DPU_DEBUG("intf %d\n", p->intf_idx - INTF_0);

	cmd_enc = kzalloc(sizeof(*cmd_enc), GFP_KERNEL);
	if (!cmd_enc) {
		ret = -ENOMEM;
		DPU_ERROR("failed to allocate\n");
		goto fail;
	}
	phys_enc = &cmd_enc->base;

	hw_mdp = dpu_rm_get_mdp(&p->dpu_kms->rm);
	if (IS_ERR_OR_NULL(hw_mdp)) {
		/* don't return ERR_PTR(0) if dpu_rm_get_mdp() returned NULL */
		ret = hw_mdp ? PTR_ERR(hw_mdp) : -EINVAL;
		DPU_ERROR("failed to get mdptop\n");
		goto fail_mdp_init;
	}
	phys_enc->hw_mdptop = hw_mdp;
	phys_enc->intf_idx = p->intf_idx;

	dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
	phys_enc->parent = p->parent;
	phys_enc->parent_ops = p->parent_ops;
	phys_enc->dpu_kms = p->dpu_kms;
	phys_enc->split_role = p->split_role;
	phys_enc->intf_mode = INTF_MODE_CMD;
	phys_enc->enc_spinlock = p->enc_spinlock;
	cmd_enc->stream_sel = 0;
	phys_enc->enable_state = DPU_ENC_DISABLED;
	for (i = 0; i < INTR_IDX_MAX; i++) {
		irq = &phys_enc->irq[i];
		INIT_LIST_HEAD(&irq->cb.list);
		irq->irq_idx = -EINVAL;
		irq->hw_idx = -EINVAL;
		irq->cb.arg = phys_enc;
	}

	irq = &phys_enc->irq[INTR_IDX_CTL_START];
	irq->name = "ctl_start";
	irq->intr_type = DPU_IRQ_TYPE_CTL_START;
	irq->intr_idx = INTR_IDX_CTL_START;
	irq->cb.func = dpu_encoder_phys_cmd_ctl_start_irq;

	irq = &phys_enc->irq[INTR_IDX_PINGPONG];
	irq->name = "pp_done";
	irq->intr_type = DPU_IRQ_TYPE_PING_PONG_COMP;
	irq->intr_idx = INTR_IDX_PINGPONG;
	irq->cb.func = dpu_encoder_phys_cmd_pp_tx_done_irq;

	irq = &phys_enc->irq[INTR_IDX_RDPTR];
	irq->name = "pp_rd_ptr";
	irq->intr_type = DPU_IRQ_TYPE_PING_PONG_RD_PTR;
	irq->intr_idx = INTR_IDX_RDPTR;
	irq->cb.func = dpu_encoder_phys_cmd_pp_rd_ptr_irq;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	irq->name = "underrun";
	irq->intr_type = DPU_IRQ_TYPE_INTF_UNDER_RUN;
	irq->intr_idx = INTR_IDX_UNDERRUN;
	irq->cb.func = dpu_encoder_phys_cmd_underrun_irq;

	atomic_set(&phys_enc->vblank_refcount, 0);
	atomic_set(&phys_enc->pending_kickoff_cnt, 0);
	atomic_set(&phys_enc->pending_ctlstart_cnt, 0);
	atomic_set(&cmd_enc->pending_vblank_cnt, 0);
	init_waitqueue_head(&phys_enc->pending_kickoff_wq);
	init_waitqueue_head(&cmd_enc->pending_vblank_wq);

	DPU_DEBUG_CMDENC(cmd_enc, "created\n");

	return phys_enc;

fail_mdp_init:
	kfree(cmd_enc);
fail:
	return ERR_PTR(ret);
}