/* Copyright (c) 2015-2018, The Linux Foundation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include "dpu_encoder_phys.h"
#include "dpu_hw_interrupts.h"
#include "dpu_core_irq.h"
#include "dpu_formats.h"
#include "dpu_trace.h"

#define DPU_DEBUG_VIDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) && (e)->hw_intf ? \
		(e)->hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define DPU_ERROR_VIDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) && (e)->hw_intf ? \
		(e)->hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define to_dpu_encoder_phys_vid(x) \
	container_of(x, struct dpu_encoder_phys_vid, base)

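/*
 * In a split-display configuration one physical encoder acts as master
 * and the other as slave; a solo encoder also counts as master. Only
 * the explicit slave role is excluded below.
 */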
static bool dpu_encoder_phys_vid_is_master(
		struct dpu_encoder_phys *phys_enc)
{
	bool ret = false;

	if (phys_enc->split_role != ENC_ROLE_SLAVE)
		ret = true;

	return ret;
}

static void drm_mode_to_intf_timing_params(
		const struct dpu_encoder_phys_vid *vid_enc,
		const struct drm_display_mode *mode,
		struct intf_timing_params *timing)
{
	memset(timing, 0, sizeof(*timing));

	if ((mode->htotal < mode->hsync_end)
			|| (mode->hsync_start < mode->hdisplay)
			|| (mode->vtotal < mode->vsync_end)
			|| (mode->vsync_start < mode->vdisplay)
			|| (mode->hsync_end < mode->hsync_start)
			|| (mode->vsync_end < mode->vsync_start)) {
		DPU_ERROR(
		    "invalid params - hstart:%d,hend:%d,htot:%d,hdisplay:%d\n",
				mode->hsync_start, mode->hsync_end,
				mode->htotal, mode->hdisplay);
		DPU_ERROR("vstart:%d,vend:%d,vtot:%d,vdisplay:%d\n",
				mode->vsync_start, mode->vsync_end,
				mode->vtotal, mode->vdisplay);
		return;
	}

	/*
	 * https://www.kernel.org/doc/htmldocs/drm/ch02s05.html
	 *  Active Region      Front Porch   Sync   Back Porch
	 * <-----------------><------------><-----><----------->
	 * <- [hv]display --->
	 * <--------- [hv]sync_start ------>
	 * <----------------- [hv]sync_end ------->
	 * <---------------------------- [hv]total ------------->
	 */
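	/*
	 * Illustrative numbers for a standard 1080p60 CEA mode:
	 * hdisplay = 1920, hsync_start = 2008, hsync_end = 2052 and
	 * htotal = 2200 yield h_front_porch = 2008 - 1920 = 88,
	 * hsync_pulse_width = 2052 - 2008 = 44 and h_back_porch =
	 * 2200 - 2052 = 148; the vertical fields follow the same pattern.
	 */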
	timing->width = mode->hdisplay;	/* active width */
	timing->height = mode->vdisplay;	/* active height */
	timing->xres = timing->width;
	timing->yres = timing->height;
	timing->h_back_porch = mode->htotal - mode->hsync_end;
	timing->h_front_porch = mode->hsync_start - mode->hdisplay;
	timing->v_back_porch = mode->vtotal - mode->vsync_end;
	timing->v_front_porch = mode->vsync_start - mode->vdisplay;
	timing->hsync_pulse_width = mode->hsync_end - mode->hsync_start;
	timing->vsync_pulse_width = mode->vsync_end - mode->vsync_start;
	timing->hsync_polarity = (mode->flags & DRM_MODE_FLAG_NHSYNC) ? 1 : 0;
	timing->vsync_polarity = (mode->flags & DRM_MODE_FLAG_NVSYNC) ? 1 : 0;
	timing->border_clr = 0;
	timing->underflow_clr = 0xff;
	timing->hsync_skew = mode->hskew;

	/* DSI controller cannot handle active-low sync signals. */
	if (vid_enc->hw_intf->cap->type == INTF_DSI) {
		timing->hsync_polarity = 0;
		timing->vsync_polarity = 0;
	}

	/*
	 * For edp only:
	 * DISPLAY_V_START = (VBP * HCYCLE) + HBP
	 * DISPLAY_V_END = (VBP + VACTIVE) * HCYCLE - 1 - HFP
	 */
	/*
	 * if (vid_enc->hw->cap->type == INTF_EDP) {
	 *	display_v_start += mode->htotal - mode->hsync_start;
	 *	display_v_end -= mode->hsync_start - mode->hdisplay;
	 * }
	 */
}

static inline u32 get_horizontal_total(const struct intf_timing_params *timing)
{
	u32 active = timing->xres;
	u32 inactive =
	    timing->h_back_porch + timing->h_front_porch +
	    timing->hsync_pulse_width;
	return active + inactive;
}

static inline u32 get_vertical_total(const struct intf_timing_params *timing)
{
	u32 active = timing->yres;
	u32 inactive =
	    timing->v_back_porch + timing->v_front_porch +
	    timing->vsync_pulse_width;
	return active + inactive;
}

/*
 * programmable_fetch_get_num_lines:
 *	Number of fetch lines in vertical front porch
 * @timing: Pointer to the intf timing information for the requested mode
 *
 * Returns the number of fetch lines in the vertical front porch at which
 * mdp can start fetching the next frame.
 *
 * The number of needed prefetch lines is anything that cannot be absorbed
 * in the start-of-frame time (back porch + vsync pulse width).
 *
 * Some panels have a very large VFP, but only the number of lines dictated
 * by the chip's worst-case latency is actually needed.
 */
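/*
 * Example with illustrative numbers: if prog_fetch_lines_worst_case is 25
 * and the mode has v_back_porch = 3 and vsync_pulse_width = 5, then
 * start_of_frame_lines = 8 and 17 lines of VFP are needed. A mode with
 * v_front_porch >= 17 gets exactly 17 prefetch lines, a smaller VFP is
 * used in its entirety, and vbp + vsw >= 25 needs no prefetch at all.
 */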
static u32 programmable_fetch_get_num_lines(
		struct dpu_encoder_phys_vid *vid_enc,
		const struct intf_timing_params *timing)
{
	u32 worst_case_needed_lines =
	    vid_enc->hw_intf->cap->prog_fetch_lines_worst_case;
	u32 start_of_frame_lines =
	    timing->v_back_porch + timing->vsync_pulse_width;
	u32 needed_vfp_lines = worst_case_needed_lines - start_of_frame_lines;
	u32 actual_vfp_lines = 0;

	/* Fetch must be outside active lines, otherwise undefined. */
	if (start_of_frame_lines >= worst_case_needed_lines) {
		DPU_DEBUG_VIDENC(vid_enc,
				"prog fetch is not needed, large vbp+vsw\n");
		actual_vfp_lines = 0;
	} else if (timing->v_front_porch < needed_vfp_lines) {
		/* Warn fetch needed, but not enough porch in panel config */
		pr_warn_once
			("low vbp+vfp may lead to perf issues in some cases\n");
		DPU_DEBUG_VIDENC(vid_enc,
				"less vfp than fetch req, using entire vfp\n");
		actual_vfp_lines = timing->v_front_porch;
	} else {
		DPU_DEBUG_VIDENC(vid_enc, "room in vfp for needed prefetch\n");
		actual_vfp_lines = needed_vfp_lines;
	}

	DPU_DEBUG_VIDENC(vid_enc,
		"v_front_porch %u v_back_porch %u vsync_pulse_width %u\n",
		timing->v_front_porch, timing->v_back_porch,
		timing->vsync_pulse_width);
	DPU_DEBUG_VIDENC(vid_enc,
		"wc_lines %u needed_vfp_lines %u actual_vfp_lines %u\n",
		worst_case_needed_lines, needed_vfp_lines, actual_vfp_lines);

	return actual_vfp_lines;
}

/*
 * programmable_fetch_config: Programs HW to prefetch lines by offsetting
 *	the start of fetch into the vertical front porch for cases where the
 *	vsync pulse width and vertical back porch time are insufficient
 *
 * Gets the number of lines to prefetch, then calculates the VSYNC counter
 * value. The HW layer requires the VSYNC counter of the first pixel of the
 * target VFP line.
 *
 * @timing: Pointer to the intf timing information for the requested mode
 */
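/*
 * With the illustrative 1080p60 totals used above (vert_total = 1125,
 * horiz_total = 2200) and, say, 4 prefetch lines, the counter programmed
 * below is (1125 - 4) * 2200 + 1 = 2466201, i.e. the first pixel of the
 * fourth line before the end of the frame.
 */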
static void programmable_fetch_config(struct dpu_encoder_phys *phys_enc,
				      const struct intf_timing_params *timing)
{
	struct dpu_encoder_phys_vid *vid_enc =
		to_dpu_encoder_phys_vid(phys_enc);
	struct intf_prog_fetch f = { 0 };
	u32 vfp_fetch_lines = 0;
	u32 horiz_total = 0;
	u32 vert_total = 0;
	u32 vfp_fetch_start_vsync_counter = 0;
	unsigned long lock_flags;

	if (WARN_ON_ONCE(!vid_enc->hw_intf->ops.setup_prg_fetch))
		return;

	vfp_fetch_lines = programmable_fetch_get_num_lines(vid_enc, timing);
	if (vfp_fetch_lines) {
		vert_total = get_vertical_total(timing);
		horiz_total = get_horizontal_total(timing);
		vfp_fetch_start_vsync_counter =
		    (vert_total - vfp_fetch_lines) * horiz_total + 1;
		f.enable = 1;
		f.fetch_start = vfp_fetch_start_vsync_counter;
	}

	DPU_DEBUG_VIDENC(vid_enc,
		"vfp_fetch_lines %u vfp_fetch_start_vsync_counter %u\n",
		vfp_fetch_lines, vfp_fetch_start_vsync_counter);

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	vid_enc->hw_intf->ops.setup_prg_fetch(vid_enc->hw_intf, &f);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
}

static bool dpu_encoder_phys_vid_mode_fixup(
		struct dpu_encoder_phys *phys_enc,
		const struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	if (phys_enc)
		DPU_DEBUG_VIDENC(to_dpu_encoder_phys_vid(phys_enc), "\n");

	/*
	 * Modifying mode has consequences when the mode comes back to us
	 */
	return true;
}

static void dpu_encoder_phys_vid_setup_timing_engine(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_vid *vid_enc;
	struct drm_display_mode mode;
	struct intf_timing_params timing_params = { 0 };
	const struct dpu_format *fmt = NULL;
	u32 fmt_fourcc = DRM_FORMAT_RGB888;
	unsigned long lock_flags;
	struct dpu_hw_intf_cfg intf_cfg = { 0 };

	if (!phys_enc || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
		DPU_ERROR("invalid encoder %d\n", phys_enc != 0);
		return;
	}

	mode = phys_enc->cached_mode;
	vid_enc = to_dpu_encoder_phys_vid(phys_enc);
	if (!vid_enc->hw_intf->ops.setup_timing_gen) {
		DPU_ERROR("timing engine setup is not supported\n");
		return;
	}

	DPU_DEBUG_VIDENC(vid_enc, "enabling mode:\n");
	drm_mode_debug_printmodeline(&mode);

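	/*
	 * In split mode each interface drives half of the horizontal
	 * timing: e.g. an illustrative 3840-wide mode would program each
	 * timing engine for a 1920-pixel-wide half of the screen.
	 */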
	if (phys_enc->split_role != ENC_ROLE_SOLO) {
		mode.hdisplay >>= 1;
		mode.htotal >>= 1;
		mode.hsync_start >>= 1;
		mode.hsync_end >>= 1;

		DPU_DEBUG_VIDENC(vid_enc,
			"split_role %d, halve horizontal %d %d %d %d\n",
			phys_enc->split_role,
			mode.hdisplay, mode.htotal,
			mode.hsync_start, mode.hsync_end);
	}

	drm_mode_to_intf_timing_params(vid_enc, &mode, &timing_params);

	fmt = dpu_get_dpu_format(fmt_fourcc);
	DPU_DEBUG_VIDENC(vid_enc, "fmt_fourcc 0x%X\n", fmt_fourcc);

	intf_cfg.intf = vid_enc->hw_intf->idx;
	intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_VID;
	intf_cfg.stream_sel = 0; /* Don't care value for video mode */
	intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	vid_enc->hw_intf->ops.setup_timing_gen(vid_enc->hw_intf,
			&timing_params, fmt);
	phys_enc->hw_ctl->ops.setup_intf_cfg(phys_enc->hw_ctl, &intf_cfg);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	programmable_fetch_config(phys_enc, &timing_params);

	vid_enc->timing_params = timing_params;
}

static void dpu_encoder_phys_vid_vblank_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_hw_ctl *hw_ctl;
	unsigned long lock_flags;
	u32 flush_register = 0;
	int new_cnt = -1, old_cnt = -1;

	if (!phys_enc)
		return;

	hw_ctl = phys_enc->hw_ctl;
	if (!hw_ctl)
		return;

	DPU_ATRACE_BEGIN("vblank_irq");

	if (phys_enc->parent_ops->handle_vblank_virt)
		phys_enc->parent_ops->handle_vblank_virt(phys_enc->parent,
				phys_enc);

	old_cnt = atomic_read(&phys_enc->pending_kickoff_cnt);

	/*
	 * Only decrement the pending kickoff count if we've actually flushed
	 * hardware. Due to SW IRQ latency, vblank may have already happened,
	 * so we need to double-check with HW that it accepted the flush bits.
	 */
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	if (hw_ctl && hw_ctl->ops.get_flush_register)
		flush_register = hw_ctl->ops.get_flush_register(hw_ctl);

	if (flush_register == 0)
		new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt,
				-1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("vblank_irq");
}

static void dpu_encoder_phys_vid_underrun_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;

	if (!phys_enc)
		return;

	if (phys_enc->parent_ops->handle_underrun_virt)
		phys_enc->parent_ops->handle_underrun_virt(phys_enc->parent,
			phys_enc);
}

static bool _dpu_encoder_phys_is_dual_ctl(struct dpu_encoder_phys *phys_enc)
{
	if (!phys_enc)
		return false;

	if (phys_enc->topology_name == DPU_RM_TOPOLOGY_DUALPIPE)
		return true;

	return false;
}

static bool dpu_encoder_phys_vid_needs_single_flush(
		struct dpu_encoder_phys *phys_enc)
{
	return (phys_enc && _dpu_encoder_phys_is_dual_ctl(phys_enc));
}

static void _dpu_encoder_phys_vid_setup_irq_hw_idx(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_irq *irq;

	/*
	 * Initialize irq->hw_idx only when the irq is not registered.
	 * This prevents invalidating irq->irq_idx, as modeset may be
	 * called many times during DFPS (dynamic FPS switching).
	 */

	irq = &phys_enc->irq[INTR_IDX_VSYNC];
	if (irq->irq_idx < 0)
		irq->hw_idx = phys_enc->intf_idx;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	if (irq->irq_idx < 0)
		irq->hw_idx = phys_enc->intf_idx;
}

static void dpu_encoder_phys_vid_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct dpu_rm *rm;
	struct dpu_rm_hw_iter iter;
	int i, instance;
	struct dpu_encoder_phys_vid *vid_enc;

	if (!phys_enc || !phys_enc->dpu_kms) {
		DPU_ERROR("invalid encoder/kms\n");
		return;
	}

	rm = &phys_enc->dpu_kms->rm;
	vid_enc = to_dpu_encoder_phys_vid(phys_enc);

	if (adj_mode) {
		phys_enc->cached_mode = *adj_mode;
		drm_mode_debug_printmodeline(adj_mode);
		DPU_DEBUG_VIDENC(vid_enc, "caching mode:\n");
	}

	instance = phys_enc->split_role == ENC_ROLE_SLAVE ? 1 : 0;

	/* Retrieve previously allocated HW Resources. Shouldn't fail */
	dpu_rm_init_hw_iter(&iter, phys_enc->parent->base.id, DPU_HW_BLK_CTL);
	for (i = 0; i <= instance; i++) {
		if (dpu_rm_get_hw(rm, &iter))
			phys_enc->hw_ctl = (struct dpu_hw_ctl *)iter.hw;
	}
	if (IS_ERR_OR_NULL(phys_enc->hw_ctl)) {
		DPU_ERROR_VIDENC(vid_enc, "failed to init ctl, %ld\n",
				PTR_ERR(phys_enc->hw_ctl));
		phys_enc->hw_ctl = NULL;
		return;
	}

	_dpu_encoder_phys_vid_setup_irq_hw_idx(phys_enc);
}

static int dpu_encoder_phys_vid_control_vblank_irq(
		struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	struct dpu_encoder_phys_vid *vid_enc;
	int refcount;

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	refcount = atomic_read(&phys_enc->vblank_refcount);
	vid_enc = to_dpu_encoder_phys_vid(phys_enc);

	/* Slave encoders don't report vblank */
	if (!dpu_encoder_phys_vid_is_master(phys_enc))
		goto end;

	/* Protect against the refcount going negative */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	DRM_DEBUG_KMS("id:%u enable=%d/%d\n", DRMID(phys_enc->parent), enable,
		      atomic_read(&phys_enc->vblank_refcount));

	if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
		ret = dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_VSYNC);
	else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
		ret = dpu_encoder_helper_unregister_irq(phys_enc,
				INTR_IDX_VSYNC);

end:
	if (ret) {
		DRM_ERROR("failed: id:%u intf:%d ret:%d enable:%d refcnt:%d\n",
			  DRMID(phys_enc->parent),
			  vid_enc->hw_intf->idx - INTF_0, ret, enable,
			  refcount);
	}
	return ret;
}

static void dpu_encoder_phys_vid_enable(struct dpu_encoder_phys *phys_enc)
{
	struct msm_drm_private *priv;
	struct dpu_encoder_phys_vid *vid_enc;
	struct dpu_hw_intf *intf;
	struct dpu_hw_ctl *ctl;
	u32 flush_mask = 0;

	if (!phys_enc || !phys_enc->parent || !phys_enc->parent->dev ||
			!phys_enc->parent->dev->dev_private) {
		DPU_ERROR("invalid encoder/device\n");
		return;
	}
	priv = phys_enc->parent->dev->dev_private;

	vid_enc = to_dpu_encoder_phys_vid(phys_enc);
	intf = vid_enc->hw_intf;
	ctl = phys_enc->hw_ctl;
	if (!vid_enc->hw_intf || !phys_enc->hw_ctl) {
		DPU_ERROR("invalid hw_intf %d hw_ctl %d\n",
				vid_enc->hw_intf != 0, phys_enc->hw_ctl != 0);
		return;
	}

	DPU_DEBUG_VIDENC(vid_enc, "\n");

	if (WARN_ON(!vid_enc->hw_intf->ops.enable_timing))
		return;

	dpu_encoder_helper_split_config(phys_enc, vid_enc->hw_intf->idx);

	dpu_encoder_phys_vid_setup_timing_engine(phys_enc);

	/*
	 * For single flush cases (dual-ctl or pp-split), skip setting the
	 * flush bit for the slave intf, since both intfs use the same ctl
	 * and HW will only flush the master.
	 */
	if (dpu_encoder_phys_vid_needs_single_flush(phys_enc) &&
		!dpu_encoder_phys_vid_is_master(phys_enc))
		goto skip_flush;

	ctl->ops.get_bitmask_intf(ctl, &flush_mask, intf->idx);
	ctl->ops.update_pending_flush(ctl, flush_mask);

skip_flush:
	DPU_DEBUG_VIDENC(vid_enc, "update pending flush ctl %d flush_mask %x\n",
		ctl->idx - CTL_0, flush_mask);

	/* ctl_flush & timing engine enable will be triggered by framework */
	if (phys_enc->enable_state == DPU_ENC_DISABLED)
		phys_enc->enable_state = DPU_ENC_ENABLING;
}

static void dpu_encoder_phys_vid_destroy(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_vid *vid_enc;

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	vid_enc = to_dpu_encoder_phys_vid(phys_enc);
	DPU_DEBUG_VIDENC(vid_enc, "\n");
	kfree(vid_enc);
}

static void dpu_encoder_phys_vid_get_hw_resources(
		struct dpu_encoder_phys *phys_enc,
		struct dpu_encoder_hw_resources *hw_res,
		struct drm_connector_state *conn_state)
{
	struct dpu_encoder_phys_vid *vid_enc;

	if (!phys_enc || !hw_res) {
		DPU_ERROR("invalid arg(s), enc %d hw_res %d conn_state %d\n",
				phys_enc != 0, hw_res != 0, conn_state != 0);
		return;
	}

	vid_enc = to_dpu_encoder_phys_vid(phys_enc);
	if (!vid_enc->hw_intf) {
		DPU_ERROR("invalid arg(s), hw_intf\n");
		return;
	}

	DPU_DEBUG_VIDENC(vid_enc, "\n");
	hw_res->intfs[vid_enc->hw_intf->idx - INTF_0] = INTF_MODE_VIDEO;
}

static int _dpu_encoder_phys_vid_wait_for_vblank(
		struct dpu_encoder_phys *phys_enc, bool notify)
{
	struct dpu_encoder_wait_info wait_info;
	int ret;

	if (!phys_enc) {
		pr_err("invalid encoder\n");
		return -EINVAL;
	}

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	if (!dpu_encoder_phys_vid_is_master(phys_enc)) {
		if (notify && phys_enc->parent_ops->handle_frame_done)
			phys_enc->parent_ops->handle_frame_done(
					phys_enc->parent, phys_enc,
					DPU_ENCODER_FRAME_EVENT_DONE);
		return 0;
	}

	/* Wait for kickoff to complete */
	ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_VSYNC,
			&wait_info);

	if (ret == -ETIMEDOUT) {
		dpu_encoder_helper_report_irq_timeout(phys_enc, INTR_IDX_VSYNC);
	} else if (!ret && notify && phys_enc->parent_ops->handle_frame_done) {
		phys_enc->parent_ops->handle_frame_done(
				phys_enc->parent, phys_enc,
				DPU_ENCODER_FRAME_EVENT_DONE);
	}

	return ret;
}

static int dpu_encoder_phys_vid_wait_for_vblank(
		struct dpu_encoder_phys *phys_enc)
{
	return _dpu_encoder_phys_vid_wait_for_vblank(phys_enc, true);
}

static void dpu_encoder_phys_vid_prepare_for_kickoff(
		struct dpu_encoder_phys *phys_enc,
		struct dpu_encoder_kickoff_params *params)
{
	struct dpu_encoder_phys_vid *vid_enc;
	struct dpu_hw_ctl *ctl;
	int rc;

	if (!phys_enc || !params) {
		DPU_ERROR("invalid encoder/parameters\n");
		return;
	}
	vid_enc = to_dpu_encoder_phys_vid(phys_enc);

	ctl = phys_enc->hw_ctl;
	if (!ctl || !ctl->ops.wait_reset_status)
		return;

	/*
	 * HW supports hardware-initiated ctl reset, so before we kick off a
	 * new frame we need to check for, and wait on, completion of any
	 * hardware-initiated ctl reset.
	 */
	rc = ctl->ops.wait_reset_status(ctl);
	if (rc) {
		DPU_ERROR_VIDENC(vid_enc, "ctl %d reset failure: %d\n",
				ctl->idx, rc);
		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_VSYNC);
		dpu_dbg_dump(false, __func__, true, true);
	}
}

static void dpu_encoder_phys_vid_disable(struct dpu_encoder_phys *phys_enc)
{
	struct msm_drm_private *priv;
	struct dpu_encoder_phys_vid *vid_enc;
	unsigned long lock_flags;
	int ret;

	if (!phys_enc || !phys_enc->parent || !phys_enc->parent->dev ||
			!phys_enc->parent->dev->dev_private) {
		DPU_ERROR("invalid encoder/device\n");
		return;
	}
	priv = phys_enc->parent->dev->dev_private;

	vid_enc = to_dpu_encoder_phys_vid(phys_enc);
	if (!vid_enc->hw_intf || !phys_enc->hw_ctl) {
		DPU_ERROR("invalid hw_intf %d hw_ctl %d\n",
				vid_enc->hw_intf != 0, phys_enc->hw_ctl != 0);
		return;
	}

	DPU_DEBUG_VIDENC(vid_enc, "\n");

	if (WARN_ON(!vid_enc->hw_intf->ops.enable_timing))
		return;

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DPU_ERROR("already disabled\n");
		return;
	}

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	vid_enc->hw_intf->ops.enable_timing(vid_enc->hw_intf, 0);
	if (dpu_encoder_phys_vid_is_master(phys_enc))
		dpu_encoder_phys_inc_pending(phys_enc);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	/*
	 * Wait for a vsync so we know the ENABLE=0 latched before the
	 * (connector) source of the vsyncs gets disabled. Otherwise we end
	 * up in a funny state if we re-enable before the disable latches,
	 * with the result that some of the settings changed for the new
	 * modeset (like a new scanout buffer) don't latch properly.
	 */
	if (dpu_encoder_phys_vid_is_master(phys_enc)) {
		ret = _dpu_encoder_phys_vid_wait_for_vblank(phys_enc, false);
		if (ret) {
			atomic_set(&phys_enc->pending_kickoff_cnt, 0);
			DRM_ERROR("wait disable failed: id:%u intf:%d ret:%d\n",
				  DRMID(phys_enc->parent),
				  vid_enc->hw_intf->idx - INTF_0, ret);
		}
	}

	phys_enc->enable_state = DPU_ENC_DISABLED;
}

static void dpu_encoder_phys_vid_handle_post_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	unsigned long lock_flags;
	struct dpu_encoder_phys_vid *vid_enc;

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	vid_enc = to_dpu_encoder_phys_vid(phys_enc);
	DPU_DEBUG_VIDENC(vid_enc, "enable_state %d\n", phys_enc->enable_state);

	/*
	 * Video mode must flush CTL before enabling the timing engine,
	 * so video encoders turn on their interfaces now, after the flush.
	 */
	if (phys_enc->enable_state == DPU_ENC_ENABLING) {
		trace_dpu_enc_phys_vid_post_kickoff(DRMID(phys_enc->parent),
				    vid_enc->hw_intf->idx - INTF_0);
		spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
		vid_enc->hw_intf->ops.enable_timing(vid_enc->hw_intf, 1);
		spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
		phys_enc->enable_state = DPU_ENC_ENABLED;
	}
}

static void dpu_encoder_phys_vid_irq_control(struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	struct dpu_encoder_phys_vid *vid_enc;
	int ret;

	if (!phys_enc)
		return;

	vid_enc = to_dpu_encoder_phys_vid(phys_enc);

	trace_dpu_enc_phys_vid_irq_ctrl(DRMID(phys_enc->parent),
			    vid_enc->hw_intf->idx - INTF_0,
			    enable,
			    atomic_read(&phys_enc->vblank_refcount));

	if (enable) {
		ret = dpu_encoder_phys_vid_control_vblank_irq(phys_enc, true);
		if (ret)
			return;

		dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_UNDERRUN);
	} else {
		dpu_encoder_phys_vid_control_vblank_irq(phys_enc, false);
		dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
	}
}

static void dpu_encoder_phys_vid_setup_misr(struct dpu_encoder_phys *phys_enc,
		bool enable, u32 frame_count)
{
	struct dpu_encoder_phys_vid *vid_enc;

	if (!phys_enc)
		return;
	vid_enc = to_dpu_encoder_phys_vid(phys_enc);

	if (vid_enc->hw_intf && vid_enc->hw_intf->ops.setup_misr)
		vid_enc->hw_intf->ops.setup_misr(vid_enc->hw_intf,
				enable, frame_count);
}

static u32 dpu_encoder_phys_vid_collect_misr(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_vid *vid_enc;

	if (!phys_enc)
		return 0;
	vid_enc = to_dpu_encoder_phys_vid(phys_enc);

	return vid_enc->hw_intf && vid_enc->hw_intf->ops.collect_misr ?
		vid_enc->hw_intf->ops.collect_misr(vid_enc->hw_intf) : 0;
}

static int dpu_encoder_phys_vid_get_line_count(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_vid *vid_enc;

	if (!phys_enc)
		return -EINVAL;

	if (!dpu_encoder_phys_vid_is_master(phys_enc))
		return -EINVAL;

	vid_enc = to_dpu_encoder_phys_vid(phys_enc);
	if (!vid_enc->hw_intf || !vid_enc->hw_intf->ops.get_line_count)
		return -EINVAL;

	return vid_enc->hw_intf->ops.get_line_count(vid_enc->hw_intf);
}

static void dpu_encoder_phys_vid_init_ops(struct dpu_encoder_phys_ops *ops)
{
	ops->is_master = dpu_encoder_phys_vid_is_master;
	ops->mode_set = dpu_encoder_phys_vid_mode_set;
	ops->mode_fixup = dpu_encoder_phys_vid_mode_fixup;
	ops->enable = dpu_encoder_phys_vid_enable;
	ops->disable = dpu_encoder_phys_vid_disable;
	ops->destroy = dpu_encoder_phys_vid_destroy;
	ops->get_hw_resources = dpu_encoder_phys_vid_get_hw_resources;
	ops->control_vblank_irq = dpu_encoder_phys_vid_control_vblank_irq;
	ops->wait_for_commit_done = dpu_encoder_phys_vid_wait_for_vblank;
	ops->wait_for_vblank = dpu_encoder_phys_vid_wait_for_vblank;
	ops->wait_for_tx_complete = dpu_encoder_phys_vid_wait_for_vblank;
	ops->irq_control = dpu_encoder_phys_vid_irq_control;
	ops->prepare_for_kickoff = dpu_encoder_phys_vid_prepare_for_kickoff;
	ops->handle_post_kickoff = dpu_encoder_phys_vid_handle_post_kickoff;
	ops->needs_single_flush = dpu_encoder_phys_vid_needs_single_flush;
	ops->setup_misr = dpu_encoder_phys_vid_setup_misr;
	ops->collect_misr = dpu_encoder_phys_vid_collect_misr;
	ops->hw_reset = dpu_encoder_helper_hw_reset;
	ops->get_line_count = dpu_encoder_phys_vid_get_line_count;
}
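
/*
 * The ops installed above are invoked through phys_enc->ops by the virtual
 * encoder layer (dpu_encoder.c), which fans each drm_encoder callback out
 * to its physical encoders.
 */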

struct dpu_encoder_phys *dpu_encoder_phys_vid_init(
		struct dpu_enc_phys_init_params *p)
{
	struct dpu_encoder_phys *phys_enc = NULL;
	struct dpu_encoder_phys_vid *vid_enc = NULL;
	struct dpu_rm_hw_iter iter;
	struct dpu_hw_mdp *hw_mdp;
	struct dpu_encoder_irq *irq;
	int i, ret = 0;

	if (!p) {
		ret = -EINVAL;
		goto fail;
	}

	vid_enc = kzalloc(sizeof(*vid_enc), GFP_KERNEL);
	if (!vid_enc) {
		ret = -ENOMEM;
		goto fail;
	}

	phys_enc = &vid_enc->base;

	hw_mdp = dpu_rm_get_mdp(&p->dpu_kms->rm);
	if (IS_ERR_OR_NULL(hw_mdp)) {
		ret = PTR_ERR(hw_mdp);
		DPU_ERROR("failed to get mdptop\n");
		goto fail;
	}
	phys_enc->hw_mdptop = hw_mdp;
	phys_enc->intf_idx = p->intf_idx;

	/*
	 * The hw_intf resource is permanently assigned to this encoder;
	 * other resources are allocated at atomic commit time by use case.
	 */
	dpu_rm_init_hw_iter(&iter, 0, DPU_HW_BLK_INTF);
	while (dpu_rm_get_hw(&p->dpu_kms->rm, &iter)) {
		struct dpu_hw_intf *hw_intf = (struct dpu_hw_intf *)iter.hw;

		if (hw_intf->idx == p->intf_idx) {
			vid_enc->hw_intf = hw_intf;
			break;
		}
	}

	if (!vid_enc->hw_intf) {
		ret = -EINVAL;
		DPU_ERROR("failed to get hw_intf\n");
		goto fail;
	}

	DPU_DEBUG_VIDENC(vid_enc, "\n");

	dpu_encoder_phys_vid_init_ops(&phys_enc->ops);
	phys_enc->parent = p->parent;
	phys_enc->parent_ops = p->parent_ops;
	phys_enc->dpu_kms = p->dpu_kms;
	phys_enc->split_role = p->split_role;
	phys_enc->intf_mode = INTF_MODE_VIDEO;
	phys_enc->enc_spinlock = p->enc_spinlock;
	for (i = 0; i < INTR_IDX_MAX; i++) {
		irq = &phys_enc->irq[i];
		INIT_LIST_HEAD(&irq->cb.list);
		irq->irq_idx = -EINVAL;
		irq->hw_idx = -EINVAL;
		irq->cb.arg = phys_enc;
	}

	irq = &phys_enc->irq[INTR_IDX_VSYNC];
	irq->name = "vsync_irq";
	irq->intr_type = DPU_IRQ_TYPE_INTF_VSYNC;
	irq->intr_idx = INTR_IDX_VSYNC;
	irq->cb.func = dpu_encoder_phys_vid_vblank_irq;

	irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
	irq->name = "underrun";
	irq->intr_type = DPU_IRQ_TYPE_INTF_UNDER_RUN;
	irq->intr_idx = INTR_IDX_UNDERRUN;
	irq->cb.func = dpu_encoder_phys_vid_underrun_irq;

	atomic_set(&phys_enc->vblank_refcount, 0);
	atomic_set(&phys_enc->pending_kickoff_cnt, 0);
	init_waitqueue_head(&phys_enc->pending_kickoff_wq);
	phys_enc->enable_state = DPU_ENC_DISABLED;

	DPU_DEBUG_VIDENC(vid_enc, "created intf idx:%d\n", p->intf_idx);

	return phys_enc;

fail:
	DPU_ERROR("failed to create encoder\n");
	if (vid_enc)
		dpu_encoder_phys_vid_destroy(phys_enc);

	return ERR_PTR(ret);
}