1 /*
2 * Copyright 2012-15 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: AMD
23 *
24 */
25 #include "dm_services.h"
26
27 #include "resource.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
31 #include "opp.h"
32 #include "timing_generator.h"
33 #include "transform.h"
34 #include "dpp.h"
35 #include "core_types.h"
36 #include "set_mode_types.h"
37 #include "virtual/virtual_stream_encoder.h"
38 #include "dpcd_defs.h"
39
40 #include "dce80/dce80_resource.h"
41 #include "dce100/dce100_resource.h"
42 #include "dce110/dce110_resource.h"
43 #include "dce112/dce112_resource.h"
44 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
45 #include "dcn10/dcn10_resource.h"
46 #endif
47 #include "dce120/dce120_resource.h"
48
49 #define DC_LOGGER_INIT(logger)
50
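/*
 * Map the ASIC family/revision reported by the base driver to a DCE/DCN
 * display engine version. Returns DCE_VERSION_UNKNOWN for unsupported parts.
 */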
51 enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
52 {
53 enum dce_version dc_version = DCE_VERSION_UNKNOWN;
54 switch (asic_id.chip_family) {
55
56 case FAMILY_CI:
57 dc_version = DCE_VERSION_8_0;
58 break;
59 case FAMILY_KV:
60 if (ASIC_REV_IS_KALINDI(asic_id.hw_internal_rev) ||
61 ASIC_REV_IS_BHAVANI(asic_id.hw_internal_rev) ||
62 ASIC_REV_IS_GODAVARI(asic_id.hw_internal_rev))
63 dc_version = DCE_VERSION_8_3;
64 else
65 dc_version = DCE_VERSION_8_1;
66 break;
67 case FAMILY_CZ:
68 dc_version = DCE_VERSION_11_0;
69 break;
70
71 case FAMILY_VI:
72 if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
73 ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
74 dc_version = DCE_VERSION_10_0;
75 break;
76 }
77 if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
78 ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
79 ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
80 dc_version = DCE_VERSION_11_2;
81 }
82 if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev))
83 dc_version = DCE_VERSION_11_22;
84 break;
85 case FAMILY_AI:
86 dc_version = DCE_VERSION_12_0;
87 break;
88 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
89 case FAMILY_RV:
90 dc_version = DCN_VERSION_1_0;
91 break;
92 #endif
93 default:
94 dc_version = DCE_VERSION_UNKNOWN;
95 break;
96 }
97 return dc_version;
98 }
99
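/*
 * Construct the version-specific resource pool (pipes, encoders, clock
 * sources, etc.) and cache the crystal/reference clock frequency reported
 * by the VBIOS firmware info.
 */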
100 struct resource_pool *dc_create_resource_pool(
101 struct dc *dc,
102 int num_virtual_links,
103 enum dce_version dc_version,
104 struct hw_asic_id asic_id)
105 {
106 struct resource_pool *res_pool = NULL;
107
108 switch (dc_version) {
109 case DCE_VERSION_8_0:
110 res_pool = dce80_create_resource_pool(
111 num_virtual_links, dc);
112 break;
113 case DCE_VERSION_8_1:
114 res_pool = dce81_create_resource_pool(
115 num_virtual_links, dc);
116 break;
117 case DCE_VERSION_8_3:
118 res_pool = dce83_create_resource_pool(
119 num_virtual_links, dc);
120 break;
121 case DCE_VERSION_10_0:
122 res_pool = dce100_create_resource_pool(
123 num_virtual_links, dc);
124 break;
125 case DCE_VERSION_11_0:
126 res_pool = dce110_create_resource_pool(
127 num_virtual_links, dc, asic_id);
128 break;
129 case DCE_VERSION_11_2:
130 case DCE_VERSION_11_22:
131 res_pool = dce112_create_resource_pool(
132 num_virtual_links, dc);
133 break;
134 case DCE_VERSION_12_0:
135 res_pool = dce120_create_resource_pool(
136 num_virtual_links, dc);
137 break;
138
139 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
140 case DCN_VERSION_1_0:
141 res_pool = dcn10_create_resource_pool(
142 num_virtual_links, dc);
143 break;
144 #endif
145
146
147 default:
148 break;
149 }
150 if (res_pool != NULL) {
151 struct dc_firmware_info fw_info = { { 0 } };
152
153 if (dc->ctx->dc_bios->funcs->get_firmware_info(
154 dc->ctx->dc_bios, &fw_info) == BP_RESULT_OK) {
155 res_pool->ref_clock_inKhz = fw_info.pll_info.crystal_frequency;
156 } else
157 ASSERT_CRITICAL(false);
158 }
159
160 return res_pool;
161 }
162
163 void dc_destroy_resource_pool(struct dc *dc)
164 {
165 if (dc) {
166 if (dc->res_pool)
167 dc->res_pool->funcs->destroy(&dc->res_pool);
168
169 kfree(dc->hwseq);
170 }
171 }
172
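/*
 * Derive audio capabilities from the DCE straps: DP audio is always
 * reported, HDMI audio (native and on-dongle) is enabled when the HDMI
 * disable strap is clear and the audio pin strap bit is set, and a fused
 * audio_stream_number of 1 limits the pool to a single audio stream.
 */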
173 static void update_num_audio(
174 const struct resource_straps *straps,
175 unsigned int *num_audio,
176 struct audio_support *aud_support)
177 {
178 aud_support->dp_audio = true;
179 aud_support->hdmi_audio_native = false;
180 aud_support->hdmi_audio_on_dongle = false;
181
182 if (straps->hdmi_disable == 0) {
183 if (straps->dc_pinstraps_audio & 0x2) {
184 aud_support->hdmi_audio_on_dongle = true;
185 aud_support->hdmi_audio_native = true;
186 }
187 }
188
189 switch (straps->audio_stream_number) {
190 case 0: /* multi streams supported */
191 break;
192 case 1: /* multi streams not supported */
193 *num_audio = 1;
194 break;
195 default:
196 DC_ERR("DC: unexpected audio fuse!\n");
197 }
198 }
199
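/*
 * Common pool construction: create audio endpoints (bounded by the
 * strap-derived count), stream encoders, virtual stream encoders for the
 * requested virtual links, and the HW sequencer. dynamic_audio is set when
 * there are fewer audio endpoints than stream encoders.
 */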
200 bool resource_construct(
201 unsigned int num_virtual_links,
202 struct dc *dc,
203 struct resource_pool *pool,
204 const struct resource_create_funcs *create_funcs)
205 {
206 struct dc_context *ctx = dc->ctx;
207 const struct resource_caps *caps = pool->res_cap;
208 int i;
209 unsigned int num_audio = caps->num_audio;
210 struct resource_straps straps = {0};
211
212 if (create_funcs->read_dce_straps)
213 create_funcs->read_dce_straps(dc->ctx, &straps);
214
215 pool->audio_count = 0;
216 if (create_funcs->create_audio) {
217 /* find the total number of streams available via the
218 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
219 * registers (one for each pin) starting from pin 1
220 * up to the max number of audio pins.
221 * We stop on the first pin where
222 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
223 */
224 update_num_audio(&straps, &num_audio, &pool->audio_support);
225 for (i = 0; i < pool->pipe_count && i < num_audio; i++) {
226 struct audio *aud = create_funcs->create_audio(ctx, i);
227
228 if (aud == NULL) {
229 DC_ERR("DC: failed to create audio!\n");
230 return false;
231 }
232
233 if (!aud->funcs->endpoint_valid(aud)) {
234 aud->funcs->destroy(&aud);
235 break;
236 }
237
238 pool->audios[i] = aud;
239 pool->audio_count++;
240 }
241 }
242
243 pool->stream_enc_count = 0;
244 if (create_funcs->create_stream_encoder) {
245 for (i = 0; i < caps->num_stream_encoder; i++) {
246 pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
247 if (pool->stream_enc[i] == NULL)
248 DC_ERR("DC: failed to create stream_encoder!\n");
249 pool->stream_enc_count++;
250 }
251 }
252 dc->caps.dynamic_audio = false;
253 if (pool->audio_count < pool->stream_enc_count) {
254 dc->caps.dynamic_audio = true;
255 }
256 for (i = 0; i < num_virtual_links; i++) {
257 pool->stream_enc[pool->stream_enc_count] =
258 virtual_stream_encoder_create(
259 ctx, ctx->dc_bios);
260 if (pool->stream_enc[pool->stream_enc_count] == NULL) {
261 DC_ERR("DC: failed to create stream_encoder!\n");
262 return false;
263 }
264 pool->stream_enc_count++;
265 }
266
267 dc->hwseq = create_funcs->create_hwseq(ctx);
268
269 return true;
270 }
271 static int find_matching_clock_source(
272 const struct resource_pool *pool,
273 struct clock_source *clock_source)
274 {
275
276 int i;
277
278 for (i = 0; i < pool->clk_src_count; i++) {
279 if (pool->clock_sources[i] == clock_source)
280 return i;
281 }
282 return -1;
283 }
284
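/*
 * Clock sources are reference counted per pool entry so they can be shared
 * between pipes; the dedicated DP clock source keeps its own counter.
 */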
285 void resource_unreference_clock_source(
286 struct resource_context *res_ctx,
287 const struct resource_pool *pool,
288 struct clock_source *clock_source)
289 {
290 int i = find_matching_clock_source(pool, clock_source);
291
292 if (i > -1)
293 res_ctx->clock_source_ref_count[i]--;
294
295 if (pool->dp_clock_source == clock_source)
296 res_ctx->dp_clock_source_ref_count--;
297 }
298
299 void resource_reference_clock_source(
300 struct resource_context *res_ctx,
301 const struct resource_pool *pool,
302 struct clock_source *clock_source)
303 {
304 int i = find_matching_clock_source(pool, clock_source);
305
306 if (i > -1)
307 res_ctx->clock_source_ref_count[i]++;
308
309 if (pool->dp_clock_source == clock_source)
310 res_ctx->dp_clock_source_ref_count++;
311 }
312
313 int resource_get_clock_source_reference(
314 struct resource_context *res_ctx,
315 const struct resource_pool *pool,
316 struct clock_source *clock_source)
317 {
318 int i = find_matching_clock_source(pool, clock_source);
319
320 if (i > -1)
321 return res_ctx->clock_source_ref_count[i];
322
323 if (pool->dp_clock_source == clock_source)
324 return res_ctx->dp_clock_source_ref_count;
325
326 return -1;
327 }
328
329 bool resource_are_streams_timing_synchronizable(
330 struct dc_stream_state *stream1,
331 struct dc_stream_state *stream2)
332 {
333 if (stream1->timing.h_total != stream2->timing.h_total)
334 return false;
335
336 if (stream1->timing.v_total != stream2->timing.v_total)
337 return false;
338
339 if (stream1->timing.h_addressable
340 != stream2->timing.h_addressable)
341 return false;
342
343 if (stream1->timing.v_addressable
344 != stream2->timing.v_addressable)
345 return false;
346
347 if (stream1->timing.pix_clk_khz
348 != stream2->timing.pix_clk_khz)
349 return false;
350
351 if (stream1->clamping.c_depth != stream2->clamping.c_depth)
352 return false;
353
354 if (stream1->phy_pix_clk != stream2->phy_pix_clk
355 && (!dc_is_dp_signal(stream1->signal)
356 || !dc_is_dp_signal(stream2->signal)))
357 return false;
358
359 return true;
360 }
361 static bool is_dp_and_hdmi_sharable(
362 struct dc_stream_state *stream1,
363 struct dc_stream_state *stream2)
364 {
365 if (stream1->ctx->dc->caps.disable_dp_clk_share)
366 return false;
367
368 if (stream1->clamping.c_depth != COLOR_DEPTH_888 ||
369 stream2->clamping.c_depth != COLOR_DEPTH_888)
370 return false;
371
372 return true;
373
374 }
375
376 static bool is_sharable_clk_src(
377 const struct pipe_ctx *pipe_with_clk_src,
378 const struct pipe_ctx *pipe)
379 {
380 if (pipe_with_clk_src->clock_source == NULL)
381 return false;
382
383 if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
384 return false;
385
386 if (dc_is_dp_signal(pipe_with_clk_src->stream->signal) ||
387 (dc_is_dp_signal(pipe->stream->signal) &&
388 !is_dp_and_hdmi_sharable(pipe_with_clk_src->stream,
389 pipe->stream)))
390 return false;
391
392 if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
393 && dc_is_dual_link_signal(pipe->stream->signal))
394 return false;
395
396 if (dc_is_hdmi_signal(pipe->stream->signal)
397 && dc_is_dual_link_signal(pipe_with_clk_src->stream->signal))
398 return false;
399
400 if (!resource_are_streams_timing_synchronizable(
401 pipe_with_clk_src->stream, pipe->stream))
402 return false;
403
404 return true;
405 }
406
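/*
 * Scan all pipes for an already-programmed clock source that the given
 * pipe can share, based on the compatibility rules above (same timing,
 * no DP/HDMI or dual-link conflicts).
 */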
407 struct clock_source *resource_find_used_clk_src_for_sharing(
408 struct resource_context *res_ctx,
409 struct pipe_ctx *pipe_ctx)
410 {
411 int i;
412
413 for (i = 0; i < MAX_PIPES; i++) {
414 if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
415 return res_ctx->pipe_ctx[i].clock_source;
416 }
417
418 return NULL;
419 }
420
421 static enum pixel_format convert_pixel_format_to_dalsurface(
422 enum surface_pixel_format surface_pixel_format)
423 {
424 enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
425
426 switch (surface_pixel_format) {
427 case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
428 dal_pixel_format = PIXEL_FORMAT_INDEX8;
429 break;
430 case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
431 dal_pixel_format = PIXEL_FORMAT_RGB565;
432 break;
433 case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
434 dal_pixel_format = PIXEL_FORMAT_RGB565;
435 break;
436 case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
437 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
438 break;
439 case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
440 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
441 break;
442 case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
443 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
444 break;
445 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
446 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
447 break;
448 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
449 dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
450 break;
451 case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
452 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
453 dal_pixel_format = PIXEL_FORMAT_FP16;
454 break;
455 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
456 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
457 dal_pixel_format = PIXEL_FORMAT_420BPP8;
458 break;
459 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
460 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
461 dal_pixel_format = PIXEL_FORMAT_420BPP10;
462 break;
463 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
464 default:
465 dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
466 break;
467 }
468 return dal_pixel_format;
469 }
470
471 static void rect_swap_helper(struct rect *rect)
472 {
473 swap(rect->height, rect->width);
474 swap(rect->x, rect->y);
475 }
476
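/*
 * The viewport is the portion of the surface that the scaler reads: the
 * intersection of the stream source and the plane clip rectangle, mapped
 * back into surface space through the plane src/dst scaling ratio. The
 * chroma viewport (viewport_c) is halved for 4:2:0 formats, and the
 * viewport is split in half when the plane is shared across two pipes.
 */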
477 static void calculate_viewport(struct pipe_ctx *pipe_ctx)
478 {
479 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
480 const struct dc_stream_state *stream = pipe_ctx->stream;
481 struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
482 struct rect surf_src = plane_state->src_rect;
483 struct rect clip = { 0 };
484 int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
485 || data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
486 bool pri_split = pipe_ctx->bottom_pipe &&
487 pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
488 bool sec_split = pipe_ctx->top_pipe &&
489 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
490
491 if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE ||
492 stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
493 pri_split = false;
494 sec_split = false;
495 }
496
497 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
498 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
499 rect_swap_helper(&surf_src);
500
501 /* The actual clip is an intersection between stream
502 * source and surface clip
503 */
504 clip.x = stream->src.x > plane_state->clip_rect.x ?
505 stream->src.x : plane_state->clip_rect.x;
506
507 clip.width = stream->src.x + stream->src.width <
508 plane_state->clip_rect.x + plane_state->clip_rect.width ?
509 stream->src.x + stream->src.width - clip.x :
510 plane_state->clip_rect.x + plane_state->clip_rect.width - clip.x ;
511
512 clip.y = stream->src.y > plane_state->clip_rect.y ?
513 stream->src.y : plane_state->clip_rect.y;
514
515 clip.height = stream->src.y + stream->src.height <
516 plane_state->clip_rect.y + plane_state->clip_rect.height ?
517 stream->src.y + stream->src.height - clip.y :
518 plane_state->clip_rect.y + plane_state->clip_rect.height - clip.y ;
519
520 /* offset = surf_src.ofs + (clip.ofs - surface->dst_rect.ofs) * scl_ratio
521 * num_pixels = clip.num_pix * scl_ratio
522 */
523 data->viewport.x = surf_src.x + (clip.x - plane_state->dst_rect.x) *
524 surf_src.width / plane_state->dst_rect.width;
525 data->viewport.width = clip.width *
526 surf_src.width / plane_state->dst_rect.width;
527
528 data->viewport.y = surf_src.y + (clip.y - plane_state->dst_rect.y) *
529 surf_src.height / plane_state->dst_rect.height;
530 data->viewport.height = clip.height *
531 surf_src.height / plane_state->dst_rect.height;
532
533 /* Round down, compensate in init */
534 data->viewport_c.x = data->viewport.x / vpc_div;
535 data->viewport_c.y = data->viewport.y / vpc_div;
536 data->inits.h_c = (data->viewport.x % vpc_div) != 0 ?
537 dc_fixpt_half : dc_fixpt_zero;
538 data->inits.v_c = (data->viewport.y % vpc_div) != 0 ?
539 dc_fixpt_half : dc_fixpt_zero;
540 /* Round up, assume original video size always even dimensions */
541 data->viewport_c.width = (data->viewport.width + vpc_div - 1) / vpc_div;
542 data->viewport_c.height = (data->viewport.height + vpc_div - 1) / vpc_div;
543
544 /* Handle hsplit */
545 if (sec_split) {
546 data->viewport.x += data->viewport.width / 2;
547 data->viewport_c.x += data->viewport_c.width / 2;
548 /* Ceil offset pipe */
549 data->viewport.width = (data->viewport.width + 1) / 2;
550 data->viewport_c.width = (data->viewport_c.width + 1) / 2;
551 } else if (pri_split) {
552 data->viewport.width /= 2;
553 data->viewport_c.width /= 2;
554 }
555
556 if (plane_state->rotation == ROTATION_ANGLE_90 ||
557 plane_state->rotation == ROTATION_ANGLE_270) {
558 rect_swap_helper(&data->viewport_c);
559 rect_swap_helper(&data->viewport);
560 }
561 }
562
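/*
 * The recout is the rectangle, in stream destination (active timing)
 * coordinates, that the clipped plane covers on screen. It is clamped to
 * the stream destination and split horizontally or vertically when the
 * plane is shared across two pipes. recout_full is the unclipped
 * equivalent, later used to derive the filter init phases.
 */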
563 static void calculate_recout(struct pipe_ctx *pipe_ctx, struct rect *recout_full)
564 {
565 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
566 const struct dc_stream_state *stream = pipe_ctx->stream;
567 struct rect surf_src = plane_state->src_rect;
568 struct rect surf_clip = plane_state->clip_rect;
569 bool pri_split = pipe_ctx->bottom_pipe &&
570 pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
571 bool sec_split = pipe_ctx->top_pipe &&
572 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
573 bool top_bottom_split = stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM;
574
575 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
576 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
577 rect_swap_helper(&surf_src);
578
579 pipe_ctx->plane_res.scl_data.recout.x = stream->dst.x;
580 if (stream->src.x < surf_clip.x)
581 pipe_ctx->plane_res.scl_data.recout.x += (surf_clip.x
582 - stream->src.x) * stream->dst.width
583 / stream->src.width;
584
585 pipe_ctx->plane_res.scl_data.recout.width = surf_clip.width *
586 stream->dst.width / stream->src.width;
587 if (pipe_ctx->plane_res.scl_data.recout.width + pipe_ctx->plane_res.scl_data.recout.x >
588 stream->dst.x + stream->dst.width)
589 pipe_ctx->plane_res.scl_data.recout.width =
590 stream->dst.x + stream->dst.width
591 - pipe_ctx->plane_res.scl_data.recout.x;
592
593 pipe_ctx->plane_res.scl_data.recout.y = stream->dst.y;
594 if (stream->src.y < surf_clip.y)
595 pipe_ctx->plane_res.scl_data.recout.y += (surf_clip.y
596 - stream->src.y) * stream->dst.height
597 / stream->src.height;
598
599 pipe_ctx->plane_res.scl_data.recout.height = surf_clip.height *
600 stream->dst.height / stream->src.height;
601 if (pipe_ctx->plane_res.scl_data.recout.height + pipe_ctx->plane_res.scl_data.recout.y >
602 stream->dst.y + stream->dst.height)
603 pipe_ctx->plane_res.scl_data.recout.height =
604 stream->dst.y + stream->dst.height
605 - pipe_ctx->plane_res.scl_data.recout.y;
606
607 /* Handle h & vsplit */
608 if (sec_split && top_bottom_split) {
609 pipe_ctx->plane_res.scl_data.recout.y +=
610 pipe_ctx->plane_res.scl_data.recout.height / 2;
611 /* Floor primary pipe, ceil 2ndary pipe */
612 pipe_ctx->plane_res.scl_data.recout.height =
613 (pipe_ctx->plane_res.scl_data.recout.height + 1) / 2;
614 } else if (pri_split && top_bottom_split)
615 pipe_ctx->plane_res.scl_data.recout.height /= 2;
616 else if (pri_split || sec_split) {
617 /* HMirror XOR Secondary_pipe XOR Rotation_180 */
618 bool right_view = (sec_split != plane_state->horizontal_mirror) !=
619 (plane_state->rotation == ROTATION_ANGLE_180);
620
621 if (plane_state->rotation == ROTATION_ANGLE_90
622 || plane_state->rotation == ROTATION_ANGLE_270)
623 /* Secondary_pipe XOR Rotation_270 */
624 right_view = (plane_state->rotation == ROTATION_ANGLE_270) != sec_split;
625
626 if (right_view) {
627 pipe_ctx->plane_res.scl_data.recout.x +=
628 pipe_ctx->plane_res.scl_data.recout.width / 2;
629 /* Ceil offset pipe */
630 pipe_ctx->plane_res.scl_data.recout.width =
631 (pipe_ctx->plane_res.scl_data.recout.width + 1) / 2;
632 } else {
633 pipe_ctx->plane_res.scl_data.recout.width /= 2;
634 }
635 }
636 /* Unclipped recout offset = stream dst offset + ((surf dst offset - stream surf_src offset)
637 * * 1/ stream scaling ratio) - (surf surf_src offset * 1/ full scl
638 * ratio)
639 */
640 recout_full->x = stream->dst.x + (plane_state->dst_rect.x - stream->src.x)
641 * stream->dst.width / stream->src.width -
642 surf_src.x * plane_state->dst_rect.width / surf_src.width
643 * stream->dst.width / stream->src.width;
644 recout_full->y = stream->dst.y + (plane_state->dst_rect.y - stream->src.y)
645 * stream->dst.height / stream->src.height -
646 surf_src.y * plane_state->dst_rect.height / surf_src.height
647 * stream->dst.height / stream->src.height;
648
649 recout_full->width = plane_state->dst_rect.width
650 * stream->dst.width / stream->src.width;
651 recout_full->height = plane_state->dst_rect.height
652 * stream->dst.height / stream->src.height;
653 }
654
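/*
 * Scaling ratio = source size / destination size, combining the plane
 * src->dst scale with the stream src->dst scale (and doubling one axis for
 * side-by-side / top-and-bottom 3D). Chroma ratios are halved for 4:2:0
 * formats and all ratios are truncated to 19 fractional bits.
 */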
655 static void calculate_scaling_ratios(struct pipe_ctx *pipe_ctx)
656 {
657 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
658 const struct dc_stream_state *stream = pipe_ctx->stream;
659 struct rect surf_src = plane_state->src_rect;
660 const int in_w = stream->src.width;
661 const int in_h = stream->src.height;
662 const int out_w = stream->dst.width;
663 const int out_h = stream->dst.height;
664
665 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
666 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
667 rect_swap_helper(&surf_src);
668
669 pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_from_fraction(
670 surf_src.width,
671 plane_state->dst_rect.width);
672 pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_from_fraction(
673 surf_src.height,
674 plane_state->dst_rect.height);
675
676 if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE)
677 pipe_ctx->plane_res.scl_data.ratios.horz.value *= 2;
678 else if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
679 pipe_ctx->plane_res.scl_data.ratios.vert.value *= 2;
680
681 pipe_ctx->plane_res.scl_data.ratios.vert.value = div64_s64(
682 pipe_ctx->plane_res.scl_data.ratios.vert.value * in_h, out_h);
683 pipe_ctx->plane_res.scl_data.ratios.horz.value = div64_s64(
684 pipe_ctx->plane_res.scl_data.ratios.horz.value * in_w, out_w);
685
686 pipe_ctx->plane_res.scl_data.ratios.horz_c = pipe_ctx->plane_res.scl_data.ratios.horz;
687 pipe_ctx->plane_res.scl_data.ratios.vert_c = pipe_ctx->plane_res.scl_data.ratios.vert;
688
689 if (pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP8
690 || pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP10) {
691 pipe_ctx->plane_res.scl_data.ratios.horz_c.value /= 2;
692 pipe_ctx->plane_res.scl_data.ratios.vert_c.value /= 2;
693 }
694 pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_truncate(
695 pipe_ctx->plane_res.scl_data.ratios.horz, 19);
696 pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_truncate(
697 pipe_ctx->plane_res.scl_data.ratios.vert, 19);
698 pipe_ctx->plane_res.scl_data.ratios.horz_c = dc_fixpt_truncate(
699 pipe_ctx->plane_res.scl_data.ratios.horz_c, 19);
700 pipe_ctx->plane_res.scl_data.ratios.vert_c = dc_fixpt_truncate(
701 pipe_ctx->plane_res.scl_data.ratios.vert_c, 19);
702 }
703
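/*
 * Compute the scaler filter init phases (init = (ratio + taps + 1) / 2,
 * plus the chroma offset carried over from calculate_viewport) and then
 * grow or shift the viewport so enough pixels/lines are available for the
 * requested number of taps, taking the scan direction (rotation/mirror)
 * into account.
 */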
704 static void calculate_inits_and_adj_vp(struct pipe_ctx *pipe_ctx, struct rect *recout_full)
705 {
706 struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
707 struct rect src = pipe_ctx->plane_state->src_rect;
708 int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
709 || data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
710 bool flip_vert_scan_dir = false, flip_horz_scan_dir = false;
711
712 /*
713 * Need to calculate the scan direction for viewport to make adjustments
714 */
715 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_180) {
716 flip_vert_scan_dir = true;
717 flip_horz_scan_dir = true;
718 } else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90)
719 flip_vert_scan_dir = true;
720 else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
721 flip_horz_scan_dir = true;
722
723 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
724 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
725 rect_swap_helper(&src);
726 rect_swap_helper(&data->viewport_c);
727 rect_swap_helper(&data->viewport);
728 } else if (pipe_ctx->plane_state->horizontal_mirror)
729 flip_horz_scan_dir = !flip_horz_scan_dir;
730
731 /*
732 * Init calculated according to formula:
733 * init = (scaling_ratio + number_of_taps + 1) / 2
734 * init_bot = init + scaling_ratio
735 * init_c = init + truncated_vp_c_offset(from calculate viewport)
736 */
737 data->inits.h = dc_fixpt_truncate(dc_fixpt_div_int(
738 dc_fixpt_add_int(data->ratios.horz, data->taps.h_taps + 1), 2), 19);
739
740 data->inits.h_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.h_c, dc_fixpt_div_int(
741 dc_fixpt_add_int(data->ratios.horz_c, data->taps.h_taps_c + 1), 2)), 19);
742
743 data->inits.v = dc_fixpt_truncate(dc_fixpt_div_int(
744 dc_fixpt_add_int(data->ratios.vert, data->taps.v_taps + 1), 2), 19);
745
746 data->inits.v_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.v_c, dc_fixpt_div_int(
747 dc_fixpt_add_int(data->ratios.vert_c, data->taps.v_taps_c + 1), 2)), 19);
748
749 if (!flip_horz_scan_dir) {
750 /* Adjust for viewport end clip-off */
751 if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
752 int vp_clip = src.x + src.width - data->viewport.width - data->viewport.x;
753 int int_part = dc_fixpt_floor(
754 dc_fixpt_sub(data->inits.h, data->ratios.horz));
755
756 int_part = int_part > 0 ? int_part : 0;
757 data->viewport.width += int_part < vp_clip ? int_part : vp_clip;
758 }
759 if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
760 int vp_clip = (src.x + src.width) / vpc_div -
761 data->viewport_c.width - data->viewport_c.x;
762 int int_part = dc_fixpt_floor(
763 dc_fixpt_sub(data->inits.h_c, data->ratios.horz_c));
764
765 int_part = int_part > 0 ? int_part : 0;
766 data->viewport_c.width += int_part < vp_clip ? int_part : vp_clip;
767 }
768
769 /* Adjust for non-0 viewport offset */
770 if (data->viewport.x) {
771 int int_part;
772
773 data->inits.h = dc_fixpt_add(data->inits.h, dc_fixpt_mul_int(
774 data->ratios.horz, data->recout.x - recout_full->x));
775 int_part = dc_fixpt_floor(data->inits.h) - data->viewport.x;
776 if (int_part < data->taps.h_taps) {
777 int int_adj = data->viewport.x >= (data->taps.h_taps - int_part) ?
778 (data->taps.h_taps - int_part) : data->viewport.x;
779 data->viewport.x -= int_adj;
780 data->viewport.width += int_adj;
781 int_part += int_adj;
782 } else if (int_part > data->taps.h_taps) {
783 data->viewport.x += int_part - data->taps.h_taps;
784 data->viewport.width -= int_part - data->taps.h_taps;
785 int_part = data->taps.h_taps;
786 }
787 data->inits.h.value &= 0xffffffff;
788 data->inits.h = dc_fixpt_add_int(data->inits.h, int_part);
789 }
790
791 if (data->viewport_c.x) {
792 int int_part;
793
794 data->inits.h_c = dc_fixpt_add(data->inits.h_c, dc_fixpt_mul_int(
795 data->ratios.horz_c, data->recout.x - recout_full->x));
796 int_part = dc_fixpt_floor(data->inits.h_c) - data->viewport_c.x;
797 if (int_part < data->taps.h_taps_c) {
798 int int_adj = data->viewport_c.x >= (data->taps.h_taps_c - int_part) ?
799 (data->taps.h_taps_c - int_part) : data->viewport_c.x;
800 data->viewport_c.x -= int_adj;
801 data->viewport_c.width += int_adj;
802 int_part += int_adj;
803 } else if (int_part > data->taps.h_taps_c) {
804 data->viewport_c.x += int_part - data->taps.h_taps_c;
805 data->viewport_c.width -= int_part - data->taps.h_taps_c;
806 int_part = data->taps.h_taps_c;
807 }
808 data->inits.h_c.value &= 0xffffffff;
809 data->inits.h_c = dc_fixpt_add_int(data->inits.h_c, int_part);
810 }
811 } else {
812 /* Adjust for non-0 viewport offset */
813 if (data->viewport.x) {
814 int int_part = dc_fixpt_floor(
815 dc_fixpt_sub(data->inits.h, data->ratios.horz));
816
817 int_part = int_part > 0 ? int_part : 0;
818 data->viewport.width += int_part < data->viewport.x ? int_part : data->viewport.x;
819 data->viewport.x -= int_part < data->viewport.x ? int_part : data->viewport.x;
820 }
821 if (data->viewport_c.x) {
822 int int_part = dc_fixpt_floor(
823 dc_fixpt_sub(data->inits.h_c, data->ratios.horz_c));
824
825 int_part = int_part > 0 ? int_part : 0;
826 data->viewport_c.width += int_part < data->viewport_c.x ? int_part : data->viewport_c.x;
827 data->viewport_c.x -= int_part < data->viewport_c.x ? int_part : data->viewport_c.x;
828 }
829
830 /* Adjust for viewport end clip-off */
831 if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
832 int int_part;
833 int end_offset = src.x + src.width
834 - data->viewport.x - data->viewport.width;
835
836 /*
837 * this is init if vp had no offset, keep in mind this is from the
838 * right side of vp due to scan direction
839 */
840 data->inits.h = dc_fixpt_add(data->inits.h, dc_fixpt_mul_int(
841 data->ratios.horz, data->recout.x - recout_full->x));
842 /*
843 * this is the difference between first pixel of viewport available to read
844 * and init position, taking into account scan direction
845 */
846 int_part = dc_fixpt_floor(data->inits.h) - end_offset;
847 if (int_part < data->taps.h_taps) {
848 int int_adj = end_offset >= (data->taps.h_taps - int_part) ?
849 (data->taps.h_taps - int_part) : end_offset;
850 data->viewport.width += int_adj;
851 int_part += int_adj;
852 } else if (int_part > data->taps.h_taps) {
853 data->viewport.width += int_part - data->taps.h_taps;
854 int_part = data->taps.h_taps;
855 }
856 data->inits.h.value &= 0xffffffff;
857 data->inits.h = dc_fixpt_add_int(data->inits.h, int_part);
858 }
859
860 if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
861 int int_part;
862 int end_offset = (src.x + src.width) / vpc_div
863 - data->viewport_c.x - data->viewport_c.width;
864
865 /*
866 * this is init if vp had no offset, keep in mind this is from the
867 * right side of vp due to scan direction
868 */
869 data->inits.h_c = dc_fixpt_add(data->inits.h_c, dc_fixpt_mul_int(
870 data->ratios.horz_c, data->recout.x - recout_full->x));
871 /*
872 * this is the difference between first pixel of viewport available to read
873 * and init position, taking into account scan direction
874 */
875 int_part = dc_fixpt_floor(data->inits.h_c) - end_offset;
876 if (int_part < data->taps.h_taps_c) {
877 int int_adj = end_offset >= (data->taps.h_taps_c - int_part) ?
878 (data->taps.h_taps_c - int_part) : end_offset;
879 data->viewport_c.width += int_adj;
880 int_part += int_adj;
881 } else if (int_part > data->taps.h_taps_c) {
882 data->viewport_c.width += int_part - data->taps.h_taps_c;
883 int_part = data->taps.h_taps_c;
884 }
885 data->inits.h_c.value &= 0xffffffff;
886 data->inits.h_c = dc_fixpt_add_int(data->inits.h_c, int_part);
887 }
888
889 }
890 if (!flip_vert_scan_dir) {
891 /* Adjust for viewport end clip-off */
892 if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
893 int vp_clip = src.y + src.height - data->viewport.height - data->viewport.y;
894 int int_part = dc_fixpt_floor(
895 dc_fixpt_sub(data->inits.v, data->ratios.vert));
896
897 int_part = int_part > 0 ? int_part : 0;
898 data->viewport.height += int_part < vp_clip ? int_part : vp_clip;
899 }
900 if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
901 int vp_clip = (src.y + src.height) / vpc_div -
902 data->viewport_c.height - data->viewport_c.y;
903 int int_part = dc_fixpt_floor(
904 dc_fixpt_sub(data->inits.v_c, data->ratios.vert_c));
905
906 int_part = int_part > 0 ? int_part : 0;
907 data->viewport_c.height += int_part < vp_clip ? int_part : vp_clip;
908 }
909
910 /* Adjust for non-0 viewport offset */
911 if (data->viewport.y) {
912 int int_part;
913
914 data->inits.v = dc_fixpt_add(data->inits.v, dc_fixpt_mul_int(
915 data->ratios.vert, data->recout.y - recout_full->y));
916 int_part = dc_fixpt_floor(data->inits.v) - data->viewport.y;
917 if (int_part < data->taps.v_taps) {
918 int int_adj = data->viewport.y >= (data->taps.v_taps - int_part) ?
919 (data->taps.v_taps - int_part) : data->viewport.y;
920 data->viewport.y -= int_adj;
921 data->viewport.height += int_adj;
922 int_part += int_adj;
923 } else if (int_part > data->taps.v_taps) {
924 data->viewport.y += int_part - data->taps.v_taps;
925 data->viewport.height -= int_part - data->taps.v_taps;
926 int_part = data->taps.v_taps;
927 }
928 data->inits.v.value &= 0xffffffff;
929 data->inits.v = dc_fixpt_add_int(data->inits.v, int_part);
930 }
931
932 if (data->viewport_c.y) {
933 int int_part;
934
935 data->inits.v_c = dc_fixpt_add(data->inits.v_c, dc_fixpt_mul_int(
936 data->ratios.vert_c, data->recout.y - recout_full->y));
937 int_part = dc_fixpt_floor(data->inits.v_c) - data->viewport_c.y;
938 if (int_part < data->taps.v_taps_c) {
939 int int_adj = data->viewport_c.y >= (data->taps.v_taps_c - int_part) ?
940 (data->taps.v_taps_c - int_part) : data->viewport_c.y;
941 data->viewport_c.y -= int_adj;
942 data->viewport_c.height += int_adj;
943 int_part += int_adj;
944 } else if (int_part > data->taps.v_taps_c) {
945 data->viewport_c.y += int_part - data->taps.v_taps_c;
946 data->viewport_c.height -= int_part - data->taps.v_taps_c;
947 int_part = data->taps.v_taps_c;
948 }
949 data->inits.v_c.value &= 0xffffffff;
950 data->inits.v_c = dc_fixpt_add_int(data->inits.v_c, int_part);
951 }
952 } else {
953 /* Adjust for non-0 viewport offset */
954 if (data->viewport.y) {
955 int int_part = dc_fixpt_floor(
956 dc_fixpt_sub(data->inits.v, data->ratios.vert));
957
958 int_part = int_part > 0 ? int_part : 0;
959 data->viewport.height += int_part < data->viewport.y ? int_part : data->viewport.y;
960 data->viewport.y -= int_part < data->viewport.y ? int_part : data->viewport.y;
961 }
962 if (data->viewport_c.y) {
963 int int_part = dc_fixpt_floor(
964 dc_fixpt_sub(data->inits.v_c, data->ratios.vert_c));
965
966 int_part = int_part > 0 ? int_part : 0;
967 data->viewport_c.height += int_part < data->viewport_c.y ? int_part : data->viewport_c.y;
968 data->viewport_c.y -= int_part < data->viewport_c.y ? int_part : data->viewport_c.y;
969 }
970
971 /* Adjust for viewport end clip-off */
972 if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
973 int int_part;
974 int end_offset = src.y + src.height
975 - data->viewport.y - data->viewport.height;
976
977 /*
978 * this is init if vp had no offset, keep in mind this is from the
979 * right side of vp due to scan direction
980 */
981 data->inits.v = dc_fixpt_add(data->inits.v, dc_fixpt_mul_int(
982 data->ratios.vert, data->recout.y - recout_full->y));
983 /*
984 * this is the difference between first pixel of viewport available to read
985 * and init position, taking into account scan direction
986 */
987 int_part = dc_fixpt_floor(data->inits.v) - end_offset;
988 if (int_part < data->taps.v_taps) {
989 int int_adj = end_offset >= (data->taps.v_taps - int_part) ?
990 (data->taps.v_taps - int_part) : end_offset;
991 data->viewport.height += int_adj;
992 int_part += int_adj;
993 } else if (int_part > data->taps.v_taps) {
994 data->viewport.height += int_part - data->taps.v_taps;
995 int_part = data->taps.v_taps;
996 }
997 data->inits.v.value &= 0xffffffff;
998 data->inits.v = dc_fixpt_add_int(data->inits.v, int_part);
999 }
1000
1001 if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
1002 int int_part;
1003 int end_offset = (src.y + src.height) / vpc_div
1004 - data->viewport_c.y - data->viewport_c.height;
1005
1006 /*
1007 * this is init if vp had no offset, keep in mind this is from the
1008 * right side of vp due to scan direction
1009 */
1010 data->inits.v_c = dc_fixpt_add(data->inits.v_c, dc_fixpt_mul_int(
1011 data->ratios.vert_c, data->recout.y - recout_full->y));
1012 /*
1013 * this is the difference between first pixel of viewport available to read
1014 * and init position, taking into account scan direction
1015 */
1016 int_part = dc_fixpt_floor(data->inits.v_c) - end_offset;
1017 if (int_part < data->taps.v_taps_c) {
1018 int int_adj = end_offset >= (data->taps.v_taps_c - int_part) ?
1019 (data->taps.v_taps_c - int_part) : end_offset;
1020 data->viewport_c.height += int_adj;
1021 int_part += int_adj;
1022 } else if (int_part > data->taps.v_taps_c) {
1023 data->viewport_c.height += int_part - data->taps.v_taps_c;
1024 int_part = data->taps.v_taps_c;
1025 }
1026 data->inits.v_c.value &= 0xffffffff;
1027 data->inits.v_c = dc_fixpt_add_int(data->inits.v_c, int_part);
1028 }
1029 }
1030
1031 /* Interlaced inits based on final vert inits */
1032 data->inits.v_bot = dc_fixpt_add(data->inits.v, data->ratios.vert);
1033 data->inits.v_c_bot = dc_fixpt_add(data->inits.v_c, data->ratios.vert_c);
1034
1035 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
1036 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
1037 rect_swap_helper(&data->viewport_c);
1038 rect_swap_helper(&data->viewport);
1039 }
1040 }
1041
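/*
 * Build all scaling parameters for a pipe: pixel format, scaling ratios,
 * viewport, recout, line buffer depth and tap counts. Returns false when
 * the viewport is too small (< 16 pixels in either dimension) or when no
 * valid tap configuration can be found even with a 24bpp line buffer.
 */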
1042 bool resource_build_scaling_params(struct pipe_ctx *pipe_ctx)
1043 {
1044 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1045 struct dc_crtc_timing *timing = &pipe_ctx->stream->timing;
1046 struct rect recout_full = { 0 };
1047 bool res = false;
1048 DC_LOGGER_INIT(pipe_ctx->stream->ctx->logger);
1049 /* Important: scaling ratio calculation requires pixel format,
1050 * lb depth calculation requires recout and taps require scaling ratios.
1051 * Inits require viewport, taps, ratios and recout of split pipe
1052 */
1053 pipe_ctx->plane_res.scl_data.format = convert_pixel_format_to_dalsurface(
1054 pipe_ctx->plane_state->format);
1055
1056 if (pipe_ctx->stream->timing.flags.INTERLACE)
1057 pipe_ctx->stream->dst.height *= 2;
1058
1059 calculate_scaling_ratios(pipe_ctx);
1060
1061 calculate_viewport(pipe_ctx);
1062
1063 if (pipe_ctx->plane_res.scl_data.viewport.height < 16 || pipe_ctx->plane_res.scl_data.viewport.width < 16)
1064 return false;
1065
1066 calculate_recout(pipe_ctx, &recout_full);
1067
1068 /**
1069 * Setting line buffer pixel depth to 24bpp yields banding
1070 * on certain displays, such as the Sharp 4k
1071 */
1072 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
1073
1074 pipe_ctx->plane_res.scl_data.recout.x += timing->h_border_left;
1075 pipe_ctx->plane_res.scl_data.recout.y += timing->v_border_top;
1076
1077 pipe_ctx->plane_res.scl_data.h_active = timing->h_addressable + timing->h_border_left + timing->h_border_right;
1078 pipe_ctx->plane_res.scl_data.v_active = timing->v_addressable + timing->v_border_top + timing->v_border_bottom;
1079 if (pipe_ctx->stream->timing.flags.INTERLACE)
1080 pipe_ctx->plane_res.scl_data.v_active *= 2;
1081
1082
1083 /* Taps calculations */
1084 if (pipe_ctx->plane_res.xfm != NULL)
1085 res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
1086 pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
1087
1088 if (pipe_ctx->plane_res.dpp != NULL)
1089 res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
1090 pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
1091 if (!res) {
1092 /* Try 24 bpp linebuffer */
1093 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
1094
1095 if (pipe_ctx->plane_res.xfm != NULL)
1096 res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
1097 pipe_ctx->plane_res.xfm,
1098 &pipe_ctx->plane_res.scl_data,
1099 &plane_state->scaling_quality);
1100
1101 if (pipe_ctx->plane_res.dpp != NULL)
1102 res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
1103 pipe_ctx->plane_res.dpp,
1104 &pipe_ctx->plane_res.scl_data,
1105 &plane_state->scaling_quality);
1106 }
1107
1108 if (res)
1109 /* May need to re-check lb size after this in some obscure scenario */
1110 calculate_inits_and_adj_vp(pipe_ctx, &recout_full);
1111
1112 DC_LOG_SCALER(
1113 "%s: Viewport:\nheight:%d width:%d x:%d "
1114 "y:%d\n dst_rect:\nheight:%d width:%d x:%d "
1115 "y:%d\n",
1116 __func__,
1117 pipe_ctx->plane_res.scl_data.viewport.height,
1118 pipe_ctx->plane_res.scl_data.viewport.width,
1119 pipe_ctx->plane_res.scl_data.viewport.x,
1120 pipe_ctx->plane_res.scl_data.viewport.y,
1121 plane_state->dst_rect.height,
1122 plane_state->dst_rect.width,
1123 plane_state->dst_rect.x,
1124 plane_state->dst_rect.y);
1125
1126 if (pipe_ctx->stream->timing.flags.INTERLACE)
1127 pipe_ctx->stream->dst.height /= 2;
1128
1129 return res;
1130 }
1131
1132
1133 enum dc_status resource_build_scaling_params_for_context(
1134 const struct dc *dc,
1135 struct dc_state *context)
1136 {
1137 int i;
1138
1139 for (i = 0; i < MAX_PIPES; i++) {
1140 if (context->res_ctx.pipe_ctx[i].plane_state != NULL &&
1141 context->res_ctx.pipe_ctx[i].stream != NULL)
1142 if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i]))
1143 return DC_FAIL_SCALING;
1144 }
1145
1146 return DC_OK;
1147 }
1148
1149 struct pipe_ctx *find_idle_secondary_pipe(
1150 struct resource_context *res_ctx,
1151 const struct resource_pool *pool)
1152 {
1153 int i;
1154 struct pipe_ctx *secondary_pipe = NULL;
1155
1156 /*
1157 * search backwards for the second pipe to keep pipe
1158 * assignment more consistent
1159 */
1160
1161 for (i = pool->pipe_count - 1; i >= 0; i--) {
1162 if (res_ctx->pipe_ctx[i].stream == NULL) {
1163 secondary_pipe = &res_ctx->pipe_ctx[i];
1164 secondary_pipe->pipe_idx = i;
1165 break;
1166 }
1167 }
1168
1169
1170 return secondary_pipe;
1171 }
1172
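/*
 * Pipes driving the same stream are chained through top_pipe/bottom_pipe
 * links (for pipe split and multi-plane): the head pipe is the one with no
 * top_pipe and owns the stream back end; the tail is reached by walking
 * bottom_pipe links.
 */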
1173 struct pipe_ctx *resource_get_head_pipe_for_stream(
1174 struct resource_context *res_ctx,
1175 struct dc_stream_state *stream)
1176 {
1177 int i;
1178 for (i = 0; i < MAX_PIPES; i++) {
1179 if (res_ctx->pipe_ctx[i].stream == stream &&
1180 !res_ctx->pipe_ctx[i].top_pipe) {
1181 return &res_ctx->pipe_ctx[i];
1182 break;
1183 }
1184 }
1185 return NULL;
1186 }
1187
1188 static struct pipe_ctx *resource_get_tail_pipe_for_stream(
1189 struct resource_context *res_ctx,
1190 struct dc_stream_state *stream)
1191 {
1192 struct pipe_ctx *head_pipe, *tail_pipe;
1193 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1194
1195 if (!head_pipe)
1196 return NULL;
1197
1198 tail_pipe = head_pipe->bottom_pipe;
1199
1200 while (tail_pipe) {
1201 head_pipe = tail_pipe;
1202 tail_pipe = tail_pipe->bottom_pipe;
1203 }
1204
1205 return head_pipe;
1206 }
1207
1208 /*
1209 * A free_pipe for a stream is defined here as a pipe
1210 * that has no surface attached yet
1211 */
1212 static struct pipe_ctx *acquire_free_pipe_for_stream(
1213 struct dc_state *context,
1214 const struct resource_pool *pool,
1215 struct dc_stream_state *stream)
1216 {
1217 int i;
1218 struct resource_context *res_ctx = &context->res_ctx;
1219
1220 struct pipe_ctx *head_pipe = NULL;
1221
1222 /* Find head pipe, which has the back end set up*/
1223
1224 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1225
1226 if (!head_pipe) {
1227 ASSERT(0);
1228 return NULL;
1229 }
1230
1231 if (!head_pipe->plane_state)
1232 return head_pipe;
1233
1234 /* Re-use pipe already acquired for this stream if available*/
1235 for (i = pool->pipe_count - 1; i >= 0; i--) {
1236 if (res_ctx->pipe_ctx[i].stream == stream &&
1237 !res_ctx->pipe_ctx[i].plane_state) {
1238 return &res_ctx->pipe_ctx[i];
1239 }
1240 }
1241
1242 /*
1243 * At this point we have no re-usable pipe for this stream and we need
1244 * to acquire an idle one to satisfy the request
1245 */
1246
1247 if (!pool->funcs->acquire_idle_pipe_for_layer)
1248 return NULL;
1249
1250 return pool->funcs->acquire_idle_pipe_for_layer(context, pool, stream);
1251
1252 }
1253
1254 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1255 static int acquire_first_split_pipe(
1256 struct resource_context *res_ctx,
1257 const struct resource_pool *pool,
1258 struct dc_stream_state *stream)
1259 {
1260 int i;
1261
1262 for (i = 0; i < pool->pipe_count; i++) {
1263 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1264
1265 if (pipe_ctx->top_pipe &&
1266 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) {
1267 pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1268 if (pipe_ctx->bottom_pipe)
1269 pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1270
1271 memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1272 pipe_ctx->stream_res.tg = pool->timing_generators[i];
1273 pipe_ctx->plane_res.hubp = pool->hubps[i];
1274 pipe_ctx->plane_res.ipp = pool->ipps[i];
1275 pipe_ctx->plane_res.dpp = pool->dpps[i];
1276 pipe_ctx->stream_res.opp = pool->opps[i];
1277 pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1278 pipe_ctx->pipe_idx = i;
1279
1280 pipe_ctx->stream = stream;
1281 return i;
1282 }
1283 }
1284 return -1;
1285 }
1286 #endif
1287
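/*
 * Attach a plane to a stream in the given state: look up the stream
 * status, acquire a free pipe (re-using a pipe already assigned to the
 * stream, or splitting an existing one on DCN), retain the plane, and for
 * non-head pipes inherit the stream resources from the current tail pipe
 * and link the new pipe at the bottom of the chain.
 */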
1288 bool dc_add_plane_to_context(
1289 const struct dc *dc,
1290 struct dc_stream_state *stream,
1291 struct dc_plane_state *plane_state,
1292 struct dc_state *context)
1293 {
1294 int i;
1295 struct resource_pool *pool = dc->res_pool;
1296 struct pipe_ctx *head_pipe, *tail_pipe, *free_pipe;
1297 struct dc_stream_status *stream_status = NULL;
1298
1299 for (i = 0; i < context->stream_count; i++)
1300 if (context->streams[i] == stream) {
1301 stream_status = &context->stream_status[i];
1302 break;
1303 }
1304 if (stream_status == NULL) {
1305 dm_error("Existing stream not found; failed to attach surface!\n");
1306 return false;
1307 }
1308
1309
1310 if (stream_status->plane_count == MAX_SURFACE_NUM) {
1311 dm_error("Surface: can not attach plane_state %p! Maximum is: %d\n",
1312 plane_state, MAX_SURFACE_NUM);
1313 return false;
1314 }
1315
1316 head_pipe = resource_get_head_pipe_for_stream(&context->res_ctx, stream);
1317
1318 if (!head_pipe) {
1319 dm_error("Head pipe not found for stream_state %p !\n", stream);
1320 return false;
1321 }
1322
1323 free_pipe = acquire_free_pipe_for_stream(context, pool, stream);
1324
1325 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1326 if (!free_pipe) {
1327 int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1328 if (pipe_idx >= 0)
1329 free_pipe = &context->res_ctx.pipe_ctx[pipe_idx];
1330 }
1331 #endif
1332 if (!free_pipe)
1333 return false;
1334
1335 /* retain new surfaces */
1336 dc_plane_state_retain(plane_state);
1337 free_pipe->plane_state = plane_state;
1338
1339 if (head_pipe != free_pipe) {
1340
1341 tail_pipe = resource_get_tail_pipe_for_stream(&context->res_ctx, stream);
1342 ASSERT(tail_pipe);
1343
1344 free_pipe->stream_res.tg = tail_pipe->stream_res.tg;
1345 free_pipe->stream_res.abm = tail_pipe->stream_res.abm;
1346 free_pipe->stream_res.opp = tail_pipe->stream_res.opp;
1347 free_pipe->stream_res.stream_enc = tail_pipe->stream_res.stream_enc;
1348 free_pipe->stream_res.audio = tail_pipe->stream_res.audio;
1349 free_pipe->clock_source = tail_pipe->clock_source;
1350 free_pipe->top_pipe = tail_pipe;
1351 tail_pipe->bottom_pipe = free_pipe;
1352 }
1353
1354 /* assign new surfaces*/
1355 stream_status->plane_states[stream_status->plane_count] = plane_state;
1356
1357 stream_status->plane_count++;
1358
1359 return true;
1360 }
1361
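/*
 * Detach a plane from a stream: unlink every pipe that references the
 * plane (head pipes keep their stream back end, non-head pipes are zeroed),
 * drop the plane reference and compact the stream_status plane_states
 * array.
 */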
1362 bool dc_remove_plane_from_context(
1363 const struct dc *dc,
1364 struct dc_stream_state *stream,
1365 struct dc_plane_state *plane_state,
1366 struct dc_state *context)
1367 {
1368 int i;
1369 struct dc_stream_status *stream_status = NULL;
1370 struct resource_pool *pool = dc->res_pool;
1371
1372 for (i = 0; i < context->stream_count; i++)
1373 if (context->streams[i] == stream) {
1374 stream_status = &context->stream_status[i];
1375 break;
1376 }
1377
1378 if (stream_status == NULL) {
1379 dm_error("Existing stream not found; failed to remove plane.\n");
1380 return false;
1381 }
1382
1383 /* release pipe for plane*/
1384 for (i = pool->pipe_count - 1; i >= 0; i--) {
1385 struct pipe_ctx *pipe_ctx;
1386
1387 if (context->res_ctx.pipe_ctx[i].plane_state == plane_state) {
1388 pipe_ctx = &context->res_ctx.pipe_ctx[i];
1389
1390 if (pipe_ctx->top_pipe)
1391 pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1392
1393 /* Second condition avoids setting the tail pipe's top_pipe to NULL,
1394 * which would make it look like a head pipe in subsequent
1395 * deletes
1396 */
1397 if (pipe_ctx->bottom_pipe && pipe_ctx->top_pipe)
1398 pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1399
1400 /*
1401 * For a head pipe, detach the surface from the pipe; for a tail
1402 * pipe, just zero it out
1403 */
1404 if (!pipe_ctx->top_pipe) {
1405 pipe_ctx->plane_state = NULL;
1406 pipe_ctx->bottom_pipe = NULL;
1407 } else {
1408 memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1409 }
1410 }
1411 }
1412
1413
1414 for (i = 0; i < stream_status->plane_count; i++) {
1415 if (stream_status->plane_states[i] == plane_state) {
1416
1417 dc_plane_state_release(stream_status->plane_states[i]);
1418 break;
1419 }
1420 }
1421
1422 if (i == stream_status->plane_count) {
1423 dm_error("Existing plane_state not found; failed to detach it!\n");
1424 return false;
1425 }
1426
1427 stream_status->plane_count--;
1428
1429 /* Start at the plane we've just released, and move all the planes one index forward to "trim" the array */
1430 for (; i < stream_status->plane_count; i++)
1431 stream_status->plane_states[i] = stream_status->plane_states[i + 1];
1432
1433 stream_status->plane_states[stream_status->plane_count] = NULL;
1434
1435 return true;
1436 }
1437
1438 bool dc_rem_all_planes_for_stream(
1439 const struct dc *dc,
1440 struct dc_stream_state *stream,
1441 struct dc_state *context)
1442 {
1443 int i, old_plane_count;
1444 struct dc_stream_status *stream_status = NULL;
1445 struct dc_plane_state *del_planes[MAX_SURFACE_NUM] = { 0 };
1446
1447 for (i = 0; i < context->stream_count; i++)
1448 if (context->streams[i] == stream) {
1449 stream_status = &context->stream_status[i];
1450 break;
1451 }
1452
1453 if (stream_status == NULL) {
1454 dm_error("Existing stream %p not found!\n", stream);
1455 return false;
1456 }
1457
1458 old_plane_count = stream_status->plane_count;
1459
1460 for (i = 0; i < old_plane_count; i++)
1461 del_planes[i] = stream_status->plane_states[i];
1462
1463 for (i = 0; i < old_plane_count; i++)
1464 if (!dc_remove_plane_from_context(dc, stream, del_planes[i], context))
1465 return false;
1466
1467 return true;
1468 }
1469
1470 static bool add_all_planes_for_stream(
1471 const struct dc *dc,
1472 struct dc_stream_state *stream,
1473 const struct dc_validation_set set[],
1474 int set_count,
1475 struct dc_state *context)
1476 {
1477 int i, j;
1478
1479 for (i = 0; i < set_count; i++)
1480 if (set[i].stream == stream)
1481 break;
1482
1483 if (i == set_count) {
1484 dm_error("Stream %p not found in set!\n", stream);
1485 return false;
1486 }
1487
1488 for (j = 0; j < set[i].plane_count; j++)
1489 if (!dc_add_plane_to_context(dc, stream, set[i].plane_states[j], context))
1490 return false;
1491
1492 return true;
1493 }
1494
1495 bool dc_add_all_planes_for_stream(
1496 const struct dc *dc,
1497 struct dc_stream_state *stream,
1498 struct dc_plane_state * const *plane_states,
1499 int plane_count,
1500 struct dc_state *context)
1501 {
1502 struct dc_validation_set set;
1503 int i;
1504
1505 set.stream = stream;
1506 set.plane_count = plane_count;
1507
1508 for (i = 0; i < plane_count; i++)
1509 set.plane_states[i] = plane_states[i];
1510
1511 return add_all_planes_for_stream(dc, stream, &set, 1, context);
1512 }
1513
1514
1515 static bool is_hdr_static_meta_changed(struct dc_stream_state *cur_stream,
1516 struct dc_stream_state *new_stream)
1517 {
1518 if (cur_stream == NULL)
1519 return true;
1520
1521 if (memcmp(&cur_stream->hdr_static_metadata,
1522 &new_stream->hdr_static_metadata,
1523 sizeof(struct dc_info_packet)) != 0)
1524 return true;
1525
1526 return false;
1527 }
1528
1529 static bool is_timing_changed(struct dc_stream_state *cur_stream,
1530 struct dc_stream_state *new_stream)
1531 {
1532 if (cur_stream == NULL)
1533 return true;
1534
1535 /* If the sink pointer changed, this is a hotplug and we should do a
1536 * full HW setup.
1537 */
1538 if (cur_stream->sink != new_stream->sink)
1539 return true;
1540
1541 /* If output color space is changed, need to reprogram info frames */
1542 if (cur_stream->output_color_space != new_stream->output_color_space)
1543 return true;
1544
1545 return memcmp(
1546 &cur_stream->timing,
1547 &new_stream->timing,
1548 sizeof(struct dc_crtc_timing)) != 0;
1549 }
1550
1551 static bool are_stream_backends_same(
1552 struct dc_stream_state *stream_a, struct dc_stream_state *stream_b)
1553 {
1554 if (stream_a == stream_b)
1555 return true;
1556
1557 if (stream_a == NULL || stream_b == NULL)
1558 return false;
1559
1560 if (is_timing_changed(stream_a, stream_b))
1561 return false;
1562
1563 if (is_hdr_static_meta_changed(stream_a, stream_b))
1564 return false;
1565
1566 return true;
1567 }
1568
1569 bool dc_is_stream_unchanged(
1570 struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1571 {
1572
1573 if (!are_stream_backends_same(old_stream, stream))
1574 return false;
1575
1576 return true;
1577 }
1578
1579 bool dc_is_stream_scaling_unchanged(
1580 struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1581 {
1582 if (old_stream == stream)
1583 return true;
1584
1585 if (old_stream == NULL || stream == NULL)
1586 return false;
1587
1588 if (memcmp(&old_stream->src,
1589 &stream->src,
1590 sizeof(struct rect)) != 0)
1591 return false;
1592
1593 if (memcmp(&old_stream->dst,
1594 &stream->dst,
1595 sizeof(struct rect)) != 0)
1596 return false;
1597
1598 return true;
1599 }
1600
1601 static void update_stream_engine_usage(
1602 struct resource_context *res_ctx,
1603 const struct resource_pool *pool,
1604 struct stream_encoder *stream_enc,
1605 bool acquired)
1606 {
1607 int i;
1608
1609 for (i = 0; i < pool->stream_enc_count; i++) {
1610 if (pool->stream_enc[i] == stream_enc)
1611 res_ctx->is_stream_enc_acquired[i] = acquired;
1612 }
1613 }
1614
1615 /* TODO: release audio object */
1616 void update_audio_usage(
1617 struct resource_context *res_ctx,
1618 const struct resource_pool *pool,
1619 struct audio *audio,
1620 bool acquired)
1621 {
1622 int i;
1623 for (i = 0; i < pool->audio_count; i++) {
1624 if (pool->audios[i] == audio)
1625 res_ctx->is_audio_acquired[i] = acquired;
1626 }
1627 }
1628
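/*
 * Claim the first pipe in the resource context that has no stream attached
 * and bind the fixed per-index resources (timing generator, MI/HUBP, IPP,
 * transform/DPP, OPP) to it. Returns the pipe index, or -1 if every pipe
 * is already in use.
 */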
1629 static int acquire_first_free_pipe(
1630 struct resource_context *res_ctx,
1631 const struct resource_pool *pool,
1632 struct dc_stream_state *stream)
1633 {
1634 int i;
1635
1636 for (i = 0; i < pool->pipe_count; i++) {
1637 if (!res_ctx->pipe_ctx[i].stream) {
1638 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1639
1640 pipe_ctx->stream_res.tg = pool->timing_generators[i];
1641 pipe_ctx->plane_res.mi = pool->mis[i];
1642 pipe_ctx->plane_res.hubp = pool->hubps[i];
1643 pipe_ctx->plane_res.ipp = pool->ipps[i];
1644 pipe_ctx->plane_res.xfm = pool->transforms[i];
1645 pipe_ctx->plane_res.dpp = pool->dpps[i];
1646 pipe_ctx->stream_res.opp = pool->opps[i];
1647 if (pool->dpps[i])
1648 pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1649 pipe_ctx->pipe_idx = i;
1650
1651
1652 pipe_ctx->stream = stream;
1653 return i;
1654 }
1655 }
1656 return -1;
1657 }
1658
1659 static struct stream_encoder *find_first_free_match_stream_enc_for_link(
1660 struct resource_context *res_ctx,
1661 const struct resource_pool *pool,
1662 struct dc_stream_state *stream)
1663 {
1664 int i;
1665 int j = -1;
1666 struct dc_link *link = stream->sink->link;
1667
1668 for (i = 0; i < pool->stream_enc_count; i++) {
1669 if (!res_ctx->is_stream_enc_acquired[i] &&
1670 pool->stream_enc[i]) {
1671 /* Store first available for MST second display
1672 * in daisy chain use case */
1673 j = i;
1674 if (pool->stream_enc[i]->id ==
1675 link->link_enc->preferred_engine)
1676 return pool->stream_enc[i];
1677 }
1678 }
1679
1680 	/*
1681 	 * The preferred stream encoder may already be acquired:
1682 	 * 1) for the second MST display in a daisy chain, the preferred
1683 	 *    engine was taken by the first display;
1684 	 * 2) the preferred engine was taken by an MST configuration on another link.
1685 	 *
1686 	 * If the signal is DP and the preferred engine was not found, return
1687 	 * the last available encoder.
1688 	 *
1689 	 * TODO - This is just a patch up and a generic solution is
1690 	 * required for non DP connectors.
1691 	 */
1692
1693 if (j >= 0 && dc_is_dp_signal(stream->signal))
1694 return pool->stream_enc[j];
1695
1696 return NULL;
1697 }
1698
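/*
 * Pick an audio endpoint for the stream: prefer the free endpoint whose
 * index matches the acquired stream encoder's engine id; otherwise fall
 * back to the first unacquired endpoint.
 */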
1699 static struct audio *find_first_free_audio(
1700 struct resource_context *res_ctx,
1701 const struct resource_pool *pool,
1702 enum engine_id id)
1703 {
1704 int i;
1705 for (i = 0; i < pool->audio_count; i++) {
1706 		if (!res_ctx->is_audio_acquired[i] && res_ctx->is_stream_enc_acquired[i]) {
1707 			/* enough audio endpoints exist; find the one matching the engine id */
1708 if (id != i)
1709 continue;
1710
1711 return pool->audios[i];
1712 }
1713 }
1714 	/* no matching instance found; fall back to first come, first served */
1715 for (i = 0; i < pool->audio_count; i++) {
1716 if (res_ctx->is_audio_acquired[i] == false) {
1717 return pool->audios[i];
1718 }
1719 }
1720 	return NULL;
1721 }
1722
1723 bool resource_is_stream_unchanged(
1724 struct dc_state *old_context, struct dc_stream_state *stream)
1725 {
1726 int i;
1727
1728 for (i = 0; i < old_context->stream_count; i++) {
1729 struct dc_stream_state *old_stream = old_context->streams[i];
1730
1731 if (are_stream_backends_same(old_stream, stream))
1732 return true;
1733 }
1734
1735 return false;
1736 }
1737
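/*
 * Append a stream to the new context (bounded by the number of timing
 * generators), take a reference on it, and let the ASIC-specific resource
 * pool hook finish the per-stream setup.
 */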
1738 enum dc_status dc_add_stream_to_ctx(
1739 struct dc *dc,
1740 struct dc_state *new_ctx,
1741 struct dc_stream_state *stream)
1742 {
1743 struct dc_context *dc_ctx = dc->ctx;
1744 enum dc_status res;
1745
1746 if (new_ctx->stream_count >= dc->res_pool->timing_generator_count) {
1747 DC_ERROR("Max streams reached, can't add stream %p !\n", stream);
1748 return DC_ERROR_UNEXPECTED;
1749 }
1750
1751 new_ctx->streams[new_ctx->stream_count] = stream;
1752 dc_stream_retain(stream);
1753 new_ctx->stream_count++;
1754
1755 res = dc->res_pool->funcs->add_stream_to_ctx(dc, new_ctx, stream);
1756 if (res != DC_OK)
1757 DC_ERROR("Adding stream %p to context failed with err %d!\n", stream, res);
1758
1759 return res;
1760 }
1761
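/*
 * Reverse of dc_add_stream_to_ctx(): release the primary (top) pipe that
 * carries the stream, give back its stream encoder, audio and clock source,
 * drop the stream reference, and compact the context's stream arrays.
 */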
1762 enum dc_status dc_remove_stream_from_ctx(
1763 struct dc *dc,
1764 struct dc_state *new_ctx,
1765 struct dc_stream_state *stream)
1766 {
1767 int i;
1768 struct dc_context *dc_ctx = dc->ctx;
1769 struct pipe_ctx *del_pipe = NULL;
1770
1771 /* Release primary pipe */
1772 for (i = 0; i < MAX_PIPES; i++) {
1773 if (new_ctx->res_ctx.pipe_ctx[i].stream == stream &&
1774 !new_ctx->res_ctx.pipe_ctx[i].top_pipe) {
1775 del_pipe = &new_ctx->res_ctx.pipe_ctx[i];
1776
1777 ASSERT(del_pipe->stream_res.stream_enc);
1778 update_stream_engine_usage(
1779 &new_ctx->res_ctx,
1780 dc->res_pool,
1781 del_pipe->stream_res.stream_enc,
1782 false);
1783
1784 if (del_pipe->stream_res.audio)
1785 update_audio_usage(
1786 &new_ctx->res_ctx,
1787 dc->res_pool,
1788 del_pipe->stream_res.audio,
1789 false);
1790
1791 resource_unreference_clock_source(&new_ctx->res_ctx,
1792 dc->res_pool,
1793 del_pipe->clock_source);
1794
1795 if (dc->res_pool->funcs->remove_stream_from_ctx)
1796 dc->res_pool->funcs->remove_stream_from_ctx(dc, new_ctx, stream);
1797
1798 memset(del_pipe, 0, sizeof(*del_pipe));
1799
1800 break;
1801 }
1802 }
1803
1804 if (!del_pipe) {
1805 DC_ERROR("Pipe not found for stream %p !\n", stream);
1806 return DC_ERROR_UNEXPECTED;
1807 }
1808
1809 for (i = 0; i < new_ctx->stream_count; i++)
1810 if (new_ctx->streams[i] == stream)
1811 break;
1812
1813 if (new_ctx->streams[i] != stream) {
1814 DC_ERROR("Context doesn't have stream %p !\n", stream);
1815 return DC_ERROR_UNEXPECTED;
1816 }
1817
1818 dc_stream_release(new_ctx->streams[i]);
1819 new_ctx->stream_count--;
1820
1821 /* Trim back arrays */
1822 for (; i < new_ctx->stream_count; i++) {
1823 new_ctx->streams[i] = new_ctx->streams[i + 1];
1824 new_ctx->stream_status[i] = new_ctx->stream_status[i + 1];
1825 }
1826
1827 new_ctx->streams[new_ctx->stream_count] = NULL;
1828 memset(
1829 &new_ctx->stream_status[new_ctx->stream_count],
1830 0,
1831 sizeof(new_ctx->stream_status[0]));
1832
1833 return DC_OK;
1834 }
1835
1836 static struct dc_stream_state *find_pll_sharable_stream(
1837 struct dc_stream_state *stream_needs_pll,
1838 struct dc_state *context)
1839 {
1840 int i;
1841
1842 for (i = 0; i < context->stream_count; i++) {
1843 struct dc_stream_state *stream_has_pll = context->streams[i];
1844
1845 		/* We are looking for a non-DP, non-virtual stream */
1846 if (resource_are_streams_timing_synchronizable(
1847 stream_needs_pll, stream_has_pll)
1848 && !dc_is_dp_signal(stream_has_pll->signal)
1849 && stream_has_pll->sink->link->connector_signal
1850 != SIGNAL_TYPE_VIRTUAL)
1851 return stream_has_pll;
1852
1853 }
1854
1855 return NULL;
1856 }
1857
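/*
 * Normalize the pixel clock to a 24 bpp equivalent for HDMI: halve it for
 * YCbCr 4:2:0, leave 4:2:2 untouched, and scale RGB/4:4:4 by the ratio of
 * bits per pixel to 24. For example (illustrative numbers), a 10 bpc
 * 297000 kHz timing normalizes to 297000 * 30 / 24 = 371250 kHz.
 */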
1858 static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
1859 {
1860 uint32_t pix_clk = timing->pix_clk_khz;
1861 uint32_t normalized_pix_clk = pix_clk;
1862
1863 if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
1864 pix_clk /= 2;
1865 if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
1866 switch (timing->display_color_depth) {
1867 case COLOR_DEPTH_888:
1868 normalized_pix_clk = pix_clk;
1869 break;
1870 case COLOR_DEPTH_101010:
1871 normalized_pix_clk = (pix_clk * 30) / 24;
1872 break;
1873 case COLOR_DEPTH_121212:
1874 normalized_pix_clk = (pix_clk * 36) / 24;
1875 break;
1876 case COLOR_DEPTH_161616:
1877 normalized_pix_clk = (pix_clk * 48) / 24;
1878 break;
1879 default:
1880 ASSERT(0);
1881 break;
1882 }
1883 }
1884 return normalized_pix_clk;
1885 }
1886
1887 static void calculate_phy_pix_clks(struct dc_stream_state *stream)
1888 {
1889 /* update actual pixel clock on all streams */
1890 if (dc_is_hdmi_signal(stream->signal))
1891 stream->phy_pix_clk = get_norm_pix_clk(
1892 &stream->timing);
1893 else
1894 stream->phy_pix_clk =
1895 stream->timing.pix_clk_khz;
1896 }
1897
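/*
 * Acquire HW blocks for a stream in the new context: a free pipe (or a
 * split pipe on DCN), a stream encoder matched to the link, optionally an
 * audio endpoint and ABM (for embedded panels), then record the OTG and
 * encoder instances in the stream status.
 */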
1898 enum dc_status resource_map_pool_resources(
1899 const struct dc *dc,
1900 struct dc_state *context,
1901 struct dc_stream_state *stream)
1902 {
1903 const struct resource_pool *pool = dc->res_pool;
1904 int i;
1905 struct dc_context *dc_ctx = dc->ctx;
1906 struct pipe_ctx *pipe_ctx = NULL;
1907 int pipe_idx = -1;
1908
1909 /* TODO Check if this is needed */
1910 /*if (!resource_is_stream_unchanged(old_context, stream)) {
1911 if (stream != NULL && old_context->streams[i] != NULL) {
1912 stream->bit_depth_params =
1913 old_context->streams[i]->bit_depth_params;
1914 stream->clamping = old_context->streams[i]->clamping;
1915 continue;
1916 }
1917 }
1918 */
1919
1920 /* acquire new resources */
1921 pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream);
1922
1923 #ifdef CONFIG_DRM_AMD_DC_DCN1_0
1924 if (pipe_idx < 0)
1925 pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1926 #endif
1927
1928 if (pipe_idx < 0 || context->res_ctx.pipe_ctx[pipe_idx].stream_res.tg == NULL)
1929 return DC_NO_CONTROLLER_RESOURCE;
1930
1931 pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
1932
1933 pipe_ctx->stream_res.stream_enc =
1934 find_first_free_match_stream_enc_for_link(
1935 &context->res_ctx, pool, stream);
1936
1937 if (!pipe_ctx->stream_res.stream_enc)
1938 return DC_NO_STREAM_ENG_RESOURCE;
1939
1940 update_stream_engine_usage(
1941 &context->res_ctx, pool,
1942 pipe_ctx->stream_res.stream_enc,
1943 true);
1944
1945 /* TODO: Add check if ASIC support and EDID audio */
1946 if (!stream->sink->converter_disable_audio &&
1947 dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
1948 stream->audio_info.mode_count) {
1949 pipe_ctx->stream_res.audio = find_first_free_audio(
1950 &context->res_ctx, pool, pipe_ctx->stream_res.stream_enc->id);
1951
1952 		/*
1953 		 * Audio endpoints are assigned on a first come,
1954 		 * first served basis. Some ASICs have fewer audio
1955 		 * resources than pipes.
1956 		 */
1957 if (pipe_ctx->stream_res.audio)
1958 update_audio_usage(&context->res_ctx, pool,
1959 pipe_ctx->stream_res.audio, true);
1960 }
1961
1962 /* Add ABM to the resource if on EDP */
1963 if (pipe_ctx->stream && dc_is_embedded_signal(pipe_ctx->stream->signal))
1964 pipe_ctx->stream_res.abm = pool->abm;
1965
1966 for (i = 0; i < context->stream_count; i++)
1967 if (context->streams[i] == stream) {
1968 context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst;
1969 context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->id;
1970 return DC_OK;
1971 }
1972
1973 DC_ERROR("Stream %p not found in new ctx!\n", stream);
1974 return DC_ERROR_UNEXPECTED;
1975 }
1976
1977 void dc_resource_state_copy_construct_current(
1978 const struct dc *dc,
1979 struct dc_state *dst_ctx)
1980 {
1981 dc_resource_state_copy_construct(dc->current_state, dst_ctx);
1982 }
1983
1984
1985 void dc_resource_state_construct(
1986 const struct dc *dc,
1987 struct dc_state *dst_ctx)
1988 {
1989 dst_ctx->dis_clk = dc->res_pool->dccg;
1990 }
1991
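/*
 * Global validation of a new state: run the ASIC-specific validate_global
 * hook, switch DP streams that have no PLL-sharable non-DP stream onto the
 * dedicated DP clock source, rebuild scaling parameters, and finally check
 * bandwidth.
 */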
1992 enum dc_status dc_validate_global_state(
1993 struct dc *dc,
1994 struct dc_state *new_ctx)
1995 {
1996 enum dc_status result = DC_ERROR_UNEXPECTED;
1997 int i, j;
1998
1999 if (!new_ctx)
2000 return DC_ERROR_UNEXPECTED;
2001
2002 if (dc->res_pool->funcs->validate_global) {
2003 result = dc->res_pool->funcs->validate_global(dc, new_ctx);
2004 if (result != DC_OK)
2005 return result;
2006 }
2007
2008 for (i = 0; i < new_ctx->stream_count; i++) {
2009 struct dc_stream_state *stream = new_ctx->streams[i];
2010
2011 for (j = 0; j < dc->res_pool->pipe_count; j++) {
2012 struct pipe_ctx *pipe_ctx = &new_ctx->res_ctx.pipe_ctx[j];
2013
2014 if (pipe_ctx->stream != stream)
2015 continue;
2016
2017 /* Switch to dp clock source only if there is
2018 * no non dp stream that shares the same timing
2019 * with the dp stream.
2020 */
2021 if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
2022 !find_pll_sharable_stream(stream, new_ctx)) {
2023
2024 resource_unreference_clock_source(
2025 &new_ctx->res_ctx,
2026 dc->res_pool,
2027 pipe_ctx->clock_source);
2028
2029 pipe_ctx->clock_source = dc->res_pool->dp_clock_source;
2030 resource_reference_clock_source(
2031 &new_ctx->res_ctx,
2032 dc->res_pool,
2033 pipe_ctx->clock_source);
2034 }
2035 }
2036 }
2037
2038 result = resource_build_scaling_params_for_context(dc, new_ctx);
2039
2040 if (result == DC_OK)
2041 if (!dc->res_pool->funcs->validate_bandwidth(dc, new_ctx))
2042 result = DC_FAIL_BANDWIDTH_VALIDATE;
2043
2044 return result;
2045 }
2046
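/*
 * Info packet checksums in this file are one-byte complements: the checksum
 * byte is chosen so that the sum of all header and payload bytes is 0
 * modulo 256.
 */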
2047 static void patch_gamut_packet_checksum(
2048 struct dc_info_packet *gamut_packet)
2049 {
2050 /* For gamut we recalc checksum */
2051 if (gamut_packet->valid) {
2052 uint8_t chk_sum = 0;
2053 uint8_t *ptr;
2054 uint8_t i;
2055
2056 /*start of the Gamut data. */
2057 ptr = &gamut_packet->sb[3];
2058
2059 for (i = 0; i <= gamut_packet->sb[1]; i++)
2060 chk_sum += ptr[i];
2061
2062 gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
2063 }
2064 }
2065
2066 static void set_avi_info_frame(
2067 struct dc_info_packet *info_packet,
2068 struct pipe_ctx *pipe_ctx)
2069 {
2070 struct dc_stream_state *stream = pipe_ctx->stream;
2071 enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
2072 uint32_t pixel_encoding = 0;
2073 enum scanning_type scan_type = SCANNING_TYPE_NODATA;
2074 enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
2075 bool itc = false;
2076 uint8_t itc_value = 0;
2077 uint8_t cn0_cn1 = 0;
2078 unsigned int cn0_cn1_value = 0;
2079 uint8_t *check_sum = NULL;
2080 uint8_t byte_index = 0;
2081 union hdmi_info_packet hdmi_info;
2082 union display_content_support support = {0};
2083 unsigned int vic = pipe_ctx->stream->timing.vic;
2084 enum dc_timing_3d_format format;
2085
2086 memset(&hdmi_info, 0, sizeof(union hdmi_info_packet));
2087
2088 color_space = pipe_ctx->stream->output_color_space;
2089 if (color_space == COLOR_SPACE_UNKNOWN)
2090 color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ?
2091 			COLOR_SPACE_SRGB : COLOR_SPACE_YCBCR709;
2092
2093 /* Initialize header */
2094 hdmi_info.bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
2095 /* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
2096 * not be used in HDMI 2.0 (Section 10.1) */
2097 hdmi_info.bits.header.version = 2;
2098 hdmi_info.bits.header.length = HDMI_AVI_INFOFRAME_SIZE;
2099
2100 /*
2101 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
2102 * according to HDMI 2.0 spec (Section 10.1)
2103 */
2104
2105 switch (stream->timing.pixel_encoding) {
2106 case PIXEL_ENCODING_YCBCR422:
2107 pixel_encoding = 1;
2108 break;
2109
2110 case PIXEL_ENCODING_YCBCR444:
2111 pixel_encoding = 2;
2112 break;
2113 case PIXEL_ENCODING_YCBCR420:
2114 pixel_encoding = 3;
2115 break;
2116
2117 case PIXEL_ENCODING_RGB:
2118 default:
2119 pixel_encoding = 0;
2120 }
2121
2122 /* Y0_Y1_Y2 : The pixel encoding */
2123 /* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
2124 hdmi_info.bits.Y0_Y1_Y2 = pixel_encoding;
2125
2126 /* A0 = 1 Active Format Information valid */
2127 hdmi_info.bits.A0 = ACTIVE_FORMAT_VALID;
2128
2129 /* B0, B1 = 3; Bar info data is valid */
2130 hdmi_info.bits.B0_B1 = BAR_INFO_BOTH_VALID;
2131
2132 hdmi_info.bits.SC0_SC1 = PICTURE_SCALING_UNIFORM;
2133
2134 /* S0, S1 : Underscan / Overscan */
2135 /* TODO: un-hardcode scan type */
2136 scan_type = SCANNING_TYPE_UNDERSCAN;
2137 hdmi_info.bits.S0_S1 = scan_type;
2138
2139 /* C0, C1 : Colorimetry */
2140 if (color_space == COLOR_SPACE_YCBCR709 ||
2141 color_space == COLOR_SPACE_YCBCR709_LIMITED)
2142 hdmi_info.bits.C0_C1 = COLORIMETRY_ITU709;
2143 else if (color_space == COLOR_SPACE_YCBCR601 ||
2144 color_space == COLOR_SPACE_YCBCR601_LIMITED)
2145 hdmi_info.bits.C0_C1 = COLORIMETRY_ITU601;
2146 else {
2147 hdmi_info.bits.C0_C1 = COLORIMETRY_NO_DATA;
2148 }
2149 if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
2150 color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
2151 color_space == COLOR_SPACE_2020_YCBCR) {
2152 hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR;
2153 hdmi_info.bits.C0_C1 = COLORIMETRY_EXTENDED;
2154 } else if (color_space == COLOR_SPACE_ADOBERGB) {
2155 hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB;
2156 hdmi_info.bits.C0_C1 = COLORIMETRY_EXTENDED;
2157 }
2158
2159 /* TODO: un-hardcode aspect ratio */
2160 aspect = stream->timing.aspect_ratio;
2161
2162 switch (aspect) {
2163 case ASPECT_RATIO_4_3:
2164 case ASPECT_RATIO_16_9:
2165 hdmi_info.bits.M0_M1 = aspect;
2166 break;
2167
2168 case ASPECT_RATIO_NO_DATA:
2169 case ASPECT_RATIO_64_27:
2170 case ASPECT_RATIO_256_135:
2171 default:
2172 hdmi_info.bits.M0_M1 = 0;
2173 }
2174
2175 /* Active Format Aspect ratio - same as Picture Aspect Ratio. */
2176 hdmi_info.bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
2177
2178 /* TODO: un-hardcode cn0_cn1 and itc */
2179
2180 cn0_cn1 = 0;
2181 cn0_cn1_value = 0;
2182
2183 itc = true;
2184 itc_value = 1;
2185
2186 support = stream->sink->edid_caps.content_support;
2187
2188 if (itc) {
2189 if (!support.bits.valid_content_type) {
2190 cn0_cn1_value = 0;
2191 } else {
2192 if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) {
2193 if (support.bits.graphics_content == 1) {
2194 cn0_cn1_value = 0;
2195 }
2196 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) {
2197 if (support.bits.photo_content == 1) {
2198 cn0_cn1_value = 1;
2199 } else {
2200 cn0_cn1_value = 0;
2201 itc_value = 0;
2202 }
2203 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) {
2204 if (support.bits.cinema_content == 1) {
2205 cn0_cn1_value = 2;
2206 } else {
2207 cn0_cn1_value = 0;
2208 itc_value = 0;
2209 }
2210 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) {
2211 if (support.bits.game_content == 1) {
2212 cn0_cn1_value = 3;
2213 } else {
2214 cn0_cn1_value = 0;
2215 itc_value = 0;
2216 }
2217 }
2218 }
2219 hdmi_info.bits.CN0_CN1 = cn0_cn1_value;
2220 hdmi_info.bits.ITC = itc_value;
2221 }
2222
2223 /* TODO : We should handle YCC quantization */
2224 /* but we do not have matrix calculation */
2225 if (stream->sink->edid_caps.qs_bit == 1 &&
2226 stream->sink->edid_caps.qy_bit == 1) {
2227 if (color_space == COLOR_SPACE_SRGB ||
2228 color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
2229 hdmi_info.bits.Q0_Q1 = RGB_QUANTIZATION_FULL_RANGE;
2230 hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
2231 } else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
2232 color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
2233 hdmi_info.bits.Q0_Q1 = RGB_QUANTIZATION_LIMITED_RANGE;
2234 hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2235 } else {
2236 hdmi_info.bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
2237 hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2238 }
2239 } else {
2240 hdmi_info.bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
2241 hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2242 }
2243
2244 ///VIC
2245 format = stream->timing.timing_3d_format;
2246 /*todo, add 3DStereo support*/
2247 if (format != TIMING_3D_FORMAT_NONE) {
2248 // Based on HDMI specs hdmi vic needs to be converted to cea vic when 3D is enabled
2249 switch (pipe_ctx->stream->timing.hdmi_vic) {
2250 case 1:
2251 vic = 95;
2252 break;
2253 case 2:
2254 vic = 94;
2255 break;
2256 case 3:
2257 vic = 93;
2258 break;
2259 case 4:
2260 vic = 98;
2261 break;
2262 default:
2263 break;
2264 }
2265 }
2266 hdmi_info.bits.VIC0_VIC7 = vic;
2267
2268 /* pixel repetition
2269 * PR0 - PR3 start from 0 whereas pHwPathMode->mode.timing.flags.pixel
2270 * repetition start from 1 */
2271 hdmi_info.bits.PR0_PR3 = 0;
2272
2273 /* Bar Info
2274 * barTop: Line Number of End of Top Bar.
2275 * barBottom: Line Number of Start of Bottom Bar.
2276 * barLeft: Pixel Number of End of Left Bar.
2277 * barRight: Pixel Number of Start of Right Bar. */
2278 hdmi_info.bits.bar_top = stream->timing.v_border_top;
2279 hdmi_info.bits.bar_bottom = (stream->timing.v_total
2280 - stream->timing.v_border_bottom + 1);
2281 hdmi_info.bits.bar_left = stream->timing.h_border_left;
2282 hdmi_info.bits.bar_right = (stream->timing.h_total
2283 - stream->timing.h_border_right + 1);
2284
2285 /* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
2286 check_sum = &hdmi_info.packet_raw_data.sb[0];
2287
2288 *check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
2289
2290 for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
2291 *check_sum += hdmi_info.packet_raw_data.sb[byte_index];
2292
2293 /* one byte complement */
2294 *check_sum = (uint8_t) (0x100 - *check_sum);
2295
2296 /* Store in hw_path_mode */
2297 info_packet->hb0 = hdmi_info.packet_raw_data.hb0;
2298 info_packet->hb1 = hdmi_info.packet_raw_data.hb1;
2299 info_packet->hb2 = hdmi_info.packet_raw_data.hb2;
2300
2301 for (byte_index = 0; byte_index < sizeof(hdmi_info.packet_raw_data.sb); byte_index++)
2302 info_packet->sb[byte_index] = hdmi_info.packet_raw_data.sb[byte_index];
2303
2304 info_packet->valid = true;
2305 }
2306
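/*
 * Build the HDMI Vendor Specific InfoFrame (VSIF). It is only emitted for
 * 3D stereo timings or for HDMI-VIC (4K) modes; PB4 carries the
 * HDMI_Video_Format and PB5 carries either the 3D_Structure code or the
 * HDMI VIC.
 */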
2307 static void set_vendor_info_packet(
2308 struct dc_info_packet *info_packet,
2309 struct dc_stream_state *stream)
2310 {
2311 uint32_t length = 0;
2312 bool hdmi_vic_mode = false;
2313 uint8_t checksum = 0;
2314 uint32_t i = 0;
2315 enum dc_timing_3d_format format;
2316 // Can be different depending on packet content /*todo*/
2317 // unsigned int length = pPathMode->dolbyVision ? 24 : 5;
2318
2319 info_packet->valid = false;
2320
2321 format = stream->timing.timing_3d_format;
2322 if (stream->view_format == VIEW_3D_FORMAT_NONE)
2323 format = TIMING_3D_FORMAT_NONE;
2324
2325 /* Can be different depending on packet content */
2326 length = 5;
2327
2328 if (stream->timing.hdmi_vic != 0
2329 && stream->timing.h_total >= 3840
2330 && stream->timing.v_total >= 2160)
2331 hdmi_vic_mode = true;
2332
2333 /* According to HDMI 1.4a CTS, VSIF should be sent
2334 * for both 3D stereo and HDMI VIC modes.
2335 * For all other modes, there is no VSIF sent. */
2336
2337 if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
2338 return;
2339
2340 /* 24bit IEEE Registration identifier (0x000c03). LSB first. */
2341 info_packet->sb[1] = 0x03;
2342 info_packet->sb[2] = 0x0C;
2343 info_packet->sb[3] = 0x00;
2344
2345 	/*PB4: 5 lower bits = 0 (reserved). 3 higher bits = HDMI_Video_Format.
2346 * The value for HDMI_Video_Format are:
2347 * 0x0 (0b000) - No additional HDMI video format is presented in this
2348 * packet
2349 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
2350 * parameter follows
2351 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
2352 * potentially 3D_Ext_Data follows
2353 * 0x3..0x7 (0b011..0b111) - reserved for future use */
2354 if (format != TIMING_3D_FORMAT_NONE)
2355 info_packet->sb[4] = (2 << 5);
2356 else if (hdmi_vic_mode)
2357 info_packet->sb[4] = (1 << 5);
2358
2359 /* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
2360 	 * 4 lower bits = 0 (reserved). 4 higher bits = 3D_Structure.
2361 * The value for 3D_Structure are:
2362 * 0x0 - Frame Packing
2363 * 0x1 - Field Alternative
2364 * 0x2 - Line Alternative
2365 * 0x3 - Side-by-Side (full)
2366 * 0x4 - L + depth
2367 * 0x5 - L + depth + graphics + graphics-depth
2368 * 0x6 - Top-and-Bottom
2369 * 0x7 - Reserved for future use
2370 * 0x8 - Side-by-Side (Half)
2371 * 0x9..0xE - Reserved for future use
2372 * 0xF - Not used */
2373 switch (format) {
2374 case TIMING_3D_FORMAT_HW_FRAME_PACKING:
2375 case TIMING_3D_FORMAT_SW_FRAME_PACKING:
2376 info_packet->sb[5] = (0x0 << 4);
2377 break;
2378
2379 case TIMING_3D_FORMAT_SIDE_BY_SIDE:
2380 case TIMING_3D_FORMAT_SBS_SW_PACKED:
2381 info_packet->sb[5] = (0x8 << 4);
2382 length = 6;
2383 break;
2384
2385 case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
2386 case TIMING_3D_FORMAT_TB_SW_PACKED:
2387 info_packet->sb[5] = (0x6 << 4);
2388 break;
2389
2390 default:
2391 break;
2392 }
2393
2394 /*PB5: If PB4 is set to 0x1 (extended resolution format)
2395 * fill PB5 with the correct HDMI VIC code */
2396 if (hdmi_vic_mode)
2397 info_packet->sb[5] = stream->timing.hdmi_vic;
2398
2399 /* Header */
2400 info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
2401 info_packet->hb1 = 0x01; /* Version */
2402
2403 /* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
2404 info_packet->hb2 = (uint8_t) (length);
2405
2406 /* Calculate checksum */
2407 checksum = 0;
2408 checksum += info_packet->hb0;
2409 checksum += info_packet->hb1;
2410 checksum += info_packet->hb2;
2411
2412 for (i = 1; i <= length; i++)
2413 checksum += info_packet->sb[i];
2414
2415 info_packet->sb[0] = (uint8_t) (0x100 - checksum);
2416
2417 info_packet->valid = true;
2418 }
2419
2420 static void set_spd_info_packet(
2421 struct dc_info_packet *info_packet,
2422 struct dc_stream_state *stream)
2423 {
2424 /* SPD info packet for FreeSync */
2425
2426 unsigned char checksum = 0;
2427 unsigned int idx, payload_size = 0;
2428
2429 /* Check if Freesync is supported. Return if false. If true,
2430 * set the corresponding bit in the info packet
2431 */
2432 if (stream->freesync_ctx.supported == false)
2433 return;
2434
2435 if (dc_is_hdmi_signal(stream->signal)) {
2436
2437 /* HEADER */
2438
2439 /* HB0 = Packet Type = 0x83 (Source Product
2440 * Descriptor InfoFrame)
2441 */
2442 info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;
2443
2444 /* HB1 = Version = 0x01 */
2445 info_packet->hb1 = 0x01;
2446
2447 /* HB2 = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
2448 info_packet->hb2 = 0x08;
2449
2450 payload_size = 0x08;
2451
2452 } else if (dc_is_dp_signal(stream->signal)) {
2453
2454 /* HEADER */
2455
2456 /* HB0 = Secondary-data Packet ID = 0 - Only non-zero
2457 * when used to associate audio related info packets
2458 */
2459 info_packet->hb0 = 0x00;
2460
2461 /* HB1 = Packet Type = 0x83 (Source Product
2462 * Descriptor InfoFrame)
2463 */
2464 info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;
2465
2466 /* HB2 = [Bits 7:0 = Least significant eight bits -
2467 * For INFOFRAME, the value must be 1Bh]
2468 */
2469 info_packet->hb2 = 0x1B;
2470
2471 /* HB3 = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
2472 * [Bits 1:0 = Most significant two bits = 0x00]
2473 */
2474 info_packet->hb3 = 0x04;
2475
2476 payload_size = 0x1B;
2477 }
2478
2479 /* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
2480 info_packet->sb[1] = 0x1A;
2481
2482 /* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
2483 info_packet->sb[2] = 0x00;
2484
2485 /* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
2486 info_packet->sb[3] = 0x00;
2487
2488 /* PB4 = Reserved */
2489 info_packet->sb[4] = 0x00;
2490
2491 /* PB5 = Reserved */
2492 info_packet->sb[5] = 0x00;
2493
2494 /* PB6 = [Bits 7:3 = Reserved] */
2495 info_packet->sb[6] = 0x00;
2496
2497 if (stream->freesync_ctx.supported == true)
2498 /* PB6 = [Bit 0 = FreeSync Supported] */
2499 info_packet->sb[6] |= 0x01;
2500
2501 if (stream->freesync_ctx.enabled == true)
2502 /* PB6 = [Bit 1 = FreeSync Enabled] */
2503 info_packet->sb[6] |= 0x02;
2504
2505 if (stream->freesync_ctx.active == true)
2506 /* PB6 = [Bit 2 = FreeSync Active] */
2507 info_packet->sb[6] |= 0x04;
2508
2509 /* PB7 = FreeSync Minimum refresh rate (Hz) */
2510 info_packet->sb[7] = (unsigned char) (stream->freesync_ctx.
2511 min_refresh_in_micro_hz / 1000000);
2512
2513 /* PB8 = FreeSync Maximum refresh rate (Hz)
2514 *
2515 * Note: We do not use the maximum capable refresh rate
2516 * of the panel, because we should never go above the field
2517 * rate of the mode timing set.
2518 */
2519 info_packet->sb[8] = (unsigned char) (stream->freesync_ctx.
2520 nominal_refresh_in_micro_hz / 1000000);
2521
2522 /* PB9 - PB27 = Reserved */
2523 for (idx = 9; idx <= 27; idx++)
2524 info_packet->sb[idx] = 0x00;
2525
2526 /* Calculate checksum */
2527 checksum += info_packet->hb0;
2528 checksum += info_packet->hb1;
2529 checksum += info_packet->hb2;
2530 checksum += info_packet->hb3;
2531
2532 for (idx = 1; idx <= payload_size; idx++)
2533 checksum += info_packet->sb[idx];
2534
2535 /* PB0 = Checksum (one byte complement) */
2536 info_packet->sb[0] = (unsigned char) (0x100 - checksum);
2537
2538 info_packet->valid = true;
2539 }
2540
2541 static void set_hdr_static_info_packet(
2542 struct dc_info_packet *info_packet,
2543 struct dc_stream_state *stream)
2544 {
2545 /* HDR Static Metadata info packet for HDR10 */
2546
2547 if (!stream->hdr_static_metadata.valid ||
2548 stream->use_dynamic_meta)
2549 return;
2550
2551 *info_packet = stream->hdr_static_metadata;
2552 }
2553
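/*
 * Build the DP VSC SDP. Currently only revision 2 (3D stereo + PSR,
 * eDP 1.3 or higher) is emitted, and only when the sink reports a PSR
 * version.
 */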
2554 static void set_vsc_info_packet(
2555 struct dc_info_packet *info_packet,
2556 struct dc_stream_state *stream)
2557 {
2558 unsigned int vscPacketRevision = 0;
2559 unsigned int i;
2560
2561 	/* VSC SDP revision 2 is used when the sink supports PSR (DP 1.2+) */
2562 if (stream->psr_version != 0) {
2563 vscPacketRevision = 2;
2564 }
2565
2566 /* VSC packet not needed based on the features
2567 * supported by this DP display
2568 */
2569 if (vscPacketRevision == 0)
2570 return;
2571
2572 if (vscPacketRevision == 0x2) {
2573 /* Secondary-data Packet ID = 0*/
2574 info_packet->hb0 = 0x00;
2575 /* 07h - Packet Type Value indicating Video
2576 * Stream Configuration packet
2577 */
2578 info_packet->hb1 = 0x07;
2579 /* 02h = VSC SDP supporting 3D stereo and PSR
2580 * (applies to eDP v1.3 or higher).
2581 */
2582 info_packet->hb2 = 0x02;
2583 /* 08h = VSC packet supporting 3D stereo + PSR
2584 * (HB2 = 02h).
2585 */
2586 info_packet->hb3 = 0x08;
2587
2588 for (i = 0; i < 28; i++)
2589 info_packet->sb[i] = 0;
2590
2591 info_packet->valid = true;
2592 }
2593
2594 /*TODO: stereo 3D support and extend pixel encoding colorimetry*/
2595 }
2596
2597 void dc_resource_state_destruct(struct dc_state *context)
2598 {
2599 int i, j;
2600
2601 for (i = 0; i < context->stream_count; i++) {
2602 for (j = 0; j < context->stream_status[i].plane_count; j++)
2603 dc_plane_state_release(
2604 context->stream_status[i].plane_states[j]);
2605
2606 context->stream_status[i].plane_count = 0;
2607 dc_stream_release(context->streams[i]);
2608 context->streams[i] = NULL;
2609 }
2610 }
2611
2612 /*
2613 * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
2614 * by the src_ctx
2615 */
2616 void dc_resource_state_copy_construct(
2617 const struct dc_state *src_ctx,
2618 struct dc_state *dst_ctx)
2619 {
2620 int i, j;
2621 struct kref refcount = dst_ctx->refcount;
2622
2623 *dst_ctx = *src_ctx;
2624
2625 for (i = 0; i < MAX_PIPES; i++) {
2626 struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
2627
2628 if (cur_pipe->top_pipe)
2629 cur_pipe->top_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
2630
2631 if (cur_pipe->bottom_pipe)
2632 cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
2633
2634 }
2635
2636 for (i = 0; i < dst_ctx->stream_count; i++) {
2637 dc_stream_retain(dst_ctx->streams[i]);
2638 for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
2639 dc_plane_state_retain(
2640 dst_ctx->stream_status[i].plane_states[j]);
2641 }
2642
2643 /* context refcount should not be overridden */
2644 dst_ctx->refcount = refcount;
2645
2646 }
2647
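/*
 * Return the first clock source in the pool with a zero reference count,
 * or NULL if every PLL is already in use.
 */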
2648 struct clock_source *dc_resource_find_first_free_pll(
2649 struct resource_context *res_ctx,
2650 const struct resource_pool *pool)
2651 {
2652 int i;
2653
2654 for (i = 0; i < pool->clk_src_count; ++i) {
2655 if (res_ctx->clock_source_ref_count[i] == 0)
2656 return pool->clock_sources[i];
2657 }
2658
2659 return NULL;
2660 }
2661
2662 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
2663 {
2664 enum signal_type signal = SIGNAL_TYPE_NONE;
2665 struct encoder_info_frame *info = &pipe_ctx->stream_res.encoder_info_frame;
2666
2667 /* default all packets to invalid */
2668 info->avi.valid = false;
2669 info->gamut.valid = false;
2670 info->vendor.valid = false;
2671 info->spd.valid = false;
2672 info->hdrsmd.valid = false;
2673 info->vsc.valid = false;
2674
2675 signal = pipe_ctx->stream->signal;
2676
2677 	/* HDMI and DP have different info packets */
2678 if (dc_is_hdmi_signal(signal)) {
2679 set_avi_info_frame(&info->avi, pipe_ctx);
2680
2681 set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
2682
2683 set_spd_info_packet(&info->spd, pipe_ctx->stream);
2684
2685 set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2686
2687 } else if (dc_is_dp_signal(signal)) {
2688 set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
2689
2690 set_spd_info_packet(&info->spd, pipe_ctx->stream);
2691
2692 set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2693 }
2694
2695 patch_gamut_packet_checksum(&info->gamut);
2696 }
2697
2698 enum dc_status resource_map_clock_resources(
2699 const struct dc *dc,
2700 struct dc_state *context,
2701 struct dc_stream_state *stream)
2702 {
2703 /* acquire new resources */
2704 const struct resource_pool *pool = dc->res_pool;
2705 struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(
2706 &context->res_ctx, stream);
2707
2708 if (!pipe_ctx)
2709 return DC_ERROR_UNEXPECTED;
2710
2711 if (dc_is_dp_signal(pipe_ctx->stream->signal)
2712 || pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
2713 pipe_ctx->clock_source = pool->dp_clock_source;
2714 else {
2715 pipe_ctx->clock_source = NULL;
2716
2717 if (!dc->config.disable_disp_pll_sharing)
2718 pipe_ctx->clock_source = resource_find_used_clk_src_for_sharing(
2719 &context->res_ctx,
2720 pipe_ctx);
2721
2722 if (pipe_ctx->clock_source == NULL)
2723 pipe_ctx->clock_source =
2724 dc_resource_find_first_free_pll(
2725 &context->res_ctx,
2726 pool);
2727 }
2728
2729 if (pipe_ctx->clock_source == NULL)
2730 return DC_NO_CLOCK_SOURCE_RESOURCE;
2731
2732 resource_reference_clock_source(
2733 &context->res_ctx, pool,
2734 pipe_ctx->clock_source);
2735
2736 return DC_OK;
2737 }
2738
2739 /*
2740  * Note: We need to disable the output if the clock source changes,
2741  * since the VBIOS optimizes its programming and will not reprogram
2742  * the PHY unless it has first been disabled.
2743  */
2744 bool pipe_need_reprogram(
2745 struct pipe_ctx *pipe_ctx_old,
2746 struct pipe_ctx *pipe_ctx)
2747 {
2748 if (!pipe_ctx_old->stream)
2749 return false;
2750
2751 if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
2752 return true;
2753
2754 if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
2755 return true;
2756
2757 if (pipe_ctx_old->stream_res.audio != pipe_ctx->stream_res.audio)
2758 return true;
2759
2760 if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
2761 && pipe_ctx_old->stream != pipe_ctx->stream)
2762 return true;
2763
2764 if (pipe_ctx_old->stream_res.stream_enc != pipe_ctx->stream_res.stream_enc)
2765 return true;
2766
2767 if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2768 return true;
2769
2770 if (is_hdr_static_meta_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2771 return true;
2772
2773 return false;
2774 }
2775
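/*
 * Translate the stream's dither_option into formatter bit-depth-reduction
 * flags: truncation depth, spatial dither depth, and frame (temporal)
 * modulation depth. DITHER_OPTION_DEFAULT maps to spatial dithering at the
 * timing's color depth.
 */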
2776 void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
2777 struct bit_depth_reduction_params *fmt_bit_depth)
2778 {
2779 enum dc_dither_option option = stream->dither_option;
2780 enum dc_pixel_encoding pixel_encoding =
2781 stream->timing.pixel_encoding;
2782
2783 memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));
2784
2785 if (option == DITHER_OPTION_DEFAULT) {
2786 switch (stream->timing.display_color_depth) {
2787 case COLOR_DEPTH_666:
2788 option = DITHER_OPTION_SPATIAL6;
2789 break;
2790 case COLOR_DEPTH_888:
2791 option = DITHER_OPTION_SPATIAL8;
2792 break;
2793 case COLOR_DEPTH_101010:
2794 option = DITHER_OPTION_SPATIAL10;
2795 break;
2796 default:
2797 option = DITHER_OPTION_DISABLE;
2798 }
2799 }
2800
2801 if (option == DITHER_OPTION_DISABLE)
2802 return;
2803
2804 if (option == DITHER_OPTION_TRUN6) {
2805 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2806 fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
2807 } else if (option == DITHER_OPTION_TRUN8 ||
2808 option == DITHER_OPTION_TRUN8_SPATIAL6 ||
2809 option == DITHER_OPTION_TRUN8_FM6) {
2810 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2811 fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
2812 } else if (option == DITHER_OPTION_TRUN10 ||
2813 option == DITHER_OPTION_TRUN10_SPATIAL6 ||
2814 option == DITHER_OPTION_TRUN10_SPATIAL8 ||
2815 option == DITHER_OPTION_TRUN10_FM8 ||
2816 option == DITHER_OPTION_TRUN10_FM6 ||
2817 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2818 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2819 fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2820 }
2821
2822 /* special case - Formatter can only reduce by 4 bits at most.
2823 * When reducing from 12 to 6 bits,
2824 * HW recommends we use trunc with round mode
2825 * (if we did nothing, trunc to 10 bits would be used)
2826 * note that any 12->10 bit reduction is ignored prior to DCE8,
2827 * as the input was 10 bits.
2828 */
2829 if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2830 option == DITHER_OPTION_SPATIAL6 ||
2831 option == DITHER_OPTION_FM6) {
2832 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2833 fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2834 fmt_bit_depth->flags.TRUNCATE_MODE = 1;
2835 }
2836
2837 /* spatial dither
2838 * note that spatial modes 1-3 are never used
2839 */
2840 if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2841 option == DITHER_OPTION_SPATIAL6 ||
2842 option == DITHER_OPTION_TRUN10_SPATIAL6 ||
2843 option == DITHER_OPTION_TRUN8_SPATIAL6) {
2844 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2845 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
2846 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2847 fmt_bit_depth->flags.RGB_RANDOM =
2848 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2849 } else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM ||
2850 option == DITHER_OPTION_SPATIAL8 ||
2851 option == DITHER_OPTION_SPATIAL8_FM6 ||
2852 option == DITHER_OPTION_TRUN10_SPATIAL8 ||
2853 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2854 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2855 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
2856 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2857 fmt_bit_depth->flags.RGB_RANDOM =
2858 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2859 } else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
2860 option == DITHER_OPTION_SPATIAL10 ||
2861 option == DITHER_OPTION_SPATIAL10_FM8 ||
2862 option == DITHER_OPTION_SPATIAL10_FM6) {
2863 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2864 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
2865 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2866 fmt_bit_depth->flags.RGB_RANDOM =
2867 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2868 }
2869
2870 if (option == DITHER_OPTION_SPATIAL6 ||
2871 option == DITHER_OPTION_SPATIAL8 ||
2872 option == DITHER_OPTION_SPATIAL10) {
2873 fmt_bit_depth->flags.FRAME_RANDOM = 0;
2874 } else {
2875 fmt_bit_depth->flags.FRAME_RANDOM = 1;
2876 }
2877
2878 //////////////////////
2879 //// temporal dither
2880 //////////////////////
2881 if (option == DITHER_OPTION_FM6 ||
2882 option == DITHER_OPTION_SPATIAL8_FM6 ||
2883 option == DITHER_OPTION_SPATIAL10_FM6 ||
2884 option == DITHER_OPTION_TRUN10_FM6 ||
2885 option == DITHER_OPTION_TRUN8_FM6 ||
2886 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2887 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2888 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
2889 } else if (option == DITHER_OPTION_FM8 ||
2890 option == DITHER_OPTION_SPATIAL10_FM8 ||
2891 option == DITHER_OPTION_TRUN10_FM8) {
2892 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2893 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
2894 } else if (option == DITHER_OPTION_FM10) {
2895 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2896 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
2897 }
2898
2899 fmt_bit_depth->pixel_encoding = pixel_encoding;
2900 }
2901
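/*
 * Validate a single stream against the first timing generator and the
 * link encoder, then against the link's mode/timing limits. Plane-level
 * checks are handled separately by dc_validate_plane().
 */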
2902 enum dc_status dc_validate_stream(struct dc *dc, struct dc_stream_state *stream)
2903 {
2904 struct dc *core_dc = dc;
2905 struct dc_link *link = stream->sink->link;
2906 struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
2907 enum dc_status res = DC_OK;
2908
2909 calculate_phy_pix_clks(stream);
2910
2911 if (!tg->funcs->validate_timing(tg, &stream->timing))
2912 res = DC_FAIL_CONTROLLER_VALIDATE;
2913
2914 if (res == DC_OK)
2915 if (!link->link_enc->funcs->validate_output_with_stream(
2916 link->link_enc, stream))
2917 res = DC_FAIL_ENC_VALIDATE;
2918
2919 /* TODO: validate audio ASIC caps, encoder */
2920
2921 if (res == DC_OK)
2922 res = dc_link_validate_mode_timing(stream,
2923 link,
2924 &stream->timing);
2925
2926 return res;
2927 }
2928
2929 enum dc_status dc_validate_plane(struct dc *dc, const struct dc_plane_state *plane_state)
2930 {
2931 enum dc_status res = DC_OK;
2932
2933 /* TODO For now validates pixel format only */
2934 if (dc->res_pool->funcs->validate_plane)
2935 return dc->res_pool->funcs->validate_plane(plane_state, &dc->caps);
2936
2937 return res;
2938 }
2939