@@ -1823,6 +1823,19 @@ intel_dp_adjust_compliance_config(struct intel_dp *intel_dp,
}
}
+static int intel_dp_output_bpp(const struct intel_crtc_state *crtc_state, int bpp)
+{
+ /*
+ * The bpp value was assumed to be in RGB format. With the YCbCr 4:2:0
+ * output format, the number of bits per pixel is half the number of
+ * bits of an RGB pixel.
+ */
+ if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
+ bpp /= 2;
+
+ return bpp;
+}
+
/* Optimize link config in order: max bpp, min clock, min lanes */
static int
intel_dp_compute_link_config_wide(struct intel_dp *intel_dp,
@@ -1834,8 +1847,10 @@ intel_dp_compute_link_config_wide(struct intel_dp *intel_dp,
int mode_rate, link_clock, link_avail;
for (bpp = limits->max_bpp; bpp >= limits->min_bpp; bpp -= 2 * 3) {
+ int output_bpp = intel_dp_output_bpp(pipe_config, bpp);
+
mode_rate = intel_dp_link_required(adjusted_mode->crtc_clock,
- bpp);
+ output_bpp);
for (clock = limits->min_clock; clock <= limits->max_clock; clock++) {
for (lane_count = limits->min_lane_count;
@@ -1870,8 +1885,10 @@ intel_dp_compute_link_config_fast(struct intel_dp *intel_dp,
int mode_rate, link_clock, link_avail;
for (bpp = limits->max_bpp; bpp >= limits->min_bpp; bpp -= 2 * 3) {
+ int output_bpp = intel_dp_output_bpp(pipe_config, bpp);
+
mode_rate = intel_dp_link_required(adjusted_mode->crtc_clock,
- bpp);
+ output_bpp);
for (lane_count = limits->min_lane_count;
lane_count <= limits->max_lane_count;
@@ -2220,13 +2237,16 @@ intel_dp_compute_config(struct intel_encoder *encoder,
intel_conn_state->broadcast_rgb == INTEL_BROADCAST_RGB_LIMITED;
}
- if (!pipe_config->dsc_params.compression_enable)
- intel_link_compute_m_n(pipe_config->pipe_bpp,
+ if (!pipe_config->dsc_params.compression_enable) {
+ int output_bpp = intel_dp_output_bpp(pipe_config, pipe_config->pipe_bpp);
+
+ intel_link_compute_m_n(output_bpp,
pipe_config->lane_count,
adjusted_mode->crtc_clock,
pipe_config->port_clock,
&pipe_config->dp_m_n,
constant_n);
+ }
else
intel_link_compute_m_n(pipe_config->dsc_params.compressed_bpp,
pipe_config->lane_count,
@@ -4454,6 +4474,23 @@ intel_pixel_encoding_setup_vsc(struct intel_dp *intel_dp,
* 100b = 16bpc.
*/
vsc_sdp.DB17 = 0x1;
+ switch (crtc_state->pipe_bpp / 3) {
+ case 8: /* 8bpc */
+ vsc_sdp.DB17 = 0x1;
+ break;
+ case 10: /* 10bpc */
+ vsc_sdp.DB17 = 0x2;
+ break;
+ case 12: /* 12bpc */
+ vsc_sdp.DB17 = 0x3;
+ break;
+ case 16: /* 16bpc */
+ vsc_sdp.DB17 = 0x4;
+ break;
+ default:
+ DRM_DEBUG_KMS("Invalid bpp value '%d'\n", crtc_state->pipe_bpp);
+ break;
+ }
/*
* Content Type (Bits 2:0)
All of the link bandwidth and Data M/N calculations assumed a bpp in RGB
format. But when the YCbCr 4:2:0 output format is used on DP, the bpp
calculations must account for the YCbCr 4:2:0 format. The pipe_bpp value
was assumed to be RGB, so it was multiplied by 3; YCbCr 4:2:0 requires a
multiplier of 1.5 instead. Therefore we need to divide pipe_bpp by 2 while
the DP output uses the YCbCr 4:2:0 format.
 - RGB format bpp = bpc x 3
 - YCbCr 4:2:0 format bpp = bpc x 1.5

It also adds the missing bpc values for the programming of the VSC header.
This only affects DP and eDP ports which use the YCbCr 4:2:0 output format.
For now, it does not consider the use case of DSC + YCbCr 4:2:0.

v2: Addressed review comments from Ville.
    Remove the changing of pipe_bpp in intel_ddi_set_pipe_settings().
    Because the pipe is running at the full bpp, keep pipe_bpp as RGB
    even though the YCbCr 4:2:0 output format is used.
    Add a link bandwidth computation for the YCbCr 4:2:0 output format.

v3: Addressed review comments from Ville.
    In order to keep the code simple, add and use an intel_dp_output_bpp()
    function.

Cc: Ville Syrjälä <ville.syrjala@linux.intel.com>
Signed-off-by: Gwan-gyeong Mun <gwan-gyeong.mun@intel.com>
---
 drivers/gpu/drm/i915/intel_dp.c | 45 ++++++++++++++++++++++++++++++---
 1 file changed, 41 insertions(+), 4 deletions(-)
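
For reference, a minimal standalone sketch of the bpp adjustment described
above. The helper name output_bpp() mirrors the patch's intel_dp_output_bpp();
the output-format enum, the simplified link-rate formula, and the numbers in
main() are illustrative assumptions, not the driver's actual structures.

#include <stdio.h>

/* Simplified stand-in for the driver's output-format enum (assumption). */
enum output_format {
	OUTPUT_FORMAT_RGB,
	OUTPUT_FORMAT_YCBCR444,
	OUTPUT_FORMAT_YCBCR420,
};

/*
 * Mirrors the idea of intel_dp_output_bpp(): link bandwidth math assumes
 * an RGB bpp (bpc x 3); YCbCr 4:2:0 carries bpc x 1.5 per pixel on
 * average, so the bpp used for link calculations is divided by 2.
 */
static int output_bpp(enum output_format fmt, int pipe_bpp)
{
	if (fmt == OUTPUT_FORMAT_YCBCR420)
		return pipe_bpp / 2;

	return pipe_bpp;
}

/*
 * Required link data rate in kbps: pixel clock (kHz) x bits per pixel / 8.
 * Simplified; the driver's intel_dp_link_required() rounds up.
 */
static int link_required_kbps(int pixel_clock_khz, int bpp)
{
	return pixel_clock_khz * bpp / 8;
}

int main(void)
{
	int pipe_bpp = 30;		/* 10 bpc x 3; the pipe always runs at the RGB-sized bpp */
	int pixel_clock_khz = 594000;	/* example pixel clock, roughly a 4K@60 mode */

	printf("RGB link rate:         %d kbps\n",
	       link_required_kbps(pixel_clock_khz,
				  output_bpp(OUTPUT_FORMAT_RGB, pipe_bpp)));
	printf("YCbCr 4:2:0 link rate: %d kbps\n",
	       link_required_kbps(pixel_clock_khz,
				  output_bpp(OUTPUT_FORMAT_YCBCR420, pipe_bpp)));
	return 0;
}

The second rate comes out at half the first, which is the whole point of
routing the link bandwidth and M/N computations through the halved
output_bpp while leaving pipe_bpp itself in RGB terms.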