--- a/drivers/gpu/drm/i915/i915_drv.c
+++ b/drivers/gpu/drm/i915/i915_drv.c
@@ -1019,6 +1019,7 @@ static int skl_suspend_complete(struct drm_i915_private *dev_priv)
{
/* Enabling DC6 is not a hard requirement to enter runtime D3 */
+ flush_work(&dev_priv->csr.csr_work);
skl_uninit_cdclk(dev_priv);
return 0;
--- a/drivers/gpu/drm/i915/i915_drv.h
+++ b/drivers/gpu/drm/i915/i915_drv.h
@@ -738,6 +738,8 @@ struct intel_uncore {
for_each_fw_domain_mask(domain__, FORCEWAKE_ALL, dev_priv__, i__)
struct intel_csr {
+ bool dc_state_req;
+ struct work_struct csr_work;
const char *fw_path;
__be32 *dmc_payload;
uint32_t dmc_fw_size;
@@ -2621,6 +2623,9 @@ extern void i915_update_gfx_val(struct drm_i915_private *dev_priv);
int vlv_force_gfx_clock(struct drm_i915_private *dev_priv, bool on);
void i915_firmware_load_error_print(const char *fw_path, int err);
+/* intel_csr.c */
+void intel_csr_setdc_work_fn(struct work_struct *__work);
+
/* intel_hotplug.c */
void intel_hpd_irq_handler(struct drm_device *dev, u32 pin_mask, u32 long_mask);
void intel_hpd_init(struct drm_i915_private *dev_priv);
--- a/drivers/gpu/drm/i915/intel_csr.c
+++ b/drivers/gpu/drm/i915/intel_csr.c
@@ -391,6 +391,7 @@ void intel_csr_ucode_init(struct drm_device *dev)
if (ret) {
i915_firmware_load_error_print(csr->fw_path, ret);
}
+ INIT_WORK(&csr->csr_work, intel_csr_setdc_work_fn);
}
/**
@@ -407,6 +408,8 @@ void intel_csr_ucode_fini(struct drm_device *dev)
if (!HAS_CSR(dev))
return;
+ flush_work(&dev_priv->csr.csr_work);
+
kfree(dev_priv->csr.dmc_payload);
}
--- a/drivers/gpu/drm/i915/intel_runtime_pm.c
+++ b/drivers/gpu/drm/i915/intel_runtime_pm.c
@@ -563,6 +563,37 @@ static void skl_disable_dc6(struct drm_i915_private *dev_priv)
POSTING_READ(DC_STATE_EN);
}
+void intel_csr_setdc_work_fn(struct work_struct *__work)
+{
+ struct drm_i915_private *dev_priv =
+ container_of(__work, struct drm_i915_private, csr.csr_work);
+ struct intel_csr *csr = &dev_priv->csr;
+
+ if (csr->dc_state_req) {
+
+ /* TODO: Load the dmc firmware. */
+
+ if (IS_SKYLAKE(dev_priv->dev))
+ skl_enable_dc6(dev_priv);
+ else
+ gen9_enable_dc5(dev_priv);
+ } else {
+ if (IS_SKYLAKE(dev_priv->dev)) {
+ skl_disable_dc6(dev_priv);
+ /*
+ * DDI buffer programming unnecessary during
+ * driver-load/resume as it's already done during
+ * modeset initialization then. It's also invalid
+ * here as encoder list is still uninitialized.
+ */
+ if (!dev_priv->power_domains.initializing)
+ intel_prepare_ddi(dev_priv->dev);
+ } else {
+ gen9_disable_dc5(dev_priv);
+ }
+ }
+}
+
static void skl_set_power_well(struct drm_i915_private *dev_priv,
struct i915_power_well *power_well, bool enable)
{
@@ -612,18 +643,8 @@ static void skl_set_power_well(struct drm_i915_private *dev_priv,
when request is to disable!\n");
if ((GEN9_ENABLE_DC5(dev) || SKL_ENABLE_DC6(dev)) &&
power_well->data == SKL_DISP_PW_2) {
- if (SKL_ENABLE_DC6(dev)) {
- skl_disable_dc6(dev_priv);
- /*
- * DDI buffer programming unnecessary during driver-load/resume
- * as it's already done during modeset initialization then.
- * It's also invalid here as encoder list is still uninitialized.
- */
- if (!dev_priv->power_domains.initializing)
- intel_prepare_ddi(dev);
- } else {
- gen9_disable_dc5(dev_priv);
- }
+ dev_priv->csr.dc_state_req = false;
+ schedule_work(&dev_priv->csr.csr_work);
}
I915_WRITE(HSW_PWR_WELL_DRIVER, tmp | req_mask);
}
@@ -644,10 +665,8 @@ static void skl_set_power_well(struct drm_i915_private *dev_priv,
if ((GEN9_ENABLE_DC5(dev) || SKL_ENABLE_DC6(dev)) &&
power_well->data == SKL_DISP_PW_2) {
- if (SKL_ENABLE_DC6(dev))
- skl_enable_dc6(dev_priv);
- else
- gen9_enable_dc5(dev_priv);
+ dev_priv->csr.dc_state_req = true;
+ schedule_work(&dev_priv->csr.csr_work);
}
}
}
v1: As per review comments from Daniel, prevented entering DC5/DC6 while
firmware loading is in progress. The DC5/DC6 register programming is now
always preceded by the firmware loading. Added an async work item which is
responsible for both loading the firmware and the register programming for
DC5/DC6.

Signed-off-by: Animesh Manna <animesh.manna@intel.com>
---
 drivers/gpu/drm/i915/i915_drv.c         |  1 +
 drivers/gpu/drm/i915/i915_drv.h         |  5 ++++
 drivers/gpu/drm/i915/intel_csr.c        |  3 ++
 drivers/gpu/drm/i915/intel_runtime_pm.c | 51 ++++++++++++++++++++++----------
 4 files changed, 44 insertions(+), 16 deletions(-)
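
For reference, here is a minimal, self-contained kernel-module sketch of the
workqueue idiom the patch relies on: a single work item that would perform the
firmware load plus the DC5/DC6 register programming, a request flag toggled by
callers before schedule_work(), and flush_work() in the paths that must see a
settled DC state (suspend complete, ucode fini). All demo_* names are
hypothetical and not part of the patch; the actual DC helpers are stubbed out
with pr_info().

#include <linux/module.h>
#include <linux/workqueue.h>

struct demo_csr {
	bool dc_state_req;		/* true: request DC5/DC6 entry, false: exit */
	struct work_struct csr_work;
};

static struct demo_csr demo;

static void demo_setdc_work_fn(struct work_struct *work)
{
	struct demo_csr *csr = container_of(work, struct demo_csr, csr_work);

	if (csr->dc_state_req) {
		/* Stand-in for: load the DMC firmware, then enable DC5/DC6. */
		pr_info("demo: firmware loaded, enabling DC state\n");
	} else {
		/* Stand-in for: disable DC5/DC6, then reprogram DDI buffers. */
		pr_info("demo: DC state disabled\n");
	}
}

static int __init demo_init(void)
{
	INIT_WORK(&demo.csr_work, demo_setdc_work_fn);

	/* Power-well disable path: request DC entry asynchronously. */
	demo.dc_state_req = true;
	schedule_work(&demo.csr_work);

	/* Suspend/fini paths: wait for any pending DC transition to finish. */
	flush_work(&demo.csr_work);
	return 0;
}

static void __exit demo_exit(void)
{
	flush_work(&demo.csr_work);
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");

As in the patch, the waiters use flush_work() rather than cancel_work_sync(),
so a DC transition that has already been requested still completes before the
suspend or teardown path continues.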