drm/amd/display: fixup DPP programming sequence
author     Jun Lei <Jun.Lei@amd.com>
           Mon, 15 Jul 2019 14:41:47 +0000 (10:41 -0400)
committer  Alex Deucher <alexander.deucher@amd.com>
           Thu, 15 Aug 2019 15:53:43 +0000 (10:53 -0500)
[why]
DC does not correctly account for the fact that the DPP DTO is double buffered while the DPP ref clock is not.
This means that when the DPP ref clock is lowered at "safe to lower" time, the DPP blocks that need
an increased divider will temporarily see their actual DPP clock drop below the minimum while the DTO
double buffering takes effect.  This results in temporary underflow.
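
For illustration only (the numbers below are invented; the arithmetic follows the
phase/modulo programming in dccg2_update_dpp_dto(), where the effective per-pipe
clock is ref_dppclk * DTO_PHASE / DTO_MODULO):

#include <stdio.h>

/* Effective per-pipe DPP clock = ref_dppclk * DTO_PHASE / DTO_MODULO.
 * The DTO registers are double buffered; the DENTIST ref divider is not. */
int main(void)
{
	long long new_ref_khz = 600000;             /* ref already lowered      */
	long long phase = 600000, modulo = 1200000; /* old DTO still latched    */
	long long required_khz = 600000;            /* pipe's minimum DPP clock */

	long long transient_khz = new_ref_khz * phase / modulo; /* 300000 */

	printf("transient DPP clock: %lld kHz (minimum %lld kHz) -> underflow\n",
	       transient_khz, required_khz);
	return 0;
}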

[how]
To fix this, the DPP clock can no longer be programmed atomically; it is broken up into the DTO and the
ref, each with its own "safe to lower" logic.  During "prepare", the ref and dividers may only increase;
during "optimize", both may decrease.  We are guaranteed not to exceed the max DPP clock because
we do not use dividers larger than 1.
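
The resulting flow, condensed into a standalone sketch (the types and helpers below
are illustrative stand-ins, not the real DC interfaces; the actual logic lives in
dcn2_update_clocks() in this patch):

#include <stdbool.h>
#include <stdio.h>

struct pipe_stub { bool has_plane; int dppclk_khz; };

static void program_global_dpp_ref(int khz)
{
	printf("global DPP ref -> %d kHz\n", khz);
}

static void program_dpp_dto(int inst, int khz, bool reduce_divider_only)
{
	printf("pipe %d DTO -> %d kHz%s\n", inst, khz,
	       reduce_divider_only ? " (ratio may only rise)" : "");
}

static void update_dpp_clocks(int cur_ref_khz, int new_ref_khz,
			      struct pipe_stub *pipes, int n, bool safe_to_lower)
{
	int i;

	if (!safe_to_lower) {
		/* "prepare": the global ref and per-pipe DTO ratios may only go up */
		if (new_ref_khz > cur_ref_khz)
			program_global_dpp_ref(new_ref_khz);
		for (i = 0; i < n; i++)
			if (pipes[i].has_plane)
				program_dpp_dto(i, pipes[i].dppclk_khz, true);
	} else {
		/* "optimize": the ref may come down, every DTO is set to its target */
		if (new_ref_khz < cur_ref_khz)
			program_global_dpp_ref(new_ref_khz);
		for (i = 0; i < n; i++)
			if (pipes[i].has_plane)
				program_dpp_dto(i, pipes[i].dppclk_khz, false);
	}
}

int main(void)
{
	struct pipe_stub pipes[2] = { { true, 600000 }, { true, 300000 } };

	update_dpp_clocks(1200000, 600000, pipes, 2, false); /* prepare  */
	update_dpp_clocks(1200000, 600000, pipes, 2, true);  /* optimize */
	return 0;
}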

Signed-off-by: Jun Lei <Jun.Lei@amd.com>
Reviewed-by: Eric Yang <eric.yang2@amd.com>
Acked-by: Leo Li <sunpeng.li@amd.com>
Signed-off-by: Alex Deucher <alexander.deucher@amd.com>
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
drivers/gpu/drm/amd/display/dc/core/dc.c
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c
drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dccg.c
drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dccg.h
drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c
drivers/gpu/drm/amd/display/dc/inc/core_types.h
drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr_internal.h
drivers/gpu/drm/amd/display/dc/inc/hw/dccg.h

drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
index 7ff0396..24775ab 100644
@@ -104,7 +104,6 @@ void dcn20_update_clocks_update_dpp_dto(struct clk_mgr_internal *clk_mgr,
 {
        int i;
 
-       clk_mgr->dccg->ref_dppclk = clk_mgr->base.clks.dppclk_khz;
        for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) {
                int dpp_inst, dppclk_khz;
 
@@ -114,28 +113,75 @@ void dcn20_update_clocks_update_dpp_dto(struct clk_mgr_internal *clk_mgr,
                dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst;
                dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz;
                clk_mgr->dccg->funcs->update_dpp_dto(
-                               clk_mgr->dccg, dpp_inst, dppclk_khz);
+                               clk_mgr->dccg, dpp_inst, dppclk_khz, false);
        }
 }
 
-void dcn20_update_clocks_update_dentist(struct clk_mgr_internal *clk_mgr)
+static void update_global_dpp_clk(struct clk_mgr_internal *clk_mgr, unsigned int khz)
 {
        int dpp_divider = DENTIST_DIVIDER_RANGE_SCALE_FACTOR
-                       * clk_mgr->dentist_vco_freq_khz / clk_mgr->base.clks.dppclk_khz;
-       int disp_divider = DENTIST_DIVIDER_RANGE_SCALE_FACTOR
-                       * clk_mgr->dentist_vco_freq_khz / clk_mgr->base.clks.dispclk_khz;
+                       * clk_mgr->dentist_vco_freq_khz / khz;
 
        uint32_t dppclk_wdivider = dentist_get_did_from_divider(dpp_divider);
-       uint32_t dispclk_wdivider = dentist_get_did_from_divider(disp_divider);
 
        REG_UPDATE(DENTIST_DISPCLK_CNTL,
-                       DENTIST_DISPCLK_WDIVIDER, dispclk_wdivider);
-//     REG_WAIT(DENTIST_DISPCLK_CNTL, DENTIST_DISPCLK_CHG_DONE, 1, 5, 100);
-       REG_UPDATE(DENTIST_DISPCLK_CNTL,
                        DENTIST_DPPCLK_WDIVIDER, dppclk_wdivider);
        REG_WAIT(DENTIST_DISPCLK_CNTL, DENTIST_DPPCLK_CHG_DONE, 1, 5, 100);
 }
 
+static void update_display_clk(struct clk_mgr_internal *clk_mgr, unsigned int khz)
+{
+       int disp_divider = DENTIST_DIVIDER_RANGE_SCALE_FACTOR
+                       * clk_mgr->dentist_vco_freq_khz / khz;
+
+       uint32_t dispclk_wdivider = dentist_get_did_from_divider(disp_divider);
+
+       REG_UPDATE(DENTIST_DISPCLK_CNTL,
+                       DENTIST_DISPCLK_WDIVIDER, dispclk_wdivider);
+}
+
+static void request_voltage_and_program_disp_clk(struct clk_mgr *clk_mgr_base, unsigned int khz)
+{
+       struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
+       struct dc *dc = clk_mgr_base->ctx->dc;
+       struct pp_smu_funcs_nv *pp_smu = NULL;
+       bool going_up = clk_mgr->base.clks.dispclk_khz < khz;
+
+       if (dc->res_pool->pp_smu)
+               pp_smu = &dc->res_pool->pp_smu->nv_funcs;
+
+       clk_mgr->base.clks.dispclk_khz = khz;
+
+       if (going_up && pp_smu && pp_smu->set_voltage_by_freq)
+               pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_DISPCLK, clk_mgr_base->clks.dispclk_khz / 1000);
+
+       update_display_clk(clk_mgr, khz);
+
+       if (!going_up && pp_smu && pp_smu->set_voltage_by_freq)
+               pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_DISPCLK, clk_mgr_base->clks.dispclk_khz / 1000);
+}
+
+static void request_voltage_and_program_global_dpp_clk(struct clk_mgr *clk_mgr_base, unsigned int khz)
+{
+       struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
+       struct dc *dc = clk_mgr_base->ctx->dc;
+       struct pp_smu_funcs_nv *pp_smu = NULL;
+       bool going_up = clk_mgr->base.clks.dppclk_khz < khz;
+
+       if (dc->res_pool->pp_smu)
+               pp_smu = &dc->res_pool->pp_smu->nv_funcs;
+
+       clk_mgr->base.clks.dppclk_khz = khz;
+       clk_mgr->dccg->ref_dppclk = khz;
+
+       if (going_up && pp_smu && pp_smu->set_voltage_by_freq)
+               pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_PIXELCLK, clk_mgr_base->clks.dppclk_khz / 1000);
+
+       update_global_dpp_clk(clk_mgr, khz);
+
+       if (!going_up && pp_smu && pp_smu->set_voltage_by_freq)
+               pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_PIXELCLK, clk_mgr_base->clks.dppclk_khz / 1000);
+}
 
 void dcn2_update_clocks(struct clk_mgr *clk_mgr_base,
                        struct dc_state *context,
@@ -146,12 +192,11 @@ void dcn2_update_clocks(struct clk_mgr *clk_mgr_base,
        struct dc *dc = clk_mgr_base->ctx->dc;
        struct pp_smu_funcs_nv *pp_smu = NULL;
        int display_count;
-       bool update_dppclk = false;
        bool update_dispclk = false;
        bool enter_display_off = false;
-       bool dpp_clock_lowered = false;
        struct dmcu *dmcu = clk_mgr_base->ctx->dc->res_pool->dmcu;
        bool force_reset = false;
+       int i;
 
        if (clk_mgr_base->clks.dispclk_khz == 0 ||
                dc->debug.force_clock_mode & 0x1) {
@@ -177,6 +222,7 @@ void dcn2_update_clocks(struct clk_mgr *clk_mgr_base,
                        pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_PHYCLK, clk_mgr_base->clks.phyclk_khz / 1000);
        }
 
+
        if (dc->debug.force_min_dcfclk_mhz > 0)
                new_clocks->dcfclk_khz = (new_clocks->dcfclk_khz > (dc->debug.force_min_dcfclk_mhz * 1000)) ?
                                new_clocks->dcfclk_khz : (dc->debug.force_min_dcfclk_mhz * 1000);
@@ -202,10 +248,12 @@ void dcn2_update_clocks(struct clk_mgr *clk_mgr_base,
 
        if (should_update_pstate_support(safe_to_lower, new_clocks->p_state_change_support, clk_mgr_base->clks.p_state_change_support)) {
                clk_mgr_base->clks.prev_p_state_change_support = clk_mgr_base->clks.p_state_change_support;
+
                clk_mgr_base->clks.p_state_change_support = new_clocks->p_state_change_support;
                if (pp_smu && pp_smu->set_pstate_handshake_support)
                        pp_smu->set_pstate_handshake_support(&pp_smu->pp_smu, clk_mgr_base->clks.p_state_change_support);
        }
+       clk_mgr_base->clks.prev_p_state_change_support = clk_mgr_base->clks.p_state_change_support;
 
        if (should_set_clock(safe_to_lower, new_clocks->dramclk_khz, clk_mgr_base->clks.dramclk_khz)) {
                clk_mgr_base->clks.dramclk_khz = new_clocks->dramclk_khz;
@@ -213,35 +261,48 @@ void dcn2_update_clocks(struct clk_mgr *clk_mgr_base,
                        pp_smu->set_hard_min_uclk_by_freq(&pp_smu->pp_smu, clk_mgr_base->clks.dramclk_khz / 1000);
        }
 
-       if (should_set_clock(safe_to_lower, new_clocks->dppclk_khz, clk_mgr->base.clks.dppclk_khz)) {
-               if (clk_mgr->base.clks.dppclk_khz > new_clocks->dppclk_khz)
-                       dpp_clock_lowered = true;
-               clk_mgr->base.clks.dppclk_khz = new_clocks->dppclk_khz;
+       if (dc->config.forced_clocks == false) {
+               // First update display clock
+               if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, clk_mgr_base->clks.dispclk_khz))
+                       request_voltage_and_program_disp_clk(clk_mgr_base, new_clocks->dispclk_khz);
 
-               if (pp_smu && pp_smu->set_voltage_by_freq)
-                       pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_PIXELCLK, clk_mgr_base->clks.dppclk_khz / 1000);
+               // Updating DPP clock requires some more logic
+               if (!safe_to_lower) {
+                       // For pre-programming, we need to make sure any DPP clock that will go up has to go up
 
-               update_dppclk = true;
-       }
+                       // First raise the global reference if needed
+                       if (new_clocks->dppclk_khz > clk_mgr_base->clks.dppclk_khz)
+                               request_voltage_and_program_global_dpp_clk(clk_mgr_base, new_clocks->dppclk_khz);
 
-       if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, clk_mgr_base->clks.dispclk_khz)) {
-               clk_mgr_base->clks.dispclk_khz = new_clocks->dispclk_khz;
-               if (pp_smu && pp_smu->set_voltage_by_freq)
-                       pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_DISPCLK, clk_mgr_base->clks.dispclk_khz / 1000);
+                       // Then raise any dividers that need raising
+                       for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) {
+                               int dpp_inst, dppclk_khz;
 
-               update_dispclk = true;
-       }
-       if (dc->config.forced_clocks == false || (force_reset && safe_to_lower)) {
-               if (dpp_clock_lowered) {
-                       // if clock is being lowered, increase DTO before lowering refclk
-                       dcn20_update_clocks_update_dpp_dto(clk_mgr, context);
-                       dcn20_update_clocks_update_dentist(clk_mgr);
+                               if (!context->res_ctx.pipe_ctx[i].plane_state)
+                                       continue;
+
+                               dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst;
+                               dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz;
+
+                               clk_mgr->dccg->funcs->update_dpp_dto(clk_mgr->dccg, dpp_inst, dppclk_khz, true);
+                       }
                } else {
-                       // if clock is being raised, increase refclk before lowering DTO
-                       if (update_dppclk || update_dispclk)
-                               dcn20_update_clocks_update_dentist(clk_mgr);
-                       if (update_dppclk)
-                               dcn20_update_clocks_update_dpp_dto(clk_mgr, context);
+                       // For post-programming, we can lower ref clk if needed, and unconditionally set all the DTOs
+
+                       if (new_clocks->dppclk_khz < clk_mgr_base->clks.dppclk_khz)
+                               request_voltage_and_program_global_dpp_clk(clk_mgr_base, new_clocks->dppclk_khz);
+
+                       for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) {
+                               int dpp_inst, dppclk_khz;
+
+                               if (!context->res_ctx.pipe_ctx[i].plane_state)
+                                       continue;
+
+                               dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst;
+                               dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz;
+
+                               clk_mgr->dccg->funcs->update_dpp_dto(clk_mgr->dccg, dpp_inst, dppclk_khz, false);
+                       }
                }
        }
        if (update_dispclk &&
drivers/gpu/drm/amd/display/dc/core/dc.c
index 21fb7ee..f61eb29 100644
@@ -1603,6 +1603,9 @@ enum surface_update_type dc_check_update_surfaces_for_stream(
                for (i = 0; i < surface_count; i++)
                        updates[i].surface->update_flags.raw = 0xFFFFFFFF;
 
+       if (type == UPDATE_TYPE_FAST && memcmp(&dc->current_state->bw_ctx.bw.dcn.clk, &dc->clk_mgr->clks, offsetof(struct dc_clocks, prev_p_state_change_support)) != 0)
+               dc->optimized_required = true;
+
        return type;
 }
 
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c
index 04399c5..51857ca 100644
@@ -2155,7 +2155,8 @@ void update_dchubp_dpp(
                        dc->res_pool->dccg->funcs->update_dpp_dto(
                                        dc->res_pool->dccg,
                                        dpp->inst,
-                                       pipe_ctx->plane_res.bw.dppclk_khz);
+                                       pipe_ctx->plane_res.bw.dppclk_khz,
+                                       false);
                else
                        dc->clk_mgr->clks.dppclk_khz = should_divided_by_2 ?
                                                dc->clk_mgr->clks.dispclk_khz / 2 :
drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dccg.c
index 31aa6ee..16476ed 100644
 #define DC_LOGGER \
        dccg->ctx->logger
 
-void dccg2_update_dpp_dto(struct dccg *dccg, int dpp_inst, int req_dppclk)
+void dccg2_update_dpp_dto(struct dccg *dccg,
+               int dpp_inst,
+               int req_dppclk,
+               bool reduce_divider_only)
 {
        struct dcn_dccg *dccg_dcn = TO_DCN_DCCG(dccg);
 
        if (dccg->ref_dppclk && req_dppclk) {
                int ref_dppclk = dccg->ref_dppclk;
+               int current_phase, current_modulo;
 
                ASSERT(req_dppclk <= ref_dppclk);
                /* need to clamp to 8 bits */
@@ -61,9 +65,28 @@ void dccg2_update_dpp_dto(struct dccg *dccg, int dpp_inst, int req_dppclk)
                        if (req_dppclk > ref_dppclk)
                                req_dppclk = ref_dppclk;
                }
-               REG_SET_2(DPPCLK_DTO_PARAM[dpp_inst], 0,
-                               DPPCLK0_DTO_PHASE, req_dppclk,
-                               DPPCLK0_DTO_MODULO, ref_dppclk);
+
+               REG_GET_2(DPPCLK_DTO_PARAM[dpp_inst],
+                               DPPCLK0_DTO_PHASE, &current_phase,
+                               DPPCLK0_DTO_MODULO, &current_modulo);
+
+               if (reduce_divider_only) {
+                       // requested phase/modulo greater than current
+                       if (req_dppclk * current_modulo >= current_phase * ref_dppclk) {
+                               REG_SET_2(DPPCLK_DTO_PARAM[dpp_inst], 0,
+                                               DPPCLK0_DTO_PHASE, req_dppclk,
+                                               DPPCLK0_DTO_MODULO, ref_dppclk);
+                       } else {
+                               REG_SET_2(DPPCLK_DTO_PARAM[dpp_inst], 0,
+                                               DPPCLK0_DTO_PHASE, current_phase,
+                                               DPPCLK0_DTO_MODULO, current_modulo);
+                       }
+               } else {
+                       REG_SET_2(DPPCLK_DTO_PARAM[dpp_inst], 0,
+                                       DPPCLK0_DTO_PHASE, req_dppclk,
+                                       DPPCLK0_DTO_MODULO, ref_dppclk);
+               }
+
                REG_UPDATE(DPPCLK_DTO_CTRL,
                                DPPCLK_DTO_ENABLE[dpp_inst], 1);
        } else {
drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dccg.h
index 2205cb0..74a074a 100644
@@ -97,7 +97,7 @@ struct dcn_dccg {
        const struct dccg_mask *dccg_mask;
 };
 
-void dccg2_update_dpp_dto(struct dccg *dccg, int dpp_inst, int req_dppclk);
+void dccg2_update_dpp_dto(struct dccg *dccg, int dpp_inst, int req_dppclk, bool raise_divider_only);
 
 void dccg2_get_dccg_ref_freq(struct dccg *dccg,
                unsigned int xtalin_freq_inKhz,
drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c
index 6dc4bf9..a6715a4 100644
@@ -2482,7 +2482,7 @@ void dcn20_calculate_dlg_params(
        context->bw_ctx.bw.dcn.clk.socclk_khz = context->bw_ctx.dml.vba.SOCCLK * 1000;
        context->bw_ctx.bw.dcn.clk.dramclk_khz = context->bw_ctx.dml.vba.DRAMSpeed * 1000 / 16;
        context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz = context->bw_ctx.dml.vba.DCFCLKDeepSleep * 1000;
-       context->bw_ctx.bw.dcn.clk.fclk_khz = context->bw_ctx.dml.vba.FabricClock * 1000;
+       context->bw_ctx.bw.dcn.clk.fclk_khz = 0;
        context->bw_ctx.bw.dcn.clk.p_state_change_support =
                context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][context->bw_ctx.dml.vba.maxMpcComb]
                                                        != dm_dram_clock_change_unsupported;
drivers/gpu/drm/amd/display/dc/inc/core_types.h
index a148ffd..1d66c4b 100644
@@ -228,7 +228,6 @@ struct resource_pool {
 
 struct dcn_fe_bandwidth {
        int dppclk_khz;
-
 };
 
 struct stream_resource {
drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr_internal.h
index 213046d..7dd46eb 100644
@@ -281,8 +281,14 @@ static inline bool should_set_clock(bool safe_to_lower, int calc_clk, int cur_cl
 
 static inline bool should_update_pstate_support(bool safe_to_lower, bool calc_support, bool cur_support)
 {
-       // Whenever we are transitioning pstate support, we always want to notify prior to committing state
-       return (calc_support != cur_support) ? !safe_to_lower : false;
+       if (cur_support != calc_support) {
+               if (calc_support == true && safe_to_lower)
+                       return true;
+               else if (calc_support == false && !safe_to_lower)
+                       return true;
+       }
+
+       return false;
 }
 
 int clk_mgr_helper_get_active_display_cnt(
drivers/gpu/drm/amd/display/dc/inc/hw/dccg.h
index 05ee529..d8e744f 100644
@@ -38,7 +38,8 @@ struct dccg {
 struct dccg_funcs {
        void (*update_dpp_dto)(struct dccg *dccg,
                        int dpp_inst,
-                       int req_dppclk);
+                       int req_dppclk,
+                       bool reduce_divider_only);
        void (*get_dccg_ref_freq)(struct dccg *dccg,
                        unsigned int xtalin_freq_inKhz,
                        unsigned int *dccg_ref_freq_inKhz);