Merge tag 'sched-urgent-2024-03-24' of git://git.kernel.org/pub/scm/linux/kernel...
[sfrench/cifs-2.6.git] / drivers / gpu / drm / amd / display / dc / clk_mgr / dcn32 / dcn32_clk_mgr.c
1 /*
2  * Copyright 2021 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: AMD
23  *
24  */
25
26 #include "dccg.h"
27 #include "clk_mgr_internal.h"
28 #include "dcn32/dcn32_clk_mgr_smu_msg.h"
29 #include "dcn20/dcn20_clk_mgr.h"
30 #include "dce100/dce_clk_mgr.h"
31 #include "dcn31/dcn31_clk_mgr.h"
32 #include "reg_helper.h"
33 #include "core_types.h"
34 #include "dm_helpers.h"
35 #include "link.h"
36 #include "dc_state_priv.h"
37 #include "atomfirmware.h"
38 #include "smu13_driver_if.h"
39
40 #include "dcn/dcn_3_2_0_offset.h"
41 #include "dcn/dcn_3_2_0_sh_mask.h"
42
43 #include "dcn32/dcn32_clk_mgr.h"
44 #include "dml/dcn32/dcn32_fpu.h"
45
46 #define DCN_BASE__INST0_SEG1                       0x000000C0
47
48 #define mmCLK1_CLK_PLL_REQ                              0x16E37
49 #define mmCLK1_CLK0_DFS_CNTL                            0x16E69
50 #define mmCLK1_CLK1_DFS_CNTL                            0x16E6C
51 #define mmCLK1_CLK2_DFS_CNTL                            0x16E6F
52 #define mmCLK1_CLK3_DFS_CNTL                            0x16E72
53 #define mmCLK1_CLK4_DFS_CNTL                            0x16E75
54
55 #define mmCLK1_CLK0_CURRENT_CNT                         0x16EE7
56 #define mmCLK1_CLK1_CURRENT_CNT                         0x16EE8
57 #define mmCLK1_CLK2_CURRENT_CNT                         0x16EE9
58 #define mmCLK1_CLK3_CURRENT_CNT                         0x16EEA
59 #define mmCLK1_CLK4_CURRENT_CNT                         0x16EEB
60
61 #define mmCLK4_CLK0_CURRENT_CNT                         0x1B0C9
62
63 #define CLK1_CLK_PLL_REQ__FbMult_int_MASK               0x000001ffUL
64 #define CLK1_CLK_PLL_REQ__PllSpineDiv_MASK              0x0000f000UL
65 #define CLK1_CLK_PLL_REQ__FbMult_frac_MASK              0xffff0000UL
66 #define CLK1_CLK_PLL_REQ__FbMult_int__SHIFT             0x00000000
67 #define CLK1_CLK_PLL_REQ__PllSpineDiv__SHIFT            0x0000000c
68 #define CLK1_CLK_PLL_REQ__FbMult_frac__SHIFT            0x00000010
69
70 #define mmCLK01_CLK0_CLK_PLL_REQ                        0x16E37
71 #define mmCLK01_CLK0_CLK0_DFS_CNTL                      0x16E64
72 #define mmCLK01_CLK0_CLK1_DFS_CNTL                      0x16E67
73 #define mmCLK01_CLK0_CLK2_DFS_CNTL                      0x16E6A
74 #define mmCLK01_CLK0_CLK3_DFS_CNTL                      0x16E6D
75 #define mmCLK01_CLK0_CLK4_DFS_CNTL                      0x16E70
76
77 #define CLK0_CLK_PLL_REQ__FbMult_int_MASK               0x000001ffL
78 #define CLK0_CLK_PLL_REQ__PllSpineDiv_MASK              0x0000f000L
79 #define CLK0_CLK_PLL_REQ__FbMult_frac_MASK              0xffff0000L
80 #define CLK0_CLK_PLL_REQ__FbMult_int__SHIFT             0x00000000
81 #define CLK0_CLK_PLL_REQ__PllSpineDiv__SHIFT            0x0000000c
82 #define CLK0_CLK_PLL_REQ__FbMult_frac__SHIFT            0x00000010
83
84 #undef FN
85 #define FN(reg_name, field_name) \
86         clk_mgr->clk_mgr_shift->field_name, clk_mgr->clk_mgr_mask->field_name
87
88 #define REG(reg) \
89         (clk_mgr->regs->reg)
90
91 #define BASE_INNER(seg) DCN_BASE__INST0_SEG ## seg
92
93 #define BASE(seg) BASE_INNER(seg)
94
95 #define SR(reg_name)\
96                 .reg_name = BASE(reg ## reg_name ## _BASE_IDX) +  \
97                                         reg ## reg_name
98
99 #define CLK_SR_DCN32(reg_name)\
100         .reg_name = mm ## reg_name
101
102 static const struct clk_mgr_registers clk_mgr_regs_dcn32 = {
103         CLK_REG_LIST_DCN32()
104 };
105
106 static const struct clk_mgr_shift clk_mgr_shift_dcn32 = {
107         CLK_COMMON_MASK_SH_LIST_DCN32(__SHIFT)
108 };
109
110 static const struct clk_mgr_mask clk_mgr_mask_dcn32 = {
111         CLK_COMMON_MASK_SH_LIST_DCN32(_MASK)
112 };
113
114
115 #define CLK_SR_DCN321(reg_name, block, inst)\
116         .reg_name = mm ## block ## _ ## reg_name
117
118 static const struct clk_mgr_registers clk_mgr_regs_dcn321 = {
119         CLK_REG_LIST_DCN321()
120 };
121
122 static const struct clk_mgr_shift clk_mgr_shift_dcn321 = {
123         CLK_COMMON_MASK_SH_LIST_DCN321(__SHIFT)
124 };
125
126 static const struct clk_mgr_mask clk_mgr_mask_dcn321 = {
127         CLK_COMMON_MASK_SH_LIST_DCN321(_MASK)
128 };
129
130
131 /* Query SMU for all clock states for a particular clock */
132 static void dcn32_init_single_clock(struct clk_mgr_internal *clk_mgr, PPCLK_e clk, unsigned int *entry_0,
133                 unsigned int *num_levels)
134 {
135         unsigned int i;
136         char *entry_i = (char *)entry_0;
137
138         uint32_t ret = dcn30_smu_get_dpm_freq_by_index(clk_mgr, clk, 0xFF);
139
140         if (ret & (1 << 31))
141                 /* fine-grained, only min and max */
142                 *num_levels = 2;
143         else
144                 /* discrete, a number of fixed states */
145                 /* will set num_levels to 0 on failure */
146                 *num_levels = ret & 0xFF;
147
148         /* if the initial message failed, num_levels will be 0 */
149         for (i = 0; i < *num_levels; i++) {
150                 *((unsigned int *)entry_i) = (dcn30_smu_get_dpm_freq_by_index(clk_mgr, clk, i) & 0xFFFF);
151                 entry_i += sizeof(clk_mgr->base.bw_params->clk_table.entries[0]);
152         }
153 }
154
/* Build the watermark range table for this clk_mgr.
 *
 * The actual table construction lives in the DML FPU helper; the
 * DC_FP_START/DC_FP_END pair brackets kernel-mode FPU usage and must
 * enclose the call.
 */
static void dcn32_build_wm_range_table(struct clk_mgr_internal *clk_mgr)
{
	DC_FP_START();
	dcn32_build_wm_range_table_fpu(clk_mgr);
	DC_FP_END();
}
161
/* Initialize the DCN3.2 clock manager state from SMU-reported DPM tables.
 *
 * Resets the cached clock state, probes for SMU presence, then queries the
 * per-clock DPM levels (DCFCLK, SOCCLK, DTBCLK, DISPCLK, DPPCLK) and the
 * DC-mode maximums into bw_params. Applies debug min-clock floors and the
 * 1950 MHz DISPCLK/DPPCLK hardware ceiling, pulls UCLK states, and builds
 * the watermark range table. Returns early (leaving smu_present false) if
 * bw_params is missing or the SMU cannot be reached.
 */
void dcn32_init_clocks(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
	unsigned int num_levels;
	struct clk_limit_num_entries *num_entries_per_clk = &clk_mgr_base->bw_params->clk_table.num_entries_per_clk;
	unsigned int i;

	/* Start from a clean slate; p-state support defaults to true until
	 * update_clocks learns otherwise.
	 */
	memset(&(clk_mgr_base->clks), 0, sizeof(struct dc_clocks));
	clk_mgr_base->clks.p_state_change_support = true;
	clk_mgr_base->clks.prev_p_state_change_support = true;
	clk_mgr_base->clks.fclk_prev_p_state_change_support = true;
	clk_mgr->smu_present = false;
	clk_mgr->dpm_present = false;

	if (!clk_mgr_base->bw_params)
		return;

	/* A successful version query doubles as the SMU presence check. */
	if (!clk_mgr_base->force_smu_not_present && dcn30_smu_get_smu_version(clk_mgr, &clk_mgr->smu_ver))
		clk_mgr->smu_present = true;

	if (!clk_mgr->smu_present)
		return;

	dcn30_smu_check_driver_if_version(clk_mgr);
	dcn30_smu_check_msg_header_version(clk_mgr);

	/* DCFCLK */
	dcn32_init_single_clock(clk_mgr, PPCLK_DCFCLK,
			&clk_mgr_base->bw_params->clk_table.entries[0].dcfclk_mhz,
			&num_entries_per_clk->num_dcfclk_levels);
	clk_mgr_base->bw_params->dc_mode_limit.dcfclk_mhz = dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_DCFCLK);

	/* SOCCLK */
	dcn32_init_single_clock(clk_mgr, PPCLK_SOCCLK,
					&clk_mgr_base->bw_params->clk_table.entries[0].socclk_mhz,
					&num_entries_per_clk->num_socclk_levels);
	clk_mgr_base->bw_params->dc_mode_limit.socclk_mhz = dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_SOCCLK);

	/* DTBCLK */
	if (!clk_mgr->base.ctx->dc->debug.disable_dtb_ref_clk_switch) {
		dcn32_init_single_clock(clk_mgr, PPCLK_DTBCLK,
				&clk_mgr_base->bw_params->clk_table.entries[0].dtbclk_mhz,
				&num_entries_per_clk->num_dtbclk_levels);
		clk_mgr_base->bw_params->dc_mode_limit.dtbclk_mhz =
				dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_DTBCLK);
	}

	/* DISPCLK */
	dcn32_init_single_clock(clk_mgr, PPCLK_DISPCLK,
			&clk_mgr_base->bw_params->clk_table.entries[0].dispclk_mhz,
			&num_entries_per_clk->num_dispclk_levels);
	num_levels = num_entries_per_clk->num_dispclk_levels;
	clk_mgr_base->bw_params->dc_mode_limit.dispclk_mhz = dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_DISPCLK);
	//HW recommends limit of 1950 MHz in display clock for all DCN3.2.x
	if (clk_mgr_base->bw_params->dc_mode_limit.dispclk_mhz > 1950)
		clk_mgr_base->bw_params->dc_mode_limit.dispclk_mhz = 1950;

	/* DPPCLK */
	dcn32_init_single_clock(clk_mgr, PPCLK_DPPCLK,
			&clk_mgr_base->bw_params->clk_table.entries[0].dppclk_mhz,
			&num_entries_per_clk->num_dppclk_levels);
	/* NOTE(review): this overwrites the DISPCLK level count stored above,
	 * so the DISPCLK floor/ceiling loops below iterate over the DPPCLK
	 * level count — verify this is intentional (it is harmless only if
	 * the two counts match).
	 */
	num_levels = num_entries_per_clk->num_dppclk_levels;
	clk_mgr_base->bw_params->dc_mode_limit.dppclk_mhz = dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_DPPCLK);
	//HW recommends limit of 1950 MHz in display clock for all DCN3.2.x
	if (clk_mgr_base->bw_params->dc_mode_limit.dppclk_mhz > 1950)
		clk_mgr_base->bw_params->dc_mode_limit.dppclk_mhz = 1950;

	/* DPM is usable only if all three key clocks reported levels. */
	if (num_entries_per_clk->num_dcfclk_levels &&
			num_entries_per_clk->num_dtbclk_levels &&
			num_entries_per_clk->num_dispclk_levels)
		clk_mgr->dpm_present = true;

	/* Apply the debug-option floor to every DISPCLK level. */
	if (clk_mgr_base->ctx->dc->debug.min_disp_clk_khz) {
		for (i = 0; i < num_levels; i++)
			if (clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz
					< khz_to_mhz_ceil(clk_mgr_base->ctx->dc->debug.min_disp_clk_khz))
				clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz
					= khz_to_mhz_ceil(clk_mgr_base->ctx->dc->debug.min_disp_clk_khz);
	}
	/* Clamp every DISPCLK level to the 1950 MHz HW recommendation. */
	for (i = 0; i < num_levels; i++)
		if (clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz > 1950)
			clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz = 1950;

	/* Apply the debug-option floor to every DPPCLK level. */
	if (clk_mgr_base->ctx->dc->debug.min_dpp_clk_khz) {
		for (i = 0; i < num_levels; i++)
			if (clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz
					< khz_to_mhz_ceil(clk_mgr_base->ctx->dc->debug.min_dpp_clk_khz))
				clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz
					= khz_to_mhz_ceil(clk_mgr_base->ctx->dc->debug.min_dpp_clk_khz);
	}

	/* Clamp every DPPCLK level to the 1950 MHz HW recommendation. */
	for (i = 0; i < num_levels; i++)
		if (clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz > 1950)
			clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz = 1950;

	/* Get UCLK, update bounding box */
	clk_mgr_base->funcs->get_memclk_states_from_smu(clk_mgr_base);

	/* WM range table */
	dcn32_build_wm_range_table(clk_mgr);
}
263
264 static void dcn32_update_clocks_update_dtb_dto(struct clk_mgr_internal *clk_mgr,
265                         struct dc_state *context,
266                         int ref_dtbclk_khz)
267 {
268         struct dccg *dccg = clk_mgr->dccg;
269         uint32_t tg_mask = 0;
270         int i;
271
272         for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) {
273                 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
274                 struct dtbclk_dto_params dto_params = {0};
275
276                 /* use mask to program DTO once per tg */
277                 if (pipe_ctx->stream_res.tg &&
278                                 !(tg_mask & (1 << pipe_ctx->stream_res.tg->inst))) {
279                         tg_mask |= (1 << pipe_ctx->stream_res.tg->inst);
280
281                         dto_params.otg_inst = pipe_ctx->stream_res.tg->inst;
282                         dto_params.ref_dtbclk_khz = ref_dtbclk_khz;
283
284                         dccg->funcs->set_dtbclk_dto(clk_mgr->dccg, &dto_params);
285                         //dccg->funcs->set_audio_dtbclk_dto(clk_mgr->dccg, &dto_params);
286                 }
287         }
288 }
289
290 /* Since DPPCLK request to PMFW needs to be exact (due to DPP DTO programming),
291  * update DPPCLK to be the exact frequency that will be set after the DPPCLK
292  * divider is updated. This will prevent rounding issues that could cause DPP
293  * refclk and DPP DTO to not match up.
294  */
295 static void dcn32_update_dppclk_dispclk_freq(struct clk_mgr_internal *clk_mgr, struct dc_clocks *new_clocks)
296 {
297         int dpp_divider = 0;
298         int disp_divider = 0;
299
300         if (new_clocks->dppclk_khz) {
301                 dpp_divider = DENTIST_DIVIDER_RANGE_SCALE_FACTOR
302                                 * clk_mgr->base.dentist_vco_freq_khz / new_clocks->dppclk_khz;
303                 new_clocks->dppclk_khz = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR * clk_mgr->base.dentist_vco_freq_khz) / dpp_divider;
304         }
305         if (new_clocks->dispclk_khz > 0) {
306                 disp_divider = DENTIST_DIVIDER_RANGE_SCALE_FACTOR
307                                 * clk_mgr->base.dentist_vco_freq_khz / new_clocks->dispclk_khz;
308                 new_clocks->dispclk_khz = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR * clk_mgr->base.dentist_vco_freq_khz) / disp_divider;
309         }
310 }
311
312 void dcn32_update_clocks_update_dpp_dto(struct clk_mgr_internal *clk_mgr,
313                 struct dc_state *context, bool safe_to_lower)
314 {
315         int i;
316
317         clk_mgr->dccg->ref_dppclk = clk_mgr->base.clks.dppclk_khz;
318         for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) {
319                 int dpp_inst = 0, dppclk_khz, prev_dppclk_khz;
320
321                 dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz;
322
323                 if (context->res_ctx.pipe_ctx[i].plane_res.dpp)
324                         dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst;
325                 else if (!context->res_ctx.pipe_ctx[i].plane_res.dpp && dppclk_khz == 0) {
326                         /* dpp == NULL && dppclk_khz == 0 is valid because of pipe harvesting.
327                          * In this case just continue in loop
328                          */
329                         continue;
330                 } else if (!context->res_ctx.pipe_ctx[i].plane_res.dpp && dppclk_khz > 0) {
331                         /* The software state is not valid if dpp resource is NULL and
332                          * dppclk_khz > 0.
333                          */
334                         ASSERT(false);
335                         continue;
336                 }
337
338                 prev_dppclk_khz = clk_mgr->dccg->pipe_dppclk_khz[i];
339
340                 if (safe_to_lower || prev_dppclk_khz < dppclk_khz)
341                         clk_mgr->dccg->funcs->update_dpp_dto(
342                                                         clk_mgr->dccg, dpp_inst, dppclk_khz);
343         }
344 }
345
/* Program the DENTIST DISPCLK divider for the currently requested DISPCLK.
 *
 * Transitions to or from write-divider 127 need a pixel-FIFO workaround to
 * prevent corruption: leaving 127 first drains excess pixels from each OTG
 * master's FIFO, and entering 127 first steps through divider 126 (with a
 * matching SMU hard-min request) and tops the FIFOs back up. The final SMU
 * hard-min and (optionally, under override_dispclk_programming) direct
 * register programming then apply the requested divider.
 *
 * NOTE(review): the exact drop/add counts (N - 4 drops, 12 - N adds, with
 * N = fifo_level / 4) follow a hardware-specific recipe; confirm against the
 * DCN3.2 programming guide before altering.
 */
static void dcn32_update_clocks_update_dentist(
		struct clk_mgr_internal *clk_mgr,
		struct dc_state *context)
{
	uint32_t new_disp_divider = 0;
	uint32_t new_dispclk_wdivider = 0;
	uint32_t old_dispclk_wdivider = 0;
	uint32_t i;
	uint32_t dentist_dispclk_wdivider_readback = 0;
	struct dc *dc = clk_mgr->base.ctx->dc;

	/* Nothing to program without a requested DISPCLK. */
	if (clk_mgr->base.clks.dispclk_khz == 0)
		return;

	new_disp_divider = DENTIST_DIVIDER_RANGE_SCALE_FACTOR
			* clk_mgr->base.dentist_vco_freq_khz / clk_mgr->base.clks.dispclk_khz;

	new_dispclk_wdivider = dentist_get_did_from_divider(new_disp_divider);
	REG_GET(DENTIST_DISPCLK_CNTL,
			DENTIST_DISPCLK_WDIVIDER, &old_dispclk_wdivider);

	/* When changing divider to or from 127, some extra programming is required to prevent corruption */
	if (old_dispclk_wdivider == 127 && new_dispclk_wdivider != 127) {
		/* Leaving divider 127: drop surplus pixels from each OTG
		 * master's FIFO with error detection temporarily overridden.
		 */
		for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) {
			struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
			uint32_t fifo_level;
			struct dccg *dccg = clk_mgr->base.ctx->dc->res_pool->dccg;
			struct stream_encoder *stream_enc = pipe_ctx->stream_res.stream_enc;
			int32_t N;
			int32_t j;

			if (!resource_is_pipe_type(pipe_ctx, OTG_MASTER))
				continue;
			/* Virtual encoders don't have this function */
			if (!stream_enc->funcs->get_fifo_cal_average_level)
				continue;
			fifo_level = stream_enc->funcs->get_fifo_cal_average_level(
					stream_enc);
			N = fifo_level / 4;
			dccg->funcs->set_fifo_errdet_ovr_en(
					dccg,
					true);
			for (j = 0; j < N - 4; j++)
				dccg->funcs->otg_drop_pixel(
						dccg,
						pipe_ctx->stream_res.tg->inst);
			dccg->funcs->set_fifo_errdet_ovr_en(
					dccg,
					false);
		}
	} else if (new_dispclk_wdivider == 127 && old_dispclk_wdivider != 127) {
		/* request clock with 126 divider first */
		uint32_t temp_disp_divider = dentist_get_divider_from_did(126);
		uint32_t temp_dispclk_khz = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR * clk_mgr->base.dentist_vco_freq_khz) / temp_disp_divider;

		if (clk_mgr->smu_present)
			/*
			 * SMU uses discrete dispclk presets. We applied
			 * the same formula to increase our dppclk_khz
			 * to the next matching discrete value. By
			 * contract, we should use the preset dispclk
			 * floored in Mhz to describe the intended clock.
			 */
			dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_DISPCLK,
					khz_to_mhz_floor(temp_dispclk_khz));

		if (dc->debug.override_dispclk_programming) {
			/* Force the intermediate divider 126 into the register
			 * and wait for the change to complete.
			 */
			REG_GET(DENTIST_DISPCLK_CNTL,
					DENTIST_DISPCLK_WDIVIDER, &dentist_dispclk_wdivider_readback);

			if (dentist_dispclk_wdivider_readback != 126) {
				REG_UPDATE(DENTIST_DISPCLK_CNTL,
						DENTIST_DISPCLK_WDIVIDER, 126);
				REG_WAIT(DENTIST_DISPCLK_CNTL, DENTIST_DISPCLK_CHG_DONE, 1, 50, 2000);
			}
		}

		/* Entering divider 127: add pixels to refill each OTG
		 * master's FIFO with error detection temporarily overridden.
		 */
		for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) {
			struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
			struct dccg *dccg = clk_mgr->base.ctx->dc->res_pool->dccg;
			struct stream_encoder *stream_enc = pipe_ctx->stream_res.stream_enc;
			uint32_t fifo_level;
			int32_t N;
			int32_t j;

			if (!resource_is_pipe_type(pipe_ctx, OTG_MASTER))
				continue;
			/* Virtual encoders don't have this function */
			if (!stream_enc->funcs->get_fifo_cal_average_level)
				continue;
			fifo_level = stream_enc->funcs->get_fifo_cal_average_level(
					stream_enc);
			N = fifo_level / 4;
			dccg->funcs->set_fifo_errdet_ovr_en(dccg, true);
			for (j = 0; j < 12 - N; j++)
				dccg->funcs->otg_add_pixel(dccg,
						pipe_ctx->stream_res.tg->inst);
			dccg->funcs->set_fifo_errdet_ovr_en(dccg, false);
		}
	}

	/* do requested DISPCLK updates*/
	if (clk_mgr->smu_present)
		/*
		 * SMU uses discrete dispclk presets. We applied
		 * the same formula to increase our dppclk_khz
		 * to the next matching discrete value. By
		 * contract, we should use the preset dispclk
		 * floored in Mhz to describe the intended clock.
		 */
		dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_DISPCLK,
				khz_to_mhz_floor(clk_mgr->base.clks.dispclk_khz));

	if (dc->debug.override_dispclk_programming) {
		/* Only write the divider when the readback is higher than the
		 * target (i.e. the clock would otherwise stay too low).
		 */
		REG_GET(DENTIST_DISPCLK_CNTL,
				DENTIST_DISPCLK_WDIVIDER, &dentist_dispclk_wdivider_readback);

		if (dentist_dispclk_wdivider_readback > new_dispclk_wdivider) {
			REG_UPDATE(DENTIST_DISPCLK_CNTL,
					DENTIST_DISPCLK_WDIVIDER, new_dispclk_wdivider);
			REG_WAIT(DENTIST_DISPCLK_CNTL, DENTIST_DISPCLK_CHG_DONE, 1, 50, 2000);
		}
	}

}
471
472 static int dcn32_get_dispclk_from_dentist(struct clk_mgr *clk_mgr_base)
473 {
474         struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
475         uint32_t dispclk_wdivider;
476         int disp_divider;
477
478         REG_GET(DENTIST_DISPCLK_CNTL, DENTIST_DISPCLK_WDIVIDER, &dispclk_wdivider);
479         disp_divider = dentist_get_divider_from_did(dispclk_wdivider);
480
481         /* Return DISPCLK freq in Khz */
482         if (disp_divider)
483                 return (DENTIST_DIVIDER_RANGE_SCALE_FACTOR * clk_mgr->base.dentist_vco_freq_khz) / disp_divider;
484
485         return 0;
486 }
487
488 static bool dcn32_check_native_scaling(struct pipe_ctx *pipe)
489 {
490         bool is_native_scaling = false;
491         int width = pipe->plane_state->src_rect.width;
492         int height = pipe->plane_state->src_rect.height;
493
494         if (pipe->stream->timing.h_addressable == width &&
495                         pipe->stream->timing.v_addressable == height &&
496                         pipe->plane_state->dst_rect.width == width &&
497                         pipe->plane_state->dst_rect.height == height)
498                 is_native_scaling = true;
499
500         return is_native_scaling;
501 }
502
503 static void dcn32_auto_dpm_test_log(
504                 struct dc_clocks *new_clocks,
505                 struct clk_mgr_internal *clk_mgr,
506                 struct dc_state *context)
507 {
508         unsigned int dispclk_khz_reg, dppclk_khz_reg, dprefclk_khz_reg, dcfclk_khz_reg, dtbclk_khz_reg,
509                                  fclk_khz_reg, mall_ss_size_bytes;
510         int dramclk_khz_override, fclk_khz_override, num_fclk_levels;
511
512         struct pipe_ctx *pipe_ctx_list[MAX_PIPES];
513         int active_pipe_count = 0;
514
515         for (int i = 0; i < MAX_PIPES; i++) {
516                 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
517
518                 if (pipe_ctx->stream && dc_state_get_pipe_subvp_type(context, pipe_ctx) != SUBVP_PHANTOM) {
519                         pipe_ctx_list[active_pipe_count] = pipe_ctx;
520                         active_pipe_count++;
521                 }
522         }
523
524         msleep(5);
525
526         mall_ss_size_bytes = context->bw_ctx.bw.dcn.mall_ss_size_bytes;
527
528     dispclk_khz_reg    = REG_READ(CLK1_CLK0_CURRENT_CNT); // DISPCLK
529     dppclk_khz_reg     = REG_READ(CLK1_CLK1_CURRENT_CNT); // DPPCLK
530     dprefclk_khz_reg   = REG_READ(CLK1_CLK2_CURRENT_CNT); // DPREFCLK
531     dcfclk_khz_reg     = REG_READ(CLK1_CLK3_CURRENT_CNT); // DCFCLK
532     dtbclk_khz_reg     = REG_READ(CLK1_CLK4_CURRENT_CNT); // DTBCLK
533     fclk_khz_reg       = REG_READ(CLK4_CLK0_CURRENT_CNT); // FCLK
534
535     // Overrides for these clocks in case there is no p_state change support
536     dramclk_khz_override = new_clocks->dramclk_khz;
537     fclk_khz_override = new_clocks->fclk_khz;
538
539     num_fclk_levels = clk_mgr->base.bw_params->clk_table.num_entries_per_clk.num_fclk_levels - 1;
540
541     if (!new_clocks->p_state_change_support) {
542             dramclk_khz_override = clk_mgr->base.bw_params->max_memclk_mhz * 1000;
543     }
544     if (!new_clocks->fclk_p_state_change_support) {
545             fclk_khz_override = clk_mgr->base.bw_params->clk_table.entries[num_fclk_levels].fclk_mhz * 1000;
546     }
547
548         ////////////////////////////////////////////////////////////////////////////
549         //      IMPORTANT:      When adding more clocks to these logs, do NOT put a newline
550         //                              anywhere other than at the very end of the string.
551         //
552         //      Formatting example (make sure to have " - " between each entry):
553         //
554         //                              AutoDPMTest: clk1:%d - clk2:%d - clk3:%d - clk4:%d\n"
555         ////////////////////////////////////////////////////////////////////////////
556         if (new_clocks && active_pipe_count > 0 &&
557                 new_clocks->dramclk_khz > 0 &&
558                 new_clocks->fclk_khz > 0 &&
559                 new_clocks->dcfclk_khz > 0 &&
560                 new_clocks->dppclk_khz > 0) {
561
562                 uint32_t pix_clk_list[MAX_PIPES] = {0};
563                 int p_state_list[MAX_PIPES] = {0};
564                 int disp_src_width_list[MAX_PIPES] = {0};
565                 int disp_src_height_list[MAX_PIPES] = {0};
566                 uint64_t disp_src_refresh_list[MAX_PIPES] = {0};
567                 bool is_scaled_list[MAX_PIPES] = {0};
568
569                 for (int i = 0; i < active_pipe_count; i++) {
570                         struct pipe_ctx *curr_pipe_ctx = pipe_ctx_list[i];
571                         uint64_t refresh_rate;
572
573                         pix_clk_list[i] = curr_pipe_ctx->stream->timing.pix_clk_100hz;
574                         p_state_list[i] = curr_pipe_ctx->p_state_type;
575
576                         refresh_rate = (curr_pipe_ctx->stream->timing.pix_clk_100hz * (uint64_t)100 +
577                                 curr_pipe_ctx->stream->timing.v_total * curr_pipe_ctx->stream->timing.h_total - (uint64_t)1);
578                         refresh_rate = div_u64(refresh_rate, curr_pipe_ctx->stream->timing.v_total);
579                         refresh_rate = div_u64(refresh_rate, curr_pipe_ctx->stream->timing.h_total);
580                         disp_src_refresh_list[i] = refresh_rate;
581
582                         if (curr_pipe_ctx->plane_state) {
583                                 is_scaled_list[i] = !(dcn32_check_native_scaling(curr_pipe_ctx));
584                                 disp_src_width_list[i] = curr_pipe_ctx->plane_state->src_rect.width;
585                                 disp_src_height_list[i] = curr_pipe_ctx->plane_state->src_rect.height;
586                         }
587                 }
588
589                 DC_LOG_AUTO_DPM_TEST("AutoDPMTest: dramclk:%d - fclk:%d - "
590                         "dcfclk:%d - dppclk:%d - dispclk_hw:%d - "
591                         "dppclk_hw:%d - dprefclk_hw:%d - dcfclk_hw:%d - "
592                         "dtbclk_hw:%d - fclk_hw:%d - pix_clk_0:%d - pix_clk_1:%d - "
593                         "pix_clk_2:%d - pix_clk_3:%d - mall_ss_size:%d - p_state_type_0:%d - "
594                         "p_state_type_1:%d - p_state_type_2:%d - p_state_type_3:%d - "
595                         "pix_width_0:%d - pix_height_0:%d - refresh_rate_0:%lld - is_scaled_0:%d - "
596                         "pix_width_1:%d - pix_height_1:%d - refresh_rate_1:%lld - is_scaled_1:%d - "
597                         "pix_width_2:%d - pix_height_2:%d - refresh_rate_2:%lld - is_scaled_2:%d - "
598                         "pix_width_3:%d - pix_height_3:%d - refresh_rate_3:%lld - is_scaled_3:%d - LOG_END\n",
599                         dramclk_khz_override,
600                         fclk_khz_override,
601                         new_clocks->dcfclk_khz,
602                         new_clocks->dppclk_khz,
603                         dispclk_khz_reg,
604                         dppclk_khz_reg,
605                         dprefclk_khz_reg,
606                         dcfclk_khz_reg,
607                         dtbclk_khz_reg,
608                         fclk_khz_reg,
609                         pix_clk_list[0], pix_clk_list[1], pix_clk_list[3], pix_clk_list[2],
610                         mall_ss_size_bytes,
611                         p_state_list[0], p_state_list[1], p_state_list[2], p_state_list[3],
612                         disp_src_width_list[0], disp_src_height_list[0], disp_src_refresh_list[0], is_scaled_list[0],
613                         disp_src_width_list[1], disp_src_height_list[1], disp_src_refresh_list[1], is_scaled_list[1],
614                         disp_src_width_list[2], disp_src_height_list[2], disp_src_refresh_list[2], is_scaled_list[2],
615                         disp_src_width_list[3], disp_src_height_list[3], disp_src_refresh_list[3], is_scaled_list[3]);
616         }
617 }
618
/*
 * dcn32_update_clocks - reconcile programmed clocks with the clocks computed
 * for the given DC state.
 *
 * @clk_mgr_base: clock manager instance
 * @context: validated DC state carrying the requested clock values
 * @safe_to_lower: when false only clock raises are applied; lowers are
 *                 deferred until the state transition makes them safe
 *
 * SMU-owned clocks (DCFCLK, deep-sleep DCFCLK, UCLK, FCLK P-state support,
 * CAB ways) are requested via PMFW messages; DPPCLK/DISPCLK are programmed
 * through the dentist dividers and the DPP DTOs, with DTO-vs-refclk ordering
 * chosen per the inline comments so the effective clock never overshoots.
 */
static void dcn32_update_clocks(struct clk_mgr *clk_mgr_base,
			struct dc_state *context,
			bool safe_to_lower)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
	struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
	struct dc *dc = clk_mgr_base->ctx->dc;
	int display_count;
	bool update_dppclk = false;
	bool update_dispclk = false;
	bool enter_display_off = false;
	bool dpp_clock_lowered = false;
	struct dmcu *dmcu = clk_mgr_base->ctx->dc->res_pool->dmcu;
	bool force_reset = false;
	bool update_uclk = false, update_fclk = false;
	bool p_state_change_support;
	bool fclk_p_state_change_support;

	if (clk_mgr_base->clks.dispclk_khz == 0 ||
			(dc->debug.force_clock_mode & 0x1)) {
		/* This is from resume or boot up, if forced_clock cfg option used,
		 * we bypass program dispclk and DPPCLK, but need set them for S3.
		 */
		force_reset = true;

		dcn2_read_clocks_from_hw_dentist(clk_mgr_base);

		/* Force_clock_mode 0x1:  force reset the clock even it is the same clock
		 * as long as it is in Passive level.
		 */
	}
	display_count = clk_mgr_helper_get_active_display_cnt(dc, context);

	if (display_count == 0)
		enter_display_off = true;

	/* Everything in this branch is negotiated with PMFW via SMU messages. */
	if (clk_mgr->smu_present) {
		if (enter_display_off == safe_to_lower)
			dcn30_smu_set_num_of_displays(clk_mgr, display_count);

		clk_mgr_base->clks.fclk_prev_p_state_change_support = clk_mgr_base->clks.fclk_p_state_change_support;

		fclk_p_state_change_support = new_clocks->fclk_p_state_change_support;

		if (should_update_pstate_support(safe_to_lower, fclk_p_state_change_support, clk_mgr_base->clks.fclk_p_state_change_support) &&
				!dc->work_arounds.clock_update_disable_mask.fclk) {
			clk_mgr_base->clks.fclk_p_state_change_support = fclk_p_state_change_support;

			/* To enable FCLK P-state switching, send FCLK_PSTATE_SUPPORTED message to PMFW */
			if (clk_mgr_base->ctx->dce_version != DCN_VERSION_3_21 && clk_mgr_base->clks.fclk_p_state_change_support) {
				/* Handle the code for sending a message to PMFW that FCLK P-state change is supported */
				dcn32_smu_send_fclk_pstate_message(clk_mgr, FCLK_PSTATE_SUPPORTED);
			}
		}

		/* Debug override: never request DCFCLK below the forced floor. */
		if (dc->debug.force_min_dcfclk_mhz > 0)
			new_clocks->dcfclk_khz = (new_clocks->dcfclk_khz > (dc->debug.force_min_dcfclk_mhz * 1000)) ?
					new_clocks->dcfclk_khz : (dc->debug.force_min_dcfclk_mhz * 1000);

		if (should_set_clock(safe_to_lower, new_clocks->dcfclk_khz, clk_mgr_base->clks.dcfclk_khz) &&
				!dc->work_arounds.clock_update_disable_mask.dcfclk) {
			clk_mgr_base->clks.dcfclk_khz = new_clocks->dcfclk_khz;
			dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_DCFCLK, khz_to_mhz_ceil(clk_mgr_base->clks.dcfclk_khz));
		}

		if (should_set_clock(safe_to_lower, new_clocks->dcfclk_deep_sleep_khz, clk_mgr_base->clks.dcfclk_deep_sleep_khz) &&
				!dc->work_arounds.clock_update_disable_mask.dcfclk_ds) {
			clk_mgr_base->clks.dcfclk_deep_sleep_khz = new_clocks->dcfclk_deep_sleep_khz;
			dcn30_smu_set_min_deep_sleep_dcef_clk(clk_mgr, khz_to_mhz_ceil(clk_mgr_base->clks.dcfclk_deep_sleep_khz));
		}

		if (should_set_clock(safe_to_lower, new_clocks->socclk_khz, clk_mgr_base->clks.socclk_khz))
			/* We don't actually care about socclk, don't notify SMU of hard min */
			clk_mgr_base->clks.socclk_khz = new_clocks->socclk_khz;

		clk_mgr_base->clks.prev_p_state_change_support = clk_mgr_base->clks.p_state_change_support;
		clk_mgr_base->clks.prev_num_ways = clk_mgr_base->clks.num_ways;

		/* Growing the CAB allocation is done before the UCLK change; shrinking
		 * is handled after (see the mirrored check at the end of this branch).
		 */
		if (clk_mgr_base->clks.num_ways != new_clocks->num_ways &&
				clk_mgr_base->clks.num_ways < new_clocks->num_ways) {
			clk_mgr_base->clks.num_ways = new_clocks->num_ways;
			dcn32_smu_send_cab_for_uclk_message(clk_mgr, clk_mgr_base->clks.num_ways);
		}

		p_state_change_support = new_clocks->p_state_change_support;
		if (should_update_pstate_support(safe_to_lower, p_state_change_support, clk_mgr_base->clks.p_state_change_support) &&
				!dc->work_arounds.clock_update_disable_mask.uclk) {
			clk_mgr_base->clks.p_state_change_support = p_state_change_support;

			/* to disable P-State switching, set UCLK min = max */
			if (!clk_mgr_base->clks.p_state_change_support) {
				if (dc->clk_mgr->dc_mode_softmax_enabled) {
					/* On DCN32x we will never have the functional UCLK min above the softmax
					 * since we calculate mode support based on softmax being the max UCLK
					 * frequency.
					 */
					dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK,
							dc->clk_mgr->bw_params->dc_mode_softmax_memclk);
				} else {
					dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK, dc->clk_mgr->bw_params->max_memclk_mhz);
				}
			}
		}

		if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching)
			dcn32_smu_wait_for_dmub_ack_mclk(clk_mgr, true);
		else
			dcn32_smu_wait_for_dmub_ack_mclk(clk_mgr, false);

		/* Always update saved value, even if new value not set due to P-State switching unsupported. Also check safe_to_lower for FCLK */
		if (safe_to_lower && (clk_mgr_base->clks.fclk_p_state_change_support != clk_mgr_base->clks.fclk_prev_p_state_change_support)) {
			update_fclk = true;
		}

		if (clk_mgr_base->ctx->dce_version != DCN_VERSION_3_21 && !clk_mgr_base->clks.fclk_p_state_change_support && update_fclk &&
				!dc->work_arounds.clock_update_disable_mask.fclk) {
			/* Handle code for sending a message to PMFW that FCLK P-state change is not supported */
			dcn32_smu_send_fclk_pstate_message(clk_mgr, FCLK_PSTATE_NOTSUPPORTED);
		}

		/* Always update saved value, even if new value not set due to P-State switching unsupported */
		if (should_set_clock(safe_to_lower, new_clocks->dramclk_khz, clk_mgr_base->clks.dramclk_khz) &&
				!dc->work_arounds.clock_update_disable_mask.uclk) {
			clk_mgr_base->clks.dramclk_khz = new_clocks->dramclk_khz;
			update_uclk = true;
		}

		/* set UCLK to requested value if P-State switching is supported, or to re-enable P-State switching */
		if (clk_mgr_base->clks.p_state_change_support &&
				(update_uclk || !clk_mgr_base->clks.prev_p_state_change_support) &&
				!dc->work_arounds.clock_update_disable_mask.uclk)
			dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK, khz_to_mhz_ceil(clk_mgr_base->clks.dramclk_khz));

		/* Shrink the CAB allocation only after the UCLK request above. */
		if (clk_mgr_base->clks.num_ways != new_clocks->num_ways &&
				clk_mgr_base->clks.num_ways > new_clocks->num_ways) {
			clk_mgr_base->clks.num_ways = new_clocks->num_ways;
			dcn32_smu_send_cab_for_uclk_message(clk_mgr, clk_mgr_base->clks.num_ways);
		}
	}

	dcn32_update_dppclk_dispclk_freq(clk_mgr, new_clocks);
	if (should_set_clock(safe_to_lower, new_clocks->dppclk_khz, clk_mgr_base->clks.dppclk_khz)) {
		if (clk_mgr_base->clks.dppclk_khz > new_clocks->dppclk_khz)
			dpp_clock_lowered = true;

		clk_mgr_base->clks.dppclk_khz = new_clocks->dppclk_khz;

		if (clk_mgr->smu_present && !dpp_clock_lowered)
			/*
			 * SMU uses discrete dppclk presets. We applied
			 * the same formula to increase our dppclk_khz
			 * to the next matching discrete value. By
			 * contract, we should use the preset dppclk
			 * floored in Mhz to describe the intended clock.
			 */
			dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_DPPCLK,
					khz_to_mhz_floor(clk_mgr_base->clks.dppclk_khz));

		update_dppclk = true;
	}

	if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, clk_mgr_base->clks.dispclk_khz)) {
		clk_mgr_base->clks.dispclk_khz = new_clocks->dispclk_khz;

		update_dispclk = true;
	}

	/* With DTBCLK disabled, fall back to the lowest DPM entry's reference. */
	if (!new_clocks->dtbclk_en) {
		new_clocks->ref_dtbclk_khz = clk_mgr_base->bw_params->clk_table.entries[0].dtbclk_mhz * 1000;
	}

	/* clock limits are received with MHz precision, divide by 1000 to prevent setting clocks at every call */
	if (!dc->debug.disable_dtb_ref_clk_switch &&
			should_set_clock(safe_to_lower, new_clocks->ref_dtbclk_khz / 1000, clk_mgr_base->clks.ref_dtbclk_khz / 1000)) {
		/* DCCG requires KHz precision for DTBCLK */
		clk_mgr_base->clks.ref_dtbclk_khz =
				dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_DTBCLK, khz_to_mhz_ceil(new_clocks->ref_dtbclk_khz));

		dcn32_update_clocks_update_dtb_dto(clk_mgr, context, clk_mgr_base->clks.ref_dtbclk_khz);
	}

	if (dc->config.forced_clocks == false || (force_reset && safe_to_lower)) {
		if (dpp_clock_lowered) {
			/* if clock is being lowered, increase DTO before lowering refclk */
			dcn32_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
			dcn32_update_clocks_update_dentist(clk_mgr, context);
			if (clk_mgr->smu_present)
				/*
				 * SMU uses discrete dppclk presets. We applied
				 * the same formula to increase our dppclk_khz
				 * to the next matching discrete value. By
				 * contract, we should use the preset dppclk
				 * floored in Mhz to describe the intended clock.
				 */
				dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_DPPCLK,
						khz_to_mhz_floor(clk_mgr_base->clks.dppclk_khz));
		} else {
			/* if clock is being raised, increase refclk before lowering DTO */
			if (update_dppclk || update_dispclk)
				dcn32_update_clocks_update_dentist(clk_mgr, context);
			/* There is a check inside dcn20_update_clocks_update_dpp_dto which ensures
			 * that we do not lower dto when it is not safe to lower. We do not need to
			 * compare the current and new dppclk before calling this function.
			 */
			dcn32_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
		}
	}

	if (update_dispclk && dmcu && dmcu->funcs->is_dmcu_initialized(dmcu))
		/*update dmcu for wait_loop count*/
		dmcu->funcs->set_psr_wait_loop(dmcu,
				clk_mgr_base->clks.dispclk_khz / 1000 / 7);

	if (dc->config.enable_auto_dpm_test_logs && safe_to_lower) {
	    dcn32_auto_dpm_test_log(new_clocks, clk_mgr, context);
	}
}
836
837 static uint32_t dcn32_get_vco_frequency_from_reg(struct clk_mgr_internal *clk_mgr)
838 {
839                 struct fixed31_32 pll_req;
840                 uint32_t pll_req_reg = 0;
841
842                 /* get FbMult value */
843                 if (ASICREV_IS_GC_11_0_2(clk_mgr->base.ctx->asic_id.hw_internal_rev))
844                         pll_req_reg = REG_READ(CLK0_CLK_PLL_REQ);
845                 else
846                         pll_req_reg = REG_READ(CLK1_CLK_PLL_REQ);
847
848                 /* set up a fixed-point number
849                  * this works because the int part is on the right edge of the register
850                  * and the frac part is on the left edge
851                  */
852                 pll_req = dc_fixpt_from_int(pll_req_reg & clk_mgr->clk_mgr_mask->FbMult_int);
853                 pll_req.value |= pll_req_reg & clk_mgr->clk_mgr_mask->FbMult_frac;
854
855                 /* multiply by REFCLK period */
856                 pll_req = dc_fixpt_mul_int(pll_req, clk_mgr->dfs_ref_freq_khz);
857
858                 return dc_fixpt_floor(pll_req);
859 }
860
/*
 * dcn32_dump_clk_registers - snapshot the DFS divider registers and convert
 * each DID (divider ID) into a clock frequency in kHz.
 *
 * @regs_and_bypass: output; receives dispclk/dppclk/dprefclk/dcfclk/dtbclk
 * @clk_mgr_base: clock manager whose register bank is read
 * @log_info: unused here; kept for the dump_clk_registers interface
 *
 * GC 11.0.2 parts use the CLK0 register bank, everything else CLK1.  Each
 * frequency is dentist_vco_freq_khz scaled by the divider decoded from the
 * corresponding DFS slice.
 */
static void dcn32_dump_clk_registers(struct clk_state_registers_and_bypass *regs_and_bypass,
		struct clk_mgr *clk_mgr_base, struct clk_log_info *log_info)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
	uint32_t dprefclk_did = 0;
	uint32_t dcfclk_did = 0;
	uint32_t dtbclk_did = 0;
	uint32_t dispclk_did = 0;
	uint32_t dppclk_did = 0;
	uint32_t target_div = 0;

	if (ASICREV_IS_GC_11_0_2(clk_mgr->base.ctx->asic_id.hw_internal_rev)) {
		/* DFS Slice 0 is used for DISPCLK */
		dispclk_did = REG_READ(CLK0_CLK0_DFS_CNTL);
		/* DFS Slice 1 is used for DPPCLK */
		dppclk_did = REG_READ(CLK0_CLK1_DFS_CNTL);
		/* DFS Slice 2 is used for DPREFCLK */
		dprefclk_did = REG_READ(CLK0_CLK2_DFS_CNTL);
		/* DFS Slice 3 is used for DCFCLK */
		dcfclk_did = REG_READ(CLK0_CLK3_DFS_CNTL);
		/* DFS Slice 4 is used for DTBCLK */
		dtbclk_did = REG_READ(CLK0_CLK4_DFS_CNTL);
	} else {
		/* DFS Slice 0 is used for DISPCLK */
		dispclk_did = REG_READ(CLK1_CLK0_DFS_CNTL);
		/* DFS Slice 1 is used for DPPCLK */
		dppclk_did = REG_READ(CLK1_CLK1_DFS_CNTL);
		/* DFS Slice 2 is used for DPREFCLK */
		dprefclk_did = REG_READ(CLK1_CLK2_DFS_CNTL);
		/* DFS Slice 3 is used for DCFCLK */
		dcfclk_did = REG_READ(CLK1_CLK3_DFS_CNTL);
		/* DFS Slice 4 is used for DTBCLK */
		dtbclk_did = REG_READ(CLK1_CLK4_DFS_CNTL);
	}

	/* Convert DISPCLK DFS Slice DID to divider*/
	target_div = dentist_get_divider_from_did(dispclk_did);
	//Get dispclk in khz
	regs_and_bypass->dispclk = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
			* clk_mgr->base.dentist_vco_freq_khz) / target_div;

	/* Convert DPPCLK DFS Slice DID to divider*/
	target_div = dentist_get_divider_from_did(dppclk_did);
	//Get dppclk in khz
	regs_and_bypass->dppclk = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
			* clk_mgr->base.dentist_vco_freq_khz) / target_div;

	/* Convert DPREFCLK DFS Slice DID to divider*/
	target_div = dentist_get_divider_from_did(dprefclk_did);
	//Get dprefclk in khz
	regs_and_bypass->dprefclk = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
			* clk_mgr->base.dentist_vco_freq_khz) / target_div;

	/* Convert DCFCLK DFS Slice DID to divider*/
	target_div = dentist_get_divider_from_did(dcfclk_did);
	//Get dcfclk in khz
	regs_and_bypass->dcfclk = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
			* clk_mgr->base.dentist_vco_freq_khz) / target_div;

	/* Convert DTBCLK DFS Slice DID to divider*/
	target_div = dentist_get_divider_from_did(dtbclk_did);
	//Get dtbclk in khz
	regs_and_bypass->dtbclk = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
			* clk_mgr->base.dentist_vco_freq_khz) / target_div;
}
926
927 static void dcn32_clock_read_ss_info(struct clk_mgr_internal *clk_mgr)
928 {
929         struct dc_bios *bp = clk_mgr->base.ctx->dc_bios;
930         int ss_info_num = bp->funcs->get_ss_entry_number(
931                         bp, AS_SIGNAL_TYPE_GPU_PLL);
932
933         if (ss_info_num) {
934                 struct spread_spectrum_info info = { { 0 } };
935                 enum bp_result result = bp->funcs->get_spread_spectrum_info(
936                                 bp, AS_SIGNAL_TYPE_GPU_PLL, 0, &info);
937
938                 /* SSInfo.spreadSpectrumPercentage !=0 would be sign
939                  * that SS is enabled
940                  */
941                 if (result == BP_RESULT_OK &&
942                                 info.spread_spectrum_percentage != 0) {
943                         clk_mgr->ss_on_dprefclk = true;
944                         clk_mgr->dprefclk_ss_divider = info.spread_percentage_divider;
945
946                         if (info.type.CENTER_MODE == 0) {
947                                 /* Currently for DP Reference clock we
948                                  * need only SS percentage for
949                                  * downspread
950                                  */
951                                 clk_mgr->dprefclk_ss_percentage =
952                                                 info.spread_spectrum_percentage;
953                         }
954                 }
955         }
956 }
957 static void dcn32_notify_wm_ranges(struct clk_mgr *clk_mgr_base)
958 {
959         unsigned int i;
960         struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
961         WatermarksExternal_t *table = (WatermarksExternal_t *) clk_mgr->wm_range_table;
962
963         if (!clk_mgr->smu_present)
964                 return;
965
966         if (!table)
967                 return;
968
969         memset(table, 0, sizeof(*table));
970
971         /* collect valid ranges, place in pmfw table */
972         for (i = 0; i < WM_SET_COUNT; i++)
973                 if (clk_mgr->base.bw_params->wm_table.nv_entries[i].valid) {
974                         table->Watermarks.WatermarkRow[i].WmSetting = i;
975                         table->Watermarks.WatermarkRow[i].Flags = clk_mgr->base.bw_params->wm_table.nv_entries[i].pmfw_breakdown.wm_type;
976                 }
977         dcn30_smu_set_dram_addr_high(clk_mgr, clk_mgr->wm_range_table_addr >> 32);
978         dcn30_smu_set_dram_addr_low(clk_mgr, clk_mgr->wm_range_table_addr & 0xFFFFFFFF);
979         dcn32_smu_transfer_wm_table_dram_2_smu(clk_mgr);
980 }
981
982 /* Set min memclk to minimum, either constrained by the current mode or DPM0 */
983 static void dcn32_set_hard_min_memclk(struct clk_mgr *clk_mgr_base, bool current_mode)
984 {
985         struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
986
987         if (!clk_mgr->smu_present)
988                 return;
989
990         if (current_mode) {
991                 if (clk_mgr_base->clks.p_state_change_support)
992                         dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK,
993                                         khz_to_mhz_ceil(clk_mgr_base->clks.dramclk_khz));
994                 else
995                         dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK,
996                                         clk_mgr_base->bw_params->max_memclk_mhz);
997         } else {
998                 dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK,
999                                 clk_mgr_base->bw_params->clk_table.entries[0].memclk_mhz);
1000         }
1001 }
1002
1003 /* Set max memclk to highest DPM value */
1004 static void dcn32_set_hard_max_memclk(struct clk_mgr *clk_mgr_base)
1005 {
1006         struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
1007
1008         if (!clk_mgr->smu_present)
1009                 return;
1010
1011         dcn30_smu_set_hard_max_by_freq(clk_mgr, PPCLK_UCLK, clk_mgr_base->bw_params->max_memclk_mhz);
1012 }
1013
1014 /* Get current memclk states, update bounding box */
1015 static void dcn32_get_memclk_states_from_smu(struct clk_mgr *clk_mgr_base)
1016 {
1017         struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
1018         struct clk_limit_num_entries *num_entries_per_clk = &clk_mgr_base->bw_params->clk_table.num_entries_per_clk;
1019         unsigned int num_levels;
1020
1021         if (!clk_mgr->smu_present)
1022                 return;
1023
1024         /* Refresh memclk and fclk states */
1025         dcn32_init_single_clock(clk_mgr, PPCLK_UCLK,
1026                         &clk_mgr_base->bw_params->clk_table.entries[0].memclk_mhz,
1027                         &num_entries_per_clk->num_memclk_levels);
1028         clk_mgr_base->bw_params->dc_mode_limit.memclk_mhz = dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_UCLK);
1029         clk_mgr_base->bw_params->dc_mode_softmax_memclk = clk_mgr_base->bw_params->dc_mode_limit.memclk_mhz;
1030
1031         /* memclk must have at least one level */
1032         num_entries_per_clk->num_memclk_levels = num_entries_per_clk->num_memclk_levels ? num_entries_per_clk->num_memclk_levels : 1;
1033
1034         dcn32_init_single_clock(clk_mgr, PPCLK_FCLK,
1035                         &clk_mgr_base->bw_params->clk_table.entries[0].fclk_mhz,
1036                         &num_entries_per_clk->num_fclk_levels);
1037         clk_mgr_base->bw_params->dc_mode_limit.fclk_mhz = dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_FCLK);
1038
1039         if (num_entries_per_clk->num_memclk_levels >= num_entries_per_clk->num_fclk_levels) {
1040                 num_levels = num_entries_per_clk->num_memclk_levels;
1041         } else {
1042                 num_levels = num_entries_per_clk->num_fclk_levels;
1043         }
1044         clk_mgr_base->bw_params->max_memclk_mhz =
1045                         clk_mgr_base->bw_params->clk_table.entries[num_entries_per_clk->num_memclk_levels - 1].memclk_mhz;
1046         clk_mgr_base->bw_params->clk_table.num_entries = num_levels ? num_levels : 1;
1047
1048         if (clk_mgr->dpm_present && !num_levels)
1049                 clk_mgr->dpm_present = false;
1050
1051         if (!clk_mgr->dpm_present)
1052                 dcn32_patch_dpm_table(clk_mgr_base->bw_params);
1053
1054         DC_FP_START();
1055         /* Refresh bounding box */
1056         clk_mgr_base->ctx->dc->res_pool->funcs->update_bw_bounding_box(
1057                         clk_mgr->base.ctx->dc, clk_mgr_base->bw_params);
1058         DC_FP_END();
1059 }
1060
1061 static bool dcn32_are_clock_states_equal(struct dc_clocks *a,
1062                                         struct dc_clocks *b)
1063 {
1064         if (a->dispclk_khz != b->dispclk_khz)
1065                 return false;
1066         else if (a->dppclk_khz != b->dppclk_khz)
1067                 return false;
1068         else if (a->dcfclk_khz != b->dcfclk_khz)
1069                 return false;
1070         else if (a->dcfclk_deep_sleep_khz != b->dcfclk_deep_sleep_khz)
1071                 return false;
1072         else if (a->dramclk_khz != b->dramclk_khz)
1073                 return false;
1074         else if (a->p_state_change_support != b->p_state_change_support)
1075                 return false;
1076         else if (a->fclk_p_state_change_support != b->fclk_p_state_change_support)
1077                 return false;
1078
1079         return true;
1080 }
1081
1082 static void dcn32_enable_pme_wa(struct clk_mgr *clk_mgr_base)
1083 {
1084         struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
1085
1086         if (!clk_mgr->smu_present)
1087                 return;
1088
1089         dcn32_smu_set_pme_workaround(clk_mgr);
1090 }
1091
1092 static bool dcn32_is_smu_present(struct clk_mgr *clk_mgr_base)
1093 {
1094         struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
1095         return clk_mgr->smu_present;
1096 }
1097
1098 static void dcn32_set_max_memclk(struct clk_mgr *clk_mgr_base, unsigned int memclk_mhz)
1099 {
1100         struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
1101
1102         if (!clk_mgr->smu_present)
1103                 return;
1104
1105         dcn30_smu_set_hard_max_by_freq(clk_mgr, PPCLK_UCLK, memclk_mhz);
1106 }
1107
1108 static void dcn32_set_min_memclk(struct clk_mgr *clk_mgr_base, unsigned int memclk_mhz)
1109 {
1110         struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
1111
1112         if (!clk_mgr->smu_present)
1113                 return;
1114
1115         dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK, memclk_mhz);
1116 }
1117
/* Clock-manager vtable installed by dcn32_clk_mgr_construct(); the same
 * table serves both DCN3.2 and DCN3.2.1 — per-ASIC differences are handled
 * inside the individual callbacks via ASICREV checks.
 */
static struct clk_mgr_funcs dcn32_funcs = {
		.get_dp_ref_clk_frequency = dce12_get_dp_ref_freq_khz,
		.get_dtb_ref_clk_frequency = dcn31_get_dtb_ref_freq_khz,
		.update_clocks = dcn32_update_clocks,
		.dump_clk_registers = dcn32_dump_clk_registers,
		.init_clocks = dcn32_init_clocks,
		.notify_wm_ranges = dcn32_notify_wm_ranges,
		.set_hard_min_memclk = dcn32_set_hard_min_memclk,
		.set_hard_max_memclk = dcn32_set_hard_max_memclk,
		.set_max_memclk = dcn32_set_max_memclk,
		.set_min_memclk = dcn32_set_min_memclk,
		.get_memclk_states_from_smu = dcn32_get_memclk_states_from_smu,
		.are_clock_states_equal = dcn32_are_clock_states_equal,
		.enable_pme_wa = dcn32_enable_pme_wa,
		.is_smu_present = dcn32_is_smu_present,
		.get_dispclk_from_dentist = dcn32_get_dispclk_from_dentist,
};
1135
/*
 * dcn32_clk_mgr_construct - initialize a DCN3.2/3.2.1 clock manager.
 *
 * @ctx: DC context the clock manager belongs to
 * @clk_mgr: the clk_mgr_internal instance to populate
 * @pp_smu: unused here; kept for the common construct signature
 * @dccg: display clock generator handle stored for later use
 *
 * Selects the per-ASIC register/shift/mask tables, seeds default clock
 * values, reads the DENTIST VCO frequency from hardware, snapshots the
 * boot-time clock registers, reads spread-spectrum info from VBIOS and
 * allocates the bw_params struct plus the GART-backed PMFW watermark table.
 */
void dcn32_clk_mgr_construct(
		struct dc_context *ctx,
		struct clk_mgr_internal *clk_mgr,
		struct pp_smu_funcs *pp_smu,
		struct dccg *dccg)
{
	struct clk_log_info log_info = {0};

	clk_mgr->base.ctx = ctx;
	clk_mgr->base.funcs = &dcn32_funcs;
	/* GC 11.0.2 (DCN3.2.1) has its own register layout. */
	if (ASICREV_IS_GC_11_0_2(clk_mgr->base.ctx->asic_id.hw_internal_rev)) {
		clk_mgr->regs = &clk_mgr_regs_dcn321;
		clk_mgr->clk_mgr_shift = &clk_mgr_shift_dcn321;
		clk_mgr->clk_mgr_mask = &clk_mgr_mask_dcn321;
	} else {
		clk_mgr->regs = &clk_mgr_regs_dcn32;
		clk_mgr->clk_mgr_shift = &clk_mgr_shift_dcn32;
		clk_mgr->clk_mgr_mask = &clk_mgr_mask_dcn32;
	}

	clk_mgr->dccg = dccg;
	clk_mgr->dfs_bypass_disp_clk = 0;

	/* Spread-spectrum defaults; overwritten by dcn32_clock_read_ss_info()
	 * below if the VBIOS reports SS enabled.
	 */
	clk_mgr->dprefclk_ss_percentage = 0;
	clk_mgr->dprefclk_ss_divider = 1000;
	clk_mgr->ss_on_dprefclk = false;
	clk_mgr->dfs_ref_freq_khz = 100000;

	/* Changed from DCN3.2_clock_frequency doc to match
	 * dcn32_dump_clk_registers from 4 * dentist_vco_freq_khz /
	 * dprefclk DID divider
	 */
	clk_mgr->base.dprefclk_khz = 716666;
	if (ctx->dc->debug.disable_dtb_ref_clk_switch) {
		//initialize DTB ref clock value if DPM disabled
		if (ctx->dce_version == DCN_VERSION_3_21)
			clk_mgr->base.clks.ref_dtbclk_khz = 477800;
		else
			clk_mgr->base.clks.ref_dtbclk_khz = 268750;
	}


	/* integer part is now VCO frequency in kHz */
	clk_mgr->base.dentist_vco_freq_khz = dcn32_get_vco_frequency_from_reg(clk_mgr);

	/* in case we don't get a value from the register, use default */
	if (clk_mgr->base.dentist_vco_freq_khz == 0)
		clk_mgr->base.dentist_vco_freq_khz = 4300000; /* Updated as per HW docs */

	/* Snapshot what the VBIOS/boot firmware left programmed. */
	dcn32_dump_clk_registers(&clk_mgr->base.boot_snapshot, &clk_mgr->base, &log_info);

	/* With DTB refclk switching disabled, trust the boot-time value over
	 * the hard-coded default above.
	 */
	if (ctx->dc->debug.disable_dtb_ref_clk_switch &&
			clk_mgr->base.clks.ref_dtbclk_khz != clk_mgr->base.boot_snapshot.dtbclk) {
		clk_mgr->base.clks.ref_dtbclk_khz = clk_mgr->base.boot_snapshot.dtbclk;
	}

	if (clk_mgr->base.boot_snapshot.dprefclk != 0) {
		clk_mgr->base.dprefclk_khz = clk_mgr->base.boot_snapshot.dprefclk;
	}
	dcn32_clock_read_ss_info(clk_mgr);

	clk_mgr->dfs_bypass_enabled = false;

	/* Flipped later once SMU communication is verified. */
	clk_mgr->smu_present = false;

	/* NOTE(review): neither this kzalloc nor the GPU-mem allocation below
	 * is checked for failure here; downstream users appear to tolerate
	 * NULL (e.g. dcn32_notify_wm_ranges checks wm_range_table) — confirm.
	 */
	clk_mgr->base.bw_params = kzalloc(sizeof(*clk_mgr->base.bw_params), GFP_KERNEL);

	/* need physical address of table to give to PMFW */
	clk_mgr->wm_range_table = dm_helpers_allocate_gpu_mem(clk_mgr->base.ctx,
			DC_MEM_ALLOC_TYPE_GART, sizeof(WatermarksExternal_t),
			&clk_mgr->wm_range_table_addr);
}
1208
/*
 * dcn32_clk_mgr_destroy - release resources acquired in
 * dcn32_clk_mgr_construct(): the bw_params heap allocation and the
 * GART-backed PMFW watermark table.
 */
void dcn32_clk_mgr_destroy(struct clk_mgr_internal *clk_mgr)
{
	/* kfree(NULL) is a no-op, so no guard is needed here. */
	kfree(clk_mgr->base.bw_params);

	/* The GPU-mem helper is not documented NULL-safe, hence the check. */
	if (clk_mgr->wm_range_table)
		dm_helpers_free_gpu_mem(clk_mgr->base.ctx, DC_MEM_ALLOC_TYPE_GART,
				clk_mgr->wm_range_table);
}
1217