/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */
26#include "dce_clocks.h"
27#include "dm_services.h"
28#include "reg_helper.h"
29#include "fixed32_32.h"
30#include "bios_parser_interface.h"
31#include "dc.h"
32#include "dmcu.h"
33#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
34#include "dcn_calcs.h"
35#endif
36#include "core_types.h"
37
38
/* Recover the dce_disp_clk wrapper from its embedded display_clock base. */
#define TO_DCE_CLOCKS(clocks)\
	container_of(clocks, struct dce_disp_clk, base)

/* REG/FN/CTX are consumed by the REG_GET/REG_UPDATE macros from
 * reg_helper.h; they expect a local variable named "clk_dce" in scope. */
#define REG(reg) \
	(clk_dce->regs->reg)

#undef FN
#define FN(reg_name, field_name) \
	clk_dce->clk_shift->field_name, clk_dce->clk_mask->field_name

#define CTX \
	clk_dce->base.ctx
51
52
/* Max display/pixel clocks (kHz) per power state for DCE 8.x.
 * Indexed by enum dm_pp_clocks_state (see dce_get_required_clocks_state). */
static const struct state_dependent_clocks dce80_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - not supported (zero caps)*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateLow*/
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000},
/*ClocksStateNominal*/
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 } };
64
/* Max display/pixel clocks (kHz) per power state for DCE 11.0. */
static const struct state_dependent_clocks dce110_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow*/
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/*ClocksStateLow*/
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 467000, .pixel_clk_khz = 400000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 643000, .pixel_clk_khz = 400000 } };
76
/* Max display/pixel clocks (kHz) per power state for DCE 11.2. */
static const struct state_dependent_clocks dce112_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow*/
{ .display_clk_khz = 389189, .pixel_clk_khz = 346672 },
/*ClocksStateLow*/
{ .display_clk_khz = 459000, .pixel_clk_khz = 400000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 667000, .pixel_clk_khz = 600000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 1132000, .pixel_clk_khz = 600000 } };
88
/* Max display/pixel clocks (kHz) per power state for DCE 12.x.
 * Unlike the others, may be superseded at create time by values queried
 * from pplib (see dce120_disp_clk_create). */
static const struct state_dependent_clocks dce120_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - not supported (zero caps)*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateLow*/
{ .display_clk_khz = 460000, .pixel_clk_khz = 400000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 670000, .pixel_clk_khz = 600000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 1133000, .pixel_clk_khz = 600000 } };
100
101
/* DENTIST divider values are handled as fixed point scaled by 100
 * (DIVIDER_RANGE_SCALE_FACTOR), so 200 means an actual divide of 2.00.
 * Each range start marks where a new DID-to-divider step size begins. */
enum dce_divider_range_start {
	DIVIDER_RANGE_01_START = 200, /* 2.00 */
	DIVIDER_RANGE_02_START = 1600, /* 16.00 */
	DIVIDER_RANGE_03_START = 3200, /* 32.00 */
	DIVIDER_RANGE_SCALE_FACTOR = 100 /* dividers are stored x100 */
};
108
109
110
/* First Divider ID (DID) register code of each divider range, plus the
 * exclusive upper bound for the last range. */
enum dce_divider_id_register_setting {
	DIVIDER_RANGE_01_BASE_DIVIDER_ID = 0X08,
	DIVIDER_RANGE_02_BASE_DIVIDER_ID = 0X40,
	DIVIDER_RANGE_03_BASE_DIVIDER_ID = 0X60,
	DIVIDER_RANGE_MAX_DIVIDER_ID = 0X80
};
117
118
119
120
/* Divider increment per DID step within each range, in the same x100
 * fixed-point units as dce_divider_range_start (25 = 0.25). */
enum dce_divider_range_step_size {
	DIVIDER_RANGE_01_STEP_SIZE = 25, /* 0.25 */
	DIVIDER_RANGE_02_STEP_SIZE = 50, /* 0.50 */
	DIVIDER_RANGE_03_STEP_SIZE = 100 /* 1.00 */
};
126
127static bool dce_divider_range_construct(
128 struct dce_divider_range *div_range,
129 int range_start,
130 int range_step,
131 int did_min,
132 int did_max)
133{
134 div_range->div_range_start = range_start;
135 div_range->div_range_step = range_step;
136 div_range->did_min = did_min;
137 div_range->did_max = did_max;
138
139 if (div_range->div_range_step == 0) {
140 div_range->div_range_step = 1;
141
142 BREAK_TO_DEBUGGER();
143 }
144
145
146
147
148
149
150
151 div_range->div_range_end = (did_max - did_min) * range_step
152 + range_start;
153 return true;
154}
155
156static int dce_divider_range_calc_divider(
157 struct dce_divider_range *div_range,
158 int did)
159{
160
161 if ((did < div_range->did_min) || (did >= div_range->did_max))
162 return INVALID_DIVIDER;
163
164 return ((did - div_range->did_min) * div_range->div_range_step)
165 + div_range->div_range_start;
166
167}
168
169static int dce_divider_range_get_divider(
170 struct dce_divider_range *div_range,
171 int ranges_num,
172 int did)
173{
174 int div = INVALID_DIVIDER;
175 int i;
176
177 for (i = 0; i < ranges_num; i++) {
178
179 div = dce_divider_range_calc_divider(&div_range[i], did);
180
181 if (div != INVALID_DIVIDER)
182 break;
183 }
184 return div;
185}
186
/* Return the current DP reference clock in kHz.
 *
 * Reads back the DENTIST DPREFCLK divider from hardware, derives the
 * clock from the DENTIST VCO frequency, then derates for spread spectrum
 * when SS is enabled on DPREFCLK.  Falls back to 600 MHz if the divider
 * readback does not map to a known range.
 */
static int dce_clocks_get_dp_ref_freq(struct display_clock *clk)
{
	struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
	int dprefclk_wdivider;
	int dprefclk_src_sel;
	int dp_ref_clk_khz = 600000; /* default if divider lookup fails */
	int target_div = INVALID_DIVIDER;

	/* ASSERT DP Reference Clock source is from DFS */
	REG_GET(DPREFCLK_CNTL, DPREFCLK_SRC_SEL, &dprefclk_src_sel);
	ASSERT(dprefclk_src_sel == 0);

	/* Read the currently programmed DID from
	 * DENTIST_DISPCLK_CNTL.DENTIST_DPREFCLK_WDIVIDER. */
	REG_GET(DENTIST_DISPCLK_CNTL, DENTIST_DPREFCLK_WDIVIDER, &dprefclk_wdivider);

	/* Convert the DID to an actual divider (x100 fixed point). */
	target_div = dce_divider_range_get_divider(
			clk_dce->divider_ranges,
			DIVIDER_RANGE_MAX,
			dprefclk_wdivider);

	if (target_div != INVALID_DIVIDER) {
		/* The divider is scaled by DIVIDER_RANGE_SCALE_FACTOR (100),
		 * so scale the VCO frequency by the same factor first. */
		dp_ref_clk_khz = (DIVIDER_RANGE_SCALE_FACTOR
			* clk_dce->dentist_vco_freq_khz) / target_div;
	}

	/* Spread-spectrum derating: the averaged DP ref clock (used for DP
	 * DTO / DP audio DTO / GTC) is reduced by the SS percentage scaled
	 * by its divider.
	 *
	 *   dp_ref_clk *= 1 - (dprefclk_ss_percentage /
	 *                      dprefclk_ss_divider) / 200
	 *
	 * NOTE(review): the /200 presumably folds the percent-to-fraction
	 * conversion (/100) together with halving for down-spread (/2) —
	 * confirm against the VBIOS SS table convention.
	 */
	if (clk_dce->ss_on_dprefclk && clk_dce->dprefclk_ss_divider != 0) {
		struct fixed32_32 ss_percentage = dal_fixed32_32_div_int(
				dal_fixed32_32_from_fraction(
						clk_dce->dprefclk_ss_percentage,
						clk_dce->dprefclk_ss_divider), 200);
		struct fixed32_32 adj_dp_ref_clk_khz;

		ss_percentage = dal_fixed32_32_sub(dal_fixed32_32_one,
				ss_percentage);
		adj_dp_ref_clk_khz =
			dal_fixed32_32_mul_int(
				ss_percentage,
				dp_ref_clk_khz);
		dp_ref_clk_khz = dal_fixed32_32_floor(adj_dp_ref_clk_khz);
	}

	return dp_ref_clk_khz;
}
245
246
247
248
249
/* DCE 12 workaround variant of dce_clocks_get_dp_ref_freq.
 *
 * Skips the divider register readback entirely (dce120_disp_clk_create
 * constructs this object with NULL register/shift/mask tables) and assumes
 * the nominal 600 MHz DPREFCLK, applying only the same spread-spectrum
 * derating as dce_clocks_get_dp_ref_freq.
 */
static int dce_clocks_get_dp_ref_freq_wrkaround(struct display_clock *clk)
{
	struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
	int dp_ref_clk_khz = 600000; /* nominal DPREFCLK, no readback */

	/* Same SS math as dce_clocks_get_dp_ref_freq:
	 * dp_ref_clk *= 1 - (pct / divider) / 200 */
	if (clk_dce->ss_on_dprefclk && clk_dce->dprefclk_ss_divider != 0) {
		struct fixed32_32 ss_percentage = dal_fixed32_32_div_int(
				dal_fixed32_32_from_fraction(
						clk_dce->dprefclk_ss_percentage,
						clk_dce->dprefclk_ss_divider), 200);
		struct fixed32_32 adj_dp_ref_clk_khz;

		ss_percentage = dal_fixed32_32_sub(dal_fixed32_32_one,
				ss_percentage);
		adj_dp_ref_clk_khz =
			dal_fixed32_32_mul_int(
				ss_percentage,
				dp_ref_clk_khz);
		dp_ref_clk_khz = dal_fixed32_32_floor(adj_dp_ref_clk_khz);
	}

	return dp_ref_clk_khz;
}
/* Find the lowest power state whose per-state clock caps satisfy
 * req_clocks.  Returns DM_PP_CLOCKS_STATE_INVALID (with a warning) when
 * even the highest supported state cannot satisfy the request. */
static enum dm_pp_clocks_state dce_get_required_clocks_state(
	struct display_clock *clk,
	struct state_dependent_clocks *req_clocks)
{
	struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
	int i;
	enum dm_pp_clocks_state low_req_clk;

	/* Walk from the highest supported state down to ULTRA_LOW and stop
	 * at the first state whose caps the request exceeds; the state just
	 * above it (i + 1) is then the lowest sufficient state.  If the loop
	 * completes, i + 1 == DM_PP_CLOCKS_STATE_ULTRA_LOW. */
	for (i = clk->max_clks_state; i >= DM_PP_CLOCKS_STATE_ULTRA_LOW; i--)
		if (req_clocks->display_clk_khz >
				clk_dce->max_clks_by_state[i].display_clk_khz
			|| req_clocks->pixel_clk_khz >
				clk_dce->max_clks_by_state[i].pixel_clk_khz)
			break;

	low_req_clk = i + 1;
	if (low_req_clk > clk->max_clks_state) {
		/* Even the max state was exceeded on the first iteration. */
		dm_logger_write(clk->ctx->logger, LOG_WARNING,
				"%s: clocks unsupported", __func__);
		low_req_clk = DM_PP_CLOCKS_STATE_INVALID;
	}

	return low_req_clk;
}
301
/* Request that pplib raise/lower the minimum clock state.
 *
 * Returns false only when the requested state exceeds the supported
 * maximum; an already-current state is treated as success without
 * re-notifying pplib.
 */
static bool dce_clock_set_min_clocks_state(
	struct display_clock *clk,
	enum dm_pp_clocks_state clocks_state)
{
	struct dm_pp_power_level_change_request level_change_req = {
			clocks_state };

	if (clocks_state > clk->max_clks_state) {
		/* Requested state exceeds the max supported state. */
		dm_logger_write(clk->ctx->logger, LOG_WARNING,
				"Requested state exceeds max supported state");
		return false;
	} else if (clocks_state == clk->cur_min_clks_state) {
		/* Nothing to do - already at the requested state. */
		return true;
	}

	/* Cache the new state only if pplib accepted the change. */
	if (dm_pp_apply_power_level_change_request(clk->ctx, &level_change_req))
		clk->cur_min_clks_state = clocks_state;

	return true;
}
326
/* Program the display engine clock (DFS) via the VBIOS pixel-clock
 * command table.
 *
 * @clk: base display clock object
 * @requested_clk_khz: requested display clock in kHz; 0 is passed through
 *	and additionally marks the min-clock state NOMINAL for resume
 *
 * Returns the clock actually in effect, in kHz (the DFS-bypass clock
 * reported by VBIOS when bypass is enabled, else the requested value).
 */
static int dce_set_clock(
	struct display_clock *clk,
	int requested_clk_khz)
{
	struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
	struct bp_pixel_clock_parameters pxl_clk_params = { 0 };
	struct dc_bios *bp = clk->ctx->dc_bios;
	int actual_clock = requested_clk_khz;

	/* Clamp a non-zero request to the minimum the DFS can produce
	 * (VCO / 64, the largest divider). */
	if (requested_clk_khz > 0)
		requested_clk_khz = max(requested_clk_khz,
				clk_dce->dentist_vco_freq_khz / 64);

	/* Prepare the VBIOS command-table arguments. */
	pxl_clk_params.target_pixel_clock = requested_clk_khz;
	pxl_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;

	bp->funcs->program_display_engine_pll(bp, &pxl_clk_params);

	if (clk_dce->dfs_bypass_enabled) {
		/* Cache and report the clock VBIOS chose for DFS bypass;
		 * it may differ from what was requested. */
		clk_dce->dfs_bypass_disp_clk =
			pxl_clk_params.dfs_bypass_display_clock;
		actual_clock = pxl_clk_params.dfs_bypass_display_clock;
	}

	/* From power down we need to mark the clock state as NOMINAL from
	 * HW reset, so that on resume we will call the pplib voltage
	 * regulator. */
	if (requested_clk_khz == 0)
		clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
	return actual_clock;
}
361
362static int dce_psr_set_clock(
363 struct display_clock *clk,
364 int requested_clk_khz)
365{
366 struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
367 struct dc_context *ctx = clk_dce->base.ctx;
368 struct dc *core_dc = ctx->dc;
369 struct dmcu *dmcu = core_dc->res_pool->dmcu;
370 int actual_clk_khz = requested_clk_khz;
371
372 actual_clk_khz = dce_set_clock(clk, requested_clk_khz);
373
374 dmcu->funcs->set_psr_wait_loop(dmcu, actual_clk_khz / 1000 / 7);
375 return actual_clk_khz;
376}
377
378static int dce112_set_clock(
379 struct display_clock *clk,
380 int requested_clk_khz)
381{
382 struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
383 struct bp_set_dce_clock_parameters dce_clk_params;
384 struct dc_bios *bp = clk->ctx->dc_bios;
385 struct dc *core_dc = clk->ctx->dc;
386 struct dmcu *dmcu = core_dc->res_pool->dmcu;
387 int actual_clock = requested_clk_khz;
388
389 memset(&dce_clk_params, 0, sizeof(dce_clk_params));
390
391
392 if (requested_clk_khz > 0)
393 requested_clk_khz = max(requested_clk_khz,
394 clk_dce->dentist_vco_freq_khz / 62);
395
396 dce_clk_params.target_clock_frequency = requested_clk_khz;
397 dce_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;
398 dce_clk_params.clock_type = DCECLOCK_TYPE_DISPLAY_CLOCK;
399
400 bp->funcs->set_dce_clock(bp, &dce_clk_params);
401 actual_clock = dce_clk_params.target_clock_frequency;
402
403
404
405 if (requested_clk_khz == 0)
406 clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
407
408
409
410 dce_clk_params.target_clock_frequency = 0;
411 dce_clk_params.clock_type = DCECLOCK_TYPE_DPREFCLK;
412 dce_clk_params.flags.USE_GENLOCK_AS_SOURCE_FOR_DPREFCLK =
413 (dce_clk_params.pll_id ==
414 CLOCK_SOURCE_COMBO_DISPLAY_PLL0);
415
416 bp->funcs->set_dce_clock(bp, &dce_clk_params);
417
418 if (clk_dce->dfs_bypass_disp_clk != actual_clock)
419 dmcu->funcs->set_psr_wait_loop(dmcu,
420 actual_clock / 1000 / 7);
421 clk_dce->dfs_bypass_disp_clk = actual_clock;
422 return actual_clock;
423}
424
/* Populate clock limits from the VBIOS integrated-info table (APUs).
 *
 * Reads the DENTIST VCO frequency (with firmware-info and 3.6 GHz
 * fallbacks), overrides the per-state display clock caps from the
 * disp_clk_voltage table, and latches the DFS-bypass capability flag.
 */
static void dce_clock_read_integrated_info(struct dce_disp_clk *clk_dce)
{
	struct dc_debug *debug = &clk_dce->base.ctx->dc->debug;
	struct dc_bios *bp = clk_dce->base.ctx->dc_bios;
	struct integrated_info info = { { { 0 } } };
	struct dc_firmware_info fw_info = { { 0 } };
	int i;

	if (bp->integrated_info)
		info = *bp->integrated_info;

	/* VCO frequency: integrated info, else firmware info, else a
	 * hard-coded 3.6 GHz default. */
	clk_dce->dentist_vco_freq_khz = info.dentist_vco_freq;
	if (clk_dce->dentist_vco_freq_khz == 0) {
		bp->funcs->get_firmware_info(bp, &fw_info);
		clk_dce->dentist_vco_freq_khz =
			fw_info.smu_gpu_pll_output_freq;
		if (clk_dce->dentist_vco_freq_khz == 0)
			clk_dce->dentist_vco_freq_khz = 3600000;
	}

	/* Map each disp_clk_voltage table row to a pp clock state. */
	for (i = 0; i < NUMBER_OF_DISP_CLK_VOLTAGE; ++i) {
		enum dm_pp_clocks_state clk_state = DM_PP_CLOCKS_STATE_INVALID;

		switch (i) {
		case 0:
			clk_state = DM_PP_CLOCKS_STATE_ULTRA_LOW;
			break;

		case 1:
			clk_state = DM_PP_CLOCKS_STATE_LOW;
			break;

		case 2:
			clk_state = DM_PP_CLOCKS_STATE_NOMINAL;
			break;

		case 3:
			clk_state = DM_PP_CLOCKS_STATE_PERFORMANCE;
			break;

		default:
			clk_state = DM_PP_CLOCKS_STATE_INVALID;
			break;
		}

		/* Only override the built-in cap with a sane table value;
		 * the 100 MHz floor filters out bogus/unpopulated rows. */
		if (info.disp_clk_voltage[i].max_supported_clk >= 100000)
			clk_dce->max_clks_by_state[clk_state].display_clk_khz =
				info.disp_clk_voltage[i].max_supported_clk;
	}

	if (!debug->disable_dfs_bypass && bp->integrated_info)
		if (bp->integrated_info->gpu_cap_info & DFS_BYPASS_ENABLE)
			clk_dce->dfs_bypass_enabled = true;

	clk_dce->use_max_disp_clk = debug->max_disp_clk;
}
484
/* Read spread-spectrum settings for DPREFCLK from VBIOS.
 *
 * Prefers the GPU_PLL SS entry; if that is absent or zero, falls back to
 * the DISPLAY_PORT SS entry.  On success, enables SS derating for
 * dce_clocks_get_dp_ref_freq*() and records percentage + divider.
 * Only down-spread (CENTER_MODE == 0) records a nonzero percentage.
 */
static void dce_clock_read_ss_info(struct dce_disp_clk *clk_dce)
{
	struct dc_bios *bp = clk_dce->base.ctx->dc_bios;
	int ss_info_num = bp->funcs->get_ss_entry_number(
			bp, AS_SIGNAL_TYPE_GPU_PLL);

	if (ss_info_num) {
		struct spread_spectrum_info info = { { 0 } };
		enum bp_result result = bp->funcs->get_spread_spectrum_info(
				bp, AS_SIGNAL_TYPE_GPU_PLL, 0, &info);

		/* GPU_PLL entry found: use it and skip the DP fallback. */
		if (result == BP_RESULT_OK &&
				info.spread_spectrum_percentage != 0) {
			clk_dce->ss_on_dprefclk = true;
			clk_dce->dprefclk_ss_divider = info.spread_percentage_divider;

			if (info.type.CENTER_MODE == 0) {
				/* Down-spread only; center-spread leaves the
				 * percentage at its zero default. */
				clk_dce->dprefclk_ss_percentage =
						info.spread_spectrum_percentage;
			}

			return;
		}

		/* Fallback: DISPLAY_PORT SS entry. */
		result = bp->funcs->get_spread_spectrum_info(
				bp, AS_SIGNAL_TYPE_DISPLAY_PORT, 0, &info);

		if (result == BP_RESULT_OK &&
				info.spread_spectrum_percentage != 0) {
			clk_dce->ss_on_dprefclk = true;
			clk_dce->dprefclk_ss_divider = info.spread_percentage_divider;

			if (info.type.CENTER_MODE == 0) {
				/* Down-spread only, as above. */
				clk_dce->dprefclk_ss_percentage =
						info.spread_spectrum_percentage;
			}
		}
	}
}
540
/* Notify pplib of a clock requirement so it can set voltage accordingly.
 *
 * Two-phase protocol:
 *  - pre_mode_set: send a request only when the new value RAISES the
 *    cached requirement (raise voltage before the mode change), and
 *    remember per-clock whether pplib was already notified.
 *  - post mode set (!pre_mode_set): send a request only if the pre-phase
 *    did NOT notify (i.e. the requirement dropped — lower voltage after
 *    the mode change is safe).
 *
 * @clk: base display clock object
 * @clocks_type: DISPLAY_CLK, PIXELCLK or DISPLAYPHYCLK (others rejected)
 * @clocks_in_khz: requested clock in kHz
 * @pre_mode_set: which phase of the protocol this call is
 * @update_dp_phyclk: also raise the cached max DP PHY clock if exceeded
 *
 * Returns false only for an unsupported clock type.
 *
 * NOTE(review): "notigy" in the phyclk_notigy_pplib_done field is a typo
 * in the externally-declared struct; it cannot be fixed here.
 */
static bool dce_apply_clock_voltage_request(
		struct display_clock *clk,
		enum dm_pp_clock_type clocks_type,
		int clocks_in_khz,
		bool pre_mode_set,
		bool update_dp_phyclk)
{
	bool send_request = false;
	struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};

	/* Reject clock types this path does not handle. */
	switch (clocks_type) {
	case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
	case DM_PP_CLOCK_TYPE_PIXELCLK:
	case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
		break;
	default:
		BREAK_TO_DEBUGGER();
		return false;
	}

	clock_voltage_req.clk_type = clocks_type;
	clock_voltage_req.clocks_in_khz = clocks_in_khz;

	if (pre_mode_set) {
		/* Raise-before-mode-set: notify pplib now only when the
		 * requirement increased; always cache the new value. */
		switch (clocks_type) {
		case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
			if (clocks_in_khz > clk->cur_clocks_value.dispclk_in_khz) {
				clk->cur_clocks_value.dispclk_notify_pplib_done = true;
				send_request = true;
			} else
				clk->cur_clocks_value.dispclk_notify_pplib_done = false;

			clk->cur_clocks_value.dispclk_in_khz = clocks_in_khz;
			break;
		case DM_PP_CLOCK_TYPE_PIXELCLK:
			if (clocks_in_khz > clk->cur_clocks_value.max_pixelclk_in_khz) {
				clk->cur_clocks_value.pixelclk_notify_pplib_done = true;
				send_request = true;
			} else
				clk->cur_clocks_value.pixelclk_notify_pplib_done = false;

			clk->cur_clocks_value.max_pixelclk_in_khz = clocks_in_khz;
			break;
		case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
			if (clocks_in_khz > clk->cur_clocks_value.max_non_dp_phyclk_in_khz) {
				clk->cur_clocks_value.phyclk_notigy_pplib_done = true;
				send_request = true;
			} else
				clk->cur_clocks_value.phyclk_notigy_pplib_done = false;

			clk->cur_clocks_value.max_non_dp_phyclk_in_khz = clocks_in_khz;
			break;
		default:
			ASSERT(0); /* unreachable: filtered above */
			break;
		}

	} else {
		/* Lower-after-mode-set: notify now only if the pre phase
		 * did not already send a request for this clock. */
		switch (clocks_type) {
		case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
			if (!clk->cur_clocks_value.dispclk_notify_pplib_done)
				send_request = true;
			break;
		case DM_PP_CLOCK_TYPE_PIXELCLK:
			if (!clk->cur_clocks_value.pixelclk_notify_pplib_done)
				send_request = true;
			break;
		case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
			if (!clk->cur_clocks_value.phyclk_notigy_pplib_done)
				send_request = true;
			break;
		default:
			ASSERT(0); /* unreachable: filtered above */
			break;
		}
	}
	if (send_request) {
#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
		/* On DCN the request is translated to a DCFCLK that
		 * satisfies all currently cached clock requirements. */
		if (clk->ctx->dce_version >= DCN_VERSION_1_0) {
			struct dc *core_dc = clk->ctx->dc;

			clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
			clock_voltage_req.clocks_in_khz =
				dcn_find_dcfclk_suits_all(core_dc, &clk->cur_clocks_value);
		}
#endif
		dm_pp_apply_clock_for_voltage_request(
				clk->ctx, &clock_voltage_req);
	}
	if (update_dp_phyclk && (clocks_in_khz >
			clk->cur_clocks_value.max_dp_phyclk_in_khz))
		clk->cur_clocks_value.max_dp_phyclk_in_khz = clocks_in_khz;

	return true;
}
637
638
/* Per-generation vtables.  DCE 12 routes clock-state management through
 * apply_clock_voltage_request instead of the get/set clocks-state pair. */
static const struct display_clock_funcs dce120_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq_wrkaround,
	.apply_clock_voltage_request = dce_apply_clock_voltage_request,
	.set_clock = dce112_set_clock
};

static const struct display_clock_funcs dce112_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.get_required_clocks_state = dce_get_required_clocks_state,
	.set_min_clocks_state = dce_clock_set_min_clocks_state,
	.set_clock = dce112_set_clock
};

/* DCE 11.0 uses the PSR-aware set_clock wrapper. */
static const struct display_clock_funcs dce110_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.get_required_clocks_state = dce_get_required_clocks_state,
	.set_min_clocks_state = dce_clock_set_min_clocks_state,
	.set_clock = dce_psr_set_clock
};

/* Default (DCE 8.x) vtable; also the base installed by the constructor. */
static const struct display_clock_funcs dce_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.get_required_clocks_state = dce_get_required_clocks_state,
	.set_min_clocks_state = dce_clock_set_min_clocks_state,
	.set_clock = dce_set_clock
};
665
/* Common constructor shared by all dce*_disp_clk_create() variants.
 *
 * Installs the base (DCE 8.x) vtable — generation-specific creators
 * override base.funcs afterwards — reads VBIOS integrated/SS info, and
 * builds the three DID-to-divider ranges used to decode
 * DENTIST_DPREFCLK_WDIVIDER readbacks.
 */
static void dce_disp_clk_construct(
	struct dce_disp_clk *clk_dce,
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
{
	struct display_clock *base = &clk_dce->base;

	base->ctx = ctx;
	base->funcs = &dce_funcs;

	/* May be NULL for DCE 12 (no register readback path). */
	clk_dce->regs = regs;
	clk_dce->clk_shift = clk_shift;
	clk_dce->clk_mask = clk_mask;

	clk_dce->dfs_bypass_disp_clk = 0;

	/* SS defaults: disabled, with a benign divider to avoid div-by-0. */
	clk_dce->dprefclk_ss_percentage = 0;
	clk_dce->dprefclk_ss_divider = 1000;
	clk_dce->ss_on_dprefclk = false;

	base->max_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
	base->cur_min_clks_state = DM_PP_CLOCKS_STATE_INVALID;

	dce_clock_read_integrated_info(clk_dce);
	dce_clock_read_ss_info(clk_dce);

	/* Range 1: DIDs 0x08..0x3F, divider 2.00 in 0.25 steps. */
	dce_divider_range_construct(
		&clk_dce->divider_ranges[DIVIDER_RANGE_01],
		DIVIDER_RANGE_01_START,
		DIVIDER_RANGE_01_STEP_SIZE,
		DIVIDER_RANGE_01_BASE_DIVIDER_ID,
		DIVIDER_RANGE_02_BASE_DIVIDER_ID);
	/* Range 2: DIDs 0x40..0x5F, divider 16.00 in 0.50 steps. */
	dce_divider_range_construct(
		&clk_dce->divider_ranges[DIVIDER_RANGE_02],
		DIVIDER_RANGE_02_START,
		DIVIDER_RANGE_02_STEP_SIZE,
		DIVIDER_RANGE_02_BASE_DIVIDER_ID,
		DIVIDER_RANGE_03_BASE_DIVIDER_ID);
	/* Range 3: DIDs 0x60..0x7F, divider 32.00 in 1.00 steps. */
	dce_divider_range_construct(
		&clk_dce->divider_ranges[DIVIDER_RANGE_03],
		DIVIDER_RANGE_03_START,
		DIVIDER_RANGE_03_STEP_SIZE,
		DIVIDER_RANGE_03_BASE_DIVIDER_ID,
		DIVIDER_RANGE_MAX_DIVIDER_ID);
}
713
/* Create a display clock object for DCE 8.x.
 *
 * Returns the embedded base pointer (free with dce_disp_clk_destroy),
 * or NULL on allocation failure.
 */
struct display_clock *dce_disp_clk_create(
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
{
	struct dce_disp_clk *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);

	if (clk_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	/* Seed caps before construct so integrated info can override them. */
	memcpy(clk_dce->max_clks_by_state,
		dce80_max_clks_by_state,
		sizeof(dce80_max_clks_by_state));

	dce_disp_clk_construct(
		clk_dce, ctx, regs, clk_shift, clk_mask);

	return &clk_dce->base;
}
736
/* Create a display clock object for DCE 11.0 (PSR-aware set_clock).
 *
 * Returns the embedded base pointer (free with dce_disp_clk_destroy),
 * or NULL on allocation failure.
 */
struct display_clock *dce110_disp_clk_create(
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
{
	struct dce_disp_clk *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);

	if (clk_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	/* Seed caps before construct so integrated info can override them. */
	memcpy(clk_dce->max_clks_by_state,
		dce110_max_clks_by_state,
		sizeof(dce110_max_clks_by_state));

	dce_disp_clk_construct(
		clk_dce, ctx, regs, clk_shift, clk_mask);

	/* Override the base vtable installed by construct. */
	clk_dce->base.funcs = &dce110_funcs;

	return &clk_dce->base;
}
761
/* Create a display clock object for DCE 11.2 (SetDceClock-based).
 *
 * Returns the embedded base pointer (free with dce_disp_clk_destroy),
 * or NULL on allocation failure.
 */
struct display_clock *dce112_disp_clk_create(
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
{
	struct dce_disp_clk *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);

	if (clk_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	/* Seed caps before construct so integrated info can override them. */
	memcpy(clk_dce->max_clks_by_state,
		dce112_max_clks_by_state,
		sizeof(dce112_max_clks_by_state));

	dce_disp_clk_construct(
		clk_dce, ctx, regs, clk_shift, clk_mask);

	/* Override the base vtable installed by construct. */
	clk_dce->base.funcs = &dce112_funcs;

	return &clk_dce->base;
}
786
/* Create a display clock object for DCE 12.x.
 *
 * No register tables are passed (NULL regs/shift/mask) — this variant
 * uses the DPREFCLK readback workaround.  The max display clock is
 * queried from pplib when available, else defaults to 1133 MHz.
 *
 * Returns the embedded base pointer (free with dce_disp_clk_destroy),
 * or NULL on allocation failure.
 */
struct display_clock *dce120_disp_clk_create(struct dc_context *ctx)
{
	struct dce_disp_clk *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);
	struct dm_pp_clock_levels_with_voltage clk_level_info = {0};

	if (clk_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	/* Seed caps before construct so integrated info can override them. */
	memcpy(clk_dce->max_clks_by_state,
		dce120_max_clks_by_state,
		sizeof(dce120_max_clks_by_state));

	dce_disp_clk_construct(
		clk_dce, ctx, NULL, NULL, NULL);

	clk_dce->base.funcs = &dce120_funcs;

	/* Highest pplib-reported display clock level wins; otherwise fall
	 * back to the hard-coded DCE 12 maximum. */
	if (!ctx->dc->debug.disable_pplib_clock_request &&
			dm_pp_get_clock_levels_by_type_with_voltage(
				ctx, DM_PP_CLOCK_TYPE_DISPLAY_CLK, &clk_level_info)
			&& clk_level_info.num_levels)
		clk_dce->max_displ_clk_in_khz =
			clk_level_info.data[clk_level_info.num_levels - 1].clocks_in_khz;
	else
		clk_dce->max_displ_clk_in_khz = 1133000;

	return &clk_dce->base;
}
818
819void dce_disp_clk_destroy(struct display_clock **disp_clk)
820{
821 struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(*disp_clk);
822
823 kfree(clk_dce);
824 *disp_clk = NULL;
825}
826