1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26#include <linux/slab.h>
27
28#include "dal_asic_id.h"
29#include "dc_types.h"
30#include "dccg.h"
31#include "clk_mgr_internal.h"
32
33#include "dce100/dce_clk_mgr.h"
34#include "dce110/dce110_clk_mgr.h"
35#include "dce112/dce112_clk_mgr.h"
36#include "dce120/dce120_clk_mgr.h"
37#include "dce60/dce60_clk_mgr.h"
38#include "dcn10/rv1_clk_mgr.h"
39#include "dcn10/rv2_clk_mgr.h"
40#include "dcn20/dcn20_clk_mgr.h"
41#include "dcn21/rn_clk_mgr.h"
42#include "dcn30/dcn30_clk_mgr.h"
43#include "dcn301/vg_clk_mgr.h"
44
45
46int clk_mgr_helper_get_active_display_cnt(
47 struct dc *dc,
48 struct dc_state *context)
49{
50 int i, display_count;
51
52 display_count = 0;
53 for (i = 0; i < context->stream_count; i++) {
54 const struct dc_stream_state *stream = context->streams[i];
55
56
57
58
59
60
61
62 if (!stream->dpms_off || stream->signal == SIGNAL_TYPE_VIRTUAL)
63 display_count++;
64 }
65
66 return display_count;
67}
68
69int clk_mgr_helper_get_active_plane_cnt(
70 struct dc *dc,
71 struct dc_state *context)
72{
73 int i, total_plane_count;
74
75 total_plane_count = 0;
76 for (i = 0; i < context->stream_count; i++) {
77 const struct dc_stream_status stream_status = context->stream_status[i];
78
79
80
81
82 total_plane_count += stream_status.plane_count;
83 }
84
85 return total_plane_count;
86}
87
88void clk_mgr_exit_optimized_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
89{
90 struct dc_link *edp_link = get_edp_link(dc);
91
92 if (dc->hwss.exit_optimized_pwr_state)
93 dc->hwss.exit_optimized_pwr_state(dc, dc->current_state);
94
95 if (edp_link) {
96 clk_mgr->psr_allow_active_cache = edp_link->psr_settings.psr_allow_active;
97 dc_link_set_psr_allow_active(edp_link, false, false, false);
98 }
99
100}
101
102void clk_mgr_optimize_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
103{
104 struct dc_link *edp_link = get_edp_link(dc);
105
106 if (edp_link)
107 dc_link_set_psr_allow_active(edp_link,
108 clk_mgr->psr_allow_active_cache, false, false);
109
110 if (dc->hwss.optimize_pwr_state)
111 dc->hwss.optimize_pwr_state(dc, dc->current_state);
112
113}
114
/*
 * Allocate a clk_mgr wrapper and dispatch to the family-specific
 * constructor selected by ctx->asic_id.
 *
 * @ctx:    DC context; its asic_id drives the dispatch below
 * @pp_smu: powerplay/SMU funcs, forwarded to DCN-era constructors
 * @dccg:   display clock generator, forwarded to DCN-era constructors
 *
 * Returns the embedded struct clk_mgr base, or NULL if allocation fails.
 * Ownership transfers to the caller, who must release the object with
 * dc_destroy_clk_mgr().
 *
 * NOTE(review): if a known family hits no matching revision (e.g. an
 * unmatched FAMILY_VI rev, or the default ASSERT(0) case) the switch
 * breaks without running any constructor, yet the zero-initialized
 * wrapper is still returned — presumably unreachable on supported
 * hardware; confirm before relying on this path.
 */
struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg)
{
	struct hw_asic_id asic_id = ctx->asic_id;

	/* Zeroed so any field a constructor skips stays in a known state. */
	struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

	if (clk_mgr == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	switch (asic_id.chip_family) {
#if defined(CONFIG_DRM_AMD_DC_SI)
	case FAMILY_SI:
		dce60_clk_mgr_construct(ctx, clk_mgr);
		break;
#endif
	case FAMILY_CI:
	case FAMILY_KV:
		dce_clk_mgr_construct(ctx, clk_mgr);
		break;
	case FAMILY_CZ:
		dce110_clk_mgr_construct(ctx, clk_mgr);
		break;
	case FAMILY_VI:
		/* Within VI, the clk_mgr flavor depends on the revision. */
		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
			dce_clk_mgr_construct(ctx, clk_mgr);
			break;
		}
		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			break;
		}
		if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			break;
		}
		break;
	case FAMILY_AI:
		if (ASICREV_IS_VEGA20_P(asic_id.hw_internal_rev))
			dce121_clk_mgr_construct(ctx, clk_mgr);
		else
			dce120_clk_mgr_construct(ctx, clk_mgr);
		break;

#if defined(CONFIG_DRM_AMD_DC_DCN)
	case FAMILY_RV:
		/* Renoir and Green Sardine share the RN clk_mgr. */
		if (ASICREV_IS_RENOIR(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			break;
		}

		if (ASICREV_IS_GREEN_SARDINE(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			break;
		}
		if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev)) {
			rv2_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			break;
		}
		if (ASICREV_IS_RAVEN(asic_id.hw_internal_rev) ||
				ASICREV_IS_PICASSO(asic_id.hw_internal_rev)) {
			rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			break;
		}
		break;

	case FAMILY_NV:
		/* Sienna Cichlid and Dimgrey Cavefish use DCN3; others DCN2. */
		if (ASICREV_IS_SIENNA_CICHLID_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			break;
		}
		if (ASICREV_IS_DIMGREY_CAVEFISH_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			break;
		}
		dcn20_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		break;

	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(asic_id.hw_internal_rev))
			vg_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		break;
#endif
	default:
		ASSERT(0); /* unknown ASIC family */
		break;
	}

	return &clk_mgr->base;
}
209
/*
 * Tear down a clk_mgr created by dc_clk_mgr_create().
 *
 * Families whose construct path has a matching family-specific destroy
 * hook run it before the wrapper itself is freed.
 *
 * Fix: Dimgrey Cavefish is constructed with dcn3_clk_mgr_construct()
 * (see dc_clk_mgr_create), but the destroy path previously only called
 * dcn3_clk_mgr_destroy() for Sienna Cichlid — leaking whatever the DCN3
 * constructor set up.  The revision check now mirrors the create path.
 */
void dc_destroy_clk_mgr(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

#ifdef CONFIG_DRM_AMD_DC_DCN
	switch (clk_mgr_base->ctx->asic_id.chip_family) {
	case FAMILY_NV:
		/* Both DCN3 users must release DCN3-specific state. */
		if (ASICREV_IS_SIENNA_CICHLID_P(clk_mgr_base->ctx->asic_id.hw_internal_rev) ||
		    ASICREV_IS_DIMGREY_CAVEFISH_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		break;

	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			vg_clk_mgr_destroy(clk_mgr);
		break;

	default:
		break;
	}
#endif

	kfree(clk_mgr);
}
234
235