/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include <linux/slab.h>

#include "dal_asic_id.h"
#include "dc_types.h"
#include "dccg.h"
#include "clk_mgr_internal.h"
#include "dc_state_priv.h"
#include "link.h"

#include "dce100/dce_clk_mgr.h"
#include "dce110/dce110_clk_mgr.h"
#include "dce112/dce112_clk_mgr.h"
#include "dce120/dce120_clk_mgr.h"
#include "dce60/dce60_clk_mgr.h"
#include "dcn10/rv1_clk_mgr.h"
#include "dcn10/rv2_clk_mgr.h"
#include "dcn20/dcn20_clk_mgr.h"
#include "dcn21/rn_clk_mgr.h"
#include "dcn201/dcn201_clk_mgr.h"
#include "dcn30/dcn30_clk_mgr.h"
#include "dcn301/vg_clk_mgr.h"
#include "dcn31/dcn31_clk_mgr.h"
#include "dcn314/dcn314_clk_mgr.h"
#include "dcn315/dcn315_clk_mgr.h"
#include "dcn316/dcn316_clk_mgr.h"
#include "dcn32/dcn32_clk_mgr.h"
#include "dcn35/dcn35_clk_mgr.h"
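/**
 * clk_mgr_helper_get_active_display_cnt() - count displays to report as active
 * @dc: DC instance
 * @context: state whose streams are scanned
 *
 * SubVP phantom streams are excluded from the count; virtual streams are
 * included so that headless systems still report an active display (see the
 * in-body comments for why).
 *
 * Return: number of streams to treat as active displays.
 */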
int clk_mgr_helper_get_active_display_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, display_count;

	display_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_state *stream = context->streams[i];

		/* Don't count SubVP phantom pipes as part of active
		 * display count
		 */
		if (dc_state_get_stream_subvp_type(context, stream) == SUBVP_PHANTOM)
			continue;

		/*
		 * Only notify active stream or virtual stream.
		 * Need to notify virtual stream to work around
		 * headless case. HPD does not fire when system is in
		 * S0i2.
		 */
		if (!stream->dpms_off || stream->signal == SIGNAL_TYPE_VIRTUAL)
			display_count++;
	}

	return display_count;
}

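/**
 * clk_mgr_helper_get_active_plane_cnt() - total plane count across streams
 * @dc: DC instance
 * @context: state whose stream statuses are scanned
 *
 * Return: sum of plane_count over every stream in @context, active and
 * virtual alike.
 */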
int clk_mgr_helper_get_active_plane_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, total_plane_count;

	total_plane_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_status stream_status = context->stream_status[i];

		/*
		 * Sum up plane_count for all streams (active and virtual).
		 */
		total_plane_count += stream_status.plane_count;
	}

	return total_plane_count;
}

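/**
 * clk_mgr_exit_optimized_pwr_state() - leave the optimized power state
 * @dc: DC instance
 * @clk_mgr: clock manager used to cache the current PSR state
 *
 * Runs the hardware sequencer's exit_optimized_pwr_state hook, then disables
 * PSR and Replay on every PSR-capable eDP link, caching psr_allow_active in
 * @clk_mgr so clk_mgr_optimize_pwr_state() can restore it later.
 */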
void clk_mgr_exit_optimized_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	if (dc->hwss.exit_optimized_pwr_state)
		dc->hwss.exit_optimized_pwr_state(dc, dc->current_state);

	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			bool allow_active = false;

			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			clk_mgr->psr_allow_active_cache = edp_link->psr_settings.psr_allow_active;
			dc->link_srv->edp_set_psr_allow_active(edp_link, &allow_active, false, false, NULL);
			dc->link_srv->edp_set_replay_allow_active(edp_link, &allow_active, false, false, NULL);
		}
	}
}

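/**
 * clk_mgr_optimize_pwr_state() - re-enter the optimized power state
 * @dc: DC instance
 * @clk_mgr: clock manager holding the cached PSR state
 *
 * Restores the psr_allow_active value cached by
 * clk_mgr_exit_optimized_pwr_state() on every PSR-capable eDP link (for both
 * PSR and Replay), then runs the hardware sequencer's optimize_pwr_state
 * hook.
 */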
void clk_mgr_optimize_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			dc->link_srv->edp_set_psr_allow_active(edp_link,
					&clk_mgr->psr_allow_active_cache, false, false, NULL);
			dc->link_srv->edp_set_replay_allow_active(edp_link,
					&clk_mgr->psr_allow_active_cache, false, false, NULL);
		}
	}

	if (dc->hwss.optimize_pwr_state)
		dc->hwss.optimize_pwr_state(dc, dc->current_state);
}

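/**
 * dc_clk_mgr_create() - allocate and construct the clock manager for an ASIC
 * @ctx: DC context; its asic_id selects the implementation
 * @pp_smu: PowerPlay/SMU callbacks passed through to the constructors
 * @dccg: display clock generator passed through to the DCN constructors
 *
 * Return: the base clk_mgr on success, NULL on allocation failure or an
 * unrecognized ASIC family.
 */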
struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg)
{
	struct hw_asic_id asic_id = ctx->asic_id;

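	/* Select the implementation by ASIC family, then by hardware revision. */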
	switch (asic_id.chip_family) {
#if defined(CONFIG_DRM_AMD_DC_SI)
	case FAMILY_SI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce60_clk_mgr_construct(ctx, clk_mgr);
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#endif
	case FAMILY_CI:
	case FAMILY_KV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_CZ: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce110_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_VI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
			dce_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_AI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_VEGA20_P(asic_id.hw_internal_rev))
			dce121_clk_mgr_construct(ctx, clk_mgr);
		else
			dce120_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
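	/* DCN-based families below require kernel FP support (CONFIG_DRM_AMD_DC_FP). */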
#if defined(CONFIG_DRM_AMD_DC_FP)
	case FAMILY_RV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		if (ASICREV_IS_RENOIR(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}

		if (ASICREV_IS_GREEN_SARDINE(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev)) {
			rv2_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN(asic_id.hw_internal_rev) ||
				ASICREV_IS_PICASSO(asic_id.hw_internal_rev)) {
			rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_NV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_SIENNA_CICHLID_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_DIMGREY_CAVEFISH_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_BEIGE_GOBY_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (asic_id.chip_id == DEVICE_ID_NV_13FE) {
			dcn201_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		dcn20_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}
	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(asic_id.hw_internal_rev)) {
			struct clk_mgr_vgh *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

			if (clk_mgr == NULL) {
				BREAK_TO_DEBUGGER();
				return NULL;
			}
			vg_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base.base;
		}
		break;

	case FAMILY_YELLOW_CARP: {
		struct clk_mgr_dcn31 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn31_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	case AMDGPU_FAMILY_GC_10_3_6: {
		struct clk_mgr_dcn315 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn315_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	case AMDGPU_FAMILY_GC_10_3_7: {
		struct clk_mgr_dcn316 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn316_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	case AMDGPU_FAMILY_GC_11_0_0: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn32_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}

	case AMDGPU_FAMILY_GC_11_0_1: {
		struct clk_mgr_dcn314 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn314_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}

	case AMDGPU_FAMILY_GC_11_5_0: {
		struct clk_mgr_dcn35 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn35_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}

#endif /* CONFIG_DRM_AMD_DC_FP */
	default:
		ASSERT(0); /* Unknown Asic */
		break;
	}

	return NULL;
}

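/**
 * dc_destroy_clk_mgr() - destroy a clock manager created by dc_clk_mgr_create()
 * @clk_mgr_base: base pointer returned by dc_clk_mgr_create()
 *
 * Calls the family-specific destroy hook, if any, then frees the containing
 * allocation.
 */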
void dc_destroy_clk_mgr(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

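	/*
	 * Family-specific destroy hooks free whatever the matching construct
	 * allocated; the structure itself is freed at the end.
	 */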
#ifdef CONFIG_DRM_AMD_DC_FP
	switch (clk_mgr_base->ctx->asic_id.chip_family) {
	case FAMILY_NV:
		if (ASICREV_IS_SIENNA_CICHLID_P(clk_mgr_base->ctx->asic_id.hw_internal_rev) ||
				ASICREV_IS_DIMGREY_CAVEFISH_P(clk_mgr_base->ctx->asic_id.hw_internal_rev) ||
				ASICREV_IS_BEIGE_GOBY_P(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			dcn3_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			vg_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_YELLOW_CARP:
		dcn31_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_6:
		dcn315_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_7:
		dcn316_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_0:
		dcn32_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_1:
		dcn314_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_5_0:
		dcn35_clk_mgr_destroy(clk_mgr);
		break;

	default:
		break;
	}
#endif /* CONFIG_DRM_AMD_DC_FP */

	kfree(clk_mgr);
}

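/*
 * Illustrative create/destroy pairing, as a rough sketch only; the real
 * caller sits in DC's core construct/destruct path, and dc_ctx, pp_smu and
 * dccg below stand in for objects that path has already set up:
 *
 *	struct clk_mgr *clk_mgr = dc_clk_mgr_create(dc_ctx, pp_smu, dccg);
 *
 *	if (!clk_mgr)
 *		return false;
 *	...
 *	dc_destroy_clk_mgr(clk_mgr);
 */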