0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026 #include <linux/slab.h>
0027
0028 #include "dal_asic_id.h"
0029 #include "dc_types.h"
0030 #include "dccg.h"
0031 #include "clk_mgr_internal.h"
0032
0033 #include "dce100/dce_clk_mgr.h"
0034 #include "dce110/dce110_clk_mgr.h"
0035 #include "dce112/dce112_clk_mgr.h"
0036 #include "dce120/dce120_clk_mgr.h"
0037 #include "dce60/dce60_clk_mgr.h"
0038 #include "dcn10/rv1_clk_mgr.h"
0039 #include "dcn10/rv2_clk_mgr.h"
0040 #include "dcn20/dcn20_clk_mgr.h"
0041 #include "dcn21/rn_clk_mgr.h"
0042 #include "dcn201/dcn201_clk_mgr.h"
0043 #include "dcn30/dcn30_clk_mgr.h"
0044 #include "dcn301/vg_clk_mgr.h"
0045 #include "dcn31/dcn31_clk_mgr.h"
0046 #include "dcn314/dcn314_clk_mgr.h"
0047 #include "dcn315/dcn315_clk_mgr.h"
0048 #include "dcn316/dcn316_clk_mgr.h"
0049 #include "dcn32/dcn32_clk_mgr.h"
0050
0051 int clk_mgr_helper_get_active_display_cnt(
0052 struct dc *dc,
0053 struct dc_state *context)
0054 {
0055 int i, display_count;
0056
0057 display_count = 0;
0058 for (i = 0; i < context->stream_count; i++) {
0059 const struct dc_stream_state *stream = context->streams[i];
0060
0061
0062
0063
0064 if (stream->mall_stream_config.type == SUBVP_PHANTOM)
0065 continue;
0066
0067
0068
0069
0070
0071
0072
0073 if (!stream->dpms_off || stream->signal == SIGNAL_TYPE_VIRTUAL)
0074 display_count++;
0075 }
0076
0077 return display_count;
0078 }
0079
0080 int clk_mgr_helper_get_active_plane_cnt(
0081 struct dc *dc,
0082 struct dc_state *context)
0083 {
0084 int i, total_plane_count;
0085
0086 total_plane_count = 0;
0087 for (i = 0; i < context->stream_count; i++) {
0088 const struct dc_stream_status stream_status = context->stream_status[i];
0089
0090
0091
0092
0093 total_plane_count += stream_status.plane_count;
0094 }
0095
0096 return total_plane_count;
0097 }
0098
0099 void clk_mgr_exit_optimized_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
0100 {
0101 struct dc_link *edp_links[MAX_NUM_EDP];
0102 struct dc_link *edp_link = NULL;
0103 int edp_num;
0104 unsigned int panel_inst;
0105
0106 get_edp_links(dc, edp_links, &edp_num);
0107 if (dc->hwss.exit_optimized_pwr_state)
0108 dc->hwss.exit_optimized_pwr_state(dc, dc->current_state);
0109
0110 if (edp_num) {
0111 for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
0112 bool allow_active = false;
0113
0114 edp_link = edp_links[panel_inst];
0115 if (!edp_link->psr_settings.psr_feature_enabled)
0116 continue;
0117 clk_mgr->psr_allow_active_cache = edp_link->psr_settings.psr_allow_active;
0118 dc_link_set_psr_allow_active(edp_link, &allow_active, false, false, NULL);
0119 }
0120 }
0121
0122 }
0123
0124 void clk_mgr_optimize_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
0125 {
0126 struct dc_link *edp_links[MAX_NUM_EDP];
0127 struct dc_link *edp_link = NULL;
0128 int edp_num;
0129 unsigned int panel_inst;
0130
0131 get_edp_links(dc, edp_links, &edp_num);
0132 if (edp_num) {
0133 for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
0134 edp_link = edp_links[panel_inst];
0135 if (!edp_link->psr_settings.psr_feature_enabled)
0136 continue;
0137 dc_link_set_psr_allow_active(edp_link,
0138 &clk_mgr->psr_allow_active_cache, false, false, NULL);
0139 }
0140 }
0141
0142 if (dc->hwss.optimize_pwr_state)
0143 dc->hwss.optimize_pwr_state(dc, dc->current_state);
0144
0145 }
0146
0147 struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg)
0148 {
0149 struct hw_asic_id asic_id = ctx->asic_id;
0150
0151 switch (asic_id.chip_family) {
0152 #if defined(CONFIG_DRM_AMD_DC_SI)
0153 case FAMILY_SI: {
0154 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
0155
0156 if (clk_mgr == NULL) {
0157 BREAK_TO_DEBUGGER();
0158 return NULL;
0159 }
0160 dce60_clk_mgr_construct(ctx, clk_mgr);
0161 dce_clk_mgr_construct(ctx, clk_mgr);
0162 return &clk_mgr->base;
0163 }
0164 #endif
0165 case FAMILY_CI:
0166 case FAMILY_KV: {
0167 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
0168
0169 if (clk_mgr == NULL) {
0170 BREAK_TO_DEBUGGER();
0171 return NULL;
0172 }
0173 dce_clk_mgr_construct(ctx, clk_mgr);
0174 return &clk_mgr->base;
0175 }
0176 case FAMILY_CZ: {
0177 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
0178
0179 if (clk_mgr == NULL) {
0180 BREAK_TO_DEBUGGER();
0181 return NULL;
0182 }
0183 dce110_clk_mgr_construct(ctx, clk_mgr);
0184 return &clk_mgr->base;
0185 }
0186 case FAMILY_VI: {
0187 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
0188
0189 if (clk_mgr == NULL) {
0190 BREAK_TO_DEBUGGER();
0191 return NULL;
0192 }
0193 if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
0194 ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
0195 dce_clk_mgr_construct(ctx, clk_mgr);
0196 return &clk_mgr->base;
0197 }
0198 if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
0199 ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
0200 ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
0201 dce112_clk_mgr_construct(ctx, clk_mgr);
0202 return &clk_mgr->base;
0203 }
0204 if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev)) {
0205 dce112_clk_mgr_construct(ctx, clk_mgr);
0206 return &clk_mgr->base;
0207 }
0208 return &clk_mgr->base;
0209 }
0210 case FAMILY_AI: {
0211 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
0212
0213 if (clk_mgr == NULL) {
0214 BREAK_TO_DEBUGGER();
0215 return NULL;
0216 }
0217 if (ASICREV_IS_VEGA20_P(asic_id.hw_internal_rev))
0218 dce121_clk_mgr_construct(ctx, clk_mgr);
0219 else
0220 dce120_clk_mgr_construct(ctx, clk_mgr);
0221 return &clk_mgr->base;
0222 }
0223 #if defined(CONFIG_DRM_AMD_DC_DCN)
0224 case FAMILY_RV: {
0225 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
0226
0227 if (clk_mgr == NULL) {
0228 BREAK_TO_DEBUGGER();
0229 return NULL;
0230 }
0231
0232 if (ASICREV_IS_RENOIR(asic_id.hw_internal_rev)) {
0233 rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
0234 return &clk_mgr->base;
0235 }
0236
0237 if (ASICREV_IS_GREEN_SARDINE(asic_id.hw_internal_rev)) {
0238 rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
0239 return &clk_mgr->base;
0240 }
0241 if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev)) {
0242 rv2_clk_mgr_construct(ctx, clk_mgr, pp_smu);
0243 return &clk_mgr->base;
0244 }
0245 if (ASICREV_IS_RAVEN(asic_id.hw_internal_rev) ||
0246 ASICREV_IS_PICASSO(asic_id.hw_internal_rev)) {
0247 rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
0248 return &clk_mgr->base;
0249 }
0250 return &clk_mgr->base;
0251 }
0252 case FAMILY_NV: {
0253 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
0254
0255 if (clk_mgr == NULL) {
0256 BREAK_TO_DEBUGGER();
0257 return NULL;
0258 }
0259 if (ASICREV_IS_SIENNA_CICHLID_P(asic_id.hw_internal_rev)) {
0260 dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
0261 return &clk_mgr->base;
0262 }
0263 if (ASICREV_IS_DIMGREY_CAVEFISH_P(asic_id.hw_internal_rev)) {
0264 dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
0265 return &clk_mgr->base;
0266 }
0267 if (ASICREV_IS_BEIGE_GOBY_P(asic_id.hw_internal_rev)) {
0268 dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
0269 return &clk_mgr->base;
0270 }
0271 if (asic_id.chip_id == DEVICE_ID_NV_13FE) {
0272 dcn201_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
0273 return &clk_mgr->base;
0274 }
0275 dcn20_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
0276 return &clk_mgr->base;
0277 }
0278 case FAMILY_VGH:
0279 if (ASICREV_IS_VANGOGH(asic_id.hw_internal_rev)) {
0280 struct clk_mgr_vgh *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
0281
0282 if (clk_mgr == NULL) {
0283 BREAK_TO_DEBUGGER();
0284 return NULL;
0285 }
0286 vg_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
0287 return &clk_mgr->base.base;
0288 }
0289 break;
0290
0291 case FAMILY_YELLOW_CARP: {
0292 struct clk_mgr_dcn31 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
0293
0294 if (clk_mgr == NULL) {
0295 BREAK_TO_DEBUGGER();
0296 return NULL;
0297 }
0298
0299 dcn31_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
0300 return &clk_mgr->base.base;
0301 }
0302 break;
0303 case AMDGPU_FAMILY_GC_10_3_6: {
0304 struct clk_mgr_dcn315 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
0305
0306 if (clk_mgr == NULL) {
0307 BREAK_TO_DEBUGGER();
0308 return NULL;
0309 }
0310
0311 dcn315_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
0312 return &clk_mgr->base.base;
0313 }
0314 break;
0315 case AMDGPU_FAMILY_GC_10_3_7: {
0316 struct clk_mgr_dcn316 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
0317
0318 if (clk_mgr == NULL) {
0319 BREAK_TO_DEBUGGER();
0320 return NULL;
0321 }
0322
0323 dcn316_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
0324 return &clk_mgr->base.base;
0325 }
0326 break;
0327 case AMDGPU_FAMILY_GC_11_0_0: {
0328 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
0329
0330 if (clk_mgr == NULL) {
0331 BREAK_TO_DEBUGGER();
0332 return NULL;
0333 }
0334
0335 dcn32_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
0336 return &clk_mgr->base;
0337 break;
0338 }
0339
0340 case AMDGPU_FAMILY_GC_11_0_1: {
0341 struct clk_mgr_dcn314 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
0342
0343 if (clk_mgr == NULL) {
0344 BREAK_TO_DEBUGGER();
0345 return NULL;
0346 }
0347
0348 dcn314_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
0349 return &clk_mgr->base.base;
0350 }
0351 break;
0352
0353 #endif
0354 default:
0355 ASSERT(0);
0356 break;
0357 }
0358
0359 return NULL;
0360 }
0361
/*
 * Tear down a clk_mgr created by dc_clk_mgr_create(): run the
 * family-specific destructor (which releases internally allocated state
 * such as SMU clock tables), then free the manager itself.
 */
void dc_destroy_clk_mgr(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

#ifdef CONFIG_DRM_AMD_DC_DCN
	switch (clk_mgr_base->ctx->asic_id.chip_family) {
	case FAMILY_NV:
		/* All three DCN3.0x revisions share the same destructor;
		 * fold the mutually exclusive checks into one condition.
		 */
		if (ASICREV_IS_SIENNA_CICHLID_P(clk_mgr_base->ctx->asic_id.hw_internal_rev) ||
				ASICREV_IS_DIMGREY_CAVEFISH_P(clk_mgr_base->ctx->asic_id.hw_internal_rev) ||
				ASICREV_IS_BEIGE_GOBY_P(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			dcn3_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			vg_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_YELLOW_CARP:
		dcn31_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_6:
		dcn315_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_7:
		dcn316_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_0:
		dcn32_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_1:
		dcn314_clk_mgr_destroy(clk_mgr);
		break;

	default:
		break;
	}
#endif

	kfree(clk_mgr);
}
0411