#include "dcn20_hubbub.h"
#include "reg_helper.h"
#include "clk_mgr.h"

#define REG(reg)\
	hubbub1->regs->reg

#define CTX \
	hubbub1->base.ctx

#undef FN
#define FN(reg_name, field_name) \
	hubbub1->shifts->field_name, hubbub1->masks->field_name

#ifdef NUM_VMID
#undef NUM_VMID
#endif
#define NUM_VMID 16

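/*
 * Map a surface swizzle mode and element size to the DCC segment ordering
 * (contiguous, non-contiguous or n.a.) seen in the horizontal and vertical
 * access directions. Returns false for combinations that do not support DCC.
 */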
bool hubbub2_dcc_support_swizzle(
		enum swizzle_mode_values swizzle,
		unsigned int bytes_per_element,
		enum segment_order *segment_order_horz,
		enum segment_order *segment_order_vert)
{
	bool standard_swizzle = false;
	bool display_swizzle = false;
	bool render_swizzle = false;

	switch (swizzle) {
	case DC_SW_4KB_S:
	case DC_SW_64KB_S:
	case DC_SW_VAR_S:
	case DC_SW_4KB_S_X:
	case DC_SW_64KB_S_X:
	case DC_SW_VAR_S_X:
		standard_swizzle = true;
		break;
	case DC_SW_64KB_R_X:
		render_swizzle = true;
		break;
	case DC_SW_4KB_D:
	case DC_SW_64KB_D:
	case DC_SW_VAR_D:
	case DC_SW_4KB_D_X:
	case DC_SW_64KB_D_X:
	case DC_SW_VAR_D_X:
		display_swizzle = true;
		break;
	default:
		break;
	}

	if (standard_swizzle) {
		if (bytes_per_element == 1) {
			*segment_order_horz = segment_order__contiguous;
			*segment_order_vert = segment_order__na;
			return true;
		}
		if (bytes_per_element == 2) {
			*segment_order_horz = segment_order__non_contiguous;
			*segment_order_vert = segment_order__contiguous;
			return true;
		}
		if (bytes_per_element == 4) {
			*segment_order_horz = segment_order__non_contiguous;
			*segment_order_vert = segment_order__contiguous;
			return true;
		}
		if (bytes_per_element == 8) {
			*segment_order_horz = segment_order__na;
			*segment_order_vert = segment_order__contiguous;
			return true;
		}
	}
	if (render_swizzle) {
		if (bytes_per_element == 2) {
			*segment_order_horz = segment_order__contiguous;
			*segment_order_vert = segment_order__contiguous;
			return true;
		}
		if (bytes_per_element == 4) {
			*segment_order_horz = segment_order__non_contiguous;
			*segment_order_vert = segment_order__contiguous;
			return true;
		}
		if (bytes_per_element == 8) {
			*segment_order_horz = segment_order__contiguous;
			*segment_order_vert = segment_order__non_contiguous;
			return true;
		}
	}
	if (display_swizzle && bytes_per_element == 8) {
		*segment_order_horz = segment_order__contiguous;
		*segment_order_vert = segment_order__non_contiguous;
		return true;
	}

	return false;
}

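/*
 * Report whether a surface pixel format supports DCC and, if so, its size
 * in bytes per element (2, 4 or 8).
 */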
bool hubbub2_dcc_support_pixel_format(
		enum surface_pixel_format format,
		unsigned int *bytes_per_element)
{
	switch (format) {
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
		*bytes_per_element = 2;
		return true;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_RGB111110_FIX:
	case SURFACE_PIXEL_FORMAT_GRPH_BGR101111_FIX:
	case SURFACE_PIXEL_FORMAT_GRPH_RGB111110_FLOAT:
	case SURFACE_PIXEL_FORMAT_GRPH_BGR101111_FLOAT:
	case SURFACE_PIXEL_FORMAT_GRPH_RGBE:
	case SURFACE_PIXEL_FORMAT_GRPH_RGBE_ALPHA:
		*bytes_per_element = 4;
		return true;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616:
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
		*bytes_per_element = 8;
		return true;
	default:
		return false;
	}
}

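/*
 * Width and height, in elements, of a 256-byte block for the given element
 * size: blk256_width * blk256_height * bytes_per_element == 256.
 */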
static void hubbub2_get_blk256_size(unsigned int *blk256_width, unsigned int *blk256_height,
		unsigned int bytes_per_element)
{
	if (bytes_per_element == 1) {
		*blk256_width = 16;
		*blk256_height = 16;
	} else if (bytes_per_element == 2) {
		*blk256_width = 16;
		*blk256_height = 8;
	} else if (bytes_per_element == 4) {
		*blk256_width = 8;
		*blk256_height = 8;
	} else if (bytes_per_element == 8) {
		*blk256_width = 8;
		*blk256_height = 4;
	}
}

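/*
 * Decide whether 128-byte requests are needed for horizontal and vertical
 * write combining: a 128B request is used when twice the swath size would
 * not fit in the detile buffer.
 */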
static void hubbub2_det_request_size(
		unsigned int detile_buf_size,
		unsigned int height,
		unsigned int width,
		unsigned int bpe,
		bool *req128_horz_wc,
		bool *req128_vert_wc)
{
	unsigned int blk256_height = 0;
	unsigned int blk256_width = 0;
	unsigned int swath_bytes_horz_wc, swath_bytes_vert_wc;

	hubbub2_get_blk256_size(&blk256_width, &blk256_height, bpe);

	swath_bytes_horz_wc = width * blk256_height * bpe;
	swath_bytes_vert_wc = height * blk256_width * bpe;

	*req128_horz_wc = (2 * swath_bytes_horz_wc <= detile_buf_size) ?
			false :
			true;

	*req128_vert_wc = (2 * swath_bytes_vert_wc <= detile_buf_size) ?
			false :
			true;
}

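/*
 * Determine the DCC compression capability of a surface: validate the pixel
 * format and swizzle mode, then pick the max uncompressed/compressed block
 * sizes (256/128/64) based on the detile-buffer request size and the segment
 * ordering for the given scan direction.
 */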
bool hubbub2_get_dcc_compression_cap(struct hubbub *hubbub,
		const struct dc_dcc_surface_param *input,
		struct dc_surface_dcc_cap *output)
{
	struct dc *dc = hubbub->ctx->dc;

	enum dcc_control dcc_control;
	unsigned int bpe;
	enum segment_order segment_order_horz, segment_order_vert;
	bool req128_horz_wc, req128_vert_wc;

	memset(output, 0, sizeof(*output));

	if (dc->debug.disable_dcc == DCC_DISABLE)
		return false;

	if (!hubbub->funcs->dcc_support_pixel_format(input->format,
			&bpe))
		return false;

	if (!hubbub->funcs->dcc_support_swizzle(input->swizzle_mode, bpe,
			&segment_order_horz, &segment_order_vert))
		return false;

	hubbub2_det_request_size(TO_DCN20_HUBBUB(hubbub)->detile_buf_size,
			input->surface_size.height, input->surface_size.width,
			bpe, &req128_horz_wc, &req128_vert_wc);

	if (!req128_horz_wc && !req128_vert_wc) {
		dcc_control = dcc_control__256_256_xxx;
	} else if (input->scan == SCAN_DIRECTION_HORIZONTAL) {
		if (!req128_horz_wc)
			dcc_control = dcc_control__256_256_xxx;
		else if (segment_order_horz == segment_order__contiguous)
			dcc_control = dcc_control__128_128_xxx;
		else
			dcc_control = dcc_control__256_64_64;
	} else if (input->scan == SCAN_DIRECTION_VERTICAL) {
		if (!req128_vert_wc)
			dcc_control = dcc_control__256_256_xxx;
		else if (segment_order_vert == segment_order__contiguous)
			dcc_control = dcc_control__128_128_xxx;
		else
			dcc_control = dcc_control__256_64_64;
	} else {
		/* Scan direction unknown: use the most constraining setting if
		 * either direction needs 128B requests with non-contiguous
		 * segments.
		 */
		if ((req128_horz_wc &&
			segment_order_horz == segment_order__non_contiguous) ||
			(req128_vert_wc &&
			segment_order_vert == segment_order__non_contiguous))
			dcc_control = dcc_control__256_64_64;
		else
			dcc_control = dcc_control__128_128_xxx;
	}

	/* Exception for 64KB_R_X swizzle with 2 bytes per element */
	if ((bpe == 2) && (input->swizzle_mode == DC_SW_64KB_R_X))
		dcc_control = dcc_control__128_128_xxx;

	if (dc->debug.disable_dcc == DCC_HALF_REQ_DISALBE &&
		dcc_control != dcc_control__256_256_xxx)
		return false;

	switch (dcc_control) {
	case dcc_control__256_256_xxx:
		output->grph.rgb.max_uncompressed_blk_size = 256;
		output->grph.rgb.max_compressed_blk_size = 256;
		output->grph.rgb.independent_64b_blks = false;
		break;
	case dcc_control__128_128_xxx:
		output->grph.rgb.max_uncompressed_blk_size = 128;
		output->grph.rgb.max_compressed_blk_size = 128;
		output->grph.rgb.independent_64b_blks = false;
		break;
	case dcc_control__256_64_64:
		output->grph.rgb.max_uncompressed_blk_size = 256;
		output->grph.rgb.max_compressed_blk_size = 64;
		output->grph.rgb.independent_64b_blks = true;
		break;
	default:
		ASSERT(false);
		break;
	}
	output->capable = true;
	output->const_color_support = true;

	return true;
}

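/*
 * Translate the generic page table depth (1-4 levels) into the corresponding
 * DCN_PAGE_TABLE_DEPTH_* hardware enum.
 */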
static enum dcn_hubbub_page_table_depth page_table_depth_to_hw(unsigned int page_table_depth)
{
	enum dcn_hubbub_page_table_depth depth = 0;

	switch (page_table_depth) {
	case 1:
		depth = DCN_PAGE_TABLE_DEPTH_1_LEVEL;
		break;
	case 2:
		depth = DCN_PAGE_TABLE_DEPTH_2_LEVEL;
		break;
	case 3:
		depth = DCN_PAGE_TABLE_DEPTH_3_LEVEL;
		break;
	case 4:
		depth = DCN_PAGE_TABLE_DEPTH_4_LEVEL;
		break;
	default:
		ASSERT(false);
		break;
	}

	return depth;
}

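/*
 * Translate a page table block size in bytes (4K/32K/64K) into the
 * corresponding DCN_PAGE_TABLE_BLOCK_SIZE_* hardware enum.
 */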
static enum dcn_hubbub_page_table_block_size page_table_block_size_to_hw(unsigned int page_table_block_size)
{
	enum dcn_hubbub_page_table_block_size block_size = 0;

	switch (page_table_block_size) {
	case 4096:
		block_size = DCN_PAGE_TABLE_BLOCK_SIZE_4KB;
		break;
	case 65536:
		block_size = DCN_PAGE_TABLE_BLOCK_SIZE_64KB;
		break;
	case 32768:
		block_size = DCN_PAGE_TABLE_BLOCK_SIZE_32KB;
		break;
	default:
		ASSERT(false);
		block_size = page_table_block_size;
		break;
	}

	return block_size;
}

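/*
 * Program one VMID with its per-context virtual addressing configuration
 * (page table start/end/base addresses, depth and block size).
 */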
void hubbub2_init_vm_ctx(struct hubbub *hubbub,
		struct dcn_hubbub_virt_addr_config *va_config,
		int vmid)
{
	struct dcn20_hubbub *hubbub1 = TO_DCN20_HUBBUB(hubbub);
	struct dcn_vmid_page_table_config virt_config;

	virt_config.page_table_start_addr = va_config->page_table_start_addr >> 12;
	virt_config.page_table_end_addr = va_config->page_table_end_addr >> 12;
	virt_config.depth = page_table_depth_to_hw(va_config->page_table_depth);
	virt_config.block_size = page_table_block_size_to_hw(va_config->page_table_block_size);
	virt_config.page_table_base_addr = va_config->page_table_base_addr;

	dcn20_vmid_setup(&hubbub1->vmid[vmid], &virt_config);
}

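/*
 * Program the system aperture (frame buffer and AGP ranges), the default
 * page address for VM protection faults and, when a GART range is provided,
 * the system context page table in VMID 0. Returns the number of VMIDs.
 */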
int hubbub2_init_dchub_sys_ctx(struct hubbub *hubbub,
		struct dcn_hubbub_phys_addr_config *pa_config)
{
	struct dcn20_hubbub *hubbub1 = TO_DCN20_HUBBUB(hubbub);
	struct dcn_vmid_page_table_config phys_config;

	REG_SET(DCN_VM_FB_LOCATION_BASE, 0,
			FB_BASE, pa_config->system_aperture.fb_base >> 24);
	REG_SET(DCN_VM_FB_LOCATION_TOP, 0,
			FB_TOP, pa_config->system_aperture.fb_top >> 24);
	REG_SET(DCN_VM_FB_OFFSET, 0,
			FB_OFFSET, pa_config->system_aperture.fb_offset >> 24);
	REG_SET(DCN_VM_AGP_BOT, 0,
			AGP_BOT, pa_config->system_aperture.agp_bot >> 24);
	REG_SET(DCN_VM_AGP_TOP, 0,
			AGP_TOP, pa_config->system_aperture.agp_top >> 24);
	REG_SET(DCN_VM_AGP_BASE, 0,
			AGP_BASE, pa_config->system_aperture.agp_base >> 24);

	REG_SET(DCN_VM_PROTECTION_FAULT_DEFAULT_ADDR_MSB, 0,
			DCN_VM_PROTECTION_FAULT_DEFAULT_ADDR_MSB, (pa_config->page_table_default_page_addr >> 44) & 0xF);
	REG_SET(DCN_VM_PROTECTION_FAULT_DEFAULT_ADDR_LSB, 0,
			DCN_VM_PROTECTION_FAULT_DEFAULT_ADDR_LSB, (pa_config->page_table_default_page_addr >> 12) & 0xFFFFFFFF);

	if (pa_config->gart_config.page_table_start_addr != pa_config->gart_config.page_table_end_addr) {
		phys_config.page_table_start_addr = pa_config->gart_config.page_table_start_addr >> 12;
		phys_config.page_table_end_addr = pa_config->gart_config.page_table_end_addr >> 12;
		phys_config.page_table_base_addr = pa_config->gart_config.page_table_base_addr;
		phys_config.depth = 0;
		phys_config.block_size = 0;

		dcn20_vmid_setup(&hubbub1->vmid[0], &phys_config);
	}

	return NUM_VMID;
}

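/*
 * Reprogram the frame buffer and AGP apertures according to the requested
 * frame buffer mode (ZFB only, mixed ZFB and local, or local only).
 */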
void hubbub2_update_dchub(struct hubbub *hubbub,
		struct dchub_init_data *dh_data)
{
	struct dcn20_hubbub *hubbub1 = TO_DCN20_HUBBUB(hubbub);

	if (REG(DCN_VM_FB_LOCATION_TOP) == 0)
		return;

	switch (dh_data->fb_mode) {
	case FRAME_BUFFER_MODE_ZFB_ONLY:
		/* For ZFB, program an inverted (empty) FB range: TOP below BASE */
		REG_UPDATE(DCN_VM_FB_LOCATION_TOP,
				FB_TOP, 0);

		REG_UPDATE(DCN_VM_FB_LOCATION_BASE,
				FB_BASE, 0xFFFFFF);

		/* AGP aperture carries the ZFB range; only the upper address
		 * bits (address >> 24) are programmed.
		 */
		REG_UPDATE(DCN_VM_AGP_BASE,
				AGP_BASE, dh_data->zfb_phys_addr_base >> 24);

		REG_UPDATE(DCN_VM_AGP_BOT,
				AGP_BOT, dh_data->zfb_mc_base_addr >> 24);

		REG_UPDATE(DCN_VM_AGP_TOP,
				AGP_TOP, (dh_data->zfb_mc_base_addr +
						dh_data->zfb_size_in_byte - 1) >> 24);
		break;
	case FRAME_BUFFER_MODE_MIXED_ZFB_AND_LOCAL:
		/* Keep the local FB range; point the AGP aperture at the ZFB range */
		REG_UPDATE(DCN_VM_AGP_BASE,
				AGP_BASE, dh_data->zfb_phys_addr_base >> 24);

		REG_UPDATE(DCN_VM_AGP_BOT,
				AGP_BOT, dh_data->zfb_mc_base_addr >> 24);

		REG_UPDATE(DCN_VM_AGP_TOP,
				AGP_TOP, (dh_data->zfb_mc_base_addr +
						dh_data->zfb_size_in_byte - 1) >> 24);
		break;
	case FRAME_BUFFER_MODE_LOCAL_ONLY:
		/* Disable the AGP aperture: BOT above TOP gives an empty range */
		REG_UPDATE(DCN_VM_AGP_BASE,
				AGP_BASE, 0);

		REG_UPDATE(DCN_VM_AGP_BOT,
				AGP_BOT, 0xFFFFFF);

		REG_UPDATE(DCN_VM_AGP_TOP,
				AGP_TOP, 0);
		break;
	default:
		break;
	}

	dh_data->dchub_initialzied = true;
	dh_data->dchub_info_valid = false;
}

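/*
 * Read back the four programmed watermark sets (A-D): data urgent, PTE/meta
 * urgent, self-refresh enter/exit and DRAM clock change watermarks.
 */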
void hubbub2_wm_read_state(struct hubbub *hubbub,
		struct dcn_hubbub_wm *wm)
{
	struct dcn20_hubbub *hubbub1 = TO_DCN20_HUBBUB(hubbub);
	struct dcn_hubbub_wm_set *s;

	memset(wm, 0, sizeof(struct dcn_hubbub_wm));

	s = &wm->sets[0];
	s->wm_set = 0;
	s->data_urgent = REG_READ(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_A);
	if (REG(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_A))
		s->pte_meta_urgent = REG_READ(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_A);
	if (REG(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_A)) {
		s->sr_enter = REG_READ(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_A);
		s->sr_exit = REG_READ(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_A);
	}
	s->dram_clk_chanage = REG_READ(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_A);

	s = &wm->sets[1];
	s->wm_set = 1;
	s->data_urgent = REG_READ(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_B);
	if (REG(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_B))
		s->pte_meta_urgent = REG_READ(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_B);
	if (REG(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_B)) {
		s->sr_enter = REG_READ(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_B);
		s->sr_exit = REG_READ(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_B);
	}
	s->dram_clk_chanage = REG_READ(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_B);

	s = &wm->sets[2];
	s->wm_set = 2;
	s->data_urgent = REG_READ(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_C);
	if (REG(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_C))
		s->pte_meta_urgent = REG_READ(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_C);
	if (REG(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_C)) {
		s->sr_enter = REG_READ(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_C);
		s->sr_exit = REG_READ(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_C);
	}
	s->dram_clk_chanage = REG_READ(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_C);

	s = &wm->sets[3];
	s->wm_set = 3;
	s->data_urgent = REG_READ(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_D);
	if (REG(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_D))
		s->pte_meta_urgent = REG_READ(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_D);
	if (REG(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_D)) {
		s->sr_enter = REG_READ(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_D);
		s->sr_exit = REG_READ(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_D);
	}
	s->dram_clk_chanage = REG_READ(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_D);
}

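/*
 * Derive the DCHUB reference frequency from the DCCG reference clock and the
 * global timer refdiv. The global timer must be enabled and the result is
 * expected to land in the 40-60 MHz range, otherwise assert.
 */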
void hubbub2_get_dchub_ref_freq(struct hubbub *hubbub,
		unsigned int dccg_ref_freq_inKhz,
		unsigned int *dchub_ref_freq_inKhz)
{
	struct dcn20_hubbub *hubbub1 = TO_DCN20_HUBBUB(hubbub);
	uint32_t ref_div = 0;
	uint32_t ref_en = 0;

	REG_GET_2(DCHUBBUB_GLOBAL_TIMER_CNTL, DCHUBBUB_GLOBAL_TIMER_REFDIV, &ref_div,
			DCHUBBUB_GLOBAL_TIMER_ENABLE, &ref_en);

	if (ref_en) {
		if (ref_div == 2)
			*dchub_ref_freq_inKhz = dccg_ref_freq_inKhz / 2;
		else
			*dchub_ref_freq_inKhz = dccg_ref_freq_inKhz;

		/*
		 * The DCHUB reference frequency is expected to be around 50 MHz;
		 * values outside 40-60 MHz risk overflow/underflow in HUBBUB
		 * register programming.
		 */
		if (*dchub_ref_freq_inKhz < 40000 || *dchub_ref_freq_inKhz > 60000)
			ASSERT_CRITICAL(false);

		return;
	} else {
		*dchub_ref_freq_inKhz = dccg_ref_freq_inKhz;

		/* The HUBBUB global timer must be enabled */
		ASSERT_CRITICAL(false);
		return;
	}
}

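/*
 * Program the urgent, stutter and p-state watermark sets and the arbiter
 * saturation/outstanding-request limits. Returns true if any watermark
 * change is still pending.
 */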
static bool hubbub2_program_watermarks(
		struct hubbub *hubbub,
		struct dcn_watermark_set *watermarks,
		unsigned int refclk_mhz,
		bool safe_to_lower)
{
	struct dcn20_hubbub *hubbub1 = TO_DCN20_HUBBUB(hubbub);
	bool wm_pending = false;

	if (hubbub1_program_urgent_watermarks(hubbub, watermarks, refclk_mhz, safe_to_lower))
		wm_pending = true;

	if (hubbub1_program_stutter_watermarks(hubbub, watermarks, refclk_mhz, safe_to_lower))
		wm_pending = true;

	/*
	 * When going from p-state supported to p-state unsupported, the p-state
	 * watermark must be allowed to be lowered for the transition, so force
	 * safe_to_lower in that case.
	 */
	if (hubbub1->base.ctx->dc->clk_mgr->clks.prev_p_state_change_support == true &&
			hubbub1->base.ctx->dc->clk_mgr->clks.p_state_change_support == false)
		safe_to_lower = true;

	hubbub1_program_pstate_watermarks(hubbub, watermarks, refclk_mhz, safe_to_lower);

	REG_SET(DCHUBBUB_ARB_SAT_LEVEL, 0,
			DCHUBBUB_ARB_SAT_LEVEL, 60 * refclk_mhz);
	REG_UPDATE(DCHUBBUB_ARB_DF_REQ_OUTSTAND, DCHUBBUB_ARB_MIN_REQ_OUTSTAND, 180);

	hubbub->funcs->allow_self_refresh_control(hubbub, !hubbub->ctx->dc->debug.disable_stutter);

	return wm_pending;
}

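/*
 * Snapshot the VM fault address, error mode and fault status (error code,
 * VMID and pipe) for debugging.
 */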
void hubbub2_read_state(struct hubbub *hubbub, struct dcn_hubbub_state *hubbub_state)
{
	struct dcn20_hubbub *hubbub1 = TO_DCN20_HUBBUB(hubbub);

	if (REG(DCN_VM_FAULT_ADDR_MSB))
		hubbub_state->vm_fault_addr_msb = REG_READ(DCN_VM_FAULT_ADDR_MSB);

	if (REG(DCN_VM_FAULT_ADDR_LSB))
		hubbub_state->vm_fault_addr_lsb = REG_READ(DCN_VM_FAULT_ADDR_LSB);

	if (REG(DCN_VM_FAULT_CNTL))
		REG_GET(DCN_VM_FAULT_CNTL, DCN_VM_ERROR_STATUS_MODE, &hubbub_state->vm_error_mode);

	if (REG(DCN_VM_FAULT_STATUS)) {
		REG_GET(DCN_VM_FAULT_STATUS, DCN_VM_ERROR_STATUS, &hubbub_state->vm_error_status);
		REG_GET(DCN_VM_FAULT_STATUS, DCN_VM_ERROR_VMID, &hubbub_state->vm_error_vmid);
		REG_GET(DCN_VM_FAULT_STATUS, DCN_VM_ERROR_PIPE, &hubbub_state->vm_error_pipe);
	}
}

static const struct hubbub_funcs hubbub2_funcs = {
	.update_dchub = hubbub2_update_dchub,
	.init_dchub_sys_ctx = hubbub2_init_dchub_sys_ctx,
	.init_vm_ctx = hubbub2_init_vm_ctx,
	.dcc_support_swizzle = hubbub2_dcc_support_swizzle,
	.dcc_support_pixel_format = hubbub2_dcc_support_pixel_format,
	.get_dcc_compression_cap = hubbub2_get_dcc_compression_cap,
	.wm_read_state = hubbub2_wm_read_state,
	.get_dchub_ref_freq = hubbub2_get_dchub_ref_freq,
	.program_watermarks = hubbub2_program_watermarks,
	.is_allow_self_refresh_enabled = hubbub1_is_allow_self_refresh_enabled,
	.allow_self_refresh_control = hubbub1_allow_self_refresh_control,
	.hubbub_read_state = hubbub2_read_state,
};

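/*
 * Construct the DCN2.0 HUBBUB instance: hook up the register/shift/mask
 * tables and the function table, and set the default p-state debug index
 * and detile buffer size.
 */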
void hubbub2_construct(struct dcn20_hubbub *hubbub,
	struct dc_context *ctx,
	const struct dcn_hubbub_registers *hubbub_regs,
	const struct dcn_hubbub_shift *hubbub_shift,
	const struct dcn_hubbub_mask *hubbub_mask)
{
	hubbub->base.ctx = ctx;

	hubbub->base.funcs = &hubbub2_funcs;

	hubbub->regs = hubbub_regs;
	hubbub->shifts = hubbub_shift;
	hubbub->masks = hubbub_mask;

	hubbub->debug_test_index_pstate = 0xB;
	hubbub->detile_buf_size = 164 * 1024;
}