0001
0002
0003
0004
0005
0006 #include <linux/delay.h>
0007 #include "dpu_hwio.h"
0008 #include "dpu_hw_ctl.h"
0009 #include "dpu_kms.h"
0010 #include "dpu_trace.h"
0011
/*
 * Per-mixer layer staging register offsets within a CTL block.
 * LM_5's CTL_LAYER slot does not follow the contiguous LM_0..LM_4
 * layout, hence the special case.
 */
#define   CTL_LAYER(lm)                 \
	(((lm) == LM_5) ? (0x024) : (((lm) - LM_0) * 0x004))
#define   CTL_LAYER_EXT(lm)             \
	(0x40 + (((lm) - LM_0) * 0x004))
#define   CTL_LAYER_EXT2(lm)             \
	(0x70 + (((lm) - LM_0) * 0x004))
#define   CTL_LAYER_EXT3(lm)             \
	(0xA0 + (((lm) - LM_0) * 0x004))
/* Top-level CTL register offsets */
#define   CTL_TOP                       0x014
#define   CTL_FLUSH                     0x018
#define   CTL_START                     0x01C
#define   CTL_PREPARE                   0x0d0
#define   CTL_SW_RESET                  0x030
#define   CTL_LAYER_EXTN_OFFSET         0x40
/* Active-CTL (v1) sub-block active/flush registers */
#define   CTL_MERGE_3D_ACTIVE           0x0E4
#define   CTL_WB_ACTIVE                 0x0EC
#define   CTL_INTF_ACTIVE               0x0F4
#define   CTL_MERGE_3D_FLUSH            0x100
#define   CTL_DSC_ACTIVE                0x0E8
#define   CTL_DSC_FLUSH                0x104
#define   CTL_WB_FLUSH                  0x108
#define   CTL_INTF_FLUSH                0x110
#define   CTL_INTF_MASTER               0x134
#define   CTL_FETCH_PIPE_ACTIVE        0x0FC

#define CTL_MIXER_BORDER_OUT            BIT(24)
#define CTL_FLUSH_MASK_CTL              BIT(17)

#define DPU_REG_RESET_TIMEOUT_US        2000
/* Bit positions in CTL_FLUSH that gate the corresponding sub-block flush */
#define  MERGE_3D_IDX   23
#define  DSC_IDX        22
#define  INTF_IDX       31
#define WB_IDX          16
#define CTL_INVALID_BIT                 0xffff
#define CTL_DEFAULT_GROUP_ID		0xf
0047
/*
 * Maps each SSPP id (index) to its bit position in CTL_FETCH_PIPE_ACTIVE;
 * CTL_INVALID_BIT marks pipes without a fetch-active control bit.
 * Order must track the enum dpu_sspp declaration.
 */
static const u32 fetch_tbl[SSPP_MAX] = {CTL_INVALID_BIT, 16, 17, 18, 19,
	CTL_INVALID_BIT, CTL_INVALID_BIT, CTL_INVALID_BIT, CTL_INVALID_BIT, 0,
	1, 2, 3, CTL_INVALID_BIT, CTL_INVALID_BIT};
0051
0052 static const struct dpu_ctl_cfg *_ctl_offset(enum dpu_ctl ctl,
0053 const struct dpu_mdss_cfg *m,
0054 void __iomem *addr,
0055 struct dpu_hw_blk_reg_map *b)
0056 {
0057 int i;
0058
0059 for (i = 0; i < m->ctl_count; i++) {
0060 if (ctl == m->ctl[i].id) {
0061 b->blk_addr = addr + m->ctl[i].base;
0062 b->log_mask = DPU_DBG_MASK_CTL;
0063 return &m->ctl[i];
0064 }
0065 }
0066 return ERR_PTR(-ENOMEM);
0067 }
0068
0069 static int _mixer_stages(const struct dpu_lm_cfg *mixer, int count,
0070 enum dpu_lm lm)
0071 {
0072 int i;
0073 int stages = -EINVAL;
0074
0075 for (i = 0; i < count; i++) {
0076 if (lm == mixer[i].id) {
0077 stages = mixer[i].sblk->maxblendstages;
0078 break;
0079 }
0080 }
0081
0082 return stages;
0083 }
0084
/* Read back CTL_FLUSH: the hardware's current view of pending flushes. */
static inline u32 dpu_hw_ctl_get_flush_register(struct dpu_hw_ctl *ctx)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;

	return DPU_REG_READ(c, CTL_FLUSH);
}
0091
/* Kick the configured pipeline by writing CTL_START (bit 0). */
static inline void dpu_hw_ctl_trigger_start(struct dpu_hw_ctl *ctx)
{
	trace_dpu_hw_ctl_trigger_start(ctx->pending_flush_mask,
				       dpu_hw_ctl_get_flush_register(ctx));
	DPU_REG_WRITE(&ctx->hw, CTL_START, 0x1);
}
0098
/* True while the hardware still reports a START in flight (bit 0 set). */
static inline bool dpu_hw_ctl_is_started(struct dpu_hw_ctl *ctx)
{
	return !!(DPU_REG_READ(&ctx->hw, CTL_START) & BIT(0));
}
0103
/* Arm the next frame by writing CTL_PREPARE (bit 0). */
static inline void dpu_hw_ctl_trigger_pending(struct dpu_hw_ctl *ctx)
{
	trace_dpu_hw_ctl_trigger_prepare(ctx->pending_flush_mask,
					 dpu_hw_ctl_get_flush_register(ctx));
	DPU_REG_WRITE(&ctx->hw, CTL_PREPARE, 0x1);
}
0110
/* Drop the software-side pending flush mask; no register access. */
static inline void dpu_hw_ctl_clear_pending_flush(struct dpu_hw_ctl *ctx)
{
	trace_dpu_hw_ctl_clear_pending_flush(ctx->pending_flush_mask,
					     dpu_hw_ctl_get_flush_register(ctx));
	ctx->pending_flush_mask = 0x0;
}
0117
/*
 * OR @flushbits into the software pending mask.  Nothing reaches the
 * hardware until trigger_flush() writes CTL_FLUSH.
 */
static inline void dpu_hw_ctl_update_pending_flush(struct dpu_hw_ctl *ctx,
		u32 flushbits)
{
	trace_dpu_hw_ctl_update_pending_flush(flushbits,
					      ctx->pending_flush_mask);
	ctx->pending_flush_mask |= flushbits;
}
0125
/* Return the accumulated (not yet committed) software flush mask. */
static u32 dpu_hw_ctl_get_pending_flush(struct dpu_hw_ctl *ctx)
{
	return ctx->pending_flush_mask;
}
0130
/*
 * Flush path for active-CTL hardware (DPU_CTL_ACTIVE_CFG): program the
 * per-sub-block flush registers (merge-3d, interface, writeback) first,
 * gated by their top-level bits in pending_flush_mask, then commit
 * everything with the main CTL_FLUSH write.
 */
static inline void dpu_hw_ctl_trigger_flush_v1(struct dpu_hw_ctl *ctx)
{
	if (ctx->pending_flush_mask & BIT(MERGE_3D_IDX))
		DPU_REG_WRITE(&ctx->hw, CTL_MERGE_3D_FLUSH,
			      ctx->pending_merge_3d_flush_mask);
	if (ctx->pending_flush_mask & BIT(INTF_IDX))
		DPU_REG_WRITE(&ctx->hw, CTL_INTF_FLUSH,
			      ctx->pending_intf_flush_mask);
	if (ctx->pending_flush_mask & BIT(WB_IDX))
		DPU_REG_WRITE(&ctx->hw, CTL_WB_FLUSH,
			      ctx->pending_wb_flush_mask);

	DPU_REG_WRITE(&ctx->hw, CTL_FLUSH, ctx->pending_flush_mask);
}
0145
/* Legacy flush path: commit the pending mask directly to CTL_FLUSH. */
static inline void dpu_hw_ctl_trigger_flush(struct dpu_hw_ctl *ctx)
{
	trace_dpu_hw_ctl_trigger_pending_flush(ctx->pending_flush_mask,
					       dpu_hw_ctl_get_flush_register(ctx));
	DPU_REG_WRITE(&ctx->hw, CTL_FLUSH, ctx->pending_flush_mask);
}
0152
0153 static uint32_t dpu_hw_ctl_get_bitmask_sspp(struct dpu_hw_ctl *ctx,
0154 enum dpu_sspp sspp)
0155 {
0156 uint32_t flushbits = 0;
0157
0158 switch (sspp) {
0159 case SSPP_VIG0:
0160 flushbits = BIT(0);
0161 break;
0162 case SSPP_VIG1:
0163 flushbits = BIT(1);
0164 break;
0165 case SSPP_VIG2:
0166 flushbits = BIT(2);
0167 break;
0168 case SSPP_VIG3:
0169 flushbits = BIT(18);
0170 break;
0171 case SSPP_RGB0:
0172 flushbits = BIT(3);
0173 break;
0174 case SSPP_RGB1:
0175 flushbits = BIT(4);
0176 break;
0177 case SSPP_RGB2:
0178 flushbits = BIT(5);
0179 break;
0180 case SSPP_RGB3:
0181 flushbits = BIT(19);
0182 break;
0183 case SSPP_DMA0:
0184 flushbits = BIT(11);
0185 break;
0186 case SSPP_DMA1:
0187 flushbits = BIT(12);
0188 break;
0189 case SSPP_DMA2:
0190 flushbits = BIT(24);
0191 break;
0192 case SSPP_DMA3:
0193 flushbits = BIT(25);
0194 break;
0195 case SSPP_CURSOR0:
0196 flushbits = BIT(22);
0197 break;
0198 case SSPP_CURSOR1:
0199 flushbits = BIT(23);
0200 break;
0201 default:
0202 break;
0203 }
0204
0205 return flushbits;
0206 }
0207
0208 static uint32_t dpu_hw_ctl_get_bitmask_mixer(struct dpu_hw_ctl *ctx,
0209 enum dpu_lm lm)
0210 {
0211 uint32_t flushbits = 0;
0212
0213 switch (lm) {
0214 case LM_0:
0215 flushbits = BIT(6);
0216 break;
0217 case LM_1:
0218 flushbits = BIT(7);
0219 break;
0220 case LM_2:
0221 flushbits = BIT(8);
0222 break;
0223 case LM_3:
0224 flushbits = BIT(9);
0225 break;
0226 case LM_4:
0227 flushbits = BIT(10);
0228 break;
0229 case LM_5:
0230 flushbits = BIT(20);
0231 break;
0232 default:
0233 return -EINVAL;
0234 }
0235
0236 flushbits |= CTL_FLUSH_MASK_CTL;
0237
0238 return flushbits;
0239 }
0240
0241 static void dpu_hw_ctl_update_pending_flush_intf(struct dpu_hw_ctl *ctx,
0242 enum dpu_intf intf)
0243 {
0244 switch (intf) {
0245 case INTF_0:
0246 ctx->pending_flush_mask |= BIT(31);
0247 break;
0248 case INTF_1:
0249 ctx->pending_flush_mask |= BIT(30);
0250 break;
0251 case INTF_2:
0252 ctx->pending_flush_mask |= BIT(29);
0253 break;
0254 case INTF_3:
0255 ctx->pending_flush_mask |= BIT(28);
0256 break;
0257 default:
0258 break;
0259 }
0260 }
0261
0262 static void dpu_hw_ctl_update_pending_flush_wb(struct dpu_hw_ctl *ctx,
0263 enum dpu_wb wb)
0264 {
0265 switch (wb) {
0266 case WB_0:
0267 case WB_1:
0268 case WB_2:
0269 ctx->pending_flush_mask |= BIT(WB_IDX);
0270 break;
0271 default:
0272 break;
0273 }
0274 }
0275
/*
 * Active-CTL variant: record the per-WB bit for CTL_WB_FLUSH and set
 * the top-level WB bit so trigger_flush_v1() programs that register.
 */
static void dpu_hw_ctl_update_pending_flush_wb_v1(struct dpu_hw_ctl *ctx,
		enum dpu_wb wb)
{
	ctx->pending_wb_flush_mask |= BIT(wb - WB_0);
	ctx->pending_flush_mask |= BIT(WB_IDX);
}
0282
/*
 * Active-CTL variant: record the per-interface bit for CTL_INTF_FLUSH
 * and set the top-level INTF bit for trigger_flush_v1().
 */
static void dpu_hw_ctl_update_pending_flush_intf_v1(struct dpu_hw_ctl *ctx,
		enum dpu_intf intf)
{
	ctx->pending_intf_flush_mask |= BIT(intf - INTF_0);
	ctx->pending_flush_mask |= BIT(INTF_IDX);
}
0289
/*
 * Active-CTL variant: record the per-block bit for CTL_MERGE_3D_FLUSH
 * and set the top-level MERGE_3D bit for trigger_flush_v1().
 */
static void dpu_hw_ctl_update_pending_flush_merge_3d_v1(struct dpu_hw_ctl *ctx,
		enum dpu_merge_3d merge_3d)
{
	ctx->pending_merge_3d_flush_mask |= BIT(merge_3d - MERGE_3D_0);
	ctx->pending_flush_mask |= BIT(MERGE_3D_IDX);
}
0296
0297 static uint32_t dpu_hw_ctl_get_bitmask_dspp(struct dpu_hw_ctl *ctx,
0298 enum dpu_dspp dspp)
0299 {
0300 uint32_t flushbits = 0;
0301
0302 switch (dspp) {
0303 case DSPP_0:
0304 flushbits = BIT(13);
0305 break;
0306 case DSPP_1:
0307 flushbits = BIT(14);
0308 break;
0309 case DSPP_2:
0310 flushbits = BIT(15);
0311 break;
0312 case DSPP_3:
0313 flushbits = BIT(21);
0314 break;
0315 default:
0316 return 0;
0317 }
0318
0319 return flushbits;
0320 }
0321
/*
 * Poll CTL_SW_RESET until the hardware clears bit 0 or @timeout_us
 * elapses.  Returns 0 when the reset completed, or the last non-zero
 * status on timeout.
 */
static u32 dpu_hw_ctl_poll_reset_status(struct dpu_hw_ctl *ctx, u32 timeout_us)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	ktime_t timeout;
	u32 status;

	timeout = ktime_add_us(ktime_get(), timeout_us);

	/*
	 * Sleep between register reads rather than busy-spinning; the
	 * reset bit is re-checked once more after the final sleep, so a
	 * completion that lands right at the deadline is still seen.
	 */
	do {
		status = DPU_REG_READ(c, CTL_SW_RESET);
		status &= 0x1;
		if (status)
			usleep_range(20, 50);
	} while (status && ktime_compare_safe(ktime_get(), timeout) < 0);

	return status;
}
0343
/*
 * Issue a software reset of this CTL path and wait for the hardware to
 * acknowledge completion.  Returns 0 on success, -EINVAL on timeout.
 */
static int dpu_hw_ctl_reset_control(struct dpu_hw_ctl *ctx)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;

	pr_debug("issuing hw ctl reset for ctl:%d\n", ctx->idx);
	DPU_REG_WRITE(c, CTL_SW_RESET, 0x1);
	if (dpu_hw_ctl_poll_reset_status(ctx, DPU_REG_RESET_TIMEOUT_US))
		return -EINVAL;

	return 0;
}
0355
/*
 * If a software reset is currently in flight on this CTL, wait for it
 * to finish.  Returns 0 when no reset is pending or it completed in
 * time, -EINVAL if the hardware never cleared the reset bit.
 */
static int dpu_hw_ctl_wait_reset_status(struct dpu_hw_ctl *ctx)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	u32 status;

	/* Fast path: no reset pending, nothing to wait for. */
	status = DPU_REG_READ(c, CTL_SW_RESET);
	status &= 0x01;
	if (!status)
		return 0;

	pr_debug("hw ctl reset is set for ctl:%d\n", ctx->idx);
	if (dpu_hw_ctl_poll_reset_status(ctx, DPU_REG_RESET_TIMEOUT_US)) {
		pr_err("hw recovery is not complete for ctl:%d\n", ctx->idx);
		return -EINVAL;
	}

	return 0;
}
0374
/*
 * Zero the layer-mixer staging registers (CTL_LAYER and its EXT/EXT2/
 * EXT3 extensions) for every mixer attached to this CTL, and clear the
 * fetch-active pipe mask.
 */
static void dpu_hw_ctl_clear_all_blendstages(struct dpu_hw_ctl *ctx)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	int i;

	for (i = 0; i < ctx->mixer_count; i++) {
		enum dpu_lm mixer_id = ctx->mixer_hw_caps[i].id;

		DPU_REG_WRITE(c, CTL_LAYER(mixer_id), 0);
		DPU_REG_WRITE(c, CTL_LAYER_EXT(mixer_id), 0);
		DPU_REG_WRITE(c, CTL_LAYER_EXT2(mixer_id), 0);
		DPU_REG_WRITE(c, CTL_LAYER_EXT3(mixer_id), 0);
	}

	DPU_REG_WRITE(c, CTL_FETCH_PIPE_ACTIVE, 0);
}
0391
/*
 * Program the blend-stage staging registers for mixer @lm from
 * @stage_cfg.  Each source pipe's stage number is encoded into a fixed
 * bit field spread across CTL_LAYER and its EXT/EXT2/EXT3 extensions:
 * the legacy fields hold 3 bits of (stage + 1) with a per-pipe overflow
 * flag in EXT for stages >= 7, while newer pipes (DMA2/3, cursors) and
 * RECT_1 of multi-rect pipes use 4-bit fields in EXT2/EXT3.
 *
 * A NULL @stage_cfg resets the mixer to border-color output only.
 */
static void dpu_hw_ctl_setup_blendstage(struct dpu_hw_ctl *ctx,
	enum dpu_lm lm, struct dpu_hw_stage_cfg *stage_cfg)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	u32 mixercfg = 0, mixercfg_ext = 0, mix, ext;
	u32 mixercfg_ext2 = 0, mixercfg_ext3 = 0;
	int i, j;
	int stages;
	int pipes_per_stage;

	stages = _mixer_stages(ctx->mixer_hw_caps, ctx->mixer_count, lm);
	if (stages < 0)
		return;

	/* Source-split capable mixers stage two pipes per blend level. */
	if (test_bit(DPU_MIXER_SOURCESPLIT,
		&ctx->mixer_hw_caps->features))
		pipes_per_stage = PIPES_PER_STAGE;
	else
		pipes_per_stage = 1;

	/* Border color is always staged as the base layer. */
	mixercfg = CTL_MIXER_BORDER_OUT;

	if (!stage_cfg)
		goto exit;

	for (i = 0; i <= stages; i++) {
		/* 3-bit legacy stage value; 'ext' flags overflow (i >= 7). */
		mix = (i + 1) & 0x7;
		ext = i >= 7;

		for (j = 0 ; j < pipes_per_stage; j++) {
			enum dpu_sspp_multirect_index rect_index =
				stage_cfg->multirect_index[i][j];

			switch (stage_cfg->stage[i][j]) {
			case SSPP_VIG0:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext3 |= ((i + 1) & 0xF) << 0;
				} else {
					mixercfg |= mix << 0;
					mixercfg_ext |= ext << 0;
				}
				break;
			case SSPP_VIG1:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext3 |= ((i + 1) & 0xF) << 4;
				} else {
					mixercfg |= mix << 3;
					mixercfg_ext |= ext << 2;
				}
				break;
			case SSPP_VIG2:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext3 |= ((i + 1) & 0xF) << 8;
				} else {
					mixercfg |= mix << 6;
					mixercfg_ext |= ext << 4;
				}
				break;
			case SSPP_VIG3:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext3 |= ((i + 1) & 0xF) << 12;
				} else {
					mixercfg |= mix << 26;
					mixercfg_ext |= ext << 6;
				}
				break;
			case SSPP_RGB0:
				mixercfg |= mix << 9;
				mixercfg_ext |= ext << 8;
				break;
			case SSPP_RGB1:
				mixercfg |= mix << 12;
				mixercfg_ext |= ext << 10;
				break;
			case SSPP_RGB2:
				mixercfg |= mix << 15;
				mixercfg_ext |= ext << 12;
				break;
			case SSPP_RGB3:
				mixercfg |= mix << 29;
				mixercfg_ext |= ext << 14;
				break;
			case SSPP_DMA0:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext2 |= ((i + 1) & 0xF) << 8;
				} else {
					mixercfg |= mix << 18;
					mixercfg_ext |= ext << 16;
				}
				break;
			case SSPP_DMA1:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext2 |= ((i + 1) & 0xF) << 12;
				} else {
					mixercfg |= mix << 21;
					mixercfg_ext |= ext << 18;
				}
				break;
			case SSPP_DMA2:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext2 |= ((i + 1) & 0xF) << 16;
				} else {
					/*
					 * NOTE(review): this widens 'mix' to
					 * the 4-bit value in place, and the
					 * mutation persists for any later
					 * pipe in the same stage (j loop) —
					 * confirm this is intended.
					 */
					mix |= (i + 1) & 0xF;
					mixercfg_ext2 |= mix << 0;
				}
				break;
			case SSPP_DMA3:
				if (rect_index == DPU_SSPP_RECT_1) {
					mixercfg_ext2 |= ((i + 1) & 0xF) << 20;
				} else {
					/* NOTE(review): same in-place 'mix'
					 * mutation as the SSPP_DMA2 case. */
					mix |= (i + 1) & 0xF;
					mixercfg_ext2 |= mix << 4;
				}
				break;
			case SSPP_CURSOR0:
				mixercfg_ext |= ((i + 1) & 0xF) << 20;
				break;
			case SSPP_CURSOR1:
				mixercfg_ext |= ((i + 1) & 0xF) << 26;
				break;
			default:
				break;
			}
		}
	}

exit:
	DPU_REG_WRITE(c, CTL_LAYER(lm), mixercfg);
	DPU_REG_WRITE(c, CTL_LAYER_EXT(lm), mixercfg_ext);
	DPU_REG_WRITE(c, CTL_LAYER_EXT2(lm), mixercfg_ext2);
	DPU_REG_WRITE(c, CTL_LAYER_EXT3(lm), mixercfg_ext3);
}
0525
0526
0527 static void dpu_hw_ctl_intf_cfg_v1(struct dpu_hw_ctl *ctx,
0528 struct dpu_hw_intf_cfg *cfg)
0529 {
0530 struct dpu_hw_blk_reg_map *c = &ctx->hw;
0531 u32 intf_active = 0;
0532 u32 wb_active = 0;
0533 u32 mode_sel = 0;
0534
0535
0536
0537
0538
0539 if ((test_bit(DPU_CTL_VM_CFG, &ctx->caps->features)))
0540 mode_sel = CTL_DEFAULT_GROUP_ID << 28;
0541
0542 if (cfg->dsc)
0543 DPU_REG_WRITE(&ctx->hw, CTL_DSC_FLUSH, cfg->dsc);
0544
0545 if (cfg->intf_mode_sel == DPU_CTL_MODE_SEL_CMD)
0546 mode_sel |= BIT(17);
0547
0548 intf_active = DPU_REG_READ(c, CTL_INTF_ACTIVE);
0549 wb_active = DPU_REG_READ(c, CTL_WB_ACTIVE);
0550
0551 if (cfg->intf)
0552 intf_active |= BIT(cfg->intf - INTF_0);
0553
0554 if (cfg->wb)
0555 wb_active |= BIT(cfg->wb - WB_0);
0556
0557 DPU_REG_WRITE(c, CTL_TOP, mode_sel);
0558 DPU_REG_WRITE(c, CTL_INTF_ACTIVE, intf_active);
0559 DPU_REG_WRITE(c, CTL_WB_ACTIVE, wb_active);
0560
0561 if (cfg->merge_3d)
0562 DPU_REG_WRITE(c, CTL_MERGE_3D_ACTIVE,
0563 BIT(cfg->merge_3d - MERGE_3D_0));
0564 if (cfg->dsc) {
0565 DPU_REG_WRITE(&ctx->hw, CTL_FLUSH, DSC_IDX);
0566 DPU_REG_WRITE(c, CTL_DSC_ACTIVE, cfg->dsc);
0567 }
0568 }
0569
/*
 * Legacy (pre-active-CTL) interface configuration: encode interface id,
 * 3D mux mode, writeback selection and video/command mode into the
 * single CTL_TOP register.
 */
static void dpu_hw_ctl_intf_cfg(struct dpu_hw_ctl *ctx,
		struct dpu_hw_intf_cfg *cfg)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	u32 intf_cfg = 0;

	intf_cfg |= (cfg->intf & 0xF) << 4;

	/* bit 19 enables the 3D mux; bits [21:20] select the mode */
	if (cfg->mode_3d) {
		intf_cfg |= BIT(19);
		intf_cfg |= (cfg->mode_3d - 0x1) << 20;
	}

	/* NOTE(review): WB select encoded as (wb & 0x3) + 2 in the low
	 * bits — assumed per-hardware encoding, confirm against the HW
	 * programming guide. */
	if (cfg->wb)
		intf_cfg |= (cfg->wb & 0x3) + 2;

	switch (cfg->intf_mode_sel) {
	case DPU_CTL_MODE_SEL_VID:
		/* video mode: clear command-mode bit and stream select */
		intf_cfg &= ~BIT(17);
		intf_cfg &= ~(0x3 << 15);
		break;
	case DPU_CTL_MODE_SEL_CMD:
		intf_cfg |= BIT(17);
		intf_cfg |= ((cfg->stream_sel & 0x3) << 15);
		break;
	default:
		pr_err("unknown interface type %d\n", cfg->intf_mode_sel);
		return;
	}

	DPU_REG_WRITE(c, CTL_TOP, intf_cfg);
}
0602
/*
 * Active-CTL teardown: remove only this config's blocks from the
 * MERGE_3D/INTF/WB ACTIVE registers (read-modify-write so other
 * datapaths sharing the CTL stay enabled) and clear all blend stages.
 */
static void dpu_hw_ctl_reset_intf_cfg_v1(struct dpu_hw_ctl *ctx,
		struct dpu_hw_intf_cfg *cfg)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	u32 intf_active = 0;
	u32 wb_active = 0;
	u32 merge3d_active = 0;

	/*
	 * This has to be read-modify-write: only the bits belonging to
	 * the interface/WB/merge-3d block being torn down are cleared,
	 * leaving any concurrently active blocks untouched.
	 */
	if (cfg->merge_3d) {
		merge3d_active = DPU_REG_READ(c, CTL_MERGE_3D_ACTIVE);
		merge3d_active &= ~BIT(cfg->merge_3d - MERGE_3D_0);
		DPU_REG_WRITE(c, CTL_MERGE_3D_ACTIVE,
			      merge3d_active);
	}

	dpu_hw_ctl_clear_all_blendstages(ctx);

	if (cfg->intf) {
		intf_active = DPU_REG_READ(c, CTL_INTF_ACTIVE);
		intf_active &= ~BIT(cfg->intf - INTF_0);
		DPU_REG_WRITE(c, CTL_INTF_ACTIVE, intf_active);
	}

	if (cfg->wb) {
		wb_active = DPU_REG_READ(c, CTL_WB_ACTIVE);
		wb_active &= ~BIT(cfg->wb - WB_0);
		DPU_REG_WRITE(c, CTL_WB_ACTIVE, wb_active);
	}
}
0640
0641 static void dpu_hw_ctl_set_fetch_pipe_active(struct dpu_hw_ctl *ctx,
0642 unsigned long *fetch_active)
0643 {
0644 int i;
0645 u32 val = 0;
0646
0647 if (fetch_active) {
0648 for (i = 0; i < SSPP_MAX; i++) {
0649 if (test_bit(i, fetch_active) &&
0650 fetch_tbl[i] != CTL_INVALID_BIT)
0651 val |= BIT(fetch_tbl[i]);
0652 }
0653 }
0654
0655 DPU_REG_WRITE(&ctx->hw, CTL_FETCH_PIPE_ACTIVE, val);
0656 }
0657
0658 static void _setup_ctl_ops(struct dpu_hw_ctl_ops *ops,
0659 unsigned long cap)
0660 {
0661 if (cap & BIT(DPU_CTL_ACTIVE_CFG)) {
0662 ops->trigger_flush = dpu_hw_ctl_trigger_flush_v1;
0663 ops->setup_intf_cfg = dpu_hw_ctl_intf_cfg_v1;
0664 ops->reset_intf_cfg = dpu_hw_ctl_reset_intf_cfg_v1;
0665 ops->update_pending_flush_intf =
0666 dpu_hw_ctl_update_pending_flush_intf_v1;
0667 ops->update_pending_flush_merge_3d =
0668 dpu_hw_ctl_update_pending_flush_merge_3d_v1;
0669 ops->update_pending_flush_wb = dpu_hw_ctl_update_pending_flush_wb_v1;
0670 } else {
0671 ops->trigger_flush = dpu_hw_ctl_trigger_flush;
0672 ops->setup_intf_cfg = dpu_hw_ctl_intf_cfg;
0673 ops->update_pending_flush_intf =
0674 dpu_hw_ctl_update_pending_flush_intf;
0675 ops->update_pending_flush_wb = dpu_hw_ctl_update_pending_flush_wb;
0676 }
0677 ops->clear_pending_flush = dpu_hw_ctl_clear_pending_flush;
0678 ops->update_pending_flush = dpu_hw_ctl_update_pending_flush;
0679 ops->get_pending_flush = dpu_hw_ctl_get_pending_flush;
0680 ops->get_flush_register = dpu_hw_ctl_get_flush_register;
0681 ops->trigger_start = dpu_hw_ctl_trigger_start;
0682 ops->is_started = dpu_hw_ctl_is_started;
0683 ops->trigger_pending = dpu_hw_ctl_trigger_pending;
0684 ops->reset = dpu_hw_ctl_reset_control;
0685 ops->wait_reset_status = dpu_hw_ctl_wait_reset_status;
0686 ops->clear_all_blendstages = dpu_hw_ctl_clear_all_blendstages;
0687 ops->setup_blendstage = dpu_hw_ctl_setup_blendstage;
0688 ops->get_bitmask_sspp = dpu_hw_ctl_get_bitmask_sspp;
0689 ops->get_bitmask_mixer = dpu_hw_ctl_get_bitmask_mixer;
0690 ops->get_bitmask_dspp = dpu_hw_ctl_get_bitmask_dspp;
0691 if (cap & BIT(DPU_CTL_FETCH_ACTIVE))
0692 ops->set_active_pipes = dpu_hw_ctl_set_fetch_pipe_active;
0693 };
0694
/*
 * Allocate and initialize a CTL block context for @idx using the
 * catalog @m and mapped MDSS base @addr.  Returns the new context or
 * an ERR_PTR on failure; callers must check with IS_ERR() and release
 * with dpu_hw_ctl_destroy().
 */
struct dpu_hw_ctl *dpu_hw_ctl_init(enum dpu_ctl idx,
		void __iomem *addr,
		const struct dpu_mdss_cfg *m)
{
	struct dpu_hw_ctl *c;
	const struct dpu_ctl_cfg *cfg;

	c = kzalloc(sizeof(*c), GFP_KERNEL);
	if (!c)
		return ERR_PTR(-ENOMEM);

	cfg = _ctl_offset(idx, m, addr, &c->hw);
	if (IS_ERR_OR_NULL(cfg)) {
		kfree(c);
		pr_err("failed to create dpu_hw_ctl %d\n", idx);
		return ERR_PTR(-EINVAL);
	}

	c->caps = cfg;
	_setup_ctl_ops(&c->ops, c->caps->features);
	c->idx = idx;
	c->mixer_count = m->mixer_count;
	c->mixer_hw_caps = m->mixer;

	return c;
}
0721
/* Free a context allocated by dpu_hw_ctl_init(); NULL is a no-op. */
void dpu_hw_ctl_destroy(struct dpu_hw_ctl *ctx)
{
	kfree(ctx);
}