// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2015 Broadcom
 */

/**
 * DOC: VC4 HVS module.
 *
 * The Hardware Video Scaler (HVS) is the piece of hardware that does
 * translation, scaling, colorspace conversion, and compositing of
 * pixels stored in framebuffers into a FIFO of pixels going out to
 * the Pixel Valve (CRTC).  It operates at the system clock rate (the
 * system audio clock gate, specifically), which is much higher than
 * the pixel clock rate.
 *
 * There is a single global HVS, with multiple output FIFOs that can
 * be consumed by the PVs.  This file just manages the resources for
 * the HVS, while the vc4_crtc.c code actually drives HVS setup for
 * each CRTC.
 */
#include <linux/bitfield.h>
#include <linux/clk.h>
#include <linux/component.h>
#include <linux/platform_device.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_vblank.h>

#include "vc4_drv.h"
#include "vc4_regs.h"
static const struct debugfs_reg32 hvs_regs[] = {
        VC4_REG32(SCALER_DISPCTRL),
        VC4_REG32(SCALER_DISPSTAT),
        VC4_REG32(SCALER_DISPID),
        VC4_REG32(SCALER_DISPECTRL),
        VC4_REG32(SCALER_DISPPROF),
        VC4_REG32(SCALER_DISPDITHER),
        VC4_REG32(SCALER_DISPEOLN),
        VC4_REG32(SCALER_DISPLIST0),
        VC4_REG32(SCALER_DISPLIST1),
        VC4_REG32(SCALER_DISPLIST2),
        VC4_REG32(SCALER_DISPLSTAT),
        VC4_REG32(SCALER_DISPLACT0),
        VC4_REG32(SCALER_DISPLACT1),
        VC4_REG32(SCALER_DISPLACT2),
        VC4_REG32(SCALER_DISPCTRL0),
        VC4_REG32(SCALER_DISPBKGND0),
        VC4_REG32(SCALER_DISPSTAT0),
        VC4_REG32(SCALER_DISPBASE0),
        VC4_REG32(SCALER_DISPCTRL1),
        VC4_REG32(SCALER_DISPBKGND1),
        VC4_REG32(SCALER_DISPSTAT1),
        VC4_REG32(SCALER_DISPBASE1),
        VC4_REG32(SCALER_DISPCTRL2),
        VC4_REG32(SCALER_DISPBKGND2),
        VC4_REG32(SCALER_DISPSTAT2),
        VC4_REG32(SCALER_DISPBASE2),
        VC4_REG32(SCALER_DISPALPHA2),
        VC4_REG32(SCALER_OLEDOFFS),
        VC4_REG32(SCALER_OLEDCOEF0),
        VC4_REG32(SCALER_OLEDCOEF1),
        VC4_REG32(SCALER_OLEDCOEF2),
};

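/*
 * Dump the HVS register set and the start of the display list memory
 * to the kernel log, for debugging.
 */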
void vc4_hvs_dump_state(struct vc4_hvs *hvs)
{
        struct drm_printer p = drm_info_printer(&hvs->pdev->dev);
        int i;

        drm_print_regset32(&p, &hvs->regset);

        DRM_INFO("HVS ctx:\n");
        for (i = 0; i < 64; i += 4) {
                DRM_INFO("0x%08x (%s): 0x%08x 0x%08x 0x%08x 0x%08x\n",
                         i * 4, i < HVS_BOOTLOADER_DLIST_END ? "B" : "D",
                         readl((u32 __iomem *)hvs->dlist + i + 0),
                         readl((u32 __iomem *)hvs->dlist + i + 1),
                         readl((u32 __iomem *)hvs->dlist + i + 2),
                         readl((u32 __iomem *)hvs->dlist + i + 3));
        }
}

static int vc4_hvs_debugfs_underrun(struct seq_file *m, void *data)
{
        struct drm_info_node *node = m->private;
        struct drm_device *dev = node->minor->dev;
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        struct drm_printer p = drm_seq_file_printer(m);

        drm_printf(&p, "%d\n", atomic_read(&vc4->underrun));

        return 0;
}

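/*
 * Dump the display list of every enabled HVS channel: walk each list
 * from its current start address until an entry with SCALER_CTL0_END
 * set is found.
 */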
static int vc4_hvs_debugfs_dlist(struct seq_file *m, void *data)
{
        struct drm_info_node *node = m->private;
        struct drm_device *dev = node->minor->dev;
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        struct vc4_hvs *hvs = vc4->hvs;
        struct drm_printer p = drm_seq_file_printer(m);
        unsigned int next_entry_start = 0;
        unsigned int i, j;
        u32 dlist_word, dispstat;

        for (i = 0; i < SCALER_CHANNELS_COUNT; i++) {
                dispstat = VC4_GET_FIELD(HVS_READ(SCALER_DISPSTATX(i)),
                                         SCALER_DISPSTATX_MODE);
                if (dispstat == SCALER_DISPSTATX_MODE_DISABLED ||
                    dispstat == SCALER_DISPSTATX_MODE_EOF) {
                        drm_printf(&p, "HVS chan %u disabled\n", i);
                        continue;
                }

                drm_printf(&p, "HVS chan %u:\n", i);

                for (j = HVS_READ(SCALER_DISPLISTX(i)); j < 256; j++) {
                        dlist_word = readl((u32 __iomem *)vc4->hvs->dlist + j);
                        drm_printf(&p, "dlist: %02d: 0x%08x\n", j,
                                   dlist_word);
                        if (!next_entry_start ||
                            next_entry_start == j) {
                                if (dlist_word & SCALER_CTL0_END)
                                        break;
                                next_entry_start = j +
                                        VC4_GET_FIELD(dlist_word,
                                                      SCALER_CTL0_SIZE);
                        }
                }
        }

        return 0;
}

/* The filter kernel is composed of dwords each containing 3 9-bit
 * signed integers packed next to each other.
 */
#define VC4_INT_TO_COEFF(coeff) ((coeff) & 0x1ff)
#define VC4_PPF_FILTER_WORD(c0, c1, c2)         \
        ((((c0) & 0x1ff) << 0) |                \
         (((c1) & 0x1ff) << 9) |                \
         (((c2) & 0x1ff) << 18))

/* The whole filter kernel is arranged as the coefficients 0-16 going
 * up, then a pad, then 17-31 going down and reversed within the
 * dwords.  This means that a linear-phase kernel (symmetrical at the
 * boundary between coefficients 15 and 16) has its last 5 dwords
 * matching the first 5, but reversed.
 */
#define VC4_LINEAR_PHASE_KERNEL(c0, c1, c2, c3, c4, c5, c6, c7, c8,     \
                                c9, c10, c11, c12, c13, c14, c15)       \
        {VC4_PPF_FILTER_WORD(c0, c1, c2),                               \
         VC4_PPF_FILTER_WORD(c3, c4, c5),                               \
         VC4_PPF_FILTER_WORD(c6, c7, c8),                               \
         VC4_PPF_FILTER_WORD(c9, c10, c11),                             \
         VC4_PPF_FILTER_WORD(c12, c13, c14),                            \
         VC4_PPF_FILTER_WORD(c15, c15, 0)}

#define VC4_LINEAR_PHASE_KERNEL_DWORDS 6
#define VC4_KERNEL_DWORDS (VC4_LINEAR_PHASE_KERNEL_DWORDS * 2 - 1)

/* Mitchell/Netravali filter with the recommended B=1/3, C=1/3 choice. */
static const u32 mitchell_netravali_1_3_1_3_kernel[] =
        VC4_LINEAR_PHASE_KERNEL(0, -2, -6, -8, -10, -8, -3, 2, 18,
                                50, 82, 119, 155, 187, 213, 227);

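/*
 * Allocate display-list memory for a scaling filter kernel and write
 * the kernel into it.  Only the first VC4_LINEAR_PHASE_KERNEL_DWORDS
 * words are provided by the caller; the remaining words are the mirror
 * image of the first half and are written out in reverse.
 */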
static int vc4_hvs_upload_linear_kernel(struct vc4_hvs *hvs,
                                        struct drm_mm_node *space,
                                        const u32 *kernel)
{
        int ret, i;
        u32 __iomem *dst_kernel;

        ret = drm_mm_insert_node(&hvs->dlist_mm, space, VC4_KERNEL_DWORDS);
        if (ret) {
                DRM_ERROR("Failed to allocate space for filter kernel: %d\n",
                          ret);
                return ret;
        }

        dst_kernel = hvs->dlist + space->start;

        for (i = 0; i < VC4_KERNEL_DWORDS; i++) {
                if (i < VC4_LINEAR_PHASE_KERNEL_DWORDS)
                        writel(kernel[i], &dst_kernel[i]);
                else {
                        writel(kernel[VC4_KERNEL_DWORDS - i - 1],
                               &dst_kernel[i]);
                }
        }

        return 0;
}

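/*
 * Load the CRTC's cached R/G/B gamma tables into the gamma SRAM of the
 * HVS channel currently assigned to it.
 */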
static void vc4_hvs_lut_load(struct vc4_hvs *hvs,
                             struct vc4_crtc *vc4_crtc)
{
        struct drm_crtc *crtc = &vc4_crtc->base;
        struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc->state);
        u32 i;

        /* The LUT memory is laid out with each HVS channel in order,
         * each of which takes 256 writes for R, 256 for G, then 256
         * for B.
         */
        HVS_WRITE(SCALER_GAMADDR,
                  SCALER_GAMADDR_AUTOINC |
                  (vc4_state->assigned_channel * 3 * crtc->gamma_size));

        for (i = 0; i < crtc->gamma_size; i++)
                HVS_WRITE(SCALER_GAMDATA, vc4_crtc->lut_r[i]);
        for (i = 0; i < crtc->gamma_size; i++)
                HVS_WRITE(SCALER_GAMDATA, vc4_crtc->lut_g[i]);
        for (i = 0; i < crtc->gamma_size; i++)
                HVS_WRITE(SCALER_GAMDATA, vc4_crtc->lut_b[i]);
}

static void vc4_hvs_update_gamma_lut(struct vc4_hvs *hvs,
                                     struct vc4_crtc *vc4_crtc)
{
        struct drm_crtc_state *crtc_state = vc4_crtc->base.state;
        struct drm_color_lut *lut = crtc_state->gamma_lut->data;
        u32 length = drm_color_lut_size(crtc_state->gamma_lut);
        u32 i;

        for (i = 0; i < length; i++) {
                vc4_crtc->lut_r[i] = drm_color_lut_extract(lut[i].red, 8);
                vc4_crtc->lut_g[i] = drm_color_lut_extract(lut[i].green, 8);
                vc4_crtc->lut_b[i] = drm_color_lut_extract(lut[i].blue, 8);
        }

        vc4_hvs_lut_load(hvs, vc4_crtc);
}

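/*
 * Return the hardware frame count of the given output FIFO, as reported
 * by the SCALER_DISPSTAT1/SCALER_DISPSTAT2 registers.
 */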
u8 vc4_hvs_get_fifo_frame_count(struct vc4_hvs *hvs, unsigned int fifo)
{
        u8 field = 0;

        switch (fifo) {
        case 0:
                field = VC4_GET_FIELD(HVS_READ(SCALER_DISPSTAT1),
                                      SCALER_DISPSTAT1_FRCNT0);
                break;
        case 1:
                field = VC4_GET_FIELD(HVS_READ(SCALER_DISPSTAT1),
                                      SCALER_DISPSTAT1_FRCNT1);
                break;
        case 2:
                field = VC4_GET_FIELD(HVS_READ(SCALER_DISPSTAT2),
                                      SCALER_DISPSTAT2_FRCNT2);
                break;
        }

        return field;
}

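/*
 * Map an HVS output to the FIFO (channel) that currently feeds it.  On
 * VC4 the mapping is fixed 1:1; on VC5 (BCM2711) the output mux
 * registers have to be read back to find the routing.  Returns -EPIPE
 * if the output is not fed by any FIFO.
 */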
int vc4_hvs_get_fifo_from_output(struct vc4_hvs *hvs, unsigned int output)
{
        struct vc4_dev *vc4 = hvs->vc4;
        u32 reg;
        int ret;

        if (!vc4->is_vc5)
                return output;

        switch (output) {
        case 0:
                return 0;

        case 1:
                return 1;

        case 2:
                reg = HVS_READ(SCALER_DISPECTRL);
                ret = FIELD_GET(SCALER_DISPECTRL_DSP2_MUX_MASK, reg);
                if (ret == 0)
                        return 2;

                return 0;

        case 3:
                reg = HVS_READ(SCALER_DISPCTRL);
                ret = FIELD_GET(SCALER_DISPCTRL_DSP3_MUX_MASK, reg);
                if (ret == 3)
                        return -EPIPE;

                return ret;

        case 4:
                reg = HVS_READ(SCALER_DISPEOLN);
                ret = FIELD_GET(SCALER_DISPEOLN_DSP4_MUX_MASK, reg);
                if (ret == 3)
                        return -EPIPE;

                return ret;

        case 5:
                reg = HVS_READ(SCALER_DISPDITHER);
                ret = FIELD_GET(SCALER_DISPDITHER_DSP5_MUX_MASK, reg);
                if (ret == 3)
                        return -EPIPE;

                return ret;

        default:
                return -EPIPE;
        }
}

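/*
 * Reset and enable an HVS channel for the given mode.  When feeding the
 * transposer (writeback), the channel is put in one-shot mode so it only
 * composites a single frame.
 */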
static int vc4_hvs_init_channel(struct vc4_hvs *hvs, struct drm_crtc *crtc,
                                struct drm_display_mode *mode, bool oneshot)
{
        struct vc4_dev *vc4 = hvs->vc4;
        struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
        struct vc4_crtc_state *vc4_crtc_state = to_vc4_crtc_state(crtc->state);
        unsigned int chan = vc4_crtc_state->assigned_channel;
        bool interlace = mode->flags & DRM_MODE_FLAG_INTERLACE;
        u32 dispbkgndx;
        u32 dispctrl;

        HVS_WRITE(SCALER_DISPCTRLX(chan), 0);
        HVS_WRITE(SCALER_DISPCTRLX(chan), SCALER_DISPCTRLX_RESET);
        HVS_WRITE(SCALER_DISPCTRLX(chan), 0);

        /* Turn on the scaler, which will wait for vstart to start
         * compositing.
         * When feeding the transposer, we should operate in oneshot
         * mode.
         */
        dispctrl = SCALER_DISPCTRLX_ENABLE;

        if (!vc4->is_vc5)
                dispctrl |= VC4_SET_FIELD(mode->hdisplay,
                                          SCALER_DISPCTRLX_WIDTH) |
                            VC4_SET_FIELD(mode->vdisplay,
                                          SCALER_DISPCTRLX_HEIGHT) |
                            (oneshot ? SCALER_DISPCTRLX_ONESHOT : 0);
        else
                dispctrl |= VC4_SET_FIELD(mode->hdisplay,
                                          SCALER5_DISPCTRLX_WIDTH) |
                            VC4_SET_FIELD(mode->vdisplay,
                                          SCALER5_DISPCTRLX_HEIGHT) |
                            (oneshot ? SCALER5_DISPCTRLX_ONESHOT : 0);

        HVS_WRITE(SCALER_DISPCTRLX(chan), dispctrl);

        dispbkgndx = HVS_READ(SCALER_DISPBKGNDX(chan));
        dispbkgndx &= ~SCALER_DISPBKGND_GAMMA;
        dispbkgndx &= ~SCALER_DISPBKGND_INTERLACE;

        HVS_WRITE(SCALER_DISPBKGNDX(chan), dispbkgndx |
                  SCALER_DISPBKGND_AUTOHS |
                  ((!vc4->is_vc5) ? SCALER_DISPBKGND_GAMMA : 0) |
                  (interlace ? SCALER_DISPBKGND_INTERLACE : 0));

        /* Reload the LUT, since the SRAMs would have been disabled if
         * all CRTCs had SCALER_DISPBKGND_GAMMA unset at once.
         */
        vc4_hvs_lut_load(hvs, vc4_crtc);

        return 0;
}

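/*
 * Disable an HVS channel: reset it, clear the enable bit, and check
 * that it really ended up disabled with an empty FIFO.
 */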
void vc4_hvs_stop_channel(struct vc4_hvs *hvs, unsigned int chan)
{
        if (!(HVS_READ(SCALER_DISPCTRLX(chan)) & SCALER_DISPCTRLX_ENABLE))
                return;

        HVS_WRITE(SCALER_DISPCTRLX(chan),
                  HVS_READ(SCALER_DISPCTRLX(chan)) | SCALER_DISPCTRLX_RESET);
        HVS_WRITE(SCALER_DISPCTRLX(chan),
                  HVS_READ(SCALER_DISPCTRLX(chan)) & ~SCALER_DISPCTRLX_ENABLE);

        /* Once we leave, the scaler should be disabled and its fifo empty. */
        WARN_ON_ONCE(HVS_READ(SCALER_DISPCTRLX(chan)) & SCALER_DISPCTRLX_RESET);

        WARN_ON_ONCE(VC4_GET_FIELD(HVS_READ(SCALER_DISPSTATX(chan)),
                                   SCALER_DISPSTATX_MODE) !=
                     SCALER_DISPSTATX_MODE_DISABLED);

        WARN_ON_ONCE((HVS_READ(SCALER_DISPSTATX(chan)) &
                      (SCALER_DISPSTATX_FULL | SCALER_DISPSTATX_EMPTY)) !=
                     SCALER_DISPSTATX_EMPTY);
}

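/*
 * Size the display list that this CRTC's planes will need and reserve
 * space for it in the HVS display-list memory.
 */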
int vc4_hvs_atomic_check(struct drm_crtc *crtc, struct drm_atomic_state *state)
{
        struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state, crtc);
        struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc_state);
        struct drm_device *dev = crtc->dev;
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        struct drm_plane *plane;
        unsigned long flags;
        const struct drm_plane_state *plane_state;
        u32 dlist_count = 0;
        int ret;

        /* The pixelvalve can only feed one encoder (and encoders are
         * 1:1 with connectors.)
         */
        if (hweight32(crtc_state->connector_mask) > 1)
                return -EINVAL;

        drm_atomic_crtc_state_for_each_plane_state(plane, plane_state, crtc_state)
                dlist_count += vc4_plane_dlist_size(plane_state);

        /* Account for SCALER_CTL0_END. */
        dlist_count++;

        spin_lock_irqsave(&vc4->hvs->mm_lock, flags);
        ret = drm_mm_insert_node(&vc4->hvs->dlist_mm, &vc4_state->mm,
                                 dlist_count);
        spin_unlock_irqrestore(&vc4->hvs->mm_lock, flags);
        if (ret)
                return ret;

        return 0;
}

static void vc4_hvs_install_dlist(struct drm_crtc *crtc)
{
        struct drm_device *dev = crtc->dev;
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        struct vc4_hvs *hvs = vc4->hvs;
        struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc->state);

        /* Point the channel's display-list pointer at the new list. */
        HVS_WRITE(SCALER_DISPLISTX(vc4_state->assigned_channel),
                  vc4_state->mm.start);
}

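/*
 * Record the new display list as the CRTC's current one and, if a
 * pageflip event is pending, hand it to the vblank handler so it can be
 * sent once the new list has been latched (for a transposer-fed CRTC,
 * only once the TXP has been armed).
 */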
static void vc4_hvs_update_dlist(struct drm_crtc *crtc)
{
        struct drm_device *dev = crtc->dev;
        struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
        struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc->state);
        unsigned long flags;

        if (crtc->state->event) {
                crtc->state->event->pipe = drm_crtc_index(crtc);

                WARN_ON(drm_crtc_vblank_get(crtc) != 0);

                spin_lock_irqsave(&dev->event_lock, flags);

                if (!vc4_crtc->feeds_txp || vc4_state->txp_armed) {
                        vc4_crtc->event = crtc->state->event;
                        crtc->state->event = NULL;
                }

                spin_unlock_irqrestore(&dev->event_lock, flags);
        }

        spin_lock_irqsave(&vc4_crtc->irq_lock, flags);
        vc4_crtc->current_dlist = vc4_state->mm.start;
        spin_unlock_irqrestore(&vc4_crtc->irq_lock, flags);
}

void vc4_hvs_atomic_begin(struct drm_crtc *crtc,
                          struct drm_atomic_state *state)
{
        struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
        struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc->state);
        unsigned long flags;

        spin_lock_irqsave(&vc4_crtc->irq_lock, flags);
        vc4_crtc->current_hvs_channel = vc4_state->assigned_channel;
        spin_unlock_irqrestore(&vc4_crtc->irq_lock, flags);
}

void vc4_hvs_atomic_enable(struct drm_crtc *crtc,
                           struct drm_atomic_state *state)
{
        struct drm_device *dev = crtc->dev;
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        struct drm_display_mode *mode = &crtc->state->adjusted_mode;
        struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
        bool oneshot = vc4_crtc->feeds_txp;

        vc4_hvs_install_dlist(crtc);
        vc4_hvs_update_dlist(crtc);
        vc4_hvs_init_channel(vc4->hvs, crtc, mode, oneshot);
}

void vc4_hvs_atomic_disable(struct drm_crtc *crtc,
                            struct drm_atomic_state *state)
{
        struct drm_device *dev = crtc->dev;
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        struct drm_crtc_state *old_state = drm_atomic_get_old_crtc_state(state, crtc);
        struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(old_state);
        unsigned int chan = vc4_state->assigned_channel;

        vc4_hvs_stop_channel(vc4->hvs, chan);
}

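/*
 * Write the new display list for this CRTC into HVS memory, enable
 * background fill if the bottom plane needs it, and update the gamma
 * LUT if color management changed.
 */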
void vc4_hvs_atomic_flush(struct drm_crtc *crtc,
                          struct drm_atomic_state *state)
{
        struct drm_crtc_state *old_state = drm_atomic_get_old_crtc_state(state,
                                                                         crtc);
        struct drm_device *dev = crtc->dev;
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        struct vc4_hvs *hvs = vc4->hvs;
        struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
        struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc->state);
        unsigned int channel = vc4_state->assigned_channel;
        struct drm_plane *plane;
        struct vc4_plane_state *vc4_plane_state;
        bool debug_dump_regs = false;
        bool enable_bg_fill = false;
        u32 __iomem *dlist_start = vc4->hvs->dlist + vc4_state->mm.start;
        u32 __iomem *dlist_next = dlist_start;

        if (debug_dump_regs) {
                DRM_INFO("CRTC %d HVS before:\n", drm_crtc_index(crtc));
                vc4_hvs_dump_state(hvs);
        }

        /* Copy all the active planes' dlist contents to the hardware dlist. */
        drm_atomic_crtc_for_each_plane(plane, crtc) {
                /* Is this the first active plane? */
                if (dlist_next == dlist_start) {
                        /* We need to enable background fill when a plane
                         * could be alpha blending from the background, i.e.
                         * where no other plane is underneath.  It suffices
                         * to consider the first active plane here since we
                         * set needs_bg_fill such that either the first plane
                         * already needs it or all planes on top blend from
                         * the first or a lower plane.
                         */
                        vc4_plane_state = to_vc4_plane_state(plane->state);
                        enable_bg_fill = vc4_plane_state->needs_bg_fill;
                }

                dlist_next += vc4_plane_write_dlist(plane, dlist_next);
        }

        writel(SCALER_CTL0_END, dlist_next);
        dlist_next++;

        WARN_ON_ONCE(dlist_next - dlist_start != vc4_state->mm.size);

        if (enable_bg_fill)
                /* This sets a black background color fill, as is the case
                 * with other DRM drivers.
                 */
                HVS_WRITE(SCALER_DISPBKGNDX(channel),
                          HVS_READ(SCALER_DISPBKGNDX(channel)) |
                          SCALER_DISPBKGND_FILL);

        /* Only update DISPLIST if the CRTC was already running and is not
         * being disabled.
         * vc4_crtc_enable() takes care of updating the dlist just after
         * re-enabling VBLANK interrupts and before enabling the engine.
         * If the CRTC is being disabled, there's no point in updating this
         * information.
         */
        if (crtc->state->active && old_state->active) {
                vc4_hvs_install_dlist(crtc);
                vc4_hvs_update_dlist(crtc);
        }

        if (crtc->state->color_mgmt_changed) {
                u32 dispbkgndx = HVS_READ(SCALER_DISPBKGNDX(channel));

                if (crtc->state->gamma_lut) {
                        vc4_hvs_update_gamma_lut(hvs, vc4_crtc);
                        dispbkgndx |= SCALER_DISPBKGND_GAMMA;
                } else {
                        /* With no gamma LUT attached, disable gamma
                         * correction for this channel.
                         */
                        dispbkgndx &= ~SCALER_DISPBKGND_GAMMA;
                }
                HVS_WRITE(SCALER_DISPBKGNDX(channel), dispbkgndx);
        }

        if (debug_dump_regs) {
                DRM_INFO("CRTC %d HVS after:\n", drm_crtc_index(crtc));
                vc4_hvs_dump_state(hvs);
        }
}

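/*
 * Mask (vc4_hvs_mask_underrun) or unmask (vc4_hvs_unmask_underrun) the
 * underrun interrupt of one HVS channel.  Unmasking also clears any
 * pending underrun status so the next event is reported again.
 */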
void vc4_hvs_mask_underrun(struct vc4_hvs *hvs, int channel)
{
        u32 dispctrl = HVS_READ(SCALER_DISPCTRL);

        dispctrl &= ~SCALER_DISPCTRL_DSPEISLUR(channel);

        HVS_WRITE(SCALER_DISPCTRL, dispctrl);
}

void vc4_hvs_unmask_underrun(struct vc4_hvs *hvs, int channel)
{
        u32 dispctrl = HVS_READ(SCALER_DISPCTRL);

        dispctrl |= SCALER_DISPCTRL_DSPEISLUR(channel);

        HVS_WRITE(SCALER_DISPSTAT,
                  SCALER_DISPSTAT_EUFLOW(channel));
        HVS_WRITE(SCALER_DISPCTRL, dispctrl);
}

static void vc4_hvs_report_underrun(struct drm_device *dev)
{
        struct vc4_dev *vc4 = to_vc4_dev(dev);

        atomic_inc(&vc4->underrun);
        DRM_DEV_ERROR(dev->dev, "HVS underrun\n");
}

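/*
 * HVS interrupt handler: report an underrun on any channel that signals
 * one and mask that channel's underrun interrupt to avoid a flood, then
 * clear the per-channel status bits.
 */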
static irqreturn_t vc4_hvs_irq_handler(int irq, void *data)
{
        struct drm_device *dev = data;
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        struct vc4_hvs *hvs = vc4->hvs;
        irqreturn_t irqret = IRQ_NONE;
        int channel;
        u32 control;
        u32 status;

        status = HVS_READ(SCALER_DISPSTAT);
        control = HVS_READ(SCALER_DISPCTRL);

        for (channel = 0; channel < SCALER_CHANNELS_COUNT; channel++) {
                /* Interrupt masking is not always honored, so check it here. */
                if (status & SCALER_DISPSTAT_EUFLOW(channel) &&
                    control & SCALER_DISPCTRL_DSPEISLUR(channel)) {
                        vc4_hvs_mask_underrun(hvs, channel);
                        vc4_hvs_report_underrun(dev);

                        irqret = IRQ_HANDLED;
                }
        }

        /* Clear every per-channel interrupt flag. */
        HVS_WRITE(SCALER_DISPSTAT, SCALER_DISPSTAT_IRQMASK(0) |
                                   SCALER_DISPSTAT_IRQMASK(1) |
                                   SCALER_DISPSTAT_IRQMASK(2));

        return irqret;
}

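/*
 * Component bind: map the HVS registers, enable the core clock on VC5,
 * set up the display-list and LBM allocators, route the output muxes,
 * enable only the underrun interrupts, and register the debugfs entries.
 */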
static int vc4_hvs_bind(struct device *dev, struct device *master, void *data)
{
        struct platform_device *pdev = to_platform_device(dev);
        struct drm_device *drm = dev_get_drvdata(master);
        struct vc4_dev *vc4 = to_vc4_dev(drm);
        struct vc4_hvs *hvs = NULL;
        int ret;
        u32 dispctrl;
        u32 reg;

        hvs = devm_kzalloc(&pdev->dev, sizeof(*hvs), GFP_KERNEL);
        if (!hvs)
                return -ENOMEM;

        hvs->vc4 = vc4;
        hvs->pdev = pdev;

        hvs->regs = vc4_ioremap_regs(pdev, 0);
        if (IS_ERR(hvs->regs))
                return PTR_ERR(hvs->regs);

        hvs->regset.base = hvs->regs;
        hvs->regset.regs = hvs_regs;
        hvs->regset.nregs = ARRAY_SIZE(hvs_regs);

        if (vc4->is_vc5) {
                hvs->core_clk = devm_clk_get(&pdev->dev, NULL);
                if (IS_ERR(hvs->core_clk)) {
                        dev_err(&pdev->dev, "Couldn't get core clock\n");
                        return PTR_ERR(hvs->core_clk);
                }

                ret = clk_prepare_enable(hvs->core_clk);
                if (ret) {
                        dev_err(&pdev->dev, "Couldn't enable the core clock\n");
                        return ret;
                }
        }

        if (!vc4->is_vc5)
                hvs->dlist = hvs->regs + SCALER_DLIST_START;
        else
                hvs->dlist = hvs->regs + SCALER5_DLIST_START;

        spin_lock_init(&hvs->mm_lock);

        /* Set up the HVS display list memory manager.  We never
         * overwrite the setup from the bootloader (just 128b out of
         * our 16K), since we don't want to scramble the screen when
         * transitioning from the firmware's boot setup to runtime.
         */
        drm_mm_init(&hvs->dlist_mm,
                    HVS_BOOTLOADER_DLIST_END,
                    (SCALER_DLIST_SIZE >> 2) - HVS_BOOTLOADER_DLIST_END);

        /* Set up the HVS LBM memory manager.  We could have some more
         * complicated data structure that allowed reuse of LBM areas
         * between planes when they don't overlap on the screen, but
         * for now we just allocate globally.
         */
        if (!vc4->is_vc5)
                /* 48k words of 2x12-bit pixels */
                drm_mm_init(&hvs->lbm_mm, 0, 48 * 1024);
        else
                /* 60k words of 4x12-bit pixels */
                drm_mm_init(&hvs->lbm_mm, 0, 60 * 1024);

        /* Upload filter kernels.  We only have the one for now, so we
         * keep it around for the lifetime of the driver.
         */
        ret = vc4_hvs_upload_linear_kernel(hvs,
                                           &hvs->mitchell_netravali_filter,
                                           mitchell_netravali_1_3_1_3_kernel);
        if (ret)
                return ret;

        vc4->hvs = hvs;

        /* Route output 2 to FIFO 2, and leave outputs 3, 4 and 5
         * disconnected from any FIFO (mux value 3), matching what
         * vc4_hvs_get_fifo_from_output() expects.
         */
        reg = HVS_READ(SCALER_DISPECTRL);
        reg &= ~SCALER_DISPECTRL_DSP2_MUX_MASK;
        HVS_WRITE(SCALER_DISPECTRL,
                  reg | VC4_SET_FIELD(0, SCALER_DISPECTRL_DSP2_MUX));

        reg = HVS_READ(SCALER_DISPCTRL);
        reg &= ~SCALER_DISPCTRL_DSP3_MUX_MASK;
        HVS_WRITE(SCALER_DISPCTRL,
                  reg | VC4_SET_FIELD(3, SCALER_DISPCTRL_DSP3_MUX));

        reg = HVS_READ(SCALER_DISPEOLN);
        reg &= ~SCALER_DISPEOLN_DSP4_MUX_MASK;
        HVS_WRITE(SCALER_DISPEOLN,
                  reg | VC4_SET_FIELD(3, SCALER_DISPEOLN_DSP4_MUX));

        reg = HVS_READ(SCALER_DISPDITHER);
        reg &= ~SCALER_DISPDITHER_DSP5_MUX_MASK;
        HVS_WRITE(SCALER_DISPDITHER,
                  reg | VC4_SET_FIELD(3, SCALER_DISPDITHER_DSP5_MUX));

        dispctrl = HVS_READ(SCALER_DISPCTRL);

        dispctrl |= SCALER_DISPCTRL_ENABLE;
        dispctrl |= SCALER_DISPCTRL_DISPEIRQ(0) |
                    SCALER_DISPCTRL_DISPEIRQ(1) |
                    SCALER_DISPCTRL_DISPEIRQ(2);

        /* Mask the individual interrupt sources; the per-channel underrun
         * interrupts get unmasked by vc4_hvs_unmask_underrun() as CRTCs
         * are enabled.
         */
        dispctrl &= ~(SCALER_DISPCTRL_DMAEIRQ |
                      SCALER_DISPCTRL_SLVWREIRQ |
                      SCALER_DISPCTRL_SLVRDEIRQ |
                      SCALER_DISPCTRL_DSPEIEOF(0) |
                      SCALER_DISPCTRL_DSPEIEOF(1) |
                      SCALER_DISPCTRL_DSPEIEOF(2) |
                      SCALER_DISPCTRL_DSPEIEOLN(0) |
                      SCALER_DISPCTRL_DSPEIEOLN(1) |
                      SCALER_DISPCTRL_DSPEIEOLN(2) |
                      SCALER_DISPCTRL_DSPEISLUR(0) |
                      SCALER_DISPCTRL_DSPEISLUR(1) |
                      SCALER_DISPCTRL_DSPEISLUR(2) |
                      SCALER_DISPCTRL_SCLEIRQ);

        HVS_WRITE(SCALER_DISPCTRL, dispctrl);

        ret = devm_request_irq(dev, platform_get_irq(pdev, 0),
                               vc4_hvs_irq_handler, 0, "vc4 hvs", drm);
        if (ret)
                return ret;

        vc4_debugfs_add_regset32(drm, "hvs_regs", &hvs->regset);
        vc4_debugfs_add_file(drm, "hvs_underrun", vc4_hvs_debugfs_underrun,
                             NULL);
        vc4_debugfs_add_file(drm, "hvs_dlists", vc4_hvs_debugfs_dlist,
                             NULL);

        return 0;
}

static void vc4_hvs_unbind(struct device *dev, struct device *master,
                           void *data)
{
        struct drm_device *drm = dev_get_drvdata(master);
        struct vc4_dev *vc4 = to_vc4_dev(drm);
        struct vc4_hvs *hvs = vc4->hvs;

        if (drm_mm_node_allocated(&vc4->hvs->mitchell_netravali_filter))
                drm_mm_remove_node(&vc4->hvs->mitchell_netravali_filter);

        drm_mm_takedown(&vc4->hvs->dlist_mm);
        drm_mm_takedown(&vc4->hvs->lbm_mm);

        clk_disable_unprepare(hvs->core_clk);

        vc4->hvs = NULL;
}

static const struct component_ops vc4_hvs_ops = {
        .bind   = vc4_hvs_bind,
        .unbind = vc4_hvs_unbind,
};

static int vc4_hvs_dev_probe(struct platform_device *pdev)
{
        return component_add(&pdev->dev, &vc4_hvs_ops);
}

static int vc4_hvs_dev_remove(struct platform_device *pdev)
{
        component_del(&pdev->dev, &vc4_hvs_ops);
        return 0;
}

static const struct of_device_id vc4_hvs_dt_match[] = {
        { .compatible = "brcm,bcm2711-hvs" },
        { .compatible = "brcm,bcm2835-hvs" },
        {}
};

struct platform_driver vc4_hvs_driver = {
        .probe = vc4_hvs_dev_probe,
        .remove = vc4_hvs_dev_remove,
        .driver = {
                .name = "vc4_hvs",
                .of_match_table = vc4_hvs_dt_match,
        },
};