// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2020 Unisoc Inc.
 */

#include <linux/component.h>
#include <linux/delay.h>
#include <linux/dma-buf.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/of_device.h>
#include <linux/of_graph.h>
#include <linux/of_irq.h>
#include <linux/wait.h>
#include <linux/workqueue.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_blend.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_plane_helper.h>

#include "sprd_drm.h"
#include "sprd_dpu.h"
#include "sprd_dsi.h"

/* Global control registers */
#define REG_DPU_CTRL                    0x04
#define REG_DPU_CFG0                    0x08
#define REG_PANEL_SIZE                  0x20
#define REG_BLEND_SIZE                  0x24
#define REG_BG_COLOR                    0x2C

/* Layer0 control registers */
#define REG_LAY_BASE_ADDR0              0x30
#define REG_LAY_BASE_ADDR1              0x34
#define REG_LAY_BASE_ADDR2              0x38
#define REG_LAY_CTRL                    0x40
#define REG_LAY_SIZE                    0x44
#define REG_LAY_PITCH                   0x48
#define REG_LAY_POS                     0x4C
#define REG_LAY_ALPHA                   0x50
#define REG_LAY_CROP_START              0x5C

/* Interrupt control registers */
#define REG_DPU_INT_EN                  0x1E0
#define REG_DPU_INT_CLR                 0x1E4
#define REG_DPU_INT_STS                 0x1E8

/* DPI control registers */
#define REG_DPI_CTRL                    0x1F0
#define REG_DPI_H_TIMING                0x1F4
#define REG_DPI_V_TIMING                0x1F8

/* MMU control registers */
#define REG_MMU_EN                      0x800
#define REG_MMU_VPN_RANGE               0x80C
#define REG_MMU_PPN1                    0x83C
#define REG_MMU_RANGE1                  0x840
#define REG_MMU_PPN2                    0x844
#define REG_MMU_RANGE2                  0x848

/* Global control bits */
#define BIT_DPU_RUN                     BIT(0)
#define BIT_DPU_STOP                    BIT(1)
#define BIT_DPU_REG_UPDATE              BIT(2)
#define BIT_DPU_IF_EDPI                 BIT(0)

/* Layer control bits */
#define BIT_DPU_LAY_EN                          BIT(0)
#define BIT_DPU_LAY_LAYER_ALPHA                 (0x01 << 2)
#define BIT_DPU_LAY_COMBO_ALPHA                 (0x02 << 2)
#define BIT_DPU_LAY_FORMAT_YUV422_2PLANE        (0x00 << 4)
#define BIT_DPU_LAY_FORMAT_YUV420_2PLANE        (0x01 << 4)
#define BIT_DPU_LAY_FORMAT_YUV420_3PLANE        (0x02 << 4)
#define BIT_DPU_LAY_FORMAT_ARGB8888             (0x03 << 4)
#define BIT_DPU_LAY_FORMAT_RGB565               (0x04 << 4)
#define BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3        (0x00 << 8)
#define BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0        (0x01 << 8)
#define BIT_DPU_LAY_NO_SWITCH                   (0x00 << 10)
#define BIT_DPU_LAY_RB_OR_UV_SWITCH             (0x01 << 10)
#define BIT_DPU_LAY_MODE_BLEND_NORMAL           (0x00 << 16)
#define BIT_DPU_LAY_MODE_BLEND_PREMULT          (0x01 << 16)
#define BIT_DPU_LAY_ROTATION_0                  (0x00 << 20)
#define BIT_DPU_LAY_ROTATION_90                 (0x01 << 20)
#define BIT_DPU_LAY_ROTATION_180                (0x02 << 20)
#define BIT_DPU_LAY_ROTATION_270                (0x03 << 20)
#define BIT_DPU_LAY_ROTATION_0_M                (0x04 << 20)
#define BIT_DPU_LAY_ROTATION_90_M               (0x05 << 20)
#define BIT_DPU_LAY_ROTATION_180_M              (0x06 << 20)
#define BIT_DPU_LAY_ROTATION_270_M              (0x07 << 20)

/* Interrupt control & status bits */
#define BIT_DPU_INT_DONE                BIT(0)
#define BIT_DPU_INT_TE                  BIT(1)
#define BIT_DPU_INT_ERR                 BIT(2)
#define BIT_DPU_INT_UPDATE_DONE         BIT(4)
#define BIT_DPU_INT_VSYNC               BIT(5)

/* DPI control bits */
#define BIT_DPU_EDPI_TE_EN              BIT(8)
#define BIT_DPU_EDPI_FROM_EXTERNAL_PAD  BIT(10)
#define BIT_DPU_DPI_HALT_EN             BIT(16)

static const u32 layer_fmts[] = {
        DRM_FORMAT_XRGB8888,
        DRM_FORMAT_XBGR8888,
        DRM_FORMAT_ARGB8888,
        DRM_FORMAT_ABGR8888,
        DRM_FORMAT_RGBA8888,
        DRM_FORMAT_BGRA8888,
        DRM_FORMAT_RGBX8888,
        DRM_FORMAT_RGB565,
        DRM_FORMAT_BGR565,
        DRM_FORMAT_NV12,
        DRM_FORMAT_NV21,
        DRM_FORMAT_NV16,
        DRM_FORMAT_NV61,
        DRM_FORMAT_YUV420,
        DRM_FORMAT_YVU420,
};

struct sprd_plane {
        struct drm_plane base;
};

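/*
 * Completion handshake with the DPU interrupt handler: sprd_dpu_isr() sets
 * ctx->evt_stop on a "done" interrupt and ctx->evt_update on an "update done"
 * interrupt, then wakes ctx->wait_queue. The two helpers below block (up to
 * 500 ms) until the corresponding event arrives.
 */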
static int dpu_wait_stop_done(struct sprd_dpu *dpu)
{
        struct dpu_context *ctx = &dpu->ctx;
        int rc;

        if (ctx->stopped)
                return 0;

        rc = wait_event_interruptible_timeout(ctx->wait_queue, ctx->evt_stop,
                                              msecs_to_jiffies(500));
        ctx->evt_stop = false;

        ctx->stopped = true;

        if (!rc) {
                drm_err(dpu->drm, "dpu wait for stop done timed out!\n");
                return -ETIMEDOUT;
        }

        return 0;
}

static int dpu_wait_update_done(struct sprd_dpu *dpu)
{
        struct dpu_context *ctx = &dpu->ctx;
        int rc;

        ctx->evt_update = false;

        rc = wait_event_interruptible_timeout(ctx->wait_queue, ctx->evt_update,
                                              msecs_to_jiffies(500));

        if (!rc) {
                drm_err(dpu->drm, "dpu wait for reg update done timed out!\n");
                return -ETIMEDOUT;
        }

        return 0;
}

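/*
 * Translate a DRM fourcc into the layer control bits. The hardware natively
 * scans out ARGB8888, RGB565 and 2-/3-plane YUV; the remaining entries of
 * layer_fmts[] are reached by combining the byte-endian and R/B (or U/V)
 * swap bits with one of those base formats. An unsupported format returns 0,
 * which sprd_plane_atomic_check() treats as an error.
 */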
static u32 drm_format_to_dpu(struct drm_framebuffer *fb)
{
        u32 format = 0;

        switch (fb->format->format) {
        case DRM_FORMAT_BGRA8888:
                /* BGRA8888 -> ARGB8888 */
                format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
                format |= BIT_DPU_LAY_FORMAT_ARGB8888;
                break;
        case DRM_FORMAT_RGBX8888:
        case DRM_FORMAT_RGBA8888:
                /* RGBA8888 -> ABGR8888 */
                format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
                fallthrough;
        case DRM_FORMAT_ABGR8888:
                /* RB switch */
                format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
                fallthrough;
        case DRM_FORMAT_ARGB8888:
                format |= BIT_DPU_LAY_FORMAT_ARGB8888;
                break;
        case DRM_FORMAT_XBGR8888:
                /* RB switch */
                format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
                fallthrough;
        case DRM_FORMAT_XRGB8888:
                format |= BIT_DPU_LAY_FORMAT_ARGB8888;
                break;
        case DRM_FORMAT_BGR565:
                /* RB switch */
                format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
                fallthrough;
        case DRM_FORMAT_RGB565:
                format |= BIT_DPU_LAY_FORMAT_RGB565;
                break;
        case DRM_FORMAT_NV12:
                /* 2-plane YUV420 */
                format |= BIT_DPU_LAY_FORMAT_YUV420_2PLANE;
                /* Y endian */
                format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
                /* UV order */
                format |= BIT_DPU_LAY_NO_SWITCH;
                break;
        case DRM_FORMAT_NV21:
                /* 2-plane YUV420 */
                format |= BIT_DPU_LAY_FORMAT_YUV420_2PLANE;
                /* Y endian */
                format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
                /* UV order */
                format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
                break;
        case DRM_FORMAT_NV16:
                /* 2-plane YUV422 */
                format |= BIT_DPU_LAY_FORMAT_YUV422_2PLANE;
                /* Y endian */
                format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
                /* UV order */
                format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
                break;
        case DRM_FORMAT_NV61:
                /* 2-plane YUV422 */
                format |= BIT_DPU_LAY_FORMAT_YUV422_2PLANE;
                /* Y endian */
                format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
                /* UV order */
                format |= BIT_DPU_LAY_NO_SWITCH;
                break;
        case DRM_FORMAT_YUV420:
                format |= BIT_DPU_LAY_FORMAT_YUV420_3PLANE;
                /* Y endian */
                format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
                /* UV order */
                format |= BIT_DPU_LAY_NO_SWITCH;
                break;
        case DRM_FORMAT_YVU420:
                format |= BIT_DPU_LAY_FORMAT_YUV420_3PLANE;
                /* Y endian */
                format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
                /* UV order */
                format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
                break;
        default:
                break;
        }

        return format;
}

static u32 drm_rotation_to_dpu(struct drm_plane_state *state)
{
        u32 rotation = 0;

        switch (state->rotation) {
        default:
        case DRM_MODE_ROTATE_0:
                rotation = BIT_DPU_LAY_ROTATION_0;
                break;
        case DRM_MODE_ROTATE_90:
                rotation = BIT_DPU_LAY_ROTATION_90;
                break;
        case DRM_MODE_ROTATE_180:
                rotation = BIT_DPU_LAY_ROTATION_180;
                break;
        case DRM_MODE_ROTATE_270:
                rotation = BIT_DPU_LAY_ROTATION_270;
                break;
        case DRM_MODE_REFLECT_Y:
                rotation = BIT_DPU_LAY_ROTATION_180_M;
                break;
        case (DRM_MODE_REFLECT_Y | DRM_MODE_ROTATE_90):
                rotation = BIT_DPU_LAY_ROTATION_90_M;
                break;
        case DRM_MODE_REFLECT_X:
                rotation = BIT_DPU_LAY_ROTATION_0_M;
                break;
        case (DRM_MODE_REFLECT_X | DRM_MODE_ROTATE_90):
                rotation = BIT_DPU_LAY_ROTATION_270_M;
                break;
        }

        return rotation;
}

static u32 drm_blend_to_dpu(struct drm_plane_state *state)
{
        u32 blend = 0;

        switch (state->pixel_blend_mode) {
        case DRM_MODE_BLEND_COVERAGE:
                /* alpha mode select - combo alpha */
                blend |= BIT_DPU_LAY_COMBO_ALPHA;
                /* normal mode */
                blend |= BIT_DPU_LAY_MODE_BLEND_NORMAL;
                break;
        case DRM_MODE_BLEND_PREMULTI:
                /* alpha mode select - combo alpha */
                blend |= BIT_DPU_LAY_COMBO_ALPHA;
                /* pre-multiplied mode */
                blend |= BIT_DPU_LAY_MODE_BLEND_PREMULT;
                break;
        case DRM_MODE_BLEND_PIXEL_NONE:
        default:
                /* don't do per-pixel blending, use the layer alpha */
                blend |= BIT_DPU_LAY_LAYER_ALPHA;
                break;
        }

        return blend;
}

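/*
 * Program one hardware layer from a plane state. The layer register bank is
 * selected by the plane zpos passed as the layer_reg_wr() index. Position,
 * size and crop are packed as two 16-bit fields (low half = x/width, high
 * half = y/height): e.g. a 1920x1080 source gives
 * size = (1080 << 16) | 1920 = 0x04380780. Pitch is programmed in pixels,
 * i.e. bytes divided by cpp.
 */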
static void sprd_dpu_layer(struct sprd_dpu *dpu, struct drm_plane_state *state)
{
        struct dpu_context *ctx = &dpu->ctx;
        struct drm_gem_cma_object *cma_obj;
        struct drm_framebuffer *fb = state->fb;
        u32 addr, size, offset, pitch, blend, format, rotation;
        u32 src_x = state->src_x >> 16;
        u32 src_y = state->src_y >> 16;
        u32 src_w = state->src_w >> 16;
        u32 src_h = state->src_h >> 16;
        u32 dst_x = state->crtc_x;
        u32 dst_y = state->crtc_y;
        u32 alpha = state->alpha;
        u32 index = state->zpos;
        int i;

        offset = (dst_x & 0xffff) | (dst_y << 16);
        size = (src_w & 0xffff) | (src_h << 16);

        for (i = 0; i < fb->format->num_planes; i++) {
                cma_obj = drm_fb_cma_get_gem_obj(fb, i);
                addr = cma_obj->paddr + fb->offsets[i];

                if (i == 0)
                        layer_reg_wr(ctx, REG_LAY_BASE_ADDR0, addr, index);
                else if (i == 1)
                        layer_reg_wr(ctx, REG_LAY_BASE_ADDR1, addr, index);
                else
                        layer_reg_wr(ctx, REG_LAY_BASE_ADDR2, addr, index);
        }

        if (fb->format->num_planes == 3) {
                /* UV pitch is 1/2 of Y pitch */
                pitch = (fb->pitches[0] / fb->format->cpp[0]) |
                        (fb->pitches[0] / fb->format->cpp[0] << 15);
        } else {
                pitch = fb->pitches[0] / fb->format->cpp[0];
        }

        layer_reg_wr(ctx, REG_LAY_POS, offset, index);
        layer_reg_wr(ctx, REG_LAY_SIZE, size, index);
        layer_reg_wr(ctx, REG_LAY_CROP_START,
                     src_y << 16 | src_x, index);
        layer_reg_wr(ctx, REG_LAY_ALPHA, alpha, index);
        layer_reg_wr(ctx, REG_LAY_PITCH, pitch, index);

        format = drm_format_to_dpu(fb);
        blend = drm_blend_to_dpu(state);
        rotation = drm_rotation_to_dpu(state);

        layer_reg_wr(ctx, REG_LAY_CTRL, BIT_DPU_LAY_EN |
                     format |
                     blend |
                     rotation,
                     index);
}

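/*
 * Commit the configuration written by sprd_dpu_layer(). In DPI (video) mode
 * the DPU latches its shadow registers on BIT_DPU_REG_UPDATE and signals
 * completion with the "update done" interrupt; in EDPI (command) mode there
 * is no shadow copy, so the controller has to be stopped before the registers
 * are touched and restarted with BIT_DPU_RUN afterwards.
 */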
static void sprd_dpu_flip(struct sprd_dpu *dpu)
{
        struct dpu_context *ctx = &dpu->ctx;

        /*
         * Make sure the dpu is in stop status. The DPU has no shadow
         * registers in EDPI mode, so the config registers can only be
         * updated on the rising edge of the DPU_RUN bit.
         */
        if (ctx->if_type == SPRD_DPU_IF_EDPI)
                dpu_wait_stop_done(dpu);

        /* update trigger and wait */
        if (ctx->if_type == SPRD_DPU_IF_DPI) {
                if (!ctx->stopped) {
                        dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_REG_UPDATE);
                        dpu_wait_update_done(dpu);
                }

                dpu_reg_set(ctx, REG_DPU_INT_EN, BIT_DPU_INT_ERR);
        } else if (ctx->if_type == SPRD_DPU_IF_EDPI) {
                dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_RUN);

                ctx->stopped = false;
        }
}

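/*
 * One-time controller setup at CRTC enable: clear the background color,
 * leave the internal MMU disabled with default VPN/PPN ranges, select the
 * DPI or EDPI interface according to ctx->if_type, and build the interrupt
 * mask that is finally written to REG_DPU_INT_EN.
 */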
static void sprd_dpu_init(struct sprd_dpu *dpu)
{
        struct dpu_context *ctx = &dpu->ctx;
        u32 int_mask = 0;

        writel(0x00, ctx->base + REG_BG_COLOR);
        writel(0x00, ctx->base + REG_MMU_EN);
        writel(0x00, ctx->base + REG_MMU_PPN1);
        writel(0xffff, ctx->base + REG_MMU_RANGE1);
        writel(0x00, ctx->base + REG_MMU_PPN2);
        writel(0xffff, ctx->base + REG_MMU_RANGE2);
        writel(0x1ffff, ctx->base + REG_MMU_VPN_RANGE);

        if (ctx->if_type == SPRD_DPU_IF_DPI) {
                /* use dpi as interface */
                dpu_reg_clr(ctx, REG_DPU_CFG0, BIT_DPU_IF_EDPI);
                /* disable the halt function for SPRD DSI */
                dpu_reg_clr(ctx, REG_DPI_CTRL, BIT_DPU_DPI_HALT_EN);
                /* select te from external pad */
                dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_FROM_EXTERNAL_PAD);

                /* enable dpu update done INT */
                int_mask |= BIT_DPU_INT_UPDATE_DONE;
                /* enable dpu done INT */
                int_mask |= BIT_DPU_INT_DONE;
                /* enable dpu dpi vsync INT */
                int_mask |= BIT_DPU_INT_VSYNC;
                /* enable dpu TE INT */
                int_mask |= BIT_DPU_INT_TE;
                /* enable underflow err INT */
                int_mask |= BIT_DPU_INT_ERR;
        } else if (ctx->if_type == SPRD_DPU_IF_EDPI) {
                /* use edpi as interface */
                dpu_reg_set(ctx, REG_DPU_CFG0, BIT_DPU_IF_EDPI);
                /* use external te */
                dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_FROM_EXTERNAL_PAD);
                /* enable te */
                dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_TE_EN);

                /* enable stop done INT */
                int_mask |= BIT_DPU_INT_DONE;
                /* enable TE INT */
                int_mask |= BIT_DPU_INT_TE;
        }

        writel(int_mask, ctx->base + REG_DPU_INT_EN);
}

static void sprd_dpu_fini(struct sprd_dpu *dpu)
{
        struct dpu_context *ctx = &dpu->ctx;

        writel(0x00, ctx->base + REG_DPU_INT_EN);
        writel(0xff, ctx->base + REG_DPU_INT_CLR);
}

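/*
 * Program the panel geometry and, in DPI mode, the sync timings. Each timing
 * register packs three fields: sync length at bit 0, back porch at bit 8 and
 * front porch at bit 20. For example, hsync/hbp/hfp = 4/12/20 would be
 * written as (20 << 20) | (12 << 8) | 4 = 0x01400c04.
 */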
static void sprd_dpi_init(struct sprd_dpu *dpu)
{
        struct dpu_context *ctx = &dpu->ctx;
        u32 reg_val;
        u32 size;

        size = (ctx->vm.vactive << 16) | ctx->vm.hactive;
        writel(size, ctx->base + REG_PANEL_SIZE);
        writel(size, ctx->base + REG_BLEND_SIZE);

        if (ctx->if_type == SPRD_DPU_IF_DPI) {
                /* set dpi timing */
                reg_val = ctx->vm.hsync_len << 0 |
                          ctx->vm.hback_porch << 8 |
                          ctx->vm.hfront_porch << 20;
                writel(reg_val, ctx->base + REG_DPI_H_TIMING);

                reg_val = ctx->vm.vsync_len << 0 |
                          ctx->vm.vback_porch << 8 |
                          ctx->vm.vfront_porch << 20;
                writel(reg_val, ctx->base + REG_DPI_V_TIMING);
        }
}

void sprd_dpu_run(struct sprd_dpu *dpu)
{
        struct dpu_context *ctx = &dpu->ctx;

        dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_RUN);

        ctx->stopped = false;
}

void sprd_dpu_stop(struct sprd_dpu *dpu)
{
        struct dpu_context *ctx = &dpu->ctx;

        if (ctx->if_type == SPRD_DPU_IF_DPI)
                dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_STOP);

        dpu_wait_stop_done(dpu);
}

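/*
 * Validate a plane update: the framebuffer format must map to a hardware
 * layer format (drm_format_to_dpu() returning 0 means unsupported), and since
 * the DPU layers cannot scale, the atomic helper is called with
 * DRM_PLANE_HELPER_NO_SCALING for both limits.
 */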
static int sprd_plane_atomic_check(struct drm_plane *plane,
                                   struct drm_atomic_state *state)
{
        struct drm_plane_state *plane_state = drm_atomic_get_new_plane_state(state,
                                                                             plane);
        struct drm_crtc_state *crtc_state;
        u32 fmt;

        if (!plane_state->fb || !plane_state->crtc)
                return 0;

        fmt = drm_format_to_dpu(plane_state->fb);
        if (!fmt)
                return -EINVAL;

        crtc_state = drm_atomic_get_crtc_state(plane_state->state, plane_state->crtc);
        if (IS_ERR(crtc_state))
                return PTR_ERR(crtc_state);

        return drm_atomic_helper_check_plane_state(plane_state, crtc_state,
                                                   DRM_PLANE_HELPER_NO_SCALING,
                                                   DRM_PLANE_HELPER_NO_SCALING,
                                                   true, true);
}

static void sprd_plane_atomic_update(struct drm_plane *drm_plane,
                                     struct drm_atomic_state *state)
{
        struct drm_plane_state *new_state = drm_atomic_get_new_plane_state(state,
                                                                           drm_plane);
        struct sprd_dpu *dpu = to_sprd_crtc(new_state->crtc);

        /* start configuring the dpu layer */
        sprd_dpu_layer(dpu, new_state);
}

static void sprd_plane_atomic_disable(struct drm_plane *drm_plane,
                                      struct drm_atomic_state *state)
{
        struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state,
                                                                           drm_plane);
        struct sprd_dpu *dpu = to_sprd_crtc(old_state->crtc);

        layer_reg_wr(&dpu->ctx, REG_LAY_CTRL, 0x00, old_state->zpos);
}

static void sprd_plane_create_properties(struct sprd_plane *plane, int index)
{
        unsigned int supported_modes = BIT(DRM_MODE_BLEND_PIXEL_NONE) |
                                       BIT(DRM_MODE_BLEND_PREMULTI) |
                                       BIT(DRM_MODE_BLEND_COVERAGE);

        /* create rotation property */
        drm_plane_create_rotation_property(&plane->base,
                                           DRM_MODE_ROTATE_0,
                                           DRM_MODE_ROTATE_MASK |
                                           DRM_MODE_REFLECT_MASK);

        /* create alpha property */
        drm_plane_create_alpha_property(&plane->base);

        /* create blend mode property */
        drm_plane_create_blend_mode_property(&plane->base, supported_modes);

        /* create zpos property */
        drm_plane_create_zpos_immutable_property(&plane->base, index);
}

static const struct drm_plane_helper_funcs sprd_plane_helper_funcs = {
        .atomic_check = sprd_plane_atomic_check,
        .atomic_update = sprd_plane_atomic_update,
        .atomic_disable = sprd_plane_atomic_disable,
};

static const struct drm_plane_funcs sprd_plane_funcs = {
        .update_plane = drm_atomic_helper_update_plane,
        .disable_plane = drm_atomic_helper_disable_plane,
        .destroy = drm_plane_cleanup,
        .reset = drm_atomic_helper_plane_reset,
        .atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
        .atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
};

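/*
 * The DPU composes up to six layers; layer 0 is exposed as the primary plane
 * and the rest as overlays. Each plane gets an immutable zpos equal to its
 * hardware layer index, which sprd_dpu_layer() later uses to select the layer
 * register bank.
 */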
static struct sprd_plane *sprd_planes_init(struct drm_device *drm)
{
        struct sprd_plane *plane, *primary;
        enum drm_plane_type plane_type;
        int i;

        for (i = 0; i < 6; i++) {
                plane_type = (i == 0) ? DRM_PLANE_TYPE_PRIMARY :
                                        DRM_PLANE_TYPE_OVERLAY;

                plane = drmm_universal_plane_alloc(drm, struct sprd_plane, base,
                                                   1, &sprd_plane_funcs,
                                                   layer_fmts, ARRAY_SIZE(layer_fmts),
                                                   NULL, plane_type, NULL);
                if (IS_ERR(plane)) {
                        drm_err(drm, "failed to init drm plane: %d\n", i);
                        return plane;
                }

                drm_plane_helper_add(&plane->base, &sprd_plane_helper_funcs);

                sprd_plane_create_properties(plane, i);

                if (i == 0)
                        primary = plane;
        }

        return primary;
}

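/*
 * Capture the adjusted mode as a videomode and derive the DPU interface type
 * from the attached DSI peripheral: video-mode panels use the DPI interface,
 * command-mode panels use EDPI.
 */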
static void sprd_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
        struct sprd_dpu *dpu = to_sprd_crtc(crtc);
        struct drm_display_mode *mode = &crtc->state->adjusted_mode;
        struct drm_encoder *encoder;
        struct sprd_dsi *dsi;

        drm_display_mode_to_videomode(mode, &dpu->ctx.vm);

        drm_for_each_encoder_mask(encoder, crtc->dev,
                                  crtc->state->encoder_mask) {
                dsi = encoder_to_dsi(encoder);

                if (dsi->slave->mode_flags & MIPI_DSI_MODE_VIDEO)
                        dpu->ctx.if_type = SPRD_DPU_IF_DPI;
                else
                        dpu->ctx.if_type = SPRD_DPU_IF_EDPI;
        }

        sprd_dpi_init(dpu);
}

static void sprd_crtc_atomic_enable(struct drm_crtc *crtc,
                                    struct drm_atomic_state *state)
{
        struct sprd_dpu *dpu = to_sprd_crtc(crtc);

        sprd_dpu_init(dpu);

        drm_crtc_vblank_on(&dpu->base);
}

static void sprd_crtc_atomic_disable(struct drm_crtc *crtc,
                                     struct drm_atomic_state *state)
{
        struct sprd_dpu *dpu = to_sprd_crtc(crtc);
        struct drm_device *drm = dpu->base.dev;

        drm_crtc_vblank_off(&dpu->base);

        sprd_dpu_fini(dpu);

        spin_lock_irq(&drm->event_lock);
        if (crtc->state->event) {
                drm_crtc_send_vblank_event(crtc, crtc->state->event);
                crtc->state->event = NULL;
        }
        spin_unlock_irq(&drm->event_lock);
}

static void sprd_crtc_atomic_flush(struct drm_crtc *crtc,
                                   struct drm_atomic_state *state)
{
        struct sprd_dpu *dpu = to_sprd_crtc(crtc);
        struct drm_device *drm = dpu->base.dev;

        sprd_dpu_flip(dpu);

        spin_lock_irq(&drm->event_lock);
        if (crtc->state->event) {
                drm_crtc_send_vblank_event(crtc, crtc->state->event);
                crtc->state->event = NULL;
        }
        spin_unlock_irq(&drm->event_lock);
}

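/*
 * Vblank reporting is driven by the DPU VSYNC interrupt; enabling or
 * disabling vblank simply toggles that bit in REG_DPU_INT_EN.
 */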
static int sprd_crtc_enable_vblank(struct drm_crtc *crtc)
{
        struct sprd_dpu *dpu = to_sprd_crtc(crtc);

        dpu_reg_set(&dpu->ctx, REG_DPU_INT_EN, BIT_DPU_INT_VSYNC);

        return 0;
}

static void sprd_crtc_disable_vblank(struct drm_crtc *crtc)
{
        struct sprd_dpu *dpu = to_sprd_crtc(crtc);

        dpu_reg_clr(&dpu->ctx, REG_DPU_INT_EN, BIT_DPU_INT_VSYNC);
}

static const struct drm_crtc_helper_funcs sprd_crtc_helper_funcs = {
        .mode_set_nofb = sprd_crtc_mode_set_nofb,
        .atomic_flush = sprd_crtc_atomic_flush,
        .atomic_enable = sprd_crtc_atomic_enable,
        .atomic_disable = sprd_crtc_atomic_disable,
};

static const struct drm_crtc_funcs sprd_crtc_funcs = {
        .destroy = drm_crtc_cleanup,
        .set_config = drm_atomic_helper_set_config,
        .page_flip = drm_atomic_helper_page_flip,
        .reset = drm_atomic_helper_crtc_reset,
        .atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
        .atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
        .enable_vblank = sprd_crtc_enable_vblank,
        .disable_vblank = sprd_crtc_disable_vblank,
};

static struct sprd_dpu *sprd_crtc_init(struct drm_device *drm,
                                       struct drm_plane *primary, struct device *dev)
{
        struct device_node *port;
        struct sprd_dpu *dpu;

        dpu = drmm_crtc_alloc_with_planes(drm, struct sprd_dpu, base,
                                          primary, NULL,
                                          &sprd_crtc_funcs, NULL);
        if (IS_ERR(dpu)) {
                drm_err(drm, "failed to init crtc\n");
                return dpu;
        }
        drm_crtc_helper_add(&dpu->base, &sprd_crtc_helper_funcs);

        /*
         * set the crtc port so that the drm_of_find_possible_crtcs call works
         */
        port = of_graph_get_port_by_id(dev->of_node, 0);
        if (!port) {
                drm_err(drm, "failed to find crtc output port for %s\n",
                        dev->of_node->full_name);
                return ERR_PTR(-EINVAL);
        }
        dpu->base.port = port;
        of_node_put(port);

        return dpu;
}

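/*
 * Interrupt handler: on an underflow error the ERR interrupt is masked again
 * (presumably to avoid an interrupt storm), "update done" and "done" wake the
 * waiters in dpu_wait_update_done()/dpu_wait_stop_done(), and VSYNC feeds the
 * vblank machinery. All asserted bits are then cleared by writing them back
 * to REG_DPU_INT_CLR.
 */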
static irqreturn_t sprd_dpu_isr(int irq, void *data)
{
        struct sprd_dpu *dpu = data;
        struct dpu_context *ctx = &dpu->ctx;
        u32 reg_val, int_mask = 0;

        reg_val = readl(ctx->base + REG_DPU_INT_STS);

        /* disable the err interrupt */
        if (reg_val & BIT_DPU_INT_ERR) {
                int_mask |= BIT_DPU_INT_ERR;
                drm_warn(dpu->drm, "Warning: dpu underflow!\n");
        }

        /* dpu update done isr */
        if (reg_val & BIT_DPU_INT_UPDATE_DONE) {
                ctx->evt_update = true;
                wake_up_interruptible_all(&ctx->wait_queue);
        }

        /* dpu stop done isr */
        if (reg_val & BIT_DPU_INT_DONE) {
                ctx->evt_stop = true;
                wake_up_interruptible_all(&ctx->wait_queue);
        }

        if (reg_val & BIT_DPU_INT_VSYNC)
                drm_crtc_handle_vblank(&dpu->base);

        writel(reg_val, ctx->base + REG_DPU_INT_CLR);
        dpu_reg_clr(ctx, REG_DPU_INT_EN, int_mask);

        return IRQ_HANDLED;
}

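/*
 * Map the register window and hook up the interrupt. Interrupts are masked
 * and any pending status cleared before devm_request_irq(), so stale state
 * (for example left over from a bootloader splash screen) cannot fire the
 * handler before the driver is ready.
 */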
static int sprd_dpu_context_init(struct sprd_dpu *dpu,
                                 struct device *dev)
{
        struct platform_device *pdev = to_platform_device(dev);
        struct dpu_context *ctx = &dpu->ctx;
        struct resource *res;
        int ret;

        res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        if (!res) {
                dev_err(dev, "failed to get I/O resource\n");
                return -EINVAL;
        }

        ctx->base = devm_ioremap(dev, res->start, resource_size(res));
        if (!ctx->base) {
                dev_err(dev, "failed to map dpu registers\n");
                return -EFAULT;
        }

        ctx->irq = platform_get_irq(pdev, 0);
        if (ctx->irq < 0) {
                dev_err(dev, "failed to get dpu irq\n");
                return ctx->irq;
        }

        /* disable and clear interrupts before registering the dpu IRQ */
        writel(0x00, ctx->base + REG_DPU_INT_EN);
        writel(0xff, ctx->base + REG_DPU_INT_CLR);

        ret = devm_request_irq(dev, ctx->irq, sprd_dpu_isr,
                               IRQF_TRIGGER_NONE, "DPU", dpu);
        if (ret) {
                dev_err(dev, "failed to register dpu irq handler\n");
                return ret;
        }

        init_waitqueue_head(&ctx->wait_queue);

        return 0;
}

static int sprd_dpu_bind(struct device *dev, struct device *master, void *data)
{
        struct drm_device *drm = data;
        struct sprd_dpu *dpu;
        struct sprd_plane *plane;
        int ret;

        plane = sprd_planes_init(drm);
        if (IS_ERR(plane))
                return PTR_ERR(plane);

        dpu = sprd_crtc_init(drm, &plane->base, dev);
        if (IS_ERR(dpu))
                return PTR_ERR(dpu);

        dpu->drm = drm;
        dev_set_drvdata(dev, dpu);

        ret = sprd_dpu_context_init(dpu, dev);
        if (ret)
                return ret;

        return 0;
}

static const struct component_ops dpu_component_ops = {
        .bind = sprd_dpu_bind,
};

static const struct of_device_id dpu_match_table[] = {
        { .compatible = "sprd,sharkl3-dpu" },
        { },
};
MODULE_DEVICE_TABLE(of, dpu_match_table);

static int sprd_dpu_probe(struct platform_device *pdev)
{
        return component_add(&pdev->dev, &dpu_component_ops);
}

static int sprd_dpu_remove(struct platform_device *pdev)
{
        component_del(&pdev->dev, &dpu_component_ops);

        return 0;
}

struct platform_driver sprd_dpu_driver = {
        .probe = sprd_dpu_probe,
        .remove = sprd_dpu_remove,
        .driver = {
                .name = "sprd-dpu-drv",
                .of_match_table = dpu_match_table,
        },
};

MODULE_AUTHOR("Leon He <leon.he@unisoc.com>");
MODULE_AUTHOR("Kevin Tang <kevin.tang@unisoc.com>");
MODULE_DESCRIPTION("Unisoc Display Controller Driver");
MODULE_LICENSE("GPL v2");