// SPDX-License-Identifier: MIT
/*
 * Copyright 2020 Noralf Trønnes
 */

#include <linux/lz4.h>
#include <linux/usb.h>
#include <linux/workqueue.h>

#include <drm/drm_atomic.h>
#include <drm/drm_connector.h>
#include <drm/drm_damage_helper.h>
#include <drm/drm_drv.h>
#include <drm/drm_format_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_print.h>
#include <drm/drm_rect.h>
#include <drm/drm_simple_kms_helper.h>
#include <drm/gud.h>

#include "gud_internal.h"

/*
 * Some userspace rendering loops run all displays in the same loop.
 * This means that a fast display will have to wait for a slow one.
 * For this reason gud does flushing asynchronously by default.
 * The downside is that in e.g. a single-display setup userspace thinks
 * the display is insanely fast since the driver reports back immediately
 * that the flush/pageflip is done. This wastes CPU and power.
 * Such users might want to set this module parameter to false.
 */
static bool gud_async_flush = true;
module_param_named(async_flush, gud_async_flush, bool, 0644);
MODULE_PARM_DESC(async_flush, "Enable asynchronous flushing [default=true]");

/*
 * FIXME: The driver is probably broken on Big Endian machines.
 * See discussion:
 * https://lore.kernel.org/dri-devel/CAKb7UvihLX0hgBOP3VBG7O+atwZcUVCPVuBdfmDMpg0NjXe-cQ@mail.gmail.com/
 */

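/* Compile-time endianness check: true when built for a big endian CPU. */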
static bool gud_is_big_endian(void)
{
#if defined(__BIG_ENDIAN)
    return true;
#else
    return false;
#endif
}

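/*
 * Convert XRGB8888 to a 1, 2 or 4 bits-per-pixel greyscale format, packing
 * pixels MSB first within each byte. Returns the destination length in
 * bytes, or zero if the temporary buffer allocation fails.
 */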
static size_t gud_xrgb8888_to_r124(u8 *dst, const struct drm_format_info *format,
                   void *src, struct drm_framebuffer *fb,
                   struct drm_rect *rect)
{
    unsigned int block_width = drm_format_info_block_width(format, 0);
    unsigned int bits_per_pixel = 8 / block_width;
    unsigned int x, y, width, height;
    u8 pix, *pix8, *block = dst; /* Assign to silence compiler warning */
    size_t len;
    void *buf;

    WARN_ON_ONCE(format->char_per_block[0] != 1);

    /* Start on a byte boundary */
    rect->x1 = ALIGN_DOWN(rect->x1, block_width);
    width = drm_rect_width(rect);
    height = drm_rect_height(rect);
    len = drm_format_info_min_pitch(format, 0, width) * height;

    buf = kmalloc(width * height, GFP_KERNEL);
    if (!buf)
        return 0;

    drm_fb_xrgb8888_to_gray8(buf, 0, src, fb, rect);
    pix8 = buf;

    for (y = 0; y < height; y++) {
        for (x = 0; x < width; x++) {
            unsigned int pixpos = x % block_width; /* within byte from the left */
            unsigned int pixshift = (block_width - pixpos - 1) * bits_per_pixel;

            if (!pixpos) {
                block = dst++;
                *block = 0;
            }

            pix = (*pix8++) >> (8 - bits_per_pixel);
            *block |= pix << pixshift;
        }
    }

    kfree(buf);

    return len;
}

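/*
 * Convert XRGB8888 to a packed low bits-per-pixel colour format, currently
 * only GUD_DRM_FORMAT_XRGB1111 (one bit per colour channel). Returns the
 * destination length in bytes.
 */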
static size_t gud_xrgb8888_to_color(u8 *dst, const struct drm_format_info *format,
                    void *src, struct drm_framebuffer *fb,
                    struct drm_rect *rect)
{
    unsigned int block_width = drm_format_info_block_width(format, 0);
    unsigned int bits_per_pixel = 8 / block_width;
    u8 r, g, b, pix, *block = dst; /* Assign to silence compiler warning */
    unsigned int x, y, width;
    u32 *pix32;
    size_t len;

    /* Start on a byte boundary */
    rect->x1 = ALIGN_DOWN(rect->x1, block_width);
    width = drm_rect_width(rect);
    len = drm_format_info_min_pitch(format, 0, width) * drm_rect_height(rect);

    for (y = rect->y1; y < rect->y2; y++) {
        pix32 = src + (y * fb->pitches[0]);
        pix32 += rect->x1;

        for (x = 0; x < width; x++) {
            unsigned int pixpos = x % block_width; /* within byte from the left */
            unsigned int pixshift = (block_width - pixpos - 1) * bits_per_pixel;

            if (!pixpos) {
                block = dst++;
                *block = 0;
            }

            r = *pix32 >> 16;
            g = *pix32 >> 8;
            b = *pix32++;

            switch (format->format) {
            case GUD_DRM_FORMAT_XRGB1111:
                pix = ((r >> 7) << 2) | ((g >> 7) << 1) | (b >> 7);
                break;
            default:
                WARN_ON_ONCE(1);
                return len;
            }

            *block |= pix << pixshift;
        }
    }

    return len;
}

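/*
 * Prepare a flush: map the framebuffer, convert the damage rectangle to the
 * transfer format if it differs from the framebuffer format, optionally LZ4
 * compress, and fill in the set_buffer request. Falls back to an
 * uncompressed transfer if the data doesn't compress.
 */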
static int gud_prep_flush(struct gud_device *gdrm, struct drm_framebuffer *fb,
              const struct drm_format_info *format, struct drm_rect *rect,
              struct gud_set_buffer_req *req)
{
    struct dma_buf_attachment *import_attach = fb->obj[0]->import_attach;
    u8 compression = gdrm->compression;
    struct iosys_map map[DRM_FORMAT_MAX_PLANES];
    struct iosys_map map_data[DRM_FORMAT_MAX_PLANES];
    void *vaddr, *buf;
    size_t pitch, len;
    int ret = 0;

    pitch = drm_format_info_min_pitch(format, 0, drm_rect_width(rect));
    len = pitch * drm_rect_height(rect);
    if (len > gdrm->bulk_len)
        return -E2BIG;

    ret = drm_gem_fb_vmap(fb, map, map_data);
    if (ret)
        return ret;

    vaddr = map_data[0].vaddr;

    ret = drm_gem_fb_begin_cpu_access(fb, DMA_FROM_DEVICE);
    if (ret)
        goto vunmap;
retry:
    if (compression)
        buf = gdrm->compress_buf;
    else
        buf = gdrm->bulk_buf;

    /*
     * Imported buffers are assumed to be write-combined and thus uncached
     * with slow reads (at least on ARM).
     */
    if (format != fb->format) {
        if (format->format == GUD_DRM_FORMAT_R1) {
            len = gud_xrgb8888_to_r124(buf, format, vaddr, fb, rect);
            if (!len) {
                ret = -ENOMEM;
                goto end_cpu_access;
            }
        } else if (format->format == DRM_FORMAT_R8) {
            drm_fb_xrgb8888_to_gray8(buf, 0, vaddr, fb, rect);
        } else if (format->format == DRM_FORMAT_RGB332) {
            drm_fb_xrgb8888_to_rgb332(buf, 0, vaddr, fb, rect);
        } else if (format->format == DRM_FORMAT_RGB565) {
            drm_fb_xrgb8888_to_rgb565(buf, 0, vaddr, fb, rect, gud_is_big_endian());
        } else if (format->format == DRM_FORMAT_RGB888) {
            drm_fb_xrgb8888_to_rgb888(buf, 0, vaddr, fb, rect);
        } else {
            len = gud_xrgb8888_to_color(buf, format, vaddr, fb, rect);
        }
    } else if (gud_is_big_endian() && format->cpp[0] > 1) {
        drm_fb_swab(buf, 0, vaddr, fb, rect, !import_attach);
    } else if (compression && !import_attach && pitch == fb->pitches[0]) {
        /* can compress directly from the framebuffer */
        buf = vaddr + rect->y1 * pitch;
    } else {
        drm_fb_memcpy(buf, 0, vaddr, fb, rect);
    }

    memset(req, 0, sizeof(*req));
    req->x = cpu_to_le32(rect->x1);
    req->y = cpu_to_le32(rect->y1);
    req->width = cpu_to_le32(drm_rect_width(rect));
    req->height = cpu_to_le32(drm_rect_height(rect));
    req->length = cpu_to_le32(len);

    if (compression & GUD_COMPRESSION_LZ4) {
        int complen;

        complen = LZ4_compress_default(buf, gdrm->bulk_buf, len, len, gdrm->lz4_comp_mem);
        if (complen <= 0) {
            compression = 0;
            goto retry;
        }

        req->compression = GUD_COMPRESSION_LZ4;
        req->compressed_length = cpu_to_le32(complen);
    }

end_cpu_access:
    drm_gem_fb_end_cpu_access(fb, DMA_FROM_DEVICE);
vunmap:
    drm_gem_fb_vunmap(fb, map);

    return ret;
}

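/* Bulk transfer context: a scatter-gather request guarded by a timeout timer. */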
struct gud_usb_bulk_context {
    struct timer_list timer;
    struct usb_sg_request sgr;
};

static void gud_usb_bulk_timeout(struct timer_list *t)
{
    struct gud_usb_bulk_context *ctx = from_timer(ctx, t, timer);

    usb_sg_cancel(&ctx->sgr);
}

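/*
 * Send len bytes from the bulk scatter-gather list to the device. The timer
 * callback cancels the transfer if it hasn't completed within 3 seconds.
 */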
static int gud_usb_bulk(struct gud_device *gdrm, size_t len)
{
    struct gud_usb_bulk_context ctx;
    int ret;

    ret = usb_sg_init(&ctx.sgr, gud_to_usb_device(gdrm), gdrm->bulk_pipe, 0,
              gdrm->bulk_sgt.sgl, gdrm->bulk_sgt.nents, len, GFP_KERNEL);
    if (ret)
        return ret;

    timer_setup_on_stack(&ctx.timer, gud_usb_bulk_timeout, 0);
    mod_timer(&ctx.timer, jiffies + msecs_to_jiffies(3000));

    usb_sg_wait(&ctx.sgr);

    if (!del_timer_sync(&ctx.timer))
        ret = -ETIMEDOUT;
    else if (ctx.sgr.status < 0)
        ret = ctx.sgr.status;
    else if (ctx.sgr.bytes != len)
        ret = -EIO;

    destroy_timer_on_stack(&ctx.timer);

    return ret;
}

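/*
 * Flush one rectangle to the device: prepare the transfer buffer, update the
 * transfer statistics and send the pixel data. The set_buffer request is
 * skipped for full-update devices unless the previous flush failed.
 */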
static int gud_flush_rect(struct gud_device *gdrm, struct drm_framebuffer *fb,
              const struct drm_format_info *format, struct drm_rect *rect)
{
    struct gud_set_buffer_req req;
    size_t len, trlen;
    int ret;

    drm_dbg(&gdrm->drm, "Flushing [FB:%d] " DRM_RECT_FMT "\n", fb->base.id, DRM_RECT_ARG(rect));

    ret = gud_prep_flush(gdrm, fb, format, rect, &req);
    if (ret)
        return ret;

    len = le32_to_cpu(req.length);

    if (req.compression)
        trlen = le32_to_cpu(req.compressed_length);
    else
        trlen = len;

    gdrm->stats_length += len;
    /* Did it wrap around? */
    if (gdrm->stats_length <= len && gdrm->stats_actual_length) {
        gdrm->stats_length = len;
        gdrm->stats_actual_length = 0;
    }
    gdrm->stats_actual_length += trlen;

    if (!(gdrm->flags & GUD_DISPLAY_FLAG_FULL_UPDATE) || gdrm->prev_flush_failed) {
        ret = gud_usb_set(gdrm, GUD_REQ_SET_BUFFER, 0, &req, sizeof(req));
        if (ret)
            return ret;
    }

    ret = gud_usb_bulk(gdrm, trlen);
    if (ret)
        gdrm->stats_num_errors++;

    return ret;
}

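/* Reset to an inverted (empty) rectangle so the min/max updates in gud_add_damage() work. */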
void gud_clear_damage(struct gud_device *gdrm)
{
    gdrm->damage.x1 = INT_MAX;
    gdrm->damage.y1 = INT_MAX;
    gdrm->damage.x2 = 0;
    gdrm->damage.y2 = 0;
}

static void gud_add_damage(struct gud_device *gdrm, struct drm_rect *damage)
{
    gdrm->damage.x1 = min(gdrm->damage.x1, damage->x1);
    gdrm->damage.y1 = min(gdrm->damage.y1, damage->y1);
    gdrm->damage.x2 = max(gdrm->damage.x2, damage->x2);
    gdrm->damage.y2 = max(gdrm->damage.y2, damage->y2);
}

static void gud_retry_failed_flush(struct gud_device *gdrm, struct drm_framebuffer *fb,
                   struct drm_rect *damage)
{
    /*
     * pipe_update waits for the worker when the display mode is going to change.
     * This ensures that the width and height are still the same, making it safe
     * to add back the damage.
     */

    mutex_lock(&gdrm->damage_lock);
    if (!gdrm->fb) {
        drm_framebuffer_get(fb);
        gdrm->fb = fb;
    }
    gud_add_damage(gdrm, damage);
    mutex_unlock(&gdrm->damage_lock);

    /* Retry only once to avoid a possible storm in case of continuous errors. */
    if (!gdrm->prev_flush_failed)
        queue_work(system_long_wq, &gdrm->work);
    gdrm->prev_flush_failed = true;
}

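/*
 * Worker that flushes the accumulated damage: takes over the pending
 * framebuffer and damage rectangle under the lock, then flushes in
 * horizontal slices sized to fit the bulk buffer. Disconnect errors are
 * final; other errors queue the damage for a single retry.
 */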
void gud_flush_work(struct work_struct *work)
{
    struct gud_device *gdrm = container_of(work, struct gud_device, work);
    const struct drm_format_info *format;
    struct drm_framebuffer *fb;
    struct drm_rect damage;
    unsigned int i, lines;
    int idx, ret = 0;
    size_t pitch;

    if (!drm_dev_enter(&gdrm->drm, &idx))
        return;

    mutex_lock(&gdrm->damage_lock);
    fb = gdrm->fb;
    gdrm->fb = NULL;
    damage = gdrm->damage;
    gud_clear_damage(gdrm);
    mutex_unlock(&gdrm->damage_lock);

    if (!fb)
        goto out;

    format = fb->format;
    if (format->format == DRM_FORMAT_XRGB8888 && gdrm->xrgb8888_emulation_format)
        format = gdrm->xrgb8888_emulation_format;

    /* Split update if it's too big */
    pitch = drm_format_info_min_pitch(format, 0, drm_rect_width(&damage));
    lines = drm_rect_height(&damage);

    if (gdrm->bulk_len < lines * pitch)
        lines = gdrm->bulk_len / pitch;

    for (i = 0; i < DIV_ROUND_UP(drm_rect_height(&damage), lines); i++) {
        struct drm_rect rect = damage;

        rect.y1 += i * lines;
        rect.y2 = min_t(u32, rect.y1 + lines, damage.y2);

        ret = gud_flush_rect(gdrm, fb, format, &rect);
        if (ret) {
            if (ret != -ENODEV && ret != -ECONNRESET &&
                ret != -ESHUTDOWN && ret != -EPROTO) {
                bool prev_flush_failed = gdrm->prev_flush_failed;

                gud_retry_failed_flush(gdrm, fb, &damage);
                if (!prev_flush_failed)
                    dev_err_ratelimited(fb->dev->dev,
                                "Failed to flush framebuffer: error=%d\n", ret);
            }
            break;
        }

        gdrm->prev_flush_failed = false;
    }

    drm_framebuffer_put(fb);
out:
    drm_dev_exit(idx);
}

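/*
 * Record damage and kick the flush worker. Only one framebuffer is tracked,
 * so the reference to a previously queued framebuffer is dropped.
 */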
static void gud_fb_queue_damage(struct gud_device *gdrm, struct drm_framebuffer *fb,
                struct drm_rect *damage)
{
    struct drm_framebuffer *old_fb = NULL;

    mutex_lock(&gdrm->damage_lock);

    if (fb != gdrm->fb) {
        old_fb = gdrm->fb;
        drm_framebuffer_get(fb);
        gdrm->fb = fb;
    }

    gud_add_damage(gdrm, damage);

    mutex_unlock(&gdrm->damage_lock);

    queue_work(system_long_wq, &gdrm->work);

    if (old_fb)
        drm_framebuffer_put(old_fb);
}

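/*
 * .check hook for the simple display pipe: send the new display state to the
 * device for validation as a GUD_REQ_SET_STATE_CHECK request carrying the
 * mode, transfer format, connector index and property values.
 */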
int gud_pipe_check(struct drm_simple_display_pipe *pipe,
           struct drm_plane_state *new_plane_state,
           struct drm_crtc_state *new_crtc_state)
{
    struct gud_device *gdrm = to_gud_device(pipe->crtc.dev);
    struct drm_plane_state *old_plane_state = pipe->plane.state;
    const struct drm_display_mode *mode = &new_crtc_state->mode;
    struct drm_atomic_state *state = new_plane_state->state;
    struct drm_framebuffer *old_fb = old_plane_state->fb;
    struct drm_connector_state *connector_state = NULL;
    struct drm_framebuffer *fb = new_plane_state->fb;
    const struct drm_format_info *format = fb->format;
    struct drm_connector *connector;
    unsigned int i, num_properties;
    struct gud_state_req *req;
    int idx, ret;
    size_t len;

    if (WARN_ON_ONCE(!fb))
        return -EINVAL;

    if (old_plane_state->rotation != new_plane_state->rotation)
        new_crtc_state->mode_changed = true;

    if (old_fb && old_fb->format != format)
        new_crtc_state->mode_changed = true;

    if (!new_crtc_state->mode_changed && !new_crtc_state->connectors_changed)
        return 0;

    /* Only one connector is supported */
    if (hweight32(new_crtc_state->connector_mask) != 1)
        return -EINVAL;

    if (format->format == DRM_FORMAT_XRGB8888 && gdrm->xrgb8888_emulation_format)
        format = gdrm->xrgb8888_emulation_format;

    for_each_new_connector_in_state(state, connector, connector_state, i) {
        if (connector_state->crtc)
            break;
    }

    /*
     * DRM_IOCTL_MODE_OBJ_SETPROPERTY on the rotation property will not have
     * the connector included in the state.
     */
    if (!connector_state) {
        struct drm_connector_list_iter conn_iter;

        drm_connector_list_iter_begin(pipe->crtc.dev, &conn_iter);
        drm_for_each_connector_iter(connector, &conn_iter) {
            if (connector->state->crtc) {
                connector_state = connector->state;
                break;
            }
        }
        drm_connector_list_iter_end(&conn_iter);
    }

    if (WARN_ON_ONCE(!connector_state))
        return -ENOENT;

    len = struct_size(req, properties,
              GUD_PROPERTIES_MAX_NUM + GUD_CONNECTOR_PROPERTIES_MAX_NUM);
    req = kzalloc(len, GFP_KERNEL);
    if (!req)
        return -ENOMEM;

    gud_from_display_mode(&req->mode, mode);

    req->format = gud_from_fourcc(format->format);
    if (WARN_ON_ONCE(!req->format)) {
        ret = -EINVAL;
        goto out;
    }

    req->connector = drm_connector_index(connector_state->connector);

    ret = gud_connector_fill_properties(connector_state, req->properties);
    if (ret < 0)
        goto out;

    num_properties = ret;
    for (i = 0; i < gdrm->num_properties; i++) {
        u16 prop = gdrm->properties[i];
        u64 val;

        switch (prop) {
        case GUD_PROPERTY_ROTATION:
            /* DRM UAPI matches the protocol so use value directly */
            val = new_plane_state->rotation;
            break;
        default:
            WARN_ON_ONCE(1);
            ret = -EINVAL;
            goto out;
        }

        req->properties[num_properties + i].prop = cpu_to_le16(prop);
        req->properties[num_properties + i].val = cpu_to_le64(val);
        num_properties++;
    }

    if (drm_dev_enter(fb->dev, &idx)) {
        len = struct_size(req, properties, num_properties);
        ret = gud_usb_set(gdrm, GUD_REQ_SET_STATE_CHECK, 0, req, len);
        drm_dev_exit(idx);
    } else {
        ret = -ENODEV;
    }
out:
    kfree(req);

    return ret;
}

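/*
 * .update hook for the simple display pipe: synchronise with the worker and
 * drop pending damage on mode change or disable, toggle controller/display
 * enable, commit the checked state and queue the merged damage. The flush is
 * synchronous when the async_flush module parameter is off.
 */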
void gud_pipe_update(struct drm_simple_display_pipe *pipe,
             struct drm_plane_state *old_state)
{
    struct drm_device *drm = pipe->crtc.dev;
    struct gud_device *gdrm = to_gud_device(drm);
    struct drm_plane_state *state = pipe->plane.state;
    struct drm_framebuffer *fb = state->fb;
    struct drm_crtc *crtc = &pipe->crtc;
    struct drm_rect damage;
    int idx;

    if (crtc->state->mode_changed || !crtc->state->enable) {
        cancel_work_sync(&gdrm->work);
        mutex_lock(&gdrm->damage_lock);
        if (gdrm->fb) {
            drm_framebuffer_put(gdrm->fb);
            gdrm->fb = NULL;
        }
        gud_clear_damage(gdrm);
        mutex_unlock(&gdrm->damage_lock);
    }

    if (!drm_dev_enter(drm, &idx))
        return;

    if (!old_state->fb)
        gud_usb_set_u8(gdrm, GUD_REQ_SET_CONTROLLER_ENABLE, 1);

    if (fb && (crtc->state->mode_changed || crtc->state->connectors_changed))
        gud_usb_set(gdrm, GUD_REQ_SET_STATE_COMMIT, 0, NULL, 0);

    if (crtc->state->active_changed)
        gud_usb_set_u8(gdrm, GUD_REQ_SET_DISPLAY_ENABLE, crtc->state->active);

    if (drm_atomic_helper_damage_merged(old_state, state, &damage)) {
        if (gdrm->flags & GUD_DISPLAY_FLAG_FULL_UPDATE)
            drm_rect_init(&damage, 0, 0, fb->width, fb->height);
        gud_fb_queue_damage(gdrm, fb, &damage);
        if (!gud_async_flush)
            flush_work(&gdrm->work);
    }

    if (!crtc->state->enable)
        gud_usb_set_u8(gdrm, GUD_REQ_SET_CONTROLLER_ENABLE, 0);

    drm_dev_exit(idx);
}