#include <drm/drm_prime.h>
#include <linux/dma-buf.h>
#include <linux/module.h>

#include "etnaviv_drv.h"
#include "etnaviv_gem.h"

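/* The dma-buf interfaces used below are exported in the DMA_BUF symbol namespace. */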
MODULE_IMPORT_NS(DMA_BUF);

static struct lock_class_key etnaviv_prime_lock_class;

struct sg_table *etnaviv_gem_prime_get_sg_table(struct drm_gem_object *obj)
{
	struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
	int npages = obj->size >> PAGE_SHIFT;

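	/* Backing pages are populated by etnaviv_gem_prime_pin() before export. */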
	if (WARN_ON(!etnaviv_obj->pages))
		return ERR_PTR(-EINVAL);

	return drm_prime_pages_to_sg(obj->dev, etnaviv_obj->pages, npages);
}

int etnaviv_gem_prime_vmap(struct drm_gem_object *obj, struct iosys_map *map)
{
	void *vaddr;

	vaddr = etnaviv_gem_vmap(obj);
	if (!vaddr)
		return -ENOMEM;
	iosys_map_set_vaddr(map, vaddr);

	return 0;
}

int etnaviv_gem_prime_pin(struct drm_gem_object *obj)
{
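	/* Imported buffers are backed by the exporter's pages; only pin native objects. */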
	if (!obj->import_attach) {
		struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);

		mutex_lock(&etnaviv_obj->lock);
		etnaviv_gem_get_pages(etnaviv_obj);
		mutex_unlock(&etnaviv_obj->lock);
	}
	return 0;
}

void etnaviv_gem_prime_unpin(struct drm_gem_object *obj)
{
	if (!obj->import_attach) {
		struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);

		mutex_lock(&etnaviv_obj->lock);
		etnaviv_gem_put_pages(etnaviv_obj);
		mutex_unlock(&etnaviv_obj->lock);
	}
}

static void etnaviv_gem_prime_release(struct etnaviv_gem_object *etnaviv_obj)
{
	struct iosys_map map = IOSYS_MAP_INIT_VADDR(etnaviv_obj->vaddr);

	if (etnaviv_obj->vaddr)
		dma_buf_vunmap(etnaviv_obj->base.import_attach->dmabuf, &map);

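	/*
	 * The pages backing an imported dmabuf belong to the exporter, so
	 * only free the page array allocated at import time.
	 */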
	kvfree(etnaviv_obj->pages);

	drm_prime_gem_destroy(&etnaviv_obj->base, etnaviv_obj->sgt);
}

static void *etnaviv_gem_prime_vmap_impl(struct etnaviv_gem_object *etnaviv_obj)
{
	struct iosys_map map;
	int ret;

	lockdep_assert_held(&etnaviv_obj->lock);

	ret = dma_buf_vmap(etnaviv_obj->base.import_attach->dmabuf, &map);
	if (ret)
		return NULL;
	return map.vaddr;
}

static int etnaviv_gem_prime_mmap_obj(struct etnaviv_gem_object *etnaviv_obj,
		struct vm_area_struct *vma)
{
	return dma_buf_mmap(etnaviv_obj->base.dma_buf, vma, 0);
}

static const struct etnaviv_gem_ops etnaviv_gem_prime_ops = {
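	/* .get_pages should never be called for imported buffers */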
	.release = etnaviv_gem_prime_release,
	.vmap = etnaviv_gem_prime_vmap_impl,
	.mmap = etnaviv_gem_prime_mmap_obj,
};

struct drm_gem_object *etnaviv_gem_prime_import_sg_table(struct drm_device *dev,
	struct dma_buf_attachment *attach, struct sg_table *sgt)
{
	struct etnaviv_gem_object *etnaviv_obj;
	size_t size = PAGE_ALIGN(attach->dmabuf->size);
	int ret, npages;

	ret = etnaviv_gem_new_private(dev, size, ETNA_BO_WC,
				      &etnaviv_gem_prime_ops, &etnaviv_obj);
	if (ret < 0)
		return ERR_PTR(ret);

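	/*
	 * Put imported objects in their own lockdep class, as their object
	 * lock nests differently than that of natively allocated objects.
	 */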
	lockdep_set_class(&etnaviv_obj->lock, &etnaviv_prime_lock_class);

	npages = size / PAGE_SIZE;

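	/*
	 * Keep the sg table and build a page array from it, so imported
	 * buffers can be handled like natively allocated ones.
	 */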
	etnaviv_obj->sgt = sgt;
	etnaviv_obj->pages = kvmalloc_array(npages, sizeof(struct page *), GFP_KERNEL);
	if (!etnaviv_obj->pages) {
		ret = -ENOMEM;
		goto fail;
	}

	ret = drm_prime_sg_to_page_array(sgt, etnaviv_obj->pages, npages);
	if (ret)
		goto fail;

	etnaviv_gem_obj_add(dev, &etnaviv_obj->base);

	return &etnaviv_obj->base;

fail:
	drm_gem_object_put(&etnaviv_obj->base);

	return ERR_PTR(ret);
}