/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2015-2018 Etnaviv Project
 */

#ifndef __ETNAVIV_MMU_H__
#define __ETNAVIV_MMU_H__

/* GPU-side access rights, passed as "prot" to the ->map() callback. */
#define ETNAVIV_PROT_READ (1 << 0)
#define ETNAVIV_PROT_WRITE (1 << 1)

/*
 * Hardware MMU generation of the GPU core; selects which
 * etnaviv_iommu_ops implementation (v1 or v2) is used.
 */
enum etnaviv_iommu_version {
	ETNAVIV_IOMMU_V1 = 0,
	ETNAVIV_IOMMU_V2,
};

/* Opaque driver types; full definitions live in other etnaviv headers. */
struct etnaviv_gpu;
struct etnaviv_vram_mapping;
struct etnaviv_iommu_global;
struct etnaviv_iommu_context;
0021
/*
 * Per-generation MMU backend, implemented by the v1 and v2 page-table
 * code (see etnaviv_iommuv1_ops / etnaviv_iommuv2_ops below).
 */
struct etnaviv_iommu_ops {
	/* Allocate and set up a new address-space context. */
	struct etnaviv_iommu_context *(*init)(struct etnaviv_iommu_global *);
	/* Release all backend resources held by a context. */
	void (*free)(struct etnaviv_iommu_context *);
	/*
	 * Map "size" bytes of physically contiguous memory at "iova";
	 * "prot" is a mask of ETNAVIV_PROT_* flags.
	 */
	int (*map)(struct etnaviv_iommu_context *context, unsigned long iova,
		   phys_addr_t paddr, size_t size, int prot);
	/* Unmap the range; returns the number of bytes unmapped. */
	size_t (*unmap)(struct etnaviv_iommu_context *context, unsigned long iova,
			size_t size);
	/* Buffer size needed by ->dump() for this context's page tables. */
	size_t (*dump_size)(struct etnaviv_iommu_context *);
	/* Copy the context's page tables into the caller-provided buffer. */
	void (*dump)(struct etnaviv_iommu_context *, void *);
	/* Make the GPU use this context (see etnaviv_iommu_restore()). */
	void (*restore)(struct etnaviv_gpu *, struct etnaviv_iommu_context *);
};
0033
/* Backend implementations, one per MMU generation. */
extern const struct etnaviv_iommu_ops etnaviv_iommuv1_ops;
extern const struct etnaviv_iommu_ops etnaviv_iommuv2_ops;

/* P(age) T(able) A(rray): one u64 entry per MMUv2 context. */
#define ETNAVIV_PTA_SIZE	SZ_4K
#define ETNAVIV_PTA_ENTRIES	(ETNAVIV_PTA_SIZE / sizeof(u64))
0039
/*
 * Device-global MMU state shared by all address-space contexts.
 * Which union member is valid depends on ->version.
 */
struct etnaviv_iommu_global {
	struct device *dev;
	enum etnaviv_iommu_version version;
	const struct etnaviv_iommu_ops *ops;
	unsigned int use;	/* usage count; presumably guarded by lock — confirm in etnaviv_mmu.c */
	struct mutex lock;

	/*
	 * DMA-mapped scratch page; NOTE(review): name suggests it is the
	 * target for stray/faulting GPU accesses — confirm against the
	 * page-table setup code.
	 */
	void *bad_page_cpu;
	dma_addr_t bad_page_dma;

	/* NOTE(review): looks like the GPU-visible linear window base — confirm. */
	u32 memory_base;

	union {
		struct {
			/* MMUv1 provides only a single, shared address space. */
			struct etnaviv_iommu_context *shared_context;
		} v1;
		struct {
			/* P(age) T(able) A(rray) backing store and slot allocator. */
			u64 *pta_cpu;
			dma_addr_t pta_dma;
			struct spinlock pta_lock;
			DECLARE_BITMAP(pta_alloc, ETNAVIV_PTA_ENTRIES);
		} v2;
	};
};
0069
/*
 * One GPU virtual address space: refcounted, with a drm_mm allocator
 * managing the IOVA range and a list of buffer mappings within it.
 */
struct etnaviv_iommu_context {
	struct kref refcount;
	struct etnaviv_iommu_global *global;

	struct mutex lock;	/* presumably protects mappings/mm — confirm in etnaviv_mmu.c */
	struct list_head mappings;
	struct drm_mm mm;
	/* NOTE(review): appears to be bumped when mappings change, so users can
	 * detect a needed GPU TLB flush — confirm against the flush logic. */
	unsigned int flush_seq;

	/* Mapping of the command buffer suballocator in this address space. */
	struct etnaviv_vram_mapping cmdbuf_mapping;
};
0083
/* Set up / tear down the device-global MMU state for this GPU. */
int etnaviv_iommu_global_init(struct etnaviv_gpu *gpu);
void etnaviv_iommu_global_fini(struct etnaviv_gpu *gpu);

struct etnaviv_gem_object;

/*
 * Map/unmap a GEM object in the given context; per-mapping state is
 * kept in *mapping. "va" is the requested GPU virtual address —
 * NOTE(review): semantics of va (e.g. va == 0) live in etnaviv_mmu.c.
 */
int etnaviv_iommu_map_gem(struct etnaviv_iommu_context *context,
	struct etnaviv_gem_object *etnaviv_obj, u32 memory_base,
	struct etnaviv_vram_mapping *mapping, u64 va);
void etnaviv_iommu_unmap_gem(struct etnaviv_iommu_context *context,
	struct etnaviv_vram_mapping *mapping);

/* Map/unmap a physically contiguous suballocation (paddr/size) into ctx. */
int etnaviv_iommu_get_suballoc_va(struct etnaviv_iommu_context *ctx,
				  struct etnaviv_vram_mapping *mapping,
				  u32 memory_base, dma_addr_t paddr,
				  size_t size);
void etnaviv_iommu_put_suballoc_va(struct etnaviv_iommu_context *ctx,
				   struct etnaviv_vram_mapping *mapping);

/* Page-table dump helpers — presumably wrap ->dump_size()/->dump(); confirm. */
size_t etnaviv_iommu_dump_size(struct etnaviv_iommu_context *ctx);
void etnaviv_iommu_dump(struct etnaviv_iommu_context *ctx, void *buf);

/* Create a new address-space context on top of the global MMU state. */
struct etnaviv_iommu_context *
etnaviv_iommu_context_init(struct etnaviv_iommu_global *global,
			   struct etnaviv_cmdbuf_suballoc *suballoc);
0108 static inline struct etnaviv_iommu_context *
0109 etnaviv_iommu_context_get(struct etnaviv_iommu_context *ctx)
0110 {
0111 kref_get(&ctx->refcount);
0112 return ctx;
0113 }
/* Release a reference obtained via etnaviv_iommu_context_get()/_init(). */
void etnaviv_iommu_context_put(struct etnaviv_iommu_context *ctx);
/* Point the GPU MMU at ctx — presumably wraps ops->restore(); confirm. */
void etnaviv_iommu_restore(struct etnaviv_gpu *gpu,
			   struct etnaviv_iommu_context *ctx);

/* Version-specific context constructors. */
struct etnaviv_iommu_context *
etnaviv_iommuv1_context_alloc(struct etnaviv_iommu_global *global);
struct etnaviv_iommu_context *
etnaviv_iommuv2_context_alloc(struct etnaviv_iommu_global *global);

/* MMUv2 accessors: master TLB address and the context's PTA slot index. */
u32 etnaviv_iommuv2_get_mtlb_addr(struct etnaviv_iommu_context *context);
unsigned short etnaviv_iommuv2_get_pta_id(struct etnaviv_iommu_context *context);

#endif /* __ETNAVIV_MMU_H__ */