#ifndef __NVKM_VMM_H__
#define __NVKM_VMM_H__
#include "priv.h"
#include <core/memory.h>
enum nvkm_memory_target;

struct nvkm_vmm_pt {
	/* Some GPUs have a mapping level with dual page tables to
	 * support both large and small pages in the same address-space.
	 *
	 * The state of both page tables is tracked in one place, which
	 * is why there are multiple PT pointers/refcounts here.
	 */
	struct nvkm_mmu_pt *pt[2];
	u32 refs[2];

	/* Page size handled by this PT.
	 *
	 * The Tesla backend needs to know this when writing PDEs;
	 * it's unnecessary otherwise.
	 */
	u8 page;

	/* Entire page table sparse.
	 *
	 * Used to propagate sparseness to child page tables.
	 */
	bool sparse:1;

	/* Tracking for page directories.
	 *
	 * The array is indexed by PDE, and each entry either points to
	 * the child page table, or marks the PDE as sparse.
	 */
#define NVKM_VMM_PDE_INVALID(pde) IS_ERR_OR_NULL(pde)
#define NVKM_VMM_PDE_SPARSED(pde) IS_ERR(pde)
#define NVKM_VMM_PDE_SPARSE ERR_PTR(-EBUSY)
	struct nvkm_vmm_pt **pde;

	/* Tracking for dual page tables.
	 *
	 * There's one entry for each LPTE, keeping track of whether
	 * there are valid SPTEs in the same page-region (which would
	 * need to be over-ridden by the LPTE when mapped).
	 */
#define NVKM_VMM_PTE_SPARSE 0x80
#define NVKM_VMM_PTE_VALID 0x40
#define NVKM_VMM_PTE_SPTES 0x3f
	u8 pte[];
};
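
/* Illustrative sketch (not part of this interface): one way the dual-PT
 * tracking bytes above can be read.  The low bits count valid SPTEs within
 * an LPTE's region, and the flag bits mark the LPTE itself as valid and/or
 * sparse:
 *
 *	u8 map = pgt->pte[lptei];
 *	u32 sptes = map & NVKM_VMM_PTE_SPTES;	 (valid SPTEs in this region)
 *	bool valid = map & NVKM_VMM_PTE_VALID;	 (LPTE itself is mapped)
 *	bool sparse = map & NVKM_VMM_PTE_SPARSE; (LPTE marked sparse)
 *
 * The names "pgt" and "lptei" are placeholders, not symbols from this header.
 */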

typedef void (*nvkm_vmm_pxe_func)(struct nvkm_vmm *,
				  struct nvkm_mmu_pt *, u32 ptei, u32 ptes);
typedef void (*nvkm_vmm_pde_func)(struct nvkm_vmm *,
				  struct nvkm_vmm_pt *, u32 pdei);
typedef void (*nvkm_vmm_pte_func)(struct nvkm_vmm *, struct nvkm_mmu_pt *,
				  u32 ptei, u32 ptes, struct nvkm_vmm_map *);

struct nvkm_vmm_desc_func {
	nvkm_vmm_pxe_func invalid;
	nvkm_vmm_pxe_func unmap;
	nvkm_vmm_pxe_func sparse;

	nvkm_vmm_pde_func pde;

	nvkm_vmm_pte_func mem;
	nvkm_vmm_pte_func dma;
	nvkm_vmm_pte_func sgl;

	nvkm_vmm_pte_func pfn;
	bool (*pfn_clear)(struct nvkm_vmm *, struct nvkm_mmu_pt *, u32 ptei, u32 ptes);
	nvkm_vmm_pxe_func pfn_unmap;
};

extern const struct nvkm_vmm_desc_func gf100_vmm_pgd;
void gf100_vmm_pgd_pde(struct nvkm_vmm *, struct nvkm_vmm_pt *, u32);
extern const struct nvkm_vmm_desc_func gf100_vmm_pgt;
void gf100_vmm_pgt_unmap(struct nvkm_vmm *, struct nvkm_mmu_pt *, u32, u32);
void gf100_vmm_pgt_mem(struct nvkm_vmm *, struct nvkm_mmu_pt *, u32, u32,
		       struct nvkm_vmm_map *);
void gf100_vmm_pgt_dma(struct nvkm_vmm *, struct nvkm_mmu_pt *, u32, u32,
		       struct nvkm_vmm_map *);
void gf100_vmm_pgt_sgl(struct nvkm_vmm *, struct nvkm_mmu_pt *, u32, u32,
		       struct nvkm_vmm_map *);

void gk104_vmm_lpt_invalid(struct nvkm_vmm *, struct nvkm_mmu_pt *, u32, u32);

struct nvkm_vmm_desc {
	enum {
		PGD,
		PGT,
		SPT,
		LPT,
	} type;
	u8 bits;	/* VMA bits covered by PT. */
	u8 size;	/* Bytes-per-PTE. */
	u32 align;	/* PT address alignment. */
	const struct nvkm_vmm_desc_func *func;
};

extern const struct nvkm_vmm_desc nv50_vmm_desc_12[];
extern const struct nvkm_vmm_desc nv50_vmm_desc_16[];

extern const struct nvkm_vmm_desc gk104_vmm_desc_16_12[];
extern const struct nvkm_vmm_desc gk104_vmm_desc_16_16[];
extern const struct nvkm_vmm_desc gk104_vmm_desc_17_12[];
extern const struct nvkm_vmm_desc gk104_vmm_desc_17_17[];

extern const struct nvkm_vmm_desc gm200_vmm_desc_16_12[];
extern const struct nvkm_vmm_desc gm200_vmm_desc_16_16[];
extern const struct nvkm_vmm_desc gm200_vmm_desc_17_12[];
extern const struct nvkm_vmm_desc gm200_vmm_desc_17_17[];

extern const struct nvkm_vmm_desc gp100_vmm_desc_12[];
extern const struct nvkm_vmm_desc gp100_vmm_desc_16[];

struct nvkm_vmm_page {
	u8 shift;
	const struct nvkm_vmm_desc *desc;
#define NVKM_VMM_PAGE_SPARSE 0x01
#define NVKM_VMM_PAGE_VRAM 0x02
#define NVKM_VMM_PAGE_HOST 0x04
#define NVKM_VMM_PAGE_COMP 0x08
#define NVKM_VMM_PAGE_Sxxx (NVKM_VMM_PAGE_SPARSE)
#define NVKM_VMM_PAGE_xVxx (NVKM_VMM_PAGE_VRAM)
#define NVKM_VMM_PAGE_SVxx (NVKM_VMM_PAGE_Sxxx | NVKM_VMM_PAGE_VRAM)
#define NVKM_VMM_PAGE_xxHx (NVKM_VMM_PAGE_HOST)
#define NVKM_VMM_PAGE_SxHx (NVKM_VMM_PAGE_Sxxx | NVKM_VMM_PAGE_HOST)
#define NVKM_VMM_PAGE_xVHx (NVKM_VMM_PAGE_xVxx | NVKM_VMM_PAGE_HOST)
#define NVKM_VMM_PAGE_SVHx (NVKM_VMM_PAGE_SVxx | NVKM_VMM_PAGE_HOST)
#define NVKM_VMM_PAGE_xVxC (NVKM_VMM_PAGE_xVxx | NVKM_VMM_PAGE_COMP)
#define NVKM_VMM_PAGE_SVxC (NVKM_VMM_PAGE_SVxx | NVKM_VMM_PAGE_COMP)
#define NVKM_VMM_PAGE_xxHC (NVKM_VMM_PAGE_xxHx | NVKM_VMM_PAGE_COMP)
#define NVKM_VMM_PAGE_SxHC (NVKM_VMM_PAGE_SxHx | NVKM_VMM_PAGE_COMP)
	u8 type;
};

struct nvkm_vmm_func {
	int (*join)(struct nvkm_vmm *, struct nvkm_memory *inst);
	void (*part)(struct nvkm_vmm *, struct nvkm_memory *inst);

	int (*aper)(enum nvkm_memory_target);
	int (*valid)(struct nvkm_vmm *, void *argv, u32 argc,
		     struct nvkm_vmm_map *);
	void (*flush)(struct nvkm_vmm *, int depth);

	int (*mthd)(struct nvkm_vmm *, struct nvkm_client *,
		    u32 mthd, void *argv, u32 argc);

	void (*invalidate_pdb)(struct nvkm_vmm *, u64 addr);

	u64 page_block;
	const struct nvkm_vmm_page page[];
};
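
/* Illustrative sketch (assumed, not copied from a specific backend): the
 * page[] array above is filled out per chipset with one {shift, desc, type}
 * entry for each supported page size, terminated by an empty entry,
 * roughly:
 *
 *	.page = {
 *		{ 16, &xxxx_vmm_desc_16[0], NVKM_VMM_PAGE_xVxC },
 *		{ 12, &xxxx_vmm_desc_12[0], NVKM_VMM_PAGE_xVHx },
 *		{}
 *	},
 *
 * where "xxxx" stands in for a chipset prefix and the type flags are
 * example values only.
 */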

struct nvkm_vmm_join {
	struct nvkm_memory *inst;
	struct list_head head;
};

int nvkm_vmm_new_(const struct nvkm_vmm_func *, struct nvkm_mmu *,
		  u32 pd_header, bool managed, u64 addr, u64 size,
		  struct lock_class_key *, const char *name,
		  struct nvkm_vmm **);
struct nvkm_vma *nvkm_vmm_node_search(struct nvkm_vmm *, u64 addr);
struct nvkm_vma *nvkm_vmm_node_split(struct nvkm_vmm *, struct nvkm_vma *,
				     u64 addr, u64 size);
int nvkm_vmm_get_locked(struct nvkm_vmm *, bool getref, bool mapref,
			bool sparse, u8 page, u8 align, u64 size,
			struct nvkm_vma **pvma);
void nvkm_vmm_put_locked(struct nvkm_vmm *, struct nvkm_vma *);
void nvkm_vmm_unmap_locked(struct nvkm_vmm *, struct nvkm_vma *, bool pfn);
void nvkm_vmm_unmap_region(struct nvkm_vmm *, struct nvkm_vma *);

#define NVKM_VMM_PFN_ADDR 0xfffffffffffff000ULL
#define NVKM_VMM_PFN_ADDR_SHIFT 12
#define NVKM_VMM_PFN_APER 0x00000000000000f0ULL
#define NVKM_VMM_PFN_HOST 0x0000000000000000ULL
#define NVKM_VMM_PFN_VRAM 0x0000000000000010ULL
#define NVKM_VMM_PFN_A 0x0000000000000004ULL
#define NVKM_VMM_PFN_W 0x0000000000000002ULL
#define NVKM_VMM_PFN_V 0x0000000000000001ULL
#define NVKM_VMM_PFN_NONE 0x0000000000000000ULL
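
/* Illustrative sketch (assumed encoding, per the masks above): a pfn entry
 * handed to nvkm_vmm_pfn_map() packs the page frame number into the address
 * field and ORs in the aperture and permission bits, e.g. a valid, writable
 * VRAM page:
 *
 *	u64 pfn = (paddr >> PAGE_SHIFT) << NVKM_VMM_PFN_ADDR_SHIFT;
 *	pfn |= NVKM_VMM_PFN_VRAM | NVKM_VMM_PFN_W | NVKM_VMM_PFN_V;
 *
 * "paddr" is a placeholder for the page-aligned physical address being
 * mapped.
 */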

int nvkm_vmm_pfn_map(struct nvkm_vmm *, u8 page, u64 addr, u64 size, u64 *pfn);
int nvkm_vmm_pfn_unmap(struct nvkm_vmm *, u64 addr, u64 size);

struct nvkm_vma *nvkm_vma_tail(struct nvkm_vma *, u64 tail);

int nv04_vmm_new_(const struct nvkm_vmm_func *, struct nvkm_mmu *, u32,
		  bool, u64, u64, void *, u32, struct lock_class_key *,
		  const char *, struct nvkm_vmm **);
int nv04_vmm_valid(struct nvkm_vmm *, void *, u32, struct nvkm_vmm_map *);

int nv50_vmm_join(struct nvkm_vmm *, struct nvkm_memory *);
void nv50_vmm_part(struct nvkm_vmm *, struct nvkm_memory *);
int nv50_vmm_valid(struct nvkm_vmm *, void *, u32, struct nvkm_vmm_map *);
void nv50_vmm_flush(struct nvkm_vmm *, int);

int gf100_vmm_new_(const struct nvkm_vmm_func *, const struct nvkm_vmm_func *,
		   struct nvkm_mmu *, bool, u64, u64, void *, u32,
		   struct lock_class_key *, const char *, struct nvkm_vmm **);
int gf100_vmm_join_(struct nvkm_vmm *, struct nvkm_memory *, u64 base);
int gf100_vmm_join(struct nvkm_vmm *, struct nvkm_memory *);
void gf100_vmm_part(struct nvkm_vmm *, struct nvkm_memory *);
int gf100_vmm_aper(enum nvkm_memory_target);
int gf100_vmm_valid(struct nvkm_vmm *, void *, u32, struct nvkm_vmm_map *);
void gf100_vmm_flush(struct nvkm_vmm *, int);
void gf100_vmm_invalidate(struct nvkm_vmm *, u32 type);
void gf100_vmm_invalidate_pdb(struct nvkm_vmm *, u64 addr);

int gk20a_vmm_aper(enum nvkm_memory_target);

int gm200_vmm_new_(const struct nvkm_vmm_func *, const struct nvkm_vmm_func *,
		   struct nvkm_mmu *, bool, u64, u64, void *, u32,
		   struct lock_class_key *, const char *, struct nvkm_vmm **);
int gm200_vmm_join_(struct nvkm_vmm *, struct nvkm_memory *, u64 base);
int gm200_vmm_join(struct nvkm_vmm *, struct nvkm_memory *);

int gp100_vmm_new_(const struct nvkm_vmm_func *,
		   struct nvkm_mmu *, bool, u64, u64, void *, u32,
		   struct lock_class_key *, const char *, struct nvkm_vmm **);
int gp100_vmm_join(struct nvkm_vmm *, struct nvkm_memory *);
int gp100_vmm_valid(struct nvkm_vmm *, void *, u32, struct nvkm_vmm_map *);
void gp100_vmm_flush(struct nvkm_vmm *, int);
int gp100_vmm_mthd(struct nvkm_vmm *, struct nvkm_client *, u32, void *, u32);
void gp100_vmm_invalidate_pdb(struct nvkm_vmm *, u64 addr);

int gv100_vmm_join(struct nvkm_vmm *, struct nvkm_memory *);

int nv04_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		 struct lock_class_key *, const char *, struct nvkm_vmm **);
int nv41_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		 struct lock_class_key *, const char *, struct nvkm_vmm **);
int nv44_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		 struct lock_class_key *, const char *, struct nvkm_vmm **);
int nv50_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		 struct lock_class_key *, const char *, struct nvkm_vmm **);
int mcp77_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		  struct lock_class_key *, const char *, struct nvkm_vmm **);
int g84_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		struct lock_class_key *, const char *, struct nvkm_vmm **);
int gf100_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		  struct lock_class_key *, const char *, struct nvkm_vmm **);
int gk104_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		  struct lock_class_key *, const char *, struct nvkm_vmm **);
int gk20a_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		  struct lock_class_key *, const char *, struct nvkm_vmm **);
int gm200_vmm_new_fixed(struct nvkm_mmu *, bool, u64, u64, void *, u32,
			struct lock_class_key *, const char *,
			struct nvkm_vmm **);
int gm200_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		  struct lock_class_key *, const char *,
		  struct nvkm_vmm **);
int gm20b_vmm_new_fixed(struct nvkm_mmu *, bool, u64, u64, void *, u32,
			struct lock_class_key *, const char *,
			struct nvkm_vmm **);
int gm20b_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		  struct lock_class_key *, const char *,
		  struct nvkm_vmm **);
int gp100_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		  struct lock_class_key *, const char *,
		  struct nvkm_vmm **);
int gp10b_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		  struct lock_class_key *, const char *,
		  struct nvkm_vmm **);
int gv100_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		  struct lock_class_key *, const char *,
		  struct nvkm_vmm **);
int tu102_vmm_new(struct nvkm_mmu *, bool, u64, u64, void *, u32,
		  struct lock_class_key *, const char *,
		  struct nvkm_vmm **);

#define VMM_PRINT(l,v,p,f,a...) do { \
	struct nvkm_vmm *_vmm = (v); \
	if (CONFIG_NOUVEAU_DEBUG >= (l) && _vmm->debug >= (l)) { \
		nvkm_printk_(&_vmm->mmu->subdev, 0, p, "%s: "f"\n", \
			     _vmm->name, ##a); \
	} \
} while(0)
#define VMM_DEBUG(v,f,a...) VMM_PRINT(NV_DBG_DEBUG, (v), info, f, ##a)
#define VMM_TRACE(v,f,a...) VMM_PRINT(NV_DBG_TRACE, (v), info, f, ##a)
#define VMM_SPAM(v,f,a...) VMM_PRINT(NV_DBG_SPAM , (v), dbg, f, ##a)
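
/* Usage sketch (illustrative only): each wrapper takes a vmm pointer plus
 * printf-style arguments, and only prints when both the compile-time
 * CONFIG_NOUVEAU_DEBUG level and the per-VMM debug level reach the message
 * level, e.g.:
 *
 *	VMM_DEBUG(vmm, "no matching page size for 0x%llx", size);
 *	VMM_SPAM(vmm, "flush %08x", type);
 *
 * The format strings here are examples, not messages used elsewhere in the
 * driver.
 */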

#define VMM_MAP_ITER(VMM,PT,PTEI,PTEN,MAP,FILL,BASE,SIZE,NEXT) do { \
	nvkm_kmap((PT)->memory); \
	while (PTEN) { \
		u64 _ptes = ((SIZE) - MAP->off) >> MAP->page->shift; \
		u64 _addr = ((BASE) + MAP->off); \
\
		if (_ptes > PTEN) { \
			MAP->off += PTEN << MAP->page->shift; \
			_ptes = PTEN; \
		} else { \
			MAP->off = 0; \
			NEXT; \
		} \
\
		VMM_SPAM(VMM, "ITER %08x %08x PTE(s)", PTEI, (u32)_ptes); \
\
		FILL(VMM, PT, PTEI, _ptes, MAP, _addr); \
		PTEI += _ptes; \
		PTEN -= _ptes; \
	} \
	nvkm_done((PT)->memory); \
} while(0)

#define VMM_MAP_ITER_MEM(VMM,PT,PTEI,PTEN,MAP,FILL) \
	VMM_MAP_ITER(VMM,PT,PTEI,PTEN,MAP,FILL, \
		     ((u64)MAP->mem->offset << NVKM_RAM_MM_SHIFT), \
		     ((u64)MAP->mem->length << NVKM_RAM_MM_SHIFT), \
		     (MAP->mem = MAP->mem->next))
#define VMM_MAP_ITER_DMA(VMM,PT,PTEI,PTEN,MAP,FILL) \
	VMM_MAP_ITER(VMM,PT,PTEI,PTEN,MAP,FILL, \
		     *MAP->dma, PAGE_SIZE, MAP->dma++)
#define VMM_MAP_ITER_SGL(VMM,PT,PTEI,PTEN,MAP,FILL) \
	VMM_MAP_ITER(VMM,PT,PTEI,PTEN,MAP,FILL, \
		     sg_dma_address(MAP->sgl), sg_dma_len(MAP->sgl), \
		     (MAP->sgl = sg_next(MAP->sgl)))
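
/* Illustrative sketch (assumed shape; see the gf100_vmm_pgt_sgl() declaration
 * above): a backend's nvkm_vmm_pte_func typically just hands its per-PTE fill
 * helper to one of the iterators, roughly:
 *
 *	void
 *	xxxx_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
 *			 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
 *	{
 *		VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, xxxx_vmm_pgt_pte);
 *	}
 *
 * where xxxx_vmm_pgt_pte is a hypothetical helper that writes one run of
 * PTEs starting at the address passed by the iterator.
 */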

#define VMM_FO(m,o,d,c,b) nvkm_fo##b((m)->memory, (o), (d), (c))
#define VMM_WO(m,o,d,c,b) nvkm_wo##b((m)->memory, (o), (d))
#define VMM_XO(m,v,o,d,c,b,fn,f,a...) do { \
	const u32 _pteo = (o); u##b _data = (d); \
	VMM_SPAM((v), " %010llx "f, (m)->addr + _pteo, _data, ##a); \
	VMM_##fn((m), (m)->base + _pteo, _data, (c), b); \
} while(0)

#define VMM_WO032(m,v,o,d) VMM_XO((m),(v),(o),(d), 1, 32, WO, "%08x")
#define VMM_FO032(m,v,o,d,c) \
	VMM_XO((m),(v),(o),(d),(c), 32, FO, "%08x %08x", (c))

#define VMM_WO064(m,v,o,d) VMM_XO((m),(v),(o),(d), 1, 64, WO, "%016llx")
#define VMM_FO064(m,v,o,d,c) \
	VMM_XO((m),(v),(o),(d),(c), 64, FO, "%016llx %08x", (c))

#define VMM_XO128(m,v,o,lo,hi,c,f,a...) do { \
	u32 _pteo = (o), _ptes = (c); \
	const u64 _addr = (m)->addr + _pteo; \
	VMM_SPAM((v), " %010llx %016llx%016llx"f, _addr, (hi), (lo), ##a); \
	while (_ptes--) { \
		nvkm_wo64((m)->memory, (m)->base + _pteo + 0, (lo)); \
		nvkm_wo64((m)->memory, (m)->base + _pteo + 8, (hi)); \
		_pteo += 0x10; \
	} \
} while(0)

#define VMM_WO128(m,v,o,lo,hi) VMM_XO128((m),(v),(o),(lo),(hi), 1, "")
#define VMM_FO128(m,v,o,lo,hi,c) do { \
	nvkm_kmap((m)->memory); \
	VMM_XO128((m),(v),(o),(lo),(hi),(c), " %08x", (c)); \
	nvkm_done((m)->memory); \
} while(0)
#endif