/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 */

#ifndef __I915_VMA_H__
#define __I915_VMA_H__

#include <linux/io-mapping.h>
#include <linux/rbtree.h>

#include <drm/drm_mm.h>

#include "gt/intel_ggtt_fencing.h"
#include "gem/i915_gem_object.h"

#include "i915_gem_gtt.h"

#include "i915_active.h"
#include "i915_request.h"
#include "i915_vma_resource.h"
#include "i915_vma_types.h"

struct i915_vma *
i915_vma_instance(struct drm_i915_gem_object *obj,
		  struct i915_address_space *vm,
		  const struct i915_ggtt_view *view);

void i915_vma_unpin_and_release(struct i915_vma **p_vma, unsigned int flags);
#define I915_VMA_RELEASE_MAP BIT(0)

static inline bool i915_vma_is_active(const struct i915_vma *vma)
{
	return !i915_active_is_idle(&vma->active);
}

/* do not reserve memory to prevent deadlocks */
#define __EXEC_OBJECT_NO_RESERVE BIT(31)

int __must_check _i915_vma_move_to_active(struct i915_vma *vma,
					  struct i915_request *rq,
					  struct dma_fence *fence,
					  unsigned int flags);
static inline int __must_check
i915_vma_move_to_active(struct i915_vma *vma, struct i915_request *rq,
			unsigned int flags)
{
	return _i915_vma_move_to_active(vma, rq, &rq->fence, flags);
}
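
/*
 * Usage sketch (illustrative only, not part of this interface):
 * publishing a GPU write to a vma on a request. EXEC_OBJECT_WRITE is
 * the execbuf uapi flag routed through @flags; "vma", "rq" and "err"
 * stand in for caller state.
 *
 *	i915_vma_lock(vma);
 *	err = i915_vma_move_to_active(vma, rq, EXEC_OBJECT_WRITE);
 *	i915_vma_unlock(vma);
 */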

#define __i915_vma_flags(v) ((unsigned long *)&(v)->flags.counter)

static inline bool i915_vma_is_ggtt(const struct i915_vma *vma)
{
	return test_bit(I915_VMA_GGTT_BIT, __i915_vma_flags(vma));
}

static inline bool i915_vma_is_dpt(const struct i915_vma *vma)
{
	return i915_is_dpt(vma->vm);
}

static inline bool i915_vma_has_ggtt_write(const struct i915_vma *vma)
{
	return test_bit(I915_VMA_GGTT_WRITE_BIT, __i915_vma_flags(vma));
}

static inline void i915_vma_set_ggtt_write(struct i915_vma *vma)
{
	GEM_BUG_ON(!i915_vma_is_ggtt(vma));
	set_bit(I915_VMA_GGTT_WRITE_BIT, __i915_vma_flags(vma));
}

static inline bool i915_vma_unset_ggtt_write(struct i915_vma *vma)
{
	return test_and_clear_bit(I915_VMA_GGTT_WRITE_BIT,
				  __i915_vma_flags(vma));
}

void i915_vma_flush_writes(struct i915_vma *vma);

static inline bool i915_vma_is_map_and_fenceable(const struct i915_vma *vma)
{
	return test_bit(I915_VMA_CAN_FENCE_BIT, __i915_vma_flags(vma));
}

static inline bool i915_vma_set_userfault(struct i915_vma *vma)
{
	GEM_BUG_ON(!i915_vma_is_map_and_fenceable(vma));
	return test_and_set_bit(I915_VMA_USERFAULT_BIT, __i915_vma_flags(vma));
}
static inline void i915_vma_unset_userfault(struct i915_vma *vma)
{
	clear_bit(I915_VMA_USERFAULT_BIT, __i915_vma_flags(vma));
}

static inline bool i915_vma_has_userfault(const struct i915_vma *vma)
{
	return test_bit(I915_VMA_USERFAULT_BIT, __i915_vma_flags(vma));
}

static inline bool i915_vma_is_closed(const struct i915_vma *vma)
{
	return !list_empty(&vma->closed_link);
}

static inline u32 i915_ggtt_offset(const struct i915_vma *vma)
{
	GEM_BUG_ON(!i915_vma_is_ggtt(vma));
	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
	GEM_BUG_ON(upper_32_bits(vma->node.start));
	GEM_BUG_ON(upper_32_bits(vma->node.start + vma->node.size - 1));
	return lower_32_bits(vma->node.start);
}
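
/*
 * Usage sketch (illustrative): the GEM_BUG_ONs above guarantee the
 * binding fits entirely below 4GiB, so the returned offset can be
 * emitted as a single dword into ring or batch commands:
 *
 *	u32 *cs = ...; // command emission, elided
 *
 *	*cs++ = i915_ggtt_offset(vma); // always fits in one dword
 */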

static inline u32 i915_ggtt_pin_bias(struct i915_vma *vma)
{
	return i915_vm_to_ggtt(vma->vm)->pin_bias;
}

static inline struct i915_vma *i915_vma_get(struct i915_vma *vma)
{
	i915_gem_object_get(vma->obj);
	return vma;
}

static inline struct i915_vma *i915_vma_tryget(struct i915_vma *vma)
{
	if (likely(kref_get_unless_zero(&vma->obj->base.refcount)))
		return vma;

	return NULL;
}

static inline void i915_vma_put(struct i915_vma *vma)
{
	i915_gem_object_put(vma->obj);
}
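
/*
 * Usage sketch (illustrative): a vma "reference" really pins the
 * backing object, so a vma held across a blocking operation must be
 * dropped again with i915_vma_put():
 *
 *	vma = i915_vma_tryget(vma);
 *	if (vma) {
 *		... vma->obj cannot disappear here ...
 *		i915_vma_put(vma);
 *	}
 */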

static inline long
i915_vma_compare(struct i915_vma *vma,
		 struct i915_address_space *vm,
		 const struct i915_ggtt_view *view)
{
	ptrdiff_t cmp;

	GEM_BUG_ON(view && !i915_is_ggtt_or_dpt(vm));

	cmp = ptrdiff(vma->vm, vm);
	if (cmp)
		return cmp;

	BUILD_BUG_ON(I915_GGTT_VIEW_NORMAL != 0);
	cmp = vma->ggtt_view.type;
	if (!view)
		return cmp;

	cmp -= view->type;
	if (cmp)
		return cmp;

	assert_i915_gem_gtt_types();

	/* ggtt_view.type also encodes its size so that we both distinguish
	 * different views using it as a "type" and also use a compact (no
	 * accessing of uninitialised padding bytes) memcmp without storing
	 * an extra parameter or adding more code.
	 *
	 * To ensure that the memcmp is valid for all branches of the union,
	 * even though the code looks like it is just comparing one branch,
	 * we assert above that all branches have the same address, and that
	 * each branch has a unique type/size.
	 */
	BUILD_BUG_ON(I915_GGTT_VIEW_NORMAL >= I915_GGTT_VIEW_PARTIAL);
	BUILD_BUG_ON(I915_GGTT_VIEW_PARTIAL >= I915_GGTT_VIEW_ROTATED);
	BUILD_BUG_ON(I915_GGTT_VIEW_ROTATED >= I915_GGTT_VIEW_REMAPPED);
	BUILD_BUG_ON(offsetof(typeof(*view), rotated) !=
		     offsetof(typeof(*view), partial));
	BUILD_BUG_ON(offsetof(typeof(*view), rotated) !=
		     offsetof(typeof(*view), remapped));
	return memcmp(&vma->ggtt_view.partial, &view->partial, view->type);
}
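
/*
 * Usage sketch (illustrative): i915_vma_compare() provides the total
 * order used for the per-object vma lookup; roughly, a search over the
 * object's rb-tree of vma looks like:
 *
 *	struct rb_node *rb = obj->vma.tree.rb_node;
 *
 *	while (rb) {
 *		struct i915_vma *vma =
 *			rb_entry(rb, struct i915_vma, obj_node);
 *		long cmp = i915_vma_compare(vma, vm, view);
 *
 *		if (cmp == 0)
 *			return vma;
 *		rb = cmp < 0 ? rb->rb_right : rb->rb_left;
 *	}
 */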

struct i915_vma_work *i915_vma_work(void);
int i915_vma_bind(struct i915_vma *vma,
		  enum i915_cache_level cache_level,
		  u32 flags,
		  struct i915_vma_work *work,
		  struct i915_vma_resource *vma_res);

bool i915_gem_valid_gtt_space(struct i915_vma *vma, unsigned long color);
bool i915_vma_misplaced(const struct i915_vma *vma,
			u64 size, u64 alignment, u64 flags);
void __i915_vma_set_map_and_fenceable(struct i915_vma *vma);
void i915_vma_revoke_mmap(struct i915_vma *vma);
void vma_invalidate_tlb(struct i915_address_space *vm, u32 *tlb);
struct dma_fence *__i915_vma_evict(struct i915_vma *vma, bool async);
int __i915_vma_unbind(struct i915_vma *vma);
int __must_check i915_vma_unbind(struct i915_vma *vma);
int __must_check i915_vma_unbind_async(struct i915_vma *vma, bool trylock_vm);
int __must_check i915_vma_unbind_unlocked(struct i915_vma *vma);
void i915_vma_unlink_ctx(struct i915_vma *vma);
void i915_vma_close(struct i915_vma *vma);
void i915_vma_reopen(struct i915_vma *vma);

void i915_vma_destroy_locked(struct i915_vma *vma);
void i915_vma_destroy(struct i915_vma *vma);

#define assert_vma_held(vma) dma_resv_assert_held((vma)->obj->base.resv)

static inline void i915_vma_lock(struct i915_vma *vma)
{
	dma_resv_lock(vma->obj->base.resv, NULL);
}

static inline void i915_vma_unlock(struct i915_vma *vma)
{
	dma_resv_unlock(vma->obj->base.resv);
}

int __must_check
i915_vma_pin_ww(struct i915_vma *vma, struct i915_gem_ww_ctx *ww,
		u64 size, u64 alignment, u64 flags);

static inline int __must_check
i915_vma_pin(struct i915_vma *vma, u64 size, u64 alignment, u64 flags)
{
	struct i915_gem_ww_ctx ww;
	int err;

	i915_gem_ww_ctx_init(&ww, true);
retry:
	err = i915_gem_object_lock(vma->obj, &ww);
	if (!err)
		err = i915_vma_pin_ww(vma, &ww, size, alignment, flags);
	if (err == -EDEADLK) {
		err = i915_gem_ww_ctx_backoff(&ww);
		if (!err)
			goto retry;
	}
	i915_gem_ww_ctx_fini(&ww);

	return err;
}
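
/*
 * Usage sketch (illustrative): binding an object into the GGTT and
 * releasing the pin again. "obj" and "ggtt" stand in for an existing
 * object and GGTT; PIN_GLOBAL comes from i915_gem_gtt.h.
 *
 *	struct i915_vma *vma;
 *	int err;
 *
 *	vma = i915_vma_instance(obj, &ggtt->vm, NULL);
 *	if (IS_ERR(vma))
 *		return PTR_ERR(vma);
 *
 *	err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
 *	if (err)
 *		return err;
 *
 *	... the vma now has a stable address in vma->node.start ...
 *
 *	i915_vma_unpin(vma);
 */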

int i915_ggtt_pin(struct i915_vma *vma, struct i915_gem_ww_ctx *ww,
		  u32 align, unsigned int flags);

static inline int i915_vma_pin_count(const struct i915_vma *vma)
{
	return atomic_read(&vma->flags) & I915_VMA_PIN_MASK;
}

static inline bool i915_vma_is_pinned(const struct i915_vma *vma)
{
	return i915_vma_pin_count(vma);
}

static inline void __i915_vma_pin(struct i915_vma *vma)
{
	atomic_inc(&vma->flags);
	GEM_BUG_ON(!i915_vma_is_pinned(vma));
}

static inline void __i915_vma_unpin(struct i915_vma *vma)
{
	GEM_BUG_ON(!i915_vma_is_pinned(vma));
	atomic_dec(&vma->flags);
}

static inline void i915_vma_unpin(struct i915_vma *vma)
{
	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
	__i915_vma_unpin(vma);
}

static inline bool i915_vma_is_bound(const struct i915_vma *vma,
				     unsigned int where)
{
	return atomic_read(&vma->flags) & where;
}

static inline bool i915_node_color_differs(const struct drm_mm_node *node,
					   unsigned long color)
{
	return drm_mm_node_allocated(node) && node->color != color;
}

/**
 * i915_vma_pin_iomap - calls ioremap_wc to map the GGTT VMA via the aperture
 * @vma: VMA to iomap
 *
 * The passed in VMA has to be pinned in the global GTT mappable region.
 * An extra pin is taken on the VMA for the iomapping and is dropped
 * again by i915_vma_unpin_iomap().
 *
 * Returns a valid iomapped pointer or ERR_PTR.
 */
void __iomem *i915_vma_pin_iomap(struct i915_vma *vma);

/**
 * i915_vma_unpin_iomap - unpins the mapping returned from i915_vma_pin_iomap
 * @vma: VMA to unpin
 *
 * Unpins the previously iomapped VMA from i915_vma_pin_iomap().
 *
 * This function is only valid to be called on a VMA previously
 * iomapped by the caller with i915_vma_pin_iomap().
 */
void i915_vma_unpin_iomap(struct i915_vma *vma);
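
/*
 * Usage sketch (illustrative): CPU access through the mappable
 * aperture; assumes "vma" was pinned with PIN_GLOBAL | PIN_MAPPABLE
 * and that "value"/"offset" are caller state.
 *
 *	void __iomem *map;
 *
 *	map = i915_vma_pin_iomap(vma);
 *	if (IS_ERR(map))
 *		return PTR_ERR(map);
 *
 *	writel(value, map + offset);
 *
 *	i915_vma_unpin_iomap(vma);
 */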

/**
 * i915_vma_pin_fence - pin fencing state
 * @vma: vma to pin fencing for
 *
 * This pins the fencing state (whether tiled or untiled) to make sure the
 * vma (and its object) is ready to be used as a scanout target. Fencing
 * status must be synchronized first.
 *
 * The resulting fence pin is automatically released when the buffer is
 * unbound with i915_vma_unbind(). Returns 0 on success, negative error
 * code on failure.
 */
int __must_check i915_vma_pin_fence(struct i915_vma *vma);
void i915_vma_revoke_fence(struct i915_vma *vma);

int __i915_vma_pin_fence(struct i915_vma *vma);

static inline void __i915_vma_unpin_fence(struct i915_vma *vma)
{
	GEM_BUG_ON(atomic_read(&vma->fence->pin_count) <= 0);
	atomic_dec(&vma->fence->pin_count);
}

/**
 * i915_vma_unpin_fence - unpin fencing state
 * @vma: vma to unpin fencing for
 *
 * This releases the fence pin reference acquired through
 * i915_vma_pin_fence().
 */
static inline void
i915_vma_unpin_fence(struct i915_vma *vma)
{
	if (vma->fence)
		__i915_vma_unpin_fence(vma);
}
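
/*
 * Usage sketch (illustrative): holding fencing state across access
 * that relies on hardware detiling:
 *
 *	err = i915_vma_pin_fence(vma);
 *	if (err)
 *		return err;
 *
 *	if (vma->fence)
 *		... access via the fenced GTT mapping ...
 *
 *	i915_vma_unpin_fence(vma);
 */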

void i915_vma_parked(struct intel_gt *gt);

static inline bool i915_vma_is_scanout(const struct i915_vma *vma)
{
	return test_bit(I915_VMA_SCANOUT_BIT, __i915_vma_flags(vma));
}

static inline void i915_vma_mark_scanout(struct i915_vma *vma)
{
	set_bit(I915_VMA_SCANOUT_BIT, __i915_vma_flags(vma));
}

static inline void i915_vma_clear_scanout(struct i915_vma *vma)
{
	clear_bit(I915_VMA_SCANOUT_BIT, __i915_vma_flags(vma));
}

#define for_each_until(cond) if (cond) break; else

/**
 * for_each_ggtt_vma - iterate over the GGTT VMA belonging to an object.
 * @V: the #i915_vma iterator
 * @OBJ: the #drm_i915_gem_object
 *
 * GGTT VMA are placed at the beginning of the object's vma.list, see
 * vma_create(), so we can stop our walk as soon as we see a ppGTT VMA,
 * indicating that the object's GGTT vma are exhausted.
 */
#define for_each_ggtt_vma(V, OBJ) \
	list_for_each_entry(V, &(OBJ)->vma.list, obj_link)		\
		for_each_until(!i915_vma_is_ggtt(V))
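
/*
 * Usage sketch (illustrative): flushing writes on every GGTT binding
 * of an object; the walk terminates at the first ppGTT VMA.
 *
 *	struct i915_vma *vma;
 *
 *	for_each_ggtt_vma(vma, obj)
 *		i915_vma_flush_writes(vma);
 */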

struct i915_vma *i915_vma_make_unshrinkable(struct i915_vma *vma);
void i915_vma_make_shrinkable(struct i915_vma *vma);
void i915_vma_make_purgeable(struct i915_vma *vma);

int i915_vma_wait_for_bind(struct i915_vma *vma);

static inline int i915_vma_sync(struct i915_vma *vma)
{
	/* Wait for the asynchronous bindings and pending GPU reads */
	return i915_active_wait(&vma->active);
}

/**
 * i915_vma_get_current_resource - Get the current resource of the vma
 * @vma: The vma to get the current resource from.
 *
 * It's illegal to call this function if the vma is not bound.
 *
 * Return: A refcounted pointer to the current vma resource
 * of the vma, assuming the vma is bound.
 */
static inline struct i915_vma_resource *
i915_vma_get_current_resource(struct i915_vma *vma)
{
	return i915_vma_resource_get(vma->resource);
}
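
/*
 * Usage sketch (illustrative): the reference returned above must be
 * dropped with i915_vma_resource_put() when no longer needed:
 *
 *	struct i915_vma_resource *vma_res;
 *
 *	vma_res = i915_vma_get_current_resource(vma);
 *	... inspect the resource, e.g. vma_res->start ...
 *	i915_vma_resource_put(vma_res);
 */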

#if IS_ENABLED(CONFIG_DRM_I915_SELFTEST)
void i915_vma_resource_init_from_vma(struct i915_vma_resource *vma_res,
				     struct i915_vma *vma);
#endif

void i915_vma_module_exit(void);
int i915_vma_module_init(void);

I915_SELFTEST_DECLARE(int i915_vma_get_pages(struct i915_vma *vma));
I915_SELFTEST_DECLARE(void i915_vma_put_pages(struct i915_vma *vma));

#endif