0001
0002
0003
0004
0005
0006
0007 #include <linux/string.h>
0008 #include <linux/bitops.h>
0009
0010 #include "i915_drv.h"
0011 #include "i915_gem.h"
0012 #include "i915_gem_ioctls.h"
0013 #include "i915_gem_mman.h"
0014 #include "i915_gem_object.h"
0015 #include "i915_gem_tiling.h"
0016 #include "i915_reg.h"
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028
0029
0030
0031
0032
0033
0034
0035
0036
0037
0038
0039
0040
0041
0042
0043
0044
0045
0046
0047
0048
0049
0050
0051
0052
0053
0054
0055 u32 i915_gem_fence_size(struct drm_i915_private *i915,
0056 u32 size, unsigned int tiling, unsigned int stride)
0057 {
0058 u32 ggtt_size;
0059
0060 GEM_BUG_ON(!size);
0061
0062 if (tiling == I915_TILING_NONE)
0063 return size;
0064
0065 GEM_BUG_ON(!stride);
0066
0067 if (GRAPHICS_VER(i915) >= 4) {
0068 stride *= i915_gem_tile_height(tiling);
0069 GEM_BUG_ON(!IS_ALIGNED(stride, I965_FENCE_PAGE));
0070 return roundup(size, stride);
0071 }
0072
0073
0074 if (GRAPHICS_VER(i915) == 3)
0075 ggtt_size = 1024*1024;
0076 else
0077 ggtt_size = 512*1024;
0078
0079 while (ggtt_size < size)
0080 ggtt_size <<= 1;
0081
0082 return ggtt_size;
0083 }
0084
0085
0086
0087
0088
0089
0090
0091
0092
0093
0094
0095 u32 i915_gem_fence_alignment(struct drm_i915_private *i915, u32 size,
0096 unsigned int tiling, unsigned int stride)
0097 {
0098 GEM_BUG_ON(!size);
0099
0100
0101
0102
0103
0104 if (tiling == I915_TILING_NONE)
0105 return I915_GTT_MIN_ALIGNMENT;
0106
0107 if (GRAPHICS_VER(i915) >= 4)
0108 return I965_FENCE_PAGE;
0109
0110
0111
0112
0113
0114 return i915_gem_fence_size(i915, size, tiling, stride);
0115 }
0116
0117
0118 static bool
0119 i915_tiling_ok(struct drm_i915_gem_object *obj,
0120 unsigned int tiling, unsigned int stride)
0121 {
0122 struct drm_i915_private *i915 = to_i915(obj->base.dev);
0123 unsigned int tile_width;
0124
0125
0126 if (tiling == I915_TILING_NONE)
0127 return true;
0128
0129 if (tiling > I915_TILING_LAST)
0130 return false;
0131
0132
0133
0134
0135 if (GRAPHICS_VER(i915) >= 7) {
0136 if (stride / 128 > GEN7_FENCE_MAX_PITCH_VAL)
0137 return false;
0138 } else if (GRAPHICS_VER(i915) >= 4) {
0139 if (stride / 128 > I965_FENCE_MAX_PITCH_VAL)
0140 return false;
0141 } else {
0142 if (stride > 8192)
0143 return false;
0144
0145 if (!is_power_of_2(stride))
0146 return false;
0147 }
0148
0149 if (GRAPHICS_VER(i915) == 2 ||
0150 (tiling == I915_TILING_Y && HAS_128_BYTE_Y_TILING(i915)))
0151 tile_width = 128;
0152 else
0153 tile_width = 512;
0154
0155 if (!stride || !IS_ALIGNED(stride, tile_width))
0156 return false;
0157
0158 return true;
0159 }
0160
0161 static bool i915_vma_fence_prepare(struct i915_vma *vma,
0162 int tiling_mode, unsigned int stride)
0163 {
0164 struct drm_i915_private *i915 = vma->vm->i915;
0165 u32 size, alignment;
0166
0167 if (!i915_vma_is_map_and_fenceable(vma))
0168 return true;
0169
0170 size = i915_gem_fence_size(i915, vma->size, tiling_mode, stride);
0171 if (vma->node.size < size)
0172 return false;
0173
0174 alignment = i915_gem_fence_alignment(i915, vma->size, tiling_mode, stride);
0175 if (!IS_ALIGNED(vma->node.start, alignment))
0176 return false;
0177
0178 return true;
0179 }
0180
0181
/*
 * Unbind any GGTT vmas whose current placement can no longer accommodate a
 * fence register with the new tiling parameters, so that a subsequent bind
 * picks a compatible placement. Returns 0 on success or the unbind error.
 */
static int
i915_gem_object_fence_prepare(struct drm_i915_gem_object *obj,
			      int tiling_mode, unsigned int stride)
{
	struct drm_i915_private *i915 = to_i915(obj->base.dev);
	struct i915_ggtt *ggtt = to_gt(i915)->ggtt;
	struct i915_vma *vma, *vn;
	LIST_HEAD(unbind);
	int ret = 0;

	/* Untiled objects have no fence placement constraints. */
	if (tiling_mode == I915_TILING_NONE)
		return 0;

	mutex_lock(&ggtt->vm.mutex);

	/* Collect the vmas that would no longer satisfy a fence. */
	spin_lock(&obj->vma.lock);
	for_each_ggtt_vma(vma, obj) {
		GEM_BUG_ON(vma->vm != &ggtt->vm);

		if (i915_vma_fence_prepare(vma, tiling_mode, stride))
			continue;

		list_move(&vma->vm_link, &unbind);
	}
	spin_unlock(&obj->vma.lock);

	list_for_each_entry_safe(vma, vn, &unbind, vm_link) {
		ret = __i915_vma_unbind(vma);
		if (ret) {
			/* Restore the remaining vmas on error. */
			list_splice(&unbind, &ggtt->vm.bound_list);
			break;
		}
	}

	mutex_unlock(&ggtt->vm.mutex);

	return ret;
}
0221
0222 bool i915_gem_object_needs_bit17_swizzle(struct drm_i915_gem_object *obj)
0223 {
0224 struct drm_i915_private *i915 = to_i915(obj->base.dev);
0225
0226 return to_gt(i915)->ggtt->bit_6_swizzle_x == I915_BIT_6_SWIZZLE_9_10_17 &&
0227 i915_gem_object_is_tiled(obj);
0228 }
0229
/**
 * i915_gem_object_set_tiling - update the tiling and stride of an object
 * @obj: object to update
 * @tiling: new tiling mode (I915_TILING_*)
 * @stride: new stride in bytes; non-zero iff @tiling is not I915_TILING_NONE
 *
 * Return: 0 on success; -EBUSY if the object is currently used as a
 * framebuffer; or the error from unbinding a vma whose placement is
 * incompatible with the new fence constraints.
 */
int
i915_gem_object_set_tiling(struct drm_i915_gem_object *obj,
			   unsigned int tiling, unsigned int stride)
{
	struct drm_i915_private *i915 = to_i915(obj->base.dev);
	struct i915_vma *vma;
	int err;

	/* Tiling mode and stride share tiling_and_stride; must not overlap. */
	BUILD_BUG_ON(I915_TILING_LAST & STRIDE_MASK);

	GEM_BUG_ON(!i915_tiling_ok(obj, tiling, stride));
	/* A non-zero stride is required exactly when the object is tiled. */
	GEM_BUG_ON(!stride ^ (tiling == I915_TILING_NONE));

	if ((tiling | stride) == obj->tiling_and_stride)
		return 0;

	if (i915_gem_object_is_framebuffer(obj))
		return -EBUSY;

	/*
	 * Changing tiling can invalidate current GGTT placements (fence size
	 * and alignment both depend on the tiling parameters), so any
	 * incompatible vmas must be unbound before the change is committed.
	 *
	 * The framebuffer check is repeated under the object lock to close
	 * the race against the object becoming a framebuffer after the
	 * unlocked check above.
	 */
	i915_gem_object_lock(obj, NULL);
	if (i915_gem_object_is_framebuffer(obj)) {
		i915_gem_object_unlock(obj);
		return -EBUSY;
	}

	err = i915_gem_object_fence_prepare(obj, tiling, stride);
	if (err) {
		i915_gem_object_unlock(obj);
		return err;
	}

	/*
	 * If the memory has unknown (i.e. varying) swizzling, we pin the
	 * pages to prevent them being swapped out and causing corruption
	 * due to the change in swizzling.
	 */
	if (i915_gem_object_has_pages(obj) &&
	    obj->mm.madv == I915_MADV_WILLNEED &&
	    i915->quirks & QUIRK_PIN_SWIZZLED_PAGES) {
		if (tiling == I915_TILING_NONE) {
			GEM_BUG_ON(!i915_gem_object_has_tiling_quirk(obj));
			i915_gem_object_clear_tiling_quirk(obj);
			i915_gem_object_make_shrinkable(obj);
		}
		if (!i915_gem_object_is_tiled(obj)) {
			GEM_BUG_ON(i915_gem_object_has_tiling_quirk(obj));
			i915_gem_object_make_unshrinkable(obj);
			i915_gem_object_set_tiling_quirk(obj);
		}
	}

	/* Refresh cached fence geometry and flag any live fence for rewrite. */
	spin_lock(&obj->vma.lock);
	for_each_ggtt_vma(vma, obj) {
		vma->fence_size =
			i915_gem_fence_size(i915, vma->size, tiling, stride);
		vma->fence_alignment =
			i915_gem_fence_alignment(i915,
						 vma->size, tiling, stride);

		if (vma->fence)
			vma->fence->dirty = true;
	}
	spin_unlock(&obj->vma.lock);

	obj->tiling_and_stride = tiling | stride;
	i915_gem_object_unlock(obj);

	/* Force the fence to be reacquired for GTT access */
	i915_gem_object_release_mmap_gtt(obj);

	/* Try to preallocate memory required to save swizzling on put-pages */
	if (i915_gem_object_needs_bit17_swizzle(obj)) {
		if (!obj->bit_17) {
			obj->bit_17 = bitmap_zalloc(obj->base.size >> PAGE_SHIFT,
						    GFP_KERNEL);
		}
	} else {
		bitmap_free(obj->bit_17);
		obj->bit_17 = NULL;
	}

	return 0;
}
0326
0327
0328
0329
0330
0331
0332
0333
0334
0335
0336
0337
0338
0339
0340
/**
 * i915_gem_set_tiling_ioctl - IOCTL handler to set tiling mode
 * @dev: DRM device
 * @data: data pointer for the ioctl
 * @file: DRM file for the ioctl call
 *
 * Sets the tiling mode of an object, returning the required swizzling of
 * bit 6 of addresses in the object.
 *
 * Returns:
 * Zero on success, negative errno on failure.
 */
int
i915_gem_set_tiling_ioctl(struct drm_device *dev, void *data,
			  struct drm_file *file)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_i915_gem_set_tiling *args = data;
	struct drm_i915_gem_object *obj;
	int err;

	/* Tiling is only meaningful on hardware with fence registers. */
	if (!to_gt(dev_priv)->ggtt->num_fences)
		return -EOPNOTSUPP;

	obj = i915_gem_object_lookup(file, args->handle);
	if (!obj)
		return -ENOENT;

	/*
	 * Proxy objects are not allowed to have their tiling changed by
	 * userspace.
	 */
	if (i915_gem_object_is_proxy(obj)) {
		err = -ENXIO;
		goto err;
	}

	if (!i915_tiling_ok(obj, args->tiling_mode, args->stride)) {
		err = -EINVAL;
		goto err;
	}

	if (args->tiling_mode == I915_TILING_NONE) {
		args->swizzle_mode = I915_BIT_6_SWIZZLE_NONE;
		args->stride = 0;
	} else {
		if (args->tiling_mode == I915_TILING_X)
			args->swizzle_mode = to_gt(dev_priv)->ggtt->bit_6_swizzle_x;
		else
			args->swizzle_mode = to_gt(dev_priv)->ggtt->bit_6_swizzle_y;

		/*
		 * Hide bit 17 swizzling from userspace: report only the
		 * bit 9/10 component; the kernel compensates for the extra
		 * bit17 swizzle itself (see
		 * i915_gem_object_needs_bit17_swizzle()).
		 */
		if (args->swizzle_mode == I915_BIT_6_SWIZZLE_9_17)
			args->swizzle_mode = I915_BIT_6_SWIZZLE_9;
		if (args->swizzle_mode == I915_BIT_6_SWIZZLE_9_10_17)
			args->swizzle_mode = I915_BIT_6_SWIZZLE_9_10;

		/* If we can't handle the swizzling, make it untiled. */
		if (args->swizzle_mode == I915_BIT_6_SWIZZLE_UNKNOWN) {
			args->tiling_mode = I915_TILING_NONE;
			args->swizzle_mode = I915_BIT_6_SWIZZLE_NONE;
			args->stride = 0;
		}
	}

	err = i915_gem_object_set_tiling(obj, args->tiling_mode, args->stride);

	/*
	 * Report back the object's effective tiling and stride, even on
	 * failure, so userspace always sees the actual state.
	 */
	args->stride = i915_gem_object_get_stride(obj);
	args->tiling_mode = i915_gem_object_get_tiling(obj);

err:
	i915_gem_object_put(obj);
	return err;
}
0410
0411
0412
0413
0414
0415
0416
0417
0418
0419
0420
0421
0422
0423
0424 int
0425 i915_gem_get_tiling_ioctl(struct drm_device *dev, void *data,
0426 struct drm_file *file)
0427 {
0428 struct drm_i915_gem_get_tiling *args = data;
0429 struct drm_i915_private *dev_priv = to_i915(dev);
0430 struct drm_i915_gem_object *obj;
0431 int err = -ENOENT;
0432
0433 if (!to_gt(dev_priv)->ggtt->num_fences)
0434 return -EOPNOTSUPP;
0435
0436 rcu_read_lock();
0437 obj = i915_gem_object_lookup_rcu(file, args->handle);
0438 if (obj) {
0439 args->tiling_mode =
0440 READ_ONCE(obj->tiling_and_stride) & TILING_MASK;
0441 err = 0;
0442 }
0443 rcu_read_unlock();
0444 if (unlikely(err))
0445 return err;
0446
0447 switch (args->tiling_mode) {
0448 case I915_TILING_X:
0449 args->swizzle_mode = to_gt(dev_priv)->ggtt->bit_6_swizzle_x;
0450 break;
0451 case I915_TILING_Y:
0452 args->swizzle_mode = to_gt(dev_priv)->ggtt->bit_6_swizzle_y;
0453 break;
0454 default:
0455 case I915_TILING_NONE:
0456 args->swizzle_mode = I915_BIT_6_SWIZZLE_NONE;
0457 break;
0458 }
0459
0460
0461 if (dev_priv->quirks & QUIRK_PIN_SWIZZLED_PAGES)
0462 args->phys_swizzle_mode = I915_BIT_6_SWIZZLE_UNKNOWN;
0463 else
0464 args->phys_swizzle_mode = args->swizzle_mode;
0465 if (args->swizzle_mode == I915_BIT_6_SWIZZLE_9_17)
0466 args->swizzle_mode = I915_BIT_6_SWIZZLE_9;
0467 if (args->swizzle_mode == I915_BIT_6_SWIZZLE_9_10_17)
0468 args->swizzle_mode = I915_BIT_6_SWIZZLE_9_10;
0469
0470 return 0;
0471 }