0001 /*
0002  * Copyright © 2016 Intel Corporation
0003  *
0004  * Permission is hereby granted, free of charge, to any person obtaining a
0005  * copy of this software and associated documentation files (the "Software"),
0006  * to deal in the Software without restriction, including without limitation
0007  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
0008  * and/or sell copies of the Software, and to permit persons to whom the
0009  * Software is furnished to do so, subject to the following conditions:
0010  *
0011  * The above copyright notice and this permission notice (including the next
0012  * paragraph) shall be included in all copies or substantial portions of the
0013  * Software.
0014  *
0015  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
0016  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
0017  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
0018  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
0019  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
0020  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
0021  * IN THE SOFTWARE.
0022  *
0023  */
0024 
0025 #include <linux/prime_numbers.h>
0026 
0027 #include "gem/i915_gem_context.h"
0028 #include "gem/i915_gem_internal.h"
0029 #include "gem/selftests/mock_context.h"
0030 
0031 #include "i915_scatterlist.h"
0032 #include "i915_selftest.h"
0033 
0034 #include "mock_gem_device.h"
0035 #include "mock_gtt.h"
0036 
0037 static bool assert_vma(struct i915_vma *vma,
0038                struct drm_i915_gem_object *obj,
0039                struct i915_gem_context *ctx)
0040 {
0041     bool ok = true;
0042 
0043     if (vma->vm != ctx->vm) {
0044         pr_err("VMA created with wrong VM\n");
0045         ok = false;
0046     }
0047 
0048     if (vma->size != obj->base.size) {
0049         pr_err("VMA created with wrong size, found %llu, expected %zu\n",
0050                vma->size, obj->base.size);
0051         ok = false;
0052     }
0053 
0054     if (vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) {
0055         pr_err("VMA created with wrong type [%d]\n",
0056                vma->ggtt_view.type);
0057         ok = false;
0058     }
0059 
0060     return ok;
0061 }
0062 
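     /*
      * Look up (or create) the vma for (obj, vm, view) and cross-check the
      * result against i915_vma_compare() with the same creation parameters.
      */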
0063 static struct i915_vma *
0064 checked_vma_instance(struct drm_i915_gem_object *obj,
0065              struct i915_address_space *vm,
0066              const struct i915_ggtt_view *view)
0067 {
0068     struct i915_vma *vma;
0069     bool ok = true;
0070 
0071     vma = i915_vma_instance(obj, vm, view);
0072     if (IS_ERR(vma))
0073         return vma;
0074 
0075     /* Manual checks, will be reinforced by i915_vma_compare! */
0076     if (vma->vm != vm) {
0077         pr_err("VMA's vm [%p] does not match request [%p]\n",
0078                vma->vm, vm);
0079         ok = false;
0080     }
0081 
0082     if (i915_is_ggtt(vm) != i915_vma_is_ggtt(vma)) {
0083         pr_err("VMA ggtt status [%d] does not match parent [%d]\n",
0084                i915_vma_is_ggtt(vma), i915_is_ggtt(vm));
0085         ok = false;
0086     }
0087 
0088     if (i915_vma_compare(vma, vm, view)) {
0089         pr_err("i915_vma_compare failed with create parameters!\n");
0090         return ERR_PTR(-EINVAL);
0091     }
0092 
0093     if (i915_vma_compare(vma, vma->vm,
0094                  i915_vma_is_ggtt(vma) ? &vma->ggtt_view : NULL)) {
0095         pr_err("i915_vma_compare failed with itself\n");
0096         return ERR_PTR(-EINVAL);
0097     }
0098 
0099     if (!ok) {
0100         pr_err("i915_vma_compare failed to detect the difference!\n");
0101         return ERR_PTR(-EINVAL);
0102     }
0103 
0104     return vma;
0105 }
0106 
0107 static int create_vmas(struct drm_i915_private *i915,
0108                struct list_head *objects,
0109                struct list_head *contexts)
0110 {
0111     struct drm_i915_gem_object *obj;
0112     struct i915_gem_context *ctx;
0113     int pinned;
0114 
0115     list_for_each_entry(obj, objects, st_link) {
0116         for (pinned = 0; pinned <= 1; pinned++) {
0117             list_for_each_entry(ctx, contexts, link) {
0118                 struct i915_address_space *vm;
0119                 struct i915_vma *vma;
0120                 int err;
0121 
0122                 vm = i915_gem_context_get_eb_vm(ctx);
0123                 vma = checked_vma_instance(obj, vm, NULL);
0124                 i915_vm_put(vm);
0125                 if (IS_ERR(vma))
0126                     return PTR_ERR(vma);
0127 
0128                 if (!assert_vma(vma, obj, ctx)) {
0129                     pr_err("VMA lookup/create failed\n");
0130                     return -EINVAL;
0131                 }
0132 
0133                 if (!pinned) {
0134                     err = i915_vma_pin(vma, 0, 0, PIN_USER);
0135                     if (err) {
0136                         pr_err("Failed to pin VMA\n");
0137                         return err;
0138                     }
0139                 } else {
0140                     i915_vma_unpin(vma);
0141                 }
0142             }
0143         }
0144     }
0145 
0146     return 0;
0147 }
0148 
0149 static int igt_vma_create(void *arg)
0150 {
0151     struct i915_ggtt *ggtt = arg;
0152     struct drm_i915_private *i915 = ggtt->vm.i915;
0153     struct drm_i915_gem_object *obj, *on;
0154     struct i915_gem_context *ctx, *cn;
0155     unsigned long num_obj, num_ctx;
0156     unsigned long no, nc;
0157     IGT_TIMEOUT(end_time);
0158     LIST_HEAD(contexts);
0159     LIST_HEAD(objects);
0160     int err = -ENOMEM;
0161 
0162     /* Exercise creating many vmas amongst many objects, checking the
0163      * vma creation and lookup routines.
0164      */
0165 
0166     no = 0;
0167     for_each_prime_number(num_obj, ULONG_MAX - 1) {
0168         for (; no < num_obj; no++) {
0169             obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
0170             if (IS_ERR(obj))
0171                 goto out;
0172 
0173             list_add(&obj->st_link, &objects);
0174         }
0175 
0176         nc = 0;
0177         for_each_prime_number(num_ctx, 2 * BITS_PER_LONG) {
0178             for (; nc < num_ctx; nc++) {
0179                 ctx = mock_context(i915, "mock");
0180                 if (!ctx)
0181                     goto out;
0182 
0183                 list_move(&ctx->link, &contexts);
0184             }
0185 
0186             err = create_vmas(i915, &objects, &contexts);
0187             if (err)
0188                 goto out;
0189 
0190             if (igt_timeout(end_time,
0191                     "%s timed out: after %lu objects in %lu contexts\n",
0192                     __func__, no, nc))
0193                 goto end;
0194         }
0195 
0196         list_for_each_entry_safe(ctx, cn, &contexts, link) {
0197             list_del_init(&ctx->link);
0198             mock_context_close(ctx);
0199         }
0200 
0201         cond_resched();
0202     }
0203 
0204 end:
0205     /* Final pass to lookup all created contexts */
0206     err = create_vmas(i915, &objects, &contexts);
0207 out:
0208     list_for_each_entry_safe(ctx, cn, &contexts, link) {
0209         list_del_init(&ctx->link);
0210         mock_context_close(ctx);
0211     }
0212 
0213     list_for_each_entry_safe(obj, on, &objects, st_link)
0214         i915_gem_object_put(obj);
0215     return err;
0216 }
0217 
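     /*
      * Table entry for igt_vma_pin1(): each mode describes a pin request
      * (size + flags) together with the outcome we expect from
      * i915_vma_pin(), checked via the assert callback (valid, -EINVAL or
      * -ENOSPC).
      */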
0218 struct pin_mode {
0219     u64 size;
0220     u64 flags;
0221     bool (*assert)(const struct i915_vma *,
0222                const struct pin_mode *mode,
0223                int result);
0224     const char *string;
0225 };
0226 
0227 static bool assert_pin_valid(const struct i915_vma *vma,
0228                  const struct pin_mode *mode,
0229                  int result)
0230 {
0231     if (result)
0232         return false;
0233 
0234     if (i915_vma_misplaced(vma, mode->size, 0, mode->flags))
0235         return false;
0236 
0237     return true;
0238 }
0239 
0240 __maybe_unused
0241 static bool assert_pin_enospc(const struct i915_vma *vma,
0242                   const struct pin_mode *mode,
0243                   int result)
0244 {
0245     return result == -ENOSPC;
0246 }
0247 
0248 __maybe_unused
0249 static bool assert_pin_einval(const struct i915_vma *vma,
0250                   const struct pin_mode *mode,
0251                   int result)
0252 {
0253     return result == -EINVAL;
0254 }
0255 
0256 static int igt_vma_pin1(void *arg)
0257 {
0258     struct i915_ggtt *ggtt = arg;
0259     const struct pin_mode modes[] = {
0260 #define VALID(sz, fl) { .size = (sz), .flags = (fl), .assert = assert_pin_valid, .string = #sz ", " #fl ", (valid) " }
0261 #define __INVALID(sz, fl, check, eval) { .size = (sz), .flags = (fl), .assert = (check), .string = #sz ", " #fl ", (invalid " #eval ")" }
0262 #define INVALID(sz, fl) __INVALID(sz, fl, assert_pin_einval, EINVAL)
0263 #define NOSPACE(sz, fl) __INVALID(sz, fl, assert_pin_enospc, ENOSPC)
0264         VALID(0, PIN_GLOBAL),
0265         VALID(0, PIN_GLOBAL | PIN_MAPPABLE),
0266 
0267         VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 4096),
0268         VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 8192),
0269         VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
0270         VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
0271         VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),
0272 
0273         VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
0274         INVALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | ggtt->mappable_end),
0275         VALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
0276         INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | ggtt->vm.total),
0277         INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | round_down(U64_MAX, PAGE_SIZE)),
0278 
0279         VALID(4096, PIN_GLOBAL),
0280         VALID(8192, PIN_GLOBAL),
0281         VALID(ggtt->mappable_end - 4096, PIN_GLOBAL | PIN_MAPPABLE),
0282         VALID(ggtt->mappable_end, PIN_GLOBAL | PIN_MAPPABLE),
0283         NOSPACE(ggtt->mappable_end + 4096, PIN_GLOBAL | PIN_MAPPABLE),
0284         VALID(ggtt->vm.total - 4096, PIN_GLOBAL),
0285         VALID(ggtt->vm.total, PIN_GLOBAL),
0286         NOSPACE(ggtt->vm.total + 4096, PIN_GLOBAL),
0287         NOSPACE(round_down(U64_MAX, PAGE_SIZE), PIN_GLOBAL),
0288         INVALID(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
0289         INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
0290         INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (round_down(U64_MAX, PAGE_SIZE) - 4096)),
0291 
0292         VALID(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
0293 
0294 #if !IS_ENABLED(CONFIG_DRM_I915_DEBUG_GEM)
0295         /* Misusing BIAS is a programming error (it is not controllable
0296          * from userspace) so when debugging is enabled, it explodes.
0297          * However, the tests are still quite interesting for checking
0298          * variable start, end and size.
0299          */
0300         NOSPACE(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | ggtt->mappable_end),
0301         NOSPACE(0, PIN_GLOBAL | PIN_OFFSET_BIAS | ggtt->vm.total),
0302         NOSPACE(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
0303         NOSPACE(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),
0304 #endif
0305         { },
0306 #undef NOSPACE
0307 #undef INVALID
0308 #undef __INVALID
0309 #undef VALID
0310     }, *m;
0311     struct drm_i915_gem_object *obj;
0312     struct i915_vma *vma;
0313     int err = -EINVAL;
0314 
0315     /* Exercise all the weird and wonderful i915_vma_pin requests,
0316      * focusing on error handling of boundary conditions.
0317      */
0318 
0319     GEM_BUG_ON(!drm_mm_clean(&ggtt->vm.mm));
0320 
0321     obj = i915_gem_object_create_internal(ggtt->vm.i915, PAGE_SIZE);
0322     if (IS_ERR(obj))
0323         return PTR_ERR(obj);
0324 
0325     vma = checked_vma_instance(obj, &ggtt->vm, NULL);
0326     if (IS_ERR(vma))
0327         goto out;
0328 
0329     for (m = modes; m->assert; m++) {
0330         err = i915_vma_pin(vma, m->size, 0, m->flags);
0331         if (!m->assert(vma, m, err)) {
0332             pr_err("%s to pin single page into GGTT with mode[%d:%s]: size=%llx flags=%llx, err=%d\n",
0333                    m->assert == assert_pin_valid ? "Failed" : "Unexpectedly succeeded",
0334                    (int)(m - modes), m->string, m->size, m->flags,
0335                    err);
0336             if (!err)
0337                 i915_vma_unpin(vma);
0338             err = -EINVAL;
0339             goto out;
0340         }
0341 
0342         if (!err) {
0343             i915_vma_unpin(vma);
0344             err = i915_vma_unbind_unlocked(vma);
0345             if (err) {
0346                 pr_err("Failed to unbind single page from GGTT, err=%d\n", err);
0347                 goto out;
0348             }
0349         }
0350 
0351         cond_resched();
0352     }
0353 
0354     err = 0;
0355 out:
0356     i915_gem_object_put(obj);
0357     return err;
0358 }
0359 
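     /*
      * Source page index backing position (x, y) of plane @n in a rotated
      * view: the rotated view reads the plane bottom-up, so view row y maps
      * to source row (height - y - 1) along src_stride, from the plane
      * offset.
      */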
0360 static unsigned long rotated_index(const struct intel_rotation_info *r,
0361                    unsigned int n,
0362                    unsigned int x,
0363                    unsigned int y)
0364 {
0365     return (r->plane[n].src_stride * (r->plane[n].height - y - 1) +
0366         r->plane[n].offset + x);
0367 }
0368 
0369 static struct scatterlist *
0370 assert_rotated(struct drm_i915_gem_object *obj,
0371            const struct intel_rotation_info *r, unsigned int n,
0372            struct scatterlist *sg)
0373 {
0374     unsigned int x, y;
0375 
0376     for (x = 0; x < r->plane[n].width; x++) {
0377         unsigned int left;
0378 
0379         for (y = 0; y < r->plane[n].height; y++) {
0380             unsigned long src_idx;
0381             dma_addr_t src;
0382 
0383             if (!sg) {
0384                 pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
0385                        n, x, y);
0386                 return ERR_PTR(-EINVAL);
0387             }
0388 
0389             src_idx = rotated_index(r, n, x, y);
0390             src = i915_gem_object_get_dma_address(obj, src_idx);
0391 
0392             if (sg_dma_len(sg) != PAGE_SIZE) {
0393                 pr_err("Invalid sg.length, found %d, expected %lu for rotated page (%d, %d) [src index %lu]\n",
0394                        sg_dma_len(sg), PAGE_SIZE,
0395                        x, y, src_idx);
0396                 return ERR_PTR(-EINVAL);
0397             }
0398 
0399             if (sg_dma_address(sg) != src) {
0400                 pr_err("Invalid address for rotated page (%d, %d) [src index %lu]\n",
0401                        x, y, src_idx);
0402                 return ERR_PTR(-EINVAL);
0403             }
0404 
0405             sg = sg_next(sg);
0406         }
0407 
0408         left = (r->plane[n].dst_stride - y) * PAGE_SIZE;
0409 
0410         if (!left)
0411             continue;
0412 
0413         if (!sg) {
0414             pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
0415                    n, x, y);
0416             return ERR_PTR(-EINVAL);
0417         }
0418 
0419         if (sg_dma_len(sg) != left) {
0420             pr_err("Invalid sg.length, found %d, expected %u for rotated page (%d, %d)\n",
0421                    sg_dma_len(sg), left, x, y);
0422             return ERR_PTR(-EINVAL);
0423         }
0424 
0425         if (sg_dma_address(sg) != 0) {
0426             pr_err("Invalid address, found %pad, expected 0 for remapped page (%d, %d)\n",
0427                    &sg_dma_address(sg), x, y);
0428             return ERR_PTR(-EINVAL);
0429         }
0430 
0431         sg = sg_next(sg);
0432     }
0433 
0434     return sg;
0435 }
0436 
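     /*
      * Source page index backing position (x, y) of plane @n in a remapped
      * view: rows are read top-down along src_stride from the plane offset.
      */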
0437 static unsigned long remapped_index(const struct intel_remapped_info *r,
0438                     unsigned int n,
0439                     unsigned int x,
0440                     unsigned int y)
0441 {
0442     return (r->plane[n].src_stride * y +
0443         r->plane[n].offset + x);
0444 }
0445 
0446 static struct scatterlist *
0447 assert_remapped(struct drm_i915_gem_object *obj,
0448         const struct intel_remapped_info *r, unsigned int n,
0449         struct scatterlist *sg)
0450 {
0451     unsigned int x, y;
0452     unsigned int left = 0;
0453     unsigned int offset;
0454 
0455     for (y = 0; y < r->plane[n].height; y++) {
0456         for (x = 0; x < r->plane[n].width; x++) {
0457             unsigned long src_idx;
0458             dma_addr_t src;
0459 
0460             if (!sg) {
0461                 pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
0462                        n, x, y);
0463                 return ERR_PTR(-EINVAL);
0464             }
0465             if (!left) {
0466                 offset = 0;
0467                 left = sg_dma_len(sg);
0468             }
0469 
0470             src_idx = remapped_index(r, n, x, y);
0471             src = i915_gem_object_get_dma_address(obj, src_idx);
0472 
0473             if (left < PAGE_SIZE || left & (PAGE_SIZE-1)) {
0474                 pr_err("Invalid sg.length, found %d, expected %lu for remapped page (%d, %d) [src index %lu]\n",
0475                        sg_dma_len(sg), PAGE_SIZE,
0476                        x, y, src_idx);
0477                 return ERR_PTR(-EINVAL);
0478             }
0479 
0480             if (sg_dma_address(sg) + offset != src) {
0481                 pr_err("Invalid address for remapped page (%d, %d) [src index %lu]\n",
0482                        x, y, src_idx);
0483                 return ERR_PTR(-EINVAL);
0484             }
0485 
0486             left -= PAGE_SIZE;
0487             offset += PAGE_SIZE;
0488 
0489 
0490             if (!left)
0491                 sg = sg_next(sg);
0492         }
0493 
0494         if (left) {
0495             pr_err("Unexpected sg tail with %d size for remapped page (%d, %d)\n",
0496                    left,
0497                    x, y);
0498             return ERR_PTR(-EINVAL);
0499         }
0500 
0501         left = (r->plane[n].dst_stride - r->plane[n].width) * PAGE_SIZE;
0502 
0503         if (!left)
0504             continue;
0505 
0506         if (!sg) {
0507             pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
0508                    n, x, y);
0509             return ERR_PTR(-EINVAL);
0510         }
0511 
0512         if (sg_dma_len(sg) != left) {
0513             pr_err("Invalid sg.length, found %u, expected %u for remapped page (%d, %d)\n",
0514                    sg_dma_len(sg), left,
0515                    x, y);
0516             return ERR_PTR(-EINVAL);
0517         }
0518 
0519         if (sg_dma_address(sg) != 0) {
0520             pr_err("Invalid address, found %pad, expected 0 for remapped page (%d, %d)\n",
0521                    &sg_dma_address(sg),
0522                    x, y);
0523             return ERR_PTR(-EINVAL);
0524         }
0525 
0526         sg = sg_next(sg);
0527         left = 0;
0528     }
0529 
0530     return sg;
0531 }
0532 
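     /*
      * Number of pages the two planes occupy in the view: rotated views are
      * laid out column by column (dst_stride pages per column, width
      * columns), remapped views row by row (dst_stride pages per row,
      * height rows).
      */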
0533 static unsigned int remapped_size(enum i915_ggtt_view_type view_type,
0534                   const struct intel_remapped_plane_info *a,
0535                   const struct intel_remapped_plane_info *b)
0536 {
0537 
0538     if (view_type == I915_GGTT_VIEW_ROTATED)
0539         return a->dst_stride * a->width + b->dst_stride * b->width;
0540     else
0541         return a->dst_stride * a->height + b->dst_stride * b->height;
0542 }
0543 
0544 static int igt_vma_rotate_remap(void *arg)
0545 {
0546     struct i915_ggtt *ggtt = arg;
0547     struct i915_address_space *vm = &ggtt->vm;
0548     struct drm_i915_gem_object *obj;
0549     const struct intel_remapped_plane_info planes[] = {
0550         { .width = 1, .height = 1, .src_stride = 1 },
0551         { .width = 2, .height = 2, .src_stride = 2 },
0552         { .width = 4, .height = 4, .src_stride = 4 },
0553         { .width = 8, .height = 8, .src_stride = 8 },
0554 
0555         { .width = 3, .height = 5, .src_stride = 3 },
0556         { .width = 3, .height = 5, .src_stride = 4 },
0557         { .width = 3, .height = 5, .src_stride = 5 },
0558 
0559         { .width = 5, .height = 3, .src_stride = 5 },
0560         { .width = 5, .height = 3, .src_stride = 7 },
0561         { .width = 5, .height = 3, .src_stride = 9 },
0562 
0563         { .width = 4, .height = 6, .src_stride = 6 },
0564         { .width = 6, .height = 4, .src_stride = 6 },
0565 
0566         { .width = 2, .height = 2, .src_stride = 2, .dst_stride = 2 },
0567         { .width = 3, .height = 3, .src_stride = 3, .dst_stride = 4 },
0568         { .width = 5, .height = 6, .src_stride = 7, .dst_stride = 8 },
0569 
0570         { }
0571     }, *a, *b;
0572     enum i915_ggtt_view_type types[] = {
0573         I915_GGTT_VIEW_ROTATED,
0574         I915_GGTT_VIEW_REMAPPED,
0575         0,
0576     }, *t;
0577     const unsigned int max_pages = 64;
0578     int err = -ENOMEM;
0579 
0580     /* Create VMAs for many different combinations of planes and check
0581      * that the page layout within the rotated VMA matches our expectations.
0582      */
0583 
0584     obj = i915_gem_object_create_internal(vm->i915, max_pages * PAGE_SIZE);
0585     if (IS_ERR(obj))
0586         goto out;
0587 
0588     for (t = types; *t; t++) {
0589     for (a = planes; a->width; a++) {
0590         for (b = planes + ARRAY_SIZE(planes); b-- != planes; ) {
0591             struct i915_ggtt_view view = {
0592                 .type = *t,
0593                 .remapped.plane[0] = *a,
0594                 .remapped.plane[1] = *b,
0595             };
0596             struct intel_remapped_plane_info *plane_info = view.remapped.plane;
0597             unsigned int n, max_offset;
0598 
0599             max_offset = max(plane_info[0].src_stride * plane_info[0].height,
0600                      plane_info[1].src_stride * plane_info[1].height);
0601             GEM_BUG_ON(max_offset > max_pages);
0602             max_offset = max_pages - max_offset;
0603 
0604             if (!plane_info[0].dst_stride)
0605                 plane_info[0].dst_stride = view.type == I915_GGTT_VIEW_ROTATED ?
0606                                     plane_info[0].height :
0607                                     plane_info[0].width;
0608             if (!plane_info[1].dst_stride)
0609                 plane_info[1].dst_stride = view.type == I915_GGTT_VIEW_ROTATED ?
0610                                     plane_info[1].height :
0611                                     plane_info[1].width;
0612 
0613             for_each_prime_number_from(plane_info[0].offset, 0, max_offset) {
0614                 for_each_prime_number_from(plane_info[1].offset, 0, max_offset) {
0615                     struct scatterlist *sg;
0616                     struct i915_vma *vma;
0617                     unsigned int expected_pages;
0618 
0619                     vma = checked_vma_instance(obj, vm, &view);
0620                     if (IS_ERR(vma)) {
0621                         err = PTR_ERR(vma);
0622                         goto out_object;
0623                     }
0624 
0625                     err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
0626                     if (err) {
0627                         pr_err("Failed to pin VMA, err=%d\n", err);
0628                         goto out_object;
0629                     }
0630 
0631                     expected_pages = remapped_size(view.type, &plane_info[0], &plane_info[1]);
0632 
0633                     if (view.type == I915_GGTT_VIEW_ROTATED &&
0634                         vma->size != expected_pages * PAGE_SIZE) {
0635                         pr_err("VMA is wrong size, expected %lu, found %llu\n",
0636                                PAGE_SIZE * expected_pages, vma->size);
0637                         err = -EINVAL;
0638                         goto out_object;
0639                     }
0640 
0641                     if (view.type == I915_GGTT_VIEW_REMAPPED &&
0642                         vma->size > expected_pages * PAGE_SIZE) {
0643                         pr_err("VMA is wrong size, expected %lu, found %llu\n",
0644                                PAGE_SIZE * expected_pages, vma->size);
0645                         err = -EINVAL;
0646                         goto out_object;
0647                     }
0648 
0649                     if (vma->pages->nents > expected_pages) {
0650                         pr_err("sg table is wrong size, expected %u, found %u nents\n",
0651                                expected_pages, vma->pages->nents);
0652                         err = -EINVAL;
0653                         goto out_object;
0654                     }
0655 
0656                     if (vma->node.size < vma->size) {
0657                         pr_err("VMA binding too small, expected %llu, found %llu\n",
0658                                vma->size, vma->node.size);
0659                         err = -EINVAL;
0660                         goto out_object;
0661                     }
0662 
0663                     if (vma->pages == obj->mm.pages) {
0664                         pr_err("VMA using unrotated object pages!\n");
0665                         err = -EINVAL;
0666                         goto out_object;
0667                     }
0668 
0669                     sg = vma->pages->sgl;
0670                     for (n = 0; n < ARRAY_SIZE(view.rotated.plane); n++) {
0671                         if (view.type == I915_GGTT_VIEW_ROTATED)
0672                             sg = assert_rotated(obj, &view.rotated, n, sg);
0673                         else
0674                             sg = assert_remapped(obj, &view.remapped, n, sg);
0675                         if (IS_ERR(sg)) {
0676                             pr_err("Inconsistent %s VMA pages for plane %d: [(%d, %d, %d, %d, %d), (%d, %d, %d, %d, %d)]\n",
0677                                    view.type == I915_GGTT_VIEW_ROTATED ?
0678                                    "rotated" : "remapped", n,
0679                                    plane_info[0].width,
0680                                    plane_info[0].height,
0681                                    plane_info[0].src_stride,
0682                                    plane_info[0].dst_stride,
0683                                    plane_info[0].offset,
0684                                    plane_info[1].width,
0685                                    plane_info[1].height,
0686                                    plane_info[1].src_stride,
0687                                    plane_info[1].dst_stride,
0688                                    plane_info[1].offset);
0689                             err = -EINVAL;
0690                             goto out_object;
0691                         }
0692                     }
0693 
0694                     i915_vma_unpin(vma);
0695                     err = i915_vma_unbind_unlocked(vma);
0696                     if (err) {
0697                         pr_err("Unbinding returned %i\n", err);
0698                         goto out_object;
0699                     }
0700                     cond_resched();
0701                 }
0702             }
0703         }
0704     }
0705     }
0706 
0707 out_object:
0708     i915_gem_object_put(obj);
0709 out:
0710     return err;
0711 }
0712 
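     /*
      * Check that each dma address in the partial vma's sg list matches the
      * object page at the expected offset, and that the list does not run
      * past @size pages.
      */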
0713 static bool assert_partial(struct drm_i915_gem_object *obj,
0714                struct i915_vma *vma,
0715                unsigned long offset,
0716                unsigned long size)
0717 {
0718     struct sgt_iter sgt;
0719     dma_addr_t dma;
0720 
0721     for_each_sgt_daddr(dma, sgt, vma->pages) {
0722         dma_addr_t src;
0723 
0724         if (!size) {
0725             pr_err("Partial scattergather list too long\n");
0726             return false;
0727         }
0728 
0729         src = i915_gem_object_get_dma_address(obj, offset);
0730         if (src != dma) {
0731             pr_err("DMA mismatch for partial page offset %lu\n",
0732                    offset);
0733             return false;
0734         }
0735 
0736         offset++;
0737         size--;
0738     }
0739 
0740     return true;
0741 }
0742 
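     /*
      * Verify a pinned vma: its size, that its binding is large enough, and
      * that it uses remapped pages for a non-normal view (or the object's
      * own pages for the normal view).
      */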
0743 static bool assert_pin(struct i915_vma *vma,
0744                struct i915_ggtt_view *view,
0745                u64 size,
0746                const char *name)
0747 {
0748     bool ok = true;
0749 
0750     if (vma->size != size) {
0751         pr_err("(%s) VMA is wrong size, expected %llu, found %llu\n",
0752                name, size, vma->size);
0753         ok = false;
0754     }
0755 
0756     if (vma->node.size < vma->size) {
0757         pr_err("(%s) VMA binding too small, expected %llu, found %llu\n",
0758                name, vma->size, vma->node.size);
0759         ok = false;
0760     }
0761 
0762     if (view && view->type != I915_GGTT_VIEW_NORMAL) {
0763         if (memcmp(&vma->ggtt_view, view, sizeof(*view))) {
0764             pr_err("(%s) VMA mismatch upon creation!\n",
0765                    name);
0766             ok = false;
0767         }
0768 
0769         if (vma->pages == vma->obj->mm.pages) {
0770             pr_err("(%s) VMA using original object pages!\n",
0771                    name);
0772             ok = false;
0773         }
0774     } else {
0775         if (vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) {
0776             pr_err("Not the normal ggtt view! Found %d\n",
0777                    vma->ggtt_view.type);
0778             ok = false;
0779         }
0780 
0781         if (vma->pages != vma->obj->mm.pages) {
0782             pr_err("VMA not using object pages!\n");
0783             ok = false;
0784         }
0785     }
0786 
0787     return ok;
0788 }
0789 
0790 static int igt_vma_partial(void *arg)
0791 {
0792     struct i915_ggtt *ggtt = arg;
0793     struct i915_address_space *vm = &ggtt->vm;
0794     const unsigned int npages = 1021; /* prime! */
0795     struct drm_i915_gem_object *obj;
0796     const struct phase {
0797         const char *name;
0798     } phases[] = {
0799         { "create" },
0800         { "lookup" },
0801         { },
0802     }, *p;
0803     unsigned int sz, offset;
0804     struct i915_vma *vma;
0805     int err = -ENOMEM;
0806 
0807     /* Create lots of different VMAs for the object and check that
0808      * we are returned the same VMA when we later request the same range.
0809      */
0810 
0811     obj = i915_gem_object_create_internal(vm->i915, npages * PAGE_SIZE);
0812     if (IS_ERR(obj))
0813         goto out;
0814 
0815     for (p = phases; p->name; p++) { /* exercise both create/lookup */
0816         unsigned int count, nvma;
0817 
0818         nvma = 0;
0819         for_each_prime_number_from(sz, 1, npages) {
0820             for_each_prime_number_from(offset, 0, npages - sz) {
0821                 struct i915_ggtt_view view;
0822 
0823                 view.type = I915_GGTT_VIEW_PARTIAL;
0824                 view.partial.offset = offset;
0825                 view.partial.size = sz;
0826 
0827                 if (sz == npages)
0828                     view.type = I915_GGTT_VIEW_NORMAL;
0829 
0830                 vma = checked_vma_instance(obj, vm, &view);
0831                 if (IS_ERR(vma)) {
0832                     err = PTR_ERR(vma);
0833                     goto out_object;
0834                 }
0835 
0836                 err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
0837                 if (err)
0838                     goto out_object;
0839 
0840                 if (!assert_pin(vma, &view, sz*PAGE_SIZE, p->name)) {
0841                     pr_err("(%s) Inconsistent partial pinning for (offset=%d, size=%d)\n",
0842                            p->name, offset, sz);
0843                     err = -EINVAL;
0844                     goto out_object;
0845                 }
0846 
0847                 if (!assert_partial(obj, vma, offset, sz)) {
0848                     pr_err("(%s) Inconsistent partial pages for (offset=%d, size=%d)\n",
0849                            p->name, offset, sz);
0850                     err = -EINVAL;
0851                     goto out_object;
0852                 }
0853 
0854                 i915_vma_unpin(vma);
0855                 nvma++;
0856                 err = i915_vma_unbind_unlocked(vma);
0857                 if (err) {
0858                     pr_err("Unbinding returned %i\n", err);
0859                     goto out_object;
0860                 }
0861 
0862                 cond_resched();
0863             }
0864         }
0865 
0866         count = 0;
0867         list_for_each_entry(vma, &obj->vma.list, obj_link)
0868             count++;
0869         if (count != nvma) {
0870             pr_err("(%s) Not all partial vmas were recorded on the obj->vma.list: found %u, expected %u\n",
0871                    p->name, count, nvma);
0872             err = -EINVAL;
0873             goto out_object;
0874         }
0875 
0876         /* Check that we did create the whole object mapping */
0877         vma = checked_vma_instance(obj, vm, NULL);
0878         if (IS_ERR(vma)) {
0879             err = PTR_ERR(vma);
0880             goto out_object;
0881         }
0882 
0883         err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
0884         if (err)
0885             goto out_object;
0886 
0887         if (!assert_pin(vma, NULL, obj->base.size, p->name)) {
0888             pr_err("(%s) inconsistent full pin\n", p->name);
0889             err = -EINVAL;
0890             goto out_object;
0891         }
0892 
0893         i915_vma_unpin(vma);
0894 
0895         err = i915_vma_unbind_unlocked(vma);
0896         if (err) {
0897             pr_err("Unbinding returned %i\n", err);
0898             goto out_object;
0899         }
0900 
0901         count = 0;
0902         list_for_each_entry(vma, &obj->vma.list, obj_link)
0903             count++;
0904         if (count != nvma) {
0905             pr_err("(%s) allocated an extra full vma!\n", p->name);
0906             err = -EINVAL;
0907             goto out_object;
0908         }
0909     }
0910 
0911 out_object:
0912     i915_gem_object_put(obj);
0913 out:
0914     return err;
0915 }
0916 
0917 int i915_vma_mock_selftests(void)
0918 {
0919     static const struct i915_subtest tests[] = {
0920         SUBTEST(igt_vma_create),
0921         SUBTEST(igt_vma_pin1),
0922         SUBTEST(igt_vma_rotate_remap),
0923         SUBTEST(igt_vma_partial),
0924     };
0925     struct drm_i915_private *i915;
0926     struct intel_gt *gt;
0927     int err;
0928 
0929     i915 = mock_gem_device();
0930     if (!i915)
0931         return -ENOMEM;
0932 
0933     /* allocate the ggtt */
0934     err = intel_gt_assign_ggtt(to_gt(i915));
0935     if (err)
0936         goto out_put;
0937 
0938     gt = to_gt(i915);
0939 
0940     mock_init_ggtt(gt);
0941 
0942     err = i915_subtests(tests, gt->ggtt);
0943 
0944     mock_device_flush(i915);
0945     i915_gem_drain_freed_objects(i915);
0946     mock_fini_ggtt(gt->ggtt);
0947 
0948 out_put:
0949     mock_destroy_device(i915);
0950     return err;
0951 }
0952 
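     /*
      * Write a per-page tag through a rotated/remapped GGTT mapping of the
      * object, then read it back through the normal GGTT mapping to verify
      * that each page landed where the view says it should.
      */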
0953 static int igt_vma_remapped_gtt(void *arg)
0954 {
0955     struct drm_i915_private *i915 = arg;
0956     const struct intel_remapped_plane_info planes[] = {
0957         { .width = 1, .height = 1, .src_stride = 1 },
0958         { .width = 2, .height = 2, .src_stride = 2 },
0959         { .width = 4, .height = 4, .src_stride = 4 },
0960         { .width = 8, .height = 8, .src_stride = 8 },
0961 
0962         { .width = 3, .height = 5, .src_stride = 3 },
0963         { .width = 3, .height = 5, .src_stride = 4 },
0964         { .width = 3, .height = 5, .src_stride = 5 },
0965 
0966         { .width = 5, .height = 3, .src_stride = 5 },
0967         { .width = 5, .height = 3, .src_stride = 7 },
0968         { .width = 5, .height = 3, .src_stride = 9 },
0969 
0970         { .width = 4, .height = 6, .src_stride = 6 },
0971         { .width = 6, .height = 4, .src_stride = 6 },
0972 
0973         { .width = 2, .height = 2, .src_stride = 2, .dst_stride = 2 },
0974         { .width = 3, .height = 3, .src_stride = 3, .dst_stride = 4 },
0975         { .width = 5, .height = 6, .src_stride = 7, .dst_stride = 8 },
0976 
0977         { }
0978     }, *p;
0979     enum i915_ggtt_view_type types[] = {
0980         I915_GGTT_VIEW_ROTATED,
0981         I915_GGTT_VIEW_REMAPPED,
0982         0,
0983     }, *t;
0984     struct drm_i915_gem_object *obj;
0985     intel_wakeref_t wakeref;
0986     int err = 0;
0987 
0988     if (!i915_ggtt_has_aperture(to_gt(i915)->ggtt))
0989         return 0;
0990 
0991     obj = i915_gem_object_create_internal(i915, 10 * 10 * PAGE_SIZE);
0992     if (IS_ERR(obj))
0993         return PTR_ERR(obj);
0994 
0995     wakeref = intel_runtime_pm_get(&i915->runtime_pm);
0996 
0997     for (t = types; *t; t++) {
0998         for (p = planes; p->width; p++) {
0999             struct i915_ggtt_view view = {
1000                 .type = *t,
1001                 .rotated.plane[0] = *p,
1002             };
1003             struct intel_remapped_plane_info *plane_info = view.rotated.plane;
1004             struct i915_vma *vma;
1005             u32 __iomem *map;
1006             unsigned int x, y;
1007 
1008             i915_gem_object_lock(obj, NULL);
1009             err = i915_gem_object_set_to_gtt_domain(obj, true);
1010             i915_gem_object_unlock(obj);
1011             if (err)
1012                 goto out;
1013 
1014             if (!plane_info[0].dst_stride)
1015                 plane_info[0].dst_stride = *t == I915_GGTT_VIEW_ROTATED ?
1016                                  p->height : p->width;
1017 
1018             vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, PIN_MAPPABLE);
1019             if (IS_ERR(vma)) {
1020                 err = PTR_ERR(vma);
1021                 goto out;
1022             }
1023 
1024             GEM_BUG_ON(vma->ggtt_view.type != *t);
1025 
1026             map = i915_vma_pin_iomap(vma);
1027             i915_vma_unpin(vma);
1028             if (IS_ERR(map)) {
1029                 err = PTR_ERR(map);
1030                 goto out;
1031             }
1032 
1033             for (y = 0 ; y < plane_info[0].height; y++) {
1034                 for (x = 0 ; x < plane_info[0].width; x++) {
1035                     unsigned int offset;
1036                     u32 val = y << 16 | x;
1037 
1038                     if (*t == I915_GGTT_VIEW_ROTATED)
1039                         offset = (x * plane_info[0].dst_stride + y) * PAGE_SIZE;
1040                     else
1041                         offset = (y * plane_info[0].dst_stride + x) * PAGE_SIZE;
1042 
1043                     iowrite32(val, &map[offset / sizeof(*map)]);
1044                 }
1045             }
1046 
1047             i915_vma_unpin_iomap(vma);
1048 
1049             vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, PIN_MAPPABLE);
1050             if (IS_ERR(vma)) {
1051                 err = PTR_ERR(vma);
1052                 goto out;
1053             }
1054 
1055             GEM_BUG_ON(vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL);
1056 
1057             map = i915_vma_pin_iomap(vma);
1058             i915_vma_unpin(vma);
1059             if (IS_ERR(map)) {
1060                 err = PTR_ERR(map);
1061                 goto out;
1062             }
1063 
1064             for (y = 0 ; y < plane_info[0].height; y++) {
1065                 for (x = 0 ; x < plane_info[0].width; x++) {
1066                     unsigned int offset, src_idx;
1067                     u32 exp = y << 16 | x;
1068                     u32 val;
1069 
1070                     if (*t == I915_GGTT_VIEW_ROTATED)
1071                         src_idx = rotated_index(&view.rotated, 0, x, y);
1072                     else
1073                         src_idx = remapped_index(&view.remapped, 0, x, y);
1074                     offset = src_idx * PAGE_SIZE;
1075 
1076                     val = ioread32(&map[offset / sizeof(*map)]);
1077                     if (val != exp) {
1078                         pr_err("%s VMA write test failed, expected 0x%x, found 0x%x\n",
1079                                *t == I915_GGTT_VIEW_ROTATED ? "Rotated" : "Remapped",
1080                                exp, val);
1081                         i915_vma_unpin_iomap(vma);
1082                         err = -EINVAL;
1083                         goto out;
1084                     }
1085                 }
1086             }
1087             i915_vma_unpin_iomap(vma);
1088 
1089             cond_resched();
1090         }
1091     }
1092 
1093 out:
1094     intel_runtime_pm_put(&i915->runtime_pm, wakeref);
1095     i915_gem_object_put(obj);
1096 
1097     return err;
1098 }
1099 
1100 int i915_vma_live_selftests(struct drm_i915_private *i915)
1101 {
1102     static const struct i915_subtest tests[] = {
1103         SUBTEST(igt_vma_remapped_gtt),
1104     };
1105 
1106     return i915_subtests(tests, i915);
1107 }