/*
 * Copyright 2013 Red Hat
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#ifndef VIRTGPU_DRM_H
#define VIRTGPU_DRM_H

#include "drm.h"

#if defined(__cplusplus)
extern "C" {
#endif

/* Please note that modifications to all structs defined here are
 * subject to backwards-compatibility constraints.
 *
 * Do not use pointers; use __u64 instead for 32 bit / 64 bit user/kernel
 * compatibility. Keep fields aligned to their size.
 */

#define DRM_VIRTGPU_MAP         0x01
#define DRM_VIRTGPU_EXECBUFFER  0x02
#define DRM_VIRTGPU_GETPARAM    0x03
#define DRM_VIRTGPU_RESOURCE_CREATE 0x04
#define DRM_VIRTGPU_RESOURCE_INFO     0x05
#define DRM_VIRTGPU_TRANSFER_FROM_HOST 0x06
#define DRM_VIRTGPU_TRANSFER_TO_HOST 0x07
#define DRM_VIRTGPU_WAIT     0x08
#define DRM_VIRTGPU_GET_CAPS  0x09
#define DRM_VIRTGPU_RESOURCE_CREATE_BLOB 0x0a
#define DRM_VIRTGPU_CONTEXT_INIT 0x0b

#define VIRTGPU_EXECBUF_FENCE_FD_IN 0x01
#define VIRTGPU_EXECBUF_FENCE_FD_OUT    0x02
#define VIRTGPU_EXECBUF_RING_IDX    0x04
#define VIRTGPU_EXECBUF_FLAGS  (\
        VIRTGPU_EXECBUF_FENCE_FD_IN |\
        VIRTGPU_EXECBUF_FENCE_FD_OUT |\
        VIRTGPU_EXECBUF_RING_IDX |\
        0)

struct drm_virtgpu_map {
    __u64 offset; /* use for mmap system call */
    __u32 handle;
    __u32 pad;
};

struct drm_virtgpu_execbuffer {
    __u32 flags;
    __u32 size;
    __u64 command; /* void* */
    __u64 bo_handles;
    __u32 num_bo_handles;
    __s32 fence_fd; /* in/out fence fd (see VIRTGPU_EXECBUF_FENCE_FD_IN/OUT) */
    __u32 ring_idx; /* command ring index (see VIRTGPU_EXECBUF_RING_IDX) */
    __u32 pad;
};

#define VIRTGPU_PARAM_3D_FEATURES 1 /* do we have 3D features in the hw */
#define VIRTGPU_PARAM_CAPSET_QUERY_FIX 2 /* do we have the capset fix */
#define VIRTGPU_PARAM_RESOURCE_BLOB 3 /* DRM_VIRTGPU_RESOURCE_CREATE_BLOB */
#define VIRTGPU_PARAM_HOST_VISIBLE 4 /* Host blob resources are mappable */
#define VIRTGPU_PARAM_CROSS_DEVICE 5 /* Cross virtio-device resource sharing */
#define VIRTGPU_PARAM_CONTEXT_INIT 6 /* DRM_VIRTGPU_CONTEXT_INIT */
#define VIRTGPU_PARAM_SUPPORTED_CAPSET_IDs 7 /* Bitmask of supported capability set ids */

struct drm_virtgpu_getparam {
    __u64 param;
    __u64 value;
};

/* NO_BO flags? NO resource flag? */
/* resource flag for y_0_top */
struct drm_virtgpu_resource_create {
    __u32 target;
    __u32 format;
    __u32 bind;
    __u32 width;
    __u32 height;
    __u32 depth;
    __u32 array_size;
    __u32 last_level;
    __u32 nr_samples;
    __u32 flags;
    __u32 bo_handle; /* if this is set - recreate a new resource attached to this bo ? */
    __u32 res_handle;  /* returned by kernel */
    __u32 size;        /* validate transfer in the host */
    __u32 stride;      /* validate transfer in the host */
};

struct drm_virtgpu_resource_info {
    __u32 bo_handle;
    __u32 res_handle;
    __u32 size;
    __u32 blob_mem;
};

struct drm_virtgpu_3d_box {
    __u32 x;
    __u32 y;
    __u32 z;
    __u32 w;
    __u32 h;
    __u32 d;
};

struct drm_virtgpu_3d_transfer_to_host {
    __u32 bo_handle;
    struct drm_virtgpu_3d_box box;
    __u32 level;
    __u32 offset;
    __u32 stride;
    __u32 layer_stride;
};

struct drm_virtgpu_3d_transfer_from_host {
    __u32 bo_handle;
    struct drm_virtgpu_3d_box box;
    __u32 level;
    __u32 offset;
    __u32 stride;
    __u32 layer_stride;
};

#define VIRTGPU_WAIT_NOWAIT 1 /* like it */
struct drm_virtgpu_3d_wait {
    __u32 handle; /* 0 is an invalid handle */
    __u32 flags;
};

struct drm_virtgpu_get_caps {
    __u32 cap_set_id;
    __u32 cap_set_ver;
    __u64 addr;
    __u32 size;
    __u32 pad;
};

struct drm_virtgpu_resource_create_blob {
#define VIRTGPU_BLOB_MEM_GUEST             0x0001
#define VIRTGPU_BLOB_MEM_HOST3D            0x0002
#define VIRTGPU_BLOB_MEM_HOST3D_GUEST      0x0003

#define VIRTGPU_BLOB_FLAG_USE_MAPPABLE     0x0001
#define VIRTGPU_BLOB_FLAG_USE_SHAREABLE    0x0002
#define VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE 0x0004
    /* zero is invalid blob_mem */
    __u32 blob_mem;
    __u32 blob_flags;
    __u32 bo_handle;
    __u32 res_handle;
    __u64 size;

    /*
     * For 3D contexts with VIRTGPU_BLOB_MEM_HOST3D_GUEST and
     * VIRTGPU_BLOB_MEM_HOST3D; otherwise, must be zero.
     */
    __u32 pad;
    __u32 cmd_size;
    __u64 cmd;
    __u64 blob_id;
};

#define VIRTGPU_CONTEXT_PARAM_CAPSET_ID       0x0001
#define VIRTGPU_CONTEXT_PARAM_NUM_RINGS       0x0002
#define VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK 0x0003
struct drm_virtgpu_context_set_param {
    __u64 param;
    __u64 value;
};

struct drm_virtgpu_context_init {
    __u32 num_params;
    __u32 pad;

    /* pointer to drm_virtgpu_context_set_param array */
    __u64 ctx_set_params;
};

/*
 * Event code that's given when VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK is in
 * effect.  The event size is sizeof(drm_event), since there is no additional
 * payload.
 */
#define VIRTGPU_EVENT_FENCE_SIGNALED 0x90000000
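
/*
 * Usage sketch (illustrative, not part of the UAPI header): draining fence
 * events from the DRM fd once VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK is in
 * effect. The helper name is hypothetical; struct drm_event comes from
 * drm.h, and the includes below also serve the sketches further down.
 */
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

static int virtgpu_read_fence_event(int fd)
{
    struct drm_event ev; /* this event carries no extra payload */

    if (read(fd, &ev, sizeof(ev)) < (ssize_t)sizeof(ev))
        return -1;
    return ev.type == VIRTGPU_EVENT_FENCE_SIGNALED;
}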

#define DRM_IOCTL_VIRTGPU_MAP \
    DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_MAP, struct drm_virtgpu_map)
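
/*
 * Usage sketch (illustrative, not part of the UAPI header): mapping a
 * virtio-gpu buffer object. The kernel fills in a fake mmap offset, which is
 * then passed to mmap(2) on the same DRM fd. The helper name and its
 * arguments (an open DRM fd, a GEM handle, the object size) are assumptions.
 */
static void *virtgpu_map_bo(int fd, __u32 handle, __u64 size)
{
    struct drm_virtgpu_map req = { .handle = handle };

    if (ioctl(fd, DRM_IOCTL_VIRTGPU_MAP, &req) != 0)
        return MAP_FAILED;
    /* req.offset now holds the offset to use with mmap() */
    return mmap(NULL, (size_t)size, PROT_READ | PROT_WRITE, MAP_SHARED,
                fd, req.offset);
}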

#define DRM_IOCTL_VIRTGPU_EXECBUFFER \
    DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_EXECBUFFER,\
        struct drm_virtgpu_execbuffer)
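
/*
 * Usage sketch (illustrative, not part of the UAPI header): submitting a
 * command buffer and requesting an out-fence via VIRTGPU_EXECBUF_FENCE_FD_OUT,
 * in which case the kernel hands back a fence fd in fence_fd. The helper name
 * and arguments are assumptions.
 */
static int virtgpu_submit(int fd, const void *cmds, __u32 cmds_size,
                          const __u32 *bos, __u32 num_bos, int *out_fence)
{
    struct drm_virtgpu_execbuffer eb = {
        .flags = VIRTGPU_EXECBUF_FENCE_FD_OUT,
        .size = cmds_size,
        .command = (__u64)(unsigned long)cmds,
        .bo_handles = (__u64)(unsigned long)bos,
        .num_bo_handles = num_bos,
        .fence_fd = -1,
    };

    if (ioctl(fd, DRM_IOCTL_VIRTGPU_EXECBUFFER, &eb) != 0)
        return -1;
    *out_fence = eb.fence_fd; /* caller close()s the fence when done */
    return 0;
}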

#define DRM_IOCTL_VIRTGPU_GETPARAM \
    DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GETPARAM,\
        struct drm_virtgpu_getparam)
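
/*
 * Usage sketch (illustrative, not part of the UAPI header): querying a driver
 * parameter, e.g. virtgpu_get_param(fd, VIRTGPU_PARAM_3D_FEATURES, &has_3d).
 * In practice the value field carries a user-space pointer that the kernel
 * writes the result through. The helper name is hypothetical.
 */
static int virtgpu_get_param(int fd, __u64 param, __u64 *value)
{
    struct drm_virtgpu_getparam gp = {
        .param = param,
        .value = (__u64)(unsigned long)value,
    };

    *value = 0; /* defensive: the kernel may write back a narrower value */
    return ioctl(fd, DRM_IOCTL_VIRTGPU_GETPARAM, &gp);
}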

#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE           \
    DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE,    \
        struct drm_virtgpu_resource_create)
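
/*
 * Usage sketch (illustrative, not part of the UAPI header): creating a simple
 * 2D resource. The target/format/bind enums belong to the virgl protocol, not
 * to this header, so they are left to the caller; the helper name and the
 * field choices (depth/array_size of 1, single mip level) are assumptions.
 */
static int virtgpu_create_2d(int fd, __u32 target, __u32 format, __u32 bind,
                             __u32 width, __u32 height, __u32 size,
                             __u32 stride, __u32 *bo, __u32 *res)
{
    struct drm_virtgpu_resource_create rc = {
        .target = target,
        .format = format,
        .bind = bind,
        .width = width,
        .height = height,
        .depth = 1,
        .array_size = 1,
        .size = size,
        .stride = stride,
    };

    if (ioctl(fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE, &rc) != 0)
        return -1;
    *bo = rc.bo_handle;   /* GEM handle for mapping / execbuffer */
    *res = rc.res_handle; /* host-side resource id */
    return 0;
}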

#define DRM_IOCTL_VIRTGPU_RESOURCE_INFO \
    DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_INFO, \
        struct drm_virtgpu_resource_info)

#define DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST \
    DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_FROM_HOST, \
        struct drm_virtgpu_3d_transfer_from_host)

#define DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST \
    DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_TO_HOST,   \
        struct drm_virtgpu_3d_transfer_to_host)
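
/*
 * Usage sketch (illustrative, not part of the UAPI header): flushing a 2D
 * region of a guest-backed resource to the host after writing to it through
 * a mapping. The helper name is hypothetical, and leaving level, offset and
 * stride at zero (resource defaults) is an assumption.
 */
static int virtgpu_flush_region(int fd, __u32 bo_handle,
                                __u32 x, __u32 y, __u32 w, __u32 h)
{
    struct drm_virtgpu_3d_transfer_to_host xfer = {
        .bo_handle = bo_handle,
        .box = { .x = x, .y = y, .w = w, .h = h, .d = 1 },
    };

    return ioctl(fd, DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST, &xfer);
}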

#define DRM_IOCTL_VIRTGPU_WAIT              \
    DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_WAIT,   \
        struct drm_virtgpu_3d_wait)
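
/*
 * Usage sketch (illustrative, not part of the UAPI header): waiting for the
 * host to finish with a buffer object. With VIRTGPU_WAIT_NOWAIT the call
 * does not block and fails (EBUSY) while the BO is still busy. The helper
 * name is hypothetical.
 */
static int virtgpu_wait_bo(int fd, __u32 bo_handle, int nonblock)
{
    struct drm_virtgpu_3d_wait wait = {
        .handle = bo_handle, /* 0 is an invalid handle */
        .flags = nonblock ? VIRTGPU_WAIT_NOWAIT : 0,
    };

    return ioctl(fd, DRM_IOCTL_VIRTGPU_WAIT, &wait);
}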

#define DRM_IOCTL_VIRTGPU_GET_CAPS \
    DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GET_CAPS, \
    struct drm_virtgpu_get_caps)
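
/*
 * Usage sketch (illustrative, not part of the UAPI header): fetching a
 * capability set into a caller-provided buffer. The caller is assumed to
 * know (or over-estimate) the capset size for the given id/version; the
 * helper name is hypothetical.
 */
static int virtgpu_get_caps(int fd, __u32 cap_set_id, __u32 cap_set_ver,
                            void *buf, __u32 buf_size)
{
    struct drm_virtgpu_get_caps caps = {
        .cap_set_id = cap_set_id,
        .cap_set_ver = cap_set_ver,
        .addr = (__u64)(unsigned long)buf,
        .size = buf_size,
    };

    return ioctl(fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &caps);
}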

#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB              \
    DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE_BLOB,   \
        struct drm_virtgpu_resource_create_blob)
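
/*
 * Usage sketch (illustrative, not part of the UAPI header): creating a
 * guest-memory blob that can be mapped and shared. For VIRTGPU_BLOB_MEM_GUEST
 * the cmd/cmd_size/blob_id fields stay zero, per the comment in the struct
 * above. The helper name is hypothetical.
 */
static int virtgpu_create_guest_blob(int fd, __u64 size, __u32 *bo_handle)
{
    struct drm_virtgpu_resource_create_blob blob = {
        .blob_mem = VIRTGPU_BLOB_MEM_GUEST,
        .blob_flags = VIRTGPU_BLOB_FLAG_USE_MAPPABLE |
                      VIRTGPU_BLOB_FLAG_USE_SHAREABLE,
        .size = size,
    };

    if (ioctl(fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &blob) != 0)
        return -1;
    *bo_handle = blob.bo_handle;
    return 0;
}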

#define DRM_IOCTL_VIRTGPU_CONTEXT_INIT                  \
    DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_CONTEXT_INIT,       \
        struct drm_virtgpu_context_init)
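
/*
 * Usage sketch (illustrative, not part of the UAPI header): initializing a
 * context by selecting a capability set and a number of command rings. The
 * helper name and this particular parameter combination are assumptions.
 */
static int virtgpu_init_context(int fd, __u64 capset_id, __u64 num_rings)
{
    struct drm_virtgpu_context_set_param params[] = {
        { .param = VIRTGPU_CONTEXT_PARAM_CAPSET_ID, .value = capset_id },
        { .param = VIRTGPU_CONTEXT_PARAM_NUM_RINGS, .value = num_rings },
    };
    struct drm_virtgpu_context_init init = {
        .num_params = 2,
        .ctx_set_params = (__u64)(unsigned long)params,
    };

    return ioctl(fd, DRM_IOCTL_VIRTGPU_CONTEXT_INIT, &init);
}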

#if defined(__cplusplus)
}
#endif

#endif