/*
 * Copyright 2013 Red Hat
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#ifndef VIRTGPU_DRM_H
#define VIRTGPU_DRM_H

#include "drm.h"

#if defined(__cplusplus)
extern "C" {
#endif

/* Please note that modifications to all structs defined here are
 * subject to backwards-compatibility constraints.
 *
 * Do not use pointers, use __u64 instead for 32 bit / 64 bit user/kernel
 * compatibility. Keep fields aligned to their size.
 */

#define DRM_VIRTGPU_MAP         0x01
#define DRM_VIRTGPU_EXECBUFFER  0x02
#define DRM_VIRTGPU_GETPARAM    0x03
#define DRM_VIRTGPU_RESOURCE_CREATE 0x04
#define DRM_VIRTGPU_RESOURCE_INFO     0x05
#define DRM_VIRTGPU_TRANSFER_FROM_HOST 0x06
#define DRM_VIRTGPU_TRANSFER_TO_HOST 0x07
#define DRM_VIRTGPU_WAIT     0x08
#define DRM_VIRTGPU_GET_CAPS  0x09
#define DRM_VIRTGPU_RESOURCE_CREATE_BLOB 0x0a
#define DRM_VIRTGPU_CONTEXT_INIT 0x0b

#define VIRTGPU_EXECBUF_FENCE_FD_IN	0x01
#define VIRTGPU_EXECBUF_FENCE_FD_OUT	0x02
#define VIRTGPU_EXECBUF_RING_IDX	0x04
#define VIRTGPU_EXECBUF_FLAGS  (\
		VIRTGPU_EXECBUF_FENCE_FD_IN |\
		VIRTGPU_EXECBUF_FENCE_FD_OUT |\
		VIRTGPU_EXECBUF_RING_IDX |\
		0)

struct drm_virtgpu_map {
	__u64 offset; /* use for mmap system call */
	__u32 handle;
	__u32 pad;
};
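
/*
 * Illustrative sketch, not part of this header (compiled out below): a
 * minimal way userspace might obtain the mmap offset for a GEM handle and
 * map it. The helper name and error handling are hypothetical.
 */
#if 0
#include <sys/ioctl.h>
#include <sys/mman.h>

static void *virtgpu_map_bo(int drm_fd, __u32 bo_handle, size_t size)
{
	struct drm_virtgpu_map req = { .handle = bo_handle };

	/* The kernel fills req.offset with the fake offset for this BO. */
	if (ioctl(drm_fd, DRM_IOCTL_VIRTGPU_MAP, &req))
		return MAP_FAILED;

	/* That offset is then passed to mmap() on the same DRM fd. */
	return mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED,
		    drm_fd, req.offset);
}
#endif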

struct drm_virtgpu_execbuffer {
	__u32 flags;
	__u32 size;
	__u64 command; /* void* */
	__u64 bo_handles;
	__u32 num_bo_handles;
	__s32 fence_fd; /* in/out fence fd (see VIRTGPU_EXECBUF_FENCE_FD_IN/OUT) */
	__u32 fence_ctx_idx;  /* which fence timeline to use */
	__u32 pad;
};
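
/*
 * Illustrative sketch, not part of this header (compiled out below):
 * submitting a command buffer and requesting an out-fence fd via
 * VIRTGPU_EXECBUF_FENCE_FD_OUT. The command contents and BO list are
 * assumptions of the example, not of the ABI.
 */
#if 0
#include <stdint.h>
#include <sys/ioctl.h>

static int virtgpu_submit(int drm_fd, void *cmds, __u32 cmd_size,
			  __u32 *bo_handles, __u32 num_bos)
{
	struct drm_virtgpu_execbuffer eb = {
		.flags = VIRTGPU_EXECBUF_FENCE_FD_OUT,
		.size = cmd_size,
		.command = (__u64)(uintptr_t)cmds,
		.bo_handles = (__u64)(uintptr_t)bo_handles,
		.num_bo_handles = num_bos,
	};

	if (ioctl(drm_fd, DRM_IOCTL_VIRTGPU_EXECBUFFER, &eb))
		return -1;

	/* eb.fence_fd now holds a fence fd signalled on completion. */
	return eb.fence_fd;
}
#endif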

#define VIRTGPU_PARAM_3D_FEATURES 1 /* do we have 3D features in the hw */
#define VIRTGPU_PARAM_CAPSET_QUERY_FIX 2 /* do we have the capset fix */
#define VIRTGPU_PARAM_RESOURCE_BLOB 3 /* DRM_VIRTGPU_RESOURCE_CREATE_BLOB */
#define VIRTGPU_PARAM_HOST_VISIBLE 4 /* Host blob resources are mappable */
#define VIRTGPU_PARAM_CROSS_DEVICE 5 /* Cross virtio-device resource sharing */
#define VIRTGPU_PARAM_CONTEXT_INIT 6 /* DRM_VIRTGPU_CONTEXT_INIT */
#define VIRTGPU_PARAM_SUPPORTED_CAPSET_IDs 7 /* Bitmask of supported capability set ids */
#define VIRTGPU_PARAM_CREATE_GUEST_HANDLE 8 /* Host OS handle can be created from guest memory. */
#define VIRTGPU_PARAM_RESOURCE_SYNC 9 /* Synchronization resources */
#define VIRTGPU_PARAM_GUEST_VRAM 10 /* All guest allocations happen via virtgpu dedicated heap. */

struct drm_virtgpu_getparam {
	__u64 param;
	__u64 value;
};
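
/*
 * Illustrative sketch, not part of this header (compiled out below):
 * probing a single parameter, here VIRTGPU_PARAM_3D_FEATURES. Note that
 * .value carries a user-space pointer the kernel writes the result
 * through, not the result itself. The helper name is hypothetical.
 */
#if 0
#include <stdint.h>
#include <sys/ioctl.h>

static int virtgpu_has_3d(int drm_fd)
{
	__u64 value = 0;
	struct drm_virtgpu_getparam gp = {
		.param = VIRTGPU_PARAM_3D_FEATURES,
		.value = (__u64)(uintptr_t)&value,
	};

	if (ioctl(drm_fd, DRM_IOCTL_VIRTGPU_GETPARAM, &gp))
		return 0;
	return value != 0;
}
#endif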

/* NO_BO flags? NO resource flag? */
/* resource flag for y_0_top */
struct drm_virtgpu_resource_create {
	__u32 target;
	__u32 format;
	__u32 bind;
	__u32 width;
	__u32 height;
	__u32 depth;
	__u32 array_size;
	__u32 last_level;
	__u32 nr_samples;
	__u32 flags;
	__u32 bo_handle; /* if this is set - recreate a new resource attached to this bo ? */
	__u32 res_handle;  /* returned by kernel */
	__u32 size;        /* validate transfer in the host */
	__u32 stride;      /* validate transfer in the host */
};
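
/*
 * Illustrative sketch, not part of this header (compiled out below):
 * creating a simple 2D resource. The target/format/bind values are
 * interpreted by the host renderer (virgl/Gallium conventions), so they
 * are taken as parameters here rather than hard-coded.
 */
#if 0
#include <sys/ioctl.h>

static int virtgpu_create_2d(int drm_fd, __u32 target, __u32 format,
			     __u32 bind, __u32 width, __u32 height,
			     __u32 *bo_handle)
{
	struct drm_virtgpu_resource_create rc = {
		.target = target,
		.format = format,
		.bind = bind,
		.width = width,
		.height = height,
		.depth = 1,
		.array_size = 1,
	};

	if (ioctl(drm_fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE, &rc))
		return -1;

	/* rc.bo_handle is the GEM handle; rc.res_handle the host resource id. */
	*bo_handle = rc.bo_handle;
	return 0;
}
#endif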

struct drm_virtgpu_resource_info {
	__u32 bo_handle;
	__u32 res_handle;
	__u32 size;
	__u32 blob_mem;
};

/* CHROMIUM */
struct drm_virtgpu_resource_info_cros {
	__u32 bo_handle;
	__u32 res_handle;
	__u32 size;

/* Return res_handle and size.  Return extended info (strides, num_planes,
 * etc.) until chromeos-5.4 and return blob_mem since chromeos-5.10.
 */
#define VIRTGPU_RESOURCE_INFO_TYPE_DEFAULT 0
/* Return res_handle, size, and extended info */
#define VIRTGPU_RESOURCE_INFO_TYPE_EXTENDED 1
	union {
		__u32 type; /* in, VIRTGPU_RESOURCE_INFO_TYPE_* */
		__u32 blob_mem;
		__u32 stride;
		__u32 strides[4]; /* strides[0] is accessible with stride. */
	};
	__u32 num_planes;
	__u32 offsets[4];
	__u64 format_modifier;
};
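
/*
 * Illustrative sketch, not part of this header (compiled out below):
 * looking up the host resource id behind a GEM handle using the upstream
 * info ioctl. The helper name is hypothetical.
 */
#if 0
#include <sys/ioctl.h>

static int virtgpu_res_handle(int drm_fd, __u32 bo_handle, __u32 *res_handle)
{
	struct drm_virtgpu_resource_info info = { .bo_handle = bo_handle };

	if (ioctl(drm_fd, DRM_IOCTL_VIRTGPU_RESOURCE_INFO, &info))
		return -1;

	/* res_handle identifies the resource on the host side. */
	*res_handle = info.res_handle;
	return 0;
}
#endif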

struct drm_virtgpu_3d_box {
	__u32 x;
	__u32 y;
	__u32 z;
	__u32 w;
	__u32 h;
	__u32 d;
};

struct drm_virtgpu_3d_transfer_to_host {
	__u32 bo_handle;
	struct drm_virtgpu_3d_box box;
	__u32 level;
	__u32 offset;
	__u32 stride;
	__u32 layer_stride;
};

struct drm_virtgpu_3d_transfer_from_host {
	__u32 bo_handle;
	struct drm_virtgpu_3d_box box;
	__u32 level;
	__u32 offset;
	__u32 stride;
	__u32 layer_stride;
};
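
/*
 * Illustrative sketch, not part of this header (compiled out below):
 * flushing a 2D region of a guest-backed resource to its host copy. The
 * box describes the region within the resource; the helper name is
 * hypothetical.
 */
#if 0
#include <sys/ioctl.h>

static int virtgpu_flush_region(int drm_fd, __u32 bo_handle,
				__u32 width, __u32 height)
{
	struct drm_virtgpu_3d_transfer_to_host xfer = {
		.bo_handle = bo_handle,
		.box = { .x = 0, .y = 0, .z = 0,
			 .w = width, .h = height, .d = 1 },
	};

	return ioctl(drm_fd, DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST, &xfer);
}
#endif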

#define VIRTGPU_WAIT_NOWAIT 1 /* like it */
struct drm_virtgpu_3d_wait {
	__u32 handle; /* 0 is an invalid handle */
	__u32 flags;
};
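
/*
 * Illustrative sketch, not part of this header (compiled out below):
 * blocking until pending host operations on a BO have completed. Passing
 * VIRTGPU_WAIT_NOWAIT in flags checks for completion without blocking.
 */
#if 0
#include <sys/ioctl.h>

static int virtgpu_wait_bo(int drm_fd, __u32 bo_handle)
{
	struct drm_virtgpu_3d_wait wait = { .handle = bo_handle };

	return ioctl(drm_fd, DRM_IOCTL_VIRTGPU_WAIT, &wait);
}
#endif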

struct drm_virtgpu_get_caps {
	__u32 cap_set_id;
	__u32 cap_set_ver;
	__u64 addr;
	__u32 size;
	__u32 pad;
};
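
/*
 * Illustrative sketch, not part of this header (compiled out below):
 * fetching a capability set into a caller-provided buffer. Which capset
 * ids and versions exist depends on the host renderer; buffer sizing is
 * an assumption of the caller.
 */
#if 0
#include <stdint.h>
#include <sys/ioctl.h>

static int virtgpu_get_caps(int drm_fd, __u32 cap_set_id, __u32 cap_set_ver,
			    void *buf, __u32 buf_size)
{
	struct drm_virtgpu_get_caps caps = {
		.cap_set_id = cap_set_id,
		.cap_set_ver = cap_set_ver,
		.addr = (__u64)(uintptr_t)buf,
		.size = buf_size,
	};

	/* The kernel copies at most .size bytes of the capset into .addr. */
	return ioctl(drm_fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &caps);
}
#endif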

struct drm_virtgpu_resource_create_blob {
#define VIRTGPU_BLOB_MEM_GUEST             0x0001
#define VIRTGPU_BLOB_MEM_HOST3D            0x0002
#define VIRTGPU_BLOB_MEM_HOST3D_GUEST      0x0003

#define VIRTGPU_BLOB_FLAG_USE_MAPPABLE     0x0001
#define VIRTGPU_BLOB_FLAG_USE_SHAREABLE    0x0002
#define VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE 0x0004
#define VIRTGPU_BLOB_FLAG_CREATE_GUEST_HANDLE 0x0008
#define VIRTGPU_BLOB_FLAG_CREATE_GUEST_CONTIG 0x0010
	/* zero is invalid blob_mem */
	__u32 blob_mem;
	__u32 blob_flags;
	__u32 bo_handle;
	__u32 res_handle;
	__u64 size;

	/*
	 * for 3D contexts with VIRTGPU_BLOB_MEM_HOST3D_GUEST and
	 * VIRTGPU_BLOB_MEM_HOST3D; otherwise, must be zero.
	 */
	__u32 pad;
	__u32 cmd_size;
	__u64 cmd;
	__u64 blob_id;
};
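
/*
 * Illustrative sketch, not part of this header (compiled out below):
 * creating a mappable guest-memory blob resource. Availability should be
 * probed first via VIRTGPU_PARAM_RESOURCE_BLOB; the helper name is
 * hypothetical.
 */
#if 0
#include <sys/ioctl.h>

static int virtgpu_create_guest_blob(int drm_fd, __u64 size, __u32 *bo_handle)
{
	struct drm_virtgpu_resource_create_blob blob = {
		.blob_mem = VIRTGPU_BLOB_MEM_GUEST,
		.blob_flags = VIRTGPU_BLOB_FLAG_USE_MAPPABLE,
		.size = size,
	};

	if (ioctl(drm_fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &blob))
		return -1;

	/* The returned GEM handle can be mapped via DRM_IOCTL_VIRTGPU_MAP. */
	*bo_handle = blob.bo_handle;
	return 0;
}
#endif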

#define VIRTGPU_CONTEXT_PARAM_CAPSET_ID       0x0001
#define VIRTGPU_CONTEXT_PARAM_NUM_RINGS       0x0002
#define VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK 0x0003
struct drm_virtgpu_context_set_param {
	__u64 param;
	__u64 value;
};

struct drm_virtgpu_context_init {
	__u32 num_params;
	__u32 pad;

	/* pointer to drm_virtgpu_context_set_param array */
	__u64 ctx_set_params;
};
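
/*
 * Illustrative sketch, not part of this header (compiled out below):
 * initializing a context bound to a particular capability set before any
 * execbuffer is submitted. The capset id value is an input assumption.
 */
#if 0
#include <stdint.h>
#include <sys/ioctl.h>

static int virtgpu_context_init(int drm_fd, __u64 capset_id)
{
	struct drm_virtgpu_context_set_param params[] = {
		{ .param = VIRTGPU_CONTEXT_PARAM_CAPSET_ID, .value = capset_id },
	};
	struct drm_virtgpu_context_init init = {
		.num_params = sizeof(params) / sizeof(params[0]),
		.ctx_set_params = (__u64)(uintptr_t)params,
	};

	return ioctl(drm_fd, DRM_IOCTL_VIRTGPU_CONTEXT_INIT, &init);
}
#endif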

#define DRM_IOCTL_VIRTGPU_MAP \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_MAP, struct drm_virtgpu_map)

#define DRM_IOCTL_VIRTGPU_EXECBUFFER \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_EXECBUFFER,\
		struct drm_virtgpu_execbuffer)

#define DRM_IOCTL_VIRTGPU_GETPARAM \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GETPARAM,\
		struct drm_virtgpu_getparam)

#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE			\
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE,	\
		struct drm_virtgpu_resource_create)

#define DRM_IOCTL_VIRTGPU_RESOURCE_INFO \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_INFO, \
		 struct drm_virtgpu_resource_info)

/* same ioctl number as DRM_IOCTL_VIRTGPU_RESOURCE_INFO */
#define DRM_IOCTL_VIRTGPU_RESOURCE_INFO_CROS \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_INFO, \
		 struct drm_virtgpu_resource_info_cros)

#define DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_FROM_HOST,	\
		struct drm_virtgpu_3d_transfer_from_host)

#define DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_TO_HOST,	\
		struct drm_virtgpu_3d_transfer_to_host)

#define DRM_IOCTL_VIRTGPU_WAIT				\
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_WAIT,	\
		struct drm_virtgpu_3d_wait)

#define DRM_IOCTL_VIRTGPU_GET_CAPS \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GET_CAPS, \
	struct drm_virtgpu_get_caps)

#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB				\
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE_BLOB,	\
		struct drm_virtgpu_resource_create_blob)

#define DRM_IOCTL_VIRTGPU_CONTEXT_INIT					\
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_CONTEXT_INIT,		\
		struct drm_virtgpu_context_init)

#if defined(__cplusplus)
}
#endif

#endif