// Copyright 2019 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#![allow(dead_code)]
#![allow(non_camel_case_types)]

use std::cmp::min;
use std::convert::From;
use std::fmt;
use std::fmt::Display;
use std::io;
use std::io::Write;
use std::marker::PhantomData;
use std::mem::size_of;
use std::mem::size_of_val;
use std::str::from_utf8;

use base::Error as BaseError;
use base::TubeError;
use data_model::Le32;
use data_model::Le64;
use gpu_display::GpuDisplayError;
use remain::sorted;
use rutabaga_gfx::RutabagaError;
use thiserror::Error;
use vm_memory::udmabuf::UdmabufError;
use zerocopy::FromBytes;
use zerocopy::Immutable;
use zerocopy::IntoBytes;
use zerocopy::KnownLayout;

pub use super::super::device_constants::gpu::virtio_gpu_config;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_CONTEXT_INIT;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_CREATE_GUEST_HANDLE;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_EDID;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_FENCE_PASSING;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_RESOURCE_BLOB;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_RESOURCE_UUID;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_VIRGL;
use super::edid::EdidBytes;
use super::Reader;
use super::Writer;

pub const VIRTIO_GPU_UNDEFINED: u32 = 0x0;

/* 2d commands */
pub const VIRTIO_GPU_CMD_GET_DISPLAY_INFO: u32 = 0x100;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_2D: u32 = 0x101;
pub const VIRTIO_GPU_CMD_RESOURCE_UNREF: u32 = 0x102;
pub const VIRTIO_GPU_CMD_SET_SCANOUT: u32 = 0x103;
pub const VIRTIO_GPU_CMD_RESOURCE_FLUSH: u32 = 0x104;
pub const VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D: u32 = 0x105;
pub const VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING: u32 = 0x106;
pub const VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING: u32 = 0x107;
pub const VIRTIO_GPU_CMD_GET_CAPSET_INFO: u32 = 0x108;
pub const VIRTIO_GPU_CMD_GET_CAPSET: u32 = 0x109;
pub const VIRTIO_GPU_CMD_GET_EDID: u32 = 0x10a;
pub const VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID: u32 = 0x10b;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB: u32 = 0x10c;
pub const VIRTIO_GPU_CMD_SET_SCANOUT_BLOB: u32 = 0x10d;

/* 3d commands */
pub const VIRTIO_GPU_CMD_CTX_CREATE: u32 = 0x200;
pub const VIRTIO_GPU_CMD_CTX_DESTROY: u32 = 0x201;
pub const VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE: u32 = 0x202;
pub const VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE: u32 = 0x203;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_3D: u32 = 0x204;
pub const VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D: u32 = 0x205;
pub const VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D: u32 = 0x206;
pub const VIRTIO_GPU_CMD_SUBMIT_3D: u32 = 0x207;
pub const VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB: u32 = 0x208;
pub const VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB: u32 = 0x209;

/* cursor commands */
pub const VIRTIO_GPU_CMD_UPDATE_CURSOR: u32 = 0x300;
pub const VIRTIO_GPU_CMD_MOVE_CURSOR: u32 = 0x301;

/* success responses */
pub const VIRTIO_GPU_RESP_OK_NODATA: u32 = 0x1100;
pub const VIRTIO_GPU_RESP_OK_DISPLAY_INFO: u32 = 0x1101;
pub const VIRTIO_GPU_RESP_OK_CAPSET_INFO: u32 = 0x1102;
pub const VIRTIO_GPU_RESP_OK_CAPSET: u32 = 0x1103;
pub const VIRTIO_GPU_RESP_OK_EDID: u32 = 0x1104;
pub const VIRTIO_GPU_RESP_OK_RESOURCE_UUID: u32 = 0x1105;
pub const VIRTIO_GPU_RESP_OK_MAP_INFO: u32 = 0x1106;

/* CHROMIUM(b/277982577): success responses */
pub const VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO: u32 = 0x11FF;

/* error responses */
pub const VIRTIO_GPU_RESP_ERR_UNSPEC: u32 = 0x1200;
pub const VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY: u32 = 0x1201;
pub const VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID: u32 = 0x1202;
pub const VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID: u32 = 0x1203;
pub const VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID: u32 = 0x1204;
pub const VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER: u32 = 0x1205;
pub const VIRTIO_GPU_BLOB_MEM_GUEST: u32 = 0x0001;
pub const VIRTIO_GPU_BLOB_MEM_HOST3D: u32 = 0x0002;
pub const VIRTIO_GPU_BLOB_MEM_HOST3D_GUEST: u32 = 0x0003;

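/* usage flags for blob resources (virtio_gpu_resource_create_blob::blob_flags) */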
pub const VIRTIO_GPU_BLOB_FLAG_USE_MAPPABLE: u32 = 0x0001;
pub const VIRTIO_GPU_BLOB_FLAG_USE_SHAREABLE: u32 = 0x0002;
pub const VIRTIO_GPU_BLOB_FLAG_USE_CROSS_DEVICE: u32 = 0x0004;
/* Create an OS-specific handle from guest memory (not upstreamed). */
pub const VIRTIO_GPU_BLOB_FLAG_CREATE_GUEST_HANDLE: u32 = 0x0008;

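/* shared memory region ids; HOST_VISIBLE identifies the region used for blob mappings */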
pub const VIRTIO_GPU_SHM_ID_NONE: u8 = 0x0000;
pub const VIRTIO_GPU_SHM_ID_HOST_VISIBLE: u8 = 0x0001;

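/// Returns the symbolic `VIRTIO_GPU_*` name for a command or response `type_` value, or
/// `"UNKNOWN"` if the value is not recognized.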
pub fn virtio_gpu_cmd_str(cmd: u32) -> &'static str {
    match cmd {
        VIRTIO_GPU_CMD_GET_DISPLAY_INFO => "VIRTIO_GPU_CMD_GET_DISPLAY_INFO",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_2D => "VIRTIO_GPU_CMD_RESOURCE_CREATE_2D",
        VIRTIO_GPU_CMD_RESOURCE_UNREF => "VIRTIO_GPU_CMD_RESOURCE_UNREF",
        VIRTIO_GPU_CMD_SET_SCANOUT => "VIRTIO_GPU_CMD_SET_SCANOUT",
        VIRTIO_GPU_CMD_SET_SCANOUT_BLOB => "VIRTIO_GPU_CMD_SET_SCANOUT_BLOB",
        VIRTIO_GPU_CMD_RESOURCE_FLUSH => "VIRTIO_GPU_CMD_RESOURCE_FLUSH",
        VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D => "VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D",
        VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING => "VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING",
        VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING => "VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING",
        VIRTIO_GPU_CMD_GET_CAPSET_INFO => "VIRTIO_GPU_CMD_GET_CAPSET_INFO",
        VIRTIO_GPU_CMD_GET_CAPSET => "VIRTIO_GPU_CMD_GET_CAPSET",
        VIRTIO_GPU_CMD_GET_EDID => "VIRTIO_GPU_CMD_GET_EDID",
        VIRTIO_GPU_CMD_CTX_CREATE => "VIRTIO_GPU_CMD_CTX_CREATE",
        VIRTIO_GPU_CMD_CTX_DESTROY => "VIRTIO_GPU_CMD_CTX_DESTROY",
        VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE => "VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE",
        VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE => "VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE",
        VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID => "VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB => "VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_3D => "VIRTIO_GPU_CMD_RESOURCE_CREATE_3D",
        VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D => "VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D",
        VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D => "VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D",
        VIRTIO_GPU_CMD_SUBMIT_3D => "VIRTIO_GPU_CMD_SUBMIT_3D",
        VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB => "VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB",
        VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB => "VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB",
        VIRTIO_GPU_CMD_UPDATE_CURSOR => "VIRTIO_GPU_CMD_UPDATE_CURSOR",
        VIRTIO_GPU_CMD_MOVE_CURSOR => "VIRTIO_GPU_CMD_MOVE_CURSOR",
        VIRTIO_GPU_RESP_OK_NODATA => "VIRTIO_GPU_RESP_OK_NODATA",
        VIRTIO_GPU_RESP_OK_DISPLAY_INFO => "VIRTIO_GPU_RESP_OK_DISPLAY_INFO",
        VIRTIO_GPU_RESP_OK_CAPSET_INFO => "VIRTIO_GPU_RESP_OK_CAPSET_INFO",
        VIRTIO_GPU_RESP_OK_CAPSET => "VIRTIO_GPU_RESP_OK_CAPSET",
        VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO => "VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO",
        VIRTIO_GPU_RESP_OK_RESOURCE_UUID => "VIRTIO_GPU_RESP_OK_RESOURCE_UUID",
        VIRTIO_GPU_RESP_OK_MAP_INFO => "VIRTIO_GPU_RESP_OK_MAP_INFO",
        VIRTIO_GPU_RESP_ERR_UNSPEC => "VIRTIO_GPU_RESP_ERR_UNSPEC",
        VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY => "VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY",
        VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID => "VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID => "VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID => "VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER => "VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER",
        _ => "UNKNOWN",
    }
}

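/* flags for virtio_gpu_ctrl_hdr::flags */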
pub const VIRTIO_GPU_FLAG_FENCE: u32 = 1 << 0;
pub const VIRTIO_GPU_FLAG_INFO_RING_IDX: u32 = 1 << 1;
pub const VIRTIO_GPU_FLAG_FENCE_HOST_SHAREABLE: u32 = 1 << 2;

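/// Header common to every request and response on the control and cursor queues.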
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_ctrl_hdr {
    pub type_: Le32,
    pub flags: Le32,
    pub fence_id: Le64,
    pub ctx_id: Le32,
    pub ring_idx: u8,
    pub padding: [u8; 3],
}

/* data passed in the cursor vq */

#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_cursor_pos {
    pub scanout_id: Le32,
    pub x: Le32,
    pub y: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_UPDATE_CURSOR, VIRTIO_GPU_CMD_MOVE_CURSOR */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_update_cursor {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub pos: virtio_gpu_cursor_pos, /* update & move */
    pub resource_id: Le32,          /* update only */
    pub hot_x: Le32,                /* update only */
    pub hot_y: Le32,                /* update only */
    pub padding: Le32,
}

/* data passed in the control vq, 2d related */

#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_rect {
    pub x: Le32,
    pub y: Le32,
    pub width: Le32,
    pub height: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_UNREF */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resource_unref {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_CREATE_2D: create a 2d resource with a format */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resource_create_2d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub format: Le32,
    pub width: Le32,
    pub height: Le32,
}

/* VIRTIO_GPU_CMD_SET_SCANOUT */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_set_scanout {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub scanout_id: Le32,
    pub resource_id: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_FLUSH */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resource_flush {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub resource_id: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D: simple transfer to_host */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_transfer_to_host_2d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub offset: Le64,
    pub resource_id: Le32,
    pub padding: Le32,
}

#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_mem_entry {
    pub addr: Le64,
    pub length: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resource_attach_backing {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub nr_entries: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resource_detach_backing {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_display_one {
    pub r: virtio_gpu_rect,
    pub enabled: Le32,
    pub flags: Le32,
}

/* VIRTIO_GPU_RESP_OK_DISPLAY_INFO */
pub const VIRTIO_GPU_MAX_SCANOUTS: usize = 16;
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resp_display_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub pmodes: [virtio_gpu_display_one; VIRTIO_GPU_MAX_SCANOUTS],
}

/* data passed in the control vq, 3d related */

#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_box {
    pub x: Le32,
    pub y: Le32,
    pub z: Le32,
    pub w: Le32,
    pub h: Le32,
    pub d: Le32,
}

/* VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D, VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_transfer_host_3d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub box_: virtio_gpu_box,
    pub offset: Le64,
    pub resource_id: Le32,
    pub level: Le32,
    pub stride: Le32,
    pub layer_stride: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_CREATE_3D */
pub const VIRTIO_GPU_RESOURCE_FLAG_Y_0_TOP: u32 = 1 << 0;
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resource_create_3d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub target: Le32,
    pub format: Le32,
    pub bind: Le32,
    pub width: Le32,
    pub height: Le32,
    pub depth: Le32,
    pub array_size: Le32,
    pub last_level: Le32,
    pub nr_samples: Le32,
    pub flags: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_CTX_CREATE */
pub const VIRTIO_GPU_CONTEXT_INIT_CAPSET_ID_MASK: u32 = 1 << 0;
#[derive(Copy, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_ctx_create {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub nlen: Le32,
    pub context_init: Le32,
    pub debug_name: [u8; 64],
}

impl Default for virtio_gpu_ctx_create {
    fn default() -> Self {
        // SAFETY: trivially safe
        unsafe { ::std::mem::zeroed() }
    }
}

impl Clone for virtio_gpu_ctx_create {
    fn clone(&self) -> virtio_gpu_ctx_create {
        *self
    }
}

impl fmt::Debug for virtio_gpu_ctx_create {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let debug_name = from_utf8(&self.debug_name[..min(64, self.nlen.to_native() as usize)])
            .unwrap_or("<invalid>");
        f.debug_struct("virtio_gpu_ctx_create")
            .field("hdr", &self.hdr)
            .field("debug_name", &debug_name)
            .finish()
    }
}

/* VIRTIO_GPU_CMD_CTX_DESTROY */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_ctx_destroy {
    pub hdr: virtio_gpu_ctrl_hdr,
}

/* VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE, VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_ctx_resource {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_SUBMIT_3D */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_cmd_submit {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub size: Le32,

    // The in-fence IDs are prepended to the cmd_buf and memory layout
    // of the VIRTIO_GPU_CMD_SUBMIT_3D buffer looks like this:
    //   _________________
    //   | CMD_SUBMIT_3D |
    //   -----------------
    //   |    header     |
    //   | in-fence IDs  |
    //   |    cmd_buf    |
    //   -----------------
    //
    // This makes in-fence IDs naturally aligned to the sizeof(u64) inside
    // of the virtio buffer.
    pub num_in_fences: Le32,
}

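/* capability set ids reported via VIRTIO_GPU_CMD_GET_CAPSET_INFO */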
pub const VIRTIO_GPU_CAPSET_VIRGL: u32 = 1;
pub const VIRTIO_GPU_CAPSET_VIRGL2: u32 = 2;
pub const VIRTIO_GPU_CAPSET_GFXSTREAM: u32 = 3;
pub const VIRTIO_GPU_CAPSET_VENUS: u32 = 4;
pub const VIRTIO_GPU_CAPSET_CROSS_DOMAIN: u32 = 5;

/* VIRTIO_GPU_CMD_GET_CAPSET_INFO */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_get_capset_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_index: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_RESP_OK_CAPSET_INFO */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resp_capset_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_id: Le32,
    pub capset_max_version: Le32,
    pub capset_max_size: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_GET_CAPSET */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_get_capset {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_id: Le32,
    pub capset_version: Le32,
}

/* VIRTIO_GPU_RESP_OK_CAPSET */
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resp_capset {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_data: PhantomData<[u8]>,
}

/* VIRTIO_GPU_CMD_GET_EDID */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_get_edid {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub scanout: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_RESP_OK_EDID */
#[derive(Copy, Clone, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resp_get_edid {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub size: Le32,
    pub padding: Le32,
    pub edid: [u8; 1024],
}

/* VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resp_resource_plane_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub count: Le32,
    pub padding: Le32,
    pub format_modifier: Le64,
    pub strides: [Le32; 4],
    pub offsets: [Le32; 4],
}

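/// Maximum number of planes carried in a `virtio_gpu_resp_resource_plane_info` response.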
pub const PLANE_INFO_MAX_COUNT: usize = 4;

pub const VIRTIO_GPU_EVENT_DISPLAY: u32 = 1 << 0;

#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resource_create_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub blob_mem: Le32,
    pub blob_flags: Le32,
    pub nr_entries: Le32,
    pub blob_id: Le64,
    pub size: Le64,
}

#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resource_map_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
    pub offset: Le64,
}

#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resource_unmap_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resp_map_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub map_info: Le32,
    pub padding: u32,
}

#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resource_assign_uuid {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_resp_resource_uuid {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub uuid: [u8; 16],
}

/* VIRTIO_GPU_CMD_SET_SCANOUT_BLOB */
#[derive(Copy, Clone, Debug, Default, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct virtio_gpu_set_scanout_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub scanout_id: Le32,
    pub resource_id: Le32,
    pub width: Le32,
    pub height: Le32,
    pub format: Le32,
    pub padding: Le32,
    pub strides: [Le32; 4],
    pub offsets: [Le32; 4],
}

/* simple formats for fbcon/X use */
pub const VIRTIO_GPU_FORMAT_B8G8R8A8_UNORM: u32 = 1;
pub const VIRTIO_GPU_FORMAT_B8G8R8X8_UNORM: u32 = 2;
pub const VIRTIO_GPU_FORMAT_A8R8G8B8_UNORM: u32 = 3;
pub const VIRTIO_GPU_FORMAT_X8R8G8B8_UNORM: u32 = 4;
pub const VIRTIO_GPU_FORMAT_R8G8B8A8_UNORM: u32 = 67;
pub const VIRTIO_GPU_FORMAT_X8B8G8R8_UNORM: u32 = 68;
pub const VIRTIO_GPU_FORMAT_A8B8G8R8_UNORM: u32 = 121;
pub const VIRTIO_GPU_FORMAT_R8G8B8X8_UNORM: u32 = 134;

/// A virtio gpu command and associated metadata specific to each command.
#[derive(Copy, Clone)]
pub enum GpuCommand {
    GetDisplayInfo(virtio_gpu_ctrl_hdr),
    ResourceCreate2d(virtio_gpu_resource_create_2d),
    ResourceUnref(virtio_gpu_resource_unref),
    SetScanout(virtio_gpu_set_scanout),
    SetScanoutBlob(virtio_gpu_set_scanout_blob),
    ResourceFlush(virtio_gpu_resource_flush),
    TransferToHost2d(virtio_gpu_transfer_to_host_2d),
    ResourceAttachBacking(virtio_gpu_resource_attach_backing),
    ResourceDetachBacking(virtio_gpu_resource_detach_backing),
    GetCapsetInfo(virtio_gpu_get_capset_info),
    GetCapset(virtio_gpu_get_capset),
    GetEdid(virtio_gpu_get_edid),
    CtxCreate(virtio_gpu_ctx_create),
    CtxDestroy(virtio_gpu_ctx_destroy),
    CtxAttachResource(virtio_gpu_ctx_resource),
    CtxDetachResource(virtio_gpu_ctx_resource),
    ResourceCreate3d(virtio_gpu_resource_create_3d),
    TransferToHost3d(virtio_gpu_transfer_host_3d),
    TransferFromHost3d(virtio_gpu_transfer_host_3d),
    CmdSubmit3d(virtio_gpu_cmd_submit),
    ResourceCreateBlob(virtio_gpu_resource_create_blob),
    ResourceMapBlob(virtio_gpu_resource_map_blob),
    ResourceUnmapBlob(virtio_gpu_resource_unmap_blob),
    UpdateCursor(virtio_gpu_update_cursor),
    MoveCursor(virtio_gpu_update_cursor),
    ResourceAssignUuid(virtio_gpu_resource_assign_uuid),
}

/// An error indicating something went wrong decoding a `GpuCommand`. These correspond to
/// `VIRTIO_GPU_CMD_*`.
#[sorted]
#[derive(Error, Debug)]
pub enum GpuCommandDecodeError {
    /// The type of the command was invalid.
    #[error("invalid command type ({0})")]
    InvalidType(u32),
    /// An I/O error occurred.
    #[error("an I/O error occurred: {0}")]
    IO(io::Error),
}

impl From<io::Error> for GpuCommandDecodeError {
    fn from(e: io::Error) -> GpuCommandDecodeError {
        GpuCommandDecodeError::IO(e)
    }
}

impl fmt::Debug for GpuCommand {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use self::GpuCommand::*;
        match self {
            GetDisplayInfo(_info) => f.debug_struct("GetDisplayInfo").finish(),
            ResourceCreate2d(_info) => f.debug_struct("ResourceCreate2d").finish(),
            ResourceUnref(_info) => f.debug_struct("ResourceUnref").finish(),
            SetScanout(_info) => f.debug_struct("SetScanout").finish(),
            SetScanoutBlob(_info) => f.debug_struct("SetScanoutBlob").finish(),
            ResourceFlush(_info) => f.debug_struct("ResourceFlush").finish(),
            TransferToHost2d(_info) => f.debug_struct("TransferToHost2d").finish(),
            ResourceAttachBacking(_info) => f.debug_struct("ResourceAttachBacking").finish(),
            ResourceDetachBacking(_info) => f.debug_struct("ResourceDetachBacking").finish(),
            GetCapsetInfo(_info) => f.debug_struct("GetCapsetInfo").finish(),
            GetCapset(_info) => f.debug_struct("GetCapset").finish(),
            GetEdid(_info) => f.debug_struct("GetEdid").finish(),
            CtxCreate(_info) => f.debug_struct("CtxCreate").finish(),
            CtxDestroy(_info) => f.debug_struct("CtxDestroy").finish(),
            CtxAttachResource(_info) => f.debug_struct("CtxAttachResource").finish(),
            CtxDetachResource(_info) => f.debug_struct("CtxDetachResource").finish(),
            ResourceCreate3d(_info) => f.debug_struct("ResourceCreate3d").finish(),
            TransferToHost3d(_info) => f.debug_struct("TransferToHost3d").finish(),
            TransferFromHost3d(_info) => f.debug_struct("TransferFromHost3d").finish(),
            CmdSubmit3d(_info) => f.debug_struct("CmdSubmit3d").finish(),
            ResourceCreateBlob(_info) => f.debug_struct("ResourceCreateBlob").finish(),
            ResourceMapBlob(_info) => f.debug_struct("ResourceMapBlob").finish(),
            ResourceUnmapBlob(_info) => f.debug_struct("ResourceUnmapBlob").finish(),
            UpdateCursor(_info) => f.debug_struct("UpdateCursor").finish(),
            MoveCursor(_info) => f.debug_struct("MoveCursor").finish(),
            ResourceAssignUuid(_info) => f.debug_struct("ResourceAssignUuid").finish(),
        }
    }
}

impl GpuCommand {
    /// Decodes a command from the given chunk of memory.
    pub fn decode(cmd: &mut Reader) -> Result<GpuCommand, GpuCommandDecodeError> {
        use self::GpuCommand::*;
        let hdr = cmd.peek_obj::<virtio_gpu_ctrl_hdr>()?;
        Ok(match hdr.type_.into() {
            VIRTIO_GPU_CMD_GET_DISPLAY_INFO => GetDisplayInfo(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_2D => ResourceCreate2d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_UNREF => ResourceUnref(cmd.read_obj()?),
            VIRTIO_GPU_CMD_SET_SCANOUT => SetScanout(cmd.read_obj()?),
            VIRTIO_GPU_CMD_SET_SCANOUT_BLOB => SetScanoutBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_FLUSH => ResourceFlush(cmd.read_obj()?),
            VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D => TransferToHost2d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING => ResourceAttachBacking(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING => ResourceDetachBacking(cmd.read_obj()?),
            VIRTIO_GPU_CMD_GET_CAPSET_INFO => GetCapsetInfo(cmd.read_obj()?),
            VIRTIO_GPU_CMD_GET_CAPSET => GetCapset(cmd.read_obj()?),
            VIRTIO_GPU_CMD_GET_EDID => GetEdid(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_CREATE => CtxCreate(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_DESTROY => CtxDestroy(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE => CtxAttachResource(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE => CtxDetachResource(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_3D => ResourceCreate3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D => TransferToHost3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D => TransferFromHost3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_SUBMIT_3D => CmdSubmit3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB => ResourceCreateBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB => ResourceMapBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB => ResourceUnmapBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_UPDATE_CURSOR => UpdateCursor(cmd.read_obj()?),
            VIRTIO_GPU_CMD_MOVE_CURSOR => MoveCursor(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID => ResourceAssignUuid(cmd.read_obj()?),
            _ => return Err(GpuCommandDecodeError::InvalidType(hdr.type_.into())),
        })
    }

    /// Gets the generic `virtio_gpu_ctrl_hdr` from this command.
    pub fn ctrl_hdr(&self) -> &virtio_gpu_ctrl_hdr {
        use self::GpuCommand::*;
        match self {
            GetDisplayInfo(info) => info,
            ResourceCreate2d(info) => &info.hdr,
            ResourceUnref(info) => &info.hdr,
            SetScanout(info) => &info.hdr,
            SetScanoutBlob(info) => &info.hdr,
            ResourceFlush(info) => &info.hdr,
            TransferToHost2d(info) => &info.hdr,
            ResourceAttachBacking(info) => &info.hdr,
            ResourceDetachBacking(info) => &info.hdr,
            GetCapsetInfo(info) => &info.hdr,
            GetCapset(info) => &info.hdr,
            GetEdid(info) => &info.hdr,
            CtxCreate(info) => &info.hdr,
            CtxDestroy(info) => &info.hdr,
            CtxAttachResource(info) => &info.hdr,
            CtxDetachResource(info) => &info.hdr,
            ResourceCreate3d(info) => &info.hdr,
            TransferToHost3d(info) => &info.hdr,
            TransferFromHost3d(info) => &info.hdr,
            CmdSubmit3d(info) => &info.hdr,
            ResourceCreateBlob(info) => &info.hdr,
            ResourceMapBlob(info) => &info.hdr,
            ResourceUnmapBlob(info) => &info.hdr,
            UpdateCursor(info) => &info.hdr,
            MoveCursor(info) => &info.hdr,
            ResourceAssignUuid(info) => &info.hdr,
        }
    }
}

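/// Per-plane stride and offset reported back in an `OkResourcePlaneInfo` response.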
#[derive(Debug, PartialEq, Eq)]
pub struct GpuResponsePlaneInfo {
    pub stride: u32,
    pub offset: u32,
}

/// A response to a `GpuCommand`. These correspond to `VIRTIO_GPU_RESP_*`.
#[derive(Debug)]
pub enum GpuResponse {
    OkNoData,
    OkDisplayInfo(Vec<(u32, u32, bool)>),
    OkCapsetInfo {
        capset_id: u32,
        version: u32,
        size: u32,
    },
    OkCapset(Vec<u8>),
    OkEdid(Box<EdidBytes>),
    OkResourcePlaneInfo {
        format_modifier: u64,
        plane_info: Vec<GpuResponsePlaneInfo>,
    },
    OkResourceUuid {
        uuid: [u8; 16],
    },
    OkMapInfo {
        map_info: u32,
    },
    ErrUnspec,
    ErrTube(TubeError),
    ErrBase(BaseError),
    ErrRutabaga(RutabagaError),
    ErrDisplay(GpuDisplayError),
    ErrScanout {
        num_scanouts: u32,
    },
    ErrEdid(String),
    ErrOutOfMemory,
    ErrInvalidScanoutId,
    ErrInvalidResourceId,
    ErrInvalidContextId,
    ErrInvalidParameter,
    ErrUdmabuf(UdmabufError),
}

impl From<TubeError> for GpuResponse {
    fn from(e: TubeError) -> GpuResponse {
        GpuResponse::ErrTube(e)
    }
}

impl From<RutabagaError> for GpuResponse {
    fn from(e: RutabagaError) -> GpuResponse {
        GpuResponse::ErrRutabaga(e)
    }
}

impl From<GpuDisplayError> for GpuResponse {
    fn from(e: GpuDisplayError) -> GpuResponse {
        GpuResponse::ErrDisplay(e)
    }
}

impl From<UdmabufError> for GpuResponse {
    fn from(e: UdmabufError) -> GpuResponse {
        GpuResponse::ErrUdmabuf(e)
    }
}

impl Display for GpuResponse {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use self::GpuResponse::*;
        match self {
            OkNoData => write!(f, "ok no data"),
            OkDisplayInfo(_) => write!(f, "ok display info"),
            OkCapsetInfo { .. } => write!(f, "ok capset info"),
            OkCapset(_) => write!(f, "ok capset"),
            OkEdid(_) => write!(f, "ok edid"),
            OkResourcePlaneInfo { .. } => write!(f, "ok resource plane info"),
            OkResourceUuid { .. } => write!(f, "ok resource uuid"),
            OkMapInfo { map_info } => write!(f, "ok map info: {}", map_info),
            ErrUnspec => write!(f, "unspecified error"),
            ErrTube(e) => write!(f, "tube error: {}", e),
            ErrBase(e) => write!(f, "base error: {}", e),
            ErrRutabaga(e) => write!(f, "renderer error: {}", e),
            ErrDisplay(e) => write!(f, "display error: {}", e),
            ErrScanout { num_scanouts } => write!(f, "non-zero scanout: {}", num_scanouts),
            ErrEdid(e) => write!(f, "edid error: {}", e),
            ErrOutOfMemory => write!(f, "out of memory error"),
            ErrInvalidScanoutId => write!(f, "invalid scanout id"),
            ErrInvalidResourceId => write!(f, "invalid resource id"),
            ErrInvalidContextId => write!(f, "invalid context id"),
            ErrInvalidParameter => write!(f, "invalid parameter"),
            ErrUdmabuf(e) => write!(f, "udmabuf error: {}", e),
        }
    }
}

impl std::error::Error for GpuResponse {}

/// An error indicating something went wrong encoding a `GpuResponse`.
#[sorted]
#[derive(Error, Debug)]
pub enum GpuResponseEncodeError {
    /// An I/O error occurred.
    #[error("an I/O error occurred: {0}")]
    IO(io::Error),
    /// More displays than are valid were in an `OkDisplayInfo`.
    #[error("{0} is more displays than are valid")]
    TooManyDisplays(usize),
    /// More planes than are valid were in an `OkResourcePlaneInfo`.
    #[error("{0} is more planes than are valid")]
    TooManyPlanes(usize),
}

impl From<io::Error> for GpuResponseEncodeError {
    fn from(e: io::Error) -> GpuResponseEncodeError {
        GpuResponseEncodeError::IO(e)
    }
}

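/// Result of executing a `GpuCommand`. Both the `Ok` and `Err` variants carry a `GpuResponse`
/// that is encoded back to the guest.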
pub type VirtioGpuResult = std::result::Result<GpuResponse, GpuResponse>;

impl GpuResponse {
    /// Encodes this `GpuResponse` into `resp` with the given set of metadata.
    pub fn encode(
        &self,
        flags: u32,
        fence_id: u64,
        ctx_id: u32,
        ring_idx: u8,
        resp: &mut Writer,
    ) -> Result<u32, GpuResponseEncodeError> {
        let hdr = virtio_gpu_ctrl_hdr {
            type_: Le32::from(self.get_type()),
            flags: Le32::from(flags),
            fence_id: Le64::from(fence_id),
            ctx_id: Le32::from(ctx_id),
            ring_idx,
            padding: Default::default(),
        };
        let len = match *self {
            GpuResponse::OkDisplayInfo(ref info) => {
                if info.len() > VIRTIO_GPU_MAX_SCANOUTS {
                    return Err(GpuResponseEncodeError::TooManyDisplays(info.len()));
                }
                let mut disp_info = virtio_gpu_resp_display_info {
                    hdr,
                    pmodes: Default::default(),
                };
                for (disp_mode, &(width, height, enabled)) in disp_info.pmodes.iter_mut().zip(info)
                {
                    disp_mode.r.width = Le32::from(width);
                    disp_mode.r.height = Le32::from(height);
                    disp_mode.enabled = Le32::from(enabled as u32);
                }
                resp.write_obj(disp_info)?;
                size_of_val(&disp_info)
            }
            GpuResponse::OkCapsetInfo {
                capset_id,
                version,
                size,
            } => {
                resp.write_obj(virtio_gpu_resp_capset_info {
                    hdr,
                    capset_id: Le32::from(capset_id),
                    capset_max_version: Le32::from(version),
                    capset_max_size: Le32::from(size),
                    padding: Le32::from(0),
                })?;
                size_of::<virtio_gpu_resp_capset_info>()
            }
            GpuResponse::OkCapset(ref data) => {
                resp.write_obj(hdr)?;
                resp.write_all(data)?;
                size_of_val(&hdr) + data.len()
            }
            GpuResponse::OkEdid(ref edid_bytes) => {
                let mut edid_resp = virtio_gpu_resp_get_edid {
                    hdr,
                    size: Le32::from(1024),
                    padding: Le32::from(0),
                    edid: [0; 1024],
                };

                edid_resp.edid[0..edid_bytes.len()].copy_from_slice(edid_bytes.as_bytes());
                resp.write_obj(edid_resp)?;
                size_of::<virtio_gpu_resp_get_edid>()
            }
            GpuResponse::OkResourcePlaneInfo {
                format_modifier,
                ref plane_info,
            } => {
                if plane_info.len() > PLANE_INFO_MAX_COUNT {
                    return Err(GpuResponseEncodeError::TooManyPlanes(plane_info.len()));
                }
                let mut strides = [Le32::default(); PLANE_INFO_MAX_COUNT];
                let mut offsets = [Le32::default(); PLANE_INFO_MAX_COUNT];
                for (plane_index, plane) in plane_info.iter().enumerate() {
                    strides[plane_index] = plane.stride.into();
                    offsets[plane_index] = plane.offset.into();
                }
                let plane_info = virtio_gpu_resp_resource_plane_info {
                    hdr,
                    count: Le32::from(plane_info.len() as u32),
                    padding: 0.into(),
                    format_modifier: format_modifier.into(),
                    strides,
                    offsets,
                };
                if resp.available_bytes() >= size_of_val(&plane_info) {
                    resp.write_obj(plane_info)?;
                    size_of_val(&plane_info)
                } else {
                    // In case there is too little room in the response slice to store the
                    // entire virtio_gpu_resp_resource_plane_info, convert response to a regular
                    // VIRTIO_GPU_RESP_OK_NODATA and attempt to return that.
                    resp.write_obj(virtio_gpu_ctrl_hdr {
                        type_: Le32::from(VIRTIO_GPU_RESP_OK_NODATA),
                        ..hdr
                    })?;
                    size_of_val(&hdr)
                }
            }
            GpuResponse::OkResourceUuid { uuid } => {
                let resp_info = virtio_gpu_resp_resource_uuid { hdr, uuid };

                resp.write_obj(resp_info)?;
                size_of_val(&resp_info)
            }
            GpuResponse::OkMapInfo { map_info } => {
                let resp_info = virtio_gpu_resp_map_info {
                    hdr,
                    map_info: Le32::from(map_info),
                    padding: Default::default(),
                };

                resp.write_obj(resp_info)?;
                size_of_val(&resp_info)
            }
            _ => {
                resp.write_obj(hdr)?;
                size_of_val(&hdr)
            }
        };
        Ok(len as u32)
    }

    /// Gets the `VIRTIO_GPU_*` enum value that corresponds to this variant.
    pub fn get_type(&self) -> u32 {
        match self {
            GpuResponse::OkNoData => VIRTIO_GPU_RESP_OK_NODATA,
            GpuResponse::OkDisplayInfo(_) => VIRTIO_GPU_RESP_OK_DISPLAY_INFO,
            GpuResponse::OkCapsetInfo { .. } => VIRTIO_GPU_RESP_OK_CAPSET_INFO,
            GpuResponse::OkCapset(_) => VIRTIO_GPU_RESP_OK_CAPSET,
            GpuResponse::OkEdid(_) => VIRTIO_GPU_RESP_OK_EDID,
            GpuResponse::OkResourcePlaneInfo { .. } => VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO,
            GpuResponse::OkResourceUuid { .. } => VIRTIO_GPU_RESP_OK_RESOURCE_UUID,
            GpuResponse::OkMapInfo { .. } => VIRTIO_GPU_RESP_OK_MAP_INFO,
            GpuResponse::ErrUnspec => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrTube(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrBase(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrRutabaga(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrDisplay(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrUdmabuf(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrScanout { num_scanouts: _ } => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrEdid(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrOutOfMemory => VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY,
            GpuResponse::ErrInvalidScanoutId => VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID,
            GpuResponse::ErrInvalidResourceId => VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID,
            GpuResponse::ErrInvalidContextId => VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID,
            GpuResponse::ErrInvalidParameter => VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER,
        }
    }
}