// Copyright 2019 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#![allow(dead_code)]
#![allow(non_camel_case_types)]

use std::cmp::min;
use std::convert::From;
use std::fmt;
use std::fmt::Display;
use std::io;
use std::io::Write;
use std::marker::PhantomData;
use std::mem::size_of;
use std::mem::size_of_val;
use std::str::from_utf8;

use base::Error as BaseError;
use base::TubeError;
use data_model::Le32;
use data_model::Le64;
use gpu_display::GpuDisplayError;
use remain::sorted;
use rutabaga_gfx::RutabagaError;
use thiserror::Error;
use vm_memory::udmabuf::UdmabufError;
use zerocopy::AsBytes;
use zerocopy::FromBytes;

pub use super::super::device_constants::gpu::virtio_gpu_config;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_CONTEXT_INIT;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_CREATE_GUEST_HANDLE;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_EDID;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_RESOURCE_BLOB;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_RESOURCE_SYNC;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_RESOURCE_UUID;
pub use super::super::device_constants::gpu::VIRTIO_GPU_F_VIRGL;
use super::super::DescriptorError;
use super::edid::EdidBytes;
use super::Reader;
use super::Writer;

pub const VIRTIO_GPU_UNDEFINED: u32 = 0x0;

/* 2d commands */
pub const VIRTIO_GPU_CMD_GET_DISPLAY_INFO: u32 = 0x100;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_2D: u32 = 0x101;
pub const VIRTIO_GPU_CMD_RESOURCE_UNREF: u32 = 0x102;
pub const VIRTIO_GPU_CMD_SET_SCANOUT: u32 = 0x103;
pub const VIRTIO_GPU_CMD_RESOURCE_FLUSH: u32 = 0x104;
pub const VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D: u32 = 0x105;
pub const VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING: u32 = 0x106;
pub const VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING: u32 = 0x107;
pub const VIRTIO_GPU_CMD_GET_CAPSET_INFO: u32 = 0x108;
pub const VIRTIO_GPU_CMD_GET_CAPSET: u32 = 0x109;
pub const VIRTIO_GPU_CMD_GET_EDID: u32 = 0x10a;
pub const VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID: u32 = 0x10b;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB: u32 = 0x10c;
pub const VIRTIO_GPU_CMD_SET_SCANOUT_BLOB: u32 = 0x10d;

/* 3d commands */
pub const VIRTIO_GPU_CMD_CTX_CREATE: u32 = 0x200;
pub const VIRTIO_GPU_CMD_CTX_DESTROY: u32 = 0x201;
pub const VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE: u32 = 0x202;
pub const VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE: u32 = 0x203;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_3D: u32 = 0x204;
pub const VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D: u32 = 0x205;
pub const VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D: u32 = 0x206;
pub const VIRTIO_GPU_CMD_SUBMIT_3D: u32 = 0x207;
pub const VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB: u32 = 0x208;
pub const VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB: u32 = 0x209;

/* cursor commands */
pub const VIRTIO_GPU_CMD_UPDATE_CURSOR: u32 = 0x300;
pub const VIRTIO_GPU_CMD_MOVE_CURSOR: u32 = 0x301;

/* success responses */
/* FIXME(b/2050923): Conflicts in enum values. The value of
 * VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO (which is not upstream) conflicts with
 * the upstream VIRTIO_GPU_RESP_OK_EDID, so both OK_EDID and OK_RESOURCE_UUID
 * are assigned the same value here. */
pub const VIRTIO_GPU_RESP_OK_NODATA: u32 = 0x1100;
pub const VIRTIO_GPU_RESP_OK_DISPLAY_INFO: u32 = 0x1101;
pub const VIRTIO_GPU_RESP_OK_CAPSET_INFO: u32 = 0x1102;
pub const VIRTIO_GPU_RESP_OK_CAPSET: u32 = 0x1103;
pub const VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO: u32 = 0x1104;
pub const VIRTIO_GPU_RESP_OK_EDID: u32 = 0x1105;
pub const VIRTIO_GPU_RESP_OK_RESOURCE_UUID: u32 = 0x1105;
pub const VIRTIO_GPU_RESP_OK_MAP_INFO: u32 = 0x1106;

/* error responses */
pub const VIRTIO_GPU_RESP_ERR_UNSPEC: u32 = 0x1200;
pub const VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY: u32 = 0x1201;
pub const VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID: u32 = 0x1202;
pub const VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID: u32 = 0x1203;
pub const VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID: u32 = 0x1204;
pub const VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER: u32 = 0x1205;

pub const VIRTIO_GPU_BLOB_MEM_GUEST: u32 = 0x0001;
pub const VIRTIO_GPU_BLOB_MEM_HOST3D: u32 = 0x0002;
pub const VIRTIO_GPU_BLOB_MEM_HOST3D_GUEST: u32 = 0x0003;

pub const VIRTIO_GPU_BLOB_FLAG_USE_MAPPABLE: u32 = 0x0001;
pub const VIRTIO_GPU_BLOB_FLAG_USE_SHAREABLE: u32 = 0x0002;
pub const VIRTIO_GPU_BLOB_FLAG_USE_CROSS_DEVICE: u32 = 0x0004;
/* Create an OS-specific handle from guest memory (not upstreamed). */
pub const VIRTIO_GPU_BLOB_FLAG_CREATE_GUEST_HANDLE: u32 = 0x0008;

pub const VIRTIO_GPU_SHM_ID_NONE: u8 = 0x0000;
pub const VIRTIO_GPU_SHM_ID_HOST_VISIBLE: u8 = 0x0001;

pub fn virtio_gpu_cmd_str(cmd: u32) -> &'static str {
    match cmd {
        VIRTIO_GPU_CMD_GET_DISPLAY_INFO => "VIRTIO_GPU_CMD_GET_DISPLAY_INFO",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_2D => "VIRTIO_GPU_CMD_RESOURCE_CREATE_2D",
        VIRTIO_GPU_CMD_RESOURCE_UNREF => "VIRTIO_GPU_CMD_RESOURCE_UNREF",
        VIRTIO_GPU_CMD_SET_SCANOUT => "VIRTIO_GPU_CMD_SET_SCANOUT",
        VIRTIO_GPU_CMD_SET_SCANOUT_BLOB => "VIRTIO_GPU_CMD_SET_SCANOUT_BLOB",
        VIRTIO_GPU_CMD_RESOURCE_FLUSH => "VIRTIO_GPU_CMD_RESOURCE_FLUSH",
        VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D => "VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D",
        VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING => "VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING",
        VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING => "VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING",
        VIRTIO_GPU_CMD_GET_CAPSET_INFO => "VIRTIO_GPU_CMD_GET_CAPSET_INFO",
        VIRTIO_GPU_CMD_GET_CAPSET => "VIRTIO_GPU_CMD_GET_CAPSET",
        VIRTIO_GPU_CMD_GET_EDID => "VIRTIO_GPU_CMD_GET_EDID",
        VIRTIO_GPU_CMD_CTX_CREATE => "VIRTIO_GPU_CMD_CTX_CREATE",
        VIRTIO_GPU_CMD_CTX_DESTROY => "VIRTIO_GPU_CMD_CTX_DESTROY",
        VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE => "VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE",
        VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE => "VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE",
        VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID => "VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB => "VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_3D => "VIRTIO_GPU_CMD_RESOURCE_CREATE_3D",
        VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D => "VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D",
        VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D => "VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D",
        VIRTIO_GPU_CMD_SUBMIT_3D => "VIRTIO_GPU_CMD_SUBMIT_3D",
        VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB => "VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB",
        VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB => "VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB",
        VIRTIO_GPU_CMD_UPDATE_CURSOR => "VIRTIO_GPU_CMD_UPDATE_CURSOR",
        VIRTIO_GPU_CMD_MOVE_CURSOR => "VIRTIO_GPU_CMD_MOVE_CURSOR",
        VIRTIO_GPU_RESP_OK_NODATA => "VIRTIO_GPU_RESP_OK_NODATA",
        VIRTIO_GPU_RESP_OK_DISPLAY_INFO => "VIRTIO_GPU_RESP_OK_DISPLAY_INFO",
        VIRTIO_GPU_RESP_OK_CAPSET_INFO => "VIRTIO_GPU_RESP_OK_CAPSET_INFO",
        VIRTIO_GPU_RESP_OK_CAPSET => "VIRTIO_GPU_RESP_OK_CAPSET",
        VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO => "VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO",
        VIRTIO_GPU_RESP_OK_RESOURCE_UUID => "VIRTIO_GPU_RESP_OK_RESOURCE_UUID",
        VIRTIO_GPU_RESP_OK_MAP_INFO => "VIRTIO_GPU_RESP_OK_MAP_INFO",
        VIRTIO_GPU_RESP_ERR_UNSPEC => "VIRTIO_GPU_RESP_ERR_UNSPEC",
        VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY => "VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY",
        VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID => "VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID => "VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID => "VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER => "VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER",
        _ => "UNKNOWN",
    }
}
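
// Usage sketch (illustrative, not part of the original source): a caller that
// has read a `virtio_gpu_ctrl_hdr` can turn the raw request code into a
// readable name when logging. `hdr` and the `debug!` macro are assumed to
// exist in the caller's context.
//
//     let name = virtio_gpu_cmd_str(hdr.type_.to_native());
//     debug!("virtio-gpu request: {}", name);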

pub const VIRTIO_GPU_FLAG_FENCE: u32 = 1 << 0;
pub const VIRTIO_GPU_FLAG_INFO_RING_IDX: u32 = 1 << 1;

#[derive(Copy, Clone, Debug, Default, AsBytes, FromBytes)]
#[repr(C)]
pub struct virtio_gpu_ctrl_hdr {
    pub type_: Le32,
    pub flags: Le32,
    pub fence_id: Le64,
    pub ctx_id: Le32,
    pub ring_idx: u8,
    pub padding: [u8; 3],
}
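
// Example (a sketch, not taken from the device code): when a request sets
// VIRTIO_GPU_FLAG_FENCE, the matching response header echoes the flags,
// fence_id and ctx_id so the guest can associate the fence completion with its
// command (see `GpuResponse::encode` below). A hand-built "fenced OK" header
// would look like this; the fence id 42 is a made-up placeholder:
//
//     let resp_hdr = virtio_gpu_ctrl_hdr {
//         type_: Le32::from(VIRTIO_GPU_RESP_OK_NODATA),
//         flags: Le32::from(VIRTIO_GPU_FLAG_FENCE),
//         fence_id: Le64::from(42), // copied from the request header
//         ctx_id: Le32::from(0),
//         ring_idx: 0,
//         padding: Default::default(),
//     };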

/* data passed in the cursor vq */

#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_cursor_pos {
    pub scanout_id: Le32,
    pub x: Le32,
    pub y: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_UPDATE_CURSOR, VIRTIO_GPU_CMD_MOVE_CURSOR */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_update_cursor {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub pos: virtio_gpu_cursor_pos, /* update & move */
    pub resource_id: Le32,          /* update only */
    pub hot_x: Le32,                /* update only */
    pub hot_y: Le32,                /* update only */
    pub padding: Le32,
}

/* data passed in the control vq, 2d related */

#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_rect {
    pub x: Le32,
    pub y: Le32,
    pub width: Le32,
    pub height: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_UNREF */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_unref {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_CREATE_2D: create a 2d resource with a format */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_create_2d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub format: Le32,
    pub width: Le32,
    pub height: Le32,
}

/* VIRTIO_GPU_CMD_SET_SCANOUT */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_set_scanout {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub scanout_id: Le32,
    pub resource_id: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_FLUSH */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_flush {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub resource_id: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D: simple transfer to_host */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_transfer_to_host_2d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub offset: Le64,
    pub resource_id: Le32,
    pub padding: Le32,
}

#[derive(Copy, Clone, Debug, Default, AsBytes, FromBytes)]
#[repr(C)]
pub struct virtio_gpu_mem_entry {
    pub addr: Le64,
    pub length: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_attach_backing {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub nr_entries: Le32,
}
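
// Note / sketch (not in the original file): per the virtio-gpu spec, the
// ATTACH_BACKING command is followed in the same descriptor chain by
// `nr_entries` consecutive `virtio_gpu_mem_entry` records describing the guest
// pages that back the resource. A handler could read them roughly like this,
// assuming a `Reader`-style `cmd` positioned just past the command struct:
//
//     let info: virtio_gpu_resource_attach_backing = cmd.read_obj()?;
//     let mut entries = Vec::with_capacity(info.nr_entries.to_native() as usize);
//     for _ in 0..info.nr_entries.to_native() {
//         entries.push(cmd.read_obj::<virtio_gpu_mem_entry>()?);
//     }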

/* VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_detach_backing {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_display_one {
    pub r: virtio_gpu_rect,
    pub enabled: Le32,
    pub flags: Le32,
}

/* VIRTIO_GPU_RESP_OK_DISPLAY_INFO */
pub const VIRTIO_GPU_MAX_SCANOUTS: usize = 16;
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resp_display_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub pmodes: [virtio_gpu_display_one; VIRTIO_GPU_MAX_SCANOUTS],
}

/* data passed in the control vq, 3d related */

#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_box {
    pub x: Le32,
    pub y: Le32,
    pub z: Le32,
    pub w: Le32,
    pub h: Le32,
    pub d: Le32,
}

/* VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D, VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_transfer_host_3d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub box_: virtio_gpu_box,
    pub offset: Le64,
    pub resource_id: Le32,
    pub level: Le32,
    pub stride: Le32,
    pub layer_stride: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_CREATE_3D */
pub const VIRTIO_GPU_RESOURCE_FLAG_Y_0_TOP: u32 = 1 << 0;
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_create_3d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub target: Le32,
    pub format: Le32,
    pub bind: Le32,
    pub width: Le32,
    pub height: Le32,
    pub depth: Le32,
    pub array_size: Le32,
    pub last_level: Le32,
    pub nr_samples: Le32,
    pub flags: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_CTX_CREATE */
pub const VIRTIO_GPU_CONTEXT_INIT_CAPSET_ID_MASK: u32 = 0x000000ff;
#[derive(Copy, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_ctx_create {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub nlen: Le32,
    pub context_init: Le32,
    pub debug_name: [u8; 64],
}

impl Default for virtio_gpu_ctx_create {
    fn default() -> Self {
        // Safe because every field of virtio_gpu_ctx_create is plain old data
        // for which all-zero bytes are a valid value.
        unsafe { ::std::mem::zeroed() }
    }
}

impl Clone for virtio_gpu_ctx_create {
    fn clone(&self) -> virtio_gpu_ctx_create {
        *self
    }
}

impl fmt::Debug for virtio_gpu_ctx_create {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let debug_name = from_utf8(&self.debug_name[..min(64, self.nlen.to_native() as usize)])
            .unwrap_or("<invalid>");
        f.debug_struct("virtio_gpu_ctx_create")
            .field("hdr", &self.hdr)
            .field("debug_name", &debug_name)
            .finish()
    }
}

/* VIRTIO_GPU_CMD_CTX_DESTROY */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_ctx_destroy {
    pub hdr: virtio_gpu_ctrl_hdr,
}

/* VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE, VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_ctx_resource {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_SUBMIT_3D */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_cmd_submit {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub size: Le32,
    pub padding: Le32,
}

pub const VIRTIO_GPU_CAPSET_VIRGL: u32 = 1;
pub const VIRTIO_GPU_CAPSET_VIRGL2: u32 = 2;
pub const VIRTIO_GPU_CAPSET_GFXSTREAM: u32 = 3;
pub const VIRTIO_GPU_CAPSET_VENUS: u32 = 4;
pub const VIRTIO_GPU_CAPSET_CROSS_DOMAIN: u32 = 5;

/* VIRTIO_GPU_CMD_GET_CAPSET_INFO */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_get_capset_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_index: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_RESP_OK_CAPSET_INFO */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resp_capset_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_id: Le32,
    pub capset_max_version: Le32,
    pub capset_max_size: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_GET_CAPSET */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_get_capset {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_id: Le32,
    pub capset_version: Le32,
}

/* VIRTIO_GPU_RESP_OK_CAPSET */
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resp_capset {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_data: PhantomData<[u8]>,
}

/* VIRTIO_GPU_CMD_GET_EDID */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_get_edid {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub scanout: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_RESP_OK_EDID */
#[derive(Copy, Clone, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resp_get_edid {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub size: Le32,
    pub padding: Le32,
    pub edid: [u8; 1024],
}

/* VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resp_resource_plane_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub count: Le32,
    pub padding: Le32,
    pub format_modifier: Le64,
    pub strides: [Le32; 4],
    pub offsets: [Le32; 4],
}

pub const PLANE_INFO_MAX_COUNT: usize = 4;

pub const VIRTIO_GPU_EVENT_DISPLAY: u32 = 1 << 0;

#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_create_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub blob_mem: Le32,
    pub blob_flags: Le32,
    pub nr_entries: Le32,
    pub blob_id: Le64,
    pub size: Le64,
}

#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_map_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
    pub offset: Le64,
}

#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_unmap_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resp_map_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub map_info: Le32,
    pub padding: u32,
}

#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_assign_uuid {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resp_resource_uuid {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub uuid: [u8; 16],
}

/* VIRTIO_GPU_CMD_SET_SCANOUT_BLOB */
#[derive(Copy, Clone, Debug, Default, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_set_scanout_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub scanout_id: Le32,
    pub resource_id: Le32,
    pub width: Le32,
    pub height: Le32,
    pub format: Le32,
    pub padding: Le32,
    pub strides: [Le32; 4],
    pub offsets: [Le32; 4],
}

/* simple formats for fbcon/X use */
pub const VIRTIO_GPU_FORMAT_B8G8R8A8_UNORM: u32 = 1;
pub const VIRTIO_GPU_FORMAT_B8G8R8X8_UNORM: u32 = 2;
pub const VIRTIO_GPU_FORMAT_A8R8G8B8_UNORM: u32 = 3;
pub const VIRTIO_GPU_FORMAT_X8R8G8B8_UNORM: u32 = 4;
pub const VIRTIO_GPU_FORMAT_R8G8B8A8_UNORM: u32 = 67;
pub const VIRTIO_GPU_FORMAT_X8B8G8R8_UNORM: u32 = 68;
pub const VIRTIO_GPU_FORMAT_A8B8G8R8_UNORM: u32 = 121;
pub const VIRTIO_GPU_FORMAT_R8G8B8X8_UNORM: u32 = 134;

/// A virtio gpu command and associated metadata specific to each command.
#[derive(Copy, Clone)]
pub enum GpuCommand {
    GetDisplayInfo(virtio_gpu_ctrl_hdr),
    ResourceCreate2d(virtio_gpu_resource_create_2d),
    ResourceUnref(virtio_gpu_resource_unref),
    SetScanout(virtio_gpu_set_scanout),
    SetScanoutBlob(virtio_gpu_set_scanout_blob),
    ResourceFlush(virtio_gpu_resource_flush),
    TransferToHost2d(virtio_gpu_transfer_to_host_2d),
    ResourceAttachBacking(virtio_gpu_resource_attach_backing),
    ResourceDetachBacking(virtio_gpu_resource_detach_backing),
    GetCapsetInfo(virtio_gpu_get_capset_info),
    GetCapset(virtio_gpu_get_capset),
    GetEdid(virtio_gpu_get_edid),
    CtxCreate(virtio_gpu_ctx_create),
    CtxDestroy(virtio_gpu_ctx_destroy),
    CtxAttachResource(virtio_gpu_ctx_resource),
    CtxDetachResource(virtio_gpu_ctx_resource),
    ResourceCreate3d(virtio_gpu_resource_create_3d),
    TransferToHost3d(virtio_gpu_transfer_host_3d),
    TransferFromHost3d(virtio_gpu_transfer_host_3d),
    CmdSubmit3d(virtio_gpu_cmd_submit),
    ResourceCreateBlob(virtio_gpu_resource_create_blob),
    ResourceMapBlob(virtio_gpu_resource_map_blob),
    ResourceUnmapBlob(virtio_gpu_resource_unmap_blob),
    UpdateCursor(virtio_gpu_update_cursor),
    MoveCursor(virtio_gpu_update_cursor),
    ResourceAssignUuid(virtio_gpu_resource_assign_uuid),
}

/// An error indicating something went wrong decoding a `GpuCommand`.
#[sorted]
#[derive(Error, Debug)]
pub enum GpuCommandDecodeError {
    /// The type of the command was invalid.
    #[error("invalid command type ({0})")]
    InvalidType(u32),
    /// An I/O error occurred.
    #[error("an I/O error occurred: {0}")]
    IO(io::Error),
    /// The command referenced an inaccessible area of memory.
    #[error("command referenced an inaccessible area of memory: {0}")]
    Memory(DescriptorError),
}

impl From<DescriptorError> for GpuCommandDecodeError {
    fn from(e: DescriptorError) -> GpuCommandDecodeError {
        GpuCommandDecodeError::Memory(e)
    }
}

impl From<io::Error> for GpuCommandDecodeError {
    fn from(e: io::Error) -> GpuCommandDecodeError {
        GpuCommandDecodeError::IO(e)
    }
}

impl fmt::Debug for GpuCommand {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use self::GpuCommand::*;
        match self {
            GetDisplayInfo(_info) => f.debug_struct("GetDisplayInfo").finish(),
            ResourceCreate2d(_info) => f.debug_struct("ResourceCreate2d").finish(),
            ResourceUnref(_info) => f.debug_struct("ResourceUnref").finish(),
            SetScanout(_info) => f.debug_struct("SetScanout").finish(),
            SetScanoutBlob(_info) => f.debug_struct("SetScanoutBlob").finish(),
            ResourceFlush(_info) => f.debug_struct("ResourceFlush").finish(),
            TransferToHost2d(_info) => f.debug_struct("TransferToHost2d").finish(),
            ResourceAttachBacking(_info) => f.debug_struct("ResourceAttachBacking").finish(),
            ResourceDetachBacking(_info) => f.debug_struct("ResourceDetachBacking").finish(),
            GetCapsetInfo(_info) => f.debug_struct("GetCapsetInfo").finish(),
            GetCapset(_info) => f.debug_struct("GetCapset").finish(),
            GetEdid(_info) => f.debug_struct("GetEdid").finish(),
            CtxCreate(_info) => f.debug_struct("CtxCreate").finish(),
            CtxDestroy(_info) => f.debug_struct("CtxDestroy").finish(),
            CtxAttachResource(_info) => f.debug_struct("CtxAttachResource").finish(),
            CtxDetachResource(_info) => f.debug_struct("CtxDetachResource").finish(),
            ResourceCreate3d(_info) => f.debug_struct("ResourceCreate3d").finish(),
            TransferToHost3d(_info) => f.debug_struct("TransferToHost3d").finish(),
            TransferFromHost3d(_info) => f.debug_struct("TransferFromHost3d").finish(),
            CmdSubmit3d(_info) => f.debug_struct("CmdSubmit3d").finish(),
            ResourceCreateBlob(_info) => f.debug_struct("ResourceCreateBlob").finish(),
            ResourceMapBlob(_info) => f.debug_struct("ResourceMapBlob").finish(),
            ResourceUnmapBlob(_info) => f.debug_struct("ResourceUnmapBlob").finish(),
            UpdateCursor(_info) => f.debug_struct("UpdateCursor").finish(),
            MoveCursor(_info) => f.debug_struct("MoveCursor").finish(),
            ResourceAssignUuid(_info) => f.debug_struct("ResourceAssignUuid").finish(),
        }
    }
}

impl GpuCommand {
    /// Decodes a command from the given chunk of memory.
    pub fn decode(cmd: &mut Reader) -> Result<GpuCommand, GpuCommandDecodeError> {
        use self::GpuCommand::*;
        // Peek at the header through a cloned reader so that the arm below can
        // re-read the full command struct (header included) from `cmd`.
        let hdr = cmd.clone().read_obj::<virtio_gpu_ctrl_hdr>()?;
        Ok(match hdr.type_.into() {
            VIRTIO_GPU_CMD_GET_DISPLAY_INFO => GetDisplayInfo(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_2D => ResourceCreate2d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_UNREF => ResourceUnref(cmd.read_obj()?),
            VIRTIO_GPU_CMD_SET_SCANOUT => SetScanout(cmd.read_obj()?),
            VIRTIO_GPU_CMD_SET_SCANOUT_BLOB => SetScanoutBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_FLUSH => ResourceFlush(cmd.read_obj()?),
            VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D => TransferToHost2d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING => ResourceAttachBacking(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING => ResourceDetachBacking(cmd.read_obj()?),
            VIRTIO_GPU_CMD_GET_CAPSET_INFO => GetCapsetInfo(cmd.read_obj()?),
            VIRTIO_GPU_CMD_GET_CAPSET => GetCapset(cmd.read_obj()?),
            VIRTIO_GPU_CMD_GET_EDID => GetEdid(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_CREATE => CtxCreate(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_DESTROY => CtxDestroy(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE => CtxAttachResource(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE => CtxDetachResource(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_3D => ResourceCreate3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D => TransferToHost3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D => TransferFromHost3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_SUBMIT_3D => CmdSubmit3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB => ResourceCreateBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB => ResourceMapBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB => ResourceUnmapBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_UPDATE_CURSOR => UpdateCursor(cmd.read_obj()?),
            VIRTIO_GPU_CMD_MOVE_CURSOR => MoveCursor(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID => ResourceAssignUuid(cmd.read_obj()?),
            _ => return Err(GpuCommandDecodeError::InvalidType(hdr.type_.into())),
        })
    }
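
    // Decode sketch (illustrative, not part of the original source): a request
    // handler would typically pull one command off the control queue's
    // descriptor chain and dispatch on it. `reader` is an assumed
    // `Reader`-style view of the request descriptor, and `debug!` an assumed
    // logging macro in the caller's context.
    //
    //     let cmd = GpuCommand::decode(&mut reader)?;
    //     let hdr = cmd.ctrl_hdr();
    //     debug!(
    //         "request {} (fence: {})",
    //         virtio_gpu_cmd_str(hdr.type_.to_native()),
    //         hdr.fence_id.to_native(),
    //     );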

    /// Gets the generic `virtio_gpu_ctrl_hdr` from this command.
    pub fn ctrl_hdr(&self) -> &virtio_gpu_ctrl_hdr {
        use self::GpuCommand::*;
        match self {
            GetDisplayInfo(info) => info,
            ResourceCreate2d(info) => &info.hdr,
            ResourceUnref(info) => &info.hdr,
            SetScanout(info) => &info.hdr,
            SetScanoutBlob(info) => &info.hdr,
            ResourceFlush(info) => &info.hdr,
            TransferToHost2d(info) => &info.hdr,
            ResourceAttachBacking(info) => &info.hdr,
            ResourceDetachBacking(info) => &info.hdr,
            GetCapsetInfo(info) => &info.hdr,
            GetCapset(info) => &info.hdr,
            GetEdid(info) => &info.hdr,
            CtxCreate(info) => &info.hdr,
            CtxDestroy(info) => &info.hdr,
            CtxAttachResource(info) => &info.hdr,
            CtxDetachResource(info) => &info.hdr,
            ResourceCreate3d(info) => &info.hdr,
            TransferToHost3d(info) => &info.hdr,
            TransferFromHost3d(info) => &info.hdr,
            CmdSubmit3d(info) => &info.hdr,
            ResourceCreateBlob(info) => &info.hdr,
            ResourceMapBlob(info) => &info.hdr,
            ResourceUnmapBlob(info) => &info.hdr,
            UpdateCursor(info) => &info.hdr,
            MoveCursor(info) => &info.hdr,
            ResourceAssignUuid(info) => &info.hdr,
        }
    }
}

#[derive(Debug, PartialEq, Eq)]
pub struct GpuResponsePlaneInfo {
    pub stride: u32,
    pub offset: u32,
}

/// A response to a `GpuCommand`. These correspond to `VIRTIO_GPU_RESP_*`.
#[derive(Debug)]
pub enum GpuResponse {
    OkNoData,
    OkDisplayInfo(Vec<(u32, u32, bool)>),
    OkCapsetInfo {
        capset_id: u32,
        version: u32,
        size: u32,
    },
    OkCapset(Vec<u8>),
    OkEdid(Box<EdidBytes>),
    OkResourcePlaneInfo {
        format_modifier: u64,
        plane_info: Vec<GpuResponsePlaneInfo>,
    },
    OkResourceUuid {
        uuid: [u8; 16],
    },
    OkMapInfo {
        map_info: u32,
    },
    ErrUnspec,
    ErrTube(TubeError),
    ErrBase(BaseError),
    ErrRutabaga(RutabagaError),
    ErrDisplay(GpuDisplayError),
    ErrScanout {
        num_scanouts: u32,
    },
    ErrEdid(String),
    ErrOutOfMemory,
    ErrInvalidScanoutId,
    ErrInvalidResourceId,
    ErrInvalidContextId,
    ErrInvalidParameter,
    ErrUdmabuf(UdmabufError),
}

impl From<TubeError> for GpuResponse {
    fn from(e: TubeError) -> GpuResponse {
        GpuResponse::ErrTube(e)
    }
}

impl From<RutabagaError> for GpuResponse {
    fn from(e: RutabagaError) -> GpuResponse {
        GpuResponse::ErrRutabaga(e)
    }
}

impl From<GpuDisplayError> for GpuResponse {
    fn from(e: GpuDisplayError) -> GpuResponse {
        GpuResponse::ErrDisplay(e)
    }
}

impl From<UdmabufError> for GpuResponse {
    fn from(e: UdmabufError) -> GpuResponse {
        GpuResponse::ErrUdmabuf(e)
    }
}

impl Display for GpuResponse {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use self::GpuResponse::*;
        match self {
            ErrTube(e) => write!(f, "tube error: {}", e),
            ErrBase(e) => write!(f, "base error: {}", e),
            ErrRutabaga(e) => write!(f, "renderer error: {}", e),
            ErrDisplay(e) => write!(f, "display error: {}", e),
            ErrScanout { num_scanouts } => write!(f, "non-zero scanout: {}", num_scanouts),
            ErrUdmabuf(e) => write!(f, "udmabuf error: {}", e),
            _ => Ok(()),
        }
    }
}

/// An error indicating something went wrong encoding a `GpuResponse`.
#[sorted]
#[derive(Error, Debug)]
pub enum GpuResponseEncodeError {
    /// An I/O error occurred.
    #[error("an I/O error occurred: {0}")]
    IO(io::Error),
    /// The response was encoded to an inaccessible area of memory.
    #[error("response was encoded to an inaccessible area of memory: {0}")]
    Memory(DescriptorError),
    /// More displays than are valid were in an `OkDisplayInfo`.
    #[error("{0} is more displays than are valid")]
    TooManyDisplays(usize),
    /// More planes than are valid were in an `OkResourcePlaneInfo`.
    #[error("{0} is more planes than are valid")]
    TooManyPlanes(usize),
}

impl From<DescriptorError> for GpuResponseEncodeError {
    fn from(e: DescriptorError) -> GpuResponseEncodeError {
        GpuResponseEncodeError::Memory(e)
    }
}

impl From<io::Error> for GpuResponseEncodeError {
    fn from(e: io::Error) -> GpuResponseEncodeError {
        GpuResponseEncodeError::IO(e)
    }
}

pub type VirtioGpuResult = std::result::Result<GpuResponse, GpuResponse>;
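// Both the Ok and Err arms carry a `GpuResponse`: an error result is still
// encoded and returned to the guest, it just reports a VIRTIO_GPU_RESP_ERR_*
// code (see `GpuResponse::get_type` below) instead of an OK one.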

impl GpuResponse {
    /// Encodes this `GpuResponse` into `resp`, using the given set of metadata for the header.
    pub fn encode(
        &self,
        flags: u32,
        fence_id: u64,
        ctx_id: u32,
        ring_idx: u8,
        resp: &mut Writer,
    ) -> Result<u32, GpuResponseEncodeError> {
        let hdr = virtio_gpu_ctrl_hdr {
            type_: Le32::from(self.get_type()),
            flags: Le32::from(flags),
            fence_id: Le64::from(fence_id),
            ctx_id: Le32::from(ctx_id),
            ring_idx,
            padding: Default::default(),
        };
        let len = match *self {
            GpuResponse::OkDisplayInfo(ref info) => {
                if info.len() > VIRTIO_GPU_MAX_SCANOUTS {
                    return Err(GpuResponseEncodeError::TooManyDisplays(info.len()));
                }
                let mut disp_info = virtio_gpu_resp_display_info {
                    hdr,
                    pmodes: Default::default(),
                };
                for (disp_mode, &(width, height, enabled)) in disp_info.pmodes.iter_mut().zip(info)
                {
                    disp_mode.r.width = Le32::from(width);
                    disp_mode.r.height = Le32::from(height);
                    disp_mode.enabled = Le32::from(enabled as u32);
                }
                resp.write_obj(disp_info)?;
                size_of_val(&disp_info)
            }
            GpuResponse::OkCapsetInfo {
                capset_id,
                version,
                size,
            } => {
                resp.write_obj(virtio_gpu_resp_capset_info {
                    hdr,
                    capset_id: Le32::from(capset_id),
                    capset_max_version: Le32::from(version),
                    capset_max_size: Le32::from(size),
                    padding: Le32::from(0),
                })?;
                size_of::<virtio_gpu_resp_capset_info>()
            }
            GpuResponse::OkCapset(ref data) => {
                resp.write_obj(hdr)?;
                resp.write_all(data)?;
                size_of_val(&hdr) + data.len()
            }
            GpuResponse::OkEdid(ref edid_bytes) => {
                let mut edid_resp = virtio_gpu_resp_get_edid {
                    hdr,
                    size: Le32::from(1024),
                    padding: Le32::from(0),
                    edid: [0; 1024],
                };

                edid_resp.edid[0..edid_bytes.len()].copy_from_slice(edid_bytes.as_bytes());
                resp.write_obj(edid_resp)?;
                size_of::<virtio_gpu_resp_get_edid>()
            }
            GpuResponse::OkResourcePlaneInfo {
                format_modifier,
                ref plane_info,
            } => {
                if plane_info.len() > PLANE_INFO_MAX_COUNT {
                    return Err(GpuResponseEncodeError::TooManyPlanes(plane_info.len()));
                }
                let mut strides = [Le32::default(); PLANE_INFO_MAX_COUNT];
                let mut offsets = [Le32::default(); PLANE_INFO_MAX_COUNT];
                for (plane_index, plane) in plane_info.iter().enumerate() {
                    strides[plane_index] = plane.stride.into();
                    offsets[plane_index] = plane.offset.into();
                }
                let plane_info = virtio_gpu_resp_resource_plane_info {
                    hdr,
                    count: Le32::from(plane_info.len() as u32),
                    padding: 0.into(),
                    format_modifier: format_modifier.into(),
                    strides,
                    offsets,
                };
                if resp.available_bytes() >= size_of_val(&plane_info) {
                    resp.write_obj(plane_info)?;
                    size_of_val(&plane_info)
                } else {
                    // In case there is too little room in the response slice to store the
                    // entire virtio_gpu_resp_resource_plane_info, convert response to a regular
                    // VIRTIO_GPU_RESP_OK_NODATA and attempt to return that.
                    resp.write_obj(virtio_gpu_ctrl_hdr {
                        type_: Le32::from(VIRTIO_GPU_RESP_OK_NODATA),
                        ..hdr
                    })?;
                    size_of_val(&hdr)
                }
            }
            GpuResponse::OkResourceUuid { uuid } => {
                let resp_info = virtio_gpu_resp_resource_uuid { hdr, uuid };

                resp.write_obj(resp_info)?;
                size_of_val(&resp_info)
            }
            GpuResponse::OkMapInfo { map_info } => {
                let resp_info = virtio_gpu_resp_map_info {
                    hdr,
                    map_info: Le32::from(map_info),
                    padding: Default::default(),
                };

                resp.write_obj(resp_info)?;
                size_of_val(&resp_info)
            }
            _ => {
                resp.write_obj(hdr)?;
                size_of_val(&hdr)
            }
        };
        Ok(len as u32)
    }
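
    // Usage sketch (illustrative, not part of the original source): the device's
    // request handler typically echoes the fence-related fields from the request
    // header into the response and then reports the written length back to the
    // virtqueue. `req_hdr`, `response` and the `Writer`-style `resp` are assumed
    // to exist in the caller's context.
    //
    //     let resp_len = response.encode(
    //         req_hdr.flags.to_native(),
    //         req_hdr.fence_id.to_native(),
    //         req_hdr.ctx_id.to_native(),
    //         req_hdr.ring_idx,
    //         &mut resp,
    //     )?;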

    /// Gets the `VIRTIO_GPU_*` enum value that corresponds to this variant.
    pub fn get_type(&self) -> u32 {
        match self {
            GpuResponse::OkNoData => VIRTIO_GPU_RESP_OK_NODATA,
            GpuResponse::OkDisplayInfo(_) => VIRTIO_GPU_RESP_OK_DISPLAY_INFO,
            GpuResponse::OkCapsetInfo { .. } => VIRTIO_GPU_RESP_OK_CAPSET_INFO,
            GpuResponse::OkCapset(_) => VIRTIO_GPU_RESP_OK_CAPSET,
            GpuResponse::OkEdid(_) => VIRTIO_GPU_RESP_OK_EDID,
            GpuResponse::OkResourcePlaneInfo { .. } => VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO,
            GpuResponse::OkResourceUuid { .. } => VIRTIO_GPU_RESP_OK_RESOURCE_UUID,
            GpuResponse::OkMapInfo { .. } => VIRTIO_GPU_RESP_OK_MAP_INFO,
            GpuResponse::ErrUnspec => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrTube(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrBase(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrRutabaga(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrDisplay(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrUdmabuf(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrScanout { num_scanouts: _ } => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrEdid(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrOutOfMemory => VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY,
            GpuResponse::ErrInvalidScanoutId => VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID,
            GpuResponse::ErrInvalidResourceId => VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID,
            GpuResponse::ErrInvalidContextId => VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID,
            GpuResponse::ErrInvalidParameter => VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER,
        }
    }
}

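// A minimal sanity check added as an illustrative sketch (not part of the
// original module): it only exercises code defined in this file, asserting that
// a few representative variants map to the expected VIRTIO_GPU_RESP_* codes.
#[cfg(test)]
mod protocol_tests {
    use super::*;

    #[test]
    fn response_variants_map_to_expected_codes() {
        assert_eq!(GpuResponse::OkNoData.get_type(), VIRTIO_GPU_RESP_OK_NODATA);
        assert_eq!(
            GpuResponse::OkMapInfo { map_info: 0 }.get_type(),
            VIRTIO_GPU_RESP_OK_MAP_INFO
        );
        assert_eq!(
            GpuResponse::ErrOutOfMemory.get_type(),
            VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY
        );
    }
}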