1 // Copyright 2019 The Chromium OS Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #![allow(dead_code)]
6 #![allow(non_camel_case_types)]
7
8 use std::cmp::min;
9 use std::convert::From;
10 use std::fmt::{self, Display};
11 use std::io::{self, Write};
12 use std::marker::PhantomData;
13 use std::mem::{size_of, size_of_val};
14 use std::str::from_utf8;
15
16 use super::super::DescriptorError;
17 use super::{Reader, Writer};
18 use base::Error as BaseError;
19 use base::{ExternalMappingError, TubeError};
20 use data_model::{DataInit, Le32, Le64};
21 use gpu_display::GpuDisplayError;
22 use remain::sorted;
23 use rutabaga_gfx::RutabagaError;
24 use thiserror::Error;
25
26 use crate::virtio::gpu::udmabuf::UdmabufError;
27
/* virtio-gpu feature bits (bit numbers, not masks) */
pub const VIRTIO_GPU_F_VIRGL: u32 = 0;
pub const VIRTIO_GPU_F_EDID: u32 = 1;
pub const VIRTIO_GPU_F_RESOURCE_UUID: u32 = 2;
pub const VIRTIO_GPU_F_RESOURCE_BLOB: u32 = 3;
pub const VIRTIO_GPU_F_CONTEXT_INIT: u32 = 4;
/* The following capabilities are not upstreamed. */
pub const VIRTIO_GPU_F_RESOURCE_SYNC: u32 = 5;
pub const VIRTIO_GPU_F_CREATE_GUEST_HANDLE: u32 = 6;

pub const VIRTIO_GPU_UNDEFINED: u32 = 0x0;

/* 2d commands */
pub const VIRTIO_GPU_CMD_GET_DISPLAY_INFO: u32 = 0x100;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_2D: u32 = 0x101;
pub const VIRTIO_GPU_CMD_RESOURCE_UNREF: u32 = 0x102;
pub const VIRTIO_GPU_CMD_SET_SCANOUT: u32 = 0x103;
pub const VIRTIO_GPU_CMD_RESOURCE_FLUSH: u32 = 0x104;
pub const VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D: u32 = 0x105;
pub const VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING: u32 = 0x106;
pub const VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING: u32 = 0x107;
pub const VIRTIO_GPU_CMD_GET_CAPSET_INFO: u32 = 0x108;
pub const VIRTIO_GPU_CMD_GET_CAPSET: u32 = 0x109;
pub const VIRTIO_GPU_CMD_GET_EDID: u32 = 0x10a;
pub const VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID: u32 = 0x10b;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB: u32 = 0x10c;
pub const VIRTIO_GPU_CMD_SET_SCANOUT_BLOB: u32 = 0x10d;

/* 3d commands */
pub const VIRTIO_GPU_CMD_CTX_CREATE: u32 = 0x200;
pub const VIRTIO_GPU_CMD_CTX_DESTROY: u32 = 0x201;
pub const VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE: u32 = 0x202;
pub const VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE: u32 = 0x203;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_3D: u32 = 0x204;
pub const VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D: u32 = 0x205;
pub const VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D: u32 = 0x206;
pub const VIRTIO_GPU_CMD_SUBMIT_3D: u32 = 0x207;
pub const VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB: u32 = 0x208;
pub const VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB: u32 = 0x209;

/* cursor commands */
pub const VIRTIO_GPU_CMD_UPDATE_CURSOR: u32 = 0x300;
pub const VIRTIO_GPU_CMD_MOVE_CURSOR: u32 = 0x301;

/* success responses */
pub const VIRTIO_GPU_RESP_OK_NODATA: u32 = 0x1100;
pub const VIRTIO_GPU_RESP_OK_DISPLAY_INFO: u32 = 0x1101;
pub const VIRTIO_GPU_RESP_OK_CAPSET_INFO: u32 = 0x1102;
pub const VIRTIO_GPU_RESP_OK_CAPSET: u32 = 0x1103;
pub const VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO: u32 = 0x1104;
// NOTE(review): EDID and RESOURCE_UUID share the value 0x1105 here. The
// upstream virtio spec assigns EDID 0x1104 and RESOURCE_UUID 0x1105, but
// 0x1104 is occupied by the non-upstreamed PLANE_INFO response above — confirm
// against the spec before relying on EDID responses. Values are deliberately
// left unchanged to preserve the wire protocol.
pub const VIRTIO_GPU_RESP_OK_EDID: u32 = 0x1105;
pub const VIRTIO_GPU_RESP_OK_RESOURCE_UUID: u32 = 0x1105;
pub const VIRTIO_GPU_RESP_OK_MAP_INFO: u32 = 0x1106;

/* error responses */
pub const VIRTIO_GPU_RESP_ERR_UNSPEC: u32 = 0x1200;
pub const VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY: u32 = 0x1201;
pub const VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID: u32 = 0x1202;
pub const VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID: u32 = 0x1203;
pub const VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID: u32 = 0x1204;
pub const VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER: u32 = 0x1205;

/* blob resource memory types (virtio_gpu_resource_create_blob.blob_mem) */
pub const VIRTIO_GPU_BLOB_MEM_GUEST: u32 = 0x0001;
pub const VIRTIO_GPU_BLOB_MEM_HOST3D: u32 = 0x0002;
pub const VIRTIO_GPU_BLOB_MEM_HOST3D_GUEST: u32 = 0x0003;

/* blob resource usage flags (virtio_gpu_resource_create_blob.blob_flags) */
pub const VIRTIO_GPU_BLOB_FLAG_USE_MAPPABLE: u32 = 0x0001;
pub const VIRTIO_GPU_BLOB_FLAG_USE_SHAREABLE: u32 = 0x0002;
pub const VIRTIO_GPU_BLOB_FLAG_USE_CROSS_DEVICE: u32 = 0x0004;
/* Create a OS-specific handle from guest memory (not upstreamed). */
pub const VIRTIO_GPU_BLOB_FLAG_CREATE_GUEST_HANDLE: u32 = 0x0008;

/* shared memory region ids */
pub const VIRTIO_GPU_SHM_ID_NONE: u8 = 0x0000;
pub const VIRTIO_GPU_SHM_ID_HOST_VISIBLE: u8 = 0x0001;

/// Returns the name of the given `VIRTIO_GPU_CMD_*` / `VIRTIO_GPU_RESP_*`
/// value for logging, or `"UNKNOWN"` for unrecognized values. Every arm
/// echoes its constant's name exactly.
pub fn virtio_gpu_cmd_str(cmd: u32) -> &'static str {
    match cmd {
        VIRTIO_GPU_CMD_GET_DISPLAY_INFO => "VIRTIO_GPU_CMD_GET_DISPLAY_INFO",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_2D => "VIRTIO_GPU_CMD_RESOURCE_CREATE_2D",
        VIRTIO_GPU_CMD_RESOURCE_UNREF => "VIRTIO_GPU_CMD_RESOURCE_UNREF",
        VIRTIO_GPU_CMD_SET_SCANOUT => "VIRTIO_GPU_CMD_SET_SCANOUT",
        VIRTIO_GPU_CMD_SET_SCANOUT_BLOB => "VIRTIO_GPU_CMD_SET_SCANOUT_BLOB",
        VIRTIO_GPU_CMD_RESOURCE_FLUSH => "VIRTIO_GPU_CMD_RESOURCE_FLUSH",
        VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D => "VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D",
        VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING => "VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING",
        VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING => "VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING",
        VIRTIO_GPU_CMD_GET_CAPSET_INFO => "VIRTIO_GPU_CMD_GET_CAPSET_INFO",
        VIRTIO_GPU_CMD_GET_CAPSET => "VIRTIO_GPU_CMD_GET_CAPSET",
        VIRTIO_GPU_CMD_GET_EDID => "VIRTIO_GPU_CMD_GET_EDID",
        VIRTIO_GPU_CMD_CTX_CREATE => "VIRTIO_GPU_CMD_CTX_CREATE",
        VIRTIO_GPU_CMD_CTX_DESTROY => "VIRTIO_GPU_CMD_CTX_DESTROY",
        VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE => "VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE",
        VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE => "VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE",
        VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID => "VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB => "VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_3D => "VIRTIO_GPU_CMD_RESOURCE_CREATE_3D",
        VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D => "VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D",
        VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D => "VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D",
        VIRTIO_GPU_CMD_SUBMIT_3D => "VIRTIO_GPU_CMD_SUBMIT_3D",
        // Fixed: these two arms previously omitted the `CMD_` infix
        // ("VIRTIO_GPU_RESOURCE_MAP_BLOB"), diverging from the constant names
        // every other arm echoes.
        VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB => "VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB",
        VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB => "VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB",
        VIRTIO_GPU_CMD_UPDATE_CURSOR => "VIRTIO_GPU_CMD_UPDATE_CURSOR",
        VIRTIO_GPU_CMD_MOVE_CURSOR => "VIRTIO_GPU_CMD_MOVE_CURSOR",
        VIRTIO_GPU_RESP_OK_NODATA => "VIRTIO_GPU_RESP_OK_NODATA",
        VIRTIO_GPU_RESP_OK_DISPLAY_INFO => "VIRTIO_GPU_RESP_OK_DISPLAY_INFO",
        VIRTIO_GPU_RESP_OK_CAPSET_INFO => "VIRTIO_GPU_RESP_OK_CAPSET_INFO",
        VIRTIO_GPU_RESP_OK_CAPSET => "VIRTIO_GPU_RESP_OK_CAPSET",
        VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO => "VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO",
        // VIRTIO_GPU_RESP_OK_EDID cannot get its own arm: it shares the value
        // 0x1105 with VIRTIO_GPU_RESP_OK_RESOURCE_UUID, so an arm for it here
        // would be unreachable.
        VIRTIO_GPU_RESP_OK_RESOURCE_UUID => "VIRTIO_GPU_RESP_OK_RESOURCE_UUID",
        VIRTIO_GPU_RESP_OK_MAP_INFO => "VIRTIO_GPU_RESP_OK_MAP_INFO",
        VIRTIO_GPU_RESP_ERR_UNSPEC => "VIRTIO_GPU_RESP_ERR_UNSPEC",
        VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY => "VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY",
        VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID => "VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID => "VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID => "VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER => "VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER",
        _ => "UNKNOWN",
    }
}
146
/// Header flag: the command is fenced (see the `fence_id` field).
pub const VIRTIO_GPU_FLAG_FENCE: u32 = 1 << 0;
/// Header flag: the `ring_idx` field of the header is meaningful.
pub const VIRTIO_GPU_FLAG_INFO_RING_IDX: u32 = 1 << 1;

/// Common header that begins every request and response on the virtio-gpu
/// queues (`struct virtio_gpu_ctrl_hdr` in the virtio spec). Multi-byte
/// fields are little-endian on the wire.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_ctrl_hdr {
    // A VIRTIO_GPU_CMD_* or VIRTIO_GPU_RESP_* value.
    pub type_: Le32,
    // VIRTIO_GPU_FLAG_* bits.
    pub flags: Le32,
    pub fence_id: Le64,
    pub ctx_id: Le32,
    pub ring_idx: u8,
    pub padding: [u8; 3],
}

// Safe: #[repr(C)] struct of integer/byte fields; any byte pattern is a valid
// value, so it may be read directly from guest memory.
unsafe impl DataInit for virtio_gpu_ctrl_hdr {}
162
/* data passed in the cursor vq */

/// Cursor position on a given scanout.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_cursor_pos {
    pub scanout_id: Le32,
    pub x: Le32,
    pub y: Le32,
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_cursor_pos {}

/* VIRTIO_GPU_CMD_UPDATE_CURSOR, VIRTIO_GPU_CMD_MOVE_CURSOR */
/// Cursor request shared by UPDATE_CURSOR and MOVE_CURSOR; the inline notes
/// mark which fields each command consults.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_update_cursor {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub pos: virtio_gpu_cursor_pos, /* update & move */
    pub resource_id: Le32, /* update only */
    pub hot_x: Le32, /* update only */
    pub hot_y: Le32, /* update only */
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_update_cursor {}
189
/* data passed in the control vq, 2d related */

/// Rectangle used for scanout, flush and transfer regions.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_rect {
    pub x: Le32,
    pub y: Le32,
    pub width: Le32,
    pub height: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_rect {}

/* VIRTIO_GPU_CMD_RESOURCE_UNREF */
/// Request to destroy/unreference a resource.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resource_unref {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resource_unref {}

/* VIRTIO_GPU_CMD_RESOURCE_CREATE_2D: create a 2d resource with a format */
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resource_create_2d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    // One of the VIRTIO_GPU_FORMAT_* values.
    pub format: Le32,
    pub width: Le32,
    pub height: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resource_create_2d {}

/* VIRTIO_GPU_CMD_SET_SCANOUT */
/// Associates a resource with a scanout (display).
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_set_scanout {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub scanout_id: Le32,
    pub resource_id: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_set_scanout {}

/* VIRTIO_GPU_CMD_RESOURCE_FLUSH */
/// Flushes a region of a resource to its scanout.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resource_flush {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub resource_id: Le32,
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resource_flush {}

/* VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D: simple transfer to_host */
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_transfer_to_host_2d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub offset: Le64,
    pub resource_id: Le32,
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_transfer_to_host_2d {}
263
/// One entry of a guest-memory scatter-gather list (guest address + length).
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_mem_entry {
    pub addr: Le64,
    pub length: Le32,
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_mem_entry {}

/* VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING */
/// Attaches guest backing pages to a resource. `nr_entries` counts the
/// `virtio_gpu_mem_entry` items accompanying the request.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resource_attach_backing {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub nr_entries: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resource_attach_backing {}

/* VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING */
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resource_detach_backing {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resource_detach_backing {}

/// One scanout's geometry/state in a DISPLAY_INFO response.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_display_one {
    pub r: virtio_gpu_rect,
    pub enabled: Le32,
    pub flags: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_display_one {}

/* VIRTIO_GPU_RESP_OK_DISPLAY_INFO */
const VIRTIO_GPU_MAX_SCANOUTS: usize = 16;
/// Response body: a fixed array of scanout descriptors; unused entries stay
/// zeroed (disabled).
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resp_display_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub pmodes: [virtio_gpu_display_one; VIRTIO_GPU_MAX_SCANOUTS],
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resp_display_info {}
316
/* data passed in the control vq, 3d related */

/// 3d region (origin + extent) for host transfer commands.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_box {
    pub x: Le32,
    pub y: Le32,
    pub z: Le32,
    pub w: Le32,
    pub h: Le32,
    pub d: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_box {}

/* VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D, VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D */
/// Shared request layout for both 3d transfer directions.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_transfer_host_3d {
    pub hdr: virtio_gpu_ctrl_hdr,
    // Trailing underscore because `box` is a reserved word in Rust.
    pub box_: virtio_gpu_box,
    pub offset: Le64,
    pub resource_id: Le32,
    pub level: Le32,
    pub stride: Le32,
    pub layer_stride: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_transfer_host_3d {}

/* VIRTIO_GPU_CMD_RESOURCE_CREATE_3D */
pub const VIRTIO_GPU_RESOURCE_FLAG_Y_0_TOP: u32 = 1 << 0;
/// 3d resource creation request (gallium-style target/format/bind triple).
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resource_create_3d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub target: Le32,
    pub format: Le32,
    pub bind: Le32,
    pub width: Le32,
    pub height: Le32,
    pub depth: Le32,
    pub array_size: Le32,
    pub last_level: Le32,
    pub nr_samples: Le32,
    // VIRTIO_GPU_RESOURCE_FLAG_* bits.
    pub flags: Le32,
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resource_create_3d {}
368
/* VIRTIO_GPU_CMD_CTX_CREATE */
pub const VIRTIO_GPU_CONTEXT_INIT_CAPSET_ID_MASK: u32 = 1 << 0;
/// Rendering-context creation request. `debug_name` is a fixed 64-byte
/// buffer; `nlen` gives the number of meaningful bytes in it.
#[derive(Copy)]
#[repr(C)]
pub struct virtio_gpu_ctx_create {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub nlen: Le32,
    pub context_init: Le32,
    pub debug_name: [u8; 64],
}

impl Default for virtio_gpu_ctx_create {
    fn default() -> Self {
        // Safe: every field is an integer or byte array, for which the
        // all-zero bit pattern is a valid value.
        unsafe { ::std::mem::zeroed() }
    }
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_ctx_create {}

// NOTE(review): Clone is written by hand (delegating to Copy) rather than
// derived — presumably due to the `[u8; 64]` field on older toolchains;
// confirm before switching to `#[derive(Clone)]`.
impl Clone for virtio_gpu_ctx_create {
    fn clone(&self) -> virtio_gpu_ctx_create {
        *self
    }
}
393
394 impl fmt::Debug for virtio_gpu_ctx_create {
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result395 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
396 let debug_name = from_utf8(&self.debug_name[..min(64, self.nlen.to_native() as usize)])
397 .unwrap_or("<invalid>");
398 f.debug_struct("virtio_gpu_ctx_create")
399 .field("hdr", &self.hdr)
400 .field("debug_name", &debug_name)
401 .finish()
402 }
403 }
404
/* VIRTIO_GPU_CMD_CTX_DESTROY */
/// Context destruction: no payload beyond the header (the target context id
/// is carried in `hdr.ctx_id`).
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_ctx_destroy {
    pub hdr: virtio_gpu_ctrl_hdr,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_ctx_destroy {}

/* VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE, VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE */
/// Shared layout for attaching/detaching a resource to/from a context.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_ctx_resource {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_ctx_resource {}

/* VIRTIO_GPU_CMD_SUBMIT_3D */
/// 3d command-buffer submission; `size` is the length of the accompanying
/// command stream.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_cmd_submit {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub size: Le32,
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_cmd_submit {}
435
/* capset ids reported in GET_CAPSET_INFO responses */
pub const VIRTIO_GPU_CAPSET_VIRGL: u32 = 1;
pub const VIRTIO_GPU_CAPSET_VIRGL2: u32 = 2;
pub const VIRTIO_GPU_CAPSET_GFXSTREAM: u32 = 3;
pub const VIRTIO_GPU_CAPSET_VENUS: u32 = 4;
pub const VIRTIO_GPU_CAPSET_CROSS_DOMAIN: u32 = 5;

/* VIRTIO_GPU_CMD_GET_CAPSET_INFO */
/// Query by capset *index* (not id); the response reports the capset's id,
/// maximum version and size.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_get_capset_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_index: Le32,
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_get_capset_info {}

/* VIRTIO_GPU_RESP_OK_CAPSET_INFO */
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resp_capset_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_id: Le32,
    pub capset_max_version: Le32,
    pub capset_max_size: Le32,
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resp_capset_info {}

/* VIRTIO_GPU_CMD_GET_CAPSET */
/// Fetch the contents of a capset by id + version.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_get_capset {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_id: Le32,
    pub capset_version: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_get_capset {}
476
/* VIRTIO_GPU_RESP_OK_CAPSET */
/// Response header for GET_CAPSET. `capset_data` is a zero-sized marker; the
/// variable-length capset bytes are written directly after the header (see
/// the `OkCapset` arm of `GpuResponse::encode`).
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resp_capset {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_data: PhantomData<[u8]>,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resp_capset {}

/* VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO */
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resp_resource_plane_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    // Number of valid entries in `strides`/`offsets`.
    pub count: Le32,
    pub padding: Le32,
    pub format_modifier: Le64,
    pub strides: [Le32; 4],
    pub offsets: [Le32; 4],
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resp_resource_plane_info {}

/// Maximum plane count in a plane-info response; matches the `[Le32; 4]`
/// arrays above.
pub const PLANE_INFO_MAX_COUNT: usize = 4;

/// `events_read` bit: display configuration changed.
pub const VIRTIO_GPU_EVENT_DISPLAY: u32 = 1 << 0;

/// virtio-gpu device configuration space layout.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_config {
    pub events_read: Le32,
    pub events_clear: Le32,
    pub num_scanouts: Le32,
    pub num_capsets: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_config {}
515
/* VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB */
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resource_create_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    // One of the VIRTIO_GPU_BLOB_MEM_* values.
    pub blob_mem: Le32,
    // VIRTIO_GPU_BLOB_FLAG_USE_* bits.
    pub blob_flags: Le32,
    // Count of virtio_gpu_mem_entry items accompanying the request.
    pub nr_entries: Le32,
    pub blob_id: Le64,
    pub size: Le64,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resource_create_blob {}

/* VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB */
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resource_map_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
    pub offset: Le64,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resource_map_blob {}

/* VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB */
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resource_unmap_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resource_unmap_blob {}

/* VIRTIO_GPU_RESP_OK_MAP_INFO */
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resp_map_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub map_info: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resp_map_info {}
559
/* VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID */
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resource_assign_uuid {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resource_assign_uuid {}

/* VIRTIO_GPU_RESP_OK_RESOURCE_UUID */
/// Response carrying the 16 raw bytes of the assigned UUID.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resp_resource_uuid {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub uuid: [u8; 16],
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_resp_resource_uuid {}

/* VIRTIO_GPU_CMD_SET_SCANOUT_BLOB */
/// Like SET_SCANOUT but for blob resources, which carry their own layout
/// (format, per-plane strides/offsets) since the device has no create-2d
/// metadata for them.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_set_scanout_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub scanout_id: Le32,
    pub resource_id: Le32,
    pub width: Le32,
    pub height: Le32,
    pub format: Le32,
    pub padding: Le32,
    pub strides: [Le32; 4],
    pub offsets: [Le32; 4],
}

// Safe: #[repr(C)] plain data; any byte pattern is valid.
unsafe impl DataInit for virtio_gpu_set_scanout_blob {}

/* simple formats for fbcon/X use */
// Non-contiguous values are the virtio-gpu spec's format codes; the gaps are
// unassigned codes.
pub const VIRTIO_GPU_FORMAT_B8G8R8A8_UNORM: u32 = 1;
pub const VIRTIO_GPU_FORMAT_B8G8R8X8_UNORM: u32 = 2;
pub const VIRTIO_GPU_FORMAT_A8R8G8B8_UNORM: u32 = 3;
pub const VIRTIO_GPU_FORMAT_X8R8G8B8_UNORM: u32 = 4;
pub const VIRTIO_GPU_FORMAT_R8G8B8A8_UNORM: u32 = 67;
pub const VIRTIO_GPU_FORMAT_X8B8G8R8_UNORM: u32 = 68;
pub const VIRTIO_GPU_FORMAT_A8B8G8R8_UNORM: u32 = 121;
pub const VIRTIO_GPU_FORMAT_R8G8B8X8_UNORM: u32 = 134;
606
/// A virtio gpu command and associated metadata specific to each command.
///
/// Each variant wraps the decoded request struct for one `VIRTIO_GPU_CMD_*`
/// value; see `GpuCommand::decode`.
#[derive(Copy, Clone)]
pub enum GpuCommand {
    // 2d / common commands
    GetDisplayInfo(virtio_gpu_ctrl_hdr),
    ResourceCreate2d(virtio_gpu_resource_create_2d),
    ResourceUnref(virtio_gpu_resource_unref),
    SetScanout(virtio_gpu_set_scanout),
    SetScanoutBlob(virtio_gpu_set_scanout_blob),
    ResourceFlush(virtio_gpu_resource_flush),
    TransferToHost2d(virtio_gpu_transfer_to_host_2d),
    ResourceAttachBacking(virtio_gpu_resource_attach_backing),
    ResourceDetachBacking(virtio_gpu_resource_detach_backing),
    GetCapsetInfo(virtio_gpu_get_capset_info),
    GetCapset(virtio_gpu_get_capset),
    // 3d / context commands
    CtxCreate(virtio_gpu_ctx_create),
    CtxDestroy(virtio_gpu_ctx_destroy),
    CtxAttachResource(virtio_gpu_ctx_resource),
    CtxDetachResource(virtio_gpu_ctx_resource),
    ResourceCreate3d(virtio_gpu_resource_create_3d),
    TransferToHost3d(virtio_gpu_transfer_host_3d),
    TransferFromHost3d(virtio_gpu_transfer_host_3d),
    CmdSubmit3d(virtio_gpu_cmd_submit),
    ResourceCreateBlob(virtio_gpu_resource_create_blob),
    ResourceMapBlob(virtio_gpu_resource_map_blob),
    ResourceUnmapBlob(virtio_gpu_resource_unmap_blob),
    // cursor commands
    UpdateCursor(virtio_gpu_update_cursor),
    MoveCursor(virtio_gpu_update_cursor),
    ResourceAssignUuid(virtio_gpu_resource_assign_uuid),
}
636
/// An error indicating something went wrong decoding a `GpuCommand`. These correspond to
/// `VIRTIO_GPU_CMD_*`.
// #[sorted] (remain crate) enforces alphabetical variant order at compile
// time; keep new variants in order.
#[sorted]
#[derive(Error, Debug)]
pub enum GpuCommandDecodeError {
    /// The type of the command was invalid.
    #[error("invalid command type ({0})")]
    InvalidType(u32),
    /// An I/O error occurred.
    #[error("an I/O error occurred: {0}")]
    IO(io::Error),
    /// The command referenced an inaccessible area of memory.
    #[error("command referenced an inaccessible area of memory: {0}")]
    Memory(DescriptorError),
}
652
653 impl From<DescriptorError> for GpuCommandDecodeError {
from(e: DescriptorError) -> GpuCommandDecodeError654 fn from(e: DescriptorError) -> GpuCommandDecodeError {
655 GpuCommandDecodeError::Memory(e)
656 }
657 }
658
659 impl From<io::Error> for GpuCommandDecodeError {
from(e: io::Error) -> GpuCommandDecodeError660 fn from(e: io::Error) -> GpuCommandDecodeError {
661 GpuCommandDecodeError::IO(e)
662 }
663 }
664
665 impl fmt::Debug for GpuCommand {
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result666 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
667 use self::GpuCommand::*;
668 match self {
669 GetDisplayInfo(_info) => f.debug_struct("GetDisplayInfo").finish(),
670 ResourceCreate2d(_info) => f.debug_struct("ResourceCreate2d").finish(),
671 ResourceUnref(_info) => f.debug_struct("ResourceUnref").finish(),
672 SetScanout(_info) => f.debug_struct("SetScanout").finish(),
673 SetScanoutBlob(_info) => f.debug_struct("SetScanoutBlob").finish(),
674 ResourceFlush(_info) => f.debug_struct("ResourceFlush").finish(),
675 TransferToHost2d(_info) => f.debug_struct("TransferToHost2d").finish(),
676 ResourceAttachBacking(_info) => f.debug_struct("ResourceAttachBacking").finish(),
677 ResourceDetachBacking(_info) => f.debug_struct("ResourceDetachBacking").finish(),
678 GetCapsetInfo(_info) => f.debug_struct("GetCapsetInfo").finish(),
679 GetCapset(_info) => f.debug_struct("GetCapset").finish(),
680 CtxCreate(_info) => f.debug_struct("CtxCreate").finish(),
681 CtxDestroy(_info) => f.debug_struct("CtxDestroy").finish(),
682 CtxAttachResource(_info) => f.debug_struct("CtxAttachResource").finish(),
683 CtxDetachResource(_info) => f.debug_struct("CtxDetachResource").finish(),
684 ResourceCreate3d(_info) => f.debug_struct("ResourceCreate3d").finish(),
685 TransferToHost3d(_info) => f.debug_struct("TransferToHost3d").finish(),
686 TransferFromHost3d(_info) => f.debug_struct("TransferFromHost3d").finish(),
687 CmdSubmit3d(_info) => f.debug_struct("CmdSubmit3d").finish(),
688 ResourceCreateBlob(_info) => f.debug_struct("ResourceCreateBlob").finish(),
689 ResourceMapBlob(_info) => f.debug_struct("ResourceMapBlob").finish(),
690 ResourceUnmapBlob(_info) => f.debug_struct("ResourceUnmapBlob").finish(),
691 UpdateCursor(_info) => f.debug_struct("UpdateCursor").finish(),
692 MoveCursor(_info) => f.debug_struct("MoveCursor").finish(),
693 ResourceAssignUuid(_info) => f.debug_struct("ResourceAssignUuid").finish(),
694 }
695 }
696 }
697
impl GpuCommand {
    /// Decodes a command from the given chunk of memory.
    ///
    /// The header is read from a *clone* of `cmd` so that the subsequent
    /// full-struct read (every request struct embeds the header as its first
    /// field) starts again from the beginning of the reader.
    ///
    /// Returns `GpuCommandDecodeError::InvalidType` for unknown command
    /// types; read failures surface via the `From` conversions as
    /// `IO`/`Memory` errors.
    pub fn decode(cmd: &mut Reader) -> Result<GpuCommand, GpuCommandDecodeError> {
        use self::GpuCommand::*;
        // Peek at the header without consuming it from `cmd`.
        let hdr = cmd.clone().read_obj::<virtio_gpu_ctrl_hdr>()?;
        Ok(match hdr.type_.into() {
            VIRTIO_GPU_CMD_GET_DISPLAY_INFO => GetDisplayInfo(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_2D => ResourceCreate2d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_UNREF => ResourceUnref(cmd.read_obj()?),
            VIRTIO_GPU_CMD_SET_SCANOUT => SetScanout(cmd.read_obj()?),
            VIRTIO_GPU_CMD_SET_SCANOUT_BLOB => SetScanoutBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_FLUSH => ResourceFlush(cmd.read_obj()?),
            VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D => TransferToHost2d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING => ResourceAttachBacking(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING => ResourceDetachBacking(cmd.read_obj()?),
            VIRTIO_GPU_CMD_GET_CAPSET_INFO => GetCapsetInfo(cmd.read_obj()?),
            VIRTIO_GPU_CMD_GET_CAPSET => GetCapset(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_CREATE => CtxCreate(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_DESTROY => CtxDestroy(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE => CtxAttachResource(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE => CtxDetachResource(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_3D => ResourceCreate3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D => TransferToHost3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D => TransferFromHost3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_SUBMIT_3D => CmdSubmit3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB => ResourceCreateBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB => ResourceMapBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB => ResourceUnmapBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_UPDATE_CURSOR => UpdateCursor(cmd.read_obj()?),
            VIRTIO_GPU_CMD_MOVE_CURSOR => MoveCursor(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID => ResourceAssignUuid(cmd.read_obj()?),
            // Unknown type: report it instead of guessing at a payload.
            _ => return Err(GpuCommandDecodeError::InvalidType(hdr.type_.into())),
        })
    }

    /// Gets the generic `virtio_gpu_ctrl_hdr` from this command.
    pub fn ctrl_hdr(&self) -> &virtio_gpu_ctrl_hdr {
        use self::GpuCommand::*;
        match self {
            // GetDisplayInfo's payload *is* the bare header.
            GetDisplayInfo(info) => info,
            ResourceCreate2d(info) => &info.hdr,
            ResourceUnref(info) => &info.hdr,
            SetScanout(info) => &info.hdr,
            SetScanoutBlob(info) => &info.hdr,
            ResourceFlush(info) => &info.hdr,
            TransferToHost2d(info) => &info.hdr,
            ResourceAttachBacking(info) => &info.hdr,
            ResourceDetachBacking(info) => &info.hdr,
            GetCapsetInfo(info) => &info.hdr,
            GetCapset(info) => &info.hdr,
            CtxCreate(info) => &info.hdr,
            CtxDestroy(info) => &info.hdr,
            CtxAttachResource(info) => &info.hdr,
            CtxDetachResource(info) => &info.hdr,
            ResourceCreate3d(info) => &info.hdr,
            TransferToHost3d(info) => &info.hdr,
            TransferFromHost3d(info) => &info.hdr,
            CmdSubmit3d(info) => &info.hdr,
            ResourceCreateBlob(info) => &info.hdr,
            ResourceMapBlob(info) => &info.hdr,
            ResourceUnmapBlob(info) => &info.hdr,
            UpdateCursor(info) => &info.hdr,
            MoveCursor(info) => &info.hdr,
            ResourceAssignUuid(info) => &info.hdr,
        }
    }
}
765
/// Per-plane stride/offset pair reported in `OkResourcePlaneInfo`.
#[derive(Debug, PartialEq)]
pub struct GpuResponsePlaneInfo {
    pub stride: u32,
    pub offset: u32,
}

/// A response to a `GpuCommand`. These correspond to `VIRTIO_GPU_RESP_*`.
#[derive(Debug)]
pub enum GpuResponse {
    OkNoData,
    // (width, height) per enabled scanout.
    OkDisplayInfo(Vec<(u32, u32)>),
    OkCapsetInfo {
        capset_id: u32,
        version: u32,
        size: u32,
    },
    OkCapset(Vec<u8>),
    OkResourcePlaneInfo {
        format_modifier: u64,
        plane_info: Vec<GpuResponsePlaneInfo>,
    },
    OkResourceUuid {
        uuid: [u8; 16],
    },
    OkMapInfo {
        map_info: u32,
    },
    ErrUnspec,
    // Rich host-side error causes wrapping failures from helper crates; these
    // carry more detail than the wire-level VIRTIO_GPU_RESP_ERR_* codes.
    ErrTube(TubeError),
    ErrBase(BaseError),
    ErrRutabaga(RutabagaError),
    ErrDisplay(GpuDisplayError),
    ErrMapping(ExternalMappingError),
    ErrScanout {
        num_scanouts: u32,
    },
    // Direct wire-level error codes.
    ErrOutOfMemory,
    ErrInvalidScanoutId,
    ErrInvalidResourceId,
    ErrInvalidContextId,
    ErrInvalidParameter,
    ErrUdmabuf(UdmabufError),
}
809
810 impl From<TubeError> for GpuResponse {
from(e: TubeError) -> GpuResponse811 fn from(e: TubeError) -> GpuResponse {
812 GpuResponse::ErrTube(e)
813 }
814 }
815
816 impl From<RutabagaError> for GpuResponse {
from(e: RutabagaError) -> GpuResponse817 fn from(e: RutabagaError) -> GpuResponse {
818 GpuResponse::ErrRutabaga(e)
819 }
820 }
821
822 impl From<GpuDisplayError> for GpuResponse {
from(e: GpuDisplayError) -> GpuResponse823 fn from(e: GpuDisplayError) -> GpuResponse {
824 GpuResponse::ErrDisplay(e)
825 }
826 }
827
828 impl From<ExternalMappingError> for GpuResponse {
from(e: ExternalMappingError) -> GpuResponse829 fn from(e: ExternalMappingError) -> GpuResponse {
830 GpuResponse::ErrMapping(e)
831 }
832 }
833
834 impl From<UdmabufError> for GpuResponse {
from(e: UdmabufError) -> GpuResponse835 fn from(e: UdmabufError) -> GpuResponse {
836 GpuResponse::ErrUdmabuf(e)
837 }
838 }
839
// Display renders only the error variants that wrap an inner error (for log
// messages); every other variant — including the Ok* responses and the simple
// wire-level errors — formats as an empty string.
impl Display for GpuResponse {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use self::GpuResponse::*;
        match self {
            ErrTube(e) => write!(f, "tube error: {}", e),
            ErrBase(e) => write!(f, "base error: {}", e),
            ErrRutabaga(e) => write!(f, "renderer error: {}", e),
            ErrDisplay(e) => write!(f, "display error: {}", e),
            ErrScanout { num_scanouts } => write!(f, "non-zero scanout: {}", num_scanouts),
            ErrUdmabuf(e) => write!(f, "udmabuf error: {}", e),
            // Intentionally empty output for all remaining variants.
            _ => Ok(()),
        }
    }
}
854
/// An error indicating something went wrong decoding a `GpuCommand`.
// NOTE(review): the summary line above looks copy-pasted from the decode
// error — this enum describes *encoding* a `GpuResponse`; confirm and reword.
// #[sorted] (remain crate) enforces alphabetical variant order at compile
// time; keep new variants in order.
#[sorted]
#[derive(Error, Debug)]
pub enum GpuResponseEncodeError {
    /// An I/O error occurred.
    #[error("an I/O error occurred: {0}")]
    IO(io::Error),
    /// The response was encoded to an inaccessible area of memory.
    #[error("response was encoded to an inaccessible area of memory: {0}")]
    Memory(DescriptorError),
    /// More displays than are valid were in a `OkDisplayInfo`.
    #[error("{0} is more displays than are valid")]
    TooManyDisplays(usize),
    /// More planes than are valid were in a `OkResourcePlaneInfo`.
    #[error("{0} is more planes than are valid")]
    TooManyPlanes(usize),
}

// Conversions enabling `?` on descriptor writes and I/O while encoding.
impl From<DescriptorError> for GpuResponseEncodeError {
    fn from(e: DescriptorError) -> GpuResponseEncodeError {
        GpuResponseEncodeError::Memory(e)
    }
}

impl From<io::Error> for GpuResponseEncodeError {
    fn from(e: io::Error) -> GpuResponseEncodeError {
        GpuResponseEncodeError::IO(e)
    }
}
884
/// Result whose success and failure values are both `GpuResponse`s: the `Err`
/// arm carries an error response that is still sent back to the guest.
pub type VirtioGpuResult = std::result::Result<GpuResponse, GpuResponse>;
886
impl GpuResponse {
    /// Encodes this `GpuResponse` into `resp` with the given set of metadata.
    ///
    /// The response always begins with a `virtio_gpu_ctrl_hdr` whose `type_`
    /// is taken from [`get_type`](Self::get_type) and whose remaining fields
    /// come from `flags`, `fence_id`, `ctx_id` and `ring_idx`. Variant-specific
    /// payload, if any, follows the header.
    ///
    /// Returns the total number of bytes written into `resp`, or a
    /// `GpuResponseEncodeError` if the payload exceeds protocol limits or the
    /// write into guest memory fails.
    pub fn encode(
        &self,
        flags: u32,
        fence_id: u64,
        ctx_id: u32,
        ring_idx: u8,
        resp: &mut Writer,
    ) -> Result<u32, GpuResponseEncodeError> {
        // Common header prepended to every response variant.
        let hdr = virtio_gpu_ctrl_hdr {
            type_: Le32::from(self.get_type()),
            flags: Le32::from(flags),
            fence_id: Le64::from(fence_id),
            ctx_id: Le32::from(ctx_id),
            ring_idx,
            padding: Default::default(),
        };
        let len = match *self {
            GpuResponse::OkDisplayInfo(ref info) => {
                if info.len() > VIRTIO_GPU_MAX_SCANOUTS {
                    return Err(GpuResponseEncodeError::TooManyDisplays(info.len()));
                }
                // pmodes not covered by `info` stay zeroed, i.e. disabled.
                let mut disp_info = virtio_gpu_resp_display_info {
                    hdr,
                    pmodes: Default::default(),
                };
                for (disp_mode, &(width, height)) in disp_info.pmodes.iter_mut().zip(info) {
                    disp_mode.r.width = Le32::from(width);
                    disp_mode.r.height = Le32::from(height);
                    disp_mode.enabled = Le32::from(1);
                }
                resp.write_obj(disp_info)?;
                size_of_val(&disp_info)
            }
            GpuResponse::OkCapsetInfo {
                capset_id,
                version,
                size,
            } => {
                resp.write_obj(virtio_gpu_resp_capset_info {
                    hdr,
                    capset_id: Le32::from(capset_id),
                    capset_max_version: Le32::from(version),
                    capset_max_size: Le32::from(size),
                    padding: Le32::from(0),
                })?;
                size_of::<virtio_gpu_resp_capset_info>()
            }
            GpuResponse::OkCapset(ref data) => {
                // Header followed immediately by the raw capability-set blob.
                resp.write_obj(hdr)?;
                resp.write_all(data)?;
                size_of_val(&hdr) + data.len()
            }
            GpuResponse::OkResourcePlaneInfo {
                format_modifier,
                ref plane_info,
            } => {
                if plane_info.len() > PLANE_INFO_MAX_COUNT {
                    return Err(GpuResponseEncodeError::TooManyPlanes(plane_info.len()));
                }
                // Fixed-size stride/offset arrays; unused slots stay zero.
                let mut strides = [Le32::default(); PLANE_INFO_MAX_COUNT];
                let mut offsets = [Le32::default(); PLANE_INFO_MAX_COUNT];
                for (plane_index, plane) in plane_info.iter().enumerate() {
                    strides[plane_index] = plane.stride.into();
                    offsets[plane_index] = plane.offset.into();
                }
                let plane_info = virtio_gpu_resp_resource_plane_info {
                    hdr,
                    count: Le32::from(plane_info.len() as u32),
                    padding: 0.into(),
                    format_modifier: format_modifier.into(),
                    strides,
                    offsets,
                };
                if resp.available_bytes() >= size_of_val(&plane_info) {
                    resp.write_obj(plane_info)?;
                    size_of_val(&plane_info)
                } else {
                    // In case there is too little room in the response slice to store the
                    // entire virtio_gpu_resp_resource_plane_info, convert response to a regular
                    // VIRTIO_GPU_RESP_OK_NODATA and attempt to return that.
                    resp.write_obj(virtio_gpu_ctrl_hdr {
                        type_: Le32::from(VIRTIO_GPU_RESP_OK_NODATA),
                        ..hdr
                    })?;
                    size_of_val(&hdr)
                }
            }
            GpuResponse::OkResourceUuid { uuid } => {
                let resp_info = virtio_gpu_resp_resource_uuid { hdr, uuid };

                resp.write_obj(resp_info)?;
                size_of_val(&resp_info)
            }
            GpuResponse::OkMapInfo { map_info } => {
                let resp_info = virtio_gpu_resp_map_info {
                    hdr,
                    map_info: Le32::from(map_info),
                };

                resp.write_obj(resp_info)?;
                size_of_val(&resp_info)
            }
            // OkNoData and every error variant are header-only responses.
            _ => {
                resp.write_obj(hdr)?;
                size_of_val(&hdr)
            }
        };
        // All response layouts above are far smaller than u32::MAX, so this
        // cast cannot truncate in practice.
        Ok(len as u32)
    }

    /// Gets the `VIRTIO_GPU_*` enum value that corresponds to this variant.
    ///
    /// Error variants without a dedicated protocol code all map to
    /// `VIRTIO_GPU_RESP_ERR_UNSPEC`; the richer detail they carry is only
    /// used host-side (see the `Display` impl).
    pub fn get_type(&self) -> u32 {
        match self {
            GpuResponse::OkNoData => VIRTIO_GPU_RESP_OK_NODATA,
            GpuResponse::OkDisplayInfo(_) => VIRTIO_GPU_RESP_OK_DISPLAY_INFO,
            GpuResponse::OkCapsetInfo { .. } => VIRTIO_GPU_RESP_OK_CAPSET_INFO,
            GpuResponse::OkCapset(_) => VIRTIO_GPU_RESP_OK_CAPSET,
            GpuResponse::OkResourcePlaneInfo { .. } => VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO,
            GpuResponse::OkResourceUuid { .. } => VIRTIO_GPU_RESP_OK_RESOURCE_UUID,
            GpuResponse::OkMapInfo { .. } => VIRTIO_GPU_RESP_OK_MAP_INFO,
            GpuResponse::ErrUnspec => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrTube(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrBase(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrRutabaga(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrDisplay(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrMapping(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrUdmabuf(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrScanout { num_scanouts: _ } => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrOutOfMemory => VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY,
            GpuResponse::ErrInvalidScanoutId => VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID,
            GpuResponse::ErrInvalidResourceId => VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID,
            GpuResponse::ErrInvalidContextId => VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID,
            GpuResponse::ErrInvalidParameter => VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER,
        }
    }
}
1025