• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2019 The Chromium OS Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #![allow(dead_code)]
6 #![allow(non_camel_case_types)]
7 
8 use std::cmp::min;
9 use std::fmt::{self, Display};
10 use std::marker::PhantomData;
11 use std::mem::{size_of, size_of_val};
12 use std::str::from_utf8;
13 
14 use data_model::{DataInit, Le32, Le64, VolatileMemory, VolatileMemoryError, VolatileSlice};
15 
/// Feature bit: the device supports 3d (virgl) commands.
pub const VIRTIO_GPU_F_VIRGL: u32 = 0;

/// Placeholder for an unset command/response type.
pub const VIRTIO_GPU_UNDEFINED: u32 = 0x0;

/* 2d commands */
pub const VIRTIO_GPU_CMD_GET_DISPLAY_INFO: u32 = 0x100;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_2D: u32 = 0x101;
pub const VIRTIO_GPU_CMD_RESOURCE_UNREF: u32 = 0x102;
pub const VIRTIO_GPU_CMD_SET_SCANOUT: u32 = 0x103;
pub const VIRTIO_GPU_CMD_RESOURCE_FLUSH: u32 = 0x104;
pub const VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D: u32 = 0x105;
pub const VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING: u32 = 0x106;
pub const VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING: u32 = 0x107;
pub const VIRTIO_GPU_CMD_GET_CAPSET_INFO: u32 = 0x108;
pub const VIRTIO_GPU_CMD_GET_CAPSET: u32 = 0x109;

/* 3d commands */
pub const VIRTIO_GPU_CMD_CTX_CREATE: u32 = 0x200;
pub const VIRTIO_GPU_CMD_CTX_DESTROY: u32 = 0x201;
pub const VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE: u32 = 0x202;
pub const VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE: u32 = 0x203;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_3D: u32 = 0x204;
pub const VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D: u32 = 0x205;
pub const VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D: u32 = 0x206;
pub const VIRTIO_GPU_CMD_SUBMIT_3D: u32 = 0x207;

/* cursor commands */
pub const VIRTIO_GPU_CMD_UPDATE_CURSOR: u32 = 0x300;
pub const VIRTIO_GPU_CMD_MOVE_CURSOR: u32 = 0x301;

/* success responses */
pub const VIRTIO_GPU_RESP_OK_NODATA: u32 = 0x1100;
pub const VIRTIO_GPU_RESP_OK_DISPLAY_INFO: u32 = 0x1101;
pub const VIRTIO_GPU_RESP_OK_CAPSET_INFO: u32 = 0x1102;
pub const VIRTIO_GPU_RESP_OK_CAPSET: u32 = 0x1103;
pub const VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO: u32 = 0x1104;

/* error responses */
pub const VIRTIO_GPU_RESP_ERR_UNSPEC: u32 = 0x1200;
pub const VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY: u32 = 0x1201;
pub const VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID: u32 = 0x1202;
pub const VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID: u32 = 0x1203;
pub const VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID: u32 = 0x1204;
pub const VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER: u32 = 0x1205;
/// Returns the human-readable name of a `VIRTIO_GPU_CMD_*` or
/// `VIRTIO_GPU_RESP_*` value, or `"UNKNOWN"` for an unrecognized value.
/// Intended for log/debug output.
pub fn virtio_gpu_cmd_str(cmd: u32) -> &'static str {
    match cmd {
        VIRTIO_GPU_CMD_GET_DISPLAY_INFO => "VIRTIO_GPU_CMD_GET_DISPLAY_INFO",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_2D => "VIRTIO_GPU_CMD_RESOURCE_CREATE_2D",
        VIRTIO_GPU_CMD_RESOURCE_UNREF => "VIRTIO_GPU_CMD_RESOURCE_UNREF",
        VIRTIO_GPU_CMD_SET_SCANOUT => "VIRTIO_GPU_CMD_SET_SCANOUT",
        VIRTIO_GPU_CMD_RESOURCE_FLUSH => "VIRTIO_GPU_CMD_RESOURCE_FLUSH",
        VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D => "VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D",
        VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING => "VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING",
        VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING => "VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING",
        VIRTIO_GPU_CMD_GET_CAPSET_INFO => "VIRTIO_GPU_CMD_GET_CAPSET_INFO",
        VIRTIO_GPU_CMD_GET_CAPSET => "VIRTIO_GPU_CMD_GET_CAPSET",
        VIRTIO_GPU_CMD_CTX_CREATE => "VIRTIO_GPU_CMD_CTX_CREATE",
        VIRTIO_GPU_CMD_CTX_DESTROY => "VIRTIO_GPU_CMD_CTX_DESTROY",
        VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE => "VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE",
        VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE => "VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_3D => "VIRTIO_GPU_CMD_RESOURCE_CREATE_3D",
        VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D => "VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D",
        VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D => "VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D",
        VIRTIO_GPU_CMD_SUBMIT_3D => "VIRTIO_GPU_CMD_SUBMIT_3D",
        VIRTIO_GPU_CMD_UPDATE_CURSOR => "VIRTIO_GPU_CMD_UPDATE_CURSOR",
        VIRTIO_GPU_CMD_MOVE_CURSOR => "VIRTIO_GPU_CMD_MOVE_CURSOR",
        VIRTIO_GPU_RESP_OK_NODATA => "VIRTIO_GPU_RESP_OK_NODATA",
        VIRTIO_GPU_RESP_OK_DISPLAY_INFO => "VIRTIO_GPU_RESP_OK_DISPLAY_INFO",
        VIRTIO_GPU_RESP_OK_CAPSET_INFO => "VIRTIO_GPU_RESP_OK_CAPSET_INFO",
        VIRTIO_GPU_RESP_OK_CAPSET => "VIRTIO_GPU_RESP_OK_CAPSET",
        VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO => "VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO",
        VIRTIO_GPU_RESP_ERR_UNSPEC => "VIRTIO_GPU_RESP_ERR_UNSPEC",
        VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY => "VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY",
        VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID => "VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID => "VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID => "VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER => "VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER",
        _ => "UNKNOWN",
    }
}
97 
98 pub const VIRTIO_GPU_FLAG_FENCE: u32 = (1 << 0);
99 
/// Common header that begins every virtio-gpu control-queue request and
/// response.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_ctrl_hdr {
    // One of VIRTIO_GPU_CMD_* (requests) or VIRTIO_GPU_RESP_* (responses).
    pub type_: Le32,
    // VIRTIO_GPU_FLAG_* bits.
    pub flags: Le32,
    // Only meaningful when VIRTIO_GPU_FLAG_FENCE is set in `flags`.
    pub fence_id: Le64,
    pub ctx_id: Le32,
    pub padding: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_ctrl_hdr {}
111 
/* data passed in the cursor vq */

/// Cursor position on a given scanout, used by the cursor commands.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_cursor_pos {
    pub scanout_id: Le32,
    pub x: Le32,
    pub y: Le32,
    pub padding: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_cursor_pos {}
124 
/* VIRTIO_GPU_CMD_UPDATE_CURSOR, VIRTIO_GPU_CMD_MOVE_CURSOR */
/// Payload shared by the cursor-update and cursor-move commands; the
/// resource/hotspot fields are only meaningful for updates.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_update_cursor {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub pos: virtio_gpu_cursor_pos, /* update & move */
    pub resource_id: Le32,          /* update only */
    pub hot_x: Le32,                /* update only */
    pub hot_y: Le32,                /* update only */
    pub padding: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_update_cursor {}
138 
/* data passed in the control vq, 2d related */

/// A 2d rectangle (x/y origin plus width/height), used by scanout, flush, and
/// transfer commands.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_rect {
    pub x: Le32,
    pub y: Le32,
    pub width: Le32,
    pub height: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_rect {}
151 
/* VIRTIO_GPU_CMD_RESOURCE_UNREF */
/// Destroys the resource identified by `resource_id`.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_resource_unref {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_resource_unref {}
162 
/* VIRTIO_GPU_CMD_RESOURCE_CREATE_2D: create a 2d resource with a format */
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_resource_create_2d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    // One of the VIRTIO_GPU_FORMAT_* values.
    pub format: Le32,
    pub width: Le32,
    pub height: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_resource_create_2d {}
175 
/* VIRTIO_GPU_CMD_SET_SCANOUT */
/// Associates the rectangle `r` of `resource_id` with scanout `scanout_id`.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_set_scanout {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub scanout_id: Le32,
    pub resource_id: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_set_scanout {}
187 
/* VIRTIO_GPU_CMD_RESOURCE_FLUSH */
/// Flushes the rectangle `r` of `resource_id` to any attached scanouts.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_resource_flush {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub resource_id: Le32,
    pub padding: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_resource_flush {}
199 
/* VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D: simple transfer to_host */
/// Copies rectangle `r` from the resource's guest backing (starting at byte
/// `offset`) into the host-side resource.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_transfer_to_host_2d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub offset: Le64,
    pub resource_id: Le32,
    pub padding: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_transfer_to_host_2d {}
212 
/// One guest physical memory region (address + length) in the list that
/// follows a `virtio_gpu_resource_attach_backing` command.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_mem_entry {
    pub addr: Le64,
    pub length: Le32,
    pub padding: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_mem_entry {}
222 
/* VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING */
/// Attaches guest backing memory to a resource; `nr_entries`
/// `virtio_gpu_mem_entry` records follow this struct in the request.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_resource_attach_backing {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub nr_entries: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_resource_attach_backing {}
233 
/* VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING */
/// Detaches any guest backing memory from `resource_id`.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_resource_detach_backing {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_resource_detach_backing {}
244 
/// Per-scanout display mode entry in `virtio_gpu_resp_display_info`.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_display_one {
    pub r: virtio_gpu_rect,
    // Non-zero when this scanout is enabled.
    pub enabled: Le32,
    pub flags: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_display_one {}
254 
/* VIRTIO_GPU_RESP_OK_DISPLAY_INFO */
// Fixed number of scanout slots in a display-info response.
const VIRTIO_GPU_MAX_SCANOUTS: usize = 16;
/// Response carrying the mode of every scanout slot; unused slots are zeroed.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_resp_display_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub pmodes: [virtio_gpu_display_one; VIRTIO_GPU_MAX_SCANOUTS],
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_resp_display_info {}
265 
/* data passed in the control vq, 3d related */

/// A 3d box (origin x/y/z plus width/height/depth) for 3d transfers.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_box {
    pub x: Le32,
    pub y: Le32,
    pub z: Le32,
    pub w: Le32,
    pub h: Le32,
    pub d: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_box {}
280 
/* VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D, VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D */
/// Payload shared by the 3d to-host and from-host transfer commands.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_transfer_host_3d {
    pub hdr: virtio_gpu_ctrl_hdr,
    // `box` is a Rust keyword, hence the trailing underscore.
    pub box_: virtio_gpu_box,
    pub offset: Le64,
    pub resource_id: Le32,
    pub level: Le32,
    pub stride: Le32,
    pub layer_stride: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_transfer_host_3d {}
295 
/* VIRTIO_GPU_CMD_RESOURCE_CREATE_3D */
/// `flags` bit: the resource's y origin is at the top (rather than bottom).
pub const VIRTIO_GPU_RESOURCE_FLAG_Y_0_TOP: u32 = (1 << 0);
/// Creates a 3d resource with the given target/format/dimensions.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_resource_create_3d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub target: Le32,
    pub format: Le32,
    pub bind: Le32,
    pub width: Le32,
    pub height: Le32,
    pub depth: Le32,
    pub array_size: Le32,
    pub last_level: Le32,
    pub nr_samples: Le32,
    // VIRTIO_GPU_RESOURCE_FLAG_* bits.
    pub flags: Le32,
    pub padding: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_resource_create_3d {}
317 
/* VIRTIO_GPU_CMD_CTX_CREATE */
/// Creates a rendering context; `debug_name` holds `nlen` bytes of name.
// Clone and Debug are implemented manually below because of the 64-byte array.
#[derive(Copy)]
#[repr(C)]
pub struct virtio_gpu_ctx_create {
    pub hdr: virtio_gpu_ctrl_hdr,
    // Number of meaningful bytes in `debug_name`.
    pub nlen: Le32,
    pub padding: Le32,
    pub debug_name: [u8; 64],
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_ctx_create {}
329 
// Manual impl: Clone cannot be derived here (large array field); the type is
// Copy, so a clone is just a bitwise copy.
impl Clone for virtio_gpu_ctx_create {
    fn clone(&self) -> virtio_gpu_ctx_create {
        *self
    }
}
335 
336 impl fmt::Debug for virtio_gpu_ctx_create {
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result337     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
338         let debug_name = from_utf8(&self.debug_name[..min(64, self.nlen.to_native() as usize)])
339             .unwrap_or("<invalid>");
340         f.debug_struct("virtio_gpu_ctx_create")
341             .field("hdr", &self.hdr)
342             .field("debug_name", &debug_name)
343             .finish()
344     }
345 }
346 
/* VIRTIO_GPU_CMD_CTX_DESTROY */
/// Destroys the rendering context named by `hdr.ctx_id`; no extra payload.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_ctx_destroy {
    pub hdr: virtio_gpu_ctrl_hdr,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_ctx_destroy {}
355 
/* VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE, VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE */
/// Payload shared by the context attach-resource and detach-resource commands.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_ctx_resource {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_ctx_resource {}
366 
/* VIRTIO_GPU_CMD_SUBMIT_3D */
/// Submits a 3d command buffer; `size` bytes of commands follow this struct.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_cmd_submit {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub size: Le32,
    pub padding: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_cmd_submit {}
377 
/// Capability-set ids reported in `virtio_gpu_resp_capset_info::capset_id`.
pub const VIRTIO_GPU_CAPSET_VIRGL: u32 = 1;
pub const VIRTIO_GPU_CAPSET_VIRGL2: u32 = 2;

/* VIRTIO_GPU_CMD_GET_CAPSET_INFO */
/// Requests info about the capability set at `capset_index`.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_get_capset_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_index: Le32,
    pub padding: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_get_capset_info {}
391 
/* VIRTIO_GPU_RESP_OK_CAPSET_INFO */
/// Response to `VIRTIO_GPU_CMD_GET_CAPSET_INFO`: identifies a capability set
/// and its maximum version/size.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_resp_capset_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_id: Le32,
    pub capset_max_version: Le32,
    pub capset_max_size: Le32,
    pub padding: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_resp_capset_info {}
404 
/* VIRTIO_GPU_CMD_GET_CAPSET */
/// Requests the contents of capability set `capset_id` at `capset_version`.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_get_capset {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_id: Le32,
    pub capset_version: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_get_capset {}
415 
/* VIRTIO_GPU_RESP_OK_CAPSET */
/// Response header for `VIRTIO_GPU_CMD_GET_CAPSET`; the variable-length
/// capset data follows the header in the response buffer.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_resp_capset {
    pub hdr: virtio_gpu_ctrl_hdr,
    // Zero-sized marker standing in for the trailing variable-length data
    // (Rust analogue of a C flexible array member).
    pub capset_data: PhantomData<[u8]>,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_resp_capset {}
425 
/* VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO */
/// Response describing a resource's memory planes: `count` valid entries in
/// the fixed-size `strides`/`offsets` arrays, plus a format modifier.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_resp_resource_plane_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub count: Le32,
    pub padding: Le32,
    pub format_modifier: Le64,
    pub strides: [Le32; 4],
    pub offsets: [Le32; 4],
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_resp_resource_plane_info {}
439 
// Capacity of the strides/offsets arrays in virtio_gpu_resp_resource_plane_info.
const PLANE_INFO_MAX_COUNT: usize = 4;

/// Config-space `events_read` bit: the display configuration has changed.
pub const VIRTIO_GPU_EVENT_DISPLAY: u32 = 1 << 0;
443 
/// The virtio-gpu device configuration space layout.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct virtio_gpu_config {
    // Pending VIRTIO_GPU_EVENT_* bits; cleared by writing to `events_clear`.
    pub events_read: Le32,
    pub events_clear: Le32,
    pub num_scanouts: Le32,
    pub num_capsets: Le32,
}

// Safe because this struct is repr(C) and only contains plain-data fields.
unsafe impl DataInit for virtio_gpu_config {}
454 
/* simple formats for fbcon/X use */
// 2d resource pixel formats (values defined by the virtio spec).
pub const VIRTIO_GPU_FORMAT_B8G8R8A8_UNORM: u32 = 1;
pub const VIRTIO_GPU_FORMAT_B8G8R8X8_UNORM: u32 = 2;
pub const VIRTIO_GPU_FORMAT_A8R8G8B8_UNORM: u32 = 3;
pub const VIRTIO_GPU_FORMAT_X8R8G8B8_UNORM: u32 = 4;
pub const VIRTIO_GPU_FORMAT_R8G8B8A8_UNORM: u32 = 67;
pub const VIRTIO_GPU_FORMAT_X8B8G8R8_UNORM: u32 = 68;
pub const VIRTIO_GPU_FORMAT_A8B8G8R8_UNORM: u32 = 121;
pub const VIRTIO_GPU_FORMAT_R8G8B8X8_UNORM: u32 = 134;
464 
/// A virtio gpu command and associated metadata specific to each command.
///
/// Each variant wraps the decoded, fixed-size wire struct for the
/// corresponding `VIRTIO_GPU_CMD_*`; see [`GpuCommand::decode`].
#[derive(Copy, Clone)]
pub enum GpuCommand {
    GetDisplayInfo(virtio_gpu_ctrl_hdr),
    ResourceCreate2d(virtio_gpu_resource_create_2d),
    ResourceUnref(virtio_gpu_resource_unref),
    SetScanout(virtio_gpu_set_scanout),
    ResourceFlush(virtio_gpu_resource_flush),
    TransferToHost2d(virtio_gpu_transfer_to_host_2d),
    ResourceAttachBacking(virtio_gpu_resource_attach_backing),
    ResourceDetachBacking(virtio_gpu_resource_detach_backing),
    GetCapsetInfo(virtio_gpu_get_capset_info),
    GetCapset(virtio_gpu_get_capset),
    CtxCreate(virtio_gpu_ctx_create),
    CtxDestroy(virtio_gpu_ctx_destroy),
    CtxAttachResource(virtio_gpu_ctx_resource),
    CtxDetachResource(virtio_gpu_ctx_resource),
    ResourceCreate3d(virtio_gpu_resource_create_3d),
    TransferToHost3d(virtio_gpu_transfer_host_3d),
    TransferFromHost3d(virtio_gpu_transfer_host_3d),
    CmdSubmit3d(virtio_gpu_cmd_submit),
    UpdateCursor(virtio_gpu_update_cursor),
    MoveCursor(virtio_gpu_update_cursor),
}
489 
/// An error indicating something went wrong decoding a `GpuCommand`. These correspond to
/// `VIRTIO_GPU_CMD_*`.
#[derive(Debug)]
pub enum GpuCommandDecodeError {
    /// The command referenced an inaccessible area of memory.
    Memory(VolatileMemoryError),
    /// The type of the command was invalid.
    InvalidType(u32),
}
499 
500 impl Display for GpuCommandDecodeError {
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result501     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
502         use self::GpuCommandDecodeError::*;
503 
504         match self {
505             Memory(e) => write!(
506                 f,
507                 "command referenced an inaccessible area of memory: {}",
508                 e,
509             ),
510             InvalidType(n) => write!(f, "invalid command type ({})", n),
511         }
512     }
513 }
514 
// Lets `?` convert memory-access failures into decode errors.
impl From<VolatileMemoryError> for GpuCommandDecodeError {
    fn from(e: VolatileMemoryError) -> GpuCommandDecodeError {
        GpuCommandDecodeError::Memory(e)
    }
}
520 
521 impl fmt::Debug for GpuCommand {
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result522     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
523         use self::GpuCommand::*;
524         match self {
525             GetDisplayInfo(_info) => f.debug_struct("GetDisplayInfo").finish(),
526             ResourceCreate2d(_info) => f.debug_struct("ResourceCreate2d").finish(),
527             ResourceUnref(_info) => f.debug_struct("ResourceUnref").finish(),
528             SetScanout(_info) => f.debug_struct("SetScanout").finish(),
529             ResourceFlush(_info) => f.debug_struct("ResourceFlush").finish(),
530             TransferToHost2d(_info) => f.debug_struct("TransferToHost2d").finish(),
531             ResourceAttachBacking(_info) => f.debug_struct("ResourceAttachBacking").finish(),
532             ResourceDetachBacking(_info) => f.debug_struct("ResourceDetachBacking").finish(),
533             GetCapsetInfo(_info) => f.debug_struct("GetCapsetInfo").finish(),
534             GetCapset(_info) => f.debug_struct("GetCapset").finish(),
535             CtxCreate(_info) => f.debug_struct("CtxCreate").finish(),
536             CtxDestroy(_info) => f.debug_struct("CtxDestroy").finish(),
537             CtxAttachResource(_info) => f.debug_struct("CtxAttachResource").finish(),
538             CtxDetachResource(_info) => f.debug_struct("CtxDetachResource").finish(),
539             ResourceCreate3d(_info) => f.debug_struct("ResourceCreate3d").finish(),
540             TransferToHost3d(_info) => f.debug_struct("TransferToHost3d").finish(),
541             TransferFromHost3d(_info) => f.debug_struct("TransferFromHost3d").finish(),
542             CmdSubmit3d(_info) => f.debug_struct("CmdSubmit3d").finish(),
543             UpdateCursor(_info) => f.debug_struct("UpdateCursor").finish(),
544             MoveCursor(_info) => f.debug_struct("MoveCursor").finish(),
545         }
546     }
547 }
548 
impl GpuCommand {
    /// Decodes a command from the given chunk of memory.
    ///
    /// The `type_` field of the leading `virtio_gpu_ctrl_hdr` selects which
    /// fixed-size command struct is loaded from the start of `cmd`.
    ///
    /// # Errors
    /// Returns `Memory` if `cmd` is too small for the selected struct, or
    /// `InvalidType` if `type_` is not a recognized command.
    pub fn decode(cmd: VolatileSlice) -> Result<GpuCommand, GpuCommandDecodeError> {
        use self::GpuCommand::*;
        // Read just the header first to learn the command type; each arm then
        // re-loads from offset 0 as the full command struct (every command
        // embeds the header as its first field).
        let hdr: virtio_gpu_ctrl_hdr = cmd.get_ref(0)?.load();
        Ok(match hdr.type_.into() {
            VIRTIO_GPU_CMD_GET_DISPLAY_INFO => GetDisplayInfo(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_2D => ResourceCreate2d(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_RESOURCE_UNREF => ResourceUnref(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_SET_SCANOUT => SetScanout(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_RESOURCE_FLUSH => ResourceFlush(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D => TransferToHost2d(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING => ResourceAttachBacking(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING => ResourceDetachBacking(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_GET_CAPSET_INFO => GetCapsetInfo(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_GET_CAPSET => GetCapset(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_CTX_CREATE => CtxCreate(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_CTX_DESTROY => CtxDestroy(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE => CtxAttachResource(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE => CtxDetachResource(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_3D => ResourceCreate3d(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D => TransferToHost3d(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D => TransferFromHost3d(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_SUBMIT_3D => CmdSubmit3d(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_UPDATE_CURSOR => UpdateCursor(cmd.get_ref(0)?.load()),
            VIRTIO_GPU_CMD_MOVE_CURSOR => MoveCursor(cmd.get_ref(0)?.load()),
            _ => return Err(GpuCommandDecodeError::InvalidType(hdr.type_.into())),
        })
    }

    /// Gets the generic `virtio_gpu_ctrl_hdr` from this command.
    pub fn ctrl_hdr(&self) -> &virtio_gpu_ctrl_hdr {
        use self::GpuCommand::*;
        match self {
            // GetDisplayInfo carries the bare header itself; every other
            // variant embeds it as its first field.
            GetDisplayInfo(info) => info,
            ResourceCreate2d(info) => &info.hdr,
            ResourceUnref(info) => &info.hdr,
            SetScanout(info) => &info.hdr,
            ResourceFlush(info) => &info.hdr,
            TransferToHost2d(info) => &info.hdr,
            ResourceAttachBacking(info) => &info.hdr,
            ResourceDetachBacking(info) => &info.hdr,
            GetCapsetInfo(info) => &info.hdr,
            GetCapset(info) => &info.hdr,
            CtxCreate(info) => &info.hdr,
            CtxDestroy(info) => &info.hdr,
            CtxAttachResource(info) => &info.hdr,
            CtxDetachResource(info) => &info.hdr,
            ResourceCreate3d(info) => &info.hdr,
            TransferToHost3d(info) => &info.hdr,
            TransferFromHost3d(info) => &info.hdr,
            CmdSubmit3d(info) => &info.hdr,
            UpdateCursor(info) => &info.hdr,
            MoveCursor(info) => &info.hdr,
        }
    }
}
606 
/// Per-plane stride/offset pair carried in `GpuResponse::OkResourcePlaneInfo`.
#[derive(Debug, PartialEq)]
pub struct GpuResponsePlaneInfo {
    pub stride: u32,
    pub offset: u32,
}
612 
/// A response to a `GpuCommand`. These correspond to `VIRTIO_GPU_RESP_*`.
#[derive(Debug, PartialEq)]
pub enum GpuResponse {
    OkNoData,
    /// One `(width, height)` per enabled scanout.
    OkDisplayInfo(Vec<(u32, u32)>),
    OkCapsetInfo {
        id: u32,
        version: u32,
        size: u32,
    },
    /// Raw capability-set bytes appended after the response header.
    OkCapset(Vec<u8>),
    OkResourcePlaneInfo {
        format_modifier: u64,
        plane_info: Vec<GpuResponsePlaneInfo>,
    },
    ErrUnspec,
    ErrOutOfMemory,
    ErrInvalidScanoutId,
    ErrInvalidResourceId,
    ErrInvalidContextId,
    ErrInvalidParameter,
}
635 
/// An error indicating something went wrong encoding a `GpuResponse`.
#[derive(Debug)]
pub enum GpuResponseEncodeError {
    /// The response was encoded to an inaccessible area of memory.
    Memory(VolatileMemoryError),
    /// More displays than are valid were in a `OkDisplayInfo`.
    TooManyDisplays(usize),
    /// More planes than are valid were in a `OkResourcePlaneInfo`.
    TooManyPlanes(usize),
}
646 
impl Display for GpuResponseEncodeError {
    /// Renders a human-readable description of the encode failure.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use self::GpuResponseEncodeError::*;

        match self {
            Memory(e) => write!(
                f,
                "response was encoded to an inaccessible area of memory: {}",
                e,
            ),
            TooManyDisplays(n) => write!(f, "{} is more displays than are valid", n),
            TooManyPlanes(n) => write!(f, "{} is more planes than are valid", n),
        }
    }
}
662 
// Lets `?` convert memory-access failures into encode errors.
impl From<VolatileMemoryError> for GpuResponseEncodeError {
    fn from(e: VolatileMemoryError) -> GpuResponseEncodeError {
        GpuResponseEncodeError::Memory(e)
    }
}
668 
impl GpuResponse {
    /// Encodes this `GpuResponse` into `resp` along with the given header
    /// metadata (`flags`, `fence_id`, `ctx_id` are echoed back from the
    /// request).
    ///
    /// Returns the number of bytes written into `resp`.
    ///
    /// # Errors
    /// Returns `Memory` if `resp` is too small for the response struct,
    /// `TooManyDisplays`/`TooManyPlanes` if a variant carries more entries
    /// than the wire format can hold.
    pub fn encode(
        &self,
        flags: u32,
        fence_id: u64,
        ctx_id: u32,
        resp: VolatileSlice,
    ) -> Result<u32, GpuResponseEncodeError> {
        // Every response begins with a ctrl header whose type encodes the
        // success/error variant.
        let hdr = virtio_gpu_ctrl_hdr {
            type_: Le32::from(self.get_type()),
            flags: Le32::from(flags),
            fence_id: Le64::from(fence_id),
            ctx_id: Le32::from(ctx_id),
            padding: Le32::from(0),
        };
        let len = match *self {
            GpuResponse::OkDisplayInfo(ref info) => {
                if info.len() > VIRTIO_GPU_MAX_SCANOUTS {
                    return Err(GpuResponseEncodeError::TooManyDisplays(info.len()));
                }
                // Slots beyond info.len() stay zeroed (disabled).
                let mut disp_info = virtio_gpu_resp_display_info {
                    hdr,
                    pmodes: Default::default(),
                };
                for (disp_mode, &(width, height)) in disp_info.pmodes.iter_mut().zip(info) {
                    disp_mode.r.width = Le32::from(width);
                    disp_mode.r.height = Le32::from(height);
                    disp_mode.enabled = Le32::from(1);
                }
                resp.get_ref(0)?.store(disp_info);
                size_of_val(&disp_info)
            }
            GpuResponse::OkCapsetInfo { id, version, size } => {
                resp.get_ref(0)?.store(virtio_gpu_resp_capset_info {
                    hdr,
                    capset_id: Le32::from(id),
                    capset_max_version: Le32::from(version),
                    capset_max_size: Le32::from(size),
                    padding: Le32::from(0),
                });
                size_of::<virtio_gpu_resp_capset_info>()
            }
            GpuResponse::OkCapset(ref data) => {
                // Header first, then the raw capset bytes immediately after.
                resp.get_ref(0)?.store(hdr);
                let resp_data_slice =
                    resp.get_slice(size_of_val(&hdr) as u64, data.len() as u64)?;
                resp_data_slice.copy_from(data);
                size_of_val(&hdr) + data.len()
            }
            GpuResponse::OkResourcePlaneInfo {
                format_modifier,
                ref plane_info,
            } => {
                if plane_info.len() > PLANE_INFO_MAX_COUNT {
                    return Err(GpuResponseEncodeError::TooManyPlanes(plane_info.len()));
                }
                let mut strides = [Le32::default(); PLANE_INFO_MAX_COUNT];
                let mut offsets = [Le32::default(); PLANE_INFO_MAX_COUNT];
                for (plane_index, plane) in plane_info.iter().enumerate() {
                    strides[plane_index] = plane.stride.into();
                    offsets[plane_index] = plane.offset.into();
                }
                let plane_info = virtio_gpu_resp_resource_plane_info {
                    hdr,
                    count: Le32::from(plane_info.len() as u32),
                    padding: 0.into(),
                    format_modifier: format_modifier.into(),
                    strides,
                    offsets,
                };
                match resp.get_ref(0) {
                    Ok(resp_ref) => {
                        resp_ref.store(plane_info);
                        size_of_val(&plane_info)
                    }
                    _ => {
                        // In case there is too little room in the response slice to store the
                        // entire virtio_gpu_resp_resource_plane_info, convert response to a regular
                        // VIRTIO_GPU_RESP_OK_NODATA and attempt to return that.
                        resp.get_ref(0)?.store(virtio_gpu_ctrl_hdr {
                            type_: Le32::from(VIRTIO_GPU_RESP_OK_NODATA),
                            ..hdr
                        });
                        size_of_val(&hdr)
                    }
                }
            }
            // All remaining variants (OkNoData and every error) are
            // header-only responses.
            _ => {
                resp.get_ref(0)?.store(hdr);
                size_of_val(&hdr)
            }
        };
        Ok(len as u32)
    }

    /// Gets the `VIRTIO_GPU_*` enum value that corresponds to this variant.
    pub fn get_type(&self) -> u32 {
        match self {
            GpuResponse::OkNoData => VIRTIO_GPU_RESP_OK_NODATA,
            GpuResponse::OkDisplayInfo(_) => VIRTIO_GPU_RESP_OK_DISPLAY_INFO,
            GpuResponse::OkCapsetInfo { .. } => VIRTIO_GPU_RESP_OK_CAPSET_INFO,
            GpuResponse::OkCapset(_) => VIRTIO_GPU_RESP_OK_CAPSET,
            GpuResponse::OkResourcePlaneInfo { .. } => VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO,
            GpuResponse::ErrUnspec => VIRTIO_GPU_RESP_ERR_UNSPEC,
            GpuResponse::ErrOutOfMemory => VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY,
            GpuResponse::ErrInvalidScanoutId => VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID,
            GpuResponse::ErrInvalidResourceId => VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID,
            GpuResponse::ErrInvalidContextId => VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID,
            GpuResponse::ErrInvalidParameter => VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER,
        }
    }

    /// Returns true if this response indicates success.
    pub fn is_ok(&self) -> bool {
        match self {
            GpuResponse::OkNoData => true,
            GpuResponse::OkDisplayInfo(_) => true,
            GpuResponse::OkCapsetInfo { .. } => true,
            GpuResponse::OkCapset(_) => true,
            GpuResponse::OkResourcePlaneInfo { .. } => true,
            _ => false,
        }
    }

    /// Returns true if this response indicates an error.
    pub fn is_err(&self) -> bool {
        !self.is_ok()
    }
}
799