// Copyright 2020 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

//! gfxstream: Handles 3D virtio-gpu hypercalls using gfxstream.
//!
//! External code found at https://android.googlesource.com/device/generic/vulkan-cereal/.

#![cfg(feature = "gfxstream")]

use std::cell::RefCell;
use std::mem::{size_of, transmute};
use std::os::raw::{c_char, c_int, c_uint, c_void};
use std::ptr::null_mut;
use std::rc::Rc;

use base::{ExternalMapping, ExternalMappingError, ExternalMappingResult};

use crate::generated::virgl_renderer_bindings::{
    iovec, virgl_box, virgl_renderer_resource_create_args,
};

use crate::renderer_utils::*;
use crate::rutabaga_core::{RutabagaComponent, RutabagaContext, RutabagaResource};
use crate::rutabaga_utils::*;

use data_model::VolatileSlice;

// In gfxstream, only write_fence is used (for synchronization of commands delivered)
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GfxstreamRendererCallbacks {
    pub version: c_int,
    pub write_fence: unsafe extern "C" fn(cookie: *mut c_void, fence: u32),
}

#[link(name = "gfxstream_backend")]
extern "C" {

    // Function to globally init gfxstream backend's internal state, taking display/renderer
    // parameters.
    fn gfxstream_backend_init(
        display_width: u32,
        display_height: u32,
        display_type: u32,
        renderer_cookie: *mut c_void,
        renderer_flags: i32,
        renderer_callbacks: *mut GfxstreamRendererCallbacks,
    );

    // virtio-gpu-3d ioctl functions (begin)

    // In gfxstream, the resource create/transfer ioctls correspond to creating buffers for API
    // forwarding and the notification of new API calls forwarded by the guest, unless they
    // correspond to minigbm resource targets (PIPE_TEXTURE_2D), in which case they create globally
    // visible shared GL textures to support gralloc.
    fn pipe_virgl_renderer_poll();
    fn pipe_virgl_renderer_resource_create(
        args: *mut virgl_renderer_resource_create_args,
        iov: *mut iovec,
        num_iovs: u32,
    ) -> c_int;

    fn pipe_virgl_renderer_resource_unref(res_handle: u32);
    fn pipe_virgl_renderer_context_create(handle: u32, nlen: u32, name: *const c_char) -> c_int;
    fn pipe_virgl_renderer_context_destroy(handle: u32);
    fn pipe_virgl_renderer_transfer_read_iov(
        handle: u32,
        ctx_id: u32,
        level: u32,
        stride: u32,
        layer_stride: u32,
        box_: *mut virgl_box,
        offset: u64,
        iov: *mut iovec,
        iovec_cnt: c_int,
    ) -> c_int;
    fn pipe_virgl_renderer_transfer_write_iov(
        handle: u32,
        ctx_id: u32,
        level: c_int,
        stride: u32,
        layer_stride: u32,
        box_: *mut virgl_box,
        offset: u64,
        iovec: *mut iovec,
        iovec_cnt: c_uint,
    ) -> c_int;
    fn pipe_virgl_renderer_submit_cmd(
        commands: *mut c_void,
        ctx_id: i32,
        dword_count: i32,
    ) -> c_int;
    fn pipe_virgl_renderer_resource_attach_iov(
        res_handle: c_int,
        iov: *mut iovec,
        num_iovs: c_int,
    ) -> c_int;
    fn pipe_virgl_renderer_resource_detach_iov(
        res_handle: c_int,
        iov: *mut *mut iovec,
        num_iovs: *mut c_int,
    );
    fn pipe_virgl_renderer_create_fence(client_fence_id: c_int, ctx_id: u32) -> c_int;
    fn pipe_virgl_renderer_ctx_attach_resource(ctx_id: c_int, res_handle: c_int);
    fn pipe_virgl_renderer_ctx_detach_resource(ctx_id: c_int, res_handle: c_int);

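    // Gfxstream-specific entry points for blob resources: create a resource backed by host
    // memory identified by a blob (hostmem) id, and map or unmap that memory.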
    fn stream_renderer_resource_create_v2(res_handle: u32, hostmemId: u64);
    fn stream_renderer_resource_map(
        res_handle: u32,
        map: *mut *mut c_void,
        out_size: *mut u64,
    ) -> c_int;
    fn stream_renderer_resource_unmap(res_handle: u32) -> c_int;
}

/// The virtio-gpu backend state tracker which supports accelerated rendering.
pub struct Gfxstream {
    fence_state: Rc<RefCell<FenceState>>,
}

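/// A single gfxstream rendering context, identified by its virtio-gpu context id.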
struct GfxstreamContext {
    ctx_id: u32,
}

impl RutabagaContext for GfxstreamContext {
    fn submit_cmd(&mut self, commands: &mut [u8]) -> RutabagaResult<()> {
        if commands.len() % size_of::<u32>() != 0 {
            return Err(RutabagaError::InvalidCommandSize(commands.len()));
        }
        let dword_count = (commands.len() / size_of::<u32>()) as i32;
        // Safe because the context and buffer are valid and virglrenderer will have been
        // initialized if there are Context instances.
        let ret = unsafe {
            pipe_virgl_renderer_submit_cmd(
                commands.as_mut_ptr() as *mut c_void,
                self.ctx_id as i32,
                dword_count,
            )
        };
        ret_to_res(ret)
    }

    fn attach(&mut self, resource: &mut RutabagaResource) {
        // The context id and resource id must be valid because the respective instances ensure
        // their lifetime.
        unsafe {
            pipe_virgl_renderer_ctx_attach_resource(
                self.ctx_id as i32,
                resource.resource_id as i32,
            );
        }
    }

    fn detach(&mut self, resource: &RutabagaResource) {
        // The context id and resource id must be valid because the respective instances ensure
        // their lifetime.
        unsafe {
            pipe_virgl_renderer_ctx_detach_resource(
                self.ctx_id as i32,
                resource.resource_id as i32,
            );
        }
    }
}

impl Drop for GfxstreamContext {
    fn drop(&mut self) {
        // The context is safe to destroy because nothing else can be referencing it.
        unsafe {
            pipe_virgl_renderer_context_destroy(self.ctx_id);
        }
    }
}

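// Callback table handed to gfxstream_backend_init(); only write_fence is invoked by gfxstream
// (see GfxstreamRendererCallbacks above).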
const GFXSTREAM_RENDERER_CALLBACKS: &GfxstreamRendererCallbacks = &GfxstreamRendererCallbacks {
    version: 1,
    write_fence,
};

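// Map/unmap callbacks handed to ExternalMapping below; the gfxstream backend owns the underlying
// mapping, keyed by resource id.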
fn map_func(resource_id: u32) -> ExternalMappingResult<(u64, usize)> {
    let mut map: *mut c_void = null_mut();
    let map_ptr: *mut *mut c_void = &mut map;
    let mut size: u64 = 0;

    // Safe because the Stream renderer wraps and validates use of vkMapMemory.
    let ret = unsafe { stream_renderer_resource_map(resource_id, map_ptr, &mut size) };
    if ret != 0 {
        return Err(ExternalMappingError::LibraryError(ret));
    }
    Ok((map as u64, size as usize))
}

fn unmap_func(resource_id: u32) {
    unsafe { stream_renderer_resource_unmap(resource_id) };
}

impl Gfxstream {
    pub fn init(
        display_width: u32,
        display_height: u32,
        gfxstream_flags: GfxstreamFlags,
    ) -> RutabagaResult<Box<dyn RutabagaComponent>> {
        let fence_state = Rc::new(RefCell::new(FenceState { latest_fence: 0 }));

        let cookie: *mut VirglCookie = Box::into_raw(Box::new(VirglCookie {
            fence_state: Rc::clone(&fence_state),
        }));

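        // Safe because the callback struct is #[repr(C)] with the layout the backend expects, and
        // the cookie is intentionally never freed: gfxstream keeps using it for write_fence
        // callbacks for as long as the backend is initialized.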
        unsafe {
            gfxstream_backend_init(
                display_width,
                display_height,
                1, /* default to shmem display */
                cookie as *mut c_void,
                gfxstream_flags.into(),
                transmute(GFXSTREAM_RENDERER_CALLBACKS),
            );
        }

        Ok(Box::new(Gfxstream { fence_state }))
    }

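    // All gfxstream resource mappings are reported as write-combining (WC).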
    #[allow(clippy::unnecessary_wraps)]
    fn map_info(&self, _resource_id: u32) -> RutabagaResult<u32> {
        Ok(RUTABAGA_MAP_CACHE_WC)
    }
}

impl RutabagaComponent for Gfxstream {
    fn get_capset_info(&self, _capset_id: u32) -> (u32, u32) {
        (1, 0)
    }

    fn get_capset(&self, _capset_id: u32, _version: u32) -> Vec<u8> {
        Vec::new()
    }

    fn create_fence(&mut self, fence_data: RutabagaFenceData) -> RutabagaResult<()> {
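        // Safe because gfxstream is initialized by now and only plain integers (fence id and
        // context id) cross the FFI boundary.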
        let ret = unsafe {
            pipe_virgl_renderer_create_fence(fence_data.fence_id as i32, fence_data.ctx_id)
        };
        ret_to_res(ret)
    }

    fn poll(&self) -> u32 {
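        // Safe because gfxstream is initialized by now; polling flushes pending fence callbacks,
        // which update fence_state through the cookie passed to gfxstream_backend_init.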
        unsafe { pipe_virgl_renderer_poll() };
        self.fence_state.borrow().latest_fence
    }

    fn create_3d(
        &self,
        resource_id: u32,
        resource_create_3d: ResourceCreate3D,
    ) -> RutabagaResult<RutabagaResource> {
        let mut args = virgl_renderer_resource_create_args {
            handle: resource_id,
            target: resource_create_3d.target,
            format: resource_create_3d.format,
            bind: resource_create_3d.bind,
            width: resource_create_3d.width,
            height: resource_create_3d.height,
            depth: resource_create_3d.depth,
            array_size: resource_create_3d.array_size,
            last_level: resource_create_3d.last_level,
            nr_samples: resource_create_3d.nr_samples,
            flags: resource_create_3d.flags,
        };

        // Safe because virglrenderer is initialized by now, and the return value is checked before
        // returning a new resource. The backing buffers are not supplied with this call.
        let ret = unsafe { pipe_virgl_renderer_resource_create(&mut args, null_mut(), 0) };
        ret_to_res(ret)?;

        Ok(RutabagaResource {
            resource_id,
            handle: None,
            blob: false,
            blob_mem: 0,
            blob_flags: 0,
            map_info: None,
            info_2d: None,
            info_3d: None,
            vulkan_info: None,
            backing_iovecs: None,
        })
    }

    fn attach_backing(
        &self,
        resource_id: u32,
        vecs: &mut Vec<RutabagaIovec>,
    ) -> RutabagaResult<()> {
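        // Safe because `vecs` remains valid for the duration of the call and only pointers and
        // lengths are handed to gfxstream.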
        let ret = unsafe {
            pipe_virgl_renderer_resource_attach_iov(
                resource_id as i32,
                vecs.as_mut_ptr() as *mut iovec,
                vecs.len() as i32,
            )
        };
        ret_to_res(ret)
    }

    fn detach_backing(&self, resource_id: u32) {
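        // Safe because the detached iovecs are not needed back, so null out-pointers are passed.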
        unsafe {
            pipe_virgl_renderer_resource_detach_iov(
                resource_id as i32,
                std::ptr::null_mut(),
                std::ptr::null_mut(),
            );
        }
    }

    fn unref_resource(&self, resource_id: u32) {
        // The resource is safe to unreference and destroy because no user of these bindings can
        // still be holding a reference.
        unsafe {
            pipe_virgl_renderer_resource_unref(resource_id);
        }
    }

    fn transfer_write(
        &self,
        ctx_id: u32,
        resource: &mut RutabagaResource,
        transfer: Transfer3D,
    ) -> RutabagaResult<()> {
        if transfer.is_empty() {
            return Ok(());
        }

        let mut transfer_box = VirglBox {
            x: transfer.x,
            y: transfer.y,
            z: transfer.z,
            w: transfer.w,
            h: transfer.h,
            d: transfer.d,
        };

        // Safe because only stack variables of the appropriate type are used.
        let ret = unsafe {
            pipe_virgl_renderer_transfer_write_iov(
                resource.resource_id,
                ctx_id,
                transfer.level as i32,
                transfer.stride,
                transfer.layer_stride,
                &mut transfer_box as *mut VirglBox as *mut virgl_box,
                transfer.offset,
                null_mut(),
                0,
            )
        };
        ret_to_res(ret)
    }

    fn transfer_read(
        &self,
        ctx_id: u32,
        resource: &mut RutabagaResource,
        transfer: Transfer3D,
        buf: Option<VolatileSlice>,
    ) -> RutabagaResult<()> {
        if transfer.is_empty() {
            return Ok(());
        }

        let mut transfer_box = VirglBox {
            x: transfer.x,
            y: transfer.y,
            z: transfer.z,
            w: transfer.w,
            h: transfer.h,
            d: transfer.d,
        };

        let mut iov = RutabagaIovec {
            base: null_mut(),
            len: 0,
        };

        let (iovecs, num_iovecs) = match buf {
            Some(buf) => {
                iov.base = buf.as_ptr() as *mut c_void;
                iov.len = buf.size() as usize;
                (&mut iov as *mut RutabagaIovec as *mut iovec, 1)
            }
            None => (null_mut(), 0),
        };

        // Safe because only stack variables of the appropriate type are used.
        let ret = unsafe {
            pipe_virgl_renderer_transfer_read_iov(
                resource.resource_id,
                ctx_id,
                transfer.level,
                transfer.stride,
                transfer.layer_stride,
                &mut transfer_box as *mut VirglBox as *mut virgl_box,
                transfer.offset,
                iovecs,
                num_iovecs,
            )
        };
        ret_to_res(ret)
    }

    fn create_blob(
        &mut self,
        _ctx_id: u32,
        resource_id: u32,
        resource_create_blob: ResourceCreateBlob,
        _iovec_opt: Option<Vec<RutabagaIovec>>,
    ) -> RutabagaResult<RutabagaResource> {
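        // Safe because gfxstream is initialized by now and only plain integers (resource id and
        // blob id) cross the FFI boundary.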
        unsafe {
            stream_renderer_resource_create_v2(resource_id, resource_create_blob.blob_id);
        }
        Ok(RutabagaResource {
            resource_id,
            handle: None,
            blob: true,
            blob_mem: resource_create_blob.blob_mem,
            blob_flags: resource_create_blob.blob_flags,
            map_info: self.map_info(resource_id).ok(),
            info_2d: None,
            info_3d: None,
            vulkan_info: None,
            backing_iovecs: None,
        })
    }

    fn map(&self, resource_id: u32) -> RutabagaResult<ExternalMapping> {
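        // Safe because map_func and unmap_func above only call into the gfxstream backend, which
        // wraps and validates the mapping for this resource id.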
        let map_result = unsafe { ExternalMapping::new(resource_id, map_func, unmap_func) };
        match map_result {
            Ok(mapping) => Ok(mapping),
            Err(e) => Err(RutabagaError::MappingFailed(e)),
        }
    }

    fn create_context(
        &self,
        ctx_id: u32,
        _context_init: u32,
    ) -> RutabagaResult<Box<dyn RutabagaContext>> {
        const CONTEXT_NAME: &[u8] = b"gpu_renderer";
        // Safe because virglrenderer is initialized by now and the context name is statically
        // allocated. The return value is checked before returning a new context.
        let ret = unsafe {
            pipe_virgl_renderer_context_create(
                ctx_id,
                CONTEXT_NAME.len() as u32,
                CONTEXT_NAME.as_ptr() as *const c_char,
            )
        };
        ret_to_res(ret)?;
        Ok(Box::new(GfxstreamContext { ctx_id }))
    }
}