/****************************************************************************
 ****************************************************************************
 ***
 ***   This header was automatically generated from a Linux kernel header
 ***   of the same name, to make information necessary for userspace to
 ***   call into the kernel available to libc. It contains only constants,
 ***   structures, and macros generated from the original header, and thus,
 ***   contains no copyrightable information.
 ***
 ***   To edit the content of this header, modify the corresponding
 ***   source file (e.g. under external/kernel-headers/original/) then
 ***   run bionic/libc/kernel/tools/update_all.py
 ***
 ***   Any manual change here will be lost the next time this script will
 ***   be run. You've been warned!
 ***
 ****************************************************************************
 ****************************************************************************/
#ifndef VIRTGPU_DRM_H
#define VIRTGPU_DRM_H
#include "drm.h"
#ifdef __cplusplus
extern "C" {
#endif
#define DRM_VIRTGPU_MAP 0x01
#define DRM_VIRTGPU_EXECBUFFER 0x02
#define DRM_VIRTGPU_GETPARAM 0x03
#define DRM_VIRTGPU_RESOURCE_CREATE 0x04
#define DRM_VIRTGPU_RESOURCE_INFO 0x05
#define DRM_VIRTGPU_TRANSFER_FROM_HOST 0x06
#define DRM_VIRTGPU_TRANSFER_TO_HOST 0x07
#define DRM_VIRTGPU_WAIT 0x08
#define DRM_VIRTGPU_GET_CAPS 0x09
#define DRM_VIRTGPU_RESOURCE_CREATE_BLOB 0x0a
#define DRM_VIRTGPU_CONTEXT_INIT 0x0b
#define VIRTGPU_EXECBUF_FENCE_FD_IN 0x01
#define VIRTGPU_EXECBUF_FENCE_FD_OUT 0x02
#define VIRTGPU_EXECBUF_RING_IDX 0x04
#define VIRTGPU_EXECBUF_FLAGS (VIRTGPU_EXECBUF_FENCE_FD_IN | VIRTGPU_EXECBUF_FENCE_FD_OUT | VIRTGPU_EXECBUF_RING_IDX | 0)
struct drm_virtgpu_map {
  __u64 offset;
  __u32 handle;
  __u32 pad;
};
struct drm_virtgpu_execbuffer {
  __u32 flags;
  __u32 size;
  __u64 command;
  __u64 bo_handles;
  __u32 num_bo_handles;
  __s32 fence_fd;
  __u32 ring_idx;
  __u32 pad;
};
#define VIRTGPU_PARAM_3D_FEATURES 1
#define VIRTGPU_PARAM_CAPSET_QUERY_FIX 2
#define VIRTGPU_PARAM_RESOURCE_BLOB 3
#define VIRTGPU_PARAM_HOST_VISIBLE 4
#define VIRTGPU_PARAM_CROSS_DEVICE 5
#define VIRTGPU_PARAM_CONTEXT_INIT 6
#define VIRTGPU_PARAM_SUPPORTED_CAPSET_IDs 7
struct drm_virtgpu_getparam {
  __u64 param;
  __u64 value;
};
struct drm_virtgpu_resource_create {
  __u32 target;
  __u32 format;
  __u32 bind;
  __u32 width;
  __u32 height;
  __u32 depth;
  __u32 array_size;
  __u32 last_level;
  __u32 nr_samples;
  __u32 flags;
  __u32 bo_handle;
  __u32 res_handle;
  __u32 size;
  __u32 stride;
};
struct drm_virtgpu_resource_info {
  __u32 bo_handle;
  __u32 res_handle;
  __u32 size;
  __u32 blob_mem;
};
struct drm_virtgpu_3d_box {
  __u32 x;
  __u32 y;
  __u32 z;
  __u32 w;
  __u32 h;
  __u32 d;
};
struct drm_virtgpu_3d_transfer_to_host {
  __u32 bo_handle;
  struct drm_virtgpu_3d_box box;
  __u32 level;
  __u32 offset;
  __u32 stride;
  __u32 layer_stride;
};
struct drm_virtgpu_3d_transfer_from_host {
  __u32 bo_handle;
  struct drm_virtgpu_3d_box box;
  __u32 level;
  __u32 offset;
  __u32 stride;
  __u32 layer_stride;
};
#define VIRTGPU_WAIT_NOWAIT 1
struct drm_virtgpu_3d_wait {
  __u32 handle;
  __u32 flags;
};
struct drm_virtgpu_get_caps {
  __u32 cap_set_id;
  __u32 cap_set_ver;
  __u64 addr;
  __u32 size;
  __u32 pad;
};
struct drm_virtgpu_resource_create_blob {
#define VIRTGPU_BLOB_MEM_GUEST 0x0001
#define VIRTGPU_BLOB_MEM_HOST3D 0x0002
#define VIRTGPU_BLOB_MEM_HOST3D_GUEST 0x0003
#define VIRTGPU_BLOB_FLAG_USE_MAPPABLE 0x0001
#define VIRTGPU_BLOB_FLAG_USE_SHAREABLE 0x0002
#define VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE 0x0004
  __u32 blob_mem;
  __u32 blob_flags;
  __u32 bo_handle;
  __u32 res_handle;
  __u64 size;
  __u32 pad;
  __u32 cmd_size;
  __u64 cmd;
  __u64 blob_id;
};
#define VIRTGPU_CONTEXT_PARAM_CAPSET_ID 0x0001
#define VIRTGPU_CONTEXT_PARAM_NUM_RINGS 0x0002
#define VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK 0x0003
struct drm_virtgpu_context_set_param {
  __u64 param;
  __u64 value;
};
struct drm_virtgpu_context_init {
  __u32 num_params;
  __u32 pad;
  __u64 ctx_set_params;
};
#define VIRTGPU_EVENT_FENCE_SIGNALED 0x90000000
#define DRM_IOCTL_VIRTGPU_MAP DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_MAP, struct drm_virtgpu_map)
#define DRM_IOCTL_VIRTGPU_EXECBUFFER DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_EXECBUFFER, struct drm_virtgpu_execbuffer)
#define DRM_IOCTL_VIRTGPU_GETPARAM DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GETPARAM, struct drm_virtgpu_getparam)
#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE, struct drm_virtgpu_resource_create)
#define DRM_IOCTL_VIRTGPU_RESOURCE_INFO DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_INFO, struct drm_virtgpu_resource_info)
#define DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_FROM_HOST, struct drm_virtgpu_3d_transfer_from_host)
#define DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_TO_HOST, struct drm_virtgpu_3d_transfer_to_host)
#define DRM_IOCTL_VIRTGPU_WAIT DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_WAIT, struct drm_virtgpu_3d_wait)
#define DRM_IOCTL_VIRTGPU_GET_CAPS DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GET_CAPS, struct drm_virtgpu_get_caps)
#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE_BLOB, struct drm_virtgpu_resource_create_blob)
#define DRM_IOCTL_VIRTGPU_CONTEXT_INIT DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_CONTEXT_INIT, struct drm_virtgpu_context_init)
#ifdef __cplusplus
}
#endif
#endif
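/*
 * Usage sketch (illustrative only; this comment is not produced by the
 * generator, and the code below would normally live in a separate .c file).
 * It shows one way to query VIRTGPU_PARAM_3D_FEATURES through
 * DRM_IOCTL_VIRTGPU_GETPARAM: the virtio-gpu driver treats
 * drm_virtgpu_getparam.value as a userspace pointer and writes the result
 * through it. The render node path /dev/dri/renderD128 is an assumption;
 * the actual node number depends on the system.
 *
 *   #include <fcntl.h>
 *   #include <stdint.h>
 *   #include <stdio.h>
 *   #include <sys/ioctl.h>
 *   #include <unistd.h>
 *   #include "virtgpu_drm.h"
 *
 *   int main(void) {
 *     int fd = open("/dev/dri/renderD128", O_RDWR);   // assumed render node
 *     if (fd < 0) {
 *       perror("open");
 *       return 1;
 *     }
 *     uint64_t value = 0;
 *     struct drm_virtgpu_getparam gp = {
 *       .param = VIRTGPU_PARAM_3D_FEATURES,
 *       .value = (uint64_t) (uintptr_t) &value,       // kernel writes here
 *     };
 *     if (ioctl(fd, DRM_IOCTL_VIRTGPU_GETPARAM, &gp) == 0)
 *       printf("3D features: %llu\n", (unsigned long long) value);
 *     else
 *       perror("DRM_IOCTL_VIRTGPU_GETPARAM");
 *     close(fd);
 *     return 0;
 *   }
 */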