/*
 * Copyright 2020 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "cros_gralloc/gralloc3/CrosGralloc3Utils.h"

#include <inttypes.h>

#include <array>
#include <limits>
#include <unordered_map>

#include <android-base/stringprintf.h>
#include <android-base/strings.h>
#include <cutils/native_handle.h>

#include "cros_gralloc/cros_gralloc_helpers.h"

using android::hardware::hidl_bitfield;
using android::hardware::hidl_handle;
using android::hardware::hidl_vec;
using android::hardware::graphics::common::V1_2::BufferUsage;
using android::hardware::graphics::common::V1_2::PixelFormat;

using BufferDescriptorInfo =
        android::hardware::graphics::mapper::V3_0::IMapper::BufferDescriptorInfo;

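/* Returns a human-readable name for |format|, for use in log messages. */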
std::string getPixelFormatString(PixelFormat format) {
    switch (format) {
        case PixelFormat::BGRA_8888:
            return "PixelFormat::BGRA_8888";
        case PixelFormat::BLOB:
            return "PixelFormat::BLOB";
        case PixelFormat::DEPTH_16:
            return "PixelFormat::DEPTH_16";
        case PixelFormat::DEPTH_24:
            return "PixelFormat::DEPTH_24";
        case PixelFormat::DEPTH_24_STENCIL_8:
            return "PixelFormat::DEPTH_24_STENCIL_8";
        case PixelFormat::DEPTH_32F:
            return "PixelFormat::DEPTH_32F";
        case PixelFormat::DEPTH_32F_STENCIL_8:
            return "PixelFormat::DEPTH_32F_STENCIL_8";
        case PixelFormat::HSV_888:
            return "PixelFormat::HSV_888";
        case PixelFormat::IMPLEMENTATION_DEFINED:
            return "PixelFormat::IMPLEMENTATION_DEFINED";
        case PixelFormat::RAW10:
            return "PixelFormat::RAW10";
        case PixelFormat::RAW12:
            return "PixelFormat::RAW12";
        case PixelFormat::RAW16:
            return "PixelFormat::RAW16";
        case PixelFormat::RAW_OPAQUE:
            return "PixelFormat::RAW_OPAQUE";
        case PixelFormat::RGBA_1010102:
            return "PixelFormat::RGBA_1010102";
        case PixelFormat::RGBA_8888:
            return "PixelFormat::RGBA_8888";
        case PixelFormat::RGBA_FP16:
            return "PixelFormat::RGBA_FP16";
        case PixelFormat::RGBX_8888:
            return "PixelFormat::RGBX_8888";
        case PixelFormat::RGB_565:
            return "PixelFormat::RGB_565";
        case PixelFormat::RGB_888:
            return "PixelFormat::RGB_888";
        case PixelFormat::STENCIL_8:
            return "PixelFormat::STENCIL_8";
        case PixelFormat::Y16:
            return "PixelFormat::Y16";
        case PixelFormat::Y8:
            return "PixelFormat::Y8";
        case PixelFormat::YCBCR_420_888:
            return "PixelFormat::YCBCR_420_888";
        case PixelFormat::YCBCR_422_I:
            return "PixelFormat::YCBCR_422_I";
        case PixelFormat::YCBCR_422_SP:
            return "PixelFormat::YCBCR_422_SP";
        case PixelFormat::YCBCR_P010:
            return "PixelFormat::YCBCR_P010";
        case PixelFormat::YCRCB_420_SP:
            return "PixelFormat::YCRCB_420_SP";
        case PixelFormat::YV12:
            return "PixelFormat::YV12";
    }
    return android::base::StringPrintf("PixelFormat::Unknown(%u)", static_cast<uint32_t>(format));
}

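/*
 * Returns a '|'-joined list of the BufferUsage flags set in |bufferUsage|, for
 * logging. Any bits that do not match a known flag are reported together as
 * "UnknownUsageBits-<value>".
 */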
std::string getUsageString(hidl_bitfield<BufferUsage> bufferUsage) {
    using Underlying = typename std::underlying_type<BufferUsage>::type;

    Underlying usage = static_cast<Underlying>(bufferUsage);

    std::vector<std::string> usages;
    if (usage & BufferUsage::CAMERA_INPUT) {
        usage &= ~static_cast<Underlying>(BufferUsage::CAMERA_INPUT);
        usages.push_back("BufferUsage::CAMERA_INPUT");
    }
    if (usage & BufferUsage::CAMERA_OUTPUT) {
        usage &= ~static_cast<Underlying>(BufferUsage::CAMERA_OUTPUT);
        usages.push_back("BufferUsage::CAMERA_OUTPUT");
    }
    if (usage & BufferUsage::COMPOSER_CURSOR) {
        usage &= ~static_cast<Underlying>(BufferUsage::COMPOSER_CURSOR);
        usages.push_back("BufferUsage::COMPOSER_CURSOR");
    }
    if (usage & BufferUsage::COMPOSER_OVERLAY) {
        usage &= ~static_cast<Underlying>(BufferUsage::COMPOSER_OVERLAY);
        usages.push_back("BufferUsage::COMPOSER_OVERLAY");
    }
    if (usage & BufferUsage::CPU_READ_OFTEN) {
        usage &= ~static_cast<Underlying>(BufferUsage::CPU_READ_OFTEN);
        usages.push_back("BufferUsage::CPU_READ_OFTEN");
    }
    if (usage & BufferUsage::CPU_READ_NEVER) {
        usage &= ~static_cast<Underlying>(BufferUsage::CPU_READ_NEVER);
        usages.push_back("BufferUsage::CPU_READ_NEVER");
    }
    if (usage & BufferUsage::CPU_READ_RARELY) {
        usage &= ~static_cast<Underlying>(BufferUsage::CPU_READ_RARELY);
        usages.push_back("BufferUsage::CPU_READ_RARELY");
    }
    if (usage & BufferUsage::CPU_WRITE_NEVER) {
        usage &= ~static_cast<Underlying>(BufferUsage::CPU_WRITE_NEVER);
        usages.push_back("BufferUsage::CPU_WRITE_NEVER");
    }
    if (usage & BufferUsage::CPU_WRITE_OFTEN) {
        usage &= ~static_cast<Underlying>(BufferUsage::CPU_WRITE_OFTEN);
        usages.push_back("BufferUsage::CPU_WRITE_OFTEN");
    }
    if (usage & BufferUsage::CPU_WRITE_RARELY) {
        usage &= ~static_cast<Underlying>(BufferUsage::CPU_WRITE_RARELY);
        usages.push_back("BufferUsage::CPU_WRITE_RARELY");
    }
    if (usage & BufferUsage::GPU_RENDER_TARGET) {
        usage &= ~static_cast<Underlying>(BufferUsage::GPU_RENDER_TARGET);
        usages.push_back("BufferUsage::GPU_RENDER_TARGET");
    }
    if (usage & BufferUsage::GPU_TEXTURE) {
        usage &= ~static_cast<Underlying>(BufferUsage::GPU_TEXTURE);
        usages.push_back("BufferUsage::GPU_TEXTURE");
    }
    if (usage & BufferUsage::PROTECTED) {
        usage &= ~static_cast<Underlying>(BufferUsage::PROTECTED);
        usages.push_back("BufferUsage::PROTECTED");
    }
    if (usage & BufferUsage::RENDERSCRIPT) {
        usage &= ~static_cast<Underlying>(BufferUsage::RENDERSCRIPT);
        usages.push_back("BufferUsage::RENDERSCRIPT");
    }
    if (usage & BufferUsage::VIDEO_DECODER) {
        usage &= ~static_cast<Underlying>(BufferUsage::VIDEO_DECODER);
        usages.push_back("BufferUsage::VIDEO_DECODER");
    }
    if (usage & BufferUsage::VIDEO_ENCODER) {
        usage &= ~static_cast<Underlying>(BufferUsage::VIDEO_ENCODER);
        usages.push_back("BufferUsage::VIDEO_ENCODER");
    }

    if (usage) {
        usages.push_back(android::base::StringPrintf("UnknownUsageBits-%" PRIu64, usage));
    }

    return android::base::Join(usages, '|');
}

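/*
 * Maps a HIDL PixelFormat to the corresponding DRM fourcc format. Returns 0
 * and fills |outDrmFormat| on success, or -EINVAL for formats that minigbm
 * does not support.
 */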
int convertToDrmFormat(PixelFormat format, uint32_t* outDrmFormat) {
    switch (format) {
        case PixelFormat::BGRA_8888:
            *outDrmFormat = DRM_FORMAT_ARGB8888;
            return 0;
        /**
         * Choose DRM_FORMAT_R8 because <system/graphics.h> requires that
         * buffers with the HAL_PIXEL_FORMAT_BLOB format have a height of 1
         * and a width equal to their size in bytes.
         */
        case PixelFormat::BLOB:
            *outDrmFormat = DRM_FORMAT_R8;
            return 0;
        case PixelFormat::DEPTH_16:
            return -EINVAL;
        case PixelFormat::DEPTH_24:
            return -EINVAL;
        case PixelFormat::DEPTH_24_STENCIL_8:
            return -EINVAL;
        case PixelFormat::DEPTH_32F:
            return -EINVAL;
        case PixelFormat::DEPTH_32F_STENCIL_8:
            return -EINVAL;
        case PixelFormat::HSV_888:
            return -EINVAL;
        case PixelFormat::IMPLEMENTATION_DEFINED:
            *outDrmFormat = DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED;
            return 0;
        case PixelFormat::RAW10:
            return -EINVAL;
        case PixelFormat::RAW12:
            return -EINVAL;
        case PixelFormat::RAW16:
            *outDrmFormat = DRM_FORMAT_R16;
            return 0;
        /* TODO: use blob */
        case PixelFormat::RAW_OPAQUE:
            return -EINVAL;
        case PixelFormat::RGBA_1010102:
            *outDrmFormat = DRM_FORMAT_ABGR2101010;
            return 0;
        case PixelFormat::RGBA_8888:
            *outDrmFormat = DRM_FORMAT_ABGR8888;
            return 0;
        case PixelFormat::RGBA_FP16:
            *outDrmFormat = DRM_FORMAT_ABGR16161616F;
            return 0;
        case PixelFormat::RGBX_8888:
            *outDrmFormat = DRM_FORMAT_XBGR8888;
            return 0;
        case PixelFormat::RGB_565:
            *outDrmFormat = DRM_FORMAT_RGB565;
            return 0;
        case PixelFormat::RGB_888:
            *outDrmFormat = DRM_FORMAT_RGB888;
            return 0;
        case PixelFormat::STENCIL_8:
            return -EINVAL;
        case PixelFormat::Y16:
            *outDrmFormat = DRM_FORMAT_R16;
            return 0;
        case PixelFormat::Y8:
            *outDrmFormat = DRM_FORMAT_R8;
            return 0;
        case PixelFormat::YCBCR_420_888:
            *outDrmFormat = DRM_FORMAT_FLEX_YCbCr_420_888;
            return 0;
        case PixelFormat::YCBCR_422_SP:
            return -EINVAL;
        case PixelFormat::YCBCR_422_I:
            return -EINVAL;
        case PixelFormat::YCBCR_P010:
            *outDrmFormat = DRM_FORMAT_P010;
            return 0;
        case PixelFormat::YCRCB_420_SP:
            *outDrmFormat = DRM_FORMAT_NV21;
            return 0;
        case PixelFormat::YV12:
            *outDrmFormat = DRM_FORMAT_YVU420_ANDROID;
            return 0;
    }
    return -EINVAL;
}

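/*
 * Translates gralloc BufferUsage flags into the minigbm BO_USE_* flags used
 * when allocating the buffer. Returns 0 and fills |outBufferUsage|.
 */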
int convertToBufferUsage(uint64_t grallocUsage, uint64_t* outBufferUsage) {
    uint64_t bufferUsage = BO_USE_NONE;

    if ((grallocUsage & BufferUsage::CPU_READ_MASK) ==
        static_cast<uint64_t>(BufferUsage::CPU_READ_RARELY)) {
        bufferUsage |= BO_USE_SW_READ_RARELY;
    }
    if ((grallocUsage & BufferUsage::CPU_READ_MASK) ==
        static_cast<uint64_t>(BufferUsage::CPU_READ_OFTEN)) {
        bufferUsage |= BO_USE_SW_READ_OFTEN;
    }
    if ((grallocUsage & BufferUsage::CPU_WRITE_MASK) ==
        static_cast<uint64_t>(BufferUsage::CPU_WRITE_RARELY)) {
        bufferUsage |= BO_USE_SW_WRITE_RARELY;
    }
    if ((grallocUsage & BufferUsage::CPU_WRITE_MASK) ==
        static_cast<uint64_t>(BufferUsage::CPU_WRITE_OFTEN)) {
        bufferUsage |= BO_USE_SW_WRITE_OFTEN;
    }
    if (grallocUsage & BufferUsage::GPU_TEXTURE) {
        bufferUsage |= BO_USE_TEXTURE;
    }
    if (grallocUsage & BufferUsage::GPU_RENDER_TARGET) {
        bufferUsage |= BO_USE_RENDERING;
    }
    if (grallocUsage & BufferUsage::COMPOSER_OVERLAY) {
        /* HWC wants to use display hardware, but can defer to OpenGL. */
        bufferUsage |= BO_USE_SCANOUT | BO_USE_TEXTURE;
    }
    /* Map this flag to linear until real HW protection is available on Android. */
    if (grallocUsage & BufferUsage::PROTECTED) {
        bufferUsage |= BO_USE_LINEAR;
    }
    if (grallocUsage & BufferUsage::COMPOSER_CURSOR) {
        bufferUsage |= BO_USE_NONE;
    }
    if (grallocUsage & BufferUsage::VIDEO_ENCODER) {
        /* HACK: see b/30054495 */
        bufferUsage |= BO_USE_SW_READ_OFTEN;
    }
    if (grallocUsage & BufferUsage::CAMERA_OUTPUT) {
        bufferUsage |= BO_USE_CAMERA_WRITE;
    }
    if (grallocUsage & BufferUsage::CAMERA_INPUT) {
        bufferUsage |= BO_USE_CAMERA_READ;
    }
    if (grallocUsage & BufferUsage::RENDERSCRIPT) {
        bufferUsage |= BO_USE_RENDERSCRIPT;
    }
    if (grallocUsage & BufferUsage::VIDEO_DECODER) {
        bufferUsage |= BO_USE_HW_VIDEO_DECODER;
    }

    *outBufferUsage = bufferUsage;
    return 0;
}

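/*
 * Derives the BO_MAP_* flags used for CPU mapping from the CPU read/write
 * bits of the gralloc usage. Returns 0 and fills |outMapUsage|.
 */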
int convertToMapUsage(uint64_t grallocUsage, uint32_t* outMapUsage) {
    uint32_t mapUsage = BO_MAP_NONE;

    if (grallocUsage & BufferUsage::CPU_READ_MASK) {
        mapUsage |= BO_MAP_READ;
    }
    if (grallocUsage & BufferUsage::CPU_WRITE_MASK) {
        mapUsage |= BO_MAP_WRITE;
    }

    *outMapUsage = mapUsage;
    return 0;
}

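/*
 * Translates an IMapper 3.0 BufferDescriptorInfo into the internal
 * cros_gralloc_buffer_descriptor consumed by the minigbm driver. Only a
 * single layer is supported; unsupported formats or usage flags are logged
 * and rejected with -EINVAL. Typical use (the caller-side variables here are
 * illustrative, not part of this file):
 *
 *   struct cros_gralloc_buffer_descriptor crosDescriptor;
 *   if (convertToCrosDescriptor(descriptorInfo, &crosDescriptor)) {
 *       // Reject the allocation/registration request.
 *   }
 */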
int convertToCrosDescriptor(const BufferDescriptorInfo& descriptor,
                            struct cros_gralloc_buffer_descriptor* outCrosDescriptor) {
    outCrosDescriptor->width = descriptor.width;
    outCrosDescriptor->height = descriptor.height;
    outCrosDescriptor->droid_format = static_cast<int32_t>(descriptor.format);
    outCrosDescriptor->droid_usage = descriptor.usage;
    outCrosDescriptor->reserved_region_size = 0;
    if (descriptor.layerCount > 1) {
331 drv_log("Failed to convert descriptor. Unsupported layerCount: %d\n",
332 descriptor.layerCount);
        return -EINVAL;
    }
    if (convertToDrmFormat(descriptor.format, &outCrosDescriptor->drm_format)) {
        std::string pixelFormatString = getPixelFormatString(descriptor.format);
        drv_log("Failed to convert descriptor. Unsupported format %s\n", pixelFormatString.c_str());
        return -EINVAL;
    }
    if (convertToBufferUsage(descriptor.usage, &outCrosDescriptor->use_flags)) {
        std::string usageString = getUsageString(descriptor.usage);
        drv_log("Failed to convert descriptor. Unsupported usage flags %s\n", usageString.c_str());
        return -EINVAL;
    }
    return 0;
}

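/*
 * Extracts the sync fence fd from a HIDL fence handle. Sets |outFenceFd| to
 * -1 when no fence is present and returns -EINVAL for malformed handles. The
 * fd is returned as-is (not dup'd), so its lifetime is tied to the handle.
 */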
int convertToFenceFd(const hidl_handle& fenceHandle, int* outFenceFd) {
    if (!outFenceFd) {
        return -EINVAL;
    }

    const native_handle_t* nativeHandle = fenceHandle.getNativeHandle();
    if (nativeHandle && nativeHandle->numFds > 1) {
        return -EINVAL;
    }

    *outFenceFd = (nativeHandle && nativeHandle->numFds == 1) ? nativeHandle->data[0] : -1;
    return 0;
}

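/*
 * Wraps a sync fence fd in a hidl_handle. A negative |fenceFd| leaves
 * |outFenceHandle| empty, which HIDL treats as "no fence".
 */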
int convertToFenceHandle(int fenceFd, hidl_handle* outFenceHandle) {
    if (!outFenceHandle) {
        return -EINVAL;
    }
    if (fenceFd < 0) {
        return 0;
    }

    /*
     * Allocate the native_handle on the heap and let the hidl_handle own it,
     * so the wrapper remains valid after this function returns (stack-backed
     * storage would dangle). Ownership covers only the native_handle struct;
     * the fence fd itself is neither dup'd nor closed here.
     */
    native_handle_t* fenceHandle = native_handle_create(1 /*numFds*/, 0 /*numInts*/);
    if (!fenceHandle) {
        return -ENOMEM;
    }
    fenceHandle->data[0] = fenceFd;

    outFenceHandle->setTo(fenceHandle, /*shouldOwn=*/true);
    return 0;
}

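/*
 * Rebuilds a BufferDescriptorInfo from the 5-element uint32_t vector produced
 * by encodeBufferDescriptorInfo(). Returns an empty optional if the encoded
 * vector has an unexpected size.
 */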
std::optional<BufferDescriptorInfo> decodeBufferDescriptorInfo(const hidl_vec<uint32_t>& encoded) {
    if (encoded.size() != 5) {
380 drv_log("Failed to decodeBufferDescriptorInfo. Invalid size: %zd.\n", encoded.size());
        return {};
    }

    BufferDescriptorInfo descriptor;
    descriptor.width = encoded[0];
    descriptor.height = encoded[1];
    descriptor.layerCount = encoded[2];
    descriptor.format = static_cast<PixelFormat>(encoded[3]);
    descriptor.usage = encoded[4];
    return std::move(descriptor);
}

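/*
 * Flattens a BufferDescriptorInfo into the 5-element uint32_t vector passed
 * across the IMapper HAL boundary. Note that only the low 32 bits of the
 * 64-bit usage field are encoded, matching what decodeBufferDescriptorInfo()
 * reads back.
 */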
std::optional<hidl_vec<uint32_t>> encodeBufferDescriptorInfo(const BufferDescriptorInfo& info) {
    hidl_vec<uint32_t> encoded;
    encoded.resize(5);
    encoded[0] = info.width;
    encoded[1] = info.height;
    encoded[2] = info.layerCount;
    encoded[3] = static_cast<uint32_t>(info.format);
    encoded[4] = info.usage & std::numeric_limits<uint32_t>::max();
    return std::move(encoded);
}