/*
 * Copyright (c) 2011-2014, The Linux Foundation. All rights reserved.

 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of The Linux Foundation nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <cutils/log.h>
#include <fcntl.h>
#include <dlfcn.h>
#include "gralloc_priv.h"
#include "alloc_controller.h"
#include "memalloc.h"
#include "ionalloc.h"
#include "gr.h"
#include "comptype.h"
#include "mdp_version.h"

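// If the Venus color format helpers are unavailable on this target, stub the
// stride/scanline/size macros out to 0 so the file still compiles; Venus
// (NV12_ENCODEABLE / 420_SP_VENUS) buffers are then effectively unsupported.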
#ifdef VENUS_COLOR_FORMAT
#include <media/msm_media_info.h>
#else
#define VENUS_Y_STRIDE(args...) 0
#define VENUS_Y_SCANLINES(args...) 0
#define VENUS_BUFFER_SIZE(args...) 0
#endif

#define ASTC_BLOCK_SIZE 16

using namespace gralloc;
using namespace qdutils;

ANDROID_SINGLETON_STATIC_INSTANCE(AdrenoMemInfo);

//Common functions
static bool canFallback(int usage, bool triedSystem)
{
    // Fallback to system heap when alloc fails unless
    // 1. Composition type is MDP
    // 2. Alloc from system heap was already tried
    // 3. The heap type is requested explicitly
    // 4. The heap type is protected
    // 5. The buffer is meant for external display only

    if(QCCompositionType::getInstance().getCompositionType() &
       COMPOSITION_TYPE_MDP)
        return false;
    if(triedSystem)
        return false;
    if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PROTECTED))
        return false;
    if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PRIVATE_EXTERNAL_ONLY))
        return false;
    //Return true by default
    return true;
}

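// Return true when the buffer should be allocated as uncached: either the
// client asks for it explicitly, or CPU reads/writes are declared to be rare.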
static bool useUncached(int usage)
{
    if (usage & GRALLOC_USAGE_PRIVATE_UNCACHED)
        return true;
    if(((usage & GRALLOC_USAGE_SW_WRITE_MASK) == GRALLOC_USAGE_SW_WRITE_RARELY)
       ||((usage & GRALLOC_USAGE_SW_READ_MASK) == GRALLOC_USAGE_SW_READ_RARELY))
        return true;
    return false;
}

//-------------- AdrenoMemInfo-----------------------//
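// The constructor binds to the optional GPU helper library at runtime via
// dlopen/dlsym; if the library or a symbol is missing, the corresponding
// function pointer stays NULL and callers fall back to default alignment.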
AdrenoMemInfo::AdrenoMemInfo()
{
    LINK_adreno_compute_aligned_width_and_height = NULL;
    LINK_adreno_compute_padding = NULL;
    LINK_adreno_isMacroTilingSupportedByGpu = NULL;
    LINK_adreno_compute_compressedfmt_aligned_width_and_height = NULL;

    libadreno_utils = ::dlopen("libadreno_utils.so", RTLD_NOW);
    if (libadreno_utils) {
        *(void **)&LINK_adreno_compute_aligned_width_and_height =
                ::dlsym(libadreno_utils, "compute_aligned_width_and_height");
        *(void **)&LINK_adreno_compute_padding =
                ::dlsym(libadreno_utils, "compute_surface_padding");
        *(void **)&LINK_adreno_isMacroTilingSupportedByGpu =
                ::dlsym(libadreno_utils, "isMacroTilingSupportedByGpu");
        *(void **)&LINK_adreno_compute_compressedfmt_aligned_width_and_height =
                ::dlsym(libadreno_utils,
                        "compute_compressedfmt_aligned_width_and_height");
    }
}

AdrenoMemInfo::~AdrenoMemInfo()
{
    if (libadreno_utils) {
        ::dlclose(libadreno_utils);
    }
}

int AdrenoMemInfo::isMacroTilingSupportedByGPU()
{
    if ((libadreno_utils)) {
        if(LINK_adreno_isMacroTilingSupportedByGpu) {
            return LINK_adreno_isMacroTilingSupportedByGpu();
        }
    }
    return 0;
}


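// Compute the aligned (padded) width and height for a buffer of the given
// format. RGB formats defer to the Adreno helper library when available;
// YUV and compressed formats use fixed per-format alignment rules.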
void AdrenoMemInfo::getAlignedWidthAndHeight(int width, int height, int format,
                            int tile_enabled, int& aligned_w, int& aligned_h)
{
    aligned_w = width;
    aligned_h = height;
    // Currently surface padding is only computed for RGB* surfaces.
    if (format <= HAL_PIXEL_FORMAT_sRGB_X_8888) {
        aligned_w = ALIGN(width, 32);
        aligned_h = ALIGN(height, 32);
        // Don't add any additional padding if debug.gralloc.map_fb_memory
        // is enabled
        char property[PROPERTY_VALUE_MAX];
        if((property_get("debug.gralloc.map_fb_memory", property, NULL) > 0) &&
           (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
            (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
            return;
        }

        int bpp = 4;
        switch(format)
        {
            case HAL_PIXEL_FORMAT_RGB_888:
                bpp = 3;
                break;
            case HAL_PIXEL_FORMAT_RGB_565:
                bpp = 2;
                break;
            default: break;
        }
        if (libadreno_utils) {
            int raster_mode       = 0;   // Adreno unknown raster mode.
            int padding_threshold = 512; // Threshold for padding surfaces.
            // the function below computes aligned width and aligned height
            // based on linear or macro tile mode selected.
            if(LINK_adreno_compute_aligned_width_and_height) {
                LINK_adreno_compute_aligned_width_and_height(width,
                                     height, bpp, tile_enabled,
                                     raster_mode, padding_threshold,
                                     &aligned_w, &aligned_h);

            } else if(LINK_adreno_compute_padding) {
                int surface_tile_height = 1;   // Linear surface
                aligned_w = LINK_adreno_compute_padding(width, bpp,
                                     surface_tile_height, raster_mode,
                                     padding_threshold);
                ALOGW("%s: Warning!! Old GFX API is used to calculate stride",
                      __FUNCTION__);
            } else {
                ALOGW("%s: Warning!! Symbols compute_surface_padding and "
                      "compute_aligned_width_and_height not found",
                      __FUNCTION__);
            }
        }
    } else {
        switch (format)
        {
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:
                aligned_w = ALIGN(width, 32);
                break;
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                aligned_w = ALIGN(width, 16);
                break;
            case HAL_PIXEL_FORMAT_RAW10:
                aligned_w = ALIGN(width * 10 / 8, 16);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:
                aligned_w = ALIGN(width, 128);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP:
            case HAL_PIXEL_FORMAT_YV12:
            case HAL_PIXEL_FORMAT_YCbCr_422_SP:
            case HAL_PIXEL_FORMAT_YCrCb_422_SP:
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
            case HAL_PIXEL_FORMAT_YCrCb_422_I:
                aligned_w = ALIGN(width, 16);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
            case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
                aligned_w = VENUS_Y_STRIDE(COLOR_FMT_NV12, width);
                aligned_h = VENUS_Y_SCANLINES(COLOR_FMT_NV12, height);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                break;
            case HAL_PIXEL_FORMAT_NV21_ZSL:
                aligned_w = ALIGN(width, 64);
                aligned_h = ALIGN(height, 64);
                break;
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_4x4_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_5x4_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_5x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_6x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_6x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x8_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x8_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x10_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_12x10_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_12x12_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR:
                if(LINK_adreno_compute_compressedfmt_aligned_width_and_height) {
                    int bytesPerPixel = 0;
                    int raster_mode       = 0;   //Adreno unknown raster mode.
                    int padding_threshold = 512; //Threshold for padding
                                                 //surfaces.

                    LINK_adreno_compute_compressedfmt_aligned_width_and_height(
                        width, height, format, 0, raster_mode, padding_threshold,
                        &aligned_w, &aligned_h, &bytesPerPixel);

                } else {
                    ALOGW("%s: Warning!! Symbol"
                          " compute_compressedfmt_aligned_width_and_height"
                          " not found", __FUNCTION__);
                }
                break;
            default: break;
        }
    }
}

//-------------- IAllocController-----------------------//
IAllocController* IAllocController::sController = NULL;
IAllocController* IAllocController::getInstance(void)
{
    if(sController == NULL) {
        sController = new IonController();
    }
    return sController;
}


//-------------- IonController-----------------------//
IonController::IonController()
{
    mIonAlloc = new IonAlloc();
}

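// Translate gralloc usage flags into ION heap/security flags, allocate the
// buffer, and fall back to the system heap when the first attempt fails and
// canFallback() allows it.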
int IonController::allocate(alloc_data& data, int usage)
{
    int ionFlags = 0;
    int ret;

    data.uncached = useUncached(usage);
    data.allocType = 0;

    if(usage & GRALLOC_USAGE_PRIVATE_UI_CONTIG_HEAP)
        ionFlags |= ION_HEAP(ION_SF_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP)
        ionFlags |= ION_HEAP(ION_SYSTEM_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_IOMMU_HEAP)
        ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);

    if(usage & GRALLOC_USAGE_PROTECTED) {
        if (usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
            ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID);
            ionFlags |= ION_SECURE;
        } else {
            // for targets/OEMs which do not need HW level protection
            // do not set ion secure flag & MM heap. Fallback to IOMMU heap.
            ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
        }
    } else if(usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
        //MM Heap is exclusively a secure heap.
        //If it is used for non secure cases, fallback to IOMMU heap
        ALOGW("GRALLOC_USAGE_PRIVATE_MM_HEAP cannot be used as an insecure "
              "heap! Trying to use IOMMU instead!!");
        ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
    }

    if(usage & GRALLOC_USAGE_PRIVATE_CAMERA_HEAP)
        ionFlags |= ION_HEAP(ION_CAMERA_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_ADSP_HEAP)
        ionFlags |= ION_HEAP(ION_ADSP_HEAP_ID);

    if(ionFlags & ION_SECURE)
        data.allocType |= private_handle_t::PRIV_FLAGS_SECURE_BUFFER;

    // if no flags are set, default to
    // SF + IOMMU heaps, so that bypass can work
    // we can fall back to system heap if
    // we run out.
    if(!ionFlags)
        ionFlags = ION_HEAP(ION_SF_HEAP_ID) | ION_HEAP(ION_IOMMU_HEAP_ID);

    data.flags = ionFlags;
    ret = mIonAlloc->alloc_buffer(data);

    // Fallback
    if(ret < 0 && canFallback(usage,
                              (ionFlags & ION_SYSTEM_HEAP_ID)))
    {
        ALOGW("Falling back to system heap");
        data.flags = ION_HEAP(ION_SYSTEM_HEAP_ID);
        ret = mIonAlloc->alloc_buffer(data);
    }

    if(ret >= 0 ) {
        data.allocType |= private_handle_t::PRIV_FLAGS_USES_ION;
    }

    return ret;
}

IMemAlloc* IonController::getAllocator(int flags)
{
    IMemAlloc* memalloc = NULL;
    if (flags & private_handle_t::PRIV_FLAGS_USES_ION) {
        memalloc = mIonAlloc;
    } else {
        ALOGE("%s: Invalid flags passed: 0x%x", __FUNCTION__, flags);
    }

    return memalloc;
}

bool isMacroTileEnabled(int format, int usage)
{
    bool tileEnabled = false;

    // Check whether GPU & MDSS support the MacroTiling feature
    if(AdrenoMemInfo::getInstance().isMacroTilingSupportedByGPU() &&
       qdutils::MDPVersion::getInstance().supportsMacroTile())
    {
        // check the format
        switch(format)
        {
            case HAL_PIXEL_FORMAT_RGBA_8888:
            case HAL_PIXEL_FORMAT_RGBX_8888:
            case HAL_PIXEL_FORMAT_BGRA_8888:
            case HAL_PIXEL_FORMAT_RGB_565:
                {
                    tileEnabled = true;
                    // check the usage flags
                    if (usage & (GRALLOC_USAGE_SW_READ_MASK |
                                 GRALLOC_USAGE_SW_WRITE_MASK)) {
                        // Application intends to use CPU for rendering
                        tileEnabled = false;
                    }
                    break;
                }
            default:
                break;
        }
    }
    return tileEnabled;
}

// Helper function: compute the buffer size (in bytes) for the given format
// from the aligned width/height computed above.
size_t getSize(int format, int width, int height, const int alignedw,
        const int alignedh) {
    size_t size = 0;

    switch (format) {
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_BGRA_8888:
        case HAL_PIXEL_FORMAT_sRGB_A_8888:
        case HAL_PIXEL_FORMAT_sRGB_X_8888:
            size = alignedw * alignedh * 4;
            break;
        case HAL_PIXEL_FORMAT_RGB_888:
            size = alignedw * alignedh * 3;
            break;
        case HAL_PIXEL_FORMAT_RGB_565:
        case HAL_PIXEL_FORMAT_RAW_SENSOR:
            size = alignedw * alignedh * 2;
            break;
        case HAL_PIXEL_FORMAT_RAW10:
            size = ALIGN(alignedw * alignedh, 4096);
            break;

        // adreno formats
        case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:  // NV21
            size  = ALIGN(alignedw*alignedh, 4096);
            size += ALIGN(2 * ALIGN(width/2, 32) * ALIGN(height/2, 32), 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:   // NV12
            // The chroma plane is subsampled,
            // but the pitch in bytes is unchanged
            // The GPU needs 4K alignment, but the video decoder needs 8K
            size  = ALIGN(alignedw * alignedh, 8192);
            size += ALIGN(alignedw * ALIGN(height/2, 32), 8192);
            break;
        case HAL_PIXEL_FORMAT_YV12:
            if ((format == HAL_PIXEL_FORMAT_YV12) && ((width&1) || (height&1))) {
                ALOGE("w or h is odd for the YV12 format");
                return 0;
            }
            size = alignedw*alignedh +
                    (ALIGN(alignedw/2, 16) * (alignedh/2))*2;
            size = ALIGN(size, (size_t)4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            size = ALIGN((alignedw*alignedh) + (alignedw* alignedh)/2 + 1, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
        case HAL_PIXEL_FORMAT_YCrCb_422_SP:
        case HAL_PIXEL_FORMAT_YCbCr_422_I:
        case HAL_PIXEL_FORMAT_YCrCb_422_I:
            if(width & 1) {
                ALOGE("width is odd for the YUV422_SP format");
                return 0;
            }
            size = ALIGN(alignedw * alignedh * 2, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
            size = VENUS_BUFFER_SIZE(COLOR_FMT_NV12, width, height);
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if(height != 1) {
                ALOGE("%s: Buffers with format HAL_PIXEL_FORMAT_BLOB "
                      "must have height==1", __FUNCTION__);
                return 0;
            }
            size = width;
            break;
        case HAL_PIXEL_FORMAT_NV21_ZSL:
            size = ALIGN((alignedw*alignedh) + (alignedw* alignedh)/2, 4096);
            break;
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_4x4_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_5x4_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_5x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_6x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_6x6_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x6_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x8_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x6_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x8_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x10_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_12x10_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_12x12_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR:
            size = alignedw * alignedh * ASTC_BLOCK_SIZE;
            break;
        default:
            ALOGE("unrecognized pixel format: 0x%x", format);
            return 0;
    }
    return size;
}

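// Legacy overload without usage flags: macro-tiling is assumed disabled when
// computing the aligned dimensions.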
size_t getBufferSizeAndDimensions(int width, int height, int format,
        int& alignedw, int &alignedh)
{
    size_t size;

    AdrenoMemInfo::getInstance().getAlignedWidthAndHeight(width,
            height,
            format,
            false,
            alignedw,
            alignedh);

    size = getSize(format, width, height, alignedw, alignedh);

    return size;
}


size_t getBufferSizeAndDimensions(int width, int height, int format, int usage,
        int& alignedw, int &alignedh)
{
    size_t size;
    int tileEnabled = isMacroTileEnabled(format, usage);

    AdrenoMemInfo::getInstance().getAlignedWidthAndHeight(width,
            height,
            format,
            tileEnabled,
            alignedw,
            alignedh);

    size = getSize(format, width, height, alignedw, alignedh);

    return size;
}


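// Compute all allocation attributes (aligned dimensions, macro-tile flag and
// size) for a buffer in one call.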
void getBufferAttributes(int width, int height, int format, int usage,
        int& alignedw, int &alignedh, int& tileEnabled, size_t& size)
{
    tileEnabled = isMacroTileEnabled(format, usage);

    AdrenoMemInfo::getInstance().getAlignedWidthAndHeight(width,
            height,
            format,
            tileEnabled,
            alignedw,
            alignedh);
    size = getSize(format, width, height, alignedw, alignedh);
}

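// Fill an android_ycbcr structure with plane pointers, strides and the chroma
// step for a mapped YUV buffer, based on the handle's format and dimensions.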
int getYUVPlaneInfo(private_handle_t* hnd, struct android_ycbcr* ycbcr)
{
    int err = 0;
    size_t ystride, cstride;
    memset(ycbcr->reserved, 0, sizeof(ycbcr->reserved));

    // Get the chroma offsets from the handle width/height. We take advantage
    // of the fact that the width _is_ the stride
    switch (hnd->format) {
        //Semiplanar
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE: //Same as YCbCr_420_SP_VENUS
            ystride = cstride = hnd->width;
            ycbcr->y  = (void*)hnd->base;
            ycbcr->cb = (void*)(hnd->base + ystride * hnd->height);
            ycbcr->cr = (void*)(hnd->base + ystride * hnd->height + 1);
            ycbcr->ystride = ystride;
            ycbcr->cstride = cstride;
            ycbcr->chroma_step = 2;
            break;

        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_422_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:
        case HAL_PIXEL_FORMAT_NV21_ZSL:
        case HAL_PIXEL_FORMAT_RAW_SENSOR:
        case HAL_PIXEL_FORMAT_RAW10:
            ystride = cstride = hnd->width;
            ycbcr->y  = (void*)hnd->base;
            ycbcr->cr = (void*)(hnd->base + ystride * hnd->height);
            ycbcr->cb = (void*)(hnd->base + ystride * hnd->height + 1);
            ycbcr->ystride = ystride;
            ycbcr->cstride = cstride;
            ycbcr->chroma_step = 2;
            break;

        //Planar
        case HAL_PIXEL_FORMAT_YV12:
            ystride = hnd->width;
            cstride = hnd->width/2;
            ycbcr->y  = (void*)hnd->base;
            ycbcr->cr = (void*)(hnd->base + ystride * hnd->height);
            ycbcr->cb = (void*)(hnd->base + ystride * hnd->height +
                    cstride * hnd->height/2);
            ycbcr->ystride = ystride;
            ycbcr->cstride = cstride;
            ycbcr->chroma_step = 1;

            break;
        //Unsupported formats
        case HAL_PIXEL_FORMAT_YCbCr_422_I:
        case HAL_PIXEL_FORMAT_YCrCb_422_I:
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:
        default:
            ALOGD("%s: Invalid format passed: 0x%x", __FUNCTION__,
                  hnd->format);
            err = -EINVAL;
    }
    return err;

}



// Allocate buffer from width, height and format into a
// private_handle_t. It is the responsibility of the caller
// to free the buffer using the free_buffer function
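//
// Illustrative usage only (not part of this module's API); the dimensions,
// format and usage values below are arbitrary examples:
//
//     private_handle_t* hnd = NULL;
//     if (alloc_buffer(&hnd, 1920, 1080, HAL_PIXEL_FORMAT_RGBA_8888,
//                      GRALLOC_USAGE_HW_TEXTURE) == 0) {
//         // ... use hnd->fd / hnd->base ...
//         free_buffer(hnd);
//     }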
int alloc_buffer(private_handle_t **pHnd, int w, int h, int format, int usage)
{
    alloc_data data;
    int alignedw, alignedh;
    gralloc::IAllocController* sAlloc =
        gralloc::IAllocController::getInstance();
    data.base = 0;
    data.fd = -1;
    data.offset = 0;
    data.size = getBufferSizeAndDimensions(w, h, format, usage, alignedw,
                                           alignedh);

    data.align = getpagesize();
    data.uncached = useUncached(usage);
    int allocFlags = usage;

    int err = sAlloc->allocate(data, allocFlags);
    if (0 != err) {
        ALOGE("%s: allocate failed", __FUNCTION__);
        return -ENOMEM;
    }

    private_handle_t* hnd = new private_handle_t(data.fd, data.size,
                                                 data.allocType, 0, format,
                                                 alignedw, alignedh);
    hnd->base = (uintptr_t) data.base;
    hnd->offset = data.offset;
    hnd->gpuaddr = 0;
    *pHnd = hnd;
    return 0;
}

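// Release a buffer allocated with alloc_buffer(): return the memory to the
// allocator that owns it and delete the handle.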
void free_buffer(private_handle_t *hnd)
{
    gralloc::IAllocController* sAlloc =
        gralloc::IAllocController::getInstance();
    if (hnd && hnd->fd > 0) {
        IMemAlloc* memalloc = sAlloc->getAllocator(hnd->flags);
        memalloc->free_buffer((void*)hnd->base, hnd->size, hnd->offset, hnd->fd);
    }
    if(hnd)
        delete hnd;

}