/*
 * Copyright (c) 2011-2014,2016, The Linux Foundation. All rights reserved.

 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of The Linux Foundation nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <cutils/log.h>
#include <fcntl.h>
#include <dlfcn.h>
#include "gralloc_priv.h"
#include "alloc_controller.h"
#include "memalloc.h"
#include "ionalloc.h"
#include "gr.h"
#include "comptype.h"
#include "mdp_version.h"

#ifdef VENUS_COLOR_FORMAT
#include <media/msm_media_info.h>
#else
#define VENUS_Y_STRIDE(args...) 0
#define VENUS_Y_SCANLINES(args...) 0
#define VENUS_BUFFER_SIZE(args...) 0
#endif

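// Every ASTC block encodes to 128 bits (16 bytes), regardless of the
// block's pixel footprint, so sizes below scale by blocks * 16.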
#define ASTC_BLOCK_SIZE 16

using namespace gralloc;
using namespace qdutils;

ANDROID_SINGLETON_STATIC_INSTANCE(AdrenoMemInfo);

//Common functions
static bool canFallback(int usage, bool triedSystem)
{
    // Fallback to system heap when alloc fails unless
    // 1. Composition type is MDP
    // 2. Alloc from system heap was already tried
    // 3. The heap type is requested explicitly
    // 4. The heap type is protected
    // 5. The buffer is meant for external display only

    if(QCCompositionType::getInstance().getCompositionType() &
       COMPOSITION_TYPE_MDP)
        return false;
    if(triedSystem)
        return false;
    if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PROTECTED))
        return false;
    if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PRIVATE_EXTERNAL_ONLY))
        return false;
    //Return true by default
    return true;
}

/* The default policy is to return cached buffers unless the client explicitly
 * sets the PRIVATE_UNCACHED flag or indicates that the buffer will be rarely
 * read or written in software. Any combination with a _RARELY_ flag will be
 * treated as uncached. */
static bool useUncached(const int& usage) {
    if((usage & GRALLOC_USAGE_PRIVATE_UNCACHED) or
       ((usage & GRALLOC_USAGE_SW_WRITE_MASK) ==
        GRALLOC_USAGE_SW_WRITE_RARELY) or
       ((usage & GRALLOC_USAGE_SW_READ_MASK) ==
        GRALLOC_USAGE_SW_READ_RARELY))
        return true;

    return false;
}
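
// For example, GRALLOC_USAGE_HW_CAMERA_WRITE | GRALLOC_USAGE_SW_READ_RARELY
// yields an uncached buffer, while GRALLOC_USAGE_SW_READ_OFTEN keeps the
// default cached mapping.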

//-------------- AdrenoMemInfo-----------------------//
AdrenoMemInfo::AdrenoMemInfo()
{
    LINK_adreno_compute_aligned_width_and_height = NULL;
    LINK_adreno_compute_padding = NULL;
    LINK_adreno_isMacroTilingSupportedByGpu = NULL;
    LINK_adreno_compute_compressedfmt_aligned_width_and_height = NULL;
    LINK_adreno_get_gpu_pixel_alignment = NULL;

    libadreno_utils = ::dlopen("libadreno_utils.so", RTLD_NOW);
    if (libadreno_utils) {
        *(void **)&LINK_adreno_compute_aligned_width_and_height =
                ::dlsym(libadreno_utils, "compute_aligned_width_and_height");
        *(void **)&LINK_adreno_compute_padding =
                ::dlsym(libadreno_utils, "compute_surface_padding");
        *(void **)&LINK_adreno_isMacroTilingSupportedByGpu =
                ::dlsym(libadreno_utils, "isMacroTilingSupportedByGpu");
        *(void **)&LINK_adreno_compute_compressedfmt_aligned_width_and_height =
                ::dlsym(libadreno_utils,
                        "compute_compressedfmt_aligned_width_and_height");
        *(void **)&LINK_adreno_get_gpu_pixel_alignment =
                ::dlsym(libadreno_utils, "get_gpu_pixel_alignment");
    }
}

AdrenoMemInfo::~AdrenoMemInfo()
{
    if (libadreno_utils) {
        ::dlclose(libadreno_utils);
    }
}

int AdrenoMemInfo::isMacroTilingSupportedByGPU()
{
    if (libadreno_utils && LINK_adreno_isMacroTilingSupportedByGpu) {
        return LINK_adreno_isMacroTilingSupportedByGpu();
    }
    return 0;
}


void AdrenoMemInfo::getAlignedWidthAndHeight(int width, int height, int format,
                            int tile_enabled, int& aligned_w, int& aligned_h)
{
    aligned_w = width;
    aligned_h = height;
    // Currently surface padding is only computed for RGB* surfaces.
    if (format <= HAL_PIXEL_FORMAT_BGRA_8888) {
        aligned_w = ALIGN(width, 32);
        aligned_h = ALIGN(height, 32);
        // Don't add any additional padding if debug.gralloc.map_fb_memory
        // is enabled
        char property[PROPERTY_VALUE_MAX];
        if((property_get("debug.gralloc.map_fb_memory", property, NULL) > 0) &&
           (!strncmp(property, "1", PROPERTY_VALUE_MAX) ||
            (!strncasecmp(property, "true", PROPERTY_VALUE_MAX)))) {
            return;
        }

        int bpp = 4;
        switch(format)
        {
            case HAL_PIXEL_FORMAT_RGB_888:
                bpp = 3;
                break;
            case HAL_PIXEL_FORMAT_RGB_565:
                bpp = 2;
                break;
            default: break;
        }
        if (libadreno_utils) {
            int raster_mode       = 0;   // Adreno unknown raster mode.
            int padding_threshold = 512; // Threshold for padding surfaces.
            // The call below computes the aligned width and height
            // based on the linear or macro tile mode selected.
            if(LINK_adreno_compute_aligned_width_and_height) {
                LINK_adreno_compute_aligned_width_and_height(width,
                                     height, bpp, tile_enabled,
                                     raster_mode, padding_threshold,
                                     &aligned_w, &aligned_h);

            } else if(LINK_adreno_compute_padding) {
                int surface_tile_height = 1; // Linear surface
                aligned_w = LINK_adreno_compute_padding(width, bpp,
                                     surface_tile_height, raster_mode,
                                     padding_threshold);
                ALOGW("%s: Warning!! Old GFX API is used to calculate stride",
                      __FUNCTION__);
            } else {
                ALOGW("%s: Warning!! Symbols compute_surface_padding and "
                      "compute_aligned_width_and_height not found",
                      __FUNCTION__);
            }
        }
    } else {
        int alignment = 32;
        switch (format)
        {
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_SP:
                if (LINK_adreno_get_gpu_pixel_alignment) {
                    alignment = LINK_adreno_get_gpu_pixel_alignment();
                }
                aligned_w = ALIGN(width, alignment);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:
                aligned_w = ALIGN(width, alignment);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:
                aligned_w = ALIGN(width, 128);
                break;
            case HAL_PIXEL_FORMAT_YV12:
            case HAL_PIXEL_FORMAT_YCbCr_422_SP:
            case HAL_PIXEL_FORMAT_YCrCb_422_SP:
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
            case HAL_PIXEL_FORMAT_YCrCb_422_I:
                aligned_w = ALIGN(width, 16);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
            case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
                aligned_w = VENUS_Y_STRIDE(COLOR_FMT_NV12, width);
                aligned_h = VENUS_Y_SCANLINES(COLOR_FMT_NV12, height);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                break;
            case HAL_PIXEL_FORMAT_NV21_ZSL:
                aligned_w = ALIGN(width, 64);
                aligned_h = ALIGN(height, 64);
                break;
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_4x4_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_5x4_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_5x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_6x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_6x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x8_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x8_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x10_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_12x10_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_12x12_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR:
                if(LINK_adreno_compute_compressedfmt_aligned_width_and_height) {
                    int bytesPerPixel = 0;
                    int raster_mode       = 0;   // Adreno unknown raster mode.
                    int padding_threshold = 512; // Threshold for padding
                                                 // surfaces.

                    LINK_adreno_compute_compressedfmt_aligned_width_and_height(
                        width, height, format, 0, raster_mode,
                        padding_threshold, &aligned_w, &aligned_h,
                        &bytesPerPixel);

                } else {
                    ALOGW("%s: Warning!! Symbol"
                          " compute_compressedfmt_aligned_width_and_height"
                          " not found", __FUNCTION__);
                }
                break;
            default: break;
        }
    }
}
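
// Illustrative example: a 1280x720 HAL_PIXEL_FORMAT_NV21_ZSL surface is
// padded to aligned_w = ALIGN(1280, 64) = 1280 and
// aligned_h = ALIGN(720, 64) = 768.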

//-------------- IAllocController-----------------------//
IAllocController* IAllocController::sController = NULL;
IAllocController* IAllocController::getInstance(void)
{
    if(sController == NULL) {
        sController = new IonController();
    }
    return sController;
}


//-------------- IonController-----------------------//
IonController::IonController()
{
    allocateIonMem();
}

void IonController::allocateIonMem()
{
    mIonAlloc = new IonAlloc();
}

int IonController::allocate(alloc_data& data, int usage)
{
    int ionFlags = 0;
    int ret;

    data.uncached = useUncached(usage);
    data.allocType = 0;

    if(usage & GRALLOC_USAGE_PRIVATE_UI_CONTIG_HEAP)
        ionFlags |= ION_HEAP(ION_SF_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP)
        ionFlags |= ION_HEAP(ION_SYSTEM_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_IOMMU_HEAP)
        ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);

    if(usage & GRALLOC_USAGE_PROTECTED) {
        if (usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
            ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID);
            ionFlags |= ION_SECURE;
        } else {
            // For targets/OEMs which do not need HW level protection,
            // do not set the ION secure flag & MM heap. Fall back to the
            // IOMMU heap.
            ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
        }
    } else if(usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
        // MM heap is exclusively a secure heap.
        // If it is used for non-secure cases, fall back to the IOMMU heap.
        ALOGW("GRALLOC_USAGE_PRIVATE_MM_HEAP cannot be used as an insecure "
              "heap! Trying to use IOMMU instead!!");
        ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
    }

    if(usage & GRALLOC_USAGE_PRIVATE_CAMERA_HEAP)
        ionFlags |= ION_HEAP(ION_CAMERA_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_ADSP_HEAP)
        ionFlags |= ION_HEAP(ION_ADSP_HEAP_ID);

    if(ionFlags & ION_SECURE)
        data.allocType |= private_handle_t::PRIV_FLAGS_SECURE_BUFFER;

    // If no flags are set, default to SF + IOMMU heaps so that bypass can
    // work. We can fall back to the system heap if we run out.
    if(!ionFlags)
        ionFlags = ION_HEAP(ION_SF_HEAP_ID) | ION_HEAP(ION_IOMMU_HEAP_ID);

    data.flags = ionFlags;
    ret = mIonAlloc->alloc_buffer(data);

    // Fallback. The system heap is OR'd into ionFlags as ION_HEAP(...)
    // above, so test the same mask here, not the raw heap ID.
    if(ret < 0 && canFallback(usage,
                              (ionFlags & ION_HEAP(ION_SYSTEM_HEAP_ID))))
    {
        ALOGW("Falling back to system heap");
        data.flags = ION_HEAP(ION_SYSTEM_HEAP_ID);
        ret = mIonAlloc->alloc_buffer(data);
    }

    if(ret >= 0) {
        data.allocType |= private_handle_t::PRIV_FLAGS_USES_ION;
    }

    return ret;
}
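
// Illustrative example: usage = GRALLOC_USAGE_PROTECTED |
// GRALLOC_USAGE_PRIVATE_MM_HEAP selects the secure MM heap
// (ION_HEAP(ION_CP_MM_HEAP_ID) | ION_SECURE) and tags the allocation with
// PRIV_FLAGS_SECURE_BUFFER; canFallback() then rules out the system-heap
// retry for protected usage.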

IMemAlloc* IonController::getAllocator(int flags)
{
    IMemAlloc* memalloc = NULL;
    if (flags & private_handle_t::PRIV_FLAGS_USES_ION) {
        memalloc = mIonAlloc;
    } else {
        ALOGE("%s: Invalid flags passed: 0x%x", __FUNCTION__, flags);
    }

    return memalloc;
}

bool isMacroTileEnabled(int format, int usage)
{
    bool tileEnabled = false;

    // Check whether the GPU & MDSS support the macro tiling feature
    if(AdrenoMemInfo::getInstance().isMacroTilingSupportedByGPU() &&
       qdutils::MDPVersion::getInstance().supportsMacroTile())
    {
        // check the format
        switch(format)
        {
            case HAL_PIXEL_FORMAT_RGBA_8888:
            case HAL_PIXEL_FORMAT_RGBX_8888:
            case HAL_PIXEL_FORMAT_BGRA_8888:
            case HAL_PIXEL_FORMAT_RGB_565:
                {
                    tileEnabled = true;
                    // check the usage flags
                    if (usage & (GRALLOC_USAGE_SW_READ_MASK |
                                GRALLOC_USAGE_SW_WRITE_MASK)) {
                        // Application intends to use CPU for rendering
                        tileEnabled = false;
                    }
                    break;
                }
            default:
                break;
        }
    }
    return tileEnabled;
}
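
// For example, an RGBA_8888 surface requested with only HW usage bits
// (e.g. GRALLOC_USAGE_HW_RENDER) is macro tiled when both capability checks
// pass, while adding GRALLOC_USAGE_SW_READ_OFTEN forces a linear layout.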

// helper function
unsigned int getSize(int format, int width, int height, const int alignedw,
        const int alignedh) {
    unsigned int size = 0;

    switch (format) {
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_BGRA_8888:
            size = alignedw * alignedh * 4;
            break;
        case HAL_PIXEL_FORMAT_RGB_888:
            size = alignedw * alignedh * 3;
            break;
        case HAL_PIXEL_FORMAT_RGB_565:
        case HAL_PIXEL_FORMAT_RAW16:
            size = alignedw * alignedh * 2;
            break;

        // adreno formats
        case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:  // NV21
            size  = ALIGN(alignedw * alignedh, 4096);
            size += ALIGN(2 * ALIGN(width/2, 32) * ALIGN(height/2, 32), 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:   // NV12
            // The chroma plane is subsampled,
            // but the pitch in bytes is unchanged.
            // The GPU needs 4K alignment, but the video decoder needs 8K.
            size  = ALIGN(alignedw * alignedh, 8192);
            size += ALIGN(alignedw * ALIGN(height/2, 32), 8192);
            break;
        case HAL_PIXEL_FORMAT_YV12:
            if ((width & 1) || (height & 1)) {
                ALOGE("w or h is odd for the YV12 format");
                return 0;
            }
            size = alignedw * alignedh +
                    (ALIGN(alignedw/2, 16) * (alignedh/2)) * 2;
            size = ALIGN(size, (unsigned int)4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            size = ALIGN((alignedw * alignedh) + (alignedw * alignedh)/2 + 1,
                         4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
        case HAL_PIXEL_FORMAT_YCrCb_422_SP:
        case HAL_PIXEL_FORMAT_YCbCr_422_I:
        case HAL_PIXEL_FORMAT_YCrCb_422_I:
            if(width & 1) {
                ALOGE("width is odd for the YUV422_SP format");
                return 0;
            }
            size = ALIGN(alignedw * alignedh * 2, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
            size = VENUS_BUFFER_SIZE(COLOR_FMT_NV12, width, height);
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if(height != 1) {
                ALOGE("%s: Buffers with format HAL_PIXEL_FORMAT_BLOB "
                      "must have height==1", __FUNCTION__);
                return 0;
            }
            size = width;
            break;
        case HAL_PIXEL_FORMAT_NV21_ZSL:
            size = ALIGN((alignedw * alignedh) + (alignedw * alignedh)/2,
                         4096);
            break;
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_4x4_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_5x4_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_5x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_6x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_6x6_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x6_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x8_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x6_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x8_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x10_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_12x10_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_12x12_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR:
        case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR:
            size = alignedw * alignedh * ASTC_BLOCK_SIZE;
            break;
        default:
            ALOGE("%s: Unrecognized pixel format: 0x%x", __FUNCTION__, format);
            return 0;
    }
    return size;
}
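
// Worked example, assuming the default 32-pixel RGB alignment and no extra
// Adreno padding: a 1920x1080 HAL_PIXEL_FORMAT_RGBA_8888 buffer aligns to
// 1920x1088, so size = 1920 * 1088 * 4 = 8355840 bytes.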

unsigned int getBufferSizeAndDimensions(int width, int height, int format,
        int& alignedw, int &alignedh)
{
    unsigned int size;

    AdrenoMemInfo::getInstance().getAlignedWidthAndHeight(width,
            height,
            format,
            false,
            alignedw,
            alignedh);

    size = getSize(format, width, height, alignedw, alignedh);

    return size;
}


unsigned int getBufferSizeAndDimensions(int width, int height, int format,
        int usage, int& alignedw, int &alignedh)
{
    unsigned int size;
    int tileEnabled = isMacroTileEnabled(format, usage);

    AdrenoMemInfo::getInstance().getAlignedWidthAndHeight(width,
            height,
            format,
            tileEnabled,
            alignedw,
            alignedh);

    size = getSize(format, width, height, alignedw, alignedh);

    return size;
}


void getBufferAttributes(int width, int height, int format, int usage,
        int& alignedw, int &alignedh, int& tileEnabled, unsigned int& size)
{
    tileEnabled = isMacroTileEnabled(format, usage);

    AdrenoMemInfo::getInstance().getAlignedWidthAndHeight(width,
            height,
            format,
            tileEnabled,
            alignedw,
            alignedh);
    size = getSize(format, width, height, alignedw, alignedh);
}

int getYUVPlaneInfo(private_handle_t* hnd, struct android_ycbcr* ycbcr)
{
    int err = 0;
    unsigned int ystride, cstride;
    memset(ycbcr->reserved, 0, sizeof(ycbcr->reserved));

    // Get the chroma offsets from the handle width/height. We take advantage
    // of the fact that the width _is_ the stride
    switch (hnd->format) {
        //Semiplanar
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE: //Same as YCbCr_420_SP_VENUS
            ystride = cstride = hnd->width;
            ycbcr->y  = (void*)hnd->base;
            ycbcr->cb = (void*)(hnd->base + ystride * hnd->height);
            ycbcr->cr = (void*)(hnd->base + ystride * hnd->height + 1);
            ycbcr->ystride = ystride;
            ycbcr->cstride = cstride;
            ycbcr->chroma_step = 2;
            break;

        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_422_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:
        case HAL_PIXEL_FORMAT_NV21_ZSL:
            ystride = cstride = hnd->width;
            ycbcr->y  = (void*)hnd->base;
            ycbcr->cr = (void*)(hnd->base + ystride * hnd->height);
            ycbcr->cb = (void*)(hnd->base + ystride * hnd->height + 1);
            ycbcr->ystride = ystride;
            ycbcr->cstride = cstride;
            ycbcr->chroma_step = 2;
            break;

        //Planar
        case HAL_PIXEL_FORMAT_YV12:
            ystride = hnd->width;
            cstride = ALIGN(hnd->width/2, 16);
            ycbcr->y  = (void*)hnd->base;
            ycbcr->cr = (void*)(hnd->base + ystride * hnd->height);
            ycbcr->cb = (void*)(hnd->base + ystride * hnd->height +
                    cstride * hnd->height/2);
            ycbcr->ystride = ystride;
            ycbcr->cstride = cstride;
            ycbcr->chroma_step = 1;
            break;

        //Unsupported formats
        case HAL_PIXEL_FORMAT_YCbCr_422_I:
        case HAL_PIXEL_FORMAT_YCrCb_422_I:
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:
        default:
            ALOGD("%s: Invalid format passed: 0x%x", __FUNCTION__,
                  hnd->format);
            err = -EINVAL;
    }
    return err;
}
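
// Illustrative example: for a 64x64 HAL_PIXEL_FORMAT_YV12 handle, ystride is
// 64 and cstride is ALIGN(32, 16) = 32, so cr starts at base + 64*64 and cb
// at base + 64*64 + 32*32, with chroma_step = 1.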


// Allocate buffer from width, height and format into a
// private_handle_t. It is the responsibility of the caller
// to free the buffer using the free_buffer function.
int alloc_buffer(private_handle_t **pHnd, int w, int h, int format, int usage)
{
    alloc_data data;
    int alignedw, alignedh;
    gralloc::IAllocController* sAlloc =
        gralloc::IAllocController::getInstance();
    data.base = 0;
    data.fd = -1;
    data.offset = 0;
    data.size = getBufferSizeAndDimensions(w, h, format, usage, alignedw,
                                           alignedh);

    data.align = getpagesize();
    data.uncached = useUncached(usage);
    int allocFlags = usage;

    int err = sAlloc->allocate(data, allocFlags);
    if (0 != err) {
        ALOGE("%s: allocate failed", __FUNCTION__);
        return -ENOMEM;
    }

    private_handle_t* hnd = new private_handle_t(data.fd, data.size,
                                                 data.allocType, 0, format,
                                                 alignedw, alignedh);
    hnd->base = (uint64_t) data.base;
    hnd->offset = data.offset;
    hnd->gpuaddr = 0;
    *pHnd = hnd;
    return 0;
}

void free_buffer(private_handle_t *hnd)
{
    gralloc::IAllocController* sAlloc =
        gralloc::IAllocController::getInstance();
    if (hnd && hnd->fd > 0) {
        IMemAlloc* memalloc = sAlloc->getAllocator(hnd->flags);
        // getAllocator() returns NULL for unrecognized flags; guard the call.
        if (memalloc) {
            memalloc->free_buffer((void*)hnd->base, hnd->size, hnd->offset,
                                  hnd->fd);
        }
    }
    if(hnd)
        delete hnd;
}
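// Illustrative usage of the pair above (a sketch, not part of this module's
// exported API): allocate a page-aligned RGBA buffer and release it.
//
//   private_handle_t *hnd = NULL;
//   if (alloc_buffer(&hnd, 1280, 720, HAL_PIXEL_FORMAT_RGBA_8888,
//                    GRALLOC_USAGE_HW_RENDER) == 0) {
//       // ... use hnd->fd / hnd->base ...
//       free_buffer(hnd);
//   }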