1 /*
2 * Copyright (c) 2011-2014, The Linux Foundation. All rights reserved.
3
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions are
6 * met:
7 * * Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * * Redistributions in binary form must reproduce the above
10 * copyright notice, this list of conditions and the following
11 * disclaimer in the documentation and/or other materials provided
12 * with the distribution.
13 * * Neither the name of The Linux Foundation nor the names of its
14 * contributors may be used to endorse or promote products derived
15 * from this software without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
18 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
19 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
20 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
21 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
22 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
23 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
24 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
25 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
26 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
27 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 */
29
30 #include <cutils/log.h>
31 #include <fcntl.h>
32 #include <dlfcn.h>
33 #include "gralloc_priv.h"
34 #include "alloc_controller.h"
35 #include "memalloc.h"
36 #include "ionalloc.h"
37 #include "gr.h"
38 #include "comptype.h"
39 #include "mdp_version.h"
40
41 #ifdef VENUS_COLOR_FORMAT
42 #include <media/msm_media_info.h>
43 #else
44 #define VENUS_Y_STRIDE(args...) 0
45 #define VENUS_Y_SCANLINES(args...) 0
46 #define VENUS_BUFFER_SIZE(args...) 0
47 #endif
48
49 #define ASTC_BLOCK_SIZE 16
50
51 using namespace gralloc;
52 using namespace qdutils;
53
54 ANDROID_SINGLETON_STATIC_INSTANCE(AdrenoMemInfo);
55
56 //Common functions
canFallback(int usage,bool triedSystem)57 static bool canFallback(int usage, bool triedSystem)
58 {
59 // Fallback to system heap when alloc fails unless
60 // 1. Composition type is MDP
61 // 2. Alloc from system heap was already tried
62 // 3. The heap type is requsted explicitly
63 // 4. The heap type is protected
64 // 5. The buffer is meant for external display only
65
66 if(QCCompositionType::getInstance().getCompositionType() &
67 COMPOSITION_TYPE_MDP)
68 return false;
69 if(triedSystem)
70 return false;
71 if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PROTECTED))
72 return false;
73 if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PRIVATE_EXTERNAL_ONLY))
74 return false;
75 //Return true by default
76 return true;
77 }
78
useUncached(int usage)79 static bool useUncached(int usage)
80 {
81 if (usage & GRALLOC_USAGE_PRIVATE_UNCACHED)
82 return true;
83 if(((usage & GRALLOC_USAGE_SW_WRITE_MASK) == GRALLOC_USAGE_SW_WRITE_RARELY)
84 ||((usage & GRALLOC_USAGE_SW_READ_MASK) == GRALLOC_USAGE_SW_READ_RARELY))
85 return true;
86 return false;
87 }
88
89 //-------------- AdrenoMemInfo-----------------------//
AdrenoMemInfo()90 AdrenoMemInfo::AdrenoMemInfo()
91 {
92 LINK_adreno_compute_aligned_width_and_height = NULL;
93 LINK_adreno_compute_padding = NULL;
94 LINK_adreno_isMacroTilingSupportedByGpu = NULL;
95 LINK_adreno_compute_compressedfmt_aligned_width_and_height = NULL;
96
97 libadreno_utils = ::dlopen("libadreno_utils.so", RTLD_NOW);
98 if (libadreno_utils) {
99 *(void **)&LINK_adreno_compute_aligned_width_and_height =
100 ::dlsym(libadreno_utils, "compute_aligned_width_and_height");
101 *(void **)&LINK_adreno_compute_padding =
102 ::dlsym(libadreno_utils, "compute_surface_padding");
103 *(void **)&LINK_adreno_isMacroTilingSupportedByGpu =
104 ::dlsym(libadreno_utils, "isMacroTilingSupportedByGpu");
105 *(void **)&LINK_adreno_compute_compressedfmt_aligned_width_and_height =
106 ::dlsym(libadreno_utils,
107 "compute_compressedfmt_aligned_width_and_height");
108 }
109 }
110
~AdrenoMemInfo()111 AdrenoMemInfo::~AdrenoMemInfo()
112 {
113 if (libadreno_utils) {
114 ::dlclose(libadreno_utils);
115 }
116 }
117
isMacroTilingSupportedByGPU()118 int AdrenoMemInfo::isMacroTilingSupportedByGPU()
119 {
120 if ((libadreno_utils)) {
121 if(LINK_adreno_isMacroTilingSupportedByGpu) {
122 return LINK_adreno_isMacroTilingSupportedByGpu();
123 }
124 }
125 return 0;
126 }
127
128
// Computes the hardware-aligned width/height for a buffer of |format|.
// RGB formats are padded via the Adreno helper library when loaded;
// other formats use fixed per-format alignment rules. aligned_w and
// aligned_h default to the unpadded dimensions.
void AdrenoMemInfo::getAlignedWidthAndHeight(int width, int height, int format,
                            int tile_enabled, int& aligned_w, int& aligned_h)
{
    aligned_w = width;
    aligned_h = height;
    // Currently surface padding is only computed for RGB* surfaces.
    if (format <= HAL_PIXEL_FORMAT_BGRA_8888) {
        aligned_w = ALIGN(width, 32);
        aligned_h = ALIGN(height, 32);
        // Don't add any additional padding if debug.gralloc.map_fb_memory
        // is enabled
        char property[PROPERTY_VALUE_MAX];
        if((property_get("debug.gralloc.map_fb_memory", property, NULL) > 0) &&
           (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
           (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
            return;
        }

        // Bytes per pixel of the RGB format; consumed by the GPU padding
        // helpers below.
        int bpp = 4;
        switch(format)
        {
            case HAL_PIXEL_FORMAT_RGB_888:
                bpp = 3;
                break;
            case HAL_PIXEL_FORMAT_RGB_565:
                bpp = 2;
                break;
            default: break;
        }
        if (libadreno_utils) {
            int raster_mode = 0; // Adreno unknown raster mode.
            int padding_threshold = 512; // Threshold for padding surfaces.
            // the function below computes aligned width and aligned height
            // based on linear or macro tile mode selected.
            if(LINK_adreno_compute_aligned_width_and_height) {
                LINK_adreno_compute_aligned_width_and_height(width,
                                     height, bpp, tile_enabled,
                                     raster_mode, padding_threshold,
                                     &aligned_w, &aligned_h);

            } else if(LINK_adreno_compute_padding) {
                // Older GFX API: can only compute the stride; height
                // keeps the 32-aligned value from above.
                int surface_tile_height = 1; // Linear surface
                aligned_w = LINK_adreno_compute_padding(width, bpp,
                                     surface_tile_height, raster_mode,
                                     padding_threshold);
                ALOGW("%s: Warning!! Old GFX API is used to calculate stride",
                                                            __FUNCTION__);
            } else {
                ALOGW("%s: Warning!! Symbols compute_surface_padding and " \
                    "compute_aligned_width_and_height not found", __FUNCTION__);
            }
        }
    } else {
        // Non-RGB formats: fixed alignment rules per format family.
        switch (format)
        {
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:
                aligned_w = ALIGN(width, 32);
                break;
            case HAL_PIXEL_FORMAT_RAW16:
                aligned_w = ALIGN(width, 16);
                break;
            case HAL_PIXEL_FORMAT_RAW10:
                // Packed 10-bit RAW: the stride is computed in bytes
                // (10 bits per pixel) before aligning.
                aligned_w = ALIGN(width * 10 /8, 16);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:
                aligned_w = ALIGN(width, 128);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP:
            case HAL_PIXEL_FORMAT_YV12:
            case HAL_PIXEL_FORMAT_YCbCr_422_SP:
            case HAL_PIXEL_FORMAT_YCrCb_422_SP:
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
            case HAL_PIXEL_FORMAT_YCrCb_422_I:
                aligned_w = ALIGN(width, 16);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
            case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
                // Stride/scanlines are dictated by the Venus video HW.
                // Note: the VENUS_* macros expand to 0 when
                // VENUS_COLOR_FORMAT is not compiled in (see top of file).
                aligned_w = VENUS_Y_STRIDE(COLOR_FMT_NV12, width);
                aligned_h = VENUS_Y_SCANLINES(COLOR_FMT_NV12, height);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
                // Opaque byte streams: no alignment applied.
                break;
            case HAL_PIXEL_FORMAT_NV21_ZSL:
                aligned_w = ALIGN(width, 64);
                aligned_h = ALIGN(height, 64);
                break;
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_4x4_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_5x4_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_5x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_6x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_6x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x8_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x8_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x10_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_12x10_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_12x12_KHR:
            case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR:
                // ASTC: alignment comes from the GPU helper; if the
                // symbol is missing the unpadded dimensions are kept.
                if(LINK_adreno_compute_compressedfmt_aligned_width_and_height) {
                    int bytesPerPixel = 0;
                    int raster_mode = 0; //Adreno unknown raster mode.
                    int padding_threshold = 512; //Threshold for padding
                                                 //surfaces.

                    LINK_adreno_compute_compressedfmt_aligned_width_and_height(
                        width, height, format, 0,raster_mode, padding_threshold,
                        &aligned_w, &aligned_h, &bytesPerPixel);

                } else {
                    ALOGW("%s: Warning!! Symbols" \
                          " compute_compressedfmt_aligned_width_and_height" \
                          " not found", __FUNCTION__);
                }
                break;
            default: break;
        }
    }
}
265
266 //-------------- IAllocController-----------------------//
267 IAllocController* IAllocController::sController = NULL;
getInstance(void)268 IAllocController* IAllocController::getInstance(void)
269 {
270 if(sController == NULL) {
271 sController = new IonController();
272 }
273 return sController;
274 }
275
276
277 //-------------- IonController-----------------------//
IonController()278 IonController::IonController()
279 {
280 mIonAlloc = new IonAlloc();
281 }
282
allocate(alloc_data & data,int usage)283 int IonController::allocate(alloc_data& data, int usage)
284 {
285 int ionFlags = 0;
286 int ret;
287
288 data.uncached = useUncached(usage);
289 data.allocType = 0;
290
291 if(usage & GRALLOC_USAGE_PRIVATE_UI_CONTIG_HEAP)
292 ionFlags |= ION_HEAP(ION_SF_HEAP_ID);
293
294 if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP)
295 ionFlags |= ION_HEAP(ION_SYSTEM_HEAP_ID);
296
297 if(usage & GRALLOC_USAGE_PRIVATE_IOMMU_HEAP)
298 ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
299
300 if(usage & GRALLOC_USAGE_PROTECTED) {
301 if (usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
302 ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID);
303 ionFlags |= ION_SECURE;
304 } else {
305 // for targets/OEMs which do not need HW level protection
306 // do not set ion secure flag & MM heap. Fallback to IOMMU heap.
307 ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
308 }
309 } else if(usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
310 //MM Heap is exclusively a secure heap.
311 //If it is used for non secure cases, fallback to IOMMU heap
312 ALOGW("GRALLOC_USAGE_PRIVATE_MM_HEAP \
313 cannot be used as an insecure heap!\
314 trying to use IOMMU instead !!");
315 ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
316 }
317
318 if(usage & GRALLOC_USAGE_PRIVATE_CAMERA_HEAP)
319 ionFlags |= ION_HEAP(ION_CAMERA_HEAP_ID);
320
321 if(usage & GRALLOC_USAGE_PRIVATE_ADSP_HEAP)
322 ionFlags |= ION_HEAP(ION_ADSP_HEAP_ID);
323
324 if(ionFlags & ION_SECURE)
325 data.allocType |= private_handle_t::PRIV_FLAGS_SECURE_BUFFER;
326
327 // if no flags are set, default to
328 // SF + IOMMU heaps, so that bypass can work
329 // we can fall back to system heap if
330 // we run out.
331 if(!ionFlags)
332 ionFlags = ION_HEAP(ION_SF_HEAP_ID) | ION_HEAP(ION_IOMMU_HEAP_ID);
333
334 data.flags = ionFlags;
335 ret = mIonAlloc->alloc_buffer(data);
336
337 // Fallback
338 if(ret < 0 && canFallback(usage,
339 (ionFlags & ION_SYSTEM_HEAP_ID)))
340 {
341 ALOGW("Falling back to system heap");
342 data.flags = ION_HEAP(ION_SYSTEM_HEAP_ID);
343 ret = mIonAlloc->alloc_buffer(data);
344 }
345
346 if(ret >= 0 ) {
347 data.allocType |= private_handle_t::PRIV_FLAGS_USES_ION;
348 }
349
350 return ret;
351 }
352
getAllocator(int flags)353 IMemAlloc* IonController::getAllocator(int flags)
354 {
355 IMemAlloc* memalloc = NULL;
356 if (flags & private_handle_t::PRIV_FLAGS_USES_ION) {
357 memalloc = mIonAlloc;
358 } else {
359 ALOGE("%s: Invalid flags passed: 0x%x", __FUNCTION__, flags);
360 }
361
362 return memalloc;
363 }
364
isMacroTileEnabled(int format,int usage)365 bool isMacroTileEnabled(int format, int usage)
366 {
367 bool tileEnabled = false;
368
369 // Check whether GPU & MDSS supports MacroTiling feature
370 if(AdrenoMemInfo::getInstance().isMacroTilingSupportedByGPU() &&
371 qdutils::MDPVersion::getInstance().supportsMacroTile())
372 {
373 // check the format
374 switch(format)
375 {
376 case HAL_PIXEL_FORMAT_RGBA_8888:
377 case HAL_PIXEL_FORMAT_RGBX_8888:
378 case HAL_PIXEL_FORMAT_BGRA_8888:
379 case HAL_PIXEL_FORMAT_RGB_565:
380 {
381 tileEnabled = true;
382 // check the usage flags
383 if (usage & (GRALLOC_USAGE_SW_READ_MASK |
384 GRALLOC_USAGE_SW_WRITE_MASK)) {
385 // Application intends to use CPU for rendering
386 tileEnabled = false;
387 }
388 break;
389 }
390 default:
391 break;
392 }
393 }
394 return tileEnabled;
395 }
396
397 // helper function
getSize(int format,int width,int height,const int alignedw,const int alignedh)398 size_t getSize(int format, int width, int height, const int alignedw,
399 const int alignedh) {
400 size_t size = 0;
401
402 switch (format) {
403 case HAL_PIXEL_FORMAT_RGBA_8888:
404 case HAL_PIXEL_FORMAT_RGBX_8888:
405 case HAL_PIXEL_FORMAT_BGRA_8888:
406 size = alignedw * alignedh * 4;
407 break;
408 case HAL_PIXEL_FORMAT_RGB_888:
409 size = alignedw * alignedh * 3;
410 break;
411 case HAL_PIXEL_FORMAT_RGB_565:
412 case HAL_PIXEL_FORMAT_RAW16:
413 size = alignedw * alignedh * 2;
414 break;
415 case HAL_PIXEL_FORMAT_RAW10:
416 size = ALIGN(alignedw * alignedh, 4096);
417 break;
418
419 // adreno formats
420 case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO: // NV21
421 size = ALIGN(alignedw*alignedh, 4096);
422 size += ALIGN(2 * ALIGN(width/2, 32) * ALIGN(height/2, 32), 4096);
423 break;
424 case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED: // NV12
425 // The chroma plane is subsampled,
426 // but the pitch in bytes is unchanged
427 // The GPU needs 4K alignment, but the video decoder needs 8K
428 size = ALIGN( alignedw * alignedh, 8192);
429 size += ALIGN( alignedw * ALIGN(height/2, 32), 8192);
430 break;
431 case HAL_PIXEL_FORMAT_YV12:
432 if ((format == HAL_PIXEL_FORMAT_YV12) && ((width&1) || (height&1))) {
433 ALOGE("w or h is odd for the YV12 format");
434 return 0;
435 }
436 size = alignedw*alignedh +
437 (ALIGN(alignedw/2, 16) * (alignedh/2))*2;
438 size = ALIGN(size, (size_t)4096);
439 break;
440 case HAL_PIXEL_FORMAT_YCbCr_420_SP:
441 case HAL_PIXEL_FORMAT_YCrCb_420_SP:
442 size = ALIGN((alignedw*alignedh) + (alignedw* alignedh)/2 + 1, 4096);
443 break;
444 case HAL_PIXEL_FORMAT_YCbCr_422_SP:
445 case HAL_PIXEL_FORMAT_YCrCb_422_SP:
446 case HAL_PIXEL_FORMAT_YCbCr_422_I:
447 case HAL_PIXEL_FORMAT_YCrCb_422_I:
448 if(width & 1) {
449 ALOGE("width is odd for the YUV422_SP format");
450 return 0;
451 }
452 size = ALIGN(alignedw * alignedh * 2, 4096);
453 break;
454 case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
455 case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
456 size = VENUS_BUFFER_SIZE(COLOR_FMT_NV12, width, height);
457 break;
458 case HAL_PIXEL_FORMAT_BLOB:
459 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
460 if(height != 1) {
461 ALOGE("%s: Buffers with RAW_OPAQUE/BLOB formats \
462 must have height==1 ", __FUNCTION__);
463 return 0;
464 }
465 size = width;
466 break;
467 case HAL_PIXEL_FORMAT_NV21_ZSL:
468 size = ALIGN((alignedw*alignedh) + (alignedw* alignedh)/2, 4096);
469 break;
470 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_4x4_KHR:
471 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_5x4_KHR:
472 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_5x5_KHR:
473 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_6x5_KHR:
474 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_6x6_KHR:
475 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x5_KHR:
476 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x6_KHR:
477 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_8x8_KHR:
478 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x5_KHR:
479 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x6_KHR:
480 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x8_KHR:
481 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_10x10_KHR:
482 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_12x10_KHR:
483 case HAL_PIXEL_FORMAT_COMPRESSED_RGBA_ASTC_12x12_KHR:
484 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR:
485 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR:
486 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR:
487 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR:
488 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR:
489 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR:
490 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR:
491 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR:
492 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR:
493 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR:
494 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR:
495 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR:
496 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR:
497 case HAL_PIXEL_FORMAT_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR:
498 size = alignedw * alignedh * ASTC_BLOCK_SIZE;
499 break;
500 default:
501 ALOGE("%s: unrecognized pixel format: 0x%x", __FUNCTION__, format);
502 return 0;
503 }
504 return size;
505 }
506
getBufferSizeAndDimensions(int width,int height,int format,int & alignedw,int & alignedh)507 size_t getBufferSizeAndDimensions(int width, int height, int format,
508 int& alignedw, int &alignedh)
509 {
510 size_t size;
511
512 AdrenoMemInfo::getInstance().getAlignedWidthAndHeight(width,
513 height,
514 format,
515 false,
516 alignedw,
517 alignedh);
518
519 size = getSize(format, width, height, alignedw, alignedh);
520
521 return size;
522 }
523
524
getBufferSizeAndDimensions(int width,int height,int format,int usage,int & alignedw,int & alignedh)525 size_t getBufferSizeAndDimensions(int width, int height, int format, int usage,
526 int& alignedw, int &alignedh)
527 {
528 size_t size;
529 int tileEnabled = isMacroTileEnabled(format, usage);
530
531 AdrenoMemInfo::getInstance().getAlignedWidthAndHeight(width,
532 height,
533 format,
534 tileEnabled,
535 alignedw,
536 alignedh);
537
538 size = getSize(format, width, height, alignedw, alignedh);
539
540 return size;
541 }
542
543
getBufferAttributes(int width,int height,int format,int usage,int & alignedw,int & alignedh,int & tileEnabled,size_t & size)544 void getBufferAttributes(int width, int height, int format, int usage,
545 int& alignedw, int &alignedh, int& tileEnabled, size_t& size)
546 {
547 tileEnabled = isMacroTileEnabled(format, usage);
548
549 AdrenoMemInfo::getInstance().getAlignedWidthAndHeight(width,
550 height,
551 format,
552 tileEnabled,
553 alignedw,
554 alignedh);
555 size = getSize(format, width, height, alignedw, alignedh);
556 }
557
// Fills |ycbcr| with plane pointers, strides and chroma step for the
// YUV buffer behind |hnd|. Returns 0 on success or -EINVAL for formats
// whose plane layout cannot be described here.
int getYUVPlaneInfo(private_handle_t* hnd, struct android_ycbcr* ycbcr)
{
    int err = 0;
    size_t ystride, cstride;
    memset(ycbcr->reserved, 0, sizeof(ycbcr->reserved));

    // Get the chroma offsets from the handle width/height. We take advantage
    // of the fact the width _is_ the stride
    switch (hnd->format) {
        //Semiplanar
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE: //Same as YCbCr_420_SP_VENUS
            // Interleaved CbCr plane directly follows Y; Cr is the byte
            // after Cb (chroma_step == 2).
            ystride = cstride = hnd->width;
            ycbcr->y  = (void*)hnd->base;
            ycbcr->cb = (void*)(hnd->base + ystride * hnd->height);
            ycbcr->cr = (void*)(hnd->base + ystride * hnd->height + 1);
            ycbcr->ystride = ystride;
            ycbcr->cstride = cstride;
            ycbcr->chroma_step = 2;
            break;

        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_422_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:
        case HAL_PIXEL_FORMAT_NV21_ZSL:
        case HAL_PIXEL_FORMAT_RAW16:
        case HAL_PIXEL_FORMAT_RAW10:
            // Same layout with Cr/Cb swapped (CrCb interleaved).
            // NOTE(review): RAW16/RAW10 are not YUV formats; presumably
            // callers only consume y/ystride for them — confirm against
            // callers before relying on cb/cr here.
            ystride = cstride = hnd->width;
            ycbcr->y  = (void*)hnd->base;
            ycbcr->cr = (void*)(hnd->base + ystride * hnd->height);
            ycbcr->cb = (void*)(hnd->base + ystride * hnd->height + 1);
            ycbcr->ystride = ystride;
            ycbcr->cstride = cstride;
            ycbcr->chroma_step = 2;
            break;

        //Planar
        case HAL_PIXEL_FORMAT_YV12:
            // Fully planar: Y plane, then Cr, then Cb; chroma stride is
            // 16-aligned half-width per the YV12 contract.
            ystride = hnd->width;
            cstride = ALIGN(hnd->width/2, 16);
            ycbcr->y  = (void*)hnd->base;
            ycbcr->cr = (void*)(hnd->base + ystride * hnd->height);
            ycbcr->cb = (void*)(hnd->base + ystride * hnd->height +
                                cstride * hnd->height/2);
            ycbcr->ystride = ystride;
            ycbcr->cstride = cstride;
            ycbcr->chroma_step = 1;

            break;
        //Unsupported formats
        case HAL_PIXEL_FORMAT_YCbCr_422_I:
        case HAL_PIXEL_FORMAT_YCrCb_422_I:
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:
        default:
            ALOGD("%s: Invalid format passed: 0x%x", __FUNCTION__,
                  hnd->format);
            err = -EINVAL;
    }
    return err;

}
621
622
623
624 // Allocate buffer from width, height and format into a
625 // private_handle_t. It is the responsibility of the caller
626 // to free the buffer using the free_buffer function
alloc_buffer(private_handle_t ** pHnd,int w,int h,int format,int usage)627 int alloc_buffer(private_handle_t **pHnd, int w, int h, int format, int usage)
628 {
629 alloc_data data;
630 int alignedw, alignedh;
631 gralloc::IAllocController* sAlloc =
632 gralloc::IAllocController::getInstance();
633 data.base = 0;
634 data.fd = -1;
635 data.offset = 0;
636 data.size = getBufferSizeAndDimensions(w, h, format, usage, alignedw,
637 alignedh);
638
639 data.align = getpagesize();
640 data.uncached = useUncached(usage);
641 int allocFlags = usage;
642
643 int err = sAlloc->allocate(data, allocFlags);
644 if (0 != err) {
645 ALOGE("%s: allocate failed", __FUNCTION__);
646 return -ENOMEM;
647 }
648
649 private_handle_t* hnd = new private_handle_t(data.fd, data.size,
650 data.allocType, 0, format,
651 alignedw, alignedh);
652 hnd->base = (uintptr_t) data.base;
653 hnd->offset = data.offset;
654 hnd->gpuaddr = 0;
655 *pHnd = hnd;
656 return 0;
657 }
658
free_buffer(private_handle_t * hnd)659 void free_buffer(private_handle_t *hnd)
660 {
661 gralloc::IAllocController* sAlloc =
662 gralloc::IAllocController::getInstance();
663 if (hnd && hnd->fd > 0) {
664 IMemAlloc* memalloc = sAlloc->getAllocator(hnd->flags);
665 memalloc->free_buffer((void*)hnd->base, hnd->size, hnd->offset, hnd->fd);
666 }
667 if(hnd)
668 delete hnd;
669
670 }
671