/*
 * Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.

 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of The Linux Foundation nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <cutils/log.h>
#include <fcntl.h>
#include <dlfcn.h>
#include "gralloc_priv.h"
#include "alloc_controller.h"
#include "memalloc.h"
#include "ionalloc.h"
#include "gr.h"
#include "comptype.h"

#ifdef VENUS_COLOR_FORMAT
#include <media/msm_media_info.h>
#else
#define VENUS_Y_STRIDE(args...) 0
#define VENUS_Y_SCANLINES(args...) 0
#define VENUS_BUFFER_SIZE(args...) 0
#endif

using namespace gralloc;
using namespace qdutils;

ANDROID_SINGLETON_STATIC_INSTANCE(AdrenoMemInfo);

//Common functions
static bool canFallback(int usage, bool triedSystem)
{
    // Fall back to the system heap when an alloc fails, unless
    // 1. Composition type is MDP
    // 2. Alloc from system heap was already tried
    // 3. The heap type is requested explicitly
    // 4. The heap type is protected
    // 5. The buffer is meant for external display only

    if(QCCompositionType::getInstance().getCompositionType() &
       COMPOSITION_TYPE_MDP)
        return false;
    if(triedSystem)
        return false;
    if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PROTECTED))
        return false;
    if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PRIVATE_EXTERNAL_ONLY))
        return false;
    //Return true by default
    return true;
}

static bool useUncached(int usage)
{
    // System heaps cannot be uncached
    if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP)
        return false;
    if (usage & GRALLOC_USAGE_PRIVATE_UNCACHED)
        return true;
    return false;
}

//-------------- AdrenoMemInfo-----------------------//
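// Lazily load libadreno_utils.so and resolve compute_surface_padding(),
// which (when present) supplies GPU-specific padding for RGB surfaces
// in getStride().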
AdrenoMemInfo::AdrenoMemInfo()
{
    libadreno_utils = ::dlopen("libadreno_utils.so", RTLD_NOW);
    if (libadreno_utils) {
        *(void **)&LINK_adreno_compute_padding = ::dlsym(libadreno_utils,
                                                 "compute_surface_padding");
    }
}

AdrenoMemInfo::~AdrenoMemInfo()
{
    if (libadreno_utils) {
        ::dlclose(libadreno_utils);
    }
}

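// Return the aligned stride for a buffer of the given width and format.
// RGB formats may receive additional GPU padding via compute_surface_padding()
// when libadreno_utils is available; other formats use fixed per-format
// alignments.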
int AdrenoMemInfo::getStride(int width, int format)
{
    int stride = ALIGN(width, 32);
    // Currently surface padding is only computed for RGB* surfaces.
    if (format <= HAL_PIXEL_FORMAT_sRGB_X_8888) {
        int bpp = 4;
        switch(format)
        {
            case HAL_PIXEL_FORMAT_RGB_888:
                bpp = 3;
                break;
            case HAL_PIXEL_FORMAT_RGB_565:
                bpp = 2;
                break;
            default: break;
        }
        if ((libadreno_utils) && (LINK_adreno_compute_padding)) {
            int surface_tile_height = 1;  // Linear surface
            int raster_mode = 0;          // Adreno unknown raster mode.
            int padding_threshold = 512;  // Threshold for padding surfaces.
            // The function below expects the width to be a multiple of
            // 32 pixels, hence we pass stride instead of width.
            stride = LINK_adreno_compute_padding(stride, bpp,
                                                 surface_tile_height,
                                                 raster_mode,
                                                 padding_threshold);
        }
    } else {
        switch (format)
        {
            case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                stride = ALIGN(width, 32);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:
                stride = ALIGN(width, 128);
                break;
            case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
            case HAL_PIXEL_FORMAT_YCbCr_420_SP:
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YV12:
            case HAL_PIXEL_FORMAT_YCbCr_422_SP:
            case HAL_PIXEL_FORMAT_YCrCb_422_SP:
                stride = ALIGN(width, 16);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
                stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, width);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                stride = width;
                break;
            default: break;
        }
    }
    return stride;
}

//-------------- IAllocController-----------------------//
IAllocController* IAllocController::sController = NULL;
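// Lazily construct the process-wide allocation controller. Ion is the only
// allocator backend provided by this module.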
IAllocController* IAllocController::getInstance(void)
{
    if(sController == NULL) {
        sController = new IonController();
    }
    return sController;
}


//-------------- IonController-----------------------//
IonController::IonController()
{
    mIonAlloc = new IonAlloc();
}

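// Map gralloc usage bits to ION heap flags, perform the allocation, and fall
// back to the system heap when canFallback() allows it. On success, the alloc
// type is tagged with PRIV_FLAGS_USES_ION.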
int IonController::allocate(alloc_data& data, int usage)
{
    int ionFlags = 0;
    int ret;

    data.uncached = useUncached(usage);
    data.allocType = 0;

    if(usage & GRALLOC_USAGE_PRIVATE_UI_CONTIG_HEAP)
        ionFlags |= ION_HEAP(ION_SF_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP)
        ionFlags |= ION_HEAP(ION_SYSTEM_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_IOMMU_HEAP)
        ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);

    //MM Heap is exclusively a secure heap.
    if(usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
        //XXX: Right now the MM heap is the only secure heap we have. When we
        //have other secure heaps, we can change this.
        if(usage & GRALLOC_USAGE_PROTECTED) {
            ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID);
            ionFlags |= ION_SECURE;
        }
        else {
            ALOGW("GRALLOC_USAGE_PRIVATE_MM_HEAP cannot be used as an "
                  "insecure heap, trying to use IOMMU instead!");
            ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
        }
    }

    if(usage & GRALLOC_USAGE_PRIVATE_CAMERA_HEAP)
        ionFlags |= ION_HEAP(ION_CAMERA_HEAP_ID);

    if(usage & GRALLOC_USAGE_PROTECTED)
        data.allocType |= private_handle_t::PRIV_FLAGS_SECURE_BUFFER;

    // If no flags are set, default to SF + IOMMU heaps so that bypass
    // can work; we can fall back to the system heap if we run out.
    if(!ionFlags)
        ionFlags = ION_HEAP(ION_SF_HEAP_ID) | ION_HEAP(ION_IOMMU_HEAP_ID);

    data.flags = ionFlags;
    ret = mIonAlloc->alloc_buffer(data);

    // Fall back to the system heap if the first allocation failed and the
    // system heap was not already tried.
    if(ret < 0 && canFallback(usage,
                              (ionFlags & ION_HEAP(ION_SYSTEM_HEAP_ID))))
    {
        ALOGW("Falling back to system heap");
        data.flags = ION_HEAP(ION_SYSTEM_HEAP_ID);
        ret = mIonAlloc->alloc_buffer(data);
    }

    if(ret >= 0) {
        data.allocType |= private_handle_t::PRIV_FLAGS_USES_ION;
    }

    return ret;
}

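// Return the allocator that backs a buffer with the given private_handle_t
// flags; only ION-backed buffers are supported here.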
IMemAlloc* IonController::getAllocator(int flags)
{
    IMemAlloc* memalloc = NULL;
    if (flags & private_handle_t::PRIV_FLAGS_USES_ION) {
        memalloc = mIonAlloc;
    } else {
        ALOGE("%s: Invalid flags passed: 0x%x", __FUNCTION__, flags);
    }

    return memalloc;
}

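// Compute the total allocation size in bytes for a buffer of the given
// dimensions and format, and return the aligned width and height through
// alignedw/alignedh. Returns -EINVAL for unsupported or invalid inputs.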
size_t getBufferSizeAndDimensions(int width, int height, int format,
                                  int& alignedw, int &alignedh)
{
    size_t size;

    alignedw = AdrenoMemInfo::getInstance().getStride(width, format);
    alignedh = ALIGN(height, 32);
    switch (format) {
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_BGRA_8888:
        case HAL_PIXEL_FORMAT_sRGB_A_8888:
        case HAL_PIXEL_FORMAT_sRGB_X_8888:
            size = alignedw * alignedh * 4;
            break;
        case HAL_PIXEL_FORMAT_RGB_888:
            size = alignedw * alignedh * 3;
            break;
        case HAL_PIXEL_FORMAT_RGB_565:
        case HAL_PIXEL_FORMAT_RAW_SENSOR:
            size = alignedw * alignedh * 2;
            break;

        // adreno formats
        case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:  // NV21
            size = ALIGN(alignedw*alignedh, 4096);
            size += ALIGN(2 * ALIGN(width/2, 32) * ALIGN(height/2, 32), 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:   // NV12
            // The chroma plane is subsampled,
            // but the pitch in bytes is unchanged.
            // The GPU needs 4K alignment, but the video decoder needs 8K.
            size = ALIGN(alignedw * alignedh, 8192);
            size += ALIGN(alignedw * ALIGN(height/2, 32), 8192);
            break;
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
        case HAL_PIXEL_FORMAT_YV12:
            if ((format == HAL_PIXEL_FORMAT_YV12) && ((width&1) || (height&1))) {
                ALOGE("w or h is odd for the YV12 format");
                return -EINVAL;
            }
            alignedh = height;
            if (HAL_PIXEL_FORMAT_NV12_ENCODEABLE == format) {
                // The encoder requires a 2K aligned chroma offset.
                size = ALIGN(alignedw*alignedh, 2048) +
                       (ALIGN(alignedw/2, 16) * (alignedh/2))*2;
            } else {
                size = alignedw*alignedh +
                       (ALIGN(alignedw/2, 16) * (alignedh/2))*2;
            }
            size = ALIGN(size, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            alignedh = height;
            size = ALIGN((alignedw*alignedh) + (alignedw*alignedh)/2, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
        case HAL_PIXEL_FORMAT_YCrCb_422_SP:
            if(width & 1) {
                ALOGE("width is odd for the YUV422_SP format");
                return -EINVAL;
            }
            alignedh = height;
            size = ALIGN(alignedw * alignedh * 2, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
            alignedh = VENUS_Y_SCANLINES(COLOR_FMT_NV12, height);
            size = VENUS_BUFFER_SIZE(COLOR_FMT_NV12, width, height);
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if(height != 1) {
                ALOGE("%s: Buffers with format HAL_PIXEL_FORMAT_BLOB "
                      "must have height==1", __FUNCTION__);
                return -EINVAL;
            }
            alignedh = height;
            alignedw = width;
            size = width;
            break;
        default:
            ALOGE("unrecognized pixel format: 0x%x", format);
            return -EINVAL;
    }

    return size;
}

// Allocate a buffer of the given width, height and format into a
// private_handle_t. The caller is responsible for releasing the buffer
// with free_buffer().
int alloc_buffer(private_handle_t **pHnd, int w, int h, int format, int usage)
{
    alloc_data data;
    int alignedw, alignedh;
    gralloc::IAllocController* sAlloc =
        gralloc::IAllocController::getInstance();
    data.base = 0;
    data.fd = -1;
    data.offset = 0;
    data.size = getBufferSizeAndDimensions(w, h, format, alignedw, alignedh);
    data.align = getpagesize();
    data.uncached = useUncached(usage);
    int allocFlags = usage;

    int err = sAlloc->allocate(data, allocFlags);
    if (0 != err) {
        ALOGE("%s: allocate failed", __FUNCTION__);
        return -ENOMEM;
    }

    private_handle_t* hnd = new private_handle_t(data.fd, data.size,
                                                 data.allocType, 0, format,
                                                 alignedw, alignedh);
    hnd->base = (int) data.base;
    hnd->offset = data.offset;
    hnd->gpuaddr = 0;
    *pHnd = hnd;
    return 0;
}

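// Release the memory backing a handle through the allocator that created it,
// then delete the handle. Safe to call with a NULL handle.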
void free_buffer(private_handle_t *hnd)
{
    gralloc::IAllocController* sAlloc =
        gralloc::IAllocController::getInstance();
    if (hnd && hnd->fd > 0) {
        IMemAlloc* memalloc = sAlloc->getAllocator(hnd->flags);
        memalloc->free_buffer((void*)hnd->base, hnd->size, hnd->offset,
                              hnd->fd);
    }
    if(hnd)
        delete hnd;
}

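// Illustrative usage sketch only (not part of this module); the format and
// usage bits below are arbitrary examples:
//
//     private_handle_t* hnd = NULL;
//     if (alloc_buffer(&hnd, 1280, 720, HAL_PIXEL_FORMAT_RGBA_8888,
//                      GRALLOC_USAGE_HW_TEXTURE) == 0) {
//         // ... use hnd->fd / hnd->base ...
//         free_buffer(hnd);
//     }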