/*
 * Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.

 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of The Linux Foundation nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <cutils/log.h>
#include <fcntl.h>
#include <dlfcn.h>
#include "gralloc_priv.h"
#include "alloc_controller.h"
#include "memalloc.h"
#include "ionalloc.h"
#include "gr.h"
#include "comptype.h"

#ifdef VENUS_COLOR_FORMAT
#include <media/msm_media_info.h>
#else
#define VENUS_Y_STRIDE(args...) 0
#define VENUS_Y_SCANLINES(args...) 0
#define VENUS_BUFFER_SIZE(args...) 0
#endif

using namespace gralloc;
using namespace qdutils;

ANDROID_SINGLETON_STATIC_INSTANCE(AdrenoMemInfo);

// Common functions
static bool canFallback(int usage, bool triedSystem)
{
    // Fall back to the system heap when the allocation fails, unless
    // 1. Composition type is MDP
    // 2. Alloc from system heap was already tried
    // 3. The heap type is requested explicitly
    // 4. The heap type is protected
    // 5. The buffer is meant for external display only

    if(QCCompositionType::getInstance().getCompositionType() &
       COMPOSITION_TYPE_MDP)
        return false;
    if(triedSystem)
        return false;
    if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PROTECTED))
        return false;
    if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PRIVATE_EXTERNAL_ONLY))
        return false;
    // Return true by default
    return true;
}

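// A buffer is allocated uncached if the client asked for it explicitly, or
// if the CPU will only rarely read or write it, so cache maintenance would
// outweigh the benefit of caching.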
static bool useUncached(int usage)
{
    if (usage & GRALLOC_USAGE_PRIVATE_UNCACHED)
        return true;
    if(((usage & GRALLOC_USAGE_SW_WRITE_MASK) == GRALLOC_USAGE_SW_WRITE_RARELY)
       ||((usage & GRALLOC_USAGE_SW_READ_MASK) == GRALLOC_USAGE_SW_READ_RARELY))
        return true;
    return false;
}

//-------------- AdrenoMemInfo-----------------------//
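// Load libadreno_utils.so once so getStride() can ask the GPU driver for the
// exact padding it expects on RGB surfaces.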
AdrenoMemInfo::AdrenoMemInfo()
{
    libadreno_utils = ::dlopen("libadreno_utils.so", RTLD_NOW);
    if (libadreno_utils) {
        *(void **)&LINK_adreno_compute_padding = ::dlsym(libadreno_utils,
                                           "compute_surface_padding");
    }
}

AdrenoMemInfo::~AdrenoMemInfo()
{
    if (libadreno_utils) {
        ::dlclose(libadreno_utils);
    }
}

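// Return the stride, in pixels, for a surface of the given width and format.
// RGB formats may get additional driver-specific padding from Adreno; YUV
// formats use fixed, format-specific alignments.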
int AdrenoMemInfo::getStride(int width, int format)
{
    int stride = ALIGN(width, 32);
    // Currently surface padding is only computed for RGB* surfaces.
    if (format <= HAL_PIXEL_FORMAT_sRGB_X_8888) {
        // Don't add any additional padding if debug.gralloc.map_fb_memory
        // is enabled
        char property[PROPERTY_VALUE_MAX];
        if((property_get("debug.gralloc.map_fb_memory", property, NULL) > 0) &&
           (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
           (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
              return stride;
        }

        int bpp = 4;
        switch(format)
        {
            case HAL_PIXEL_FORMAT_RGB_888:
                bpp = 3;
                break;
            case HAL_PIXEL_FORMAT_RGB_565:
                bpp = 2;
                break;
            default: break;
        }
        if ((libadreno_utils) && (LINK_adreno_compute_padding)) {
            int surface_tile_height = 1;   // Linear surface
            int raster_mode         = 0;   // Adreno unknown raster mode.
            int padding_threshold   = 512; // Threshold for padding surfaces.
            // The function below expects the width to be a multiple of
            // 32 pixels, hence we pass stride instead of width.
            stride = LINK_adreno_compute_padding(stride, bpp,
                                      surface_tile_height, raster_mode,
                                      padding_threshold);
        }
    } else {
        switch (format)
        {
            case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:
                stride = ALIGN(width, 32);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:
                stride = ALIGN(width, 128);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP:
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YV12:
            case HAL_PIXEL_FORMAT_YCbCr_422_SP:
            case HAL_PIXEL_FORMAT_YCrCb_422_SP:
            case HAL_PIXEL_FORMAT_RAW16:
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
                stride = ALIGN(width, 16);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
            case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
                stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, width);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                stride = width;
                break;
            case HAL_PIXEL_FORMAT_NV21_ZSL:
                stride = ALIGN(width, 64);
                break;
            default: break;
        }
    }
    return stride;
}

//-------------- IAllocController-----------------------//
IAllocController* IAllocController::sController = NULL;
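// Lazily create the one allocator controller; ION is the only backend
// implemented in this module.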
IAllocController* IAllocController::getInstance(void)
{
    if(sController == NULL) {
        sController = new IonController();
    }
    return sController;
}


//-------------- IonController-----------------------//
IonController::IonController()
{
    mIonAlloc = new IonAlloc();
}

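// Map gralloc usage bits to ION heap flags, allocate the buffer and, if the
// allocation fails, optionally retry from the system heap (see canFallback).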
int IonController::allocate(alloc_data& data, int usage)
{
    int ionFlags = 0;
    int ret;

    data.uncached = useUncached(usage);
    data.allocType = 0;

    if(usage & GRALLOC_USAGE_PRIVATE_UI_CONTIG_HEAP)
        ionFlags |= ION_HEAP(ION_SF_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP)
        ionFlags |= ION_HEAP(ION_SYSTEM_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_IOMMU_HEAP)
        ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);

    if(usage & GRALLOC_USAGE_PROTECTED) {
        if (usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
            ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID);
            ionFlags |= ION_SECURE;
        } else {
            // For targets/OEMs which do not need HW level protection,
            // do not set the ION secure flag or the MM heap. Fall back to
            // the IOMMU heap and use DRM for such buffers.
            data.allocType |= private_handle_t::PRIV_FLAGS_L3_SECURE_BUFFER;
            ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
        }
    } else if(usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
        // MM heap is exclusively a secure heap.
        // If it is requested for non-secure use, fall back to the IOMMU heap.
        ALOGW("GRALLOC_USAGE_PRIVATE_MM_HEAP cannot be used as an insecure "
              "heap, trying to use IOMMU instead!");
        ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
    }

    if(usage & GRALLOC_USAGE_PRIVATE_CAMERA_HEAP)
        ionFlags |= ION_HEAP(ION_CAMERA_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_ADSP_HEAP)
        ionFlags |= ION_HEAP(ION_ADSP_HEAP_ID);

    if(ionFlags & ION_SECURE)
         data.allocType |= private_handle_t::PRIV_FLAGS_SECURE_BUFFER;

    // If no flags are set, default to the SF + IOMMU heaps so that bypass
    // can work; we can fall back to the system heap if we run out.
    if(!ionFlags)
        ionFlags = ION_HEAP(ION_SF_HEAP_ID) | ION_HEAP(ION_IOMMU_HEAP_ID);

    data.flags = ionFlags;
    ret = mIonAlloc->alloc_buffer(data);

    // Fallback
    if(ret < 0 && canFallback(usage,
                              (ionFlags & ION_SYSTEM_HEAP_ID)))
    {
        ALOGW("Falling back to system heap");
        data.flags = ION_HEAP(ION_SYSTEM_HEAP_ID);
        ret = mIonAlloc->alloc_buffer(data);
    }

    if(ret >= 0) {
        data.allocType |= private_handle_t::PRIV_FLAGS_USES_ION;
    }

    return ret;
}

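// Return the allocator backing a buffer with the given private_handle_t
// flags. Only ION-backed buffers are expected here.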
IMemAlloc* IonController::getAllocator(int flags)
{
    IMemAlloc* memalloc = NULL;
    if (flags & private_handle_t::PRIV_FLAGS_USES_ION) {
        memalloc = mIonAlloc;
    } else {
        ALOGE("%s: Invalid flags passed: 0x%x", __FUNCTION__, flags);
    }

    return memalloc;
}

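// Compute the aligned width/height and the total allocation size, in bytes,
// for a buffer of the given dimensions and pixel format.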
size_t getBufferSizeAndDimensions(int width, int height, int format,
                                  int& alignedw, int &alignedh)
{
    size_t size;

    alignedw = AdrenoMemInfo::getInstance().getStride(width, format);
    alignedh = ALIGN(height, 32);
    switch (format) {
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_BGRA_8888:
        case HAL_PIXEL_FORMAT_sRGB_A_8888:
        case HAL_PIXEL_FORMAT_sRGB_X_8888:
            size = alignedw * alignedh * 4;
            break;
        case HAL_PIXEL_FORMAT_RGB_888:
            size = alignedw * alignedh * 3;
            break;
        case HAL_PIXEL_FORMAT_RGB_565:
        case HAL_PIXEL_FORMAT_RAW16:
        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            size = alignedw * alignedh * 2;
            break;

            // adreno formats
        case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:  // NV21
            size  = ALIGN(alignedw*alignedh, 4096);
            size += ALIGN(2 * ALIGN(width/2, 32) * ALIGN(height/2, 32), 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:   // NV12
            // The chroma plane is subsampled,
            // but the pitch in bytes is unchanged.
            // The GPU needs 4K alignment, but the video decoder needs 8K.
            size  = ALIGN( alignedw * alignedh, 8192);
            size += ALIGN( alignedw * ALIGN(height/2, 32), 8192);
            break;
        case HAL_PIXEL_FORMAT_YV12:
            if ((format == HAL_PIXEL_FORMAT_YV12) && ((width&1) || (height&1))) {
                ALOGE("w or h is odd for the YV12 format");
                return -EINVAL;
            }
            alignedh = height;
            size = alignedw*alignedh +
                    (ALIGN(alignedw/2, 16) * (alignedh/2))*2;
            size = ALIGN(size, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            alignedh = height;
            size = ALIGN((alignedw*alignedh) + (alignedw* alignedh)/2 + 1, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
        case HAL_PIXEL_FORMAT_YCrCb_422_SP:
            if(width & 1) {
                ALOGE("width is odd for the YUV422_SP format");
                return -EINVAL;
            }
            alignedh = height;
            size = ALIGN(alignedw * alignedh * 2, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
            alignedh = VENUS_Y_SCANLINES(COLOR_FMT_NV12, height);
            size = VENUS_BUFFER_SIZE(COLOR_FMT_NV12, width, height);
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if(height != 1) {
                ALOGE("%s: Buffers with format HAL_PIXEL_FORMAT_BLOB "
                      "must have height == 1", __FUNCTION__);
                return -EINVAL;
            }
            alignedh = height;
            alignedw = width;
            size = width;
            break;
        case HAL_PIXEL_FORMAT_NV21_ZSL:
            alignedh = ALIGN(height, 64);
            size = ALIGN((alignedw*alignedh) + (alignedw* alignedh)/2, 4096);
            break;
        default:
            ALOGE("unrecognized pixel format: 0x%x", format);
            return -EINVAL;
    }

    return size;
}

// Allocate a buffer from width, height and format into a
// private_handle_t. It is the responsibility of the caller
// to free the buffer using the free_buffer function.
int alloc_buffer(private_handle_t **pHnd, int w, int h, int format, int usage)
{
    alloc_data data;
    int alignedw, alignedh;
    gralloc::IAllocController* sAlloc =
        gralloc::IAllocController::getInstance();
    data.base = 0;
    data.fd = -1;
    data.offset = 0;
    data.size = getBufferSizeAndDimensions(w, h, format, alignedw, alignedh);
    data.align = getpagesize();
    data.uncached = useUncached(usage);
    int allocFlags = usage;

    int err = sAlloc->allocate(data, allocFlags);
    if (0 != err) {
        ALOGE("%s: allocate failed", __FUNCTION__);
        return -ENOMEM;
    }

    private_handle_t* hnd = new private_handle_t(data.fd, data.size,
                                                 data.allocType, 0, format,
                                                 alignedw, alignedh);
    hnd->base = (int) data.base;
    hnd->offset = data.offset;
    hnd->gpuaddr = 0;
    *pHnd = hnd;
    return 0;
}

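// Release a buffer obtained from alloc_buffer(): free the backing memory
// through the allocator that created it, then delete the handle.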
void free_buffer(private_handle_t *hnd)
{
    gralloc::IAllocController* sAlloc =
        gralloc::IAllocController::getInstance();
    if (hnd && hnd->fd > 0) {
        IMemAlloc* memalloc = sAlloc->getAllocator(hnd->flags);
        memalloc->free_buffer((void*)hnd->base, hnd->size, hnd->offset, hnd->fd);
    }
    if(hnd)
        delete hnd;
}
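
// Illustrative usage (not part of the original file): a minimal sketch of how
// a caller might pair alloc_buffer() and free_buffer(). The width, height,
// format and usage values below are arbitrary examples.
//
//   private_handle_t *hnd = NULL;
//   int err = alloc_buffer(&hnd, 1280, 720, HAL_PIXEL_FORMAT_RGBA_8888,
//                          GRALLOC_USAGE_SW_READ_OFTEN |
//                          GRALLOC_USAGE_SW_WRITE_OFTEN);
//   if (err == 0) {
//       // ... access the buffer via hnd->base / hnd->fd ...
//       free_buffer(hnd);
//   }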