• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
3 
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions are
6  * met:
7  *   * Redistributions of source code must retain the above copyright
8  *     notice, this list of conditions and the following disclaimer.
9  *   * Redistributions in binary form must reproduce the above
10  *     copyright notice, this list of conditions and the following
11  *     disclaimer in the documentation and/or other materials provided
12  *     with the distribution.
13  *   * Neither the name of The Linux Foundation nor the names of its
14  *     contributors may be used to endorse or promote products derived
15  *     from this software without specific prior written permission.
16  *
17  * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
18  * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
19  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
20  * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
21  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
22  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
23  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
24  * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
25  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
26  * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
27  * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28  */
29 
30 #include <cutils/log.h>
31 #include <fcntl.h>
32 #include <dlfcn.h>
33 #include "gralloc_priv.h"
34 #include "alloc_controller.h"
35 #include "memalloc.h"
36 #include "ionalloc.h"
37 #include "gr.h"
38 #include "comptype.h"
39 
40 #ifdef VENUS_COLOR_FORMAT
41 #include <media/msm_media_info.h>
42 #else
43 #define VENUS_Y_STRIDE(args...) 0
44 #define VENUS_Y_SCANLINES(args...) 0
45 #define VENUS_BUFFER_SIZE(args...) 0
46 #endif
47 
48 using namespace gralloc;
49 using namespace qdutils;
50 
namespace android {
// Instantiate the static storage for the AdrenoMemInfo singleton
// (accessed elsewhere in this file via AdrenoMemInfo::getInstance()).
ANDROID_SINGLETON_STATIC_INSTANCE(AdrenoMemInfo);
}
54 
55 //Common functions
canFallback(int usage,bool triedSystem)56 static bool canFallback(int usage, bool triedSystem)
57 {
58     // Fallback to system heap when alloc fails unless
59     // 1. Composition type is MDP
60     // 2. Alloc from system heap was already tried
61     // 3. The heap type is requsted explicitly
62     // 4. The heap type is protected
63     // 5. The buffer is meant for external display only
64 
65     if(QCCompositionType::getInstance().getCompositionType() &
66        COMPOSITION_TYPE_MDP)
67         return false;
68     if(triedSystem)
69         return false;
70     if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PROTECTED))
71         return false;
72     if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PRIVATE_EXTERNAL_ONLY))
73         return false;
74     //Return true by default
75     return true;
76 }
77 
useUncached(int usage)78 static bool useUncached(int usage)
79 {
80     // System heaps cannot be uncached
81     if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP)
82         return false;
83     if (usage & GRALLOC_USAGE_PRIVATE_UNCACHED)
84         return true;
85     return false;
86 }
87 
88 //-------------- AdrenoMemInfo-----------------------//
AdrenoMemInfo()89 AdrenoMemInfo::AdrenoMemInfo()
90 {
91     libadreno_utils = ::dlopen("libadreno_utils.so", RTLD_NOW);
92     if (libadreno_utils) {
93         *(void **)&LINK_adreno_compute_padding = ::dlsym(libadreno_utils,
94                                            "compute_surface_padding");
95     }
96 }
97 
~AdrenoMemInfo()98 AdrenoMemInfo::~AdrenoMemInfo()
99 {
100     if (libadreno_utils) {
101         ::dlclose(libadreno_utils);
102     }
103 }
104 
getStride(int width,int format)105 int AdrenoMemInfo::getStride(int width, int format)
106 {
107     int stride = ALIGN(width, 32);
108     // Currently surface padding is only computed for RGB* surfaces.
109     if (format <= HAL_PIXEL_FORMAT_BGRA_8888) {
110         int bpp = 4;
111         switch(format)
112         {
113             case HAL_PIXEL_FORMAT_RGB_888:
114                 bpp = 3;
115                 break;
116             case HAL_PIXEL_FORMAT_RGB_565:
117                 bpp = 2;
118                 break;
119             default: break;
120         }
121         if ((libadreno_utils) && (LINK_adreno_compute_padding)) {
122             int surface_tile_height = 1;   // Linear surface
123             int raster_mode         = 0;   // Adreno unknown raster mode.
124             int padding_threshold   = 512; // Threshold for padding surfaces.
125             // the function below expects the width to be a multiple of
126             // 32 pixels, hence we pass stride instead of width.
127             stride = LINK_adreno_compute_padding(stride, bpp,
128                                       surface_tile_height, raster_mode,
129                                       padding_threshold);
130         }
131     } else {
132         switch (format)
133         {
134             case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:
135                 stride = ALIGN(width, 32);
136                 break;
137             case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:
138                 stride = ALIGN(width, 128);
139                 break;
140             case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
141             case HAL_PIXEL_FORMAT_YCbCr_420_SP:
142             case HAL_PIXEL_FORMAT_YCrCb_420_SP:
143             case HAL_PIXEL_FORMAT_YV12:
144             case HAL_PIXEL_FORMAT_YCbCr_422_SP:
145             case HAL_PIXEL_FORMAT_YCrCb_422_SP:
146             case HAL_PIXEL_FORMAT_RAW16:
147             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
148                 stride = ALIGN(width, 16);
149                 break;
150             case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
151                 stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, width);
152                 break;
153             case HAL_PIXEL_FORMAT_BLOB:
154                 stride = width;
155                 break;
156             default: break;
157         }
158     }
159     return stride;
160 }
161 
162 //-------------- IAllocController-----------------------//
163 IAllocController* IAllocController::sController = NULL;
getInstance(void)164 IAllocController* IAllocController::getInstance(void)
165 {
166     if(sController == NULL) {
167         sController = new IonController();
168     }
169     return sController;
170 }
171 
172 
173 //-------------- IonController-----------------------//
// A single ION allocator instance backs every heap this controller serves.
IonController::IonController()
{
    mIonAlloc = new IonAlloc();
}
178 
allocate(alloc_data & data,int usage)179 int IonController::allocate(alloc_data& data, int usage)
180 {
181     int ionFlags = 0;
182     int ret;
183 
184     data.uncached = useUncached(usage);
185     data.allocType = 0;
186 
187     if(usage & GRALLOC_USAGE_PRIVATE_UI_CONTIG_HEAP)
188         ionFlags |= ION_HEAP(ION_SF_HEAP_ID);
189 
190     if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP)
191         ionFlags |= ION_HEAP(ION_SYSTEM_HEAP_ID);
192 
193     if(usage & GRALLOC_USAGE_PRIVATE_IOMMU_HEAP)
194         ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
195 
196     //MM Heap is exclusively a secure heap.
197     if(usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
198         //XXX: Right now the MM heap is the only secure heap we have. When we
199         //have other secure heaps, we can change this.
200         if(usage & GRALLOC_USAGE_PROTECTED) {
201             ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID);
202             ionFlags |= ION_SECURE;
203         }
204         else {
205             ALOGW("GRALLOC_USAGE_PRIVATE_MM_HEAP \
206                   cannot be used as an insecure heap!\
207                   trying to use IOMMU instead !!");
208             ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
209         }
210     }
211 
212     if(usage & GRALLOC_USAGE_PRIVATE_CAMERA_HEAP)
213         ionFlags |= ION_HEAP(ION_CAMERA_HEAP_ID);
214 
215     if(usage & GRALLOC_USAGE_PROTECTED)
216          data.allocType |= private_handle_t::PRIV_FLAGS_SECURE_BUFFER;
217 
218     // if no flags are set, default to
219     // SF + IOMMU heaps, so that bypass can work
220     // we can fall back to system heap if
221     // we run out.
222     if(!ionFlags)
223         ionFlags = ION_HEAP(ION_SF_HEAP_ID) | ION_HEAP(ION_IOMMU_HEAP_ID);
224 
225     data.flags = ionFlags;
226     ret = mIonAlloc->alloc_buffer(data);
227 
228     // Fallback
229     if(ret < 0 && canFallback(usage,
230                               (ionFlags & ION_SYSTEM_HEAP_ID)))
231     {
232         ALOGW("Falling back to system heap");
233         data.flags = ION_HEAP(ION_SYSTEM_HEAP_ID);
234         ret = mIonAlloc->alloc_buffer(data);
235     }
236 
237     if(ret >= 0 ) {
238         data.allocType |= private_handle_t::PRIV_FLAGS_USES_ION;
239     }
240 
241     return ret;
242 }
243 
getAllocator(int flags)244 IMemAlloc* IonController::getAllocator(int flags)
245 {
246     IMemAlloc* memalloc = NULL;
247     if (flags & private_handle_t::PRIV_FLAGS_USES_ION) {
248         memalloc = mIonAlloc;
249     } else {
250         ALOGE("%s: Invalid flags passed: 0x%x", __FUNCTION__, flags);
251     }
252 
253     return memalloc;
254 }
255 
// Compute the allocation size in bytes for a (width, height, format)
// buffer, returning the aligned dimensions through alignedw/alignedh.
// NOTE(review): error paths "return -EINVAL" through a size_t return
// type, so the error wraps to a huge positive value — callers cannot
// detect failure with a "< 0" test; they must compare against
// (size_t)-EINVAL. TODO confirm whether a signed return is feasible.
size_t getBufferSizeAndDimensions(int width, int height, int format,
                                  int& alignedw, int &alignedh)
{
    size_t size;

    // Stride comes from GPU-specific rules; height defaults to 32-line
    // alignment. Several YUV cases below override alignedh.
    alignedw = AdrenoMemInfo::getInstance().getStride(width, format);
    alignedh = ALIGN(height, 32);
    switch (format) {
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_BGRA_8888:
            size = alignedw * alignedh * 4;
            break;
        case HAL_PIXEL_FORMAT_RGB_888:
            size = alignedw * alignedh * 3;
            break;
        case HAL_PIXEL_FORMAT_RGB_565:
        case HAL_PIXEL_FORMAT_RAW16:
            size = alignedw * alignedh * 2;
            break;

            // adreno formats
        case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:  // NV21
            // Y plane plus interleaved CrCb plane, each padded to 4K.
            size  = ALIGN(alignedw*alignedh, 4096);
            size += ALIGN(2 * ALIGN(width/2, 32) * ALIGN(height/2, 32), 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:   // NV12
            // The chroma plane is subsampled,
            // but the pitch in bytes is unchanged
            // The GPU needs 4K alignment, but the video decoder needs 8K
            alignedw = ALIGN(alignedw, 128);
            size  = ALIGN( alignedw * alignedh, 8192);
            size += ALIGN( alignedw * ALIGN(height/2, 32), 8192);
            break;
        case HAL_PIXEL_FORMAT_NV12:
            alignedw = ALIGN(width, 16);
            alignedh = height;
            size  = ALIGN( ALIGN(width, 128) * ALIGN(height, 32), 8192);
            size += ALIGN( ALIGN(width, 128) * ALIGN(height/2, 32), 8192);
            break;
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
        case HAL_PIXEL_FORMAT_YV12:
            // YV12 requires even dimensions for the 2x2-subsampled chroma.
            if ((format == HAL_PIXEL_FORMAT_YV12) && ((width&1) || (height&1))) {
                ALOGE("w or h is odd for the YV12 format");
                return -EINVAL;
            }
            alignedh = height;
            if (HAL_PIXEL_FORMAT_NV12_ENCODEABLE == format) {
                // The encoder requires a 2K aligned chroma offset.
                size = ALIGN(alignedw*alignedh, 2048) +
                    (ALIGN(alignedw/2, 16) * (alignedh/2))*2;
            } else {
                size = alignedw*alignedh +
                    (ALIGN(alignedw/2, 16) * (alignedh/2))*2;
            }
            size = ALIGN(size, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            // 4:2:0 semi-planar: Y plane + half-size interleaved chroma.
            alignedh = height;
            size = ALIGN((alignedw*alignedh) + (alignedw* alignedh)/2, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
        case HAL_PIXEL_FORMAT_YCrCb_422_SP:
            if(width & 1) {
                ALOGE("width is odd for the YUV422_SP format");
                return -EINVAL;
            }
            alignedh = height;
            size = ALIGN(alignedw * alignedh * 2, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
            // Venus video core dictates both scanlines and total size
            // (stubs out to 0 when VENUS_COLOR_FORMAT is not defined).
            alignedh = VENUS_Y_SCANLINES(COLOR_FMT_NV12, height);
            size = VENUS_BUFFER_SIZE(COLOR_FMT_NV12, width, height);
            break;
        case HAL_PIXEL_FORMAT_BLOB:
        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            // Opaque byte streams: width is the byte count, height must be 1.
            if(height != 1) {
                ALOGE("%s: Buffers with RAW_OPAQUE/BLOB formats \
                      must have height==1 ", __FUNCTION__);
                return -EINVAL;
            }
            alignedh = height;
            alignedw = width;
            size = width;
            break;
        default:
            ALOGE("unrecognized pixel format: 0x%x", format);
            return -EINVAL;
    }

    return size;
}
349 
350 // Allocate buffer from width, height and format into a
351 // private_handle_t. It is the responsibility of the caller
352 // to free the buffer using the free_buffer function
alloc_buffer(private_handle_t ** pHnd,int w,int h,int format,int usage)353 int alloc_buffer(private_handle_t **pHnd, int w, int h, int format, int usage)
354 {
355     alloc_data data;
356     int alignedw, alignedh;
357     gralloc::IAllocController* sAlloc =
358         gralloc::IAllocController::getInstance();
359     data.base = 0;
360     data.fd = -1;
361     data.offset = 0;
362     data.size = getBufferSizeAndDimensions(w, h, format, alignedw, alignedh);
363     data.align = getpagesize();
364     data.uncached = useUncached(usage);
365     int allocFlags = usage;
366 
367     int err = sAlloc->allocate(data, allocFlags);
368     if (0 != err) {
369         ALOGE("%s: allocate failed", __FUNCTION__);
370         return -ENOMEM;
371     }
372 
373     private_handle_t* hnd = new private_handle_t(data.fd, data.size,
374                                                  data.allocType, 0, format,
375                                                  alignedw, alignedh);
376     hnd->base = (int) data.base;
377     hnd->offset = data.offset;
378     hnd->gpuaddr = 0;
379     *pHnd = hnd;
380     return 0;
381 }
382 
free_buffer(private_handle_t * hnd)383 void free_buffer(private_handle_t *hnd)
384 {
385     gralloc::IAllocController* sAlloc =
386         gralloc::IAllocController::getInstance();
387     if (hnd && hnd->fd > 0) {
388         IMemAlloc* memalloc = sAlloc->getAllocator(hnd->flags);
389         memalloc->free_buffer((void*)hnd->base, hnd->size, hnd->offset, hnd->fd);
390     }
391     if(hnd)
392         delete hnd;
393 
394 }
395