/*
// Copyright (c) 2014 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
*/
#include <HwcTrace.h>
#include <Hwcomposer.h>
#include <DisplayPlaneManager.h>
#include <DisplayQuery.h>
#include <VirtualDevice.h>
#include <SoftVsyncObserver.h>

#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>

#include <hal_public.h>
#include <libsync/sw_sync.h>
#include <sync/sync.h>

#include <va/va_android.h>
#include <va/va_vpp.h>
#include <va/va_tpi.h>

#include <cutils/properties.h>

#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>

#define NUM_CSC_BUFFERS 6
#define NUM_SCALING_BUFFERS 3

#define QCIF_WIDTH 176
#define QCIF_HEIGHT 144

namespace android {
namespace intel {

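// Width is padded to a multiple of 64 pixels and height to a multiple of 16
// lines before buffers are handed to the VSP/encoder; the exact values are
// an assumption based on how these helpers are used below.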
static inline uint32_t align_width(uint32_t val)
{
    return align_to(val, 64);
}

static inline uint32_t align_height(uint32_t val)
{
    return align_to(val, 16);
}

static void my_close_fence(const char* func, const char* fenceName, int& fenceFd)
{
    if (fenceFd != -1) {
        ALOGV("%s: closing fence %s (fd=%d)", func, fenceName, fenceFd);
        int err = close(fenceFd);
        if (err < 0) {
            ALOGE("%s: fence %s close error %d: %s", func, fenceName, err, strerror(errno));
        }
        fenceFd = -1;
    }
}

static void my_sync_wait_and_close(const char* func, const char* fenceName, int& fenceFd)
{
    if (fenceFd != -1) {
        ALOGV("%s: waiting on fence %s (fd=%d)", func, fenceName, fenceFd);
        int err = sync_wait(fenceFd, 300);
        if (err < 0) {
            ALOGE("%s: fence %s sync_wait error %d: %s", func, fenceName, err, strerror(errno));
        }
        my_close_fence(func, fenceName, fenceFd);
    }
}

static void my_timeline_inc(const char* func, const char* timelineName, int& syncTimelineFd)
{
    if (syncTimelineFd != -1) {
        ALOGV("%s: incrementing timeline %s (fd=%d)", func, timelineName, syncTimelineFd);
        int err = sw_sync_timeline_inc(syncTimelineFd, 1);
        if (err < 0)
            ALOGE("%s: sync timeline %s increment error %d: %s", func, timelineName, err, strerror(errno));
        syncTimelineFd = -1;
    }
}

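// These macros capture the calling function's name (__func__) and the
// stringified fence variable name, so fence lifecycle problems can be traced
// back to the exact call site in the logs.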
#define CLOSE_FENCE(fenceName) my_close_fence(__func__, #fenceName, fenceName)
#define SYNC_WAIT_AND_CLOSE(fenceName) my_sync_wait_and_close(__func__, #fenceName, fenceName)
#define TIMELINE_INC(timelineName) my_timeline_inc(__func__, #timelineName, timelineName)

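// RAII wrapper: derives a VAImage from a VASurface and maps its buffer for
// CPU access; unmapping and image destruction happen in the destructor.
// Used below only for debug dumps of VSP input/output surfaces.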
class MappedSurface {
public:
    MappedSurface(VADisplay dpy, VASurfaceID surf)
        : va_dpy(dpy),
          ptr(NULL)
    {
        VAStatus va_status;
        va_status = vaDeriveImage(va_dpy, surf, &image);
        if (va_status != VA_STATUS_SUCCESS) {
            ETRACE("vaDeriveImage returns %08x", va_status);
            return;
        }
        va_status = vaMapBuffer(va_dpy, image.buf, (void**)&ptr);
        if (va_status != VA_STATUS_SUCCESS) {
            ETRACE("vaMapBuffer returns %08x", va_status);
            vaDestroyImage(va_dpy, image.image_id);
            return;
        }
    }
    ~MappedSurface() {
        if (ptr == NULL)
            return;

        VAStatus va_status;

        va_status = vaUnmapBuffer(va_dpy, image.buf);
        if (va_status != VA_STATUS_SUCCESS) ETRACE("vaUnmapBuffer returns %08x", va_status);

        va_status = vaDestroyImage(va_dpy, image.image_id);
        if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroyImage returns %08x", va_status);
    }
    bool valid() { return ptr != NULL; }
    uint8_t* getPtr() { return ptr; }
private:
    VADisplay va_dpy;
    VAImage image;
    uint8_t* ptr;
};

class VirtualDevice::VAMappedHandle {
public:
    VAMappedHandle(VADisplay dpy, buffer_handle_t handle, uint32_t stride, uint32_t height, unsigned int pixel_format)
        : va_dpy(dpy),
          surface(0)
    {
        VTRACE("Map gralloc %p size=%ux%u", handle, stride, height);

        unsigned int format;
        unsigned long buffer = reinterpret_cast<unsigned long>(handle);
        VASurfaceAttribExternalBuffers buf;
        buf.pixel_format = pixel_format;
        buf.width = stride;
        buf.height = height;
        buf.buffers = &buffer;
        buf.num_buffers = 1;
        buf.flags = 0;
        buf.private_data = NULL;

        if (pixel_format == VA_FOURCC_RGBA || pixel_format == VA_FOURCC_BGRA) {
            format = VA_RT_FORMAT_RGB32;
            buf.data_size = stride * height * 4;
            buf.num_planes = 3;
            buf.pitches[0] = stride;
            buf.pitches[1] = stride;
            buf.pitches[2] = stride;
            buf.pitches[3] = 0;
            buf.offsets[0] = 0;
            buf.offsets[1] = 0;
            buf.offsets[2] = 0;
            buf.offsets[3] = 0;
        }
        else {
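            // NV12: a full-stride luma plane followed immediately by the
            // interleaved UV plane, hence the 3/2 size and single chroma offset.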
            format = VA_RT_FORMAT_YUV420;
            buf.data_size = stride * height * 3/2;
            buf.num_planes = 2;
            buf.pitches[0] = stride;
            buf.pitches[1] = stride;
            buf.pitches[2] = 0;
            buf.pitches[3] = 0;
            buf.offsets[0] = 0;
            buf.offsets[1] = stride * height;
            buf.offsets[2] = 0;
            buf.offsets[3] = 0;
        }

        VASurfaceAttrib attrib_list[3];
        attrib_list[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
        attrib_list[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
        attrib_list[0].value.type = VAGenericValueTypeInteger;
        attrib_list[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
        attrib_list[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
        attrib_list[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
        attrib_list[1].value.type = VAGenericValueTypePointer;
        attrib_list[1].value.value.p = (void *)&buf;
        attrib_list[2].type = (VASurfaceAttribType)VASurfaceAttribPixelFormat;
        attrib_list[2].flags = VA_SURFACE_ATTRIB_SETTABLE;
        attrib_list[2].value.type = VAGenericValueTypeInteger;
        attrib_list[2].value.value.i = pixel_format;

        VAStatus va_status;
        va_status = vaCreateSurfaces(va_dpy,
                                     format,
                                     stride,
                                     height,
                                     &surface,
                                     1,
                                     attrib_list,
                                     3);
        if (va_status != VA_STATUS_SUCCESS) {
            ETRACE("vaCreateSurfaces returns %08x, surface = %x", va_status, surface);
            surface = 0;
        }
    }
    VAMappedHandle(VADisplay dpy, buffer_handle_t khandle, uint32_t stride, uint32_t height, bool tiled)
        : va_dpy(dpy),
          surface(0)
    {
        int format;
        VASurfaceAttributeTPI attribTpi;
        memset(&attribTpi, 0, sizeof(attribTpi));
        VTRACE("Map khandle %p size=%ux%u", khandle, stride, height);
        attribTpi.type = VAExternalMemoryKernelDRMBufffer;
        attribTpi.width = stride;
        attribTpi.height = height;
        attribTpi.size = stride*height*3/2;
        attribTpi.pixel_format = VA_FOURCC_NV12;
        attribTpi.tiling = tiled;
        attribTpi.luma_stride = stride;
        attribTpi.chroma_u_stride = stride;
        attribTpi.chroma_v_stride = stride;
        attribTpi.luma_offset = 0;
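        // NV12 chroma is interleaved after the luma plane, so U starts at
        // stride*height and V is the very next byte.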
        attribTpi.chroma_u_offset = stride*height;
        attribTpi.chroma_v_offset = stride*height+1;
        format = VA_RT_FORMAT_YUV420;
        attribTpi.count = 1;
        attribTpi.buffers = (long unsigned int*) &khandle;

        VAStatus va_status;
        va_status = vaCreateSurfacesWithAttribute(va_dpy,
                                                  stride,
                                                  height,
                                                  format,
                                                  1,
                                                  &surface,
                                                  &attribTpi);
        if (va_status != VA_STATUS_SUCCESS) {
            ETRACE("vaCreateSurfacesWithAttribute returns %08x", va_status);
            surface = 0;
        }
    }
    ~VAMappedHandle()
    {
        if (surface == 0)
            return;
        VAStatus va_status;
        va_status = vaDestroySurfaces(va_dpy, &surface, 1);
        if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroySurfaces returns %08x", va_status);
    }
private:
    VADisplay va_dpy;
public:
    VASurfaceID surface;
};

// refcounted version of VAMappedHandle, to make caching easier
class VirtualDevice::VAMappedHandleObject : public RefBase, public VAMappedHandle {
public:
    VAMappedHandleObject(VADisplay dpy, buffer_handle_t handle, uint32_t stride, uint32_t height, unsigned int pixel_format)
        : VAMappedHandle(dpy, handle, stride, height, pixel_format) { }
    VAMappedHandleObject(VADisplay dpy, buffer_handle_t khandle, uint32_t stride, uint32_t height, bool tiled)
        : VAMappedHandle(dpy, khandle, stride, height, tiled) { }
protected:
    ~VAMappedHandleObject() {}
};

VirtualDevice::CachedBuffer::CachedBuffer(BufferManager *mgr, buffer_handle_t handle)
    : manager(mgr),
      mapper(NULL),
      vaMappedHandle(NULL),
      cachedKhandle(0)
{
    DataBuffer *buffer = manager->lockDataBuffer((buffer_handle_t)handle);
    mapper = manager->map(*buffer);
    manager->unlockDataBuffer(buffer);
}

VirtualDevice::CachedBuffer::~CachedBuffer()
{
    if (vaMappedHandle != NULL)
        delete vaMappedHandle;
    manager->unmap(mapper);
}

VirtualDevice::HeldDecoderBuffer::HeldDecoderBuffer(const sp<VirtualDevice>& vd, const android::sp<CachedBuffer>& cachedBuffer)
    : vd(vd),
      cachedBuffer(cachedBuffer)
{
    if (!vd->mPayloadManager->setRenderStatus(cachedBuffer->mapper, true)) {
        ETRACE("Failed to set render status");
    }
}

VirtualDevice::HeldDecoderBuffer::~HeldDecoderBuffer()
{
    if (!vd->mPayloadManager->setRenderStatus(cachedBuffer->mapper, false)) {
        ETRACE("Failed to set render status");
    }
}

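// Work items for the device thread. Tasks are queued under mTaskLock and
// executed serially by threadLoop(), which keeps the VSP/VA work off the
// HWC prepare/commit hot path.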
struct VirtualDevice::Task : public RefBase {
    virtual void run(VirtualDevice& vd) = 0;
    virtual ~Task() {}
};

struct VirtualDevice::RenderTask : public VirtualDevice::Task {
    RenderTask() : successful(false) { }
    virtual void run(VirtualDevice& vd) = 0;
    bool successful;
};

struct VirtualDevice::ComposeTask : public VirtualDevice::RenderTask {
    ComposeTask()
        : videoKhandle(0),
          rgbHandle(NULL),
          mappedRgbIn(NULL),
          outputHandle(NULL),
          yuvAcquireFenceFd(-1),
          rgbAcquireFenceFd(-1),
          outbufAcquireFenceFd(-1),
          syncTimelineFd(-1) { }

    virtual ~ComposeTask() {
        // If queueCompose() creates this object and sets up fences,
        // but aborts before enqueuing the task, or if the task runs
        // but errors out, make sure our acquire fences get closed
        // and any release fences get signaled.
        CLOSE_FENCE(yuvAcquireFenceFd);
        CLOSE_FENCE(rgbAcquireFenceFd);
        CLOSE_FENCE(outbufAcquireFenceFd);
        TIMELINE_INC(syncTimelineFd);
    }

    virtual void run(VirtualDevice& vd) {
        bool dump = false;
        if (vd.mDebugVspDump && ++vd.mDebugCounter > 200) {
            dump = true;
            vd.mDebugCounter = 0;
        }

        SYNC_WAIT_AND_CLOSE(yuvAcquireFenceFd);

        VASurfaceID videoInSurface;
        if (videoKhandle == 0) {
            videoInSurface = vd.va_blank_yuv_in;
        } else {
            if (videoCachedBuffer->cachedKhandle != videoKhandle || videoCachedBuffer->vaMappedHandle == NULL) {
                if (videoCachedBuffer->vaMappedHandle != NULL)
                    delete videoCachedBuffer->vaMappedHandle;
                videoCachedBuffer->vaMappedHandle = new VAMappedHandle(vd.va_dpy, videoKhandle, videoStride, videoBufHeight, videoTiled);
                videoCachedBuffer->cachedKhandle = videoKhandle;
            }
            videoInSurface = videoCachedBuffer->vaMappedHandle->surface;
        }

        if (videoInSurface == 0) {
            ETRACE("Couldn't map video");
            return;
        }
        SYNC_WAIT_AND_CLOSE(rgbAcquireFenceFd);
        SYNC_WAIT_AND_CLOSE(outbufAcquireFenceFd);

        VAMappedHandle mappedVideoOut(vd.va_dpy, outputHandle, align_width(outWidth), align_height(outHeight), (unsigned int)VA_FOURCC_NV12);
        if (mappedVideoOut.surface == 0) {
            ETRACE("Unable to map outbuf");
            return;
        }

        if (dump)
            dumpSurface(vd.va_dpy, "/data/misc/vsp_in.yuv", videoInSurface, videoStride*videoBufHeight*3/2);

        if (mappedRgbIn != NULL) {
            if (dump)
                dumpSurface(vd.va_dpy, "/data/misc/vsp_in.rgb", mappedRgbIn->surface, align_width(outWidth)*align_height(outHeight)*4);
            vd.vspCompose(videoInSurface, mappedRgbIn->surface, mappedVideoOut.surface, &surface_region, &output_region);
        }
        else if (rgbHandle != NULL) {
            VAMappedHandle localMappedRgbIn(vd.va_dpy, rgbHandle, align_width(outWidth), align_height(outHeight), (unsigned int)VA_FOURCC_BGRA);
            vd.vspCompose(videoInSurface, localMappedRgbIn.surface, mappedVideoOut.surface, &surface_region, &output_region);
        }
        else {
            // No RGBA, so compose with 100% transparent RGBA frame.
            if (dump)
                dumpSurface(vd.va_dpy, "/data/misc/vsp_in.rgb", vd.va_blank_rgb_in, align_width(outWidth)*align_height(outHeight)*4);
            vd.vspCompose(videoInSurface, vd.va_blank_rgb_in, mappedVideoOut.surface, &surface_region, &output_region);
        }
        if (dump)
            dumpSurface(vd.va_dpy, "/data/misc/vsp_out.yuv", mappedVideoOut.surface, align_width(outWidth)*align_height(outHeight)*3/2);
        TIMELINE_INC(syncTimelineFd);
        successful = true;
    }
    void dumpSurface(VADisplay va_dpy, const char* filename, VASurfaceID surf, int size) {
        MappedSurface dumpSurface(va_dpy, surf);
        if (dumpSurface.valid()) {
            int fd = open(filename, O_CREAT | O_TRUNC | O_WRONLY, S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP);
            if (fd >= 0) {
                write(fd, dumpSurface.getPtr(), size);
                close(fd);
                ALOGI("Output dumped");
            }
            else
                ALOGE("Error %d opening output file: %s", errno, strerror(errno));
        }
        else
            ALOGE("Failed to map output for dump");
    }
    buffer_handle_t videoKhandle;
    uint32_t videoStride;
    uint32_t videoBufHeight;
    bool videoTiled;
    buffer_handle_t rgbHandle;
    sp<RefBase> heldRgbHandle;
    sp<VAMappedHandleObject> mappedRgbIn;
    buffer_handle_t outputHandle;
    VARectangle surface_region;
    VARectangle output_region;
    uint32_t outWidth;
    uint32_t outHeight;
    sp<CachedBuffer> videoCachedBuffer;
    sp<RefBase> heldVideoBuffer;
    int yuvAcquireFenceFd;
    int rgbAcquireFenceFd;
    int outbufAcquireFenceFd;
    int syncTimelineFd;
};

struct VirtualDevice::EnableVspTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
        vd.vspEnable(width, height);
    }
    uint32_t width;
    uint32_t height;
};

struct VirtualDevice::DisableVspTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
        vd.vspDisable();
    }
};

struct VirtualDevice::BlitTask : public VirtualDevice::RenderTask {
    BlitTask()
        : srcAcquireFenceFd(-1),
          destAcquireFenceFd(-1),
          syncTimelineFd(-1) { }

    virtual ~BlitTask()
    {
        // If queueColorConvert() creates this object and sets up fences,
        // but aborts before enqueuing the task, or if the task runs
        // but errors out, make sure our acquire fences get closed
        // and any release fences get signaled.
        CLOSE_FENCE(srcAcquireFenceFd);
        CLOSE_FENCE(destAcquireFenceFd);
        TIMELINE_INC(syncTimelineFd);
    }

    virtual void run(VirtualDevice& vd) {
        SYNC_WAIT_AND_CLOSE(srcAcquireFenceFd);
        SYNC_WAIT_AND_CLOSE(destAcquireFenceFd);
        BufferManager* mgr = vd.mHwc.getBufferManager();
        if (!(mgr->blit(srcHandle, destHandle, destRect, false, false))) {
            ETRACE("color space conversion from RGB to NV12 failed");
        }
        else
            successful = true;
        TIMELINE_INC(syncTimelineFd);
    }
    buffer_handle_t srcHandle;
    buffer_handle_t destHandle;
    int srcAcquireFenceFd;
    int destAcquireFenceFd;
    int syncTimelineFd;
    crop_t destRect;
};

struct VirtualDevice::FrameTypeChangedTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
        typeChangeListener->frameTypeChanged(inputFrameInfo);
        ITRACE("Notify frameTypeChanged: %dx%d in %dx%d @ %d fps",
               inputFrameInfo.contentWidth, inputFrameInfo.contentHeight,
               inputFrameInfo.bufferWidth, inputFrameInfo.bufferHeight,
               inputFrameInfo.contentFrameRateN);
    }
    sp<IFrameTypeChangeListener> typeChangeListener;
    FrameInfo inputFrameInfo;
};

struct VirtualDevice::BufferInfoChangedTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
        typeChangeListener->bufferInfoChanged(outputFrameInfo);
        ITRACE("Notify bufferInfoChanged: %dx%d in %dx%d @ %d fps",
               outputFrameInfo.contentWidth, outputFrameInfo.contentHeight,
               outputFrameInfo.bufferWidth, outputFrameInfo.bufferHeight,
               outputFrameInfo.contentFrameRateN);
    }
    sp<IFrameTypeChangeListener> typeChangeListener;
    FrameInfo outputFrameInfo;
};

struct VirtualDevice::OnFrameReadyTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
        if (renderTask != NULL && !renderTask->successful)
            return;

        {
            Mutex::Autolock _l(vd.mHeldBuffersLock);
            // Add the held buffer to the vector before calling onFrameReady, so
            // that the buffer will be removed from the vector properly even if
            // the notifyBufferReturned call acquires mHeldBuffersLock first.
            vd.mHeldBuffers.add(handle, heldBuffer);
        }

        // FIXME: we could remove this casting once onFrameReady receives
        // a buffer_handle_t handle
        status_t result = frameListener->onFrameReady((uint32_t)handle, handleType, renderTimestamp, mediaTimestamp);
        if (result != OK) {
            Mutex::Autolock _l(vd.mHeldBuffersLock);
            vd.mHeldBuffers.removeItem(handle);
        }
    }
    sp<RenderTask> renderTask;
    sp<RefBase> heldBuffer;
    sp<IFrameListener> frameListener;
    buffer_handle_t handle;
    HWCBufferHandleType handleType;
    int64_t renderTimestamp;
    int64_t mediaTimestamp;
};

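// Ties the lifetime of a loaned gralloc buffer to its BufferList: when the
// last reference drops, the buffer is either recycled into the free list or
// freed if the list has since been resized.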
struct VirtualDevice::BufferList::HeldBuffer : public RefBase {
    HeldBuffer(BufferList& list, buffer_handle_t handle, uint32_t w, uint32_t h)
        : mList(list),
          mHandle(handle),
          mWidth(w),
          mHeight(h) { }
    virtual ~HeldBuffer()
    {
        Mutex::Autolock _l(mList.mVd.mTaskLock);
        if (mWidth == mList.mWidth && mHeight == mList.mHeight) {
            VTRACE("Returning %s buffer %p (%ux%u) to list", mList.mName, mHandle, mWidth, mHeight);
            mList.mAvailableBuffers.push_back(mHandle);
        } else {
            VTRACE("Deleting %s buffer %p (%ux%u)", mList.mName, mHandle, mWidth, mHeight);
            BufferManager* mgr = mList.mVd.mHwc.getBufferManager();
            mgr->freeGrallocBuffer(mHandle);
            if (mList.mBuffersToCreate < mList.mLimit)
                mList.mBuffersToCreate++;
        }
    }

    BufferList& mList;
    buffer_handle_t mHandle;
    uint32_t mWidth;
    uint32_t mHeight;
};

VirtualDevice::BufferList::BufferList(VirtualDevice& vd, const char* name,
                                      uint32_t limit, uint32_t format, uint32_t usage)
    : mVd(vd),
      mName(name),
      mLimit(limit),
      mFormat(format),
      mUsage(usage),
      mBuffersToCreate(0),
      mWidth(0),
      mHeight(0)
{
}

buffer_handle_t VirtualDevice::BufferList::get(uint32_t width, uint32_t height, sp<RefBase>* heldBuffer)
{
    width = align_width(width);
    height = align_height(height);
    if (mWidth != width || mHeight != height) {
        ITRACE("%s buffers changing from %dx%d to %dx%d",
               mName, mWidth, mHeight, width, height);
        clear();
        mWidth = width;
        mHeight = height;
        mBuffersToCreate = mLimit;
    }

    buffer_handle_t handle;
    if (mAvailableBuffers.empty()) {
        if (mBuffersToCreate == 0)
            return NULL;
        BufferManager* mgr = mVd.mHwc.getBufferManager();
        handle = reinterpret_cast<buffer_handle_t>(
            mgr->allocGrallocBuffer(width, height, mFormat, mUsage));
        if (handle == NULL) {
            ETRACE("failed to allocate %s buffer", mName);
            return NULL;
        }
        mBuffersToCreate--;
    }
    else {
        handle = *mAvailableBuffers.begin();
        mAvailableBuffers.erase(mAvailableBuffers.begin());
    }
    *heldBuffer = new HeldBuffer(*this, handle, width, height);
    return handle;
}

void VirtualDevice::BufferList::clear()
{
    if (mWidth != 0 || mHeight != 0)
        ITRACE("Releasing %s buffers (%ux%u)", mName, mWidth, mHeight);
    if (!mAvailableBuffers.empty()) {
        // iterate the list and call freeGrallocBuffer
        for (List<buffer_handle_t>::iterator i = mAvailableBuffers.begin(); i != mAvailableBuffers.end(); ++i) {
            VTRACE("Deleting the gralloc buffer associated with handle (%p)", (*i));
            mVd.mHwc.getBufferManager()->freeGrallocBuffer((*i));
        }
        mAvailableBuffers.clear();
    }
    mWidth = 0;
    mHeight = 0;
}

VirtualDevice::VirtualDevice(Hwcomposer& hwc)
    : mProtectedMode(false),
      mCscBuffers(*this, "CSC",
                  NUM_CSC_BUFFERS, DisplayQuery::queryNV12Format(),
                  GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_PRIVATE_1),
      mRgbUpscaleBuffers(*this, "RGB upscale",
                  NUM_SCALING_BUFFERS, HAL_PIXEL_FORMAT_BGRA_8888,
                  GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER),
      mInitialized(false),
      mHwc(hwc),
      mPayloadManager(NULL),
      mVsyncObserver(NULL),
      mOrigContentWidth(0),
      mOrigContentHeight(0),
      mFirstVideoFrame(true),
      mLastConnectionStatus(false),
      mCachedBufferCapcity(16),
      mDecWidth(0),
      mDecHeight(0)
{
    CTRACE();
    mNextConfig.frameServerActive = false;
}

VirtualDevice::~VirtualDevice()
{
    WARN_IF_NOT_DEINIT();
}

sp<VirtualDevice::CachedBuffer> VirtualDevice::getMappedBuffer(buffer_handle_t handle)
{
    ssize_t index = mMappedBufferCache.indexOfKey(handle);
    sp<CachedBuffer> cachedBuffer;
    if (index == NAME_NOT_FOUND) {
        if (mMappedBufferCache.size() > mCachedBufferCapcity)
            mMappedBufferCache.clear();

        cachedBuffer = new CachedBuffer(mHwc.getBufferManager(), handle);
        mMappedBufferCache.add(handle, cachedBuffer);
    } else {
        cachedBuffer = mMappedBufferCache[index];
    }

    return cachedBuffer;
}

bool VirtualDevice::threadLoop()
{
    sp<Task> task;
    {
        Mutex::Autolock _l(mTaskLock);
        while (mTasks.empty()) {
            mRequestQueued.wait(mTaskLock);
        }
        task = *mTasks.begin();
        mTasks.erase(mTasks.begin());
    }
    if (task != NULL) {
        task->run(*this);
        task = NULL;
    }
    mRequestDequeued.signal();

    return true;
}

status_t VirtualDevice::start(sp<IFrameTypeChangeListener> typeChangeListener)
{
    ITRACE();
    Mutex::Autolock _l(mConfigLock);
    mNextConfig.typeChangeListener = typeChangeListener;
    mNextConfig.frameListener = NULL;
    mNextConfig.policy.scaledWidth = 0;
    mNextConfig.policy.scaledHeight = 0;
    mNextConfig.policy.xdpi = 96;
    mNextConfig.policy.ydpi = 96;
    mNextConfig.policy.refresh = 60;
    mNextConfig.extendedModeEnabled =
        Hwcomposer::getInstance().getDisplayAnalyzer()->isVideoExtModeEnabled();
    mVideoFramerate = 0;
    mFirstVideoFrame = true;
    mNextConfig.frameServerActive = true;
    mNextConfig.forceNotifyFrameType = true;
    mNextConfig.forceNotifyBufferInfo = true;

    return NO_ERROR;
}

status_t VirtualDevice::stop(bool isConnected)
{
    ITRACE();
    Mutex::Autolock _l(mConfigLock);
    mNextConfig.typeChangeListener = NULL;
    mNextConfig.frameListener = NULL;
    mNextConfig.policy.scaledWidth = 0;
    mNextConfig.policy.scaledHeight = 0;
    mNextConfig.policy.xdpi = 96;
    mNextConfig.policy.ydpi = 96;
    mNextConfig.policy.refresh = 60;
    mNextConfig.frameServerActive = false;
    mNextConfig.extendedModeEnabled = false;
    mNextConfig.forceNotifyFrameType = false;
    mNextConfig.forceNotifyBufferInfo = false;
    {
        Mutex::Autolock _l(mTaskLock);
        mCscBuffers.clear();
    }
    return NO_ERROR;
}

bool VirtualDevice::isFrameServerActive() const
{
    return mCurrentConfig.frameServerActive;
}

/* TODO: 64-bit - this handle of size 32-bit is a problem for 64-bit */
status_t VirtualDevice::notifyBufferReturned(int handle)
{
    CTRACE();
    Mutex::Autolock _l(mHeldBuffersLock);
    ssize_t index = mHeldBuffers.indexOfKey((buffer_handle_t)handle);
    if (index == NAME_NOT_FOUND) {
        ETRACE("Couldn't find returned khandle %d", handle);
    } else {
        VTRACE("Removing heldBuffer associated with handle (%d)", handle);
        mHeldBuffers.removeItemsAt(index, 1);
    }
    return NO_ERROR;
}

status_t VirtualDevice::setResolution(const FrameProcessingPolicy& policy, sp<IFrameListener> listener)
{
    ITRACE();
    Mutex::Autolock _l(mConfigLock);
    mNextConfig.frameListener = listener;
    mNextConfig.policy = policy;
    return NO_ERROR;
}

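// A layer can be sent to the encoder as-is only if it is an unrotated,
// opaque-alpha, premultiplied RGBA/BGRA layer whose crop and destination
// exactly match the framebuffer target, i.e. it already covers the screen.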
static bool canUseDirectly(const hwc_display_contents_1_t *display, size_t n)
{
    const hwc_layer_1_t& fbTarget = display->hwLayers[display->numHwLayers-1];
    const hwc_layer_1_t& layer = display->hwLayers[n];
    const IMG_native_handle_t* nativeHandle = reinterpret_cast<const IMG_native_handle_t*>(layer.handle);
    return !(layer.flags & HWC_SKIP_LAYER) && layer.transform == 0 &&
            layer.blending == HWC_BLENDING_PREMULT &&
            layer.sourceCropf.left == 0 && layer.sourceCropf.top == 0 &&
            layer.displayFrame.left == 0 && layer.displayFrame.top == 0 &&
            layer.sourceCropf.right == fbTarget.sourceCropf.right &&
            layer.sourceCropf.bottom == fbTarget.sourceCropf.bottom &&
            layer.displayFrame.right == fbTarget.displayFrame.right &&
            layer.displayFrame.bottom == fbTarget.displayFrame.bottom &&
            layer.planeAlpha == 255 && layer.handle != NULL &&
            (nativeHandle->iFormat == HAL_PIXEL_FORMAT_RGBA_8888 ||
             nativeHandle->iFormat == HAL_PIXEL_FORMAT_BGRA_8888);
}

bool VirtualDevice::prePrepare(hwc_display_contents_1_t *display)
{
    RETURN_FALSE_IF_NOT_INIT();
    return true;
}

bool VirtualDevice::prepare(hwc_display_contents_1_t *display)
{
    RETURN_FALSE_IF_NOT_INIT();

    mRenderTimestamp = systemTime();
    mVspInUse = false;
    mExpectAcquireFences = false;
    mIsForceCloneMode = false;

    {
        Mutex::Autolock _l(mConfigLock);
        mCurrentConfig = mNextConfig;
    }

    bool shouldBeConnected = (display != NULL);
    if (shouldBeConnected != mLastConnectionStatus) {
        // calling this will reload the property 'hwc.video.extmode.enable'
        Hwcomposer::getInstance().getDisplayAnalyzer()->isVideoExtModeEnabled();
        char propertyVal[PROPERTY_VALUE_MAX];
        if (property_get("widi.compose.rgb_upscale", propertyVal, NULL) > 0)
            mVspUpscale = atoi(propertyVal);
        if (property_get("widi.compose.all_video", propertyVal, NULL) > 0)
            mDebugVspClear = atoi(propertyVal);
        if (property_get("widi.compose.dump", propertyVal, NULL) > 0)
            mDebugVspDump = atoi(propertyVal);

        Hwcomposer::getInstance().getMultiDisplayObserver()->notifyWidiConnectionStatus(shouldBeConnected);
        mLastConnectionStatus = shouldBeConnected;
    }

    if (!display) {
        // No image. We're done with any mappings and CSC buffers.
        mMappedBufferCache.clear();
        Mutex::Autolock _l(mTaskLock);
        mCscBuffers.clear();
        return true;
    }

    if (!mCurrentConfig.frameServerActive) {
        // We're done with CSC buffers, since we blit to outbuf in this mode.
        // We want to keep mappings cached, so we don't clear mMappedBufferCache.
        Mutex::Autolock _l(mTaskLock);
        mCscBuffers.clear();
    }

    // by default send the FRAMEBUFFER_TARGET layer (composited image)
    const ssize_t fbTarget = display->numHwLayers-1;
    mRgbLayer = fbTarget;
    mYuvLayer = -1;

    DisplayAnalyzer *analyzer = mHwc.getDisplayAnalyzer();

    mProtectedMode = false;

    if (mCurrentConfig.typeChangeListener != NULL &&
        !analyzer->isOverlayAllowed() &&
        analyzer->getVideoInstances() <= 1) {
        if (mCurrentConfig.typeChangeListener->shutdownVideo() != OK) {
            ITRACE("Waiting for prior encoder session to shut down...");
        }
        /* Setting the following flag to true enables us to call bufferInfoChanged() in clone mode. */
        mNextConfig.forceNotifyBufferInfo = true;
        mYuvLayer = -1;
        mRgbLayer = -1;
        // Skipping frames.
        // Fences aren't set in prepare, and we don't need them here, but they'll
        // be set later and we have to close them. Don't log a warning in this case.
        mExpectAcquireFences = true;
        for (ssize_t i = 0; i < fbTarget; i++)
            display->hwLayers[i].compositionType = HWC_OVERLAY;
        return true;
    }

    for (ssize_t i = 0; i < fbTarget; i++) {
        hwc_layer_1_t& layer = display->hwLayers[i];
        if (analyzer->isVideoLayer(layer) && (mCurrentConfig.extendedModeEnabled || mDebugVspClear || analyzer->isProtectedLayer(layer))) {
            if (mCurrentConfig.frameServerActive && mCurrentConfig.extendedModeEnabled) {
                // If composed in surface flinger, then stream fbtarget.
                if ((layer.flags & HWC_SKIP_LAYER) && !analyzer->ignoreVideoSkipFlag()) {
                    continue;
                }

                /* If the resolution of the video layer is less than QCIF, then we play it in clone mode only. */
                uint32_t vidContentWidth = layer.sourceCropf.right - layer.sourceCropf.left;
                uint32_t vidContentHeight = layer.sourceCropf.bottom - layer.sourceCropf.top;
                if (vidContentWidth < QCIF_WIDTH || vidContentHeight < QCIF_HEIGHT) {
                    VTRACE("Ignoring layer %zd which is too small for extended mode", i);
                    continue;
                }
            }
            mYuvLayer = i;
            mProtectedMode = analyzer->isProtectedLayer(layer);
            break;
        }
    }

    if (mYuvLayer == -1) {
        mFirstVideoFrame = true;
        mDecWidth = 0;
        mDecHeight = 0;
    }

    if (mCurrentConfig.frameServerActive && mCurrentConfig.extendedModeEnabled && mYuvLayer != -1) {
        if (handleExtendedMode(display)) {
            mYuvLayer = -1;
            mRgbLayer = -1;
            // Extended mode is successful.
            // Fences aren't set in prepare, and we don't need them here, but they'll
            // be set later and we have to close them. Don't log a warning in this case.
            mExpectAcquireFences = true;
            for (ssize_t i = 0; i < fbTarget; i++)
                display->hwLayers[i].compositionType = HWC_OVERLAY;
            return true;
        }
        // if there was an error in playback, fall back to clone mode
        WTRACE("Error, falling back to clone mode");
        mIsForceCloneMode = true;
        mYuvLayer = -1;
    }

    if (mYuvLayer == 0 && fbTarget == 1) {
        // No RGB layer, so tell queueCompose to use blank RGB in fbtarget.
        mRgbLayer = -1;
    }
    else if (mYuvLayer == 0 && fbTarget == 2) {
        if (canUseDirectly(display, 1))
            mRgbLayer = 1;
    }
    else if (mYuvLayer == -1 && fbTarget == 1) {
        if (canUseDirectly(display, 0))
            mRgbLayer = 0;
    }

    for (ssize_t i = 0; i < fbTarget; i++) {
        hwc_layer_1_t& layer = display->hwLayers[i];
        if (i == mYuvLayer || i == mRgbLayer || mRgbLayer != fbTarget)
            layer.compositionType = HWC_OVERLAY;
        else
            layer.compositionType = HWC_FRAMEBUFFER;
    }
    if (mYuvLayer != -1 && mRgbLayer == fbTarget)
        // This tells SurfaceFlinger to render this layer by writing transparent pixels
        // to this layer's target region within the framebuffer. This effectively punches
        // a hole through any content that is supposed to show below the video, and the
        // video can be seen through this hole when we composite the YUV and RGBA layers
        // together. Content above will draw on top of this hole and can cover the video.
        // This has no effect when the video is the bottommost layer.
        display->hwLayers[mYuvLayer].hints |= HWC_HINT_CLEAR_FB;

    // we're streaming fbtarget, so send onFramePrepare and wait for composition to happen
    if (mCurrentConfig.frameListener != NULL)
        mCurrentConfig.frameListener->onFramePrepare(mRenderTimestamp, -1);

    return true;
}

bool VirtualDevice::commit(hwc_display_contents_1_t *display, IDisplayContext *context)
{
    RETURN_FALSE_IF_NOT_INIT();

    if (display != NULL && (mRgbLayer != -1 || mYuvLayer != -1))
        sendToWidi(display);

    if (mVspEnabled && !mVspInUse) {
        mVaMapCache.clear();
        sp<DisableVspTask> disableVsp = new DisableVspTask();
        mMappedBufferCache.clear();
        Mutex::Autolock _l(mTaskLock);
        mRgbUpscaleBuffers.clear();
        mTasks.push(disableVsp);
        mRequestQueued.signal();
        mVspEnabled = false;
    }

    if (display != NULL) {
        // All acquire fences should be copied somewhere else or closed by now
        // and set to -1 in these structs except in the case of extended mode.
        // Make sure the fences are closed and log a warning if not in extended mode.
        if (display->outbufAcquireFenceFd != -1) {
            if (!mExpectAcquireFences)
                WTRACE("outbuf acquire fence (fd=%d) not yet saved or closed", display->outbufAcquireFenceFd);
            CLOSE_FENCE(display->outbufAcquireFenceFd);
        }
        for (size_t i = 0; i < display->numHwLayers; i++) {
            hwc_layer_1_t& layer = display->hwLayers[i];
            if (layer.acquireFenceFd != -1) {
                if (!mExpectAcquireFences && (i < display->numHwLayers-1 || i == (size_t) mRgbLayer))
                    WTRACE("layer %zu acquire fence (fd=%d) not yet saved or closed", i, layer.acquireFenceFd);
                CLOSE_FENCE(layer.acquireFenceFd);
            }
        }
    }

    return true;
}

bool VirtualDevice::sendToWidi(hwc_display_contents_1_t *display)
{
    VTRACE("RGB=%d, YUV=%d", mRgbLayer, mYuvLayer);

    if (mYuvLayer == -1 && mRgbLayer == -1)
        return true;

    if (mYuvLayer != -1) {
        mVspInUse = true;
        if (queueCompose(display))
            return true;
    }

    return queueColorConvert(display);
}

bool VirtualDevice::queueCompose(hwc_display_contents_1_t *display)
{
    hwc_layer_1_t& yuvLayer = display->hwLayers[mYuvLayer];
    if (yuvLayer.handle == NULL) {
        ETRACE("No video handle");
        return false;
    }
    if (!mCurrentConfig.frameServerActive && display->outbuf == NULL) {
        ETRACE("No outbuf");
        return true; // fallback would be pointless
    }

    sp<ComposeTask> composeTask = new ComposeTask();

    sp<RefBase> heldBuffer;
    sp<OnFrameReadyTask> frameReadyTask;
    Mutex::Autolock _l(mTaskLock);

    float upscale_x = 1.0;
    float upscale_y = 1.0;
    hwc_layer_1_t& fbTarget = display->hwLayers[display->numHwLayers-1];
    composeTask->outWidth = fbTarget.sourceCropf.right - fbTarget.sourceCropf.left;
    composeTask->outHeight = fbTarget.sourceCropf.bottom - fbTarget.sourceCropf.top;

    bool scaleRgb = false;
    if (mCurrentConfig.frameServerActive) {
        if (mVspUpscale) {
            composeTask->outWidth = mCurrentConfig.policy.scaledWidth;
            composeTask->outHeight = mCurrentConfig.policy.scaledHeight;
            upscale_x = mCurrentConfig.policy.scaledWidth/(fbTarget.sourceCropf.right - fbTarget.sourceCropf.left);
            upscale_y = mCurrentConfig.policy.scaledHeight/(fbTarget.sourceCropf.bottom - fbTarget.sourceCropf.top);
            scaleRgb = composeTask->outWidth != fbTarget.sourceCropf.right - fbTarget.sourceCropf.left ||
                       composeTask->outHeight != fbTarget.sourceCropf.bottom - fbTarget.sourceCropf.top;
        }

        composeTask->outputHandle = mCscBuffers.get(composeTask->outWidth, composeTask->outHeight, &heldBuffer);
        if (composeTask->outputHandle == NULL) {
            WTRACE("Out of CSC buffers, dropping frame");
            return true;
        }
    } else {
        composeTask->outputHandle = display->outbuf;
    }

    vspPrepare(composeTask->outWidth, composeTask->outHeight);

    composeTask->videoCachedBuffer = getMappedBuffer(yuvLayer.handle);
    if (composeTask->videoCachedBuffer == NULL) {
        ETRACE("Couldn't map video handle %p", yuvLayer.handle);
        return false;
    }
    if (composeTask->videoCachedBuffer->mapper == NULL) {
        ETRACE("Src mapper gone");
        return false;
    }
    composeTask->heldVideoBuffer = new HeldDecoderBuffer(this, composeTask->videoCachedBuffer);
    IVideoPayloadManager::MetaData videoMetadata;
    if (!mPayloadManager->getMetaData(composeTask->videoCachedBuffer->mapper, &videoMetadata)) {
        ETRACE("Failed to map video payload info");
        return false;
    }
    if (videoMetadata.normalBuffer.width == 0 || videoMetadata.normalBuffer.height == 0) {
        ETRACE("Bad video metadata for handle %p", yuvLayer.handle);
        return false;
    }
    if (videoMetadata.normalBuffer.khandle == 0) {
        ETRACE("Bad khandle");
        return false;
    }

    VARectangle& output_region = composeTask->output_region;
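    // Scale the video's display frame into output space and round to even
    // coordinates, since the NV12 output uses 2x2-subsampled chroma.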
    output_region.x = static_cast<uint32_t>(yuvLayer.displayFrame.left*upscale_x) & ~1;
    output_region.y = static_cast<uint32_t>(yuvLayer.displayFrame.top*upscale_y) & ~1;
    output_region.width = (static_cast<uint32_t>(yuvLayer.displayFrame.right*upscale_x+1) & ~1) - output_region.x;
    output_region.height = (static_cast<uint32_t>(yuvLayer.displayFrame.bottom*upscale_y+1) & ~1) - output_region.y;

    uint32_t videoWidth;
    uint32_t videoHeight;
    if (videoMetadata.transform == 0 || videoMetadata.transform == HAL_TRANSFORM_ROT_180) {
        videoWidth = videoMetadata.normalBuffer.width;
        videoHeight = videoMetadata.normalBuffer.height;
    } else {
        videoWidth = videoMetadata.normalBuffer.height;
        videoHeight = videoMetadata.normalBuffer.width;
    }

    // Layer source crop info is based on an unrotated, unscaled buffer.
    // Rotate the rectangle to get the source crop we'd use for a rotated, unscaled buffer.
    hwc_frect_t rotatedCrop;
    switch (videoMetadata.transform) {
    default:
        rotatedCrop = yuvLayer.sourceCropf;
        break;
    case HAL_TRANSFORM_ROT_90:
        rotatedCrop.left = yuvLayer.sourceCropf.top;
        rotatedCrop.top = videoHeight - yuvLayer.sourceCropf.right;
        rotatedCrop.right = yuvLayer.sourceCropf.bottom;
        rotatedCrop.bottom = videoHeight - yuvLayer.sourceCropf.left;
        break;
    case HAL_TRANSFORM_ROT_180:
        rotatedCrop.left = videoWidth - yuvLayer.sourceCropf.right;
        rotatedCrop.top = videoHeight - yuvLayer.sourceCropf.bottom;
        rotatedCrop.right = videoWidth - yuvLayer.sourceCropf.left;
        rotatedCrop.bottom = videoHeight - yuvLayer.sourceCropf.top;
        break;
    case HAL_TRANSFORM_ROT_270:
        rotatedCrop.left = videoWidth - yuvLayer.sourceCropf.bottom;
        rotatedCrop.top = yuvLayer.sourceCropf.left;
        rotatedCrop.right = videoWidth - yuvLayer.sourceCropf.top;
        rotatedCrop.bottom = yuvLayer.sourceCropf.right;
        break;
    }

    float factor_x = output_region.width / (rotatedCrop.right - rotatedCrop.left);
    float factor_y = output_region.height / (rotatedCrop.bottom - rotatedCrop.top);

    uint32_t scaleWidth = videoWidth * factor_x;
    uint32_t scaleHeight = videoHeight * factor_y;

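    // Force even dimensions when asking the payload manager for a scaled
    // frame; NV12 chroma subsampling cannot represent odd sizes.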
    scaleWidth &= ~1;
    scaleHeight &= ~1;

    IVideoPayloadManager::Buffer info;
    if (!getFrameOfSize(scaleWidth, scaleHeight, videoMetadata, info)) {
        // Return true, as otherwise we fall through to queueColorConvert(),
        // resulting in scrambled frames for protected content.
        ITRACE("scaled frame not yet available.");
        return true;
    }

    composeTask->videoKhandle = info.khandle;
    composeTask->videoStride = info.lumaStride;
    composeTask->videoBufHeight = info.bufHeight;
    composeTask->videoTiled = info.tiled;

    // rotatedCrop accounts for rotation. Now account for any scaling along each dimension.
    hwc_frect_t scaledCrop = rotatedCrop;
    if (info.width < videoWidth) {
        float factor = static_cast<float>(info.width) / videoWidth;
        scaledCrop.left *= factor;
        scaledCrop.right *= factor;
    }
    if (info.height < videoHeight) {
        float factor = static_cast<float>(info.height) / videoHeight;
        scaledCrop.top *= factor;
        scaledCrop.bottom *= factor;
    }

    VARectangle& surface_region = composeTask->surface_region;
    surface_region.x = static_cast<int>(scaledCrop.left) + info.offsetX;
    surface_region.y = static_cast<int>(scaledCrop.top) + info.offsetY;
    surface_region.width = static_cast<int>(scaledCrop.right - scaledCrop.left);
    surface_region.height = static_cast<int>(scaledCrop.bottom - scaledCrop.top);

    VTRACE("Want to take (%d,%d)-(%d,%d) region from %dx%d video (in %dx%d buffer) and output to (%d,%d)-(%d,%d)",
            surface_region.x, surface_region.y,
            surface_region.x + surface_region.width, surface_region.y + surface_region.height,
            info.width, info.height,
            info.bufWidth, info.bufHeight,
            output_region.x, output_region.y,
            output_region.x + output_region.width, output_region.y + output_region.height);

    if (surface_region.x + surface_region.width > static_cast<int>(info.width + info.offsetX) ||
        surface_region.y + surface_region.height > static_cast<int>(info.height + info.offsetY))
    {
        ETRACE("Source crop exceeds video dimensions: (%d,%d)-(%d,%d) > %ux%u",
                surface_region.x, surface_region.y,
                surface_region.x + surface_region.width, surface_region.y + surface_region.height,
                info.width, info.height);
        return false;
    }

    if (surface_region.width > output_region.width || surface_region.height > output_region.height) {
        // VSP can upscale but can't downscale video, so use blank video
        // until we start getting downscaled frames.
        surface_region.x = 0;
        surface_region.y = 0;
        surface_region.width = composeTask->outWidth;
        surface_region.height = composeTask->outHeight;
        output_region = surface_region;
        composeTask->videoKhandle = 0;
        composeTask->videoStride = composeTask->outWidth;
        composeTask->videoBufHeight = composeTask->outHeight;
        composeTask->videoTiled = false;
    }

    composeTask->yuvAcquireFenceFd = yuvLayer.acquireFenceFd;
    yuvLayer.acquireFenceFd = -1;

    composeTask->outbufAcquireFenceFd = display->outbufAcquireFenceFd;
    display->outbufAcquireFenceFd = -1;

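    // Create a release fence on our sw_sync timeline; ComposeTask signals it
    // by incrementing the timeline once composition is done (or on error).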
    int retireFd = sw_sync_fence_create(mSyncTimelineFd, "widi_compose_retire", mNextSyncPoint);
    yuvLayer.releaseFenceFd = retireFd;

    if (mRgbLayer == -1) {
        CLOSE_FENCE(fbTarget.acquireFenceFd);
    } else {
        hwc_layer_1_t& rgbLayer = display->hwLayers[mRgbLayer];
        composeTask->rgbAcquireFenceFd = rgbLayer.acquireFenceFd;
        rgbLayer.acquireFenceFd = -1;
        rgbLayer.releaseFenceFd = dup(retireFd);
    }

    mNextSyncPoint++;
    composeTask->syncTimelineFd = mSyncTimelineFd;

    if (mRgbLayer != -1)
    {
        hwc_layer_1_t& rgbLayer = display->hwLayers[mRgbLayer];
        if (rgbLayer.handle == NULL) {
            ETRACE("No RGB handle");
            return false;
        }

        if (scaleRgb) {
            buffer_handle_t scalingBuffer;
            sp<RefBase> heldUpscaleBuffer;
            while ((scalingBuffer = mRgbUpscaleBuffers.get(composeTask->outWidth, composeTask->outHeight, &heldUpscaleBuffer)) == NULL &&
                   !mTasks.empty()) {
                VTRACE("Waiting for free RGB upscale buffer...");
                mRequestDequeued.wait(mTaskLock);
            }
            if (scalingBuffer == NULL) {
                ETRACE("Couldn't get scaling buffer");
                return false;
            }
            BufferManager* mgr = mHwc.getBufferManager();
            crop_t destRect;
            destRect.x = 0;
            destRect.y = 0;
            destRect.w = composeTask->outWidth;
            destRect.h = composeTask->outHeight;
            if (!mgr->blit(rgbLayer.handle, scalingBuffer, destRect, true, true))
                return true;
            composeTask->rgbHandle = scalingBuffer;
            composeTask->heldRgbHandle = heldUpscaleBuffer;
        }
        else {
            unsigned int pixel_format = VA_FOURCC_BGRA;
            const IMG_native_handle_t* nativeHandle = reinterpret_cast<const IMG_native_handle_t*>(rgbLayer.handle);
            if (nativeHandle->iFormat == HAL_PIXEL_FORMAT_RGBA_8888)
                pixel_format = VA_FOURCC_RGBA;
            mRgbUpscaleBuffers.clear();
            ssize_t index = mVaMapCache.indexOfKey(rgbLayer.handle);
            if (index == NAME_NOT_FOUND) {
                composeTask->mappedRgbIn = new VAMappedHandleObject(va_dpy, rgbLayer.handle, composeTask->outWidth, composeTask->outHeight, pixel_format);
                mVaMapCache.add(rgbLayer.handle, composeTask->mappedRgbIn);
            }
            else
                composeTask->mappedRgbIn = mVaMapCache[index];
            if (composeTask->mappedRgbIn->surface == 0) {
                ETRACE("Unable to map RGB surface");
                return false;
            }
        }
    }
    else
        composeTask->mappedRgbIn = NULL;

    mTasks.push_back(composeTask);
    mRequestQueued.signal();

    if (mCurrentConfig.frameServerActive) {

        FrameInfo inputFrameInfo;
        memset(&inputFrameInfo, 0, sizeof(inputFrameInfo));
        inputFrameInfo.isProtected = mProtectedMode;
        inputFrameInfo.frameType = HWC_FRAMETYPE_FRAME_BUFFER;
        if (mVspUpscale) {
            float upscale_x = (rotatedCrop.right - rotatedCrop.left) /
                              (yuvLayer.displayFrame.right - yuvLayer.displayFrame.left);
            float upscale_y = (rotatedCrop.bottom - rotatedCrop.top) /
                              (yuvLayer.displayFrame.bottom - yuvLayer.displayFrame.top);
            float upscale = upscale_x > upscale_y ? upscale_x : upscale_y;
            if (upscale <= 1.0)
                upscale = 1.0;
            inputFrameInfo.contentWidth = (fbTarget.sourceCropf.right - fbTarget.sourceCropf.left)*upscale;
            inputFrameInfo.contentHeight = (fbTarget.sourceCropf.bottom - fbTarget.sourceCropf.top)*upscale;
        }
        else {
            inputFrameInfo.contentWidth = composeTask->outWidth;
            inputFrameInfo.contentHeight = composeTask->outHeight;
        }
        inputFrameInfo.contentFrameRateN = 0;
        inputFrameInfo.contentFrameRateD = 0;
        FrameInfo outputFrameInfo = inputFrameInfo;

        BufferManager* mgr = mHwc.getBufferManager();
        DataBuffer* dataBuf = mgr->lockDataBuffer(composeTask->outputHandle);
        outputFrameInfo.contentWidth = composeTask->outWidth;
        outputFrameInfo.contentHeight = composeTask->outHeight;
        outputFrameInfo.bufferWidth = dataBuf->getWidth();
        outputFrameInfo.bufferHeight = dataBuf->getHeight();
        outputFrameInfo.lumaUStride = dataBuf->getWidth();
        outputFrameInfo.chromaUStride = dataBuf->getWidth();
        outputFrameInfo.chromaVStride = dataBuf->getWidth();
        mgr->unlockDataBuffer(dataBuf);

        queueFrameTypeInfo(inputFrameInfo);
        if (mCurrentConfig.policy.scaledWidth == 0 || mCurrentConfig.policy.scaledHeight == 0)
            return true; // This isn't a failure, WiDi just doesn't want frames right now.
        queueBufferInfo(outputFrameInfo);

        if (mCurrentConfig.frameListener != NULL) {
            frameReadyTask = new OnFrameReadyTask();
            frameReadyTask->renderTask = composeTask;
            frameReadyTask->heldBuffer = heldBuffer;
            frameReadyTask->frameListener = mCurrentConfig.frameListener;
            frameReadyTask->handle = composeTask->outputHandle;
            frameReadyTask->handleType = HWC_HANDLE_TYPE_GRALLOC;
            frameReadyTask->renderTimestamp = mRenderTimestamp;
            frameReadyTask->mediaTimestamp = -1;
            mTasks.push_back(frameReadyTask);
        }
    }
    else {
        display->retireFenceFd = dup(retireFd);
    }

    return true;
}

bool VirtualDevice::queueColorConvert(hwc_display_contents_1_t *display)
{
    if (mRgbLayer == -1) {
        ETRACE("RGB layer not set");
        return false;
    }
    hwc_layer_1_t& layer = display->hwLayers[mRgbLayer];
    if (layer.handle == NULL) {
        ETRACE("RGB layer has no handle set");
        return false;
    }
    if (display->outbuf == NULL) {
        ETRACE("outbuf is not set");
        return false;
    }

    {
        const IMG_native_handle_t* nativeSrcHandle = reinterpret_cast<const IMG_native_handle_t*>(layer.handle);
        const IMG_native_handle_t* nativeDestHandle = reinterpret_cast<const IMG_native_handle_t*>(display->outbuf);

        if ((nativeSrcHandle->iFormat == HAL_PIXEL_FORMAT_RGBA_8888 &&
             nativeDestHandle->iFormat == HAL_PIXEL_FORMAT_BGRA_8888) ||
            (nativeSrcHandle->iFormat == HAL_PIXEL_FORMAT_BGRA_8888 &&
             nativeDestHandle->iFormat == HAL_PIXEL_FORMAT_RGBA_8888))
        {
            SYNC_WAIT_AND_CLOSE(layer.acquireFenceFd);
            SYNC_WAIT_AND_CLOSE(display->outbufAcquireFenceFd);
            display->retireFenceFd = -1;

            // synchronous in this case
            colorSwap(layer.handle, display->outbuf, ((nativeSrcHandle->iWidth+31)&~31)*nativeSrcHandle->iHeight);
            // Workaround: Don't keep cached buffers. If the VirtualDisplaySurface gets destroyed,
            // these would be unmapped on the next frame, after the buffers are destroyed,
            // which is causing heap corruption, probably due to a double-free somewhere.
            mMappedBufferCache.clear();
            return true;
        }
    }

    sp<BlitTask> blitTask = new BlitTask();
    sp<OnFrameReadyTask> frameReadyTask;
    blitTask->destRect.x = 0;
    blitTask->destRect.y = 0;
    blitTask->destRect.w = layer.sourceCropf.right - layer.sourceCropf.left;
    blitTask->destRect.h = layer.sourceCropf.bottom - layer.sourceCropf.top;
    blitTask->srcHandle = layer.handle;

    sp<RefBase> heldBuffer;
    Mutex::Autolock _l(mTaskLock);

    blitTask->srcAcquireFenceFd = layer.acquireFenceFd;
    layer.acquireFenceFd = -1;

    blitTask->syncTimelineFd = mSyncTimelineFd;
    // The release fence signals after BlitTask::run() calls sw_sync_timeline_inc().
    layer.releaseFenceFd = sw_sync_fence_create(mSyncTimelineFd, "widi_blit_retire", mNextSyncPoint);
    mNextSyncPoint++;

    if (mCurrentConfig.frameServerActive) {
        blitTask->destHandle = mCscBuffers.get(blitTask->destRect.w, blitTask->destRect.h, &heldBuffer);
        blitTask->destAcquireFenceFd = -1;

        // we do not use retire fence in frameServerActive path.
        CLOSE_FENCE(display->retireFenceFd);

        // we use our own buffer, so just close this fence without a wait
        CLOSE_FENCE(display->outbufAcquireFenceFd);
    }
    else {
        blitTask->destHandle = display->outbuf;
        blitTask->destAcquireFenceFd = display->outbufAcquireFenceFd;
        // don't let TngDisplayContext::commitEnd() close this
        display->outbufAcquireFenceFd = -1;
        display->retireFenceFd = dup(layer.releaseFenceFd);
    }

    if (blitTask->destHandle == NULL) {
        WTRACE("Out of CSC buffers, dropping frame");
        return false;
    }

    mTasks.push_back(blitTask);
    mRequestQueued.signal();

    if (mCurrentConfig.frameServerActive) {
        FrameInfo inputFrameInfo;
        memset(&inputFrameInfo, 0, sizeof(inputFrameInfo));
        inputFrameInfo.isProtected = mProtectedMode;
        FrameInfo outputFrameInfo;

        inputFrameInfo.frameType = HWC_FRAMETYPE_FRAME_BUFFER;
        inputFrameInfo.contentWidth = blitTask->destRect.w;
        inputFrameInfo.contentHeight = blitTask->destRect.h;
        inputFrameInfo.contentFrameRateN = 0;
        inputFrameInfo.contentFrameRateD = 0;
        outputFrameInfo = inputFrameInfo;

        BufferManager* mgr = mHwc.getBufferManager();
        DataBuffer* dataBuf = mgr->lockDataBuffer(blitTask->destHandle);
        outputFrameInfo.bufferWidth = dataBuf->getWidth();
        outputFrameInfo.bufferHeight = dataBuf->getHeight();
        outputFrameInfo.lumaUStride = dataBuf->getWidth();
        outputFrameInfo.chromaUStride = dataBuf->getWidth();
        outputFrameInfo.chromaVStride = dataBuf->getWidth();
        mgr->unlockDataBuffer(dataBuf);

        if (!mIsForceCloneMode)
            queueFrameTypeInfo(inputFrameInfo);

        if (mCurrentConfig.policy.scaledWidth == 0 || mCurrentConfig.policy.scaledHeight == 0)
            return true; // This isn't a failure, WiDi just doesn't want frames right now.
        queueBufferInfo(outputFrameInfo);

        if (mCurrentConfig.frameListener != NULL) {
            frameReadyTask = new OnFrameReadyTask();
            frameReadyTask->renderTask = blitTask;
            frameReadyTask->heldBuffer = heldBuffer;
            frameReadyTask->frameListener = mCurrentConfig.frameListener;
            frameReadyTask->handle = blitTask->destHandle;
            frameReadyTask->handleType = HWC_HANDLE_TYPE_GRALLOC;
            frameReadyTask->renderTimestamp = mRenderTimestamp;
            frameReadyTask->mediaTimestamp = -1;
            mTasks.push_back(frameReadyTask);
        }
    }

    return true;
}
1450
bool VirtualDevice::handleExtendedMode(hwc_display_contents_1_t *display)
{
    FrameInfo inputFrameInfo;
    memset(&inputFrameInfo, 0, sizeof(inputFrameInfo));
    inputFrameInfo.isProtected = mProtectedMode;

    hwc_layer_1_t& layer = display->hwLayers[mYuvLayer];
    if (layer.handle == NULL) {
        ETRACE("video layer has no handle set");
        return false;
    }
    sp<CachedBuffer> cachedBuffer;
    if ((cachedBuffer = getMappedBuffer(layer.handle)) == NULL) {
        ETRACE("Failed to map display buffer");
        return false;
    }

    inputFrameInfo.frameType = HWC_FRAMETYPE_VIDEO;
    // For video mode, let 30 fps be the default value.
    inputFrameInfo.contentFrameRateN = 30;
    inputFrameInfo.contentFrameRateD = 1;

    IVideoPayloadManager::MetaData metadata;
    if (!mPayloadManager->getMetaData(cachedBuffer->mapper, &metadata)) {
        ETRACE("Failed to get metadata");
        return false;
    }

    if (metadata.transform == 0 || metadata.transform == HAL_TRANSFORM_ROT_180) {
        inputFrameInfo.contentWidth = metadata.normalBuffer.width;
        inputFrameInfo.contentHeight = metadata.normalBuffer.height;
    } else {
        inputFrameInfo.contentWidth = metadata.normalBuffer.height;
        inputFrameInfo.contentHeight = metadata.normalBuffer.width;
        // 90 and 270 degree rotation have issues that appear to be decoder bugs.
        ITRACE("Skipping extended mode due to rotation of 90 or 270");
        return false;
    }

    // Use the crop size; if something changed, derive it again.
    // Only get video source info if the frame rate has not been initialized;
    // getVideoSourceInfo() is a fairly expensive operation, and this
    // optimization saves us a few milliseconds per frame.
    if (mFirstVideoFrame || (mOrigContentWidth != metadata.normalBuffer.width) ||
            (mOrigContentHeight != metadata.normalBuffer.height)) {
        mVideoFramerate = inputFrameInfo.contentFrameRateN;
        VTRACE("VideoWidth = %d, VideoHeight = %d", metadata.normalBuffer.width, metadata.normalBuffer.height);
        mOrigContentWidth = metadata.normalBuffer.width;
        mOrigContentHeight = metadata.normalBuffer.height;

        // Use the session ID of the first video instance by default.
        int sessionID = Hwcomposer::getInstance().getDisplayAnalyzer()->getFirstVideoInstanceSessionID();
        if (sessionID >= 0) {
            ITRACE("Session id = %d", sessionID);
            VideoSourceInfo videoInfo;
            memset(&videoInfo, 0, sizeof(videoInfo));
            status_t ret = mHwc.getMultiDisplayObserver()->getVideoSourceInfo(sessionID, &videoInfo);
            if (ret == NO_ERROR) {
                ITRACE("width = %d, height = %d, fps = %d", videoInfo.width, videoInfo.height,
                        videoInfo.frameRate);
                if (videoInfo.frameRate > 0) {
                    mVideoFramerate = videoInfo.frameRate;
                }
            }
        }
        mFirstVideoFrame = false;
    }
    inputFrameInfo.contentFrameRateN = mVideoFramerate;
    inputFrameInfo.contentFrameRateD = 1;

    sp<ComposeTask> composeTask;
    sp<RefBase> heldBuffer;
    Mutex::Autolock _l(mTaskLock);

    if (mCurrentConfig.policy.scaledWidth == 0 || mCurrentConfig.policy.scaledHeight == 0) {
        queueFrameTypeInfo(inputFrameInfo);
        return true; // This isn't a failure, WiDi just doesn't want frames right now.
    }

    IVideoPayloadManager::Buffer info;
    if (!getFrameOfSize(mCurrentConfig.policy.scaledWidth, mCurrentConfig.policy.scaledHeight, metadata, info)) {
        ITRACE("Extended mode waiting for scaled frame");
        return false;
    }

    queueFrameTypeInfo(inputFrameInfo);

    heldBuffer = new HeldDecoderBuffer(this, cachedBuffer);
    int64_t mediaTimestamp = metadata.timestamp;

    VARectangle surface_region;
    surface_region.x = info.offsetX;
    surface_region.y = info.offsetY;
    surface_region.width = info.width;
    surface_region.height = info.height;
    FrameInfo outputFrameInfo = inputFrameInfo;
    outputFrameInfo.bufferFormat = metadata.format;

    outputFrameInfo.contentWidth = info.width;
    outputFrameInfo.contentHeight = info.height;
    outputFrameInfo.bufferWidth = info.bufWidth;
    outputFrameInfo.bufferHeight = info.bufHeight;
    outputFrameInfo.lumaUStride = info.lumaStride;
    outputFrameInfo.chromaUStride = info.chromaUStride;
    outputFrameInfo.chromaVStride = info.chromaVStride;

    if (outputFrameInfo.bufferFormat == 0 ||
            outputFrameInfo.bufferWidth < outputFrameInfo.contentWidth ||
            outputFrameInfo.bufferHeight < outputFrameInfo.contentHeight ||
            outputFrameInfo.contentWidth <= 0 || outputFrameInfo.contentHeight <= 0 ||
            outputFrameInfo.lumaUStride <= 0 ||
            outputFrameInfo.chromaUStride <= 0 || outputFrameInfo.chromaVStride <= 0) {
        ITRACE("Payload cleared or inconsistent info, not sending frame");
        ITRACE("outputFrameInfo.bufferFormat  = %d", outputFrameInfo.bufferFormat);
        ITRACE("outputFrameInfo.bufferWidth   = %d", outputFrameInfo.bufferWidth);
        ITRACE("outputFrameInfo.contentWidth  = %d", outputFrameInfo.contentWidth);
        ITRACE("outputFrameInfo.bufferHeight  = %d", outputFrameInfo.bufferHeight);
        ITRACE("outputFrameInfo.contentHeight = %d", outputFrameInfo.contentHeight);
        ITRACE("outputFrameInfo.lumaUStride   = %d", outputFrameInfo.lumaUStride);
        ITRACE("outputFrameInfo.chromaUStride = %d", outputFrameInfo.chromaUStride);
        ITRACE("outputFrameInfo.chromaVStride = %d", outputFrameInfo.chromaVStride);
        return false;
    }

    if (mCurrentConfig.policy.scaledWidth == 0 || mCurrentConfig.policy.scaledHeight == 0)
        return true; // This isn't a failure, WiDi just doesn't want frames right now.

    if (info.khandle == mExtLastKhandle && mediaTimestamp == mExtLastTimestamp) {
        // Same frame again. We don't send a frame, but we return true because
        // this isn't an error.
        if (metadata.transform != 0)
            mVspInUse = true; // Don't shut down VSP just to start it again really quickly.
        return true;
    }
    mExtLastKhandle = info.khandle;
    mExtLastTimestamp = mediaTimestamp;

    HWCBufferHandleType handleType = HWC_HANDLE_TYPE_KBUF;

    buffer_handle_t handle = info.khandle;

    // Ideally we'd check if there's an offset (info.offsetX > 0 || info.offsetY > 0),
    // so we use VSP only when cropping is needed. But using the khandle directly when
    // both rotation and scaling are involved can encode the frame with the wrong
    // tiling status, so use VSP to normalize if any rotation is involved.
    if (metadata.transform != 0) {
        // Cropping (or the above workaround) is needed, so use VSP to do it.
        mVspInUse = true;
        vspPrepare(info.width, info.height);

        composeTask = new ComposeTask();
        composeTask->heldVideoBuffer = heldBuffer;
        heldBuffer = NULL;
        composeTask->outWidth = info.width;
        composeTask->outHeight = info.height;
        composeTask->outputHandle = mCscBuffers.get(composeTask->outWidth, composeTask->outHeight, &heldBuffer);
        if (composeTask->outputHandle == NULL) {
            ITRACE("Out of CSC buffers, dropping frame");
            return true;
        }

        composeTask->surface_region = surface_region;
        composeTask->videoCachedBuffer = cachedBuffer;
        VARectangle& output_region = composeTask->output_region;
        output_region.x = 0;
        output_region.y = 0;
        output_region.width = info.width;
        output_region.height = info.height;

        composeTask->videoKhandle = info.khandle;
        composeTask->videoStride = info.lumaStride;
        composeTask->videoBufHeight = info.bufHeight;
        composeTask->videoTiled = info.tiled;

        BufferManager* mgr = mHwc.getBufferManager();
        DataBuffer* dataBuf = mgr->lockDataBuffer(composeTask->outputHandle);
        outputFrameInfo.contentWidth = composeTask->outWidth;
        outputFrameInfo.contentHeight = composeTask->outHeight;
        outputFrameInfo.bufferWidth = dataBuf->getWidth();
        outputFrameInfo.bufferHeight = dataBuf->getHeight();
        outputFrameInfo.lumaUStride = dataBuf->getWidth();
        outputFrameInfo.chromaUStride = dataBuf->getWidth();
        outputFrameInfo.chromaVStride = dataBuf->getWidth();
        mgr->unlockDataBuffer(dataBuf);

        handle = composeTask->outputHandle;
        handleType = HWC_HANDLE_TYPE_GRALLOC;

        mTasks.push_back(composeTask);
        mRequestQueued.signal();
    }

    queueBufferInfo(outputFrameInfo);

    if (mCurrentConfig.frameListener != NULL) {
        sp<OnFrameReadyTask> frameReadyTask = new OnFrameReadyTask();
        frameReadyTask->renderTask = composeTask;
        frameReadyTask->heldBuffer = heldBuffer;
        frameReadyTask->frameListener = mCurrentConfig.frameListener;
        frameReadyTask->handle = handle;
        frameReadyTask->handleType = handleType;
        frameReadyTask->renderTimestamp = mRenderTimestamp;
        frameReadyTask->mediaTimestamp = mediaTimestamp;

        mTasks.push_back(frameReadyTask);
        mRequestQueued.signal();
    }

    return true;
}

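// Notify the type-change listener whenever the input frame description
// changes (or notification is forced), by queueing a FrameTypeChangedTask
// for the blit thread.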
void VirtualDevice::queueFrameTypeInfo(const FrameInfo& inputFrameInfo)
{
    if (mCurrentConfig.forceNotifyFrameType ||
            memcmp(&inputFrameInfo, &mLastInputFrameInfo, sizeof(inputFrameInfo)) != 0) {
        // something changed, notify type change listener
        mNextConfig.forceNotifyFrameType = false;
        mLastInputFrameInfo = inputFrameInfo;

        sp<FrameTypeChangedTask> notifyTask = new FrameTypeChangedTask;
        notifyTask->typeChangeListener = mCurrentConfig.typeChangeListener;
        notifyTask->inputFrameInfo = inputFrameInfo;
        mTasks.push_back(notifyTask);
    }
}

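// Likewise, notify the listener when the output buffer description changes.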
void VirtualDevice::queueBufferInfo(const FrameInfo& outputFrameInfo)
{
    if (mCurrentConfig.forceNotifyBufferInfo ||
            memcmp(&outputFrameInfo, &mLastOutputFrameInfo, sizeof(outputFrameInfo)) != 0) {
        mNextConfig.forceNotifyBufferInfo = false;
        mLastOutputFrameInfo = outputFrameInfo;

        sp<BufferInfoChangedTask> notifyTask = new BufferInfoChangedTask;
        notifyTask->typeChangeListener = mCurrentConfig.typeChangeListener;
        notifyTask->outputFrameInfo = outputFrameInfo;

        //if (handleType == HWC_HANDLE_TYPE_GRALLOC)
        //    mMappedBufferCache.clear(); // !
        mTasks.push_back(notifyTask);
    }
}

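// CPU fallback that copies src to dest while swapping the R and B channels
// of each 32-bit pixel (RGBA <-> BGRA). Both buffers must be mappable and at
// least pixelCount * 4 bytes.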
void VirtualDevice::colorSwap(buffer_handle_t src, buffer_handle_t dest, uint32_t pixelCount)
{
    sp<CachedBuffer> srcCachedBuffer;
    sp<CachedBuffer> destCachedBuffer;

    {
        srcCachedBuffer = getMappedBuffer(src);
        if (srcCachedBuffer == NULL || srcCachedBuffer->mapper == NULL)
            return;
        destCachedBuffer = getMappedBuffer(dest);
        if (destCachedBuffer == NULL || destCachedBuffer->mapper == NULL)
            return;
    }

    uint8_t* srcPtr = static_cast<uint8_t*>(srcCachedBuffer->mapper->getCpuAddress(0));
    uint8_t* destPtr = static_cast<uint8_t*>(destCachedBuffer->mapper->getCpuAddress(0));
    if (srcPtr == NULL || destPtr == NULL)
        return;
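    // Per-pixel swizzle: exchange bytes 0 and 2 (R and B), keep G and alpha.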
    while (pixelCount > 0) {
        destPtr[0] = srcPtr[2];
        destPtr[1] = srcPtr[1];
        destPtr[2] = srcPtr[0];
        destPtr[3] = srcPtr[3];
        srcPtr += 4;
        destPtr += 4;
        pixelCount--;
    }
}

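// Ensure the VSP (video signal processor) is running at the given output
// size. If it is already running at a different size, queue a disable
// followed by a re-enable on the blit thread, then block until the enable
// task has completed so that buffers can be mapped from this thread.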
void VirtualDevice::vspPrepare(uint32_t width, uint32_t height)
{
    if (mVspEnabled && width == mVspWidth && height == mVspHeight)
        return;

    if (mVspEnabled) {
        ITRACE("Going to switch VSP from %ux%u to %ux%u", mVspWidth, mVspHeight, width, height);
        mMappedBufferCache.clear();
        mVaMapCache.clear();
        sp<DisableVspTask> disableVsp = new DisableVspTask();
        mTasks.push_back(disableVsp);
    }
    mVspWidth = width;
    mVspHeight = height;

    sp<EnableVspTask> enableTask = new EnableVspTask();
    enableTask->width = width;
    enableTask->height = height;
    mTasks.push_back(enableTask);
    mRequestQueued.signal();
    // To map a buffer from this thread, we need this task to complete on the other thread.
    while (enableTask->getStrongCount() > 1) {
        VTRACE("Waiting for WidiBlit thread to enable VSP...");
        mRequestDequeued.wait(mTaskLock);
    }
    mVspEnabled = true;
}

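// Runs on the blit thread. Brings up a libva video-processing (VPP) pipeline
// at the aligned size and pre-renders two helper surfaces: an encrypted black
// YUV frame (va_blank_yuv_in) to substitute for real frame data while
// downscaling kicks in, and a transparent RGBA overlay (va_blank_rgb_in) for
// frames that have no UI to blend.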
void VirtualDevice::vspEnable(uint32_t width, uint32_t height)
{
    width = align_width(width);
    height = align_height(height);
    ITRACE("Start VSP at %ux%u", width, height);
    VAStatus va_status;

    int display = 0;
    int major_ver, minor_ver;
    va_dpy = vaGetDisplay(&display);
    va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaInitialize returns %08x", va_status);

    VAConfigAttrib va_attr;
    va_attr.type = VAConfigAttribRTFormat;
    va_status = vaGetConfigAttributes(va_dpy,
                VAProfileNone,
                VAEntrypointVideoProc,
                &va_attr,
                1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaGetConfigAttributes returns %08x", va_status);

    va_status = vaCreateConfig(
                va_dpy,
                VAProfileNone,
                VAEntrypointVideoProc,
                &va_attr,
                1,
                &va_config);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateConfig returns %08x", va_status);

    VADisplayAttribute attr;
    attr.type = VADisplayAttribRenderMode;
    attr.value = VA_RENDER_MODE_LOCAL_OVERLAY;
    va_status = vaSetDisplayAttributes(va_dpy, &attr, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaSetDisplayAttributes returns %08x", va_status);

    va_status = vaCreateSurfaces(
                va_dpy,
                VA_RT_FORMAT_YUV420,
                width,
                height,
                &va_blank_yuv_in,
                1,
                NULL,
                0);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateSurfaces (video in) returns %08x", va_status);

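    // Describe the blank RGBA surface's memory layout explicitly. Since the
    // memory type below is VA_SURFACE_ATTRIB_MEM_TYPE_VA, the driver allocates
    // the backing store itself; the descriptor conveys the expected layout and
    // the buffer handle is effectively a placeholder.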
    unsigned long buffer;
    VASurfaceAttribExternalBuffers buf;
    int stride = align_width(width);
    int bufHeight = align_height(height);
    buf.pixel_format = VA_FOURCC_RGBA;
    buf.width = width;
    buf.height = height;
    buf.data_size = stride * bufHeight * 4;
    buf.num_planes = 3;
    buf.pitches[0] = stride;
    buf.pitches[1] = stride;
    buf.pitches[2] = stride;
    buf.pitches[3] = 0;
    buf.offsets[0] = 0;
    buf.offsets[1] = stride * bufHeight;
    buf.offsets[2] = buf.offsets[1];
    buf.offsets[3] = 0;
    buf.buffers = &buffer;
    buf.num_buffers = 1;
    buf.flags = 0;
    buf.private_data = NULL;

    VASurfaceAttrib attrib_list[2];
    attrib_list[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
    attrib_list[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
    attrib_list[0].value.type = VAGenericValueTypeInteger;
    attrib_list[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_VA;
    attrib_list[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
    attrib_list[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
    attrib_list[1].value.type = VAGenericValueTypePointer;
    attrib_list[1].value.value.p = (void *)&buf;

    va_status = vaCreateSurfaces(
                va_dpy,
                VA_RT_FORMAT_RGB32,
                stride,
                bufHeight,
                &va_blank_rgb_in,
                1,
                attrib_list,
                2);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateSurfaces (blank rgba in) returns %08x", va_status);

    va_status = vaCreateContext(
                va_dpy,
                va_config,
                stride,
                bufHeight,
                0,
                &va_blank_yuv_in /* not used by VSP, but libva checks for it */,
                1,
                &va_context);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateContext returns %08x", va_status);

    VASurfaceID tmp_yuv;
    va_status = vaCreateSurfaces(
                va_dpy,
                VA_RT_FORMAT_YUV420,
                stride,
                bufHeight,
                &tmp_yuv,
                1,
                NULL,
                0);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateSurfaces (temp yuv) returns %08x", va_status);
    {
        MappedSurface mappedVideoIn(va_dpy, tmp_yuv);
        if (mappedVideoIn.valid()) {
            // Value doesn't matter, as RGBA will be opaque,
            // but I don't want random data in here.
            memset(mappedVideoIn.getPtr(), 0x0, width * height * 3 / 2);
        } else {
            ETRACE("Unable to map tmp black surface");
        }
    }

    {
        MappedSurface mappedBlankIn(va_dpy, va_blank_rgb_in);
        if (mappedBlankIn.valid()) {
            // Fill RGBA with opaque black temporarily, in order to generate an
            // encrypted black buffer in va_blank_yuv_in to use in place of the
            // real frame data during the short interval where we're waiting for
            // downscaling to kick in.
            uint32_t* pixels = reinterpret_cast<uint32_t*>(mappedBlankIn.getPtr());
            for (size_t i = 0; i < stride * height; i++)
                pixels[i] = 0xff000000;
        } else {
            ETRACE("Unable to map blank rgba in");
        }
    }

    // Compose opaque black with temp yuv to produce encrypted black yuv.
    VARectangle region;
    region.x = 0;
    region.y = 0;
    region.width = width;
    region.height = height;
    vspCompose(tmp_yuv, va_blank_rgb_in, va_blank_yuv_in, &region, &region);

    va_status = vaDestroySurfaces(va_dpy, &tmp_yuv, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroySurfaces (temp yuv) returns %08x", va_status);

    {
        // Fill RGBA with transparent black now, to be used when there is no
        // UI to compose on top of the video.
        MappedSurface mappedBlankIn(va_dpy, va_blank_rgb_in);
        if (mappedBlankIn.valid())
            memset(mappedBlankIn.getPtr(), 0, stride * height * 4);
        else
            ETRACE("Unable to map blank rgba in");
    }
}

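// Runs on the blit thread. Sends an end-of-pipeline marker to the VSP, then
// tears down the libva context, surfaces, config, and display.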
void VirtualDevice::vspDisable()
{
    ITRACE("Shut down VSP");

    if (va_context == 0 && va_blank_yuv_in == 0) {
        ITRACE("Already shut down");
        return;
    }

    VABufferID pipeline_param_id;
    VAStatus va_status;
    va_status = vaCreateBuffer(va_dpy,
                va_context,
                VAProcPipelineParameterBufferType,
                sizeof(VAProcPipelineParameterBuffer),
                1,
                NULL,
                &pipeline_param_id);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateBuffer returns %08x", va_status);

    VABlendState blend_state;
    VAProcPipelineParameterBuffer *pipeline_param;
    va_status = vaMapBuffer(va_dpy,
                pipeline_param_id,
                (void **)&pipeline_param);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaMapBuffer returns %08x", va_status);

    memset(pipeline_param, 0, sizeof(VAProcPipelineParameterBuffer));
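    // VA_PIPELINE_FLAG_END marks this as the last operation, giving the VSP a
    // chance to flush and idle cleanly before the context is destroyed.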
    pipeline_param->pipeline_flags = VA_PIPELINE_FLAG_END;
    pipeline_param->num_filters = 0;
    pipeline_param->blend_state = &blend_state;

    va_status = vaUnmapBuffer(va_dpy, pipeline_param_id);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaUnmapBuffer returns %08x", va_status);

    va_status = vaBeginPicture(va_dpy, va_context, va_blank_yuv_in /* just need some valid surface */);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaBeginPicture returns %08x", va_status);

    va_status = vaRenderPicture(va_dpy, va_context, &pipeline_param_id, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaRenderPicture returns %08x", va_status);

    va_status = vaEndPicture(va_dpy, va_context);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaEndPicture returns %08x", va_status);

    va_status = vaDestroyContext(va_dpy, va_context);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroyContext returns %08x", va_status);
    va_context = 0;

    va_status = vaDestroySurfaces(va_dpy, &va_blank_yuv_in, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroySurfaces (video in) returns %08x", va_status);
    va_blank_yuv_in = 0;

    va_status = vaDestroySurfaces(va_dpy, &va_blank_rgb_in, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroySurfaces (blank rgba in) returns %08x", va_status);

    if (va_config) {
        vaDestroyConfig(va_dpy, va_config);
        va_config = 0;
    }
    if (va_dpy) {
        vaTerminate(va_dpy);
        va_dpy = NULL;
    }
}

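// Submits a single VPP pass: scale/crop videoIn according to surface_region,
// blend rgbIn on top, write the result to videoOut per output_region, and
// block until the output surface is ready.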
void VirtualDevice::vspCompose(VASurfaceID videoIn, VASurfaceID rgbIn, VASurfaceID videoOut,
                               const VARectangle* surface_region, const VARectangle* output_region)
{
    VAStatus va_status;

    VABufferID pipeline_param_id;
    va_status = vaCreateBuffer(va_dpy,
                va_context,
                VAProcPipelineParameterBufferType,
                sizeof(VAProcPipelineParameterBuffer),
                1,
                NULL,
                &pipeline_param_id);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateBuffer returns %08x", va_status);

    VABlendState blend_state;

    VAProcPipelineParameterBuffer *pipeline_param;
    va_status = vaMapBuffer(va_dpy,
                pipeline_param_id,
                (void **)&pipeline_param);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaMapBuffer returns %08x", va_status);

    memset(pipeline_param, 0, sizeof(VAProcPipelineParameterBuffer));
    pipeline_param->surface = videoIn;
    pipeline_param->surface_region = surface_region;
    pipeline_param->output_region = output_region;

    pipeline_param->pipeline_flags = 0;
    pipeline_param->num_filters = 0;
    pipeline_param->blend_state = &blend_state;
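    // Note: the RGB blend source is handed over through the additional_outputs
    // list, which appears to be how this VSP driver accepts the blend surface
    // rather than via a dedicated blend-input field.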
    pipeline_param->num_additional_outputs = 1;
    pipeline_param->additional_outputs = &rgbIn;

    va_status = vaUnmapBuffer(va_dpy, pipeline_param_id);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaUnmapBuffer returns %08x", va_status);

    va_status = vaBeginPicture(va_dpy, va_context, videoOut);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaBeginPicture returns %08x", va_status);

    va_status = vaRenderPicture(va_dpy, va_context, &pipeline_param_id, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaRenderPicture returns %08x", va_status);

    va_status = vaEndPicture(va_dpy, va_context);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaEndPicture returns %08x", va_status);

    va_status = vaSyncSurface(va_dpy, videoOut);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaSyncSurface returns %08x", va_status);
}

static uint32_t min(uint32_t a, uint32_t b)
{
    return (a < b) ? a : b;
}

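// Pick whichever decoder-provided buffer (normal, scaling, or rotation) fits
// within the requested width and height, and cap the decoder's output
// resolution accordingly. Returns false if no suitable buffer is available yet.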
bool VirtualDevice::getFrameOfSize(uint32_t width, uint32_t height, const IVideoPayloadManager::MetaData& metadata, IVideoPayloadManager::Buffer& info)
{
    if (metadata.transform == 0 || metadata.transform == HAL_TRANSFORM_ROT_180)
        setMaxDecodeResolution(min(width, metadata.normalBuffer.width), min(height, metadata.normalBuffer.height));
    else
        setMaxDecodeResolution(min(height, metadata.normalBuffer.width), min(width, metadata.normalBuffer.height));

    if (metadata.transform == 0) {
        if (metadata.normalBuffer.khandle != 0 && metadata.normalBuffer.width <= width && metadata.normalBuffer.height <= height) {
            info = metadata.normalBuffer;
            return true;
        }

        if (metadata.scalingBuffer.khandle != 0 && metadata.scalingBuffer.width <= width && metadata.scalingBuffer.height <= height) {
            info = metadata.scalingBuffer;
            return true;
        }
    } else {
        if (metadata.rotationBuffer.khandle != 0 && metadata.rotationBuffer.width <= width && metadata.rotationBuffer.height <= height) {
            info = metadata.rotationBuffer;
            return true;
        }
    }

    return false;
}

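// Ask the multi-display service to cap the decoder's output resolution, so
// the decoder produces frames no larger than the WiDi sink needs. Skipped
// when the cap is unchanged.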
void VirtualDevice::setMaxDecodeResolution(uint32_t width, uint32_t height)
{
    if (mDecWidth == width && mDecHeight == height)
        return;

    int sessionID = mHwc.getDisplayAnalyzer()->getFirstVideoInstanceSessionID();
    if (sessionID < 0) {
        ETRACE("Session id is less than 0");
        return;
    }

    MultiDisplayObserver* mds = mHwc.getMultiDisplayObserver();
    status_t ret = mds->setDecoderOutputResolution(sessionID, width, height, 0, 0, width, height);
    if (ret != NO_ERROR) {
        ETRACE("Failed to set scaling to %ux%u: %x", width, height, ret);
        return;
    }

    mDecWidth = width;
    mDecHeight = height;
    ITRACE("Set scaling to %ux%u", mDecWidth, mDecHeight);
}

bool VirtualDevice::vsyncControl(bool enabled)
{
    RETURN_FALSE_IF_NOT_INIT();
    return mVsyncObserver->control(enabled);
}

bool VirtualDevice::blank(bool blank)
{
    RETURN_FALSE_IF_NOT_INIT();
    return true;
}

bool VirtualDevice::getDisplaySize(int *width, int *height)
{
    RETURN_FALSE_IF_NOT_INIT();
    if (!width || !height) {
        ETRACE("invalid parameters");
        return false;
    }

    // TODO: make this platform specific
    *width = 1280;
    *height = 720;
    return true;
}

bool VirtualDevice::getDisplayConfigs(uint32_t *configs,
                                      size_t *numConfigs)
{
    RETURN_FALSE_IF_NOT_INIT();
    if (!configs || !numConfigs) {
        ETRACE("invalid parameters");
        return false;
    }

    *configs = 0;
    *numConfigs = 1;

    return true;
}

bool VirtualDevice::getDisplayAttributes(uint32_t configs,
                                         const uint32_t *attributes,
                                         int32_t *values)
{
    RETURN_FALSE_IF_NOT_INIT();

    if (!attributes || !values) {
        ETRACE("invalid parameters");
        return false;
    }

    int i = 0;
    while (attributes[i] != HWC_DISPLAY_NO_ATTRIBUTE) {
        switch (attributes[i]) {
        case HWC_DISPLAY_VSYNC_PERIOD:
            values[i] = 1e9 / 60;
            break;
        case HWC_DISPLAY_WIDTH:
            values[i] = 1280;
            break;
        case HWC_DISPLAY_HEIGHT:
            values[i] = 720;
            break;
        case HWC_DISPLAY_DPI_X:
            values[i] = 0;
            break;
        case HWC_DISPLAY_DPI_Y:
            values[i] = 0;
            break;
        default:
            ETRACE("unknown attribute %d", attributes[i]);
            break;
        }
        i++;
    }

    return true;
}

bool VirtualDevice::compositionComplete()
{
    RETURN_FALSE_IF_NOT_INIT();
    return true;
}

bool VirtualDevice::initialize()
{
    // Add initialization code here. If init fails, invoke DEINIT_AND_RETURN_FALSE();
    mNextConfig.typeChangeListener = NULL;
    mNextConfig.policy.scaledWidth = 0;
    mNextConfig.policy.scaledHeight = 0;
    mNextConfig.policy.xdpi = 96;
    mNextConfig.policy.ydpi = 96;
    mNextConfig.policy.refresh = 60;
    mNextConfig.extendedModeEnabled = false;
    mNextConfig.forceNotifyFrameType = false;
    mNextConfig.forceNotifyBufferInfo = false;
    mCurrentConfig = mNextConfig;
    mRgbLayer = -1;
    mYuvLayer = -1;

    memset(&mLastInputFrameInfo, 0, sizeof(mLastInputFrameInfo));
    memset(&mLastOutputFrameInfo, 0, sizeof(mLastOutputFrameInfo));

    mPayloadManager = mHwc.getPlatFactory()->createVideoPayloadManager();

    if (!mPayloadManager) {
        DEINIT_AND_RETURN_FALSE("Failed to create payload manager");
    }

    mVsyncObserver = new SoftVsyncObserver(*this);
    if (!mVsyncObserver || !mVsyncObserver->initialize()) {
        DEINIT_AND_RETURN_FALSE("Failed to create Soft Vsync Observer");
    }

    mSyncTimelineFd = sw_sync_timeline_create();
    mNextSyncPoint = 1;
    mExpectAcquireFences = false;

    mThread = new WidiBlitThread(this);
    mThread->run("WidiBlit", PRIORITY_URGENT_DISPLAY);

    // Publish the frame server service with the service manager.
    status_t ret = defaultServiceManager()->addService(String16("hwc.widi"), this);
    if (ret == NO_ERROR) {
        ProcessState::self()->startThreadPool();
        mInitialized = true;
    } else {
        ETRACE("Could not register hwc.widi with service manager, error = %d", ret);
        deinitialize();
    }

    mVspEnabled = false;
    mVspInUse = false;
    mVspWidth = 0;
    mVspHeight = 0;
    va_dpy = NULL;
    va_config = 0;
    va_context = 0;
    va_blank_yuv_in = 0;
    va_blank_rgb_in = 0;
    mVspUpscale = false;
    mDebugVspClear = false;
    mDebugVspDump = false;
    mDebugCounter = 0;

    ITRACE("Init done.");

    return mInitialized;
}

bool VirtualDevice::isConnected() const
{
    return true;
}

const char* VirtualDevice::getName() const
{
    return "Virtual";
}

int VirtualDevice::getType() const
{
    return DEVICE_VIRTUAL;
}

void VirtualDevice::onVsync(int64_t timestamp)
{
    mHwc.vsync(DEVICE_VIRTUAL, timestamp);
}

void VirtualDevice::dump(Dump& d)
{
}

void VirtualDevice::deinitialize()
{
    if (mPayloadManager) {
        delete mPayloadManager;
        mPayloadManager = NULL;
    }
    DEINIT_AND_DELETE_OBJ(mVsyncObserver);
    mInitialized = false;
}

bool VirtualDevice::setPowerMode(int /*mode*/)
{
    return true;
}

int VirtualDevice::getActiveConfig()
{
    return 0;
}

bool VirtualDevice::setActiveConfig(int /*index*/)
{
    return false;
}

} // namespace intel
} // namespace android