• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2020 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
#include "EvsEmulatedCamera.h"

#include <android-base/logging.h>
#include <android/hardware_buffer.h>
#include <ui/GraphicBufferAllocator.h>
#include <ui/GraphicBufferMapper.h>
#include <utils/SystemClock.h>

#include <algorithm>
#include <cstdint>
#include <cstring>
#include <filesystem>
26 
27 namespace {
28 
29 using ::android::hardware::automotive::evs::V1_1::EvsEventDesc;
30 using ::android::hardware::automotive::evs::V1_1::EvsEventType;
31 
32 using BufferDesc_1_0 = ::android::hardware::automotive::evs::V1_0::BufferDesc;
33 using BufferDesc_1_1 = ::android::hardware::automotive::evs::V1_1::BufferDesc;
34 
35 // Arbitrary limit on number of graphics buffers allowed to be allocated
36 // Safeguards against unreasonable resource consumption and provides a testable limit
37 const unsigned MAX_BUFFERS_IN_FLIGHT = 100;
38 
// Converts a single YUV sample triple to a packed little-endian RGBA word
// with the alpha channel forced to fully opaque.
uint32_t yuvToRgbx(const unsigned char Y, const unsigned char Uin, const unsigned char Vin) {
    // Re-center the chroma samples around zero before applying the matrix.
    const float U = Uin - 128.0f;
    const float V = Vin - 128.0f;

    // Apply the YUV -> RGB conversion matrix and clamp each channel to a
    // displayable byte range.
    const auto toByte = [](float channel) {
        return static_cast<unsigned char>(std::clamp(channel, 0.0f, 255.0f));
    };
    const unsigned char R = toByte(Y + 1.140f * V);
    const unsigned char G = toByte(Y - 0.395f * U - 0.581f * V);
    const unsigned char B = toByte(Y + 2.032f * U);

    // Pack as R | G<<8 | B<<16, filling the alpha channel with ones.
    return ((R & 0xFF)) | ((G & 0xFF) << 8) | ((B & 0xFF) << 16) | 0xFF000000;
}
53 
// Converts a YUYV (YUY2) source image into the RGBA destination buffer
// described by dstBuff.  Two adjacent source pixels share one U/V pair, so
// the inner loop consumes one 32-bit YUYV word and emits two RGBA words.
// Copies at most min(srcHeight, destination height) rows; no scaling is done.
// NOTE(review): assumes the image width is even — confirm for all configs.
void fillRGBAFromYUYV(const BufferDesc_1_1& dstBuff, uint8_t* dstData, void* srcData,
                      unsigned srcStride, unsigned srcHeight) {
    const AHardwareBuffer_Desc* pDesc =
            reinterpret_cast<const AHardwareBuffer_Desc*>(&dstBuff.buffer.description);
    unsigned width = pDesc->width;
    uint32_t* src = reinterpret_cast<uint32_t*>(srcData);
    uint32_t* dst = reinterpret_cast<uint32_t*>(dstData);
    // YUYV packs two pixels into four bytes, so stride-in-pixels is half the
    // stride-in-bytes; the gralloc destination stride is already in pixels.
    unsigned srcStridePixels = srcStride / 2;
    unsigned dstStridePixels = pDesc->stride;

    const int srcRowPadding32 =
            srcStridePixels / 2 - width / 2;              // 2 bytes per pixel, 4 bytes per word
    const int dstRowPadding32 = dstStridePixels - width;  // 4 bytes per pixel, 4 bytes per word

    // Never write past the destination if the source is taller than it.
    const unsigned numRows = std::min(srcHeight, pDesc->height);
    for (unsigned r = 0; r < numRows; ++r) {
        for (unsigned c = 0; c < width / 2; c++) {
            // Note:  we're walking two pixels at a time here (even/odd)
            uint32_t srcPixel = *src++;

            uint8_t Y1 = (srcPixel)&0xFF;
            uint8_t U = (srcPixel >> 8) & 0xFF;
            uint8_t Y2 = (srcPixel >> 16) & 0xFF;
            uint8_t V = (srcPixel >> 24) & 0xFF;

            // On the RGB output, we're writing one pixel at a time
            *(dst + 0) = yuvToRgbx(Y1, U, V);
            *(dst + 1) = yuvToRgbx(Y2, U, V);
            dst += 2;
        }

        // Skip over any extra data or end of row alignment padding
        src += srcRowPadding32;
        dst += dstRowPadding32;
    }
}
90 
fillBufferCopy(const BufferDesc_1_1 & dstBuff,uint8_t * dst,void * srcData,unsigned srcStride,unsigned srcHeight)91 void fillBufferCopy(const BufferDesc_1_1& dstBuff, uint8_t* dst, void* srcData, unsigned srcStride,
92                     unsigned srcHeight) {
93     const AHardwareBuffer_Desc* pDesc =
94             reinterpret_cast<const AHardwareBuffer_Desc*>(&dstBuff.buffer.description);
95 
96     // HAL_PIXEL_FORMAT_RGBA_8888 default output format
97     const unsigned bytesPerPixel = 4;
98     const unsigned dstStride = pDesc->stride * bytesPerPixel;
99 
100     // Simply copy the data, row by row, without the scaling.
101     const unsigned copyStride = std::min(srcStride, dstStride);
102     const unsigned numRows = std::min(srcHeight, pDesc->height);
103     uint8_t* src = reinterpret_cast<uint8_t*>(srcData);
104     for (auto r = 0; r < numRows; ++r) {
105         memcpy(dst, src, copyStride);
106 
107         // Moves to the next row
108         src += srcStride;
109         dst += dstStride;
110     }
111 }
112 }  // namespace
113 
114 namespace android {
115 namespace automotive {
116 namespace evs {
117 namespace V1_1 {
118 namespace implementation {
119 
EvsEmulatedCamera(const char * deviceName,const EmulatedCameraDesc & desc)120 EvsEmulatedCamera::EvsEmulatedCamera(const char* deviceName, const EmulatedCameraDesc& desc) :
121       mFramesAllowed(0), mFramesInUse(0), mCaptureDeviceDesc(desc) {
122     LOG(INFO) << "EvsEmulatedCamera instantiated";
123     mDescription.v1.cameraId = deviceName;
124 
125     mVideo = new VideoCapture();
126 
127     // Default output buffer format.
128     mFormat = HAL_PIXEL_FORMAT_RGBA_8888;
129 
130     // How we expect to use the gralloc buffers we'll exchange with our client
131     mUsage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
132 
133     mDescription.v1.cameraId = deviceName;
134 }
135 
// Tears the camera down.  shutdown() stops any active stream and releases all
// gralloc buffers before the members themselves are destroyed.
EvsEmulatedCamera::~EvsEmulatedCamera() {
    LOG(INFO) << "EvsEmulatedCamera being destroyed";
    shutdown();
}
140 
openDevice()141 bool EvsEmulatedCamera::openDevice() {
142     bool opened = false;
143     if (mVideo) {
144         opened = mVideo->open(mCaptureDeviceDesc.path, mCaptureDeviceDesc.interval);
145     }
146 
147     return opened;
148 }
149 
// Stops any active stream, closes the capture device, and releases every
// gralloc buffer this camera allocated.  Called from the destructor.
void EvsEmulatedCamera::shutdown() {
    LOG(INFO) << "EvsEmulatedCamera shutdown";

    // Make sure our output stream is cleaned up
    // (It really should be already)
    stopVideoStream();

    // Note:  Since stopVideoStream is blocking, no other threads can now be running

    // Close our video capture device
    mVideo->close();

    // Drop all the graphics buffers we've been using
    if (mBuffers.size() > 0) {
        GraphicBufferAllocator& alloc(GraphicBufferAllocator::get());
        for (auto&& rec : mBuffers) {
            if (rec.inUse) {
                // The remote client still holds this buffer; we free it anyway
                // because we're going away, but leave a trace for debugging.
                LOG(WARNING) << "Releasing buffer despite remote ownership";
            }
            alloc.free(rec.handle);
            rec.handle = nullptr;
        }
        mBuffers.clear();
    }
}
175 
176 // Methods from ::android::hardware::automotive::evs::V1_0::IEvsCamera follow.
// V1.0 entry point: reports this camera's self-description to the caller.
Return<void> EvsEmulatedCamera::getCameraInfo(getCameraInfo_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    // Send back our self description
    _hidl_cb(mDescription.v1);
    return {};
}
184 
setMaxFramesInFlight(uint32_t bufferCount)185 Return<EvsResult> EvsEmulatedCamera::setMaxFramesInFlight(uint32_t bufferCount) {
186     LOG(DEBUG) << __FUNCTION__;
187     std::scoped_lock<std::mutex> lock(mAccessLock);
188 
189     // Check whether underlying device is still open
190     if (!mVideo->isOpen()) {
191         LOG(WARNING) << "Ignoring startVideoStream call when camera has been lost.";
192         return EvsResult::OWNERSHIP_LOST;
193     }
194 
195     // We cannot function without at least one video buffer to send data
196     if (bufferCount < 1) {
197         LOG(ERROR) << "Ignoring setMaxFramesInFlight with less than one buffer requested";
198         return EvsResult::INVALID_ARG;
199     }
200 
201     // Update our internal state
202     if (setAvailableFrames_Locked(bufferCount)) {
203         return EvsResult::OK;
204     } else {
205         return EvsResult::BUFFER_NOT_AVAILABLE;
206     }
207 }
208 
startVideoStream(const sp<IEvsCameraStream_1_0> & stream)209 Return<EvsResult> EvsEmulatedCamera::startVideoStream(const sp<IEvsCameraStream_1_0>& stream) {
210     LOG(DEBUG) << __FUNCTION__;
211     std::scoped_lock<std::mutex> lock(mAccessLock);
212 
213     // Check whether underlying device is still open
214     if (!mVideo->isOpen()) {
215         LOG(WARNING) << "Ignoring startVideoStream call when camera has been lost.";
216         return EvsResult::OWNERSHIP_LOST;
217     }
218 
219     if (mStream != nullptr) {
220         LOG(ERROR) << "Ignoring startVideoStream call when a stream is already running.";
221         return EvsResult::STREAM_ALREADY_RUNNING;
222     }
223 
224     mStream = IEvsCameraStream_1_1::castFrom(stream).withDefault(nullptr);
225     if (mStream == nullptr) {
226         LOG(ERROR) << "A given IEvsCameraStream does not supoprt v1.1 interface.";
227         return EvsResult::INVALID_ARG;
228     }
229 
230     // If the client never indicated otherwise, configure ourselves for a single streaming buffer
231     if (mFramesAllowed < 1) {
232         if (!setAvailableFrames_Locked(1)) {
233             LOG(ERROR) << "Failed to start stream because we couldn't get a graphics buffer";
234             return EvsResult::BUFFER_NOT_AVAILABLE;
235         }
236     }
237 
238     if (!mVideo->startStream([this](VideoCapture*, imageBufferDesc* tgt, void* data) {
239             this->forwardFrame(tgt, data);
240         })) {
241         // No need to hold onto this if we failed to start
242         mStream = nullptr;
243         LOG(ERROR) << "Underlying camera start stream failed";
244         return EvsResult::UNDERLYING_SERVICE_ERROR;
245     }
246 
247     return EvsResult::OK;
248 }
249 
// V1.0 frame return path: forwards the single buffer to the shared
// doneWithFrame_impl() helper.  The V1.0 signature cannot report errors here.
Return<void> EvsEmulatedCamera::doneWithFrame(const BufferDesc_1_0& buffer) {
    LOG(DEBUG) << __FUNCTION__;
    doneWithFrame_impl(buffer.bufferId, buffer.memHandle);

    return {};
}
256 
// Stops frame delivery.  Blocks until the capture device has stopped, then
// notifies the v1.1 client with a STREAM_STOPPED event and drops our stream
// reference.
// NOTE(review): mStream is read before mAccessLock is taken — presumably safe
// because stopStream() has already quiesced the capture thread; confirm.
Return<void> EvsEmulatedCamera::stopVideoStream() {
    LOG(DEBUG) << __FUNCTION__;

    // Tells the capture device to stop (and block until it does)
    mVideo->stopStream();

    if (mStream != nullptr) {
        // V1.1 client is waiting on STREAM_STOPPED event.
        std::scoped_lock<std::mutex> lock(mAccessLock);

        EvsEventDesc event;
        event.aType = EvsEventType::STREAM_STOPPED;
        auto result = mStream->notify(event);
        if (!result.isOk()) {
            LOG(ERROR) << "Error delivering end of stream event";
        }

        // Drop our reference to the client's stream receiver
        mStream = nullptr;
    }

    return {};
}
280 
// V1.0 extended-info getter.  This implementation stores no device-specific
// values, so every identifier reports zero.
Return<int32_t> EvsEmulatedCamera::getExtendedInfo(uint32_t /*opaqueIdentifier*/) {
    LOG(DEBUG) << __FUNCTION__;
    // Return zero by default as required by the spec
    return 0;
}
286 
setExtendedInfo(uint32_t,int32_t)287 Return<EvsResult> EvsEmulatedCamera::setExtendedInfo(uint32_t /*opaqueIdentifier*/,
288                                                      int32_t /*opaqueValue*/) {
289     LOG(DEBUG) << __FUNCTION__;
290     std::scoped_lock<std::mutex> lock(mAccessLock);
291 
292     // If we've been displaced by another owner of the camera, then we can't do anything else
293     if (!mVideo->isOpen()) {
294         LOG(WARNING) << "Ignoring setExtendedInfo call when camera has been lost.";
295         return EvsResult::OWNERSHIP_LOST;
296     }
297 
298     // We don't store any device specific information in this implementation
299     return EvsResult::INVALID_ARG;
300 }
301 
302 // Methods from ::android::hardware::automotive::evs::V1_1::IEvsCamera follow.
// V1.1 entry point: reports the full (v1.1) self-description to the caller.
Return<void> EvsEmulatedCamera::getCameraInfo_1_1(getCameraInfo_1_1_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    // Send back our self description
    _hidl_cb(mDescription);
    return {};
}
310 
// Reports metadata for a physical camera.  This emulated camera is not a
// logical multi-camera, so the id is ignored and the answer matches
// getCameraInfo_1_1().
Return<void> EvsEmulatedCamera::getPhysicalCameraInfo(const hidl_string& /*id*/,
                                                      getPhysicalCameraInfo_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    // This method works exactly the same as getCameraInfo_1_1() in EVS HW module.
    _hidl_cb(mDescription);
    return {};
}
319 
doneWithFrame_1_1(const hidl_vec<BufferDesc_1_1> & buffers)320 Return<EvsResult> EvsEmulatedCamera::doneWithFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers) {
321     LOG(DEBUG) << __FUNCTION__;
322 
323     for (auto&& buffer : buffers) {
324         doneWithFrame_impl(buffer.bufferId, buffer.buffer.nativeHandle);
325     }
326 
327     return EvsResult::OK;
328 }
329 
// Pausing the stream is not supported by this emulated camera.
Return<EvsResult> EvsEmulatedCamera::pauseVideoStream() {
    return EvsResult::UNDERLYING_SERVICE_ERROR;
}
333 
// Resuming the stream is not supported by this emulated camera.
Return<EvsResult> EvsEmulatedCamera::resumeVideoStream() {
    return EvsResult::UNDERLYING_SERVICE_ERROR;
}
337 
// Claims master (parameter-setting) privilege for this client.
// TODO(b/162946784): Implement this operation — currently a no-op that
// always reports success.
Return<EvsResult> EvsEmulatedCamera::setMaster() {
    return EvsResult::OK;
}
342 
// Forcibly claims master privilege on behalf of a display owner.
// TODO(b/162946784): Implement this operation — currently a no-op that
// always reports success.
Return<EvsResult> EvsEmulatedCamera::forceMaster(const sp<IEvsDisplay_1_0>&) {
    return EvsResult::OK;
}
347 
// Releases master privilege held by this client.
// TODO(b/162946784): Implement this operation — currently a no-op that
// always reports success.
Return<EvsResult> EvsEmulatedCamera::unsetMaster() {
    return EvsResult::OK;
}
352 
// Reports the list of camera controls this camera supports.
Return<void> EvsEmulatedCamera::getParameterList(getParameterList_cb _hidl_cb) {
    // TODO(b/162946784): reads emulated controls from the configuration and
    //                    returns.
    // Until then, report an empty control list.
    hidl_vec<CameraParam> hidlCtrls;
    _hidl_cb(hidlCtrls);
    return {};
}
360 
// Reports the valid range (min, max, step) of the given camera control.
Return<void> EvsEmulatedCamera::getIntParameterRange(CameraParam /*id*/,
                                                     getIntParameterRange_cb _hidl_cb) {
    // TODO(b/162946784): reads emulated controls from the configuration and
    //                    returns.
    // Until then, report an all-zero range.
    _hidl_cb(0, 0, 0);
    return {};
}
368 
// Sets the value of a camera control.  Controls are not implemented yet, so
// every request is rejected with INVALID_ARG and a single zero-filled value.
Return<void> EvsEmulatedCamera::setIntParameter(CameraParam /*id*/, int32_t /*value*/,
                                                setIntParameter_cb _hidl_cb) {
    // TODO(b/162946784): Implement this operation
    hidl_vec<int32_t> values;
    values.resize(1);
    _hidl_cb(EvsResult::INVALID_ARG, values);
    return {};
}
377 
// Reads the value of a camera control.  Controls are not implemented yet, so
// every request is rejected with INVALID_ARG and a single zero-filled value.
Return<void> EvsEmulatedCamera::getIntParameter(CameraParam /*id*/, getIntParameter_cb _hidl_cb) {
    // TODO(b/162946784): Implement this operation
    hidl_vec<int32_t> values;
    values.resize(1);
    _hidl_cb(EvsResult::INVALID_ARG, values);
    return {};
}
385 
// Stores an opaque byte vector under the given identifier, replacing any
// previous value for that identifier.
// NOTE(review): mExtInfo is mutated without holding mAccessLock — confirm that
// extended-info calls cannot race the other member functions.
Return<EvsResult> EvsEmulatedCamera::setExtendedInfo_1_1(uint32_t opaqueIdentifier,
                                                         const hidl_vec<uint8_t>& opaqueValue) {
    mExtInfo.insert_or_assign(opaqueIdentifier, opaqueValue);
    return EvsResult::OK;
}
391 
getExtendedInfo_1_1(uint32_t opaqueIdentifier,getExtendedInfo_1_1_cb _hidl_cb)392 Return<void> EvsEmulatedCamera::getExtendedInfo_1_1(uint32_t opaqueIdentifier,
393                                                     getExtendedInfo_1_1_cb _hidl_cb) {
394     const auto it = mExtInfo.find(opaqueIdentifier);
395     hidl_vec<uint8_t> value;
396     auto status = EvsResult::OK;
397     if (it == mExtInfo.end()) {
398         status = EvsResult::INVALID_ARG;
399     } else {
400         value = mExtInfo[opaqueIdentifier];
401     }
402 
403     _hidl_cb(status, value);
404     return {};
405 }
406 
importExternalBuffers(const hidl_vec<BufferDesc_1_1> & buffers,importExternalBuffers_cb _hidl_cb)407 Return<void> EvsEmulatedCamera::importExternalBuffers(const hidl_vec<BufferDesc_1_1>& buffers,
408                                                       importExternalBuffers_cb _hidl_cb) {
409     LOG(DEBUG) << __FUNCTION__;
410 
411     // If we've been displaced by another owner of the camera, then we can't do anything else
412     if (!mVideo->isOpen()) {
413         LOG(WARNING) << "Ignoring a request add external buffers "
414                      << "when camera has been lost.";
415         _hidl_cb(EvsResult::UNDERLYING_SERVICE_ERROR, mFramesAllowed);
416         return {};
417     }
418 
419     auto numBuffersToAdd = buffers.size();
420     if (numBuffersToAdd < 1) {
421         LOG(DEBUG) << "No buffers to add.";
422         _hidl_cb(EvsResult::OK, mFramesAllowed);
423         return {};
424     }
425 
426     {
427         std::scoped_lock<std::mutex> lock(mAccessLock);
428 
429         if (numBuffersToAdd > (MAX_BUFFERS_IN_FLIGHT - mFramesAllowed)) {
430             numBuffersToAdd -= (MAX_BUFFERS_IN_FLIGHT - mFramesAllowed);
431             LOG(WARNING) << "Exceed the limit on number of buffers.  " << numBuffersToAdd
432                          << " buffers will be added only.";
433         }
434 
435         GraphicBufferMapper& mapper = GraphicBufferMapper::get();
436         const auto before = mFramesAllowed;
437         for (auto i = 0; i < numBuffersToAdd; ++i) {
438             auto& b = buffers[i];
439             const AHardwareBuffer_Desc* pDesc =
440                     reinterpret_cast<const AHardwareBuffer_Desc*>(&b.buffer.description);
441 
442             // Import a buffer to add
443             buffer_handle_t memHandle = nullptr;
444             status_t result = mapper.importBuffer(b.buffer.nativeHandle, pDesc->width,
445                                                   pDesc->height, pDesc->layers, pDesc->format,
446                                                   pDesc->usage, pDesc->stride, &memHandle);
447             if (result != android::NO_ERROR || !memHandle) {
448                 LOG(WARNING) << "Failed to import a buffer " << b.bufferId;
449                 continue;
450             }
451 
452             auto stored = false;
453             for (auto&& rec : mBuffers) {
454                 if (rec.handle == nullptr) {
455                     // Use this existing entry
456                     rec.handle = memHandle;
457                     rec.inUse = false;
458 
459                     stored = true;
460                     break;
461                 }
462             }
463 
464             if (!stored) {
465                 // Add a BufferRecord wrapping this handle to our set of available buffers
466                 mBuffers.emplace_back(memHandle);
467             }
468 
469             ++mFramesAllowed;
470         }
471 
472         _hidl_cb(EvsResult::OK, mFramesAllowed - before);
473         return {};
474     }
475 }
476 
// Shared implementation behind both doneWithFrame() entry points.  Marks the
// identified buffer as available again and compacts the record array after
// the pool has shrunk.  Always returns OK: invalid requests are logged and
// ignored, matching the V1.0 interface which cannot return an error here.
EvsResult EvsEmulatedCamera::doneWithFrame_impl(const uint32_t bufferId,
                                                const buffer_handle_t memHandle) {
    std::scoped_lock<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo->isOpen()) {
        LOG(WARNING) << "Ignoring doneWithFrame call when camera has been lost.";
    } else {
        if (memHandle == nullptr) {
            LOG(ERROR) << "Ignoring doneWithFrame called with null handle";
        } else if (bufferId >= mBuffers.size()) {
            LOG(ERROR) << "Ignoring doneWithFrame called with invalid bufferId " << bufferId
                       << " (max is " << mBuffers.size() - 1 << ")";
        } else if (!mBuffers[bufferId].inUse) {
            LOG(ERROR) << "Ignoring doneWithFrame called on frame " << bufferId
                       << " which is already free";
        } else {
            // Mark the frame as available
            mBuffers[bufferId].inUse = false;
            --mFramesInUse;

            // If this frame's index is high in the array, try to move it down
            // to improve locality after mFramesAllowed has been reduced.
            if (bufferId >= mFramesAllowed) {
                // Find an empty slot lower in the array (which should always exist in this case)
                for (auto&& rec : mBuffers) {
                    if (rec.handle == nullptr) {
                        rec.handle = mBuffers[bufferId].handle;
                        mBuffers[bufferId].handle = nullptr;
                        break;
                    }
                }
            }
        }
    }

    return EvsResult::OK;
}
515 
// Resizes the buffer pool to exactly bufferCount entries.  Grows by
// allocating new gralloc buffers or shrinks by freeing unused ones.  A failed
// grow is rolled back to the previous size; a failed shrink (buffers still
// held by the client) is logged but not treated as fatal.  Caller must hold
// mAccessLock.
bool EvsEmulatedCamera::setAvailableFrames_Locked(unsigned bufferCount) {
    if (bufferCount < 1) {
        LOG(ERROR) << "Rejecting a buffer request to set buffer count to zero";
        return false;
    }

    if (bufferCount > MAX_BUFFERS_IN_FLIGHT) {
        LOG(ERROR) << "Rejecting a buffer request in excess of internal limit";
        return false;
    }

    // Is an increase required?
    if (mFramesAllowed < bufferCount) {
        // An increase is required
        unsigned needed = bufferCount - mFramesAllowed;
        LOG(INFO) << "Allocating " << needed << " buffers for camera frames";

        unsigned added = increaseAvailableFrames_Locked(needed);
        if (added != needed) {
            // If we didn't add all the frames we needed, then roll back to the previous state
            LOG(ERROR) << "Rolling back to previous frame queue size";
            decreaseAvailableFrames_Locked(added);
            return false;
        }
    } else if (mFramesAllowed > bufferCount) {
        // A decrease is required
        unsigned framesToRelease = mFramesAllowed - bufferCount;
        LOG(INFO) << "Returning " << framesToRelease << " camera frame buffers";

        unsigned released = decreaseAvailableFrames_Locked(framesToRelease);
        if (released != framesToRelease) {
            // This shouldn't happen with a properly behaving client because the client
            // should only make this call after returning sufficient outstanding buffers
            // to allow a clean resize.
            LOG(ERROR) << "Buffer queue shrink failed -- too many buffers currently in use?";
        }
    }

    return true;
}
556 
// Allocates up to numToAdd new gralloc buffers and adds them to the pool,
// returning how many were actually allocated.  Stops early on the first
// allocation failure.  Caller must hold mAccessLock.
unsigned EvsEmulatedCamera::increaseAvailableFrames_Locked(unsigned numToAdd) {
    // Acquire the graphics buffer allocator
    GraphicBufferAllocator& alloc(GraphicBufferAllocator::get());

    unsigned added = 0;

    while (added < numToAdd) {
        unsigned pixelsPerLine;
        buffer_handle_t memHandle = nullptr;
        status_t result = alloc.allocate(mCaptureDeviceDesc.width, mCaptureDeviceDesc.height,
                                         mFormat, 1 /* layers */, mUsage, &memHandle,
                                         &pixelsPerLine, 0, "EvsEmulatedCamera");
        if (result != NO_ERROR) {
            LOG(ERROR) << "Error " << result << " allocating " << mCaptureDeviceDesc.width << " x "
                       << mCaptureDeviceDesc.height << " graphics buffer";
            break;
        }

        if (!memHandle) {
            LOG(ERROR) << "We didn't get a buffer handle back from the allocator";
            break;
        }

        // All buffers in the pool are expected to share a single stride;
        // remember the first stride we see and complain if a later one differs.
        if (mStride) {
            if (mStride != pixelsPerLine) {
                LOG(ERROR) << "We did not expect to get buffers with different strides!";
            }
        } else {
            // Gralloc defines stride in terms of pixels per line
            mStride = pixelsPerLine;
        }

        // Find a place to store the new buffer
        bool stored = false;
        for (auto&& rec : mBuffers) {
            if (rec.handle == nullptr) {
                // Use this existing entry
                rec.handle = memHandle;
                rec.inUse = false;
                stored = true;
                break;
            }
        }

        if (!stored) {
            // Add a BufferRecord wrapping this handle to our set of available buffers
            mBuffers.emplace_back(memHandle);
        }

        mFramesAllowed++;
        added++;
    }

    return added;
}
612 
decreaseAvailableFrames_Locked(unsigned numToRemove)613 unsigned EvsEmulatedCamera::decreaseAvailableFrames_Locked(unsigned numToRemove) {
614     // Acquire the graphics buffer allocator
615     GraphicBufferAllocator& alloc(GraphicBufferAllocator::get());
616 
617     unsigned removed = 0;
618 
619     for (auto&& rec : mBuffers) {
620         // Is this record not in use, but holding a buffer that we can free?
621         if ((rec.inUse == false) && (rec.handle != nullptr)) {
622             // Release buffer and update the record so we can recognize it as "empty"
623             alloc.free(rec.handle);
624             rec.handle = nullptr;
625 
626             mFramesAllowed--;
627             removed++;
628 
629             if (removed == numToRemove) {
630                 break;
631             }
632         }
633     }
634 
635     return removed;
636 }
637 
638 // This is the async callback from the video camera that tells us a frame is ready
// This is the async callback from the video camera that tells us a frame is ready
// Runs on the capture thread: converts the V4L2 frame into an RGBA gralloc
// buffer and delivers it to the client stream.  If no pool buffer is free,
// the frame is dropped and the capture buffer is recycled immediately.
void EvsEmulatedCamera::forwardFrame(imageBufferDesc* pBufferInfo, void* pData) {
    bool readyForFrame = false;
    size_t idx = 0;

    // Lock scope for updating shared state
    {
        std::scoped_lock<std::mutex> lock(mAccessLock);

        // Are we allowed to issue another buffer?
        if (mFramesInUse >= mFramesAllowed) {
            // Can't do anything right now -- skip this frame
            LOG(WARNING) << "Skipped a frame because too many are in flight";
        } else {
            // Identify an available buffer to fill
            for (idx = 0; idx < mBuffers.size(); idx++) {
                if (!mBuffers[idx].inUse) {
                    if (mBuffers[idx].handle != nullptr) {
                        // Found an available record, so stop looking
                        break;
                    }
                }
            }

            if (idx >= mBuffers.size()) {
                // This shouldn't happen since we already checked mFramesInUse vs mFramesAllowed
                LOG(ERROR) << "Failed to find an available buffer slot";
            } else {
                // We're going to make the frame busy
                mBuffers[idx].inUse = true;
                readyForFrame = true;
                ++mFramesInUse;
            }
        }
    }

    if (!readyForFrame) {
        // We need to return the video buffer so it can capture a new frame
        mVideo->markFrameConsumed();
    } else {
        // Assemble the buffer description we'll transmit below
        BufferDesc_1_1 bufDesc_1_1 = {};
        AHardwareBuffer_Desc* pDesc =
                reinterpret_cast<AHardwareBuffer_Desc*>(&bufDesc_1_1.buffer.description);

        pDesc->width = mCaptureDeviceDesc.width;
        pDesc->height = mCaptureDeviceDesc.height;
        pDesc->layers = 1;
        pDesc->format = mFormat;
        pDesc->usage = mUsage;
        pDesc->stride = mStride;
        bufDesc_1_1.buffer.nativeHandle = mBuffers[idx].handle;
        bufDesc_1_1.bufferId = idx;
        bufDesc_1_1.deviceId = mDescription.v1.cameraId;
        // timestamp in microseconds.
        // NOTE(review): systemTime(SYSTEM_TIME_MONOTONIC) reports nanoseconds;
        // confirm which unit consumers of this field actually expect.
        bufDesc_1_1.timestamp = systemTime(SYSTEM_TIME_MONOTONIC);

        // Lock our output buffer for writing
        void* targetPixels = nullptr;
        GraphicBufferMapper& mapper = GraphicBufferMapper::get();
        status_t result =
                mapper.lock(bufDesc_1_1.buffer.nativeHandle,
                            GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_NEVER,
                            android::Rect(pDesc->width, pDesc->height), (void**)&targetPixels);

        // If we failed to lock the pixel buffer, we're about to crash, but log it first
        // (the converters below dereference targetPixels unconditionally).
        if (!targetPixels) {
            LOG(ERROR) << "Camera failed to gain access to image buffer for writing - "
                       << " status: " << statusToString(result) << " , error: " << strerror(errno);
        }

        // Transfer the video image into the output buffer, making any needed
        // format conversion along the way
        switch (pBufferInfo->info.format) {
            case V4L2_PIX_FMT_YUYV:
                fillRGBAFromYUYV(bufDesc_1_1, reinterpret_cast<uint8_t*>(targetPixels), pData,
                                 mVideo->getStride(), mVideo->getHeight());
                break;

            case V4L2_PIX_FMT_XBGR32:
                [[fallthrough]];
            case V4L2_PIX_FMT_ABGR32:
                // 32-bit source formats match our output layout, so a plain
                // row-by-row copy is sufficient.
                fillBufferCopy(bufDesc_1_1, reinterpret_cast<uint8_t*>(targetPixels), pData,
                               mVideo->getStride(), mVideo->getHeight());
                break;

            default:
                LOG(ERROR) << "Source data is in unsupported format";
                break;
        }

        // Unlock the output buffer
        mapper.unlock(bufDesc_1_1.buffer.nativeHandle);

        // Give the video frame back to the underlying device for reuse
        // Note that we do this before making the client callback to give the
        // underlying camera more time to capture the next frame
        mVideo->markFrameConsumed();

        // Issue the (asynchronous) callback to the client -- can't be holding
        // the lock
        bool flag = false;
        {
            hidl_vec<BufferDesc_1_1> frames;
            frames.resize(1);
            frames[0] = bufDesc_1_1;
            auto result = mStream->deliverFrame_1_1(frames);
            flag = result.isOk();
        }

        if (flag) {
            LOG(DEBUG) << "Delivered " << bufDesc_1_1.buffer.nativeHandle.getNativeHandle()
                       << " as id " << bufDesc_1_1.bufferId;
        } else {
            // This can happen if the client dies and is likely unrecoverable.
            // To avoid consuming resources generating failing calls, we stop sending
            // frames.  Note, however, that the stream remains in the "STREAMING" state
            // until cleaned up on the main thread.
            LOG(ERROR) << "Frame delivery call failed in the transport layer.";

            // Since we didn't actually deliver it, mark the frame as available
            std::scoped_lock<std::mutex> lock(mAccessLock);
            mBuffers[idx].inUse = false;

            --mFramesInUse;
        }
    }
}
766 
Create(const char * deviceName,const EmulatedCameraDesc & desc)767 sp<EvsEmulatedCamera> EvsEmulatedCamera::Create(const char* deviceName,
768                                                 const EmulatedCameraDesc& desc) {
769     LOG(INFO) << "Create " << deviceName;
770     sp<EvsEmulatedCamera> pCamera = new EvsEmulatedCamera(deviceName, desc);
771     if (pCamera->openDevice()) {
772         return pCamera;
773     } else {
774         LOG(ERROR) << "Failed to open a video device.";
775         return nullptr;
776     }
777 }
778 
779 }  // namespace implementation
780 }  // namespace V1_1
781 }  // namespace evs
782 }  // namespace automotive
783 }  // namespace android
784