/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "EvsEmulatedCamera.h"

#include <algorithm>
#include <cstring>
#include <filesystem>

#include <android/hardware_buffer.h>
#include <android-base/logging.h>
#include <ui/GraphicBufferAllocator.h>
#include <ui/GraphicBufferMapper.h>
#include <utils/SystemClock.h>

using BufferDesc_1_0 = ::android::hardware::automotive::evs::V1_0::BufferDesc;
using BufferDesc_1_1 = ::android::hardware::automotive::evs::V1_1::BufferDesc;

namespace {
    // Arbitrary limit on the number of graphics buffers allowed to be allocated.
    // Safeguards against unreasonable resource consumption and provides a testable limit.
    const unsigned MAX_BUFFERS_IN_FLIGHT = 100;

    uint32_t yuvToRgbx(const unsigned char Y, const unsigned char Uin, const unsigned char Vin) {
        const float U = Uin - 128.0f;
        const float V = Vin - 128.0f;

        const float Rf = Y + 1.140f*V;
        const float Gf = Y - 0.395f*U - 0.581f*V;
        const float Bf = Y + 2.032f*U;
        const unsigned char R = static_cast<unsigned char>(std::clamp(Rf, 0.0f, 255.0f));
        const unsigned char G = static_cast<unsigned char>(std::clamp(Gf, 0.0f, 255.0f));
        const unsigned char B = static_cast<unsigned char>(std::clamp(Bf, 0.0f, 255.0f));

        return ((R & 0xFF))       |
               ((G & 0xFF) << 8)  |
               ((B & 0xFF) << 16) |
               0xFF000000;  // Fill the alpha channel with ones
    }
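
    // The constants above are the classic full-range analog YUV (BT.601-style) conversion:
    // R = Y + 1.140*V, G = Y - 0.395*U - 0.581*V, B = Y + 2.032*U, with U and V re-centered
    // around zero.  The word is packed with R in the low byte and alpha in the high byte,
    // which on a little-endian device lands in memory as R, G, B, A -- matching RGBA_8888.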


    void fillRGBAFromYUYV(const BufferDesc& dstBuff,
                          uint8_t* dstData,
                          void* srcData,
                          unsigned srcStride,
                          unsigned srcHeight) {
        const AHardwareBuffer_Desc* pDesc =
            reinterpret_cast<const AHardwareBuffer_Desc*>(&dstBuff.buffer.description);
        unsigned width = pDesc->width;
        uint32_t* src = reinterpret_cast<uint32_t*>(srcData);
        uint32_t* dst = reinterpret_cast<uint32_t*>(dstData);
        unsigned srcStridePixels = srcStride / 2;
        unsigned dstStridePixels = pDesc->stride;

        const int srcRowPadding32 =
            srcStridePixels / 2 - width / 2;  // 2 bytes per pixel, 4 bytes per word
        const int dstRowPadding32 =
            dstStridePixels - width;    // 4 bytes per pixel, 4 bytes per word

        const unsigned numRows = std::min(srcHeight, pDesc->height);
        for (unsigned r = 0; r < numRows; ++r) {
            for (unsigned c = 0; c < width/2; c++) {
                // Note:  we're walking two pixels at a time here (even/odd)
                uint32_t srcPixel = *src++;

                uint8_t Y1 = (srcPixel)       & 0xFF;
                uint8_t U  = (srcPixel >> 8)  & 0xFF;
                uint8_t Y2 = (srcPixel >> 16) & 0xFF;
                uint8_t V  = (srcPixel >> 24) & 0xFF;

                // On the RGB output, we're writing one pixel at a time
                *(dst+0) = yuvToRgbx(Y1, U, V);
                *(dst+1) = yuvToRgbx(Y2, U, V);
                dst += 2;
            }

            // Skip over any extra data or end of row alignment padding
            src += srcRowPadding32;
            dst += dstRowPadding32;
        }
    }
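
    // For example, a packed YUYV word of 0x80FF8000 decodes as Y1=0x00, U=0x80, Y2=0xFF,
    // V=0x80: two luma samples sharing one chroma pair.  With U and V at their neutral value
    // of 128 the conversion reduces to R = G = B = Y, so this word expands to 0xFF000000
    // (opaque black) followed by 0xFFFFFFFF (opaque white) in the RGBA output.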


    void fillBufferCopy(const BufferDesc& dstBuff,
                        uint8_t* dst,
                        void* srcData,
                        unsigned srcStride,
                        unsigned srcHeight) {
        const AHardwareBuffer_Desc* pDesc =
            reinterpret_cast<const AHardwareBuffer_Desc*>(&dstBuff.buffer.description);

        // HAL_PIXEL_FORMAT_RGBA_8888 is the default output format
        const unsigned bytesPerPixel = 4;
        const unsigned dstStride = pDesc->stride * bytesPerPixel;

        // Simply copy the data, row by row, without any scaling.
        const unsigned copyStride = std::min(srcStride, dstStride);
        const unsigned numRows = std::min(srcHeight, pDesc->height);
        uint8_t* src = reinterpret_cast<uint8_t*>(srcData);
        for (unsigned r = 0; r < numRows; ++r) {
            memcpy(dst, src, copyStride);

            // Move to the next row
            src += srcStride;
            dst += dstStride;
        }
    }
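
    // This path is used by forwardFrame() for V4L2 source formats that are already 32 bits
    // per pixel (XBGR32/ABGR32), so the rows are copied as-is with no per-pixel conversion.
    // Rows wider than the destination are truncated rather than scaled, and the destination
    // stride comes from gralloc, so it may exceed the visible width.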
} // namespace


namespace android {
namespace automotive {
namespace evs {
namespace V1_1 {
namespace implementation {

EvsEmulatedCamera::EvsEmulatedCamera(const char *deviceName,
                                     const EmulatedCameraDesc& desc) :
        mFramesAllowed(0),
        mFramesInUse(0),
        mCaptureDeviceDesc(desc) {
    LOG(INFO) << "EvsEmulatedCamera instantiated";
    mDescription.v1.cameraId = deviceName;

    mVideo = new VideoCapture();

    // Default output buffer format.
    mFormat = HAL_PIXEL_FORMAT_RGBA_8888;

    // How we expect to use the gralloc buffers we'll exchange with our client
    mUsage  = GRALLOC_USAGE_HW_TEXTURE     |
              GRALLOC_USAGE_SW_READ_RARELY |
              GRALLOC_USAGE_SW_WRITE_OFTEN;
}


EvsEmulatedCamera::~EvsEmulatedCamera() {
    LOG(INFO) << "EvsEmulatedCamera being destroyed";
    shutdown();
}


bool EvsEmulatedCamera::openDevice() {
    bool opened = false;
    if (mVideo) {
        opened = mVideo->open(mCaptureDeviceDesc.path,
                              mCaptureDeviceDesc.interval);
    }

    return opened;
}


void EvsEmulatedCamera::shutdown()
{
    LOG(INFO) << "EvsEmulatedCamera shutdown";

    // Make sure our output stream is cleaned up
    // (It really should be already)
    stopVideoStream();

    // Note:  Since stopVideoStream is blocking, no other threads can now be running

    // Close our video capture device
    mVideo->close();

    // Drop all the graphics buffers we've been using
    if (mBuffers.size() > 0) {
        GraphicBufferAllocator& alloc(GraphicBufferAllocator::get());
        for (auto&& rec : mBuffers) {
            if (rec.inUse) {
                LOG(WARNING) << "Releasing buffer despite remote ownership";
            }
            alloc.free(rec.handle);
            rec.handle = nullptr;
        }
        mBuffers.clear();
    }
}


// Methods from ::android::hardware::automotive::evs::V1_0::IEvsCamera follow.
Return<void> EvsEmulatedCamera::getCameraInfo(getCameraInfo_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    // Send back our self description
    _hidl_cb(mDescription.v1);
    return {};
}


Return<EvsResult> EvsEmulatedCamera::setMaxFramesInFlight(uint32_t bufferCount) {
    LOG(DEBUG) << __FUNCTION__;
    std::scoped_lock<std::mutex> lock(mAccessLock);

    // Check whether underlying device is still open
    if (!mVideo->isOpen()) {
        LOG(WARNING) << "Ignoring setMaxFramesInFlight call when camera has been lost.";
        return EvsResult::OWNERSHIP_LOST;
    }

    // We cannot function without at least one video buffer to send data
    if (bufferCount < 1) {
        LOG(ERROR) << "Ignoring setMaxFramesInFlight with less than one buffer requested";
        return EvsResult::INVALID_ARG;
    }

    // Update our internal state
    if (setAvailableFrames_Locked(bufferCount)) {
        return EvsResult::OK;
    } else {
        return EvsResult::BUFFER_NOT_AVAILABLE;
    }
}


Return<EvsResult> EvsEmulatedCamera::startVideoStream(const sp<IEvsCameraStream_1_0>& stream) {
    LOG(DEBUG) << __FUNCTION__;
    std::scoped_lock<std::mutex> lock(mAccessLock);

    // Check whether underlying device is still open
    if (!mVideo->isOpen()) {
        LOG(WARNING) << "Ignoring startVideoStream call when camera has been lost.";
        return EvsResult::OWNERSHIP_LOST;
    }

    if (mStream != nullptr) {
        LOG(ERROR) << "Ignoring startVideoStream call when a stream is already running.";
        return EvsResult::STREAM_ALREADY_RUNNING;
    }

    mStream = IEvsCameraStream_1_1::castFrom(stream).withDefault(nullptr);
    if (mStream == nullptr) {
        LOG(ERROR) << "The given IEvsCameraStream does not support the v1.1 interface.";
        return EvsResult::INVALID_ARG;
    }

    // If the client never indicated otherwise, configure ourselves for a single streaming buffer
    if (mFramesAllowed < 1) {
        if (!setAvailableFrames_Locked(1)) {
            LOG(ERROR) << "Failed to start stream because we couldn't get a graphics buffer";
            return EvsResult::BUFFER_NOT_AVAILABLE;
        }
    }

    if (!mVideo->startStream([this](VideoCapture*, imageBufferDesc* tgt, void* data) {
                                this->forwardFrame(tgt, data);
                            })
    ) {
        // No need to hold onto this if we failed to start
        mStream = nullptr;
        LOG(ERROR) << "Underlying camera start stream failed";
        return EvsResult::UNDERLYING_SERVICE_ERROR;
    }

    return EvsResult::OK;
}


Return<void> EvsEmulatedCamera::doneWithFrame(const BufferDesc_1_0& buffer) {
    LOG(DEBUG) << __FUNCTION__;
    doneWithFrame_impl(buffer.bufferId, buffer.memHandle);

    return {};
}


Return<void> EvsEmulatedCamera::stopVideoStream() {
    LOG(DEBUG) << __FUNCTION__;

    // Tells the capture device to stop (and block until it does)
    mVideo->stopStream();

    if (mStream != nullptr) {
        // V1.1 client is waiting on STREAM_STOPPED event.
        std::scoped_lock<std::mutex> lock(mAccessLock);

        EvsEventDesc event;
        event.aType = EvsEventType::STREAM_STOPPED;
        auto result = mStream->notify(event);
        if (!result.isOk()) {
            LOG(ERROR) << "Error delivering end of stream event";
        }

        // Drop our reference to the client's stream receiver
        mStream = nullptr;
    }

    return {};
}


Return<int32_t> EvsEmulatedCamera::getExtendedInfo(uint32_t /*opaqueIdentifier*/) {
    LOG(DEBUG) << __FUNCTION__;
    // Return zero by default as required by the spec
    return 0;
}


Return<EvsResult> EvsEmulatedCamera::setExtendedInfo(uint32_t /*opaqueIdentifier*/,
                                                     int32_t  /*opaqueValue*/) {
    LOG(DEBUG) << __FUNCTION__;
    std::scoped_lock<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo->isOpen()) {
        LOG(WARNING) << "Ignoring setExtendedInfo call when camera has been lost.";
        return EvsResult::OWNERSHIP_LOST;
    }

    // We don't store any device specific information in this implementation
    return EvsResult::INVALID_ARG;
}


// Methods from ::android::hardware::automotive::evs::V1_1::IEvsCamera follow.
Return<void> EvsEmulatedCamera::getCameraInfo_1_1(getCameraInfo_1_1_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    // Send back our self description
    _hidl_cb(mDescription);
    return {};
}


Return<void> EvsEmulatedCamera::getPhysicalCameraInfo(const hidl_string& /*id*/,
                                                      getPhysicalCameraInfo_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    // This method works exactly the same as getCameraInfo_1_1() in the EVS HW module.
    _hidl_cb(mDescription);
    return {};
}


Return<EvsResult> EvsEmulatedCamera::doneWithFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers) {
    LOG(DEBUG) << __FUNCTION__;

    for (auto&& buffer : buffers) {
        doneWithFrame_impl(buffer.bufferId, buffer.buffer.nativeHandle);
    }

    return EvsResult::OK;
}


Return<EvsResult> EvsEmulatedCamera::pauseVideoStream() {
    return EvsResult::UNDERLYING_SERVICE_ERROR;
}


Return<EvsResult> EvsEmulatedCamera::resumeVideoStream() {
    return EvsResult::UNDERLYING_SERVICE_ERROR;
}


Return<EvsResult> EvsEmulatedCamera::setMaster() {
    // TODO(b/162946784): Implement this operation
    return EvsResult::OK;
}


Return<EvsResult> EvsEmulatedCamera::forceMaster(const sp<IEvsDisplay_1_0>&) {
    // TODO(b/162946784): Implement this operation
    return EvsResult::OK;
}


Return<EvsResult> EvsEmulatedCamera::unsetMaster() {
    // TODO(b/162946784): Implement this operation
    return EvsResult::OK;
}


Return<void> EvsEmulatedCamera::getParameterList(getParameterList_cb _hidl_cb) {
    // TODO(b/162946784): Read emulated controls from the configuration and
    //                    return them.
    hidl_vec<CameraParam> hidlCtrls;
    _hidl_cb(hidlCtrls);
    return {};
}


Return<void> EvsEmulatedCamera::getIntParameterRange(CameraParam /*id*/,
                                                     getIntParameterRange_cb _hidl_cb) {
    // TODO(b/162946784): Read emulated controls from the configuration and
    //                    return them.
    _hidl_cb(0, 0, 0);
    return {};
}


Return<void> EvsEmulatedCamera::setIntParameter(CameraParam /*id*/,
                                                int32_t /*value*/,
                                                setIntParameter_cb _hidl_cb) {
    // TODO(b/162946784): Implement this operation
    hidl_vec<int32_t> values;
    values.resize(1);
    _hidl_cb(EvsResult::INVALID_ARG, values);
    return {};
}


Return<void> EvsEmulatedCamera::getIntParameter(CameraParam /*id*/,
                                                getIntParameter_cb _hidl_cb) {
    // TODO(b/162946784): Implement this operation
    hidl_vec<int32_t> values;
    values.resize(1);
    _hidl_cb(EvsResult::INVALID_ARG, values);
    return {};
}


Return<EvsResult> EvsEmulatedCamera::setExtendedInfo_1_1(uint32_t opaqueIdentifier,
                                                         const hidl_vec<uint8_t>& opaqueValue) {
    mExtInfo.insert_or_assign(opaqueIdentifier, opaqueValue);
    return EvsResult::OK;
}


Return<void> EvsEmulatedCamera::getExtendedInfo_1_1(uint32_t opaqueIdentifier,
                                                    getExtendedInfo_1_1_cb _hidl_cb) {
    const auto it = mExtInfo.find(opaqueIdentifier);
    hidl_vec<uint8_t> value;
    auto status = EvsResult::OK;
    if (it == mExtInfo.end()) {
        status = EvsResult::INVALID_ARG;
    } else {
        value = it->second;
    }

    _hidl_cb(status, value);
    return {};
}


Return<void> EvsEmulatedCamera::importExternalBuffers(const hidl_vec<BufferDesc_1_1>& buffers,
                                                      importExternalBuffers_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo->isOpen()) {
        LOG(WARNING) << "Ignoring a request to add external buffers "
                     << "when camera has been lost.";
        _hidl_cb(EvsResult::UNDERLYING_SERVICE_ERROR, mFramesAllowed);
        return {};
    }

    auto numBuffersToAdd = buffers.size();
    if (numBuffersToAdd < 1) {
        LOG(DEBUG) << "No buffers to add.";
        _hidl_cb(EvsResult::OK, mFramesAllowed);
        return {};
    }

    {
        std::scoped_lock<std::mutex> lock(mAccessLock);

        if (numBuffersToAdd > (MAX_BUFFERS_IN_FLIGHT - mFramesAllowed)) {
            // Clamp the request so the total stays within MAX_BUFFERS_IN_FLIGHT
            numBuffersToAdd = MAX_BUFFERS_IN_FLIGHT - mFramesAllowed;
            LOG(WARNING) << "Exceeded the limit on the number of buffers; only "
                         << numBuffersToAdd << " buffers will be added.";
        }

        GraphicBufferMapper& mapper = GraphicBufferMapper::get();
        const auto before = mFramesAllowed;
        for (size_t i = 0; i < numBuffersToAdd; ++i) {
            auto& b = buffers[i];
            const AHardwareBuffer_Desc* pDesc =
                reinterpret_cast<const AHardwareBuffer_Desc *>(&b.buffer.description);

            // Import a buffer to add
            buffer_handle_t memHandle = nullptr;
            status_t result = mapper.importBuffer(b.buffer.nativeHandle,
                                                  pDesc->width,
                                                  pDesc->height,
                                                  pDesc->layers,
                                                  pDesc->format,
                                                  pDesc->usage,
                                                  pDesc->stride,
                                                  &memHandle);
            if (result != android::NO_ERROR || !memHandle) {
                LOG(WARNING) << "Failed to import buffer " << b.bufferId;
                continue;
            }

            auto stored = false;
            for (auto&& rec : mBuffers) {
                if (rec.handle == nullptr) {
                    // Use this existing entry
                    rec.handle = memHandle;
                    rec.inUse = false;

                    stored = true;
                    break;
                }
            }

            if (!stored) {
                // Add a BufferRecord wrapping this handle to our set of available buffers
                mBuffers.emplace_back(memHandle);
            }

            ++mFramesAllowed;
        }

        _hidl_cb(EvsResult::OK, mFramesAllowed - before);
        return {};
    }
}


EvsResult EvsEmulatedCamera::doneWithFrame_impl(const uint32_t bufferId,
                                                const buffer_handle_t memHandle) {
    std::scoped_lock<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo->isOpen()) {
        LOG(WARNING) << "Ignoring doneWithFrame call when camera has been lost.";
    } else {
        if (memHandle == nullptr) {
            LOG(ERROR) << "Ignoring doneWithFrame called with null handle";
        } else if (bufferId >= mBuffers.size()) {
            LOG(ERROR) << "Ignoring doneWithFrame called with invalid bufferId " << bufferId
                       << " (max is " << mBuffers.size() - 1 << ")";
        } else if (!mBuffers[bufferId].inUse) {
            LOG(ERROR) << "Ignoring doneWithFrame called on frame " << bufferId
                       << " which is already free";
        } else {
            // Mark the frame as available
            mBuffers[bufferId].inUse = false;
            --mFramesInUse;

            // If this frame's index is high in the array, try to move it down
            // to improve locality after mFramesAllowed has been reduced.
            if (bufferId >= mFramesAllowed) {
                // Find an empty slot lower in the array (which should always exist in this case)
                for (auto&& rec : mBuffers) {
                    if (rec.handle == nullptr) {
                        rec.handle = mBuffers[bufferId].handle;
                        mBuffers[bufferId].handle = nullptr;
                        break;
                    }
                }
            }
        }
    }

    return EvsResult::OK;
}
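
// Both the V1.0 doneWithFrame() and the V1.1 doneWithFrame_1_1() entry points funnel into
// this helper.  The bufferId is trusted as a direct index into mBuffers; the handle passed
// by the client is only checked for null and is not compared against the stored record.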


bool EvsEmulatedCamera::setAvailableFrames_Locked(unsigned bufferCount) {
    if (bufferCount < 1) {
        LOG(ERROR) << "Rejecting a buffer request to set buffer count to zero";
        return false;
    }

    if (bufferCount > MAX_BUFFERS_IN_FLIGHT) {
        LOG(ERROR) << "Rejecting a buffer request in excess of internal limit";
        return false;
    }

    // Is an increase required?
    if (mFramesAllowed < bufferCount) {
        // An increase is required
        unsigned needed = bufferCount - mFramesAllowed;
        LOG(INFO) << "Allocating " << needed << " buffers for camera frames";

        unsigned added = increaseAvailableFrames_Locked(needed);
        if (added != needed) {
            // If we didn't add all the frames we needed, then roll back to the previous state
            LOG(ERROR) << "Rolling back to previous frame queue size";
            decreaseAvailableFrames_Locked(added);
            return false;
        }
    } else if (mFramesAllowed > bufferCount) {
        // A decrease is required
        unsigned framesToRelease = mFramesAllowed - bufferCount;
        LOG(INFO) << "Returning " << framesToRelease << " camera frame buffers";

        unsigned released = decreaseAvailableFrames_Locked(framesToRelease);
        if (released != framesToRelease) {
            // This shouldn't happen with a properly behaving client because the client
            // should only make this call after returning sufficient outstanding buffers
            // to allow a clean resize.
            LOG(ERROR) << "Buffer queue shrink failed -- too many buffers currently in use?";
        }
    }

    return true;
}
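
// Resize semantics: growing the pool allocates buffers one at a time and rolls the pool
// back if any allocation fails, so the frame count either reaches the requested size or is
// left unchanged.  Shrinking only frees records that are not currently held by the client,
// so it can fall short if too many buffers are still outstanding.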


unsigned EvsEmulatedCamera::increaseAvailableFrames_Locked(unsigned numToAdd) {
    // Acquire the graphics buffer allocator
    GraphicBufferAllocator& alloc(GraphicBufferAllocator::get());

    unsigned added = 0;

    while (added < numToAdd) {
        unsigned pixelsPerLine;
        buffer_handle_t memHandle = nullptr;
        status_t result = alloc.allocate(mCaptureDeviceDesc.width, mCaptureDeviceDesc.height,
                                         mFormat, 1 /* layers */, mUsage,
                                         &memHandle, &pixelsPerLine, 0, "EvsEmulatedCamera");
        if (result != NO_ERROR) {
            LOG(ERROR) << "Error " << result << " allocating "
                       << mCaptureDeviceDesc.width << " x " << mCaptureDeviceDesc.height
                       << " graphics buffer";
            break;
        }

        if (!memHandle) {
            LOG(ERROR) << "We didn't get a buffer handle back from the allocator";
            break;
        }

        if (mStride) {
            if (mStride != pixelsPerLine) {
                LOG(ERROR) << "We did not expect to get buffers with different strides!";
            }
        } else {
            // Gralloc defines stride in terms of pixels per line
            mStride = pixelsPerLine;
        }

        // Find a place to store the new buffer
        bool stored = false;
        for (auto&& rec : mBuffers) {
            if (rec.handle == nullptr) {
                // Use this existing entry
                rec.handle = memHandle;
                rec.inUse = false;
                stored = true;
                break;
            }
        }

        if (!stored) {
            // Add a BufferRecord wrapping this handle to our set of available buffers
            mBuffers.emplace_back(memHandle);
        }

        mFramesAllowed++;
        added++;
    }

    return added;
}
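
// Note: gralloc reports the allocated row stride in pixels; the first successful allocation
// above latches it into mStride, and forwardFrame() advertises that same stride in the
// buffer description sent to the client, so every buffer in the pool is expected to share
// one stride.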


unsigned EvsEmulatedCamera::decreaseAvailableFrames_Locked(unsigned numToRemove) {
    // Acquire the graphics buffer allocator
    GraphicBufferAllocator& alloc(GraphicBufferAllocator::get());

    unsigned removed = 0;

    for (auto&& rec : mBuffers) {
        // Is this record not in use, but holding a buffer that we can free?
        if ((rec.inUse == false) && (rec.handle != nullptr)) {
            // Release buffer and update the record so we can recognize it as "empty"
            alloc.free(rec.handle);
            rec.handle = nullptr;

            mFramesAllowed--;
            removed++;

            if (removed == numToRemove) {
                break;
            }
        }
    }

    return removed;
}


// This is the async callback from the video camera that tells us a frame is ready
void EvsEmulatedCamera::forwardFrame(imageBufferDesc* pBufferInfo, void* pData) {
    bool readyForFrame = false;
    size_t idx = 0;

    // Lock scope for updating shared state
    {
        std::scoped_lock<std::mutex> lock(mAccessLock);

        // Are we allowed to issue another buffer?
        if (mFramesInUse >= mFramesAllowed) {
            // Can't do anything right now -- skip this frame
            LOG(WARNING) << "Skipped a frame because too many are in flight";
        } else {
            // Identify an available buffer to fill
            for (idx = 0; idx < mBuffers.size(); idx++) {
                if (!mBuffers[idx].inUse) {
                    if (mBuffers[idx].handle != nullptr) {
                        // Found an available record, so stop looking
                        break;
                    }
                }
            }

            if (idx >= mBuffers.size()) {
                // This shouldn't happen since we already checked mFramesInUse vs mFramesAllowed
                LOG(ERROR) << "Failed to find an available buffer slot";
            } else {
                // We're going to make the frame busy
                mBuffers[idx].inUse = true;
                readyForFrame = true;
                ++mFramesInUse;
            }
        }
    }

    if (!readyForFrame) {
        // We need to return the video buffer so it can capture a new frame
        mVideo->markFrameConsumed();
    } else {
        // Assemble the buffer description we'll transmit below
        BufferDesc_1_1 bufDesc_1_1 = {};
        AHardwareBuffer_Desc* pDesc =
            reinterpret_cast<AHardwareBuffer_Desc *>(&bufDesc_1_1.buffer.description);

        pDesc->width  = mCaptureDeviceDesc.width;
        pDesc->height = mCaptureDeviceDesc.height;
        pDesc->layers = 1;
        pDesc->format = mFormat;
        pDesc->usage  = mUsage;
        pDesc->stride = mStride;
        bufDesc_1_1.buffer.nativeHandle = mBuffers[idx].handle;
        bufDesc_1_1.bufferId = idx;
        bufDesc_1_1.deviceId = mDescription.v1.cameraId;
        // timestamp in microseconds.
        bufDesc_1_1.timestamp = systemTime(SYSTEM_TIME_MONOTONIC);

        // Lock our output buffer for writing
        void* targetPixels = nullptr;
        GraphicBufferMapper& mapper = GraphicBufferMapper::get();
        status_t result =
            mapper.lock(bufDesc_1_1.buffer.nativeHandle,
                        GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_NEVER,
                        android::Rect(pDesc->width, pDesc->height),
                        (void **)&targetPixels);

        // If we failed to lock the pixel buffer, we're about to crash, but log it first
        if (!targetPixels) {
            LOG(ERROR) << "Camera failed to gain access to image buffer for writing - "
                       << "status: " << statusToString(result)
                       << ", error: " << strerror(errno);
        }

        // Transfer the video image into the output buffer, making any needed
        // format conversion along the way
        switch (pBufferInfo->info.format) {
            case V4L2_PIX_FMT_YUYV:
                fillRGBAFromYUYV(bufDesc_1_1,
                                 reinterpret_cast<uint8_t*>(targetPixels),
                                 pData,
                                 mVideo->getStride(),
                                 mVideo->getHeight());
                break;

            case V4L2_PIX_FMT_XBGR32:
                [[fallthrough]];
            case V4L2_PIX_FMT_ABGR32:
                fillBufferCopy(bufDesc_1_1,
                               reinterpret_cast<uint8_t*>(targetPixels),
                               pData,
                               mVideo->getStride(),
                               mVideo->getHeight());
                break;

            default:
                LOG(ERROR) << "Source data is in an unsupported format";
                break;
        }

        // Unlock the output buffer
        mapper.unlock(bufDesc_1_1.buffer.nativeHandle);

        // Give the video frame back to the underlying device for reuse
        // Note that we do this before making the client callback to give the
        // underlying camera more time to capture the next frame
        mVideo->markFrameConsumed();

        // Issue the (asynchronous) callback to the client -- can't be holding
        // the lock
        bool frameDelivered = false;
        {
            hidl_vec<BufferDesc_1_1> frames;
            frames.resize(1);
            frames[0] = bufDesc_1_1;
            auto result = mStream->deliverFrame_1_1(frames);
            frameDelivered = result.isOk();
        }

        if (frameDelivered) {
            LOG(DEBUG) << "Delivered " << bufDesc_1_1.buffer.nativeHandle.getNativeHandle()
                       << " as id " << bufDesc_1_1.bufferId;
        } else {
            // This can happen if the client dies and is likely unrecoverable.
            // To avoid consuming resources generating failing calls, we stop sending
            // frames.  Note, however, that the stream remains in the "STREAMING" state
            // until cleaned up on the main thread.
            LOG(ERROR) << "Frame delivery call failed in the transport layer.";

            // Since we didn't actually deliver it, mark the frame as available
            std::scoped_lock<std::mutex> lock(mAccessLock);
            mBuffers[idx].inUse = false;

            --mFramesInUse;
        }
    }
}
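
// Note on the sequencing above: the V4L2 frame is handed back to the capture device before
// the client callback is made, so capture of the next frame can overlap with delivery, and
// deliverFrame_1_1() is invoked without holding mAccessLock.  A failed delivery simply
// reclaims the gralloc record so the slot can be reused for a later frame.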


sp<EvsEmulatedCamera> EvsEmulatedCamera::Create(const char *deviceName,
                                                const EmulatedCameraDesc& desc) {
    LOG(INFO) << "Create " << deviceName;
    sp<EvsEmulatedCamera> pCamera = new EvsEmulatedCamera(deviceName, desc);
    if (pCamera->openDevice()) {
        return pCamera;
    } else {
        LOG(ERROR) << "Failed to open a video device.";
        return nullptr;
    }
}


} // namespace implementation
} // namespace V1_1
} // namespace evs
} // namespace automotive
} // namespace android