/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "VirtualCamera.h"

#include "Enumerator.h"
#include "HalCamera.h"
#include "utils/include/Utils.h"

#include <android-base/file.h>
#include <android-base/logging.h>
#include <android-base/stringprintf.h>
#include <android/hardware_buffer.h>

#include <chrono>

namespace aidl::android::automotive::evs::implementation {

using ::aidl::android::hardware::automotive::evs::BufferDesc;
using ::aidl::android::hardware::automotive::evs::CameraDesc;
using ::aidl::android::hardware::automotive::evs::CameraParam;
using ::aidl::android::hardware::automotive::evs::DisplayState;
using ::aidl::android::hardware::automotive::evs::EvsEventDesc;
using ::aidl::android::hardware::automotive::evs::EvsEventType;
using ::aidl::android::hardware::automotive::evs::EvsResult;
using ::aidl::android::hardware::automotive::evs::IEvsCameraStream;
using ::aidl::android::hardware::automotive::evs::IEvsDisplay;
using ::aidl::android::hardware::automotive::evs::ParameterRange;
using ::aidl::android::hardware::common::NativeHandle;
using ::aidl::android::hardware::graphics::common::HardwareBuffer;
using ::android::base::StringAppendF;
using ::ndk::ScopedAStatus;
using ::std::chrono_literals::operator""s;

VirtualCamera::VirtualCamera(const std::vector<std::shared_ptr<HalCamera>>& halCameras) :
      mStreamState(STOPPED) {
    for (auto&& cam : halCameras) {
        mHalCamera.insert_or_assign(cam->getId(), cam);
    }
}

VirtualCamera::~VirtualCamera() {
    shutdown();
}

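// Returns one or more buffers to the hardware layer. Each buffer is looked up
// in the per-device "held" list keyed by its deviceId and, if found, handed
// back to the HalCamera that produced it.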
ScopedAStatus VirtualCamera::doneWithFrame(const std::vector<BufferDesc>& buffers) {
    std::lock_guard lock(mMutex);

    for (auto&& buffer : buffers) {
        // Find this buffer in our "held" list
        auto it = std::find_if(mFramesHeld[buffer.deviceId].begin(),
                               mFramesHeld[buffer.deviceId].end(),
                               [id = buffer.bufferId](const BufferDesc& buffer) {
                                   return id == buffer.bufferId;
                               });
        if (it == mFramesHeld[buffer.deviceId].end()) {
            // We should always find the frame in our "held" list
            LOG(WARNING) << "Ignoring doneWithFrame called with unrecognized frame id "
                         << buffer.bufferId;
            continue;
        }

        // Take this frame out of our "held" list
        BufferDesc bufferToReturn = std::move(*it);
        mFramesHeld[buffer.deviceId].erase(it);

        // Tell our parent that we're done with this buffer
        std::shared_ptr<HalCamera> pHwCamera = mHalCamera[buffer.deviceId].lock();
        if (pHwCamera) {
            auto status = pHwCamera->doneWithFrame(std::move(bufferToReturn));
            if (!status.isOk()) {
                LOG(WARNING) << "Failed to return a buffer " << buffer.bufferId;
            }
        } else {
            LOG(WARNING) << "Possible memory leak; " << buffer.deviceId << " is not valid.";
        }
    }

    return ScopedAStatus::ok();
}

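// Requests that this client become the primary client of the backing physical
// camera, after checking that the given display is valid and in a usable
// state. Logical (multi-device) cameras do not support this operation.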
ScopedAStatus VirtualCamera::forcePrimaryClient(const std::shared_ptr<IEvsDisplay>& display) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    if (display == nullptr) {
        LOG(ERROR) << __FUNCTION__ << ": Passed display is invalid";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    DisplayState state = DisplayState::DEAD;
    auto status = display->getDisplayState(&state);
    if (!status.isOk()) {
        LOG(ERROR) << "Failed to read current display state";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::UNDERLYING_SERVICE_ERROR);
    }

    auto displayStateRange = ::ndk::enum_range<DisplayState>();
    if (state == DisplayState::NOT_OPEN || state == DisplayState::DEAD ||
        std::find(displayStateRange.begin(), displayStateRange.end(), state) ==
                displayStateRange.end()) {
        LOG(ERROR) << __FUNCTION__ << ": Passed display is in invalid state";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::INVALID_ARG);
    }

    // mHalCamera is guaranteed to have at least one element.
    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->forcePrimaryClient(ref<VirtualCamera>());
}

ScopedAStatus VirtualCamera::getCameraInfo(CameraDesc* _aidl_return) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        // Logical camera description is stored in VirtualCamera object.
        *_aidl_return = *mDesc;
        return ScopedAStatus::ok();
    }

    // Straight pass through to hardware layer
    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        // No backing hardware camera is available.
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->getCameraInfo(_aidl_return);
}

ScopedAStatus VirtualCamera::getExtendedInfo(int32_t opaqueIdentifier,
                                             std::vector<uint8_t>* value) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->getExtendedInfo(opaqueIdentifier, value);
}

ScopedAStatus VirtualCamera::getIntParameter(CameraParam id, std::vector<int32_t>* value) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->getIntParameter(id, value);
}

ScopedAStatus VirtualCamera::getIntParameterRange(CameraParam id, ParameterRange* _aidl_return) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    // Straight pass through to hardware layer
    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->getIntParameterRange(id, _aidl_return);
}

ScopedAStatus VirtualCamera::getParameterList(std::vector<CameraParam>* _aidl_return) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    // Straight pass through to hardware layer
    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->getParameterList(_aidl_return);
}

ScopedAStatus VirtualCamera::getPhysicalCameraInfo(const std::string& deviceId,
                                                   CameraDesc* _aidl_return) {
    auto device = mHalCamera.find(deviceId);
    if (device == mHalCamera.end()) {
        LOG(ERROR) << "Requested device " << deviceId << " does not back this device.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::INVALID_ARG);
    }

    // Straight pass through to hardware layer
    auto pHwCamera = device->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << deviceId << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->getCameraInfo(_aidl_return);
}

ScopedAStatus VirtualCamera::importExternalBuffers(const std::vector<BufferDesc>& buffers,
                                                   int32_t* _aidl_return) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    int delta = 0;
    if (!pHwCamera->changeFramesInFlight(buffers, &delta)) {
        LOG(ERROR) << "Failed to add external capture buffers.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::UNDERLYING_SERVICE_ERROR);
    }

    mFramesAllowed += delta;
    *_aidl_return = delta;
    return ScopedAStatus::ok();
}

ScopedAStatus VirtualCamera::pauseVideoStream() {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->pauseVideoStream();
}

ScopedAStatus VirtualCamera::resumeVideoStream() {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->resumeVideoStream();
}

ScopedAStatus VirtualCamera::setExtendedInfo(int32_t opaqueIdentifier,
                                             const std::vector<uint8_t>& opaqueValue) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->setExtendedInfo(opaqueIdentifier, opaqueValue);
}

ScopedAStatus VirtualCamera::setIntParameter(CameraParam id, int32_t value,
                                             std::vector<int32_t>* effectiveValue) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    auto status = pHwCamera->setParameter(ref<VirtualCamera>(), id, &value);
    if (status.isOk()) {
        effectiveValue->push_back(value);
    }
    return status;
}

ScopedAStatus VirtualCamera::setPrimaryClient() {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->setPrimaryClient(ref<VirtualCamera>());
}

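// Negotiates how many frames this client may hold concurrently. The change is
// propagated to every backing HalCamera and rolled back on the cameras already
// updated if any of them rejects the new count.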
ScopedAStatus VirtualCamera::setMaxFramesInFlight(int32_t bufferCount) {
    if (bufferCount < 1) {
        LOG(ERROR) << "Given bufferCount = " << bufferCount
                   << " is invalid; it must be greater than zero.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::INVALID_ARG);
    }

    // How many buffers are we trying to add (or remove if negative)?
    int bufferCountChange = bufferCount - mFramesAllowed;

    // Ask our parent for more buffers
    bool result = true;
    std::vector<std::shared_ptr<HalCamera>> changedCameras;
    for (auto&& [key, hwCamera] : mHalCamera) {
        auto pHwCamera = hwCamera.lock();
        if (!pHwCamera) {
            continue;
        }

        result = pHwCamera->changeFramesInFlight(bufferCountChange);
        if (!result) {
            LOG(ERROR) << key << ": Failed to change buffer count by " << bufferCountChange
                       << " to " << bufferCount;
            break;
        }

        changedCameras.push_back(std::move(pHwCamera));
    }

    // Update our notion of how many frames we're allowed
    mFramesAllowed = bufferCount;

    if (!result) {
        // Roll back changes because we failed to update all cameras
        for (auto&& hwCamera : changedCameras) {
            LOG(WARNING) << "Rolling back a change on " << hwCamera->getId();
            hwCamera->changeFramesInFlight(-bufferCountChange);
        }

        // Restore the original buffer count
        mFramesAllowed -= bufferCountChange;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::BUFFER_NOT_AVAILABLE);
    }

    return ScopedAStatus::ok();
}

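// Starts frame delivery to the given receiver. Every backing HalCamera is
// asked to start streaming, and a capture thread is spawned that requests one
// frame per source and forwards the collected set to the client.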
ScopedAStatus VirtualCamera::startVideoStream(const std::shared_ptr<IEvsCameraStream>& receiver) {
    std::lock_guard lock(mMutex);

    if (!receiver) {
        LOG(ERROR) << "Given IEvsCameraStream object is invalid.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::INVALID_ARG);
    }

    // We only support a single stream at a time
    if (mStreamState != STOPPED) {
        LOG(ERROR) << "Ignoring startVideoStream call when a stream is already running.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::STREAM_ALREADY_RUNNING);
    }

    // Validate our held frame count is starting out at zero as we expect
    assert(mFramesHeld.empty());

    // Record the user's callback for use when we have a frame ready
    mStream = receiver;
    mStreamState = RUNNING;

    // Tell the underlying camera hardware that we want to stream
    for (auto iter = mHalCamera.begin(); iter != mHalCamera.end(); ++iter) {
        std::shared_ptr<HalCamera> pHwCamera = iter->second.lock();
        if (!pHwCamera) {
            LOG(ERROR) << "Failed to start a video stream on " << iter->first;
            continue;
        }

        LOG(INFO) << __FUNCTION__ << " starts a video stream on " << iter->first;
        if (!pHwCamera->clientStreamStarting().isOk()) {
            // If we failed to start the underlying stream, then we're not actually running
            mStream = nullptr;
            mStreamState = STOPPED;

            // Request to stop streams started by this client.
            auto rb = mHalCamera.begin();
            while (rb != iter) {
                auto ptr = rb->second.lock();
                if (ptr) {
                    ptr->clientStreamEnding(this);
                }
                ++rb;
            }

            return Utils::buildScopedAStatusFromEvsResult(EvsResult::UNDERLYING_SERVICE_ERROR);
        }
    }

    mCaptureThread = std::thread([this]() {
        // TODO(b/145466570): With a proper camera hang handler, we may want
        // to reduce this timeout.
        constexpr auto kFrameTimeout = 5s;  // timeout in seconds.
        int64_t lastFrameTimestamp = -1;
        EvsResult status = EvsResult::OK;
        while (true) {
            std::unique_lock lock(mMutex);
            ::android::base::ScopedLockAssertion assume_lock(mMutex);

            if (mStreamState != RUNNING) {
                // A video stream is stopped while a capture thread is acquiring
                // a lock.
                LOG(DEBUG) << "Requested to stop capturing frames";
                break;
            }

            unsigned count = 0;
            for (auto&& [key, hwCamera] : mHalCamera) {
                std::shared_ptr<HalCamera> pHwCamera = hwCamera.lock();
                if (!pHwCamera) {
                    LOG(WARNING) << "Invalid camera " << key << " is ignored.";
                    continue;
                }

                pHwCamera->requestNewFrame(ref<VirtualCamera>(), lastFrameTimestamp);
                mSourceCameras.insert(pHwCamera->getId());
                ++count;
            }

            if (count < 1) {
                LOG(ERROR) << "No camera is available.";
                status = EvsResult::RESOURCE_NOT_AVAILABLE;
                break;
            }

            if (!mFramesReadySignal.wait_for(lock, kFrameTimeout, [this]() REQUIRES(mMutex) {
                    // Stop waiting if 1) we've been asked to stop capturing
                    // new frames, or 2) we've received all requested frames.
                    return mStreamState != RUNNING || mSourceCameras.empty();
                })) {
                // This happens only when a new frame does not arrive before
                // the timer expires.
                LOG(DEBUG) << "Timer for a new frame expired";
                status = EvsResult::UNDERLYING_SERVICE_ERROR;
                break;
            }

            if (mStreamState != RUNNING || !mStream) {
                // A video stream is stopped while a capture thread is waiting
                // for a new frame or we have lost a client.
                LOG(DEBUG) << "Requested to stop capturing frames or lost a client";
                break;
            }

            // Fetch frames and forward to the client
            if (mFramesHeld.empty()) {
                // We do not have any frame to forward.
                continue;
            }

            // Pass this buffer through to our client
            std::vector<BufferDesc> frames;
            frames.resize(count);
            unsigned i = 0;
            for (auto&& [key, hwCamera] : mHalCamera) {
                std::shared_ptr<HalCamera> pHwCamera = hwCamera.lock();
                if (!pHwCamera || mFramesHeld[key].empty()) {
                    continue;
                }

                // Duplicate the latest buffer and forward it to the
                // active clients
                auto frame = Utils::dupBufferDesc(mFramesHeld[key].back(),
                                                  /* doDup= */ true);
                if (frame.timestamp > lastFrameTimestamp) {
                    lastFrameTimestamp = frame.timestamp;
                }
                frames[i++] = std::move(frame);
            }

            if (!mStream->deliverFrame(frames).isOk()) {
                LOG(WARNING) << "Failed to forward frames";
            }
        }

        LOG(DEBUG) << "Exiting a capture thread";
        if (status != EvsResult::OK && mStream) {
            EvsEventDesc event{
                    .aType = status == EvsResult::RESOURCE_NOT_AVAILABLE
                            ? EvsEventType::STREAM_ERROR
                            : EvsEventType::TIMEOUT,
                    .payload = {static_cast<int32_t>(status)},
            };
            if (!mStream->notify(std::move(event)).isOk()) {
                LOG(WARNING) << "Error delivering a stream event "
                             << static_cast<int32_t>(event.aType);
            }
        }
    });

    // TODO(b/213108625):
    // Detect and exit if we encounter a stalled stream or unresponsive driver?
    // Consider using a timer and watching for frame arrival?

    return ScopedAStatus::ok();
}

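// Stops frame delivery: notifies the client with STREAM_STOPPED, tells each
// backing HalCamera that this client's stream is ending, and joins the
// capture thread.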
ScopedAStatus VirtualCamera::stopVideoStream() {
    {
        std::lock_guard lock(mMutex);
        if (mStreamState != RUNNING) {
            // No action is required.
            return ScopedAStatus::ok();
        }

        // Tell the frame delivery pipeline we don't want any more frames
        mStreamState = STOPPING;

        // Wake the capture thread; it will terminate.
        mFramesReadySignal.notify_all();

        // Deliver the stream-ending notification
        EvsEventDesc event{
                .aType = EvsEventType::STREAM_STOPPED,
        };
        if (mStream && !mStream->notify(event).isOk()) {
            LOG(WARNING) << "Error delivering end of stream event";
        }

        // Since we are single threaded, no frame can be delivered while this function is running,
        // so we can go directly to the STOPPED state here on the server.
        // Note, however, that there still might be frames already queued that the client will see
        // after returning from the client side of this call.
        mStreamState = STOPPED;
    }

    // Give the underlying hardware camera the heads up that it might be time to stop
    for (auto&& [_, hwCamera] : mHalCamera) {
        auto pHwCamera = hwCamera.lock();
        if (pHwCamera) {
            pHwCamera->clientStreamEnding(this);
        }
    }

    // Signal a condition to unblock a capture thread and then join
    mSourceCameras.clear();
    mFramesReadySignal.notify_all();

    if (mCaptureThread.joinable()) {
        mCaptureThread.join();
    }

    return ScopedAStatus::ok();
}

ScopedAStatus VirtualCamera::unsetPrimaryClient() {
    if (!isValid()) {
        // Safely ignores a request if no hardware camera is active.
        return ScopedAStatus::ok();
    }

    if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (!pHwCamera) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->unsetPrimaryClient(this);
}

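// Releases every resource associated with this client: returns any frames
// still held, retires from primary-client status, detaches from the backing
// HalCamera objects, and joins the capture thread.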
void VirtualCamera::shutdown() {
    {
        std::lock_guard lock(mMutex);

        // In normal operation, the stream should already be stopped by the time we get here
        if (mStreamState != RUNNING) {
            return;
        }

        // Note that if we hit this case, no terminating frame will be sent to the client,
        // but they're probably already dead anyway.
        LOG(WARNING) << "Virtual camera being shut down while stream is running";

        // Tell the frame delivery pipeline we don't want any more frames
        mStreamState = STOPPING;

        // Return buffers held by this client
        for (auto&& [key, hwCamera] : mHalCamera) {
            auto pHwCamera = hwCamera.lock();
            if (!pHwCamera) {
                LOG(WARNING) << "Camera device " << key << " is not alive.";
                continue;
            }

            if (!mFramesHeld[key].empty()) {
                LOG(WARNING) << "VirtualCamera destructing with frames in flight.";

                // Return to the underlying hardware camera any buffers the client was holding
                while (!mFramesHeld[key].empty()) {
                    auto it = mFramesHeld[key].begin();
                    pHwCamera->doneWithFrame(std::move(*it));
                    mFramesHeld[key].erase(it);
                }
            }

            // Retire from primary-client status
            pHwCamera->unsetPrimaryClient(this);

            // Give the underlying hardware camera the heads up that it might be time to stop
            pHwCamera->clientStreamEnding(this);

            // Retire from the participating HW camera's client list
            pHwCamera->disownVirtualCamera(this);
        }

        // Wake the capture thread; it will terminate.
        mFramesReadySignal.notify_all();
    }

    // Join a capture thread
    if (mCaptureThread.joinable()) {
        mCaptureThread.join();
    }

    mFramesHeld.clear();

    // Drop our reference to our associated hardware camera
    mHalCamera.clear();
}

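// Returns strong references to the backing HalCamera objects that are still alive.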
std::vector<std::shared_ptr<HalCamera>> VirtualCamera::getHalCameras() {
    std::vector<std::shared_ptr<HalCamera>> cameras;
    for (auto&& [key, cam] : mHalCamera) {
        auto ptr = cam.lock();
        if (ptr) {
            cameras.push_back(std::move(ptr));
        }
    }

    return cameras;
}

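// Called by a HalCamera to hand a new frame to this client. The frame is
// either recorded in the "held" list and announced to the capture thread, or
// rejected with a FRAME_DROPPED event when the client is at its buffer quota.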
bool VirtualCamera::deliverFrame(const BufferDesc& bufDesc) {
    std::lock_guard lock(mMutex);

    if (mStreamState == STOPPED) {
        // A stopped stream gets no frames
        LOG(ERROR) << "A stopped stream should not get any frames";
        return false;
    }

    if (mFramesHeld[bufDesc.deviceId].size() >= mFramesAllowed) {
        // Indicate that we declined to send the frame to the client because they're at quota
        LOG(INFO) << "Skipping new frame as we hold " << mFramesHeld[bufDesc.deviceId].size()
                  << " of " << mFramesAllowed;

        if (mStream) {
            // Report a frame drop to the client.
            EvsEventDesc event;
            event.deviceId = bufDesc.deviceId;
            event.aType = EvsEventType::FRAME_DROPPED;
            if (!mStream->notify(event).isOk()) {
                LOG(WARNING) << "Error delivering a frame-drop event";
            }
        }

        // Mark that a new frame has arrived even though it was not accepted
        mSourceCameras.erase(bufDesc.deviceId);
        mFramesReadySignal.notify_all();

        return false;
    }

    // Keep a record of this frame so we can clean up if we have to in case of client death
    mFramesHeld[bufDesc.deviceId].push_back(
            std::move(Utils::dupBufferDesc(bufDesc, /* doDup= */ true)));

    // v1.0 client uses an old frame-delivery mechanism.
    if (mCaptureThread.joinable()) {
        // Keep forwarding frames as long as a capture thread is alive
        // Notify a new frame receipt
        mSourceCameras.erase(bufDesc.deviceId);
        mFramesReadySignal.notify_all();
    }

    return true;
}

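// Called by a HalCamera to forward a stream event. STREAM_STOPPED triggers a
// local stopVideoStream(); other events are passed through to the client.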
bool VirtualCamera::notify(const EvsEventDesc& event) {
    switch (event.aType) {
        case EvsEventType::STREAM_STOPPED: {
            {
                std::lock_guard lock(mMutex);
                if (mStreamState != RUNNING) {
                    // We're not actively consuming a video stream or already in
                    // a process to stop a video stream.
                    return true;
                }

                // Warn if we got an unexpected stream termination
                LOG(WARNING) << "Stream unexpectedly stopped, current status " << mStreamState;
            }

            // Clean up the resource and forward an event to the client
            stopVideoStream();
            return true;
        }

        // v1.0 client will ignore all other events.
        case EvsEventType::PARAMETER_CHANGED:
            LOG(DEBUG) << "A camera parameter " << event.payload[0] << " is set to "
                       << event.payload[1];
            break;

        case EvsEventType::MASTER_RELEASED:
            LOG(DEBUG) << "The primary client has been released";
            break;

        default:
            LOG(WARNING) << "Unknown event id " << static_cast<int32_t>(event.aType);
            break;
    }

    // Forward a received event to the v1.1 client
    if (mStream && !mStream->notify(event).isOk()) {
        LOG(ERROR) << "Failed to forward an event";
        return false;
    }

    return true;
}

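// Produces a human-readable dump of this client's state for debugging.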
std::string VirtualCamera::toString(const char* indent) const {
    std::string buffer;
    StringAppendF(&buffer,
                  "%sLogical camera device: %s\n"
                  "%sFramesAllowed: %u\n"
                  "%sFrames in use:\n",
                  indent, mHalCamera.size() > 1 ? "T" : "F", indent, mFramesAllowed, indent);

    std::string next_indent(indent);
    next_indent += "\t";
    for (auto&& [id, queue] : mFramesHeld) {
        StringAppendF(&buffer, "%s%s: %d\n", next_indent.data(), id.data(),
                      static_cast<int>(queue.size()));
    }
    StringAppendF(&buffer, "%sCurrent stream state: %d\n", indent, mStreamState);

    return buffer;
}

}  // namespace aidl::android::automotive::evs::implementation