/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "VirtualCamera.h"
#include "HalCamera.h"
#include "Enumerator.h"

#include <android/hardware_buffer.h>
#include <android-base/file.h>
#include <android-base/logging.h>
#include <android-base/stringprintf.h>

#include <chrono>

using ::android::base::StringAppendF;
using ::android::base::StringPrintf;
using ::android::base::WriteStringToFd;
using ::android::hardware::automotive::evs::V1_0::DisplayState;

// Needed for the "5s" duration literal used by the capture thread below.
using namespace std::chrono_literals;


namespace android {
namespace automotive {
namespace evs {
namespace V1_1 {
namespace implementation {


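// VirtualCamera wraps one or more underlying HalCamera devices on behalf of a
// single client.  Only weak references (keyed by device id) are kept here so
// that the physical cameras can be shared with other clients, and the frames
// this client currently holds are tracked so they can be returned to the
// hardware if the client exits without releasing them.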
VirtualCamera::VirtualCamera(const std::vector<sp<HalCamera>>& halCameras) :
    mStreamState(STOPPED) {
    for (auto&& cam : halCameras) {
        mHalCamera.try_emplace(cam->getId(), cam);
    }
}


VirtualCamera::~VirtualCamera() {
    shutdown();
}


void VirtualCamera::shutdown() {
    // In normal operation, the stream should already be stopped by the time we get here
    if (mStreamState == RUNNING) {
        // Note that if we hit this case, no terminating frame will be sent to the client,
        // but they're probably already dead anyway.
        LOG(WARNING) << "Virtual camera being shut down while stream is running";

        // Tell the frame delivery pipeline we don't want any more frames
        mStreamState = STOPPING;

        // Wake up the capture thread; it will terminate.
        mFramesReadySignal.notify_all();

        // Return any buffers still held by this client
        for (auto&& [key, hwCamera] : mHalCamera) {
            auto pHwCamera = hwCamera.promote();
            if (pHwCamera == nullptr) {
                LOG(WARNING) << "Camera device " << key << " is not alive.";
                continue;
            }

            if (mFramesHeld[key].size() > 0) {
                LOG(WARNING) << "VirtualCamera destructing with frames in flight.";

                // Return to the underlying hardware camera any buffers the client was holding
                for (auto&& heldBuffer : mFramesHeld[key]) {
                    // Tell our parent that we're done with this buffer
                    pHwCamera->doneWithFrame(heldBuffer);
                }
                mFramesHeld[key].clear();
            }

            // Retire from the primary client role, if we held it
            pHwCamera->unsetMaster(this);

            // Give the underlying hardware camera the heads up that it might be time to stop
            pHwCamera->clientStreamEnding(this);
        }

        // Join the capture thread
        if (mCaptureThread.joinable()) {
            mCaptureThread.join();
        }

        mFramesHeld.clear();

        // Drop our references to the associated hardware cameras
        mHalCamera.clear();
    }
}


std::vector<sp<HalCamera>> VirtualCamera::getHalCameras() {
    std::vector<sp<HalCamera>> cameras;
    for (auto&& [key, cam] : mHalCamera) {
        auto ptr = cam.promote();
        if (ptr != nullptr) {
            cameras.emplace_back(ptr);
        }
    }

    return cameras;
}


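// Receives a new frame from one of the backing HalCamera devices.  The frame
// is recorded in mFramesHeld and forwarded to the client (directly for v1.0
// clients, via the capture thread for v1.1 clients), or dropped, with a
// FRAME_DROPPED event for v1.1 clients, when the client already holds its full
// quota of buffers.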
bool VirtualCamera::deliverFrame(const BufferDesc_1_1& bufDesc) {
    if (mStreamState == STOPPED) {
        // A stopped stream gets no frames
        LOG(ERROR) << "A stopped stream should not get any frames";
        return false;
    } else if (mFramesHeld[bufDesc.deviceId].size() >= mFramesAllowed) {
        // Indicate that we declined to send the frame to the client because they're at quota
        LOG(INFO) << "Skipping new frame as we hold " << mFramesHeld[bufDesc.deviceId].size()
                  << " of " << mFramesAllowed;

        if (mStream_1_1 != nullptr) {
            // Report a frame drop to the v1.1 client.
            EvsEventDesc event;
            event.deviceId = bufDesc.deviceId;
            event.aType = EvsEventType::FRAME_DROPPED;
            auto result = mStream_1_1->notify(event);
            if (!result.isOk()) {
                LOG(ERROR) << "Error delivering a frame drop notification";
            }
        }

        // Mark that a new frame has arrived even though it was not accepted
        {
            std::lock_guard<std::mutex> lock(mFrameDeliveryMutex);
            mSourceCameras.erase(bufDesc.deviceId);
            mFramesReadySignal.notify_all();
        }

        return false;
    } else {
        // Keep a record of this frame so that we can clean up if the client dies
        mFramesHeld[bufDesc.deviceId].emplace_back(bufDesc);

        // A v1.0 client uses the old frame-delivery mechanism.
        if (mStream_1_1 == nullptr) {
            // Forward the frame to the v1.0 client
            BufferDesc_1_0 frame_1_0 = {};
            const AHardwareBuffer_Desc* pDesc =
                reinterpret_cast<const AHardwareBuffer_Desc*>(&bufDesc.buffer.description);
            frame_1_0.width = pDesc->width;
            frame_1_0.height = pDesc->height;
            frame_1_0.format = pDesc->format;
            frame_1_0.usage = pDesc->usage;
            frame_1_0.stride = pDesc->stride;
            frame_1_0.memHandle = bufDesc.buffer.nativeHandle;
            frame_1_0.pixelSize = bufDesc.pixelSize;
            frame_1_0.bufferId = bufDesc.bufferId;

            mStream->deliverFrame(frame_1_0);
        } else if (mCaptureThread.joinable()) {
            // Keep forwarding frames as long as the capture thread is alive
            if (mFramesHeld.size() > 0 && mStream_1_1 != nullptr) {
                // Notify the capture thread that a new frame has been received
                std::lock_guard<std::mutex> lock(mFrameDeliveryMutex);
                mSourceCameras.erase(bufDesc.deviceId);
                mFramesReadySignal.notify_all();
            }
        }

        return true;
    }
}


bool VirtualCamera::notify(const EvsEventDesc& event) {
    switch (event.aType) {
        case EvsEventType::STREAM_STOPPED:
            if (mStreamState != STOPPING) {
                // Warn if we got an unexpected stream termination
                LOG(WARNING) << "Stream unexpectedly stopped, current status "
                             << mStreamState;

                // Clean up the resources and forward the event to the client
                stopVideoStream();

                // This event is handled properly.
                return true;
            }

            if (mStream_1_1 == nullptr) {
                // Send a null frame instead, for v1.0 clients
                auto result = mStream->deliverFrame({});
                if (!result.isOk()) {
                    LOG(ERROR) << "Error delivering end of stream marker";
                }
            }
            break;

        // v1.0 clients will ignore all other events.
        case EvsEventType::PARAMETER_CHANGED:
            LOG(DEBUG) << "A camera parameter " << event.payload[0]
                       << " is set to " << event.payload[1];
            break;

        case EvsEventType::MASTER_RELEASED:
            LOG(DEBUG) << "The primary client has been released";
            break;

        default:
            LOG(WARNING) << "Unknown event id " << static_cast<int32_t>(event.aType);
            break;
    }

    if (mStream_1_1 != nullptr) {
        // Forward the received event to the v1.1 client
        auto result = mStream_1_1->notify(event);
        if (!result.isOk()) {
            LOG(ERROR) << "Failed to forward an event";
            return false;
        }
    }

    return true;
}

// Methods from ::android::hardware::automotive::evs::V1_0::IEvsCamera follow.
Return<void> VirtualCamera::getCameraInfo(getCameraInfo_cb info_cb) {
    // Straight pass-through to the hardware layer
    if (mHalCamera.size() > 1) {
        LOG(ERROR) << __FUNCTION__
                   << " must NOT be called on a logical camera object.";
        info_cb({});
        return Void();
    }

    auto halCamera = mHalCamera.begin()->second.promote();
    if (halCamera != nullptr) {
        return halCamera->getHwCamera()->getCameraInfo(info_cb);
    } else {
        info_cb({});
        return Void();
    }
}


Return<EvsResult> VirtualCamera::setMaxFramesInFlight(uint32_t bufferCount) {
    // How many buffers are we trying to add (or remove, if negative)?
    int bufferCountChange = bufferCount - mFramesAllowed;

    // Ask our parents for more buffers
    bool result = true;
    std::vector<sp<HalCamera>> changedCameras;
    for (auto&& [key, hwCamera] : mHalCamera) {
        auto pHwCam = hwCamera.promote();
        if (pHwCam == nullptr) {
            continue;
        }

        result = pHwCam->changeFramesInFlight(bufferCountChange);
        if (!result) {
            LOG(ERROR) << key
                       << ": Failed to change the buffer count by " << bufferCountChange
                       << " to " << bufferCount;
            break;
        }

        changedCameras.emplace_back(pHwCam);
    }

    // Update our notion of how many frames we're allowed
    mFramesAllowed = bufferCount;

    if (!result) {
        // Roll back the changes because we failed to update all of the cameras
        for (auto&& hwCamera : changedCameras) {
            LOG(WARNING) << "Rolling back a change on " << hwCamera->getId();
            hwCamera->changeFramesInFlight(-bufferCountChange);
        }

        // Restore the original buffer count
        mFramesAllowed -= bufferCountChange;
        return EvsResult::BUFFER_NOT_AVAILABLE;
    } else {
        return EvsResult::OK;
    }
}


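// Starts streaming from every backing HalCamera.  This is all-or-nothing: if
// any underlying stream fails to start, the streams already started for this
// client are ended again and an error is returned.  For v1.1 clients a capture
// thread is also spawned to collect one frame from each source and deliver
// them to the client as a batch.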
Return<EvsResult> VirtualCamera::startVideoStream(const ::android::sp<IEvsCameraStream_1_0>& stream) {
    // We only support a single stream at a time
    if (mStreamState != STOPPED) {
        LOG(ERROR) << "Ignoring startVideoStream call when a stream is already running.";
        return EvsResult::STREAM_ALREADY_RUNNING;
    }

    // Validate that our held frame count is starting out at zero as we expect
    assert(mFramesHeld.size() == 0);

    // Record the user's callback for use when we have a frame ready
    mStream = stream;
    mStream_1_1 = IEvsCameraStream_1_1::castFrom(stream).withDefault(nullptr);
    if (mStream_1_1 == nullptr) {
        LOG(INFO) << "Start video stream for v1.0 client.";
    } else {
        LOG(INFO) << "Start video stream for v1.1 client.";
    }

    mStreamState = RUNNING;

    // Tell the underlying camera hardware that we want to stream
    auto iter = mHalCamera.begin();
    while (iter != mHalCamera.end()) {
        auto pHwCamera = iter->second.promote();
        if (pHwCamera == nullptr) {
            LOG(ERROR) << "Failed to start a video stream on " << iter->first;
            ++iter;  // advance the iterator so a dead camera cannot stall this loop
            continue;
        }

        LOG(INFO) << __FUNCTION__
                  << " starts a video stream on " << iter->first;
        Return<EvsResult> result = pHwCamera->clientStreamStarting();
        if ((!result.isOk()) || (result != EvsResult::OK)) {
            // If we failed to start the underlying stream, then we're not actually running
            mStream = mStream_1_1 = nullptr;
            mStreamState = STOPPED;

            // Request to stop the streams already started by this client.
            auto rb = mHalCamera.begin();
            while (rb != iter) {
                auto ptr = rb->second.promote();
                if (ptr != nullptr) {
                    ptr->clientStreamEnding(this);
                }
                ++rb;
            }
            return EvsResult::UNDERLYING_SERVICE_ERROR;
        }
        ++iter;
    }

    // Start a thread that waits on the fence and forwards collected frames
    // to the v1.1 client.
    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (mStream_1_1 != nullptr && pHwCamera != nullptr) {
        mCaptureThread = std::thread([this]() {
            // TODO(b/145466570): With a proper camera hang handler, we may want
            // to reduce this timeout.
            constexpr auto kFrameTimeout = 5s;  // timeout in seconds.
            int64_t lastFrameTimestamp = -1;
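            // Each pass of this loop requests one new frame from every backing
            // camera, waits until all of them have arrived (or the timeout
            // expires, or a stop is requested), and then forwards the newest
            // frame from each camera to the v1.1 client in a single
            // deliverFrame_1_1() call.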
            while (mStreamState == RUNNING) {
                unsigned count = 0;
                for (auto&& [key, hwCamera] : mHalCamera) {
                    auto pHwCamera = hwCamera.promote();
                    if (pHwCamera == nullptr) {
                        LOG(WARNING) << "Invalid camera " << key << " is ignored.";
                        continue;
                    }

                    pHwCamera->requestNewFrame(this, lastFrameTimestamp);
                    {
                        std::lock_guard<std::mutex> lock(mFrameDeliveryMutex);
                        mSourceCameras.emplace(pHwCamera->getId());
                    }
                    ++count;
                }

                std::unique_lock<std::mutex> lock(mFrameDeliveryMutex);
                if (!mFramesReadySignal.wait_for(lock,
                                                 kFrameTimeout,
                                                 [this]() REQUIRES(mFrameDeliveryMutex) {
                                                     // Stop waiting if
                                                     // 1) we've been asked to stop capturing
                                                     //    new frames, or
                                                     // 2) we've received all requested frames.
                                                     return mStreamState != RUNNING ||
                                                            mSourceCameras.empty();
                                                 })) {
                    // We get here only when no new frame arrived before the
                    // timer expired; give up and exit the capture loop.
                    LOG(DEBUG) << "Exiting a capture thread.";
                    break;
                } else if (mStreamState == RUNNING) {
                    // Fetch the frames and forward them to the client
                    if (mFramesHeld.size() > 0 && mStream_1_1 != nullptr) {
                        // Pass the latest buffer from each camera through to our client
                        hardware::hidl_vec<BufferDesc_1_1> frames;
                        frames.resize(count);
                        unsigned i = 0;
                        for (auto&& [key, hwCamera] : mHalCamera) {
                            auto pHwCamera = hwCamera.promote();
                            if (pHwCamera == nullptr) {
                                continue;
                            }

                            const auto frame = mFramesHeld[key].back();
                            if (frame.timestamp > lastFrameTimestamp) {
                                lastFrameTimestamp = frame.timestamp;
                            }
                            frames[i++] = frame;
                        }

                        auto ret = mStream_1_1->deliverFrame_1_1(frames);
                        if (!ret.isOk()) {
                            LOG(WARNING) << "Failed to forward frames";
                        }
                    }
                } else if (mStreamState != RUNNING) {
                    LOG(DEBUG) << "Requested to stop capturing frames";
                }
            }

            LOG(DEBUG) << "Exiting a capture thread";
        });
    }

    // TODO(changyeon):
    // Detect and exit if we encounter a stalled stream or unresponsive driver?
    // Consider using a timer and watching for frame arrival?

    return EvsResult::OK;
}


Return<void> VirtualCamera::doneWithFrame(const BufferDesc_1_0& buffer) {
    if (buffer.memHandle == nullptr) {
        LOG(ERROR) << "Ignoring doneWithFrame called with invalid handle";
    } else if (mFramesHeld.size() > 1) {
        LOG(ERROR) << __FUNCTION__
                   << " must NOT be called on a logical camera object.";
    } else if (mFramesHeld.empty()) {
        LOG(ERROR) << "Ignoring doneWithFrame called when no frames are held";
    } else {
        // Find this buffer in our "held" list
        auto& frameQueue = mFramesHeld.begin()->second;
        auto it = frameQueue.begin();
        while (it != frameQueue.end()) {
            if (it->bufferId == buffer.bufferId) {
                // found it!
                break;
            }
            ++it;
        }
        if (it == frameQueue.end()) {
            // We should always find the frame in our "held" list
            LOG(ERROR) << "Ignoring doneWithFrame called with unrecognized frameID "
                       << buffer.bufferId;
        } else {
            // Take this frame out of our "held" list
            frameQueue.erase(it);

            // Tell our parent that we're done with this buffer
            auto pHwCamera = mHalCamera.begin()->second.promote();
            if (pHwCamera != nullptr) {
                pHwCamera->doneWithFrame(buffer);
            } else {
                LOG(WARNING) << "Possible memory leak because a device "
                             << mHalCamera.begin()->first
                             << " is not valid.";
            }
        }
    }

    return Void();
}


Return<void> VirtualCamera::stopVideoStream() {
    if (mStreamState == RUNNING) {
        // Tell the frame delivery pipeline we don't want any more frames
        mStreamState = STOPPING;

        // Wake up the capture thread; it will terminate.
        mFramesReadySignal.notify_all();

        // Deliver the stream-ending notification
        if (mStream_1_1 != nullptr) {
            // A v1.1 client waits for a stream-stopped event
            EvsEventDesc event;
            event.aType = EvsEventType::STREAM_STOPPED;
            auto result = mStream_1_1->notify(event);
            if (!result.isOk()) {
                LOG(ERROR) << "Error delivering end of stream event";
            }
        } else {
            // A v1.0 client expects a null frame at the end of the stream
            auto result = mStream->deliverFrame({});
            if (!result.isOk()) {
                LOG(ERROR) << "Error delivering end of stream marker";
            }
        }

        // Since we are single threaded, no frame can be delivered while this function is running,
        // so we can go directly to the STOPPED state here on the server.
        // Note, however, that there still might be frames already queued that the client will see
        // after returning from the client side of this call.
        mStreamState = STOPPED;

        // Give the underlying hardware cameras the heads up that it might be time to stop
        for (auto&& [key, hwCamera] : mHalCamera) {
            auto pHwCamera = hwCamera.promote();
            if (pHwCamera != nullptr) {
                pHwCamera->clientStreamEnding(this);
            }
        }

        // Signal the condition to unblock the capture thread and then join it
        {
            std::lock_guard<std::mutex> lock(mFrameDeliveryMutex);
            mSourceCameras.clear();
            mFramesReadySignal.notify_all();
        }

        if (mCaptureThread.joinable()) {
            mCaptureThread.join();
        }
    }

    return Void();
}


Return<int32_t> VirtualCamera::getExtendedInfo(uint32_t opaqueIdentifier) {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return 0;
    }

    // Pass straight through to the hardware device
    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera != nullptr) {
        return pHwCamera->getHwCamera()->getExtendedInfo(opaqueIdentifier);
    } else {
        LOG(WARNING) << mHalCamera.begin()->first << " is invalid.";
        return 0;
    }
}


Return<EvsResult> VirtualCamera::setExtendedInfo(uint32_t opaqueIdentifier, int32_t opaqueValue) {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return EvsResult::INVALID_ARG;
    }

    // Pass straight through to the hardware device
    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera != nullptr) {
        return pHwCamera->getHwCamera()->setExtendedInfo(opaqueIdentifier, opaqueValue);
    } else {
        LOG(WARNING) << mHalCamera.begin()->first << " is invalid.";
        return EvsResult::INVALID_ARG;
    }
}


// Methods from ::android::hardware::automotive::evs::V1_1::IEvsCamera follow.
Return<void> VirtualCamera::getCameraInfo_1_1(getCameraInfo_1_1_cb info_cb) {
    if (mHalCamera.size() > 1) {
        // The logical camera description is stored in the VirtualCamera object.
        info_cb(*mDesc);
        return Void();
    }

    // Straight pass-through to the hardware layer
    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        // Return an empty descriptor
        info_cb({});
        return Void();
    }

    auto hwCamera_1_1 =
        IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
    if (hwCamera_1_1 != nullptr) {
        return hwCamera_1_1->getCameraInfo_1_1(info_cb);
    } else {
        // Return an empty descriptor
        info_cb({});
        return Void();
    }
}


Return<void> VirtualCamera::getPhysicalCameraInfo(const hidl_string& deviceId,
                                                  getPhysicalCameraInfo_cb info_cb) {
    auto device = mHalCamera.find(deviceId);
    if (device != mHalCamera.end()) {
        // Straight pass-through to the hardware layer
        auto pHwCamera = device->second.promote();
        if (pHwCamera != nullptr) {
            auto hwCamera_1_1 =
                IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
            if (hwCamera_1_1 != nullptr) {
                return hwCamera_1_1->getCameraInfo_1_1(info_cb);
            } else {
                LOG(WARNING) << "Failed to promote HW camera to v1.1.";
            }
        } else {
            LOG(WARNING) << "Camera device " << deviceId << " is not alive.";
        }
    } else {
        LOG(WARNING) << "Requested device " << deviceId
                     << " does not back this device.";
    }

    // Return an empty descriptor
    info_cb({});
    return Void();
}


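// Returns one or more buffers to the underlying hardware cameras.  Each buffer
// is located in this client's "held" list by deviceId and bufferId and then
// handed back to the HalCamera that produced it.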
Return<EvsResult> VirtualCamera::doneWithFrame_1_1(
    const hardware::hidl_vec<BufferDesc_1_1>& buffers) {

    for (auto&& buffer : buffers) {
        if (buffer.buffer.nativeHandle == nullptr) {
            LOG(WARNING) << "Ignoring doneWithFrame called with invalid handle";
        } else {
            // Find this buffer in our "held" list
            auto it = mFramesHeld[buffer.deviceId].begin();
            while (it != mFramesHeld[buffer.deviceId].end()) {
                if (it->bufferId == buffer.bufferId) {
                    // found it!
                    break;
                }
                ++it;
            }
            if (it == mFramesHeld[buffer.deviceId].end()) {
                // We should always find the frame in our "held" list
                LOG(ERROR) << "Ignoring doneWithFrame called with unrecognized frameID "
                           << buffer.bufferId;
            } else {
                // Take this frame out of our "held" list
                mFramesHeld[buffer.deviceId].erase(it);

                // Tell our parent that we're done with this buffer
                auto pHwCamera = mHalCamera[buffer.deviceId].promote();
                if (pHwCamera != nullptr) {
                    pHwCamera->doneWithFrame(buffer);
                } else {
                    LOG(WARNING) << "Possible memory leak; "
                                 << buffer.deviceId << " is not valid.";
                }
            }
        }
    }

    return EvsResult::OK;
}


Return<EvsResult> VirtualCamera::setMaster() {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return EvsResult::INVALID_ARG;
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera != nullptr) {
        return pHwCamera->setMaster(this);
    } else {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return EvsResult::INVALID_ARG;
    }
}


Return<EvsResult> VirtualCamera::forceMaster(const sp<IEvsDisplay_1_0>& display) {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return EvsResult::INVALID_ARG;
    }

    if (display.get() == nullptr) {
        LOG(ERROR) << __FUNCTION__
                   << ": Passed display is invalid";
        return EvsResult::INVALID_ARG;
    }

    DisplayState state = display->getDisplayState();
    if (state == DisplayState::NOT_OPEN ||
        state == DisplayState::DEAD ||
        state >= DisplayState::NUM_STATES) {
        LOG(ERROR) << __FUNCTION__
                   << ": Passed display is in an invalid state";
        return EvsResult::INVALID_ARG;
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera != nullptr) {
        return pHwCamera->forceMaster(this);
    } else {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return EvsResult::INVALID_ARG;
    }
}


Return<EvsResult> VirtualCamera::unsetMaster() {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return EvsResult::INVALID_ARG;
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera != nullptr) {
        return pHwCamera->unsetMaster(this);
    } else {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return EvsResult::INVALID_ARG;
    }
}


Return<void> VirtualCamera::getParameterList(getParameterList_cb _hidl_cb) {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;

        // Return an empty list
        _hidl_cb({});
        return Void();
    }

    // Straight pass-through to the hardware layer
    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";

        // Return an empty list
        _hidl_cb({});
        return Void();
    }

    auto hwCamera_1_1 =
        IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
    if (hwCamera_1_1 != nullptr) {
        return hwCamera_1_1->getParameterList(_hidl_cb);
    } else {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first
                     << " does not support camera parameter programming.";

        // Return an empty list
        _hidl_cb({});
        return Void();
    }
}


Return<void> VirtualCamera::getIntParameterRange(CameraParam id,
                                                 getIntParameterRange_cb _hidl_cb) {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;

        // Return [0, 0, 0]
        _hidl_cb(0, 0, 0);
        return Void();
    }

    // Straight pass-through to the hardware layer
    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";

        // Return [0, 0, 0]
        _hidl_cb(0, 0, 0);
        return Void();
    }

    auto hwCamera_1_1 =
        IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
    if (hwCamera_1_1 != nullptr) {
        return hwCamera_1_1->getIntParameterRange(id, _hidl_cb);
    } else {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first
                     << " does not support camera parameter programming.";

        // Return [0, 0, 0]
        _hidl_cb(0, 0, 0);
        return Void();
    }
}


Return<void> VirtualCamera::setIntParameter(CameraParam id,
                                            int32_t value,
                                            setIntParameter_cb _hidl_cb) {
    hardware::hidl_vec<int32_t> values;
    EvsResult status = EvsResult::INVALID_ARG;
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        _hidl_cb(status, values);
        return Void();
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        _hidl_cb(status, values);
        return Void();
    }

    status = pHwCamera->setParameter(this, id, value);

    values.resize(1);
    values[0] = value;
    _hidl_cb(status, values);

    return Void();
}


Return<void> VirtualCamera::getIntParameter(CameraParam id,
                                            getIntParameter_cb _hidl_cb) {
    hardware::hidl_vec<int32_t> values;
    EvsResult status = EvsResult::INVALID_ARG;
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        _hidl_cb(status, values);
        return Void();
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        _hidl_cb(status, values);
        return Void();
    }

    int32_t value;
    status = pHwCamera->getParameter(id, value);

    values.resize(1);
    values[0] = value;
    _hidl_cb(status, values);

    return Void();
}


Return<EvsResult> VirtualCamera::setExtendedInfo_1_1(uint32_t opaqueIdentifier,
                                                     const hidl_vec<uint8_t>& opaqueValue) {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return EvsResult::INVALID_ARG;
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return EvsResult::INVALID_ARG;
    } else {
        auto hwCamera = IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
        if (hwCamera != nullptr) {
            return hwCamera->setExtendedInfo_1_1(opaqueIdentifier, opaqueValue);
        } else {
            LOG(ERROR) << "Underlying hardware camera does not implement v1.1 interfaces.";
            return EvsResult::INVALID_ARG;
        }
    }
}


Return<void> VirtualCamera::getExtendedInfo_1_1(uint32_t opaqueIdentifier,
                                                getExtendedInfo_1_1_cb _hidl_cb) {
    hardware::hidl_vec<uint8_t> values;
    EvsResult status = EvsResult::INVALID_ARG;
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        _hidl_cb(status, values);
        return Void();
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        _hidl_cb(status, values);
    } else {
        auto hwCamera = IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
        if (hwCamera != nullptr) {
            hwCamera->getExtendedInfo_1_1(opaqueIdentifier, _hidl_cb);
        } else {
            LOG(ERROR) << "Underlying hardware camera does not implement v1.1 interfaces.";
            _hidl_cb(status, values);
        }
    }

    return Void();
}


Return<void>
VirtualCamera::importExternalBuffers(const hidl_vec<BufferDesc_1_1>& buffers,
                                     importExternalBuffers_cb _hidl_cb) {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        _hidl_cb(EvsResult::UNDERLYING_SERVICE_ERROR, 0);
        return {};
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        _hidl_cb(EvsResult::UNDERLYING_SERVICE_ERROR, 0);
        return {};
    }

    int delta = 0;
    if (!pHwCamera->changeFramesInFlight(buffers, &delta)) {
        LOG(ERROR) << "Failed to add external capture buffers.";
        _hidl_cb(EvsResult::UNDERLYING_SERVICE_ERROR, 0);
        return {};
    }

    mFramesAllowed += delta;
    _hidl_cb(EvsResult::OK, delta);
    return {};
}


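// Renders this camera's state (logical/physical, frame quota, frames held per
// device, and stream state) as human-readable text, e.g., for the service's
// debug dump.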
std::string VirtualCamera::toString(const char* indent) const {
    std::string buffer;
    StringAppendF(&buffer, "%sLogical camera device: %s\n"
                           "%sFramesAllowed: %u\n"
                           "%sFrames in use:\n",
                           indent, mHalCamera.size() > 1 ? "T" : "F",
                           indent, mFramesAllowed,
                           indent);

    std::string next_indent(indent);
    next_indent += "\t";
    for (auto&& [id, queue] : mFramesHeld) {
        StringAppendF(&buffer, "%s%s: %d\n",
                      next_indent.c_str(),
                      id.c_str(),
                      static_cast<int>(queue.size()));
    }
    StringAppendF(&buffer, "%sCurrent stream state: %d\n",
                  indent, mStreamState);

    return buffer;
}


}  // namespace implementation
}  // namespace V1_1
}  // namespace evs
}  // namespace automotive
}  // namespace android