1 /*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "VtsHalEvsTest"
18
19
20 // These values are called out in the EVS design doc (as of Mar 8, 2017)
21 static const int kMaxStreamStartMilliseconds = 500;
22 static const int kMinimumFramesPerSecond = 10;
23
24 static const int kSecondsToMilliseconds = 1000;
25 static const int kMillisecondsToMicroseconds = 1000;
26 static const float kNanoToMilliseconds = 0.000001f;
27 static const float kNanoToSeconds = 0.000000001f;
28
29
30 #include "FrameHandler.h"
31 #include "FrameHandlerUltrasonics.h"
32
33 #include <cstdio>
34 #include <cstring>
35 #include <cstdlib>
36 #include <thread>
37 #include <unordered_set>
38
39 #include <hidl/HidlTransportSupport.h>
40 #include <hwbinder/ProcessState.h>
41 #include <utils/Errors.h>
42 #include <utils/StrongPointer.h>
43
44 #include <android/hardware/automotive/evs/1.1/IEvsCamera.h>
45 #include <android/hardware/automotive/evs/1.1/IEvsCameraStream.h>
46 #include <android/hardware/automotive/evs/1.1/IEvsEnumerator.h>
47 #include <android/hardware/automotive/evs/1.1/IEvsDisplay.h>
48 #include <android/hardware/camera/device/3.2/ICameraDevice.h>
49 #include <android-base/logging.h>
50 #include <system/camera_metadata.h>
51 #include <ui/DisplayConfig.h>
52 #include <ui/DisplayState.h>
53 #include <ui/GraphicBuffer.h>
54 #include <ui/GraphicBufferAllocator.h>
55
56 #include <gtest/gtest.h>
57 #include <hidl/GtestPrinter.h>
58 #include <hidl/ServiceManagement.h>
59
60 using namespace ::android::hardware::automotive::evs::V1_1;
61 using namespace std::chrono_literals;
62
63 using ::android::hardware::Return;
64 using ::android::hardware::Void;
65 using ::android::hardware::hidl_vec;
66 using ::android::hardware::hidl_handle;
67 using ::android::hardware::hidl_string;
68 using ::android::sp;
69 using ::android::wp;
70 using ::android::hardware::camera::device::V3_2::Stream;
71 using ::android::hardware::automotive::evs::V1_1::BufferDesc;
72 using ::android::hardware::automotive::evs::V1_0::DisplayDesc;
73 using ::android::hardware::automotive::evs::V1_0::DisplayState;
74 using ::android::hardware::graphics::common::V1_0::PixelFormat;
75 using ::android::frameworks::automotive::display::V1_0::HwDisplayConfig;
76 using ::android::frameworks::automotive::display::V1_0::HwDisplayState;
77 using IEvsCamera_1_0 = ::android::hardware::automotive::evs::V1_0::IEvsCamera;
78 using IEvsCamera_1_1 = ::android::hardware::automotive::evs::V1_1::IEvsCamera;
79 using IEvsDisplay_1_0 = ::android::hardware::automotive::evs::V1_0::IEvsDisplay;
80 using IEvsDisplay_1_1 = ::android::hardware::automotive::evs::V1_1::IEvsDisplay;
81
82 /*
83 * Please note that this is different from what is defined in
84 * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
85 * field to store a framerate.
86 */
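// kStreamCfgSz is the number of int32_t fields in the RawStreamConfig struct below.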
87 const size_t kStreamCfgSz = 5;
88 typedef struct {
89 int32_t width;
90 int32_t height;
91 int32_t format;
92 int32_t direction;
93 int32_t framerate;
94 } RawStreamConfig;
95
96
97 // The main test class for EVS
98 class EvsHidlTest : public ::testing::TestWithParam<std::string> {
99 public:
100 virtual void SetUp() override {
101 // Make sure we can connect to the enumerator
102 std::string service_name = GetParam();
103 pEnumerator = IEvsEnumerator::getService(service_name);
104 ASSERT_NE(pEnumerator.get(), nullptr);
105 LOG(INFO) << "Test target service: " << service_name;
106
107 mIsHwModule = pEnumerator->isHardware();
108 }
109
110 virtual void TearDown() override {
111 // Attempt to close any active camera
112 for (auto &&cam : activeCameras) {
113 if (cam != nullptr) {
114 pEnumerator->closeCamera(cam);
115 }
116 }
117 activeCameras.clear();
118 }
119
120 protected:
121 void loadCameraList() {
122 // SetUp() must run first!
123 assert(pEnumerator != nullptr);
124
125 // Get the camera list
126 pEnumerator->getCameraList_1_1(
127 [this](hidl_vec <CameraDesc> cameraList) {
128 LOG(INFO) << "Camera list callback received "
129 << cameraList.size()
130 << " cameras";
131 cameraInfo.reserve(cameraList.size());
132 for (auto&& cam: cameraList) {
133 LOG(INFO) << "Found camera " << cam.v1.cameraId;
134 cameraInfo.push_back(cam);
135 }
136 }
137 );
138 }
139
140 void loadUltrasonicsArrayList() {
141 // SetUp() must run first!
142 assert(pEnumerator != nullptr);
143
144 // Get the ultrasonics array list
145 pEnumerator->getUltrasonicsArrayList([this](hidl_vec<UltrasonicsArrayDesc> ultraList) {
146 LOG(INFO) << "Ultrasonics array list callback received "
147 << ultraList.size()
148 << " arrays";
149 ultrasonicsArraysInfo.reserve(ultraList.size());
150 for (auto&& ultraArray : ultraList) {
151 LOG(INFO) << "Found ultrasonics array " << ultraArray.ultrasonicsArrayId;
152 ultrasonicsArraysInfo.push_back(ultraArray);
153 }
154 });
155 }
156
157 bool isLogicalCamera(const camera_metadata_t *metadata) {
158 if (metadata == nullptr) {
159 // A logical camera device must have valid camera metadata.
160 return false;
161 }
162
163 // Looking for LOGICAL_MULTI_CAMERA capability from metadata.
164 camera_metadata_ro_entry_t entry;
165 int rc = find_camera_metadata_ro_entry(metadata,
166 ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
167 &entry);
168 if (0 != rc) {
169 // No capabilities are found.
170 return false;
171 }
172
173 for (size_t i = 0; i < entry.count; ++i) {
174 uint8_t cap = entry.data.u8[i];
175 if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
176 return true;
177 }
178 }
179
180 return false;
181 }
182
183 std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id,
184 bool& flag) {
185 std::unordered_set<std::string> physicalCameras;
186
187 auto it = cameraInfo.begin();
188 while (it != cameraInfo.end()) {
189 if (it->v1.cameraId == id) {
190 break;
191 }
192 ++it;
193 }
194
195 if (it == cameraInfo.end()) {
196 // Unknown camera is requested. Return an empty list.
197 return physicalCameras;
198 }
199
200 const camera_metadata_t *metadata =
201 reinterpret_cast<camera_metadata_t *>(&it->metadata[0]);
202 flag = isLogicalCamera(metadata);
203 if (!flag) {
204 // EVS assumes that a device without logical-camera metadata is a
205 // physical device.
206 LOG(INFO) << id << " is not a logical camera device.";
207 physicalCameras.emplace(id);
208 return physicalCameras;
209 }
210
211 // Look for physical camera identifiers
212 camera_metadata_ro_entry entry;
213 int rc = find_camera_metadata_ro_entry(metadata,
214 ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
215 &entry);
216 if (rc != 0) {
217 LOG(ERROR) << "No physical camera ID is found for a logical camera device";
218 }
219
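// The metadata entry packs the physical camera IDs as consecutive NUL-terminated
// strings, so split the byte array on '\0'.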
220 const uint8_t *ids = entry.data.u8;
221 size_t start = 0;
222 for (size_t i = 0; i < entry.count; ++i) {
223 if (ids[i] == '\0') {
224 if (start != i) {
225 std::string id(reinterpret_cast<const char *>(ids + start));
226 physicalCameras.emplace(id);
227 }
228 start = i + 1;
229 }
230 }
231
232 LOG(INFO) << id
233 << " consists of "
234 << physicalCameras.size()
235 << " physical camera devices";
236 return physicalCameras;
237 }
238
239
240 sp<IEvsEnumerator> pEnumerator; // Every test needs access to the service
241 std::vector<CameraDesc> cameraInfo; // Empty unless/until loadCameraList() is called
242 bool mIsHwModule; // True if the module under test is
243 // a HW module implementation.
244 std::deque<sp<IEvsCamera_1_1>> activeCameras; // A list of active camera handles that
245 // need to be cleaned up.
246 std::vector<UltrasonicsArrayDesc>
247 ultrasonicsArraysInfo; // Empty unless/until
248 // loadUltrasonicsArrayList() is called
249 std::deque<wp<IEvsUltrasonicsArray>> activeUltrasonicsArrays; // A list of active ultrasonic array
250 // handles that are to be cleaned up.
251 };
252
253
254 // Test cases, their implementations, and corresponding requirements are
255 // documented at go/aae-evs-public-api-test.
256
257 /*
258 * CameraOpenClean:
259 * Opens each camera reported by the enumerator and then explicitly closes it via a
260 * call to closeCamera. Then repeats the test to ensure all cameras can be reopened.
261 */
262 TEST_P(EvsHidlTest, CameraOpenClean) {
263 LOG(INFO) << "Starting CameraOpenClean test";
264
265 // Get the camera list
266 loadCameraList();
267
268 // Using a null stream configuration makes EVS use the default resolution and
269 // output format.
270 Stream nullCfg = {};
271
272 // Open and close each camera twice
273 for (auto&& cam: cameraInfo) {
274 bool isLogicalCam = false;
275 auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
276 if (mIsHwModule && isLogicalCam) {
277 LOG(INFO) << "Skip a logical device, " << cam.v1.cameraId << " for HW target.";
278 continue;
279 }
280
281 for (int pass = 0; pass < 2; pass++) {
282 sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg);
283 ASSERT_NE(pCam, nullptr);
284
285 for (auto&& devName : devices) {
286 bool matched = false;
287 pCam->getPhysicalCameraInfo(devName,
288 [&devName, &matched](const CameraDesc& info) {
289 matched = devName == info.v1.cameraId;
290 });
291 ASSERT_TRUE(matched);
292 }
293
294 // Store a camera handle for a clean-up
295 activeCameras.push_back(pCam);
296
297 // Verify that this camera self-identifies correctly
298 pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
299 LOG(DEBUG) << "Found camera " << desc.v1.cameraId;
300 EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
301 }
302 );
303
304 // Verify methods for extended info
305 const auto id = 0xFFFFFFFF; // meaningless id
306 hidl_vec<uint8_t> values;
307 auto err = pCam->setExtendedInfo_1_1(id, values);
308 if (isLogicalCam) {
309 // Logical camera device does not support setExtendedInfo
310 // method.
311 ASSERT_EQ(EvsResult::INVALID_ARG, err);
312 } else {
313 ASSERT_NE(EvsResult::INVALID_ARG, err);
314 }
315
316
317 pCam->getExtendedInfo_1_1(id, [&isLogicalCam](const auto& result, const auto& data) {
318 if (isLogicalCam) {
319 ASSERT_EQ(EvsResult::INVALID_ARG, result);
320 } else {
321 ASSERT_NE(EvsResult::INVALID_ARG, result);
322 ASSERT_EQ(0, data.size());
323 }
324 });
325
326 // Explicitly close the camera so resources are released right away
327 pEnumerator->closeCamera(pCam);
328 activeCameras.clear();
329 }
330 }
331 }
332
333
334 /*
335 * CameraOpenAggressive:
336 * Opens each camera reported by the enumerator twice in a row without an intervening closeCamera
337 * call. This ensures that the intended "aggressive open" behavior works. This is necessary for
338 * the system to be tolerant of shutdown/restart race conditions.
339 */
340 TEST_P(EvsHidlTest, CameraOpenAggressive) {
341 LOG(INFO) << "Starting CameraOpenAggressive test";
342
343 // Get the camera list
344 loadCameraList();
345
346 // Using a null stream configuration makes EVS use the default resolution and
347 // output format.
348 Stream nullCfg = {};
349
350 // Open each camera twice in a row without an intervening close
351 for (auto&& cam: cameraInfo) {
352 bool isLogicalCam = false;
353 getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
354 if (mIsHwModule && isLogicalCam) {
355 LOG(INFO) << "Skip a logical device, " << cam.v1.cameraId << " for HW target.";
356 continue;
357 }
358
359 activeCameras.clear();
360 sp<IEvsCamera_1_1> pCam =
361 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
362 .withDefault(nullptr);
363 ASSERT_NE(pCam, nullptr);
364
365 // Store a camera handle for a clean-up
366 activeCameras.push_back(pCam);
367
368 // Verify that this camera self-identifies correctly
369 pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
370 LOG(DEBUG) << "Found camera " << desc.v1.cameraId;
371 EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
372 }
373 );
374
375 sp<IEvsCamera_1_1> pCam2 =
376 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
377 .withDefault(nullptr);
378 ASSERT_NE(pCam2, nullptr);
379
380 // Store a camera handle for a clean-up
381 activeCameras.push_back(pCam2);
382
383 ASSERT_NE(pCam, pCam2);
384
385 Return<EvsResult> result = pCam->setMaxFramesInFlight(2);
386 if (mIsHwModule) {
387 // Verify that the old camera rejects calls via HW module.
388 EXPECT_EQ(EvsResult::OWNERSHIP_LOST, EvsResult(result));
389 } else {
390 // default implementation supports multiple clients.
391 EXPECT_EQ(EvsResult::OK, EvsResult(result));
392 }
393
394 // Close the superseded camera
395 pEnumerator->closeCamera(pCam);
396 activeCameras.pop_front();
397
398 // Verify that the second camera instance self-identifies correctly
399 pCam2->getCameraInfo_1_1([&cam](CameraDesc desc) {
400 LOG(DEBUG) << "Found camera " << desc.v1.cameraId;
401 EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
402 }
403 );
404
405 // Close the second camera instance
406 pEnumerator->closeCamera(pCam2);
407 activeCameras.pop_front();
408 }
409
410 // Sleep here to ensure the destructor cleanup has time to run so we don't break follow-on tests
411 sleep(1); // I hate that this is an arbitrary time to wait. :( b/36122635
412 }
413
414
415 /*
416 * CameraStreamPerformance:
417 * Measure and qualify the stream start up time and streaming frame rate of each reported camera
418 */
419 TEST_P(EvsHidlTest, CameraStreamPerformance) {
420 LOG(INFO) << "Starting CameraStreamPerformance test";
421
422 // Get the camera list
423 loadCameraList();
424
425 // Using a null stream configuration makes EVS use the default resolution and
426 // output format.
427 Stream nullCfg = {};
428
429 // Test each reported camera
430 for (auto&& cam: cameraInfo) {
431 bool isLogicalCam = false;
432 auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
433 if (mIsHwModule && isLogicalCam) {
434 LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
435 continue;
436 }
437
438 sp<IEvsCamera_1_1> pCam =
439 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
440 .withDefault(nullptr);
441 ASSERT_NE(pCam, nullptr);
442
443 // Store a camera handle for a clean-up
444 activeCameras.push_back(pCam);
445
446 // Set up a frame receiver object which will fire up its own thread
447 sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
448 nullptr,
449 FrameHandler::eAutoReturn);
450
451 // Start the camera's video stream
452 nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
453
454 bool startResult = frameHandler->startStream();
455 ASSERT_TRUE(startResult);
456
457 // Ensure the first frame arrived within the expected time
458 frameHandler->waitForFrameCount(1);
459 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
460 nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;
461
462 // Extra delays are expected when we attempt to start a video stream on
463 // a logical camera device. The delay is expected to be at most the
464 // number of physical camera devices multiplied by
465 // kMaxStreamStartMilliseconds.
466 EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
467 kMaxStreamStartMilliseconds * devices.size());
468 printf("%s: Measured time to first frame %0.2f ms\n",
469 cam.v1.cameraId.c_str(), timeToFirstFrame * kNanoToMilliseconds);
470 LOG(INFO) << cam.v1.cameraId
471 << ": Measured time to first frame "
472 << std::scientific << timeToFirstFrame * kNanoToMilliseconds
473 << " ms.";
474
475 // Check aspect ratio
476 unsigned width = 0, height = 0;
477 frameHandler->getFrameDimension(&width, &height);
478 EXPECT_GE(width, height);
479
480 // Wait a bit, then ensure we get at least the required minimum number of frames
481 sleep(5);
482 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
483
484 // Even when the camera pointer goes out of scope, the FrameHandler object will
485 // keep the stream alive unless we tell it to shutdown.
486 // Also note that the FrameHandler and the Camera have a mutual circular reference, so
487 // we have to break that cycle in order for either of them to get cleaned up.
488 frameHandler->shutdown();
489
490 unsigned framesReceived = 0;
491 frameHandler->getFramesCounters(&framesReceived, nullptr);
492 framesReceived = framesReceived - 1; // Back out the first frame we already waited for
493 nsecs_t runTime = end - firstFrame;
494 float framesPerSecond = framesReceived / (runTime * kNanoToSeconds);
495 printf("Measured camera rate %3.2f fps\n", framesPerSecond);
496 LOG(INFO) << "Measured camera rate "
497 << std::scientific << framesPerSecond
498 << " fps.";
499 EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);
500
501 // Explicitly release the camera
502 pEnumerator->closeCamera(pCam);
503 activeCameras.clear();
504 }
505 }
506
507
508 /*
509 * CameraStreamBuffering:
510 * Ensure the camera implementation behaves properly when the client holds onto buffers for more
511 * than one frame time. The camera must cleanly skip frames until the client is ready again.
512 */
513 TEST_P(EvsHidlTest, CameraStreamBuffering) {
514 LOG(INFO) << "Starting CameraStreamBuffering test";
515
516 // Arbitrary constant (should be > 1 and less than crazy)
517 static const unsigned int kBuffersToHold = 6;
518
519 // Get the camera list
520 loadCameraList();
521
522 // Using a null stream configuration makes EVS use the default resolution and
523 // output format.
524 Stream nullCfg = {};
525
526 // Test each reported camera
527 for (auto&& cam: cameraInfo) {
528 bool isLogicalCam = false;
529 getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
530 if (mIsHwModule && isLogicalCam) {
531 LOG(INFO) << "Skip a logical device " << cam.v1.cameraId << " for HW target.";
532 continue;
533 }
534
535 sp<IEvsCamera_1_1> pCam =
536 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
537 .withDefault(nullptr);
538 ASSERT_NE(pCam, nullptr);
539
540 // Store a camera handle for a clean-up
541 activeCameras.push_back(pCam);
542
543 // Ask for a crazy number of buffers in flight to ensure it errors correctly
544 Return<EvsResult> badResult = pCam->setMaxFramesInFlight(0xFFFFFFFF);
545 EXPECT_EQ(EvsResult::BUFFER_NOT_AVAILABLE, badResult);
546
547 // Now ask for exactly kBuffersToHold buffers in flight, as we'll test the stalling behavior in that case
548 Return<EvsResult> goodResult = pCam->setMaxFramesInFlight(kBuffersToHold);
549 EXPECT_EQ(EvsResult::OK, goodResult);
550
551
552 // Set up a frame receiver object which will fire up its own thread.
553 sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
554 nullptr,
555 FrameHandler::eNoAutoReturn);
556
557 // Start the camera's video stream
558 bool startResult = frameHandler->startStream();
559 ASSERT_TRUE(startResult);
560
561 // Check that the video stream stalls once we've gotten exactly the number of buffers
562 // we requested since we told the frameHandler not to return them.
563 sleep(1); // 1 second should be enough for at least 5 frames to be delivered worst case
564 unsigned framesReceived = 0;
565 frameHandler->getFramesCounters(&framesReceived, nullptr);
566 ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
567
568
569 // Give back one buffer
570 bool didReturnBuffer = frameHandler->returnHeldBuffer();
571 EXPECT_TRUE(didReturnBuffer);
572
573 // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
574 // filled since we require 10fps minimum -- but give a 10% allowance just in case.
575 usleep(110 * kMillisecondsToMicroseconds);
576 frameHandler->getFramesCounters(&framesReceived, nullptr);
577 EXPECT_EQ(kBuffersToHold+1, framesReceived) << "Stream should've resumed";
578
579 // Even when the camera pointer goes out of scope, the FrameHandler object will
580 // keep the stream alive unless we tell it to shutdown.
581 // Also note that the FrameHandler and the Camera have a mutual circular reference, so
582 // we have to break that cycle in order for either of them to get cleaned up.
583 frameHandler->shutdown();
584
585 // Explicitly release the camera
586 pEnumerator->closeCamera(pCam);
587 activeCameras.clear();
588 }
589 }
590
591
592 /*
593 * CameraToDisplayRoundTrip:
594 * End to end test of data flowing from the camera to the display. Each delivered frame of camera
595 * imagery is simply copied to the display buffer and presented on screen. This is the one test
596 * which a human could observe to see the operation of the system on the physical display.
597 */
598 TEST_P(EvsHidlTest, CameraToDisplayRoundTrip) {
599 LOG(INFO) << "Starting CameraToDisplayRoundTrip test";
600
601 // Get the camera list
602 loadCameraList();
603
604 // Using a null stream configuration makes EVS use the default resolution and
605 // output format.
606 Stream nullCfg = {};
607
608 // Request available display IDs
609 uint8_t targetDisplayId = 0;
610 pEnumerator->getDisplayIdList([&targetDisplayId](auto ids) {
611 ASSERT_GT(ids.size(), 0);
612 targetDisplayId = ids[0];
613 });
614
615 // Request exclusive access to the first EVS display
616 sp<IEvsDisplay_1_1> pDisplay = pEnumerator->openDisplay_1_1(targetDisplayId);
617 ASSERT_NE(pDisplay, nullptr);
618 LOG(INFO) << "Display " << targetDisplayId << " is now in use.";
619
620 // Get the display descriptor
621 pDisplay->getDisplayInfo_1_1([](const HwDisplayConfig& config, const HwDisplayState& state) {
622 ASSERT_GT(config.size(), 0);
623 ASSERT_GT(state.size(), 0);
624
625 android::DisplayConfig* pConfig = (android::DisplayConfig*)config.data();
626 const auto width = pConfig->resolution.getWidth();
627 const auto height = pConfig->resolution.getHeight();
628 LOG(INFO) << " Resolution: " << width << "x" << height;
629 ASSERT_GT(width, 0);
630 ASSERT_GT(height, 0);
631
632 android::ui::DisplayState* pState = (android::ui::DisplayState*)state.data();
633 ASSERT_NE(pState->layerStack, -1);
634 });
635
636 // Test each reported camera
637 for (auto&& cam: cameraInfo) {
638 bool isLogicalCam = false;
639 getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
640 if (mIsHwModule && isLogicalCam) {
641 LOG(INFO) << "Skip a logical device " << cam.v1.cameraId << " for HW target.";
642 continue;
643 }
644
645 sp<IEvsCamera_1_1> pCam =
646 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
647 .withDefault(nullptr);
648 ASSERT_NE(pCam, nullptr);
649
650 // Store a camera handle for a clean-up
651 activeCameras.push_back(pCam);
652
653 // Set up a frame receiver object which will fire up its own thread.
654 sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
655 pDisplay,
656 FrameHandler::eAutoReturn);
657
658
659 // Activate the display
660 pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);
661
662 // Start the camera's video stream
663 bool startResult = frameHandler->startStream();
664 ASSERT_TRUE(startResult);
665
666 // Wait a while to let the data flow
667 static const int kSecondsToWait = 5;
668 const int streamTimeMs = kSecondsToWait * kSecondsToMilliseconds -
669 kMaxStreamStartMilliseconds;
670 const unsigned minimumFramesExpected = streamTimeMs * kMinimumFramesPerSecond /
671 kSecondsToMilliseconds;
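// e.g., with kSecondsToWait == 5 and kMaxStreamStartMilliseconds == 500, this expects
// at least 4500 * 10 / 1000 = 45 frames.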
672 sleep(kSecondsToWait);
673 unsigned framesReceived = 0;
674 unsigned framesDisplayed = 0;
675 frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
676 EXPECT_EQ(framesReceived, framesDisplayed);
677 EXPECT_GE(framesDisplayed, minimumFramesExpected);
678
679 // Turn off the display (yes, before the stream stops -- it should be handled)
680 pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
681
682 // Shut down the streamer
683 frameHandler->shutdown();
684
685 // Explicitly release the camera
686 pEnumerator->closeCamera(pCam);
687 activeCameras.clear();
688 }
689
690 // Explicitly release the display
691 pEnumerator->closeDisplay(pDisplay);
692 }
693
694
695 /*
696 * MultiCameraStream:
697 * Verify that each client can start and stop video streams on the same
698 * underlying camera.
699 */
700 TEST_P(EvsHidlTest, MultiCameraStream) {
701 LOG(INFO) << "Starting MultiCameraStream test";
702
703 if (mIsHwModule) {
704 // This test is not for HW module implementation.
705 return;
706 }
707
708 // Get the camera list
709 loadCameraList();
710
711 // Using a null stream configuration makes EVS use the default resolution and
712 // output format.
713 Stream nullCfg = {};
714
715 // Test each reported camera
716 for (auto&& cam: cameraInfo) {
717 // Create two camera clients.
718 sp<IEvsCamera_1_1> pCam0 =
719 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
720 .withDefault(nullptr);
721 ASSERT_NE(pCam0, nullptr);
722
723 // Store a camera handle for a clean-up
724 activeCameras.push_back(pCam0);
725
726 sp<IEvsCamera_1_1> pCam1 =
727 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
728 .withDefault(nullptr);
729 ASSERT_NE(pCam1, nullptr);
730
731 // Store a camera handle for a clean-up
732 activeCameras.push_back(pCam1);
733
734 // Set up per-client frame receiver objects, each of which will fire up its own thread
735 sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
736 nullptr,
737 FrameHandler::eAutoReturn);
738 ASSERT_NE(frameHandler0, nullptr);
739
740 sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
741 nullptr,
742 FrameHandler::eAutoReturn);
743 ASSERT_NE(frameHandler1, nullptr);
744
745 // Start the camera's video stream via both clients
746 bool startResult = false;
747 startResult = frameHandler0->startStream() &&
748 frameHandler1->startStream();
749 ASSERT_TRUE(startResult);
750
751 // Ensure the stream starts
752 frameHandler0->waitForFrameCount(1);
753 frameHandler1->waitForFrameCount(1);
754
755 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
756
757 // Wait a bit, then ensure both clients get at least the required minimum number of frames
758 sleep(5);
759 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
760 unsigned framesReceived0 = 0, framesReceived1 = 0;
761 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
762 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
763 framesReceived0 = framesReceived0 - 1; // Back out the first frame we already waited for
764 framesReceived1 = framesReceived1 - 1; // Back out the first frame we already waited for
765 nsecs_t runTime = end - firstFrame;
766 float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
767 float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
768 LOG(INFO) << "Measured camera rate "
769 << std::scientific << framesPerSecond0 << " fps and "
770 << framesPerSecond1 << " fps";
771 EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
772 EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
773
774 // Shutdown one client
775 frameHandler0->shutdown();
776
777 // Read frame counters again
778 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
779 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
780
781 // Wait a bit again
782 sleep(5);
783 unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
784 frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
785 frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
786 EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
787 EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
788
789 // Shut down the other client
790 frameHandler1->shutdown();
791
792 // Explicitly release the camera
793 pEnumerator->closeCamera(pCam0);
794 pEnumerator->closeCamera(pCam1);
795 activeCameras.clear();
796
797 // TODO(b/145459970, b/145457727): below sleep() is added to ensure the
798 // destruction of active camera objects; this may be related with two
799 // issues.
800 sleep(1);
801 }
802 }
803
804
805 /*
806 * CameraParameter:
807 * Verify that a client can adjust a camera parameter.
808 */
809 TEST_P(EvsHidlTest, CameraParameter) {
810 LOG(INFO) << "Starting CameraParameter test";
811
812 // Get the camera list
813 loadCameraList();
814
815 // Using a null stream configuration makes EVS use the default resolution and
816 // output format.
817 Stream nullCfg = {};
818
819 // Test each reported camera
820 Return<EvsResult> result = EvsResult::OK;
821 for (auto&& cam: cameraInfo) {
822 bool isLogicalCam = false;
823 getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
824 if (isLogicalCam) {
825 // TODO(b/145465724): Support camera parameter programming on
826 // logical devices.
827 LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
828 continue;
829 }
830
831 // Create a camera client
832 sp<IEvsCamera_1_1> pCam =
833 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
834 .withDefault(nullptr);
835 ASSERT_NE(pCam, nullptr);
836
837 // Store a camera
838 activeCameras.push_back(pCam);
839
840 // Get the parameter list
841 std::vector<CameraParam> cmds;
842 pCam->getParameterList([&cmds](hidl_vec<CameraParam> cmdList) {
843 cmds.reserve(cmdList.size());
844 for (auto &&cmd : cmdList) {
845 cmds.push_back(cmd);
846 }
847 }
848 );
849
850 if (cmds.size() < 1) {
851 continue;
852 }
853
854 // Set up a frame receiver object which will fire up its own thread
855 sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
856 nullptr,
857 FrameHandler::eAutoReturn);
858 ASSERT_NE(frameHandler, nullptr);
859
860 // Start the camera's video stream
861 bool startResult = frameHandler->startStream();
862 ASSERT_TRUE(startResult);
863
864 // Ensure the stream starts
865 frameHandler->waitForFrameCount(1);
866
867 result = pCam->setMaster();
868 ASSERT_EQ(EvsResult::OK, result);
869
870 for (auto &cmd : cmds) {
871 // Get a valid parameter value range
872 int32_t minVal, maxVal, step;
873 pCam->getIntParameterRange(
874 cmd,
875 [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
876 minVal = val0;
877 maxVal = val1;
878 step = val2;
879 }
880 );
881
882 EvsResult result = EvsResult::OK;
883 if (cmd == CameraParam::ABSOLUTE_FOCUS) {
884 // Try to turn off auto-focus
885 std::vector<int32_t> values;
886 pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0,
887 [&result, &values](auto status, auto effectiveValues) {
888 result = status;
889 if (status == EvsResult::OK) {
890 for (auto &&v : effectiveValues) {
891 values.push_back(v);
892 }
893 }
894 });
895 ASSERT_EQ(EvsResult::OK, result);
896 for (auto &&v : values) {
897 ASSERT_EQ(v, 0);
898 }
899 }
900
901 // Try to program a parameter with a random value [minVal, maxVal]
902 int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
903 std::vector<int32_t> values;
904
905 // Rounding down
906 val0 = val0 - (val0 % step);
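// e.g., with step == 5, a raw value of 123 is rounded down to 120.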
907 pCam->setIntParameter(cmd, val0,
908 [&result, &values](auto status, auto effectiveValues) {
909 result = status;
910 if (status == EvsResult::OK) {
911 for (auto &&v : effectiveValues) {
912 values.push_back(v);
913 }
914 }
915 });
916
917 ASSERT_EQ(EvsResult::OK, result);
918
919 values.clear();
920 pCam->getIntParameter(cmd,
921 [&result, &values](auto status, auto readValues) {
922 result = status;
923 if (status == EvsResult::OK) {
924 for (auto &&v : readValues) {
925 values.push_back(v);
926 }
927 }
928 });
929 ASSERT_EQ(EvsResult::OK, result);
930 for (auto &&v : values) {
931 ASSERT_EQ(val0, v) << "Values are not matched.";
932 }
933 }
934
935 result = pCam->unsetMaster();
936 ASSERT_EQ(EvsResult::OK, result);
937
938 // Shutdown
939 frameHandler->shutdown();
940
941 // Explicitly release the camera
942 pEnumerator->closeCamera(pCam);
943 activeCameras.clear();
944 }
945 }
946
947
948 /*
949 * CameraMasterRelease
950 * Verify that a non-master client gets notified when the master client either
951 * terminates or releases its master role.
952 */
953 TEST_P(EvsHidlTest, CameraMasterRelease) {
954 LOG(INFO) << "Starting CameraMasterRelease test";
955
956 if (mIsHwModule) {
957 // This test is not for HW module implementation.
958 return;
959 }
960
961 // Get the camera list
962 loadCameraList();
963
964 // Using a null stream configuration makes EVS use the default resolution and
965 // output format.
966 Stream nullCfg = {};
967
968 // Test each reported camera
969 for (auto&& cam: cameraInfo) {
970 bool isLogicalCam = false;
971 getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
972 if (isLogicalCam) {
973 // TODO(b/145465724): Support camera parameter programming on
974 // logical devices.
975 LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
976 continue;
977 }
978
979 // Create two camera clients.
980 sp<IEvsCamera_1_1> pCamMaster =
981 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
982 .withDefault(nullptr);
983 ASSERT_NE(pCamMaster, nullptr);
984
985 // Store a camera handle for a clean-up
986 activeCameras.push_back(pCamMaster);
987
988 sp<IEvsCamera_1_1> pCamNonMaster =
989 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
990 .withDefault(nullptr);
991 ASSERT_NE(pCamNonMaster, nullptr);
992
993 // Store a camera handle for a clean-up
994 activeCameras.push_back(pCamNonMaster);
995
996 // Set up per-client frame receiver objects, each of which will fire up its own thread
997 sp<FrameHandler> frameHandlerMaster =
998 new FrameHandler(pCamMaster, cam,
999 nullptr,
1000 FrameHandler::eAutoReturn);
1001 ASSERT_NE(frameHandlerMaster, nullptr);
1002 sp<FrameHandler> frameHandlerNonMaster =
1003 new FrameHandler(pCamNonMaster, cam,
1004 nullptr,
1005 FrameHandler::eAutoReturn);
1006 ASSERT_NE(frameHandlerNonMaster, nullptr);
1007
1008 // Set one client as the master
1009 EvsResult result = pCamMaster->setMaster();
1010 ASSERT_TRUE(result == EvsResult::OK);
1011
1012 // Try to set another client as the master.
1013 result = pCamNonMaster->setMaster();
1014 ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
1015
1016 // Start the camera's video stream via a master client.
1017 bool startResult = frameHandlerMaster->startStream();
1018 ASSERT_TRUE(startResult);
1019
1020 // Ensure the stream starts
1021 frameHandlerMaster->waitForFrameCount(1);
1022
1023 // Start the camera's video stream via another client
1024 startResult = frameHandlerNonMaster->startStream();
1025 ASSERT_TRUE(startResult);
1026
1027 // Ensure the stream starts
1028 frameHandlerNonMaster->waitForFrameCount(1);
1029
1030 // The non-master client expects to receive a master role released
1031 // notification.
1032 EvsEventDesc aTargetEvent = {};
1033 EvsEventDesc aNotification = {};
1034
1035 bool listening = false;
1036 std::mutex eventLock;
1037 std::condition_variable eventCond;
1038 std::thread listener = std::thread(
1039 [&aNotification, &frameHandlerNonMaster, &listening, &eventCond]() {
1040 // Notify that a listening thread is running.
1041 listening = true;
1042 eventCond.notify_all();
1043
1044 EvsEventDesc aTargetEvent;
1045 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1046 if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification, true)) {
1047 LOG(WARNING) << "The timer expired before the target event fired.";
1048 }
1049
1050 }
1051 );
1052
1053 // Wait until a listening thread starts.
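// (A timed wait is used so that a missed notification cannot hang the test.)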
1054 std::unique_lock<std::mutex> lock(eventLock);
1055 auto timer = std::chrono::system_clock::now();
1056 while (!listening) {
1057 timer += 1s;
1058 eventCond.wait_until(lock, timer);
1059 }
1060 lock.unlock();
1061
1062 // Release a master role.
1063 pCamMaster->unsetMaster();
1064
1065 // Join a listening thread.
1066 if (listener.joinable()) {
1067 listener.join();
1068 }
1069
1070 // Verify change notifications.
1071 ASSERT_EQ(EvsEventType::MASTER_RELEASED,
1072 static_cast<EvsEventType>(aNotification.aType));
1073
1074 // Non-master becomes a master.
1075 result = pCamNonMaster->setMaster();
1076 ASSERT_TRUE(result == EvsResult::OK);
1077
1078 // Previous master client fails to become a master.
1079 result = pCamMaster->setMaster();
1080 ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
1081
1082 listening = false;
1083 listener = std::thread(
1084 [&aNotification, &frameHandlerMaster, &listening, &eventCond]() {
1085 // Notify that a listening thread is running.
1086 listening = true;
1087 eventCond.notify_all();
1088
1089 EvsEventDesc aTargetEvent;
1090 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1091 if (!frameHandlerMaster->waitForEvent(aTargetEvent, aNotification, true)) {
1092 LOG(WARNING) << "The timer expired before the target event fired.";
1093 }
1094
1095 }
1096 );
1097
1098 // Wait until a listening thread starts.
1099 timer = std::chrono::system_clock::now();
1100 lock.lock();
1101 while (!listening) {
1102 eventCond.wait_until(lock, timer + 1s);
1103 }
1104 lock.unlock();
1105
1106 // Closing current master client.
1107 frameHandlerNonMaster->shutdown();
1108
1109 // Join a listening thread.
1110 if (listener.joinable()) {
1111 listener.join();
1112 }
1113
1114 // Verify change notifications.
1115 ASSERT_EQ(EvsEventType::MASTER_RELEASED,
1116 static_cast<EvsEventType>(aNotification.aType));
1117
1118 // Closing streams.
1119 frameHandlerMaster->shutdown();
1120
1121 // Explicitly release the camera
1122 pEnumerator->closeCamera(pCamMaster);
1123 pEnumerator->closeCamera(pCamNonMaster);
1124 activeCameras.clear();
1125 }
1126 }
1127
1128
1129 /*
1130 * MultiCameraParameter:
1131 * Verify that master and non-master clients behave as expected when they try to adjust
1132 * camera parameters.
1133 */
1134 TEST_P(EvsHidlTest, MultiCameraParameter) {
1135 LOG(INFO) << "Starting MultiCameraParameter test";
1136
1137 if (mIsHwModule) {
1138 // This test is not for HW module implementation.
1139 return;
1140 }
1141
1142 // Get the camera list
1143 loadCameraList();
1144
1145 // Using a null stream configuration makes EVS use the default resolution and
1146 // output format.
1147 Stream nullCfg = {};
1148
1149 // Test each reported camera
1150 for (auto&& cam: cameraInfo) {
1151 bool isLogicalCam = false;
1152 getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
1153 if (isLogicalCam) {
1154 // TODO(b/145465724): Support camera parameter programming on
1155 // logical devices.
1156 LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
1157 continue;
1158 }
1159
1160 // Create two camera clients.
1161 sp<IEvsCamera_1_1> pCamMaster =
1162 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
1163 .withDefault(nullptr);
1164 ASSERT_NE(pCamMaster, nullptr);
1165
1166 // Store a camera handle for a clean-up
1167 activeCameras.push_back(pCamMaster);
1168
1169 sp<IEvsCamera_1_1> pCamNonMaster =
1170 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
1171 .withDefault(nullptr);
1172 ASSERT_NE(pCamNonMaster, nullptr);
1173
1174 // Store a camera handle for a clean-up
1175 activeCameras.push_back(pCamNonMaster);
1176
1177 // Get the parameter list
1178 std::vector<CameraParam> camMasterCmds, camNonMasterCmds;
1179 pCamMaster->getParameterList([&camMasterCmds](hidl_vec<CameraParam> cmdList) {
1180 camMasterCmds.reserve(cmdList.size());
1181 for (auto &&cmd : cmdList) {
1182 camMasterCmds.push_back(cmd);
1183 }
1184 }
1185 );
1186
1187 pCamNonMaster->getParameterList([&camNonMasterCmds](hidl_vec<CameraParam> cmdList) {
1188 camNonMasterCmds.reserve(cmdList.size());
1189 for (auto &&cmd : cmdList) {
1190 camNonMasterCmds.push_back(cmd);
1191 }
1192 }
1193 );
1194
1195 if (camMasterCmds.size() < 1 ||
1196 camNonMasterCmds.size() < 1) {
1197 // Skip a camera device if it does not support any parameter.
1198 continue;
1199 }
1200
1201 // Set up per-client frame receiver objects, each of which will fire up its own thread
1202 sp<FrameHandler> frameHandlerMaster =
1203 new FrameHandler(pCamMaster, cam,
1204 nullptr,
1205 FrameHandler::eAutoReturn);
1206 ASSERT_NE(frameHandlerMaster, nullptr);
1207 sp<FrameHandler> frameHandlerNonMaster =
1208 new FrameHandler(pCamNonMaster, cam,
1209 nullptr,
1210 FrameHandler::eAutoReturn);
1211 ASSERT_NE(frameHandlerNonMaster, nullptr);
1212
1213 // Set one client as the master
1214 EvsResult result = pCamMaster->setMaster();
1215 ASSERT_EQ(EvsResult::OK, result);
1216
1217 // Try to set another client as the master.
1218 result = pCamNonMaster->setMaster();
1219 ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
1220
1221 // Start the camera's video stream via a master client.
1222 bool startResult = frameHandlerMaster->startStream();
1223 ASSERT_TRUE(startResult);
1224
1225 // Ensure the stream starts
1226 frameHandlerMaster->waitForFrameCount(1);
1227
1228 // Start the camera's video stream via another client
1229 startResult = frameHandlerNonMaster->startStream();
1230 ASSERT_TRUE(startResult);
1231
1232 // Ensure the stream starts
1233 frameHandlerNonMaster->waitForFrameCount(1);
1234
1235 int32_t val0 = 0;
1236 std::vector<int32_t> values;
1237 EvsEventDesc aNotification0 = {};
1238 EvsEventDesc aNotification1 = {};
1239 for (auto &cmd : camMasterCmds) {
1240 // Get a valid parameter value range
1241 int32_t minVal, maxVal, step;
1242 pCamMaster->getIntParameterRange(
1243 cmd,
1244 [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
1245 minVal = val0;
1246 maxVal = val1;
1247 step = val2;
1248 }
1249 );
1250
1251 EvsResult result = EvsResult::OK;
1252 if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1253 // Try to turn off auto-focus
1254 values.clear();
1255 pCamMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1256 [&result, &values](auto status, auto effectiveValues) {
1257 result = status;
1258 if (status == EvsResult::OK) {
1259 for (auto &&v : effectiveValues) {
1260 values.push_back(v);
1261 }
1262 }
1263 });
1264 ASSERT_EQ(EvsResult::OK, result);
1265 for (auto &&v : values) {
1266 ASSERT_EQ(v, 0);
1267 }
1268 }
1269
1270 // Calculate a parameter value to program.
1271 val0 = minVal + (std::rand() % (maxVal - minVal));
1272 val0 = val0 - (val0 % step);
1273
1274 // Prepare and start event listeners.
1275 bool listening0 = false;
1276 bool listening1 = false;
1277 std::condition_variable eventCond;
1278 std::thread listener0 = std::thread(
1279 [cmd, val0,
1280 &aNotification0, &frameHandlerMaster, &listening0, &listening1, &eventCond]() {
1281 listening0 = true;
1282 if (listening1) {
1283 eventCond.notify_all();
1284 }
1285
1286 EvsEventDesc aTargetEvent;
1287 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1288 aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1289 aTargetEvent.payload[1] = val0;
1290 if (!frameHandlerMaster->waitForEvent(aTargetEvent, aNotification0)) {
1291 LOG(WARNING) << "The timer expired before the target event fired.";
1292 }
1293 }
1294 );
1295 std::thread listener1 = std::thread(
1296 [cmd, val0,
1297 &aNotification1, &frameHandlerNonMaster, &listening0, &listening1, &eventCond]() {
1298 listening1 = true;
1299 if (listening0) {
1300 eventCond.notify_all();
1301 }
1302
1303 EvsEventDesc aTargetEvent;
1304 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1305 aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1306 aTargetEvent.payload[1] = val0;
1307 if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification1)) {
1308 LOG(WARNING) << "The timer expired before the target event fired.";
1309 }
1310 }
1311 );
1312
1313 // Wait until a listening thread starts.
1314 std::mutex eventLock;
1315 std::unique_lock<std::mutex> lock(eventLock);
1316 auto timer = std::chrono::system_clock::now();
1317 while (!listening0 || !listening1) {
1318 eventCond.wait_until(lock, timer + 1s);
1319 }
1320 lock.unlock();
1321
1322 // Try to program a parameter
1323 values.clear();
1324 pCamMaster->setIntParameter(cmd, val0,
1325 [&result, &values](auto status, auto effectiveValues) {
1326 result = status;
1327 if (status == EvsResult::OK) {
1328 for (auto &&v : effectiveValues) {
1329 values.push_back(v);
1330 }
1331 }
1332 });
1333
1334 ASSERT_EQ(EvsResult::OK, result);
1335 for (auto &&v : values) {
1336 ASSERT_EQ(val0, v) << "Values are not matched.";
1337 }
1338
1339 // Join a listening thread.
1340 if (listener0.joinable()) {
1341 listener0.join();
1342 }
1343 if (listener1.joinable()) {
1344 listener1.join();
1345 }
1346
1347 // Verify a change notification
1348 ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1349 static_cast<EvsEventType>(aNotification0.aType));
1350 ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1351 static_cast<EvsEventType>(aNotification1.aType));
1352 ASSERT_EQ(cmd,
1353 static_cast<CameraParam>(aNotification0.payload[0]));
1354 ASSERT_EQ(cmd,
1355 static_cast<CameraParam>(aNotification1.payload[0]));
1356 for (auto &&v : values) {
1357 ASSERT_EQ(v,
1358 static_cast<int32_t>(aNotification0.payload[1]));
1359 ASSERT_EQ(v,
1360 static_cast<int32_t>(aNotification1.payload[1]));
1361 }
1362
1363 // Clients expect to receive a parameter change notification
1364 // whenever a master client adjusts it.
1365 values.clear();
1366 pCamMaster->getIntParameter(cmd,
1367 [&result, &values](auto status, auto readValues) {
1368 result = status;
1369 if (status == EvsResult::OK) {
1370 for (auto &&v : readValues) {
1371 values.push_back(v);
1372 }
1373 }
1374 });
1375 ASSERT_EQ(EvsResult::OK, result);
1376 for (auto &&v : values) {
1377 ASSERT_EQ(val0, v) << "Values are not matched.";
1378 }
1379 }
1380
1381 // Try to adjust a parameter via non-master client
1382 values.clear();
1383 pCamNonMaster->setIntParameter(camNonMasterCmds[0], val0,
1384 [&result, &values](auto status, auto effectiveValues) {
1385 result = status;
1386 if (status == EvsResult::OK) {
1387 for (auto &&v : effectiveValues) {
1388 values.push_back(v);
1389 }
1390 }
1391 });
1392 ASSERT_EQ(EvsResult::INVALID_ARG, result);
1393
1394 // Non-master client attempts to become the master
1395 result = pCamNonMaster->setMaster();
1396 ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
1397
1398 // Master client retires from a master role
1399 bool listening = false;
1400 std::condition_variable eventCond;
1401 std::thread listener = std::thread(
1402 [&aNotification0, &frameHandlerNonMaster, &listening, &eventCond]() {
1403 listening = true;
1404 eventCond.notify_all();
1405
1406 EvsEventDesc aTargetEvent;
1407 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1408 if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification0, true)) {
1409 LOG(WARNING) << "The timer expired before the target event fired.";
1410 }
1411 }
1412 );
1413
1414 std::mutex eventLock;
1415 auto timer = std::chrono::system_clock::now();
1416 std::unique_lock<std::mutex> lock(eventLock);
1417 while (!listening) {
1418 eventCond.wait_until(lock, timer + 1s);
1419 }
1420 lock.unlock();
1421
1422 result = pCamMaster->unsetMaster();
1423 ASSERT_EQ(EvsResult::OK, result);
1424
1425 if (listener.joinable()) {
1426 listener.join();
1427 }
1428 ASSERT_EQ(EvsEventType::MASTER_RELEASED,
1429 static_cast<EvsEventType>(aNotification0.aType));
1430
1431 // Try to adjust a parameter after being retired
1432 values.clear();
1433 pCamMaster->setIntParameter(camMasterCmds[0], val0,
1434 [&result, &values](auto status, auto effectiveValues) {
1435 result = status;
1436 if (status == EvsResult::OK) {
1437 for (auto &&v : effectiveValues) {
1438 values.push_back(v);
1439 }
1440 }
1441 });
1442 ASSERT_EQ(EvsResult::INVALID_ARG, result);
1443
1444 // Non-master client becomes a master
1445 result = pCamNonMaster->setMaster();
1446 ASSERT_EQ(EvsResult::OK, result);
1447
1448 // Try to adjust a parameter via new master client
1449 for (auto &cmd : camNonMasterCmds) {
1450 // Get a valid parameter value range
1451 int32_t minVal, maxVal, step;
1452 pCamNonMaster->getIntParameterRange(
1453 cmd,
1454 [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
1455 minVal = val0;
1456 maxVal = val1;
1457 step = val2;
1458 }
1459 );
1460
1461 EvsResult result = EvsResult::OK;
1462 values.clear();
1463 if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1464 // Try to turn off auto-focus
1465 values.clear();
1466 pCamNonMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1467 [&result, &values](auto status, auto effectiveValues) {
1468 result = status;
1469 if (status == EvsResult::OK) {
1470 for (auto &&v : effectiveValues) {
1471 values.push_back(v);
1472 }
1473 }
1474 });
1475 ASSERT_EQ(EvsResult::OK, result);
1476 for (auto &&v : values) {
1477 ASSERT_EQ(v, 0);
1478 }
1479 }
1480
1481 // Calculate a parameter value to program; it is rounded down to a multiple of the step.
1482 val0 = minVal + (std::rand() % (maxVal - minVal));
1483 val0 = val0 - (val0 % step);
1484
1485 // Prepare and start event listeners.
1486 bool listening0 = false;
1487 bool listening1 = false;
1488 std::condition_variable eventCond;
1489 std::thread listener0 = std::thread(
1490 [&cmd, &val0, &aNotification0, &frameHandlerMaster, &listening0, &listening1, &eventCond]() {
1491 listening0 = true;
1492 if (listening1) {
1493 eventCond.notify_all();
1494 }
1495
1496 EvsEventDesc aTargetEvent;
1497 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1498 aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1499 aTargetEvent.payload[1] = val0;
1500 if (!frameHandlerMaster->waitForEvent(aTargetEvent, aNotification0)) {
1501 LOG(WARNING) << "The timer expired before the target event fired.";
1502 }
1503 }
1504 );
1505 std::thread listener1 = std::thread(
1506 [&cmd, &val0, &aNotification1, &frameHandlerNonMaster, &listening0, &listening1, &eventCond]() {
1507 listening1 = true;
1508 if (listening0) {
1509 eventCond.notify_all();
1510 }
1511
1512 EvsEventDesc aTargetEvent;
1513 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1514 aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1515 aTargetEvent.payload[1] = val0;
1516 if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification1)) {
1517 LOG(WARNING) << "The timer expired before the target event fired.";
1518 }
1519 }
1520 );
1521
1522 // Wait until a listening thread starts.
1523 std::mutex eventLock;
1524 std::unique_lock<std::mutex> lock(eventLock);
1525 auto timer = std::chrono::system_clock::now();
1526 while (!listening0 || !listening1) {
1527 eventCond.wait_until(lock, timer + 1s);
1528 }
1529 lock.unlock();
1530
1531 // Try to program a parameter
1532 values.clear();
1533 pCamNonMaster->setIntParameter(cmd, val0,
1534 [&result, &values](auto status, auto effectiveValues) {
1535 result = status;
1536 if (status == EvsResult::OK) {
1537 for (auto &&v : effectiveValues) {
1538 values.push_back(v);
1539 }
1540 }
1541 });
1542 ASSERT_EQ(EvsResult::OK, result);
1543
1544 // Clients expect to receive a parameter change notification
1545 // whenever a master client adjusts it.
1546 values.clear();
1547 pCamNonMaster->getIntParameter(cmd,
1548 [&result, &values](auto status, auto readValues) {
1549 result = status;
1550 if (status == EvsResult::OK) {
1551 for (auto &&v : readValues) {
1552 values.push_back(v);
1553 }
1554 }
1555 });
1556 ASSERT_EQ(EvsResult::OK, result);
1557 for (auto &&v : values) {
1558 ASSERT_EQ(val0, v) << "Values are not matched.";
1559 }
1560
1561 // Join a listening thread.
1562 if (listener0.joinable()) {
1563 listener0.join();
1564 }
1565 if (listener1.joinable()) {
1566 listener1.join();
1567 }
1568
1569 // Verify a change notification
1570 ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1571 static_cast<EvsEventType>(aNotification0.aType));
1572 ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1573 static_cast<EvsEventType>(aNotification1.aType));
1574 ASSERT_EQ(cmd,
1575 static_cast<CameraParam>(aNotification0.payload[0]));
1576 ASSERT_EQ(cmd,
1577 static_cast<CameraParam>(aNotification1.payload[0]));
1578 for (auto &&v : values) {
1579 ASSERT_EQ(v,
1580 static_cast<int32_t>(aNotification0.payload[1]));
1581 ASSERT_EQ(v,
1582 static_cast<int32_t>(aNotification1.payload[1]));
1583 }
1584 }
1585
1586 // New master retires from a master role
1587 result = pCamNonMaster->unsetMaster();
1588 ASSERT_EQ(EvsResult::OK, result);
1589
1590 // Shutdown
1591 frameHandlerMaster->shutdown();
1592 frameHandlerNonMaster->shutdown();
1593
1594 // Explicitly release the camera
1595 pEnumerator->closeCamera(pCamMaster);
1596 pEnumerator->closeCamera(pCamNonMaster);
1597 activeCameras.clear();
1598 }
1599 }
1600
1601
1602 /*
1603 * HighPriorityCameraClient:
1604 * The EVS client that owns the display is prioritized and therefore can take over
1605 * the master role from other EVS clients that do not own the display.
1606 */
1607 TEST_P(EvsHidlTest, HighPriorityCameraClient) {
1608 LOG(INFO) << "Starting HighPriorityCameraClient test";
1609
1610 if (mIsHwModule) {
1611 // This test is not for HW module implementation.
1612 return;
1613 }
1614
1615 // Get the camera list
1616 loadCameraList();
1617
1618 // Using a null stream configuration makes EVS use the default resolution and
1619 // output format.
1620 Stream nullCfg = {};
1621
1622 // Request exclusive access to the EVS display
1623 sp<IEvsDisplay_1_0> pDisplay = pEnumerator->openDisplay();
1624 ASSERT_NE(pDisplay, nullptr);
1625
1626 // Test each reported camera
1627 for (auto&& cam: cameraInfo) {
1628 // Create two clients
1629 sp<IEvsCamera_1_1> pCam0 =
1630 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
1631 .withDefault(nullptr);
1632 ASSERT_NE(pCam0, nullptr);
1633
1634 // Store a camera handle for a clean-up
1635 activeCameras.push_back(pCam0);
1636
1637 sp<IEvsCamera_1_1> pCam1 =
1638 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
1639 .withDefault(nullptr);
1640 ASSERT_NE(pCam1, nullptr);
1641
1642 // Store a camera handle for a clean-up
1643 activeCameras.push_back(pCam1);
1644
1645 // Get the parameter list; this test will use the first command in both
1646 // lists.
1647 std::vector<CameraParam> cam0Cmds, cam1Cmds;
1648 pCam0->getParameterList([&cam0Cmds](hidl_vec<CameraParam> cmdList) {
1649 cam0Cmds.reserve(cmdList.size());
1650 for (auto &&cmd : cmdList) {
1651 cam0Cmds.push_back(cmd);
1652 }
1653 }
1654 );
1655
1656 pCam1->getParameterList([&cam1Cmds](hidl_vec<CameraParam> cmdList) {
1657 cam1Cmds.reserve(cmdList.size());
1658 for (auto &&cmd : cmdList) {
1659 cam1Cmds.push_back(cmd);
1660 }
1661 }
1662 );
1663 if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
1664 // Cannot execute this test.
1665 return;
1666 }
1667
1668 // Set up a frame receiver object which will fire up its own thread.
1669 sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
1670 pDisplay,
1671 FrameHandler::eAutoReturn);
1672 sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
1673 nullptr,
1674 FrameHandler::eAutoReturn);
1675
1676 // Activate the display
1677 pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);
1678
1679 // Start the camera's video stream
1680 ASSERT_TRUE(frameHandler0->startStream());
1681 ASSERT_TRUE(frameHandler1->startStream());
1682
1683 // Ensure the stream starts
1684 frameHandler0->waitForFrameCount(1);
1685 frameHandler1->waitForFrameCount(1);
1686
1687 // Client 1 becomes a master and programs a parameter.
1688 EvsResult result = EvsResult::OK;
1689 // Get a valid parameter value range
1690 int32_t minVal, maxVal, step;
1691 pCam1->getIntParameterRange(
1692 cam1Cmds[0],
1693 [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
1694 minVal = val0;
1695 maxVal = val1;
1696 step = val2;
1697 }
1698 );
1699
1700 // Client1 becomes a master
1701 result = pCam1->setMaster();
1702 ASSERT_EQ(EvsResult::OK, result);
1703
1704 std::vector<int32_t> values;
1705 EvsEventDesc aTargetEvent = {};
1706 EvsEventDesc aNotification = {};
1707 bool listening = false;
1708 std::mutex eventLock;
1709 std::condition_variable eventCond;
1710 if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1711 std::thread listener = std::thread(
1712 [&frameHandler0, &aNotification, &listening, &eventCond] {
1713 listening = true;
1714 eventCond.notify_all();
1715
1716 EvsEventDesc aTargetEvent;
1717 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1718 aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
1719 aTargetEvent.payload[1] = 0;
1720 if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1721 LOG(WARNING) << "The timer expired before the target event fired.";
1722 }
1723 }
1724 );
1725
1726             // Wait until the listener thread starts.
1727 std::unique_lock<std::mutex> lock(eventLock);
1728 auto timer = std::chrono::system_clock::now();
1729 while (!listening) {
1730 eventCond.wait_until(lock, timer + 1s);
1731 }
1732 lock.unlock();
1733
1734 // Try to turn off auto-focus
1735 pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1736 [&result, &values](auto status, auto effectiveValues) {
1737 result = status;
1738 if (status == EvsResult::OK) {
1739 for (auto &&v : effectiveValues) {
1740 values.push_back(v);
1741 }
1742 }
1743 });
1744 ASSERT_EQ(EvsResult::OK, result);
1745 for (auto &&v : values) {
1746 ASSERT_EQ(v, 0);
1747 }
1748
1749             // Join the listener
1750 if (listener.joinable()) {
1751 listener.join();
1752 }
1753
1754 // Make sure AUTO_FOCUS is off.
1755 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1756 EvsEventType::PARAMETER_CHANGED);
1757 }
1758
1759         // Try to program a parameter with a random value in [minVal, maxVal), rounded
1760         // down to a multiple of the step size.
1761 int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
1762 val0 = val0 - (val0 % step);
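        // For example, with minVal = 0, maxVal = 255 and step = 4, a raw value of 123
        // becomes 123 - (123 % 4) = 120.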
1763
1764 std::thread listener = std::thread(
1765 [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
1766 listening = true;
1767 eventCond.notify_all();
1768
1769 EvsEventDesc aTargetEvent;
1770 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1771 aTargetEvent.payload[0] = static_cast<uint32_t>(cam1Cmds[0]);
1772 aTargetEvent.payload[1] = val0;
1773 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1774                 LOG(WARNING) << "Timer expired before the target event was fired.";
1775 }
1776 }
1777 );
1778
1779         // Wait until the listener thread starts.
1780 listening = false;
1781 std::unique_lock<std::mutex> lock(eventLock);
1782 auto timer = std::chrono::system_clock::now();
1783 while (!listening) {
1784 eventCond.wait_until(lock, timer + 1s);
1785 }
1786 lock.unlock();
1787
1788 values.clear();
1789 pCam1->setIntParameter(cam1Cmds[0], val0,
1790 [&result, &values](auto status, auto effectiveValues) {
1791 result = status;
1792 if (status == EvsResult::OK) {
1793 for (auto &&v : effectiveValues) {
1794 values.push_back(v);
1795 }
1796 }
1797 });
1798 ASSERT_EQ(EvsResult::OK, result);
1799 for (auto &&v : values) {
1800 ASSERT_EQ(val0, v);
1801 }
1802
1803         // Join the listener
1804 if (listener.joinable()) {
1805 listener.join();
1806 }
1807
1808 // Verify a change notification
1809 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1810 EvsEventType::PARAMETER_CHANGED);
1811 ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
1812 cam1Cmds[0]);
1813 for (auto &&v : values) {
1814 ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
1815 }
1816
1817 listener = std::thread(
1818 [&frameHandler1, &aNotification, &listening, &eventCond] {
1819 listening = true;
1820 eventCond.notify_all();
1821
1822 EvsEventDesc aTargetEvent;
1823 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1824 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
1825                 LOG(WARNING) << "Timer expired before the target event was fired.";
1826 }
1827 }
1828 );
1829
1830         // Wait until the listener thread starts.
1831 listening = false;
1832 lock.lock();
1833 timer = std::chrono::system_clock::now();
1834 while (!listening) {
1835 eventCond.wait_until(lock, timer + 1s);
1836 }
1837 lock.unlock();
1838
1839         // Client 0 steals the master role
1840 ASSERT_EQ(EvsResult::OK, pCam0->forceMaster(pDisplay));
1841
1842         // Join the listener
1843 if (listener.joinable()) {
1844 listener.join();
1845 }
1846
1847 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1848 EvsEventType::MASTER_RELEASED);
1849
1850 // Client 0 programs a parameter
1851 val0 = minVal + (std::rand() % (maxVal - minVal));
1852
1853         // Round down to a multiple of the step size
1854 val0 = val0 - (val0 % step);
1855
1856 if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1857 std::thread listener = std::thread(
1858 [&frameHandler1, &aNotification, &listening, &eventCond] {
1859 listening = true;
1860 eventCond.notify_all();
1861
1862 EvsEventDesc aTargetEvent;
1863 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1864 aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
1865 aTargetEvent.payload[1] = 0;
1866 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1867                     LOG(WARNING) << "Timer expired before the target event was fired.";
1868 }
1869 }
1870 );
1871
1872             // Wait until the listener thread starts.
1873 std::unique_lock<std::mutex> lock(eventLock);
1874 auto timer = std::chrono::system_clock::now();
1875 while (!listening) {
1876 eventCond.wait_until(lock, timer + 1s);
1877 }
1878 lock.unlock();
1879
1880 // Try to turn off auto-focus
1881 values.clear();
1882 pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1883 [&result, &values](auto status, auto effectiveValues) {
1884 result = status;
1885 if (status == EvsResult::OK) {
1886 for (auto &&v : effectiveValues) {
1887 values.push_back(v);
1888 }
1889 }
1890 });
1891 ASSERT_EQ(EvsResult::OK, result);
1892 for (auto &&v : values) {
1893 ASSERT_EQ(v, 0);
1894 }
1895
1896             // Join the listener
1897 if (listener.joinable()) {
1898 listener.join();
1899 }
1900
1901 // Make sure AUTO_FOCUS is off.
1902 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1903 EvsEventType::PARAMETER_CHANGED);
1904 }
1905
1906 listener = std::thread(
1907 [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
1908 listening = true;
1909 eventCond.notify_all();
1910
1911 EvsEventDesc aTargetEvent;
1912 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1913 aTargetEvent.payload[0] = static_cast<uint32_t>(cam0Cmds[0]);
1914 aTargetEvent.payload[1] = val0;
1915 if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1916                 LOG(WARNING) << "Timer expired before the target event was fired.";
1917 }
1918 }
1919 );
1920
1921         // Wait until the listener thread starts.
1922 listening = false;
1923 timer = std::chrono::system_clock::now();
1924 lock.lock();
1925 while (!listening) {
1926 eventCond.wait_until(lock, timer + 1s);
1927 }
1928 lock.unlock();
1929
1930 values.clear();
1931 pCam0->setIntParameter(cam0Cmds[0], val0,
1932 [&result, &values](auto status, auto effectiveValues) {
1933 result = status;
1934 if (status == EvsResult::OK) {
1935 for (auto &&v : effectiveValues) {
1936 values.push_back(v);
1937 }
1938 }
1939 });
1940 ASSERT_EQ(EvsResult::OK, result);
1941
1942         // Join the listener
1943 if (listener.joinable()) {
1944 listener.join();
1945 }
1946 // Verify a change notification
1947 ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1948 EvsEventType::PARAMETER_CHANGED);
1949 ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
1950 cam0Cmds[0]);
1951 for (auto &&v : values) {
1952 ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
1953 }
1954
1955 // Turn off the display (yes, before the stream stops -- it should be handled)
1956 pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
1957
1958 // Shut down the streamer
1959 frameHandler0->shutdown();
1960 frameHandler1->shutdown();
1961
1962 // Explicitly release the camera
1963 pEnumerator->closeCamera(pCam0);
1964 pEnumerator->closeCamera(pCam1);
1965 activeCameras.clear();
1966
1967 }
1968
1969 // Explicitly release the display
1970 pEnumerator->closeDisplay(pDisplay);
1971 }
1972
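/*
 * A minimal sketch (illustrative only; not called by the tests in this file) of how the
 * "wait until the listener thread is ready" handshake used repeatedly above could be
 * factored out.  The helper name is hypothetical, and it assumes <mutex> and
 * <condition_variable> are available in this translation unit.
 */
[[maybe_unused]] static void waitForListenerReady(std::mutex& eventLock,
                                                  std::condition_variable& eventCond,
                                                  bool& listening) {
    std::unique_lock<std::mutex> lock(eventLock);
    // Returns either when 'listening' becomes true or after a 1-second timeout.
    eventCond.wait_for(lock, 1s, [&listening] { return listening; });
}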
1973
1974 /*
1975 * CameraUseStreamConfigToDisplay:
1976 * End to end test of data flowing from the camera to the display. Similar to
1977 * CameraToDisplayRoundTrip test case but this case retrieves available stream
1978 * configurations from EVS and uses one of them to start a video stream.
1979 */
1980 TEST_P(EvsHidlTest, CameraUseStreamConfigToDisplay) {
1981 LOG(INFO) << "Starting CameraUseStreamConfigToDisplay test";
1982
1983 // Get the camera list
1984 loadCameraList();
1985
1986 // Request exclusive access to the EVS display
1987 sp<IEvsDisplay_1_0> pDisplay = pEnumerator->openDisplay();
1988 ASSERT_NE(pDisplay, nullptr);
1989
1990 // Test each reported camera
1991 for (auto&& cam: cameraInfo) {
1992         // Choose the configuration with the largest area whose frame rate is at least minReqFps.
1993 Stream targetCfg = {};
1994 const int32_t minReqFps = 15;
1995 int32_t maxArea = 0;
1996 camera_metadata_entry_t streamCfgs;
1997 bool foundCfg = false;
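        // Each available stream configuration in the metadata is kStreamCfgSz consecutive
        // int32 values (width, height, format, direction, framerate), matching the
        // RawStreamConfig layout declared near the top of this file.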
1998 if (!find_camera_metadata_entry(
1999 reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
2000 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
2001 &streamCfgs)) {
2002 // Stream configurations are found in metadata
2003 RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
2004 for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
2005 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
2006 ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
2007
2008 if (ptr->width * ptr->height > maxArea &&
2009 ptr->framerate >= minReqFps) {
2010 targetCfg.width = ptr->width;
2011 targetCfg.height = ptr->height;
2012
2013 maxArea = ptr->width * ptr->height;
2014 foundCfg = true;
2015 }
2016 }
2017 ++ptr;
2018 }
2019 }
2020 targetCfg.format =
2021 static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
2022
2023 if (!foundCfg) {
2024 // Current EVS camera does not provide stream configurations in the
2025 // metadata.
2026 continue;
2027 }
2028
2029 sp<IEvsCamera_1_1> pCam =
2030 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg))
2031 .withDefault(nullptr);
2032 ASSERT_NE(pCam, nullptr);
2033
2034 // Store a camera handle for a clean-up
2035 activeCameras.push_back(pCam);
2036
2037 // Set up a frame receiver object which will fire up its own thread.
2038 sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
2039 pDisplay,
2040 FrameHandler::eAutoReturn);
2041
2042
2043 // Activate the display
2044 pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);
2045
2046 // Start the camera's video stream
2047 bool startResult = frameHandler->startStream();
2048 ASSERT_TRUE(startResult);
2049
2050 // Wait a while to let the data flow
2051 static const int kSecondsToWait = 5;
2052 const int streamTimeMs = kSecondsToWait * kSecondsToMilliseconds -
2053 kMaxStreamStartMilliseconds;
2054 const unsigned minimumFramesExpected = streamTimeMs * kMinimumFramesPerSecond /
2055 kSecondsToMilliseconds;
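        // For example, with kSecondsToWait = 5, streamTimeMs = 5 * 1000 - 500 = 4500 ms,
        // so at the required minimum of 10 fps at least 4500 * 10 / 1000 = 45 frames are
        // expected.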
2056 sleep(kSecondsToWait);
2057 unsigned framesReceived = 0;
2058 unsigned framesDisplayed = 0;
2059 frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
2060 EXPECT_EQ(framesReceived, framesDisplayed);
2061 EXPECT_GE(framesDisplayed, minimumFramesExpected);
2062
2063 // Turn off the display (yes, before the stream stops -- it should be handled)
2064 pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
2065
2066 // Shut down the streamer
2067 frameHandler->shutdown();
2068
2069 // Explicitly release the camera
2070 pEnumerator->closeCamera(pCam);
2071 activeCameras.clear();
2072 }
2073
2074 // Explicitly release the display
2075 pEnumerator->closeDisplay(pDisplay);
2076 }
2077
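/*
 * A minimal sketch (illustrative only; not called by the tests in this file) of how the
 * RGBA_8888 stream-configuration selection performed above, and again in
 * MultiCameraStreamUseConfig below, could be factored into a helper.  It relies on the
 * RawStreamConfig layout and kStreamCfgSz declared near the top of this file; the helper
 * name is hypothetical.
 */
[[maybe_unused]] static bool selectLargestRgbaConfig(const camera_metadata_entry_t& streamCfgs,
                                                     int32_t minReqFps,
                                                     Stream* targetCfg) {
    int32_t maxArea = 0;
    bool foundCfg = false;
    const RawStreamConfig* ptr = reinterpret_cast<const RawStreamConfig*>(streamCfgs.data.i32);
    for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
        // Keep the largest RGBA_8888 output configuration that meets the frame rate floor.
        if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
            ptr->format == HAL_PIXEL_FORMAT_RGBA_8888 &&
            ptr->width * ptr->height > maxArea &&
            ptr->framerate >= minReqFps) {
            targetCfg->width = ptr->width;
            targetCfg->height = ptr->height;
            maxArea = ptr->width * ptr->height;
            foundCfg = true;
        }
        ++ptr;
    }
    targetCfg->format = static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
    return foundCfg;
}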
2078
2079 /*
2080 * MultiCameraStreamUseConfig:
2081 * Verify that each client can start and stop video streams on the same
2082  * underlying camera with the same configuration.
2083 */
2084 TEST_P(EvsHidlTest, MultiCameraStreamUseConfig) {
2085     LOG(INFO) << "Starting MultiCameraStreamUseConfig test";
2086
2087 if (mIsHwModule) {
2088 // This test is not for HW module implementation.
2089 return;
2090 }
2091
2092 // Get the camera list
2093 loadCameraList();
2094
2095 // Test each reported camera
2096 for (auto&& cam: cameraInfo) {
2097         // Choose the configuration with the largest area whose frame rate is at least minReqFps.
2098 Stream targetCfg = {};
2099 const int32_t minReqFps = 15;
2100 int32_t maxArea = 0;
2101 camera_metadata_entry_t streamCfgs;
2102 bool foundCfg = false;
2103 if (!find_camera_metadata_entry(
2104 reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
2105 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
2106 &streamCfgs)) {
2107 // Stream configurations are found in metadata
2108 RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
2109 for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
2110 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
2111 ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
2112
2113 if (ptr->width * ptr->height > maxArea &&
2114 ptr->framerate >= minReqFps) {
2115 targetCfg.width = ptr->width;
2116 targetCfg.height = ptr->height;
2117
2118 maxArea = ptr->width * ptr->height;
2119 foundCfg = true;
2120 }
2121 }
2122 ++ptr;
2123 }
2124 }
2125 targetCfg.format =
2126 static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
2127
2128 if (!foundCfg) {
2129 LOG(INFO) << "Device " << cam.v1.cameraId
2130 << " does not provide a list of supported stream configurations, skipped";
2131 continue;
2132 }
2133
2134 // Create the first camera client with a selected stream configuration.
2135 sp<IEvsCamera_1_1> pCam0 =
2136 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg))
2137 .withDefault(nullptr);
2138 ASSERT_NE(pCam0, nullptr);
2139
2140 // Store a camera handle for a clean-up
2141 activeCameras.push_back(pCam0);
2142
2143         // Try to create the second camera client with a different stream
2144         // configuration.
2145 int32_t id = targetCfg.id;
2146 targetCfg.id += 1; // EVS manager sees only the stream id.
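        // Because this stream id differs from the configuration pCam0 is already using,
        // the open below is expected to fail and return nullptr (verified by the ASSERT_EQ).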
2147 sp<IEvsCamera_1_1> pCam1 =
2148 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg))
2149 .withDefault(nullptr);
2150 ASSERT_EQ(pCam1, nullptr);
2151
2152         // Try again with the same stream configuration.
2153         targetCfg.id = id;
2154         pCam1 =
2155             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg))
2156             .withDefault(nullptr);
2157         ASSERT_NE(pCam1, nullptr);
2158 
2159         // Store a camera handle for a clean-up
2160         activeCameras.push_back(pCam1);
2161 
2162         // Set up per-client frame receiver objects, each of which will fire up its own thread
2163 sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
2164 nullptr,
2165 FrameHandler::eAutoReturn);
2166 ASSERT_NE(frameHandler0, nullptr);
2167
2168 sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
2169 nullptr,
2170 FrameHandler::eAutoReturn);
2171 ASSERT_NE(frameHandler1, nullptr);
2172
2173 // Start the camera's video stream via client 0
2174 bool startResult = false;
2175 startResult = frameHandler0->startStream() &&
2176 frameHandler1->startStream();
2177 ASSERT_TRUE(startResult);
2178
2179 // Ensure the stream starts
2180 frameHandler0->waitForFrameCount(1);
2181 frameHandler1->waitForFrameCount(1);
2182
2183 nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
2184
2185 // Wait a bit, then ensure both clients get at least the required minimum number of frames
2186 sleep(5);
2187 nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
2188 unsigned framesReceived0 = 0, framesReceived1 = 0;
2189 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
2190 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
2191 framesReceived0 = framesReceived0 - 1; // Back out the first frame we already waited for
2192 framesReceived1 = framesReceived1 - 1; // Back out the first frame we already waited for
2193 nsecs_t runTime = end - firstFrame;
2194 float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
2195 float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
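        // Example: 60 frames received over a 5-second (5e9 ns) run gives
        // 60 / (5e9 * 1e-9) = 12 fps.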
2196 LOG(INFO) << "Measured camera rate "
2197 << std::scientific << framesPerSecond0 << " fps and "
2198 << framesPerSecond1 << " fps";
2199 EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
2200 EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
2201
2202 // Shutdown one client
2203 frameHandler0->shutdown();
2204
2205 // Read frame counters again
2206 frameHandler0->getFramesCounters(&framesReceived0, nullptr);
2207 frameHandler1->getFramesCounters(&framesReceived1, nullptr);
2208
2209 // Wait a bit again
2210 sleep(5);
2211 unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
2212 frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
2213 frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
2214 EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
2215 EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
2216
2217 // Shutdown another
2218 frameHandler1->shutdown();
2219
2220 // Explicitly release the camera
2221 pEnumerator->closeCamera(pCam0);
2222 pEnumerator->closeCamera(pCam1);
2223 activeCameras.clear();
2224 }
2225 }
2226
2227
2228 /*
2229 * LogicalCameraMetadata:
2230  * Opens a logical camera reported by the enumerator and validates its metadata by
2231  * checking its capability and locating the supporting physical camera device
2232  * identifiers.
2233 */
2234 TEST_P(EvsHidlTest, LogicalCameraMetadata) {
2235 LOG(INFO) << "Starting LogicalCameraMetadata test";
2236
2237 // Get the camera list
2238 loadCameraList();
2239
2240     // Verify the metadata of each reported camera
2241 for (auto&& cam: cameraInfo) {
2242 bool isLogicalCam = false;
2243 auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
2244 if (isLogicalCam) {
2245 ASSERT_GE(devices.size(), 1) <<
2246 "Logical camera device must have at least one physical camera device ID in its metadata.";
2247 }
2248 }
2249 }
2250
2251
2252 /*
2253 * CameraStreamExternalBuffering:
2254  * This is the same as CameraStreamBuffering except that the frame buffers are allocated
2255  * by the test client and then imported by the EVS framework.
2256 */
2257 TEST_P(EvsHidlTest, CameraStreamExternalBuffering) {
2258 LOG(INFO) << "Starting CameraStreamExternalBuffering test";
2259
2260 // Arbitrary constant (should be > 1 and less than crazy)
2261 static const unsigned int kBuffersToHold = 6;
2262
2263 // Get the camera list
2264 loadCameraList();
2265
2266     // Using a null stream configuration makes EVS use the default resolution and
2267     // output format.
2268 Stream nullCfg = {};
2269
2270 // Acquire the graphics buffer allocator
2271 android::GraphicBufferAllocator& alloc(android::GraphicBufferAllocator::get());
2272 const auto usage = GRALLOC_USAGE_HW_TEXTURE |
2273 GRALLOC_USAGE_SW_READ_RARELY |
2274 GRALLOC_USAGE_SW_WRITE_OFTEN;
2275 const auto format = HAL_PIXEL_FORMAT_RGBA_8888;
2276 const auto width = 640;
2277 const auto height = 360;
2278
2279 // Allocate buffers to use
2280 hidl_vec<BufferDesc> buffers;
2281 buffers.resize(kBuffersToHold);
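    // Each gralloc allocation below is wrapped in a BufferDesc whose AHardwareBuffer_Desc
    // mirrors the allocation parameters (size, format, usage, stride) so that EVS can
    // import the buffer; bufferId simply gives each buffer a unique identity for this test.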
2282     for (unsigned i = 0; i < kBuffersToHold; ++i) {
2283 unsigned pixelsPerLine;
2284 buffer_handle_t memHandle = nullptr;
2285 android::status_t result = alloc.allocate(width,
2286 height,
2287 format,
2288 1,
2289 usage,
2290 &memHandle,
2291 &pixelsPerLine,
2292 0,
2293 "EvsApp");
2294 if (result != android::NO_ERROR) {
2295 LOG(ERROR) << __FUNCTION__ << " failed to allocate memory.";
2296 } else {
2297 BufferDesc buf;
2298 AHardwareBuffer_Desc* pDesc =
2299 reinterpret_cast<AHardwareBuffer_Desc *>(&buf.buffer.description);
2300 pDesc->width = width;
2301 pDesc->height = height;
2302 pDesc->layers = 1;
2303 pDesc->format = format;
2304 pDesc->usage = usage;
2305 pDesc->stride = pixelsPerLine;
2306 buf.buffer.nativeHandle = memHandle;
2307 buf.bufferId = i; // Unique number to identify this buffer
2308 buffers[i] = buf;
2309 }
2310 }
2311
2312 // Test each reported camera
2313 for (auto&& cam: cameraInfo) {
2314 bool isLogicalCam = false;
2315 getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
2316
2317 sp<IEvsCamera_1_1> pCam =
2318 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
2319 .withDefault(nullptr);
2320 ASSERT_NE(pCam, nullptr);
2321
2322 // Store a camera handle for a clean-up
2323 activeCameras.push_back(pCam);
2324
2325 // Request to import buffers
2326 EvsResult result = EvsResult::OK;
2327 int delta = 0;
2328 pCam->importExternalBuffers(buffers,
2329 [&] (auto _result, auto _delta) {
2330 result = _result;
2331 delta = _delta;
2332 });
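        // 'delta' reports how the camera's buffer pool size changed as a result of the
        // import; the checks below only require a successful result and a non-negative
        // change, and a logical camera is expected to reject the import outright.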
2333 if (isLogicalCam) {
2334 EXPECT_EQ(result, EvsResult::UNDERLYING_SERVICE_ERROR);
2335 continue;
2336 }
2337
2338 EXPECT_EQ(result, EvsResult::OK);
2339 EXPECT_GE(delta, 0);
2340
2341 // Set up a frame receiver object which will fire up its own thread.
2342 sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
2343 nullptr,
2344 FrameHandler::eNoAutoReturn);
2345
2346 // Start the camera's video stream
2347 bool startResult = frameHandler->startStream();
2348 ASSERT_TRUE(startResult);
2349
2350 // Check that the video stream stalls once we've gotten exactly the number of buffers
2351 // we requested since we told the frameHandler not to return them.
2352 sleep(1); // 1 second should be enough for at least 5 frames to be delivered worst case
2353 unsigned framesReceived = 0;
2354 frameHandler->getFramesCounters(&framesReceived, nullptr);
2355 ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
2356
2357
2358 // Give back one buffer
2359 bool didReturnBuffer = frameHandler->returnHeldBuffer();
2360 EXPECT_TRUE(didReturnBuffer);
2361
2362 // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
2363 // filled since we require 10fps minimum -- but give a 10% allowance just in case.
2364 usleep(110 * kMillisecondsToMicroseconds);
2365 frameHandler->getFramesCounters(&framesReceived, nullptr);
2366 EXPECT_EQ(kBuffersToHold+1, framesReceived) << "Stream should've resumed";
2367
2368 // Even when the camera pointer goes out of scope, the FrameHandler object will
2369 // keep the stream alive unless we tell it to shutdown.
2370         // Also note that the FrameHandler and the Camera have a mutual circular reference, so
2371 // we have to break that cycle in order for either of them to get cleaned up.
2372 frameHandler->shutdown();
2373
2374 // Explicitly release the camera
2375 pEnumerator->closeCamera(pCam);
2376 activeCameras.clear();
2377 }
2378
2379 // Release buffers
2380 for (auto& b : buffers) {
2381 alloc.free(b.buffer.nativeHandle);
2382 }
2383 buffers.resize(0);
2384 }
2385
2386
2387 /*
2388 * UltrasonicsArrayOpenClean:
2389  * Opens each ultrasonics array reported by the enumerator and then explicitly closes it via a
2390 * call to closeUltrasonicsArray. Then repeats the test to ensure all ultrasonics arrays
2391 * can be reopened.
2392 */
2393 TEST_P(EvsHidlTest, UltrasonicsArrayOpenClean) {
2394 LOG(INFO) << "Starting UltrasonicsArrayOpenClean test";
2395
2396 // Get the ultrasonics array list
2397 loadUltrasonicsArrayList();
2398
2399 // Open and close each ultrasonics array twice
2400 for (auto&& ultraInfo : ultrasonicsArraysInfo) {
2401 for (int pass = 0; pass < 2; pass++) {
2402 sp<IEvsUltrasonicsArray> pUltrasonicsArray =
2403 pEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId);
2404 ASSERT_NE(pUltrasonicsArray, nullptr);
2405
2406 // Verify that this ultrasonics array self-identifies correctly
2407 pUltrasonicsArray->getUltrasonicArrayInfo([&ultraInfo](UltrasonicsArrayDesc desc) {
2408 LOG(DEBUG) << "Found ultrasonics array " << ultraInfo.ultrasonicsArrayId;
2409 EXPECT_EQ(ultraInfo.ultrasonicsArrayId, desc.ultrasonicsArrayId);
2410 });
2411
2412 // Explicitly close the ultrasonics array so resources are released right away
2413 pEnumerator->closeUltrasonicsArray(pUltrasonicsArray);
2414 }
2415 }
2416 }
2417
2418
2419 // Starts a stream and verifies all data received is valid.
2420 TEST_P(EvsHidlTest, UltrasonicsVerifyStreamData) {
2421 LOG(INFO) << "Starting UltrasonicsVerifyStreamData";
2422
2423 // Get the ultrasonics array list
2424 loadUltrasonicsArrayList();
2425
2426 // For each ultrasonics array.
2427 for (auto&& ultraInfo : ultrasonicsArraysInfo) {
2428 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2429
2430 sp<IEvsUltrasonicsArray> pUltrasonicsArray =
2431 pEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId);
2432 ASSERT_NE(pUltrasonicsArray, nullptr);
2433
2434 sp<FrameHandlerUltrasonics> frameHandler = new FrameHandlerUltrasonics(pUltrasonicsArray);
2435
2436 // Start stream.
2437 EvsResult result = pUltrasonicsArray->startStream(frameHandler);
2438 ASSERT_EQ(result, EvsResult::OK);
2439
2440 // Wait 5 seconds to receive frames.
2441 sleep(5);
2442
2443 // Stop stream.
2444 pUltrasonicsArray->stopStream();
2445
2446 EXPECT_GT(frameHandler->getReceiveFramesCount(), 0);
2447 EXPECT_TRUE(frameHandler->areAllFramesValid());
2448
2449 // Explicitly close the ultrasonics array so resources are released right away
2450 pEnumerator->closeUltrasonicsArray(pUltrasonicsArray);
2451 }
2452 }
2453
2454
2455 // Sets frames in flight before and after the start of the stream and verifies success.
2456 TEST_P(EvsHidlTest, UltrasonicsSetFramesInFlight) {
2457 LOG(INFO) << "Starting UltrasonicsSetFramesInFlight";
2458
2459 // Get the ultrasonics array list
2460 loadUltrasonicsArrayList();
2461
2462 // For each ultrasonics array.
2463 for (auto&& ultraInfo : ultrasonicsArraysInfo) {
2464 LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2465
2466 sp<IEvsUltrasonicsArray> pUltrasonicsArray =
2467 pEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId);
2468 ASSERT_NE(pUltrasonicsArray, nullptr);
2469
2470 EvsResult result = pUltrasonicsArray->setMaxFramesInFlight(10);
2471 EXPECT_EQ(result, EvsResult::OK);
2472
2473 sp<FrameHandlerUltrasonics> frameHandler = new FrameHandlerUltrasonics(pUltrasonicsArray);
2474
2475 // Start stream.
2476 result = pUltrasonicsArray->startStream(frameHandler);
2477 ASSERT_EQ(result, EvsResult::OK);
2478
2479 result = pUltrasonicsArray->setMaxFramesInFlight(5);
2480 EXPECT_EQ(result, EvsResult::OK);
2481
2482 // Stop stream.
2483 pUltrasonicsArray->stopStream();
2484
2485 // Explicitly close the ultrasonics array so resources are released right away
2486 pEnumerator->closeUltrasonicsArray(pUltrasonicsArray);
2487 }
2488 }
2489
2490
2491 INSTANTIATE_TEST_SUITE_P(
2492 PerInstance,
2493 EvsHidlTest,
2494 testing::ValuesIn(android::hardware::getAllHalInstanceNames(IEvsEnumerator::descriptor)),
2495 android::hardware::PrintInstanceNameToString);
2496
2497