1 /*
2  * Copyright (C) 2019 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "VtsHalEvsTest"
18 
19 
20 // These values are called out in the EVS design doc (as of Mar 8, 2017)
21 static const int kMaxStreamStartMilliseconds = 500;
22 static const int kMinimumFramesPerSecond = 10;
23 
24 static const int kSecondsToMilliseconds = 1000;
25 static const int kMillisecondsToMicroseconds = 1000;
26 static const float kNanoToMilliseconds = 0.000001f;
27 static const float kNanoToSeconds = 0.000000001f;
28 
29 
30 #include "FrameHandler.h"
31 #include "FrameHandlerUltrasonics.h"
32 
33 #include <cstdio>
34 #include <cstring>
35 #include <cstdlib>
36 #include <thread>
37 #include <unordered_set>
38 
39 #include <hidl/HidlTransportSupport.h>
40 #include <hwbinder/ProcessState.h>
41 #include <utils/Errors.h>
42 #include <utils/StrongPointer.h>
43 
44 #include <android-base/logging.h>
45 #include <android/hardware/automotive/evs/1.1/IEvsCamera.h>
46 #include <android/hardware/automotive/evs/1.1/IEvsCameraStream.h>
47 #include <android/hardware/automotive/evs/1.1/IEvsDisplay.h>
48 #include <android/hardware/automotive/evs/1.1/IEvsEnumerator.h>
49 #include <android/hardware/camera/device/3.2/ICameraDevice.h>
50 #include <system/camera_metadata.h>
51 #include <ui/DisplayMode.h>
52 #include <ui/DisplayState.h>
53 #include <ui/GraphicBuffer.h>
54 #include <ui/GraphicBufferAllocator.h>
55 
56 #include <gtest/gtest.h>
57 #include <hidl/GtestPrinter.h>
58 #include <hidl/ServiceManagement.h>
59 
60 using namespace ::android::hardware::automotive::evs::V1_1;
61 using namespace std::chrono_literals;
62 
63 using ::android::hardware::Return;
64 using ::android::hardware::Void;
65 using ::android::hardware::hidl_vec;
66 using ::android::hardware::hidl_handle;
67 using ::android::hardware::hidl_string;
68 using ::android::sp;
69 using ::android::wp;
70 using ::android::hardware::camera::device::V3_2::Stream;
71 using ::android::hardware::automotive::evs::V1_1::BufferDesc;
72 using ::android::hardware::automotive::evs::V1_0::DisplayDesc;
73 using ::android::hardware::automotive::evs::V1_0::DisplayState;
74 using ::android::hardware::graphics::common::V1_0::PixelFormat;
75 using ::android::frameworks::automotive::display::V1_0::HwDisplayConfig;
76 using ::android::frameworks::automotive::display::V1_0::HwDisplayState;
77 using IEvsCamera_1_0 = ::android::hardware::automotive::evs::V1_0::IEvsCamera;
78 using IEvsCamera_1_1 = ::android::hardware::automotive::evs::V1_1::IEvsCamera;
79 using IEvsDisplay_1_0 = ::android::hardware::automotive::evs::V1_0::IEvsDisplay;
80 using IEvsDisplay_1_1 = ::android::hardware::automotive::evs::V1_1::IEvsDisplay;
81 
82 namespace {
83 
84 /*
85  * Please note that this is different from what is defined in
86  * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
87  * field to store a framerate.
88  */
89 typedef struct {
90     int32_t id;
91     int32_t width;
92     int32_t height;
93     int32_t format;
94     int32_t direction;
95     int32_t framerate;
96 } RawStreamConfig;
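// kStreamCfgSz below is the number of int32_t elements in each raw stream
// configuration entry; it is used to stride through the flattened metadata array.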
97 constexpr const size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);
98 
99 } // anonymous namespace
100 
101 
102 // The main test class for EVS
103 class EvsHidlTest : public ::testing::TestWithParam<std::string> {
104 public:
105     virtual void SetUp() override {
106         // Make sure we can connect to the enumerator
107         std::string service_name = GetParam();
108         pEnumerator = IEvsEnumerator::getService(service_name);
109         ASSERT_NE(pEnumerator.get(), nullptr);
110         LOG(INFO) << "Test target service: " << service_name;
111 
112         mIsHwModule = pEnumerator->isHardware();
113     }
114 
115     virtual void TearDown() override {
116         // Attempt to close any active camera
117         for (auto &&cam : activeCameras) {
118             if (cam != nullptr) {
119                 pEnumerator->closeCamera(cam);
120             }
121         }
122         activeCameras.clear();
123     }
124 
125 protected:
126     void loadCameraList() {
127         // SetUp() must run first!
128         assert(pEnumerator != nullptr);
129 
130         // Get the camera list
131         pEnumerator->getCameraList_1_1(
132             [this](hidl_vec <CameraDesc> cameraList) {
133                 LOG(INFO) << "Camera list callback received "
134                           << cameraList.size()
135                           << " cameras";
136                 cameraInfo.reserve(cameraList.size());
137                 for (auto&& cam: cameraList) {
138                     LOG(INFO) << "Found camera " << cam.v1.cameraId;
139                     cameraInfo.push_back(cam);
140                 }
141             }
142         );
143     }
144 
145     void loadUltrasonicsArrayList() {
146         // SetUp() must run first!
147         assert(pEnumerator != nullptr);
148 
149         // Get the ultrasonics array list
150         pEnumerator->getUltrasonicsArrayList([this](hidl_vec<UltrasonicsArrayDesc> ultraList) {
151             LOG(INFO) << "Ultrasonics array list callback received "
152                       << ultraList.size()
153                       << " arrays";
154             ultrasonicsArraysInfo.reserve(ultraList.size());
155             for (auto&& ultraArray : ultraList) {
156                 LOG(INFO) << "Found ultrasonics array " << ultraArray.ultrasonicsArrayId;
157                 ultrasonicsArraysInfo.push_back(ultraArray);
158             }
159         });
160     }
161 
162     bool isLogicalCamera(const camera_metadata_t *metadata) {
163         if (metadata == nullptr) {
164             // A logical camera device must have a valid camera metadata.
165             return false;
166         }
167 
168         // Looking for LOGICAL_MULTI_CAMERA capability from metadata.
169         camera_metadata_ro_entry_t entry;
170         int rc = find_camera_metadata_ro_entry(metadata,
171                                                ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
172                                                &entry);
173         if (0 != rc) {
174             // No capabilities are found.
175             return false;
176         }
177 
178         for (size_t i = 0; i < entry.count; ++i) {
179             uint8_t cap = entry.data.u8[i];
180             if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
181                 return true;
182             }
183         }
184 
185         return false;
186     }
187 
188     std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id,
189                                                          bool& flag) {
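        // 'flag' is set to true if the requested camera is a logical (multi-)camera device.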
190         std::unordered_set<std::string> physicalCameras;
191 
192         auto it = cameraInfo.begin();
193         while (it != cameraInfo.end()) {
194             if (it->v1.cameraId == id) {
195                 break;
196             }
197             ++it;
198         }
199 
200         if (it == cameraInfo.end()) {
201             // Unknown camera is requested.  Return an empty list.
202             return physicalCameras;
203         }
204 
205         const camera_metadata_t *metadata =
206             reinterpret_cast<camera_metadata_t *>(&it->metadata[0]);
207         flag = isLogicalCamera(metadata);
208         if (!flag) {
209             // EVS assumes that a device without valid metadata (or without the
210             // logical multi-camera capability) is a physical device.
211             LOG(INFO) << id << " is not a logical camera device.";
212             physicalCameras.emplace(id);
213             return physicalCameras;
214         }
215 
216         // Look for physical camera identifiers
217         camera_metadata_ro_entry entry;
218         int rc = find_camera_metadata_ro_entry(metadata,
219                                                ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
220                                                &entry);
221         if (rc != 0) {
222             LOG(ERROR) << "No physical camera ID is found for a logical camera device";
223         }
224 
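        // Physical camera IDs are packed as consecutive NUL-terminated strings; split them on '\0'.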
225         const uint8_t *ids = entry.data.u8;
226         size_t start = 0;
227         for (size_t i = 0; i < entry.count; ++i) {
228             if (ids[i] == '\0') {
229                 if (start != i) {
230                     std::string id(reinterpret_cast<const char *>(ids + start));
231                     physicalCameras.emplace(id);
232                 }
233                 start = i + 1;
234             }
235         }
236 
237         LOG(INFO) << id
238                   << " consists of "
239                   << physicalCameras.size()
240                   << " physical camera devices";
241         return physicalCameras;
242     }
243 
244     Stream getFirstStreamConfiguration(camera_metadata_t* metadata) {
245         Stream targetCfg = {};
246         camera_metadata_entry_t streamCfgs;
247         if (!find_camera_metadata_entry(metadata,
248                  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
249                  &streamCfgs)) {
250             // Stream configurations are found in metadata
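            // (find_camera_metadata_entry() returns 0 on success, hence the negated check above.)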
251             RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
252             for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
253                 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
254                     targetCfg.width = ptr->width;
255                     targetCfg.height = ptr->height;
256                     targetCfg.format = static_cast<PixelFormat>(ptr->format);
257                     break;
258                 }
259                 ++ptr;
260             }
261         }
262 
263         return targetCfg;
264     }
265 
266     sp<IEvsEnumerator>              pEnumerator;   // Every test needs access to the service
267     std::vector<CameraDesc>         cameraInfo;    // Empty unless/until loadCameraList() is called
268     bool                            mIsHwModule;   // True if the module under test is a HW
269                                                    // module implementation.
270     std::deque<sp<IEvsCamera_1_1>>  activeCameras; // A list of active camera handles that need
271                                                    // to be cleaned up.
272     std::vector<UltrasonicsArrayDesc>
273             ultrasonicsArraysInfo;                           // Empty unless/until
274                                                              // loadUltrasonicsArrayList() is called
275     std::deque<wp<IEvsCamera_1_1>> activeUltrasonicsArrays;  // A list of active ultrasonic array
276                                                              // handles that are to be cleaned up.
277 };
278 
279 
280 // Test cases, their implementations, and corresponding requirements are
281 // documented at go/aae-evs-public-api-test.
282 
283 /*
284  * CameraOpenClean:
285  * Opens each camera reported by the enumerator and then explicitly closes it via a
286  * call to closeCamera.  Then repeats the test to ensure all cameras can be reopened.
287  */
288 TEST_P(EvsHidlTest, CameraOpenClean) {
289     LOG(INFO) << "Starting CameraOpenClean test";
290 
291     // Get the camera list
292     loadCameraList();
293 
294     // Open and close each camera twice
295     for (auto&& cam: cameraInfo) {
296         bool isLogicalCam = false;
297         auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
298         if (mIsHwModule && isLogicalCam) {
299             LOG(INFO) << "Skip a logical device, " << cam.v1.cameraId << " for HW target.";
300             continue;
301         }
302 
303         // Read a target resolution from the metadata
304         Stream targetCfg =
305             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
306         ASSERT_GT(targetCfg.width, 0);
307         ASSERT_GT(targetCfg.height, 0);
308 
309         for (int pass = 0; pass < 2; pass++) {
310             sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
311             ASSERT_NE(pCam, nullptr);
312 
313             for (auto&& devName : devices) {
314                 bool matched = false;
315                 pCam->getPhysicalCameraInfo(devName,
316                                             [&devName, &matched](const CameraDesc& info) {
317                                                 matched = devName == info.v1.cameraId;
318                                             });
319                 ASSERT_TRUE(matched);
320             }
321 
322             // Store a camera handle for a clean-up
323             activeCameras.push_back(pCam);
324 
325             // Verify that this camera self-identifies correctly
326             pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
327                                         LOG(DEBUG) << "Found camera " << desc.v1.cameraId;
328                                         EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
329                                     }
330             );
331 
332             // Verify methods for extended info
333             const auto id = 0xFFFFFFFF; // meaningless id
334             hidl_vec<uint8_t> values;
335             auto err = pCam->setExtendedInfo_1_1(id, values);
336             if (isLogicalCam) {
337                 // Logical camera device does not support setExtendedInfo
338                 // method.
339                 ASSERT_EQ(EvsResult::INVALID_ARG, err);
340             } else {
341                 ASSERT_NE(EvsResult::INVALID_ARG, err);
342             }
343 
344 
345             pCam->getExtendedInfo_1_1(id, [&isLogicalCam](const auto& result, const auto& data) {
346                 if (isLogicalCam) {
347                     ASSERT_EQ(EvsResult::INVALID_ARG, result);
348                 } else {
349                     ASSERT_NE(EvsResult::INVALID_ARG, result);
350                     ASSERT_EQ(0, data.size());
351                 }
352             });
353 
354             // Explicitly close the camera so resources are released right away
355             pEnumerator->closeCamera(pCam);
356             activeCameras.clear();
357         }
358     }
359 }
360 
361 
362 /*
363  * CameraOpenAggressive:
364  * Opens each camera reported by the enumerator twice in a row without an intervening closeCamera
365  * call.  This ensures that the intended "aggressive open" behavior works.  This is necessary for
366  * the system to be tolerant of shutdown/restart race conditions.
367  */
368 TEST_P(EvsHidlTest, CameraOpenAggressive) {
369     LOG(INFO) << "Starting CameraOpenAggressive test";
370 
371     // Get the camera list
372     loadCameraList();
373 
374     // Open and close each camera twice
375     for (auto&& cam: cameraInfo) {
376         bool isLogicalCam = false;
377         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
378         if (mIsHwModule && isLogicalCam) {
379             LOG(INFO) << "Skip a logical device, " << cam.v1.cameraId << " for HW target.";
380             continue;
381         }
382 
383         // Read a target resolution from the metadata
384         Stream targetCfg =
385             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
386         ASSERT_GT(targetCfg.width, 0);
387         ASSERT_GT(targetCfg.height, 0);
388 
389         activeCameras.clear();
390         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
391         ASSERT_NE(pCam, nullptr);
392 
393         // Store a camera handle for a clean-up
394         activeCameras.push_back(pCam);
395 
396         // Verify that this camera self-identifies correctly
397         pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
398                                     LOG(DEBUG) << "Found camera " << desc.v1.cameraId;
399                                     EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
400                                 }
401         );
402 
403         sp<IEvsCamera_1_1> pCam2 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
404         ASSERT_NE(pCam2, nullptr);
405 
406         // Store a camera handle for a clean-up
407         activeCameras.push_back(pCam2);
408 
409         ASSERT_NE(pCam, pCam2);
410 
411         Return<EvsResult> result = pCam->setMaxFramesInFlight(2);
412         if (mIsHwModule) {
413             // Verify that the old camera rejects calls via HW module.
414             EXPECT_EQ(EvsResult::OWNERSHIP_LOST, EvsResult(result));
415         } else {
416             // default implementation supports multiple clients.
417             EXPECT_EQ(EvsResult::OK, EvsResult(result));
418         }
419 
420         // Close the superseded camera
421         pEnumerator->closeCamera(pCam);
422         activeCameras.pop_front();
423 
424         // Verify that the second camera instance self-identifies correctly
425         pCam2->getCameraInfo_1_1([&cam](CameraDesc desc) {
426                                      LOG(DEBUG) << "Found camera " << desc.v1.cameraId;
427                                      EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
428                                  }
429         );
430 
431         // Close the second camera instance
432         pEnumerator->closeCamera(pCam2);
433         activeCameras.pop_front();
434     }
435 
436     // Sleep here to ensure the destructor cleanup has time to run so we don't break follow-on tests
437     sleep(1);   // I hate that this is an arbitrary time to wait.  :(  b/36122635
438 }
439 
440 
441 /*
442  * CameraStreamPerformance:
443  * Measure and qualify the stream start up time and streaming frame rate of each reported camera
444  */
445 TEST_P(EvsHidlTest, CameraStreamPerformance) {
446     LOG(INFO) << "Starting CameraStreamPerformance test";
447 
448     // Get the camera list
449     loadCameraList();
450 
451     // Test each reported camera
452     for (auto&& cam: cameraInfo) {
453         bool isLogicalCam = false;
454         auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
455         if (mIsHwModule && isLogicalCam) {
456             LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
457             continue;
458         }
459 
460         // Read a target resolution from the metadata
461         Stream targetCfg =
462             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
463         ASSERT_GT(targetCfg.width, 0);
464         ASSERT_GT(targetCfg.height, 0);
465 
466         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
467         ASSERT_NE(pCam, nullptr);
468 
469         // Store a camera handle for a clean-up
470         activeCameras.push_back(pCam);
471 
472         // Set up a frame receiver object which will fire up its own thread
473         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
474                                                          nullptr,
475                                                          FrameHandler::eAutoReturn);
476 
477         // Start the camera's video stream
478         nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
479 
480         bool startResult = frameHandler->startStream();
481         ASSERT_TRUE(startResult);
482 
483         // Ensure the first frame arrived within the expected time
484         frameHandler->waitForFrameCount(1);
485         nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
486         nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;
487 
488         // Extra delays are expected when we attempt to start a video stream on
489         // a logical camera device.  The delay is expected to be at most
490         // kMaxStreamStartMilliseconds multiplied by the number of physical
491         // camera devices.
492         EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
493                   kMaxStreamStartMilliseconds * devices.size());
494         printf("%s: Measured time to first frame %0.2f ms\n",
495                cam.v1.cameraId.c_str(), timeToFirstFrame * kNanoToMilliseconds);
496         LOG(INFO) << cam.v1.cameraId
497                   << ": Measured time to first frame "
498                   << std::scientific << timeToFirstFrame * kNanoToMilliseconds
499                   << " ms.";
500 
501         // Wait a bit, then ensure we get at least the required minimum number of frames
502         sleep(5);
503         nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
504 
505         // Even when the camera pointer goes out of scope, the FrameHandler object will
506         // keep the stream alive unless we tell it to shutdown.
507         // Also note that the FrameHandler and the Camera have a mutual circular reference, so
508         // we have to break that cycle in order for either of them to get cleaned up.
509         frameHandler->shutdown();
510 
511         unsigned framesReceived = 0;
512         frameHandler->getFramesCounters(&framesReceived, nullptr);
513         framesReceived = framesReceived - 1;    // Back out the first frame we already waited for
514         nsecs_t runTime = end - firstFrame;
515         float framesPerSecond = framesReceived / (runTime * kNanoToSeconds);
516         printf("Measured camera rate %3.2f fps\n", framesPerSecond);
517         LOG(INFO) << "Measured camera rate "
518                   << std::scientific << framesPerSecond
519                   << " fps.";
520         EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);
521 
522         // Explicitly release the camera
523         pEnumerator->closeCamera(pCam);
524         activeCameras.clear();
525     }
526 }
527 
528 
529 /*
530  * CameraStreamBuffering:
531  * Ensure the camera implementation behaves properly when the client holds onto buffers for more
532  * than one frame time.  The camera must cleanly skip frames until the client is ready again.
533  */
534 TEST_P(EvsHidlTest, CameraStreamBuffering) {
535     LOG(INFO) << "Starting CameraStreamBuffering test";
536 
537     // Arbitrary constant (should be > 1 and not too big)
538     static const unsigned int kBuffersToHold = 2;
539 
540     // Get the camera list
541     loadCameraList();
542 
543     // Test each reported camera
544     for (auto&& cam: cameraInfo) {
545         bool isLogicalCam = false;
546         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
547         if (mIsHwModule && isLogicalCam) {
548             LOG(INFO) << "Skip a logical device " << cam.v1.cameraId << " for HW target.";
549             continue;
550         }
551 
552         // Read a target resolution from the metadata
553         Stream targetCfg =
554             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
555         ASSERT_GT(targetCfg.width, 0);
556         ASSERT_GT(targetCfg.height, 0);
557 
558         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
559         ASSERT_NE(pCam, nullptr);
560 
561         // Store a camera handle for a clean-up
562         activeCameras.push_back(pCam);
563 
564         // Ask for a very large number of buffers in flight to ensure it errors correctly
565         Return<EvsResult> badResult =
566                 pCam->setMaxFramesInFlight(std::numeric_limits<int32_t>::max());
567         EXPECT_EQ(EvsResult::BUFFER_NOT_AVAILABLE, badResult);
568 
569         // Now ask for exactly two buffers in flight as we'll test behavior in that case
570         Return<EvsResult> goodResult = pCam->setMaxFramesInFlight(kBuffersToHold);
571         EXPECT_EQ(EvsResult::OK, goodResult);
572 
573 
574         // Set up a frame receiver object which will fire up its own thread.
575         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
576                                                          nullptr,
577                                                          FrameHandler::eNoAutoReturn);
578 
579         // Start the camera's video stream
580         bool startResult = frameHandler->startStream();
581         ASSERT_TRUE(startResult);
582 
583         // Check that the video stream stalls once we've gotten exactly the number of buffers
584         // we requested since we told the frameHandler not to return them.
585         sleep(1);   // 1 second should be enough for at least 5 frames to be delivered worst case
586         unsigned framesReceived = 0;
587         frameHandler->getFramesCounters(&framesReceived, nullptr);
588         ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
589 
590 
591         // Give back one buffer
592         bool didReturnBuffer = frameHandler->returnHeldBuffer();
593         EXPECT_TRUE(didReturnBuffer);
594 
595         // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
596         // filled since we require 10fps minimum -- but give a 10% allowance just in case.
597         usleep(110 * kMillisecondsToMicroseconds);
598         frameHandler->getFramesCounters(&framesReceived, nullptr);
599         EXPECT_EQ(kBuffersToHold+1, framesReceived) << "Stream should've resumed";
600 
601         // Even when the camera pointer goes out of scope, the FrameHandler object will
602         // keep the stream alive unless we tell it to shutdown.
603         // Also note that the FrameHandler and the Camera have a mutual circular reference, so
604         // we have to break that cycle in order for either of them to get cleaned up.
605         frameHandler->shutdown();
606 
607         // Explicitly release the camera
608         pEnumerator->closeCamera(pCam);
609         activeCameras.clear();
610     }
611 }
612 
613 
614 /*
615  * CameraToDisplayRoundTrip:
616  * End to end test of data flowing from the camera to the display.  Each delivered frame of camera
617  * imagery is simply copied to the display buffer and presented on screen.  This is the one test
618  * which a human could observe to see the operation of the system on the physical display.
619  */
620 TEST_P(EvsHidlTest, CameraToDisplayRoundTrip) {
621     LOG(INFO) << "Starting CameraToDisplayRoundTrip test";
622 
623     // Get the camera list
624     loadCameraList();
625 
626     // Request available display IDs
627     uint8_t targetDisplayId = 0;
628     pEnumerator->getDisplayIdList([&targetDisplayId](auto ids) {
629         ASSERT_GT(ids.size(), 0);
630         targetDisplayId = ids[0];
631     });
632 
633     // Test each reported camera
634     for (auto&& cam: cameraInfo) {
635         // Request exclusive access to the first EVS display
636         sp<IEvsDisplay_1_1> pDisplay = pEnumerator->openDisplay_1_1(targetDisplayId);
637         ASSERT_NE(pDisplay, nullptr);
638         LOG(INFO) << "Display " << targetDisplayId << " is already in use.";
639 
640         // Get the display descriptor
641         pDisplay->getDisplayInfo_1_1([](const HwDisplayConfig& config, const HwDisplayState& state) {
642             ASSERT_GT(config.size(), 0);
643             ASSERT_GT(state.size(), 0);
644 
645             android::ui::DisplayMode* pConfig = (android::ui::DisplayMode*)config.data();
646             const auto width = pConfig->resolution.getWidth();
647             const auto height = pConfig->resolution.getHeight();
648             LOG(INFO) << "    Resolution: " << width << "x" << height;
649             ASSERT_GT(width, 0);
650             ASSERT_GT(height, 0);
651 
652             android::ui::DisplayState* pState = (android::ui::DisplayState*)state.data();
653             ASSERT_NE(pState->layerStack, android::ui::INVALID_LAYER_STACK);
654         });
655 
656         bool isLogicalCam = false;
657         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
658         if (mIsHwModule && isLogicalCam) {
659             LOG(INFO) << "Skip a logical device " << cam.v1.cameraId << " for HW target.";
660             continue;
661         }
662 
663         // Read a target resolution from the metadata
664         Stream targetCfg =
665             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
666         ASSERT_GT(targetCfg.width, 0);
667         ASSERT_GT(targetCfg.height, 0);
668 
669         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
670         ASSERT_NE(pCam, nullptr);
671 
672         // Store a camera handle for a clean-up
673         activeCameras.push_back(pCam);
674 
675         // Set up a frame receiver object which will fire up its own thread.
676         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
677                                                          pDisplay,
678                                                          FrameHandler::eAutoReturn);
679 
680 
681         // Activate the display
682         pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);
683 
684         // Start the camera's video stream
685         bool startResult = frameHandler->startStream();
686         ASSERT_TRUE(startResult);
687 
688         // Wait a while to let the data flow
689         static const int kSecondsToWait = 5;
690         const int streamTimeMs = kSecondsToWait * kSecondsToMilliseconds -
691                                  kMaxStreamStartMilliseconds;
692         const unsigned minimumFramesExpected = streamTimeMs * kMinimumFramesPerSecond /
693                                                kSecondsToMilliseconds;
694         sleep(kSecondsToWait);
695         unsigned framesReceived = 0;
696         unsigned framesDisplayed = 0;
697         frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
698         EXPECT_EQ(framesReceived, framesDisplayed);
699         EXPECT_GE(framesDisplayed, minimumFramesExpected);
700 
701         // Turn off the display (yes, before the stream stops -- it should be handled)
702         pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
703 
704         // Shut down the streamer
705         frameHandler->shutdown();
706 
707         // Explicitly release the camera
708         pEnumerator->closeCamera(pCam);
709         activeCameras.clear();
710 
711         // Explicitly release the display
712         pEnumerator->closeDisplay(pDisplay);
713     }
714 }
715 
716 
717 /*
718  * MultiCameraStream:
719  * Verify that each client can start and stop video streams on the same
720  * underlying camera.
721  */
722 TEST_P(EvsHidlTest, MultiCameraStream) {
723     LOG(INFO) << "Starting MultiCameraStream test";
724 
725     if (mIsHwModule) {
726         // This test is not for HW module implementation.
727         return;
728     }
729 
730     // Get the camera list
731     loadCameraList();
732 
733     // Test each reported camera
734     for (auto&& cam: cameraInfo) {
735         // Read a target resolution from the metadata
736         Stream targetCfg =
737             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
738         ASSERT_GT(targetCfg.width, 0);
739         ASSERT_GT(targetCfg.height, 0);
740 
741         // Create two camera clients.
742         sp<IEvsCamera_1_1> pCam0 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
743         ASSERT_NE(pCam0, nullptr);
744 
745         // Store a camera handle for a clean-up
746         activeCameras.push_back(pCam0);
747 
748         sp<IEvsCamera_1_1> pCam1 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
749         ASSERT_NE(pCam1, nullptr);
750 
751         // Store a camera handle for a clean-up
752         activeCameras.push_back(pCam1);
753 
754         // Set up per-client frame receiver objects which will fire up their own threads
755         sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
756                                                           nullptr,
757                                                           FrameHandler::eAutoReturn);
758         ASSERT_NE(frameHandler0, nullptr);
759 
760         sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
761                                                           nullptr,
762                                                           FrameHandler::eAutoReturn);
763         ASSERT_NE(frameHandler1, nullptr);
764 
765         // Start the camera's video stream via both clients
766         bool startResult = false;
767         startResult = frameHandler0->startStream() &&
768                       frameHandler1->startStream();
769         ASSERT_TRUE(startResult);
770 
771         // Ensure the stream starts
772         frameHandler0->waitForFrameCount(1);
773         frameHandler1->waitForFrameCount(1);
774 
775         nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
776 
777         // Wait a bit, then ensure both clients get at least the required minimum number of frames
778         sleep(5);
779         nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
780         unsigned framesReceived0 = 0, framesReceived1 = 0;
781         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
782         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
783         framesReceived0 = framesReceived0 - 1;    // Back out the first frame we already waited for
784         framesReceived1 = framesReceived1 - 1;    // Back out the first frame we already waited for
785         nsecs_t runTime = end - firstFrame;
786         float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
787         float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
788         LOG(INFO) << "Measured camera rate "
789                   << std::scientific << framesPerSecond0 << " fps and "
790                   << framesPerSecond1 << " fps";
791         EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
792         EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
793 
794         // Shut down one client
795         frameHandler0->shutdown();
796 
797         // Read frame counters again
798         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
799         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
800 
801         // Wait a bit again
802         sleep(5);
803         unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
804         frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
805         frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
806         EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
807         EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
808 
809         // Shut down the other client
810         frameHandler1->shutdown();
811 
812         // Explicitly release the camera
813         pEnumerator->closeCamera(pCam0);
814         pEnumerator->closeCamera(pCam1);
815         activeCameras.clear();
816 
817         // TODO(b/145459970, b/145457727): below sleep() is added to ensure the
818         // destruction of active camera objects; this may be related with two
819         // issues.
820         sleep(1);
821     }
822 }
823 
824 
825 /*
826  * CameraParameter:
827  * Verify that a client can adjust a camera parameter.
828  */
829 TEST_P(EvsHidlTest, CameraParameter) {
830     LOG(INFO) << "Starting CameraParameter test";
831 
832     // Get the camera list
833     loadCameraList();
834 
835     // Test each reported camera
836     Return<EvsResult> result = EvsResult::OK;
837     for (auto&& cam: cameraInfo) {
838         bool isLogicalCam = false;
839         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
840         if (isLogicalCam) {
841             // TODO(b/145465724): Support camera parameter programming on
842             // logical devices.
843             LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
844             continue;
845         }
846 
847         // Read a target resolution from the metadata
848         Stream targetCfg =
849             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
850         ASSERT_GT(targetCfg.width, 0);
851         ASSERT_GT(targetCfg.height, 0);
852 
853         // Create a camera client
854         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
855         ASSERT_NE(pCam, nullptr);
856 
857         // Store a camera handle for a clean-up
858         activeCameras.push_back(pCam);
859 
860         // Get the parameter list
861         std::vector<CameraParam> cmds;
862         pCam->getParameterList([&cmds](hidl_vec<CameraParam> cmdList) {
863                 cmds.reserve(cmdList.size());
864                 for (auto &&cmd : cmdList) {
865                     cmds.push_back(cmd);
866                 }
867             }
868         );
869 
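        // Skip a camera device if it does not support any parameter.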
870         if (cmds.size() < 1) {
871             continue;
872         }
873 
874         // Set up a frame receiver object which will fire up its own thread
875         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
876                                                          nullptr,
877                                                          FrameHandler::eAutoReturn);
878         ASSERT_NE(frameHandler, nullptr);
879 
880         // Start the camera's video stream
881         bool startResult = frameHandler->startStream();
882         ASSERT_TRUE(startResult);
883 
884         // Ensure the stream starts
885         frameHandler->waitForFrameCount(1);
886 
887         result = pCam->setMaster();
888         ASSERT_EQ(EvsResult::OK, result);
889 
890         for (auto &cmd : cmds) {
891             // Get a valid parameter value range
892             int32_t minVal, maxVal, step;
893             pCam->getIntParameterRange(
894                 cmd,
895                 [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
896                     minVal = val0;
897                     maxVal = val1;
898                     step   = val2;
899                 }
900             );
901 
902             EvsResult result = EvsResult::OK;
903             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
904                 // Try to turn off auto-focus
905                 std::vector<int32_t> values;
906                 pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0,
907                                    [&result, &values](auto status, auto effectiveValues) {
908                                        result = status;
909                                        if (status == EvsResult::OK) {
910                                           for (auto &&v : effectiveValues) {
911                                               values.push_back(v);
912                                           }
913                                        }
914                                    });
915                 ASSERT_EQ(EvsResult::OK, result);
916                 for (auto &&v : values) {
917                     ASSERT_EQ(v, 0);
918                 }
919             }
920 
921             // Try to program a parameter with a random value in [minVal, maxVal)
922             int32_t val0 = (maxVal > minVal) ? minVal + (std::rand() % (maxVal - minVal)) : minVal;
923             std::vector<int32_t> values;
924 
925             // Round the value down to the nearest valid step
926             val0 = val0 - (val0 % step);
927             pCam->setIntParameter(cmd, val0,
928                                [&result, &values](auto status, auto effectiveValues) {
929                                    result = status;
930                                    if (status == EvsResult::OK) {
931                                       for (auto &&v : effectiveValues) {
932                                           values.push_back(v);
933                                       }
934                                    }
935                                });
936 
937             ASSERT_EQ(EvsResult::OK, result);
938 
939             values.clear();
940             pCam->getIntParameter(cmd,
941                                [&result, &values](auto status, auto readValues) {
942                                    result = status;
943                                    if (status == EvsResult::OK) {
944                                       for (auto &&v : readValues) {
945                                           values.push_back(v);
946                                       }
947                                    }
948                                });
949             ASSERT_EQ(EvsResult::OK, result);
950             for (auto &&v : values) {
951                 ASSERT_EQ(val0, v) << "Values do not match.";
952             }
953         }
954 
955         result = pCam->unsetMaster();
956         ASSERT_EQ(EvsResult::OK, result);
957 
958         // Shutdown
959         frameHandler->shutdown();
960 
961         // Explicitly release the camera
962         pEnumerator->closeCamera(pCam);
963         activeCameras.clear();
964     }
965 }
966 
967 
968 /*
969  * CameraPrimaryClientRelease:
970  * Verify that the non-primary client gets notified when the primary client
971  * either terminates or releases its role.
972  */
973 TEST_P(EvsHidlTest, CameraPrimaryClientRelease) {
974     LOG(INFO) << "Starting CameraPrimaryClientRelease test";
975 
976     if (mIsHwModule) {
977         // This test is not for HW module implementation.
978         return;
979     }
980 
981     // Get the camera list
982     loadCameraList();
983 
984     // Test each reported camera
985     for (auto&& cam: cameraInfo) {
986         bool isLogicalCam = false;
987         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
988         if (isLogicalCam) {
989             // TODO(b/145465724): Support camera parameter programming on
990             // logical devices.
991             LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
992             continue;
993         }
994 
995         // Read a target resolution from the metadata
996         Stream targetCfg =
997             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
998         ASSERT_GT(targetCfg.width, 0);
999         ASSERT_GT(targetCfg.height, 0);
1000 
1001         // Create two camera clients.
1002         sp<IEvsCamera_1_1> pCamPrimary = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1003         ASSERT_NE(pCamPrimary, nullptr);
1004 
1005         // Store a camera handle for a clean-up
1006         activeCameras.push_back(pCamPrimary);
1007 
1008         sp<IEvsCamera_1_1> pCamSecondary = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1009         ASSERT_NE(pCamSecondary, nullptr);
1010 
1011         // Store a camera handle for a clean-up
1012         activeCameras.push_back(pCamSecondary);
1013 
1014         // Set up per-client frame receiver objects which will fire up their own threads
1015         sp<FrameHandler> frameHandlerPrimary =
1016             new FrameHandler(pCamPrimary, cam,
1017                              nullptr,
1018                              FrameHandler::eAutoReturn);
1019         ASSERT_NE(frameHandlerPrimary, nullptr);
1020         sp<FrameHandler> frameHandlerSecondary =
1021             new FrameHandler(pCamSecondary, cam,
1022                              nullptr,
1023                              FrameHandler::eAutoReturn);
1024         ASSERT_NE(frameHandlerSecondary, nullptr);
1025 
1026         // Set one client as the primary client
1027         EvsResult result = pCamPrimary->setMaster();
1028         ASSERT_TRUE(result == EvsResult::OK);
1029 
1030         // Try to set another client as the primary client.
1031         result = pCamSecondary->setMaster();
1032         ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
1033 
1034         // Start the camera's video stream via the primary client.
1035         bool startResult = frameHandlerPrimary->startStream();
1036         ASSERT_TRUE(startResult);
1037 
1038         // Ensure the stream starts
1039         frameHandlerPrimary->waitForFrameCount(1);
1040 
1041         // Start the camera's video stream via another client
1042         startResult = frameHandlerSecondary->startStream();
1043         ASSERT_TRUE(startResult);
1044 
1045         // Ensure the stream starts
1046         frameHandlerSecondary->waitForFrameCount(1);
1047 
1048         // The non-primary client expects to receive a notification when the
1049         // primary client role is released.
1050         EvsEventDesc aTargetEvent  = {};
1051         EvsEventDesc aNotification = {};
1052 
1053         bool listening = false;
1054         std::mutex eventLock;
1055         std::condition_variable eventCond;
1056         std::thread listener = std::thread(
1057             [&aNotification, &frameHandlerSecondary, &listening, &eventCond]() {
1058                 // Notify that a listening thread is running.
1059                 listening = true;
1060                 eventCond.notify_all();
1061 
1062                 EvsEventDesc aTargetEvent;
1063                 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1064                 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification, true)) {
1065                     LOG(WARNING) << "The timer expired before the target event was fired.";
1066                 }
1067 
1068             }
1069         );
1070 
1071         // Wait until a listening thread starts.
1072         std::unique_lock<std::mutex> lock(eventLock);
1073         auto timer = std::chrono::system_clock::now();
1074         while (!listening) {
1075             timer += 1s;
1076             eventCond.wait_until(lock, timer);
1077         }
1078         lock.unlock();
1079 
1080         // Release a primary client role.
1081         pCamPrimary->unsetMaster();
1082 
1083         // Join a listening thread.
1084         if (listener.joinable()) {
1085             listener.join();
1086         }
1087 
1088         // Verify change notifications.
1089         ASSERT_EQ(EvsEventType::MASTER_RELEASED,
1090                   static_cast<EvsEventType>(aNotification.aType));
1091 
1092         // The non-primary client becomes the primary client.
1093         result = pCamSecondary->setMaster();
1094         ASSERT_TRUE(result == EvsResult::OK);
1095 
1096         // The previous primary client fails to become the primary client.
1097         result = pCamPrimary->setMaster();
1098         ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
1099 
1100         listening = false;
1101         listener = std::thread(
1102             [&aNotification, &frameHandlerPrimary, &listening, &eventCond]() {
1103                 // Notify that a listening thread is running.
1104                 listening = true;
1105                 eventCond.notify_all();
1106 
1107                 EvsEventDesc aTargetEvent;
1108                 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1109                 if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification, true)) {
1110                     LOG(WARNING) << "The timer expired before the target event was fired.";
1111                 }
1112 
1113             }
1114         );
1115 
1116         // Wait until a listening thread starts.
1117         timer = std::chrono::system_clock::now();
1118         lock.lock();
1119         while (!listening) {
1120             eventCond.wait_until(lock, timer + 1s);
1121         }
1122         lock.unlock();
1123 
1124         // Close the current primary client.
1125         frameHandlerSecondary->shutdown();
1126 
1127         // Join a listening thread.
1128         if (listener.joinable()) {
1129             listener.join();
1130         }
1131 
1132         // Verify change notifications.
1133         ASSERT_EQ(EvsEventType::MASTER_RELEASED,
1134                   static_cast<EvsEventType>(aNotification.aType));
1135 
1136         // Close the remaining stream.
1137         frameHandlerPrimary->shutdown();
1138 
1139         // Explicitly release the camera
1140         pEnumerator->closeCamera(pCamPrimary);
1141         pEnumerator->closeCamera(pCamSecondary);
1142         activeCameras.clear();
1143     }
1144 }
1145 
1146 
1147 /*
1148  * MultiCameraParameter:
1149  * Verify that primary and non-primary clients behave as expected when they try to adjust
1150  * camera parameters.
1151  */
1152 TEST_P(EvsHidlTest, MultiCameraParameter) {
1153     LOG(INFO) << "Starting MultiCameraParameter test";
1154 
1155     if (mIsHwModule) {
1156         // This test is not for HW module implementation.
1157         return;
1158     }
1159 
1160     // Get the camera list
1161     loadCameraList();
1162 
1163     // Test each reported camera
1164     for (auto&& cam: cameraInfo) {
1165         bool isLogicalCam = false;
1166         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
1167         if (isLogicalCam) {
1168             // TODO(b/145465724): Support camera parameter programming on
1169             // logical devices.
1170             LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
1171             continue;
1172         }
1173 
1174         // Read a target resolution from the metadata
1175         Stream targetCfg =
1176             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1177         ASSERT_GT(targetCfg.width, 0);
1178         ASSERT_GT(targetCfg.height, 0);
1179 
1180         // Create two camera clients.
1181         sp<IEvsCamera_1_1> pCamPrimary = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1182         ASSERT_NE(pCamPrimary, nullptr);
1183 
1184         // Store a camera handle for a clean-up
1185         activeCameras.push_back(pCamPrimary);
1186 
1187         sp<IEvsCamera_1_1> pCamSecondary = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1188         ASSERT_NE(pCamSecondary, nullptr);
1189 
1190         // Store a camera handle for a clean-up
1191         activeCameras.push_back(pCamSecondary);
1192 
1193         // Get the parameter list
1194         std::vector<CameraParam> camPrimaryCmds, camSecondaryCmds;
1195         pCamPrimary->getParameterList([&camPrimaryCmds](hidl_vec<CameraParam> cmdList) {
1196                 camPrimaryCmds.reserve(cmdList.size());
1197                 for (auto &&cmd : cmdList) {
1198                     camPrimaryCmds.push_back(cmd);
1199                 }
1200             }
1201         );
1202 
1203         pCamSecondary->getParameterList([&camSecondaryCmds](hidl_vec<CameraParam> cmdList) {
1204                 camSecondaryCmds.reserve(cmdList.size());
1205                 for (auto &&cmd : cmdList) {
1206                     camSecondaryCmds.push_back(cmd);
1207                 }
1208             }
1209         );
1210 
1211         if (camPrimaryCmds.size() < 1 ||
1212             camSecondaryCmds.size() < 1) {
1213             // Skip a camera device if it does not support any parameter.
1214             continue;
1215         }
1216 
1217         // Set up per-client frame receiver objects which will fire up their own threads
1218         sp<FrameHandler> frameHandlerPrimary =
1219             new FrameHandler(pCamPrimary, cam,
1220                              nullptr,
1221                              FrameHandler::eAutoReturn);
1222         ASSERT_NE(frameHandlerPrimary, nullptr);
1223         sp<FrameHandler> frameHandlerSecondary =
1224             new FrameHandler(pCamSecondary, cam,
1225                              nullptr,
1226                              FrameHandler::eAutoReturn);
1227         ASSERT_NE(frameHandlerSecondary, nullptr);
1228 
1229         // Set one client as the primary client.
1230         EvsResult result = pCamPrimary->setMaster();
1231         ASSERT_EQ(EvsResult::OK, result);
1232 
1233         // Try to set another client as the primary client.
1234         result = pCamSecondary->setMaster();
1235         ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
1236 
1237         // Start the camera's video stream via the primary client.
1238         bool startResult = frameHandlerPrimary->startStream();
1239         ASSERT_TRUE(startResult);
1240 
1241         // Ensure the stream starts
1242         frameHandlerPrimary->waitForFrameCount(1);
1243 
1244         // Start the camera's video stream via another client
1245         startResult = frameHandlerSecondary->startStream();
1246         ASSERT_TRUE(startResult);
1247 
1248         // Ensure the stream starts
1249         frameHandlerSecondary->waitForFrameCount(1);
1250 
1251         int32_t val0 = 0;
1252         std::vector<int32_t> values;
1253         EvsEventDesc aNotification0 = {};
1254         EvsEventDesc aNotification1 = {};
1255         for (auto &cmd : camPrimaryCmds) {
1256             // Get a valid parameter value range
1257             int32_t minVal, maxVal, step;
1258             pCamPrimary->getIntParameterRange(
1259                 cmd,
1260                 [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
1261                     minVal = val0;
1262                     maxVal = val1;
1263                     step   = val2;
1264                 }
1265             );
1266 
1267             EvsResult result = EvsResult::OK;
1268             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1269                 // Try to turn off auto-focus
1270                 values.clear();
1271                 pCamPrimary->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1272                                    [&result, &values](auto status, auto effectiveValues) {
1273                                        result = status;
1274                                        if (status == EvsResult::OK) {
1275                                           for (auto &&v : effectiveValues) {
1276                                               values.push_back(v);
1277                                           }
1278                                        }
1279                                    });
1280                 ASSERT_EQ(EvsResult::OK, result);
1281                 for (auto &&v : values) {
1282                     ASSERT_EQ(v, 0);
1283                 }
1284             }
1285 
1286             // Calculate a parameter value to program, aligned to the valid step.
1287             val0 = (maxVal > minVal) ? minVal + (std::rand() % (maxVal - minVal)) : minVal;
1288             val0 = val0 - (val0 % step);
1289 
1290             // Prepare and start event listeners.
1291             bool listening0 = false;
1292             bool listening1 = false;
1293             std::condition_variable eventCond;
1294             std::thread listener0 = std::thread(
1295                 [cmd, val0,
1296                  &aNotification0, &frameHandlerPrimary, &listening0, &listening1, &eventCond]() {
1297                     listening0 = true;
1298                     if (listening1) {
1299                         eventCond.notify_all();
1300                     }
1301 
1302                     EvsEventDesc aTargetEvent;
1303                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1304                     aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1305                     aTargetEvent.payload[1] = val0;
1306                     if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
1307                         LOG(WARNING) << "The timer expired before the target event was fired.";
1308                     }
1309                 }
1310             );
1311             std::thread listener1 = std::thread(
1312                 [cmd, val0,
1313                  &aNotification1, &frameHandlerSecondary, &listening0, &listening1, &eventCond]() {
1314                     listening1 = true;
1315                     if (listening0) {
1316                         eventCond.notify_all();
1317                     }
1318 
1319                     EvsEventDesc aTargetEvent;
1320                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1321                     aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1322                     aTargetEvent.payload[1] = val0;
1323                     if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1324                         LOG(WARNING) << "A timer is expired before a target event is fired.";
1325                     }
1326                 }
1327             );
1328 
1329             // Wait until both listening threads start.
1330             std::mutex eventLock;
1331             std::unique_lock<std::mutex> lock(eventLock);
1332             auto timer = std::chrono::system_clock::now();
1333             while (!listening0 || !listening1) {
1334                 eventCond.wait_until(lock, timer + 1s);
1335             }
1336             lock.unlock();
1337 
1338             // Try to program a parameter
1339             values.clear();
1340             pCamPrimary->setIntParameter(cmd, val0,
1341                                      [&result, &values](auto status, auto effectiveValues) {
1342                                          result = status;
1343                                          if (status == EvsResult::OK) {
1344                                             for (auto &&v : effectiveValues) {
1345                                                 values.push_back(v);
1346                                             }
1347                                          }
1348                                      });
1349 
1350             ASSERT_EQ(EvsResult::OK, result);
1351             for (auto &&v : values) {
1352                 ASSERT_EQ(val0, v) << "Values are not matched.";
1353             }
1354 
1355             // Join a listening thread.
1356             if (listener0.joinable()) {
1357                 listener0.join();
1358             }
1359             if (listener1.joinable()) {
1360                 listener1.join();
1361             }
1362 
1363             // Verify a change notification
1364             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1365                       static_cast<EvsEventType>(aNotification0.aType));
1366             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1367                       static_cast<EvsEventType>(aNotification1.aType));
1368             ASSERT_EQ(cmd,
1369                       static_cast<CameraParam>(aNotification0.payload[0]));
1370             ASSERT_EQ(cmd,
1371                       static_cast<CameraParam>(aNotification1.payload[0]));
1372             for (auto &&v : values) {
1373                 ASSERT_EQ(v,
1374                           static_cast<int32_t>(aNotification0.payload[1]));
1375                 ASSERT_EQ(v,
1376                           static_cast<int32_t>(aNotification1.payload[1]));
1377             }
1378 
1379             // Clients expect to receive a parameter change notification
1380             // whenever the primary client adjusts it.
1381             values.clear();
1382             pCamPrimary->getIntParameter(cmd,
1383                                      [&result, &values](auto status, auto readValues) {
1384                                          result = status;
1385                                          if (status == EvsResult::OK) {
1386                                             for (auto &&v : readValues) {
1387                                                 values.push_back(v);
1388                                             }
1389                                          }
1390                                      });
1391             ASSERT_EQ(EvsResult::OK, result);
1392             for (auto &&v : values) {
1393                 ASSERT_EQ(val0, v) << "Values are not matched.";
1394             }
1395         }
1396 
1397         // Try to adjust a parameter via non-primary client
1398         values.clear();
1399         pCamSecondary->setIntParameter(camSecondaryCmds[0], val0,
1400                                     [&result, &values](auto status, auto effectiveValues) {
1401                                         result = status;
1402                                         if (status == EvsResult::OK) {
1403                                             for (auto &&v : effectiveValues) {
1404                                                 values.push_back(v);
1405                                             }
1406                                         }
1407                                     });
1408         ASSERT_EQ(EvsResult::INVALID_ARG, result);
1409 
1410         // Non-primary client attempts to be a primary client
1411         result = pCamSecondary->setMaster();
1412         ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
1413 
1414         // Primary client retires from a primary client role
1415         bool listening = false;
1416         std::condition_variable eventCond;
1417         std::thread listener = std::thread(
1418             [&aNotification0, &frameHandlerSecondary, &listening, &eventCond]() {
1419                 listening = true;
1420                 eventCond.notify_all();
1421 
1422                 EvsEventDesc aTargetEvent;
1423                 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1424                 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification0, true)) {
1425                     LOG(WARNING) << "A timer is expired before a target event is fired.";
1426                 }
1427             }
1428         );
1429 
1430         std::mutex eventLock;
1431         auto timer = std::chrono::system_clock::now();
1432         std::unique_lock<std::mutex> lock(eventLock);
1433         while (!listening) {
1434             eventCond.wait_until(lock, timer + 1s);
1435         }
1436         lock.unlock();
1437 
1438         result = pCamPrimary->unsetMaster();
1439         ASSERT_EQ(EvsResult::OK, result);
1440 
1441         if (listener.joinable()) {
1442             listener.join();
1443         }
1444         ASSERT_EQ(EvsEventType::MASTER_RELEASED,
1445                   static_cast<EvsEventType>(aNotification0.aType));
1446 
1447         // Try to adjust a parameter after being retired
1448         values.clear();
1449         pCamPrimary->setIntParameter(camPrimaryCmds[0], val0,
1450                                  [&result, &values](auto status, auto effectiveValues) {
1451                                      result = status;
1452                                      if (status == EvsResult::OK) {
1453                                         for (auto &&v : effectiveValues) {
1454                                             values.push_back(v);
1455                                         }
1456                                      }
1457                                  });
1458         ASSERT_EQ(EvsResult::INVALID_ARG, result);
1459 
1460         // Non-primary client becomes a primary client
1461         result = pCamSecondary->setMaster();
1462         ASSERT_EQ(EvsResult::OK, result);
1463 
1464         // Try to adjust a parameter via new primary client
1465         for (auto &cmd : camSecondaryCmds) {
1466             // Get a valid parameter value range
1467             int32_t minVal, maxVal, step;
1468             pCamSecondary->getIntParameterRange(
1469                 cmd,
1470                 [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
1471                     minVal = val0;
1472                     maxVal = val1;
1473                     step   = val2;
1474                 }
1475             );
1476 
1477             EvsResult result = EvsResult::OK;
1478             values.clear();
1479             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1480                 // Try to turn off auto-focus
1481                 values.clear();
1482                 pCamSecondary->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1483                                    [&result, &values](auto status, auto effectiveValues) {
1484                                        result = status;
1485                                        if (status == EvsResult::OK) {
1486                                           for (auto &&v : effectiveValues) {
1487                                               values.push_back(v);
1488                                           }
1489                                        }
1490                                    });
1491                 ASSERT_EQ(EvsResult::OK, result);
1492                 for (auto &&v : values) {
1493                     ASSERT_EQ(v, 0);
1494                 }
1495             }
1496 
1497             // Calculate a parameter value to program.  It is rounded down to the nearest step.
1498             val0 = minVal + (std::rand() % (maxVal - minVal));
1499             val0 = val0 - (val0 % step);
1500 
1501             // Prepare and start event listeners.
1502             bool listening0 = false;
1503             bool listening1 = false;
1504             std::condition_variable eventCond;
1505             std::thread listener0 = std::thread(
1506                 [&]() {
1507                     listening0 = true;
1508                     if (listening1) {
1509                         eventCond.notify_all();
1510                     }
1511 
1512                     EvsEventDesc aTargetEvent;
1513                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1514                     aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1515                     aTargetEvent.payload[1] = val0;
1516                     if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
1517                         LOG(WARNING) << "A timer is expired before a target event is fired.";
1518                     }
1519                 }
1520             );
1521             std::thread listener1 = std::thread(
1522                 [&]() {
1523                     listening1 = true;
1524                     if (listening0) {
1525                         eventCond.notify_all();
1526                     }
1527 
1528                     EvsEventDesc aTargetEvent;
1529                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1530                     aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1531                     aTargetEvent.payload[1] = val0;
1532                     if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1533                         LOG(WARNING) << "A timer is expired before a target event is fired.";
1534                     }
1535                 }
1536             );
1537 
1538             // Wait until both listening threads start.
1539             std::mutex eventLock;
1540             std::unique_lock<std::mutex> lock(eventLock);
1541             auto timer = std::chrono::system_clock::now();
1542             while (!listening0 || !listening1) {
1543                 eventCond.wait_until(lock, timer + 1s);
1544             }
1545             lock.unlock();
1546 
1547             // Try to program a parameter
1548             values.clear();
1549             pCamSecondary->setIntParameter(cmd, val0,
1550                                         [&result, &values](auto status, auto effectiveValues) {
1551                                             result = status;
1552                                             if (status == EvsResult::OK) {
1553                                                 for (auto &&v : effectiveValues) {
1554                                                     values.push_back(v);
1555                                                 }
1556                                             }
1557                                         });
1558             ASSERT_EQ(EvsResult::OK, result);
1559 
1560             // Clients expect to receive a parameter change notification
1561             // whenever the primary client adjusts it.
1562             values.clear();
1563             pCamSecondary->getIntParameter(cmd,
1564                                         [&result, &values](auto status, auto readValues) {
1565                                             result = status;
1566                                             if (status == EvsResult::OK) {
1567                                                 for (auto &&v : readValues) {
1568                                                     values.push_back(v);
1569                                                 }
1570                                             }
1571                                         });
1572             ASSERT_EQ(EvsResult::OK, result);
1573             for (auto &&v : values) {
1574                 ASSERT_EQ(val0, v) << "Values are not matched.";
1575             }
1576 
1577             // Join a listening thread.
1578             if (listener0.joinable()) {
1579                 listener0.join();
1580             }
1581             if (listener1.joinable()) {
1582                 listener1.join();
1583             }
1584 
1585             // Verify a change notification
1586             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1587                       static_cast<EvsEventType>(aNotification0.aType));
1588             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1589                       static_cast<EvsEventType>(aNotification1.aType));
1590             ASSERT_EQ(cmd,
1591                       static_cast<CameraParam>(aNotification0.payload[0]));
1592             ASSERT_EQ(cmd,
1593                       static_cast<CameraParam>(aNotification1.payload[0]));
1594             for (auto &&v : values) {
1595                 ASSERT_EQ(v,
1596                           static_cast<int32_t>(aNotification0.payload[1]));
1597                 ASSERT_EQ(v,
1598                           static_cast<int32_t>(aNotification1.payload[1]));
1599             }
1600         }
1601 
1602         // New primary client retires from the role
1603         result = pCamSecondary->unsetMaster();
1604         ASSERT_EQ(EvsResult::OK, result);
1605 
1606         // Shutdown
1607         frameHandlerPrimary->shutdown();
1608         frameHandlerSecondary->shutdown();
1609 
1610         // Explicitly release the camera
1611         pEnumerator->closeCamera(pCamPrimary);
1612         pEnumerator->closeCamera(pCamSecondary);
1613         activeCameras.clear();
1614     }
1615 }
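
// Illustrative sketch (not part of the original test): the primary/secondary client
// arbitration exercised by the test above reduces to the call sequence below.  It
// assumes two already-opened clients of the same underlying camera, uses a hypothetical
// helper name, and exists purely as documentation; it is never invoked by the harness.
[[maybe_unused]] static void sketchPrimaryClientArbitration(
        const sp<IEvsCamera_1_1>& primary, const sp<IEvsCamera_1_1>& secondary) {
    // The first client to call setMaster() owns the parameter-programming role.
    EvsResult result = primary->setMaster();
    EXPECT_EQ(EvsResult::OK, result);

    // While the role is held, another ordinary client can neither program
    // parameters nor claim the role.
    result = secondary->setMaster();
    EXPECT_EQ(EvsResult::OWNERSHIP_LOST, result);

    // Once the primary retires, subscribers receive a MASTER_RELEASED event and
    // the other client may take over.
    result = primary->unsetMaster();
    EXPECT_EQ(EvsResult::OK, result);
    result = secondary->setMaster();
    EXPECT_EQ(EvsResult::OK, result);
    result = secondary->unsetMaster();
    EXPECT_EQ(EvsResult::OK, result);
}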
1616 
1617 
1618 /*
1619  * HighPriorityCameraClient:
1620  * The EVS client that owns the display is prioritized and can therefore take over
1621  * the primary client role from other EVS clients that do not own the display.
1622  */
1623 TEST_P(EvsHidlTest, HighPriorityCameraClient) {
1624     LOG(INFO) << "Starting HighPriorityCameraClient test";
1625 
1626     if (mIsHwModule) {
1627         // This test is not for HW module implementation.
1628         return;
1629     }
1630 
1631     // Get the camera list
1632     loadCameraList();
1633 
1634     // Test each reported camera
1635     for (auto&& cam: cameraInfo) {
1636         // Request exclusive access to the EVS display
1637         sp<IEvsDisplay_1_0> pDisplay = pEnumerator->openDisplay();
1638         ASSERT_NE(pDisplay, nullptr);
1639 
1640         // Read a target resolution from the metadata
1641         Stream targetCfg =
1642             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1643         ASSERT_GT(targetCfg.width, 0);
1644         ASSERT_GT(targetCfg.height, 0);
1645 
1646         // Create two clients
1647         sp<IEvsCamera_1_1> pCam0 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1648         ASSERT_NE(pCam0, nullptr);
1649 
1650         // Store a camera handle for a clean-up
1651         activeCameras.push_back(pCam0);
1652 
1653         sp<IEvsCamera_1_1> pCam1 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1654         ASSERT_NE(pCam1, nullptr);
1655 
1656         // Store a camera handle for a clean-up
1657         activeCameras.push_back(pCam1);
1658 
1659         // Get the parameter list; this test will use the first command in both
1660         // lists.
1661         std::vector<CameraParam> cam0Cmds, cam1Cmds;
1662         pCam0->getParameterList([&cam0Cmds](hidl_vec<CameraParam> cmdList) {
1663                 cam0Cmds.reserve(cmdList.size());
1664                 for (auto &&cmd : cmdList) {
1665                     cam0Cmds.push_back(cmd);
1666                 }
1667             }
1668         );
1669 
1670         pCam1->getParameterList([&cam1Cmds](hidl_vec<CameraParam> cmdList) {
1671                 cam1Cmds.reserve(cmdList.size());
1672                 for (auto &&cmd : cmdList) {
1673                     cam1Cmds.push_back(cmd);
1674                 }
1675             }
1676         );
1677         if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
1678             // Cannot execute this test.
1679             return;
1680         }
1681 
1682         // Set up a frame receiver object which will fire up its own thread.
1683         sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
1684                                                           pDisplay,
1685                                                           FrameHandler::eAutoReturn);
1686         sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
1687                                                           nullptr,
1688                                                           FrameHandler::eAutoReturn);
1689 
1690         // Activate the display
1691         pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);
1692 
1693         // Start the camera's video stream
1694         ASSERT_TRUE(frameHandler0->startStream());
1695         ASSERT_TRUE(frameHandler1->startStream());
1696 
1697         // Ensure the stream starts
1698         frameHandler0->waitForFrameCount(1);
1699         frameHandler1->waitForFrameCount(1);
1700 
1701         // Client 1 becomes a primary client and programs a parameter.
1702         EvsResult result = EvsResult::OK;
1703         // Get a valid parameter value range
1704         int32_t minVal, maxVal, step;
1705         pCam1->getIntParameterRange(
1706             cam1Cmds[0],
1707             [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
1708                 minVal = val0;
1709                 maxVal = val1;
1710                 step   = val2;
1711             }
1712         );
1713 
1714         // Client1 becomes a primary client
1715         result = pCam1->setMaster();
1716         ASSERT_EQ(EvsResult::OK, result);
1717 
1718         std::vector<int32_t> values;
1719         EvsEventDesc aTargetEvent  = {};
1720         EvsEventDesc aNotification = {};
1721         bool listening = false;
1722         std::mutex eventLock;
1723         std::condition_variable eventCond;
1724         if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1725             std::thread listener = std::thread(
1726                 [&frameHandler0, &aNotification, &listening, &eventCond] {
1727                     listening = true;
1728                     eventCond.notify_all();
1729 
1730                     EvsEventDesc aTargetEvent;
1731                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1732                     aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
1733                     aTargetEvent.payload[1] = 0;
1734                     if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1735                         LOG(WARNING) << "A timer is expired before a target event is fired.";
1736                     }
1737                 }
1738             );
1739 
1740             // Wait until the listener starts.
1741             std::unique_lock<std::mutex> lock(eventLock);
1742             auto timer = std::chrono::system_clock::now();
1743             while (!listening) {
1744                 eventCond.wait_until(lock, timer + 1s);
1745             }
1746             lock.unlock();
1747 
1748             // Try to turn off auto-focus
1749             pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1750                                [&result, &values](auto status, auto effectiveValues) {
1751                                    result = status;
1752                                    if (status == EvsResult::OK) {
1753                                       for (auto &&v : effectiveValues) {
1754                                           values.push_back(v);
1755                                       }
1756                                    }
1757                                });
1758             ASSERT_EQ(EvsResult::OK, result);
1759             for (auto &&v : values) {
1760                 ASSERT_EQ(v, 0);
1761             }
1762 
1763             // Join a listener
1764             if (listener.joinable()) {
1765                 listener.join();
1766             }
1767 
1768             // Make sure AUTO_FOCUS is off.
1769             ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1770                       EvsEventType::PARAMETER_CHANGED);
1771         }
1772 
1773         // Try to program a parameter with a random value from [minVal, maxVal) after
1774         // rounding it down to the nearest step.
1775         int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
1776         val0 = val0 - (val0 % step);
1777 
1778         std::thread listener = std::thread(
1779             [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
1780                 listening = true;
1781                 eventCond.notify_all();
1782 
1783                 EvsEventDesc aTargetEvent;
1784                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1785                 aTargetEvent.payload[0] = static_cast<uint32_t>(cam1Cmds[0]);
1786                 aTargetEvent.payload[1] = val0;
1787                 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1788                     LOG(WARNING) << "A timer is expired before a target event is fired.";
1789                 }
1790             }
1791         );
1792 
1793         // Wait until the listener starts.
1794         listening = false;
1795         std::unique_lock<std::mutex> lock(eventLock);
1796         auto timer = std::chrono::system_clock::now();
1797         while (!listening) {
1798             eventCond.wait_until(lock, timer + 1s);
1799         }
1800         lock.unlock();
1801 
1802         values.clear();
1803         pCam1->setIntParameter(cam1Cmds[0], val0,
1804                             [&result, &values](auto status, auto effectiveValues) {
1805                                 result = status;
1806                                 if (status == EvsResult::OK) {
1807                                     for (auto &&v : effectiveValues) {
1808                                         values.push_back(v);
1809                                     }
1810                                 }
1811                             });
1812         ASSERT_EQ(EvsResult::OK, result);
1813         for (auto &&v : values) {
1814             ASSERT_EQ(val0, v);
1815         }
1816 
1817         // Join a listener
1818         if (listener.joinable()) {
1819             listener.join();
1820         }
1821 
1822         // Verify a change notification
1823         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1824                   EvsEventType::PARAMETER_CHANGED);
1825         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
1826                   cam1Cmds[0]);
1827         for (auto &&v : values) {
1828             ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
1829         }
1830 
1831         listener = std::thread(
1832             [&frameHandler1, &aNotification, &listening, &eventCond] {
1833                 listening = true;
1834                 eventCond.notify_all();
1835 
1836                 EvsEventDesc aTargetEvent;
1837                 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1838                 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
1839                     LOG(WARNING) << "A timer is expired before a target event is fired.";
1840                 }
1841             }
1842         );
1843 
1844         // Wait until the listener starts.
1845         listening = false;
1846         lock.lock();
1847         timer = std::chrono::system_clock::now();
1848         while (!listening) {
1849             eventCond.wait_until(lock, timer + 1s);
1850         }
1851         lock.unlock();
1852 
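        // forceMaster() succeeds here because client 0 presents a valid display handle,
        // which marks it as the higher-priority (display-owning) client; client 1 loses
        // the role it claimed earlier and is notified via MASTER_RELEASED, as observed
        // by the listener joined below.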
1853         // Client 0 steals a primary client role
1854         ASSERT_EQ(EvsResult::OK, pCam0->forceMaster(pDisplay));
1855 
1856         // Join a listener
1857         if (listener.joinable()) {
1858             listener.join();
1859         }
1860 
1861         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1862                   EvsEventType::MASTER_RELEASED);
1863 
1864         // Client 0 programs a parameter
1865         val0 = minVal + (std::rand() % (maxVal - minVal));
1866 
1867         // Rounding down
1868         val0 = val0 - (val0 % step);
1869 
1870         if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1871             std::thread listener = std::thread(
1872                 [&frameHandler1, &aNotification, &listening, &eventCond] {
1873                     listening = true;
1874                     eventCond.notify_all();
1875 
1876                     EvsEventDesc aTargetEvent;
1877                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1878                     aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
1879                     aTargetEvent.payload[1] = 0;
1880                     if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1881                         LOG(WARNING) << "A timer is expired before a target event is fired.";
1882                     }
1883                 }
1884             );
1885 
1886             // Wait until the listener starts.
1887             std::unique_lock<std::mutex> lock(eventLock);
1888             auto timer = std::chrono::system_clock::now();
1889             while (!listening) {
1890                 eventCond.wait_until(lock, timer + 1s);
1891             }
1892             lock.unlock();
1893 
1894             // Try to turn off auto-focus
1895             values.clear();
1896             pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1897                                [&result, &values](auto status, auto effectiveValues) {
1898                                    result = status;
1899                                    if (status == EvsResult::OK) {
1900                                       for (auto &&v : effectiveValues) {
1901                                           values.push_back(v);
1902                                       }
1903                                    }
1904                                });
1905             ASSERT_EQ(EvsResult::OK, result);
1906             for (auto &&v : values) {
1907                 ASSERT_EQ(v, 0);
1908             }
1909 
1910             // Join a listener
1911             if (listener.joinable()) {
1912                 listener.join();
1913             }
1914 
1915             // Make sure AUTO_FOCUS is off.
1916             ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1917                       EvsEventType::PARAMETER_CHANGED);
1918         }
1919 
1920         listener = std::thread(
1921             [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
1922                 listening = true;
1923                 eventCond.notify_all();
1924 
1925                 EvsEventDesc aTargetEvent;
1926                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1927                 aTargetEvent.payload[0] = static_cast<uint32_t>(cam0Cmds[0]);
1928                 aTargetEvent.payload[1] = val0;
1929                 if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1930                     LOG(WARNING) << "A timer is expired before a target event is fired.";
1931                 }
1932             }
1933         );
1934 
1935         // Wait until the listener starts.
1936         listening = false;
1937         timer = std::chrono::system_clock::now();
1938         lock.lock();
1939         while (!listening) {
1940             eventCond.wait_until(lock, timer + 1s);
1941         }
1942         lock.unlock();
1943 
1944         values.clear();
1945         pCam0->setIntParameter(cam0Cmds[0], val0,
1946                             [&result, &values](auto status, auto effectiveValues) {
1947                                 result = status;
1948                                 if (status == EvsResult::OK) {
1949                                     for (auto &&v : effectiveValues) {
1950                                         values.push_back(v);
1951                                     }
1952                                 }
1953                             });
1954         ASSERT_EQ(EvsResult::OK, result);
1955 
1956         // Join a listener
1957         if (listener.joinable()) {
1958             listener.join();
1959         }
1960         // Verify a change notification
1961         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1962                   EvsEventType::PARAMETER_CHANGED);
1963         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
1964                   cam0Cmds[0]);
1965         for (auto &&v : values) {
1966             ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
1967         }
1968 
1969         // Turn off the display (yes, before the stream stops -- it should be handled)
1970         pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
1971 
1972         // Shut down the streamer
1973         frameHandler0->shutdown();
1974         frameHandler1->shutdown();
1975 
1976         // Explicitly release the camera
1977         pEnumerator->closeCamera(pCam0);
1978         pEnumerator->closeCamera(pCam1);
1979         activeCameras.clear();
1980 
1981         // Explicitly release the display
1982         pEnumerator->closeDisplay(pDisplay);
1983     }
1984 }
1985 
1986 
1987 /*
1988  * CameraUseStreamConfigToDisplay:
1989  * End-to-end test of data flowing from the camera to the display.  Similar to the
1990  * CameraToDisplayRoundTrip test case, but this case retrieves available stream
1991  * configurations from EVS and uses one of them to start a video stream.
1992  */
1993 TEST_P(EvsHidlTest, CameraUseStreamConfigToDisplay) {
1994     LOG(INFO) << "Starting CameraUseStreamConfigToDisplay test";
1995 
1996     // Get the camera list
1997     loadCameraList();
1998 
1999     // Test each reported camera
2000     for (auto&& cam: cameraInfo) {
2001         // Request exclusive access to the EVS display
2002         sp<IEvsDisplay_1_0> pDisplay = pEnumerator->openDisplay();
2003         ASSERT_NE(pDisplay, nullptr);
2004 
2005         // choose a configuration that has a frame rate faster than minReqFps.
2006         Stream targetCfg = {};
2007         const int32_t minReqFps = 15;
2008         int32_t maxArea = 0;
2009         camera_metadata_entry_t streamCfgs;
2010         bool foundCfg = false;
2011         if (!find_camera_metadata_entry(
2012                  reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
2013                  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
2014                  &streamCfgs)) {
2015             // Stream configurations are found in metadata
2016             RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
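            // Each stream configuration occupies kStreamCfgSz consecutive int32 values and is
            // reinterpreted here as a RawStreamConfig (which, as noted near the top of this
            // file, carries an extra framerate field); 'offset' advances by kStreamCfgSz
            // elements while 'ptr' advances by one struct per iteration.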
2017             for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
2018                 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
2019                     if (ptr->width * ptr->height > maxArea &&
2020                         ptr->framerate >= minReqFps) {
2021                         targetCfg.width = ptr->width;
2022                         targetCfg.height = ptr->height;
2023                         targetCfg.format = static_cast<PixelFormat>(ptr->format);
2024 
2025                         maxArea = ptr->width * ptr->height;
2026                         foundCfg = true;
2027                     }
2028                 }
2029                 ++ptr;
2030             }
2031         }
2032 
2033         if (!foundCfg) {
2034             // Current EVS camera does not provide stream configurations in the
2035             // metadata.
2036             continue;
2037         }
2038 
2039         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2040         ASSERT_NE(pCam, nullptr);
2041 
2042         // Store a camera handle for a clean-up
2043         activeCameras.push_back(pCam);
2044 
2045         // Set up a frame receiver object which will fire up its own thread.
2046         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
2047                                                          pDisplay,
2048                                                          FrameHandler::eAutoReturn);
2049 
2050 
2051         // Activate the display
2052         pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);
2053 
2054         // Start the camera's video stream
2055         bool startResult = frameHandler->startStream();
2056         ASSERT_TRUE(startResult);
2057 
2058         // Wait a while to let the data flow
2059         static const int kSecondsToWait = 5;
2060         const int streamTimeMs = kSecondsToWait * kSecondsToMilliseconds -
2061                                  kMaxStreamStartMilliseconds;
2062         const unsigned minimumFramesExpected = streamTimeMs * kMinimumFramesPerSecond /
2063                                                kSecondsToMilliseconds;
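        // With kSecondsToWait = 5 this allows 5 * 1000 - 500 = 4500 ms of steady streaming,
        // so at the required 10 fps minimum we expect at least 4500 * 10 / 1000 = 45 frames.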
2064         sleep(kSecondsToWait);
2065         unsigned framesReceived = 0;
2066         unsigned framesDisplayed = 0;
2067         frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
2068         EXPECT_EQ(framesReceived, framesDisplayed);
2069         EXPECT_GE(framesDisplayed, minimumFramesExpected);
2070 
2071         // Turn off the display (yes, before the stream stops -- it should be handled)
2072         pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
2073 
2074         // Shut down the streamer
2075         frameHandler->shutdown();
2076 
2077         // Explicitly release the camera
2078         pEnumerator->closeCamera(pCam);
2079         activeCameras.clear();
2080 
2081         // Explicitly release the display
2082         pEnumerator->closeDisplay(pDisplay);
2083     }
2084 }
2085 
2086 
2087 /*
2088  * MultiCameraStreamUseConfig:
2089  * Verify that each client can start and stop video streams on the same
2090  * underlying camera with the same configuration.
2091  */
2092 TEST_P(EvsHidlTest, MultiCameraStreamUseConfig) {
2093     LOG(INFO) << "Starting MultiCameraStreamUseConfig test";
2094 
2095     if (mIsHwModule) {
2096         // This test is not for HW module implementation.
2097         return;
2098     }
2099 
2100     // Get the camera list
2101     loadCameraList();
2102 
2103     // Test each reported camera
2104     for (auto&& cam: cameraInfo) {
2105         // choose a configuration that has a frame rate faster than minReqFps.
2106         Stream targetCfg = {};
2107         const int32_t minReqFps = 15;
2108         int32_t maxArea = 0;
2109         camera_metadata_entry_t streamCfgs;
2110         bool foundCfg = false;
2111         if (!find_camera_metadata_entry(
2112                  reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
2113                  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
2114                  &streamCfgs)) {
2115             // Stream configurations are found in metadata
2116             RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
2117             for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
2118                 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
2119                     if (ptr->width * ptr->height > maxArea &&
2120                         ptr->framerate >= minReqFps) {
2121                         targetCfg.width = ptr->width;
2122                         targetCfg.height = ptr->height;
2123                         targetCfg.format = static_cast<PixelFormat>(ptr->format);
2124 
2125                         maxArea = ptr->width * ptr->height;
2126                         foundCfg = true;
2127                     }
2128                 }
2129                 ++ptr;
2130             }
2131         }
2132 
2133         if (!foundCfg) {
2134             LOG(INFO) << "Device " << cam.v1.cameraId
2135                       << " does not provide a list of supported stream configurations, skipped";
2136             continue;
2137         }
2138 
2139         // Create the first camera client with a selected stream configuration.
2140         sp<IEvsCamera_1_1> pCam0 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2141         ASSERT_NE(pCam0, nullptr);
2142 
2143         // Store a camera handle for a clean-up
2144         activeCameras.push_back(pCam0);
2145 
2146         // Try to create a second camera client with a different stream
2147         // configuration.
2148         int32_t id = targetCfg.id;
2149         targetCfg.id += 1;  // EVS manager sees only the stream id.
2150         sp<IEvsCamera_1_1> pCam1 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2151         ASSERT_EQ(pCam1, nullptr);
2152 
2153         // Try again with the same stream configuration.
2154         targetCfg.id = id;
2155         pCam1 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2156         ASSERT_NE(pCam1, nullptr);
2157 
2158         // Store a camera handle for a clean-up
2159         activeCameras.push_back(pCam1);
2160 
2161         // Set up per-client frame receiver objects which will fire up its own thread
2162         sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
2163                                                           nullptr,
2164                                                           FrameHandler::eAutoReturn);
2165         ASSERT_NE(frameHandler0, nullptr);
2166 
2167         sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
2168                                                           nullptr,
2169                                                           FrameHandler::eAutoReturn);
2170         ASSERT_NE(frameHandler1, nullptr);
2171 
2172         // Start the camera's video stream via both clients
2173         bool startResult = false;
2174         startResult = frameHandler0->startStream() &&
2175                       frameHandler1->startStream();
2176         ASSERT_TRUE(startResult);
2177 
2178         // Ensure the stream starts
2179         frameHandler0->waitForFrameCount(1);
2180         frameHandler1->waitForFrameCount(1);
2181 
2182         nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
2183 
2184         // Wait a bit, then ensure both clients get at least the required minimum number of frames
2185         sleep(5);
2186         nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
2187         unsigned framesReceived0 = 0, framesReceived1 = 0;
2188         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
2189         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
2190         framesReceived0 = framesReceived0 - 1;    // Back out the first frame we already waited for
2191         framesReceived1 = framesReceived1 - 1;    // Back out the first frame we already waited for
2192         nsecs_t runTime = end - firstFrame;
2193         float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
2194         float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
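        // runTime is in nanoseconds; multiplying by kNanoToSeconds (1e-9) converts it to
        // seconds, so the quotients above are frames per second for each client.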
2195         LOG(INFO) << "Measured camera rate "
2196                   << std::scientific << framesPerSecond0 << " fps and "
2197                   << framesPerSecond1 << " fps";
2198         EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
2199         EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
2200 
2201         // Shutdown one client
2202         frameHandler0->shutdown();
2203 
2204         // Read frame counters again
2205         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
2206         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
2207 
2208         // Wait a bit again
2209         sleep(5);
2210         unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
2211         frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
2212         frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
2213         EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
2214         EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
2215 
2216         // Shutdown another
2217         frameHandler1->shutdown();
2218 
2219         // Explicitly release the camera
2220         pEnumerator->closeCamera(pCam0);
2221         pEnumerator->closeCamera(pCam1);
2222         activeCameras.clear();
2223     }
2224 }
2225 
2226 
2227 /*
2228  * LogicalCameraMetadata:
2229  * Opens each logical camera reported by the enumerator and validates its metadata
2230  * by checking its capabilities and locating the identifiers of the supporting
2231  * physical camera devices.
2232  */
2233 TEST_P(EvsHidlTest, LogicalCameraMetadata) {
2234     LOG(INFO) << "Starting LogicalCameraMetadata test";
2235 
2236     // Get the camera list
2237     loadCameraList();
2238 
2239     // Check the metadata of each reported camera
2240     for (auto&& cam: cameraInfo) {
2241         bool isLogicalCam = false;
2242         auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
2243         if (isLogicalCam) {
2244             ASSERT_GE(devices.size(), 1) <<
2245                 "Logical camera device must have at least one physical camera device ID in its metadata.";
2246         }
2247     }
2248 }
2249 
2250 
2251 /*
2252  * CameraStreamExternalBuffering:
2253  * This is the same as CameraStreamBuffering except that the frame buffers are allocated by
2254  * the test client and then imported by the EVS framework.
2255  */
2256 TEST_P(EvsHidlTest, CameraStreamExternalBuffering) {
2257     LOG(INFO) << "Starting CameraStreamExternalBuffering test";
2258 
2259     // Arbitrary constant (should be > 1 and not too big)
2260     static const unsigned int kBuffersToHold = 3;
2261 
2262     // Get the camera list
2263     loadCameraList();
2264 
2265     // Acquire the graphics buffer allocator
2266     android::GraphicBufferAllocator& alloc(android::GraphicBufferAllocator::get());
2267     const auto usage =
2268             GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
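    // Allocation hints for the externally owned buffers: GRALLOC_USAGE_HW_TEXTURE lets the
    // GPU sample them, while the SW_READ/WRITE flags permit CPU access.  The buffers are
    // handed to the camera below via importExternalBuffers() rather than letting EVS
    // allocate its own pool.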
2269 
2270     // Test each reported camera
2271     for (auto&& cam : cameraInfo) {
2272         // Read a target resolution from the metadata
2273         Stream targetCfg =
2274             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
2275         ASSERT_GT(targetCfg.width, 0);
2276         ASSERT_GT(targetCfg.height, 0);
2277 
2278         // Allocate buffers to use
2279         hidl_vec<BufferDesc> buffers;
2280         buffers.resize(kBuffersToHold);
2281         for (auto i = 0; i < kBuffersToHold; ++i) {
2282             unsigned pixelsPerLine;
2283             buffer_handle_t memHandle = nullptr;
2284             android::status_t result =
2285                     alloc.allocate(targetCfg.width, targetCfg.height,
2286                                    (android::PixelFormat)targetCfg.format,
2287                                    /* layerCount = */ 1, usage, &memHandle, &pixelsPerLine,
2288                                    /* graphicBufferId = */ 0,
2289                                    /* requestorName = */ "CameraStreamExternalBufferingTest");
2290             if (result != android::NO_ERROR) {
2291                 LOG(ERROR) << __FUNCTION__ << " failed to allocate memory.";
2292                 // Release previous allocated buffers
2293                 for (auto j = 0; j < i; j++) {
2294                     alloc.free(buffers[j].buffer.nativeHandle);
2295                 }
2296                 return;
2297             } else {
2298                 BufferDesc buf;
2299                 AHardwareBuffer_Desc* pDesc =
2300                         reinterpret_cast<AHardwareBuffer_Desc*>(&buf.buffer.description);
2301                 pDesc->width = targetCfg.width;
2302                 pDesc->height = targetCfg.height;
2303                 pDesc->layers = 1;
2304                 pDesc->format = static_cast<uint32_t>(targetCfg.format);
2305                 pDesc->usage = usage;
2306                 pDesc->stride = pixelsPerLine;
2307                 buf.buffer.nativeHandle = memHandle;
2308                 buf.bufferId = i;  // Unique number to identify this buffer
2309                 buffers[i] = buf;
2310             }
2311         }
2312 
2313         bool isLogicalCam = false;
2314         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
2315 
2316         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2317         ASSERT_NE(pCam, nullptr);
2318 
2319         // Store a camera handle for a clean-up
2320         activeCameras.push_back(pCam);
2321 
2322         // Request to import buffers
2323         EvsResult result = EvsResult::OK;
2324         int delta = 0;
2325         pCam->importExternalBuffers(buffers,
2326                                     [&] (auto _result, auto _delta) {
2327                                         result = _result;
2328                                         delta = _delta;
2329                                     });
2330         if (isLogicalCam) {
2331             EXPECT_EQ(result, EvsResult::UNDERLYING_SERVICE_ERROR);
2332             continue;
2333         }
2334 
2335         EXPECT_EQ(result, EvsResult::OK);
2336         EXPECT_GE(delta, kBuffersToHold);
2337 
2338         // Set up a frame receiver object which will fire up its own thread.
2339         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
2340                                                          nullptr,
2341                                                          FrameHandler::eNoAutoReturn);
2342 
2343         // Start the camera's video stream
2344         bool startResult = frameHandler->startStream();
2345         ASSERT_TRUE(startResult);
2346 
2347         // Check that the video stream stalls once we've received at least the number of buffers
2348         // we provided, since we told the frameHandler not to return them.
2349         sleep(1);   // 1 second should be enough for at least 5 frames to be delivered worst case
2350         unsigned framesReceived = 0;
2351         frameHandler->getFramesCounters(&framesReceived, nullptr);
2352         ASSERT_LE(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
2353 
2354 
2355         // Give back one buffer
2356         bool didReturnBuffer = frameHandler->returnHeldBuffer();
2357         EXPECT_TRUE(didReturnBuffer);
2358 
2359         // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
2360         // filled since we require 10fps minimum -- but give a 10% allowance just in case.
2361         unsigned framesReceivedAfter = 0;
2362         usleep(110 * kMillisecondsToMicroseconds);
2363         frameHandler->getFramesCounters(&framesReceivedAfter, nullptr);
2364         EXPECT_EQ(framesReceived + 1, framesReceivedAfter) << "Stream should've resumed";
2365 
2366         // Even when the camera pointer goes out of scope, the FrameHandler object will
2367         // keep the stream alive unless we tell it to shutdown.
2368         // Also note that the FrameHandle and the Camera have a mutual circular reference, so
2369         // we have to break that cycle in order for either of them to get cleaned up.
2370         frameHandler->shutdown();
2371 
2372         // Explicitly release the camera
2373         pEnumerator->closeCamera(pCam);
2374         activeCameras.clear();
2375         // Release buffers
2376         for (auto& b : buffers) {
2377             alloc.free(b.buffer.nativeHandle);
2378         }
2379         buffers.resize(0);
2380     }
2381 }
2382 
2383 
2384 /*
2385  * UltrasonicsArrayOpenClean:
2386  * Opens each ultrasonics array reported by the enumerator and then explicitly closes it via a
2387  * call to closeUltrasonicsArray. Then repeats the test to ensure all ultrasonics arrays
2388  * can be reopened.
2389  */
2390 TEST_P(EvsHidlTest, UltrasonicsArrayOpenClean) {
2391     LOG(INFO) << "Starting UltrasonicsArrayOpenClean test";
2392 
2393     // Get the ultrasonics array list
2394     loadUltrasonicsArrayList();
2395 
2396     // Open and close each ultrasonics array twice
2397     for (auto&& ultraInfo : ultrasonicsArraysInfo) {
2398         for (int pass = 0; pass < 2; pass++) {
2399             sp<IEvsUltrasonicsArray> pUltrasonicsArray =
2400                     pEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId);
2401             ASSERT_NE(pUltrasonicsArray, nullptr);
2402 
2403             // Verify that this ultrasonics array self-identifies correctly
2404             pUltrasonicsArray->getUltrasonicArrayInfo([&ultraInfo](UltrasonicsArrayDesc desc) {
2405                 LOG(DEBUG) << "Found ultrasonics array " << ultraInfo.ultrasonicsArrayId;
2406                 EXPECT_EQ(ultraInfo.ultrasonicsArrayId, desc.ultrasonicsArrayId);
2407             });
2408 
2409             // Explicitly close the ultrasonics array so resources are released right away
2410             pEnumerator->closeUltrasonicsArray(pUltrasonicsArray);
2411         }
2412     }
2413 }
2414 
2415 
2416 // Starts a stream and verifies all data received is valid.
2417 TEST_P(EvsHidlTest, UltrasonicsVerifyStreamData) {
2418     LOG(INFO) << "Starting UltrasonicsVerifyStreamData";
2419 
2420     // Get the ultrasonics array list
2421     loadUltrasonicsArrayList();
2422 
2423     // For each ultrasonics array.
2424     for (auto&& ultraInfo : ultrasonicsArraysInfo) {
2425         LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2426 
2427         sp<IEvsUltrasonicsArray> pUltrasonicsArray =
2428                 pEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId);
2429         ASSERT_NE(pUltrasonicsArray, nullptr);
2430 
2431         sp<FrameHandlerUltrasonics> frameHandler = new FrameHandlerUltrasonics(pUltrasonicsArray);
2432 
2433         // Start stream.
2434         EvsResult result = pUltrasonicsArray->startStream(frameHandler);
2435         ASSERT_EQ(result, EvsResult::OK);
2436 
2437         // Wait 5 seconds to receive frames.
2438         sleep(5);
2439 
2440         // Stop stream.
2441         pUltrasonicsArray->stopStream();
2442 
2443         EXPECT_GT(frameHandler->getReceiveFramesCount(), 0);
2444         EXPECT_TRUE(frameHandler->areAllFramesValid());
2445 
2446         // Explicitly close the ultrasonics array so resources are released right away
2447         pEnumerator->closeUltrasonicsArray(pUltrasonicsArray);
2448     }
2449 }
2450 
2451 
2452 // Sets frames in flight before and after the start of the stream and verifies success.
2453 TEST_P(EvsHidlTest, UltrasonicsSetFramesInFlight) {
2454     LOG(INFO) << "Starting UltrasonicsSetFramesInFlight";
2455 
2456     // Get the ultrasonics array list
2457     loadUltrasonicsArrayList();
2458 
2459     // For each ultrasonics array.
2460     for (auto&& ultraInfo : ultrasonicsArraysInfo) {
2461         LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2462 
2463         sp<IEvsUltrasonicsArray> pUltrasonicsArray =
2464                 pEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId);
2465         ASSERT_NE(pUltrasonicsArray, nullptr);
2466 
2467         EvsResult result = pUltrasonicsArray->setMaxFramesInFlight(10);
2468         EXPECT_EQ(result, EvsResult::OK);
2469 
2470         sp<FrameHandlerUltrasonics> frameHandler = new FrameHandlerUltrasonics(pUltrasonicsArray);
2471 
2472         // Start stream.
2473         result = pUltrasonicsArray->startStream(frameHandler);
2474         ASSERT_EQ(result, EvsResult::OK);
2475 
2476         result = pUltrasonicsArray->setMaxFramesInFlight(5);
2477         EXPECT_EQ(result, EvsResult::OK);
2478 
2479         // Stop stream.
2480         pUltrasonicsArray->stopStream();
2481 
2482         // Explicitly close the ultrasonics array so resources are released right away
2483         pEnumerator->closeUltrasonicsArray(pUltrasonicsArray);
2484     }
2485 }
2486 
2487 GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(EvsHidlTest);
2488 INSTANTIATE_TEST_SUITE_P(
2489     PerInstance,
2490     EvsHidlTest,
2491     testing::ValuesIn(android::hardware::getAllHalInstanceNames(IEvsEnumerator::descriptor)),
2492     android::hardware::PrintInstanceNameToString);
2493 
2494