/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "FrameHandler.h"
#include "FrameHandlerUltrasonics.h"

#include <aidl/Gtest.h>
#include <aidl/Vintf.h>
#include <aidl/android/hardware/automotive/evs/BnEvsEnumeratorStatusCallback.h>
#include <aidl/android/hardware/automotive/evs/BufferDesc.h>
#include <aidl/android/hardware/automotive/evs/CameraDesc.h>
#include <aidl/android/hardware/automotive/evs/CameraParam.h>
#include <aidl/android/hardware/automotive/evs/DeviceStatus.h>
#include <aidl/android/hardware/automotive/evs/DisplayDesc.h>
#include <aidl/android/hardware/automotive/evs/DisplayState.h>
#include <aidl/android/hardware/automotive/evs/EvsEventDesc.h>
#include <aidl/android/hardware/automotive/evs/EvsEventType.h>
#include <aidl/android/hardware/automotive/evs/EvsResult.h>
#include <aidl/android/hardware/automotive/evs/IEvsCamera.h>
#include <aidl/android/hardware/automotive/evs/IEvsDisplay.h>
#include <aidl/android/hardware/automotive/evs/IEvsEnumerator.h>
#include <aidl/android/hardware/automotive/evs/IEvsEnumeratorStatusCallback.h>
#include <aidl/android/hardware/automotive/evs/IEvsUltrasonicsArray.h>
#include <aidl/android/hardware/automotive/evs/ParameterRange.h>
#include <aidl/android/hardware/automotive/evs/Stream.h>
#include <aidl/android/hardware/automotive/evs/UltrasonicsArrayDesc.h>
#include <aidl/android/hardware/common/NativeHandle.h>
#include <aidl/android/hardware/graphics/common/HardwareBufferDescription.h>
#include <aidl/android/hardware/graphics/common/PixelFormat.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <android-base/logging.h>
#include <android/binder_ibinder.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <android/binder_status.h>
#include <system/camera_metadata.h>
#include <ui/GraphicBuffer.h>
#include <ui/GraphicBufferAllocator.h>
#include <utils/Timers.h>

#include <chrono>
#include <deque>
#include <thread>
#include <unordered_set>

namespace {

// These values are called out in the EVS design doc (as of Mar 8, 2017)
constexpr int kMaxStreamStartMilliseconds = 500;
constexpr int kMinimumFramesPerSecond = 10;
constexpr int kSecondsToMilliseconds = 1000;
constexpr int kMillisecondsToMicroseconds = 1000;
constexpr float kNanoToMilliseconds = 0.000001f;
constexpr float kNanoToSeconds = 0.000000001f;

/*
 * Please note that this is different from what is defined in
 * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
 * field to store a framerate.
 */
typedef struct {
    int32_t id;
    int32_t width;
    int32_t height;
    int32_t format;
    int32_t direction;
    int32_t framerate;
} RawStreamConfig;
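// Number of int32_t words occupied by each raw stream configuration entry above.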
constexpr size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);

using ::aidl::android::hardware::automotive::evs::BnEvsEnumeratorStatusCallback;
using ::aidl::android::hardware::automotive::evs::BufferDesc;
using ::aidl::android::hardware::automotive::evs::CameraDesc;
using ::aidl::android::hardware::automotive::evs::CameraParam;
using ::aidl::android::hardware::automotive::evs::DeviceStatus;
using ::aidl::android::hardware::automotive::evs::DisplayDesc;
using ::aidl::android::hardware::automotive::evs::DisplayState;
using ::aidl::android::hardware::automotive::evs::EvsEventDesc;
using ::aidl::android::hardware::automotive::evs::EvsEventType;
using ::aidl::android::hardware::automotive::evs::EvsResult;
using ::aidl::android::hardware::automotive::evs::IEvsCamera;
using ::aidl::android::hardware::automotive::evs::IEvsDisplay;
using ::aidl::android::hardware::automotive::evs::IEvsEnumerator;
using ::aidl::android::hardware::automotive::evs::IEvsEnumeratorStatusCallback;
using ::aidl::android::hardware::automotive::evs::IEvsUltrasonicsArray;
using ::aidl::android::hardware::automotive::evs::ParameterRange;
using ::aidl::android::hardware::automotive::evs::Stream;
using ::aidl::android::hardware::automotive::evs::UltrasonicsArrayDesc;
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::aidl::android::hardware::graphics::common::HardwareBufferDescription;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using std::chrono_literals::operator""s;

}  // namespace

// The main test class for EVS
class EvsAidlTest : public ::testing::TestWithParam<std::string> {
  public:
    virtual void SetUp() override {
        // Make sure we can connect to the enumerator
        std::string service_name = GetParam();
        AIBinder* binder = AServiceManager_waitForService(service_name.data());
        ASSERT_NE(binder, nullptr);
        mEnumerator = IEvsEnumerator::fromBinder(::ndk::SpAIBinder(binder));
        LOG(INFO) << "Test target service: " << service_name;

        ASSERT_TRUE(mEnumerator->isHardware(&mIsHwModule).isOk());
    }

    virtual void TearDown() override {
        // Attempt to close any active camera
        for (auto&& cam : mActiveCameras) {
            if (cam != nullptr) {
                mEnumerator->closeCamera(cam);
            }
        }
        mActiveCameras.clear();
    }

  protected:
    void loadCameraList() {
        // SetUp() must run first!
        ASSERT_NE(mEnumerator, nullptr);

        // Get the camera list
        ASSERT_TRUE(mEnumerator->getCameraList(&mCameraInfo).isOk())
                << "Failed to get a list of available cameras";
        LOG(INFO) << "We have " << mCameraInfo.size() << " cameras.";
    }

    void loadUltrasonicsArrayList() {
        // SetUp() must run first!
        ASSERT_NE(mEnumerator, nullptr);

        // Get the ultrasonics array list
        auto result = mEnumerator->getUltrasonicsArrayList(&mUltrasonicsArraysInfo);
        ASSERT_TRUE(result.isOk() ||
                // TODO(b/149874793): Remove below conditions when
                // getUltrasonicsArrayList() is implemented.
                (!result.isOk() && result.getServiceSpecificError() ==
                        static_cast<int32_t>(EvsResult::NOT_IMPLEMENTED)))
                << "Failed to get a list of available ultrasonics arrays";
        LOG(INFO) << "We have " << mUltrasonicsArraysInfo.size() << " ultrasonics arrays.";
    }

    bool isLogicalCamera(const camera_metadata_t* metadata) {
        if (metadata == nullptr) {
            // A logical camera device must have valid camera metadata.
            return false;
        }

        // Look for the LOGICAL_MULTI_CAMERA capability in the metadata.
        camera_metadata_ro_entry_t entry;
        int rc = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
                                               &entry);
        if (rc != 0) {
            // No capabilities are found.
            return false;
        }

        for (size_t i = 0; i < entry.count; ++i) {
            uint8_t cap = entry.data.u8[i];
            if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
                return true;
            }
        }

        return false;
    }

    std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id, bool& flag) {
        std::unordered_set<std::string> physicalCameras;
        const auto it = std::find_if(mCameraInfo.begin(), mCameraInfo.end(),
                                     [&id](const CameraDesc& desc) { return id == desc.id; });
        if (it == mCameraInfo.end()) {
            // An unknown camera was requested; return an empty list.
            return physicalCameras;
        }

        const camera_metadata_t* metadata = reinterpret_cast<camera_metadata_t*>(&it->metadata[0]);
        flag = isLogicalCamera(metadata);
        if (!flag) {
            // EVS assumes that a device without logical multi-camera metadata
            // is a physical device.
            LOG(INFO) << id << " is not a logical camera device.";
            physicalCameras.insert(id);
            return physicalCameras;
        }

        // Look for physical camera identifiers
        camera_metadata_ro_entry entry;
        int rc = find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
                                               &entry);
        if (rc != 0) {
            LOG(ERROR) << "No physical camera IDs were found for a logical camera device";
        }

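        // Physical camera IDs are stored as a single blob of null-terminated strings;
        // split the entry on '\0' to recover the individual identifiers.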
        const uint8_t* ids = entry.data.u8;
        size_t start = 0;
        for (size_t i = 0; i < entry.count; ++i) {
            if (ids[i] == '\0') {
                if (start != i) {
                    std::string id(reinterpret_cast<const char*>(ids + start));
                    physicalCameras.insert(id);
                }
                start = i + 1;
            }
        }

        LOG(INFO) << id << " consists of " << physicalCameras.size() << " physical camera devices";
        return physicalCameras;
    }

    Stream getFirstStreamConfiguration(camera_metadata_t* metadata) {
        Stream targetCfg = {};
        camera_metadata_entry_t streamCfgs;
        if (!find_camera_metadata_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                                        &streamCfgs)) {
            // A zero return value means stream configurations were found in the metadata
            RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
            for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
                if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
                    targetCfg.width = ptr->width;
                    targetCfg.height = ptr->height;
                    targetCfg.format = static_cast<PixelFormat>(ptr->format);
                    break;
                }
                ++ptr;
            }
        }

        return targetCfg;
    }

    class DeviceStatusCallback : public BnEvsEnumeratorStatusCallback {
        ndk::ScopedAStatus deviceStatusChanged(const std::vector<DeviceStatus>&) override {
            // This empty implementation always returns ok().
            return ndk::ScopedAStatus::ok();
        }
    };

    // Every test needs access to the service
    std::shared_ptr<IEvsEnumerator> mEnumerator;
    // Empty unless/until loadCameraList() is called
    std::vector<CameraDesc> mCameraInfo;
    // Boolean that tells whether the module under test is a HW module
    // implementation or not
    bool mIsHwModule;
    // A list of active camera handles that need to be cleaned up
    std::deque<std::shared_ptr<IEvsCamera>> mActiveCameras;
    // Empty unless/until loadUltrasonicsArrayList() is called
    std::vector<UltrasonicsArrayDesc> mUltrasonicsArraysInfo;
    // A list of active ultrasonics array handles that are to be cleaned up
    std::deque<std::weak_ptr<IEvsUltrasonicsArray>> mActiveUltrasonicsArrays;
};

// Test cases, their implementations, and corresponding requirements are
// documented at go/aae-evs-public-api-test.

/*
 * CameraOpenClean:
 * Opens each camera reported by the enumerator and then explicitly closes it via a
 * call to closeCamera.  Then repeats the test to ensure all cameras can be reopened.
 */
TEST_P(EvsAidlTest, CameraOpenClean) {
    LOG(INFO) << "Starting CameraOpenClean test";

    // Get the camera list
    loadCameraList();

    // Open and close each camera twice
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        for (int pass = 0; pass < 2; pass++) {
            std::shared_ptr<IEvsCamera> pCam;
            ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
            ASSERT_NE(pCam, nullptr);

            CameraDesc cameraInfo;
            for (auto&& devName : devices) {
                ASSERT_TRUE(pCam->getPhysicalCameraInfo(devName, &cameraInfo).isOk());
                EXPECT_EQ(devName, cameraInfo.id);
            }

            // Store a camera handle for clean-up
            mActiveCameras.push_back(pCam);

            // Verify that this camera self-identifies correctly
            ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
            EXPECT_EQ(cam.id, cameraInfo.id);

            // Verify methods for extended info
            const auto id = 0xFFFFFFFF;  // meaningless id
            std::vector<uint8_t> values;
            auto status = pCam->setExtendedInfo(id, values);
            if (isLogicalCam) {
                EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                      static_cast<int>(EvsResult::NOT_SUPPORTED));
            } else {
                EXPECT_TRUE(status.isOk());
            }

            status = pCam->getExtendedInfo(id, &values);
            if (isLogicalCam) {
                EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                      static_cast<int>(EvsResult::NOT_SUPPORTED));
            } else {
                EXPECT_TRUE(status.isOk());
            }

            // Explicitly close the camera so resources are released right away
            ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
            mActiveCameras.clear();
        }
    }
}

/*
 * CameraOpenAggressive:
 * Opens each camera reported by the enumerator twice in a row without an intervening closeCamera
 * call.  This ensures that the intended "aggressive open" behavior works.  This is necessary for
 * the system to be tolerant of shutdown/restart race conditions.
 */
TEST_P(EvsAidlTest, CameraOpenAggressive) {
    LOG(INFO) << "Starting CameraOpenAggressive test";

    // Get the camera list
    loadCameraList();

    // Open each camera twice in a row without closing it in between
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device, " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        mActiveCameras.clear();
        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for clean-up
        mActiveCameras.push_back(pCam);

        // Verify that this camera self-identifies correctly
        CameraDesc cameraInfo;
        ASSERT_TRUE(pCam->getCameraInfo(&cameraInfo).isOk());
        EXPECT_EQ(cam.id, cameraInfo.id);

        std::shared_ptr<IEvsCamera> pCam2;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam2).isOk());
        EXPECT_NE(pCam2, nullptr);
        EXPECT_NE(pCam, pCam2);

        // Store a camera handle for clean-up
        mActiveCameras.push_back(pCam2);

        auto status = pCam->setMaxFramesInFlight(2);
        if (mIsHwModule) {
            // Verify that the HW module rejects calls made via the superseded camera handle.
            EXPECT_TRUE(!status.isOk() && status.getServiceSpecificError() ==
                                                  static_cast<int>(EvsResult::OWNERSHIP_LOST));
        } else {
            // The default implementation supports multiple clients.
            EXPECT_TRUE(status.isOk());
        }

        // Close the superseded camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.pop_front();

        // Verify that the second camera instance self-identifies correctly
        ASSERT_TRUE(pCam2->getCameraInfo(&cameraInfo).isOk());
        EXPECT_EQ(cam.id, cameraInfo.id);

        // Close the second camera instance
        ASSERT_TRUE(mEnumerator->closeCamera(pCam2).isOk());
        mActiveCameras.pop_front();
    }

    // Sleep here to ensure the destructor cleanup has time to run so we don't break follow-on tests
    sleep(1);  // I hate that this is an arbitrary time to wait.  :(  b/36122635
}

/*
 * CameraStreamPerformance:
 * Measure and qualify the stream start-up time and streaming frame rate of each reported camera
 */
TEST_P(EvsAidlTest, CameraStreamPerformance) {
    LOG(INFO) << "Starting CameraStreamPerformance test";

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for clean-up
        mActiveCameras.push_back(pCam);

        // Set up a frame receiver object which will fire up its own thread
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Start the camera's video stream
        nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
        ASSERT_TRUE(frameHandler->startStream());

        // Ensure the first frame arrived within the expected time
        frameHandler->waitForFrameCount(1);
        nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
        nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;

        // Extra delays are expected when we attempt to start a video stream on
        // a logical camera device.  The delay is expected to be at most
        // kMaxStreamStartMilliseconds multiplied by the number of physical
        // camera devices.
        EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
                  kMaxStreamStartMilliseconds * devices.size());
        printf("%s: Measured time to first frame %0.2f ms\n", cam.id.data(),
               timeToFirstFrame * kNanoToMilliseconds);
        LOG(INFO) << cam.id << ": Measured time to first frame " << std::scientific
                  << timeToFirstFrame * kNanoToMilliseconds << " ms.";

        // Check aspect ratio
        unsigned width = 0, height = 0;
        frameHandler->getFrameDimension(&width, &height);
        EXPECT_GE(width, height);

        // Wait a bit, then ensure we get at least the required minimum number of frames
        sleep(5);
        nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);

        // Even when the camera pointer goes out of scope, the FrameHandler object will
        // keep the stream alive unless we tell it to shut down.
        // Also note that the FrameHandler and the camera hold mutual references, so
        // we have to break that cycle in order for either of them to get cleaned up.
        frameHandler->shutdown();

        unsigned framesReceived = 0;
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        framesReceived = framesReceived - 1;  // Back out the first frame we already waited for
        nsecs_t runTime = end - firstFrame;
        float framesPerSecond = framesReceived / (runTime * kNanoToSeconds);
        printf("Measured camera rate %3.2f fps\n", framesPerSecond);
        LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond << " fps.";
        EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();
    }
}

/*
 * CameraStreamBuffering:
 * Ensure the camera implementation behaves properly when the client holds onto buffers for more
 * than one frame time.  The camera must cleanly skip frames until the client is ready again.
 */
TEST_P(EvsAidlTest, CameraStreamBuffering) {
    LOG(INFO) << "Starting CameraStreamBuffering test";

    // Arbitrary constant (should be > 1 and not too big)
    static const unsigned int kBuffersToHold = 6;

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for clean-up
        mActiveCameras.push_back(pCam);

        // Ask for a very large number of buffers in flight to ensure it errors correctly
        auto badResult = pCam->setMaxFramesInFlight(std::numeric_limits<int32_t>::max());
        EXPECT_TRUE(!badResult.isOk() && badResult.getServiceSpecificError() ==
                                                 static_cast<int>(EvsResult::BUFFER_NOT_AVAILABLE));

        // Now ask for exactly kBuffersToHold buffers in flight as we'll test behavior in that case
        ASSERT_TRUE(pCam->setMaxFramesInFlight(kBuffersToHold).isOk());

        // Set up a frame receiver object which will fire up its own thread.
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler->startStream());

        // Check that the video stream stalls once we've gotten exactly the number of buffers
        // we requested since we told the frameHandler not to return them.
        sleep(1);  // 1 second should be enough for at least 5 frames to be delivered worst case
        unsigned framesReceived = 0;
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";

        // Give back one buffer
        ASSERT_TRUE(frameHandler->returnHeldBuffer());

        // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
        // filled since we require 10fps minimum -- but give a 10% allowance just in case.
        usleep(110 * kMillisecondsToMicroseconds);
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        EXPECT_EQ(kBuffersToHold + 1, framesReceived) << "Stream should've resumed";

        // Even when the camera pointer goes out of scope, the FrameHandler object will
        // keep the stream alive unless we tell it to shut down.
        // Also note that the FrameHandler and the camera hold mutual references, so
        // we have to break that cycle in order for either of them to get cleaned up.
        frameHandler->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();
    }
}

/*
 * CameraToDisplayRoundTrip:
 * End-to-end test of data flowing from the camera to the display.  Each delivered frame of camera
 * imagery is simply copied to the display buffer and presented on screen.  This is the one test
 * which a human could observe to see the operation of the system on the physical display.
 */
TEST_P(EvsAidlTest, CameraToDisplayRoundTrip) {
    LOG(INFO) << "Starting CameraToDisplayRoundTrip test";

    // Get the camera list
    loadCameraList();

    // Request available display IDs
    uint8_t targetDisplayId = 0;
    std::vector<uint8_t> displayIds;
    ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
    EXPECT_GT(displayIds.size(), 0);
    targetDisplayId = displayIds[0];

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Request exclusive access to the first EVS display
        std::shared_ptr<IEvsDisplay> pDisplay;
        ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
        EXPECT_NE(pDisplay, nullptr);
        LOG(INFO) << "Display " << static_cast<int>(targetDisplayId) << " is in use.";

        // Get the display descriptor
        DisplayDesc displayDesc;
        ASSERT_TRUE(pDisplay->getDisplayInfo(&displayDesc).isOk());
        LOG(INFO) << "    Resolution: " << displayDesc.width << "x" << displayDesc.height;
        ASSERT_GT(displayDesc.width, 0);
        ASSERT_GT(displayDesc.height, 0);

        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.id << " for HW target.";
            ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for clean-up
        mActiveCameras.push_back(pCam);

        // Set up a frame receiver object which will fire up its own thread.
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, pDisplay, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Activate the display
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler->startStream());

        // Wait a while to let the data flow
        static const int kSecondsToWait = 5;
        const int streamTimeMs =
                kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
        const unsigned minimumFramesExpected =
                streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
        sleep(kSecondsToWait);
        unsigned framesReceived = 0;
        unsigned framesDisplayed = 0;
        frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
        EXPECT_EQ(framesReceived, framesDisplayed);
        EXPECT_GE(framesDisplayed, minimumFramesExpected);

        // Turn off the display (yes, before the stream stops -- it should be handled)
        ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());

        // Shut down the streamer
        frameHandler->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();

        // Explicitly release the display
        ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
    }
}

/*
 * MultiCameraStream:
 * Verify that each client can start and stop video streams on the same
 * underlying camera.
 */
TEST_P(EvsAidlTest, MultiCameraStream) {
    LOG(INFO) << "Starting MultiCameraStream test";

    if (mIsHwModule) {
        // This test is not for HW module implementations.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        std::shared_ptr<IEvsCamera> pCam0;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
        EXPECT_NE(pCam0, nullptr);

        // Store a camera handle for clean-up
        mActiveCameras.push_back(pCam0);

        std::shared_ptr<IEvsCamera> pCam1;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
        EXPECT_NE(pCam1, nullptr);

        // Store a camera handle for clean-up
        mActiveCameras.push_back(pCam1);

        // Set up per-client frame receiver objects which will fire up their own threads
        std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
                pCam0, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
                pCam1, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler0, nullptr);
        EXPECT_NE(frameHandler1, nullptr);

        // Start the camera's video stream via each client
        ASSERT_TRUE(frameHandler0->startStream());
        ASSERT_TRUE(frameHandler1->startStream());

        // Ensure the stream starts
        frameHandler0->waitForFrameCount(1);
        frameHandler1->waitForFrameCount(1);

        nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);

        // Wait a bit, then ensure both clients get at least the required minimum number of frames
        sleep(5);
        nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
        unsigned framesReceived0 = 0, framesReceived1 = 0;
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);
        framesReceived0 = framesReceived0 - 1;  // Back out the first frame we already waited for
        framesReceived1 = framesReceived1 - 1;  // Back out the first frame we already waited for
        nsecs_t runTime = end - firstFrame;
        float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
        float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
        LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
                  << framesPerSecond1 << " fps";
        EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
        EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);

        // Shut down one client
        frameHandler0->shutdown();

        // Read frame counters again
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);

        // Wait a bit again
        sleep(5);
        unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
        frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
        frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
        EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
        EXPECT_LT(framesReceived1, framesReceivedAfterStop1);

        // Shut down the other client
        frameHandler1->shutdown();

        // Explicitly release the cameras
        ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
        mActiveCameras.clear();

        // TODO(b/145459970, b/145457727): below sleep() is added to ensure the
        // destruction of active camera objects; this may be related with two
        // issues.
        sleep(1);
    }
}

/*
 * CameraParameter:
 * Verify that a client can adjust a camera parameter.
 */
TEST_P(EvsAidlTest, CameraParameter) {
    LOG(INFO) << "Starting CameraParameter test";

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create a camera client
        std::shared_ptr<IEvsCamera> pCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
        EXPECT_NE(pCam, nullptr);

        // Store a camera handle for clean-up
        mActiveCameras.push_back(pCam);

        // Get the parameter list
        std::vector<CameraParam> cmds;
        ASSERT_TRUE(pCam->getParameterList(&cmds).isOk());
        if (cmds.size() < 1) {
            continue;
        }

        // Set up a frame receiver object which will fire up its own thread
        std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
                pCam, cam, nullptr, FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandler, nullptr);

        // Start the camera's video stream
        ASSERT_TRUE(frameHandler->startStream());

        // Ensure the stream starts
        frameHandler->waitForFrameCount(1);

        // Make the current client the primary client
        ASSERT_TRUE(pCam->setPrimaryClient().isOk());
        for (auto& cmd : cmds) {
            // Get a valid parameter value range
            ParameterRange range;
            ASSERT_TRUE(pCam->getIntParameterRange(cmd, &range).isOk());

            std::vector<int32_t> values;
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus
                ASSERT_TRUE(pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
                for (auto&& v : values) {
                    EXPECT_EQ(v, 0);
                }
            }

            // Try to program a parameter with a random value in [range.min, range.max]
            int32_t val0 = range.min + (std::rand() % (range.max - range.min));

            // Round down to a multiple of the step size
            val0 = val0 - (val0 % range.step);
            values.clear();
            ASSERT_TRUE(pCam->setIntParameter(cmd, val0, &values).isOk());

            values.clear();
            ASSERT_TRUE(pCam->getIntParameter(cmd, &values).isOk());
            for (auto&& v : values) {
                EXPECT_EQ(val0, v) << "Values do not match.";
            }
        }
        ASSERT_TRUE(pCam->unsetPrimaryClient().isOk());

        // Shut down
        frameHandler->shutdown();

        // Explicitly release the camera
        ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
        mActiveCameras.clear();
    }
}

/*
 * CameraPrimaryClientRelease:
 * Verify that the non-primary client gets notified when the primary client either
 * terminates or releases its role.
 */
TEST_P(EvsAidlTest, CameraPrimaryClientRelease) {
    LOG(INFO) << "Starting CameraPrimaryClientRelease test";

    if (mIsHwModule) {
        // This test is not for HW module implementations.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam : mCameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.id, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.id;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg = getFirstStreamConfiguration(
                reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        std::shared_ptr<IEvsCamera> pPrimaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
        EXPECT_NE(pPrimaryCam, nullptr);

        // Store a camera handle for clean-up
        mActiveCameras.push_back(pPrimaryCam);

        std::shared_ptr<IEvsCamera> pSecondaryCam;
        ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
        EXPECT_NE(pSecondaryCam, nullptr);

        // Store a camera handle for clean-up
        mActiveCameras.push_back(pSecondaryCam);

        // Set up per-client frame receiver objects which will fire up their own threads
        std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
                pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
        std::shared_ptr<FrameHandler> frameHandlerSecondary =
                ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
                                                       FrameHandler::eAutoReturn);
        EXPECT_NE(frameHandlerPrimary, nullptr);
        EXPECT_NE(frameHandlerSecondary, nullptr);

        // Set one client as the primary client
        ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());

        // Try to set another client as the primary client.
        ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());

        // Start the camera's video stream via the primary client.
        ASSERT_TRUE(frameHandlerPrimary->startStream());

        // Ensure the stream starts
        frameHandlerPrimary->waitForFrameCount(1);

        // Start the camera's video stream via another client
        ASSERT_TRUE(frameHandlerSecondary->startStream());

        // Ensure the stream starts
        frameHandlerSecondary->waitForFrameCount(1);

        // The non-primary client expects to receive a notification when the
        // primary client role is released.
        EvsEventDesc aTargetEvent = {};
        EvsEventDesc aNotification = {};

        bool listening = false;
        std::mutex eventLock;
        std::condition_variable eventCond;
        std::thread listener =
                std::thread([&aNotification, &frameHandlerSecondary, &listening, &eventCond]() {
                    // Notify that a listening thread is running.
                    listening = true;
                    eventCond.notify_all();

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
                    if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification, true)) {
                        LOG(WARNING) << "The timer expired before the target event was fired.";
                    }
                });

        // Wait until the listening thread starts.
        std::unique_lock<std::mutex> lock(eventLock);
        auto timer = std::chrono::system_clock::now();
        while (!listening) {
            timer += 1s;
            eventCond.wait_until(lock, timer);
        }
        lock.unlock();

        // Release the primary client role.
        ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());

        // Join the listening thread.
        if (listener.joinable()) {
            listener.join();
        }

        // Verify change notifications.
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));

        // The non-primary client becomes the primary client.
        ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());

        // The previous primary client fails to become the primary client.
        ASSERT_FALSE(pPrimaryCam->setPrimaryClient().isOk());

        listening = false;
        listener = std::thread([&aNotification, &frameHandlerPrimary, &listening, &eventCond]() {
            // Notify that a listening thread is running.
            listening = true;
            eventCond.notify_all();

            EvsEventDesc aTargetEvent;
            aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
            if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification, true)) {
                LOG(WARNING) << "The timer expired before the target event was fired.";
            }
        });

        // Wait until the listening thread starts.
        timer = std::chrono::system_clock::now();
        lock.lock();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        // Close the current primary client.
        frameHandlerSecondary->shutdown();

        // Join the listening thread.
        if (listener.joinable()) {
            listener.join();
        }

        // Verify change notifications.
        ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification.aType));

        // Close the remaining stream.
        frameHandlerPrimary->shutdown();

        // Explicitly release the cameras
        ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
        ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
        mActiveCameras.clear();
    }
}
1039 
1040 /*
1041  * MultiCameraParameter:
1042  * Verify that primary and non-primary clients behave as expected when they try to adjust
1043  * camera parameters.
1044  */
TEST_P(EvsAidlTest,MultiCameraParameter)1045 TEST_P(EvsAidlTest, MultiCameraParameter) {
1046     LOG(INFO) << "Starting MultiCameraParameter test";
1047 
1048     if (mIsHwModule) {
1049         // This test is not for HW module implementation.
1050         return;
1051     }
1052 
1053     // Get the camera list
1054     loadCameraList();
1055 
1056     // Test each reported camera
1057     for (auto&& cam : mCameraInfo) {
1058         bool isLogicalCam = false;
1059         getPhysicalCameraIds(cam.id, isLogicalCam);
1060         if (isLogicalCam) {
1061             // TODO(b/145465724): Support camera parameter programming on
1062             // logical devices.
1063             LOG(INFO) << "Skip a logical device " << cam.id;
1064             continue;
1065         }
1066 
1067         // Read a target resolution from the metadata
1068         Stream targetCfg = getFirstStreamConfiguration(
1069                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1070         ASSERT_GT(targetCfg.width, 0);
1071         ASSERT_GT(targetCfg.height, 0);
1072 
1073         // Create two camera clients.
1074         std::shared_ptr<IEvsCamera> pPrimaryCam;
1075         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pPrimaryCam).isOk());
1076         EXPECT_NE(pPrimaryCam, nullptr);
1077 
1078         // Store a camera handle for a clean-up
1079         mActiveCameras.push_back(pPrimaryCam);
1080 
1081         std::shared_ptr<IEvsCamera> pSecondaryCam;
1082         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pSecondaryCam).isOk());
1083         EXPECT_NE(pSecondaryCam, nullptr);
1084 
1085         // Store a camera handle for a clean-up
1086         mActiveCameras.push_back(pSecondaryCam);
1087 
1088         // Get the parameter list
1089         std::vector<CameraParam> camPrimaryCmds, camSecondaryCmds;
1090         ASSERT_TRUE(pPrimaryCam->getParameterList(&camPrimaryCmds).isOk());
1091         ASSERT_TRUE(pSecondaryCam->getParameterList(&camSecondaryCmds).isOk());
1092         if (camPrimaryCmds.size() < 1 || camSecondaryCmds.size() < 1) {
1093             // Skip a camera device if it does not support any parameter.
1094             continue;
1095         }
1096 
1097         // Set up per-client frame receiver objects which will fire up its own thread
1098         std::shared_ptr<FrameHandler> frameHandlerPrimary = ndk::SharedRefBase::make<FrameHandler>(
1099                 pPrimaryCam, cam, nullptr, FrameHandler::eAutoReturn);
1100         std::shared_ptr<FrameHandler> frameHandlerSecondary =
1101                 ndk::SharedRefBase::make<FrameHandler>(pSecondaryCam, cam, nullptr,
1102                                                        FrameHandler::eAutoReturn);
1103         EXPECT_NE(frameHandlerPrimary, nullptr);
1104         EXPECT_NE(frameHandlerSecondary, nullptr);
1105 
1106         // Set one client as the primary client.
1107         ASSERT_TRUE(pPrimaryCam->setPrimaryClient().isOk());
1108 
1109         // Try to set another client as the primary client.
1110         ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());
1111 
1112         // Start the camera's video stream via a primary client client.
1113         ASSERT_TRUE(frameHandlerPrimary->startStream());
1114 
1115         // Ensure the stream starts
1116         frameHandlerPrimary->waitForFrameCount(1);
1117 
1118         // Start the camera's video stream via another client
1119         ASSERT_TRUE(frameHandlerSecondary->startStream());
1120 
1121         // Ensure the stream starts
1122         frameHandlerSecondary->waitForFrameCount(1);
1123 
1124         int32_t val0 = 0;
1125         std::vector<int32_t> values;
1126         EvsEventDesc aNotification0 = {};
1127         EvsEventDesc aNotification1 = {};
1128         for (auto& cmd : camPrimaryCmds) {
1129             // Get a valid parameter value range
1130             ParameterRange range;
1131             ASSERT_TRUE(pPrimaryCam->getIntParameterRange(cmd, &range).isOk());
1132             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1133                 // Try to turn off auto-focus
1134                 values.clear();
1135                 ASSERT_TRUE(
1136                         pPrimaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1137                 for (auto&& v : values) {
1138                     EXPECT_EQ(v, 0);
1139                 }
1140             }
1141 
1142             // Calculate a parameter value to program.
1143             val0 = range.min + (std::rand() % (range.max - range.min));
1144             val0 = val0 - (val0 % range.step);
1145 
1146             // Prepare and start event listeners.
1147             bool listening0 = false;
1148             bool listening1 = false;
1149             std::condition_variable eventCond;
1150             std::thread listener0 = std::thread([cmd, val0, &aNotification0, &frameHandlerPrimary,
1151                                                  &listening0, &listening1, &eventCond]() {
1152                 listening0 = true;
1153                 if (listening1) {
1154                     eventCond.notify_all();
1155                 }
1156 
1157                 EvsEventDesc aTargetEvent;
1158                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1159                 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1160                 aTargetEvent.payload.push_back(val0);
1161                 if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
1162                     LOG(WARNING) << "A timer is expired before a target event is fired.";
1163                 }
1164             });
1165             std::thread listener1 = std::thread([cmd, val0, &aNotification1, &frameHandlerSecondary,
1166                                                  &listening0, &listening1, &eventCond]() {
1167                 listening1 = true;
1168                 if (listening0) {
1169                     eventCond.notify_all();
1170                 }
1171 
1172                 EvsEventDesc aTargetEvent;
1173                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1174                 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1175                 aTargetEvent.payload.push_back(val0);
1176                 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1177                     LOG(WARNING) << "A timer is expired before a target event is fired.";
1178                 }
1179             });
1180 
1181             // Wait until a listening thread starts.
1182             std::mutex eventLock;
1183             std::unique_lock<std::mutex> lock(eventLock);
1184             auto timer = std::chrono::system_clock::now();
1185             while (!listening0 || !listening1) {
1186                 eventCond.wait_until(lock, timer + 1s);
1187             }
1188             lock.unlock();
1189 
1190             // Try to program a parameter
1191             values.clear();
1192             ASSERT_TRUE(pPrimaryCam->setIntParameter(cmd, val0, &values).isOk());
1193             for (auto&& v : values) {
1194                 EXPECT_EQ(val0, v) << "Values are not matched.";
1195             }
1196 
1197             // Join a listening thread.
1198             if (listener0.joinable()) {
1199                 listener0.join();
1200             }
1201             if (listener1.joinable()) {
1202                 listener1.join();
1203             }
1204 
1205             // Verify a change notification
1206             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1207                       static_cast<EvsEventType>(aNotification0.aType));
1208             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1209                       static_cast<EvsEventType>(aNotification1.aType));
1210             ASSERT_GE(aNotification0.payload.size(), 2);
1211             ASSERT_GE(aNotification1.payload.size(), 2);
1212             ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
1213             ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
1214             for (auto&& v : values) {
1215                 ASSERT_EQ(v, aNotification0.payload[1]);
1216                 ASSERT_EQ(v, aNotification1.payload[1]);
1217             }
1218 
1219             // Clients expects to receive a parameter change notification
1220             // whenever a primary client client adjusts it.
1221             values.clear();
1222             ASSERT_TRUE(pPrimaryCam->getIntParameter(cmd, &values).isOk());
1223             for (auto&& v : values) {
1224                 EXPECT_EQ(val0, v) << "Values are not matched.";
1225             }
1226         }
1227 
1228         // Try to adjust a parameter via non-primary client
1229         values.clear();
1230         ASSERT_FALSE(pSecondaryCam->setIntParameter(camSecondaryCmds[0], val0, &values).isOk());
1231 
1232         // Non-primary client attempts to be a primary client
1233         ASSERT_FALSE(pSecondaryCam->setPrimaryClient().isOk());
1234 
1235         // Primary client retires from a primary client role
1236         bool listening = false;
1237         std::condition_variable eventCond;
1238         std::thread listener =
1239                 std::thread([&aNotification0, &frameHandlerSecondary, &listening, &eventCond]() {
1240                     listening = true;
1241                     eventCond.notify_all();
1242 
1243                     EvsEventDesc aTargetEvent;
1244                     aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1245                     if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification0, true)) {
1246                         LOG(WARNING) << "A timer is expired before a target event is fired.";
1247                     }
1248                 });
1249 
1250         std::mutex eventLock;
1251         auto timer = std::chrono::system_clock::now();
1252         std::unique_lock<std::mutex> lock(eventLock);
1253         while (!listening) {
1254             eventCond.wait_until(lock, timer + 1s);
1255         }
1256         lock.unlock();
1257 
1258         ASSERT_TRUE(pPrimaryCam->unsetPrimaryClient().isOk());
1259 
1260         if (listener.joinable()) {
1261             listener.join();
1262         }
1263         ASSERT_EQ(EvsEventType::MASTER_RELEASED, static_cast<EvsEventType>(aNotification0.aType));
1264 
1265         // Try to adjust a parameter after being retired
1266         values.clear();
1267         ASSERT_FALSE(pPrimaryCam->setIntParameter(camPrimaryCmds[0], val0, &values).isOk());
1268 
1269         // Non-primary client becomes a primary client
1270         ASSERT_TRUE(pSecondaryCam->setPrimaryClient().isOk());
1271 
1272         // Try to adjust a parameter via new primary client
1273         for (auto& cmd : camSecondaryCmds) {
1274             // Get a valid parameter value range
1275             ParameterRange range;
1276             ASSERT_TRUE(pSecondaryCam->getIntParameterRange(cmd, &range).isOk());
1277 
1278             values.clear();
1279             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1280                 // Try to turn off auto-focus
1281                 values.clear();
1282                 ASSERT_TRUE(
1283                         pSecondaryCam->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1284                 for (auto&& v : values) {
1285                     EXPECT_EQ(v, 0);
1286                 }
1287             }
1288 
1289             // Calculate a parameter value to program, rounded down to a multiple of the step size.
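            // For example (illustrative values only), with range {min = 0, max = 255, step = 16},
            // a raw draw of 37 becomes 37 - (37 % 16) = 32.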
1290             val0 = range.min + (std::rand() % (range.max - range.min));
1291             val0 = val0 - (val0 % range.step);
1292 
1293             // Prepare and start event listeners.
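            // Each listener thread below registers for the PARAMETER_CHANGED event carrying the
            // command and value programmed further down; the main thread waits (retrying with a
            // one-second timeout) until both listeners report that they have started, so the
            // event is already being watched before the parameter is actually programmed.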
1294             bool listening0 = false;
1295             bool listening1 = false;
1296             std::condition_variable eventCond;
1297             std::thread listener0 = std::thread([&]() {
1298                 listening0 = true;
1299                 if (listening1) {
1300                     eventCond.notify_all();
1301                 }
1302 
1303                 EvsEventDesc aTargetEvent;
1304                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1305                 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1306                 aTargetEvent.payload.push_back(val0);
1307                 if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
1308                     LOG(WARNING) << "The timer expired before the target event was fired.";
1309                 }
1310             });
1311             std::thread listener1 = std::thread([&]() {
1312                 listening1 = true;
1313                 if (listening0) {
1314                     eventCond.notify_all();
1315                 }
1316 
1317                 EvsEventDesc aTargetEvent;
1318                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1319                 aTargetEvent.payload.push_back(static_cast<int32_t>(cmd));
1320                 aTargetEvent.payload.push_back(val0);
1321                 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1322                     LOG(WARNING) << "The timer expired before the target event was fired.";
1323                 }
1324             });
1325 
1326             // Wait until both listening threads start.
1327             std::mutex eventLock;
1328             std::unique_lock<std::mutex> lock(eventLock);
1329             auto timer = std::chrono::system_clock::now();
1330             while (!listening0 || !listening1) {
1331                 eventCond.wait_until(lock, timer + 1s);
1332             }
1333             lock.unlock();
1334 
1335             // Try to program a parameter
1336             values.clear();
1337             ASSERT_TRUE(pSecondaryCam->setIntParameter(cmd, val0, &values).isOk());
1338 
1339             // Clients expect to receive a parameter change notification
1340             // whenever the primary client adjusts a parameter.
1341             values.clear();
1342             ASSERT_TRUE(pSecondaryCam->getIntParameter(cmd, &values).isOk());
1343             for (auto&& v : values) {
1344                 EXPECT_EQ(val0, v) << "Values do not match.";
1345             }
1346 
1347             // Join a listening thread.
1348             if (listener0.joinable()) {
1349                 listener0.join();
1350             }
1351             if (listener1.joinable()) {
1352                 listener1.join();
1353             }
1354 
1355             // Verify a change notification
1356             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1357                       static_cast<EvsEventType>(aNotification0.aType));
1358             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1359                       static_cast<EvsEventType>(aNotification1.aType));
1360             ASSERT_GE(aNotification0.payload.size(), 2);
1361             ASSERT_GE(aNotification1.payload.size(), 2);
1362             ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification0.payload[0]));
1363             ASSERT_EQ(cmd, static_cast<CameraParam>(aNotification1.payload[0]));
1364             for (auto&& v : values) {
1365                 ASSERT_EQ(v, aNotification0.payload[1]);
1366                 ASSERT_EQ(v, aNotification1.payload[1]);
1367             }
1368         }
1369 
1370         // New primary client retires from the role
1371         ASSERT_TRUE(pSecondaryCam->unsetPrimaryClient().isOk());
1372 
1373         // Shutdown
1374         frameHandlerPrimary->shutdown();
1375         frameHandlerSecondary->shutdown();
1376 
1377         // Explicitly release the camera
1378         ASSERT_TRUE(mEnumerator->closeCamera(pPrimaryCam).isOk());
1379         ASSERT_TRUE(mEnumerator->closeCamera(pSecondaryCam).isOk());
1380         mActiveCameras.clear();
1381     }
1382 }
1383 
1384 /*
1385  * HighPriorityCameraClient:
1386  * An EVS client that owns the display is prioritized and therefore can take over
1387  * a primary client role from other EVS clients without the display.
1388  */
1389 TEST_P(EvsAidlTest, HighPriorityCameraClient) {
1390     LOG(INFO) << "Starting HighPriorityCameraClient test";
1391 
1392     if (mIsHwModule) {
1393         // This test is not for HW module implementation.
1394         return;
1395     }
1396 
1397     // Get the camera list
1398     loadCameraList();
1399 
1400     // Test each reported camera
1401     for (auto&& cam : mCameraInfo) {
1402         // Request available display IDs
1403         uint8_t targetDisplayId = 0;
1404         std::vector<uint8_t> displayIds;
1405         ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1406         EXPECT_GT(displayIds.size(), 0);
1407         targetDisplayId = displayIds[0];
1408 
1409         // Request exclusive access to the EVS display
1410         std::shared_ptr<IEvsDisplay> pDisplay;
1411         ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1412         EXPECT_NE(pDisplay, nullptr);
1413 
1414         // Read a target resolution from the metadata
1415         Stream targetCfg = getFirstStreamConfiguration(
1416                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1417         ASSERT_GT(targetCfg.width, 0);
1418         ASSERT_GT(targetCfg.height, 0);
1419 
1420         // Create two clients
1421         std::shared_ptr<IEvsCamera> pCam0;
1422         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1423         EXPECT_NE(pCam0, nullptr);
1424 
1425         // Store a camera handle for a clean-up
1426         mActiveCameras.push_back(pCam0);
1427 
1428         std::shared_ptr<IEvsCamera> pCam1;
1429         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1430         EXPECT_NE(pCam1, nullptr);
1431 
1432         // Store a camera handle for a clean-up
1433         mActiveCameras.push_back(pCam1);
1434 
1435         // Get the parameter list; this test will use the first command in both
1436         // lists.
1437         std::vector<CameraParam> cam0Cmds, cam1Cmds;
1438         ASSERT_TRUE(pCam0->getParameterList(&cam0Cmds).isOk());
1439         ASSERT_TRUE(pCam1->getParameterList(&cam1Cmds).isOk());
1440         if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
1441             // Cannot execute this test.
1442             ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1443             continue;
1444         }
1445 
1446         // Set up frame receiver objects, each of which will fire up its own thread.
1447         std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1448                 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1449         std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1450                 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
1451         EXPECT_NE(frameHandler0, nullptr);
1452         EXPECT_NE(frameHandler1, nullptr);
1453 
1454         // Activate the display
1455         ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1456 
1457         // Start the camera's video stream
1458         ASSERT_TRUE(frameHandler0->startStream());
1459         ASSERT_TRUE(frameHandler1->startStream());
1460 
1461         // Ensure the stream starts
1462         frameHandler0->waitForFrameCount(1);
1463         frameHandler1->waitForFrameCount(1);
1464 
1465         // Client 1 becomes a primary client and programs a parameter.
1466 
1467         // Get a valid parameter value range
1468         ParameterRange range;
1469         ASSERT_TRUE(pCam1->getIntParameterRange(cam1Cmds[0], &range).isOk());
1470 
1471         // Client1 becomes a primary client
1472         ASSERT_TRUE(pCam1->setPrimaryClient().isOk());
1473 
1474         std::vector<int32_t> values;
1475         EvsEventDesc aTargetEvent = {};
1476         EvsEventDesc aNotification = {};
1477         bool listening = false;
1478         std::mutex eventLock;
1479         std::condition_variable eventCond;
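        // If the camera's first parameter is ABSOLUTE_FOCUS, AUTO_FOCUS is turned off first
        // (presumably so that an active auto-focus does not override the value programmed
        // below), and the test waits for the matching PARAMETER_CHANGED event.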
1480         if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1481             std::thread listener =
1482                     std::thread([&frameHandler0, &aNotification, &listening, &eventCond] {
1483                         listening = true;
1484                         eventCond.notify_all();
1485 
1486                         EvsEventDesc aTargetEvent;
1487                         aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1488                         aTargetEvent.payload.push_back(
1489                                 static_cast<int32_t>(CameraParam::AUTO_FOCUS));
1490                         aTargetEvent.payload.push_back(0);
1491                         if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1492                             LOG(WARNING) << "The timer expired before the target event was fired.";
1493                         }
1494                     });
1495 
1496             // Wait until the listener starts.
1497             std::unique_lock<std::mutex> lock(eventLock);
1498             auto timer = std::chrono::system_clock::now();
1499             while (!listening) {
1500                 eventCond.wait_until(lock, timer + 1s);
1501             }
1502             lock.unlock();
1503 
1504             // Try to turn off auto-focus
1505             ASSERT_TRUE(pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1506             for (auto&& v : values) {
1507                 EXPECT_EQ(v, 0);
1508             }
1509 
1510             // Join a listener
1511             if (listener.joinable()) {
1512                 listener.join();
1513             }
1514 
1515             // Make sure AUTO_FOCUS is off.
1516             ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1517                       EvsEventType::PARAMETER_CHANGED);
1518         }
1519 
1520         // Try to program a parameter with a random value drawn from [range.min, range.max)
1521         // and rounded down to a multiple of the step size.
1522         int32_t val0 = range.min + (std::rand() % (range.max - range.min));
1523         val0 = val0 - (val0 % range.step);
1524 
1525         std::thread listener = std::thread(
1526                 [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
1527                     listening = true;
1528                     eventCond.notify_all();
1529 
1530                     EvsEventDesc aTargetEvent;
1531                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1532                     aTargetEvent.payload.push_back(static_cast<int32_t>(cam1Cmds[0]));
1533                     aTargetEvent.payload.push_back(val0);
1534                     if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1535                         LOG(WARNING) << "The timer expired before the target event was fired.";
1536                     }
1537                 });
1538 
1539         // Wait until the listener starts.
1540         listening = false;
1541         std::unique_lock<std::mutex> lock(eventLock);
1542         auto timer = std::chrono::system_clock::now();
1543         while (!listening) {
1544             eventCond.wait_until(lock, timer + 1s);
1545         }
1546         lock.unlock();
1547 
1548         values.clear();
1549         ASSERT_TRUE(pCam1->setIntParameter(cam1Cmds[0], val0, &values).isOk());
1550         for (auto&& v : values) {
1551             EXPECT_EQ(val0, v);
1552         }
1553 
1554         // Join a listener
1555         if (listener.joinable()) {
1556             listener.join();
1557         }
1558 
1559         // Verify a change notification
1560         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
1561         ASSERT_GE(aNotification.payload.size(), 2);
1562         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam1Cmds[0]);
1563         for (auto&& v : values) {
1564             ASSERT_EQ(v, aNotification.payload[1]);
1565         }
1566 
1567         listener = std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
1568             listening = true;
1569             eventCond.notify_all();
1570 
1571             EvsEventDesc aTargetEvent;
1572             aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1573             if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
1574                 LOG(WARNING) << "The timer expired before the target event was fired.";
1575             }
1576         });
1577 
1578         // Wait until the listener starts.
1579         listening = false;
1580         lock.lock();
1581         timer = std::chrono::system_clock::now();
1582         while (!listening) {
1583             eventCond.wait_until(lock, timer + 1s);
1584         }
1585         lock.unlock();
1586 
1587         // Client 0 steals the primary client role
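        // forcePrimaryClient() takes the display handle as evidence that this client owns the
        // display, which is what entitles it to take the primary role away from client 1.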
1588         ASSERT_TRUE(pCam0->forcePrimaryClient(pDisplay).isOk());
1589 
1590         // Join a listener
1591         if (listener.joinable()) {
1592             listener.join();
1593         }
1594 
1595         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::MASTER_RELEASED);
1596 
1597         // Client 0 programs a parameter
1598         val0 = range.min + (std::rand() % (range.max - range.min));
1599 
1600         // Rounding down
1601         val0 = val0 - (val0 % range.step);
1602 
1603         if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1604             std::thread listener =
1605                     std::thread([&frameHandler1, &aNotification, &listening, &eventCond] {
1606                         listening = true;
1607                         eventCond.notify_all();
1608 
1609                         EvsEventDesc aTargetEvent;
1610                         aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1611                         aTargetEvent.payload.push_back(
1612                                 static_cast<int32_t>(CameraParam::AUTO_FOCUS));
1613                         aTargetEvent.payload.push_back(0);
1614                         if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1615                             LOG(WARNING) << "The timer expired before the target event was fired.";
1616                         }
1617                     });
1618 
1619             // Wait until the listener starts.
1620             std::unique_lock<std::mutex> lock(eventLock);
1621             auto timer = std::chrono::system_clock::now();
1622             while (!listening) {
1623                 eventCond.wait_until(lock, timer + 1s);
1624             }
1625             lock.unlock();
1626 
1627             // Try to turn off auto-focus
1628             values.clear();
1629             ASSERT_TRUE(pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0, &values).isOk());
1630             for (auto&& v : values) {
1631                 EXPECT_EQ(v, 0);
1632             }
1633 
1634             // Join a listener
1635             if (listener.joinable()) {
1636                 listener.join();
1637             }
1638 
1639             // Make sure AUTO_FOCUS is off.
1640             ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1641                       EvsEventType::PARAMETER_CHANGED);
1642         }
1643 
1644         listener = std::thread(
1645                 [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
1646                     listening = true;
1647                     eventCond.notify_all();
1648 
1649                     EvsEventDesc aTargetEvent;
1650                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1651                     aTargetEvent.payload.push_back(static_cast<int32_t>(cam0Cmds[0]));
1652                     aTargetEvent.payload.push_back(val0);
1653                     if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1654                         LOG(WARNING) << "The timer expired before the target event was fired.";
1655                     }
1656                 });
1657 
1658         // Wait until the listener starts.
1659         listening = false;
1660         timer = std::chrono::system_clock::now();
1661         lock.lock();
1662         while (!listening) {
1663             eventCond.wait_until(lock, timer + 1s);
1664         }
1665         lock.unlock();
1666 
1667         values.clear();
1668         ASSERT_TRUE(pCam0->setIntParameter(cam0Cmds[0], val0, &values).isOk());
1669 
1670         // Join a listener
1671         if (listener.joinable()) {
1672             listener.join();
1673         }
1674         // Verify a change notification
1675         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType), EvsEventType::PARAMETER_CHANGED);
1676         ASSERT_GE(aNotification.payload.size(), 2);
1677         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]), cam0Cmds[0]);
1678         for (auto&& v : values) {
1679             ASSERT_EQ(v, aNotification.payload[1]);
1680         }
1681 
1682         // Turn off the display (yes, before the stream stops -- it should be handled)
1683         ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1684 
1685         // Shut down the streamer
1686         frameHandler0->shutdown();
1687         frameHandler1->shutdown();
1688 
1689         // Explicitly release the camera
1690         ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1691         ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1692         mActiveCameras.clear();
1693 
1694         // Explicitly release the display
1695         ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1696     }
1697 }
1698 
1699 /*
1700  * CameraUseStreamConfigToDisplay:
1701  * End-to-end test of data flowing from the camera to the display.  Similar to the
1702  * CameraToDisplayRoundTrip test case, but this case retrieves available stream
1703  * configurations from EVS and uses one of them to start a video stream.
1704  */
1705 TEST_P(EvsAidlTest, CameraUseStreamConfigToDisplay) {
1706     LOG(INFO) << "Starting CameraUseStreamConfigToDisplay test";
1707 
1708     // Get the camera list
1709     loadCameraList();
1710 
1711     // Request available display IDs
1712     uint8_t targetDisplayId = 0;
1713     std::vector<uint8_t> displayIds;
1714     ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
1715     EXPECT_GT(displayIds.size(), 0);
1716     targetDisplayId = displayIds[0];
1717 
1718     // Test each reported camera
1719     for (auto&& cam : mCameraInfo) {
1720         // Request exclusive access to the EVS display
1721         std::shared_ptr<IEvsDisplay> pDisplay;
1722         ASSERT_TRUE(mEnumerator->openDisplay(targetDisplayId, &pDisplay).isOk());
1723         EXPECT_NE(pDisplay, nullptr);
1724 
1725         // Choose a configuration whose frame rate is at least minReqFps.
1726         Stream targetCfg = {};
1727         const int32_t minReqFps = 15;
1728         int32_t maxArea = 0;
1729         camera_metadata_entry_t streamCfgs;
1730         bool foundCfg = false;
1731         if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1732                                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1733                                         &streamCfgs)) {
1734             // Stream configurations are found in metadata
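            // Each configuration occupies kStreamCfgSz consecutive int32 values
            // (id, width, height, format, direction, framerate), so incrementing the
            // RawStreamConfig pointer steps to the next configuration entry.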
1735             RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1736             for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
1737                 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
1738                     if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1739                         targetCfg.width = ptr->width;
1740                         targetCfg.height = ptr->height;
1741                         targetCfg.format = static_cast<PixelFormat>(ptr->format);
1742 
1743                         maxArea = ptr->width * ptr->height;
1744                         foundCfg = true;
1745                     }
1746                 }
1747                 ++ptr;
1748             }
1749         }
1750 
1751         if (!foundCfg) {
1752             // Current EVS camera does not provide stream configurations in the
1753             // metadata.
1754             continue;
1755         }
1756 
1757         std::shared_ptr<IEvsCamera> pCam;
1758         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
1759         EXPECT_NE(pCam, nullptr);
1760 
1761         // Store a camera handle for a clean-up
1762         mActiveCameras.push_back(pCam);
1763 
1764         // Set up a frame receiver object which will fire up its own thread.
1765         std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
1766                 pCam, cam, pDisplay, FrameHandler::eAutoReturn);
1767         EXPECT_NE(frameHandler, nullptr);
1768 
1769         // Activate the display
1770         ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
1771 
1772         // Start the camera's video stream
1773         ASSERT_TRUE(frameHandler->startStream());
1774 
1775         // Wait a while to let the data flow
1776         static const int kSecondsToWait = 5;
1777         const int streamTimeMs =
1778                 kSecondsToWait * kSecondsToMilliseconds - kMaxStreamStartMilliseconds;
1779         const unsigned minimumFramesExpected =
1780                 streamTimeMs * kMinimumFramesPerSecond / kSecondsToMilliseconds;
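        // With kSecondsToWait = 5, that is (5 * 1000 - 500) = 4500 ms of effective streaming,
        // so at the 10 fps minimum we expect at least 45 frames.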
1781         sleep(kSecondsToWait);
1782         unsigned framesReceived = 0;
1783         unsigned framesDisplayed = 0;
1784         frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
1785         EXPECT_EQ(framesReceived, framesDisplayed);
1786         EXPECT_GE(framesDisplayed, minimumFramesExpected);
1787 
1788         // Turn off the display (yes, before the stream stops -- it should be handled)
1789         ASSERT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
1790 
1791         // Shut down the streamer
1792         frameHandler->shutdown();
1793 
1794         // Explicitly release the camera
1795         ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
1796         mActiveCameras.clear();
1797 
1798         // Explicitly release the display
1799         ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
1800     }
1801 }
1802 
1803 /*
1804  * MultiCameraStreamUseConfig:
1805  * Verify that each client can start and stop video streams on the same
1806  * underlying camera with the same configuration.
1807  */
1808 TEST_P(EvsAidlTest, MultiCameraStreamUseConfig) {
1809     LOG(INFO) << "Starting MultiCameraStreamUseConfig test";
1810 
1811     if (mIsHwModule) {
1812         // This test is not for HW module implementation.
1813         return;
1814     }
1815 
1816     // Get the camera list
1817     loadCameraList();
1818 
1819     // Test each reported camera
1820     for (auto&& cam : mCameraInfo) {
1821         // Choose a configuration whose frame rate is at least minReqFps.
1822         Stream targetCfg = {};
1823         const int32_t minReqFps = 15;
1824         int32_t maxArea = 0;
1825         camera_metadata_entry_t streamCfgs;
1826         bool foundCfg = false;
1827         if (!find_camera_metadata_entry(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()),
1828                                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1829                                         &streamCfgs)) {
1830             // Stream configurations are found in metadata
1831             RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(streamCfgs.data.i32);
1832             for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
1833                 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
1834                     if (ptr->width * ptr->height > maxArea && ptr->framerate >= minReqFps) {
1835                         targetCfg.width = ptr->width;
1836                         targetCfg.height = ptr->height;
1837                         targetCfg.format = static_cast<PixelFormat>(ptr->format);
1838 
1839                         maxArea = ptr->width * ptr->height;
1840                         foundCfg = true;
1841                     }
1842                 }
1843                 ++ptr;
1844             }
1845         }
1846 
1847         if (!foundCfg) {
1848             LOG(INFO) << "Device " << cam.id
1849                       << " does not provide a list of supported stream configurations, skipped";
1850             continue;
1851         }
1852 
1853         // Create the first camera client with a selected stream configuration.
1854         std::shared_ptr<IEvsCamera> pCam0;
1855         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam0).isOk());
1856         EXPECT_NE(pCam0, nullptr);
1857 
1858         // Store a camera handle for a clean-up
1859         mActiveCameras.push_back(pCam0);
1860 
1861         // Try to create the second camera client with a different stream
1862         // configuration.
1863         int32_t id = targetCfg.id;
1864         targetCfg.id += 1;  // EVS manager sees only the stream id.
1865         std::shared_ptr<IEvsCamera> pCam1;
1866         ASSERT_FALSE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1867 
1868         // Try again with same stream configuration.
1869         targetCfg.id = id;
1870         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam1).isOk());
1871         EXPECT_NE(pCam1, nullptr);
1872 
1873         // Set up per-client frame receiver objects, each of which will fire up its own thread
1874         std::shared_ptr<FrameHandler> frameHandler0 = ndk::SharedRefBase::make<FrameHandler>(
1875                 pCam0, cam, nullptr, FrameHandler::eAutoReturn);
1876         std::shared_ptr<FrameHandler> frameHandler1 = ndk::SharedRefBase::make<FrameHandler>(
1877                 pCam1, cam, nullptr, FrameHandler::eAutoReturn);
1878         EXPECT_NE(frameHandler0, nullptr);
1879         EXPECT_NE(frameHandler1, nullptr);
1880 
1881         // Start the camera's video stream via both clients
1882         ASSERT_TRUE(frameHandler0->startStream());
1883         ASSERT_TRUE(frameHandler1->startStream());
1884 
1885         // Ensure the stream starts
1886         frameHandler0->waitForFrameCount(1);
1887         frameHandler1->waitForFrameCount(1);
1888 
1889         nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
1890 
1891         // Wait a bit, then ensure both clients get at least the required minimum number of frames
1892         sleep(5);
1893         nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
1894         unsigned framesReceived0 = 0, framesReceived1 = 0;
1895         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1896         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1897         framesReceived0 = framesReceived0 - 1;  // Back out the first frame we already waited for
1898         framesReceived1 = framesReceived1 - 1;  // Back out the first frame we already waited for
1899         nsecs_t runTime = end - firstFrame;
1900         float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
1901         float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
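        // runTime is in nanoseconds; multiplying by kNanoToSeconds (1e-9) converts it to
        // seconds, so each rate above is simply frames received divided by elapsed seconds.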
1902         LOG(INFO) << "Measured camera rate " << std::scientific << framesPerSecond0 << " fps and "
1903                   << framesPerSecond1 << " fps";
1904         EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
1905         EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
1906 
1907         // Shut down one client
1908         frameHandler0->shutdown();
1909 
1910         // Read frame counters again
1911         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
1912         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
1913 
1914         // Wait a bit again
1915         sleep(5);
1916         unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
1917         frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
1918         frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
1919         EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
1920         EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
1921 
1922         // Shut down the other client
1923         frameHandler1->shutdown();
1924 
1925         // Explicitly release the camera
1926         ASSERT_TRUE(mEnumerator->closeCamera(pCam0).isOk());
1927         ASSERT_TRUE(mEnumerator->closeCamera(pCam1).isOk());
1928         mActiveCameras.clear();
1929     }
1930 }
1931 
1932 /*
1933  * LogicalCameraMetadata:
1934  * Validates the metadata of each logical camera reported by the enumerator by
1935  * checking its capabilities and locating the supporting physical camera device
1936  * identifiers.
1937  */
1938 TEST_P(EvsAidlTest, LogicalCameraMetadata) {
1939     LOG(INFO) << "Starting LogicalCameraMetadata test";
1940 
1941     // Get the camera list
1942     loadCameraList();
1943 
1944     // Verify that each logical camera reports at least one physical camera device ID
1945     for (auto&& cam : mCameraInfo) {
1946         bool isLogicalCam = false;
1947         auto devices = getPhysicalCameraIds(cam.id, isLogicalCam);
1948         if (isLogicalCam) {
1949             ASSERT_GE(devices.size(), 1) << "Logical camera device must have at least one physical "
1950                                             "camera device ID in its metadata.";
1951         }
1952     }
1953 }
1954 
1955 /*
1956  * CameraStreamExternalBuffering:
1957  * This is the same as CameraStreamBuffering except that frame buffers are allocated by
1958  * the test client and then imported into the EVS framework.
1959  */
1960 TEST_P(EvsAidlTest, CameraStreamExternalBuffering) {
1961     LOG(INFO) << "Starting CameraStreamExternalBuffering test";
1962 
1963     // Arbitrary constant (should be > 1 and not too big)
1964     static const unsigned int kBuffersToHold = 3;
1965 
1966     // Get the camera list
1967     loadCameraList();
1968 
1969     // Acquire the graphics buffer allocator
1970     android::GraphicBufferAllocator& alloc(android::GraphicBufferAllocator::get());
1971     const auto usage =
1972             GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
1973 
1974     // Test each reported camera
1975     for (auto&& cam : mCameraInfo) {
1976         // Read a target resolution from the metadata
1977         Stream targetCfg = getFirstStreamConfiguration(
1978                 reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1979         ASSERT_GT(targetCfg.width, 0);
1980         ASSERT_GT(targetCfg.height, 0);
1981 
1982         // Allocate buffers to use
1983         std::vector<BufferDesc> buffers;
1984         buffers.resize(kBuffersToHold);
1985         for (auto i = 0; i < kBuffersToHold; ++i) {
1986             unsigned pixelsPerLine;
1987             buffer_handle_t memHandle = nullptr;
1988             android::status_t result =
1989                     alloc.allocate(targetCfg.width, targetCfg.height,
1990                                    static_cast<android::PixelFormat>(targetCfg.format),
1991                                    /* layerCount = */ 1, usage, &memHandle, &pixelsPerLine,
1992                                    /* graphicBufferId = */ 0,
1993                                    /* requestorName = */ "CameraStreamExternalBufferingTest");
1994             if (result != android::NO_ERROR) {
1995                 LOG(ERROR) << __FUNCTION__ << " failed to allocate memory.";
1996                 // Release previously allocated buffers
1997                 for (auto j = 0; j < i; j++) {
1998                     alloc.free(::android::dupFromAidl(buffers[j].buffer.handle));
1999                 }
2000                 return;
2001             } else {
2002                 BufferDesc buf;
2003                 HardwareBufferDescription* pDesc =
2004                         reinterpret_cast<HardwareBufferDescription*>(&buf.buffer.description);
2005                 pDesc->width = targetCfg.width;
2006                 pDesc->height = targetCfg.height;
2007                 pDesc->layers = 1;
2008                 pDesc->format = targetCfg.format;
2009                 pDesc->usage = static_cast<BufferUsage>(usage);
2010                 pDesc->stride = pixelsPerLine;
2011                 buf.buffer.handle = ::android::dupToAidl(memHandle);
2012                 buf.bufferId = i;  // Unique number to identify this buffer
2013                 buffers[i] = std::move(buf);
2014             }
2015         }
2016 
2017         bool isLogicalCam = false;
2018         getPhysicalCameraIds(cam.id, isLogicalCam);
2019 
2020         std::shared_ptr<IEvsCamera> pCam;
2021         ASSERT_TRUE(mEnumerator->openCamera(cam.id, targetCfg, &pCam).isOk());
2022         EXPECT_NE(pCam, nullptr);
2023 
2024         // Store a camera handle for a clean-up
2025         mActiveCameras.push_back(pCam);
2026 
2027         // Request to import buffers
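        // Importing external buffers is expected to fail for logical (multi-device) cameras,
        // which is why that case bails out below, while physical cameras should accept them.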
2028         int delta = 0;
2029         auto status = pCam->importExternalBuffers(buffers, &delta);
2030         if (isLogicalCam) {
2031             ASSERT_FALSE(status.isOk());
2032             continue;
2033         }
2034 
2035         ASSERT_TRUE(status.isOk());
2036         EXPECT_GE(delta, kBuffersToHold);
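        // delta reports by how much the number of buffers the camera can hold has grown; since
        // kBuffersToHold buffers were handed over, it should grow by at least that many.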
2037 
2038         // Set up a frame receiver object which will fire up its own thread.
2039         std::shared_ptr<FrameHandler> frameHandler = ndk::SharedRefBase::make<FrameHandler>(
2040                 pCam, cam, nullptr, FrameHandler::eNoAutoReturn);
2041         EXPECT_NE(frameHandler, nullptr);
2042 
2043         // Start the camera's video stream
2044         ASSERT_TRUE(frameHandler->startStream());
2045 
2046         // Check that the video stream stalls once we've gotten exactly the number of buffers
2047         // we requested since we told the frameHandler not to return them.
2048         sleep(1);  // 1 second should be enough for at least 5 frames to be delivered worst case
2049         unsigned framesReceived = 0;
2050         frameHandler->getFramesCounters(&framesReceived, nullptr);
2051         ASSERT_LE(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
2052 
2053         // Give back one buffer
2054         EXPECT_TRUE(frameHandler->returnHeldBuffer());
2055 
2056         // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
2057         // filled since we require 10fps minimum -- but give a 10% allowance just in case.
2058         unsigned framesReceivedAfter = 0;
2059         usleep(110 * kMillisecondsToMicroseconds);
2060         frameHandler->getFramesCounters(&framesReceivedAfter, nullptr);
2061         EXPECT_EQ(framesReceived + 1, framesReceivedAfter) << "Stream should've resumed";
2062 
2063         // Even when the camera pointer goes out of scope, the FrameHandler object will
2064         // keep the stream alive unless we tell it to shutdown.
2065         // Also note that the FrameHandle and the Camera have a mutual circular reference, so
2066         // we have to break that cycle in order for either of them to get cleaned up.
2067         frameHandler->shutdown();
2068 
2069         // Explicitly release the camera
2070         ASSERT_TRUE(mEnumerator->closeCamera(pCam).isOk());
2071         mActiveCameras.clear();
2072         // Release buffers
2073         for (auto& b : buffers) {
2074             alloc.free(::android::dupFromAidl(b.buffer.handle));
2075         }
2076         buffers.resize(0);
2077     }
2078 }
2079 
2080 TEST_P(EvsAidlTest, DeviceStatusCallbackRegistration) {
2081     std::shared_ptr<IEvsEnumeratorStatusCallback> cb =
2082             ndk::SharedRefBase::make<DeviceStatusCallback>();
2083     ndk::ScopedAStatus status = mEnumerator->registerStatusCallback(cb);
2084     if (mIsHwModule) {
2085         ASSERT_TRUE(status.isOk());
2086     } else {
2087         // A callback registration may fail if a HIDL EVS HAL implementation is
2088         // running.
2089         ASSERT_TRUE(status.isOk() ||
2090                     status.getServiceSpecificError() == static_cast<int>(EvsResult::NOT_SUPPORTED));
2091     }
2092 }
2093 
2094 /*
2095  * UltrasonicsArrayOpenClean:
2096  * Opens each ultrasonics array reported by the enumerator and then explicitly closes it via a
2097  * call to closeUltrasonicsArray. Then repeats the test to ensure all ultrasonics arrays
2098  * can be reopened.
2099  */
2100 TEST_P(EvsAidlTest, UltrasonicsArrayOpenClean) {
2101     LOG(INFO) << "Starting UltrasonicsArrayOpenClean test";
2102 
2103     // Get the ultrasonics array list
2104     loadUltrasonicsArrayList();
2105 
2106     // Open and close each ultrasonics array twice
2107     for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2108         for (int pass = 0; pass < 2; pass++) {
2109             std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2110             ASSERT_TRUE(
2111                     mEnumerator
2112                             ->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2113                             .isOk());
2114             EXPECT_NE(pUltrasonicsArray, nullptr);
2115 
2116             // Verify that this ultrasonics array self-identifies correctly
2117             UltrasonicsArrayDesc desc;
2118             ASSERT_TRUE(pUltrasonicsArray->getUltrasonicArrayInfo(&desc).isOk());
2119             EXPECT_EQ(ultraInfo.ultrasonicsArrayId, desc.ultrasonicsArrayId);
2120             LOG(DEBUG) << "Found ultrasonics array " << ultraInfo.ultrasonicsArrayId;
2121 
2122             // Explicitly close the ultrasonics array so resources are released right away
2123             ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2124         }
2125     }
2126 }
2127 
2128 // Starts a stream and verifies all data received is valid.
2129 TEST_P(EvsAidlTest, UltrasonicsVerifyStreamData) {
2130     LOG(INFO) << "Starting UltrasonicsVerifyStreamData";
2131 
2132     // Get the ultrasonics array list
2133     loadUltrasonicsArrayList();
2134 
2135     // For each ultrasonics array.
2136     for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2137         LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2138 
2139         std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2140         ASSERT_TRUE(
2141                 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2142                         .isOk());
2143         EXPECT_NE(pUltrasonicsArray, nullptr);
2144 
2145         std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
2146                 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
2147         EXPECT_NE(frameHandler, nullptr);
2148 
2149         // Start stream.
2150         ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2151 
2152         // Wait 5 seconds to receive frames.
2153         sleep(5);
2154 
2155         // Stop stream.
2156         ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2157 
2158         EXPECT_GT(frameHandler->getReceiveFramesCount(), 0);
2159         EXPECT_TRUE(frameHandler->areAllFramesValid());
2160 
2161         // Explicitly close the ultrasonics array so resources are released right away
2162         ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2163     }
2164 }
2165 
2166 // Sets frames in flight before and after the start of the stream and verifies success.
2167 TEST_P(EvsAidlTest, UltrasonicsSetFramesInFlight) {
2168     LOG(INFO) << "Starting UltrasonicsSetFramesInFlight";
2169 
2170     // Get the ultrasonics array list
2171     loadUltrasonicsArrayList();
2172 
2173     // For each ultrasonics array.
2174     for (auto&& ultraInfo : mUltrasonicsArraysInfo) {
2175         LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2176 
2177         std::shared_ptr<IEvsUltrasonicsArray> pUltrasonicsArray;
2178         ASSERT_TRUE(
2179                 mEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId, &pUltrasonicsArray)
2180                         .isOk());
2181         EXPECT_NE(pUltrasonicsArray, nullptr);
2182 
2183         ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(10).isOk());
2184 
2185         std::shared_ptr<FrameHandlerUltrasonics> frameHandler =
2186                 ndk::SharedRefBase::make<FrameHandlerUltrasonics>(pUltrasonicsArray);
2187         EXPECT_NE(frameHandler, nullptr);
2188 
2189         // Start stream.
2190         ASSERT_TRUE(pUltrasonicsArray->startStream(frameHandler).isOk());
2191         ASSERT_TRUE(pUltrasonicsArray->setMaxFramesInFlight(5).isOk());
2192 
2193         // Stop stream.
2194         ASSERT_TRUE(pUltrasonicsArray->stopStream().isOk());
2195 
2196         // Explicitly close the ultrasonics array so resources are released right away
2197         ASSERT_TRUE(mEnumerator->closeUltrasonicsArray(pUltrasonicsArray).isOk());
2198     }
2199 }
2200 
2201 /*
2202  * DisplayOpen:
2203  * Test both clean shut down and "aggressive open" device stealing behavior.
2204  */
2205 TEST_P(EvsAidlTest, DisplayOpen) {
2206     LOG(INFO) << "Starting DisplayOpen test";
2207 
2208     // Request available display IDs.
2209     std::vector<uint8_t> displayIds;
2210     ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
2211     EXPECT_GT(displayIds.size(), 0);
2212 
2213     for (const auto displayId : displayIds) {
2214         std::shared_ptr<IEvsDisplay> pDisplay;
2215 
2216         // Request exclusive access to each EVS display, then let it go.
2217         ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2218         ASSERT_NE(pDisplay, nullptr);
2219 
2220         {
2221             // Ask the display what its name is.
2222             DisplayDesc desc;
2223             ASSERT_TRUE(pDisplay->getDisplayInfo(&desc).isOk());
2224             LOG(DEBUG) << "Found display " << desc.id;
2225         }
2226 
2227         ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
2228 
2229         // Ensure we can reopen the display after it has been closed.
2230         ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2231         ASSERT_NE(pDisplay, nullptr);
2232 
2233         // Open the display while it's already open -- ownership should be transferred.
2234         std::shared_ptr<IEvsDisplay> pDisplay2;
2235         ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay2).isOk());
2236         ASSERT_NE(pDisplay2, nullptr);
2237 
2238         {
2239             // Ensure the old display properly reports its assassination.
2240             DisplayState badState;
2241             EXPECT_TRUE(pDisplay->getDisplayState(&badState).isOk());
2242             EXPECT_EQ(badState, DisplayState::DEAD);
2243         }
2244 
2245         // Close only the newest display instance -- the other should already be a zombie.
2246         ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay2).isOk());
2247 
2248         // Finally, validate that we can open the display after the provoked failure above.
2249         ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2250         ASSERT_NE(pDisplay, nullptr);
2251         ASSERT_TRUE(mEnumerator->closeDisplay(pDisplay).isOk());
2252     }
2253 }
2254 
2255 /*
2256  * DisplayStates:
2257  * Validate that display states transition as expected and can be queried from either the display
2258  * object itself or the owning enumerator.
2259  */
2260 TEST_P(EvsAidlTest, DisplayStates) {
2261     using std::literals::chrono_literals::operator""ms;
2262 
2263     LOG(INFO) << "Starting DisplayStates test";
2264 
2265     // Request available display IDs.
2266     std::vector<uint8_t> displayIds;
2267     ASSERT_TRUE(mEnumerator->getDisplayIdList(&displayIds).isOk());
2268     EXPECT_GT(displayIds.size(), 0);
2269 
2270     for (const auto displayId : displayIds) {
2271         // Ensure the display starts in the expected state.
2272         {
2273             DisplayState state;
2274             EXPECT_FALSE(mEnumerator->getDisplayState(&state).isOk());
2275         }
2276         for (const auto displayIdToQuery : displayIds) {
2277             DisplayState state;
2278             EXPECT_FALSE(mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk());
2279         }
2280 
2281         // Scope to limit the lifetime of the pDisplay pointer, and thus the IEvsDisplay object.
2282         {
2283             // Request exclusive access to the EVS display.
2284             std::shared_ptr<IEvsDisplay> pDisplay;
2285             ASSERT_TRUE(mEnumerator->openDisplay(displayId, &pDisplay).isOk());
2286             ASSERT_NE(pDisplay, nullptr);
2287             {
2288                 DisplayState state;
2289                 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2290                 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2291             }
2292             for (const auto displayIdToQuery : displayIds) {
2293                 DisplayState state;
2294                 bool get_state_ok =
2295                         mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2296                 if (displayIdToQuery != displayId) {
2297                     EXPECT_FALSE(get_state_ok);
2298                 } else if (get_state_ok) {
2299                     EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2300                 }
2301             }
2302 
2303             // Activate the display.
2304             EXPECT_TRUE(pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME).isOk());
2305             {
2306                 DisplayState state;
2307                 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2308                 EXPECT_EQ(state, DisplayState::VISIBLE_ON_NEXT_FRAME);
2309             }
2310             {
2311                 DisplayState state;
2312                 EXPECT_TRUE(pDisplay->getDisplayState(&state).isOk());
2313                 EXPECT_EQ(state, DisplayState::VISIBLE_ON_NEXT_FRAME);
2314             }
2315             for (const auto displayIdToQuery : displayIds) {
2316                 DisplayState state;
2317                 bool get_state_ok =
2318                         mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2319                 if (displayIdToQuery != displayId) {
2320                     EXPECT_FALSE(get_state_ok);
2321                 } else if (get_state_ok) {
2322                     EXPECT_EQ(state, DisplayState::VISIBLE_ON_NEXT_FRAME);
2323                 }
2324             }
2325 
2326             // Get the output buffer we'd use to display the imagery.
2327             BufferDesc tgtBuffer;
2328             ASSERT_TRUE(pDisplay->getTargetBuffer(&tgtBuffer).isOk());
2329 
2330             // Send the target buffer back for display (we didn't actually fill anything).
2331             EXPECT_TRUE(pDisplay->returnTargetBufferForDisplay(tgtBuffer).isOk());
2332 
2333             // Sleep for a tenth of a second to ensure the driver has time to get the image
2334             // displayed.
2335             std::this_thread::sleep_for(100ms);
2336             {
2337                 DisplayState state;
2338                 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2339                 EXPECT_EQ(state, DisplayState::VISIBLE);
2340             }
2341             {
2342                 DisplayState state;
2343                 EXPECT_TRUE(pDisplay->getDisplayState(&state).isOk());
2344                 EXPECT_EQ(state, DisplayState::VISIBLE);
2345             }
2346             for (const auto displayIdToQuery : displayIds) {
2347                 DisplayState state;
2348                 bool get_state_ok =
2349                         mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2350                 if (displayIdToQuery != displayId) {
2351                     EXPECT_FALSE(get_state_ok);
2352                 } else if (get_state_ok) {
2353                     EXPECT_EQ(state, DisplayState::VISIBLE);
2354                 }
2355             }
2356 
2357             // Turn off the display.
2358             EXPECT_TRUE(pDisplay->setDisplayState(DisplayState::NOT_VISIBLE).isOk());
2359             std::this_thread::sleep_for(100ms);
2360             {
2361                 DisplayState state;
2362                 EXPECT_TRUE(mEnumerator->getDisplayState(&state).isOk());
2363                 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2364             }
2365             {
2366                 DisplayState state;
2367                 EXPECT_TRUE(pDisplay->getDisplayState(&state).isOk());
2368                 EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2369             }
2370             for (const auto displayIdToQuery : displayIds) {
2371                 DisplayState state;
2372                 bool get_state_ok =
2373                         mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk();
2374                 if (displayIdToQuery != displayId) {
2375                     EXPECT_FALSE(get_state_ok);
2376                 } else if (get_state_ok) {
2377                     EXPECT_EQ(state, DisplayState::NOT_VISIBLE);
2378                 }
2379             }
2380 
2381             // Close the display.
2382             mEnumerator->closeDisplay(pDisplay);
2383         }
2384 
2385         // Now that the display pointer has gone out of scope, causing the IEvsDisplay interface
2386         // object to be destroyed, we should be back to the "not open" state.
2387         // NOTE:  If we want this to pass without the sleep above, we'd have to add the
2388         //        (now recommended) closeDisplay() call instead of relying on the smart pointer
2389         //        going out of scope.  I've not done that because I want to verify that the deletion
2390         //        of the object does actually clean up (eventually).
2391         {
2392             DisplayState state;
2393             EXPECT_FALSE(mEnumerator->getDisplayState(&state).isOk());
2394         }
2395         for (const auto displayIdToQuery : displayIds) {
2396             DisplayState state;
2397             EXPECT_FALSE(mEnumerator->getDisplayStateById(displayIdToQuery, &state).isOk());
2398         }
2399     }
2400 }
2401 
2402 GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(EvsAidlTest);
2403 INSTANTIATE_TEST_SUITE_P(
2404         PerInstance, EvsAidlTest,
2405         testing::ValuesIn(android::getAidlHalInstanceNames(IEvsEnumerator::descriptor)),
2406         android::PrintInstanceNameToString);
2407 
2408 int main(int argc, char** argv) {
2409     ::testing::InitGoogleTest(&argc, argv);
2410     ABinderProcess_setThreadPoolMaxThreadCount(1);
2411     ABinderProcess_startThreadPool();
2412     return RUN_ALL_TESTS();
2413 }
2414