/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "camera_aidl_test.h"

#include <inttypes.h>

#include <CameraParameters.h>
#include <HandleImporter.h>
#include <aidl/android/hardware/camera/device/ICameraDevice.h>
#include <aidl/android/hardware/camera/metadata/CameraMetadataTag.h>
#include <aidl/android/hardware/camera/metadata/RequestAvailableColorSpaceProfilesMap.h>
#include <aidl/android/hardware/camera/metadata/RequestAvailableDynamicRangeProfilesMap.h>
#include <aidl/android/hardware/camera/metadata/SensorInfoColorFilterArrangement.h>
#include <aidl/android/hardware/camera/metadata/SensorPixelMode.h>
#include <aidl/android/hardware/camera/provider/BnCameraProviderCallback.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <device_cb.h>
#include <empty_device_cb.h>
#include <grallocusage/GrallocUsageConversion.h>
#include <hardware/gralloc1.h>
#include <simple_device_cb.h>
#include <ui/Fence.h>
#include <ui/GraphicBufferAllocator.h>
#include <regex>
#include <typeinfo>

using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
using ::aidl::android::hardware::camera::common::TorchModeStatus;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::ICameraDevice;
using ::aidl::android::hardware::camera::metadata::CameraMetadataTag;
using ::aidl::android::hardware::camera::metadata::SensorInfoColorFilterArrangement;
using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
using ::aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination;
using ::aidl::android::hardware::camera::provider::ICameraProvider;
using ::aidl::android::hardware::common::NativeHandle;
using ::android::hardware::camera::common::V1_0::helper::Size;
using ::ndk::ScopedAStatus;
using ::ndk::SpAIBinder;

namespace {
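// Parses a provider service descriptor of the form <service_name>/<type>/<id>
// into its provider type and numeric id.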
bool parseProviderName(const std::string& serviceDescriptor, std::string* type /*out*/,
                       uint32_t* id /*out*/) {
    if (!type || !id) {
        ADD_FAILURE();
        return false;
    }

    // expected format: <service_name>/<type>/<id>
    std::string::size_type slashIdx1 = serviceDescriptor.find('/');
    if (slashIdx1 == std::string::npos || slashIdx1 == serviceDescriptor.size() - 1) {
        ADD_FAILURE() << "Provider name does not have / separator between name, type, and id";
        return false;
    }

    std::string::size_type slashIdx2 = serviceDescriptor.find('/', slashIdx1 + 1);
    if (slashIdx2 == std::string::npos || slashIdx2 == serviceDescriptor.size() - 1) {
        ADD_FAILURE() << "Provider name does not have / separator between type and id";
        return false;
    }

    std::string typeVal = serviceDescriptor.substr(slashIdx1 + 1, slashIdx2 - slashIdx1 - 1);

    char* endPtr;
    errno = 0;
    int64_t idVal = strtol(serviceDescriptor.c_str() + slashIdx2 + 1, &endPtr, 10);
    if (errno != 0) {
        ADD_FAILURE() << "cannot parse provider id as an integer:" << serviceDescriptor.c_str()
                      << strerror(errno) << errno;
        return false;
    }
    if (endPtr != serviceDescriptor.c_str() + serviceDescriptor.size()) {
        ADD_FAILURE() << "provider id has unexpected length " << serviceDescriptor.c_str();
        return false;
    }
    if (idVal < 0) {
        ADD_FAILURE() << "id is negative: " << serviceDescriptor.c_str() << idVal;
        return false;
    }

    *type = typeVal;
    *id = static_cast<uint32_t>(idVal);

    return true;
}

const std::vector<int64_t> kMandatoryUseCases = {
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
}  // namespace

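// Connects to the camera provider service named by the test parameter, starts the
// binder thread pool, and records the provider type parsed from the descriptor.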
void CameraAidlTest::SetUp() {
    std::string serviceDescriptor = GetParam();
    ALOGI("get service with name: %s", serviceDescriptor.c_str());

    bool success = ABinderProcess_setThreadPoolMaxThreadCount(5);
    ALOGI("ABinderProcess_setThreadPoolMaxThreadCount returns %s", success ? "true" : "false");
    ASSERT_TRUE(success);
    ABinderProcess_startThreadPool();

    SpAIBinder cameraProviderBinder =
            SpAIBinder(AServiceManager_waitForService(serviceDescriptor.c_str()));
    ASSERT_NE(cameraProviderBinder.get(), nullptr);

    std::shared_ptr<ICameraProvider> cameraProvider =
            ICameraProvider::fromBinder(cameraProviderBinder);
    ASSERT_NE(cameraProvider.get(), nullptr);
    mProvider = cameraProvider;
    uint32_t id;
    ASSERT_TRUE(parseProviderName(serviceDescriptor, &mProviderType, &id));

    notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
}

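// Closes any capture session left open by the previous test case.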
void CameraAidlTest::TearDown() {
    if (mSession != nullptr) {
        ndk::ScopedAStatus ret = mSession->close();
        ASSERT_TRUE(ret.isOk());
    }
}

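// Waits (up to 300 ms) on each output buffer's release fence and, if the fence
// signals later than the recorded timestamp, updates the timestamp accordingly.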
void CameraAidlTest::waitForReleaseFence(
        std::vector<InFlightRequest::StreamBufferAndTimestamp>& resultOutputBuffers) {
    for (auto& bufferAndTimestamp : resultOutputBuffers) {
        // wait for the fence timestamp and store it along with the buffer
        android::sp<android::Fence> releaseFence = nullptr;
        const native_handle_t* releaseFenceHandle = bufferAndTimestamp.buffer.releaseFence;
        if (releaseFenceHandle != nullptr && releaseFenceHandle->numFds == 1 &&
            releaseFenceHandle->data[0] >= 0) {
            releaseFence = new android::Fence(dup(releaseFenceHandle->data[0]));
        }
        if (releaseFence && releaseFence->isValid()) {
            releaseFence->wait(/*ms*/ 300);
            nsecs_t releaseTime = releaseFence->getSignalTime();
            if (bufferAndTimestamp.timeStamp < releaseTime)
                bufferAndTimestamp.timeStamp = releaseTime;
        }
    }
}

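// Returns the provider's camera device names, merged with any external cameras
// reported via cameraDeviceStatusChange, filtered by the secure-only flag.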
std::vector<std::string> CameraAidlTest::getCameraDeviceNames(
        std::shared_ptr<ICameraProvider>& provider, bool addSecureOnly) {
    std::vector<std::string> cameraDeviceNames;

    ScopedAStatus ret = provider->getCameraIdList(&cameraDeviceNames);
    if (!ret.isOk()) {
        ADD_FAILURE() << "Could not get camera id list";
    }

    // External camera devices are reported through cameraDeviceStatusChange
    struct ProviderCb : public BnCameraProviderCallback {
        ScopedAStatus cameraDeviceStatusChange(const std::string& devName,
                                               CameraDeviceStatus newStatus) override {
            ALOGI("camera device status callback name %s, status %d", devName.c_str(),
                  (int)newStatus);
            if (newStatus == CameraDeviceStatus::PRESENT) {
                externalCameraDeviceNames.push_back(devName);
            }
            return ScopedAStatus::ok();
        }

        ScopedAStatus torchModeStatusChange(const std::string&, TorchModeStatus) override {
            return ScopedAStatus::ok();
        }

        ScopedAStatus physicalCameraDeviceStatusChange(
                const std::string&, const std::string&,
                ::aidl::android::hardware::camera::common::CameraDeviceStatus) override {
            return ScopedAStatus::ok();
        }

        std::vector<std::string> externalCameraDeviceNames;
    };
    std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
    auto status = mProvider->setCallback(cb);

    for (const auto& devName : cb->externalCameraDeviceNames) {
        if (cameraDeviceNames.end() ==
            std::find(cameraDeviceNames.begin(), cameraDeviceNames.end(), devName)) {
            cameraDeviceNames.push_back(devName);
        }
    }

    std::vector<std::string> retList;
    for (auto& cameraDeviceName : cameraDeviceNames) {
        bool isSecureOnlyCamera = isSecureOnly(mProvider, cameraDeviceName);
        if (addSecureOnly) {
            if (isSecureOnlyCamera) {
                retList.emplace_back(cameraDeviceName);
            }
        } else if (!isSecureOnlyCamera) {
            retList.emplace_back(cameraDeviceName);
        }
    }
    return retList;
}

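// Returns true if the camera's static metadata classifies it as a hidden secure camera.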
bool CameraAidlTest::isSecureOnly(const std::shared_ptr<ICameraProvider>& provider,
                                  const std::string& name) {
    std::shared_ptr<ICameraDevice> cameraDevice = nullptr;
    ScopedAStatus retInterface = provider->getCameraDeviceInterface(name, &cameraDevice);
    if (!retInterface.isOk()) {
        ADD_FAILURE() << "Failed to get camera device interface for " << name;
    }

    CameraMetadata cameraCharacteristics;
    ScopedAStatus retChars = cameraDevice->getCameraCharacteristics(&cameraCharacteristics);
    if (!retChars.isOk()) {
        ADD_FAILURE() << "Failed to get camera characteristics for device " << name;
    }

    camera_metadata_t* chars =
            reinterpret_cast<camera_metadata_t*>(cameraCharacteristics.metadata.data());

    SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
    Status retCameraKind = getSystemCameraKind(chars, &systemCameraKind);
    if (retCameraKind != Status::OK) {
        ADD_FAILURE() << "Failed to get camera kind for " << name;
    }

    return systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA;
}

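// Builds a map from camera id to fully qualified device name for this provider.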
std::map<std::string, std::string> CameraAidlTest::getCameraDeviceIdToNameMap(
        std::shared_ptr<ICameraProvider> provider) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(provider);

    std::map<std::string, std::string> idToNameMap;
    for (auto& name : cameraDeviceNames) {
        std::string version, cameraId;
        if (!matchDeviceName(name, mProviderType, &version, &cameraId)) {
            ADD_FAILURE();
        }
        idToNameMap.insert(std::make_pair(std::string(cameraId), name));
    }
    return idToNameMap;
}

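// Verifies that a capture result from a monochrome camera omits color-only tags
// and that per-channel values (black level, lens shading, tonemap) are identical.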
void CameraAidlTest::verifyMonochromeCameraResult(
        const ::android::hardware::camera::common::V1_0::helper::CameraMetadata& metadata) {
    camera_metadata_ro_entry entry;

    // Check tags that are not applicable for monochrome camera
    ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_GREEN_SPLIT));
    ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_NEUTRAL_COLOR_POINT));
    ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_MODE));
    ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_TRANSFORM));
    ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_GAINS));

    // Check dynamicBlackLevel
    entry = metadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
    if (entry.count > 0) {
        ASSERT_EQ(entry.count, 4);
        for (size_t i = 1; i < entry.count; i++) {
            ASSERT_FLOAT_EQ(entry.data.f[i], entry.data.f[0]);
        }
    }

    // Check noiseProfile
    entry = metadata.find(ANDROID_SENSOR_NOISE_PROFILE);
    if (entry.count > 0) {
        ASSERT_EQ(entry.count, 2);
    }

    // Check lensShadingMap
    entry = metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
    if (entry.count > 0) {
        ASSERT_EQ(entry.count % 4, 0);
        for (size_t i = 0; i < entry.count / 4; i++) {
            ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 1], entry.data.f[i * 4]);
            ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 2], entry.data.f[i * 4]);
            ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 3], entry.data.f[i * 4]);
        }
    }

    // Check tonemapCurve
    camera_metadata_ro_entry curveRed = metadata.find(ANDROID_TONEMAP_CURVE_RED);
    camera_metadata_ro_entry curveGreen = metadata.find(ANDROID_TONEMAP_CURVE_GREEN);
    camera_metadata_ro_entry curveBlue = metadata.find(ANDROID_TONEMAP_CURVE_BLUE);
    if (curveRed.count > 0 && curveGreen.count > 0 && curveBlue.count > 0) {
        ASSERT_EQ(curveRed.count, curveGreen.count);
        ASSERT_EQ(curveRed.count, curveBlue.count);
        for (size_t i = 0; i < curveRed.count; i++) {
            ASSERT_FLOAT_EQ(curveGreen.data.f[i], curveRed.data.f[i]);
            ASSERT_FLOAT_EQ(curveBlue.data.f[i], curveRed.data.f[i]);
        }
    }
}

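// Checks that the STREAM_USE_CASE capability and the list of available stream use
// cases are advertised consistently, and that the DEFAULT use case is included.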
void CameraAidlTest::verifyStreamUseCaseCharacteristics(const camera_metadata_t* metadata) {
    camera_metadata_ro_entry entry;
    // Check capabilities
    int retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
    bool hasStreamUseCaseCap = false;
    if ((0 == retcode) && (entry.count > 0)) {
        if (std::find(entry.data.u8, entry.data.u8 + entry.count,
                      ANDROID_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE) !=
            entry.data.u8 + entry.count) {
            hasStreamUseCaseCap = true;
        }
    }

    bool supportMandatoryUseCases = false;
    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
                                            &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        supportMandatoryUseCases = true;
        for (size_t i = 0; i < kMandatoryUseCases.size(); i++) {
            if (std::find(entry.data.i64, entry.data.i64 + entry.count, kMandatoryUseCases[i]) ==
                entry.data.i64 + entry.count) {
                supportMandatoryUseCases = false;
                break;
            }
        }
        bool supportDefaultUseCase = false;
        for (size_t i = 0; i < entry.count; i++) {
            if (entry.data.i64[i] == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
                supportDefaultUseCase = true;
            }
            ASSERT_TRUE(entry.data.i64[i] <= ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW
                        || entry.data.i64[i] >=
                                ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START);
        }
        ASSERT_TRUE(supportDefaultUseCase);
    }

    ASSERT_EQ(hasStreamUseCaseCap, supportMandatoryUseCases);
}

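// Checks that the settings override keys are either all present or all absent across
// the characteristics, request, and result key lists.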
void CameraAidlTest::verifySettingsOverrideCharacteristics(const camera_metadata_t* metadata) {
    camera_metadata_ro_entry entry;
    int retcode = find_camera_metadata_ro_entry(metadata,
            ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES, &entry);
    bool supportSettingsOverride = false;
    if (0 == retcode) {
        supportSettingsOverride = true;
        bool hasOff = false;
        for (size_t i = 0; i < entry.count; i++) {
            if (entry.data.u8[i] == ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF) {
                hasOff = true;
            }
        }
        ASSERT_TRUE(hasOff);
    }

    // Check availableRequestKeys
    retcode = find_camera_metadata_ro_entry(metadata,
            ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
    bool hasSettingsOverrideRequestKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasSettingsOverrideRequestKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                        ANDROID_CONTROL_SETTINGS_OVERRIDE) != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableRequestKeys failed!";
    }

    // Check availableResultKeys
    retcode = find_camera_metadata_ro_entry(metadata,
            ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
    bool hasSettingsOverrideResultKey = false;
    bool hasOverridingFrameNumberKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasSettingsOverrideResultKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                        ANDROID_CONTROL_SETTINGS_OVERRIDE) != entry.data.i32 + entry.count;
        hasOverridingFrameNumberKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                        ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER)
                        != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableResultKeys failed!";
    }

    // Check availableCharacteristicKeys
    retcode = find_camera_metadata_ro_entry(metadata,
            ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
    bool hasSettingsOverrideCharacteristicsKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasSettingsOverrideCharacteristicsKey = std::find(entry.data.i32,
                entry.data.i32 + entry.count, ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES)
                        != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
    }

    ASSERT_EQ(supportSettingsOverride, hasSettingsOverrideRequestKey);
    ASSERT_EQ(supportSettingsOverride, hasSettingsOverrideResultKey);
    ASSERT_EQ(supportSettingsOverride, hasOverridingFrameNumberKey);
    ASSERT_EQ(supportSettingsOverride, hasSettingsOverrideCharacteristicsKey);
}

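// Returns OK if the static metadata advertises the MONOCHROME capability.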
Status CameraAidlTest::isMonochromeCamera(const camera_metadata_t* staticMeta) {
    Status ret = Status::OPERATION_NOT_SUPPORTED;
    if (nullptr == staticMeta) {
        return Status::ILLEGAL_ARGUMENT;
    }

    camera_metadata_ro_entry entry;
    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
                                           &entry);

    if (0 != rc) {
        return Status::ILLEGAL_ARGUMENT;
    }

    for (size_t i = 0; i < entry.count; i++) {
        if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME == entry.data.u8[i]) {
            ret = Status::OK;
            break;
        }
    }

    return ret;
}

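// Returns OK if the static metadata advertises the LOGICAL_MULTI_CAMERA capability.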
Status CameraAidlTest::isLogicalMultiCamera(const camera_metadata_t* staticMeta) {
    Status ret = Status::OPERATION_NOT_SUPPORTED;
    if (nullptr == staticMeta) {
        return Status::ILLEGAL_ARGUMENT;
    }

    camera_metadata_ro_entry entry;
    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
                                           &entry);
    if (0 != rc) {
        return Status::ILLEGAL_ARGUMENT;
    }

    for (size_t i = 0; i < entry.count; i++) {
        if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA == entry.data.u8[i]) {
            ret = Status::OK;
            break;
        }
    }

    return ret;
}

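// Verifies that a logical camera result reports an active physical camera id that
// belongs to the set of physical ids advertised in the static metadata.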
void CameraAidlTest::verifyLogicalCameraResult(const camera_metadata_t* staticMetadata,
                                               const std::vector<uint8_t>& resultMetadata) {
    camera_metadata_t* metadata = (camera_metadata_t*)resultMetadata.data();

    std::unordered_set<std::string> physicalIds;
    Status rc = getPhysicalCameraIds(staticMetadata, &physicalIds);
    ASSERT_TRUE(Status::OK == rc);
    ASSERT_TRUE(physicalIds.size() > 1);

    camera_metadata_ro_entry entry;
    // Check mainPhysicalId
    find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID,
                                  &entry);
    if (entry.count > 0) {
        std::string mainPhysicalId(reinterpret_cast<const char*>(entry.data.u8));
        ASSERT_NE(physicalIds.find(mainPhysicalId), physicalIds.end());
    } else {
        ADD_FAILURE() << "Get LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID failed!";
    }
}

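// Splits the NUL-separated ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS entry into a
// set of physical camera id strings.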
Status CameraAidlTest::getPhysicalCameraIds(const camera_metadata_t* staticMeta,
                                            std::unordered_set<std::string>* physicalIds) {
    if ((nullptr == staticMeta) || (nullptr == physicalIds)) {
        return Status::ILLEGAL_ARGUMENT;
    }

    camera_metadata_ro_entry entry;
    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
                                           &entry);
    if (0 != rc) {
        return Status::ILLEGAL_ARGUMENT;
    }

    const uint8_t* ids = entry.data.u8;
    size_t start = 0;
    for (size_t i = 0; i < entry.count; i++) {
        if (ids[i] == '\0') {
            if (start != i) {
                std::string currentId(reinterpret_cast<const char*>(ids + start));
                physicalIds->emplace(currentId);
            }
            start = i + 1;
        }
    }

    return Status::OK;
}

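// Classifies a camera as hidden-secure, system-only, or public based on its
// advertised capabilities.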
Status CameraAidlTest::getSystemCameraKind(const camera_metadata_t* staticMeta,
                                           SystemCameraKind* systemCameraKind) {
    if (nullptr == staticMeta || nullptr == systemCameraKind) {
        return Status::ILLEGAL_ARGUMENT;
    }

    camera_metadata_ro_entry entry{};
    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
                                           &entry);
    if (0 != rc) {
        return Status::ILLEGAL_ARGUMENT;
    }

    if (entry.count == 1 &&
        entry.data.u8[0] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA) {
        *systemCameraKind = SystemCameraKind::HIDDEN_SECURE_CAMERA;
        return Status::OK;
    }

    // Go through the capabilities and check if it has
    // ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA
    for (size_t i = 0; i < entry.count; ++i) {
        uint8_t capability = entry.data.u8[i];
        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA) {
            *systemCameraKind = SystemCameraKind::SYSTEM_ONLY_CAMERA;
            return Status::OK;
        }
    }
    *systemCameraKind = SystemCameraKind::PUBLIC;
    return Status::OK;
}

void CameraAidlTest::notifyDeviceState(int64_t state) {
    if (mProvider == nullptr) {
        return;
    }
    mProvider->notifyDeviceStateChange(state);
}

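// Allocates a gralloc buffer with the requested dimensions, format, and usage for
// use as a test stream buffer.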
void CameraAidlTest::allocateGraphicBuffer(uint32_t width, uint32_t height, uint64_t usage,
                                           PixelFormat format, buffer_handle_t* buffer_handle) {
    ASSERT_NE(buffer_handle, nullptr);

    uint32_t stride;

    android::status_t err = android::GraphicBufferAllocator::get().allocateRawHandle(
            width, height, static_cast<int32_t>(format), 1u /*layerCount*/, usage, buffer_handle,
            &stride, "VtsHalCameraProviderV2");
    ASSERT_EQ(err, android::NO_ERROR);
}

bool CameraAidlTest::matchDeviceName(const std::string& deviceName, const std::string& providerType,
                                     std::string* deviceVersion, std::string* cameraId) {
    // expected format: device@<major>.<minor>/<type>/<id>
    std::stringstream pattern;
    pattern << "device@([0-9]+\\.[0-9]+)/" << providerType << "/(.+)";
    std::regex e(pattern.str());

    std::smatch sm;
    if (std::regex_match(deviceName, sm, e)) {
        if (deviceVersion != nullptr) {
            *deviceVersion = sm[1];
        }
        if (cameraId != nullptr) {
            *cameraId = sm[2];
        }
        return true;
    }
    return false;
}

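// Validates the static metadata buffer and a set of cross-cutting characteristics
// requirements: hardware level, keys the HAL must never set, HEIC info, lens pose
// reference, and device state orientations, plus the per-feature checks below.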
void CameraAidlTest::verifyCameraCharacteristics(const CameraMetadata& chars) {
    const camera_metadata_t* metadata =
            reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());

    size_t expectedSize = chars.metadata.size();
    int result = validate_camera_metadata_structure(metadata, &expectedSize);
    ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
    size_t entryCount = get_camera_metadata_entry_count(metadata);
    // TODO: we can do better than 0 here. Need to check how many required
    // characteristics keys we've defined.
    ASSERT_GT(entryCount, 0u);

    camera_metadata_ro_entry entry;
    int retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        uint8_t hardwareLevel = entry.data.u8[0];
        ASSERT_TRUE(hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED ||
                    hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL ||
                    hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3 ||
                    hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL);
    } else {
        ADD_FAILURE() << "Get camera hardware level failed!";
    }

    entry.count = 0;
    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION, &entry);
    if ((0 == retcode) || (entry.count > 0)) {
        ADD_FAILURE() << "ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION "
                      << " per API contract should never be set by Hal!";
    }
    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, &entry);
    if ((0 == retcode) || (entry.count > 0)) {
        ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS"
                      << " per API contract should never be set by Hal!";
    }
    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, &entry);
    if ((0 == retcode) || (entry.count > 0)) {
        ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS"
                      << " per API contract should never be set by Hal!";
    }
    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, &entry);
    if ((0 == retcode) || (entry.count > 0)) {
        ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS"
                      << " per API contract should never be set by Hal!";
    }

    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, &entry);
    if (0 == retcode || entry.count > 0) {
        ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS "
                      << " per API contract should never be set by Hal!";
    }

    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS, &entry);
    if (0 == retcode || entry.count > 0) {
        ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS "
                      << " per API contract should never be set by Hal!";
    }

    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS,
                                            &entry);
    if (0 == retcode || entry.count > 0) {
        ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS "
                      << " per API contract should never be set by Hal!";
    }

    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_HEIC_INFO_SUPPORTED, &entry);
    if (0 == retcode && entry.count > 0) {
        retcode = find_camera_metadata_ro_entry(
                metadata, ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT, &entry);
        if (0 == retcode && entry.count > 0) {
            uint8_t maxJpegAppSegmentsCount = entry.data.u8[0];
            ASSERT_TRUE(maxJpegAppSegmentsCount >= 1 && maxJpegAppSegmentsCount <= 16);
        } else {
            ADD_FAILURE() << "Get Heic maxJpegAppSegmentsCount failed!";
        }
    }

    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_LENS_POSE_REFERENCE, &entry);
    if (0 == retcode && entry.count > 0) {
        uint8_t poseReference = entry.data.u8[0];
        ASSERT_TRUE(poseReference <= ANDROID_LENS_POSE_REFERENCE_UNDEFINED &&
                    poseReference >= ANDROID_LENS_POSE_REFERENCE_PRIMARY_CAMERA);
    }

    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_INFO_DEVICE_STATE_ORIENTATIONS, &entry);
    if (0 == retcode && entry.count > 0) {
        ASSERT_TRUE((entry.count % 2) == 0);
        uint64_t maxPublicState = ((uint64_t)ICameraProvider::DEVICE_STATE_FOLDED) << 1;
        uint64_t vendorStateStart = 1UL << 31;  // Reserved for vendor specific states
        uint64_t stateMask = (1 << vendorStateStart) - 1;
        stateMask &= ~((1 << maxPublicState) - 1);
        for (int i = 0; i < entry.count; i += 2) {
            ASSERT_TRUE((entry.data.i64[i] & stateMask) == 0);
            ASSERT_TRUE((entry.data.i64[i + 1] % 90) == 0);
        }
    }

    verifyExtendedSceneModeCharacteristics(metadata);
    verifyZoomCharacteristics(metadata);
    verifyStreamUseCaseCharacteristics(metadata);
    verifySettingsOverrideCharacteristics(metadata);
}

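// Checks that the extended scene mode (bokeh) keys are advertised all-or-nothing and
// that each advertised mode has a supported maximum size and a valid zoom ratio range.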
void CameraAidlTest::verifyExtendedSceneModeCharacteristics(const camera_metadata_t* metadata) {
    camera_metadata_ro_entry entry;
    int retcode = 0;

    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_AVAILABLE_MODES, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        for (auto i = 0; i < entry.count; i++) {
            ASSERT_TRUE(entry.data.u8[i] >= ANDROID_CONTROL_MODE_OFF &&
                        entry.data.u8[i] <= ANDROID_CONTROL_MODE_USE_EXTENDED_SCENE_MODE);
        }
    } else {
        ADD_FAILURE() << "Get camera controlAvailableModes failed!";
    }

    // Check key availability in capabilities, request and result.

    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
    bool hasExtendedSceneModeRequestKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasExtendedSceneModeRequestKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableRequestKeys failed!";
    }

    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
    bool hasExtendedSceneModeResultKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasExtendedSceneModeResultKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableResultKeys failed!";
    }

    retcode = find_camera_metadata_ro_entry(metadata,
                                            ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
    bool hasExtendedSceneModeMaxSizesKey = false;
    bool hasExtendedSceneModeZoomRatioRangesKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasExtendedSceneModeMaxSizesKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES) !=
                entry.data.i32 + entry.count;
        hasExtendedSceneModeZoomRatioRangesKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES) !=
                entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
    }

    camera_metadata_ro_entry maxSizesEntry;
    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES, &maxSizesEntry);
    bool hasExtendedSceneModeMaxSizes = (0 == retcode && maxSizesEntry.count > 0);

    camera_metadata_ro_entry zoomRatioRangesEntry;
    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
            &zoomRatioRangesEntry);
    bool hasExtendedSceneModeZoomRatioRanges = (0 == retcode && zoomRatioRangesEntry.count > 0);

    // Extended scene mode keys must all be available, or all be unavailable.
    bool noExtendedSceneMode =
            !hasExtendedSceneModeRequestKey && !hasExtendedSceneModeResultKey &&
            !hasExtendedSceneModeMaxSizesKey && !hasExtendedSceneModeZoomRatioRangesKey &&
            !hasExtendedSceneModeMaxSizes && !hasExtendedSceneModeZoomRatioRanges;
    if (noExtendedSceneMode) {
        return;
    }
    bool hasExtendedSceneMode = hasExtendedSceneModeRequestKey && hasExtendedSceneModeResultKey &&
                                hasExtendedSceneModeMaxSizesKey &&
                                hasExtendedSceneModeZoomRatioRangesKey &&
                                hasExtendedSceneModeMaxSizes && hasExtendedSceneModeZoomRatioRanges;
    ASSERT_TRUE(hasExtendedSceneMode);

    // Must have DISABLED, and must have one of BOKEH_STILL_CAPTURE, BOKEH_CONTINUOUS, or a VENDOR
    // mode.
    ASSERT_TRUE((maxSizesEntry.count == 6 && zoomRatioRangesEntry.count == 2) ||
                (maxSizesEntry.count == 9 && zoomRatioRangesEntry.count == 4));
    bool hasDisabledMode = false;
    bool hasBokehStillCaptureMode = false;
    bool hasBokehContinuousMode = false;
    bool hasVendorMode = false;
    std::vector<AvailableStream> outputStreams;
    ASSERT_EQ(Status::OK, getAvailableOutputStreams(metadata, outputStreams));
    for (int i = 0, j = 0; i < maxSizesEntry.count && j < zoomRatioRangesEntry.count; i += 3) {
        int32_t mode = maxSizesEntry.data.i32[i];
        int32_t maxWidth = maxSizesEntry.data.i32[i + 1];
        int32_t maxHeight = maxSizesEntry.data.i32[i + 2];
        switch (mode) {
            case ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED:
                hasDisabledMode = true;
                ASSERT_TRUE(maxWidth == 0 && maxHeight == 0);
                break;
            case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE:
                hasBokehStillCaptureMode = true;
                j += 2;
                break;
            case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS:
                hasBokehContinuousMode = true;
                j += 2;
                break;
            default:
                if (mode < ANDROID_CONTROL_EXTENDED_SCENE_MODE_VENDOR_START) {
                    ADD_FAILURE() << "Invalid extended scene mode advertised: " << mode;
                } else {
                    hasVendorMode = true;
                    j += 2;
                }
                break;
        }

        if (mode != ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED) {
            // Make sure size is supported.
            bool sizeSupported = false;
            for (const auto& stream : outputStreams) {
                if ((stream.format == static_cast<int32_t>(PixelFormat::YCBCR_420_888) ||
                     stream.format == static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)) &&
                    stream.width == maxWidth && stream.height == maxHeight) {
                    sizeSupported = true;
                    break;
                }
            }
            ASSERT_TRUE(sizeSupported);

            // Make sure zoom range is valid
            float minZoomRatio = zoomRatioRangesEntry.data.f[0];
            float maxZoomRatio = zoomRatioRangesEntry.data.f[1];
            ASSERT_GT(minZoomRatio, 0.0f);
            ASSERT_LE(minZoomRatio, maxZoomRatio);
        }
    }
    ASSERT_TRUE(hasDisabledMode);
    ASSERT_TRUE(hasBokehStillCaptureMode || hasBokehContinuousMode || hasVendorMode);
}

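// Collects output stream configurations (scaler and depth) from the static metadata,
// optionally limited by a size/format threshold or to maximum-resolution configurations.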
Status CameraAidlTest::getAvailableOutputStreams(const camera_metadata_t* staticMeta,
                                                 std::vector<AvailableStream>& outputStreams,
                                                 const AvailableStream* threshold,
                                                 bool maxResolution) {
    if (nullptr == staticMeta) {
        return Status::ILLEGAL_ARGUMENT;
    }
    int scalerTag = maxResolution
                            ? ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
                            : ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
    int depthTag = maxResolution
                           ? ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
                           : ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS;

    camera_metadata_ro_entry scalerEntry;
    camera_metadata_ro_entry depthEntry;
    int foundScaler = find_camera_metadata_ro_entry(staticMeta, scalerTag, &scalerEntry);
    int foundDepth = find_camera_metadata_ro_entry(staticMeta, depthTag, &depthEntry);
    if ((0 != foundScaler || (0 != (scalerEntry.count % 4))) &&
        (0 != foundDepth || (0 != (depthEntry.count % 4)))) {
        return Status::ILLEGAL_ARGUMENT;
    }

    if (foundScaler == 0 && (0 == (scalerEntry.count % 4))) {
        fillOutputStreams(&scalerEntry, outputStreams, threshold,
                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
    }

    if (foundDepth == 0 && (0 == (depthEntry.count % 4))) {
        AvailableStream depthPreviewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                                 static_cast<int32_t>(PixelFormat::Y16)};
        const AvailableStream* depthThreshold =
                isDepthOnly(staticMeta) ? &depthPreviewThreshold : threshold;
        fillOutputStreams(&depthEntry, outputStreams, depthThreshold,
                          ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT);
    }

    return Status::OK;
}

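// Appends output stream configurations from a stream configuration entry; when a
// threshold is given, only matching-format entries at or below its size are added.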
void CameraAidlTest::fillOutputStreams(camera_metadata_ro_entry_t* entry,
                                       std::vector<AvailableStream>& outputStreams,
                                       const AvailableStream* threshold,
                                       const int32_t availableConfigOutputTag) {
    for (size_t i = 0; i < entry->count; i += 4) {
        if (availableConfigOutputTag == entry->data.i32[i + 3]) {
            if (nullptr == threshold) {
                AvailableStream s = {entry->data.i32[i + 1], entry->data.i32[i + 2],
                                     entry->data.i32[i]};
                outputStreams.push_back(s);
            } else {
                if ((threshold->format == entry->data.i32[i]) &&
                    (threshold->width >= entry->data.i32[i + 1]) &&
                    (threshold->height >= entry->data.i32[i + 2])) {
                    AvailableStream s = {entry->data.i32[i + 1], entry->data.i32[i + 2],
                                         threshold->format};
                    outputStreams.push_back(s);
                }
            }
        }
    }
}

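// Verifies that the zoom-ratio keys are consistently present, that the advertised zoom
// range is sane, and that the cropping type is CENTER_ONLY.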
void CameraAidlTest::verifyZoomCharacteristics(const camera_metadata_t* metadata) {
    camera_metadata_ro_entry entry;
    int retcode = 0;

    // Check key availability in capabilities, request and result.
    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
                                            &entry);
    float maxDigitalZoom = 1.0;
    if ((0 == retcode) && (entry.count == 1)) {
        maxDigitalZoom = entry.data.f[0];
    } else {
        ADD_FAILURE() << "Get camera scalerAvailableMaxDigitalZoom failed!";
    }

    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
    bool hasZoomRequestKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasZoomRequestKey = std::find(entry.data.i32, entry.data.i32 + entry.count,
                                      ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableRequestKeys failed!";
    }

    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
    bool hasZoomResultKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasZoomResultKey = std::find(entry.data.i32, entry.data.i32 + entry.count,
                                     ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableResultKeys failed!";
    }

    retcode = find_camera_metadata_ro_entry(metadata,
                                            ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
    bool hasZoomCharacteristicsKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasZoomCharacteristicsKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_ZOOM_RATIO_RANGE) != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
    }

    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
    bool hasZoomRatioRange = (0 == retcode && entry.count == 2);

    // Zoom keys must all be available, or all be unavailable.
    bool noZoomRatio = !hasZoomRequestKey && !hasZoomResultKey && !hasZoomCharacteristicsKey &&
                       !hasZoomRatioRange;
    if (noZoomRatio) {
        return;
    }
    bool hasZoomRatio =
            hasZoomRequestKey && hasZoomResultKey && hasZoomCharacteristicsKey && hasZoomRatioRange;
    ASSERT_TRUE(hasZoomRatio);

    float minZoomRatio = entry.data.f[0];
    float maxZoomRatio = entry.data.f[1];
    constexpr float FLOATING_POINT_THRESHOLD = 0.00001f;
    if (maxDigitalZoom > maxZoomRatio + FLOATING_POINT_THRESHOLD) {
        ADD_FAILURE() << "Maximum digital zoom " << maxDigitalZoom
                      << " is larger than maximum zoom ratio " << maxZoomRatio << " + threshold "
                      << FLOATING_POINT_THRESHOLD << "!";
    }
    if (minZoomRatio > maxZoomRatio) {
        ADD_FAILURE() << "Maximum zoom ratio is less than minimum zoom ratio!";
    }
    if (minZoomRatio > 1.0f) {
        ADD_FAILURE() << "Minimum zoom ratio is more than 1.0!";
    }
    if (maxZoomRatio < 1.0f) {
        ADD_FAILURE() << "Maximum zoom ratio is less than 1.0!";
    }

    // Make sure CROPPING_TYPE is CENTER_ONLY
    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_CROPPING_TYPE, &entry);
    if ((0 == retcode) && (entry.count == 1)) {
        int8_t croppingType = entry.data.u8[0];
        ASSERT_EQ(croppingType, ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY);
    } else {
        ADD_FAILURE() << "Get camera scalerCroppingType failed!";
    }
}

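// For monochrome cameras, verifies that color-specific capabilities, request/result
// keys, and characteristics are not advertised and the black level pattern is uniform.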
void CameraAidlTest::verifyMonochromeCharacteristics(const CameraMetadata& chars) {
    const camera_metadata_t* metadata = (camera_metadata_t*)chars.metadata.data();
    Status rc = isMonochromeCamera(metadata);
    if (Status::OPERATION_NOT_SUPPORTED == rc) {
        return;
    }
    ASSERT_EQ(Status::OK, rc);

    camera_metadata_ro_entry entry;
    // Check capabilities
    int retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        ASSERT_EQ(std::find(entry.data.u8, entry.data.u8 + entry.count,
                            ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING),
                  entry.data.u8 + entry.count);
    }

    // Check Cfa
    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                                            &entry);
    if ((0 == retcode) && (entry.count == 1)) {
        ASSERT_TRUE(entry.data.i32[0] ==
                            static_cast<int32_t>(
                                    SensorInfoColorFilterArrangement::
                                            ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO) ||
                    entry.data.i32[0] ==
                            static_cast<int32_t>(
                                    SensorInfoColorFilterArrangement::
                                            ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR));
    }

    // Check availableRequestKeys
    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        for (size_t i = 0; i < entry.count; i++) {
            ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE);
            ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM);
            ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS);
        }
    } else {
        ADD_FAILURE() << "Get camera availableRequestKeys failed!";
    }

    // Check availableResultKeys
    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        for (size_t i = 0; i < entry.count; i++) {
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_GREEN_SPLIT);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
            ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE);
            ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM);
            ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS);
        }
    } else {
        ADD_FAILURE() << "Get camera availableResultKeys failed!";
    }

    // Check availableCharacteristicKeys
    retcode = find_camera_metadata_ro_entry(metadata,
                                            ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        for (size_t i = 0; i < entry.count; i++) {
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM1);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM2);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX1);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX2);
        }
    } else {
        ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
    }

    // Check blackLevelPattern
    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_BLACK_LEVEL_PATTERN, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        ASSERT_EQ(entry.count, 4);
        for (size_t i = 1; i < entry.count; i++) {
            ASSERT_EQ(entry.data.i32[i], entry.data.i32[0]);
        }
    }
}

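// Verifies the recommended stream configuration entries (regular and depth) and the
// recommended input/output format map, including their presence in the available keys.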
verifyRecommendedConfigs(const CameraMetadata & chars)1071 void CameraAidlTest::verifyRecommendedConfigs(const CameraMetadata& chars) {
1072     size_t CONFIG_ENTRY_SIZE = 5;
1073     size_t CONFIG_ENTRY_TYPE_OFFSET = 3;
1074     size_t CONFIG_ENTRY_BITFIELD_OFFSET = 4;
1075     uint32_t maxPublicUsecase =
1076             ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PUBLIC_END_3_8;
1077     uint32_t vendorUsecaseStart =
1078             ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VENDOR_START;
1079     uint32_t usecaseMask = (1 << vendorUsecaseStart) - 1;
1080     usecaseMask &= ~((1 << maxPublicUsecase) - 1);
1081 
1082     const camera_metadata_t* metadata =
1083             reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
1084 
1085     camera_metadata_ro_entry recommendedConfigsEntry, recommendedDepthConfigsEntry, ioMapEntry;
1086     recommendedConfigsEntry.count = recommendedDepthConfigsEntry.count = ioMapEntry.count = 0;
1087     int retCode = find_camera_metadata_ro_entry(
1088             metadata, ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
1089             &recommendedConfigsEntry);
1090     int depthRetCode = find_camera_metadata_ro_entry(
1091             metadata, ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS,
1092             &recommendedDepthConfigsEntry);
1093     int ioRetCode = find_camera_metadata_ro_entry(
1094             metadata, ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP, &ioMapEntry);
1095     if ((0 != retCode) && (0 != depthRetCode)) {
1096         // In case both regular and depth recommended configurations are absent,
1097         // I/O should be absent as well.
1098         ASSERT_NE(ioRetCode, 0);
1099         return;
1100     }
1101 
1102     camera_metadata_ro_entry availableKeysEntry;
1103     retCode = find_camera_metadata_ro_entry(
1104             metadata, ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &availableKeysEntry);
1105     ASSERT_TRUE((0 == retCode) && (availableKeysEntry.count > 0));
1106     std::vector<int32_t> availableKeys;
1107     availableKeys.reserve(availableKeysEntry.count);
1108     availableKeys.insert(availableKeys.end(), availableKeysEntry.data.i32,
1109                          availableKeysEntry.data.i32 + availableKeysEntry.count);
1110 
1111     if (recommendedConfigsEntry.count > 0) {
1112         ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
1113                             ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS),
1114                   availableKeys.end());
1115         ASSERT_EQ((recommendedConfigsEntry.count % CONFIG_ENTRY_SIZE), 0);
1116         for (size_t i = 0; i < recommendedConfigsEntry.count; i += CONFIG_ENTRY_SIZE) {
1117             int32_t entryType = recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET];
1118             uint32_t bitfield = recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET];
1119             ASSERT_TRUE((entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) ||
1120                         (entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT));
1121             ASSERT_TRUE((bitfield & usecaseMask) == 0);
1122         }
1123     }
1124 
1125     if (recommendedDepthConfigsEntry.count > 0) {
1126         ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
1127                             ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS),
1128                   availableKeys.end());
1129         ASSERT_EQ((recommendedDepthConfigsEntry.count % CONFIG_ENTRY_SIZE), 0);
1130         for (size_t i = 0; i < recommendedDepthConfigsEntry.count; i += CONFIG_ENTRY_SIZE) {
1131             int32_t entryType = recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET];
1132             uint32_t bitfield =
1133                     recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET];
1134             ASSERT_TRUE((entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) ||
1135                         (entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT));
1136             ASSERT_TRUE((bitfield & usecaseMask) == 0);
1137         }
1138 
1139         if (recommendedConfigsEntry.count == 0) {
1140             // In case regular recommended configurations are absent but recommended depth
1141             // configurations are present, the input/output formats map should be absent.
1142             ASSERT_NE(ioRetCode, 0);
1143         }
1144     }
1145 
1146     if ((ioRetCode == 0) && (ioMapEntry.count > 0)) {
1147         ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
1148                             ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP),
1149                   availableKeys.end());
1150         ASSERT_EQ(isZSLModeAvailable(metadata), Status::OK);
1151     }
1152 }
1153 
1154 // Check whether ZSL is available using the static camera
1155 // characteristics.
1156 Status CameraAidlTest::isZSLModeAvailable(const camera_metadata_t* staticMeta) {
1157     if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
1158         return Status::OK;
1159     } else {
1160         return isZSLModeAvailable(staticMeta, YUV_REPROCESS);
1161     }
1162 }
1163 
1164 Status CameraAidlTest::isZSLModeAvailable(const camera_metadata_t* staticMeta,
1165                                           ReprocessType reprocType) {
1166     Status ret = Status::OPERATION_NOT_SUPPORTED;
1167     if (nullptr == staticMeta) {
1168         return Status::ILLEGAL_ARGUMENT;
1169     }
1170 
1171     camera_metadata_ro_entry entry;
1172     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1173                                            &entry);
1174     if (0 != rc) {
1175         return Status::ILLEGAL_ARGUMENT;
1176     }
1177 
1178     for (size_t i = 0; i < entry.count; i++) {
1179         if ((reprocType == PRIV_REPROCESS &&
1180              ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING == entry.data.u8[i]) ||
1181             (reprocType == YUV_REPROCESS &&
1182              ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING == entry.data.u8[i])) {
1183             ret = Status::OK;
1184             break;
1185         }
1186     }
1187 
1188     return ret;
1189 }
1190 
1191 // Verify logical or ultra high resolution camera static metadata
1192 void CameraAidlTest::verifyLogicalOrUltraHighResCameraMetadata(
1193         const std::string& cameraName, const std::shared_ptr<ICameraDevice>& device,
1194         const CameraMetadata& chars, const std::vector<std::string>& deviceNames) {
1195     const camera_metadata_t* metadata =
1196             reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
1197     ASSERT_NE(nullptr, metadata);
1198     SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
1199     Status retStatus = getSystemCameraKind(metadata, &systemCameraKind);
1200     ASSERT_EQ(retStatus, Status::OK);
1201     Status rc = isLogicalMultiCamera(metadata);
1202     ASSERT_TRUE(Status::OK == rc || Status::OPERATION_NOT_SUPPORTED == rc);
1203     bool isMultiCamera = (Status::OK == rc);
1204     bool isUltraHighResCamera = isUltraHighResolution(metadata);
1205     if (!isMultiCamera && !isUltraHighResCamera) {
1206         return;
1207     }
1208 
1209     camera_metadata_ro_entry entry;
1210     int retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1211     bool hasZoomRatioRange = (0 == retcode && entry.count == 2);
1212     retcode = find_camera_metadata_ro_entry(
1213             metadata, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
1214     bool hasHalBufferManager =
1215             (0 == retcode && 1 == entry.count &&
1216              entry.data.i32[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
1217     retcode = find_camera_metadata_ro_entry(
1218             metadata, ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED, &entry);
1219     bool multiResolutionStreamSupported =
1220             (0 == retcode && 1 == entry.count &&
1221              entry.data.u8[0] == ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_TRUE);
1222     if (multiResolutionStreamSupported) {
1223         ASSERT_TRUE(hasHalBufferManager);
1224     }
1225 
1226     std::string version, cameraId;
1227     ASSERT_TRUE(matchDeviceName(cameraName, mProviderType, &version, &cameraId));
1228     std::unordered_set<std::string> physicalIds;
1229     rc = getPhysicalCameraIds(metadata, &physicalIds);
1230     ASSERT_TRUE(isUltraHighResCamera || Status::OK == rc);
1231     for (const auto& physicalId : physicalIds) {
1232         ASSERT_NE(physicalId, cameraId);
1233     }
1234     if (physicalIds.size() == 0) {
1235         ASSERT_TRUE(isUltraHighResCamera && !isMultiCamera);
1236         physicalIds.insert(cameraId);
1237     }
1238 
1239     std::unordered_set<int32_t> physicalRequestKeyIDs;
1240     rc = getSupportedKeys(const_cast<camera_metadata_t*>(metadata),
1241                           ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1242                           &physicalRequestKeyIDs);
1243     ASSERT_TRUE(Status::OK == rc);
1244     bool hasTestPatternPhysicalRequestKey =
1245             physicalRequestKeyIDs.find(ANDROID_SENSOR_TEST_PATTERN_MODE) !=
1246             physicalRequestKeyIDs.end();
1247     std::unordered_set<int32_t> privacyTestPatternModes;
1248     getPrivacyTestPatternModes(metadata, &privacyTestPatternModes);
1249 
1250     // Map from image format to number of multi-resolution sizes for that format
1251     std::unordered_map<int32_t, size_t> multiResOutputFormatCounterMap;
1252     std::unordered_map<int32_t, size_t> multiResInputFormatCounterMap;
1253     for (const auto& physicalId : physicalIds) {
1254         bool isPublicId = false;
1255         std::string fullPublicId;
1256         SystemCameraKind physSystemCameraKind = SystemCameraKind::PUBLIC;
1257         for (auto& deviceName : deviceNames) {
1258             std::string publicVersion, publicId;
1259             ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
1260             if (physicalId == publicId) {
1261                 isPublicId = true;
1262                 fullPublicId = deviceName;
1263                 break;
1264             }
1265         }
1266 
1267         camera_metadata_ro_entry physicalMultiResStreamConfigs;
1268         camera_metadata_ro_entry physicalStreamConfigs;
1269         camera_metadata_ro_entry physicalMaxResolutionStreamConfigs;
1270         CameraMetadata physChars;
1271         bool isUltraHighRes = false;
1272         std::unordered_set<int32_t> subCameraPrivacyTestPatterns;
1273         if (isPublicId) {
1274             std::shared_ptr<ICameraDevice> subDevice;
1275             ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(fullPublicId, &subDevice);
1276             ASSERT_TRUE(ret.isOk());
1277             ASSERT_NE(subDevice, nullptr);
1278 
1279             ret = subDevice->getCameraCharacteristics(&physChars);
1280             ASSERT_TRUE(ret.isOk());
1281 
1282             const camera_metadata_t* staticMetadata =
1283                     reinterpret_cast<const camera_metadata_t*>(physChars.metadata.data());
1284             retStatus = getSystemCameraKind(staticMetadata, &physSystemCameraKind);
1285             ASSERT_EQ(retStatus, Status::OK);
1286 
1287             // Make sure that the system camera kind of a non-hidden
1288             // physical camera is the same as that of the logical camera
1289             // associated with it.
1290             ASSERT_EQ(physSystemCameraKind, systemCameraKind);
1291             retcode = find_camera_metadata_ro_entry(staticMetadata,
1292                                                     ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1293             bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2);
1294             ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange);
1295 
1296             getMultiResolutionStreamConfigurations(
1297                     &physicalMultiResStreamConfigs, &physicalStreamConfigs,
1298                     &physicalMaxResolutionStreamConfigs, staticMetadata);
1299             isUltraHighRes = isUltraHighResolution(staticMetadata);
1300 
1301             getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns);
1302         } else {
1303             // Check camera characteristics for hidden camera id
1304             ndk::ScopedAStatus ret =
1305                     device->getPhysicalCameraCharacteristics(physicalId, &physChars);
1306             ASSERT_TRUE(ret.isOk());
1307             verifyCameraCharacteristics(physChars);
1308             verifyMonochromeCharacteristics(physChars);
1309 
1310             auto staticMetadata = (const camera_metadata_t*)physChars.metadata.data();
1311             retcode = find_camera_metadata_ro_entry(staticMetadata,
1312                                                     ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1313             bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2);
1314             ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange);
1315 
1316             getMultiResolutionStreamConfigurations(
1317                     &physicalMultiResStreamConfigs, &physicalStreamConfigs,
1318                     &physicalMaxResolutionStreamConfigs, staticMetadata);
1319             isUltraHighRes = isUltraHighResolution(staticMetadata);
1320             getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns);
1321 
1322             // Check that calling getCameraDeviceInterface() on a hidden camera id
1323             // returns ILLEGAL_ARGUMENT.
1324             std::stringstream s;
1325             s << "device@" << version << "/" << mProviderType << "/" << physicalId;
1326             std::string fullPhysicalId(s.str());
1327             std::shared_ptr<ICameraDevice> subDevice;
1328             ret = mProvider->getCameraDeviceInterface(fullPhysicalId, &subDevice);
1329             ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1330                         ret.getServiceSpecificError());
1331             ASSERT_EQ(subDevice, nullptr);
1332         }
1333 
1334         if (hasTestPatternPhysicalRequestKey) {
1335             ASSERT_TRUE(privacyTestPatternModes == subCameraPrivacyTestPatterns);
1336         }
1337 
1338         if (physicalMultiResStreamConfigs.count > 0) {
1339             ASSERT_EQ(physicalMultiResStreamConfigs.count % 4, 0);
1340 
1341             // Each supported size must be the max size for that format.
1342             for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4; i++) {
1343                 int32_t multiResFormat = physicalMultiResStreamConfigs.data.i32[i * 4];
1344                 int32_t multiResWidth = physicalMultiResStreamConfigs.data.i32[i * 4 + 1];
1345                 int32_t multiResHeight = physicalMultiResStreamConfigs.data.i32[i * 4 + 2];
1346                 int32_t multiResInput = physicalMultiResStreamConfigs.data.i32[i * 4 + 3];
1347 
1348                 // Check if the resolution is the max resolution in stream
1349                 // configuration map
1350                 bool supported = false;
1351                 bool isMaxSize = true;
1352                 for (size_t j = 0; j < physicalStreamConfigs.count / 4; j++) {
1353                     int32_t format = physicalStreamConfigs.data.i32[j * 4];
1354                     int32_t width = physicalStreamConfigs.data.i32[j * 4 + 1];
1355                     int32_t height = physicalStreamConfigs.data.i32[j * 4 + 2];
1356                     int32_t input = physicalStreamConfigs.data.i32[j * 4 + 3];
1357                     if (format == multiResFormat && input == multiResInput) {
1358                         if (width == multiResWidth && height == multiResHeight) {
1359                             supported = true;
1360                         } else if (width * height > multiResWidth * multiResHeight) {
1361                             isMaxSize = false;
1362                         }
1363                     }
1364                 }
1365                 // Check if the resolution is the max resolution in max
1366                 // resolution stream configuration map
1367                 bool supportedUltraHighRes = false;
1368                 bool isUltraHighResMaxSize = true;
1369                 for (size_t j = 0; j < physicalMaxResolutionStreamConfigs.count / 4; j++) {
1370                     int32_t format = physicalMaxResolutionStreamConfigs.data.i32[j * 4];
1371                     int32_t width = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 1];
1372                     int32_t height = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 2];
1373                     int32_t input = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 3];
1374                     if (format == multiResFormat && input == multiResInput) {
1375                         if (width == multiResWidth && height == multiResHeight) {
1376                             supportedUltraHighRes = true;
1377                         } else if (width * height > multiResWidth * multiResHeight) {
1378                             isUltraHighResMaxSize = false;
1379                         }
1380                     }
1381                 }
1382 
1383                 if (isUltraHighRes) {
1384                     // For ultra high resolution camera, the configuration must
1385                     // be the maximum size in stream configuration map, or max
1386                     // resolution stream configuration map
1387                     ASSERT_TRUE((supported && isMaxSize) ||
1388                                 (supportedUltraHighRes && isUltraHighResMaxSize));
1389                 } else {
1390                     // The configuration must be the maximum size in stream
1391                     // configuration map
1392                     ASSERT_TRUE(supported && isMaxSize);
1393                     ASSERT_FALSE(supportedUltraHighRes);
1394                 }
1395 
1396                 // Increment the counter for the configuration's format.
1397                 auto& formatCounterMap = multiResInput ? multiResInputFormatCounterMap
1398                                                        : multiResOutputFormatCounterMap;
1399                 if (formatCounterMap.count(multiResFormat) == 0) {
1400                     formatCounterMap[multiResFormat] = 1;
1401                 } else {
1402                     formatCounterMap[multiResFormat]++;
1403                 }
1404             }
1405 
1406             // There must be no duplicates
1407             for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4 - 1; i++) {
1408                 for (size_t j = i + 1; j < physicalMultiResStreamConfigs.count / 4; j++) {
1409                     // Input/output doesn't match
1410                     if (physicalMultiResStreamConfigs.data.i32[i * 4 + 3] !=
1411                         physicalMultiResStreamConfigs.data.i32[j * 4 + 3]) {
1412                         continue;
1413                     }
1414                     // Format doesn't match
1415                     if (physicalMultiResStreamConfigs.data.i32[i * 4] !=
1416                         physicalMultiResStreamConfigs.data.i32[j * 4]) {
1417                         continue;
1418                     }
1419                     // Width doesn't match
1420                     if (physicalMultiResStreamConfigs.data.i32[i * 4 + 1] !=
1421                         physicalMultiResStreamConfigs.data.i32[j * 4 + 1]) {
1422                         continue;
1423                     }
1424                     // Height doesn't match
1425                     if (physicalMultiResStreamConfigs.data.i32[i * 4 + 2] !=
1426                         physicalMultiResStreamConfigs.data.i32[j * 4 + 2]) {
1427                         continue;
1428                     }
1429                     // input/output, format, width, and height all match
1430                     ADD_FAILURE();
1431                 }
1432             }
1433         }
1434     }
1435 
1436     // If multi-resolution streams are supported, there must be at least one
1437     // format with more than one resolution.
1438     if (multiResolutionStreamSupported) {
1439         size_t numMultiResFormats = 0;
1440         for (const auto& [format, sizeCount] : multiResOutputFormatCounterMap) {
1441             if (sizeCount >= 2) {
1442                 numMultiResFormats++;
1443             }
1444         }
1445         for (const auto& [format, sizeCount] : multiResInputFormatCounterMap) {
1446             if (sizeCount >= 2) {
1447                 numMultiResFormats++;
1448 
1449                 // If multi-resolution reprocessing is supported, the logical
1450                 // camera or ultra-high resolution sensor camera must support
1451                 // the corresponding reprocessing capability.
1452                 if (format == static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)) {
1453                     ASSERT_EQ(isZSLModeAvailable(metadata, PRIV_REPROCESS), Status::OK);
1454                 } else if (format == static_cast<int32_t>(PixelFormat::YCBCR_420_888)) {
1455                     ASSERT_EQ(isZSLModeAvailable(metadata, YUV_REPROCESS), Status::OK);
1456                 }
1457             }
1458         }
1459         ASSERT_GT(numMultiResFormats, 0);
1460     }
1461 
1462     // Make sure ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID is available in
1463     // result keys.
1464     if (isMultiCamera) {
1465         retcode = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
1466                                                 &entry);
1467         if ((0 == retcode) && (entry.count > 0)) {
1468             ASSERT_NE(std::find(entry.data.i32, entry.data.i32 + entry.count,
1469                                 static_cast<int32_t>(
1470                                         CameraMetadataTag::
1471                                                 ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID)),
1472                       entry.data.i32 + entry.count);
1473         } else {
1474             ADD_FAILURE() << "Get camera availableResultKeys failed!";
1475         }
1476     }
1477 }
1478 
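// Check whether the static metadata advertises the ULTRA_HIGH_RESOLUTION_SENSOR capability.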
1479 bool CameraAidlTest::isUltraHighResolution(const camera_metadata_t* staticMeta) {
1480     camera_metadata_ro_entry scalerEntry;
1481     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1482                                            &scalerEntry);
1483     if (rc == 0) {
1484         for (uint32_t i = 0; i < scalerEntry.count; i++) {
1485             if (scalerEntry.data.u8[i] ==
1486                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
1487                 return true;
1488             }
1489         }
1490     }
1491     return false;
1492 }
1493 
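// Collect the int32 key ids stored under 'tagId' into 'requestIDs'.
// A missing or empty entry is not treated as an error.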
1494 Status CameraAidlTest::getSupportedKeys(camera_metadata_t* staticMeta, uint32_t tagId,
1495                                         std::unordered_set<int32_t>* requestIDs) {
1496     if ((nullptr == staticMeta) || (nullptr == requestIDs)) {
1497         return Status::ILLEGAL_ARGUMENT;
1498     }
1499 
1500     camera_metadata_ro_entry entry;
1501     int rc = find_camera_metadata_ro_entry(staticMeta, tagId, &entry);
1502     if ((0 != rc) || (entry.count == 0)) {
1503         return Status::OK;
1504     }
1505 
1506     requestIDs->insert(entry.data.i32, entry.data.i32 + entry.count);
1507 
1508     return Status::OK;
1509 }
1510 
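// Gather the privacy test pattern modes (SOLID_COLOR and BLACK) advertised in
// ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES.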
1511 void CameraAidlTest::getPrivacyTestPatternModes(
1512         const camera_metadata_t* staticMetadata,
1513         std::unordered_set<int32_t>* privacyTestPatternModes) {
1514     ASSERT_NE(staticMetadata, nullptr);
1515     ASSERT_NE(privacyTestPatternModes, nullptr);
1516 
1517     camera_metadata_ro_entry entry;
1518     int retcode = find_camera_metadata_ro_entry(
1519             staticMetadata, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &entry);
1520     ASSERT_TRUE(0 == retcode);
1521 
1522     for (size_t i = 0; i < entry.count; i++) {
1523         if (entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR ||
1524             entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_BLACK) {
1525             privacyTestPatternModes->insert(entry.data.i32[i]);
1526         }
1527     }
1528 }
1529 
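// Look up the default, maximum-resolution and physical multi-resolution stream
// configuration entries from the static metadata. The latter two are optional.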
1530 void CameraAidlTest::getMultiResolutionStreamConfigurations(
1531         camera_metadata_ro_entry* multiResStreamConfigs, camera_metadata_ro_entry* streamConfigs,
1532         camera_metadata_ro_entry* maxResolutionStreamConfigs,
1533         const camera_metadata_t* staticMetadata) {
1534     ASSERT_NE(multiResStreamConfigs, nullptr);
1535     ASSERT_NE(streamConfigs, nullptr);
1536     ASSERT_NE(maxResolutionStreamConfigs, nullptr);
1537     ASSERT_NE(staticMetadata, nullptr);
1538 
1539     int retcode = find_camera_metadata_ro_entry(
1540             staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, streamConfigs);
1541     ASSERT_TRUE(0 == retcode);
1542     retcode = find_camera_metadata_ro_entry(
1543             staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
1544             maxResolutionStreamConfigs);
1545     ASSERT_TRUE(-ENOENT == retcode || 0 == retcode);
1546     retcode = find_camera_metadata_ro_entry(
1547             staticMetadata, ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
1548             multiResStreamConfigs);
1549     ASSERT_TRUE(-ENOENT == retcode || 0 == retcode);
1550 }
1551 
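// A torch is considered supported when ANDROID_FLASH_INFO_AVAILABLE is present and not false.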
1552 bool CameraAidlTest::isTorchSupported(const camera_metadata_t* staticMeta) {
1553     camera_metadata_ro_entry torchEntry;
1554     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_FLASH_INFO_AVAILABLE, &torchEntry);
1555     if (rc != 0) {
1556         ALOGI("isTorchSupported: Failed to find entry for ANDROID_FLASH_INFO_AVAILABLE");
1557         return false;
1558     }
1559     if (torchEntry.count == 1 && !torchEntry.data.u8[0]) {
1560         ALOGI("isTorchSupported: Torch not supported");
1561         return false;
1562     }
1563     ALOGI("isTorchSupported: Torch supported");
1564     return true;
1565 }
1566 
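// Torch strength control is supported when ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL is
// present and greater than 1.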
1567 bool CameraAidlTest::isTorchStrengthControlSupported(const camera_metadata_t* staticMeta) {
1568     int32_t maxLevel = 0;
1569     camera_metadata_ro_entry maxEntry;
1570     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
1571                                            &maxEntry);
1572     if (rc != 0) {
1573         ALOGI("isTorchStrengthControlSupported: Failed to find entry for "
1574               "ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL");
1575         return false;
1576     }
1577 
1578     maxLevel = *maxEntry.data.i32;
1579     if (maxLevel > 1) {
1580         ALOGI("isTorchStrengthControlSupported: Torch strength control supported.");
1581         return true;
1582     }
1583     ALOGI("isTorchStrengthControlSupported: Torch strength control not supported.");
1584     return false;
1585 }
1586 
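// Sanity check a default request template: it must contain at least one entry, and when
// present, the zoom ratio must default to 1.0 and the settings override must default to OFF.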
1587 void CameraAidlTest::verifyRequestTemplate(const camera_metadata_t* metadata,
1588                                            RequestTemplate requestTemplate) {
1589     ASSERT_NE(nullptr, metadata);
1590     size_t entryCount = get_camera_metadata_entry_count(metadata);
1591     ALOGI("template %u metadata entry count is %zu", (int32_t)requestTemplate, entryCount);
1592     // TODO: we can do better than 0 here. Need to check how many required
1593     // request keys we've defined for each template
1594     ASSERT_GT(entryCount, 0u);
1595 
1596     // Check zoomRatio
1597     camera_metadata_ro_entry zoomRatioEntry;
1598     int foundZoomRatio =
1599             find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO, &zoomRatioEntry);
1600     if (foundZoomRatio == 0) {
1601         ASSERT_EQ(zoomRatioEntry.count, 1);
1602         ASSERT_EQ(zoomRatioEntry.data.f[0], 1.0f);
1603     }
1604 
1605     // Check settings override
1606     camera_metadata_ro_entry settingsOverrideEntry;
1607     int foundSettingsOverride = find_camera_metadata_ro_entry(
1608             metadata, ANDROID_CONTROL_SETTINGS_OVERRIDE, &settingsOverrideEntry);
1609     if (foundSettingsOverride == 0) {
1610         ASSERT_EQ(settingsOverrideEntry.count, 1);
1611         ASSERT_EQ(settingsOverrideEntry.data.u8[0], ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF);
1612     }
1613 }
1614 
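// Open a camera session using an EmptyDeviceCb callback and fetch the static characteristics.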
1615 void CameraAidlTest::openEmptyDeviceSession(const std::string& name,
1616                                             const std::shared_ptr<ICameraProvider>& provider,
1617                                             std::shared_ptr<ICameraDeviceSession>* session,
1618                                             CameraMetadata* staticMeta,
1619                                             std::shared_ptr<ICameraDevice>* device) {
1620     ASSERT_NE(nullptr, session);
1621     ASSERT_NE(nullptr, staticMeta);
1622     ASSERT_NE(nullptr, device);
1623 
1624     ALOGI("configureStreams: Testing camera device %s", name.c_str());
1625     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, device);
1626     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
1627           ret.getServiceSpecificError());
1628     ASSERT_TRUE(ret.isOk());
1629     ASSERT_NE(*device, nullptr);
1630 
1631     std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
1632     ret = (*device)->open(cb, session);
1633     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
1634           ret.getServiceSpecificError());
1635     ASSERT_TRUE(ret.isOk());
1636     ASSERT_NE(*session, nullptr);
1637 
1638     ret = (*device)->getCameraCharacteristics(staticMeta);
1639     ASSERT_TRUE(ret.isOk());
1640 }
1641 
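// Open an injection session using an EmptyDeviceCb callback. When injection sessions are not
// supported, the function returns early and *session remains nullptr.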
1642 void CameraAidlTest::openEmptyInjectionSession(const std::string& name,
1643                                                const std::shared_ptr<ICameraProvider>& provider,
1644                                                std::shared_ptr<ICameraInjectionSession>* session,
1645                                                CameraMetadata* metadata,
1646                                                std::shared_ptr<ICameraDevice>* device) {
1647     ASSERT_NE(nullptr, session);
1648     ASSERT_NE(nullptr, metadata);
1649     ASSERT_NE(nullptr, device);
1650 
1651     ALOGI("openEmptyInjectionSession: Testing camera device %s", name.c_str());
1652     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, device);
1653     ALOGI("openEmptyInjectionSession: getCameraDeviceInterface returns status:%d:%d",
1654           ret.getExceptionCode(), ret.getServiceSpecificError());
1655     ASSERT_TRUE(ret.isOk());
1656     ASSERT_NE(*device, nullptr);
1657 
1658     std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
1659     ret = (*device)->openInjectionSession(cb, session);
1660     ALOGI("device::openInjectionSession returns status:%d:%d", ret.getExceptionCode(),
1661           ret.getServiceSpecificError());
1662 
1663     if (static_cast<Status>(ret.getServiceSpecificError()) == Status::OPERATION_NOT_SUPPORTED &&
1664         *session == nullptr) {
1665         return;  // Injection session not supported. The caller will receive nullptr in *session.
1666     }
1667 
1668     ASSERT_TRUE(ret.isOk());
1669     ASSERT_NE(*session, nullptr);
1670 
1671     ret = (*device)->getCameraCharacteristics(metadata);
1672     ASSERT_TRUE(ret.isOk());
1673 }
1674 
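// Read the maximum JPEG buffer size (ANDROID_JPEG_MAX_SIZE) from the static metadata.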
1675 Status CameraAidlTest::getJpegBufferSize(camera_metadata_t* staticMeta, int32_t* outBufSize) {
1676     if (nullptr == staticMeta || nullptr == outBufSize) {
1677         return Status::ILLEGAL_ARGUMENT;
1678     }
1679 
1680     camera_metadata_ro_entry entry;
1681     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_JPEG_MAX_SIZE, &entry);
1682     if ((0 != rc) || (1 != entry.count)) {
1683         return Status::ILLEGAL_ARGUMENT;
1684     }
1685 
1686     *outBufSize = entry.data.i32[0];
1687     return Status::OK;
1688 }
1689 
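// Map a pixel format to the dataspace the tests pair it with: JFIF for BLOB, DEPTH for Y16,
// ARBITRARY for the RAW formats, and UNKNOWN otherwise.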
1690 Dataspace CameraAidlTest::getDataspace(PixelFormat format) {
1691     switch (format) {
1692         case PixelFormat::BLOB:
1693             return Dataspace::JFIF;
1694         case PixelFormat::Y16:
1695             return Dataspace::DEPTH;
1696         case PixelFormat::RAW16:
1697         case PixelFormat::RAW_OPAQUE:
1698         case PixelFormat::RAW10:
1699         case PixelFormat::RAW12:
1700             return Dataspace::ARBITRARY;
1701         default:
1702             return Dataspace::UNKNOWN;
1703     }
1704 }
1705 
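// Package the given streams into a StreamConfiguration, assigning 'jpegBufferSize' to
// BLOB/JFIF streams and zero to all other streams.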
1706 void CameraAidlTest::createStreamConfiguration(std::vector<Stream>& streams,
1707                                                StreamConfigurationMode configMode,
1708                                                StreamConfiguration* config,
1709                                                int32_t jpegBufferSize) {
1710     ASSERT_NE(nullptr, config);
1711 
1712     for (auto& stream : streams) {
1713         stream.bufferSize =
1714                 (stream.format == PixelFormat::BLOB && stream.dataSpace == Dataspace::JFIF)
1715                         ? jpegBufferSize
1716                         : 0;
1717     }
1718 
1719     // The caller is responsible for filling in a non-zero config->streamConfigCounter after this returns.
1720     config->streams = streams;
1721     config->operationMode = configMode;
1722     config->multiResolutionInputImage = false;
1723 }
1724 
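// Query isStreamCombinationSupported() and compare the answer against 'expectedStatus'.
// An EX_UNSUPPORTED_OPERATION binder status is tolerated when 'expectStreamCombQuery' is set.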
1725 void CameraAidlTest::verifyStreamCombination(const std::shared_ptr<ICameraDevice>& device,
1726                                              const StreamConfiguration& config, bool expectedStatus,
1727                                              bool expectStreamCombQuery) {
1728     if (device != nullptr) {
1729         bool streamCombinationSupported;
1730         ScopedAStatus ret =
1731                 device->isStreamCombinationSupported(config, &streamCombinationSupported);
1732         // TODO: Check if unsupported operation is correct.
1733         ASSERT_TRUE(ret.isOk() ||
1734                     (expectStreamCombQuery && ret.getExceptionCode() == EX_UNSUPPORTED_OPERATION));
1735         if (ret.isOk()) {
1736             ASSERT_EQ(expectedStatus, streamCombinationSupported);
1737         }
1738     }
1739 }
1740 
1741 std::vector<ConcurrentCameraIdCombination> CameraAidlTest::getConcurrentDeviceCombinations(
1742         std::shared_ptr<ICameraProvider>& provider) {
1743     std::vector<ConcurrentCameraIdCombination> combinations;
1744     ndk::ScopedAStatus ret = provider->getConcurrentCameraIds(&combinations);
1745     if (!ret.isOk()) {
1746         ADD_FAILURE();
1747     }
1748 
1749     return combinations;
1750 }
1751 
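// Build the list of streams that must be supported while operating concurrently: a single
// Y16 stream (at most 640x480) for depth-only devices, otherwise a YUV stream (at most
// 1280x720) plus a JPEG stream (at most 1920x1440), each capped at the device maximum for
// that format.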
1752 Status CameraAidlTest::getMandatoryConcurrentStreams(const camera_metadata_t* staticMeta,
1753                                                      std::vector<AvailableStream>* outputStreams) {
1754     if (nullptr == staticMeta || nullptr == outputStreams) {
1755         return Status::ILLEGAL_ARGUMENT;
1756     }
1757 
1758     if (isDepthOnly(staticMeta)) {
1759         Size y16MaxSize(640, 480);
1760         Size maxAvailableY16Size;
1761         getMaxOutputSizeForFormat(staticMeta, PixelFormat::Y16, &maxAvailableY16Size);
1762         Size y16ChosenSize = getMinSize(y16MaxSize, maxAvailableY16Size);
1763         AvailableStream y16Stream = {.width = y16ChosenSize.width,
1764                                      .height = y16ChosenSize.height,
1765                                      .format = static_cast<int32_t>(PixelFormat::Y16)};
1766         outputStreams->push_back(y16Stream);
1767         return Status::OK;
1768     }
1769 
1770     Size yuvMaxSize(1280, 720);
1771     Size jpegMaxSize(1920, 1440);
1772     Size maxAvailableYuvSize;
1773     Size maxAvailableJpegSize;
1774     getMaxOutputSizeForFormat(staticMeta, PixelFormat::YCBCR_420_888, &maxAvailableYuvSize);
1775     getMaxOutputSizeForFormat(staticMeta, PixelFormat::BLOB, &maxAvailableJpegSize);
1776     Size yuvChosenSize = getMinSize(yuvMaxSize, maxAvailableYuvSize);
1777     Size jpegChosenSize = getMinSize(jpegMaxSize, maxAvailableJpegSize);
1778 
1779     AvailableStream yuvStream = {.width = yuvChosenSize.width,
1780                                  .height = yuvChosenSize.height,
1781                                  .format = static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1782 
1783     AvailableStream jpegStream = {.width = jpegChosenSize.width,
1784                                   .height = jpegChosenSize.height,
1785                                   .format = static_cast<int32_t>(PixelFormat::BLOB)};
1786     outputStreams->push_back(yuvStream);
1787     outputStreams->push_back(jpegStream);
1788 
1789     return Status::OK;
1790 }
1791 
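// A device is depth only when it lacks the BACKWARD_COMPATIBLE capability but advertises
// DEPTH_OUTPUT with a Depth16 (Y16) stream configuration.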
1792 bool CameraAidlTest::isDepthOnly(const camera_metadata_t* staticMeta) {
1793     camera_metadata_ro_entry scalerEntry;
1794     camera_metadata_ro_entry depthEntry;
1795 
1796     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1797                                            &scalerEntry);
1798     if (rc != 0) {
1799         return false;  // Without the capabilities entry the device cannot be classified.
1800     }
1801     for (uint32_t i = 0; i < scalerEntry.count; i++) {
1802         if (scalerEntry.data.u8[i] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) {
1803             return false;
1804         }
1805     }
1806 
1807     for (uint32_t i = 0; i < scalerEntry.count; i++) {
1808         if (scalerEntry.data.u8[i] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) {
1809             rc = find_camera_metadata_ro_entry(
1810                     staticMeta, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, &depthEntry);
1811             size_t idx = 0;
1812             if (rc == 0 && depthEntry.data.i32[idx] == static_cast<int32_t>(PixelFormat::Y16)) {
1813                 // only Depth16 format is supported now
1814                 return true;
1815             }
1816             break;
1817         }
1818     }
1819 
1820     return false;
1821 }
1822 
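// Find the largest available output size for 'format', optionally restricted to the
// maximum-resolution stream configurations.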
1823 Status CameraAidlTest::getMaxOutputSizeForFormat(const camera_metadata_t* staticMeta,
1824                                                  PixelFormat format, Size* size,
1825                                                  bool maxResolution) {
1826     std::vector<AvailableStream> outputStreams;
1827     if (size == nullptr ||
1828         getAvailableOutputStreams(staticMeta, outputStreams,
1829                                   /*threshold*/ nullptr, maxResolution) != Status::OK) {
1830         return Status::ILLEGAL_ARGUMENT;
1831     }
1832     Size maxSize;
1833     bool found = false;
1834     for (auto& outputStream : outputStreams) {
1835         if (static_cast<int32_t>(format) == outputStream.format &&
1836             (outputStream.width * outputStream.height > maxSize.width * maxSize.height)) {
1837             maxSize.width = outputStream.width;
1838             maxSize.height = outputStream.height;
1839             found = true;
1840         }
1841     }
1842     if (!found) {
1843         ALOGE("%s :chosen format %d not found", __FUNCTION__, static_cast<int32_t>(format));
1844         return Status::ILLEGAL_ARGUMENT;
1845     }
1846     *size = maxSize;
1847     return Status::OK;
1848 }
1849 
1850 Size CameraAidlTest::getMinSize(Size a, Size b) {
1851     if (a.width * a.height < b.width * b.height) {
1852         return a;
1853     }
1854     return b;
1855 }
1856 
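// Expand ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, stored as repeated
// (inputFormat, outputFormatCount, outputFormat...) groups, into individual
// input/output format pairs.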
1857 Status CameraAidlTest::getZSLInputOutputMap(camera_metadata_t* staticMeta,
1858                                             std::vector<AvailableZSLInputOutput>& inputOutputMap) {
1859     if (nullptr == staticMeta) {
1860         return Status::ILLEGAL_ARGUMENT;
1861     }
1862 
1863     camera_metadata_ro_entry entry;
1864     int rc = find_camera_metadata_ro_entry(
1865             staticMeta, ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, &entry);
1866     if ((0 != rc) || (0 >= entry.count)) {
1867         return Status::ILLEGAL_ARGUMENT;
1868     }
1869 
1870     const int32_t* contents = &entry.data.i32[0];
1871     for (size_t i = 0; i < entry.count;) {
1872         int32_t inputFormat = contents[i++];
1873         int32_t length = contents[i++];
1874         for (int32_t j = 0; j < length; j++) {
1875             int32_t outputFormat = contents[i + j];
1876             AvailableZSLInputOutput zslEntry = {inputFormat, outputFormat};
1877             inputOutputMap.push_back(zslEntry);
1878         }
1879         i += length;
1880     }
1881 
1882     return Status::OK;
1883 }
1884 
1885 Status CameraAidlTest::findLargestSize(const std::vector<AvailableStream>& streamSizes,
1886                                        int32_t format, AvailableStream& result) {
1887     result = {0, 0, 0};
1888     for (auto& iter : streamSizes) {
1889         if (format == iter.format) {
1890             if ((result.width * result.height) < (iter.width * iter.height)) {
1891                 result = iter;
1892             }
1893         }
1894     }
1895 
1896     return (result.format == format) ? Status::OK : Status::ILLEGAL_ARGUMENT;
1897 }
1898 
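// Construct the default settings for 'reqTemplate' and copy into 'filteredSettings' only
// those entries whose tags appear in 'availableKeys'.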
1899 void CameraAidlTest::constructFilteredSettings(
1900         const std::shared_ptr<ICameraDeviceSession>& session,
1901         const std::unordered_set<int32_t>& availableKeys, RequestTemplate reqTemplate,
1902         android::hardware::camera::common::V1_0::helper::CameraMetadata* defaultSettings,
1903         android::hardware::camera::common::V1_0::helper::CameraMetadata* filteredSettings) {
1904     ASSERT_NE(defaultSettings, nullptr);
1905     ASSERT_NE(filteredSettings, nullptr);
1906 
1907     CameraMetadata req;
1908     auto ret = session->constructDefaultRequestSettings(reqTemplate, &req);
1909     ASSERT_TRUE(ret.isOk());
1910 
1911     const camera_metadata_t* metadata =
1912             clone_camera_metadata(reinterpret_cast<const camera_metadata_t*>(req.metadata.data()));
1913     size_t expectedSize = req.metadata.size();
1914     int result = validate_camera_metadata_structure(metadata, &expectedSize);
1915     ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1916 
1917     size_t entryCount = get_camera_metadata_entry_count(metadata);
1918     ASSERT_GT(entryCount, 0u);
1919     *defaultSettings = metadata;
1920 
1921     const android::hardware::camera::common::V1_0::helper::CameraMetadata& constSettings =
1922             *defaultSettings;
1923     for (const auto& keyIt : availableKeys) {
1924         camera_metadata_ro_entry entry = constSettings.find(keyIt);
1925         if (entry.count > 0) {
1926             filteredSettings->update(entry);
1927         }
1928     }
1929 }
1930 
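// Wrap the raw session parameter blobs in CameraMetadata and verify that
// isReconfigurationRequired() either succeeds or reports OPERATION_NOT_SUPPORTED.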
1931 void CameraAidlTest::verifySessionReconfigurationQuery(
1932         const std::shared_ptr<ICameraDeviceSession>& session, camera_metadata* oldSessionParams,
1933         camera_metadata* newSessionParams) {
1934     ASSERT_NE(nullptr, session);
1935     ASSERT_NE(nullptr, oldSessionParams);
1936     ASSERT_NE(nullptr, newSessionParams);
1937 
1938     std::vector<uint8_t> oldParams =
1939             std::vector(reinterpret_cast<uint8_t*>(oldSessionParams),
1940                         reinterpret_cast<uint8_t*>(oldSessionParams) +
1941                                 get_camera_metadata_size(oldSessionParams));
1942     CameraMetadata oldMetadata = {oldParams};
1943 
1944     std::vector<uint8_t> newParams =
1945             std::vector(reinterpret_cast<uint8_t*>(newSessionParams),
1946                         reinterpret_cast<uint8_t*>(newSessionParams) +
1947                                 get_camera_metadata_size(newSessionParams));
1948     CameraMetadata newMetadata = {newParams};
1949 
1950     bool reconfigReq;
1951     ndk::ScopedAStatus ret =
1952             session->isReconfigurationRequired(oldMetadata, newMetadata, &reconfigReq);
1953     ASSERT_TRUE(ret.isOk() || static_cast<Status>(ret.getServiceSpecificError()) ==
1954                                       Status::OPERATION_NOT_SUPPORTED);
1955 }
1956 
1957 Status CameraAidlTest::isConstrainedModeAvailable(camera_metadata_t* staticMeta) {
1958     Status ret = Status::OPERATION_NOT_SUPPORTED;
1959     if (nullptr == staticMeta) {
1960         return Status::ILLEGAL_ARGUMENT;
1961     }
1962 
1963     camera_metadata_ro_entry entry;
1964     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1965                                            &entry);
1966     if (0 != rc) {
1967         return Status::ILLEGAL_ARGUMENT;
1968     }
1969 
1970     for (size_t i = 0; i < entry.count; i++) {
1971         if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO ==
1972             entry.data.u8[i]) {
1973             ret = Status::OK;
1974             break;
1975         }
1976     }
1977 
1978     return ret;
1979 }
1980 
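// Pick the largest resolution advertised in the high speed video configurations; entries
// come in groups of five, with width and height in the first two elements.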
1981 Status CameraAidlTest::pickConstrainedModeSize(camera_metadata_t* staticMeta,
1982                                                AvailableStream& hfrStream) {
1983     if (nullptr == staticMeta) {
1984         return Status::ILLEGAL_ARGUMENT;
1985     }
1986 
1987     camera_metadata_ro_entry entry;
1988     int rc = find_camera_metadata_ro_entry(
1989             staticMeta, ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, &entry);
1990     if (0 != rc) {
1991         return Status::OPERATION_NOT_SUPPORTED;
1992     } else if (0 != (entry.count % 5)) {
1993         return Status::ILLEGAL_ARGUMENT;
1994     }
1995 
1996     hfrStream = {0, 0, static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1997     for (size_t i = 0; i < entry.count; i += 5) {
1998         int32_t w = entry.data.i32[i];
1999         int32_t h = entry.data.i32[i + 1];
2000         if ((hfrStream.width * hfrStream.height) < (w * h)) {
2001             hfrStream.width = w;
2002             hfrStream.height = h;
2003         }
2004     }
2005 
2006     return Status::OK;
2007 }
2008 
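// For each camera: configure a single preview-sized stream, submit one capture request with
// an allocated output buffer, wait for the result, then submit a second request with empty
// settings that reuses the cached buffer (null handle), and finally close the session.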
2009 void CameraAidlTest::processCaptureRequestInternal(uint64_t bufferUsage,
2010                                                    RequestTemplate reqTemplate,
2011                                                    bool useSecureOnlyCameras) {
2012     std::vector<std::string> cameraDeviceNames =
2013             getCameraDeviceNames(mProvider, useSecureOnlyCameras);
2014     AvailableStream streamThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2015                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2016     int64_t bufferId = 1;
2017     int32_t frameNumber = 1;
2018     CameraMetadata settings;
2019 
2020     for (const auto& name : cameraDeviceNames) {
2021         Stream testStream;
2022         std::vector<HalStream> halStreams;
2023         std::shared_ptr<ICameraDeviceSession> session;
2024         std::shared_ptr<DeviceCb> cb;
2025         bool supportsPartialResults = false;
2026         bool useHalBufManager = false;
2027         int32_t partialResultCount = 0;
2028         configureSingleStream(name, mProvider, &streamThreshold, bufferUsage, reqTemplate,
2029                               &session /*out*/, &testStream /*out*/, &halStreams /*out*/,
2030                               &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2031                               &useHalBufManager /*out*/, &cb /*out*/);
2032 
2033         ASSERT_NE(session, nullptr);
2034         ASSERT_NE(cb, nullptr);
2035         ASSERT_FALSE(halStreams.empty());
2036 
2037         std::shared_ptr<ResultMetadataQueue> resultQueue;
2038         ::aidl::android::hardware::common::fmq::MQDescriptor<
2039                 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2040                 descriptor;
2041         ndk::ScopedAStatus ret = session->getCaptureResultMetadataQueue(&descriptor);
2042         ASSERT_TRUE(ret.isOk());
2043 
2044         resultQueue = std::make_shared<ResultMetadataQueue>(descriptor);
2045         if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2046             ALOGE("%s: HAL returns empty result metadata fmq,"
2047                   " not use it",
2048                   __func__);
2049             resultQueue = nullptr;
2050             // Don't use the queue onwards.
2051         }
2052 
2053         std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2054                 1, false, supportsPartialResults, partialResultCount, resultQueue);
2055 
2056         CameraMetadata req;
2057         ret = session->constructDefaultRequestSettings(reqTemplate, &req);
2058         ASSERT_TRUE(ret.isOk());
2059         settings = req;
2060 
2061         overrideRotateAndCrop(&settings);
2062 
2063         std::vector<CaptureRequest> requests(1);
2064         CaptureRequest& request = requests[0];
2065         request.frameNumber = frameNumber;
2066         request.fmqSettingsSize = 0;
2067         request.settings = settings;
2068 
2069         std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2070         outputBuffers.resize(1);
2071         StreamBuffer& outputBuffer = outputBuffers[0];
2072         if (useHalBufManager) {
2073             outputBuffer = {halStreams[0].id,
2074                             /*bufferId*/ 0,   NativeHandle(), BufferStatus::OK,
2075                             NativeHandle(),   NativeHandle()};
2076         } else {
2077             buffer_handle_t handle;
2078             allocateGraphicBuffer(
2079                     testStream.width, testStream.height,
2080                     /* We don't look at halStreamConfig.streams[0].consumerUsage
2081                      * since that is 0 for output streams
2082                      */
2083                     android_convertGralloc1To0Usage(
2084                             static_cast<uint64_t>(halStreams[0].producerUsage), bufferUsage),
2085                     halStreams[0].overrideFormat, &handle);
2086 
2087             outputBuffer = {halStreams[0].id, bufferId,       ::android::makeToAidl(handle),
2088                             BufferStatus::OK, NativeHandle(), NativeHandle()};
2089         }
2090         request.inputBuffer = {-1,
2091                                0,
2092                                NativeHandle(),
2093                                BufferStatus::ERROR,
2094                                NativeHandle(),
2095                                NativeHandle()};  // Empty Input Buffer
2096 
2097         {
2098             std::unique_lock<std::mutex> l(mLock);
2099             mInflightMap.clear();
2100             mInflightMap.insert(std::make_pair(frameNumber, inflightReq));
2101         }
2102 
2103         int32_t numRequestProcessed = 0;
2104         std::vector<BufferCache> cachesToRemove;
2105         ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2106         ALOGI("processCaptureRequestInternal: processCaptureRequest returns status: %d:%d",
2107               ret.getExceptionCode(), ret.getServiceSpecificError());
2108 
2109         ASSERT_TRUE(ret.isOk());
2110         ASSERT_EQ(numRequestProcessed, 1u);
2111 
2112         {
2113             std::unique_lock<std::mutex> l(mLock);
2114             while (!inflightReq->errorCodeValid &&
2115                    ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2116                 auto timeout = std::chrono::system_clock::now() +
2117                                std::chrono::seconds(kStreamBufferTimeoutSec);
2118                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2119             }
2120 
2121             ASSERT_FALSE(inflightReq->errorCodeValid);
2122             ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2123             ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2124 
2125             // shutterReadoutTimestamp must be available, and it must
2126             // be >= shutterTimestamp + exposureTime,
2127             // and < shutterTimestamp + exposureTime + rollingShutterSkew / 2.
2128             ASSERT_TRUE(inflightReq->shutterReadoutTimestampValid);
2129             ASSERT_FALSE(inflightReq->collectedResult.isEmpty());
2130 
2131             if (inflightReq->collectedResult.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2132                 camera_metadata_entry_t exposureTimeResult =
2133                         inflightReq->collectedResult.find(ANDROID_SENSOR_EXPOSURE_TIME);
2134                 nsecs_t exposureToReadout =
2135                         inflightReq->shutterReadoutTimestamp - inflightReq->shutterTimestamp;
2136                 ASSERT_GE(exposureToReadout, exposureTimeResult.data.i64[0]);
2137                 if (inflightReq->collectedResult.exists(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW)) {
2138                     camera_metadata_entry_t rollingShutterSkew =
2139                             inflightReq->collectedResult.find(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
2140                     ASSERT_LT(exposureToReadout,
2141                               exposureTimeResult.data.i64[0] + rollingShutterSkew.data.i64[0] / 2);
2142                 }
2143             }
2144 
2145             request.frameNumber++;
2146             // Empty settings should be supported after the first call
2147             // for repeating requests.
2148             request.settings.metadata.clear();
2149             // The buffer has been registered to HAL by bufferId, so per
2150             // API contract we should send a null handle for this buffer
2151             request.outputBuffers[0].buffer = NativeHandle();
2152             mInflightMap.clear();
2153             inflightReq = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2154                                                             partialResultCount, resultQueue);
2155             mInflightMap.insert(std::make_pair(request.frameNumber, inflightReq));
2156         }
2157 
2158         ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2159         ALOGI("processCaptureRequestInternal: processCaptureRequest returns status: %d:%d",
2160               ret.getExceptionCode(), ret.getServiceSpecificError());
2161         ASSERT_TRUE(ret.isOk());
2162         ASSERT_EQ(numRequestProcessed, 1u);
2163 
2164         {
2165             std::unique_lock<std::mutex> l(mLock);
2166             while (!inflightReq->errorCodeValid &&
2167                    ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2168                 auto timeout = std::chrono::system_clock::now() +
2169                                std::chrono::seconds(kStreamBufferTimeoutSec);
2170                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2171             }
2172 
2173             ASSERT_FALSE(inflightReq->errorCodeValid);
2174             ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2175             ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2176         }
2177 
2178         if (useHalBufManager) {
2179             verifyBuffersReturned(session, testStream.id, cb);
2180         }
2181 
2182         ret = session->close();
2183         ASSERT_TRUE(ret.isOk());
2184     }
2185 }
2186 
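// For each camera that can satisfy 'threshold': probe every mandatory stream use case (plus
// an intentionally invalid one) and verify that isStreamCombinationSupported() reports
// support only for the use cases advertised in the static metadata.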
2187 void CameraAidlTest::configureStreamUseCaseInternal(const AvailableStream& threshold) {
2188     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2189 
2190     for (const auto& name : cameraDeviceNames) {
2191         CameraMetadata meta;
2192         std::shared_ptr<ICameraDevice> cameraDevice;
2193 
2194         openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2195                                &cameraDevice /*out*/);
2196 
2197         camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
2198         // Skip cameras that are depth only, or that lack cropped RAW support when RAW16 is requested
2199         if (isDepthOnly(staticMeta) ||
2200                 (threshold.format == static_cast<int32_t>(PixelFormat::RAW16) &&
2201                         !supportsCroppedRawUseCase(staticMeta))) {
2202             ndk::ScopedAStatus ret = mSession->close();
2203             mSession = nullptr;
2204             ASSERT_TRUE(ret.isOk());
2205             continue;
2206         }
2207 
2208         std::vector<AvailableStream> outputPreviewStreams;
2209 
2210         ASSERT_EQ(Status::OK,
2211                   getAvailableOutputStreams(staticMeta, outputPreviewStreams, &threshold));
2212         ASSERT_NE(0u, outputPreviewStreams.size());
2213 
2214         // Combine valid and invalid stream use cases
2215         std::vector<int64_t> useCases(kMandatoryUseCases);
2216         useCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW + 1);
2217 
2218         std::vector<int64_t> supportedUseCases;
2219         if (threshold.format == static_cast<int32_t>(PixelFormat::RAW16)) {
2220             // If the format is RAW16, only the CROPPED_RAW and DEFAULT use cases are
2221             // expected to be supported; all others are unsupported for this format.
2222             useCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW);
2223             supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW);
2224             supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
2225         } else {
2226             camera_metadata_ro_entry entry;
2227             auto retcode = find_camera_metadata_ro_entry(
2228                     staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
2229             if ((0 == retcode) && (entry.count > 0)) {
2230                 supportedUseCases.insert(supportedUseCases.end(), entry.data.i64,
2231                                          entry.data.i64 + entry.count);
2232             } else {
2233                 supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
2234             }
2235         }
2236 
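             // Build a single CPU-readable output stream from the first matching output size;
             // only the use case field is varied in the loop below.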
2237         std::vector<Stream> streams(1);
2238         streams[0] = {
2239                 0,
2240                 StreamType::OUTPUT,
2241                 outputPreviewStreams[0].width,
2242                 outputPreviewStreams[0].height,
2243                 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2244                 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
2245                         GRALLOC1_CONSUMER_USAGE_CPU_READ),
2246                 Dataspace::UNKNOWN,
2247                 StreamRotation::ROTATION_0,
2248                 std::string(),
2249                 0,
2250                 -1,
2251                 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2252                 RequestAvailableDynamicRangeProfilesMap::
2253                         ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
2254                 ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
2255                 static_cast<int>(
2256                         RequestAvailableColorSpaceProfilesMap::
2257                                 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
2258 
2259         int32_t streamConfigCounter = 0;
2260         CameraMetadata req;
2261         StreamConfiguration config;
2262         RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
2263         ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req);
2264         ASSERT_TRUE(ret.isOk());
2265         config.sessionParams = req;
2266 
2267         for (int64_t useCase : useCases) {
2268             bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(),
2269                                               useCase) != supportedUseCases.end();
2270 
2271             streams[0].useCase = static_cast<
2272                     aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>(
2273                     useCase);
2274             config.streams = streams;
2275             config.operationMode = StreamConfigurationMode::NORMAL_MODE;
2276             config.streamConfigCounter = streamConfigCounter;
2277             config.multiResolutionInputImage = false;
2278 
2279             bool combSupported;
2280             ret = cameraDevice->isStreamCombinationSupported(config, &combSupported);
2281             if (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) ==
2282                 ret.getServiceSpecificError()) {
2283                 continue;
2284             }
2285 
2286             ASSERT_TRUE(ret.isOk());
2287             ASSERT_EQ(combSupported, useCaseSupported);
2288 
2289             std::vector<HalStream> halStreams;
2290             ret = mSession->configureStreams(config, &halStreams);
2291             ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError());
2292             if (useCaseSupported) {
2293                 ASSERT_TRUE(ret.isOk());
2294                 ASSERT_EQ(1u, halStreams.size());
2295             } else {
2296                 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2297                           ret.getServiceSpecificError());
2298             }
2299         }
2300         ret = mSession->close();
2301         mSession = nullptr;
2302         ASSERT_TRUE(ret.isOk());
2303     }
2304 
2305 }
2306 
2307 void CameraAidlTest::configureSingleStream(
2308         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
2309         const AvailableStream* previewThreshold, uint64_t bufferUsage, RequestTemplate reqTemplate,
2310         std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream,
2311         std::vector<HalStream>* halStreams, bool* supportsPartialResults,
2312         int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb,
2313         uint32_t streamConfigCounter) {
2314     ASSERT_NE(nullptr, session);
2315     ASSERT_NE(nullptr, previewStream);
2316     ASSERT_NE(nullptr, halStreams);
2317     ASSERT_NE(nullptr, supportsPartialResults);
2318     ASSERT_NE(nullptr, partialResultCount);
2319     ASSERT_NE(nullptr, useHalBufManager);
2320     ASSERT_NE(nullptr, cb);
2321 
2322     std::vector<AvailableStream> outputPreviewStreams;
2323     std::shared_ptr<ICameraDevice> device;
2324     ALOGI("configureStreams: Testing camera device %s", name.c_str());
2325 
2326     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2327     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2328           ret.getServiceSpecificError());
2329     ASSERT_TRUE(ret.isOk());
2330     ASSERT_NE(device, nullptr);
2331 
2332     camera_metadata_t* staticMeta;
2333     CameraMetadata chars;
2334     ret = device->getCameraCharacteristics(&chars);
2335     ASSERT_TRUE(ret.isOk());
2336     staticMeta = clone_camera_metadata(
2337             reinterpret_cast<const camera_metadata_t*>(chars.metadata.data()));
2338     ASSERT_NE(nullptr, staticMeta);
2339 
2340     size_t expectedSize = chars.metadata.size();
2341     ALOGE("validate_camera_metadata_structure: %d",
2342           validate_camera_metadata_structure(staticMeta, &expectedSize));
2343 
2344     camera_metadata_ro_entry entry;
2345     auto status =
2346             find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
2347     if ((0 == status) && (entry.count > 0)) {
2348         *partialResultCount = entry.data.i32[0];
2349         *supportsPartialResults = (*partialResultCount > 1);
2350     }
2351 
2352     *cb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
2353 
2354     ret = device->open(*cb, session);
2355     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
2356           ret.getServiceSpecificError());
2357     ASSERT_TRUE(ret.isOk());
2358     ASSERT_NE(*session, nullptr);
2359 
2360     *useHalBufManager = false;
2361     status = find_camera_metadata_ro_entry(
2362             staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
2363     if ((0 == status) && (entry.count == 1)) {
2364         *useHalBufManager = (entry.data.u8[0] ==
2365                              ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
2366     }
2367 
2368     outputPreviewStreams.clear();
2369     auto rc = getAvailableOutputStreams(staticMeta, outputPreviewStreams, previewThreshold);
2370 
2371     int32_t jpegBufferSize = 0;
2372     ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
2373     ASSERT_NE(0u, jpegBufferSize);
2374 
2375     ASSERT_EQ(Status::OK, rc);
2376     ASSERT_FALSE(outputPreviewStreams.empty());
2377 
2378     Dataspace dataspace = Dataspace::UNKNOWN;
2379     switch (static_cast<PixelFormat>(outputPreviewStreams[0].format)) {
2380         case PixelFormat::Y16:
2381             dataspace = Dataspace::DEPTH;
2382             break;
2383         default:
2384             dataspace = Dataspace::UNKNOWN;
2385     }
2386 
2387     std::vector<Stream> streams(1);
2388     streams[0] = {0,
2389                   StreamType::OUTPUT,
2390                   outputPreviewStreams[0].width,
2391                   outputPreviewStreams[0].height,
2392                   static_cast<PixelFormat>(outputPreviewStreams[0].format),
2393                   static_cast<aidl::android::hardware::graphics::common::BufferUsage>(bufferUsage),
2394                   dataspace,
2395                   StreamRotation::ROTATION_0,
2396                   "",
2397                   0,
2398                   /*groupId*/ -1,
2399                   {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2400                   RequestAvailableDynamicRangeProfilesMap::
2401                           ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
2402                   ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
2403                   static_cast<int>(
2404                           RequestAvailableColorSpaceProfilesMap::
2405                                   ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
2406 
2407     StreamConfiguration config;
2408     config.streams = streams;
2409     createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2410                               jpegBufferSize);
2411     if (*session != nullptr) {
2412         CameraMetadata sessionParams;
2413         ret = (*session)->constructDefaultRequestSettings(reqTemplate, &sessionParams);
2414         ASSERT_TRUE(ret.isOk());
2415         config.sessionParams = sessionParams;
2416         config.streamConfigCounter = (int32_t)streamConfigCounter;
2417 
2418         bool supported = false;
2419         ret = device->isStreamCombinationSupported(config, &supported);
2420         ASSERT_TRUE(ret.isOk());
2421         ASSERT_EQ(supported, true);
2422 
2423         std::vector<HalStream> halConfigs;
2424         ret = (*session)->configureStreams(config, &halConfigs);
2425         ALOGI("configureStreams returns status: %d:%d", ret.getExceptionCode(),
2426               ret.getServiceSpecificError());
2427         ASSERT_TRUE(ret.isOk());
2428         ASSERT_EQ(1u, halConfigs.size());
2429         halStreams->clear();
2430         halStreams->push_back(halConfigs[0]);
2431         if (*useHalBufManager) {
2432             std::vector<Stream> ss(1);
2433             std::vector<HalStream> hs(1);
2434             ss[0] = config.streams[0];
2435             hs[0] = halConfigs[0];
2436             (*cb)->setCurrentStreamConfig(ss, hs);
2437         }
2438     }
2439     *previewStream = config.streams[0];
2440     ASSERT_TRUE(ret.isOk());
2441 }
2442 
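     // If the request leaves ANDROID_SCALER_ROTATE_AND_CROP at AUTO, rewrite it to NONE so the
     // HAL does not apply an implicit rotate-and-crop to test captures.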
2443 void CameraAidlTest::overrideRotateAndCrop(CameraMetadata* settings) {
2444     if (settings == nullptr) {
2445         return;
2446     }
2447 
2448     ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta =
2449             clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(settings->metadata.data()));
2450     auto entry = requestMeta.find(ANDROID_SCALER_ROTATE_AND_CROP);
2451     if ((entry.count > 0) && (entry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO)) {
2452         uint8_t disableRotateAndCrop = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
2453         requestMeta.update(ANDROID_SCALER_ROTATE_AND_CROP, &disableRotateAndCrop, 1);
2454         settings->metadata.clear();
2455         camera_metadata_t* metaBuffer = requestMeta.release();
2456         uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2457         settings->metadata =
2458                 std::vector(rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2459     }
2460 }
2461 
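     // With the HAL buffer manager in use, request a stream flush and block until the device
     // callback has seen all buffers for the stream returned.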
2462 void CameraAidlTest::verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session,
2463                                            int32_t streamId, const std::shared_ptr<DeviceCb>& cb,
2464                                            uint32_t streamConfigCounter) {
2465     ASSERT_NE(nullptr, session);
2466 
2467     std::vector<int32_t> streamIds(1);
2468     streamIds[0] = streamId;
2469     session->signalStreamFlush(streamIds, /*streamConfigCounter*/ streamConfigCounter);
2470     cb->waitForBuffersReturned();
2471 }
2472 
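     // Runs one preview capture per camera with video stabilization either OFF or set to
     // PREVIEW_STABILIZATION. The OFF pass records a per-device baseline time lag in
     // cameraDeviceToTimeLag; the ON pass reads that baseline and verifies that the stabilized
     // lag does not exceed it by more than one frame duration.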
2473 void CameraAidlTest::processPreviewStabilizationCaptureRequestInternal(
2474         bool previewStabilizationOn,
2475         // Used as output when preview stabilization is off, and as input when it's on.
2476         std::unordered_map<std::string, nsecs_t>& cameraDeviceToTimeLag) {
2477     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2478     AvailableStream streamThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2479                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2480     int64_t bufferId = 1;
2481     int32_t frameNumber = 1;
2482     std::vector<uint8_t> settings;
2483 
2484     for (const auto& name : cameraDeviceNames) {
2485         if (!supportsPreviewStabilization(name, mProvider)) {
2486             ALOGI(" %s Camera device %s doesn't support preview stabilization, skipping", __func__,
2487                   name.c_str());
2488             continue;
2489         }
2490 
2491         Stream testStream;
2492         std::vector<HalStream> halStreams;
2493         std::shared_ptr<ICameraDeviceSession> session;
2494         std::shared_ptr<DeviceCb> cb;
2495         bool supportsPartialResults = false;
2496         bool useHalBufManager = false;
2497         int32_t partialResultCount = 0;
2498         configureSingleStream(name, mProvider, &streamThreshold, GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
2499                               RequestTemplate::PREVIEW, &session /*out*/, &testStream /*out*/,
2500                               &halStreams /*out*/, &supportsPartialResults /*out*/,
2501                               &partialResultCount /*out*/, &useHalBufManager /*out*/, &cb /*out*/);
2502 
2503         ::aidl::android::hardware::common::fmq::MQDescriptor<
2504                 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2505                 descriptor;
2506         ndk::ScopedAStatus resultQueueRet = session->getCaptureResultMetadataQueue(&descriptor);
2507         ASSERT_TRUE(resultQueueRet.isOk());
2508 
2509         std::shared_ptr<ResultMetadataQueue> resultQueue =
2510                 std::make_shared<ResultMetadataQueue>(descriptor);
2511         if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2512             ALOGE("%s: HAL returns empty result metadata fmq,"
2513                   " not use it",
2514                   __func__);
2515             resultQueue = nullptr;
2516             // Don't use the queue onwards.
2517         }
2518 
2519         std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2520                 1, false, supportsPartialResults, partialResultCount, resultQueue);
2521 
2522         CameraMetadata defaultMetadata;
2523         android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
2524         ndk::ScopedAStatus ret = session->constructDefaultRequestSettings(RequestTemplate::PREVIEW,
2525                                                                           &defaultMetadata);
2526         ASSERT_TRUE(ret.isOk());
2527 
2528         const camera_metadata_t* metadata =
2529                 reinterpret_cast<const camera_metadata_t*>(defaultMetadata.metadata.data());
2530         defaultSettings = metadata;
2531         android::status_t metadataRet = ::android::OK;
2532         uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
2533         if (previewStabilizationOn) {
2534             videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION;
2535             metadataRet = defaultSettings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
2536                                                  &videoStabilizationMode, 1);
2537         } else {
2538             metadataRet = defaultSettings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
2539                                                  &videoStabilizationMode, 1);
2540         }
2541         ASSERT_EQ(metadataRet, ::android::OK);
2542 
2543         camera_metadata_t* releasedMetadata = defaultSettings.release();
2544         uint8_t* rawMetadata = reinterpret_cast<uint8_t*>(releasedMetadata);
2545 
2546         buffer_handle_t buffer_handle;
2547 
2548         std::vector<CaptureRequest> requests(1);
2549         CaptureRequest& request = requests[0];
2550         request.frameNumber = frameNumber;
2551         request.fmqSettingsSize = 0;
2552         request.settings.metadata =
2553                 std::vector(rawMetadata, rawMetadata + get_camera_metadata_size(releasedMetadata));
2554         overrideRotateAndCrop(&request.settings);
2555         request.outputBuffers = std::vector<StreamBuffer>(1);
2556         StreamBuffer& outputBuffer = request.outputBuffers[0];
2557         if (useHalBufManager) {
2558             outputBuffer = {halStreams[0].id,
2559                             /*bufferId*/ 0,   NativeHandle(), BufferStatus::OK,
2560                             NativeHandle(),   NativeHandle()};
2561         } else {
2562             allocateGraphicBuffer(testStream.width, testStream.height,
2563                                   /* We don't look at halStreamConfig.streams[0].consumerUsage
2564                                    * since that is 0 for output streams
2565                                    */
2566                                   android_convertGralloc1To0Usage(
2567                                           static_cast<uint64_t>(halStreams[0].producerUsage),
2568                                           GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2569                                   halStreams[0].overrideFormat, &buffer_handle);
2570             outputBuffer = {halStreams[0].id, bufferId,       ::android::makeToAidl(buffer_handle),
2571                             BufferStatus::OK, NativeHandle(), NativeHandle()};
2572         }
2573         request.inputBuffer = {
2574                 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2575 
2576         {
2577             std::unique_lock<std::mutex> l(mLock);
2578             mInflightMap.clear();
2579             mInflightMap.insert(std::make_pair(frameNumber, inflightReq));
2580         }
2581 
2582         int32_t numRequestProcessed = 0;
2583         std::vector<BufferCache> cachesToRemove;
2584         ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2585         ASSERT_TRUE(ret.isOk());
2586         ASSERT_EQ(numRequestProcessed, 1u);
2587 
2588         {
2589             std::unique_lock<std::mutex> l(mLock);
2590             while (!inflightReq->errorCodeValid &&
2591                    ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2592                 auto timeout = std::chrono::system_clock::now() +
2593                                std::chrono::seconds(kStreamBufferTimeoutSec);
2594                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2595             }
2596             waitForReleaseFence(inflightReq->resultOutputBuffers);
2597 
2598             ASSERT_FALSE(inflightReq->errorCodeValid);
2599             ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2600             ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2601             ASSERT_TRUE(inflightReq->shutterReadoutTimestampValid);
2602             nsecs_t readoutTimestamp = inflightReq->shutterReadoutTimestamp;
2603 
2604             if (previewStabilizationOn) {
2605                 // Compute the lag between the buffer-ready timestamp and the notify readout
2606                 // timestamp:
2607                 //     timeLag = buffer ready timestamp - notify readout timestamp.
2608                 // With preview stabilization on, timeLag must be <=
2609                 //     timeLag(stabilization off) + 1 frame duration.
2610                 auto it = cameraDeviceToTimeLag.find(name);
2611                 camera_metadata_entry e;
2612                 e = inflightReq->collectedResult.find(ANDROID_SENSOR_FRAME_DURATION);
2613                 ASSERT_TRUE(e.count > 0);
2614                 nsecs_t frameDuration = e.data.i64[0];
2615                 ASSERT_TRUE(it != cameraDeviceToTimeLag.end());
2616 
2617                 nsecs_t previewStabOnLagTime =
2618                         inflightReq->resultOutputBuffers[0].timeStamp - readoutTimestamp;
2619                 ASSERT_TRUE(previewStabOnLagTime <= (it->second + frameDuration));
2620             } else {
2621                 // Record the baseline lag: buffer ready timestamp - notify readout timestamp.
2622                 cameraDeviceToTimeLag[std::string(name)] =
2623                         inflightReq->resultOutputBuffers[0].timeStamp - readoutTimestamp;
2624             }
2625         }
2626 
2627         if (useHalBufManager) {
2628             verifyBuffersReturned(session, testStream.id, cb);
2629         }
2630 
2631         ret = session->close();
2632         ASSERT_TRUE(ret.isOk());
2633     }
2634 }
2635 
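     // Returns true if ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES advertises
     // PREVIEW_STABILIZATION for the given camera.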
2636 bool CameraAidlTest::supportsPreviewStabilization(
2637         const std::string& name, const std::shared_ptr<ICameraProvider>& provider) {
2638     std::shared_ptr<ICameraDevice> device;
2639     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2640     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2641           ret.getServiceSpecificError());
2642     if (!ret.isOk() || device == nullptr) {
2643         ADD_FAILURE() << "Failed to get camera device interface for " << name;
             return false;
2644     }
2645 
2646     CameraMetadata metadata;
2647     ret = device->getCameraCharacteristics(&metadata);
2648     camera_metadata_t* staticMeta = clone_camera_metadata(
2649             reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
2650     if (!(ret.isOk())) {
2651         ADD_FAILURE() << "Failed to get camera characteristics for " << name;
             return false;
2652     }
2653     // Go through the characteristics and see if video stabilization modes have
2654     // preview stabilization
2655     camera_metadata_ro_entry entry;
2656 
2657     int retcode = find_camera_metadata_ro_entry(
2658             staticMeta, ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, &entry);
2659     if ((0 == retcode) && (entry.count > 0)) {
2660         for (auto i = 0; i < entry.count; i++) {
2661             if (entry.data.u8[i] ==
2662                 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION) {
2663                 return true;
2664             }
2665         }
2666     }
2667     return false;
2668 }
2669 
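     // Configures one preview output stream per requested physical camera id. When
     // allowUnsupport is set and the combination is rejected by the HAL, the session is closed
     // and returned as null instead of failing the test.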
2670 void CameraAidlTest::configurePreviewStreams(
2671         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
2672         const AvailableStream* previewThreshold, const std::unordered_set<std::string>& physicalIds,
2673         std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream,
2674         std::vector<HalStream>* halStreams, bool* supportsPartialResults,
2675         int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb,
2676         int32_t streamConfigCounter, bool allowUnsupport) {
2677     ASSERT_NE(nullptr, session);
2678     ASSERT_NE(nullptr, halStreams);
2679     ASSERT_NE(nullptr, previewStream);
2680     ASSERT_NE(nullptr, supportsPartialResults);
2681     ASSERT_NE(nullptr, partialResultCount);
2682     ASSERT_NE(nullptr, useHalBufManager);
2683     ASSERT_NE(nullptr, cb);
2684 
2685     ASSERT_FALSE(physicalIds.empty());
2686 
2687     std::vector<AvailableStream> outputPreviewStreams;
2688     std::shared_ptr<ICameraDevice> device;
2689     ALOGI("configureStreams: Testing camera device %s", name.c_str());
2690 
2691     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2692     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2693           ret.getServiceSpecificError());
2694     ASSERT_TRUE(ret.isOk());
2695     ASSERT_NE(device, nullptr);
2696 
2697     CameraMetadata meta;
2698     ret = device->getCameraCharacteristics(&meta);
2699     ASSERT_TRUE(ret.isOk());
2700     camera_metadata_t* staticMeta =
2701             clone_camera_metadata(reinterpret_cast<const camera_metadata_t*>(meta.metadata.data()));
2702     ASSERT_NE(nullptr, staticMeta);
2703 
2704     camera_metadata_ro_entry entry;
2705     auto status =
2706             find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
2707     if ((0 == status) && (entry.count > 0)) {
2708         *partialResultCount = entry.data.i32[0];
2709         *supportsPartialResults = (*partialResultCount > 1);
2710     }
2711 
2712     *cb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
2713     ret = device->open(*cb, session);
2714     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
2715           ret.getServiceSpecificError());
2716     ASSERT_TRUE(ret.isOk());
2717     ASSERT_NE(*session, nullptr);
2718 
2719     *useHalBufManager = false;
2720     status = find_camera_metadata_ro_entry(
2721             staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
2722     if ((0 == status) && (entry.count == 1)) {
2723         *useHalBufManager = (entry.data.u8[0] ==
2724                              ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
2725     }
2726 
2727     outputPreviewStreams.clear();
2728     Status rc = getAvailableOutputStreams(staticMeta, outputPreviewStreams, previewThreshold);
2729 
2730     ASSERT_EQ(Status::OK, rc);
2731     ASSERT_FALSE(outputPreviewStreams.empty());
2732 
2733     std::vector<Stream> streams(physicalIds.size());
2734     int32_t streamId = 0;
2735     for (auto const& physicalId : physicalIds) {
2736         streams[streamId] = {
2737                 streamId,
2738                 StreamType::OUTPUT,
2739                 outputPreviewStreams[0].width,
2740                 outputPreviewStreams[0].height,
2741                 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2742                 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2743                         GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2744                 Dataspace::UNKNOWN,
2745                 StreamRotation::ROTATION_0,
2746                 physicalId,
2747                 0,
2748                 -1,
2749                 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2750                 RequestAvailableDynamicRangeProfilesMap::
2751                         ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
2752                 ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
2753                 static_cast<int>(
2754                         RequestAvailableColorSpaceProfilesMap::
2755                                 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
2756         streamId++;
2757     }
2758 
2759     StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE, CameraMetadata()};
2760 
2761     RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2762     ret = (*session)->constructDefaultRequestSettings(reqTemplate, &config.sessionParams);
2763     ASSERT_TRUE(ret.isOk());
2764 
2765     bool supported = false;
2766     ret = device->isStreamCombinationSupported(config, &supported);
2767     ASSERT_TRUE(ret.isOk());
2768     if (allowUnsupport && !supported) {
2769         // Stream combination not supported; return a null session.
2770         ret = (*session)->close();
2771         ASSERT_TRUE(ret.isOk());
2772         *session = nullptr;
2773         return;
2774     }
2775     ASSERT_TRUE(supported) << "Stream combination must be supported.";
2776 
2777     config.streamConfigCounter = streamConfigCounter;
2778     std::vector<HalStream> halConfigs;
2779     ret = (*session)->configureStreams(config, &halConfigs);
2780     ASSERT_TRUE(ret.isOk());
2781     ASSERT_EQ(physicalIds.size(), halConfigs.size());
2782     *halStreams = halConfigs;
2783     if (*useHalBufManager) {
2784         std::vector<Stream> ss(physicalIds.size());
2785         std::vector<HalStream> hs(physicalIds.size());
2786         for (size_t i = 0; i < physicalIds.size(); i++) {
2787             ss[i] = streams[i];
2788             hs[i] = halConfigs[i];
2789         }
2790         (*cb)->setCurrentStreamConfig(ss, hs);
2791     }
2792     *previewStream = streams[0];
2793     ASSERT_TRUE(ret.isOk());
2794 }
2795 
2796 void CameraAidlTest::verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session,
2797                                            const std::vector<int32_t>& streamIds,
2798                                            const std::shared_ptr<DeviceCb>& cb,
2799                                            uint32_t streamConfigCounter) {
2800     ndk::ScopedAStatus ret =
2801             session->signalStreamFlush(streamIds, /*streamConfigCounter*/ streamConfigCounter);
2802     ASSERT_TRUE(ret.isOk());
2803     cb->waitForBuffersReturned();
2804 }
2805 
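     // Configures a single output stream of the requested pixel format, optionally at maximum
     // sensor pixel mode resolution, with the requested dynamic range and color space profiles.
     // The stream reuses previewStream->usage and previewStream->dataSpace as provided by the
     // caller.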
2806 void CameraAidlTest::configureStreams(const std::string& name,
2807                                       const std::shared_ptr<ICameraProvider>& provider,
2808                                       PixelFormat format,
2809                                       std::shared_ptr<ICameraDeviceSession>* session,
2810                                       Stream* previewStream, std::vector<HalStream>* halStreams,
2811                                       bool* supportsPartialResults, int32_t* partialResultCount,
2812                                       bool* useHalBufManager, std::shared_ptr<DeviceCb>* outCb,
2813                                       uint32_t streamConfigCounter, bool maxResolution,
2814                                       RequestAvailableDynamicRangeProfilesMap dynamicRangeProf,
2815                                       RequestAvailableColorSpaceProfilesMap colorSpaceProf) {
2816     ASSERT_NE(nullptr, session);
2817     ASSERT_NE(nullptr, halStreams);
2818     ASSERT_NE(nullptr, previewStream);
2819     ASSERT_NE(nullptr, supportsPartialResults);
2820     ASSERT_NE(nullptr, partialResultCount);
2821     ASSERT_NE(nullptr, useHalBufManager);
2822     ASSERT_NE(nullptr, outCb);
2823 
2824     ALOGI("configureStreams: Testing camera device %s", name.c_str());
2825 
2826     std::vector<AvailableStream> outputStreams;
2827     std::shared_ptr<ICameraDevice> device;
2828 
2829     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2830     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2831           ret.getServiceSpecificError());
2832     ASSERT_TRUE(ret.isOk());
2833     ASSERT_NE(device, nullptr);
2834 
2835     CameraMetadata metadata;
2836     camera_metadata_t* staticMeta;
2837     ret = device->getCameraCharacteristics(&metadata);
2838     ASSERT_TRUE(ret.isOk());
2839     staticMeta = clone_camera_metadata(
2840             reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
2841     ASSERT_NE(staticMeta, nullptr);
2842 
2843     camera_metadata_ro_entry entry;
2844     auto status =
2845             find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
2846     if ((0 == status) && (entry.count > 0)) {
2847         *partialResultCount = entry.data.i32[0];
2848         *supportsPartialResults = (*partialResultCount > 1);
2849     }
2850 
2851     *outCb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
2852     ret = device->open(*outCb, session);
2853     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
2854           ret.getServiceSpecificError());
2855     ASSERT_TRUE(ret.isOk());
2856     ASSERT_NE(*session, nullptr);
2857 
2858     *useHalBufManager = false;
2859     status = find_camera_metadata_ro_entry(
2860             staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
2861     if ((0 == status) && (entry.count == 1)) {
2862         *useHalBufManager = (entry.data.u8[0] ==
2863                              ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
2864     }
2865 
2866     outputStreams.clear();
2867     Size maxSize;
2868     if (maxResolution) {
2869         auto rc = getMaxOutputSizeForFormat(staticMeta, format, &maxSize, maxResolution);
2870         ASSERT_EQ(Status::OK, rc);
2871     } else {
2872         AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2873             static_cast<int32_t>(format)};
2874         auto rc = getAvailableOutputStreams(staticMeta, outputStreams, &previewThreshold);
2875 
2876         ASSERT_EQ(Status::OK, rc);
2877         ASSERT_FALSE(outputStreams.empty());
2878         maxSize.width = outputStreams[0].width;
2879         maxSize.height = outputStreams[0].height;
2880     }
2881 
2882 
2883     std::vector<Stream> streams(1);
2884     streams[0] = {0,
2885                   StreamType::OUTPUT,
2886                   maxSize.width,
2887                   maxSize.height,
2888                   format,
2889                   previewStream->usage,
2890                   previewStream->dataSpace,
2891                   StreamRotation::ROTATION_0,
2892                   "",
2893                   0,
2894                   -1,
2895                   {maxResolution ? SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION
2896                                  : SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2897                   dynamicRangeProf,
2898                   ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
2899                   static_cast<int>(colorSpaceProf)};
2900 
2901     StreamConfiguration config;
2902     config.streams = streams;
2903     config.operationMode = StreamConfigurationMode::NORMAL_MODE;
2904     config.streamConfigCounter = streamConfigCounter;
2905     config.multiResolutionInputImage = false;
2906     CameraMetadata req;
2907     RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
2908     ret = (*session)->constructDefaultRequestSettings(reqTemplate, &req);
2909     ASSERT_TRUE(ret.isOk());
2910     config.sessionParams = req;
2911 
2912     bool supported = false;
2913     ret = device->isStreamCombinationSupported(config, &supported);
2914     ASSERT_TRUE(ret.isOk());
2915     ASSERT_EQ(supported, true);
2916 
2917     ret = (*session)->configureStreams(config, halStreams);
2918     ASSERT_TRUE(ret.isOk());
2919 
2920     if (*useHalBufManager) {
2921         std::vector<Stream> ss(1);
2922         std::vector<HalStream> hs(1);
2923         ss[0] = streams[0];
2924         hs[0] = (*halStreams)[0];
2925         (*outCb)->setCurrentStreamConfig(ss, hs);
2926     }
2927 
2928     *previewStream = streams[0];
2929     ASSERT_TRUE(ret.isOk());
2930 }
2931 
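     // A device is 10-bit capable if ANDROID_REQUEST_AVAILABLE_CAPABILITIES contains
     // DYNAMIC_RANGE_TEN_BIT.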
2932 bool CameraAidlTest::is10BitDynamicRangeCapable(const camera_metadata_t* staticMeta) {
2933     camera_metadata_ro_entry scalerEntry;
2934     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2935                                            &scalerEntry);
2936     if (rc == 0) {
2937         for (uint32_t i = 0; i < scalerEntry.count; i++) {
2938             if (scalerEntry.data.u8[i] ==
2939                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) {
2940                 return true;
2941             }
2942         }
2943     }
2944     return false;
2945 }
2946 
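     // ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP is parsed as a flat list of int64
     // triples, with the profile id in the first slot of each triple; the other two slots
     // (profile constraint and latency information) are not checked here. STANDARD must not be
     // advertised, duplicates are rejected, and a device that lists anything must at least list
     // HLG10.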
2947 void CameraAidlTest::get10BitDynamicRangeProfiles(
2948         const camera_metadata_t* staticMeta,
2949         std::vector<RequestAvailableDynamicRangeProfilesMap>* profiles) {
2950     ASSERT_NE(nullptr, staticMeta);
2951     ASSERT_NE(nullptr, profiles);
2952     camera_metadata_ro_entry entry;
2953     std::unordered_set<int64_t> entries;
2954     int rc = find_camera_metadata_ro_entry(
2955             staticMeta, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP, &entry);
2956     ASSERT_EQ(rc, 0);
2957     ASSERT_TRUE(entry.count > 0);
2958     ASSERT_EQ(entry.count % 3, 0);
2959 
2960     for (uint32_t i = 0; i < entry.count; i += 3) {
2961         ASSERT_NE(entry.data.i64[i], ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
2962         ASSERT_EQ(entries.find(entry.data.i64[i]), entries.end());
2963         entries.insert(static_cast<int64_t>(entry.data.i64[i]));
2964         profiles->emplace_back(
2965                 static_cast<RequestAvailableDynamicRangeProfilesMap>(entry.data.i64[i]));
2966     }
2967 
2968     if (!entries.empty()) {
2969         ASSERT_NE(entries.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10),
2970                   entries.end());
2971     }
2972 }
2973 
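     // For every output buffer of the request, import it and check which HDR metadata blobs
     // (SMPTE 2086, SMPTE 2094-10, SMPTE 2094-40) gralloc reports, against the expectations of
     // the dynamic range profile the stream was configured with.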
2974 void CameraAidlTest::verify10BitMetadata(
2975         HandleImporter& importer, const InFlightRequest& request,
2976         aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap
2977                 profile) {
2978     for (auto b : request.resultOutputBuffers) {
2979         importer.importBuffer(b.buffer.buffer);
2980         bool smpte2086Present = importer.isSmpte2086Present(b.buffer.buffer);
2981         bool smpte2094_10Present = importer.isSmpte2094_10Present(b.buffer.buffer);
2982         bool smpte2094_40Present = importer.isSmpte2094_40Present(b.buffer.buffer);
2983 
2984         switch (static_cast<int64_t>(profile)) {
2985             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
2986                 ASSERT_FALSE(smpte2086Present);
2987                 ASSERT_FALSE(smpte2094_10Present);
2988                 ASSERT_FALSE(smpte2094_40Present);
2989                 break;
2990             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
2991                 ASSERT_TRUE(smpte2086Present);
2992                 ASSERT_FALSE(smpte2094_10Present);
2993                 ASSERT_FALSE(smpte2094_40Present);
2994                 break;
2995             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
2996                 ASSERT_FALSE(smpte2094_10Present);
2997                 ASSERT_TRUE(smpte2094_40Present);
2998                 break;
2999             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
3000             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
3001             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
3002             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
3003             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
3004             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
3005             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
3006             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
3007                 ASSERT_FALSE(smpte2086Present);
3008                 ASSERT_TRUE(smpte2094_10Present);
3009                 ASSERT_FALSE(smpte2094_40Present);
3010                 break;
3011             default:
3012                 ALOGE("%s: Unexpected 10-bit dynamic range profile: %" PRId64, __FUNCTION__,
3013                       profile);
3014                 ADD_FAILURE();
3015         }
3016         importer.freeBuffer(b.buffer.buffer);
3017     }
3018 }
3019 
3020 bool CameraAidlTest::reportsColorSpaces(const camera_metadata_t* staticMeta) {
3021     camera_metadata_ro_entry capabilityEntry;
3022     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3023                                            &capabilityEntry);
3024     if (rc == 0) {
3025         for (uint32_t i = 0; i < capabilityEntry.count; i++) {
3026             if (capabilityEntry.data.u8[i] ==
3027                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
3028                 return true;
3029             }
3030         }
3031     }
3032     return false;
3033 }
3034 
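     // ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP is likewise a flat list of int64
     // triples; the first slot of each triple is the color space, which must never be
     // UNSPECIFIED. Only previously unseen color spaces are appended to the output vector.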
3035 void CameraAidlTest::getColorSpaceProfiles(
3036         const camera_metadata_t* staticMeta,
3037         std::vector<RequestAvailableColorSpaceProfilesMap>* profiles) {
3038     ASSERT_NE(nullptr, staticMeta);
3039     ASSERT_NE(nullptr, profiles);
3040     camera_metadata_ro_entry entry;
3041     int rc = find_camera_metadata_ro_entry(
3042             staticMeta, ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry);
3043     ASSERT_EQ(rc, 0);
3044     ASSERT_TRUE(entry.count > 0);
3045     ASSERT_EQ(entry.count % 3, 0);
3046 
3047     for (uint32_t i = 0; i < entry.count; i += 3) {
3048         ASSERT_NE(entry.data.i64[i],
3049                 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
3050         if (std::find(profiles->begin(), profiles->end(),
3051                 static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]))
3052                 == profiles->end()) {
3053             profiles->emplace_back(
3054                     static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]));
3055         }
3056     }
3057 }
3058 
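     // A (color space, dynamic range profile, pixel format) combination is compatible when the
     // color space profiles map contains a triple with the same color space, an image format
     // equal to the public format of the pixel format, and a dynamic range bitmask that includes
     // the requested profile.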
3059 bool CameraAidlTest::isColorSpaceCompatibleWithDynamicRangeAndPixelFormat(
3060         const camera_metadata_t* staticMeta,
3061         RequestAvailableColorSpaceProfilesMap colorSpace,
3062         RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile,
3063         aidl::android::hardware::graphics::common::PixelFormat pixelFormat) {
3064     camera_metadata_ro_entry entry;
3065     int rc = find_camera_metadata_ro_entry(
3066             staticMeta, ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry);
3067 
3068     if (rc == 0) {
3069         for (uint32_t i = 0; i < entry.count; i += 3) {
3070             RequestAvailableColorSpaceProfilesMap entryColorSpace =
3071                     static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]);
3072             int64_t dynamicRangeProfileI64 = static_cast<int64_t>(dynamicRangeProfile);
3073             int32_t entryImageFormat = static_cast<int32_t>(entry.data.i64[i + 1]);
3074             int32_t expectedImageFormat = halFormatToPublicFormat(pixelFormat);
3075             if (entryColorSpace == colorSpace
3076                     && (entry.data.i64[i + 2] & dynamicRangeProfileI64) != 0
3077                     && entryImageFormat == expectedImageFormat) {
3078                 return true;
3079             }
3080         }
3081     }
3082 
3083     return false;
3084 }
3085 
3086 const char* CameraAidlTest::getColorSpaceProfileString(
3087         RequestAvailableColorSpaceProfilesMap colorSpace) {
3088     auto colorSpaceCast = static_cast<int>(colorSpace);
3089     switch (colorSpaceCast) {
3090         case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED:
3091             return "UNSPECIFIED";
3092         case ColorSpaceNamed::SRGB:
3093             return "SRGB";
3094         case ColorSpaceNamed::LINEAR_SRGB:
3095             return "LINEAR_SRGB";
3096         case ColorSpaceNamed::EXTENDED_SRGB:
3097             return "EXTENDED_SRGB";
3098         case ColorSpaceNamed::LINEAR_EXTENDED_SRGB:
3099             return "LINEAR_EXTENDED_SRGB";
3100         case ColorSpaceNamed::BT709:
3101             return "BT709";
3102         case ColorSpaceNamed::BT2020:
3103             return "BT2020";
3104         case ColorSpaceNamed::DCI_P3:
3105             return "DCI_P3";
3106         case ColorSpaceNamed::DISPLAY_P3:
3107             return "DISPLAY_P3";
3108         case ColorSpaceNamed::NTSC_1953:
3109             return "NTSC_1953";
3110         case ColorSpaceNamed::SMPTE_C:
3111             return "SMPTE_C";
3112         case ColorSpaceNamed::ADOBE_RGB:
3113             return "ADOBE_RGB";
3114         case ColorSpaceNamed::PRO_PHOTO_RGB:
3115             return "PRO_PHOTO_RGB";
3116         case ColorSpaceNamed::ACES:
3117             return "ACES";
3118         case ColorSpaceNamed::ACESCG:
3119             return "ACESCG";
3120         case ColorSpaceNamed::CIE_XYZ:
3121             return "CIE_XYZ";
3122         case ColorSpaceNamed::CIE_LAB:
3123             return "CIE_LAB";
3124         case ColorSpaceNamed::BT2020_HLG:
3125             return "BT2020_HLG";
3126         case ColorSpaceNamed::BT2020_PQ:
3127             return "BT2020_PQ";
3128         default:
3129             return "INVALID";
3130     }
3131 
3132     return "INVALID";
3133 }
3134 
3135 const char* CameraAidlTest::getDynamicRangeProfileString(
3136         RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile) {
3137     auto dynamicRangeProfileCast =
3138             static_cast<camera_metadata_enum_android_request_available_dynamic_range_profiles_map>
3139             (dynamicRangeProfile);
3140     switch (dynamicRangeProfileCast) {
3141         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD:
3142             return "STANDARD";
3143         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
3144             return "HLG10";
3145         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
3146             return "HDR10";
3147         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
3148             return "HDR10_PLUS";
3149         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
3150             return "DOLBY_VISION_10B_HDR_REF";
3151         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
3152             return "DOLBY_VISION_10B_HDR_REF_PO";
3153         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
3154             return "DOLBY_VISION_10B_HDR_OEM";
3155         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
3156             return "DOLBY_VISION_10B_HDR_OEM_PO";
3157         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
3158             return "DOLBY_VISION_8B_HDR_REF";
3159         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
3160             return "DOLBY_VISION_8B_HDR_REF_PO";
3161         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
3162             return "DOLBY_VISION_8B_HDR_OEM";
3163         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
3164             return "DOLBY_VISION_8B_HDR_OEM_PO";
3165         default:
3166             return "INVALID";
3167     }
3168 
3169     return "INVALID";
3170 }
3171 
3172 int32_t CameraAidlTest::halFormatToPublicFormat(
3173         aidl::android::hardware::graphics::common::PixelFormat pixelFormat) {
3174     // This is an incomplete mapping of pixel format to image format and assumes dataspaces
3175     // (see getDataspace)
3176     switch (pixelFormat) {
3177     case PixelFormat::BLOB:
3178         return 0x100; // ImageFormat.JPEG
3179     case PixelFormat::Y16:
3180         return 0x44363159; // ImageFormat.DEPTH16
3181     default:
3182         return static_cast<int32_t>(pixelFormat);
3183     }
3184 }
3185 
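     // Returns true if ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES includes the ZOOM override.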
3186 bool CameraAidlTest::supportZoomSettingsOverride(const camera_metadata_t* staticMeta) {
3187     camera_metadata_ro_entry availableOverridesEntry;
3188     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
3189                                            &availableOverridesEntry);
3190     if (rc == 0) {
3191         for (size_t i = 0; i < availableOverridesEntry.count; i++) {
3192             if (availableOverridesEntry.data.i32[i] == ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
3193                 return true;
3194             }
3195         }
3196     }
3197     return false;
3198 }
3199 
3200 bool CameraAidlTest::supportsCroppedRawUseCase(const camera_metadata_t *staticMeta) {
3201     camera_metadata_ro_entry availableStreamUseCasesEntry;
3202     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
3203                                            &availableStreamUseCasesEntry);
3204     if (rc == 0) {
3205         for (size_t i = 0; i < availableStreamUseCasesEntry.count; i++) {
3206             if (availableStreamUseCasesEntry.data.i64[i] ==
3207                     ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW) {
3208                 return true;
3209             }
3210         }
3211     }
3212     return false;
3213 }
3214 
3215 bool CameraAidlTest::isPerFrameControl(const camera_metadata_t* staticMeta) {
3216     camera_metadata_ro_entry syncLatencyEntry;
3217     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_SYNC_MAX_LATENCY,
3218                                            &syncLatencyEntry);
3219     if (rc == 0 && syncLatencyEntry.data.i32[0] == ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL) {
3220         return true;
3221     }
3222     return false;
3223 }
3224 
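     // Convenience wrapper around configureSingleStream() for a HWCOMPOSER-usage preview stream
     // using the PREVIEW request template.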
3225 void CameraAidlTest::configurePreviewStream(
3226         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
3227         const AvailableStream* previewThreshold, std::shared_ptr<ICameraDeviceSession>* session,
3228         Stream* previewStream, std::vector<HalStream>* halStreams, bool* supportsPartialResults,
3229         int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb,
3230         uint32_t streamConfigCounter) {
3231     configureSingleStream(name, provider, previewThreshold, GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
3232                           RequestTemplate::PREVIEW, session, previewStream, halStreams,
3233                           supportsPartialResults, partialResultCount, useHalBufManager, cb,
3234                           streamConfigCounter);
3235 }
3236 
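     // Returns Status::OK only if ANDROID_REQUEST_AVAILABLE_CAPABILITIES advertises
     // OFFLINE_PROCESSING.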
3237 Status CameraAidlTest::isOfflineSessionSupported(const camera_metadata_t* staticMeta) {
3238     Status ret = Status::OPERATION_NOT_SUPPORTED;
3239     if (nullptr == staticMeta) {
3240         return Status::ILLEGAL_ARGUMENT;
3241     }
3242 
3243     camera_metadata_ro_entry entry;
3244     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3245                                            &entry);
3246     if (0 != rc) {
3247         return Status::ILLEGAL_ARGUMENT;
3248     }
3249 
3250     for (size_t i = 0; i < entry.count; i++) {
3251         if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING == entry.data.u8[i]) {
3252             ret = Status::OK;
3253             break;
3254         }
3255     }
3256 
3257     return ret;
3258 }
3259 
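     // Configures a single CPU-readable still capture stream for offline processing tests,
     // using the largest available output size that satisfies the given threshold.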
3260 void CameraAidlTest::configureOfflineStillStream(
3261         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
3262         const AvailableStream* threshold, std::shared_ptr<ICameraDeviceSession>* session,
3263         Stream* stream, std::vector<HalStream>* halStreams, bool* supportsPartialResults,
3264         int32_t* partialResultCount, std::shared_ptr<DeviceCb>* outCb, int32_t* jpegBufferSize,
3265         bool* useHalBufManager) {
3266     ASSERT_NE(nullptr, session);
3267     ASSERT_NE(nullptr, halStreams);
3268     ASSERT_NE(nullptr, stream);
3269     ASSERT_NE(nullptr, supportsPartialResults);
3270     ASSERT_NE(nullptr, partialResultCount);
3271     ASSERT_NE(nullptr, outCb);
3272     ASSERT_NE(nullptr, jpegBufferSize);
3273     ASSERT_NE(nullptr, useHalBufManager);
3274 
3275     std::vector<AvailableStream> outputStreams;
3276     std::shared_ptr<ICameraDevice> cameraDevice;
3277     ALOGI("configureStreams: Testing camera device %s", name.c_str());
3278 
3279     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &cameraDevice);
3280     ASSERT_TRUE(ret.isOk());
3281     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
3282           ret.getServiceSpecificError());
3283     ASSERT_NE(cameraDevice, nullptr);
3284 
3285     CameraMetadata metadata;
3286     ret = cameraDevice->getCameraCharacteristics(&metadata);
3287     ASSERT_TRUE(ret.isOk());
3288     camera_metadata_t* staticMeta = clone_camera_metadata(
3289             reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
3290     ASSERT_NE(nullptr, staticMeta);
3291 
3292     camera_metadata_ro_entry entry;
3293     auto status =
3294             find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
3295     if ((0 == status) && (entry.count > 0)) {
3296         *partialResultCount = entry.data.i32[0];
3297         *supportsPartialResults = (*partialResultCount > 1);
3298     }
3299 
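    // Determine whether the HAL manages its own output buffers (HAL-level buffer manager).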
3300     *useHalBufManager = false;
3301     status = find_camera_metadata_ro_entry(
3302             staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
3303     if ((0 == status) && (entry.count == 1)) {
3304         *useHalBufManager = (entry.data.u8[0] ==
3305                              ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
3306     }
3307 
3308     auto st = getJpegBufferSize(staticMeta, jpegBufferSize);
3309     ASSERT_EQ(st, Status::OK);
3310 
3311     *outCb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
3312     ret = cameraDevice->open(*outCb, session);
3313     ASSERT_TRUE(ret.isOk());
3314     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
3315           ret.getServiceSpecificError());
3316     ASSERT_NE(*session, nullptr);
3317 
3318     outputStreams.clear();
3319     auto rc = getAvailableOutputStreams(staticMeta, outputStreams, threshold);
3320     ASSERT_EQ(Status::OK, rc);
3321     ASSERT_FALSE(outputStreams.empty());
3322 
3323     size_t idx = 0;
3324     int currLargest = outputStreams[0].width * outputStreams[0].height;
3325     for (size_t i = 0; i < outputStreams.size(); i++) {
3326         int area = outputStreams[i].width * outputStreams[i].height;
3327         if (area > currLargest) {
3328             idx = i;
3329             currLargest = area;
3330         }
3331     }
3332 
3333     Dataspace dataspace = getDataspace(static_cast<PixelFormat>(outputStreams[idx].format));
3334 
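    // Describe a single still-capture output stream at the selected size; the stream's
    // buffer size field is taken from the camera's maximum JPEG buffer size.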
3335     std::vector<Stream> streams(/*size*/ 1);
3336     streams[0] = {/*id*/ 0,
3337                   StreamType::OUTPUT,
3338                   outputStreams[idx].width,
3339                   outputStreams[idx].height,
3340                   static_cast<PixelFormat>(outputStreams[idx].format),
3341                   static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
3342                           GRALLOC1_CONSUMER_USAGE_CPU_READ),
3343                   dataspace,
3344                   StreamRotation::ROTATION_0,
3345                   /*physicalId*/ std::string(),
3346                   *jpegBufferSize,
3347                   /*groupId*/ 0,
3348                   {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3349                   RequestAvailableDynamicRangeProfilesMap::
3350                           ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
3351                   ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
3352                   static_cast<int>(
3353                           RequestAvailableColorSpaceProfilesMap::
3354                                   ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
3355 
3356     StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE, CameraMetadata()};
3357 
3358     ret = (*session)->configureStreams(config, halStreams);
3359     ASSERT_TRUE(ret.isOk());
3360 
3361     if (*useHalBufManager) {
3362         (*outCb)->setCurrentStreamConfig(streams, *halStreams);
3363     }
3364 
3365     *stream = streams[0];
3366 }
3367 
3368 void CameraAidlTest::updateInflightResultQueue(
3369         const std::shared_ptr<ResultMetadataQueue>& resultQueue) {
3370     std::unique_lock<std::mutex> l(mLock);
3371     for (auto& it : mInflightMap) {
3372         it.second->resultQueue = resultQueue;
3373     }
3374 }
3375 
3376 void CameraAidlTest::processColorSpaceRequest(
3377         RequestAvailableColorSpaceProfilesMap colorSpace,
3378         RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile) {
3379     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3380     CameraMetadata settings;
3381 
3382     for (const auto& name : cameraDeviceNames) {
3383         std::string version, deviceId;
3384         ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
3385         CameraMetadata meta;
3386         std::shared_ptr<ICameraDevice> device;
3387         openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
3388         camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
3389 
3390         // Device does not report color spaces, skip.
3391         if (!reportsColorSpaces(staticMeta)) {
3392             ndk::ScopedAStatus ret = mSession->close();
3393             mSession = nullptr;
3394             ASSERT_TRUE(ret.isOk());
3395             ALOGV("Camera %s does not report color spaces", name.c_str());
3396             continue;
3397         }
3398         std::vector<RequestAvailableColorSpaceProfilesMap> profileList;
3399         getColorSpaceProfiles(staticMeta, &profileList);
3400         ASSERT_FALSE(profileList.empty());
3401 
3402         // Device does not support color space / dynamic range profile, skip
3403         if (std::find(profileList.begin(), profileList.end(), colorSpace)
3404                 == profileList.end() || !isColorSpaceCompatibleWithDynamicRangeAndPixelFormat(
3405                         staticMeta, colorSpace, dynamicRangeProfile,
3406                         PixelFormat::IMPLEMENTATION_DEFINED)) {
3407             ndk::ScopedAStatus ret = mSession->close();
3408             mSession = nullptr;
3409             ASSERT_TRUE(ret.isOk());
3410             ALOGV("Camera %s does not support color space %s with dynamic range profile %s and "
3411                   "pixel format %d", name.c_str(), getColorSpaceProfileString(colorSpace),
3412                   getDynamicRangeProfileString(dynamicRangeProfile),
3413                   static_cast<int>(PixelFormat::IMPLEMENTATION_DEFINED));
3414             continue;
3415         }
3416 
3417         ALOGV("Camera %s supports color space %s with dynamic range profile %s and pixel format %d",
3418                 name.c_str(), getColorSpaceProfileString(colorSpace),
3419                 getDynamicRangeProfileString(dynamicRangeProfile),
3420                 static_cast<int>(PixelFormat::IMPLEMENTATION_DEFINED));
3421 
3422         // If an HDR dynamic range profile is paired with this color space, the device must
3423         // also advertise the 10-bit dynamic range capability and include that profile in its
3424         // list of supported dynamic range profiles.
3425         if (dynamicRangeProfile != static_cast<RequestAvailableDynamicRangeProfilesMap>(
3426                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
3427             ASSERT_TRUE(is10BitDynamicRangeCapable(staticMeta));
3428 
3429             std::vector<RequestAvailableDynamicRangeProfilesMap> dynamicRangeProfiles;
3430             get10BitDynamicRangeProfiles(staticMeta, &dynamicRangeProfiles);
3431             ASSERT_FALSE(dynamicRangeProfiles.empty());
3432             ASSERT_FALSE(std::find(dynamicRangeProfiles.begin(), dynamicRangeProfiles.end(),
3433                     dynamicRangeProfile) == dynamicRangeProfiles.end());
3434         }
3435 
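        // Create request settings from the default PREVIEW template, validate the metadata,
        // and keep a copy to attach to the capture requests below.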
3436         CameraMetadata req;
3437         android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
3438         ndk::ScopedAStatus ret =
3439                 mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
3440         ASSERT_TRUE(ret.isOk());
3441 
3442         const camera_metadata_t* metadata =
3443                 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
3444         size_t expectedSize = req.metadata.size();
3445         int result = validate_camera_metadata_structure(metadata, &expectedSize);
3446         ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
3447 
3448         size_t entryCount = get_camera_metadata_entry_count(metadata);
3449         ASSERT_GT(entryCount, 0u);
3450         defaultSettings = metadata;
3451 
3452         const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
3453         uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
3454         settings.metadata = std::vector(
3455                 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
3456         overrideRotateAndCrop(&settings);
3457 
3458         ret = mSession->close();
3459         mSession = nullptr;
3460         ASSERT_TRUE(ret.isOk());
3461 
3462         std::vector<HalStream> halStreams;
3463         bool supportsPartialResults = false;
3464         bool useHalBufManager = false;
3465         int32_t partialResultCount = 0;
3466         Stream previewStream;
3467         std::shared_ptr<DeviceCb> cb;
3468 
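        // Reconfigure the session with a preview stream that uses the requested color space
        // and dynamic range profile.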
3469         previewStream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
3470                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
3471         configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
3472                          &previewStream, &halStreams, &supportsPartialResults, &partialResultCount,
3473                          &useHalBufManager, &cb, 0,
3474                          /*maxResolution*/ false, dynamicRangeProfile, colorSpace);
3475         ASSERT_NE(mSession, nullptr);
3476 
3477         ::aidl::android::hardware::common::fmq::MQDescriptor<
3478                 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
3479                 descriptor;
3480         auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
3481         ASSERT_TRUE(resultQueueRet.isOk());
3482 
3483         std::shared_ptr<ResultMetadataQueue> resultQueue =
3484                 std::make_shared<ResultMetadataQueue>(descriptor);
3485         if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
3486             ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
3487             resultQueue = nullptr;
3488             // Don't use the queue onwards.
3489         }
3490 
3491         mInflightMap.clear();
3492         // Submit enough requests to fill the HAL's inflight queue.
3493         std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
3494 
3495         for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
3496             std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
3497                     static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
3498                     partialResultCount, std::unordered_set<std::string>(), resultQueue);
3499 
3500             CaptureRequest& request = requests[requestId];
3501             std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
3502             outputBuffers.resize(halStreams.size());
3503 
3504             size_t k = 0;
3505             inflightReq->mOutstandingBufferIds.resize(halStreams.size());
3506             std::vector<buffer_handle_t> graphicBuffers;
3507             graphicBuffers.reserve(halStreams.size());
3508 
3509             auto bufferId = requestId + 1;  // Buffer id value 0 is not valid
3510             for (const auto& halStream : halStreams) {
3511                 buffer_handle_t buffer_handle;
3512                 if (useHalBufManager) {
3513                     outputBuffers[k] = {halStream.id,   0,
3514                                         NativeHandle(), BufferStatus::OK,
3515                                         NativeHandle(), NativeHandle()};
3516                 } else {
3517                     auto usage = android_convertGralloc1To0Usage(
3518                             static_cast<uint64_t>(halStream.producerUsage),
3519                             static_cast<uint64_t>(halStream.consumerUsage));
3520                     allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
3521                                             halStream.overrideFormat, &buffer_handle);
3522 
3523                     inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
3524                     graphicBuffers.push_back(buffer_handle);
3525                     outputBuffers[k] = {
3526                             halStream.id,     bufferId,       android::makeToAidl(buffer_handle),
3527                             BufferStatus::OK, NativeHandle(), NativeHandle()};
3528                 }
3529                 k++;
3530             }
3531 
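            // No reprocess input for this request: stream id -1 marks the input buffer as unused.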
3532             request.inputBuffer = {
3533                     -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
3534             request.frameNumber = bufferId;
3535             request.fmqSettingsSize = 0;
3536             request.settings = settings;
3537             request.inputWidth = 0;
3538             request.inputHeight = 0;
3539 
3540             {
3541                 std::unique_lock<std::mutex> l(mLock);
3542                 mInflightMap[bufferId] = inflightReq;
3543             }
3544         }
3545 
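        // Submit the whole batch and expect the HAL to accept every request.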
3546         int32_t numRequestProcessed = 0;
3547         std::vector<BufferCache> cachesToRemove;
3548         ndk::ScopedAStatus returnStatus =
3549             mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
3550         ASSERT_TRUE(returnStatus.isOk());
3551         ASSERT_EQ(numRequestProcessed, requests.size());
3552 
3553         returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
3554                 std::vector<int32_t> {halStreams[0].id});
3555         ASSERT_TRUE(returnStatus.isOk());
3556 
3557         // We are keeping frame numbers and buffer ids consistent. Buffer id value of 0
3558         // is used to indicate a buffer that is not present/available so buffer ids as well
3559         // as frame numbers begin with 1.
3560         for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
3561             const auto& inflightReq = mInflightMap[frameNumber];
3562             std::unique_lock<std::mutex> l(mLock);
3563             while (!inflightReq->errorCodeValid &&
3564                     ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
3565                 auto timeout = std::chrono::system_clock::now() +
3566                                 std::chrono::seconds(kStreamBufferTimeoutSec);
3567                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
3568             }
3569 
3570             ASSERT_FALSE(inflightReq->errorCodeValid);
3571             ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
3572 
3573             if (dynamicRangeProfile != static_cast<RequestAvailableDynamicRangeProfilesMap>(
3574                     ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
3575                 verify10BitMetadata(mHandleImporter, *inflightReq, dynamicRangeProfile);
3576             }
3577         }
3578 
3579         if (useHalBufManager) {
3580             std::vector<int32_t> streamIds(halStreams.size());
3581             for (size_t i = 0; i < streamIds.size(); i++) {
3582                 streamIds[i] = halStreams[i].id;
3583             }
3584             mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
3585             cb->waitForBuffersReturned();
3586         }
3587 
3588         ret = mSession->close();
3589         mSession = nullptr;
3590         ASSERT_TRUE(ret.isOk());
3591     }
3592 }
3593 
3594 void CameraAidlTest::processZoomSettingsOverrideRequests(
3595         int32_t frameCount, const bool *overrideSequence, const bool *expectedResults) {
3596     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3597     AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
3598                                         static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
3599     int64_t bufferId = 1;
3600     int32_t frameNumber = 1;
3601     CameraMetadata settings;
3602     ndk::ScopedAStatus ret;
3603     for (const auto& name : cameraDeviceNames) {
3604         CameraMetadata meta;
3605         std::shared_ptr<ICameraDevice> device;
3606         openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
3607                                &device /*out*/);
3608         camera_metadata_t* staticMeta =
3609                 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
3610 
3611         ret = mSession->close();
3612         mSession = nullptr;
3613         ASSERT_TRUE(ret.isOk());
3614 
3615         // Device does not support zoom settings override, skip.
3616         if (!supportZoomSettingsOverride(staticMeta)) {
3617             continue;
3618         }
3619 
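        // The override-latency checks below assume per-frame control; skip devices without it.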
3620         if (!isPerFrameControl(staticMeta)) {
3621             continue;
3622         }
3623 
3624         bool supportsPartialResults = false;
3625         bool useHalBufManager = false;
3626         int32_t partialResultCount = 0;
3627         Stream previewStream;
3628         std::vector<HalStream> halStreams;
3629         std::shared_ptr<DeviceCb> cb;
3630         configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
3631                                &previewStream /*out*/, &halStreams /*out*/,
3632                                &supportsPartialResults /*out*/, &partialResultCount /*out*/,
3633                                &useHalBufManager /*out*/, &cb /*out*/);
3634         ASSERT_NE(mSession, nullptr);
3635 
3636         ::aidl::android::hardware::common::fmq::MQDescriptor<
3637                 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
3638                 descriptor;
3639         auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
3640         ASSERT_TRUE(resultQueueRet.isOk());
3641 
3642         std::shared_ptr<ResultMetadataQueue> resultQueue =
3643                 std::make_shared<ResultMetadataQueue>(descriptor);
3644         if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
3645             ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
3646             resultQueue = nullptr;
3647             // Don't use the queue onwards.
3648         }
3649 
3650         ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
3651         ASSERT_TRUE(ret.isOk());
3652 
3653         mInflightMap.clear();
3654         ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
3655         std::vector<CaptureRequest> requests(frameCount);
3656         std::vector<buffer_handle_t> buffers(frameCount);
3657         std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(frameCount);
3658         std::vector<CameraMetadata> requestSettings(frameCount);
3659 
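        // Build one capture request per frame, toggling ANDROID_CONTROL_SETTINGS_OVERRIDE
        // according to the caller-provided override sequence.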
3660         for (int32_t i = 0; i < frameCount; i++) {
3661             std::unique_lock<std::mutex> l(mLock);
3662             CaptureRequest& request = requests[i];
3663             std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
3664             outputBuffers.resize(1);
3665             StreamBuffer& outputBuffer = outputBuffers[0];
3666 
3667             if (useHalBufManager) {
3668                 outputBuffer = {halStreams[0].id, 0,
3669                                 NativeHandle(),   BufferStatus::OK,
3670                                 NativeHandle(),   NativeHandle()};
3671             } else {
3672                 allocateGraphicBuffer(previewStream.width, previewStream.height,
3673                                       android_convertGralloc1To0Usage(
3674                                               static_cast<uint64_t>(halStreams[0].producerUsage),
3675                                               static_cast<uint64_t>(halStreams[0].consumerUsage)),
3676                                       halStreams[0].overrideFormat, &buffers[i]);
3677                 outputBuffer = {halStreams[0].id, bufferId + i,   ::android::makeToAidl(buffers[i]),
3678                                 BufferStatus::OK, NativeHandle(), NativeHandle()};
3679             }
3680 
3681             // Set appropriate settings override tag
3682             requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
3683             int32_t settingsOverride = overrideSequence[i] ?
3684                     ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM : ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
3685             ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
3686                     &settingsOverride, 1));
3687             camera_metadata_t* metaBuffer = requestMeta.release();
3688             uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
3689             requestSettings[i].metadata = std::vector(
3690                     rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
3691             overrideRotateAndCrop(&(requestSettings[i]));
3692             request.frameNumber = frameNumber + i;
3693             request.fmqSettingsSize = 0;
3694             request.settings = requestSettings[i];
3695             request.inputBuffer = {
3696                     -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
3697 
3698             inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
3699                                                                 partialResultCount, resultQueue);
3700             mInflightMap[frameNumber + i] = inflightReqs[i];
3701         }
3702 
3703         int32_t numRequestProcessed = 0;
3704         std::vector<BufferCache> cachesToRemove;
3705 
3706         ndk::ScopedAStatus returnStatus =
3707                 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
3708         ASSERT_TRUE(returnStatus.isOk());
3709         ASSERT_EQ(numRequestProcessed, frameCount);
3710 
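        // Wait for each capture result and check that the reported override state and
        // overriding frame number match the expectations.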
3711         for (size_t i = 0; i < frameCount; i++) {
3712             std::unique_lock<std::mutex> l(mLock);
3713             while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
3714                                                         (!inflightReqs[i]->haveResultMetadata))) {
3715                 auto timeout = std::chrono::system_clock::now() +
3716                                std::chrono::seconds(kStreamBufferTimeoutSec);
3717                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
3718             }
3719 
3720             ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
3721             ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
3722             ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
3723             ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
3724             ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_CONTROL_SETTINGS_OVERRIDE));
3725             camera_metadata_entry_t overrideResult =
3726                     inflightReqs[i]->collectedResult.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
3727             ASSERT_EQ(overrideResult.data.i32[0] == ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM,
3728                     expectedResults[i]);
3729             ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(
3730                     ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER));
3731             camera_metadata_entry_t frameNumberEntry = inflightReqs[i]->collectedResult.find(
3732                     ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER);
3733             ALOGV("%s: i %zu, expectedResults[i] %d, overrideResult is %d, frameNumber %d",
3734                   __FUNCTION__, i, expectedResults[i], overrideResult.data.i32[0],
3735                   frameNumberEntry.data.i32[0]);
3736             if (expectedResults[i]) {
3737                 ASSERT_GT(frameNumberEntry.data.i32[0], inflightReqs[i]->frameNumber);
3738             } else {
3739                 ASSERT_EQ(frameNumberEntry.data.i32[0], frameNumber + i);
3740             }
3741         }
3742 
3743         ret = mSession->close();
3744         mSession = nullptr;
3745         ASSERT_TRUE(ret.isOk());
3746     }
3747 }
3748 
3749 void CameraAidlTest::getSupportedSizes(const camera_metadata_t* ch, uint32_t tag, int32_t format,
3750                                        std::vector<std::tuple<size_t, size_t>>* sizes /*out*/) {
3751     if (sizes == nullptr) {
3752         return;
3753     }
3754 
3755     camera_metadata_ro_entry entry;
3756     int retcode = find_camera_metadata_ro_entry(ch, tag, &entry);
3757     if ((0 == retcode) && (entry.count > 0)) {
3758         // Scaler entry contains 4 elements (format, width, height, type)
3759         for (size_t i = 0; i < entry.count; i += 4) {
3760             if ((entry.data.i32[i] == format) &&
3761                 (entry.data.i32[i + 3] == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT)) {
3762                 sizes->push_back(std::make_tuple(entry.data.i32[i + 1], entry.data.i32[i + 2]));
3763             }
3764         }
3765     }
3766 }
3767 
3768 void CameraAidlTest::getSupportedDurations(const camera_metadata_t* ch, uint32_t tag,
3769                                            int32_t format,
3770                                            const std::vector<std::tuple<size_t, size_t>>& sizes,
3771                                            std::vector<int64_t>* durations /*out*/) {
3772     if (durations == nullptr) {
3773         return;
3774     }
3775 
3776     camera_metadata_ro_entry entry;
3777     int retcode = find_camera_metadata_ro_entry(ch, tag, &entry);
3778     if ((0 == retcode) && (entry.count > 0)) {
3779         // Duration entry contains 4 elements (format, width, height, duration)
3780         for (const auto& size : sizes) {
3781             int64_t width = std::get<0>(size);
3782             int64_t height = std::get<1>(size);
3783             for (size_t i = 0; i < entry.count; i += 4) {
3784                 if ((entry.data.i64[i] == format) && (entry.data.i64[i + 1] == width) &&
3785                     (entry.data.i64[i + 2] == height)) {
3786                     durations->push_back(entry.data.i64[i + 3]);
3787                     break;
3788                 }
3789             }
3790         }
3791     }
3792 }
3793