1 /*
2  * Copyright (C) 2022 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "camera_aidl_test.h"
18 
19 #include <inttypes.h>
20 
21 #include <CameraParameters.h>
22 #include <HandleImporter.h>
23 #include <aidl/android/hardware/camera/device/ICameraDevice.h>
24 #include <aidl/android/hardware/camera/metadata/CameraMetadataTag.h>
25 #include <aidl/android/hardware/camera/metadata/RequestAvailableColorSpaceProfilesMap.h>
26 #include <aidl/android/hardware/camera/metadata/RequestAvailableDynamicRangeProfilesMap.h>
27 #include <aidl/android/hardware/camera/metadata/SensorInfoColorFilterArrangement.h>
28 #include <aidl/android/hardware/camera/metadata/SensorPixelMode.h>
29 #include <aidl/android/hardware/camera/provider/BnCameraProviderCallback.h>
30 #include <aidlcommonsupport/NativeHandle.h>
31 #include <android/binder_manager.h>
32 #include <android/binder_process.h>
33 #include <com_android_internal_camera_flags.h>
34 #include <device_cb.h>
35 #include <empty_device_cb.h>
36 #include <grallocusage/GrallocUsageConversion.h>
37 #include <hardware/gralloc1.h>
38 #include <simple_device_cb.h>
39 #include <ui/Fence.h>
40 #include <ui/GraphicBufferAllocator.h>
41 #include <regex>
42 #include <typeinfo>
43 #include "utils/Errors.h"
44 #include <nativebase/nativebase.h>
45 
46 using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
47 using ::aidl::android::hardware::camera::common::TorchModeStatus;
48 using ::aidl::android::hardware::camera::device::CameraMetadata;
49 using ::aidl::android::hardware::camera::device::ICameraDevice;
50 using ::aidl::android::hardware::camera::metadata::CameraMetadataTag;
51 using ::aidl::android::hardware::camera::metadata::SensorInfoColorFilterArrangement;
52 using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
53 using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
54 using ::aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination;
55 using ::aidl::android::hardware::camera::provider::ICameraProvider;
56 using ::aidl::android::hardware::common::NativeHandle;
57 using ::android::hardware::camera::common::V1_0::helper::Size;
58 using ::ndk::ScopedAStatus;
59 using ::ndk::SpAIBinder;
60 
61 namespace {
62 namespace flags = com::android::internal::camera::flags;
63 
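// Splits a provider service descriptor of the form <service_name>/<type>/<id> into its
// type and numeric id (e.g. "<service_name>/internal/0" -> type "internal", id 0),
// reporting a test failure and returning false on malformed input.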
64 bool parseProviderName(const std::string& serviceDescriptor, std::string* type /*out*/,
65                        uint32_t* id /*out*/) {
66     if (!type || !id) {
67         ADD_FAILURE();
68         return false;
69     }
70 
71     // expected format: <service_name>/<type>/<id>
72     std::string::size_type slashIdx1 = serviceDescriptor.find('/');
73     if (slashIdx1 == std::string::npos || slashIdx1 == serviceDescriptor.size() - 1) {
74         ADD_FAILURE() << "Provider name does not have / separator between name, type, and id";
75         return false;
76     }
77 
78     std::string::size_type slashIdx2 = serviceDescriptor.find('/', slashIdx1 + 1);
79     if (slashIdx2 == std::string::npos || slashIdx2 == serviceDescriptor.size() - 1) {
80         ADD_FAILURE() << "Provider name does not have / separator between type and id";
81         return false;
82     }
83 
84     std::string typeVal = serviceDescriptor.substr(slashIdx1 + 1, slashIdx2 - slashIdx1 - 1);
85 
86     char* endPtr;
87     errno = 0;
88     int64_t idVal = strtol(serviceDescriptor.c_str() + slashIdx2 + 1, &endPtr, 10);
89     if (errno != 0) {
90         ADD_FAILURE() << "cannot parse provider id as an integer:" << serviceDescriptor.c_str()
91                       << strerror(errno) << errno;
92         return false;
93     }
94     if (endPtr != serviceDescriptor.c_str() + serviceDescriptor.size()) {
95         ADD_FAILURE() << "provider id has unexpected length " << serviceDescriptor.c_str();
96         return false;
97     }
98     if (idVal < 0) {
99         ADD_FAILURE() << "id is negative: " << serviceDescriptor.c_str() << idVal;
100         return false;
101     }
102 
103     *type = typeVal;
104     *id = static_cast<uint32_t>(idVal);
105 
106     return true;
107 }
108 
111 const std::vector<int64_t> kMandatoryUseCases = {
112         ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
113         ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
114         ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
115         ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
116         ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
117         ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
118 }  // namespace
119 
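// Per-test setup: starts the binder thread pool, waits for the camera provider service
// named by the test parameter, records the provider type, and reports a NORMAL device state.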
120 void CameraAidlTest::SetUp() {
121     std::string serviceDescriptor = GetParam();
122     ALOGI("get service with name: %s", serviceDescriptor.c_str());
123 
124     bool success = ABinderProcess_setThreadPoolMaxThreadCount(5);
125     ALOGI("ABinderProcess_setThreadPoolMaxThreadCount returns %s", success ? "true" : "false");
126     ASSERT_TRUE(success);
127     ABinderProcess_startThreadPool();
128 
129     SpAIBinder cameraProviderBinder =
130             SpAIBinder(AServiceManager_waitForService(serviceDescriptor.c_str()));
131     ASSERT_NE(cameraProviderBinder.get(), nullptr);
132 
133     std::shared_ptr<ICameraProvider> cameraProvider =
134             ICameraProvider::fromBinder(cameraProviderBinder);
135     ASSERT_NE(cameraProvider.get(), nullptr);
136     mProvider = cameraProvider;
137     uint32_t id;
138     ASSERT_TRUE(parseProviderName(serviceDescriptor, &mProviderType, &id));
139 
140     notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
141 }
142 
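// Closes any camera session that the test body left open.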
143 void CameraAidlTest::TearDown() {
144     if (mSession != nullptr) {
145         ndk::ScopedAStatus ret = mSession->close();
146         ASSERT_TRUE(ret.isOk());
147     }
148 }
149 
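// Waits (up to 300 ms) on each output buffer's release fence and bumps the stored
// timestamp up to the fence signal time when the fence signals later.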
150 void CameraAidlTest::waitForReleaseFence(
151         std::vector<InFlightRequest::StreamBufferAndTimestamp>& resultOutputBuffers) {
152     for (auto& bufferAndTimestamp : resultOutputBuffers) {
153         // wait for the fence timestamp and store it along with the buffer
154         android::sp<android::Fence> releaseFence = nullptr;
155         const native_handle_t* releaseFenceHandle = bufferAndTimestamp.buffer.releaseFence;
156         if (releaseFenceHandle != nullptr && releaseFenceHandle->numFds == 1 &&
157             releaseFenceHandle->data[0] >= 0) {
158             releaseFence = new android::Fence(dup(releaseFenceHandle->data[0]));
159         }
160         if (releaseFence && releaseFence->isValid()) {
161             releaseFence->wait(/*ms*/ 300);
162             nsecs_t releaseTime = releaseFence->getSignalTime();
163             if (bufferAndTimestamp.timeStamp < releaseTime)
164                 bufferAndTimestamp.timeStamp = releaseTime;
165         }
166     }
167 }
168 
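// Returns the provider's camera device names, merged with any external cameras reported
// as PRESENT through the provider callback, filtered by the secure-only flag.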
169 std::vector<std::string> CameraAidlTest::getCameraDeviceNames(
170         std::shared_ptr<ICameraProvider>& provider, bool addSecureOnly) {
171     std::vector<std::string> cameraDeviceNames;
172 
173     ScopedAStatus ret = provider->getCameraIdList(&cameraDeviceNames);
174     if (!ret.isOk()) {
175         ADD_FAILURE() << "Could not get camera id list";
176     }
177 
178     // External camera devices are reported through cameraDeviceStatusChange
179     struct ProviderCb : public BnCameraProviderCallback {
180         ScopedAStatus cameraDeviceStatusChange(const std::string& devName,
181                                                CameraDeviceStatus newStatus) override {
182             ALOGI("camera device status callback name %s, status %d", devName.c_str(),
183                   (int)newStatus);
184             if (newStatus == CameraDeviceStatus::PRESENT) {
185                 externalCameraDeviceNames.push_back(devName);
186             }
187             return ScopedAStatus::ok();
188         }
189 
190         ScopedAStatus torchModeStatusChange(const std::string&, TorchModeStatus) override {
191             return ScopedAStatus::ok();
192         }
193 
194         ScopedAStatus physicalCameraDeviceStatusChange(
195                 const std::string&, const std::string&,
196                 ::aidl::android::hardware::camera::common::CameraDeviceStatus) override {
197             return ScopedAStatus::ok();
198         }
199 
200         std::vector<std::string> externalCameraDeviceNames;
201     };
202     std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
203     auto status = mProvider->setCallback(cb);
204 
205     for (const auto& devName : cb->externalCameraDeviceNames) {
206         if (cameraDeviceNames.end() ==
207             std::find(cameraDeviceNames.begin(), cameraDeviceNames.end(), devName)) {
208             cameraDeviceNames.push_back(devName);
209         }
210     }
211 
212     std::vector<std::string> retList;
213     for (auto& cameraDeviceName : cameraDeviceNames) {
214         bool isSecureOnlyCamera = isSecureOnly(mProvider, cameraDeviceName);
215         if (addSecureOnly) {
216             if (isSecureOnlyCamera) {
217                 retList.emplace_back(cameraDeviceName);
218             }
219         } else if (!isSecureOnlyCamera) {
220             retList.emplace_back(cameraDeviceName);
221         }
222     }
223     return retList;
224 }
225 
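// True when the device's static metadata identifies it as a hidden secure camera.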
226 bool CameraAidlTest::isSecureOnly(const std::shared_ptr<ICameraProvider>& provider,
227                                   const std::string& name) {
228     std::shared_ptr<ICameraDevice> cameraDevice = nullptr;
229     ScopedAStatus retInterface = provider->getCameraDeviceInterface(name, &cameraDevice);
230     if (!retInterface.isOk()) {
231         ADD_FAILURE() << "Failed to get camera device interface for " << name;
232     }
233 
234     CameraMetadata cameraCharacteristics;
235     ScopedAStatus retChars = cameraDevice->getCameraCharacteristics(&cameraCharacteristics);
236     if (!retChars.isOk()) {
237         ADD_FAILURE() << "Failed to get camera characteristics for device " << name;
238     }
239 
240     camera_metadata_t* chars =
241             reinterpret_cast<camera_metadata_t*>(cameraCharacteristics.metadata.data());
242 
243     SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
244     Status retCameraKind = getSystemCameraKind(chars, &systemCameraKind);
245     if (retCameraKind != Status::OK) {
246         ADD_FAILURE() << "Failed to get camera kind for " << name;
247     }
248 
249     return systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA;
250 }
251 
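// Maps each camera id to its full device name by parsing the advertised device names.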
252 std::map<std::string, std::string> CameraAidlTest::getCameraDeviceIdToNameMap(
253         std::shared_ptr<ICameraProvider> provider) {
254     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(provider);
255 
256     std::map<std::string, std::string> idToNameMap;
257     for (auto& name : cameraDeviceNames) {
258         std::string version, cameraId;
259         if (!matchDeviceName(name, mProviderType, &version, &cameraId)) {
260             ADD_FAILURE();
261         }
262         idToNameMap.insert(std::make_pair(std::string(cameraId), name));
263     }
264     return idToNameMap;
265 }
266 
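// Capture-result checks for monochrome cameras: color-only tags must be absent and
// per-channel values (dynamic black level, lens shading map, tonemap curves) must match.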
267 void CameraAidlTest::verifyMonochromeCameraResult(
268         const ::android::hardware::camera::common::V1_0::helper::CameraMetadata& metadata) {
269     camera_metadata_ro_entry entry;
270 
271     // Check tags that are not applicable for monochrome camera
272     ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_GREEN_SPLIT));
273     ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_NEUTRAL_COLOR_POINT));
274     ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_MODE));
275     ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_TRANSFORM));
276     ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_GAINS));
277 
278     // Check dynamicBlackLevel
279     entry = metadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
280     if (entry.count > 0) {
281         ASSERT_EQ(entry.count, 4);
282         for (size_t i = 1; i < entry.count; i++) {
283             ASSERT_FLOAT_EQ(entry.data.f[i], entry.data.f[0]);
284         }
285     }
286 
287     // Check noiseProfile
288     entry = metadata.find(ANDROID_SENSOR_NOISE_PROFILE);
289     if (entry.count > 0) {
290         ASSERT_EQ(entry.count, 2);
291     }
292 
293     // Check lensShadingMap
294     entry = metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
295     if (entry.count > 0) {
296         ASSERT_EQ(entry.count % 4, 0);
297         for (size_t i = 0; i < entry.count / 4; i++) {
298             ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 1], entry.data.f[i * 4]);
299             ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 2], entry.data.f[i * 4]);
300             ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 3], entry.data.f[i * 4]);
301         }
302     }
303 
304     // Check tonemapCurve
305     camera_metadata_ro_entry curveRed = metadata.find(ANDROID_TONEMAP_CURVE_RED);
306     camera_metadata_ro_entry curveGreen = metadata.find(ANDROID_TONEMAP_CURVE_GREEN);
307     camera_metadata_ro_entry curveBlue = metadata.find(ANDROID_TONEMAP_CURVE_BLUE);
308     if (curveRed.count > 0 && curveGreen.count > 0 && curveBlue.count > 0) {
309         ASSERT_EQ(curveRed.count, curveGreen.count);
310         ASSERT_EQ(curveRed.count, curveBlue.count);
311         for (size_t i = 0; i < curveRed.count; i++) {
312             ASSERT_FLOAT_EQ(curveGreen.data.f[i], curveRed.data.f[i]);
313             ASSERT_FLOAT_EQ(curveBlue.data.f[i], curveRed.data.f[i]);
314         }
315     }
316 }
317 
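// The advertised stream use cases must contain every mandatory use case exactly when the
// stream-use-case capability is reported, must include DEFAULT, and must stay within the
// defined or vendor ranges.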
318 void CameraAidlTest::verifyStreamUseCaseCharacteristics(const camera_metadata_t* metadata) {
319     camera_metadata_ro_entry entry;
320     bool hasStreamUseCaseCap = supportsStreamUseCaseCap(metadata);
321 
322     bool supportMandatoryUseCases = false;
323     int retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
324                                                 &entry);
325     if ((0 == retcode) && (entry.count > 0)) {
326         supportMandatoryUseCases = true;
327         for (size_t i = 0; i < kMandatoryUseCases.size(); i++) {
328             if (std::find(entry.data.i64, entry.data.i64 + entry.count, kMandatoryUseCases[i]) ==
329                 entry.data.i64 + entry.count) {
330                 supportMandatoryUseCases = false;
331                 break;
332             }
333         }
334         bool supportDefaultUseCase = false;
335         for (size_t i = 0; i < entry.count; i++) {
336             if (entry.data.i64[i] == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
337                 supportDefaultUseCase = true;
338             }
339             ASSERT_TRUE(entry.data.i64[i] <= ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW
340                         || entry.data.i64[i] >=
341                                 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START);
342         }
343         ASSERT_TRUE(supportDefaultUseCase);
344     }
345 
346     ASSERT_EQ(hasStreamUseCaseCap, supportMandatoryUseCases);
347 }
348 
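// Settings-override support must be consistent: the OFF mode must be listed, and the
// request, result (including overriding frame number), and characteristics keys must all
// be present or all absent.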
349 void CameraAidlTest::verifySettingsOverrideCharacteristics(const camera_metadata_t* metadata) {
350     camera_metadata_ro_entry entry;
351     int retcode = find_camera_metadata_ro_entry(metadata,
352             ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES, &entry);
353     bool supportSettingsOverride = false;
354     if (0 == retcode) {
355         supportSettingsOverride = true;
356         bool hasOff = false;
357         for (size_t i = 0; i < entry.count; i++) {
358             if (entry.data.u8[i] == ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF) {
359                 hasOff = true;
360             }
361         }
362         ASSERT_TRUE(hasOff);
363     }
364 
365     // Check availableRequestKeys
366     retcode = find_camera_metadata_ro_entry(metadata,
367             ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
368     bool hasSettingsOverrideRequestKey = false;
369     if ((0 == retcode) && (entry.count > 0)) {
370         hasSettingsOverrideRequestKey =
371                 std::find(entry.data.i32, entry.data.i32 + entry.count,
372                         ANDROID_CONTROL_SETTINGS_OVERRIDE) != entry.data.i32 + entry.count;
373     } else {
374         ADD_FAILURE() << "Get camera availableRequestKeys failed!";
375     }
376 
377     // Check availableResultKeys
378     retcode = find_camera_metadata_ro_entry(metadata,
379             ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
380     bool hasSettingsOverrideResultKey = false;
381     bool hasOverridingFrameNumberKey = false;
382     if ((0 == retcode) && (entry.count > 0)) {
383         hasSettingsOverrideResultKey =
384                 std::find(entry.data.i32, entry.data.i32 + entry.count,
385                         ANDROID_CONTROL_SETTINGS_OVERRIDE) != entry.data.i32 + entry.count;
386         hasOverridingFrameNumberKey =
387                 std::find(entry.data.i32, entry.data.i32 + entry.count,
388                         ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER)
389                         != entry.data.i32 + entry.count;
390     } else {
391         ADD_FAILURE() << "Get camera availableResultKeys failed!";
392     }
393 
394     // Check availableCharacteristicKeys
395     retcode = find_camera_metadata_ro_entry(metadata,
396             ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
397     bool hasSettingsOverrideCharacteristicsKey = false;
398     if ((0 == retcode) && (entry.count > 0)) {
399         hasSettingsOverrideCharacteristicsKey = std::find(entry.data.i32,
400                 entry.data.i32 + entry.count, ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES)
401                         != entry.data.i32 + entry.count;
402     } else {
403         ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
404     }
405 
406     ASSERT_EQ(supportSettingsOverride, hasSettingsOverrideRequestKey);
407     ASSERT_EQ(supportSettingsOverride, hasSettingsOverrideResultKey);
408     ASSERT_EQ(supportSettingsOverride, hasOverridingFrameNumberKey);
409     ASSERT_EQ(supportSettingsOverride, hasSettingsOverrideCharacteristicsKey);
410 }
411 
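// OK when the MONOCHROME capability is advertised in the static metadata.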
412 Status CameraAidlTest::isMonochromeCamera(const camera_metadata_t* staticMeta) {
413     Status ret = Status::OPERATION_NOT_SUPPORTED;
414     if (nullptr == staticMeta) {
415         return Status::ILLEGAL_ARGUMENT;
416     }
417 
418     camera_metadata_ro_entry entry;
419     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
420                                            &entry);
421 
422     if (0 != rc) {
423         return Status::ILLEGAL_ARGUMENT;
424     }
425 
426     for (size_t i = 0; i < entry.count; i++) {
427         if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME == entry.data.u8[i]) {
428             ret = Status::OK;
429             break;
430         }
431     }
432 
433     return ret;
434 }
435 
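// OK when the LOGICAL_MULTI_CAMERA capability is advertised in the static metadata.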
436 Status CameraAidlTest::isLogicalMultiCamera(const camera_metadata_t* staticMeta) {
437     Status ret = Status::OPERATION_NOT_SUPPORTED;
438     if (nullptr == staticMeta) {
439         return Status::ILLEGAL_ARGUMENT;
440     }
441 
442     camera_metadata_ro_entry entry;
443     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
444                                            &entry);
445     if (0 != rc) {
446         return Status::ILLEGAL_ARGUMENT;
447     }
448 
449     for (size_t i = 0; i < entry.count; i++) {
450         if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA == entry.data.u8[i]) {
451             ret = Status::OK;
452             break;
453         }
454     }
455 
456     return ret;
457 }
458 
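// A logical multi-camera result must report an active physical id that belongs to the
// advertised physical id set; with concert_mode enabled, the active physical sensor crop
// region is also sanity-checked.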
459 void CameraAidlTest::verifyLogicalCameraResult(const camera_metadata_t* staticMetadata,
460                                                const std::vector<uint8_t>& resultMetadata) {
461     camera_metadata_t* metadata = (camera_metadata_t*)resultMetadata.data();
462 
463     std::unordered_set<std::string> physicalIds;
464     Status rc = getPhysicalCameraIds(staticMetadata, &physicalIds);
465     ASSERT_TRUE(Status::OK == rc);
466     ASSERT_TRUE(physicalIds.size() > 1);
467 
468     camera_metadata_ro_entry entry;
469     // Check mainPhysicalId
470     find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID,
471                                   &entry);
472     if (entry.count > 0) {
473         std::string mainPhysicalId(reinterpret_cast<const char*>(entry.data.u8));
474         ASSERT_NE(physicalIds.find(mainPhysicalId), physicalIds.end());
475     } else {
476         ADD_FAILURE() << "Get LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID failed!";
477     }
478 
479     if (flags::concert_mode()) {
480         auto ret = find_camera_metadata_ro_entry(
481                 metadata, ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION, &entry);
482         if ((ret == android::OK) && (entry.count > 0)) {
483             ASSERT_TRUE(entry.count == 4);
484             ASSERT_GE(entry.data.i32[0], 0);  // Left must be non-negative
485             ASSERT_GE(entry.data.i32[1], 0);  // Top must be non-negative
486             ASSERT_GT(entry.data.i32[2], 0);  // Width must be positive
487             ASSERT_GT(entry.data.i32[3], 0);  // Height must be positive
488         }
489     }
490 }
491 
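// With concert_mode enabled, lens intrinsic samples must come in groups of five with one
// non-decreasing timestamp per sample.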
492 void CameraAidlTest::verifyLensIntrinsicsResult(const std::vector<uint8_t>& resultMetadata) {
493     if (flags::concert_mode()) {
494         camera_metadata_t* metadata = (camera_metadata_t*)resultMetadata.data();
495 
496         camera_metadata_ro_entry timestampsEntry, intrinsicsEntry;
497         auto tsRet = find_camera_metadata_ro_entry(
498                 metadata, ANDROID_STATISTICS_LENS_INTRINSIC_TIMESTAMPS, &timestampsEntry);
499         auto inRet = find_camera_metadata_ro_entry(
500                 metadata, ANDROID_STATISTICS_LENS_INTRINSIC_SAMPLES, &intrinsicsEntry);
501         ASSERT_EQ(tsRet, inRet);
502         ASSERT_TRUE((intrinsicsEntry.count % 5) == 0);
503         ASSERT_EQ(timestampsEntry.count, intrinsicsEntry.count / 5);
504         if (timestampsEntry.count > 0) {
505             for (size_t i = 0; i < timestampsEntry.count - 1; i++) {
506                 ASSERT_GE(timestampsEntry.data.i64[i + 1], timestampsEntry.data.i64[i]);
507             }
508         }
509     }
510 }
511 
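// Splits the NUL-separated ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS entry into a set of
// physical camera id strings.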
512 Status CameraAidlTest::getPhysicalCameraIds(const camera_metadata_t* staticMeta,
513                                             std::unordered_set<std::string>* physicalIds) {
514     if ((nullptr == staticMeta) || (nullptr == physicalIds)) {
515         return Status::ILLEGAL_ARGUMENT;
516     }
517 
518     camera_metadata_ro_entry entry;
519     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
520                                            &entry);
521     if (0 != rc) {
522         return Status::ILLEGAL_ARGUMENT;
523     }
524 
525     const uint8_t* ids = entry.data.u8;
526     size_t start = 0;
527     for (size_t i = 0; i < entry.count; i++) {
528         if (ids[i] == '\0') {
529             if (start != i) {
530                 std::string currentId(reinterpret_cast<const char*>(ids + start));
531                 physicalIds->emplace(currentId);
532             }
533             start = i + 1;
534         }
535     }
536 
537     return Status::OK;
538 }
539 
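// Classifies the camera as HIDDEN_SECURE_CAMERA (sole SECURE_IMAGE_DATA capability),
// SYSTEM_ONLY_CAMERA, or PUBLIC from its advertised capabilities.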
540 Status CameraAidlTest::getSystemCameraKind(const camera_metadata_t* staticMeta,
541                                            SystemCameraKind* systemCameraKind) {
542     if (nullptr == staticMeta || nullptr == systemCameraKind) {
543         return Status::ILLEGAL_ARGUMENT;
544     }
545 
546     camera_metadata_ro_entry entry{};
547     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
548                                            &entry);
549     if (0 != rc) {
550         return Status::ILLEGAL_ARGUMENT;
551     }
552 
553     if (entry.count == 1 &&
554         entry.data.u8[0] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA) {
555         *systemCameraKind = SystemCameraKind::HIDDEN_SECURE_CAMERA;
556         return Status::OK;
557     }
558 
559     // Go through the capabilities and check if it has
560     // ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA
561     for (size_t i = 0; i < entry.count; ++i) {
562         uint8_t capability = entry.data.u8[i];
563         if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA) {
564             *systemCameraKind = SystemCameraKind::SYSTEM_ONLY_CAMERA;
565             return Status::OK;
566         }
567     }
568     *systemCameraKind = SystemCameraKind::PUBLIC;
569     return Status::OK;
570 }
571 
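// Forwards a device state change to the provider, if one is bound.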
572 void CameraAidlTest::notifyDeviceState(int64_t state) {
573     if (mProvider == nullptr) {
574         return;
575     }
576     mProvider->notifyDeviceStateChange(state);
577 }
578 
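// Allocates a raw gralloc buffer handle with the requested size, format, and usage.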
579 void CameraAidlTest::allocateGraphicBuffer(uint32_t width, uint32_t height, uint64_t usage,
580                                            PixelFormat format, buffer_handle_t* buffer_handle) {
581     ASSERT_NE(buffer_handle, nullptr);
582 
583     uint32_t stride;
584 
585     android::status_t err = android::GraphicBufferAllocator::get().allocateRawHandle(
586             width, height, static_cast<int32_t>(format), 1u /*layerCount*/, usage, buffer_handle,
587             &stride, "VtsHalCameraProviderV2");
588     ASSERT_EQ(err, android::NO_ERROR);
589 }
590 
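// Matches a device name against device@<major>.<minor>/<providerType>/<id> and, on
// success, returns the version and camera id through the out parameters.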
591 bool CameraAidlTest::matchDeviceName(const std::string& deviceName, const std::string& providerType,
592                                      std::string* deviceVersion, std::string* cameraId) {
593     // expected format: device@<major>.<minor>/<type>/<id>
594     std::stringstream pattern;
595     pattern << "device@([0-9]+\\.[0-9]+)/" << providerType << "/(.+)";
596     std::regex e(pattern.str());
597 
598     std::smatch sm;
599     if (std::regex_match(deviceName, sm, e)) {
600         if (deviceVersion != nullptr) {
601             *deviceVersion = sm[1];
602         }
603         if (cameraId != nullptr) {
604             *cameraId = sm[2];
605         }
606         return true;
607     }
608     return false;
609 }
610 
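// Static characteristics checks: metadata structure and entry count, hardware level,
// tags the HAL must never set (keys needing permission, dynamic depth, HEIC configs),
// HEIC app segment count, lens pose reference, device state orientations, and the
// extended scene mode, zoom, stream use case, and settings override characteristics.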
611 void CameraAidlTest::verifyCameraCharacteristics(const CameraMetadata& chars) {
612     const camera_metadata_t* metadata =
613             reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
614 
615     size_t expectedSize = chars.metadata.size();
616     int result = validate_camera_metadata_structure(metadata, &expectedSize);
617     ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
618     size_t entryCount = get_camera_metadata_entry_count(metadata);
619     // TODO: we can do better than 0 here. Need to check how many required
620     // characteristics keys we've defined.
621     ASSERT_GT(entryCount, 0u);
622 
623     camera_metadata_ro_entry entry;
624     int retcode =
625             find_camera_metadata_ro_entry(metadata, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &entry);
626     if ((0 == retcode) && (entry.count > 0)) {
627         uint8_t hardwareLevel = entry.data.u8[0];
628         ASSERT_TRUE(hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED ||
629                     hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL ||
630                     hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3 ||
631                     hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL);
632     } else {
633         ADD_FAILURE() << "Get camera hardware level failed!";
634     }
635 
636     entry.count = 0;
637     retcode = find_camera_metadata_ro_entry(
638             metadata, ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION, &entry);
639     if ((0 == retcode) || (entry.count > 0)) {
640         ADD_FAILURE() << "ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION "
641                       << " per API contract should never be set by Hal!";
642     }
643     retcode = find_camera_metadata_ro_entry(
644             metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, &entry);
645     if ((0 == retcode) || (entry.count > 0)) {
646         ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS"
647                       << " per API contract should never be set by Hal!";
648     }
649     retcode = find_camera_metadata_ro_entry(
650             metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, &entry);
651     if ((0 == retcode) || (entry.count > 0)) {
652         ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS"
653                       << " per API contract should never be set by Hal!";
654     }
655     retcode = find_camera_metadata_ro_entry(
656             metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, &entry);
657     if ((0 == retcode) || (entry.count > 0)) {
658         ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS"
659                       << " per API contract should never be set by Hal!";
660     }
661 
662     retcode = find_camera_metadata_ro_entry(
663             metadata, ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, &entry);
664     if (0 == retcode || entry.count > 0) {
665         ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS "
666                       << " per API contract should never be set by Hal!";
667     }
668 
669     retcode = find_camera_metadata_ro_entry(
670             metadata, ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS, &entry);
671     if (0 == retcode || entry.count > 0) {
672         ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS "
673                       << " per API contract should never be set by Hal!";
674     }
675 
676     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS,
677                                             &entry);
678     if (0 == retcode || entry.count > 0) {
679         ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS "
680                       << " per API contract should never be set by Hal!";
681     }
682 
683     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_HEIC_INFO_SUPPORTED, &entry);
684     if (0 == retcode && entry.count > 0) {
685         retcode = find_camera_metadata_ro_entry(
686                 metadata, ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT, &entry);
687         if (0 == retcode && entry.count > 0) {
688             uint8_t maxJpegAppSegmentsCount = entry.data.u8[0];
689             ASSERT_TRUE(maxJpegAppSegmentsCount >= 1 && maxJpegAppSegmentsCount <= 16);
690         } else {
691             ADD_FAILURE() << "Get Heic maxJpegAppSegmentsCount failed!";
692         }
693     }
694 
695     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_LENS_POSE_REFERENCE, &entry);
696     if (0 == retcode && entry.count > 0) {
697         uint8_t poseReference = entry.data.u8[0];
698         ASSERT_TRUE(poseReference <= ANDROID_LENS_POSE_REFERENCE_AUTOMOTIVE &&
699                 poseReference >= ANDROID_LENS_POSE_REFERENCE_PRIMARY_CAMERA);
700     }
701 
702     retcode =
703             find_camera_metadata_ro_entry(metadata, ANDROID_INFO_DEVICE_STATE_ORIENTATIONS, &entry);
704     if (0 == retcode && entry.count > 0) {
705         ASSERT_TRUE((entry.count % 2) == 0);
706         uint64_t maxPublicState = ((uint64_t)ICameraProvider::DEVICE_STATE_FOLDED) << 1;
707         uint64_t vendorStateStart = 1UL << 31;  // Reserved for vendor specific states
708         uint64_t stateMask = (1 << vendorStateStart) - 1;
709         stateMask &= ~((1 << maxPublicState) - 1);
710         for (int i = 0; i < entry.count; i += 2) {
711             ASSERT_TRUE((entry.data.i64[i] & stateMask) == 0);
712             ASSERT_TRUE((entry.data.i64[i + 1] % 90) == 0);
713         }
714     }
715 
716     verifyExtendedSceneModeCharacteristics(metadata);
717     verifyZoomCharacteristics(metadata);
718     verifyStreamUseCaseCharacteristics(metadata);
719     verifySettingsOverrideCharacteristics(metadata);
720 }
721 
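// Extended scene mode keys must be advertised together; when present, DISABLED plus at
// least one bokeh or vendor mode must be listed, each with a supported output size and a
// valid zoom ratio range.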
722 void CameraAidlTest::verifyExtendedSceneModeCharacteristics(const camera_metadata_t* metadata) {
723     camera_metadata_ro_entry entry;
724     int retcode = 0;
725 
726     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_AVAILABLE_MODES, &entry);
727     if ((0 == retcode) && (entry.count > 0)) {
728         for (auto i = 0; i < entry.count; i++) {
729             ASSERT_TRUE(entry.data.u8[i] >= ANDROID_CONTROL_MODE_OFF &&
730                         entry.data.u8[i] <= ANDROID_CONTROL_MODE_USE_EXTENDED_SCENE_MODE);
731         }
732     } else {
733         ADD_FAILURE() << "Get camera controlAvailableModes failed!";
734     }
735 
736     // Check key availability in capabilities, request and result.
737 
738     retcode =
739             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
740     bool hasExtendedSceneModeRequestKey = false;
741     if ((0 == retcode) && (entry.count > 0)) {
742         hasExtendedSceneModeRequestKey =
743                 std::find(entry.data.i32, entry.data.i32 + entry.count,
744                           ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count;
745     } else {
746         ADD_FAILURE() << "Get camera availableRequestKeys failed!";
747     }
748 
749     retcode =
750             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
751     bool hasExtendedSceneModeResultKey = false;
752     if ((0 == retcode) && (entry.count > 0)) {
753         hasExtendedSceneModeResultKey =
754                 std::find(entry.data.i32, entry.data.i32 + entry.count,
755                           ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count;
756     } else {
757         ADD_FAILURE() << "Get camera availableResultKeys failed!";
758     }
759 
760     retcode = find_camera_metadata_ro_entry(metadata,
761                                             ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
762     bool hasExtendedSceneModeMaxSizesKey = false;
763     bool hasExtendedSceneModeZoomRatioRangesKey = false;
764     if ((0 == retcode) && (entry.count > 0)) {
765         hasExtendedSceneModeMaxSizesKey =
766                 std::find(entry.data.i32, entry.data.i32 + entry.count,
767                           ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES) !=
768                 entry.data.i32 + entry.count;
769         hasExtendedSceneModeZoomRatioRangesKey =
770                 std::find(entry.data.i32, entry.data.i32 + entry.count,
771                           ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES) !=
772                 entry.data.i32 + entry.count;
773     } else {
774         ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
775     }
776 
777     camera_metadata_ro_entry maxSizesEntry;
778     retcode = find_camera_metadata_ro_entry(
779             metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES, &maxSizesEntry);
780     bool hasExtendedSceneModeMaxSizes = (0 == retcode && maxSizesEntry.count > 0);
781 
782     camera_metadata_ro_entry zoomRatioRangesEntry;
783     retcode = find_camera_metadata_ro_entry(
784             metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
785             &zoomRatioRangesEntry);
786     bool hasExtendedSceneModeZoomRatioRanges = (0 == retcode && zoomRatioRangesEntry.count > 0);
787 
788     // Extended scene mode keys must all be available, or all be unavailable.
789     bool noExtendedSceneMode =
790             !hasExtendedSceneModeRequestKey && !hasExtendedSceneModeResultKey &&
791             !hasExtendedSceneModeMaxSizesKey && !hasExtendedSceneModeZoomRatioRangesKey &&
792             !hasExtendedSceneModeMaxSizes && !hasExtendedSceneModeZoomRatioRanges;
793     if (noExtendedSceneMode) {
794         return;
795     }
796     bool hasExtendedSceneMode = hasExtendedSceneModeRequestKey && hasExtendedSceneModeResultKey &&
797                                 hasExtendedSceneModeMaxSizesKey &&
798                                 hasExtendedSceneModeZoomRatioRangesKey &&
799                                 hasExtendedSceneModeMaxSizes && hasExtendedSceneModeZoomRatioRanges;
800     ASSERT_TRUE(hasExtendedSceneMode);
801 
802     // Must have DISABLED, and must have one of BOKEH_STILL_CAPTURE, BOKEH_CONTINUOUS, or a VENDOR
803     // mode.
804     ASSERT_TRUE((maxSizesEntry.count == 6 && zoomRatioRangesEntry.count == 2) ||
805                 (maxSizesEntry.count == 9 && zoomRatioRangesEntry.count == 4));
806     bool hasDisabledMode = false;
807     bool hasBokehStillCaptureMode = false;
808     bool hasBokehContinuousMode = false;
809     bool hasVendorMode = false;
810     std::vector<AvailableStream> outputStreams;
811     ASSERT_EQ(Status::OK, getAvailableOutputStreams(metadata, outputStreams));
812     for (int i = 0, j = 0; i < maxSizesEntry.count && j < zoomRatioRangesEntry.count; i += 3) {
813         int32_t mode = maxSizesEntry.data.i32[i];
814         int32_t maxWidth = maxSizesEntry.data.i32[i + 1];
815         int32_t maxHeight = maxSizesEntry.data.i32[i + 2];
816         switch (mode) {
817             case ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED:
818                 hasDisabledMode = true;
819                 ASSERT_TRUE(maxWidth == 0 && maxHeight == 0);
820                 break;
821             case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE:
822                 hasBokehStillCaptureMode = true;
823                 j += 2;
824                 break;
825             case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS:
826                 hasBokehContinuousMode = true;
827                 j += 2;
828                 break;
829             default:
830                 if (mode < ANDROID_CONTROL_EXTENDED_SCENE_MODE_VENDOR_START) {
831                     ADD_FAILURE() << "Invalid extended scene mode advertised: " << mode;
832                 } else {
833                     hasVendorMode = true;
834                     j += 2;
835                 }
836                 break;
837         }
838 
839         if (mode != ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED) {
840             // Make sure size is supported.
841             bool sizeSupported = false;
842             for (const auto& stream : outputStreams) {
843                 if ((stream.format == static_cast<int32_t>(PixelFormat::YCBCR_420_888) ||
844                      stream.format == static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)) &&
845                     stream.width == maxWidth && stream.height == maxHeight) {
846                     sizeSupported = true;
847                     break;
848                 }
849             }
850             ASSERT_TRUE(sizeSupported);
851 
852             // Make sure zoom range is valid
853             float minZoomRatio = zoomRatioRangesEntry.data.f[0];
854             float maxZoomRatio = zoomRatioRangesEntry.data.f[1];
855             ASSERT_GT(minZoomRatio, 0.0f);
856             ASSERT_LE(minZoomRatio, maxZoomRatio);
857         }
858     }
859     ASSERT_TRUE(hasDisabledMode);
860     ASSERT_TRUE(hasBokehStillCaptureMode || hasBokehContinuousMode || hasVendorMode);
861 }
862 
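// High speed video configurations must match the CONSTRAINED_HIGH_SPEED_VIDEO capability
// (and the ultra high resolution capability for the maximum-resolution variant); fps
// values must be multiples of 30 with a maximum of at least 120, and batch sizes are
// bounded where the device version requires it.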
863 void CameraAidlTest::verifyHighSpeedRecordingCharacteristics(const std::string& cameraName,
864                                                              const CameraMetadata& chars) {
865     const camera_metadata_t* metadata =
866             reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
867 
868     // Check capabilities
869     bool hasHighSpeedRecordingCapability = false;
870     bool hasUltraHighResolutionCapability = false;
871     camera_metadata_ro_entry entry;
872     int rc =
873             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
874     if ((0 == rc) && (entry.count > 0)) {
875         hasHighSpeedRecordingCapability =
876                 std::find(entry.data.u8, entry.data.u8 + entry.count,
877                           ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) !=
878                 entry.data.u8 + entry.count;
879 
880         hasUltraHighResolutionCapability =
881                 std::find(entry.data.u8, entry.data.u8 + entry.count,
882                           ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) !=
883                 entry.data.u8 + entry.count;
884     }
885 
886     // Check high speed video configurations
887     camera_metadata_ro_entry highSpeedEntry;
888     rc = find_camera_metadata_ro_entry(
889             metadata, ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, &highSpeedEntry);
890     bool hasHighSpeedEntry = (0 == rc && highSpeedEntry.count > 0);
891 
892     camera_metadata_ro_entry highSpeedMaxResEntry;
893     rc = find_camera_metadata_ro_entry(
894             metadata, ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION,
895             &highSpeedMaxResEntry);
896     bool hasHighSpeedMaxResEntry = (0 == rc && highSpeedMaxResEntry.count > 0);
897 
898     // High speed recording configuration entry must be available based on capabilities
899     bool noHighSpeedRecording =
900             !hasHighSpeedRecordingCapability && !hasHighSpeedEntry && !hasHighSpeedMaxResEntry;
901     if (noHighSpeedRecording) {
902         return;
903     }
904     bool hasHighSpeedRecording = hasHighSpeedRecordingCapability && hasHighSpeedEntry &&
905                                  ((hasHighSpeedMaxResEntry && hasUltraHighResolutionCapability) ||
906                                   !hasHighSpeedMaxResEntry);
907     ASSERT_TRUE(hasHighSpeedRecording);
908 
909     std::string version, cameraId;
910     ASSERT_TRUE(matchDeviceName(cameraName, mProviderType, &version, &cameraId));
911     bool needBatchSizeCheck = (version != CAMERA_DEVICE_API_VERSION_1);
912 
913     // Check each entry item
914     ASSERT_TRUE(highSpeedEntry.count > 0 && highSpeedEntry.count % 5 == 0);
915     for (auto i = 4; i < highSpeedEntry.count; i += 5) {
916         int32_t fps_min = highSpeedEntry.data.i32[i - 2];
917         int32_t fps_max = highSpeedEntry.data.i32[i - 1];
918         int32_t batch_size_max = highSpeedEntry.data.i32[i];
919         int32_t allowedMaxBatchSize = fps_max / 30;
920 
921         ASSERT_GE(fps_max, 120);
922         ASSERT_TRUE(fps_min % 30 == 0 && fps_max % 30 == 0);
923         if (needBatchSizeCheck) {
924             ASSERT_LE(batch_size_max, 32);
925             ASSERT_TRUE(allowedMaxBatchSize % batch_size_max == 0);
926         }
927     }
928 
929     if (hasHighSpeedMaxResEntry) {
930         ASSERT_TRUE(highSpeedMaxResEntry.count > 0 && highSpeedMaxResEntry.count % 5 == 0);
931         for (auto i = 4; i < highSpeedMaxResEntry.count; i += 5) {
932             int32_t fps_min = highSpeedMaxResEntry.data.i32[i - 2];
933             int32_t fps_max = highSpeedMaxResEntry.data.i32[i - 1];
934             int32_t batch_size_max = highSpeedMaxResEntry.data.i32[i];
935             int32_t allowedMaxBatchSize = fps_max / 30;
936 
937             ASSERT_GE(fps_max, 120);
938             ASSERT_TRUE(fps_min % 30 == 0 && fps_max % 30 == 0);
939             if (needBatchSizeCheck) {
940                 ASSERT_LE(batch_size_max, 32);
941                 ASSERT_TRUE(allowedMaxBatchSize % batch_size_max == 0);
942             }
943         }
944     }
945 }
946 
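// Collects the scaler and depth output stream configurations (optionally the
// maximum-resolution variants), applying the given format/size threshold.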
947 Status CameraAidlTest::getAvailableOutputStreams(const camera_metadata_t* staticMeta,
948                                                  std::vector<AvailableStream>& outputStreams,
949                                                  const AvailableStream* threshold,
950                                                  bool maxResolution) {
951     if (nullptr == staticMeta) {
952         return Status::ILLEGAL_ARGUMENT;
953     }
954     int scalerTag = maxResolution
955                             ? ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
956                             : ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
957     int depthTag = maxResolution
958                            ? ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
959                            : ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS;
960 
961     camera_metadata_ro_entry scalerEntry;
962     camera_metadata_ro_entry depthEntry;
963     int foundScaler = find_camera_metadata_ro_entry(staticMeta, scalerTag, &scalerEntry);
964     int foundDepth = find_camera_metadata_ro_entry(staticMeta, depthTag, &depthEntry);
965     if ((0 != foundScaler || (0 != (scalerEntry.count % 4))) &&
966         (0 != foundDepth || (0 != (depthEntry.count % 4)))) {
967         return Status::ILLEGAL_ARGUMENT;
968     }
969 
970     if (foundScaler == 0 && (0 == (scalerEntry.count % 4))) {
971         fillOutputStreams(&scalerEntry, outputStreams, threshold,
972                           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
973     }
974 
975     if (foundDepth == 0 && (0 == (depthEntry.count % 4))) {
976         AvailableStream depthPreviewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
977                                                  static_cast<int32_t>(PixelFormat::Y16)};
978         const AvailableStream* depthThreshold =
979                 isDepthOnly(staticMeta) ? &depthPreviewThreshold : threshold;
980         fillOutputStreams(&depthEntry, outputStreams, depthThreshold,
981                           ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT);
982     }
983 
984     return Status::OK;
985 }
986 
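// Appends every OUTPUT configuration from the entry, or only those matching the
// threshold's format and fitting within its dimensions when a threshold is given.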
987 void CameraAidlTest::fillOutputStreams(camera_metadata_ro_entry_t* entry,
988                                        std::vector<AvailableStream>& outputStreams,
989                                        const AvailableStream* threshold,
990                                        const int32_t availableConfigOutputTag) {
991     for (size_t i = 0; i < entry->count; i += 4) {
992         if (availableConfigOutputTag == entry->data.i32[i + 3]) {
993             if (nullptr == threshold) {
994                 AvailableStream s = {entry->data.i32[i + 1], entry->data.i32[i + 2],
995                                      entry->data.i32[i]};
996                 outputStreams.push_back(s);
997             } else {
998                 if ((threshold->format == entry->data.i32[i]) &&
999                     (threshold->width >= entry->data.i32[i + 1]) &&
1000                     (threshold->height >= entry->data.i32[i + 2])) {
1001                     AvailableStream s = {entry->data.i32[i + 1], entry->data.i32[i + 2],
1002                                          threshold->format};
1003                     outputStreams.push_back(s);
1004                 }
1005             }
1006         }
1007     }
1008 }
1009 
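// Zoom ratio keys must be advertised together; when present, the range must include 1.0,
// cover the maximum digital zoom, and the cropping type must be CENTER_ONLY.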
1010 void CameraAidlTest::verifyZoomCharacteristics(const camera_metadata_t* metadata) {
1011     camera_metadata_ro_entry entry;
1012     int retcode = 0;
1013 
1014     // Check key availability in capabilities, request and result.
1015     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1016                                             &entry);
1017     float maxDigitalZoom = 1.0;
1018     if ((0 == retcode) && (entry.count == 1)) {
1019         maxDigitalZoom = entry.data.f[0];
1020     } else {
1021         ADD_FAILURE() << "Get camera scalerAvailableMaxDigitalZoom failed!";
1022     }
1023 
1024     retcode =
1025             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
1026     bool hasZoomRequestKey = false;
1027     if ((0 == retcode) && (entry.count > 0)) {
1028         hasZoomRequestKey = std::find(entry.data.i32, entry.data.i32 + entry.count,
1029                                       ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32 + entry.count;
1030     } else {
1031         ADD_FAILURE() << "Get camera availableRequestKeys failed!";
1032     }
1033 
1034     retcode =
1035             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
1036     bool hasZoomResultKey = false;
1037     if ((0 == retcode) && (entry.count > 0)) {
1038         hasZoomResultKey = std::find(entry.data.i32, entry.data.i32 + entry.count,
1039                                      ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32 + entry.count;
1040     } else {
1041         ADD_FAILURE() << "Get camera availableResultKeys failed!";
1042     }
1043 
1044     retcode = find_camera_metadata_ro_entry(metadata,
1045                                             ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
1046     bool hasZoomCharacteristicsKey = false;
1047     if ((0 == retcode) && (entry.count > 0)) {
1048         hasZoomCharacteristicsKey =
1049                 std::find(entry.data.i32, entry.data.i32 + entry.count,
1050                           ANDROID_CONTROL_ZOOM_RATIO_RANGE) != entry.data.i32 + entry.count;
1051     } else {
1052         ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
1053     }
1054 
1055     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1056     bool hasZoomRatioRange = (0 == retcode && entry.count == 2);
1057 
1058     // Zoom keys must all be available, or all be unavailable.
1059     bool noZoomRatio = !hasZoomRequestKey && !hasZoomResultKey && !hasZoomCharacteristicsKey &&
1060                        !hasZoomRatioRange;
1061     if (noZoomRatio) {
1062         return;
1063     }
1064     bool hasZoomRatio =
1065             hasZoomRequestKey && hasZoomResultKey && hasZoomCharacteristicsKey && hasZoomRatioRange;
1066     ASSERT_TRUE(hasZoomRatio);
1067 
1068     float minZoomRatio = entry.data.f[0];
1069     float maxZoomRatio = entry.data.f[1];
1070     constexpr float FLOATING_POINT_THRESHOLD = 0.00001f;
1071     if (maxDigitalZoom > maxZoomRatio + FLOATING_POINT_THRESHOLD) {
1072         ADD_FAILURE() << "Maximum digital zoom " << maxDigitalZoom
1073                       << " is larger than maximum zoom ratio " << maxZoomRatio << " + threshold "
1074                       << FLOATING_POINT_THRESHOLD << "!";
1075     }
1076     if (minZoomRatio > maxZoomRatio) {
1077         ADD_FAILURE() << "Maximum zoom ratio is less than minimum zoom ratio!";
1078     }
1079     if (minZoomRatio > 1.0f) {
1080         ADD_FAILURE() << "Minimum zoom ratio is more than 1.0!";
1081     }
1082     if (maxZoomRatio < 1.0f) {
1083         ADD_FAILURE() << "Maximum zoom ratio is less than 1.0!";
1084     }
1085 
1086     // Make sure CROPPING_TYPE is CENTER_ONLY
1087     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_CROPPING_TYPE, &entry);
1088     if ((0 == retcode) && (entry.count == 1)) {
1089         int8_t croppingType = entry.data.u8[0];
1090         ASSERT_EQ(croppingType, ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY);
1091     } else {
1092         ADD_FAILURE() << "Get camera scalerCroppingType failed!";
1093     }
1094 }
1095 
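// Monochrome cameras must not advertise MANUAL_POST_PROCESSING, must report a MONO or
// NIR color filter arrangement, must omit color correction and color calibration keys,
// and must use a uniform black level pattern.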
1096 void CameraAidlTest::verifyMonochromeCharacteristics(const CameraMetadata& chars) {
1097     const camera_metadata_t* metadata = (camera_metadata_t*)chars.metadata.data();
1098     Status rc = isMonochromeCamera(metadata);
1099     if (Status::OPERATION_NOT_SUPPORTED == rc) {
1100         return;
1101     }
1102     ASSERT_EQ(Status::OK, rc);
1103 
1104     camera_metadata_ro_entry entry;
1105     // Check capabilities
1106     int retcode =
1107             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
1108     if ((0 == retcode) && (entry.count > 0)) {
1109         ASSERT_EQ(std::find(entry.data.u8, entry.data.u8 + entry.count,
1110                             ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING),
1111                   entry.data.u8 + entry.count);
1112     }
1113 
1114     // Check Cfa
1115     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1116                                             &entry);
1117     if ((0 == retcode) && (entry.count == 1)) {
1118         ASSERT_TRUE(entry.data.i32[0] ==
1119                             static_cast<int32_t>(
1120                                     SensorInfoColorFilterArrangement::
1121                                             ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO) ||
1122                     entry.data.i32[0] ==
1123                             static_cast<int32_t>(
1124                                     SensorInfoColorFilterArrangement::
1125                                             ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR));
1126     }
1127 
1128     // Check availableRequestKeys
1129     retcode =
1130             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
1131     if ((0 == retcode) && (entry.count > 0)) {
1132         for (size_t i = 0; i < entry.count; i++) {
1133             ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE);
1134             ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM);
1135             ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS);
1136         }
1137     } else {
1138         ADD_FAILURE() << "Get camera availableRequestKeys failed!";
1139     }
1140 
1141     // Check availableResultKeys
1142     retcode =
1143             find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
1144     if ((0 == retcode) && (entry.count > 0)) {
1145         for (size_t i = 0; i < entry.count; i++) {
1146             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_GREEN_SPLIT);
1147             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
1148             ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE);
1149             ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM);
1150             ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS);
1151         }
1152     } else {
1153         ADD_FAILURE() << "Get camera availableResultKeys failed!";
1154     }
1155 
1156     // Check availableCharacteristicKeys
1157     retcode = find_camera_metadata_ro_entry(metadata,
1158                                             ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
1159     if ((0 == retcode) && (entry.count > 0)) {
1160         for (size_t i = 0; i < entry.count; i++) {
1161             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
1162             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
1163             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
1164             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
1165             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM1);
1166             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM2);
1167             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX1);
1168             ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX2);
1169         }
1170     } else {
1171         ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
1172     }
1173 
1174     // Check blackLevelPattern
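    // For a monochrome sensor, all four black level pattern channels are expected to be identical.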
1175     retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_BLACK_LEVEL_PATTERN, &entry);
1176     if ((0 == retcode) && (entry.count > 0)) {
1177         ASSERT_EQ(entry.count, 4);
1178         for (size_t i = 1; i < entry.count; i++) {
1179             ASSERT_EQ(entry.data.i32[i], entry.data.i32[0]);
1180         }
1181     }
1182 }
1183 
1184 void CameraAidlTest::verifyManualFlashStrengthControlCharacteristics(
1185         const camera_metadata_t* staticMeta) {
1186     camera_metadata_ro_entry singleMaxEntry;
1187     camera_metadata_ro_entry singleDefEntry;
1188     camera_metadata_ro_entry torchMaxEntry;
1189     camera_metadata_ro_entry torchDefEntry;
1190     bool torch_supported = false;
1191     int32_t singleMaxLevel = 0;
1192     int32_t singleDefLevel = 0;
1193     int32_t torchMaxLevel = 0;
1194     int32_t torchDefLevel = 0;
1195 
1196     // determine whether the device supports torch or not
1197     torch_supported = isTorchSupported(staticMeta);
1198 
1199     int singleMaxRetCode = find_camera_metadata_ro_entry(staticMeta,
1200             ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL, &singleMaxEntry);
1201     int singleDefRetCode = find_camera_metadata_ro_entry(staticMeta,
1202             ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL, &singleDefEntry);
1203     int torchMaxRetCode = find_camera_metadata_ro_entry(staticMeta,
1204             ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL, &torchMaxEntry);
1205     int torchDefRetCode = find_camera_metadata_ro_entry(staticMeta,
1206             ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL, &torchDefEntry);
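    // If torch is supported, either all four flash strength tags must be present with exactly one
    // value each, or none of them may be present at all.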
1207     if (torch_supported) {
1208         int expectedEntryCount;
1209         if (singleMaxRetCode == 0 && singleDefRetCode == 0 && torchMaxRetCode == 0 &&
1210                 torchDefRetCode == 0) {
1211             singleMaxLevel = *singleMaxEntry.data.i32;
1212             singleDefLevel = *singleDefEntry.data.i32;
1213             torchMaxLevel = *torchMaxEntry.data.i32;
1214             torchDefLevel = *torchDefEntry.data.i32;
1215             expectedEntryCount = 1;
1216         } else {
1217             expectedEntryCount = 0;
1218         }
1219         ASSERT_EQ(singleMaxEntry.count, expectedEntryCount);
1220         ASSERT_EQ(singleDefEntry.count, expectedEntryCount);
1221         ASSERT_EQ(torchMaxEntry.count, expectedEntryCount);
1222         ASSERT_EQ(torchDefEntry.count, expectedEntryCount);
1223         // If the device supports this feature, the default levels should be greater than 0.
1224         if (singleMaxLevel > 1) {
1225             ASSERT_GT(torchMaxLevel, 1);
1226             ASSERT_GT(torchDefLevel, 0);
1227             ASSERT_GT(singleDefLevel, 0);
1228             ASSERT_TRUE(torchDefLevel <= torchMaxLevel); // default levels should be <= max levels
1229             ASSERT_TRUE(singleDefLevel <= singleMaxLevel);
1230         }
1231     } else {
1232         ASSERT_TRUE(singleMaxRetCode != 0);
1233         ASSERT_TRUE(singleDefRetCode != 0);
1234         ASSERT_TRUE(torchMaxRetCode != 0);
1235         ASSERT_TRUE(torchDefRetCode != 0);
1236     }
1237 }
1238 
1239 void CameraAidlTest::verifyRecommendedConfigs(const CameraMetadata& chars) {
1240     size_t CONFIG_ENTRY_SIZE = 5;
1241     size_t CONFIG_ENTRY_TYPE_OFFSET = 3;
1242     size_t CONFIG_ENTRY_BITFIELD_OFFSET = 4;
1243     uint32_t maxPublicUsecase =
1244             ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PUBLIC_END_3_8;
1245     uint32_t vendorUsecaseStart =
1246             ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VENDOR_START;
1247     uint32_t usecaseMask = (1 << vendorUsecaseStart) - 1;
1248     usecaseMask &= ~((1 << maxPublicUsecase) - 1);
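    // usecaseMask now covers only the bits between the last public use case and the start of the
    // vendor range; recommended configuration bitfields must not set any of these bits.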
1249 
1250     const camera_metadata_t* metadata =
1251             reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
1252 
1253     camera_metadata_ro_entry recommendedConfigsEntry, recommendedDepthConfigsEntry, ioMapEntry;
1254     recommendedConfigsEntry.count = recommendedDepthConfigsEntry.count = ioMapEntry.count = 0;
1255     int retCode = find_camera_metadata_ro_entry(
1256             metadata, ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
1257             &recommendedConfigsEntry);
1258     int depthRetCode = find_camera_metadata_ro_entry(
1259             metadata, ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS,
1260             &recommendedDepthConfigsEntry);
1261     int ioRetCode = find_camera_metadata_ro_entry(
1262             metadata, ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP, &ioMapEntry);
1263     if ((0 != retCode) && (0 != depthRetCode)) {
1264         // In case both regular and depth recommended configurations are absent,
1265         // the recommended input/output format map must be absent as well.
1266         ASSERT_NE(ioRetCode, 0);
1267         return;
1268     }
1269 
1270     camera_metadata_ro_entry availableKeysEntry;
1271     retCode = find_camera_metadata_ro_entry(
1272             metadata, ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &availableKeysEntry);
1273     ASSERT_TRUE((0 == retCode) && (availableKeysEntry.count > 0));
1274     std::vector<int32_t> availableKeys;
1275     availableKeys.reserve(availableKeysEntry.count);
1276     availableKeys.insert(availableKeys.end(), availableKeysEntry.data.i32,
1277                          availableKeysEntry.data.i32 + availableKeysEntry.count);
1278 
1279     if (recommendedConfigsEntry.count > 0) {
1280         ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
1281                             ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS),
1282                   availableKeys.end());
1283         ASSERT_EQ((recommendedConfigsEntry.count % CONFIG_ENTRY_SIZE), 0);
1284         for (size_t i = 0; i < recommendedConfigsEntry.count; i += CONFIG_ENTRY_SIZE) {
1285             int32_t entryType = recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET];
1286             uint32_t bitfield = recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET];
1287             ASSERT_TRUE((entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) ||
1288                         (entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT));
1289             ASSERT_TRUE((bitfield & usecaseMask) == 0);
1290         }
1291     }
1292 
1293     if (recommendedDepthConfigsEntry.count > 0) {
1294         ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
1295                             ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS),
1296                   availableKeys.end());
1297         ASSERT_EQ((recommendedDepthConfigsEntry.count % CONFIG_ENTRY_SIZE), 0);
1298         for (size_t i = 0; i < recommendedDepthConfigsEntry.count; i += CONFIG_ENTRY_SIZE) {
1299             int32_t entryType = recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET];
1300             uint32_t bitfield =
1301                     recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET];
1302             ASSERT_TRUE((entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) ||
1303                         (entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT));
1304             ASSERT_TRUE((bitfield & usecaseMask) == 0);
1305         }
1306 
1307         if (recommendedConfigsEntry.count == 0) {
1308             // In case regular recommended configurations are absent but recommended depth
1309             // configurations are present, the input/output format map must be absent.
1310             ASSERT_NE(ioRetCode, 0);
1311         }
1312     }
1313 
1314     if ((ioRetCode == 0) && (ioMapEntry.count > 0)) {
1315         ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
1316                             ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP),
1317                   availableKeys.end());
1318         ASSERT_EQ(isZSLModeAvailable(metadata), Status::OK);
1319     }
1320 }
1321 
1322 // Check whether ZSL is available using the static camera
1323 // characteristics.
1324 Status CameraAidlTest::isZSLModeAvailable(const camera_metadata_t* staticMeta) {
1325     if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
1326         return Status::OK;
1327     } else {
1328         return isZSLModeAvailable(staticMeta, YUV_REPROCESS);
1329     }
1330 }
1331 
1332 Status CameraAidlTest::isZSLModeAvailable(const camera_metadata_t* staticMeta,
1333                                           ReprocessType reprocType) {
1334     Status ret = Status::OPERATION_NOT_SUPPORTED;
1335     if (nullptr == staticMeta) {
1336         return Status::ILLEGAL_ARGUMENT;
1337     }
1338 
1339     camera_metadata_ro_entry entry;
1340     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1341                                            &entry);
1342     if (0 != rc) {
1343         return Status::ILLEGAL_ARGUMENT;
1344     }
1345 
1346     for (size_t i = 0; i < entry.count; i++) {
1347         if ((reprocType == PRIV_REPROCESS &&
1348              ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING == entry.data.u8[i]) ||
1349             (reprocType == YUV_REPROCESS &&
1350              ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING == entry.data.u8[i])) {
1351             ret = Status::OK;
1352             break;
1353         }
1354     }
1355 
1356     return ret;
1357 }
1358 
1359 // Verify logical or ultra high resolution camera static metadata
1360 void CameraAidlTest::verifyLogicalOrUltraHighResCameraMetadata(
1361         const std::string& cameraName, const std::shared_ptr<ICameraDevice>& device,
1362         const CameraMetadata& chars, const std::vector<std::string>& deviceNames) {
1363     const camera_metadata_t* metadata =
1364             reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
1365     ASSERT_NE(nullptr, metadata);
1366     SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
1367     Status retStatus = getSystemCameraKind(metadata, &systemCameraKind);
1368     ASSERT_EQ(retStatus, Status::OK);
1369     Status rc = isLogicalMultiCamera(metadata);
1370     ASSERT_TRUE(Status::OK == rc || Status::OPERATION_NOT_SUPPORTED == rc);
1371     bool isMultiCamera = (Status::OK == rc);
1372     bool isUltraHighResCamera = isUltraHighResolution(metadata);
1373     if (!isMultiCamera && !isUltraHighResCamera) {
1374         return;
1375     }
1376 
1377     camera_metadata_ro_entry entry;
1378     int retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1379     bool hasZoomRatioRange = (0 == retcode && entry.count == 2);
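    // HAL-side buffer management can be advertised either with the legacy HIDL_DEVICE_3_5 value or
    // with the newer SESSION_CONFIGURABLE value; the multi-resolution check below requires one of them.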
1380     retcode = find_camera_metadata_ro_entry(
1381             metadata, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
1382     bool hasHalBufferManager =
1383             (0 == retcode && 1 == entry.count &&
1384              entry.data.i32[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
1385     bool sessionHalBufferManager =
1386             (0 == retcode && 1 == entry.count &&
1387              entry.data.i32[0] ==
1388                      ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE);
1389     retcode = find_camera_metadata_ro_entry(
1390             metadata, ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED, &entry);
1391     bool multiResolutionStreamSupported =
1392             (0 == retcode && 1 == entry.count &&
1393              entry.data.u8[0] == ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_TRUE);
1394     if (multiResolutionStreamSupported) {
1395         ASSERT_TRUE(hasHalBufferManager || sessionHalBufferManager);
1396     }
1397 
1398     std::string version, cameraId;
1399     ASSERT_TRUE(matchDeviceName(cameraName, mProviderType, &version, &cameraId));
1400     std::unordered_set<std::string> physicalIds;
1401     rc = getPhysicalCameraIds(metadata, &physicalIds);
1402     ASSERT_TRUE(isUltraHighResCamera || Status::OK == rc);
1403     for (const auto& physicalId : physicalIds) {
1404         ASSERT_NE(physicalId, cameraId);
1405     }
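    // An ultra high resolution camera that is not a logical multi-camera reports no physical ids;
    // treat the camera itself as its only "physical" camera for the checks below.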
1406     if (physicalIds.size() == 0) {
1407         ASSERT_TRUE(isUltraHighResCamera && !isMultiCamera);
1408         physicalIds.insert(cameraId);
1409     }
1410 
1411     std::unordered_set<int32_t> physicalRequestKeyIDs;
1412     rc = getSupportedKeys(const_cast<camera_metadata_t*>(metadata),
1413                           ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1414                           &physicalRequestKeyIDs);
1415     ASSERT_TRUE(Status::OK == rc);
1416     bool hasTestPatternPhysicalRequestKey =
1417             physicalRequestKeyIDs.find(ANDROID_SENSOR_TEST_PATTERN_MODE) !=
1418             physicalRequestKeyIDs.end();
1419     std::unordered_set<int32_t> privacyTestPatternModes;
1420     getPrivacyTestPatternModes(metadata, &privacyTestPatternModes);
1421 
1422     // Map from image format to number of multi-resolution sizes for that format
1423     std::unordered_map<int32_t, size_t> multiResOutputFormatCounterMap;
1424     std::unordered_map<int32_t, size_t> multiResInputFormatCounterMap;
1425     for (const auto& physicalId : physicalIds) {
1426         bool isPublicId = false;
1427         std::string fullPublicId;
1428         SystemCameraKind physSystemCameraKind = SystemCameraKind::PUBLIC;
1429         for (auto& deviceName : deviceNames) {
1430             std::string publicVersion, publicId;
1431             ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
1432             if (physicalId == publicId) {
1433                 isPublicId = true;
1434                 fullPublicId = deviceName;
1435                 break;
1436             }
1437         }
1438 
1439         camera_metadata_ro_entry physicalMultiResStreamConfigs;
1440         camera_metadata_ro_entry physicalStreamConfigs;
1441         camera_metadata_ro_entry physicalMaxResolutionStreamConfigs;
1442         CameraMetadata physChars;
1443         bool isUltraHighRes = false;
1444         std::unordered_set<int32_t> subCameraPrivacyTestPatterns;
1445         if (isPublicId) {
1446             std::shared_ptr<ICameraDevice> subDevice;
1447             ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(fullPublicId, &subDevice);
1448             ASSERT_TRUE(ret.isOk());
1449             ASSERT_NE(subDevice, nullptr);
1450 
1451             ret = subDevice->getCameraCharacteristics(&physChars);
1452             ASSERT_TRUE(ret.isOk());
1453 
1454             const camera_metadata_t* staticMetadata =
1455                     reinterpret_cast<const camera_metadata_t*>(physChars.metadata.data());
1456             retStatus = getSystemCameraKind(staticMetadata, &physSystemCameraKind);
1457             ASSERT_EQ(retStatus, Status::OK);
1458 
1459             // Make sure that the system camera kind of a non-hidden
1460             // physical camera is the same as that of the logical camera
1461             // associated with it.
1462             ASSERT_EQ(physSystemCameraKind, systemCameraKind);
1463             retcode = find_camera_metadata_ro_entry(staticMetadata,
1464                                                     ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1465             bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2);
1466             ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange);
1467 
1468             getMultiResolutionStreamConfigurations(
1469                     &physicalMultiResStreamConfigs, &physicalStreamConfigs,
1470                     &physicalMaxResolutionStreamConfigs, staticMetadata);
1471             isUltraHighRes = isUltraHighResolution(staticMetadata);
1472 
1473             getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns);
1474         } else {
1475             // Check camera characteristics for hidden camera id
1476             ndk::ScopedAStatus ret =
1477                     device->getPhysicalCameraCharacteristics(physicalId, &physChars);
1478             ASSERT_TRUE(ret.isOk());
1479             verifyCameraCharacteristics(physChars);
1480             verifyMonochromeCharacteristics(physChars);
1481 
1482             auto staticMetadata = (const camera_metadata_t*)physChars.metadata.data();
1483             retcode = find_camera_metadata_ro_entry(staticMetadata,
1484                                                     ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1485             bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2);
1486             ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange);
1487 
1488             getMultiResolutionStreamConfigurations(
1489                     &physicalMultiResStreamConfigs, &physicalStreamConfigs,
1490                     &physicalMaxResolutionStreamConfigs, staticMetadata);
1491             isUltraHighRes = isUltraHighResolution(staticMetadata);
1492             getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns);
1493 
1494             // Check that calling getCameraDeviceInterface() on a hidden camera id returns
1495             // ILLEGAL_ARGUMENT.
1496             std::stringstream s;
1497             s << "device@" << version << "/" << mProviderType << "/" << physicalId;
1498             std::string fullPhysicalId(s.str());
1499             std::shared_ptr<ICameraDevice> subDevice;
1500             ret = mProvider->getCameraDeviceInterface(fullPhysicalId, &subDevice);
1501             ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1502                         ret.getServiceSpecificError());
1503             ASSERT_EQ(subDevice, nullptr);
1504         }
1505 
1506         if (hasTestPatternPhysicalRequestKey) {
1507             ASSERT_TRUE(privacyTestPatternModes == subCameraPrivacyTestPatterns);
1508         }
1509 
1510         if (physicalMultiResStreamConfigs.count > 0) {
1511             ASSERT_EQ(physicalMultiResStreamConfigs.count % 4, 0);
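            // Each stream configuration entry is a (format, width, height, input/output) 4-tuple.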
1512 
1513             // Each supported size must be the max size for that format.
1514             for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4; i++) {
1515                 int32_t multiResFormat = physicalMultiResStreamConfigs.data.i32[i * 4];
1516                 int32_t multiResWidth = physicalMultiResStreamConfigs.data.i32[i * 4 + 1];
1517                 int32_t multiResHeight = physicalMultiResStreamConfigs.data.i32[i * 4 + 2];
1518                 int32_t multiResInput = physicalMultiResStreamConfigs.data.i32[i * 4 + 3];
1519 
1520                 // Check if the resolution is the max resolution in stream
1521                 // configuration map
1522                 bool supported = false;
1523                 bool isMaxSize = true;
1524                 for (size_t j = 0; j < physicalStreamConfigs.count / 4; j++) {
1525                     int32_t format = physicalStreamConfigs.data.i32[j * 4];
1526                     int32_t width = physicalStreamConfigs.data.i32[j * 4 + 1];
1527                     int32_t height = physicalStreamConfigs.data.i32[j * 4 + 2];
1528                     int32_t input = physicalStreamConfigs.data.i32[j * 4 + 3];
1529                     if (format == multiResFormat && input == multiResInput) {
1530                         if (width == multiResWidth && height == multiResHeight) {
1531                             supported = true;
1532                         } else if (width * height > multiResWidth * multiResHeight) {
1533                             isMaxSize = false;
1534                         }
1535                     }
1536                 }
1537                 // Check if the resolution is the max resolution in max
1538                 // resolution stream configuration map
1539                 bool supportedUltraHighRes = false;
1540                 bool isUltraHighResMaxSize = true;
1541                 for (size_t j = 0; j < physicalMaxResolutionStreamConfigs.count / 4; j++) {
1542                     int32_t format = physicalMaxResolutionStreamConfigs.data.i32[j * 4];
1543                     int32_t width = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 1];
1544                     int32_t height = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 2];
1545                     int32_t input = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 3];
1546                     if (format == multiResFormat && input == multiResInput) {
1547                         if (width == multiResWidth && height == multiResHeight) {
1548                             supportedUltraHighRes = true;
1549                         } else if (width * height > multiResWidth * multiResHeight) {
1550                             isUltraHighResMaxSize = false;
1551                         }
1552                     }
1553                 }
1554 
1555                 if (isUltraHighRes) {
1556                     // For an ultra high resolution camera, the configuration must
1557                     // be the maximum size in either the stream configuration map
1558                     // or the maximum resolution stream configuration map.
1559                     ASSERT_TRUE((supported && isMaxSize) ||
1560                                 (supportedUltraHighRes && isUltraHighResMaxSize));
1561                 } else {
1562                     // The configuration must be the maximum size in the stream
1563                     // configuration map.
1564                     ASSERT_TRUE(supported && isMaxSize);
1565                     ASSERT_FALSE(supportedUltraHighRes);
1566                 }
1567 
1568                 // Increment the counter for the configuration's format.
1569                 auto& formatCounterMap = multiResInput ? multiResInputFormatCounterMap
1570                                                        : multiResOutputFormatCounterMap;
1571                 if (formatCounterMap.count(multiResFormat) == 0) {
1572                     formatCounterMap[multiResFormat] = 1;
1573                 } else {
1574                     formatCounterMap[multiResFormat]++;
1575                 }
1576             }
1577 
1578             // There must be no duplicates
1579             for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4 - 1; i++) {
1580                 for (size_t j = i + 1; j < physicalMultiResStreamConfigs.count / 4; j++) {
1581                     // Input/output doesn't match
1582                     if (physicalMultiResStreamConfigs.data.i32[i * 4 + 3] !=
1583                         physicalMultiResStreamConfigs.data.i32[j * 4 + 3]) {
1584                         continue;
1585                     }
1586                     // Format doesn't match
1587                     if (physicalMultiResStreamConfigs.data.i32[i * 4] !=
1588                         physicalMultiResStreamConfigs.data.i32[j * 4]) {
1589                         continue;
1590                     }
1591                     // Width doesn't match
1592                     if (physicalMultiResStreamConfigs.data.i32[i * 4 + 1] !=
1593                         physicalMultiResStreamConfigs.data.i32[j * 4 + 1]) {
1594                         continue;
1595                     }
1596                     // Height doesn't match
1597                     if (physicalMultiResStreamConfigs.data.i32[i * 4 + 2] !=
1598                         physicalMultiResStreamConfigs.data.i32[j * 4 + 2]) {
1599                         continue;
1600                     }
1601                     // input/output, format, width, and height all match
1602                     ADD_FAILURE();
1603                 }
1604             }
1605         }
1606     }
1607 
1608     // If a multi-resolution stream is supported, there must be at least one
1609     // format with more than one resolution.
1610     if (multiResolutionStreamSupported) {
1611         size_t numMultiResFormats = 0;
1612         for (const auto& [format, sizeCount] : multiResOutputFormatCounterMap) {
1613             if (sizeCount >= 2) {
1614                 numMultiResFormats++;
1615             }
1616         }
1617         for (const auto& [format, sizeCount] : multiResInputFormatCounterMap) {
1618             if (sizeCount >= 2) {
1619                 numMultiResFormats++;
1620 
1621                 // If multi-resolution reprocessing is supported, the logical
1622                 // camera or ultra-high resolution sensor camera must support
1623                 // the corresponding reprocessing capability.
1624                 if (format == static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)) {
1625                     ASSERT_EQ(isZSLModeAvailable(metadata, PRIV_REPROCESS), Status::OK);
1626                 } else if (format == static_cast<int32_t>(PixelFormat::YCBCR_420_888)) {
1627                     ASSERT_EQ(isZSLModeAvailable(metadata, YUV_REPROCESS), Status::OK);
1628                 }
1629             }
1630         }
1631         ASSERT_GT(numMultiResFormats, 0);
1632     }
1633 
1634     // Make sure ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID is available in
1635     // result keys.
1636     if (isMultiCamera) {
1637         retcode = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
1638                                                 &entry);
1639         if ((0 == retcode) && (entry.count > 0)) {
1640             ASSERT_NE(std::find(entry.data.i32, entry.data.i32 + entry.count,
1641                                 static_cast<int32_t>(
1642                                         CameraMetadataTag::
1643                                                 ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID)),
1644                       entry.data.i32 + entry.count);
1645         } else {
1646             ADD_FAILURE() << "Get camera availableResultKeys failed!";
1647         }
1648     }
1649 }
1650 
1651 bool CameraAidlTest::isUltraHighResolution(const camera_metadata_t* staticMeta) {
1652     camera_metadata_ro_entry scalerEntry;
1653     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1654                                            &scalerEntry);
1655     if (rc == 0) {
1656         for (uint32_t i = 0; i < scalerEntry.count; i++) {
1657             if (scalerEntry.data.u8[i] ==
1658                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
1659                 return true;
1660             }
1661         }
1662     }
1663     return false;
1664 }
1665 
1666 Status CameraAidlTest::getSupportedKeys(camera_metadata_t* staticMeta, uint32_t tagId,
1667                                         std::unordered_set<int32_t>* requestIDs) {
1668     if ((nullptr == staticMeta) || (nullptr == requestIDs)) {
1669         return Status::ILLEGAL_ARGUMENT;
1670     }
1671 
1672     camera_metadata_ro_entry entry;
1673     int rc = find_camera_metadata_ro_entry(staticMeta, tagId, &entry);
1674     if ((0 != rc) || (entry.count == 0)) {
1675         return Status::OK;
1676     }
1677 
1678     requestIDs->insert(entry.data.i32, entry.data.i32 + entry.count);
1679 
1680     return Status::OK;
1681 }
1682 
1683 void CameraAidlTest::getPrivacyTestPatternModes(
1684         const camera_metadata_t* staticMetadata,
1685         std::unordered_set<int32_t>* privacyTestPatternModes) {
1686     ASSERT_NE(staticMetadata, nullptr);
1687     ASSERT_NE(privacyTestPatternModes, nullptr);
1688 
1689     camera_metadata_ro_entry entry;
1690     int retcode = find_camera_metadata_ro_entry(
1691             staticMetadata, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &entry);
1692     ASSERT_TRUE(0 == retcode);
1693 
1694     for (size_t i = 0; i < entry.count; i++) {
1695         if (entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR ||
1696             entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_BLACK) {
1697             privacyTestPatternModes->insert(entry.data.i32[i]);
1698         }
1699     }
1700 }
1701 
1702 void CameraAidlTest::getMultiResolutionStreamConfigurations(
1703         camera_metadata_ro_entry* multiResStreamConfigs, camera_metadata_ro_entry* streamConfigs,
1704         camera_metadata_ro_entry* maxResolutionStreamConfigs,
1705         const camera_metadata_t* staticMetadata) {
1706     ASSERT_NE(multiResStreamConfigs, nullptr);
1707     ASSERT_NE(streamConfigs, nullptr);
1708     ASSERT_NE(maxResolutionStreamConfigs, nullptr);
1709     ASSERT_NE(staticMetadata, nullptr);
1710 
1711     int retcode = find_camera_metadata_ro_entry(
1712             staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, streamConfigs);
1713     ASSERT_TRUE(0 == retcode);
1714     retcode = find_camera_metadata_ro_entry(
1715             staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
1716             maxResolutionStreamConfigs);
1717     ASSERT_TRUE(-ENOENT == retcode || 0 == retcode);
1718     retcode = find_camera_metadata_ro_entry(
1719             staticMetadata, ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
1720             multiResStreamConfigs);
1721     ASSERT_TRUE(-ENOENT == retcode || 0 == retcode);
1722 }
1723 
1724 bool CameraAidlTest::isTorchSupported(const camera_metadata_t* staticMeta) {
1725     camera_metadata_ro_entry torchEntry;
1726     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_FLASH_INFO_AVAILABLE, &torchEntry);
1727     if (rc != 0) {
1728         ALOGI("isTorchSupported: Failed to find entry for ANDROID_FLASH_INFO_AVAILABLE");
1729         return false;
1730     }
1731     if (torchEntry.count == 1 && !torchEntry.data.u8[0]) {
1732         ALOGI("isTorchSupported: Torch not supported");
1733         return false;
1734     }
1735     ALOGI("isTorchSupported: Torch supported");
1736     return true;
1737 }
1738 
1739 bool CameraAidlTest::isTorchStrengthControlSupported(const camera_metadata_t* staticMeta) {
1740     int32_t maxLevel = 0;
1741     camera_metadata_ro_entry maxEntry;
1742     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
1743                                            &maxEntry);
1744     if (rc != 0) {
1745         ALOGI("isTorchStrengthControlSupported: Failed to find entry for "
1746               "ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL");
1747         return false;
1748     }
1749 
1750     maxLevel = *maxEntry.data.i32;
1751     if (maxLevel > 1) {
1752         ALOGI("isTorchStrengthControlSupported: Torch strength control supported.");
1753         return true;
1754     }
1755     ALOGI("isTorchStrengthControlSupported: Torch strength control not supported.");
1756     return false;
1757 }
1758 
1759 void CameraAidlTest::verifyRequestTemplate(const camera_metadata_t* metadata,
1760                                            RequestTemplate requestTemplate) {
1761     ASSERT_NE(nullptr, metadata);
1762     size_t entryCount = get_camera_metadata_entry_count(metadata);
1763     ALOGI("template %u metadata entry count is %zu", (int32_t)requestTemplate, entryCount);
1764     // TODO: we can do better than 0 here. Need to check how many required
1765     // request keys we've defined for each template
1766     ASSERT_GT(entryCount, 0u);
1767 
1768     // Check zoomRatio
1769     camera_metadata_ro_entry zoomRatioEntry;
1770     int foundZoomRatio =
1771             find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO, &zoomRatioEntry);
1772     if (foundZoomRatio == 0) {
1773         ASSERT_EQ(zoomRatioEntry.count, 1);
1774         ASSERT_EQ(zoomRatioEntry.data.f[0], 1.0f);
1775     }
1776 
1777     // Check settings override
1778     camera_metadata_ro_entry settingsOverrideEntry;
1779     int foundSettingsOverride = find_camera_metadata_ro_entry(metadata,
1780            ANDROID_CONTROL_SETTINGS_OVERRIDE, &settingsOverrideEntry);
1781     if (foundSettingsOverride == 0) {
1782         ASSERT_EQ(settingsOverrideEntry.count, 1);
1783         ASSERT_EQ(settingsOverrideEntry.data.u8[0], ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF);
1784     }
1785 }
1786 
1787 void CameraAidlTest::openEmptyDeviceSession(const std::string& name,
1788                                             const std::shared_ptr<ICameraProvider>& provider,
1789                                             std::shared_ptr<ICameraDeviceSession>* session,
1790                                             CameraMetadata* staticMeta,
1791                                             std::shared_ptr<ICameraDevice>* device) {
1792     ASSERT_NE(nullptr, session);
1793     ASSERT_NE(nullptr, staticMeta);
1794     ASSERT_NE(nullptr, device);
1795 
1796     ALOGI("openEmptyDeviceSession: Testing camera device %s", name.c_str());
1797     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, device);
1798     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
1799           ret.getServiceSpecificError());
1800     ASSERT_TRUE(ret.isOk());
1801     ASSERT_NE(*device, nullptr);
1802 
1803     std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
1804     ret = (*device)->open(cb, session);
1805     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
1806           ret.getServiceSpecificError());
1807     ASSERT_TRUE(ret.isOk());
1808     ASSERT_NE(*session, nullptr);
1809 
1810     ret = (*device)->getCameraCharacteristics(staticMeta);
1811     ASSERT_TRUE(ret.isOk());
1812 }
1813 
1814 void CameraAidlTest::openEmptyInjectionSession(const std::string& name,
1815                                                const std::shared_ptr<ICameraProvider>& provider,
1816                                                std::shared_ptr<ICameraInjectionSession>* session,
1817                                                CameraMetadata* metadata,
1818                                                std::shared_ptr<ICameraDevice>* device) {
1819     ASSERT_NE(nullptr, session);
1820     ASSERT_NE(nullptr, metadata);
1821     ASSERT_NE(nullptr, device);
1822 
1823     ALOGI("openEmptyInjectionSession: Testing camera device %s", name.c_str());
1824     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, device);
1825     ALOGI("openEmptyInjectionSession: getCameraDeviceInterface returns status:%d:%d",
1826           ret.getExceptionCode(), ret.getServiceSpecificError());
1827     ASSERT_TRUE(ret.isOk());
1828     ASSERT_NE(*device, nullptr);
1829 
1830     std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
1831     ret = (*device)->openInjectionSession(cb, session);
1832     ALOGI("device::openInjectionSession returns status:%d:%d", ret.getExceptionCode(),
1833           ret.getServiceSpecificError());
1834 
1835     if (static_cast<Status>(ret.getServiceSpecificError()) == Status::OPERATION_NOT_SUPPORTED &&
1836         *session == nullptr) {
1837         return;  // Injection session not supported; the caller will receive nullptr in *session.
1838     }
1839 
1840     ASSERT_TRUE(ret.isOk());
1841     ASSERT_NE(*session, nullptr);
1842 
1843     ret = (*device)->getCameraCharacteristics(metadata);
1844     ASSERT_TRUE(ret.isOk());
1845 }
1846 
1847 Status CameraAidlTest::getJpegBufferSize(camera_metadata_t* staticMeta, int32_t* outBufSize) {
1848     if (nullptr == staticMeta || nullptr == outBufSize) {
1849         return Status::ILLEGAL_ARGUMENT;
1850     }
1851 
1852     camera_metadata_ro_entry entry;
1853     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_JPEG_MAX_SIZE, &entry);
1854     if ((0 != rc) || (1 != entry.count)) {
1855         return Status::ILLEGAL_ARGUMENT;
1856     }
1857 
1858     *outBufSize = entry.data.i32[0];
1859     return Status::OK;
1860 }
1861 
1862 Dataspace CameraAidlTest::getDataspace(PixelFormat format) {
1863     switch (format) {
1864         case PixelFormat::BLOB:
1865             return Dataspace::JFIF;
1866         case PixelFormat::Y16:
1867             return Dataspace::DEPTH;
1868         case PixelFormat::RAW16:
1869         case PixelFormat::RAW_OPAQUE:
1870         case PixelFormat::RAW10:
1871         case PixelFormat::RAW12:
1872             return Dataspace::ARBITRARY;
1873         default:
1874             return Dataspace::UNKNOWN;
1875     }
1876 }
1877 
1878 void CameraAidlTest::createStreamConfiguration(std::vector<Stream>& streams,
1879                                                StreamConfigurationMode configMode,
1880                                                StreamConfiguration* config,
1881                                                int32_t jpegBufferSize) {
1882     ASSERT_NE(nullptr, config);
1883 
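    // Only JPEG streams (BLOB format with the JFIF dataspace) get an explicit buffer size here;
    // every other stream leaves bufferSize at 0.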
1884     for (auto& stream : streams) {
1885         stream.bufferSize =
1886                 (stream.format == PixelFormat::BLOB && stream.dataSpace == Dataspace::JFIF)
1887                         ? jpegBufferSize
1888                         : 0;
1889     }
1890 
1891     // The caller is responsible for filling in a non-zero config->streamConfigCounter after this returns.
1892     config->streams = streams;
1893     config->operationMode = configMode;
1894     config->multiResolutionInputImage = false;
1895 }
1896 
1897 void CameraAidlTest::verifyStreamCombination(const std::shared_ptr<ICameraDevice>& device,
1898                                              const StreamConfiguration& config,
1899                                              bool expectedStatus) {
1900     if (device != nullptr) {
1901         bool streamCombinationSupported;
1902         ScopedAStatus ret =
1903                 device->isStreamCombinationSupported(config, &streamCombinationSupported);
1904         ASSERT_TRUE(ret.isOk());
1905         ASSERT_EQ(expectedStatus, streamCombinationSupported);
1906 
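        // When the feature combination query flag is enabled and the device implements AIDL
        // interface version >= 3, the same combination must also be reported as supported by
        // isStreamCombinationWithSettingsSupported().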
1907         if (flags::feature_combination_query()) {
1908             int32_t interfaceVersion;
1909             ret = device->getInterfaceVersion(&interfaceVersion);
1910             ASSERT_TRUE(ret.isOk());
1911             bool supportFeatureCombinationQuery =
1912                     (interfaceVersion >= CAMERA_DEVICE_API_MINOR_VERSION_3);
1913             if (supportFeatureCombinationQuery) {
1914                 ret = device->isStreamCombinationWithSettingsSupported(config,
1915                                                                        &streamCombinationSupported);
1916                 ASSERT_TRUE(ret.isOk());
1917                 ASSERT_EQ(expectedStatus, streamCombinationSupported);
1918             }
1919         }
1920     }
1921 }
1922 
1923 void CameraAidlTest::verifySessionCharacteristics(const CameraMetadata& session_chars,
1924                                                   const CameraMetadata& camera_chars) {
1925     if (!flags::feature_combination_query()) {
1926         return;
1927     }
1928 
1929     const camera_metadata_t* session_metadata =
1930             reinterpret_cast<const camera_metadata_t*>(session_chars.metadata.data());
1931 
1932     const camera_metadata_t* camera_metadata =
1933             reinterpret_cast<const camera_metadata_t*>(camera_chars.metadata.data());
1934 
1935     size_t expectedSize = session_chars.metadata.size();
1936     int result = validate_camera_metadata_structure(session_metadata, &expectedSize);
1937     ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1938     size_t entryCount = get_camera_metadata_entry_count(session_metadata);
1939     // There should be at least 1 characteristic present:
1940     // SCALER_MAX_DIGITAL_ZOOM must always be available.
1941     // ZOOM_RATIO_RANGE must be available if ZOOM_RATIO is supported.
1942     ASSERT_TRUE(entryCount >= 1);
1943 
1944     camera_metadata_ro_entry entry;
1945     int retcode = 0;
1946     float maxDigitalZoom = 1.0;
1947 
1948     for (size_t i = 0; i < entryCount; i++) {
1949         retcode = get_camera_metadata_ro_entry(session_metadata, i, &entry);
1950         ASSERT_TRUE(retcode == 0);
1951 
1952         std::set<uint32_t> allowed_tags = {ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1953                                            ANDROID_CONTROL_ZOOM_RATIO_RANGE};
1954 
1955         if (contains(allowed_tags, entry.tag)) {
1956             continue;
1957         }
1958 
1959         // Other than the ones above, no tags should be allowed apart from vendor tags.
1960         ASSERT_TRUE(entry.tag >= VENDOR_SECTION_START);
1961     }
1962 
1963     retcode = find_camera_metadata_ro_entry(session_metadata,
1964                                             ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &entry);
1965     if ((0 == retcode) && (entry.count == 1)) {
1966         maxDigitalZoom = entry.data.f[0];
1967     } else {
1968         ADD_FAILURE() << "Get camera scalerAvailableMaxDigitalZoom failed!";
1969     }
1970 
1971     retcode = find_camera_metadata_ro_entry(camera_metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE,
1972                                             &entry);
1973     bool hasZoomRatioRange = (0 == retcode && entry.count == 2);
1974     if (!hasZoomRatioRange) {
1975         ALOGI("Skipping the rest of the test as ZOOM_RATIO_RANGE is not in camera characteristics");
1976         return;
1977     }
1978 
1979     // Session characteristics must contain ZOOM_RATIO_RANGE if the camera characteristics have it.
1980     retcode = find_camera_metadata_ro_entry(session_metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE,
1981                                             &entry);
1982     ASSERT_TRUE(0 == retcode && entry.count == 2);
1983 
1984     float minZoomRatio = entry.data.f[0];
1985     float maxZoomRatio = entry.data.f[1];
1986     constexpr float FLOATING_POINT_THRESHOLD = 0.00001f;
1987     if (std::abs(maxDigitalZoom - maxZoomRatio) > FLOATING_POINT_THRESHOLD) {
1988         ADD_FAILURE() << "Difference between maximum digital zoom " << maxDigitalZoom
1989                       << " and maximum zoom ratio " << maxZoomRatio
1990                       << " is greater than the threshold " << FLOATING_POINT_THRESHOLD << "!";
1991     }
1992     if (minZoomRatio > maxZoomRatio) {
1993         ADD_FAILURE() << "Maximum zoom ratio is less than minimum zoom ratio!";
1994     }
1995     if (minZoomRatio > 1.0f) {
1996         ADD_FAILURE() << "Minimum zoom ratio is more than 1.0!";
1997     }
1998     if (maxZoomRatio < 1.0f) {
1999         ADD_FAILURE() << "Maximum zoom ratio is less than 1.0!";
2000     }
2001 }
2002 
2003 std::vector<ConcurrentCameraIdCombination> CameraAidlTest::getConcurrentDeviceCombinations(
2004         std::shared_ptr<ICameraProvider>& provider) {
2005     std::vector<ConcurrentCameraIdCombination> combinations;
2006     ndk::ScopedAStatus ret = provider->getConcurrentCameraIds(&combinations);
2007     if (!ret.isOk()) {
2008         ADD_FAILURE();
2009     }
2010 
2011     return combinations;
2012 }
2013 
2014 Status CameraAidlTest::getMandatoryConcurrentStreams(const camera_metadata_t* staticMeta,
2015                                                      std::vector<AvailableStream>* outputStreams) {
2016     if (nullptr == staticMeta || nullptr == outputStreams) {
2017         return Status::ILLEGAL_ARGUMENT;
2018     }
2019 
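    // Mandatory concurrent streams: depth-only cameras must provide a Y16 stream (capped at
    // 640x480); all other cameras a YCBCR_420_888 stream (capped at 1280x720) plus a JPEG/BLOB
    // stream (capped at 1920x1440), each clamped to the largest size the device advertises.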
2020     if (isDepthOnly(staticMeta)) {
2021         Size y16MaxSize(640, 480);
2022         Size maxAvailableY16Size;
2023         getMaxOutputSizeForFormat(staticMeta, PixelFormat::Y16, &maxAvailableY16Size);
2024         Size y16ChosenSize = getMinSize(y16MaxSize, maxAvailableY16Size);
2025         AvailableStream y16Stream = {.width = y16ChosenSize.width,
2026                                      .height = y16ChosenSize.height,
2027                                      .format = static_cast<int32_t>(PixelFormat::Y16)};
2028         outputStreams->push_back(y16Stream);
2029         return Status::OK;
2030     }
2031 
2032     Size yuvMaxSize(1280, 720);
2033     Size jpegMaxSize(1920, 1440);
2034     Size maxAvailableYuvSize;
2035     Size maxAvailableJpegSize;
2036     getMaxOutputSizeForFormat(staticMeta, PixelFormat::YCBCR_420_888, &maxAvailableYuvSize);
2037     getMaxOutputSizeForFormat(staticMeta, PixelFormat::BLOB, &maxAvailableJpegSize);
2038     Size yuvChosenSize = getMinSize(yuvMaxSize, maxAvailableYuvSize);
2039     Size jpegChosenSize = getMinSize(jpegMaxSize, maxAvailableJpegSize);
2040 
2041     AvailableStream yuvStream = {.width = yuvChosenSize.width,
2042                                  .height = yuvChosenSize.height,
2043                                  .format = static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
2044 
2045     AvailableStream jpegStream = {.width = jpegChosenSize.width,
2046                                   .height = jpegChosenSize.height,
2047                                   .format = static_cast<int32_t>(PixelFormat::BLOB)};
2048     outputStreams->push_back(yuvStream);
2049     outputStreams->push_back(jpegStream);
2050 
2051     return Status::OK;
2052 }
2053 
2054 bool CameraAidlTest::isDepthOnly(const camera_metadata_t* staticMeta) {
2055     camera_metadata_ro_entry scalerEntry;
2056     camera_metadata_ro_entry depthEntry;
2057 
2058     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2059                                            &scalerEntry);
2060     if (rc == 0) {
2061         for (uint32_t i = 0; i < scalerEntry.count; i++) {
2062             if (scalerEntry.data.u8[i] ==
2063                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) {
2064                 return false;
2065             }
2066         }
2067     }
2068 
2069     for (uint32_t i = 0; (rc == 0) && (i < scalerEntry.count); i++) {
2070         if (scalerEntry.data.u8[i] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) {
2071             rc = find_camera_metadata_ro_entry(
2072                     staticMeta, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, &depthEntry);
2073             size_t idx = 0;
2074             if (rc == 0 && depthEntry.data.i32[idx] == static_cast<int32_t>(PixelFormat::Y16)) {
2075                 // only Depth16 format is supported now
2076                 return true;
2077             }
2078             break;
2079         }
2080     }
2081 
2082     return false;
2083 }
2084 
2085 Status CameraAidlTest::getMaxOutputSizeForFormat(const camera_metadata_t* staticMeta,
2086                                                  PixelFormat format, Size* size,
2087                                                  bool maxResolution) {
2088     std::vector<AvailableStream> outputStreams;
2089     if (size == nullptr ||
2090         getAvailableOutputStreams(staticMeta, outputStreams,
2091                                   /*threshold*/ nullptr, maxResolution) != Status::OK) {
2092         return Status::ILLEGAL_ARGUMENT;
2093     }
2094     Size maxSize;
2095     bool found = false;
2096     for (auto& outputStream : outputStreams) {
2097         if (static_cast<int32_t>(format) == outputStream.format &&
2098             (outputStream.width * outputStream.height > maxSize.width * maxSize.height)) {
2099             maxSize.width = outputStream.width;
2100             maxSize.height = outputStream.height;
2101             found = true;
2102         }
2103     }
2104     if (!found) {
2105         ALOGE("%s :chosen format %d not found", __FUNCTION__, static_cast<int32_t>(format));
2106         return Status::ILLEGAL_ARGUMENT;
2107     }
2108     *size = maxSize;
2109     return Status::OK;
2110 }
2111 
2112 Size CameraAidlTest::getMinSize(Size a, Size b) {
2113     if (a.width * a.height < b.width * b.height) {
2114         return a;
2115     }
2116     return b;
2117 }
2118 
2119 Status CameraAidlTest::getZSLInputOutputMap(camera_metadata_t* staticMeta,
2120                                             std::vector<AvailableZSLInputOutput>& inputOutputMap) {
2121     if (nullptr == staticMeta) {
2122         return Status::ILLEGAL_ARGUMENT;
2123     }
2124 
2125     camera_metadata_ro_entry entry;
2126     int rc = find_camera_metadata_ro_entry(
2127             staticMeta, ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, &entry);
2128     if ((0 != rc) || (0 >= entry.count)) {
2129         return Status::ILLEGAL_ARGUMENT;
2130     }
2131 
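    // The input/output formats map is encoded as repeated blocks of
    // [inputFormat, outputFormatCount, outputFormat_1, ..., outputFormat_N].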
2132     const int32_t* contents = &entry.data.i32[0];
2133     for (size_t i = 0; i < entry.count;) {
2134         int32_t inputFormat = contents[i++];
2135         int32_t length = contents[i++];
2136         for (int32_t j = 0; j < length; j++) {
2137             int32_t outputFormat = contents[i + j];
2138             AvailableZSLInputOutput zslEntry = {inputFormat, outputFormat};
2139             inputOutputMap.push_back(zslEntry);
2140         }
2141         i += length;
2142     }
2143 
2144     return Status::OK;
2145 }
2146 
2147 Status CameraAidlTest::findLargestSize(const std::vector<AvailableStream>& streamSizes,
2148                                        int32_t format, AvailableStream& result) {
2149     result = {0, 0, 0};
2150     for (auto& iter : streamSizes) {
2151         if (format == iter.format) {
2152             if ((result.width * result.height) < (iter.width * iter.height)) {
2153                 result = iter;
2154             }
2155         }
2156     }
2157 
2158     return (result.format == format) ? Status::OK : Status::ILLEGAL_ARGUMENT;
2159 }
2160 
2161 void CameraAidlTest::constructFilteredSettings(
2162         const std::shared_ptr<ICameraDeviceSession>& session,
2163         const std::unordered_set<int32_t>& availableKeys, RequestTemplate reqTemplate,
2164         android::hardware::camera::common::V1_0::helper::CameraMetadata* defaultSettings,
2165         android::hardware::camera::common::V1_0::helper::CameraMetadata* filteredSettings) {
2166     ASSERT_NE(defaultSettings, nullptr);
2167     ASSERT_NE(filteredSettings, nullptr);
2168 
2169     CameraMetadata req;
2170     auto ret = session->constructDefaultRequestSettings(reqTemplate, &req);
2171     ASSERT_TRUE(ret.isOk());
2172 
2173     const camera_metadata_t* metadata =
2174             clone_camera_metadata(reinterpret_cast<const camera_metadata_t*>(req.metadata.data()));
2175     size_t expectedSize = req.metadata.size();
2176     int result = validate_camera_metadata_structure(metadata, &expectedSize);
2177     ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
2178 
2179     size_t entryCount = get_camera_metadata_entry_count(metadata);
2180     ASSERT_GT(entryCount, 0u);
2181     *defaultSettings = metadata;
2182 
2183     const android::hardware::camera::common::V1_0::helper::CameraMetadata& constSettings =
2184             *defaultSettings;
2185     for (const auto& keyIt : availableKeys) {
2186         camera_metadata_ro_entry entry = constSettings.find(keyIt);
2187         if (entry.count > 0) {
2188             filteredSettings->update(entry);
2189         }
2190     }
2191 }
2192 
2193 void CameraAidlTest::verifySessionReconfigurationQuery(
2194         const std::shared_ptr<ICameraDeviceSession>& session, camera_metadata* oldSessionParams,
2195         camera_metadata* newSessionParams) {
2196     ASSERT_NE(nullptr, session);
2197     ASSERT_NE(nullptr, oldSessionParams);
2198     ASSERT_NE(nullptr, newSessionParams);
2199 
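    // Wrap the raw camera_metadata blobs into AIDL CameraMetadata objects (plain byte vectors)
    // before querying isReconfigurationRequired().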
2200     std::vector<uint8_t> oldParams =
2201             std::vector(reinterpret_cast<uint8_t*>(oldSessionParams),
2202                         reinterpret_cast<uint8_t*>(oldSessionParams) +
2203                                 get_camera_metadata_size(oldSessionParams));
2204     CameraMetadata oldMetadata = {oldParams};
2205 
2206     std::vector<uint8_t> newParams =
2207             std::vector(reinterpret_cast<uint8_t*>(newSessionParams),
2208                         reinterpret_cast<uint8_t*>(newSessionParams) +
2209                                 get_camera_metadata_size(newSessionParams));
2210     CameraMetadata newMetadata = {newParams};
2211 
2212     bool reconfigReq;
2213     ndk::ScopedAStatus ret =
2214             session->isReconfigurationRequired(oldMetadata, newMetadata, &reconfigReq);
2215     ASSERT_TRUE(ret.isOk() || static_cast<Status>(ret.getServiceSpecificError()) ==
2216                                       Status::OPERATION_NOT_SUPPORTED);
2217 }
2218 
2219 Status CameraAidlTest::isConstrainedModeAvailable(camera_metadata_t* staticMeta) {
2220     Status ret = Status::OPERATION_NOT_SUPPORTED;
2221     if (nullptr == staticMeta) {
2222         return Status::ILLEGAL_ARGUMENT;
2223     }
2224 
2225     camera_metadata_ro_entry entry;
2226     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2227                                            &entry);
2228     if (0 != rc) {
2229         return Status::ILLEGAL_ARGUMENT;
2230     }
2231 
2232     for (size_t i = 0; i < entry.count; i++) {
2233         if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO ==
2234             entry.data.u8[i]) {
2235             ret = Status::OK;
2236             break;
2237         }
2238     }
2239 
2240     return ret;
2241 }
2242 
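// Helper: scans ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS (stored
// as groups of five int32 values whose first two entries are width and height) and
// returns the largest advertised high-speed resolution in hfrStream.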
2243 Status CameraAidlTest::pickConstrainedModeSize(camera_metadata_t* staticMeta,
2244                                                AvailableStream& hfrStream) {
2245     if (nullptr == staticMeta) {
2246         return Status::ILLEGAL_ARGUMENT;
2247     }
2248 
2249     camera_metadata_ro_entry entry;
2250     int rc = find_camera_metadata_ro_entry(
2251             staticMeta, ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, &entry);
2252     if (0 != rc) {
2253         return Status::OPERATION_NOT_SUPPORTED;
2254     } else if (0 != (entry.count % 5)) {
2255         return Status::ILLEGAL_ARGUMENT;
2256     }
2257 
2258     hfrStream = {0, 0, static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2259     for (size_t i = 0; i < entry.count; i += 5) {
2260         int32_t w = entry.data.i32[i];
2261         int32_t h = entry.data.i32[i + 1];
2262         if ((hfrStream.width * hfrStream.height) < (w * h)) {
2263             hfrStream.width = w;
2264             hfrStream.height = h;
2265         }
2266     }
2267 
2268     return Status::OK;
2269 }
2270 
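// Test body: configures a single stream per camera, submits one capture request
// (allocating the output buffer unless the HAL manages buffers), waits for the
// result and validates the readout timestamp, then resubmits the request with
// empty settings and a null buffer handle to exercise the repeating-request path.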
2271 void CameraAidlTest::processCaptureRequestInternal(uint64_t bufferUsage,
2272                                                    RequestTemplate reqTemplate,
2273                                                    bool useSecureOnlyCameras) {
2274     std::vector<std::string> cameraDeviceNames =
2275             getCameraDeviceNames(mProvider, useSecureOnlyCameras);
2276     AvailableStream streamThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2277                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2278     int64_t bufferId = 1;
2279     int32_t frameNumber = 1;
2280     CameraMetadata settings;
2281     for (const auto& name : cameraDeviceNames) {
2282         Stream testStream;
2283         std::vector<HalStream> halStreams;
2284         std::shared_ptr<ICameraDeviceSession> session;
2285         std::shared_ptr<DeviceCb> cb;
2286         bool supportsPartialResults = false;
2287         bool useHalBufManager = false;
2288         int32_t partialResultCount = 0;
2289         configureSingleStream(name, mProvider, &streamThreshold, bufferUsage, reqTemplate,
2290                               &session /*out*/, &testStream /*out*/, &halStreams /*out*/,
2291                               &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2292                               &useHalBufManager /*out*/, &cb /*out*/);
2293 
2294         ASSERT_NE(session, nullptr);
2295         ASSERT_NE(cb, nullptr);
2296         ASSERT_FALSE(halStreams.empty());
2297 
2298         std::shared_ptr<ResultMetadataQueue> resultQueue;
2299         ::aidl::android::hardware::common::fmq::MQDescriptor<
2300                 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2301                 descriptor;
2302         ndk::ScopedAStatus ret = session->getCaptureResultMetadataQueue(&descriptor);
2303         ASSERT_TRUE(ret.isOk());
2304 
2305         resultQueue = std::make_shared<ResultMetadataQueue>(descriptor);
2306         if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2307             ALOGE("%s: HAL returned an empty result metadata fmq,"
2308                   " not using it",
2309                   __func__);
2310             resultQueue = nullptr;
2311             // Don't use the queue onwards.
2312         }
2313 
2314         std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2315                 1, false, supportsPartialResults, partialResultCount, resultQueue);
2316 
2317         CameraMetadata req;
2318         ret = session->constructDefaultRequestSettings(reqTemplate, &req);
2319         ASSERT_TRUE(ret.isOk());
2320         settings = req;
2321 
2322         overrideRotateAndCrop(&settings);
2323 
2324         std::vector<CaptureRequest> requests(1);
2325         CaptureRequest& request = requests[0];
2326         request.frameNumber = frameNumber;
2327         request.fmqSettingsSize = 0;
2328         request.settings = settings;
2329 
2330         std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2331         outputBuffers.resize(1);
2332         StreamBuffer& outputBuffer = outputBuffers[0];
2333         if (useHalBufManager) {
2334             outputBuffer = {halStreams[0].id,
2335                             /*bufferId*/ 0,   NativeHandle(), BufferStatus::OK,
2336                             NativeHandle(),   NativeHandle()};
2337         } else {
2338             buffer_handle_t handle;
2339             allocateGraphicBuffer(
2340                     testStream.width, testStream.height,
2341                     /* We don't look at halStreams[0].consumerUsage
2342                      * since that is 0 for output streams
2343                      */
2344                     ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2345                             static_cast<uint64_t>(halStreams[0].producerUsage), bufferUsage)),
2346                     halStreams[0].overrideFormat, &handle);
2347 
2348             outputBuffer = {halStreams[0].id, bufferId,       ::android::makeToAidl(handle),
2349                             BufferStatus::OK, NativeHandle(), NativeHandle()};
2350         }
2351         request.inputBuffer = {-1,
2352                                0,
2353                                NativeHandle(),
2354                                BufferStatus::ERROR,
2355                                NativeHandle(),
2356                                NativeHandle()};  // Empty Input Buffer
2357 
2358         {
2359             std::unique_lock<std::mutex> l(mLock);
2360             mInflightMap.clear();
2361             mInflightMap.insert(std::make_pair(frameNumber, inflightReq));
2362         }
2363 
2364         int32_t numRequestProcessed = 0;
2365         std::vector<BufferCache> cachesToRemove;
2366         ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2367         ALOGI("processCaptureRequestInternal: processCaptureRequest returns status: %d:%d",
2368               ret.getExceptionCode(), ret.getServiceSpecificError());
2369 
2370         ASSERT_TRUE(ret.isOk());
2371         ASSERT_EQ(numRequestProcessed, 1u);
2372 
2373         {
2374             std::unique_lock<std::mutex> l(mLock);
2375             while (!inflightReq->errorCodeValid &&
2376                    ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2377                 auto timeout = std::chrono::system_clock::now() +
2378                                std::chrono::seconds(kStreamBufferTimeoutSec);
2379                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2380             }
2381 
2382             ASSERT_FALSE(inflightReq->errorCodeValid);
2383             ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2384             ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2385 
2386             // shutterReadoutTimestamp must be available, and it must
2387             // be >= shutterTimestamp + exposureTime,
2388             // and < shutterTimestamp + exposureTime + rollingShutterSkew / 2.
2389             ASSERT_TRUE(inflightReq->shutterReadoutTimestampValid);
2390             ASSERT_FALSE(inflightReq->collectedResult.isEmpty());
2391 
2392             if (inflightReq->collectedResult.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2393                 camera_metadata_entry_t exposureTimeResult =
2394                         inflightReq->collectedResult.find(ANDROID_SENSOR_EXPOSURE_TIME);
2395                 nsecs_t exposureToReadout =
2396                         inflightReq->shutterReadoutTimestamp - inflightReq->shutterTimestamp;
2397                 ASSERT_GE(exposureToReadout, exposureTimeResult.data.i64[0]);
2398                 if (inflightReq->collectedResult.exists(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW)) {
2399                     camera_metadata_entry_t rollingShutterSkew =
2400                             inflightReq->collectedResult.find(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
2401                     ASSERT_LT(exposureToReadout,
2402                               exposureTimeResult.data.i64[0] + rollingShutterSkew.data.i64[0] / 2);
2403                 }
2404             }
2405 
2406             request.frameNumber++;
2407             // Empty settings should be supported after the first call
2408             // for repeating requests.
2409             request.settings.metadata.clear();
2410             // The buffer has been registered to HAL by bufferId, so per
2411             // API contract we should send a null handle for this buffer
2412             request.outputBuffers[0].buffer = NativeHandle();
2413             mInflightMap.clear();
2414             inflightReq = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2415                                                             partialResultCount, resultQueue);
2416             mInflightMap.insert(std::make_pair(request.frameNumber, inflightReq));
2417         }
2418 
2419         ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2420         ALOGI("processCaptureRequestInternal: processCaptureRequest returns status: %d:%d",
2421               ret.getExceptionCode(), ret.getServiceSpecificError());
2422         ASSERT_TRUE(ret.isOk());
2423         ASSERT_EQ(numRequestProcessed, 1u);
2424 
2425         {
2426             std::unique_lock<std::mutex> l(mLock);
2427             while (!inflightReq->errorCodeValid &&
2428                    ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2429                 auto timeout = std::chrono::system_clock::now() +
2430                                std::chrono::seconds(kStreamBufferTimeoutSec);
2431                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2432             }
2433 
2434             ASSERT_FALSE(inflightReq->errorCodeValid);
2435             ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2436             ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2437         }
2438 
2439         if (useHalBufManager) {
2440             verifyBuffersReturned(session, testStream.id, cb);
2441         }
2442 
2443         ret = session->close();
2444         ASSERT_TRUE(ret.isOk());
2445     }
2446 }
2447 
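// Test body: for each camera advertising stream use cases, configures one output
// stream with both supported and deliberately invalid use cases and checks that
// isStreamCombinationSupported() and configureStreams() agree on what is accepted.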
2448 void CameraAidlTest::configureStreamUseCaseInternal(const AvailableStream &threshold) {
2449     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2450 
2451     for (const auto& name : cameraDeviceNames) {
2452         CameraMetadata meta;
2453         std::shared_ptr<ICameraDevice> cameraDevice;
2454 
2455         openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
2456                                &cameraDevice /*out*/);
2457 
2458         camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
2459         // Skip depth-only cameras, cameras without the stream use case capability, and, for RAW16 thresholds, cameras without the CROPPED_RAW use case
2460         if (isDepthOnly(staticMeta) || !supportsStreamUseCaseCap(staticMeta) ||
2461             (threshold.format == static_cast<int32_t>(PixelFormat::RAW16) &&
2462              !supportsCroppedRawUseCase(staticMeta))) {
2463             ndk::ScopedAStatus ret = mSession->close();
2464             mSession = nullptr;
2465             ASSERT_TRUE(ret.isOk());
2466             continue;
2467         }
2468 
2469         std::vector<AvailableStream> outputPreviewStreams;
2470 
2471         ASSERT_EQ(Status::OK,
2472                   getAvailableOutputStreams(staticMeta, outputPreviewStreams, &threshold));
2473         ASSERT_NE(0u, outputPreviewStreams.size());
2474 
2475         // Combine valid and invalid stream use cases
2476         std::vector<int64_t> testedUseCases;
2477         testedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW + 1);
2478 
2479         std::vector<int64_t> supportedUseCases;
2480         if (threshold.format == static_cast<int32_t>(PixelFormat::RAW16)) {
2481             // For RAW16, only the CROPPED_RAW and DEFAULT use cases are supported;
2482             // all others are unsupported for this format.
2483             testedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW);
2484             supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW);
2485             supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
2486         } else {
2487             camera_metadata_ro_entry entry;
2488             testedUseCases.insert(testedUseCases.end(), kMandatoryUseCases.begin(),
2489                                   kMandatoryUseCases.end());
2490             auto retcode = find_camera_metadata_ro_entry(
2491                     staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
2492             if ((0 == retcode) && (entry.count > 0)) {
2493                 supportedUseCases.insert(supportedUseCases.end(), entry.data.i64,
2494                                          entry.data.i64 + entry.count);
2495             } else {
2496                 supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
2497             }
2498         }
2499 
2500         std::vector<Stream> streams(1);
2501         streams[0] = {
2502                 0,
2503                 StreamType::OUTPUT,
2504                 outputPreviewStreams[0].width,
2505                 outputPreviewStreams[0].height,
2506                 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2507                 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
2508                         GRALLOC1_CONSUMER_USAGE_CPU_READ),
2509                 Dataspace::UNKNOWN,
2510                 StreamRotation::ROTATION_0,
2511                 std::string(),
2512                 0,
2513                 -1,
2514                 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2515                 RequestAvailableDynamicRangeProfilesMap::
2516                         ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
2517                 ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
2518                 static_cast<int>(
2519                         RequestAvailableColorSpaceProfilesMap::
2520                                 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
2521 
2522         int32_t streamConfigCounter = 0;
2523         CameraMetadata req;
2524         StreamConfiguration config;
2525         RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
2526         ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req);
2527         ASSERT_TRUE(ret.isOk());
2528         config.sessionParams = req;
2529 
2530         for (int64_t useCase : testedUseCases) {
2531             bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(),
2532                                               useCase) != supportedUseCases.end();
2533 
2534             streams[0].useCase = static_cast<
2535                     aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>(
2536                     useCase);
2537             config.streams = streams;
2538             config.operationMode = StreamConfigurationMode::NORMAL_MODE;
2539             config.streamConfigCounter = streamConfigCounter;
2540             config.multiResolutionInputImage = false;
2541 
2542             bool combSupported;
2543             ret = cameraDevice->isStreamCombinationSupported(config, &combSupported);
2544             if (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) ==
2545                 ret.getServiceSpecificError()) {
2546                 continue;
2547             }
2548 
2549             ASSERT_TRUE(ret.isOk());
2550             ASSERT_EQ(combSupported, useCaseSupported);
2551 
2552             std::vector<HalStream> halStreams;
2553             ret = mSession->configureStreams(config, &halStreams);
2554             ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError());
2555             if (useCaseSupported) {
2556                 ASSERT_TRUE(ret.isOk());
2557                 ASSERT_EQ(1u, halStreams.size());
2558             } else {
2559                 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2560                           ret.getServiceSpecificError());
2561             }
2562         }
2563         ret = mSession->close();
2564         mSession = nullptr;
2565         ASSERT_TRUE(ret.isOk());
2566     }
2568 }
2569 
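// Wrapper around stream configuration: uses configureStreamsV2() when the session
// HAL buffer manager is in play (flag enabled, BufferManagerType::SESSION and
// interface version >= 3), otherwise falls back to configureStreams(), and records
// the ids of HAL-buffer-managed streams in halBufManagedStreamIds.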
2570 ndk::ScopedAStatus CameraAidlTest::configureStreams(std::shared_ptr<ICameraDeviceSession>& session,
2571                                                     const StreamConfiguration& config,
2572                                                     BufferManagerType bufferManagerType,
2573                                                     std::set<int32_t>* halBufManagedStreamIds,
2574                                                     std::vector<HalStream>* halStreams) {
2575     auto ret = ndk::ScopedAStatus::ok();
2576     ConfigureStreamsRet aidl_return;
2577     int32_t interfaceVersion = -1;
2578     ret = session->getInterfaceVersion(&interfaceVersion);
2579     if (!ret.isOk()) {
2580         return ret;
2581     }
2582 
2583     if (flags::session_hal_buf_manager() &&
2584         (bufferManagerType == BufferManagerType::SESSION && interfaceVersion >= 3)) {
2585         ret = session->configureStreamsV2(config, &aidl_return);
2586     } else {
2587         ret = session->configureStreams(config, halStreams);
2588     }
2589     if (!ret.isOk()) {
2590         return ret;
2591     }
2592     if (flags::session_hal_buf_manager() && bufferManagerType == BufferManagerType::SESSION) {
2593         *halStreams = std::move(aidl_return.halStreams);
2594     }
2595     for (const auto& halStream : *halStreams) {
2596         if ((flags::session_hal_buf_manager() && bufferManagerType == BufferManagerType::SESSION &&
2597              halStream.enableHalBufferManager) ||
2598             bufferManagerType == BufferManagerType::HAL) {
2599             halBufManagedStreamIds->insert(halStream.id);
2600         }
2601     }
2602     return ndk::ScopedAStatus::ok();
2603 }
2604 
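// Helper: opens the named camera, selects the first output stream matching
// previewThreshold, configures it with bufferUsage and the default settings of
// reqTemplate, and returns the session, HAL stream, partial-result info and
// buffer-management mode through the out parameters.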
2605 void CameraAidlTest::configureSingleStream(
2606         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
2607         const AvailableStream* previewThreshold, uint64_t bufferUsage, RequestTemplate reqTemplate,
2608         std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream,
2609         std::vector<HalStream>* halStreams, bool* supportsPartialResults,
2610         int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb,
2611         uint32_t streamConfigCounter) {
2612     ASSERT_NE(nullptr, session);
2613     ASSERT_NE(nullptr, previewStream);
2614     ASSERT_NE(nullptr, halStreams);
2615     ASSERT_NE(nullptr, supportsPartialResults);
2616     ASSERT_NE(nullptr, partialResultCount);
2617     ASSERT_NE(nullptr, useHalBufManager);
2618     ASSERT_NE(nullptr, cb);
2619 
2620     std::vector<AvailableStream> outputPreviewStreams;
2621     std::shared_ptr<ICameraDevice> device;
2622     ALOGI("configureStreams: Testing camera device %s", name.c_str());
2623 
2624     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2625     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2626           ret.getServiceSpecificError());
2627     ASSERT_TRUE(ret.isOk());
2628     ASSERT_NE(device, nullptr);
2629 
2630     camera_metadata_t* staticMeta;
2631     CameraMetadata chars;
2632     ret = device->getCameraCharacteristics(&chars);
2633     ASSERT_TRUE(ret.isOk());
2634     staticMeta = clone_camera_metadata(
2635             reinterpret_cast<const camera_metadata_t*>(chars.metadata.data()));
2636     ASSERT_NE(nullptr, staticMeta);
2637 
2638     size_t expectedSize = chars.metadata.size();
2639     ALOGE("validate_camera_metadata_structure: %d",
2640           validate_camera_metadata_structure(staticMeta, &expectedSize));
2641 
2642     camera_metadata_ro_entry entry;
2643     auto status =
2644             find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
2645     if ((0 == status) && (entry.count > 0)) {
2646         *partialResultCount = entry.data.i32[0];
2647         *supportsPartialResults = (*partialResultCount > 1);
2648     }
2649 
2650     *cb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
2651 
2652     ret = device->open(*cb, session);
2653     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
2654           ret.getServiceSpecificError());
2655     ASSERT_TRUE(ret.isOk());
2656     ASSERT_NE(*session, nullptr);
2657 
2658     BufferManagerType bufferManagerType = BufferManagerType::FRAMEWORK;
2659     status = find_camera_metadata_ro_entry(
2660             staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
2661     if ((0 == status) && (entry.count == 1)) {
2662         if (entry.data.u8[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5) {
2663             bufferManagerType = BufferManagerType::HAL;
2664         } else if (entry.data.u8[0] ==
2665                    ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE) {
2666             bufferManagerType = BufferManagerType::SESSION;
2667         }
2668     }
2669 
2670     outputPreviewStreams.clear();
2671     auto rc = getAvailableOutputStreams(staticMeta, outputPreviewStreams, previewThreshold);
2672 
2673     int32_t jpegBufferSize = 0;
2674     ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
2675     ASSERT_NE(0u, jpegBufferSize);
2676 
2677     ASSERT_EQ(Status::OK, rc);
2678     ASSERT_FALSE(outputPreviewStreams.empty());
2679 
2680     Dataspace dataspace = Dataspace::UNKNOWN;
2681     switch (static_cast<PixelFormat>(outputPreviewStreams[0].format)) {
2682         case PixelFormat::Y16:
2683             dataspace = Dataspace::DEPTH;
2684             break;
2685         default:
2686             dataspace = Dataspace::UNKNOWN;
2687     }
2688 
2689     std::vector<Stream> streams(1);
2690     streams[0] = {0,
2691                   StreamType::OUTPUT,
2692                   outputPreviewStreams[0].width,
2693                   outputPreviewStreams[0].height,
2694                   static_cast<PixelFormat>(outputPreviewStreams[0].format),
2695                   static_cast<aidl::android::hardware::graphics::common::BufferUsage>(bufferUsage),
2696                   dataspace,
2697                   StreamRotation::ROTATION_0,
2698                   "",
2699                   0,
2700                   /*groupId*/ -1,
2701                   {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2702                   RequestAvailableDynamicRangeProfilesMap::
2703                           ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
2704                   ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
2705                   static_cast<int>(
2706                           RequestAvailableColorSpaceProfilesMap::
2707                                   ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
2708 
2709     StreamConfiguration config;
2710     config.streams = streams;
2711     createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2712                               jpegBufferSize);
2713     if (*session != nullptr) {
2714         CameraMetadata sessionParams;
2715         ret = (*session)->constructDefaultRequestSettings(reqTemplate, &sessionParams);
2716         ASSERT_TRUE(ret.isOk());
2717         config.sessionParams = sessionParams;
2718         config.streamConfigCounter = (int32_t)streamConfigCounter;
2719 
2720         bool supported = false;
2721         ret = device->isStreamCombinationSupported(config, &supported);
2722         ASSERT_TRUE(ret.isOk());
2723         ASSERT_EQ(supported, true);
2724 
2725         std::vector<HalStream> halConfigs;
2726         std::set<int32_t> halBufManagedStreamIds;
2727         ret = configureStreams(*session, config, bufferManagerType, &halBufManagedStreamIds,
2728                                &halConfigs);
2729         ALOGI("configureStreams returns status: %d:%d", ret.getExceptionCode(),
2730               ret.getServiceSpecificError());
2731         ASSERT_TRUE(ret.isOk());
2732         ASSERT_EQ(1u, halConfigs.size());
2733         halStreams->clear();
2734         halStreams->push_back(halConfigs[0]);
2735         *useHalBufManager = halBufManagedStreamIds.size() != 0;
2736         if (*useHalBufManager) {
2737             std::vector<Stream> ss(1);
2738             std::vector<HalStream> hs(1);
2739             ss[0] = config.streams[0];
2740             hs[0] = halConfigs[0];
2741             (*cb)->setCurrentStreamConfig(ss, hs);
2742         }
2743     }
2744     *previewStream = config.streams[0];
2745     ASSERT_TRUE(ret.isOk());
2746 }
2747 
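// If the request leaves ROTATE_AND_CROP in AUTO, rewrite it to NONE so captured
// results are not altered by the HAL's automatic rotate-and-crop behavior.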
2748 void CameraAidlTest::overrideRotateAndCrop(CameraMetadata* settings) {
2749     if (settings == nullptr) {
2750         return;
2751     }
2752 
2753     ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta =
2754             clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(settings->metadata.data()));
2755     auto entry = requestMeta.find(ANDROID_SCALER_ROTATE_AND_CROP);
2756     if ((entry.count > 0) && (entry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO)) {
2757         uint8_t disableRotateAndCrop = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
2758         requestMeta.update(ANDROID_SCALER_ROTATE_AND_CROP, &disableRotateAndCrop, 1);
2759         settings->metadata.clear();
2760         camera_metadata_t* metaBuffer = requestMeta.release();
2761         uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2762         settings->metadata =
2763                 std::vector(rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2764     }
2765 }
2766 
2767 void CameraAidlTest::verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session,
2768                                            int32_t streamId, const std::shared_ptr<DeviceCb>& cb,
2769                                            uint32_t streamConfigCounter) {
2770     ASSERT_NE(nullptr, session);
2771 
2772     std::vector<int32_t> streamIds(1);
2773     streamIds[0] = streamId;
2774     session->signalStreamFlush(streamIds, /*streamConfigCounter*/ streamConfigCounter);
2775     cb->waitForBuffersReturned();
2776 }
2777 
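// Test body: runs a preview capture on every camera that supports preview
// stabilization. With stabilization off, the lag between the buffer-ready and
// readout timestamps is recorded in cameraDeviceToTimeLag; with stabilization on,
// the measured lag must not exceed the recorded value plus one frame duration.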
2778 void CameraAidlTest::processPreviewStabilizationCaptureRequestInternal(
2779         bool previewStabilizationOn,
2780         // Used as output when preview stabilization is off, and as input when it's on.
2781         std::unordered_map<std::string, nsecs_t>& cameraDeviceToTimeLag) {
2782     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2783     AvailableStream streamThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2784                                        static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2785     int64_t bufferId = 1;
2786     int32_t frameNumber = 1;
2787     std::vector<uint8_t> settings;
2788 
2789     for (const auto& name : cameraDeviceNames) {
2790         if (!supportsPreviewStabilization(name, mProvider)) {
2791             ALOGI(" %s Camera device %s doesn't support preview stabilization, skipping", __func__,
2792                   name.c_str());
2793             continue;
2794         }
2795 
2796         Stream testStream;
2797         std::vector<HalStream> halStreams;
2798         std::shared_ptr<ICameraDeviceSession> session;
2799         std::shared_ptr<DeviceCb> cb;
2800         bool supportsPartialResults = false;
2801         bool useHalBufManager = false;
2802         int32_t partialResultCount = 0;
2803         configureSingleStream(name, mProvider, &streamThreshold, GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
2804                               RequestTemplate::PREVIEW, &session /*out*/, &testStream /*out*/,
2805                               &halStreams /*out*/, &supportsPartialResults /*out*/,
2806                               &partialResultCount /*out*/, &useHalBufManager /*out*/, &cb /*out*/);
2807 
2808         ::aidl::android::hardware::common::fmq::MQDescriptor<
2809                 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2810                 descriptor;
2811         ndk::ScopedAStatus resultQueueRet = session->getCaptureResultMetadataQueue(&descriptor);
2812         ASSERT_TRUE(resultQueueRet.isOk());
2813 
2814         std::shared_ptr<ResultMetadataQueue> resultQueue =
2815                 std::make_shared<ResultMetadataQueue>(descriptor);
2816         if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2817             ALOGE("%s: HAL returned an empty result metadata fmq,"
2818                   " not using it",
2819                   __func__);
2820             resultQueue = nullptr;
2821             // Don't use the queue onwards.
2822         }
2823 
2824         std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2825                 1, false, supportsPartialResults, partialResultCount, resultQueue);
2826 
2827         CameraMetadata defaultMetadata;
2828         android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
2829         ndk::ScopedAStatus ret = session->constructDefaultRequestSettings(RequestTemplate::PREVIEW,
2830                                                                           &defaultMetadata);
2831         ASSERT_TRUE(ret.isOk());
2832 
2833         const camera_metadata_t* metadata =
2834                 reinterpret_cast<const camera_metadata_t*>(defaultMetadata.metadata.data());
2835         defaultSettings = metadata;
2836         android::status_t metadataRet = ::android::OK;
2837         uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
2838         if (previewStabilizationOn) {
2839             videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION;
2840             metadataRet = defaultSettings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
2841                                                  &videoStabilizationMode, 1);
2842         } else {
2843             metadataRet = defaultSettings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
2844                                                  &videoStabilizationMode, 1);
2845         }
2846         ASSERT_EQ(metadataRet, ::android::OK);
2847 
2848         camera_metadata_t* releasedMetadata = defaultSettings.release();
2849         uint8_t* rawMetadata = reinterpret_cast<uint8_t*>(releasedMetadata);
2850 
2851         buffer_handle_t buffer_handle;
2852 
2853         std::vector<CaptureRequest> requests(1);
2854         CaptureRequest& request = requests[0];
2855         request.frameNumber = frameNumber;
2856         request.fmqSettingsSize = 0;
2857         request.settings.metadata =
2858                 std::vector(rawMetadata, rawMetadata + get_camera_metadata_size(releasedMetadata));
2859         overrideRotateAndCrop(&request.settings);
2860         request.outputBuffers = std::vector<StreamBuffer>(1);
2861         StreamBuffer& outputBuffer = request.outputBuffers[0];
2862 
2863         if (useHalBufManager) {
2864             outputBuffer = {halStreams[0].id,
2865                             /*bufferId*/ 0,   NativeHandle(), BufferStatus::OK,
2866                             NativeHandle(),   NativeHandle()};
2867         } else {
2868             allocateGraphicBuffer(testStream.width, testStream.height,
2869                                   /* We don't look at halStreams[0].consumerUsage
2870                                    * since that is 0 for output streams
2871                                    */
2872                                   ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2873                                           static_cast<uint64_t>(halStreams[0].producerUsage),
2874                                           GRALLOC1_CONSUMER_USAGE_HWCOMPOSER)),
2875                                   halStreams[0].overrideFormat, &buffer_handle);
2876             outputBuffer = {halStreams[0].id, bufferId,       ::android::makeToAidl(buffer_handle),
2877                             BufferStatus::OK, NativeHandle(), NativeHandle()};
2878         }
2879         request.inputBuffer = {
2880                 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2881 
2882         {
2883             std::unique_lock<std::mutex> l(mLock);
2884             mInflightMap.clear();
2885             mInflightMap.insert(std::make_pair(frameNumber, inflightReq));
2886         }
2887 
2888         int32_t numRequestProcessed = 0;
2889         std::vector<BufferCache> cachesToRemove;
2890         ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2891         ASSERT_TRUE(ret.isOk());
2892         ASSERT_EQ(numRequestProcessed, 1u);
2893 
2894         {
2895             std::unique_lock<std::mutex> l(mLock);
2896             while (!inflightReq->errorCodeValid &&
2897                    ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2898                 auto timeout = std::chrono::system_clock::now() +
2899                                std::chrono::seconds(kStreamBufferTimeoutSec);
2900                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2901             }
2902             waitForReleaseFence(inflightReq->resultOutputBuffers);
2903 
2904             ASSERT_FALSE(inflightReq->errorCodeValid);
2905             ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2906             ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2907             ASSERT_TRUE(inflightReq->shutterReadoutTimestampValid);
2908             nsecs_t readoutTimestamp = inflightReq->shutterReadoutTimestamp;
2909 
2910             if (previewStabilizationOn) {
2911                 // Here we collect the time difference between the buffer ready
2912                 // Measure the lag for this capture:
2913                 //     timeLag = buffer ready timestamp - notify readout timestamp.
2914                 // With preview stabilization on, it must satisfy:
2915                 //     timeLag(previewStabilization) <=
2916                 //         timeLag(stabilization off) + 1 frame duration.
2917                 camera_metadata_entry e;
2918                 e = inflightReq->collectedResult.find(ANDROID_SENSOR_FRAME_DURATION);
2919                 ASSERT_TRUE(e.count > 0);
2920                 nsecs_t frameDuration = e.data.i64[0];
2921                 ASSERT_TRUE(it != cameraDeviceToTimeLag.end());
2922 
2923                 nsecs_t previewStabOnLagTime =
2924                         inflightReq->resultOutputBuffers[0].timeStamp - readoutTimestamp;
2925                 ASSERT_TRUE(previewStabOnLagTime <= (it->second + frameDuration));
2926             } else {
2927                 // Fill in the buffer ready timestamp - notify timestamp;
2928                 cameraDeviceToTimeLag[std::string(name)] =
2929                         inflightReq->resultOutputBuffers[0].timeStamp - readoutTimestamp;
2930             }
2931         }
2932 
2933         if (useHalBufManager) {
2934             verifyBuffersReturned(session, testStream.id, cb);
2935         }
2936 
2937         ret = session->close();
2938         ASSERT_TRUE(ret.isOk());
2939     }
2940 }
2941 
2942 bool CameraAidlTest::supportsPreviewStabilization(
2943         const std::string& name, const std::shared_ptr<ICameraProvider>& provider) {
2944     std::shared_ptr<ICameraDevice> device;
2945     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2946     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2947           ret.getServiceSpecificError());
2948     if (!ret.isOk() || device == nullptr) {
2949         ADD_FAILURE() << "Failed to get camera device interface for " << name;
         return false;
2950     }
2951 
2952     CameraMetadata metadata;
2953     ret = device->getCameraCharacteristics(&metadata);
2954     camera_metadata_t* staticMeta = clone_camera_metadata(
2955             reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
2956     if (!(ret.isOk())) {
2957         ADD_FAILURE() << "Failed to get camera characteristics for " << name;
         return false;
2958     }
2959     // Go through the characteristics and see if video stabilization modes have
2960     // preview stabilization
2961     camera_metadata_ro_entry entry;
2962 
2963     int retcode = find_camera_metadata_ro_entry(
2964             staticMeta, ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, &entry);
2965     if ((0 == retcode) && (entry.count > 0)) {
2966         for (size_t i = 0; i < entry.count; i++) {
2967             if (entry.data.u8[i] ==
2968                 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION) {
2969                 return true;
2970             }
2971         }
2972     }
2973     return false;
2974 }
2975 
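// Helper: configures one preview output stream per physical camera id. If
// allowUnsupport is true and the combination is rejected, the session is closed
// and returned as null instead of failing the test.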
2976 void CameraAidlTest::configurePreviewStreams(
2977         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
2978         const AvailableStream* previewThreshold, const std::unordered_set<std::string>& physicalIds,
2979         std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream,
2980         std::vector<HalStream>* halStreams, bool* supportsPartialResults,
2981         int32_t* partialResultCount, std::set<int32_t>* halBufManagedStreamIds,
2982         std::shared_ptr<DeviceCb>* cb, int32_t streamConfigCounter, bool allowUnsupport) {
2983     ASSERT_NE(nullptr, session);
2984     ASSERT_NE(nullptr, halStreams);
2985     ASSERT_NE(nullptr, previewStream);
2986     ASSERT_NE(nullptr, supportsPartialResults);
2987     ASSERT_NE(nullptr, partialResultCount);
2988     ASSERT_NE(nullptr, halBufManagedStreamIds);
2989     ASSERT_NE(nullptr, cb);
2990 
2991     ASSERT_FALSE(physicalIds.empty());
2992 
2993     std::vector<AvailableStream> outputPreviewStreams;
2994     std::shared_ptr<ICameraDevice> device;
2995     ALOGI("configureStreams: Testing camera device %s", name.c_str());
2996 
2997     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2998     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2999           ret.getServiceSpecificError());
3000     ASSERT_TRUE(ret.isOk());
3001     ASSERT_NE(device, nullptr);
3002 
3003     CameraMetadata meta;
3004     ret = device->getCameraCharacteristics(&meta);
3005     ASSERT_TRUE(ret.isOk());
3006     camera_metadata_t* staticMeta =
3007             clone_camera_metadata(reinterpret_cast<const camera_metadata_t*>(meta.metadata.data()));
3008     ASSERT_NE(nullptr, staticMeta);
3009 
3010     camera_metadata_ro_entry entry;
3011     auto status =
3012             find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
3013     if ((0 == status) && (entry.count > 0)) {
3014         *partialResultCount = entry.data.i32[0];
3015         *supportsPartialResults = (*partialResultCount > 1);
3016     }
3017 
3018     *cb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
3019     ret = device->open(*cb, session);
3020     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
3021           ret.getServiceSpecificError());
3022     ASSERT_TRUE(ret.isOk());
3023     ASSERT_NE(*session, nullptr);
3024 
3025     BufferManagerType bufferManagerType = BufferManagerType::FRAMEWORK;
3026     status = find_camera_metadata_ro_entry(
3027             staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
3028     if ((0 == status) && (entry.count == 1)) {
3029         if (entry.data.u8[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5) {
3030             bufferManagerType = BufferManagerType::HAL;
3031         } else if (entry.data.u8[0] ==
3032                    ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE) {
3033             bufferManagerType = BufferManagerType::SESSION;
3034         }
3035     }
3036 
3037     outputPreviewStreams.clear();
3038     Status rc = getAvailableOutputStreams(staticMeta, outputPreviewStreams, previewThreshold);
3039 
3040     ASSERT_EQ(Status::OK, rc);
3041     ASSERT_FALSE(outputPreviewStreams.empty());
3042 
3043     std::vector<Stream> streams(physicalIds.size());
3044     int32_t streamId = 0;
3045     for (auto const& physicalId : physicalIds) {
3046         streams[streamId] = {
3047                 streamId,
3048                 StreamType::OUTPUT,
3049                 outputPreviewStreams[0].width,
3050                 outputPreviewStreams[0].height,
3051                 static_cast<PixelFormat>(outputPreviewStreams[0].format),
3052                 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
3053                         GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3054                 Dataspace::UNKNOWN,
3055                 StreamRotation::ROTATION_0,
3056                 physicalId,
3057                 0,
3058                 -1,
3059                 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3060                 RequestAvailableDynamicRangeProfilesMap::
3061                         ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
3062                 ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
3063                 static_cast<int>(
3064                         RequestAvailableColorSpaceProfilesMap::
3065                                 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
3066         streamId++;
3067     }
3068 
3069     StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE, CameraMetadata()};
3070 
3071     RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
3072     ret = (*session)->constructDefaultRequestSettings(reqTemplate, &config.sessionParams);
3073     ASSERT_TRUE(ret.isOk());
3074 
3075     bool supported = false;
3076     ret = device->isStreamCombinationSupported(config, &supported);
3077     ASSERT_TRUE(ret.isOk());
3078     if (allowUnsupport && !supported) {
3079         // Stream combination not supported; close and return a null session.
3080         ret = (*session)->close();
3081         ASSERT_TRUE(ret.isOk());
3082         *session = nullptr;
3083         return;
3084     }
3085     ASSERT_TRUE(supported) << "Stream combination must be supported.";
3086 
3087     config.streamConfigCounter = streamConfigCounter;
3088     std::vector<HalStream> halConfigs;
3089     ret = configureStreams(*session, config, bufferManagerType, halBufManagedStreamIds,
3090                            &halConfigs);
3091 
3092     ASSERT_TRUE(ret.isOk());
3093     ASSERT_EQ(physicalIds.size(), halConfigs.size());
3094     *halStreams = halConfigs;
3095     if (halBufManagedStreamIds->size() != 0) {
3096         // Only include the streams that are HAL buffer managed
3097         std::vector<Stream> ss;
3098         std::vector<HalStream> hs;
3099         for (size_t i = 0; i < physicalIds.size(); i++) {
3100             if (contains(*halBufManagedStreamIds, halConfigs[i].id)) {
3101                 ss.emplace_back(streams[i]);
3102                 hs.emplace_back(halConfigs[i]);
3103             }
3104         }
3105         (*cb)->setCurrentStreamConfig(ss, hs);
3106     }
3107     *previewStream = streams[0];
3108     ASSERT_TRUE(ret.isOk());
3109 }
3110 
3111 void CameraAidlTest::verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session,
3112                                            const std::vector<int32_t>& streamIds,
3113                                            const std::shared_ptr<DeviceCb>& cb,
3114                                            uint32_t streamConfigCounter) {
3115     ndk::ScopedAStatus ret =
3116             session->signalStreamFlush(streamIds, /*streamConfigCounter*/ streamConfigCounter);
3117     ASSERT_TRUE(ret.isOk());
3118     cb->waitForBuffersReturned();
3119 }
3120 
3121 void CameraAidlTest::configureStreams(
3122         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
3123         PixelFormat format, std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream,
3124         std::vector<HalStream>* halStreams, bool* supportsPartialResults,
3125         int32_t* partialResultCount, std::set<int32_t>* halBufManagedStreamIds,
3126         std::shared_ptr<DeviceCb>* outCb, uint32_t streamConfigCounter, bool maxResolution,
3127         RequestAvailableDynamicRangeProfilesMap dynamicRangeProf,
3128         RequestAvailableColorSpaceProfilesMap colorSpaceProf) {
3129     ASSERT_NE(nullptr, session);
3130     ASSERT_NE(nullptr, halStreams);
3131     ASSERT_NE(nullptr, previewStream);
3132     ASSERT_NE(nullptr, supportsPartialResults);
3133     ASSERT_NE(nullptr, partialResultCount);
3134     ASSERT_NE(nullptr, halBufManagedStreamIds);
3135     ASSERT_NE(nullptr, outCb);
3136 
3137     ALOGI("configureStreams: Testing camera device %s", name.c_str());
3138 
3139     std::vector<AvailableStream> outputStreams;
3140     std::shared_ptr<ICameraDevice> device;
3141 
3142     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
3143     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
3144           ret.getServiceSpecificError());
3145     ASSERT_TRUE(ret.isOk());
3146     ASSERT_NE(device, nullptr);
3147 
3148     CameraMetadata metadata;
3149     camera_metadata_t* staticMeta;
3150     ret = device->getCameraCharacteristics(&metadata);
3151     ASSERT_TRUE(ret.isOk());
3152     staticMeta = clone_camera_metadata(
3153             reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
3154     ASSERT_NE(staticMeta, nullptr);
3155 
3156     camera_metadata_ro_entry entry;
3157     auto status =
3158             find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
3159     if ((0 == status) && (entry.count > 0)) {
3160         *partialResultCount = entry.data.i32[0];
3161         *supportsPartialResults = (*partialResultCount > 1);
3162     }
3163 
3164     *outCb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
3165     ret = device->open(*outCb, session);
3166     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
3167           ret.getServiceSpecificError());
3168     ASSERT_TRUE(ret.isOk());
3169     ASSERT_NE(*session, nullptr);
3170 
3171     BufferManagerType bufferManagerType = BufferManagerType::FRAMEWORK;
3172     status = find_camera_metadata_ro_entry(
3173             staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
3174     if ((0 == status) && (entry.count == 1)) {
3175         if (entry.data.u8[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5) {
3176             bufferManagerType = BufferManagerType::HAL;
3177         } else if (entry.data.u8[0] ==
3178                    ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE) {
3179             bufferManagerType = BufferManagerType::SESSION;
3180         }
3181     }
3182 
3183     outputStreams.clear();
3184     Size maxSize;
3185     if (maxResolution) {
3186         auto rc = getMaxOutputSizeForFormat(staticMeta, format, &maxSize, maxResolution);
3187         ASSERT_EQ(Status::OK, rc);
3188     } else {
3189         AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
3190             static_cast<int32_t>(format)};
3191         auto rc = getAvailableOutputStreams(staticMeta, outputStreams, &previewThreshold);
3192 
3193         ASSERT_EQ(Status::OK, rc);
3194         ASSERT_FALSE(outputStreams.empty());
3195         maxSize.width = outputStreams[0].width;
3196         maxSize.height = outputStreams[0].height;
3197     }
3198 
3200     std::vector<Stream> streams(1);
3201     streams[0] = {0,
3202                   StreamType::OUTPUT,
3203                   maxSize.width,
3204                   maxSize.height,
3205                   format,
3206                   previewStream->usage,
3207                   previewStream->dataSpace,
3208                   StreamRotation::ROTATION_0,
3209                   "",
3210                   0,
3211                   -1,
3212                   {maxResolution ? SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION
3213                                  : SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3214                   dynamicRangeProf,
3215                   ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
3216                   static_cast<int>(colorSpaceProf)};
3217 
3218     StreamConfiguration config;
3219     config.streams = streams;
3220     config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3221     config.streamConfigCounter = streamConfigCounter;
3222     config.multiResolutionInputImage = false;
3223     CameraMetadata req;
3224     RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
3225     ret = (*session)->constructDefaultRequestSettings(reqTemplate, &req);
3226     ASSERT_TRUE(ret.isOk());
3227     config.sessionParams = req;
3228 
3229     bool supported = false;
3230     ret = device->isStreamCombinationSupported(config, &supported);
3231     ASSERT_TRUE(ret.isOk());
3232     ASSERT_EQ(supported, true);
3233 
3234     ret = configureStreams(*session, config, bufferManagerType, halBufManagedStreamIds, halStreams);
3235 
3236     ASSERT_TRUE(ret.isOk());
3237 
3238     if (halBufManagedStreamIds->size() != 0) {
3239         std::vector<Stream> ss(1);
3240         std::vector<HalStream> hs(1);
3241         ss[0] = streams[0];
3242         hs[0] = (*halStreams)[0];
3243         (*outCb)->setCurrentStreamConfig(ss, hs);
3244     }
3245 
3246     *previewStream = streams[0];
3247     ASSERT_TRUE(ret.isOk());
3248 }
3249 
3250 bool CameraAidlTest::is10BitDynamicRangeCapable(const camera_metadata_t* staticMeta) {
3251     camera_metadata_ro_entry scalerEntry;
3252     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3253                                            &scalerEntry);
3254     if (rc == 0) {
3255         for (uint32_t i = 0; i < scalerEntry.count; i++) {
3256             if (scalerEntry.data.u8[i] ==
3257                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) {
3258                 return true;
3259             }
3260         }
3261     }
3262     return false;
3263 }
3264 
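// Reads every third value of ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP
// as a supported 10-bit profile, asserting that STANDARD is never advertised,
// that profiles are unique, and that HLG10 is present whenever the list is
// non-empty.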
3265 void CameraAidlTest::get10BitDynamicRangeProfiles(
3266         const camera_metadata_t* staticMeta,
3267         std::vector<RequestAvailableDynamicRangeProfilesMap>* profiles) {
3268     ASSERT_NE(nullptr, staticMeta);
3269     ASSERT_NE(nullptr, profiles);
3270     camera_metadata_ro_entry entry;
3271     std::unordered_set<int64_t> entries;
3272     int rc = find_camera_metadata_ro_entry(
3273             staticMeta, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP, &entry);
3274     ASSERT_EQ(rc, 0);
3275     ASSERT_TRUE(entry.count > 0);
3276     ASSERT_EQ(entry.count % 3, 0);
3277 
3278     for (uint32_t i = 0; i < entry.count; i += 3) {
3279         ASSERT_NE(entry.data.i64[i], ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
3280         ASSERT_EQ(entries.find(entry.data.i64[i]), entries.end());
3281         entries.insert(static_cast<int64_t>(entry.data.i64[i]));
3282         profiles->emplace_back(
3283                 static_cast<RequestAvailableDynamicRangeProfilesMap>(entry.data.i64[i]));
3284     }
3285 
3286     if (!entries.empty()) {
3287         ASSERT_NE(entries.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10),
3288                   entries.end());
3289     }
3290 }
3291 
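// Verifies the static HDR metadata attached to each output buffer for the given
// profile: HLG10 buffers carry none of the SMPTE blobs, HDR10 carries SMPTE 2086,
// HDR10+ carries SMPTE 2094-40, and the Dolby Vision profiles carry SMPTE 2094-10.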
3292 void CameraAidlTest::verify10BitMetadata(
3293         HandleImporter& importer, const InFlightRequest& request,
3294         aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap
3295                 profile) {
3296     for (auto b : request.resultOutputBuffers) {
3297         importer.importBuffer(b.buffer.buffer);
3298         bool smpte2086Present = importer.isSmpte2086Present(b.buffer.buffer);
3299         bool smpte2094_10Present = importer.isSmpte2094_10Present(b.buffer.buffer);
3300         bool smpte2094_40Present = importer.isSmpte2094_40Present(b.buffer.buffer);
3301 
3302         switch (static_cast<int64_t>(profile)) {
3303             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
3304                 ASSERT_FALSE(smpte2086Present);
3305                 ASSERT_FALSE(smpte2094_10Present);
3306                 ASSERT_FALSE(smpte2094_40Present);
3307                 break;
3308             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
3309                 ASSERT_TRUE(smpte2086Present);
3310                 ASSERT_FALSE(smpte2094_10Present);
3311                 ASSERT_FALSE(smpte2094_40Present);
3312                 break;
3313             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
3314                 ASSERT_FALSE(smpte2094_10Present);
3315                 ASSERT_TRUE(smpte2094_40Present);
3316                 break;
3317             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
3318             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
3319             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
3320             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
3321             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
3322             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
3323             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
3324             case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
3325                 ASSERT_FALSE(smpte2086Present);
3326                 ASSERT_TRUE(smpte2094_10Present);
3327                 ASSERT_FALSE(smpte2094_40Present);
3328                 break;
3329             default:
3330                 ALOGE("%s: Unexpected 10-bit dynamic range profile: %" PRId64, __FUNCTION__,
3331                       profile);
3332                 ADD_FAILURE();
3333         }
3334         importer.freeBuffer(b.buffer.buffer);
3335     }
3336 }
3337 
3338 bool CameraAidlTest::reportsColorSpaces(const camera_metadata_t* staticMeta) {
3339     camera_metadata_ro_entry capabilityEntry;
3340     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3341                                            &capabilityEntry);
3342     if (rc == 0) {
3343         for (uint32_t i = 0; i < capabilityEntry.count; i++) {
3344             if (capabilityEntry.data.u8[i] ==
3345                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
3346                 return true;
3347             }
3348         }
3349     }
3350     return false;
3351 }
3352 
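// Collects the distinct color spaces advertised in
// ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP. The entry is parsed as a flat
// list of 3-element groups (color space, image format, dynamic range profile bitmap);
// only the color space element is recorded here, and UNSPECIFIED must not be listed.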
3353 void CameraAidlTest::getColorSpaceProfiles(
3354         const camera_metadata_t* staticMeta,
3355         std::vector<RequestAvailableColorSpaceProfilesMap>* profiles) {
3356     ASSERT_NE(nullptr, staticMeta);
3357     ASSERT_NE(nullptr, profiles);
3358     camera_metadata_ro_entry entry;
3359     int rc = find_camera_metadata_ro_entry(
3360             staticMeta, ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry);
3361     ASSERT_EQ(rc, 0);
3362     ASSERT_TRUE(entry.count > 0);
3363     ASSERT_EQ(entry.count % 3, 0);
3364 
3365     for (uint32_t i = 0; i < entry.count; i += 3) {
3366         ASSERT_NE(entry.data.i64[i],
3367                 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
3368         if (std::find(profiles->begin(), profiles->end(),
3369                 static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]))
3370                 == profiles->end()) {
3371             profiles->emplace_back(
3372                     static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]));
3373         }
3374     }
3375 }
3376 
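// Scans ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, where each 3-element
// group holds (color space, public image format, bitmap of compatible dynamic range
// profiles), and returns true when some entry matches the requested color space, the
// public format derived from pixelFormat, and the requested dynamic range profile.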
3377 bool CameraAidlTest::isColorSpaceCompatibleWithDynamicRangeAndPixelFormat(
3378         const camera_metadata_t* staticMeta,
3379         RequestAvailableColorSpaceProfilesMap colorSpace,
3380         RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile,
3381         aidl::android::hardware::graphics::common::PixelFormat pixelFormat) {
3382     camera_metadata_ro_entry entry;
3383     int rc = find_camera_metadata_ro_entry(
3384             staticMeta, ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry);
3385 
3386     if (rc == 0) {
3387         for (uint32_t i = 0; i < entry.count; i += 3) {
3388             RequestAvailableColorSpaceProfilesMap entryColorSpace =
3389                     static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]);
3390             int64_t dynamicRangeProfileI64 = static_cast<int64_t>(dynamicRangeProfile);
3391             int32_t entryImageFormat = static_cast<int32_t>(entry.data.i64[i + 1]);
3392             int32_t expectedImageFormat = halFormatToPublicFormat(pixelFormat);
3393             if (entryColorSpace == colorSpace
3394                     && (entry.data.i64[i + 2] & dynamicRangeProfileI64) != 0
3395                     && entryImageFormat == expectedImageFormat) {
3396                 return true;
3397             }
3398         }
3399     }
3400 
3401     return false;
3402 }
3403 
3404 const char* CameraAidlTest::getColorSpaceProfileString(
3405         RequestAvailableColorSpaceProfilesMap colorSpace) {
3406     auto colorSpaceCast = static_cast<int>(colorSpace);
3407     switch (colorSpaceCast) {
3408         case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED:
3409             return "UNSPECIFIED";
3410         case ColorSpaceNamed::SRGB:
3411             return "SRGB";
3412         case ColorSpaceNamed::LINEAR_SRGB:
3413             return "LINEAR_SRGB";
3414         case ColorSpaceNamed::EXTENDED_SRGB:
3415             return "EXTENDED_SRGB";
3416         case ColorSpaceNamed::LINEAR_EXTENDED_SRGB:
3417             return "LINEAR_EXTENDED_SRGB";
3418         case ColorSpaceNamed::BT709:
3419             return "BT709";
3420         case ColorSpaceNamed::BT2020:
3421             return "BT2020";
3422         case ColorSpaceNamed::DCI_P3:
3423             return "DCI_P3";
3424         case ColorSpaceNamed::DISPLAY_P3:
3425             return "DISPLAY_P3";
3426         case ColorSpaceNamed::NTSC_1953:
3427             return "NTSC_1953";
3428         case ColorSpaceNamed::SMPTE_C:
3429             return "SMPTE_C";
3430         case ColorSpaceNamed::ADOBE_RGB:
3431             return "ADOBE_RGB";
3432         case ColorSpaceNamed::PRO_PHOTO_RGB:
3433             return "PRO_PHOTO_RGB";
3434         case ColorSpaceNamed::ACES:
3435             return "ACES";
3436         case ColorSpaceNamed::ACESCG:
3437             return "ACESCG";
3438         case ColorSpaceNamed::CIE_XYZ:
3439             return "CIE_XYZ";
3440         case ColorSpaceNamed::CIE_LAB:
3441             return "CIE_LAB";
3442         case ColorSpaceNamed::BT2020_HLG:
3443             return "BT2020_HLG";
3444         case ColorSpaceNamed::BT2020_PQ:
3445             return "BT2020_PQ";
3446         default:
3447             return "INVALID";
3448     }
3449 
3450     return "INVALID";
3451 }
3452 
3453 const char* CameraAidlTest::getDynamicRangeProfileString(
3454         RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile) {
3455     auto dynamicRangeProfileCast =
3456             static_cast<camera_metadata_enum_android_request_available_dynamic_range_profiles_map>
3457             (dynamicRangeProfile);
3458     switch (dynamicRangeProfileCast) {
3459         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD:
3460             return "STANDARD";
3461         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
3462             return "HLG10";
3463         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
3464             return "HDR10";
3465         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
3466             return "HDR10_PLUS";
3467         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
3468             return "DOLBY_VISION_10B_HDR_REF";
3469         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
3470             return "DOLBY_VISION_10B_HDR_REF_P0";
3471         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
3472             return "DOLBY_VISION_10B_HDR_OEM";
3473         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
3474             return "DOLBY_VISION_10B_HDR_OEM_P0";
3475         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
3476             return "DOLBY_VISION_8B_HDR_REF";
3477         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
3478             return "DOLBY_VISION_8B_HDR_REF_P0";
3479         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
3480             return "DOLBY_VISION_8B_HDR_OEM";
3481         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
3482             return "DOLBY_VISION_8B_HDR_OEM_P0";
3483         default:
3484             return "INVALID";
3485     }
3486 
3487     return "INVALID";
3488 }
3489 
3490 int32_t CameraAidlTest::halFormatToPublicFormat(
3491         aidl::android::hardware::graphics::common::PixelFormat pixelFormat) {
3492     // This is an incomplete mapping from HAL pixel format to public image format; it assumes
3493     // the default dataspaces returned by getDataspace().
3494     switch (pixelFormat) {
3495     case PixelFormat::BLOB:
3496         return 0x100; // ImageFormat.JPEG
3497     case PixelFormat::Y16:
3498         return 0x44363159; // ImageFormat.DEPTH16
3499     default:
3500         return static_cast<int32_t>(pixelFormat);
3501     }
3502 }
3503 
3504 bool CameraAidlTest::supportZoomSettingsOverride(const camera_metadata_t* staticMeta) {
3505     camera_metadata_ro_entry availableOverridesEntry;
3506     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
3507                                            &availableOverridesEntry);
3508     if (rc == 0) {
3509         for (size_t i = 0; i < availableOverridesEntry.count; i++) {
3510             if (availableOverridesEntry.data.i32[i] == ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
3511                 return true;
3512             }
3513         }
3514     }
3515     return false;
3516 }
3517 
3518 bool CameraAidlTest::supportsCroppedRawUseCase(const camera_metadata_t *staticMeta) {
3519     camera_metadata_ro_entry availableStreamUseCasesEntry;
3520     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
3521                                            &availableStreamUseCasesEntry);
3522     if (rc == 0) {
3523         for (size_t i = 0; i < availableStreamUseCasesEntry.count; i++) {
3524             if (availableStreamUseCasesEntry.data.i64[i] ==
3525                     ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW) {
3526                 return true;
3527             }
3528         }
3529     }
3530     return false;
3531 }
3532 
3533 bool CameraAidlTest::supportsStreamUseCaseCap(const camera_metadata_t* staticMeta) {
3534     camera_metadata_ro_entry entry;
3535     int retcode = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3536                                                 &entry);
3537     bool hasStreamUseCaseCap = false;
3538     if ((0 == retcode) && (entry.count > 0)) {
3539         if (std::find(entry.data.u8, entry.data.u8 + entry.count,
3540                       ANDROID_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE) !=
3541             entry.data.u8 + entry.count) {
3542             hasStreamUseCaseCap = true;
3543         }
3544     }
3545     return hasStreamUseCaseCap;
3546 }
3547 
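// Returns true when ANDROID_SYNC_MAX_LATENCY reports PER_FRAME_CONTROL, meaning
// settings applied in a request are expected to take effect on that same frame.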
3548 bool CameraAidlTest::isPerFrameControl(const camera_metadata_t* staticMeta) {
3549     camera_metadata_ro_entry syncLatencyEntry;
3550     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_SYNC_MAX_LATENCY,
3551                                            &syncLatencyEntry);
3552     if (rc == 0 && syncLatencyEntry.data.i32[0] == ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL) {
3553         return true;
3554     }
3555     return false;
3556 }
3557 
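// Thin wrapper around configureSingleStream(): configures one preview stream with
// HWCOMPOSER consumer usage and the PREVIEW request template.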
3558 void CameraAidlTest::configurePreviewStream(
3559         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
3560         const AvailableStream* previewThreshold, std::shared_ptr<ICameraDeviceSession>* session,
3561         Stream* previewStream, std::vector<HalStream>* halStreams, bool* supportsPartialResults,
3562         int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb,
3563         uint32_t streamConfigCounter) {
3564     configureSingleStream(name, provider, previewThreshold, GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
3565                           RequestTemplate::PREVIEW, session, previewStream, halStreams,
3566                           supportsPartialResults, partialResultCount, useHalBufManager, cb,
3567                           streamConfigCounter);
3568 }
3569 
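// Returns Status::OK when ANDROID_REQUEST_AVAILABLE_CAPABILITIES contains
// OFFLINE_PROCESSING, OPERATION_NOT_SUPPORTED when it does not, and
// ILLEGAL_ARGUMENT when the static metadata cannot be queried.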
3570 Status CameraAidlTest::isOfflineSessionSupported(const camera_metadata_t* staticMeta) {
3571     Status ret = Status::OPERATION_NOT_SUPPORTED;
3572     if (nullptr == staticMeta) {
3573         return Status::ILLEGAL_ARGUMENT;
3574     }
3575 
3576     camera_metadata_ro_entry entry;
3577     int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3578                                            &entry);
3579     if (0 != rc) {
3580         return Status::ILLEGAL_ARGUMENT;
3581     }
3582 
3583     for (size_t i = 0; i < entry.count; i++) {
3584         if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING == entry.data.u8[i]) {
3585             ret = Status::OK;
3586             break;
3587         }
3588     }
3589 
3590     return ret;
3591 }
3592 
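// Opens the camera device, selects the largest output size that matches the given
// threshold, and configures a single CPU-readable still capture stream whose buffer
// size is the JPEG buffer size from the static metadata. The HAL-managed stream ids
// (if any), partial result count, and device callback are reported back to the caller.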
3593 void CameraAidlTest::configureOfflineStillStream(
3594         const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
3595         const AvailableStream* threshold, std::shared_ptr<ICameraDeviceSession>* session,
3596         Stream* stream, std::vector<HalStream>* halStreams, bool* supportsPartialResults,
3597         int32_t* partialResultCount, std::shared_ptr<DeviceCb>* outCb, int32_t* jpegBufferSize,
3598         std::set<int32_t>* halBufManagedStreamIds) {
3599     ASSERT_NE(nullptr, session);
3600     ASSERT_NE(nullptr, halStreams);
3601     ASSERT_NE(nullptr, stream);
3602     ASSERT_NE(nullptr, supportsPartialResults);
3603     ASSERT_NE(nullptr, partialResultCount);
3604     ASSERT_NE(nullptr, outCb);
3605     ASSERT_NE(nullptr, jpegBufferSize);
3606     ASSERT_NE(nullptr, halBufManagedStreamIds);
3607 
3608     std::vector<AvailableStream> outputStreams;
3609     std::shared_ptr<ICameraDevice> cameraDevice;
3610     ALOGI("configureStreams: Testing camera device %s", name.c_str());
3611 
3612     ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &cameraDevice);
3613     ASSERT_TRUE(ret.isOk());
3614     ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
3615           ret.getServiceSpecificError());
3616     ASSERT_NE(cameraDevice, nullptr);
3617 
3618     CameraMetadata metadata;
3619     ret = cameraDevice->getCameraCharacteristics(&metadata);
3620     ASSERT_TRUE(ret.isOk());
3621     camera_metadata_t* staticMeta = clone_camera_metadata(
3622             reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
3623     ASSERT_NE(nullptr, staticMeta);
3624 
3625     camera_metadata_ro_entry entry;
3626     auto status =
3627             find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
3628     if ((0 == status) && (entry.count > 0)) {
3629         *partialResultCount = entry.data.i32[0];
3630         *supportsPartialResults = (*partialResultCount > 1);
3631     }
3632 
3633     BufferManagerType bufferManagerType = BufferManagerType::FRAMEWORK;
3634     status = find_camera_metadata_ro_entry(
3635             staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
3636     if ((0 == status) && (entry.count == 1)) {
3637         if (entry.data.u8[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5) {
3638             bufferManagerType = BufferManagerType::HAL;
3639         } else if (entry.data.u8[0] ==
3640                    ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE) {
3641             bufferManagerType = BufferManagerType::SESSION;
3642         }
3643     }
3644 
3645     auto st = getJpegBufferSize(staticMeta, jpegBufferSize);
3646     ASSERT_EQ(st, Status::OK);
3647 
3648     *outCb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
3649     ret = cameraDevice->open(*outCb, session);
3650     ASSERT_TRUE(ret.isOk());
3651     ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
3652           ret.getServiceSpecificError());
3653     ASSERT_NE(*session, nullptr);
3654 
3655     outputStreams.clear();
3656     auto rc = getAvailableOutputStreams(staticMeta, outputStreams, threshold);
3657     ASSERT_EQ(Status::OK, rc);
3658     ASSERT_FALSE(outputStreams.empty());
3659 
3660     size_t idx = 0;
3661     int currLargest = outputStreams[0].width * outputStreams[0].height;
3662     for (size_t i = 0; i < outputStreams.size(); i++) {
3663         int area = outputStreams[i].width * outputStreams[i].height;
3664         if (area > currLargest) {
3665             idx = i;
3666             currLargest = area;
3667         }
3668     }
3669 
3670     Dataspace dataspace = getDataspace(static_cast<PixelFormat>(outputStreams[idx].format));
3671 
3672     std::vector<Stream> streams(/*size*/ 1);
3673     streams[0] = {/*id*/ 0,
3674                   StreamType::OUTPUT,
3675                   outputStreams[idx].width,
3676                   outputStreams[idx].height,
3677                   static_cast<PixelFormat>(outputStreams[idx].format),
3678                   static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
3679                           GRALLOC1_CONSUMER_USAGE_CPU_READ),
3680                   dataspace,
3681                   StreamRotation::ROTATION_0,
3682                   /*physicalId*/ std::string(),
3683                   *jpegBufferSize,
3684                   /*groupId*/ 0,
3685                   {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3686                   RequestAvailableDynamicRangeProfilesMap::
3687                           ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
3688                   ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
3689                   static_cast<int>(
3690                           RequestAvailableColorSpaceProfilesMap::
3691                                   ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
3692 
3693     StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE, CameraMetadata()};
3694 
3695     ret = configureStreams(*session, config, bufferManagerType, halBufManagedStreamIds, halStreams);
3696 
3697     ASSERT_TRUE(ret.isOk());
3698 
3699     if (halBufManagedStreamIds->size() != 0) {
3700         (*outCb)->setCurrentStreamConfig(streams, *halStreams);
3701     }
3702 
3703     *stream = streams[0];
3704 }
3705 
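// Re-points every in-flight request at the given result metadata queue so that
// subsequent capture results are read from the correct fmq.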
3706 void CameraAidlTest::updateInflightResultQueue(
3707         const std::shared_ptr<ResultMetadataQueue>& resultQueue) {
3708     std::unique_lock<std::mutex> l(mLock);
3709     for (auto& it : mInflightMap) {
3710         it.second->resultQueue = resultQueue;
3711     }
3712 }
3713 
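// For each camera that advertises color space profiles, verifies that the requested
// color space / dynamic range profile combination is supported for
// IMPLEMENTATION_DEFINED buffers, configures a preview stream with that color space,
// submits enough requests to fill the HAL pipeline (halStreams[0].maxBuffers), and
// waits for all results. For non-STANDARD (HDR) profiles the returned buffers are
// additionally checked with verify10BitMetadata().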
3714 void CameraAidlTest::processColorSpaceRequest(
3715         RequestAvailableColorSpaceProfilesMap colorSpace,
3716         RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile) {
3717     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3718     CameraMetadata settings;
3719 
3720     for (const auto& name : cameraDeviceNames) {
3721         std::string version, deviceId;
3722         ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
3723         CameraMetadata meta;
3724         std::shared_ptr<ICameraDevice> device;
3725         openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
3726         camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
3727 
3728         // Device does not report color spaces, skip.
3729         if (!reportsColorSpaces(staticMeta)) {
3730             ndk::ScopedAStatus ret = mSession->close();
3731             mSession = nullptr;
3732             ASSERT_TRUE(ret.isOk());
3733             ALOGV("Camera %s does not report color spaces", name.c_str());
3734             continue;
3735         }
3736         std::vector<RequestAvailableColorSpaceProfilesMap> profileList;
3737         getColorSpaceProfiles(staticMeta, &profileList);
3738         ASSERT_FALSE(profileList.empty());
3739 
3740         // Device does not support color space / dynamic range profile, skip
3741         if (std::find(profileList.begin(), profileList.end(), colorSpace)
3742                 == profileList.end() || !isColorSpaceCompatibleWithDynamicRangeAndPixelFormat(
3743                         staticMeta, colorSpace, dynamicRangeProfile,
3744                         PixelFormat::IMPLEMENTATION_DEFINED)) {
3745             ndk::ScopedAStatus ret = mSession->close();
3746             mSession = nullptr;
3747             ASSERT_TRUE(ret.isOk());
3748             ALOGV("Camera %s does not support color space %s with dynamic range profile %s and "
3749                   "pixel format %d", name.c_str(), getColorSpaceProfileString(colorSpace),
3750                   getDynamicRangeProfileString(dynamicRangeProfile),
3751                   PixelFormat::IMPLEMENTATION_DEFINED);
3752             continue;
3753         }
3754 
3755         ALOGV("Camera %s supports color space %s with dynamic range profile %s and pixel format %d",
3756                 name.c_str(), getColorSpaceProfileString(colorSpace),
3757                 getDynamicRangeProfileString(dynamicRangeProfile),
3758                 PixelFormat::IMPLEMENTATION_DEFINED);
3759 
3760         // If an HDR dynamic range profile is reported in the color space profile list,
3761         // the device must also have the dynamic range profiles map capability and contain
3762         // the dynamic range profile in the map.
3763         if (dynamicRangeProfile != static_cast<RequestAvailableDynamicRangeProfilesMap>(
3764                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
3765             ASSERT_TRUE(is10BitDynamicRangeCapable(staticMeta));
3766 
3767             std::vector<RequestAvailableDynamicRangeProfilesMap> dynamicRangeProfiles;
3768             get10BitDynamicRangeProfiles(staticMeta, &dynamicRangeProfiles);
3769             ASSERT_FALSE(dynamicRangeProfiles.empty());
3770             ASSERT_FALSE(std::find(dynamicRangeProfiles.begin(), dynamicRangeProfiles.end(),
3771                     dynamicRangeProfile) == dynamicRangeProfiles.end());
3772         }
3773 
3774         CameraMetadata req;
3775         android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
3776         ndk::ScopedAStatus ret =
3777                 mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
3778         ASSERT_TRUE(ret.isOk());
3779 
3780         const camera_metadata_t* metadata =
3781                 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
3782         size_t expectedSize = req.metadata.size();
3783         int result = validate_camera_metadata_structure(metadata, &expectedSize);
3784         ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
3785 
3786         size_t entryCount = get_camera_metadata_entry_count(metadata);
3787         ASSERT_GT(entryCount, 0u);
3788         defaultSettings = metadata;
3789 
3790         const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
3791         uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
3792         settings.metadata = std::vector(
3793                 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
3794         overrideRotateAndCrop(&settings);
3795 
3796         ret = mSession->close();
3797         mSession = nullptr;
3798         ASSERT_TRUE(ret.isOk());
3799 
3800         std::vector<HalStream> halStreams;
3801         bool supportsPartialResults = false;
3802         std::set<int32_t> halBufManagedStreamIds;
3803         int32_t partialResultCount = 0;
3804         Stream previewStream;
3805         std::shared_ptr<DeviceCb> cb;
3806 
3807         previewStream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
3808                 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
3809         configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
3810                          &previewStream, &halStreams, &supportsPartialResults, &partialResultCount,
3811                          &halBufManagedStreamIds, &cb, 0,
3812                          /*maxResolution*/ false, dynamicRangeProfile, colorSpace);
3813         ASSERT_NE(mSession, nullptr);
3814 
3815         ::aidl::android::hardware::common::fmq::MQDescriptor<
3816                 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
3817                 descriptor;
3818         auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
3819         ASSERT_TRUE(resultQueueRet.isOk());
3820 
3821         std::shared_ptr<ResultMetadataQueue> resultQueue =
3822                 std::make_shared<ResultMetadataQueue>(descriptor);
3823         if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
3824             ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
3825             resultQueue = nullptr;
3826             // Don't use the queue onwards.
3827         }
3828 
3829         mInflightMap.clear();
3830         // Submit enough requests to fill the HAL inflight queue
3831         std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
3832 
3833         for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
3834             std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
3835                     static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
3836                     partialResultCount, std::unordered_set<std::string>(), resultQueue);
3837 
3838             CaptureRequest& request = requests[requestId];
3839             std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
3840             outputBuffers.resize(halStreams.size());
3841 
3842             size_t k = 0;
3843             inflightReq->mOutstandingBufferIds.resize(halStreams.size());
3844             std::vector<buffer_handle_t> graphicBuffers;
3845             graphicBuffers.reserve(halStreams.size());
3846 
3847             auto bufferId = requestId + 1;  // Buffer id value 0 is not valid
3848             for (const auto& halStream : halStreams) {
3849                 buffer_handle_t buffer_handle;
3850                 if (contains(halBufManagedStreamIds, halStream.id)) {
3851                     outputBuffers[k] = {halStream.id,   0,
3852                                         NativeHandle(), BufferStatus::OK,
3853                                         NativeHandle(), NativeHandle()};
3854                 } else {
3855                     auto usage = ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
3856                             static_cast<uint64_t>(halStream.producerUsage),
3857                             static_cast<uint64_t>(halStream.consumerUsage)));
3858                     allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
3859                                             halStream.overrideFormat, &buffer_handle);
3860 
3861                     inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
3862                     graphicBuffers.push_back(buffer_handle);
3863                     outputBuffers[k] = {
3864                             halStream.id,     bufferId,       android::makeToAidl(buffer_handle),
3865                             BufferStatus::OK, NativeHandle(), NativeHandle()};
3866                 }
3867                 k++;
3868             }
3869 
3870             request.inputBuffer = {
3871                     -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
3872             request.frameNumber = bufferId;
3873             request.fmqSettingsSize = 0;
3874             request.settings = settings;
3875             request.inputWidth = 0;
3876             request.inputHeight = 0;
3877 
3878             {
3879                 std::unique_lock<std::mutex> l(mLock);
3880                 mInflightMap[bufferId] = inflightReq;
3881             }
3882         }
3883 
3884         int32_t numRequestProcessed = 0;
3885         std::vector<BufferCache> cachesToRemove;
3886         ndk::ScopedAStatus returnStatus =
3887             mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
3888         ASSERT_TRUE(returnStatus.isOk());
3889         ASSERT_EQ(numRequestProcessed, requests.size());
3890 
3891         returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
3892                 std::vector<int32_t> {halStreams[0].id});
3893         ASSERT_TRUE(returnStatus.isOk());
3894 
3895         // We are keeping frame numbers and buffer ids consistent. Buffer id value of 0
3896         // is used to indicate a buffer that is not present/available so buffer ids as well
3897         // as frame numbers begin with 1.
3898         for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
3899             const auto& inflightReq = mInflightMap[frameNumber];
3900             std::unique_lock<std::mutex> l(mLock);
3901             while (!inflightReq->errorCodeValid &&
3902                     ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
3903                 auto timeout = std::chrono::system_clock::now() +
3904                                 std::chrono::seconds(kStreamBufferTimeoutSec);
3905                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
3906             }
3907 
3908             ASSERT_FALSE(inflightReq->errorCodeValid);
3909             ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
3910 
3911             if (dynamicRangeProfile != static_cast<RequestAvailableDynamicRangeProfilesMap>(
3912                     ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
3913                 verify10BitMetadata(mHandleImporter, *inflightReq, dynamicRangeProfile);
3914             }
3915         }
3916 
3917         if (halBufManagedStreamIds.size() != 0) {
3918             std::vector<int32_t> streamIds;
3919             for (size_t i = 0; i < halStreams.size(); i++) {
3920                 if (contains(halBufManagedStreamIds, halStreams[i].id)) {
3921                     streamIds.emplace_back(halStreams[i].id);
3922                 }
3923             }
3924             mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
3925             cb->waitForBuffersReturned();
3926         }
3927 
3928         ret = mSession->close();
3929         mSession = nullptr;
3930         ASSERT_TRUE(ret.isOk());
3931     }
3932 }
3933 
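// Submits frameCount preview requests with ANDROID_CONTROL_SETTINGS_OVERRIDE toggled
// between ZOOM and OFF per overrideSequence, then verifies that each capture result
// reports the override state expected by expectedResults together with a plausible
// ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER. Cameras without zoom settings
// override support or per-frame control are skipped.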
3934 void CameraAidlTest::processZoomSettingsOverrideRequests(
3935         int32_t frameCount, const bool *overrideSequence, const bool *expectedResults) {
3936     std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3937     AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
3938                                         static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
3939     int64_t bufferId = 1;
3940     int32_t frameNumber = 1;
3941     CameraMetadata settings;
3942     ndk::ScopedAStatus ret;
3943     for (const auto& name : cameraDeviceNames) {
3944         CameraMetadata meta;
3945         std::shared_ptr<ICameraDevice> device;
3946         openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/,
3947                                &device /*out*/);
3948         camera_metadata_t* staticMeta =
3949                 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
3950 
3951         ret = mSession->close();
3952         mSession = nullptr;
3953         ASSERT_TRUE(ret.isOk());
3954 
3955         // Device does not support zoom settings override, skip.
3956         if (!supportZoomSettingsOverride(staticMeta)) {
3957             continue;
3958         }
3959 
3960         if (!isPerFrameControl(staticMeta)) {
3961             continue;
3962         }
3963 
3964         bool supportsPartialResults = false;
3965         bool useHalBufManager = false;
3966         int32_t partialResultCount = 0;
3967         Stream previewStream;
3968         std::vector<HalStream> halStreams;
3969         std::shared_ptr<DeviceCb> cb;
3970         configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
3971                                &previewStream /*out*/, &halStreams /*out*/,
3972                                &supportsPartialResults /*out*/, &partialResultCount /*out*/,
3973                                &useHalBufManager /*out*/, &cb /*out*/);
3974         ASSERT_NE(mSession, nullptr);
3975 
3976         ::aidl::android::hardware::common::fmq::MQDescriptor<
3977                 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
3978                 descriptor;
3979         auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
3980         ASSERT_TRUE(resultQueueRet.isOk());
3981 
3982         std::shared_ptr<ResultMetadataQueue> resultQueue =
3983                 std::make_shared<ResultMetadataQueue>(descriptor);
3984         if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
3985             ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
3986             resultQueue = nullptr;
3987             // Don't use the queue onwards.
3988         }
3989 
3990         ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
3991         ASSERT_TRUE(ret.isOk());
3992 
3993         mInflightMap.clear();
3994         ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
3995         std::vector<CaptureRequest> requests(frameCount);
3996         std::vector<buffer_handle_t> buffers(frameCount);
3997         std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(frameCount);
3998         std::vector<CameraMetadata> requestSettings(frameCount);
3999 
4000         for (int32_t i = 0; i < frameCount; i++) {
4001             std::unique_lock<std::mutex> l(mLock);
4002             CaptureRequest& request = requests[i];
4003             std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
4004             outputBuffers.resize(1);
4005             StreamBuffer& outputBuffer = outputBuffers[0];
4006 
4007             if (useHalBufManager) {
4008                 outputBuffer = {halStreams[0].id, 0,
4009                                 NativeHandle(),   BufferStatus::OK,
4010                                 NativeHandle(),   NativeHandle()};
4011             } else {
4012                 allocateGraphicBuffer(previewStream.width, previewStream.height,
4013                                       ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
4014                                               static_cast<uint64_t>(halStreams[0].producerUsage),
4015                                               static_cast<uint64_t>(halStreams[0].consumerUsage))),
4016                                       halStreams[0].overrideFormat, &buffers[i]);
4017                 outputBuffer = {halStreams[0].id, bufferId + i,   ::android::makeToAidl(buffers[i]),
4018                                 BufferStatus::OK, NativeHandle(), NativeHandle()};
4019             }
4020 
4021             // Set appropriate settings override tag
4022             requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
4023             int32_t settingsOverride = overrideSequence[i] ?
4024                     ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM : ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
4025             ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
4026                     &settingsOverride, 1));
4027             camera_metadata_t* metaBuffer = requestMeta.release();
4028             uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
4029             requestSettings[i].metadata = std::vector(
4030                     rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
4031             overrideRotateAndCrop(&(requestSettings[i]));
4032             request.frameNumber = frameNumber + i;
4033             request.fmqSettingsSize = 0;
4034             request.settings = requestSettings[i];
4035             request.inputBuffer = {
4036                     -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
4037 
4038             inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
4039                                                                 partialResultCount, resultQueue);
4040             mInflightMap[frameNumber + i] = inflightReqs[i];
4041         }
4042 
4043         int32_t numRequestProcessed = 0;
4044         std::vector<BufferCache> cachesToRemove;
4045 
4046         ndk::ScopedAStatus returnStatus =
4047                 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
4048         ASSERT_TRUE(returnStatus.isOk());
4049         ASSERT_EQ(numRequestProcessed, frameCount);
4050 
4051         for (size_t i = 0; i < frameCount; i++) {
4052             std::unique_lock<std::mutex> l(mLock);
4053             while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
4054                                                         (!inflightReqs[i]->haveResultMetadata))) {
4055                 auto timeout = std::chrono::system_clock::now() +
4056                                std::chrono::seconds(kStreamBufferTimeoutSec);
4057                 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
4058             }
4059 
4060             ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
4061             ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
4062             ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
4063             ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
4064             ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_CONTROL_SETTINGS_OVERRIDE));
4065             camera_metadata_entry_t overrideResult =
4066                     inflightReqs[i]->collectedResult.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
4067             ASSERT_EQ(overrideResult.data.i32[0] == ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM,
4068                     expectedResults[i]);
4069             ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(
4070                     ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER));
4071             camera_metadata_entry_t frameNumberEntry = inflightReqs[i]->collectedResult.find(
4072                     ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER);
4073             ALOGV("%s: i %zu, expcetedResults[i] %d, overrideResult is %d, frameNumber %d",
4074                   __FUNCTION__, i, expectedResults[i], overrideResult.data.i32[0],
4075                   frameNumberEntry.data.i32[0]);
4076             if (expectedResults[i]) {
4077                 ASSERT_GT(frameNumberEntry.data.i32[0], inflightReqs[i]->frameNumber);
4078             } else {
4079                 ASSERT_EQ(frameNumberEntry.data.i32[0], frameNumber + i);
4080             }
4081         }
4082 
4083         ret = mSession->close();
4084         mSession = nullptr;
4085         ASSERT_TRUE(ret.isOk());
4086     }
4087 }
4088 
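// Extracts all output sizes advertised for the given format from a stream
// configuration tag (e.g. ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS), whose
// entries are (format, width, height, direction) tuples.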
4089 void CameraAidlTest::getSupportedSizes(const camera_metadata_t* ch, uint32_t tag, int32_t format,
4090                                        std::vector<std::tuple<size_t, size_t>>* sizes /*out*/) {
4091     if (sizes == nullptr) {
4092         return;
4093     }
4094 
4095     camera_metadata_ro_entry entry;
4096     int retcode = find_camera_metadata_ro_entry(ch, tag, &entry);
4097     if ((0 == retcode) && (entry.count > 0)) {
4098         // Scaler entry contains 4 elements (format, width, height, type)
4099         for (size_t i = 0; i < entry.count; i += 4) {
4100             if ((entry.data.i32[i] == format) &&
4101                 (entry.data.i32[i + 3] == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT)) {
4102                 sizes->push_back(std::make_tuple(entry.data.i32[i + 1], entry.data.i32[i + 2]));
4103             }
4104         }
4105     }
4106 }
4107 
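// Looks up the duration associated with each of the given sizes for the given format
// in a (format, width, height, duration) table, e.g.
// ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS or the corresponding stall duration
// tag, depending on the tag passed in.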
4108 void CameraAidlTest::getSupportedDurations(const camera_metadata_t* ch, uint32_t tag,
4109                                            int32_t format,
4110                                            const std::vector<std::tuple<size_t, size_t>>& sizes,
4111                                            std::vector<int64_t>* durations /*out*/) {
4112     if (durations == nullptr) {
4113         return;
4114     }
4115 
4116     camera_metadata_ro_entry entry;
4117     int retcode = find_camera_metadata_ro_entry(ch, tag, &entry);
4118     if ((0 == retcode) && (entry.count > 0)) {
4119         // Duration entry contains 4 elements (format, width, height, duration)
4120         for (const auto& size : sizes) {
4121             int64_t width = std::get<0>(size);
4122             int64_t height = std::get<1>(size);
4123             for (size_t i = 0; i < entry.count; i += 4) {
4124                 if ((entry.data.i64[i] == format) && (entry.data.i64[i + 1] == width) &&
4125                     (entry.data.i64[i + 2] == height)) {
4126                     durations->push_back(entry.data.i64[i + 3]);
4127                     break;
4128                 }
4129             }
4130         }
4131     }
4132 }
4133 
4134 void CameraAidlTest::validateDefaultRequestMetadata(RequestTemplate reqTemplate,
4135                                                     const CameraMetadata& rawMetadata) {
4136     const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data();
4137     size_t expectedSize = rawMetadata.metadata.size();
4138     int result = validate_camera_metadata_structure(metadata, &expectedSize);
4139     ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
4140     verifyRequestTemplate(metadata, reqTemplate);
4141 }
4142