/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "camera_aidl_test.h"

#include <inttypes.h>

#include <CameraParameters.h>
#include <HandleImporter.h>
#include <aidl/android/hardware/camera/device/ICameraDevice.h>
#include <aidl/android/hardware/camera/metadata/CameraMetadataTag.h>
#include <aidl/android/hardware/camera/metadata/RequestAvailableColorSpaceProfilesMap.h>
#include <aidl/android/hardware/camera/metadata/RequestAvailableDynamicRangeProfilesMap.h>
#include <aidl/android/hardware/camera/metadata/SensorInfoColorFilterArrangement.h>
#include <aidl/android/hardware/camera/metadata/SensorPixelMode.h>
#include <aidl/android/hardware/camera/provider/BnCameraProviderCallback.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <com_android_internal_camera_flags.h>
#include <device_cb.h>
#include <empty_device_cb.h>
#include <grallocusage/GrallocUsageConversion.h>
#include <hardware/gralloc1.h>
#include <simple_device_cb.h>
#include <ui/Fence.h>
#include <ui/GraphicBufferAllocator.h>
#include <regex>
#include <typeinfo>
#include "utils/Errors.h"
#include <nativebase/nativebase.h>

using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
using ::aidl::android::hardware::camera::common::TorchModeStatus;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::ICameraDevice;
using ::aidl::android::hardware::camera::metadata::CameraMetadataTag;
using ::aidl::android::hardware::camera::metadata::SensorInfoColorFilterArrangement;
using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
using ::aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination;
using ::aidl::android::hardware::camera::provider::ICameraProvider;
using ::aidl::android::hardware::common::NativeHandle;
using ::android::hardware::camera::common::V1_0::helper::Size;
using ::ndk::ScopedAStatus;
using ::ndk::SpAIBinder;

namespace {
namespace flags = com::android::internal::camera::flags;

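// Splits a provider service descriptor of the form <service_name>/<type>/<id>
// into its type string and numeric id.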
bool parseProviderName(const std::string& serviceDescriptor, std::string* type /*out*/,
                       uint32_t* id /*out*/) {
    if (!type || !id) {
        ADD_FAILURE();
        return false;
    }

    // expected format: <service_name>/<type>/<id>
    std::string::size_type slashIdx1 = serviceDescriptor.find('/');
    if (slashIdx1 == std::string::npos || slashIdx1 == serviceDescriptor.size() - 1) {
        ADD_FAILURE() << "Provider name does not have / separator between name, type, and id";
        return false;
    }

    std::string::size_type slashIdx2 = serviceDescriptor.find('/', slashIdx1 + 1);
    if (slashIdx2 == std::string::npos || slashIdx2 == serviceDescriptor.size() - 1) {
        ADD_FAILURE() << "Provider name does not have / separator between type and id";
        return false;
    }

    std::string typeVal = serviceDescriptor.substr(slashIdx1 + 1, slashIdx2 - slashIdx1 - 1);

    char* endPtr;
    errno = 0;
    int64_t idVal = strtol(serviceDescriptor.c_str() + slashIdx2 + 1, &endPtr, 10);
    if (errno != 0) {
        ADD_FAILURE() << "cannot parse provider id as an integer:" << serviceDescriptor.c_str()
                      << strerror(errno) << errno;
        return false;
    }
    if (endPtr != serviceDescriptor.c_str() + serviceDescriptor.size()) {
        ADD_FAILURE() << "provider id has unexpected length " << serviceDescriptor.c_str();
        return false;
    }
    if (idVal < 0) {
        ADD_FAILURE() << "id is negative: " << serviceDescriptor.c_str() << idVal;
        return false;
    }

    *type = typeVal;
    *id = static_cast<uint32_t>(idVal);

    return true;
}
const std::vector<int64_t> kMandatoryUseCases = {
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
}  // namespace

void CameraAidlTest::SetUp() {
    std::string serviceDescriptor = GetParam();
    ALOGI("get service with name: %s", serviceDescriptor.c_str());

    bool success = ABinderProcess_setThreadPoolMaxThreadCount(5);
    ALOGI("ABinderProcess_setThreadPoolMaxThreadCount returns %s", success ? "true" : "false");
    ASSERT_TRUE(success);
    ABinderProcess_startThreadPool();

    SpAIBinder cameraProviderBinder =
            SpAIBinder(AServiceManager_waitForService(serviceDescriptor.c_str()));
    ASSERT_NE(cameraProviderBinder.get(), nullptr);

    std::shared_ptr<ICameraProvider> cameraProvider =
            ICameraProvider::fromBinder(cameraProviderBinder);
    ASSERT_NE(cameraProvider.get(), nullptr);
    mProvider = cameraProvider;
    uint32_t id;
    ASSERT_TRUE(parseProviderName(serviceDescriptor, &mProviderType, &id));

    notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
}

void CameraAidlTest::TearDown() {
    if (mSession != nullptr) {
        ndk::ScopedAStatus ret = mSession->close();
        ASSERT_TRUE(ret.isOk());
    }
}

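// Waits (up to 300ms) on each output buffer's release fence and, if the fence
// signal time is later than the recorded timestamp, updates the timestamp.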
void CameraAidlTest::waitForReleaseFence(
        std::vector<InFlightRequest::StreamBufferAndTimestamp>& resultOutputBuffers) {
    for (auto& bufferAndTimestamp : resultOutputBuffers) {
        // wait for the fence timestamp and store it along with the buffer
        android::sp<android::Fence> releaseFence = nullptr;
        const native_handle_t* releaseFenceHandle = bufferAndTimestamp.buffer.releaseFence;
        if (releaseFenceHandle != nullptr && releaseFenceHandle->numFds == 1 &&
            releaseFenceHandle->data[0] >= 0) {
            releaseFence = new android::Fence(dup(releaseFenceHandle->data[0]));
        }
        if (releaseFence && releaseFence->isValid()) {
            releaseFence->wait(/*ms*/ 300);
            nsecs_t releaseTime = releaseFence->getSignalTime();
            if (bufferAndTimestamp.timeStamp < releaseTime)
                bufferAndTimestamp.timeStamp = releaseTime;
        }
    }
}

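// Returns the provider's camera id list, merged with any external cameras reported
// through cameraDeviceStatusChange, filtered by the addSecureOnly flag.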
std::vector<std::string> CameraAidlTest::getCameraDeviceNames(
        std::shared_ptr<ICameraProvider>& provider, bool addSecureOnly) {
    std::vector<std::string> cameraDeviceNames;

    ScopedAStatus ret = provider->getCameraIdList(&cameraDeviceNames);
    if (!ret.isOk()) {
        ADD_FAILURE() << "Could not get camera id list";
    }

    // External camera devices are reported through cameraDeviceStatusChange
    struct ProviderCb : public BnCameraProviderCallback {
        ScopedAStatus cameraDeviceStatusChange(const std::string& devName,
                                               CameraDeviceStatus newStatus) override {
            ALOGI("camera device status callback name %s, status %d", devName.c_str(),
                  (int)newStatus);
            if (newStatus == CameraDeviceStatus::PRESENT) {
                externalCameraDeviceNames.push_back(devName);
            }
            return ScopedAStatus::ok();
        }

        ScopedAStatus torchModeStatusChange(const std::string&, TorchModeStatus) override {
            return ScopedAStatus::ok();
        }

        ScopedAStatus physicalCameraDeviceStatusChange(
                const std::string&, const std::string&,
                ::aidl::android::hardware::camera::common::CameraDeviceStatus) override {
            return ScopedAStatus::ok();
        }

        std::vector<std::string> externalCameraDeviceNames;
    };
    std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
    auto status = mProvider->setCallback(cb);

    for (const auto& devName : cb->externalCameraDeviceNames) {
        if (cameraDeviceNames.end() ==
            std::find(cameraDeviceNames.begin(), cameraDeviceNames.end(), devName)) {
            cameraDeviceNames.push_back(devName);
        }
    }

    std::vector<std::string> retList;
    for (auto& cameraDeviceName : cameraDeviceNames) {
        bool isSecureOnlyCamera = isSecureOnly(mProvider, cameraDeviceName);
        if (addSecureOnly) {
            if (isSecureOnlyCamera) {
                retList.emplace_back(cameraDeviceName);
            }
        } else if (!isSecureOnlyCamera) {
            retList.emplace_back(cameraDeviceName);
        }
    }
    return retList;
}

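// Returns true when the camera identified by 'name' reports itself as a hidden
// secure camera in its static characteristics.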
bool CameraAidlTest::isSecureOnly(const std::shared_ptr<ICameraProvider>& provider,
                                  const std::string& name) {
    std::shared_ptr<ICameraDevice> cameraDevice = nullptr;
    ScopedAStatus retInterface = provider->getCameraDeviceInterface(name, &cameraDevice);
    if (!retInterface.isOk()) {
        ADD_FAILURE() << "Failed to get camera device interface for " << name;
    }

    CameraMetadata cameraCharacteristics;
    ScopedAStatus retChars = cameraDevice->getCameraCharacteristics(&cameraCharacteristics);
    if (!retChars.isOk()) {
        ADD_FAILURE() << "Failed to get camera characteristics for device " << name;
    }

    camera_metadata_t* chars =
            reinterpret_cast<camera_metadata_t*>(cameraCharacteristics.metadata.data());

    SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
    Status retCameraKind = getSystemCameraKind(chars, &systemCameraKind);
    if (retCameraKind != Status::OK) {
        ADD_FAILURE() << "Failed to get camera kind for " << name;
    }

    return systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA;
}

std::map<std::string, std::string> CameraAidlTest::getCameraDeviceIdToNameMap(
        std::shared_ptr<ICameraProvider> provider) {
    std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(provider);

    std::map<std::string, std::string> idToNameMap;
    for (auto& name : cameraDeviceNames) {
        std::string version, cameraId;
        if (!matchDeviceName(name, mProviderType, &version, &cameraId)) {
            ADD_FAILURE();
        }
        idToNameMap.insert(std::make_pair(std::string(cameraId), name));
    }
    return idToNameMap;
}

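// Capture results from monochrome cameras must omit color-only tags and must
// report identical per-channel values for black level, lens shading and tonemap curves.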
void CameraAidlTest::verifyMonochromeCameraResult(
        const ::android::hardware::camera::common::V1_0::helper::CameraMetadata& metadata) {
    camera_metadata_ro_entry entry;

    // Check tags that are not applicable for monochrome camera
    ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_GREEN_SPLIT));
    ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_NEUTRAL_COLOR_POINT));
    ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_MODE));
    ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_TRANSFORM));
    ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_GAINS));

    // Check dynamicBlackLevel
    entry = metadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
    if (entry.count > 0) {
        ASSERT_EQ(entry.count, 4);
        for (size_t i = 1; i < entry.count; i++) {
            ASSERT_FLOAT_EQ(entry.data.f[i], entry.data.f[0]);
        }
    }

    // Check noiseProfile
    entry = metadata.find(ANDROID_SENSOR_NOISE_PROFILE);
    if (entry.count > 0) {
        ASSERT_EQ(entry.count, 2);
    }

    // Check lensShadingMap
    entry = metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
    if (entry.count > 0) {
        ASSERT_EQ(entry.count % 4, 0);
        for (size_t i = 0; i < entry.count / 4; i++) {
            ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 1], entry.data.f[i * 4]);
            ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 2], entry.data.f[i * 4]);
            ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 3], entry.data.f[i * 4]);
        }
    }

    // Check tonemapCurve
    camera_metadata_ro_entry curveRed = metadata.find(ANDROID_TONEMAP_CURVE_RED);
    camera_metadata_ro_entry curveGreen = metadata.find(ANDROID_TONEMAP_CURVE_GREEN);
    camera_metadata_ro_entry curveBlue = metadata.find(ANDROID_TONEMAP_CURVE_BLUE);
    if (curveRed.count > 0 && curveGreen.count > 0 && curveBlue.count > 0) {
        ASSERT_EQ(curveRed.count, curveGreen.count);
        ASSERT_EQ(curveRed.count, curveBlue.count);
        for (size_t i = 0; i < curveRed.count; i++) {
            ASSERT_FLOAT_EQ(curveGreen.data.f[i], curveRed.data.f[i]);
            ASSERT_FLOAT_EQ(curveBlue.data.f[i], curveRed.data.f[i]);
        }
    }
}

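// Cameras that advertise the stream use case capability must list every mandatory
// use case, and the DEFAULT use case must always be present.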
void CameraAidlTest::verifyStreamUseCaseCharacteristics(const camera_metadata_t* metadata) {
    camera_metadata_ro_entry entry;
    bool hasStreamUseCaseCap = supportsStreamUseCaseCap(metadata);

    bool supportMandatoryUseCases = false;
    int retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
                                                &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        supportMandatoryUseCases = true;
        for (size_t i = 0; i < kMandatoryUseCases.size(); i++) {
            if (std::find(entry.data.i64, entry.data.i64 + entry.count, kMandatoryUseCases[i]) ==
                entry.data.i64 + entry.count) {
                supportMandatoryUseCases = false;
                break;
            }
        }
        bool supportDefaultUseCase = false;
        for (size_t i = 0; i < entry.count; i++) {
            if (entry.data.i64[i] == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
                supportDefaultUseCase = true;
            }
            ASSERT_TRUE(entry.data.i64[i] <= ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW ||
                        entry.data.i64[i] >= ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START);
        }
        ASSERT_TRUE(supportDefaultUseCase);
    }

    ASSERT_EQ(hasStreamUseCaseCap, supportMandatoryUseCases);
}

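// Settings override support must be advertised consistently across the characteristics,
// request and result key lists, and the available overrides must include OFF.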
void CameraAidlTest::verifySettingsOverrideCharacteristics(const camera_metadata_t* metadata) {
    camera_metadata_ro_entry entry;
    int retcode = find_camera_metadata_ro_entry(metadata,
            ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES, &entry);
    bool supportSettingsOverride = false;
    if (0 == retcode) {
        supportSettingsOverride = true;
        bool hasOff = false;
        for (size_t i = 0; i < entry.count; i++) {
            if (entry.data.u8[i] == ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF) {
                hasOff = true;
            }
        }
        ASSERT_TRUE(hasOff);
    }

    // Check availableRequestKeys
    retcode = find_camera_metadata_ro_entry(metadata,
            ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
    bool hasSettingsOverrideRequestKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasSettingsOverrideRequestKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_SETTINGS_OVERRIDE) != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableRequestKeys failed!";
    }

    // Check availableResultKeys
    retcode = find_camera_metadata_ro_entry(metadata,
            ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
    bool hasSettingsOverrideResultKey = false;
    bool hasOverridingFrameNumberKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasSettingsOverrideResultKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_SETTINGS_OVERRIDE) != entry.data.i32 + entry.count;
        hasOverridingFrameNumberKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER) !=
                entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableResultKeys failed!";
    }

    // Check availableCharacteristicKeys
    retcode = find_camera_metadata_ro_entry(metadata,
            ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
    bool hasSettingsOverrideCharacteristicsKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasSettingsOverrideCharacteristicsKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES) !=
                entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
    }

    ASSERT_EQ(supportSettingsOverride, hasSettingsOverrideRequestKey);
    ASSERT_EQ(supportSettingsOverride, hasSettingsOverrideResultKey);
    ASSERT_EQ(supportSettingsOverride, hasOverridingFrameNumberKey);
    ASSERT_EQ(supportSettingsOverride, hasSettingsOverrideCharacteristicsKey);
}

Status CameraAidlTest::isMonochromeCamera(const camera_metadata_t* staticMeta) {
    Status ret = Status::OPERATION_NOT_SUPPORTED;
    if (nullptr == staticMeta) {
        return Status::ILLEGAL_ARGUMENT;
    }

    camera_metadata_ro_entry entry;
    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
                                           &entry);

    if (0 != rc) {
        return Status::ILLEGAL_ARGUMENT;
    }

    for (size_t i = 0; i < entry.count; i++) {
        if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME == entry.data.u8[i]) {
            ret = Status::OK;
            break;
        }
    }

    return ret;
}

Status CameraAidlTest::isLogicalMultiCamera(const camera_metadata_t* staticMeta) {
    Status ret = Status::OPERATION_NOT_SUPPORTED;
    if (nullptr == staticMeta) {
        return Status::ILLEGAL_ARGUMENT;
    }

    camera_metadata_ro_entry entry;
    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
                                           &entry);
    if (0 != rc) {
        return Status::ILLEGAL_ARGUMENT;
    }

    for (size_t i = 0; i < entry.count; i++) {
        if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA == entry.data.u8[i]) {
            ret = Status::OK;
            break;
        }
    }

    return ret;
}

bool CameraAidlTest::isReadoutTimestampSupported(const camera_metadata_t* staticMeta) {
    camera_metadata_ro_entry readoutTimestampEntry;
    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_SENSOR_READOUT_TIMESTAMP,
                                           &readoutTimestampEntry);
    if (rc != 0) {
        ALOGI("%s: Failed to find ANDROID_SENSOR_READOUT_TIMESTAMP", __FUNCTION__);
        return true;
    }
    if (readoutTimestampEntry.count == 1 && !readoutTimestampEntry.data.u8[0]) {
        ALOGI("%s: readout timestamp not supported", __FUNCTION__);
        return false;
    }
    ALOGI("%s: readout timestamp supported", __FUNCTION__);
    return true;
}

void CameraAidlTest::verifyLogicalCameraResult(const camera_metadata_t* staticMetadata,
                                               const std::vector<uint8_t>& resultMetadata) {
    camera_metadata_t* metadata = (camera_metadata_t*)resultMetadata.data();

    std::unordered_set<std::string> physicalIds;
    Status rc = getPhysicalCameraIds(staticMetadata, &physicalIds);
    ASSERT_TRUE(Status::OK == rc);
    ASSERT_TRUE(physicalIds.size() > 1);

    camera_metadata_ro_entry entry;
    // Check mainPhysicalId
    find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID,
                                  &entry);
    if (entry.count > 0) {
        std::string mainPhysicalId(reinterpret_cast<const char*>(entry.data.u8));
        ASSERT_NE(physicalIds.find(mainPhysicalId), physicalIds.end());
    } else {
        ADD_FAILURE() << "Get LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID failed!";
    }

    if (flags::concert_mode()) {
        auto ret = find_camera_metadata_ro_entry(
                metadata, ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION, &entry);
        if ((ret == android::OK) && (entry.count > 0)) {
            ASSERT_TRUE(entry.count == 4);
            ASSERT_GE(entry.data.i32[0], 0);  // Top must be non-negative
            ASSERT_GE(entry.data.i32[1], 0);  // Left must be non-negative
            ASSERT_GT(entry.data.i32[2], 0);  // Width must be positive
            ASSERT_GT(entry.data.i32[3], 0);  // Height must be positive
        }
    }
}

void CameraAidlTest::verifyLensIntrinsicsResult(const std::vector<uint8_t>& resultMetadata) {
    if (flags::concert_mode()) {
        camera_metadata_t* metadata = (camera_metadata_t*)resultMetadata.data();

        camera_metadata_ro_entry timestampsEntry, intrinsicsEntry;
        auto tsRet = find_camera_metadata_ro_entry(
                metadata, ANDROID_STATISTICS_LENS_INTRINSIC_TIMESTAMPS, &timestampsEntry);
        auto inRet = find_camera_metadata_ro_entry(
                metadata, ANDROID_STATISTICS_LENS_INTRINSIC_SAMPLES, &intrinsicsEntry);
        ASSERT_EQ(tsRet, inRet);
        ASSERT_TRUE((intrinsicsEntry.count % 5) == 0);
        ASSERT_EQ(timestampsEntry.count, intrinsicsEntry.count / 5);
        if (timestampsEntry.count > 0) {
            for (size_t i = 0; i < timestampsEntry.count - 1; i++) {
                ASSERT_GE(timestampsEntry.data.i64[i + 1], timestampsEntry.data.i64[i]);
            }
        }
    }
}

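// Parses the NUL-separated ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS entry into
// a set of physical camera id strings.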
Status CameraAidlTest::getPhysicalCameraIds(const camera_metadata_t* staticMeta,
                                            std::unordered_set<std::string>* physicalIds) {
    if ((nullptr == staticMeta) || (nullptr == physicalIds)) {
        return Status::ILLEGAL_ARGUMENT;
    }

    camera_metadata_ro_entry entry;
    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
                                           &entry);
    if (0 != rc) {
        return Status::ILLEGAL_ARGUMENT;
    }

    const uint8_t* ids = entry.data.u8;
    size_t start = 0;
    for (size_t i = 0; i < entry.count; i++) {
        if (ids[i] == '\0') {
            if (start != i) {
                std::string currentId(reinterpret_cast<const char*>(ids + start));
                physicalIds->emplace(currentId);
            }
            start = i + 1;
        }
    }

    return Status::OK;
}

Status CameraAidlTest::getSystemCameraKind(const camera_metadata_t* staticMeta,
                                           SystemCameraKind* systemCameraKind) {
    if (nullptr == staticMeta || nullptr == systemCameraKind) {
        return Status::ILLEGAL_ARGUMENT;
    }

    camera_metadata_ro_entry entry{};
    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
                                           &entry);
    if (0 != rc) {
        return Status::ILLEGAL_ARGUMENT;
    }

    if (entry.count == 1 &&
        entry.data.u8[0] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA) {
        *systemCameraKind = SystemCameraKind::HIDDEN_SECURE_CAMERA;
        return Status::OK;
    }

    // Go through the capabilities and check if it has
    // ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA
    for (size_t i = 0; i < entry.count; ++i) {
        uint8_t capability = entry.data.u8[i];
        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA) {
            *systemCameraKind = SystemCameraKind::SYSTEM_ONLY_CAMERA;
            return Status::OK;
        }
    }
    *systemCameraKind = SystemCameraKind::PUBLIC;
    return Status::OK;
}

void CameraAidlTest::notifyDeviceState(int64_t state) {
    if (mProvider == nullptr) {
        return;
    }
    mProvider->notifyDeviceStateChange(state);
}

void CameraAidlTest::allocateGraphicBuffer(uint32_t width, uint32_t height, uint64_t usage,
                                           PixelFormat format, buffer_handle_t* buffer_handle) {
    ASSERT_NE(buffer_handle, nullptr);

    uint32_t stride;

    android::status_t err = android::GraphicBufferAllocator::get().allocateRawHandle(
            width, height, static_cast<int32_t>(format), 1u /*layerCount*/, usage, buffer_handle,
            &stride, "VtsHalCameraProviderV2");
    ASSERT_EQ(err, android::NO_ERROR);
}

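// Matches a camera device name of the form device@<major>.<minor>/<type>/<id> and
// optionally returns the version and camera id components.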
bool CameraAidlTest::matchDeviceName(const std::string& deviceName, const std::string& providerType,
                                     std::string* deviceVersion, std::string* cameraId) {
    // expected format: device@<major>.<minor>/<type>/<id>
    std::stringstream pattern;
    pattern << "device@([0-9]+\\.[0-9]+)/" << providerType << "/(.+)";
    std::regex e(pattern.str());

    std::smatch sm;
    if (std::regex_match(deviceName, sm, e)) {
        if (deviceVersion != nullptr) {
            *deviceVersion = sm[1];
        }
        if (cameraId != nullptr) {
            *cameraId = sm[2];
        }
        return true;
    }
    return false;
}

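// Validates the static characteristics buffer, checks mandatory and forbidden keys,
// and then delegates to the per-feature verification helpers below.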
void CameraAidlTest::verifyCameraCharacteristics(const CameraMetadata& chars) {
    const camera_metadata_t* metadata =
            reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());

    size_t expectedSize = chars.metadata.size();
    int result = validate_camera_metadata_structure(metadata, &expectedSize);
    ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
    size_t entryCount = get_camera_metadata_entry_count(metadata);
    // TODO: we can do better than 0 here. Need to check how many required
    // characteristics keys we've defined.
    ASSERT_GT(entryCount, 0u);

    camera_metadata_ro_entry entry;
    int retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        uint8_t hardwareLevel = entry.data.u8[0];
        ASSERT_TRUE(hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED ||
                    hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL ||
                    hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3 ||
                    hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL);
    } else {
        ADD_FAILURE() << "Get camera hardware level failed!";
    }

    entry.count = 0;
    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION, &entry);
    if ((0 == retcode) || (entry.count > 0)) {
        ADD_FAILURE() << "ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION "
                      << " per API contract should never be set by Hal!";
    }
    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, &entry);
    if ((0 == retcode) || (entry.count > 0)) {
        ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS"
                      << " per API contract should never be set by Hal!";
    }
    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, &entry);
    if ((0 == retcode) || (entry.count > 0)) {
        ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS"
                      << " per API contract should never be set by Hal!";
    }
    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, &entry);
    if ((0 == retcode) || (entry.count > 0)) {
        ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS"
                      << " per API contract should never be set by Hal!";
    }

    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, &entry);
    if (0 == retcode || entry.count > 0) {
        ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS "
                      << " per API contract should never be set by Hal!";
    }

    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS, &entry);
    if (0 == retcode || entry.count > 0) {
        ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS "
                      << " per API contract should never be set by Hal!";
    }

    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS,
                                            &entry);
    if (0 == retcode || entry.count > 0) {
        ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS "
                      << " per API contract should never be set by Hal!";
    }

    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_HEIC_INFO_SUPPORTED, &entry);
    if (0 == retcode && entry.count > 0) {
        retcode = find_camera_metadata_ro_entry(
                metadata, ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT, &entry);
        if (0 == retcode && entry.count > 0) {
            uint8_t maxJpegAppSegmentsCount = entry.data.u8[0];
            ASSERT_TRUE(maxJpegAppSegmentsCount >= 1 && maxJpegAppSegmentsCount <= 16);
        } else {
            ADD_FAILURE() << "Get Heic maxJpegAppSegmentsCount failed!";
        }
    }

    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_LENS_POSE_REFERENCE, &entry);
    if (0 == retcode && entry.count > 0) {
        uint8_t poseReference = entry.data.u8[0];
        ASSERT_TRUE(poseReference <= ANDROID_LENS_POSE_REFERENCE_AUTOMOTIVE &&
                    poseReference >= ANDROID_LENS_POSE_REFERENCE_PRIMARY_CAMERA);
    }

    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_INFO_DEVICE_STATE_ORIENTATIONS, &entry);
    if (0 == retcode && entry.count > 0) {
        ASSERT_TRUE((entry.count % 2) == 0);
        uint64_t maxPublicState = ((uint64_t)ICameraProvider::DEVICE_STATE_FOLDED) << 1;
        uint64_t vendorStateStart = 1UL << 31;  // Reserved for vendor specific states
        // Reported device state bits above the last public state and below the vendor
        // range must not be set.
        uint64_t stateMask = vendorStateStart - 1;
        stateMask &= ~(maxPublicState - 1);
        for (int i = 0; i < entry.count; i += 2) {
            ASSERT_TRUE((entry.data.i64[i] & stateMask) == 0);
            ASSERT_TRUE((entry.data.i64[i + 1] % 90) == 0);
        }
    }

    verifyExtendedSceneModeCharacteristics(metadata);
    verifyZoomCharacteristics(metadata);
    verifyStreamUseCaseCharacteristics(metadata);
    verifySettingsOverrideCharacteristics(metadata);
}

void CameraAidlTest::verifyExtendedSceneModeCharacteristics(const camera_metadata_t* metadata) {
    camera_metadata_ro_entry entry;
    int retcode = 0;

    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_AVAILABLE_MODES, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        for (auto i = 0; i < entry.count; i++) {
            ASSERT_TRUE(entry.data.u8[i] >= ANDROID_CONTROL_MODE_OFF &&
                        entry.data.u8[i] <= ANDROID_CONTROL_MODE_USE_EXTENDED_SCENE_MODE);
        }
    } else {
        ADD_FAILURE() << "Get camera controlAvailableModes failed!";
    }

    // Check key availability in capabilities, request and result.

    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
    bool hasExtendedSceneModeRequestKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasExtendedSceneModeRequestKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableRequestKeys failed!";
    }

    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
    bool hasExtendedSceneModeResultKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasExtendedSceneModeResultKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableResultKeys failed!";
    }

    retcode = find_camera_metadata_ro_entry(metadata,
            ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
    bool hasExtendedSceneModeMaxSizesKey = false;
    bool hasExtendedSceneModeZoomRatioRangesKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasExtendedSceneModeMaxSizesKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES) !=
                entry.data.i32 + entry.count;
        hasExtendedSceneModeZoomRatioRangesKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES) !=
                entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
    }

    camera_metadata_ro_entry maxSizesEntry;
    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES, &maxSizesEntry);
    bool hasExtendedSceneModeMaxSizes = (0 == retcode && maxSizesEntry.count > 0);

    camera_metadata_ro_entry zoomRatioRangesEntry;
    retcode = find_camera_metadata_ro_entry(
            metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
            &zoomRatioRangesEntry);
    bool hasExtendedSceneModeZoomRatioRanges = (0 == retcode && zoomRatioRangesEntry.count > 0);

    // Extended scene mode keys must all be available, or all be unavailable.
    bool noExtendedSceneMode =
            !hasExtendedSceneModeRequestKey && !hasExtendedSceneModeResultKey &&
            !hasExtendedSceneModeMaxSizesKey && !hasExtendedSceneModeZoomRatioRangesKey &&
            !hasExtendedSceneModeMaxSizes && !hasExtendedSceneModeZoomRatioRanges;
    if (noExtendedSceneMode) {
        return;
    }
    bool hasExtendedSceneMode = hasExtendedSceneModeRequestKey && hasExtendedSceneModeResultKey &&
                                hasExtendedSceneModeMaxSizesKey &&
                                hasExtendedSceneModeZoomRatioRangesKey &&
                                hasExtendedSceneModeMaxSizes && hasExtendedSceneModeZoomRatioRanges;
    ASSERT_TRUE(hasExtendedSceneMode);

    // Must have DISABLED, and must have one of BOKEH_STILL_CAPTURE, BOKEH_CONTINUOUS, or a VENDOR
    // mode.
    ASSERT_TRUE((maxSizesEntry.count == 6 && zoomRatioRangesEntry.count == 2) ||
                (maxSizesEntry.count == 9 && zoomRatioRangesEntry.count == 4));
    bool hasDisabledMode = false;
    bool hasBokehStillCaptureMode = false;
    bool hasBokehContinuousMode = false;
    bool hasVendorMode = false;
    std::vector<AvailableStream> outputStreams;
    ASSERT_EQ(Status::OK, getAvailableOutputStreams(metadata, outputStreams));
    for (int i = 0, j = 0; i < maxSizesEntry.count && j < zoomRatioRangesEntry.count; i += 3) {
        int32_t mode = maxSizesEntry.data.i32[i];
        int32_t maxWidth = maxSizesEntry.data.i32[i + 1];
        int32_t maxHeight = maxSizesEntry.data.i32[i + 2];
        switch (mode) {
            case ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED:
                hasDisabledMode = true;
                ASSERT_TRUE(maxWidth == 0 && maxHeight == 0);
                break;
            case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE:
                hasBokehStillCaptureMode = true;
                j += 2;
                break;
            case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS:
                hasBokehContinuousMode = true;
                j += 2;
                break;
            default:
                if (mode < ANDROID_CONTROL_EXTENDED_SCENE_MODE_VENDOR_START) {
                    ADD_FAILURE() << "Invalid extended scene mode advertised: " << mode;
                } else {
                    hasVendorMode = true;
                    j += 2;
                }
                break;
        }

        if (mode != ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED) {
            // Make sure size is supported.
            bool sizeSupported = false;
            for (const auto& stream : outputStreams) {
                if ((stream.format == static_cast<int32_t>(PixelFormat::YCBCR_420_888) ||
                     stream.format == static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)) &&
                    stream.width == maxWidth && stream.height == maxHeight) {
                    sizeSupported = true;
                    break;
                }
            }
            ASSERT_TRUE(sizeSupported);

            // Make sure zoom range is valid
            float minZoomRatio = zoomRatioRangesEntry.data.f[0];
            float maxZoomRatio = zoomRatioRangesEntry.data.f[1];
            ASSERT_GT(minZoomRatio, 0.0f);
            ASSERT_LE(minZoomRatio, maxZoomRatio);
        }
    }
    ASSERT_TRUE(hasDisabledMode);
    ASSERT_TRUE(hasBokehStillCaptureMode || hasBokehContinuousMode || hasVendorMode);
}

void CameraAidlTest::verifyHighSpeedRecordingCharacteristics(const std::string& cameraName,
                                                             const CameraMetadata& chars) {
    const camera_metadata_t* metadata =
            reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());

    // Check capabilities
    bool hasHighSpeedRecordingCapability = false;
    bool hasUltraHighResolutionCapability = false;
    camera_metadata_ro_entry entry;
    int rc =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
    if ((0 == rc) && (entry.count > 0)) {
        hasHighSpeedRecordingCapability =
                std::find(entry.data.u8, entry.data.u8 + entry.count,
                          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) !=
                entry.data.u8 + entry.count;

        hasUltraHighResolutionCapability =
                std::find(entry.data.u8, entry.data.u8 + entry.count,
                          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) !=
                entry.data.u8 + entry.count;
    }

    // Check high speed video configurations
    camera_metadata_ro_entry highSpeedEntry;
    rc = find_camera_metadata_ro_entry(
            metadata, ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, &highSpeedEntry);
    bool hasHighSpeedEntry = (0 == rc && highSpeedEntry.count > 0);

    camera_metadata_ro_entry highSpeedMaxResEntry;
    rc = find_camera_metadata_ro_entry(
            metadata, ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION,
            &highSpeedMaxResEntry);
    bool hasHighSpeedMaxResEntry = (0 == rc && highSpeedMaxResEntry.count > 0);

    // High speed recording configuration entry must be available based on capabilities
    bool noHighSpeedRecording =
            !hasHighSpeedRecordingCapability && !hasHighSpeedEntry && !hasHighSpeedMaxResEntry;
    if (noHighSpeedRecording) {
        return;
    }
    bool hasHighSpeedRecording = hasHighSpeedRecordingCapability && hasHighSpeedEntry &&
                                 ((hasHighSpeedMaxResEntry && hasUltraHighResolutionCapability) ||
                                  !hasHighSpeedMaxResEntry);
    ASSERT_TRUE(hasHighSpeedRecording);

    std::string version, cameraId;
    ASSERT_TRUE(matchDeviceName(cameraName, mProviderType, &version, &cameraId));
    bool needBatchSizeCheck = (version != CAMERA_DEVICE_API_VERSION_1);

    // Check each entry item
    ASSERT_TRUE(highSpeedEntry.count > 0 && highSpeedEntry.count % 5 == 0);
    for (auto i = 4; i < highSpeedEntry.count; i += 5) {
        int32_t fps_min = highSpeedEntry.data.i32[i - 2];
        int32_t fps_max = highSpeedEntry.data.i32[i - 1];
        int32_t batch_size_max = highSpeedEntry.data.i32[i];
        int32_t allowedMaxBatchSize = fps_max / 30;

        ASSERT_GE(fps_max, 120);
        ASSERT_TRUE(fps_min % 30 == 0 && fps_max % 30 == 0);
        if (needBatchSizeCheck) {
            ASSERT_LE(batch_size_max, 32);
            ASSERT_TRUE(allowedMaxBatchSize % batch_size_max == 0);
        }
    }

    if (hasHighSpeedMaxResEntry) {
        ASSERT_TRUE(highSpeedMaxResEntry.count > 0 && highSpeedMaxResEntry.count % 5 == 0);
        for (auto i = 4; i < highSpeedMaxResEntry.count; i += 5) {
            int32_t fps_min = highSpeedMaxResEntry.data.i32[i - 2];
            int32_t fps_max = highSpeedMaxResEntry.data.i32[i - 1];
            int32_t batch_size_max = highSpeedMaxResEntry.data.i32[i];
            int32_t allowedMaxBatchSize = fps_max / 30;

            ASSERT_GE(fps_max, 120);
            ASSERT_TRUE(fps_min % 30 == 0 && fps_max % 30 == 0);
            if (needBatchSizeCheck) {
                ASSERT_LE(batch_size_max, 32);
                ASSERT_TRUE(allowedMaxBatchSize % batch_size_max == 0);
            }
        }
    }
}

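// Collects the available output streams from the scaler and depth stream configurations,
// optionally restricted to a size/format threshold or to the maximum-resolution tags.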
Status CameraAidlTest::getAvailableOutputStreams(const camera_metadata_t* staticMeta,
                                                 std::vector<AvailableStream>& outputStreams,
                                                 const AvailableStream* threshold,
                                                 bool maxResolution) {
    if (nullptr == staticMeta) {
        return Status::ILLEGAL_ARGUMENT;
    }
    int scalerTag = maxResolution
                            ? ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
                            : ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
    int depthTag = maxResolution
                           ? ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
                           : ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS;

    camera_metadata_ro_entry scalerEntry;
    camera_metadata_ro_entry depthEntry;
    int foundScaler = find_camera_metadata_ro_entry(staticMeta, scalerTag, &scalerEntry);
    int foundDepth = find_camera_metadata_ro_entry(staticMeta, depthTag, &depthEntry);
    if ((0 != foundScaler || (0 != (scalerEntry.count % 4))) &&
        (0 != foundDepth || (0 != (depthEntry.count % 4)))) {
        return Status::ILLEGAL_ARGUMENT;
    }

    if (foundScaler == 0 && (0 == (scalerEntry.count % 4))) {
        fillOutputStreams(&scalerEntry, outputStreams, threshold,
                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
    }

    if (foundDepth == 0 && (0 == (depthEntry.count % 4))) {
        AvailableStream depthPreviewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
                                                 static_cast<int32_t>(PixelFormat::Y16)};
        const AvailableStream* depthThreshold =
                isDepthOnly(staticMeta) ? &depthPreviewThreshold : threshold;
        fillOutputStreams(&depthEntry, outputStreams, depthThreshold,
                          ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT);
    }

    return Status::OK;
}

void CameraAidlTest::fillOutputStreams(camera_metadata_ro_entry_t* entry,
                                       std::vector<AvailableStream>& outputStreams,
                                       const AvailableStream* threshold,
                                       const int32_t availableConfigOutputTag) {
    for (size_t i = 0; i < entry->count; i += 4) {
        if (availableConfigOutputTag == entry->data.i32[i + 3]) {
            if (nullptr == threshold) {
                AvailableStream s = {entry->data.i32[i + 1], entry->data.i32[i + 2],
                                     entry->data.i32[i]};
                outputStreams.push_back(s);
            } else {
                if ((threshold->format == entry->data.i32[i]) &&
                    (threshold->width >= entry->data.i32[i + 1]) &&
                    (threshold->height >= entry->data.i32[i + 2])) {
                    AvailableStream s = {entry->data.i32[i + 1], entry->data.i32[i + 2],
                                         threshold->format};
                    outputStreams.push_back(s);
                }
            }
        }
    }
}

void CameraAidlTest::verifyZoomCharacteristics(const camera_metadata_t* metadata) {
    camera_metadata_ro_entry entry;
    int retcode = 0;

    // Check key availability in capabilities, request and result.
    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
                                            &entry);
    float maxDigitalZoom = 1.0;
    if ((0 == retcode) && (entry.count == 1)) {
        maxDigitalZoom = entry.data.f[0];
    } else {
        ADD_FAILURE() << "Get camera scalerAvailableMaxDigitalZoom failed!";
    }

    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
    bool hasZoomRequestKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasZoomRequestKey = std::find(entry.data.i32, entry.data.i32 + entry.count,
                                      ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableRequestKeys failed!";
    }

    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
    bool hasZoomResultKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasZoomResultKey = std::find(entry.data.i32, entry.data.i32 + entry.count,
                                     ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableResultKeys failed!";
    }

    retcode = find_camera_metadata_ro_entry(metadata,
            ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
    bool hasZoomCharacteristicsKey = false;
    if ((0 == retcode) && (entry.count > 0)) {
        hasZoomCharacteristicsKey =
                std::find(entry.data.i32, entry.data.i32 + entry.count,
                          ANDROID_CONTROL_ZOOM_RATIO_RANGE) != entry.data.i32 + entry.count;
    } else {
        ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
    }

    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
    bool hasZoomRatioRange = (0 == retcode && entry.count == 2);

    // Zoom keys must all be available, or all be unavailable.
    bool noZoomRatio = !hasZoomRequestKey && !hasZoomResultKey && !hasZoomCharacteristicsKey &&
                       !hasZoomRatioRange;
    if (noZoomRatio) {
        return;
    }
    bool hasZoomRatio =
            hasZoomRequestKey && hasZoomResultKey && hasZoomCharacteristicsKey && hasZoomRatioRange;
    ASSERT_TRUE(hasZoomRatio);

    float minZoomRatio = entry.data.f[0];
    float maxZoomRatio = entry.data.f[1];
    constexpr float FLOATING_POINT_THRESHOLD = 0.00001f;
    if (maxDigitalZoom > maxZoomRatio + FLOATING_POINT_THRESHOLD) {
        ADD_FAILURE() << "Maximum digital zoom " << maxDigitalZoom
                      << " is larger than maximum zoom ratio " << maxZoomRatio << " + threshold "
                      << FLOATING_POINT_THRESHOLD << "!";
    }
    if (minZoomRatio > maxZoomRatio) {
        ADD_FAILURE() << "Maximum zoom ratio is less than minimum zoom ratio!";
    }
    if (minZoomRatio > 1.0f) {
        ADD_FAILURE() << "Minimum zoom ratio is more than 1.0!";
    }
    if (maxZoomRatio < 1.0f) {
        ADD_FAILURE() << "Maximum zoom ratio is less than 1.0!";
    }

    // Make sure CROPPING_TYPE is CENTER_ONLY
    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_CROPPING_TYPE, &entry);
    if ((0 == retcode) && (entry.count == 1)) {
        int8_t croppingType = entry.data.u8[0];
        ASSERT_EQ(croppingType, ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY);
    } else {
        ADD_FAILURE() << "Get camera scalerCroppingType failed!";
    }
}

void CameraAidlTest::verifyMonochromeCharacteristics(const CameraMetadata& chars) {
    const camera_metadata_t* metadata = (camera_metadata_t*)chars.metadata.data();
    Status rc = isMonochromeCamera(metadata);
    if (Status::OPERATION_NOT_SUPPORTED == rc) {
        return;
    }
    ASSERT_EQ(Status::OK, rc);

    camera_metadata_ro_entry entry;
    // Check capabilities
    int retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        ASSERT_EQ(std::find(entry.data.u8, entry.data.u8 + entry.count,
                            ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING),
                  entry.data.u8 + entry.count);
    }

    // Check Cfa
    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                                            &entry);
    if ((0 == retcode) && (entry.count == 1)) {
        ASSERT_TRUE(entry.data.i32[0] ==
                            static_cast<int32_t>(
                                    SensorInfoColorFilterArrangement::
                                            ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO) ||
                    entry.data.i32[0] ==
                            static_cast<int32_t>(
                                    SensorInfoColorFilterArrangement::
                                            ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR));
    }

    // Check availableRequestKeys
    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        for (size_t i = 0; i < entry.count; i++) {
            ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE);
            ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM);
            ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS);
        }
    } else {
        ADD_FAILURE() << "Get camera availableRequestKeys failed!";
    }

    // Check availableResultKeys
    retcode =
            find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        for (size_t i = 0; i < entry.count; i++) {
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_GREEN_SPLIT);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
            ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE);
            ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM);
            ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS);
        }
    } else {
        ADD_FAILURE() << "Get camera availableResultKeys failed!";
    }

    // Check availableCharacteristicKeys
    retcode = find_camera_metadata_ro_entry(metadata,
            ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        for (size_t i = 0; i < entry.count; i++) {
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM1);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM2);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX1);
            ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX2);
        }
    } else {
        ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
    }

    // Check blackLevelPattern
    retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_BLACK_LEVEL_PATTERN, &entry);
    if ((0 == retcode) && (entry.count > 0)) {
        ASSERT_EQ(entry.count, 4);
        for (size_t i = 1; i < entry.count; i++) {
            ASSERT_EQ(entry.data.i32[i], entry.data.i32[0]);
        }
    }
}

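// Torch-capable devices must report the four flash strength level tags together;
// devices without a flash must not report any of them.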
void CameraAidlTest::verifyManualFlashStrengthControlCharacteristics(
        const camera_metadata_t* staticMeta) {
    camera_metadata_ro_entry singleMaxEntry;
    camera_metadata_ro_entry singleDefEntry;
    camera_metadata_ro_entry torchMaxEntry;
    camera_metadata_ro_entry torchDefEntry;
    bool torch_supported = false;
    int32_t singleMaxLevel = 0;
    int32_t singleDefLevel = 0;
    int32_t torchMaxLevel = 0;
    int32_t torchDefLevel = 0;

    // determine whether the device supports torch or not
    torch_supported = isTorchSupported(staticMeta);

    int singleMaxRetCode = find_camera_metadata_ro_entry(staticMeta,
            ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL, &singleMaxEntry);
    int singleDefRetCode = find_camera_metadata_ro_entry(staticMeta,
            ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL, &singleDefEntry);
    int torchMaxRetCode = find_camera_metadata_ro_entry(staticMeta,
            ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL, &torchMaxEntry);
    int torchDefRetCode = find_camera_metadata_ro_entry(staticMeta,
            ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL, &torchDefEntry);
    if (torch_supported) {
        int expectedEntryCount;
        if (singleMaxRetCode == 0 && singleDefRetCode == 0 && torchMaxRetCode == 0 &&
            torchDefRetCode == 0) {
            singleMaxLevel = *singleMaxEntry.data.i32;
            singleDefLevel = *singleDefEntry.data.i32;
            torchMaxLevel = *torchMaxEntry.data.i32;
            torchDefLevel = *torchDefEntry.data.i32;
            expectedEntryCount = 1;
        } else {
            expectedEntryCount = 0;
        }
        ASSERT_EQ(singleMaxEntry.count, expectedEntryCount);
        ASSERT_EQ(singleDefEntry.count, expectedEntryCount);
        ASSERT_EQ(torchMaxEntry.count, expectedEntryCount);
        ASSERT_EQ(torchDefEntry.count, expectedEntryCount);
        // if the device supports this feature default levels should be greater than 0
        if (singleMaxLevel > 1) {
            ASSERT_GT(torchMaxLevel, 1);
            ASSERT_GT(torchDefLevel, 0);
            ASSERT_GT(singleDefLevel, 0);
            ASSERT_TRUE(torchDefLevel <= torchMaxLevel);  // default levels should be <= max levels
            ASSERT_TRUE(singleDefLevel <= singleMaxLevel);
        }
    } else {
        ASSERT_TRUE(singleMaxRetCode != 0);
        ASSERT_TRUE(singleDefRetCode != 0);
        ASSERT_TRUE(torchMaxRetCode != 0);
        ASSERT_TRUE(torchDefRetCode != 0);
    }
}

void CameraAidlTest::verifyRecommendedConfigs(const CameraMetadata& chars) {
    size_t CONFIG_ENTRY_SIZE = 5;
    size_t CONFIG_ENTRY_TYPE_OFFSET = 3;
    size_t CONFIG_ENTRY_BITFIELD_OFFSET = 4;
    uint32_t maxPublicUsecase =
            ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PUBLIC_END_3_8;
    uint32_t vendorUsecaseStart =
            ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VENDOR_START;
    uint32_t usecaseMask = (1 << vendorUsecaseStart) - 1;
    usecaseMask &= ~((1 << maxPublicUsecase) - 1);

    const camera_metadata_t* metadata =
            reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());

    camera_metadata_ro_entry recommendedConfigsEntry, recommendedDepthConfigsEntry, ioMapEntry;
    recommendedConfigsEntry.count = recommendedDepthConfigsEntry.count = ioMapEntry.count = 0;
    int retCode = find_camera_metadata_ro_entry(
            metadata, ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
            &recommendedConfigsEntry);
    int depthRetCode = find_camera_metadata_ro_entry(
            metadata, ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS,
            &recommendedDepthConfigsEntry);
    int ioRetCode = find_camera_metadata_ro_entry(
            metadata, ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP, &ioMapEntry);
    if ((0 != retCode) && (0 != depthRetCode)) {
        // In case both regular and depth recommended configurations are absent,
        // I/O should be absent as well.
        ASSERT_NE(ioRetCode, 0);
        return;
    }

    camera_metadata_ro_entry availableKeysEntry;
    retCode = find_camera_metadata_ro_entry(
            metadata, ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &availableKeysEntry);
    ASSERT_TRUE((0 == retCode) && (availableKeysEntry.count > 0));
    std::vector<int32_t> availableKeys;
    availableKeys.reserve(availableKeysEntry.count);
    availableKeys.insert(availableKeys.end(), availableKeysEntry.data.i32,
                         availableKeysEntry.data.i32 + availableKeysEntry.count);

    if (recommendedConfigsEntry.count > 0) {
        ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
                            ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS),
                  availableKeys.end());
        ASSERT_EQ((recommendedConfigsEntry.count % CONFIG_ENTRY_SIZE), 0);
        for (size_t i = 0; i < recommendedConfigsEntry.count; i += CONFIG_ENTRY_SIZE) {
            int32_t entryType = recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET];
            uint32_t bitfield = recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET];
            ASSERT_TRUE((entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) ||
                        (entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT));
            ASSERT_TRUE((bitfield & usecaseMask) == 0);
        }
    }

    if (recommendedDepthConfigsEntry.count > 0) {
        ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
                            ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS),
                  availableKeys.end());
        ASSERT_EQ((recommendedDepthConfigsEntry.count % CONFIG_ENTRY_SIZE), 0);
        for (size_t i = 0; i < recommendedDepthConfigsEntry.count; i += CONFIG_ENTRY_SIZE) {
            int32_t entryType = recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET];
            uint32_t bitfield =
                    recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET];
            ASSERT_TRUE((entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) ||
                        (entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT));
            ASSERT_TRUE((bitfield & usecaseMask) == 0);
        }

        if (recommendedConfigsEntry.count == 0) {
            // In case regular recommended configurations are absent but suggested depth
            // configurations are present, I/O should be absent.
            ASSERT_NE(ioRetCode, 0);
        }
    }

    if ((ioRetCode == 0) && (ioMapEntry.count > 0)) {
        ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
                            ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP),
                  availableKeys.end());
        ASSERT_EQ(isZSLModeAvailable(metadata), Status::OK);
    }
}
1337
1338 // Check whether ZSL is available using the static camera
1339 // characteristics.
1340 Status CameraAidlTest::isZSLModeAvailable(const camera_metadata_t* staticMeta) {
1341 if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
1342 return Status::OK;
1343 } else {
1344 return isZSLModeAvailable(staticMeta, YUV_REPROCESS);
1345 }
1346 }
1347
1348 Status CameraAidlTest::isZSLModeAvailable(const camera_metadata_t* staticMeta,
1349 ReprocessType reprocType) {
1350 Status ret = Status::OPERATION_NOT_SUPPORTED;
1351 if (nullptr == staticMeta) {
1352 return Status::ILLEGAL_ARGUMENT;
1353 }
1354
1355 camera_metadata_ro_entry entry;
1356 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1357 &entry);
1358 if (0 != rc) {
1359 return Status::ILLEGAL_ARGUMENT;
1360 }
1361
1362 for (size_t i = 0; i < entry.count; i++) {
1363 if ((reprocType == PRIV_REPROCESS &&
1364 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING == entry.data.u8[i]) ||
1365 (reprocType == YUV_REPROCESS &&
1366 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING == entry.data.u8[i])) {
1367 ret = Status::OK;
1368 break;
1369 }
1370 }
1371
1372 return ret;
1373 }
1374
1375 // Verify logical or ultra high resolution camera static metadata
1376 void CameraAidlTest::verifyLogicalOrUltraHighResCameraMetadata(
1377 const std::string& cameraName, const std::shared_ptr<ICameraDevice>& device,
1378 const CameraMetadata& chars, const std::vector<std::string>& deviceNames) {
1379 const camera_metadata_t* metadata =
1380 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
1381 ASSERT_NE(nullptr, metadata);
1382 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
1383 Status retStatus = getSystemCameraKind(metadata, &systemCameraKind);
1384 ASSERT_EQ(retStatus, Status::OK);
1385 Status rc = isLogicalMultiCamera(metadata);
1386 ASSERT_TRUE(Status::OK == rc || Status::OPERATION_NOT_SUPPORTED == rc);
1387 bool isMultiCamera = (Status::OK == rc);
1388 bool isUltraHighResCamera = isUltraHighResolution(metadata);
1389 if (!isMultiCamera && !isUltraHighResCamera) {
1390 return;
1391 }
1392
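// Query the zoom ratio range and the supported buffer management version up front; when
// multi-resolution streams are advertised, HAL-side (or session-configurable) buffer
// management is mandatory.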
1393 camera_metadata_ro_entry entry;
1394 int retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1395 bool hasZoomRatioRange = (0 == retcode && entry.count == 2);
1396 retcode = find_camera_metadata_ro_entry(
1397 metadata, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
1398 bool hasHalBufferManager =
1399 (0 == retcode && 1 == entry.count &&
1400 entry.data.i32[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
1401 bool sessionHalBufferManager =
1402 (0 == retcode && 1 == entry.count &&
1403 entry.data.i32[0] ==
1404 ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE);
1405 retcode = find_camera_metadata_ro_entry(
1406 metadata, ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED, &entry);
1407 bool multiResolutionStreamSupported =
1408 (0 == retcode && 1 == entry.count &&
1409 entry.data.u8[0] == ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_TRUE);
1410 if (multiResolutionStreamSupported) {
1411 ASSERT_TRUE(hasHalBufferManager || sessionHalBufferManager);
1412 }
1413
1414 std::string version, cameraId;
1415 ASSERT_TRUE(matchDeviceName(cameraName, mProviderType, &version, &cameraId));
1416 std::unordered_set<std::string> physicalIds;
1417 rc = getPhysicalCameraIds(metadata, &physicalIds);
1418 ASSERT_TRUE(isUltraHighResCamera || Status::OK == rc);
1419 for (const auto& physicalId : physicalIds) {
1420 ASSERT_NE(physicalId, cameraId);
1421 }
1422 if (physicalIds.size() == 0) {
1423 ASSERT_TRUE(isUltraHighResCamera && !isMultiCamera);
1424 physicalIds.insert(cameraId);
1425 }
1426
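// Collect the per-physical-camera request keys and the logical camera's privacy test
// pattern modes so they can be cross-checked against each physical camera below.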
1427 std::unordered_set<int32_t> physicalRequestKeyIDs;
1428 rc = getSupportedKeys(const_cast<camera_metadata_t*>(metadata),
1429 ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1430 &physicalRequestKeyIDs);
1431 ASSERT_TRUE(Status::OK == rc);
1432 bool hasTestPatternPhysicalRequestKey =
1433 physicalRequestKeyIDs.find(ANDROID_SENSOR_TEST_PATTERN_MODE) !=
1434 physicalRequestKeyIDs.end();
1435 std::unordered_set<int32_t> privacyTestPatternModes;
1436 getPrivacyTestPatternModes(metadata, &privacyTestPatternModes);
1437
1438 // Map from image format to number of multi-resolution sizes for that format
1439 std::unordered_map<int32_t, size_t> multiResOutputFormatCounterMap;
1440 std::unordered_map<int32_t, size_t> multiResInputFormatCounterMap;
1441 for (const auto& physicalId : physicalIds) {
1442 bool isPublicId = false;
1443 std::string fullPublicId;
1444 SystemCameraKind physSystemCameraKind = SystemCameraKind::PUBLIC;
1445 for (auto& deviceName : deviceNames) {
1446 std::string publicVersion, publicId;
1447 ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
1448 if (physicalId == publicId) {
1449 isPublicId = true;
1450 fullPublicId = deviceName;
1451 break;
1452 }
1453 }
1454
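// Fetch the physical camera's characteristics: directly for public camera ids, or via
// getPhysicalCameraCharacteristics() for hidden ones.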
1455 camera_metadata_ro_entry physicalMultiResStreamConfigs;
1456 camera_metadata_ro_entry physicalStreamConfigs;
1457 camera_metadata_ro_entry physicalMaxResolutionStreamConfigs;
1458 CameraMetadata physChars;
1459 bool isUltraHighRes = false;
1460 std::unordered_set<int32_t> subCameraPrivacyTestPatterns;
1461 if (isPublicId) {
1462 std::shared_ptr<ICameraDevice> subDevice;
1463 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(fullPublicId, &subDevice);
1464 ASSERT_TRUE(ret.isOk());
1465 ASSERT_NE(subDevice, nullptr);
1466
1467 ret = subDevice->getCameraCharacteristics(&physChars);
1468 ASSERT_TRUE(ret.isOk());
1469
1470 const camera_metadata_t* staticMetadata =
1471 reinterpret_cast<const camera_metadata_t*>(physChars.metadata.data());
1472 retStatus = getSystemCameraKind(staticMetadata, &physSystemCameraKind);
1473 ASSERT_EQ(retStatus, Status::OK);
1474
1475 // Make sure that the system camera kind of a non-hidden
1476 // physical camera is the same as that of the logical camera
1477 // associated with it.
1478 ASSERT_EQ(physSystemCameraKind, systemCameraKind);
1479 retcode = find_camera_metadata_ro_entry(staticMetadata,
1480 ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1481 bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2);
1482 ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange);
1483
1484 getMultiResolutionStreamConfigurations(
1485 &physicalMultiResStreamConfigs, &physicalStreamConfigs,
1486 &physicalMaxResolutionStreamConfigs, staticMetadata);
1487 isUltraHighRes = isUltraHighResolution(staticMetadata);
1488
1489 getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns);
1490 } else {
1491 // Check camera characteristics for hidden camera id
1492 ndk::ScopedAStatus ret =
1493 device->getPhysicalCameraCharacteristics(physicalId, &physChars);
1494 ASSERT_TRUE(ret.isOk());
1495 verifyCameraCharacteristics(physChars);
1496 verifyMonochromeCharacteristics(physChars);
1497
1498 auto staticMetadata = (const camera_metadata_t*)physChars.metadata.data();
1499 retcode = find_camera_metadata_ro_entry(staticMetadata,
1500 ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1501 bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2);
1502 ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange);
1503
1504 getMultiResolutionStreamConfigurations(
1505 &physicalMultiResStreamConfigs, &physicalStreamConfigs,
1506 &physicalMaxResolutionStreamConfigs, staticMetadata);
1507 isUltraHighRes = isUltraHighResolution(staticMetadata);
1508 getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns);
1509
1510 // Check that calling getCameraDeviceInterface() on a hidden camera id returns
1511 // ILLEGAL_ARGUMENT.
1512 std::stringstream s;
1513 s << "device@" << version << "/" << mProviderType << "/" << physicalId;
1514 std::string fullPhysicalId(s.str());
1515 std::shared_ptr<ICameraDevice> subDevice;
1516 ret = mProvider->getCameraDeviceInterface(fullPhysicalId, &subDevice);
1517 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1518 ret.getServiceSpecificError());
1519 ASSERT_EQ(subDevice, nullptr);
1520 }
1521
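// When per-physical test pattern control is exposed, the privacy test pattern modes of
// the logical camera and each physical camera must match.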
1522 if (hasTestPatternPhysicalRequestKey) {
1523 ASSERT_TRUE(privacyTestPatternModes == subCameraPrivacyTestPatterns);
1524 }
1525
1526 if (physicalMultiResStreamConfigs.count > 0) {
1527 ASSERT_EQ(physicalMultiResStreamConfigs.count % 4, 0);
1528
1529 // Each supported size must be the max size for that format.
1530 for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4; i++) {
1531 int32_t multiResFormat = physicalMultiResStreamConfigs.data.i32[i * 4];
1532 int32_t multiResWidth = physicalMultiResStreamConfigs.data.i32[i * 4 + 1];
1533 int32_t multiResHeight = physicalMultiResStreamConfigs.data.i32[i * 4 + 2];
1534 int32_t multiResInput = physicalMultiResStreamConfigs.data.i32[i * 4 + 3];
1535
1536 // Check if the resolution is the max resolution in stream
1537 // configuration map
1538 bool supported = false;
1539 bool isMaxSize = true;
1540 for (size_t j = 0; j < physicalStreamConfigs.count / 4; j++) {
1541 int32_t format = physicalStreamConfigs.data.i32[j * 4];
1542 int32_t width = physicalStreamConfigs.data.i32[j * 4 + 1];
1543 int32_t height = physicalStreamConfigs.data.i32[j * 4 + 2];
1544 int32_t input = physicalStreamConfigs.data.i32[j * 4 + 3];
1545 if (format == multiResFormat && input == multiResInput) {
1546 if (width == multiResWidth && height == multiResHeight) {
1547 supported = true;
1548 } else if (width * height > multiResWidth * multiResHeight) {
1549 isMaxSize = false;
1550 }
1551 }
1552 }
1553 // Check if the resolution is the max resolution in max
1554 // resolution stream configuration map
1555 bool supportedUltraHighRes = false;
1556 bool isUltraHighResMaxSize = true;
1557 for (size_t j = 0; j < physicalMaxResolutionStreamConfigs.count / 4; j++) {
1558 int32_t format = physicalMaxResolutionStreamConfigs.data.i32[j * 4];
1559 int32_t width = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 1];
1560 int32_t height = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 2];
1561 int32_t input = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 3];
1562 if (format == multiResFormat && input == multiResInput) {
1563 if (width == multiResWidth && height == multiResHeight) {
1564 supportedUltraHighRes = true;
1565 } else if (width * height > multiResWidth * multiResHeight) {
1566 isUltraHighResMaxSize = false;
1567 }
1568 }
1569 }
1570
1571 if (isUltraHighRes) {
1572 // For ultra high resolution camera, the configuration must
1573 // be the maximum size in stream configuration map, or max
1574 // resolution stream configuration map
1575 ASSERT_TRUE((supported && isMaxSize) ||
1576 (supportedUltraHighRes && isUltraHighResMaxSize));
1577 } else {
1578 // The configuration must be the maximum size in stream
1579 // configuration map
1580 ASSERT_TRUE(supported && isMaxSize);
1581 ASSERT_FALSE(supportedUltraHighRes);
1582 }
1583
1584 // Increment the counter for the configuration's format.
1585 auto& formatCounterMap = multiResInput ? multiResInputFormatCounterMap
1586 : multiResOutputFormatCounterMap;
1587 if (formatCounterMap.count(multiResFormat) == 0) {
1588 formatCounterMap[multiResFormat] = 1;
1589 } else {
1590 formatCounterMap[multiResFormat]++;
1591 }
1592 }
1593
1594 // There must be no duplicates
1595 for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4 - 1; i++) {
1596 for (size_t j = i + 1; j < physicalMultiResStreamConfigs.count / 4; j++) {
1597 // Input/output doesn't match
1598 if (physicalMultiResStreamConfigs.data.i32[i * 4 + 3] !=
1599 physicalMultiResStreamConfigs.data.i32[j * 4 + 3]) {
1600 continue;
1601 }
1602 // Format doesn't match
1603 if (physicalMultiResStreamConfigs.data.i32[i * 4] !=
1604 physicalMultiResStreamConfigs.data.i32[j * 4]) {
1605 continue;
1606 }
1607 // Width doesn't match
1608 if (physicalMultiResStreamConfigs.data.i32[i * 4 + 1] !=
1609 physicalMultiResStreamConfigs.data.i32[j * 4 + 1]) {
1610 continue;
1611 }
1612 // Height doesn't match
1613 if (physicalMultiResStreamConfigs.data.i32[i * 4 + 2] !=
1614 physicalMultiResStreamConfigs.data.i32[j * 4 + 2]) {
1615 continue;
1616 }
1617 // input/output, format, width, and height all match
1618 ADD_FAILURE();
1619 }
1620 }
1621 }
1622 }
1623
1624 // If a multi-resolution stream is supported, there must be at least one
1625 // format with more than one resolution.
1626 if (multiResolutionStreamSupported) {
1627 size_t numMultiResFormats = 0;
1628 for (const auto& [format, sizeCount] : multiResOutputFormatCounterMap) {
1629 if (sizeCount >= 2) {
1630 numMultiResFormats++;
1631 }
1632 }
1633 for (const auto& [format, sizeCount] : multiResInputFormatCounterMap) {
1634 if (sizeCount >= 2) {
1635 numMultiResFormats++;
1636
1637 // If multi-resolution reprocessing is supported, the logical
1638 // camera or ultra-high resolution sensor camera must support
1639 // the corresponding reprocessing capability.
1640 if (format == static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)) {
1641 ASSERT_EQ(isZSLModeAvailable(metadata, PRIV_REPROCESS), Status::OK);
1642 } else if (format == static_cast<int32_t>(PixelFormat::YCBCR_420_888)) {
1643 ASSERT_EQ(isZSLModeAvailable(metadata, YUV_REPROCESS), Status::OK);
1644 }
1645 }
1646 }
1647 ASSERT_GT(numMultiResFormats, 0);
1648 }
1649
1650 // Make sure ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID is available in
1651 // result keys.
1652 if (isMultiCamera) {
1653 retcode = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
1654 &entry);
1655 if ((0 == retcode) && (entry.count > 0)) {
1656 ASSERT_NE(std::find(entry.data.i32, entry.data.i32 + entry.count,
1657 static_cast<int32_t>(
1658 CameraMetadataTag::
1659 ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID)),
1660 entry.data.i32 + entry.count);
1661 } else {
1662 ADD_FAILURE() << "Get camera availableResultKeys failed!";
1663 }
1664 }
1665 }
1666
1667 bool CameraAidlTest::isUltraHighResolution(const camera_metadata_t* staticMeta) {
1668 camera_metadata_ro_entry scalerEntry;
1669 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1670 &scalerEntry);
1671 if (rc == 0) {
1672 for (uint32_t i = 0; i < scalerEntry.count; i++) {
1673 if (scalerEntry.data.u8[i] ==
1674 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
1675 return true;
1676 }
1677 }
1678 }
1679 return false;
1680 }
1681
1682 Status CameraAidlTest::getSupportedKeys(camera_metadata_t* staticMeta, uint32_t tagId,
1683 std::unordered_set<int32_t>* requestIDs) {
1684 if ((nullptr == staticMeta) || (nullptr == requestIDs)) {
1685 return Status::ILLEGAL_ARGUMENT;
1686 }
1687
1688 camera_metadata_ro_entry entry;
1689 int rc = find_camera_metadata_ro_entry(staticMeta, tagId, &entry);
1690 if ((0 != rc) || (entry.count == 0)) {
1691 return Status::OK;
1692 }
1693
1694 requestIDs->insert(entry.data.i32, entry.data.i32 + entry.count);
1695
1696 return Status::OK;
1697 }
1698
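// Collect the test pattern modes usable for sensor privacy (SOLID_COLOR and BLACK) from the
// static metadata.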
1699 void CameraAidlTest::getPrivacyTestPatternModes(
1700 const camera_metadata_t* staticMetadata,
1701 std::unordered_set<int32_t>* privacyTestPatternModes) {
1702 ASSERT_NE(staticMetadata, nullptr);
1703 ASSERT_NE(privacyTestPatternModes, nullptr);
1704
1705 camera_metadata_ro_entry entry;
1706 int retcode = find_camera_metadata_ro_entry(
1707 staticMetadata, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &entry);
1708 ASSERT_TRUE(0 == retcode);
1709
1710 for (size_t i = 0; i < entry.count; i++) {
1711 if (entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR ||
1712 entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_BLACK) {
1713 privacyTestPatternModes->insert(entry.data.i32[i]);
1714 }
1715 }
1716 }
1717
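// Look up the regular, maximum-resolution and multi-resolution stream configuration entries.
// Only the regular entry is mandatory; the other two may be absent (-ENOENT).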
1718 void CameraAidlTest::getMultiResolutionStreamConfigurations(
1719 camera_metadata_ro_entry* multiResStreamConfigs, camera_metadata_ro_entry* streamConfigs,
1720 camera_metadata_ro_entry* maxResolutionStreamConfigs,
1721 const camera_metadata_t* staticMetadata) {
1722 ASSERT_NE(multiResStreamConfigs, nullptr);
1723 ASSERT_NE(streamConfigs, nullptr);
1724 ASSERT_NE(maxResolutionStreamConfigs, nullptr);
1725 ASSERT_NE(staticMetadata, nullptr);
1726
1727 int retcode = find_camera_metadata_ro_entry(
1728 staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, streamConfigs);
1729 ASSERT_TRUE(0 == retcode);
1730 retcode = find_camera_metadata_ro_entry(
1731 staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
1732 maxResolutionStreamConfigs);
1733 ASSERT_TRUE(-ENOENT == retcode || 0 == retcode);
1734 retcode = find_camera_metadata_ro_entry(
1735 staticMetadata, ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
1736 multiResStreamConfigs);
1737 ASSERT_TRUE(-ENOENT == retcode || 0 == retcode);
1738 }
1739
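// Check whether the camera advertises a flash unit via ANDROID_FLASH_INFO_AVAILABLE.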
1740 bool CameraAidlTest::isTorchSupported(const camera_metadata_t* staticMeta) {
1741 camera_metadata_ro_entry torchEntry;
1742 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_FLASH_INFO_AVAILABLE, &torchEntry);
1743 if (rc != 0) {
1744 ALOGI("isTorchSupported: Failed to find entry for ANDROID_FLASH_INFO_AVAILABLE");
1745 return false;
1746 }
1747 if (torchEntry.count == 1 && !torchEntry.data.u8[0]) {
1748 ALOGI("isTorchSupported: Torch not supported");
1749 return false;
1750 }
1751 ALOGI("isTorchSupported: Torch supported");
1752 return true;
1753 }
1754
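// Torch strength control is considered supported when the advertised maximum strength level
// is greater than 1.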
1755 bool CameraAidlTest::isTorchStrengthControlSupported(const camera_metadata_t* staticMeta) {
1756 int32_t maxLevel = 0;
1757 camera_metadata_ro_entry maxEntry;
1758 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
1759 &maxEntry);
1760 if (rc != 0) {
1761 ALOGI("isTorchStrengthControlSupported: Failed to find entry for "
1762 "ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL");
1763 return false;
1764 }
1765
1766 maxLevel = *maxEntry.data.i32;
1767 if (maxLevel > 1) {
1768 ALOGI("isTorchStrengthControlSupported: Torch strength control supported.");
1769 return true;
1770 }
1771 ALOGI("isTorchStrengthControlSupported: Torch strength control not supported.");
1772 return false;
1773 }
1774
1775 void CameraAidlTest::verifyRequestTemplate(const camera_metadata_t* metadata,
1776 RequestTemplate requestTemplate) {
1777 ASSERT_NE(nullptr, metadata);
1778 size_t entryCount = get_camera_metadata_entry_count(metadata);
1779 ALOGI("template %u metadata entry count is %zu", (int32_t)requestTemplate, entryCount);
1780 // TODO: we can do better than 0 here. Need to check how many required
1781 // request keys we've defined for each template
1782 ASSERT_GT(entryCount, 0u);
1783
1784 // Check zoomRatio
1785 camera_metadata_ro_entry zoomRatioEntry;
1786 int foundZoomRatio =
1787 find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO, &zoomRatioEntry);
1788 if (foundZoomRatio == 0) {
1789 ASSERT_EQ(zoomRatioEntry.count, 1);
1790 ASSERT_EQ(zoomRatioEntry.data.f[0], 1.0f);
1791 }
1792
1793 // Check settings override
1794 camera_metadata_ro_entry settingsOverrideEntry;
1795 int foundSettingsOverride = find_camera_metadata_ro_entry(metadata,
1796 ANDROID_CONTROL_SETTINGS_OVERRIDE, &settingsOverrideEntry);
1797 if (foundSettingsOverride == 0) {
1798 ASSERT_EQ(settingsOverrideEntry.count, 1);
1799 ASSERT_EQ(settingsOverrideEntry.data.u8[0], ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF);
1800 }
1801 }
1802
1803 void CameraAidlTest::openEmptyDeviceSession(const std::string& name,
1804 const std::shared_ptr<ICameraProvider>& provider,
1805 std::shared_ptr<ICameraDeviceSession>* session,
1806 CameraMetadata* staticMeta,
1807 std::shared_ptr<ICameraDevice>* device) {
1808 ASSERT_NE(nullptr, session);
1809 ASSERT_NE(nullptr, staticMeta);
1810 ASSERT_NE(nullptr, device);
1811
1812 ALOGI("configureStreams: Testing camera device %s", name.c_str());
1813 ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, device);
1814 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
1815 ret.getServiceSpecificError());
1816 ASSERT_TRUE(ret.isOk());
1817 ASSERT_NE(*device, nullptr);
1818
1819 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
1820 ret = (*device)->open(cb, session);
1821 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
1822 ret.getServiceSpecificError());
1823 ASSERT_TRUE(ret.isOk());
1824 ASSERT_NE(*session, nullptr);
1825
1826 ret = (*device)->getCameraCharacteristics(staticMeta);
1827 ASSERT_TRUE(ret.isOk());
1828 }
1829
1830 void CameraAidlTest::openEmptyInjectionSession(const std::string& name,
1831 const std::shared_ptr<ICameraProvider>& provider,
1832 std::shared_ptr<ICameraInjectionSession>* session,
1833 CameraMetadata* metadata,
1834 std::shared_ptr<ICameraDevice>* device) {
1835 ASSERT_NE(nullptr, session);
1836 ASSERT_NE(nullptr, metadata);
1837 ASSERT_NE(nullptr, device);
1838
1839 ALOGI("openEmptyInjectionSession: Testing camera device %s", name.c_str());
1840 ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, device);
1841 ALOGI("openEmptyInjectionSession: getCameraDeviceInterface returns status:%d:%d",
1842 ret.getExceptionCode(), ret.getServiceSpecificError());
1843 ASSERT_TRUE(ret.isOk());
1844 ASSERT_NE(*device, nullptr);
1845
1846 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
1847 ret = (*device)->openInjectionSession(cb, session);
1848 ALOGI("device::openInjectionSession returns status:%d:%d", ret.getExceptionCode(),
1849 ret.getServiceSpecificError());
1850
1851 if (static_cast<Status>(ret.getServiceSpecificError()) == Status::OPERATION_NOT_SUPPORTED &&
1852 *session == nullptr) {
1853 return; // Injection session not supported. The caller will receive nullptr in *session.
1854 }
1855
1856 ASSERT_TRUE(ret.isOk());
1857 ASSERT_NE(*session, nullptr);
1858
1859 ret = (*device)->getCameraCharacteristics(metadata);
1860 ASSERT_TRUE(ret.isOk());
1861 }
1862
1863 Status CameraAidlTest::getJpegBufferSize(camera_metadata_t* staticMeta, int32_t* outBufSize) {
1864 if (nullptr == staticMeta || nullptr == outBufSize) {
1865 return Status::ILLEGAL_ARGUMENT;
1866 }
1867
1868 camera_metadata_ro_entry entry;
1869 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_JPEG_MAX_SIZE, &entry);
1870 if ((0 != rc) || (1 != entry.count)) {
1871 return Status::ILLEGAL_ARGUMENT;
1872 }
1873
1874 *outBufSize = entry.data.i32[0];
1875 return Status::OK;
1876 }
1877
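// Map a stream pixel format to the dataspace used for test streams: JFIF for BLOB (JPEG),
// DEPTH for Y16, ARBITRARY for the RAW formats, and UNKNOWN otherwise.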
1878 Dataspace CameraAidlTest::getDataspace(PixelFormat format) {
1879 switch (format) {
1880 case PixelFormat::BLOB:
1881 return Dataspace::JFIF;
1882 case PixelFormat::Y16:
1883 return Dataspace::DEPTH;
1884 case PixelFormat::RAW16:
1885 case PixelFormat::RAW_OPAQUE:
1886 case PixelFormat::RAW10:
1887 case PixelFormat::RAW12:
1888 return Dataspace::ARBITRARY;
1889 default:
1890 return Dataspace::UNKNOWN;
1891 }
1892 }
1893
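// Fill out a StreamConfiguration for the given streams. BLOB/JFIF (JPEG) streams get the
// provided JPEG buffer size; all other streams use a bufferSize of 0.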
1894 void CameraAidlTest::createStreamConfiguration(std::vector<Stream>& streams,
1895 StreamConfigurationMode configMode,
1896 StreamConfiguration* config,
1897 int32_t jpegBufferSize) {
1898 ASSERT_NE(nullptr, config);
1899
1900 for (auto& stream : streams) {
1901 stream.bufferSize =
1902 (stream.format == PixelFormat::BLOB && stream.dataSpace == Dataspace::JFIF)
1903 ? jpegBufferSize
1904 : 0;
1905 }
1906
1907 // The caller is responsible for filling in a non-zero config->streamConfigCounter after this returns.
1908 config->streams = streams;
1909 config->operationMode = configMode;
1910 config->multiResolutionInputImage = false;
1911 }
1912
1913 void CameraAidlTest::verifyStreamCombination(const std::shared_ptr<ICameraDevice>& device,
1914 const StreamConfiguration& config,
1915 bool expectedStatus) {
1916 if (device != nullptr) {
1917 bool streamCombinationSupported;
1918 ScopedAStatus ret =
1919 device->isStreamCombinationSupported(config, &streamCombinationSupported);
1920 ASSERT_TRUE(ret.isOk());
1921 ASSERT_EQ(expectedStatus, streamCombinationSupported);
1922
1923 int32_t interfaceVersion;
1924 ret = device->getInterfaceVersion(&interfaceVersion);
1925 ASSERT_TRUE(ret.isOk());
1926 bool supportFeatureCombinationQuery =
1927 (interfaceVersion >= CAMERA_DEVICE_API_MINOR_VERSION_3);
1928 if (supportFeatureCombinationQuery) {
1929 ret = device->isStreamCombinationWithSettingsSupported(config,
1930 &streamCombinationSupported);
1931 ASSERT_TRUE(ret.isOk());
1932 ASSERT_EQ(expectedStatus, streamCombinationSupported);
1933 }
1934 }
1935 }
1936
1937 void CameraAidlTest::verifySessionCharacteristics(const CameraMetadata& session_chars,
1938 const CameraMetadata& camera_chars) {
1939 const camera_metadata_t* session_metadata =
1940 reinterpret_cast<const camera_metadata_t*>(session_chars.metadata.data());
1941
1942 const camera_metadata_t* camera_metadata =
1943 reinterpret_cast<const camera_metadata_t*>(camera_chars.metadata.data());
1944
1945 size_t expectedSize = session_chars.metadata.size();
1946 int result = validate_camera_metadata_structure(session_metadata, &expectedSize);
1947 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1948 size_t entryCount = get_camera_metadata_entry_count(session_metadata);
1949 // There should be at least 1 characteristic present:
1950 // SCALER_MAX_DIGITAL_ZOOM must always be available.
1951 // ZOOM_RATIO_RANGE must be available if ZOOM_RATIO is supported.
1952 ASSERT_TRUE(entryCount >= 1);
1953
1954 camera_metadata_ro_entry entry;
1955 int retcode = 0;
1956 float maxDigitalZoom = 1.0;
1957
1958 for (size_t i = 0; i < entryCount; i++) {
1959 retcode = get_camera_metadata_ro_entry(session_metadata, i, &entry);
1960 ASSERT_TRUE(retcode == 0);
1961
1962 std::set<uint32_t> allowed_tags = {ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1963 ANDROID_CONTROL_ZOOM_RATIO_RANGE};
1964
1965 if (contains(allowed_tags, entry.tag)) {
1966 continue;
1967 }
1968
1969 // Other than the ones above, no tags should be allowed apart from vendor tags.
1970 ASSERT_TRUE(entry.tag >= VENDOR_SECTION_START);
1971 }
1972
1973 retcode = find_camera_metadata_ro_entry(session_metadata,
1974 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &entry);
1975 if ((0 == retcode) && (entry.count == 1)) {
1976 maxDigitalZoom = entry.data.f[0];
1977 } else {
1978 ADD_FAILURE() << "Get camera scalerAvailableMaxDigitalZoom failed!";
1979 }
1980
1981 retcode = find_camera_metadata_ro_entry(camera_metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE,
1982 &entry);
1983 bool hasZoomRatioRange = (0 == retcode && entry.count == 2);
1984 if (!hasZoomRatioRange) {
1985 ALOGI("Skipping the rest of the test as ZOOM_RATIO_RANGE is not in camera characteristics");
1986 return;
1987 }
1988
1989 // Session characteristics must contain zoom_ratio_range if camera characteristics has it.
1990 retcode = find_camera_metadata_ro_entry(session_metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE,
1991 &entry);
1992 ASSERT_TRUE(0 == retcode && entry.count == 2);
1993
1994 float minZoomRatio = entry.data.f[0];
1995 float maxZoomRatio = entry.data.f[1];
1996 constexpr float FLOATING_POINT_THRESHOLD = 0.00001f;
1997 if (std::abs(maxDigitalZoom - maxZoomRatio) > FLOATING_POINT_THRESHOLD) {
1998 ADD_FAILURE() << "Difference between maximum digital zoom " << maxDigitalZoom
1999 << " and maximum zoom ratio " << maxZoomRatio
2000 << " is greater than the threshold " << FLOATING_POINT_THRESHOLD << "!";
2001 }
2002 if (minZoomRatio > maxZoomRatio) {
2003 ADD_FAILURE() << "Maximum zoom ratio is less than minimum zoom ratio!";
2004 }
2005 if (minZoomRatio > 1.0f) {
2006 ADD_FAILURE() << "Minimum zoom ratio is more than 1.0!";
2007 }
2008 if (maxZoomRatio < 1.0f) {
2009 ADD_FAILURE() << "Maximum zoom ratio is less than 1.0!";
2010 }
2011 }
2012
2013 std::vector<ConcurrentCameraIdCombination> CameraAidlTest::getConcurrentDeviceCombinations(
2014 std::shared_ptr<ICameraProvider>& provider) {
2015 std::vector<ConcurrentCameraIdCombination> combinations;
2016 ndk::ScopedAStatus ret = provider->getConcurrentCameraIds(&combinations);
2017 if (!ret.isOk()) {
2018 ADD_FAILURE();
2019 }
2020
2021 return combinations;
2022 }
2023
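// Mandatory concurrent streams: a single Y16 stream (capped at 640x480) for depth-only
// cameras, otherwise a YUV stream (capped at 1280x720) plus a JPEG stream (capped at
// 1920x1440), each limited by the camera's maximum available size for that format.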
2024 Status CameraAidlTest::getMandatoryConcurrentStreams(const camera_metadata_t* staticMeta,
2025 std::vector<AvailableStream>* outputStreams) {
2026 if (nullptr == staticMeta || nullptr == outputStreams) {
2027 return Status::ILLEGAL_ARGUMENT;
2028 }
2029
2030 if (isDepthOnly(staticMeta)) {
2031 Size y16MaxSize(640, 480);
2032 Size maxAvailableY16Size;
2033 getMaxOutputSizeForFormat(staticMeta, PixelFormat::Y16, &maxAvailableY16Size);
2034 Size y16ChosenSize = getMinSize(y16MaxSize, maxAvailableY16Size);
2035 AvailableStream y16Stream = {.width = y16ChosenSize.width,
2036 .height = y16ChosenSize.height,
2037 .format = static_cast<int32_t>(PixelFormat::Y16)};
2038 outputStreams->push_back(y16Stream);
2039 return Status::OK;
2040 }
2041
2042 Size yuvMaxSize(1280, 720);
2043 Size jpegMaxSize(1920, 1440);
2044 Size maxAvailableYuvSize;
2045 Size maxAvailableJpegSize;
2046 getMaxOutputSizeForFormat(staticMeta, PixelFormat::YCBCR_420_888, &maxAvailableYuvSize);
2047 getMaxOutputSizeForFormat(staticMeta, PixelFormat::BLOB, &maxAvailableJpegSize);
2048 Size yuvChosenSize = getMinSize(yuvMaxSize, maxAvailableYuvSize);
2049 Size jpegChosenSize = getMinSize(jpegMaxSize, maxAvailableJpegSize);
2050
2051 AvailableStream yuvStream = {.width = yuvChosenSize.width,
2052 .height = yuvChosenSize.height,
2053 .format = static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
2054
2055 AvailableStream jpegStream = {.width = jpegChosenSize.width,
2056 .height = jpegChosenSize.height,
2057 .format = static_cast<int32_t>(PixelFormat::BLOB)};
2058 outputStreams->push_back(yuvStream);
2059 outputStreams->push_back(jpegStream);
2060
2061 return Status::OK;
2062 }
2063
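// A camera is considered depth-only when it does not advertise the BACKWARD_COMPATIBLE
// capability but does advertise DEPTH_OUTPUT with a Depth16 stream configuration.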
2064 bool CameraAidlTest::isDepthOnly(const camera_metadata_t* staticMeta) {
2065 camera_metadata_ro_entry scalerEntry;
2066 camera_metadata_ro_entry depthEntry;
2067
2068 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2069 &scalerEntry);
2070 if (rc == 0) {
2071 for (uint32_t i = 0; i < scalerEntry.count; i++) {
2072 if (scalerEntry.data.u8[i] ==
2073 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) {
2074 return false;
2075 }
2076 }
2077 }
2078
2079 for (uint32_t i = 0; (rc == 0) && (i < scalerEntry.count); i++) {
2080 if (scalerEntry.data.u8[i] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) {
2081 rc = find_camera_metadata_ro_entry(
2082 staticMeta, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, &depthEntry);
2083 size_t idx = 0;
2084 if (rc == 0 && depthEntry.data.i32[idx] == static_cast<int32_t>(PixelFormat::Y16)) {
2085 // only Depth16 format is supported now
2086 return true;
2087 }
2088 break;
2089 }
2090 }
2091
2092 return false;
2093 }
2094
2095 Status CameraAidlTest::getMaxOutputSizeForFormat(const camera_metadata_t* staticMeta,
2096 PixelFormat format, Size* size,
2097 bool maxResolution) {
2098 std::vector<AvailableStream> outputStreams;
2099 if (size == nullptr ||
2100 getAvailableOutputStreams(staticMeta, outputStreams,
2101 /*threshold*/ nullptr, maxResolution) != Status::OK) {
2102 return Status::ILLEGAL_ARGUMENT;
2103 }
2104 Size maxSize;
2105 bool found = false;
2106 for (auto& outputStream : outputStreams) {
2107 if (static_cast<int32_t>(format) == outputStream.format &&
2108 (outputStream.width * outputStream.height > maxSize.width * maxSize.height)) {
2109 maxSize.width = outputStream.width;
2110 maxSize.height = outputStream.height;
2111 found = true;
2112 }
2113 }
2114 if (!found) {
2115 ALOGE("%s: chosen format %d not found", __FUNCTION__, static_cast<int32_t>(format));
2116 return Status::ILLEGAL_ARGUMENT;
2117 }
2118 *size = maxSize;
2119 return Status::OK;
2120 }
2121
2122 Size CameraAidlTest::getMinSize(Size a, Size b) {
2123 if (a.width * a.height < b.width * b.height) {
2124 return a;
2125 }
2126 return b;
2127 }
2128
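// Parse ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, which is encoded as a sequence of
// (inputFormat, outputFormatCount, outputFormat...) tuples.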
2129 Status CameraAidlTest::getZSLInputOutputMap(camera_metadata_t* staticMeta,
2130 std::vector<AvailableZSLInputOutput>& inputOutputMap) {
2131 if (nullptr == staticMeta) {
2132 return Status::ILLEGAL_ARGUMENT;
2133 }
2134
2135 camera_metadata_ro_entry entry;
2136 int rc = find_camera_metadata_ro_entry(
2137 staticMeta, ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, &entry);
2138 if ((0 != rc) || (0 >= entry.count)) {
2139 return Status::ILLEGAL_ARGUMENT;
2140 }
2141
2142 const int32_t* contents = &entry.data.i32[0];
2143 for (size_t i = 0; i < entry.count;) {
2144 int32_t inputFormat = contents[i++];
2145 int32_t length = contents[i++];
2146 for (int32_t j = 0; j < length; j++) {
2147 int32_t outputFormat = contents[i + j];
2148 AvailableZSLInputOutput zslEntry = {inputFormat, outputFormat};
2149 inputOutputMap.push_back(zslEntry);
2150 }
2151 i += length;
2152 }
2153
2154 return Status::OK;
2155 }
2156
2157 Status CameraAidlTest::findLargestSize(const std::vector<AvailableStream>& streamSizes,
2158 int32_t format, AvailableStream& result) {
2159 result = {0, 0, 0};
2160 for (auto& iter : streamSizes) {
2161 if (format == iter.format) {
2162 if ((result.width * result.height) < (iter.width * iter.height)) {
2163 result = iter;
2164 }
2165 }
2166 }
2167
2168 return (result.format == format) ? Status::OK : Status::ILLEGAL_ARGUMENT;
2169 }
2170
2171 void CameraAidlTest::constructFilteredSettings(
2172 const std::shared_ptr<ICameraDeviceSession>& session,
2173 const std::unordered_set<int32_t>& availableKeys, RequestTemplate reqTemplate,
2174 android::hardware::camera::common::V1_0::helper::CameraMetadata* defaultSettings,
2175 android::hardware::camera::common::V1_0::helper::CameraMetadata* filteredSettings) {
2176 ASSERT_NE(defaultSettings, nullptr);
2177 ASSERT_NE(filteredSettings, nullptr);
2178
2179 CameraMetadata req;
2180 auto ret = session->constructDefaultRequestSettings(reqTemplate, &req);
2181 ASSERT_TRUE(ret.isOk());
2182
2183 const camera_metadata_t* metadata =
2184 clone_camera_metadata(reinterpret_cast<const camera_metadata_t*>(req.metadata.data()));
2185 size_t expectedSize = req.metadata.size();
2186 int result = validate_camera_metadata_structure(metadata, &expectedSize);
2187 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
2188
2189 size_t entryCount = get_camera_metadata_entry_count(metadata);
2190 ASSERT_GT(entryCount, 0u);
2191 *defaultSettings = metadata;
2192
2193 const android::hardware::camera::common::V1_0::helper::CameraMetadata& constSettings =
2194 *defaultSettings;
2195 for (const auto& keyIt : availableKeys) {
2196 camera_metadata_ro_entry entry = constSettings.find(keyIt);
2197 if (entry.count > 0) {
2198 filteredSettings->update(entry);
2199 }
2200 }
2201 }
2202
2203 void CameraAidlTest::verifySessionReconfigurationQuery(
2204 const std::shared_ptr<ICameraDeviceSession>& session, camera_metadata* oldSessionParams,
2205 camera_metadata* newSessionParams) {
2206 ASSERT_NE(nullptr, session);
2207 ASSERT_NE(nullptr, oldSessionParams);
2208 ASSERT_NE(nullptr, newSessionParams);
2209
2210 std::vector<uint8_t> oldParams =
2211 std::vector(reinterpret_cast<uint8_t*>(oldSessionParams),
2212 reinterpret_cast<uint8_t*>(oldSessionParams) +
2213 get_camera_metadata_size(oldSessionParams));
2214 CameraMetadata oldMetadata = {oldParams};
2215
2216 std::vector<uint8_t> newParams =
2217 std::vector(reinterpret_cast<uint8_t*>(newSessionParams),
2218 reinterpret_cast<uint8_t*>(newSessionParams) +
2219 get_camera_metadata_size(newSessionParams));
2220 CameraMetadata newMetadata = {newParams};
2221
2222 bool reconfigReq;
2223 ndk::ScopedAStatus ret =
2224 session->isReconfigurationRequired(oldMetadata, newMetadata, &reconfigReq);
2225 ASSERT_TRUE(ret.isOk() || static_cast<Status>(ret.getServiceSpecificError()) ==
2226 Status::OPERATION_NOT_SUPPORTED);
2227 }
2228
2229 Status CameraAidlTest::isConstrainedModeAvailable(camera_metadata_t* staticMeta) {
2230 Status ret = Status::OPERATION_NOT_SUPPORTED;
2231 if (nullptr == staticMeta) {
2232 return Status::ILLEGAL_ARGUMENT;
2233 }
2234
2235 camera_metadata_ro_entry entry;
2236 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2237 &entry);
2238 if (0 != rc) {
2239 return Status::ILLEGAL_ARGUMENT;
2240 }
2241
2242 for (size_t i = 0; i < entry.count; i++) {
2243 if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO ==
2244 entry.data.u8[i]) {
2245 ret = Status::OK;
2246 break;
2247 }
2248 }
2249
2250 return ret;
2251 }
2252
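// Pick the largest advertised high speed video size. Entries of
// ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS are 5-tuples of
// (width, height, fpsMin, fpsMax, batchSizeMax).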
2253 Status CameraAidlTest::pickConstrainedModeSize(camera_metadata_t* staticMeta,
2254 AvailableStream& hfrStream) {
2255 if (nullptr == staticMeta) {
2256 return Status::ILLEGAL_ARGUMENT;
2257 }
2258
2259 camera_metadata_ro_entry entry;
2260 int rc = find_camera_metadata_ro_entry(
2261 staticMeta, ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, &entry);
2262 if (0 != rc) {
2263 return Status::OPERATION_NOT_SUPPORTED;
2264 } else if (0 != (entry.count % 5)) {
2265 return Status::ILLEGAL_ARGUMENT;
2266 }
2267
2268 hfrStream = {0, 0, static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2269 for (size_t i = 0; i < entry.count; i += 5) {
2270 int32_t w = entry.data.i32[i];
2271 int32_t h = entry.data.i32[i + 1];
2272 if ((hfrStream.width * hfrStream.height) < (w * h)) {
2273 hfrStream.width = w;
2274 hfrStream.height = h;
2275 }
2276 }
2277
2278 return Status::OK;
2279 }
2280
2281 void CameraAidlTest::processCaptureRequestInternal(uint64_t bufferUsage,
2282 RequestTemplate reqTemplate,
2283 bool useSecureOnlyCameras) {
2284 std::vector<std::string> cameraDeviceNames =
2285 getCameraDeviceNames(mProvider, useSecureOnlyCameras);
2286 AvailableStream streamThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2287 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2288 int64_t bufferId = 1;
2289 int32_t frameNumber = 1;
2290 CameraMetadata settings;
2291 for (const auto& name : cameraDeviceNames) {
2292 Stream testStream;
2293 std::vector<HalStream> halStreams;
2294 std::shared_ptr<ICameraDeviceSession> session;
2295 std::shared_ptr<DeviceCb> cb;
2296 bool supportsPartialResults = false;
2297 bool useHalBufManager = false;
2298 int32_t partialResultCount = 0;
2299 ASSERT_NO_FATAL_FAILURE(configureSingleStream(
2300 name, mProvider, &streamThreshold, bufferUsage, reqTemplate, &session /*out*/,
2301 &testStream /*out*/, &halStreams /*out*/, &supportsPartialResults /*out*/,
2302 &partialResultCount /*out*/, &useHalBufManager /*out*/, &cb /*out*/));
2303
2304 std::shared_ptr<ResultMetadataQueue> resultQueue;
2305 ::aidl::android::hardware::common::fmq::MQDescriptor<
2306 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2307 descriptor;
2308 ndk::ScopedAStatus ret = session->getCaptureResultMetadataQueue(&descriptor);
2309 ASSERT_TRUE(ret.isOk());
2310
2311 resultQueue = std::make_shared<ResultMetadataQueue>(descriptor);
2312 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2313 ALOGE("%s: HAL returned an empty result metadata fmq,"
2314 " not using it",
2315 __func__);
2316 resultQueue = nullptr;
2317 // Don't use the queue from here onwards.
2318 }
2319
2320 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2321 1, false, supportsPartialResults, partialResultCount, resultQueue);
2322
2323 CameraMetadata req;
2324 ret = session->constructDefaultRequestSettings(reqTemplate, &req);
2325 ASSERT_TRUE(ret.isOk());
2326 settings = req;
2327
2328 overrideRotateAndCrop(&settings);
2329
2330 std::vector<CaptureRequest> requests(1);
2331 CaptureRequest& request = requests[0];
2332 request.frameNumber = frameNumber;
2333 request.fmqSettingsSize = 0;
2334 request.settings = settings;
2335
2336 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
2337 outputBuffers.resize(1);
2338 StreamBuffer& outputBuffer = outputBuffers[0];
2339 if (useHalBufManager) {
2340 outputBuffer = {halStreams[0].id,
2341 /*bufferId*/ 0, NativeHandle(), BufferStatus::OK,
2342 NativeHandle(), NativeHandle()};
2343 } else {
2344 buffer_handle_t handle;
2345 allocateGraphicBuffer(
2346 testStream.width, testStream.height,
2347 /* We don't look at halStreams[0].consumerUsage
2348 * since that is 0 for output streams.
2349 */
2350 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2351 static_cast<uint64_t>(halStreams[0].producerUsage), bufferUsage)),
2352 halStreams[0].overrideFormat, &handle);
2353
2354 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(handle),
2355 BufferStatus::OK, NativeHandle(), NativeHandle()};
2356 }
2357 request.inputBuffer = {-1,
2358 0,
2359 NativeHandle(),
2360 BufferStatus::ERROR,
2361 NativeHandle(),
2362 NativeHandle()}; // Empty Input Buffer
2363
2364 {
2365 std::unique_lock<std::mutex> l(mLock);
2366 mInflightMap.clear();
2367 mInflightMap.insert(std::make_pair(frameNumber, inflightReq));
2368 }
2369
2370 int32_t numRequestProcessed = 0;
2371 std::vector<BufferCache> cachesToRemove;
2372 ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2373 ALOGI("processCaptureRequestInternal: processCaptureRequest returns status: %d:%d",
2374 ret.getExceptionCode(), ret.getServiceSpecificError());
2375
2376 ASSERT_TRUE(ret.isOk());
2377 ASSERT_EQ(numRequestProcessed, 1u);
2378
2379 {
2380 std::unique_lock<std::mutex> l(mLock);
2381 while (!inflightReq->errorCodeValid &&
2382 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2383 auto timeout = std::chrono::system_clock::now() +
2384 std::chrono::seconds(kStreamBufferTimeoutSec);
2385 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2386 }
2387
2388 ASSERT_FALSE(inflightReq->errorCodeValid);
2389 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2390 ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2391
2392 // shutterReadoutTimestamp, if supported, must
2393 // be >= shutterTimestamp + exposureTime,
2394 // and < shutterTimestamp + exposureTime + rollingShutterSkew / 2.
2395 ASSERT_FALSE(inflightReq->collectedResult.isEmpty());
2396
2397 if (mSupportReadoutTimestamp &&
2398 inflightReq->collectedResult.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2399 camera_metadata_entry_t exposureTimeResult =
2400 inflightReq->collectedResult.find(ANDROID_SENSOR_EXPOSURE_TIME);
2401 nsecs_t exposureToReadout =
2402 inflightReq->shutterReadoutTimestamp - inflightReq->shutterTimestamp;
2403 ASSERT_GE(exposureToReadout, exposureTimeResult.data.i64[0]);
2404 if (inflightReq->collectedResult.exists(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW)) {
2405 camera_metadata_entry_t rollingShutterSkew =
2406 inflightReq->collectedResult.find(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
2407 ASSERT_LT(exposureToReadout,
2408 exposureTimeResult.data.i64[0] + rollingShutterSkew.data.i64[0] / 2);
2409 }
2410 }
2411
2412 request.frameNumber++;
2413 // Empty settings should be supported after the first call
2414 // for repeating requests.
2415 request.settings.metadata.clear();
2416 // The buffer has been registered to HAL by bufferId, so per
2417 // API contract we should send a null handle for this buffer
2418 request.outputBuffers[0].buffer = NativeHandle();
2419 mInflightMap.clear();
2420 inflightReq = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2421 partialResultCount, resultQueue);
2422 mInflightMap.insert(std::make_pair(request.frameNumber, inflightReq));
2423 }
2424
2425 ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2426 ALOGI("processCaptureRequestInternal: processCaptureRequest returns status: %d:%d",
2427 ret.getExceptionCode(), ret.getServiceSpecificError());
2428 ASSERT_TRUE(ret.isOk());
2429 ASSERT_EQ(numRequestProcessed, 1u);
2430
2431 {
2432 std::unique_lock<std::mutex> l(mLock);
2433 while (!inflightReq->errorCodeValid &&
2434 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2435 auto timeout = std::chrono::system_clock::now() +
2436 std::chrono::seconds(kStreamBufferTimeoutSec);
2437 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2438 }
2439
2440 ASSERT_FALSE(inflightReq->errorCodeValid);
2441 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2442 ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2443 }
2444
2445 if (useHalBufManager) {
2446 verifyBuffersReturned(session, testStream.id, cb);
2447 }
2448
2449 ret = session->close();
2450 ASSERT_TRUE(ret.isOk());
2451 }
2452 }
2453
2454 void CameraAidlTest::configureStreamUseCaseInternal(const AvailableStream& threshold) {
2455 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2456
2457 for (const auto& name : cameraDeviceNames) {
2458 CameraMetadata meta;
2459 std::shared_ptr<ICameraDevice> cameraDevice;
2460
2461 ASSERT_NO_FATAL_FAILURE(openEmptyDeviceSession(name, mProvider, &mSession /*out*/,
2462 &meta /*out*/, &cameraDevice /*out*/));
2463
2464 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
2465 // Skip cameras that are depth only, that don't support the stream use case capability, or (for RAW16) don't support the cropped RAW use case.
2466 if (isDepthOnly(staticMeta) || !supportsStreamUseCaseCap(staticMeta) ||
2467 (threshold.format == static_cast<int32_t>(PixelFormat::RAW16) &&
2468 !supportsCroppedRawUseCase(staticMeta))) {
2469 ndk::ScopedAStatus ret = mSession->close();
2470 mSession = nullptr;
2471 ASSERT_TRUE(ret.isOk());
2472 continue;
2473 }
2474
2475 std::vector<AvailableStream> outputPreviewStreams;
2476
2477 ASSERT_EQ(Status::OK,
2478 getAvailableOutputStreams(staticMeta, outputPreviewStreams, &threshold));
2479 ASSERT_NE(0u, outputPreviewStreams.size());
2480
2481 // Combine valid and invalid stream use cases
2482 std::vector<int64_t> testedUseCases;
2483 testedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW + 1);
2484
2485 std::vector<int64_t> supportedUseCases;
2486 if (threshold.format == static_cast<int32_t>(PixelFormat::RAW16)) {
2487 // If the format is RAW16, only the CROPPED_RAW and DEFAULT use cases are
2488 // supported; all others are unsupported for this format.
2489 testedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW);
2490 supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW);
2491 supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
2492 } else {
2493 camera_metadata_ro_entry entry;
2494 testedUseCases.insert(testedUseCases.end(), kMandatoryUseCases.begin(),
2495 kMandatoryUseCases.end());
2496 auto retcode = find_camera_metadata_ro_entry(
2497 staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
2498 if ((0 == retcode) && (entry.count > 0)) {
2499 supportedUseCases.insert(supportedUseCases.end(), entry.data.i64,
2500 entry.data.i64 + entry.count);
2501 } else {
2502 supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
2503 }
2504 }
2505
2506 std::vector<Stream> streams(1);
2507 streams[0] = {
2508 0,
2509 StreamType::OUTPUT,
2510 outputPreviewStreams[0].width,
2511 outputPreviewStreams[0].height,
2512 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2513 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
2514 GRALLOC1_CONSUMER_USAGE_CPU_READ),
2515 Dataspace::UNKNOWN,
2516 StreamRotation::ROTATION_0,
2517 std::string(),
2518 0,
2519 -1,
2520 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2521 RequestAvailableDynamicRangeProfilesMap::
2522 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
2523 ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
2524 static_cast<int>(
2525 RequestAvailableColorSpaceProfilesMap::
2526 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
2527
2528 int32_t streamConfigCounter = 0;
2529 CameraMetadata req;
2530 StreamConfiguration config;
2531 RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
2532 ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req);
2533 ASSERT_TRUE(ret.isOk());
2534 config.sessionParams = req;
2535
2536 for (int64_t useCase : testedUseCases) {
2537 bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(),
2538 useCase) != supportedUseCases.end();
2539
2540 streams[0].useCase = static_cast<
2541 aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>(
2542 useCase);
2543 config.streams = streams;
2544 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
2545 config.streamConfigCounter = streamConfigCounter;
2546 config.multiResolutionInputImage = false;
2547
2548 bool combSupported;
2549 ret = cameraDevice->isStreamCombinationSupported(config, &combSupported);
2550 if (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) ==
2551 ret.getServiceSpecificError()) {
2552 continue;
2553 }
2554
2555 ASSERT_TRUE(ret.isOk());
2556 ASSERT_EQ(combSupported, useCaseSupported);
2557
2558 std::vector<HalStream> halStreams;
2559 ret = mSession->configureStreams(config, &halStreams);
2560 ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError());
2561 if (useCaseSupported) {
2562 ASSERT_TRUE(ret.isOk());
2563 ASSERT_EQ(1u, halStreams.size());
2564 } else {
2565 ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT),
2566 ret.getServiceSpecificError());
2567 }
2568 }
2569 ret = mSession->close();
2570 mSession = nullptr;
2571 ASSERT_TRUE(ret.isOk());
2572 }
2574 }
2575
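// Configure streams through configureStreamsV2() when session-level HAL buffer management is
// available (interface version >= 3), falling back to configureStreams() otherwise, and
// record which stream ids end up being HAL-buffer-managed.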
2576 ndk::ScopedAStatus CameraAidlTest::configureStreams(std::shared_ptr<ICameraDeviceSession>& session,
2577 const StreamConfiguration& config,
2578 BufferManagerType bufferManagerType,
2579 std::set<int32_t>* halBufManagedStreamIds,
2580 std::vector<HalStream>* halStreams) {
2581 auto ret = ndk::ScopedAStatus::ok();
2582 ConfigureStreamsRet aidl_return;
2583 int32_t interfaceVersion = -1;
2584 ret = session->getInterfaceVersion(&interfaceVersion);
2585 if (!ret.isOk()) {
2586 return ret;
2587 }
2588
2589 if (bufferManagerType == BufferManagerType::SESSION && interfaceVersion >= 3) {
2590 ret = session->configureStreamsV2(config, &aidl_return);
2591 } else {
2592 ret = session->configureStreams(config, halStreams);
2593 }
2594 if (!ret.isOk()) {
2595 return ret;
2596 }
2597 if (bufferManagerType == BufferManagerType::SESSION && interfaceVersion >= 3) {
2598 *halStreams = std::move(aidl_return.halStreams);
2599 }
2600 for (const auto& halStream : *halStreams) {
2601 if ((bufferManagerType == BufferManagerType::SESSION && halStream.enableHalBufferManager) ||
2602 bufferManagerType == BufferManagerType::HAL) {
2603 halBufManagedStreamIds->insert(halStream.id);
2604 }
2605 }
2606 return ndk::ScopedAStatus::ok();
2607 }
2608
2609 void CameraAidlTest::configureSingleStream(
2610 const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
2611 const AvailableStream* previewThreshold, uint64_t bufferUsage, RequestTemplate reqTemplate,
2612 std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream,
2613 std::vector<HalStream>* halStreams, bool* supportsPartialResults,
2614 int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb,
2615 uint32_t streamConfigCounter) {
2616 ASSERT_NE(nullptr, session);
2617 ASSERT_NE(nullptr, previewStream);
2618 ASSERT_NE(nullptr, halStreams);
2619 ASSERT_NE(nullptr, supportsPartialResults);
2620 ASSERT_NE(nullptr, partialResultCount);
2621 ASSERT_NE(nullptr, useHalBufManager);
2622 ASSERT_NE(nullptr, cb);
2623
2624 std::vector<AvailableStream> outputPreviewStreams;
2625 std::shared_ptr<ICameraDevice> device;
2626 ALOGI("configureStreams: Testing camera device %s", name.c_str());
2627
2628 ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2629 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2630 ret.getServiceSpecificError());
2631 ASSERT_TRUE(ret.isOk());
2632 ASSERT_NE(device, nullptr);
2633
2634 camera_metadata_t* staticMeta;
2635 CameraMetadata chars;
2636 ret = device->getCameraCharacteristics(&chars);
2637 ASSERT_TRUE(ret.isOk());
2638 staticMeta = clone_camera_metadata(
2639 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data()));
2640 ASSERT_NE(nullptr, staticMeta);
2641
2642 size_t expectedSize = chars.metadata.size();
2643 ALOGE("validate_camera_metadata_structure: %d",
2644 validate_camera_metadata_structure(staticMeta, &expectedSize));
2645
2646 camera_metadata_ro_entry entry;
2647 auto status =
2648 find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
2649 if ((0 == status) && (entry.count > 0)) {
2650 *partialResultCount = entry.data.i32[0];
2651 *supportsPartialResults = (*partialResultCount > 1);
2652 }
2653
2654 *cb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
2655
2656 ret = device->open(*cb, session);
2657 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
2658 ret.getServiceSpecificError());
2659 ASSERT_TRUE(ret.isOk());
2660 ASSERT_NE(*session, nullptr);
2661
2662 BufferManagerType bufferManagerType = BufferManagerType::FRAMEWORK;
2663 status = find_camera_metadata_ro_entry(
2664 staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
2665 if ((0 == status) && (entry.count == 1)) {
2666 if (entry.data.u8[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5) {
2667 bufferManagerType = BufferManagerType::HAL;
2668 } else if (entry.data.u8[0] ==
2669 ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE) {
2670 bufferManagerType = BufferManagerType::SESSION;
2671 }
2672 }
2673
2674 outputPreviewStreams.clear();
2675 auto rc = getAvailableOutputStreams(staticMeta, outputPreviewStreams, previewThreshold);
2676
2677 int32_t jpegBufferSize = 0;
2678 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
2679 ASSERT_NE(0u, jpegBufferSize);
2680
2681 ASSERT_EQ(Status::OK, rc);
2682 ASSERT_FALSE(outputPreviewStreams.empty());
2683
2684 Dataspace dataspace = Dataspace::UNKNOWN;
2685 switch (static_cast<PixelFormat>(outputPreviewStreams[0].format)) {
2686 case PixelFormat::Y16:
2687 dataspace = Dataspace::DEPTH;
2688 break;
2689 default:
2690 dataspace = Dataspace::UNKNOWN;
2691 }
2692
2693 std::vector<Stream> streams(1);
2694 streams[0] = {0,
2695 StreamType::OUTPUT,
2696 outputPreviewStreams[0].width,
2697 outputPreviewStreams[0].height,
2698 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2699 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(bufferUsage),
2700 dataspace,
2701 StreamRotation::ROTATION_0,
2702 "",
2703 0,
2704 /*groupId*/ -1,
2705 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2706 RequestAvailableDynamicRangeProfilesMap::
2707 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
2708 ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
2709 static_cast<int>(
2710 RequestAvailableColorSpaceProfilesMap::
2711 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
2712
2713 StreamConfiguration config;
2714 config.streams = streams;
2715 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2716 jpegBufferSize);
2717
2718 CameraMetadata sessionParams;
2719 ret = (*session)->constructDefaultRequestSettings(reqTemplate, &sessionParams);
2720 ASSERT_TRUE(ret.isOk());
2721 config.sessionParams = sessionParams;
2722 config.streamConfigCounter = (int32_t)streamConfigCounter;
2723
2724 bool supported = false;
2725 ret = device->isStreamCombinationSupported(config, &supported);
2726 ASSERT_TRUE(ret.isOk());
2727 ASSERT_EQ(supported, true);
2728
2729 std::vector<HalStream> halConfigs;
2730 std::set<int32_t> halBufManagedStreamIds;
2731 ret = configureStreams(*session, config, bufferManagerType, &halBufManagedStreamIds,
2732 &halConfigs);
2733 ALOGI("configureStreams returns status: %d:%d", ret.getExceptionCode(),
2734 ret.getServiceSpecificError());
2735 ASSERT_TRUE(ret.isOk());
2736 ASSERT_EQ(1u, halConfigs.size());
2737 halStreams->clear();
2738 halStreams->push_back(halConfigs[0]);
2739 *useHalBufManager = halBufManagedStreamIds.size() != 0;
2740 if (*useHalBufManager) {
2741 std::vector<Stream> ss(1);
2742 std::vector<HalStream> hs(1);
2743 ss[0] = config.streams[0];
2744 hs[0] = halConfigs[0];
2745 (*cb)->setCurrentStreamConfig(ss, hs);
2746 }
2747
2748 *previewStream = config.streams[0];
2749 ASSERT_TRUE(ret.isOk());
2750 }
2751
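// If the default request template leaves ANDROID_SCALER_ROTATE_AND_CROP in AUTO mode,
// rewrite it to NONE so that captured results are not affected by automatic
// rotate-and-crop during verification.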
2752 void CameraAidlTest::overrideRotateAndCrop(CameraMetadata* settings) {
2753 if (settings == nullptr) {
2754 return;
2755 }
2756
2757 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta =
2758 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(settings->metadata.data()));
2759 auto entry = requestMeta.find(ANDROID_SCALER_ROTATE_AND_CROP);
2760 if ((entry.count > 0) && (entry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO)) {
2761 uint8_t disableRotateAndCrop = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
2762 requestMeta.update(ANDROID_SCALER_ROTATE_AND_CROP, &disableRotateAndCrop, 1);
2763 settings->metadata.clear();
2764 camera_metadata_t* metaBuffer = requestMeta.release();
2765 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2766 settings->metadata =
2767 std::vector(rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2768 }
2769 }
2770
2771 void CameraAidlTest::verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session,
2772 int32_t streamId, const std::shared_ptr<DeviceCb>& cb,
2773 uint32_t streamConfigCounter) {
2774 ASSERT_NE(nullptr, session);
2775
2776 std::vector<int32_t> streamIds(1);
2777 streamIds[0] = streamId;
2778 session->signalStreamFlush(streamIds, /*streamConfigCounter*/ streamConfigCounter);
2779 cb->waitForBuffersReturned();
2780 }
2781
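// Shared body for the preview stabilization timing tests. With stabilization off, the
// measured (buffer ready - shutter) lag is recorded into |cameraDeviceToTimeLag|; with
// stabilization on, the lag is required to stay within the recorded value plus one
// frame duration.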
2782 void CameraAidlTest::processPreviewStabilizationCaptureRequestInternal(
2783 bool previewStabilizationOn,
2784 // Used as output when preview stabilization is off, and as input when it's on.
2785 std::unordered_map<std::string, nsecs_t>& cameraDeviceToTimeLag) {
2786 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2787 AvailableStream streamThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2788 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2789 int64_t bufferId = 1;
2790 int32_t frameNumber = 1;
2791 std::vector<uint8_t> settings;
2792
2793 for (const auto& name : cameraDeviceNames) {
2794 if (!supportsPreviewStabilization(name, mProvider)) {
2795 ALOGI(" %s Camera device %s doesn't support preview stabilization, skipping", __func__,
2796 name.c_str());
2797 continue;
2798 }
2799
2800 Stream testStream;
2801 std::vector<HalStream> halStreams;
2802 std::shared_ptr<ICameraDeviceSession> session;
2803 std::shared_ptr<DeviceCb> cb;
2804 bool supportsPartialResults = false;
2805 bool useHalBufManager = false;
2806 int32_t partialResultCount = 0;
2807 ASSERT_NO_FATAL_FAILURE(configureSingleStream(
2808 name, mProvider, &streamThreshold, GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
2809 RequestTemplate::PREVIEW, &session /*out*/, &testStream /*out*/,
2810 &halStreams /*out*/, &supportsPartialResults /*out*/, &partialResultCount /*out*/,
2811 &useHalBufManager /*out*/, &cb /*out*/));
2812
2813 ::aidl::android::hardware::common::fmq::MQDescriptor<
2814 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2815 descriptor;
2816 ndk::ScopedAStatus resultQueueRet = session->getCaptureResultMetadataQueue(&descriptor);
2817 ASSERT_TRUE(resultQueueRet.isOk());
2818
2819 std::shared_ptr<ResultMetadataQueue> resultQueue =
2820 std::make_shared<ResultMetadataQueue>(descriptor);
2821 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2822 ALOGE("%s: HAL returns empty result metadata fmq,"
2823 " not use it",
2824 __func__);
2825 resultQueue = nullptr;
2826 // Don't use the queue onwards.
2827 }
2828
2829 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2830 1, false, supportsPartialResults, partialResultCount, resultQueue);
2831
2832 CameraMetadata defaultMetadata;
2833 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
2834 ndk::ScopedAStatus ret = session->constructDefaultRequestSettings(RequestTemplate::PREVIEW,
2835 &defaultMetadata);
2836 ASSERT_TRUE(ret.isOk());
2837
2838 const camera_metadata_t* metadata =
2839 reinterpret_cast<const camera_metadata_t*>(defaultMetadata.metadata.data());
2840 defaultSettings = metadata;
2841 android::status_t metadataRet = ::android::OK;
2842 uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
2843 if (previewStabilizationOn) {
2844 videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION;
2845 metadataRet = defaultSettings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
2846 &videoStabilizationMode, 1);
2847 } else {
2848 metadataRet = defaultSettings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
2849 &videoStabilizationMode, 1);
2850 }
2851 ASSERT_EQ(metadataRet, ::android::OK);
2852
2853 camera_metadata_t* releasedMetadata = defaultSettings.release();
2854 uint8_t* rawMetadata = reinterpret_cast<uint8_t*>(releasedMetadata);
2855
2856 buffer_handle_t buffer_handle;
2857
2858 std::vector<CaptureRequest> requests(1);
2859 CaptureRequest& request = requests[0];
2860 request.frameNumber = frameNumber;
2861 request.fmqSettingsSize = 0;
2862 request.settings.metadata =
2863 std::vector(rawMetadata, rawMetadata + get_camera_metadata_size(releasedMetadata));
2864 overrideRotateAndCrop(&request.settings);
2865 request.outputBuffers = std::vector<StreamBuffer>(1);
2866 StreamBuffer& outputBuffer = request.outputBuffers[0];
2867
2868 if (useHalBufManager) {
2869 outputBuffer = {halStreams[0].id,
2870 /*bufferId*/ 0, NativeHandle(), BufferStatus::OK,
2871 NativeHandle(), NativeHandle()};
2872 } else {
2873 allocateGraphicBuffer(testStream.width, testStream.height,
2874 /* We don't look at halStreams[0].consumerUsage
2875 * since that is 0 for output streams
2876 */
2877 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
2878 static_cast<uint64_t>(halStreams[0].producerUsage),
2879 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER)),
2880 halStreams[0].overrideFormat, &buffer_handle);
2881 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2882 BufferStatus::OK, NativeHandle(), NativeHandle()};
2883 }
2884 request.inputBuffer = {
2885 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2886
2887 {
2888 std::unique_lock<std::mutex> l(mLock);
2889 mInflightMap.clear();
2890 mInflightMap.insert(std::make_pair(frameNumber, inflightReq));
2891 }
2892
2893 int32_t numRequestProcessed = 0;
2894 std::vector<BufferCache> cachesToRemove;
2895 ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2896 ASSERT_TRUE(ret.isOk());
2897 ASSERT_EQ(numRequestProcessed, 1u);
2898
2899 {
2900 std::unique_lock<std::mutex> l(mLock);
2901 while (!inflightReq->errorCodeValid &&
2902 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2903 auto timeout = std::chrono::system_clock::now() +
2904 std::chrono::seconds(kStreamBufferTimeoutSec);
2905 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2906 }
2907 waitForReleaseFence(inflightReq->resultOutputBuffers);
2908
2909 ASSERT_FALSE(inflightReq->errorCodeValid);
2910 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2911 ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2912 nsecs_t captureTimestamp = mSupportReadoutTimestamp
2913 ? inflightReq->shutterReadoutTimestamp
2914 : inflightReq->shutterTimestamp;
2915
2916 if (previewStabilizationOn) {
2917 // Here we collect the time difference between the buffer ready
2918 // timestamp - notify timestamp.
2919 // timeLag = buffer ready timestamp - notify timestamp.
2920 // timeLag(previewStabilization) must be <=
2921 // timeLag(stabilization off) + 1 frame duration.
2922 auto it = cameraDeviceToTimeLag.find(name);
2923 camera_metadata_entry e;
2924 e = inflightReq->collectedResult.find(ANDROID_SENSOR_FRAME_DURATION);
2925 ASSERT_TRUE(e.count > 0);
2926 nsecs_t frameDuration = e.data.i64[0];
2927 ASSERT_TRUE(it != cameraDeviceToTimeLag.end());
2928
2929 nsecs_t previewStabOnLagTime =
2930 inflightReq->resultOutputBuffers[0].timeStamp - captureTimestamp;
2931 ASSERT_TRUE(previewStabOnLagTime <= (it->second + frameDuration));
2932 } else {
2933 // Fill in the buffer ready timestamp - notify timestamp;
2934 cameraDeviceToTimeLag[std::string(name)] =
2935 inflightReq->resultOutputBuffers[0].timeStamp - captureTimestamp;
2936 }
2937 }
2938
2939 if (useHalBufManager) {
2940 verifyBuffersReturned(session, testStream.id, cb);
2941 }
2942
2943 ret = session->close();
2944 ASSERT_TRUE(ret.isOk());
2945 }
2946 }
2947
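// Returns true if ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES of camera |name|
// includes PREVIEW_STABILIZATION.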
2948 bool CameraAidlTest::supportsPreviewStabilization(
2949 const std::string& name, const std::shared_ptr<ICameraProvider>& provider) {
2950 std::shared_ptr<ICameraDevice> device;
2951 ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2952 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2953 ret.getServiceSpecificError());
2954 if (!ret.isOk() || device == nullptr) {
2955 ADD_FAILURE() << "Failed to get camera device interface for " << name;
return false;
2956 }
2957
2958 CameraMetadata metadata;
2959 ret = device->getCameraCharacteristics(&metadata);
2960 camera_metadata_t* staticMeta = clone_camera_metadata(
2961 reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
2962 if (!(ret.isOk())) {
2963 ADD_FAILURE() << "Failed to get camera characteristics for " << name;
return false;
2964 }
2965 // Go through the characteristics and see if video stabilization modes have
2966 // preview stabilization
2967 camera_metadata_ro_entry entry;
2968
2969 int retcode = find_camera_metadata_ro_entry(
2970 staticMeta, ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, &entry);
2971 if ((0 == retcode) && (entry.count > 0)) {
2972 for (size_t i = 0; i < entry.count; i++) {
2973 if (entry.data.u8[i] ==
2974 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION) {
2975 return true;
2976 }
2977 }
2978 }
2979 return false;
2980 }
2981
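// Like configureSingleStream(), but configures one preview output stream per physical
// camera id in |physicalIds|, which is used by the logical multi-camera test cases.
// When |allowUnsupport| is true and the stream combination is rejected, the session is
// closed and returned as null instead of failing the test.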
2982 void CameraAidlTest::configurePreviewStreams(
2983 const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
2984 const AvailableStream* previewThreshold, const std::unordered_set<std::string>& physicalIds,
2985 std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream,
2986 std::vector<HalStream>* halStreams, bool* supportsPartialResults,
2987 int32_t* partialResultCount, std::set<int32_t>* halBufManagedStreamIds,
2988 std::shared_ptr<DeviceCb>* cb, int32_t streamConfigCounter, bool allowUnsupport) {
2989 ASSERT_NE(nullptr, session);
2990 ASSERT_NE(nullptr, halStreams);
2991 ASSERT_NE(nullptr, previewStream);
2992 ASSERT_NE(nullptr, supportsPartialResults);
2993 ASSERT_NE(nullptr, partialResultCount);
2994 ASSERT_NE(nullptr, halBufManagedStreamIds);
2995 ASSERT_NE(nullptr, cb);
2996
2997 ASSERT_FALSE(physicalIds.empty());
2998
2999 std::vector<AvailableStream> outputPreviewStreams;
3000 std::shared_ptr<ICameraDevice> device;
3001 ALOGI("configureStreams: Testing camera device %s", name.c_str());
3002
3003 ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
3004 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
3005 ret.getServiceSpecificError());
3006 ASSERT_TRUE(ret.isOk());
3007 ASSERT_NE(device, nullptr);
3008
3009 CameraMetadata meta;
3010 ret = device->getCameraCharacteristics(&meta);
3011 ASSERT_TRUE(ret.isOk());
3012 camera_metadata_t* staticMeta =
3013 clone_camera_metadata(reinterpret_cast<const camera_metadata_t*>(meta.metadata.data()));
3014 ASSERT_NE(nullptr, staticMeta);
3015
3016 camera_metadata_ro_entry entry;
3017 auto status =
3018 find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
3019 if ((0 == status) && (entry.count > 0)) {
3020 *partialResultCount = entry.data.i32[0];
3021 *supportsPartialResults = (*partialResultCount > 1);
3022 }
3023
3024 *cb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
3025 ret = device->open(*cb, session);
3026 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
3027 ret.getServiceSpecificError());
3028 ASSERT_TRUE(ret.isOk());
3029 ASSERT_NE(*session, nullptr);
3030
3031 BufferManagerType bufferManagerType = BufferManagerType::FRAMEWORK;
3032 status = find_camera_metadata_ro_entry(
3033 staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
3034 if ((0 == status) && (entry.count == 1)) {
3035 if (entry.data.u8[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5) {
3036 bufferManagerType = BufferManagerType::HAL;
3037 } else if (entry.data.u8[0] ==
3038 ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE) {
3039 bufferManagerType = BufferManagerType::SESSION;
3040 }
3041 }
3042
3043 outputPreviewStreams.clear();
3044 Status rc = getAvailableOutputStreams(staticMeta, outputPreviewStreams, previewThreshold);
3045
3046 ASSERT_EQ(Status::OK, rc);
3047 ASSERT_FALSE(outputPreviewStreams.empty());
3048
3049 std::vector<Stream> streams(physicalIds.size());
3050 int32_t streamId = 0;
3051 for (auto const& physicalId : physicalIds) {
3052 streams[streamId] = {
3053 streamId,
3054 StreamType::OUTPUT,
3055 outputPreviewStreams[0].width,
3056 outputPreviewStreams[0].height,
3057 static_cast<PixelFormat>(outputPreviewStreams[0].format),
3058 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
3059 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
3060 Dataspace::UNKNOWN,
3061 StreamRotation::ROTATION_0,
3062 physicalId,
3063 0,
3064 -1,
3065 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3066 RequestAvailableDynamicRangeProfilesMap::
3067 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
3068 ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
3069 static_cast<int>(
3070 RequestAvailableColorSpaceProfilesMap::
3071 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
3072 streamId++;
3073 }
3074
3075 StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE, CameraMetadata()};
3076
3077 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
3078 ret = (*session)->constructDefaultRequestSettings(reqTemplate, &config.sessionParams);
3079 ASSERT_TRUE(ret.isOk());
3080
3081 bool supported = false;
3082 ret = device->isStreamCombinationSupported(config, &supported);
3083 ASSERT_TRUE(ret.isOk());
3084 if (allowUnsupport && !supported) {
3085 // stream combination not supported. return null session
3086 ret = (*session)->close();
3087 ASSERT_TRUE(ret.isOk());
3088 *session = nullptr;
3089 return;
3090 }
3091 ASSERT_TRUE(supported) << "Stream combination must be supported.";
3092
3093 config.streamConfigCounter = streamConfigCounter;
3094 std::vector<HalStream> halConfigs;
3095 ret = configureStreams(*session, config, bufferManagerType, halBufManagedStreamIds,
3096 &halConfigs);
3097
3098 ASSERT_TRUE(ret.isOk());
3099 ASSERT_EQ(physicalIds.size(), halConfigs.size());
3100 *halStreams = halConfigs;
3101 if (halBufManagedStreamIds->size() != 0) {
3102 // Only include the streams that are HAL buffer managed
3103 std::vector<Stream> ss;
3104 std::vector<HalStream> hs;
3105 for (size_t i = 0; i < physicalIds.size(); i++) {
3106 if (contains(*halBufManagedStreamIds, halConfigs[i].id)) {
3107 ss.emplace_back(streams[i]);
3108 hs.emplace_back(halConfigs[i]);
3109 }
3110 }
3111 (*cb)->setCurrentStreamConfig(ss, hs);
3112 }
3113 *previewStream = streams[0];
3114 ASSERT_TRUE(ret.isOk());
3115 }
3116
3117 void CameraAidlTest::verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session,
3118 const std::vector<int32_t>& streamIds,
3119 const std::shared_ptr<DeviceCb>& cb,
3120 uint32_t streamConfigCounter) {
3121 ndk::ScopedAStatus ret =
3122 session->signalStreamFlush(streamIds, /*streamConfigCounter*/ streamConfigCounter);
3123 ASSERT_TRUE(ret.isOk());
3124 cb->waitForBuffersReturned();
3125 }
3126
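// Configures a single output stream of |format|, optionally at the maximum supported
// resolution and with explicit dynamic range and color space profiles, and returns the
// opened session together with the resulting HAL stream configuration.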
3127 void CameraAidlTest::configureStreams(
3128 const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
3129 PixelFormat format, std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream,
3130 std::vector<HalStream>* halStreams, bool* supportsPartialResults,
3131 int32_t* partialResultCount, std::set<int32_t>* halBufManagedStreamIds,
3132 std::shared_ptr<DeviceCb>* outCb, uint32_t streamConfigCounter, bool maxResolution,
3133 RequestAvailableDynamicRangeProfilesMap dynamicRangeProf,
3134 RequestAvailableColorSpaceProfilesMap colorSpaceProf) {
3135 ASSERT_NE(nullptr, session);
3136 ASSERT_NE(nullptr, halStreams);
3137 ASSERT_NE(nullptr, previewStream);
3138 ASSERT_NE(nullptr, supportsPartialResults);
3139 ASSERT_NE(nullptr, partialResultCount);
3140 ASSERT_NE(nullptr, halBufManagedStreamIds);
3141 ASSERT_NE(nullptr, outCb);
3142
3143 ALOGI("configureStreams: Testing camera device %s", name.c_str());
3144
3145 std::vector<AvailableStream> outputStreams;
3146 std::shared_ptr<ICameraDevice> device;
3147
3148 ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
3149 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
3150 ret.getServiceSpecificError());
3151 ASSERT_TRUE(ret.isOk());
3152 ASSERT_NE(device, nullptr);
3153
3154 CameraMetadata metadata;
3155 camera_metadata_t* staticMeta;
3156 ret = device->getCameraCharacteristics(&metadata);
3157 ASSERT_TRUE(ret.isOk());
3158 staticMeta = clone_camera_metadata(
3159 reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
3160 ASSERT_NE(staticMeta, nullptr);
3161
3162 camera_metadata_ro_entry entry;
3163 auto status =
3164 find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
3165 if ((0 == status) && (entry.count > 0)) {
3166 *partialResultCount = entry.data.i32[0];
3167 *supportsPartialResults = (*partialResultCount > 1);
3168 }
3169
3170 *outCb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
3171 ret = device->open(*outCb, session);
3172 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
3173 ret.getServiceSpecificError());
3174 ASSERT_TRUE(ret.isOk());
3175 ASSERT_NE(*session, nullptr);
3176
3177 BufferManagerType bufferManagerType = BufferManagerType::FRAMEWORK;
3178 status = find_camera_metadata_ro_entry(
3179 staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
3180 if ((0 == status) && (entry.count == 1)) {
3181 if (entry.data.u8[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5) {
3182 bufferManagerType = BufferManagerType::HAL;
3183 } else if (entry.data.u8[0] ==
3184 ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE) {
3185 bufferManagerType = BufferManagerType::SESSION;
3186 }
3187 }
3188
3189 outputStreams.clear();
3190 Size maxSize;
3191 if (maxResolution) {
3192 auto rc = getMaxOutputSizeForFormat(staticMeta, format, &maxSize, maxResolution);
3193 ASSERT_EQ(Status::OK, rc);
3194 } else {
3195 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
3196 static_cast<int32_t>(format)};
3197 auto rc = getAvailableOutputStreams(staticMeta, outputStreams, &previewThreshold);
3198
3199 ASSERT_EQ(Status::OK, rc);
3200 ASSERT_FALSE(outputStreams.empty());
3201 maxSize.width = outputStreams[0].width;
3202 maxSize.height = outputStreams[0].height;
3203 }
3204
3206 std::vector<Stream> streams(1);
3207 streams[0] = {0,
3208 StreamType::OUTPUT,
3209 maxSize.width,
3210 maxSize.height,
3211 format,
3212 previewStream->usage,
3213 previewStream->dataSpace,
3214 StreamRotation::ROTATION_0,
3215 "",
3216 0,
3217 -1,
3218 {maxResolution ? SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION
3219 : SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3220 dynamicRangeProf,
3221 ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
3222 static_cast<int>(colorSpaceProf)};
3223
3224 StreamConfiguration config;
3225 config.streams = streams;
3226 config.operationMode = StreamConfigurationMode::NORMAL_MODE;
3227 config.streamConfigCounter = streamConfigCounter;
3228 config.multiResolutionInputImage = false;
3229 CameraMetadata req;
3230 RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
3231 ret = (*session)->constructDefaultRequestSettings(reqTemplate, &req);
3232 ASSERT_TRUE(ret.isOk());
3233 config.sessionParams = req;
3234
3235 bool supported = false;
3236 ret = device->isStreamCombinationSupported(config, &supported);
3237 ASSERT_TRUE(ret.isOk());
3238 ASSERT_EQ(supported, true);
3239
3240 ret = configureStreams(*session, config, bufferManagerType, halBufManagedStreamIds, halStreams);
3241
3242 ASSERT_TRUE(ret.isOk());
3243
3244 if (halBufManagedStreamIds->size() != 0) {
3245 std::vector<Stream> ss(1);
3246 std::vector<HalStream> hs(1);
3247 ss[0] = streams[0];
3248 hs[0] = (*halStreams)[0];
3249 (*outCb)->setCurrentStreamConfig(ss, hs);
3250 }
3251
3252 *previewStream = streams[0];
3253 ASSERT_TRUE(ret.isOk());
3254 }
3255
3256 bool CameraAidlTest::is10BitDynamicRangeCapable(const camera_metadata_t* staticMeta) {
3257 camera_metadata_ro_entry scalerEntry;
3258 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3259 &scalerEntry);
3260 if (rc == 0) {
3261 for (uint32_t i = 0; i < scalerEntry.count; i++) {
3262 if (scalerEntry.data.u8[i] ==
3263 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) {
3264 return true;
3265 }
3266 }
3267 }
3268 return false;
3269 }
3270
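// Collects the 10-bit profiles listed in ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP.
// The entries are reported in groups of three; only the leading profile value of each
// group is collected here. Devices advertising 10-bit output must include at least HLG10.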
3271 void CameraAidlTest::get10BitDynamicRangeProfiles(
3272 const camera_metadata_t* staticMeta,
3273 std::vector<RequestAvailableDynamicRangeProfilesMap>* profiles) {
3274 ASSERT_NE(nullptr, staticMeta);
3275 ASSERT_NE(nullptr, profiles);
3276 camera_metadata_ro_entry entry;
3277 std::unordered_set<int64_t> entries;
3278 int rc = find_camera_metadata_ro_entry(
3279 staticMeta, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP, &entry);
3280 ASSERT_EQ(rc, 0);
3281 ASSERT_TRUE(entry.count > 0);
3282 ASSERT_EQ(entry.count % 3, 0);
3283
3284 for (uint32_t i = 0; i < entry.count; i += 3) {
3285 ASSERT_NE(entry.data.i64[i], ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
3286 ASSERT_EQ(entries.find(entry.data.i64[i]), entries.end());
3287 entries.insert(static_cast<int64_t>(entry.data.i64[i]));
3288 profiles->emplace_back(
3289 static_cast<RequestAvailableDynamicRangeProfilesMap>(entry.data.i64[i]));
3290 }
3291
3292 if (!entries.empty()) {
3293 ASSERT_NE(entries.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10),
3294 entries.end());
3295 }
3296 }
3297
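// Checks the gralloc metadata of every returned buffer against the requested dynamic
// range profile: HLG10 must carry no static HDR metadata, HDR10 requires SMPTE 2086,
// HDR10+ requires SMPTE 2094-40, and the Dolby Vision profiles require SMPTE 2094-10.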
3298 void CameraAidlTest::verify10BitMetadata(
3299 HandleImporter& importer, const InFlightRequest& request,
3300 aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap
3301 profile) {
3302 for (auto b : request.resultOutputBuffers) {
3303 importer.importBuffer(b.buffer.buffer);
3304 bool smpte2086Present = importer.isSmpte2086Present(b.buffer.buffer);
3305 bool smpte2094_10Present = importer.isSmpte2094_10Present(b.buffer.buffer);
3306 bool smpte2094_40Present = importer.isSmpte2094_40Present(b.buffer.buffer);
3307
3308 switch (static_cast<int64_t>(profile)) {
3309 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
3310 ASSERT_FALSE(smpte2086Present);
3311 ASSERT_FALSE(smpte2094_10Present);
3312 ASSERT_FALSE(smpte2094_40Present);
3313 break;
3314 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
3315 ASSERT_TRUE(smpte2086Present);
3316 ASSERT_FALSE(smpte2094_10Present);
3317 ASSERT_FALSE(smpte2094_40Present);
3318 break;
3319 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
3320 ASSERT_FALSE(smpte2094_10Present);
3321 ASSERT_TRUE(smpte2094_40Present);
3322 break;
3323 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
3324 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
3325 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
3326 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
3327 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
3328 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
3329 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
3330 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
3331 ASSERT_FALSE(smpte2086Present);
3332 ASSERT_TRUE(smpte2094_10Present);
3333 ASSERT_FALSE(smpte2094_40Present);
3334 break;
3335 default:
3336 ALOGE("%s: Unexpected 10-bit dynamic range profile: %" PRId64, __FUNCTION__,
3337 profile);
3338 ADD_FAILURE();
3339 }
3340 importer.freeBuffer(b.buffer.buffer);
3341 }
3342 }
3343
3344 bool CameraAidlTest::reportsColorSpaces(const camera_metadata_t* staticMeta) {
3345 camera_metadata_ro_entry capabilityEntry;
3346 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3347 &capabilityEntry);
3348 if (rc == 0) {
3349 for (uint32_t i = 0; i < capabilityEntry.count; i++) {
3350 if (capabilityEntry.data.u8[i] ==
3351 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
3352 return true;
3353 }
3354 }
3355 }
3356 return false;
3357 }
3358
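// Collects the color spaces listed in ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP.
// Entries are (color space, image format, dynamic range profile bitmap) triples; only
// distinct color space values are returned.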
3359 void CameraAidlTest::getColorSpaceProfiles(
3360 const camera_metadata_t* staticMeta,
3361 std::vector<RequestAvailableColorSpaceProfilesMap>* profiles) {
3362 ASSERT_NE(nullptr, staticMeta);
3363 ASSERT_NE(nullptr, profiles);
3364 camera_metadata_ro_entry entry;
3365 int rc = find_camera_metadata_ro_entry(
3366 staticMeta, ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry);
3367 ASSERT_EQ(rc, 0);
3368 ASSERT_TRUE(entry.count > 0);
3369 ASSERT_EQ(entry.count % 3, 0);
3370
3371 for (uint32_t i = 0; i < entry.count; i += 3) {
3372 ASSERT_NE(entry.data.i64[i],
3373 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
3374 if (std::find(profiles->begin(), profiles->end(),
3375 static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]))
3376 == profiles->end()) {
3377 profiles->emplace_back(
3378 static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]));
3379 }
3380 }
3381 }
3382
3383 bool CameraAidlTest::isColorSpaceCompatibleWithDynamicRangeAndPixelFormat(
3384 const camera_metadata_t* staticMeta,
3385 RequestAvailableColorSpaceProfilesMap colorSpace,
3386 RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile,
3387 aidl::android::hardware::graphics::common::PixelFormat pixelFormat) {
3388 camera_metadata_ro_entry entry;
3389 int rc = find_camera_metadata_ro_entry(
3390 staticMeta, ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry);
3391
3392 if (rc == 0) {
3393 for (uint32_t i = 0; i < entry.count; i += 3) {
3394 RequestAvailableColorSpaceProfilesMap entryColorSpace =
3395 static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]);
3396 int64_t dynamicRangeProfileI64 = static_cast<int64_t>(dynamicRangeProfile);
3397 int32_t entryImageFormat = static_cast<int32_t>(entry.data.i64[i + 1]);
3398 int32_t expectedImageFormat = halFormatToPublicFormat(pixelFormat);
3399 if (entryColorSpace == colorSpace
3400 && (entry.data.i64[i + 2] & dynamicRangeProfileI64) != 0
3401 && entryImageFormat == expectedImageFormat) {
3402 return true;
3403 }
3404 }
3405 }
3406
3407 return false;
3408 }
3409
3410 const char* CameraAidlTest::getColorSpaceProfileString(
3411 RequestAvailableColorSpaceProfilesMap colorSpace) {
3412 auto colorSpaceCast = static_cast<int>(colorSpace);
3413 switch (colorSpaceCast) {
3414 case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED:
3415 return "UNSPECIFIED";
3416 case ColorSpaceNamed::SRGB:
3417 return "SRGB";
3418 case ColorSpaceNamed::LINEAR_SRGB:
3419 return "LINEAR_SRGB";
3420 case ColorSpaceNamed::EXTENDED_SRGB:
3421 return "EXTENDED_SRGB";
3422 case ColorSpaceNamed::LINEAR_EXTENDED_SRGB:
3423 return "LINEAR_EXTENDED_SRGB";
3424 case ColorSpaceNamed::BT709:
3425 return "BT709";
3426 case ColorSpaceNamed::BT2020:
3427 return "BT2020";
3428 case ColorSpaceNamed::DCI_P3:
3429 return "DCI_P3";
3430 case ColorSpaceNamed::DISPLAY_P3:
3431 return "DISPLAY_P3";
3432 case ColorSpaceNamed::NTSC_1953:
3433 return "NTSC_1953";
3434 case ColorSpaceNamed::SMPTE_C:
3435 return "SMPTE_C";
3436 case ColorSpaceNamed::ADOBE_RGB:
3437 return "ADOBE_RGB";
3438 case ColorSpaceNamed::PRO_PHOTO_RGB:
3439 return "PRO_PHOTO_RGB";
3440 case ColorSpaceNamed::ACES:
3441 return "ACES";
3442 case ColorSpaceNamed::ACESCG:
3443 return "ACESCG";
3444 case ColorSpaceNamed::CIE_XYZ:
3445 return "CIE_XYZ";
3446 case ColorSpaceNamed::CIE_LAB:
3447 return "CIE_LAB";
3448 case ColorSpaceNamed::BT2020_HLG:
3449 return "BT2020_HLG";
3450 case ColorSpaceNamed::BT2020_PQ:
3451 return "BT2020_PQ";
3452 default:
3453 return "INVALID";
3454 }
3455
3456 return "INVALID";
3457 }
3458
3459 const char* CameraAidlTest::getDynamicRangeProfileString(
3460 RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile) {
3461 auto dynamicRangeProfileCast =
3462 static_cast<camera_metadata_enum_android_request_available_dynamic_range_profiles_map>
3463 (dynamicRangeProfile);
3464 switch (dynamicRangeProfileCast) {
3465 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD:
3466 return "STANDARD";
3467 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
3468 return "HLG10";
3469 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
3470 return "HDR10";
3471 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
3472 return "HDR10_PLUS";
3473 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
3474 return "DOLBY_VISION_10B_HDR_REF";
3475 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
3476 return "DOLBY_VISION_10B_HDR_REF_P0";
3477 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
3478 return "DOLBY_VISION_10B_HDR_OEM";
3479 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
3480 return "DOLBY_VISION_10B_HDR_OEM_P0";
3481 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
3482 return "DOLBY_VISION_8B_HDR_REF";
3483 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
3484 return "DOLBY_VISION_8B_HDR_REF_P0";
3485 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
3486 return "DOLBY_VISION_8B_HDR_OEM";
3487 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
3488 return "DOLBY_VISION_8B_HDR_OEM_P0";
3489 default:
3490 return "INVALID";
3491 }
3492
3493 return "INVALID";
3494 }
3495
3496 int32_t CameraAidlTest::halFormatToPublicFormat(
3497 aidl::android::hardware::graphics::common::PixelFormat pixelFormat) {
3498 // This is an incomplete mapping of pixel format to image format and assumes dataspaces
3499 // (see getDataspace)
3500 switch (pixelFormat) {
3501 case PixelFormat::BLOB:
3502 return 0x100; // ImageFormat.JPEG
3503 case PixelFormat::Y16:
3504 return 0x44363159; // ImageFormat.DEPTH16
3505 default:
3506 return static_cast<int32_t>(pixelFormat);
3507 }
3508 }
3509
3510 bool CameraAidlTest::supportZoomSettingsOverride(const camera_metadata_t* staticMeta) {
3511 camera_metadata_ro_entry availableOverridesEntry;
3512 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
3513 &availableOverridesEntry);
3514 if (rc == 0) {
3515 for (size_t i = 0; i < availableOverridesEntry.count; i++) {
3516 if (availableOverridesEntry.data.i32[i] == ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
3517 return true;
3518 }
3519 }
3520 }
3521 return false;
3522 }
3523
3524 bool CameraAidlTest::supportsCroppedRawUseCase(const camera_metadata_t *staticMeta) {
3525 camera_metadata_ro_entry availableStreamUseCasesEntry;
3526 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
3527 &availableStreamUseCasesEntry);
3528 if (rc == 0) {
3529 for (size_t i = 0; i < availableStreamUseCasesEntry.count; i++) {
3530 if (availableStreamUseCasesEntry.data.i64[i] ==
3531 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW) {
3532 return true;
3533 }
3534 }
3535 }
3536 return false;
3537 }
3538
3539 bool CameraAidlTest::supportsStreamUseCaseCap(const camera_metadata_t* staticMeta) {
3540 camera_metadata_ro_entry entry;
3541 int retcode = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3542 &entry);
3543 bool hasStreamUseCaseCap = false;
3544 if ((0 == retcode) && (entry.count > 0)) {
3545 if (std::find(entry.data.u8, entry.data.u8 + entry.count,
3546 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE) !=
3547 entry.data.u8 + entry.count) {
3548 hasStreamUseCaseCap = true;
3549 }
3550 }
3551 return hasStreamUseCaseCap;
3552 }
3553
3554 bool CameraAidlTest::isPerFrameControl(const camera_metadata_t* staticMeta) {
3555 camera_metadata_ro_entry syncLatencyEntry;
3556 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_SYNC_MAX_LATENCY,
3557 &syncLatencyEntry);
3558 if (rc == 0 && syncLatencyEntry.data.i32[0] == ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL) {
3559 return true;
3560 }
3561 return false;
3562 }
3563
3564 void CameraAidlTest::configurePreviewStream(
3565 const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
3566 const AvailableStream* previewThreshold, std::shared_ptr<ICameraDeviceSession>* session,
3567 Stream* previewStream, std::vector<HalStream>* halStreams, bool* supportsPartialResults,
3568 int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb,
3569 uint32_t streamConfigCounter) {
3570 ASSERT_NO_FATAL_FAILURE(configureSingleStream(
3571 name, provider, previewThreshold, GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
3572 RequestTemplate::PREVIEW, session, previewStream, halStreams, supportsPartialResults,
3573 partialResultCount, useHalBufManager, cb, streamConfigCounter));
3574 }
3575
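// Returns Status::OK if the static metadata advertises the OFFLINE_PROCESSING
// capability, OPERATION_NOT_SUPPORTED if it does not, and ILLEGAL_ARGUMENT if the
// metadata is missing or malformed.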
3576 Status CameraAidlTest::isOfflineSessionSupported(const camera_metadata_t* staticMeta) {
3577 Status ret = Status::OPERATION_NOT_SUPPORTED;
3578 if (nullptr == staticMeta) {
3579 return Status::ILLEGAL_ARGUMENT;
3580 }
3581
3582 camera_metadata_ro_entry entry;
3583 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3584 &entry);
3585 if (0 != rc) {
3586 return Status::ILLEGAL_ARGUMENT;
3587 }
3588
3589 for (size_t i = 0; i < entry.count; i++) {
3590 if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING == entry.data.u8[i]) {
3591 ret = Status::OK;
3592 break;
3593 }
3594 }
3595
3596 return ret;
3597 }
3598
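// Configures a single CPU-readable still capture stream, using the largest output size
// that satisfies |threshold|, for the offline processing test cases.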
3599 void CameraAidlTest::configureOfflineStillStream(
3600 const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
3601 const AvailableStream* threshold, std::shared_ptr<ICameraDeviceSession>* session,
3602 Stream* stream, std::vector<HalStream>* halStreams, bool* supportsPartialResults,
3603 int32_t* partialResultCount, std::shared_ptr<DeviceCb>* outCb, int32_t* jpegBufferSize,
3604 std::set<int32_t>* halBufManagedStreamIds) {
3605 ASSERT_NE(nullptr, session);
3606 ASSERT_NE(nullptr, halStreams);
3607 ASSERT_NE(nullptr, stream);
3608 ASSERT_NE(nullptr, supportsPartialResults);
3609 ASSERT_NE(nullptr, partialResultCount);
3610 ASSERT_NE(nullptr, outCb);
3611 ASSERT_NE(nullptr, jpegBufferSize);
3612 ASSERT_NE(nullptr, halBufManagedStreamIds);
3613
3614 std::vector<AvailableStream> outputStreams;
3615 std::shared_ptr<ICameraDevice> cameraDevice;
3616 ALOGI("configureStreams: Testing camera device %s", name.c_str());
3617
3618 ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &cameraDevice);
3619 ASSERT_TRUE(ret.isOk());
3620 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
3621 ret.getServiceSpecificError());
3622 ASSERT_NE(cameraDevice, nullptr);
3623
3624 CameraMetadata metadata;
3625 ret = cameraDevice->getCameraCharacteristics(&metadata);
3626 ASSERT_TRUE(ret.isOk());
3627 camera_metadata_t* staticMeta = clone_camera_metadata(
3628 reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
3629 ASSERT_NE(nullptr, staticMeta);
3630
3631 camera_metadata_ro_entry entry;
3632 auto status =
3633 find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
3634 if ((0 == status) && (entry.count > 0)) {
3635 *partialResultCount = entry.data.i32[0];
3636 *supportsPartialResults = (*partialResultCount > 1);
3637 }
3638
3639 BufferManagerType bufferManagerType = BufferManagerType::FRAMEWORK;
3640 status = find_camera_metadata_ro_entry(
3641 staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
3642 if ((0 == status) && (entry.count == 1)) {
3643 if (entry.data.u8[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5) {
3644 bufferManagerType = BufferManagerType::HAL;
3645 } else if (entry.data.u8[0] ==
3646 ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE) {
3647 bufferManagerType = BufferManagerType::SESSION;
3648 }
3649 }
3650
3651 auto st = getJpegBufferSize(staticMeta, jpegBufferSize);
3652 ASSERT_EQ(st, Status::OK);
3653
3654 *outCb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
3655 ret = cameraDevice->open(*outCb, session);
3656 ASSERT_TRUE(ret.isOk());
3657 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
3658 ret.getServiceSpecificError());
3659 ASSERT_NE(*session, nullptr);
3660
3661 outputStreams.clear();
3662 auto rc = getAvailableOutputStreams(staticMeta, outputStreams, threshold);
3673 ASSERT_EQ(Status::OK, rc);
3674 ASSERT_FALSE(outputStreams.empty());
3663 size_t idx = 0;
3664 int currLargest = outputStreams[0].width * outputStreams[0].height;
3665 for (size_t i = 0; i < outputStreams.size(); i++) {
3666 int area = outputStreams[i].width * outputStreams[i].height;
3667 if (area > currLargest) {
3668 idx = i;
3669 currLargest = area;
3670 }
3671 }
3675
3676 Dataspace dataspace = getDataspace(static_cast<PixelFormat>(outputStreams[idx].format));
3677
3678 std::vector<Stream> streams(/*size*/ 1);
3679 streams[0] = {/*id*/ 0,
3680 StreamType::OUTPUT,
3681 outputStreams[idx].width,
3682 outputStreams[idx].height,
3683 static_cast<PixelFormat>(outputStreams[idx].format),
3684 static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
3685 GRALLOC1_CONSUMER_USAGE_CPU_READ),
3686 dataspace,
3687 StreamRotation::ROTATION_0,
3688 /*physicalId*/ std::string(),
3689 *jpegBufferSize,
3690 /*groupId*/ 0,
3691 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
3692 RequestAvailableDynamicRangeProfilesMap::
3693 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
3694 ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
3695 static_cast<int>(
3696 RequestAvailableColorSpaceProfilesMap::
3697 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)};
3698
3699 StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE, CameraMetadata()};
3700
3701 ret = configureStreams(*session, config, bufferManagerType, halBufManagedStreamIds, halStreams);
3702
3703 ASSERT_TRUE(ret.isOk());
3704
3705 if (halBufManagedStreamIds->size() != 0) {
3706 (*outCb)->setCurrentStreamConfig(streams, *halStreams);
3707 }
3708
3709 *stream = streams[0];
3710 }
3711
3712 void CameraAidlTest::updateInflightResultQueue(
3713 const std::shared_ptr<ResultMetadataQueue>& resultQueue) {
3714 std::unique_lock<std::mutex> l(mLock);
3715 for (auto& it : mInflightMap) {
3716 it.second->resultQueue = resultQueue;
3717 }
3718 }
3719
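// Shared body for the color space capture tests: for every camera that reports color
// space profiles and supports the requested |colorSpace| / |dynamicRangeProfile|
// combination, fills the HAL's in-flight queue with preview requests and verifies the
// returned buffers, including the 10-bit HDR metadata where applicable.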
3720 void CameraAidlTest::processColorSpaceRequest(
3721 RequestAvailableColorSpaceProfilesMap colorSpace,
3722 RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile) {
3723 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3724 CameraMetadata settings;
3725
3726 for (const auto& name : cameraDeviceNames) {
3727 std::string version, deviceId;
3728 ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
3729 CameraMetadata meta;
3730 std::shared_ptr<ICameraDevice> device;
3731 ASSERT_NO_FATAL_FAILURE(openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device));
3732 camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
3733
3734 // Device does not report color spaces, skip.
3735 if (!reportsColorSpaces(staticMeta)) {
3736 ndk::ScopedAStatus ret = mSession->close();
3737 mSession = nullptr;
3738 ASSERT_TRUE(ret.isOk());
3739 ALOGV("Camera %s does not report color spaces", name.c_str());
3740 continue;
3741 }
3742 std::vector<RequestAvailableColorSpaceProfilesMap> profileList;
3743 getColorSpaceProfiles(staticMeta, &profileList);
3744 ASSERT_FALSE(profileList.empty());
3745
3746 // Device does not support color space / dynamic range profile, skip
3747 if (std::find(profileList.begin(), profileList.end(), colorSpace)
3748 == profileList.end() || !isColorSpaceCompatibleWithDynamicRangeAndPixelFormat(
3749 staticMeta, colorSpace, dynamicRangeProfile,
3750 PixelFormat::IMPLEMENTATION_DEFINED)) {
3751 ndk::ScopedAStatus ret = mSession->close();
3752 mSession = nullptr;
3753 ASSERT_TRUE(ret.isOk());
3754 ALOGV("Camera %s does not support color space %s with dynamic range profile %s and "
3755 "pixel format %d", name.c_str(), getColorSpaceProfileString(colorSpace),
3756 getDynamicRangeProfileString(dynamicRangeProfile),
3757 PixelFormat::IMPLEMENTATION_DEFINED);
3758 continue;
3759 }
3760
3761 ALOGV("Camera %s supports color space %s with dynamic range profile %s and pixel format %d",
3762 name.c_str(), getColorSpaceProfileString(colorSpace),
3763 getDynamicRangeProfileString(dynamicRangeProfile),
3764 PixelFormat::IMPLEMENTATION_DEFINED);
3765
3766 // If an HDR dynamic range profile is reported in the color space profile list,
3767 // the device must also have the dynamic range profiles map capability and contain
3768 // the dynamic range profile in the map.
3769 if (dynamicRangeProfile != static_cast<RequestAvailableDynamicRangeProfilesMap>(
3770 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
3771 ASSERT_TRUE(is10BitDynamicRangeCapable(staticMeta));
3772
3773 std::vector<RequestAvailableDynamicRangeProfilesMap> dynamicRangeProfiles;
3774 get10BitDynamicRangeProfiles(staticMeta, &dynamicRangeProfiles);
3775 ASSERT_FALSE(dynamicRangeProfiles.empty());
3776 ASSERT_FALSE(std::find(dynamicRangeProfiles.begin(), dynamicRangeProfiles.end(),
3777 dynamicRangeProfile) == dynamicRangeProfiles.end());
3778 }
3779
3780 CameraMetadata req;
3781 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
3782 ndk::ScopedAStatus ret =
3783 mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
3784 ASSERT_TRUE(ret.isOk());
3785
3786 const camera_metadata_t* metadata =
3787 reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
3788 size_t expectedSize = req.metadata.size();
3789 int result = validate_camera_metadata_structure(metadata, &expectedSize);
3790 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
3791
3792 size_t entryCount = get_camera_metadata_entry_count(metadata);
3793 ASSERT_GT(entryCount, 0u);
3794 defaultSettings = metadata;
3795
3796 const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
3797 uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
3798 settings.metadata = std::vector(
3799 rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
3800 overrideRotateAndCrop(&settings);
3801
3802 ret = mSession->close();
3803 mSession = nullptr;
3804 ASSERT_TRUE(ret.isOk());
3805
3806 std::vector<HalStream> halStreams;
3807 bool supportsPartialResults = false;
3808 std::set<int32_t> halBufManagedStreamIds;
3809 int32_t partialResultCount = 0;
3810 Stream previewStream;
3811 std::shared_ptr<DeviceCb> cb;
3812
3813 previewStream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
3814 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
3815 configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
3816 &previewStream, &halStreams, &supportsPartialResults, &partialResultCount,
3817 &halBufManagedStreamIds, &cb, 0,
3818 /*maxResolution*/ false, dynamicRangeProfile, colorSpace);
3819 ASSERT_NE(mSession, nullptr);
3820
3821 ::aidl::android::hardware::common::fmq::MQDescriptor<
3822 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
3823 descriptor;
3824 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
3825 ASSERT_TRUE(resultQueueRet.isOk());
3826
3827 std::shared_ptr<ResultMetadataQueue> resultQueue =
3828 std::make_shared<ResultMetadataQueue>(descriptor);
3829 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
3830 ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
3831 resultQueue = nullptr;
3832 // Don't use the queue onwards.
3833 }
3834
3835 mInflightMap.clear();
3836 // Stream as long as needed to fill the Hal inflight queue
3837 std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
3838
3839 for (int32_t requestId = 0; requestId < requests.size(); requestId++) {
3840 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
3841 static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
3842 partialResultCount, std::unordered_set<std::string>(), resultQueue);
3843
3844 CaptureRequest& request = requests[requestId];
3845 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
3846 outputBuffers.resize(halStreams.size());
3847
3848 size_t k = 0;
3849 inflightReq->mOutstandingBufferIds.resize(halStreams.size());
3850 std::vector<buffer_handle_t> graphicBuffers;
3851 graphicBuffers.reserve(halStreams.size());
3852
3853 auto bufferId = requestId + 1; // Buffer id value 0 is not valid
3854 for (const auto& halStream : halStreams) {
3855 buffer_handle_t buffer_handle;
3856 if (contains(halBufManagedStreamIds, halStream.id)) {
3857 outputBuffers[k] = {halStream.id, 0,
3858 NativeHandle(), BufferStatus::OK,
3859 NativeHandle(), NativeHandle()};
3860 } else {
3861 auto usage = ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
3862 static_cast<uint64_t>(halStream.producerUsage),
3863 static_cast<uint64_t>(halStream.consumerUsage)));
3864 allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
3865 halStream.overrideFormat, &buffer_handle);
3866
3867 inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
3868 graphicBuffers.push_back(buffer_handle);
3869 outputBuffers[k] = {
3870 halStream.id, bufferId, android::makeToAidl(buffer_handle),
3871 BufferStatus::OK, NativeHandle(), NativeHandle()};
3872 }
3873 k++;
3874 }
3875
3876 request.inputBuffer = {
3877 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
3878 request.frameNumber = bufferId;
3879 request.fmqSettingsSize = 0;
3880 request.settings = settings;
3881 request.inputWidth = 0;
3882 request.inputHeight = 0;
3883
3884 {
3885 std::unique_lock<std::mutex> l(mLock);
3886 mInflightMap[bufferId] = inflightReq;
3887 }
3888 }
3889
3890 int32_t numRequestProcessed = 0;
3891 std::vector<BufferCache> cachesToRemove;
3892 ndk::ScopedAStatus returnStatus =
3893 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
3894 ASSERT_TRUE(returnStatus.isOk());
3895 ASSERT_EQ(numRequestProcessed, requests.size());
3896
3897 returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
3898 std::vector<int32_t> {halStreams[0].id});
3899 ASSERT_TRUE(returnStatus.isOk());
3900
3901 // We are keeping frame numbers and buffer ids consistent. Buffer id value of 0
3902 // is used to indicate a buffer that is not present/available so buffer ids as well
3903 // as frame numbers begin with 1.
3904 for (int32_t frameNumber = 1; frameNumber <= requests.size(); frameNumber++) {
3905 const auto& inflightReq = mInflightMap[frameNumber];
3906 std::unique_lock<std::mutex> l(mLock);
3907 while (!inflightReq->errorCodeValid &&
3908 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
3909 auto timeout = std::chrono::system_clock::now() +
3910 std::chrono::seconds(kStreamBufferTimeoutSec);
3911 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
3912 }
3913
3914 ASSERT_FALSE(inflightReq->errorCodeValid);
3915 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
3916
3917 if (dynamicRangeProfile != static_cast<RequestAvailableDynamicRangeProfilesMap>(
3918 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
3919 verify10BitMetadata(mHandleImporter, *inflightReq, dynamicRangeProfile);
3920 }
3921 }
3922
3923 if (halBufManagedStreamIds.size() != 0) {
3924 std::vector<int32_t> streamIds;
3925 for (size_t i = 0; i < halStreams.size(); i++) {
3926 if (contains(halBufManagedStreamIds, halStreams[i].id)) {
3927 streamIds.emplace_back(halStreams[i].id);
3928 }
3929 }
3930 mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
3931 cb->waitForBuffersReturned();
3932 }
3933
3934 ret = mSession->close();
3935 mSession = nullptr;
3936 ASSERT_TRUE(ret.isOk());
3937 }
3938 }
3939
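// Shared body for the zoom settings override tests: submits |frameCount| preview
// requests whose ANDROID_CONTROL_SETTINGS_OVERRIDE value follows |overrideSequence|
// and later compares the per-frame results against |expectedResults|. Cameras without
// zoom settings override or per-frame control support are skipped.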
3940 void CameraAidlTest::processZoomSettingsOverrideRequests(
3941 int32_t frameCount, const bool *overrideSequence, const bool *expectedResults) {
3942 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
3943 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
3944 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
3945 int64_t bufferId = 1;
3946 int32_t frameNumber = 1;
3947 CameraMetadata settings;
3948 ndk::ScopedAStatus ret;
3949 for (const auto& name : cameraDeviceNames) {
3950 CameraMetadata meta;
3951 std::shared_ptr<ICameraDevice> device;
3952 ASSERT_NO_FATAL_FAILURE(openEmptyDeviceSession(name, mProvider, &mSession /*out*/,
3953 &meta /*out*/, &device /*out*/));
3954 camera_metadata_t* staticMeta =
3955 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data()));
3956
3957 ret = mSession->close();
3958 mSession = nullptr;
3959 ASSERT_TRUE(ret.isOk());
3960
3961 // Device does not support zoom settings override
3962 if (!supportZoomSettingsOverride(staticMeta)) {
3963 continue;
3964 }
3965
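// Skip devices without per-frame control; the override results are verified per frame below.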
3966 if (!isPerFrameControl(staticMeta)) {
3967 continue;
3968 }
3969
3970 bool supportsPartialResults = false;
3971 bool useHalBufManager = false;
3972 int32_t partialResultCount = 0;
3973 Stream previewStream;
3974 std::vector<HalStream> halStreams;
3975 std::shared_ptr<DeviceCb> cb;
3976 configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/,
3977 &previewStream /*out*/, &halStreams /*out*/,
3978 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
3979 &useHalBufManager /*out*/, &cb /*out*/);
3980 ASSERT_NE(mSession, nullptr);
3981
3982 ::aidl::android::hardware::common::fmq::MQDescriptor<
3983 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
3984 descriptor;
3985 auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
3986 ASSERT_TRUE(resultQueueRet.isOk());
3987
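// The HAL may deliver capture result metadata through this fast message queue instead of
// inlining it in the capture result.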
3988 std::shared_ptr<ResultMetadataQueue> resultQueue =
3989 std::make_shared<ResultMetadataQueue>(descriptor);
3990 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
3991 ALOGE("%s: HAL returned an empty result metadata fmq, not using it", __func__);
3992 resultQueue = nullptr;
3993 // Don't use the queue from here on.
3994 }
3995
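// Start from the PREVIEW template and patch the settings override tag per request below.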
3996 ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings);
3997 ASSERT_TRUE(ret.isOk());
3998
3999 mInflightMap.clear();
4000 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta;
4001 std::vector<CaptureRequest> requests(frameCount);
4002 std::vector<buffer_handle_t> buffers(frameCount);
4003 std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(frameCount);
4004 std::vector<CameraMetadata> requestSettings(frameCount);
4005
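// Build one preview request per frame, setting ANDROID_CONTROL_SETTINGS_OVERRIDE to ZOOM or
// OFF according to overrideSequence.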
4006 for (int32_t i = 0; i < frameCount; i++) {
4007 std::unique_lock<std::mutex> l(mLock);
4008 CaptureRequest& request = requests[i];
4009 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
4010 outputBuffers.resize(1);
4011 StreamBuffer& outputBuffer = outputBuffers[0];
4012
4013 if (useHalBufManager) {
4014 outputBuffer = {halStreams[0].id, 0,
4015 NativeHandle(), BufferStatus::OK,
4016 NativeHandle(), NativeHandle()};
4017 } else {
4018 allocateGraphicBuffer(previewStream.width, previewStream.height,
4019 ANDROID_NATIVE_UNSIGNED_CAST(android_convertGralloc1To0Usage(
4020 static_cast<uint64_t>(halStreams[0].producerUsage),
4021 static_cast<uint64_t>(halStreams[0].consumerUsage))),
4022 halStreams[0].overrideFormat, &buffers[i]);
4023 outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]),
4024 BufferStatus::OK, NativeHandle(), NativeHandle()};
4025 }
4026
4027 // Set appropriate settings override tag
4028 requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data()));
4029 int32_t settingsOverride = overrideSequence[i] ?
4030 ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM : ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
4031 ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
4032 &settingsOverride, 1));
4033 camera_metadata_t* metaBuffer = requestMeta.release();
4034 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
4035 requestSettings[i].metadata = std::vector(
4036 rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
4037 overrideRotateAndCrop(&(requestSettings[i]));
4038 request.frameNumber = frameNumber + i;
4039 request.fmqSettingsSize = 0;
4040 request.settings = requestSettings[i];
4041 request.inputBuffer = {
4042 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
4043
4044 inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
4045 partialResultCount, resultQueue);
4046 mInflightMap[frameNumber + i] = inflightReqs[i];
4047 }
4048
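// Submit the whole batch in one processCaptureRequest() call and check that the HAL accepted
// every request.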
4049 int32_t numRequestProcessed = 0;
4050 std::vector<BufferCache> cachesToRemove;
4051
4052 ndk::ScopedAStatus returnStatus =
4053 mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
4054 ASSERT_TRUE(returnStatus.isOk());
4055 ASSERT_EQ(numRequestProcessed, frameCount);
4056
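// Wait for each result and verify both the reported override state and the overriding frame
// number: when the override is expected to be active, the overriding frame number must be
// later than the request's own frame number.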
4057 for (size_t i = 0; i < static_cast<size_t>(frameCount); i++) {
4058 std::unique_lock<std::mutex> l(mLock);
4059 while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) ||
4060 (!inflightReqs[i]->haveResultMetadata))) {
4061 auto timeout = std::chrono::system_clock::now() +
4062 std::chrono::seconds(kStreamBufferTimeoutSec);
4063 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
4064 }
4065
4066 ASSERT_FALSE(inflightReqs[i]->errorCodeValid);
4067 ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u);
4068 ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId);
4069 ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty());
4070 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_CONTROL_SETTINGS_OVERRIDE));
4071 camera_metadata_entry_t overrideResult =
4072 inflightReqs[i]->collectedResult.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
4073 ASSERT_EQ(overrideResult.data.i32[0] == ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM,
4074 expectedResults[i]);
4075 ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(
4076 ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER));
4077 camera_metadata_entry_t frameNumberEntry = inflightReqs[i]->collectedResult.find(
4078 ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER);
4079 ALOGV("%s: i %zu, expectedResults[i] %d, overrideResult is %d, frameNumber %d",
4080 __FUNCTION__, i, expectedResults[i], overrideResult.data.i32[0],
4081 frameNumberEntry.data.i32[0]);
4082 if (expectedResults[i]) {
4083 ASSERT_GT(frameNumberEntry.data.i32[0], inflightReqs[i]->frameNumber);
4084 } else {
4085 ASSERT_EQ(frameNumberEntry.data.i32[0], frameNumber + i);
4086 }
4087 }
4088
4089 ret = mSession->close();
4090 mSession = nullptr;
4091 ASSERT_TRUE(ret.isOk());
4092 }
4093 }
4094
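// Collects all (width, height) pairs advertised for the given pixel format in the stream
// configuration entry identified by 'tag', keeping only OUTPUT configurations.
//
// Minimal illustrative sketch of a caller (tag/format values are one plausible combination,
// not the only valid one):
//
//     std::vector<std::tuple<size_t, size_t>> blobSizes;
//     getSupportedSizes(staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
//                       static_cast<int32_t>(PixelFormat::BLOB), &blobSizes);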
4095 void CameraAidlTest::getSupportedSizes(const camera_metadata_t* ch, uint32_t tag, int32_t format,
4096 std::vector<std::tuple<size_t, size_t>>* sizes /*out*/) {
4097 if (sizes == nullptr) {
4098 return;
4099 }
4100
4101 camera_metadata_ro_entry entry;
4102 int retcode = find_camera_metadata_ro_entry(ch, tag, &entry);
4103 if ((0 == retcode) && (entry.count > 0)) {
4104 // Scaler entry contains 4 elements (format, width, height, type)
4105 for (size_t i = 0; i < entry.count; i += 4) {
4106 if ((entry.data.i32[i] == format) &&
4107 (entry.data.i32[i + 3] == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT)) {
4108 sizes->push_back(std::make_tuple(entry.data.i32[i + 1], entry.data.i32[i + 2]));
4109 }
4110 }
4111 }
4112 }
4113
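// For every requested (width, height) size, looks up the matching duration (e.g. minimum
// frame duration or stall duration, depending on 'tag') for the given format and appends it
// to 'durations'.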
4114 void CameraAidlTest::getSupportedDurations(const camera_metadata_t* ch, uint32_t tag,
4115 int32_t format,
4116 const std::vector<std::tuple<size_t, size_t>>& sizes,
4117 std::vector<int64_t>* durations /*out*/) {
4118 if (durations == nullptr) {
4119 return;
4120 }
4121
4122 camera_metadata_ro_entry entry;
4123 int retcode = find_camera_metadata_ro_entry(ch, tag, &entry);
4124 if ((0 == retcode) && (entry.count > 0)) {
4125 // Duration entry contains 4 elements (format, width, height, duration)
4126 for (const auto& size : sizes) {
4127 int64_t width = std::get<0>(size);
4128 int64_t height = std::get<1>(size);
4129 for (size_t i = 0; i < entry.count; i += 4) {
4130 if ((entry.data.i64[i] == format) && (entry.data.i64[i + 1] == width) &&
4131 (entry.data.i64[i + 2] == height)) {
4132 durations->push_back(entry.data.i64[i + 3]);
4133 break;
4134 }
4135 }
4136 }
4137 }
4138 }
4139
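// Verifies that the default settings returned for 'reqTemplate' are a structurally valid
// camera_metadata buffer and contain the entries expected for that template.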
4140 void CameraAidlTest::validateDefaultRequestMetadata(RequestTemplate reqTemplate,
4141 const CameraMetadata& rawMetadata) {
4142 const camera_metadata_t* metadata = reinterpret_cast<const camera_metadata_t*>(rawMetadata.metadata.data());
4143 size_t expectedSize = rawMetadata.metadata.size();
4144 int result = validate_camera_metadata_structure(metadata, &expectedSize);
4145 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
4146 verifyRequestTemplate(metadata, reqTemplate);
4147 }
4148