1 /*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "camera_aidl_test.h"
18
19 #include <inttypes.h>
20
21 #include <CameraParameters.h>
22 #include <HandleImporter.h>
23 #include <aidl/android/hardware/camera/device/ICameraDevice.h>
24 #include <aidl/android/hardware/camera/metadata/CameraMetadataTag.h>
25 #include <aidl/android/hardware/camera/metadata/RequestAvailableDynamicRangeProfilesMap.h>
26 #include <aidl/android/hardware/camera/metadata/SensorInfoColorFilterArrangement.h>
27 #include <aidl/android/hardware/camera/metadata/SensorPixelMode.h>
28 #include <aidl/android/hardware/camera/provider/BnCameraProviderCallback.h>
29 #include <aidlcommonsupport/NativeHandle.h>
30 #include <android/binder_manager.h>
31 #include <android/binder_process.h>
32 #include <device_cb.h>
33 #include <empty_device_cb.h>
34 #include <grallocusage/GrallocUsageConversion.h>
35 #include <hardware/gralloc1.h>
36 #include <simple_device_cb.h>
37 #include <ui/Fence.h>
38 #include <ui/GraphicBufferAllocator.h>
39 #include <regex>
40 #include <typeinfo>
41
42 using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
43 using ::aidl::android::hardware::camera::common::TorchModeStatus;
44 using ::aidl::android::hardware::camera::device::CameraMetadata;
45 using ::aidl::android::hardware::camera::device::ICameraDevice;
46 using ::aidl::android::hardware::camera::metadata::CameraMetadataTag;
47 using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
48 using ::aidl::android::hardware::camera::metadata::SensorInfoColorFilterArrangement;
49 using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
50 using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
51 using ::aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination;
52 using ::aidl::android::hardware::camera::provider::ICameraProvider;
53 using ::aidl::android::hardware::common::NativeHandle;
54 using ::android::hardware::camera::common::V1_0::helper::Size;
55 using ::ndk::ScopedAStatus;
56 using ::ndk::SpAIBinder;
57
58 namespace {
parseProviderName(const std::string & serviceDescriptor,std::string * type,uint32_t * id)59 bool parseProviderName(const std::string& serviceDescriptor, std::string* type /*out*/,
60 uint32_t* id /*out*/) {
61 if (!type || !id) {
62 ADD_FAILURE();
63 return false;
64 }
65
66 // expected format: <service_name>/<type>/<id>
67 std::string::size_type slashIdx1 = serviceDescriptor.find('/');
68 if (slashIdx1 == std::string::npos || slashIdx1 == serviceDescriptor.size() - 1) {
69 ADD_FAILURE() << "Provider name does not have / separator between name, type, and id";
70 return false;
71 }
72
73 std::string::size_type slashIdx2 = serviceDescriptor.find('/', slashIdx1 + 1);
74 if (slashIdx2 == std::string::npos || slashIdx2 == serviceDescriptor.size() - 1) {
75 ADD_FAILURE() << "Provider name does not have / separator between type and id";
76 return false;
77 }
78
79 std::string typeVal = serviceDescriptor.substr(slashIdx1 + 1, slashIdx2 - slashIdx1 - 1);
80
81 char* endPtr;
82 errno = 0;
83 int64_t idVal = strtol(serviceDescriptor.c_str() + slashIdx2 + 1, &endPtr, 10);
84 if (errno != 0) {
85 ADD_FAILURE() << "cannot parse provider id as an integer:" << serviceDescriptor.c_str()
86 << strerror(errno) << errno;
87 return false;
88 }
89 if (endPtr != serviceDescriptor.c_str() + serviceDescriptor.size()) {
90 ADD_FAILURE() << "provider id has unexpected length " << serviceDescriptor.c_str();
91 return false;
92 }
93 if (idVal < 0) {
94 ADD_FAILURE() << "id is negative: " << serviceDescriptor.c_str() << idVal;
95 return false;
96 }
97
98 *type = typeVal;
99 *id = static_cast<uint32_t>(idVal);
100
101 return true;
102 }
103
104 const std::vector<int64_t> kMandatoryUseCases = {
105 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
106 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
107 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
108 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
109 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
110 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL};
111 } // namespace
112
SetUp()113 void CameraAidlTest::SetUp() {
114 std::string serviceDescriptor = GetParam();
115 ALOGI("get service with name: %s", serviceDescriptor.c_str());
116
117 bool success = ABinderProcess_setThreadPoolMaxThreadCount(5);
118 ALOGI("ABinderProcess_setThreadPoolMaxThreadCount returns %s", success ? "true" : "false");
119 ASSERT_TRUE(success);
120 ABinderProcess_startThreadPool();
121
122 SpAIBinder cameraProviderBinder =
123 SpAIBinder(AServiceManager_getService(serviceDescriptor.c_str()));
124 ASSERT_NE(cameraProviderBinder.get(), nullptr);
125
126 std::shared_ptr<ICameraProvider> cameraProvider =
127 ICameraProvider::fromBinder(cameraProviderBinder);
128 ASSERT_NE(cameraProvider.get(), nullptr);
129 mProvider = cameraProvider;
130 uint32_t id;
131 ASSERT_TRUE(parseProviderName(serviceDescriptor, &mProviderType, &id));
132
133 notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL);
134 }
135
TearDown()136 void CameraAidlTest::TearDown() {
137 if (mSession != nullptr) {
138 ndk::ScopedAStatus ret = mSession->close();
139 ASSERT_TRUE(ret.isOk());
140 }
141 }
142
waitForReleaseFence(std::vector<InFlightRequest::StreamBufferAndTimestamp> & resultOutputBuffers)143 void CameraAidlTest::waitForReleaseFence(
144 std::vector<InFlightRequest::StreamBufferAndTimestamp>& resultOutputBuffers) {
145 for (auto& bufferAndTimestamp : resultOutputBuffers) {
146 // wait for the fence timestamp and store it along with the buffer
147 android::sp<android::Fence> releaseFence = nullptr;
148 const native_handle_t* releaseFenceHandle = bufferAndTimestamp.buffer.releaseFence;
149 if (releaseFenceHandle != nullptr && releaseFenceHandle->numFds == 1 &&
150 releaseFenceHandle->data[0] >= 0) {
151 releaseFence = new android::Fence(releaseFenceHandle->data[0]);
152 }
153 if (releaseFence && releaseFence->isValid()) {
154 releaseFence->wait(/*ms*/ 300);
155 nsecs_t releaseTime = releaseFence->getSignalTime();
156 if (bufferAndTimestamp.timeStamp < releaseTime)
157 bufferAndTimestamp.timeStamp = releaseTime;
158 }
159 }
160 }
161
getCameraDeviceNames(std::shared_ptr<ICameraProvider> & provider,bool addSecureOnly)162 std::vector<std::string> CameraAidlTest::getCameraDeviceNames(
163 std::shared_ptr<ICameraProvider>& provider, bool addSecureOnly) {
164 std::vector<std::string> cameraDeviceNames;
165
166 ScopedAStatus ret = provider->getCameraIdList(&cameraDeviceNames);
167 if (!ret.isOk()) {
168 ADD_FAILURE() << "Could not get camera id list";
169 }
170
171 // External camera devices are reported through cameraDeviceStatusChange
172 struct ProviderCb : public BnCameraProviderCallback {
173 ScopedAStatus cameraDeviceStatusChange(const std::string& devName,
174 CameraDeviceStatus newStatus) override {
175 ALOGI("camera device status callback name %s, status %d", devName.c_str(),
176 (int)newStatus);
177 if (newStatus == CameraDeviceStatus::PRESENT) {
178 externalCameraDeviceNames.push_back(devName);
179 }
180 return ScopedAStatus::ok();
181 }
182
183 ScopedAStatus torchModeStatusChange(const std::string&, TorchModeStatus) override {
184 return ScopedAStatus::ok();
185 }
186
187 ScopedAStatus physicalCameraDeviceStatusChange(
188 const std::string&, const std::string&,
189 ::aidl::android::hardware::camera::common::CameraDeviceStatus) override {
190 return ScopedAStatus::ok();
191 }
192
193 std::vector<std::string> externalCameraDeviceNames;
194 };
195 std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>();
196 auto status = mProvider->setCallback(cb);
197
198 for (const auto& devName : cb->externalCameraDeviceNames) {
199 if (cameraDeviceNames.end() ==
200 std::find(cameraDeviceNames.begin(), cameraDeviceNames.end(), devName)) {
201 cameraDeviceNames.push_back(devName);
202 }
203 }
204
205 std::vector<std::string> retList;
206 for (auto& cameraDeviceName : cameraDeviceNames) {
207 bool isSecureOnlyCamera = isSecureOnly(mProvider, cameraDeviceName);
208 if (addSecureOnly) {
209 if (isSecureOnlyCamera) {
210 retList.emplace_back(cameraDeviceName);
211 }
212 } else if (!isSecureOnlyCamera) {
213 retList.emplace_back(cameraDeviceName);
214 }
215 }
216 return retList;
217 }
218
isSecureOnly(const std::shared_ptr<ICameraProvider> & provider,const std::string & name)219 bool CameraAidlTest::isSecureOnly(const std::shared_ptr<ICameraProvider>& provider,
220 const std::string& name) {
221 std::shared_ptr<ICameraDevice> cameraDevice = nullptr;
222 ScopedAStatus retInterface = provider->getCameraDeviceInterface(name, &cameraDevice);
223 if (!retInterface.isOk()) {
224 ADD_FAILURE() << "Failed to get camera device interface for " << name;
225 }
226
227 CameraMetadata cameraCharacteristics;
228 ScopedAStatus retChars = cameraDevice->getCameraCharacteristics(&cameraCharacteristics);
229 if (!retChars.isOk()) {
230 ADD_FAILURE() << "Failed to get camera characteristics for device " << name;
231 }
232
233 camera_metadata_t* chars =
234 reinterpret_cast<camera_metadata_t*>(cameraCharacteristics.metadata.data());
235
236 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
237 Status retCameraKind = getSystemCameraKind(chars, &systemCameraKind);
238 if (retCameraKind != Status::OK) {
239 ADD_FAILURE() << "Failed to get camera kind for " << name;
240 }
241
242 return systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA;
243 }
244
getCameraDeviceIdToNameMap(std::shared_ptr<ICameraProvider> provider)245 std::map<std::string, std::string> CameraAidlTest::getCameraDeviceIdToNameMap(
246 std::shared_ptr<ICameraProvider> provider) {
247 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(provider);
248
249 std::map<std::string, std::string> idToNameMap;
250 for (auto& name : cameraDeviceNames) {
251 std::string version, cameraId;
252 if (!matchDeviceName(name, mProviderType, &version, &cameraId)) {
253 ADD_FAILURE();
254 }
255 idToNameMap.insert(std::make_pair(std::string(cameraId), name));
256 }
257 return idToNameMap;
258 }
259
verifyMonochromeCameraResult(const::android::hardware::camera::common::V1_0::helper::CameraMetadata & metadata)260 void CameraAidlTest::verifyMonochromeCameraResult(
261 const ::android::hardware::camera::common::V1_0::helper::CameraMetadata& metadata) {
262 camera_metadata_ro_entry entry;
263
264 // Check tags that are not applicable for monochrome camera
265 ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_GREEN_SPLIT));
266 ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_NEUTRAL_COLOR_POINT));
267 ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_MODE));
268 ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_TRANSFORM));
269 ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_GAINS));
270
271 // Check dynamicBlackLevel
272 entry = metadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
273 if (entry.count > 0) {
274 ASSERT_EQ(entry.count, 4);
275 for (size_t i = 1; i < entry.count; i++) {
276 ASSERT_FLOAT_EQ(entry.data.f[i], entry.data.f[0]);
277 }
278 }
279
280 // Check noiseProfile
281 entry = metadata.find(ANDROID_SENSOR_NOISE_PROFILE);
282 if (entry.count > 0) {
283 ASSERT_EQ(entry.count, 2);
284 }
285
286 // Check lensShadingMap
287 entry = metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
288 if (entry.count > 0) {
289 ASSERT_EQ(entry.count % 4, 0);
290 for (size_t i = 0; i < entry.count / 4; i++) {
291 ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 1], entry.data.f[i * 4]);
292 ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 2], entry.data.f[i * 4]);
293 ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 3], entry.data.f[i * 4]);
294 }
295 }
296
297 // Check tonemapCurve
298 camera_metadata_ro_entry curveRed = metadata.find(ANDROID_TONEMAP_CURVE_RED);
299 camera_metadata_ro_entry curveGreen = metadata.find(ANDROID_TONEMAP_CURVE_GREEN);
300 camera_metadata_ro_entry curveBlue = metadata.find(ANDROID_TONEMAP_CURVE_BLUE);
301 if (curveRed.count > 0 && curveGreen.count > 0 && curveBlue.count > 0) {
302 ASSERT_EQ(curveRed.count, curveGreen.count);
303 ASSERT_EQ(curveRed.count, curveBlue.count);
304 for (size_t i = 0; i < curveRed.count; i++) {
305 ASSERT_FLOAT_EQ(curveGreen.data.f[i], curveRed.data.f[i]);
306 ASSERT_FLOAT_EQ(curveBlue.data.f[i], curveRed.data.f[i]);
307 }
308 }
309 }
310
verifyStreamUseCaseCharacteristics(const camera_metadata_t * metadata)311 void CameraAidlTest::verifyStreamUseCaseCharacteristics(const camera_metadata_t* metadata) {
312 camera_metadata_ro_entry entry;
313 // Check capabilities
314 int retcode =
315 find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
316 bool hasStreamUseCaseCap = false;
317 if ((0 == retcode) && (entry.count > 0)) {
318 if (std::find(entry.data.u8, entry.data.u8 + entry.count,
319 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE) !=
320 entry.data.u8 + entry.count) {
321 hasStreamUseCaseCap = true;
322 }
323 }
324
325 bool supportMandatoryUseCases = false;
326 retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
327 &entry);
328 if ((0 == retcode) && (entry.count > 0)) {
329 supportMandatoryUseCases = true;
330 for (size_t i = 0; i < kMandatoryUseCases.size(); i++) {
331 if (std::find(entry.data.i64, entry.data.i64 + entry.count, kMandatoryUseCases[i]) ==
332 entry.data.i64 + entry.count) {
333 supportMandatoryUseCases = false;
334 break;
335 }
336 }
337 bool supportDefaultUseCase = false;
338 for (size_t i = 0; i < entry.count; i++) {
339 if (entry.data.i64[i] == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
340 supportDefaultUseCase = true;
341 }
342 ASSERT_TRUE(entry.data.i64[i] <= ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL ||
343 entry.data.i64[i] >=
344 ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START);
345 }
346 ASSERT_TRUE(supportDefaultUseCase);
347 }
348
349 ASSERT_EQ(hasStreamUseCaseCap, supportMandatoryUseCases);
350 }
351
isMonochromeCamera(const camera_metadata_t * staticMeta)352 Status CameraAidlTest::isMonochromeCamera(const camera_metadata_t* staticMeta) {
353 Status ret = Status::OPERATION_NOT_SUPPORTED;
354 if (nullptr == staticMeta) {
355 return Status::ILLEGAL_ARGUMENT;
356 }
357
358 camera_metadata_ro_entry entry;
359 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
360 &entry);
361
362 if (0 != rc) {
363 return Status::ILLEGAL_ARGUMENT;
364 }
365
366 for (size_t i = 0; i < entry.count; i++) {
367 if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME == entry.data.u8[i]) {
368 ret = Status::OK;
369 break;
370 }
371 }
372
373 return ret;
374 }
375
isLogicalMultiCamera(const camera_metadata_t * staticMeta)376 Status CameraAidlTest::isLogicalMultiCamera(const camera_metadata_t* staticMeta) {
377 Status ret = Status::OPERATION_NOT_SUPPORTED;
378 if (nullptr == staticMeta) {
379 return Status::ILLEGAL_ARGUMENT;
380 }
381
382 camera_metadata_ro_entry entry;
383 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
384 &entry);
385 if (0 != rc) {
386 return Status::ILLEGAL_ARGUMENT;
387 }
388
389 for (size_t i = 0; i < entry.count; i++) {
390 if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA == entry.data.u8[i]) {
391 ret = Status::OK;
392 break;
393 }
394 }
395
396 return ret;
397 }
398
verifyLogicalCameraResult(const camera_metadata_t * staticMetadata,const std::vector<uint8_t> & resultMetadata)399 void CameraAidlTest::verifyLogicalCameraResult(const camera_metadata_t* staticMetadata,
400 const std::vector<uint8_t>& resultMetadata) {
401 camera_metadata_t* metadata = (camera_metadata_t*)resultMetadata.data();
402
403 std::unordered_set<std::string> physicalIds;
404 Status rc = getPhysicalCameraIds(staticMetadata, &physicalIds);
405 ASSERT_TRUE(Status::OK == rc);
406 ASSERT_TRUE(physicalIds.size() > 1);
407
408 camera_metadata_ro_entry entry;
409 // Check mainPhysicalId
410 find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID,
411 &entry);
412 if (entry.count > 0) {
413 std::string mainPhysicalId(reinterpret_cast<const char*>(entry.data.u8));
414 ASSERT_NE(physicalIds.find(mainPhysicalId), physicalIds.end());
415 } else {
416 ADD_FAILURE() << "Get LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID failed!";
417 }
418 }
419
getPhysicalCameraIds(const camera_metadata_t * staticMeta,std::unordered_set<std::string> * physicalIds)420 Status CameraAidlTest::getPhysicalCameraIds(const camera_metadata_t* staticMeta,
421 std::unordered_set<std::string>* physicalIds) {
422 if ((nullptr == staticMeta) || (nullptr == physicalIds)) {
423 return Status::ILLEGAL_ARGUMENT;
424 }
425
426 camera_metadata_ro_entry entry;
427 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
428 &entry);
429 if (0 != rc) {
430 return Status::ILLEGAL_ARGUMENT;
431 }
432
433 const uint8_t* ids = entry.data.u8;
434 size_t start = 0;
435 for (size_t i = 0; i < entry.count; i++) {
436 if (ids[i] == '\0') {
437 if (start != i) {
438 std::string currentId(reinterpret_cast<const char*>(ids + start));
439 physicalIds->emplace(currentId);
440 }
441 start = i + 1;
442 }
443 }
444
445 return Status::OK;
446 }
447
getSystemCameraKind(const camera_metadata_t * staticMeta,SystemCameraKind * systemCameraKind)448 Status CameraAidlTest::getSystemCameraKind(const camera_metadata_t* staticMeta,
449 SystemCameraKind* systemCameraKind) {
450 if (nullptr == staticMeta || nullptr == systemCameraKind) {
451 return Status::ILLEGAL_ARGUMENT;
452 }
453
454 camera_metadata_ro_entry entry{};
455 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
456 &entry);
457 if (0 != rc) {
458 return Status::ILLEGAL_ARGUMENT;
459 }
460
461 if (entry.count == 1 &&
462 entry.data.u8[0] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA) {
463 *systemCameraKind = SystemCameraKind::HIDDEN_SECURE_CAMERA;
464 return Status::OK;
465 }
466
467 // Go through the capabilities and check if it has
468 // ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA
469 for (size_t i = 0; i < entry.count; ++i) {
470 uint8_t capability = entry.data.u8[i];
471 if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA) {
472 *systemCameraKind = SystemCameraKind::SYSTEM_ONLY_CAMERA;
473 return Status::OK;
474 }
475 }
476 *systemCameraKind = SystemCameraKind::PUBLIC;
477 return Status::OK;
478 }
479
notifyDeviceState(int64_t state)480 void CameraAidlTest::notifyDeviceState(int64_t state) {
481 if (mProvider == nullptr) {
482 return;
483 }
484 mProvider->notifyDeviceStateChange(state);
485 }
486
allocateGraphicBuffer(uint32_t width,uint32_t height,uint64_t usage,PixelFormat format,buffer_handle_t * buffer_handle)487 void CameraAidlTest::allocateGraphicBuffer(uint32_t width, uint32_t height, uint64_t usage,
488 PixelFormat format, buffer_handle_t* buffer_handle) {
489 ASSERT_NE(buffer_handle, nullptr);
490
491 uint32_t stride;
492
493 android::status_t err = android::GraphicBufferAllocator::get().allocateRawHandle(
494 width, height, static_cast<int32_t>(format), 1u /*layerCount*/, usage, buffer_handle,
495 &stride, "VtsHalCameraProviderV2");
496 ASSERT_EQ(err, android::NO_ERROR);
497 }
498
matchDeviceName(const std::string & deviceName,const std::string & providerType,std::string * deviceVersion,std::string * cameraId)499 bool CameraAidlTest::matchDeviceName(const std::string& deviceName, const std::string& providerType,
500 std::string* deviceVersion, std::string* cameraId) {
501 // expected format: device@<major>.<minor>/<type>/<id>
502 std::stringstream pattern;
503 pattern << "device@([0-9]+\\.[0-9]+)/" << providerType << "/(.+)";
504 std::regex e(pattern.str());
505
506 std::smatch sm;
507 if (std::regex_match(deviceName, sm, e)) {
508 if (deviceVersion != nullptr) {
509 *deviceVersion = sm[1];
510 }
511 if (cameraId != nullptr) {
512 *cameraId = sm[2];
513 }
514 return true;
515 }
516 return false;
517 }
518
verifyCameraCharacteristics(const CameraMetadata & chars)519 void CameraAidlTest::verifyCameraCharacteristics(const CameraMetadata& chars) {
520 const camera_metadata_t* metadata =
521 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
522
523 size_t expectedSize = chars.metadata.size();
524 int result = validate_camera_metadata_structure(metadata, &expectedSize);
525 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
526 size_t entryCount = get_camera_metadata_entry_count(metadata);
527 // TODO: we can do better than 0 here. Need to check how many required
528 // characteristics keys we've defined.
529 ASSERT_GT(entryCount, 0u);
530
531 camera_metadata_ro_entry entry;
532 int retcode =
533 find_camera_metadata_ro_entry(metadata, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &entry);
534 if ((0 == retcode) && (entry.count > 0)) {
535 uint8_t hardwareLevel = entry.data.u8[0];
536 ASSERT_TRUE(hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED ||
537 hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL ||
538 hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3 ||
539 hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL);
540 } else {
541 ADD_FAILURE() << "Get camera hardware level failed!";
542 }
543
544 entry.count = 0;
545 retcode = find_camera_metadata_ro_entry(
546 metadata, ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION, &entry);
547 if ((0 == retcode) || (entry.count > 0)) {
548 ADD_FAILURE() << "ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION "
549 << " per API contract should never be set by Hal!";
550 }
551 retcode = find_camera_metadata_ro_entry(
552 metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, &entry);
553 if ((0 == retcode) || (entry.count > 0)) {
554 ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS"
555 << " per API contract should never be set by Hal!";
556 }
557 retcode = find_camera_metadata_ro_entry(
558 metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, &entry);
559 if ((0 == retcode) || (entry.count > 0)) {
560 ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS"
561 << " per API contract should never be set by Hal!";
562 }
563 retcode = find_camera_metadata_ro_entry(
564 metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, &entry);
565 if ((0 == retcode) || (entry.count > 0)) {
566 ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS"
567 << " per API contract should never be set by Hal!";
568 }
569
570 retcode = find_camera_metadata_ro_entry(
571 metadata, ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, &entry);
572 if (0 == retcode || entry.count > 0) {
573 ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS "
574 << " per API contract should never be set by Hal!";
575 }
576
577 retcode = find_camera_metadata_ro_entry(
578 metadata, ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS, &entry);
579 if (0 == retcode || entry.count > 0) {
580 ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS "
581 << " per API contract should never be set by Hal!";
582 }
583
584 retcode = find_camera_metadata_ro_entry(metadata, ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS,
585 &entry);
586 if (0 == retcode || entry.count > 0) {
587 ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS "
588 << " per API contract should never be set by Hal!";
589 }
590
591 retcode = find_camera_metadata_ro_entry(metadata, ANDROID_HEIC_INFO_SUPPORTED, &entry);
592 if (0 == retcode && entry.count > 0) {
593 retcode = find_camera_metadata_ro_entry(
594 metadata, ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT, &entry);
595 if (0 == retcode && entry.count > 0) {
596 uint8_t maxJpegAppSegmentsCount = entry.data.u8[0];
597 ASSERT_TRUE(maxJpegAppSegmentsCount >= 1 && maxJpegAppSegmentsCount <= 16);
598 } else {
599 ADD_FAILURE() << "Get Heic maxJpegAppSegmentsCount failed!";
600 }
601 }
602
603 retcode = find_camera_metadata_ro_entry(metadata, ANDROID_LENS_POSE_REFERENCE, &entry);
604 if (0 == retcode && entry.count > 0) {
605 uint8_t poseReference = entry.data.u8[0];
606 ASSERT_TRUE(poseReference <= ANDROID_LENS_POSE_REFERENCE_UNDEFINED &&
607 poseReference >= ANDROID_LENS_POSE_REFERENCE_PRIMARY_CAMERA);
608 }
609
610 retcode =
611 find_camera_metadata_ro_entry(metadata, ANDROID_INFO_DEVICE_STATE_ORIENTATIONS, &entry);
612 if (0 == retcode && entry.count > 0) {
613 ASSERT_TRUE((entry.count % 2) == 0);
614 uint64_t maxPublicState = ((uint64_t)ICameraProvider::DEVICE_STATE_FOLDED) << 1;
615 uint64_t vendorStateStart = 1UL << 31; // Reserved for vendor specific states
616 uint64_t stateMask = (1 << vendorStateStart) - 1;
617 stateMask &= ~((1 << maxPublicState) - 1);
618 for (int i = 0; i < entry.count; i += 2) {
619 ASSERT_TRUE((entry.data.i64[i] & stateMask) == 0);
620 ASSERT_TRUE((entry.data.i64[i + 1] % 90) == 0);
621 }
622 }
623
624 verifyExtendedSceneModeCharacteristics(metadata);
625 verifyZoomCharacteristics(metadata);
626 verifyStreamUseCaseCharacteristics(metadata);
627 }
628
verifyExtendedSceneModeCharacteristics(const camera_metadata_t * metadata)629 void CameraAidlTest::verifyExtendedSceneModeCharacteristics(const camera_metadata_t* metadata) {
630 camera_metadata_ro_entry entry;
631 int retcode = 0;
632
633 retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_AVAILABLE_MODES, &entry);
634 if ((0 == retcode) && (entry.count > 0)) {
635 for (auto i = 0; i < entry.count; i++) {
636 ASSERT_TRUE(entry.data.u8[i] >= ANDROID_CONTROL_MODE_OFF &&
637 entry.data.u8[i] <= ANDROID_CONTROL_MODE_USE_EXTENDED_SCENE_MODE);
638 }
639 } else {
640 ADD_FAILURE() << "Get camera controlAvailableModes failed!";
641 }
642
643 // Check key availability in capabilities, request and result.
644
645 retcode =
646 find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
647 bool hasExtendedSceneModeRequestKey = false;
648 if ((0 == retcode) && (entry.count > 0)) {
649 hasExtendedSceneModeRequestKey =
650 std::find(entry.data.i32, entry.data.i32 + entry.count,
651 ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count;
652 } else {
653 ADD_FAILURE() << "Get camera availableRequestKeys failed!";
654 }
655
656 retcode =
657 find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
658 bool hasExtendedSceneModeResultKey = false;
659 if ((0 == retcode) && (entry.count > 0)) {
660 hasExtendedSceneModeResultKey =
661 std::find(entry.data.i32, entry.data.i32 + entry.count,
662 ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count;
663 } else {
664 ADD_FAILURE() << "Get camera availableResultKeys failed!";
665 }
666
667 retcode = find_camera_metadata_ro_entry(metadata,
668 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
669 bool hasExtendedSceneModeMaxSizesKey = false;
670 bool hasExtendedSceneModeZoomRatioRangesKey = false;
671 if ((0 == retcode) && (entry.count > 0)) {
672 hasExtendedSceneModeMaxSizesKey =
673 std::find(entry.data.i32, entry.data.i32 + entry.count,
674 ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES) !=
675 entry.data.i32 + entry.count;
676 hasExtendedSceneModeZoomRatioRangesKey =
677 std::find(entry.data.i32, entry.data.i32 + entry.count,
678 ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES) !=
679 entry.data.i32 + entry.count;
680 } else {
681 ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
682 }
683
684 camera_metadata_ro_entry maxSizesEntry;
685 retcode = find_camera_metadata_ro_entry(
686 metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES, &maxSizesEntry);
687 bool hasExtendedSceneModeMaxSizes = (0 == retcode && maxSizesEntry.count > 0);
688
689 camera_metadata_ro_entry zoomRatioRangesEntry;
690 retcode = find_camera_metadata_ro_entry(
691 metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
692 &zoomRatioRangesEntry);
693 bool hasExtendedSceneModeZoomRatioRanges = (0 == retcode && zoomRatioRangesEntry.count > 0);
694
695 // Extended scene mode keys must all be available, or all be unavailable.
696 bool noExtendedSceneMode =
697 !hasExtendedSceneModeRequestKey && !hasExtendedSceneModeResultKey &&
698 !hasExtendedSceneModeMaxSizesKey && !hasExtendedSceneModeZoomRatioRangesKey &&
699 !hasExtendedSceneModeMaxSizes && !hasExtendedSceneModeZoomRatioRanges;
700 if (noExtendedSceneMode) {
701 return;
702 }
703 bool hasExtendedSceneMode = hasExtendedSceneModeRequestKey && hasExtendedSceneModeResultKey &&
704 hasExtendedSceneModeMaxSizesKey &&
705 hasExtendedSceneModeZoomRatioRangesKey &&
706 hasExtendedSceneModeMaxSizes && hasExtendedSceneModeZoomRatioRanges;
707 ASSERT_TRUE(hasExtendedSceneMode);
708
709 // Must have DISABLED, and must have one of BOKEH_STILL_CAPTURE, BOKEH_CONTINUOUS, or a VENDOR
710 // mode.
711 ASSERT_TRUE((maxSizesEntry.count == 6 && zoomRatioRangesEntry.count == 2) ||
712 (maxSizesEntry.count == 9 && zoomRatioRangesEntry.count == 4));
713 bool hasDisabledMode = false;
714 bool hasBokehStillCaptureMode = false;
715 bool hasBokehContinuousMode = false;
716 bool hasVendorMode = false;
717 std::vector<AvailableStream> outputStreams;
718 ASSERT_EQ(Status::OK, getAvailableOutputStreams(metadata, outputStreams));
719 for (int i = 0, j = 0; i < maxSizesEntry.count && j < zoomRatioRangesEntry.count; i += 3) {
720 int32_t mode = maxSizesEntry.data.i32[i];
721 int32_t maxWidth = maxSizesEntry.data.i32[i + 1];
722 int32_t maxHeight = maxSizesEntry.data.i32[i + 2];
723 switch (mode) {
724 case ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED:
725 hasDisabledMode = true;
726 ASSERT_TRUE(maxWidth == 0 && maxHeight == 0);
727 break;
728 case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE:
729 hasBokehStillCaptureMode = true;
730 j += 2;
731 break;
732 case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS:
733 hasBokehContinuousMode = true;
734 j += 2;
735 break;
736 default:
737 if (mode < ANDROID_CONTROL_EXTENDED_SCENE_MODE_VENDOR_START) {
738 ADD_FAILURE() << "Invalid extended scene mode advertised: " << mode;
739 } else {
740 hasVendorMode = true;
741 j += 2;
742 }
743 break;
744 }
745
746 if (mode != ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED) {
747 // Make sure size is supported.
748 bool sizeSupported = false;
749 for (const auto& stream : outputStreams) {
750 if ((stream.format == static_cast<int32_t>(PixelFormat::YCBCR_420_888) ||
751 stream.format == static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)) &&
752 stream.width == maxWidth && stream.height == maxHeight) {
753 sizeSupported = true;
754 break;
755 }
756 }
757 ASSERT_TRUE(sizeSupported);
758
759 // Make sure zoom range is valid
760 float minZoomRatio = zoomRatioRangesEntry.data.f[0];
761 float maxZoomRatio = zoomRatioRangesEntry.data.f[1];
762 ASSERT_GT(minZoomRatio, 0.0f);
763 ASSERT_LE(minZoomRatio, maxZoomRatio);
764 }
765 }
766 ASSERT_TRUE(hasDisabledMode);
767 ASSERT_TRUE(hasBokehStillCaptureMode || hasBokehContinuousMode || hasVendorMode);
768 }
769
getAvailableOutputStreams(const camera_metadata_t * staticMeta,std::vector<AvailableStream> & outputStreams,const AvailableStream * threshold,bool maxResolution)770 Status CameraAidlTest::getAvailableOutputStreams(const camera_metadata_t* staticMeta,
771 std::vector<AvailableStream>& outputStreams,
772 const AvailableStream* threshold,
773 bool maxResolution) {
774 if (nullptr == staticMeta) {
775 return Status::ILLEGAL_ARGUMENT;
776 }
777 int scalerTag = maxResolution
778 ? ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
779 : ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
780 int depthTag = maxResolution
781 ? ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
782 : ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS;
783
784 camera_metadata_ro_entry scalerEntry;
785 camera_metadata_ro_entry depthEntry;
786 int foundScaler = find_camera_metadata_ro_entry(staticMeta, scalerTag, &scalerEntry);
787 int foundDepth = find_camera_metadata_ro_entry(staticMeta, depthTag, &depthEntry);
788 if ((0 != foundScaler || (0 != (scalerEntry.count % 4))) &&
789 (0 != foundDepth || (0 != (depthEntry.count % 4)))) {
790 return Status::ILLEGAL_ARGUMENT;
791 }
792
793 if (foundScaler == 0 && (0 == (scalerEntry.count % 4))) {
794 fillOutputStreams(&scalerEntry, outputStreams, threshold,
795 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
796 }
797
798 if (foundDepth == 0 && (0 == (depthEntry.count % 4))) {
799 AvailableStream depthPreviewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
800 static_cast<int32_t>(PixelFormat::Y16)};
801 const AvailableStream* depthThreshold =
802 isDepthOnly(staticMeta) ? &depthPreviewThreshold : threshold;
803 fillOutputStreams(&depthEntry, outputStreams, depthThreshold,
804 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT);
805 }
806
807 return Status::OK;
808 }
809
fillOutputStreams(camera_metadata_ro_entry_t * entry,std::vector<AvailableStream> & outputStreams,const AvailableStream * threshold,const int32_t availableConfigOutputTag)810 void CameraAidlTest::fillOutputStreams(camera_metadata_ro_entry_t* entry,
811 std::vector<AvailableStream>& outputStreams,
812 const AvailableStream* threshold,
813 const int32_t availableConfigOutputTag) {
814 for (size_t i = 0; i < entry->count; i += 4) {
815 if (availableConfigOutputTag == entry->data.i32[i + 3]) {
816 if (nullptr == threshold) {
817 AvailableStream s = {entry->data.i32[i + 1], entry->data.i32[i + 2],
818 entry->data.i32[i]};
819 outputStreams.push_back(s);
820 } else {
821 if ((threshold->format == entry->data.i32[i]) &&
822 (threshold->width >= entry->data.i32[i + 1]) &&
823 (threshold->height >= entry->data.i32[i + 2])) {
824 AvailableStream s = {entry->data.i32[i + 1], entry->data.i32[i + 2],
825 threshold->format};
826 outputStreams.push_back(s);
827 }
828 }
829 }
830 }
831 }
832
verifyZoomCharacteristics(const camera_metadata_t * metadata)833 void CameraAidlTest::verifyZoomCharacteristics(const camera_metadata_t* metadata) {
834 camera_metadata_ro_entry entry;
835 int retcode = 0;
836
837 // Check key availability in capabilities, request and result.
838 retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
839 &entry);
840 float maxDigitalZoom = 1.0;
841 if ((0 == retcode) && (entry.count == 1)) {
842 maxDigitalZoom = entry.data.f[0];
843 } else {
844 ADD_FAILURE() << "Get camera scalerAvailableMaxDigitalZoom failed!";
845 }
846
847 retcode =
848 find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
849 bool hasZoomRequestKey = false;
850 if ((0 == retcode) && (entry.count > 0)) {
851 hasZoomRequestKey = std::find(entry.data.i32, entry.data.i32 + entry.count,
852 ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32 + entry.count;
853 } else {
854 ADD_FAILURE() << "Get camera availableRequestKeys failed!";
855 }
856
857 retcode =
858 find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
859 bool hasZoomResultKey = false;
860 if ((0 == retcode) && (entry.count > 0)) {
861 hasZoomResultKey = std::find(entry.data.i32, entry.data.i32 + entry.count,
862 ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32 + entry.count;
863 } else {
864 ADD_FAILURE() << "Get camera availableResultKeys failed!";
865 }
866
867 retcode = find_camera_metadata_ro_entry(metadata,
868 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
869 bool hasZoomCharacteristicsKey = false;
870 if ((0 == retcode) && (entry.count > 0)) {
871 hasZoomCharacteristicsKey =
872 std::find(entry.data.i32, entry.data.i32 + entry.count,
873 ANDROID_CONTROL_ZOOM_RATIO_RANGE) != entry.data.i32 + entry.count;
874 } else {
875 ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!";
876 }
877
878 retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
879 bool hasZoomRatioRange = (0 == retcode && entry.count == 2);
880
881 // Zoom keys must all be available, or all be unavailable.
882 bool noZoomRatio = !hasZoomRequestKey && !hasZoomResultKey && !hasZoomCharacteristicsKey &&
883 !hasZoomRatioRange;
884 if (noZoomRatio) {
885 return;
886 }
887 bool hasZoomRatio =
888 hasZoomRequestKey && hasZoomResultKey && hasZoomCharacteristicsKey && hasZoomRatioRange;
889 ASSERT_TRUE(hasZoomRatio);
890
891 float minZoomRatio = entry.data.f[0];
892 float maxZoomRatio = entry.data.f[1];
893 constexpr float FLOATING_POINT_THRESHOLD = 0.00001f;
894 if (maxDigitalZoom > maxZoomRatio + FLOATING_POINT_THRESHOLD) {
895 ADD_FAILURE() << "Maximum digital zoom " << maxDigitalZoom
896 << " is larger than maximum zoom ratio " << maxZoomRatio << " + threshold "
897 << FLOATING_POINT_THRESHOLD << "!";
898 }
899 if (minZoomRatio > maxZoomRatio) {
900 ADD_FAILURE() << "Maximum zoom ratio is less than minimum zoom ratio!";
901 }
902 if (minZoomRatio > 1.0f) {
903 ADD_FAILURE() << "Minimum zoom ratio is more than 1.0!";
904 }
905 if (maxZoomRatio < 1.0f) {
906 ADD_FAILURE() << "Maximum zoom ratio is less than 1.0!";
907 }
908
909 // Make sure CROPPING_TYPE is CENTER_ONLY
910 retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_CROPPING_TYPE, &entry);
911 if ((0 == retcode) && (entry.count == 1)) {
912 int8_t croppingType = entry.data.u8[0];
913 ASSERT_EQ(croppingType, ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY);
914 } else {
915 ADD_FAILURE() << "Get camera scalerCroppingType failed!";
916 }
917 }
918
verifyMonochromeCharacteristics(const CameraMetadata & chars)919 void CameraAidlTest::verifyMonochromeCharacteristics(const CameraMetadata& chars) {
920 const camera_metadata_t* metadata = (camera_metadata_t*)chars.metadata.data();
921 Status rc = isMonochromeCamera(metadata);
922 if (Status::OPERATION_NOT_SUPPORTED == rc) {
923 return;
924 }
925 ASSERT_EQ(Status::OK, rc);
926
927 camera_metadata_ro_entry entry;
928 // Check capabilities
929 int retcode =
930 find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
931 if ((0 == retcode) && (entry.count > 0)) {
932 ASSERT_EQ(std::find(entry.data.u8, entry.data.u8 + entry.count,
933 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING),
934 entry.data.u8 + entry.count);
935 }
936
937 // Check Cfa
938 retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
939 &entry);
940 if ((0 == retcode) && (entry.count == 1)) {
941 ASSERT_TRUE(entry.data.i32[0] ==
942 static_cast<int32_t>(
943 SensorInfoColorFilterArrangement::
944 ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO) ||
945 entry.data.i32[0] ==
946 static_cast<int32_t>(
947 SensorInfoColorFilterArrangement::
948 ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR));
949 }
950
951 // Check availableRequestKeys
952 retcode =
953 find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry);
954 if ((0 == retcode) && (entry.count > 0)) {
955 for (size_t i = 0; i < entry.count; i++) {
956 ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE);
957 ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM);
958 ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS);
959 }
960 } else {
961 ADD_FAILURE() << "Get camera availableRequestKeys failed!";
962 }
963
964 // Check availableResultKeys
965 retcode =
966 find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry);
967 if ((0 == retcode) && (entry.count > 0)) {
968 for (size_t i = 0; i < entry.count; i++) {
969 ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_GREEN_SPLIT);
970 ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
971 ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE);
972 ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM);
973 ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS);
974 }
975 } else {
976 ADD_FAILURE() << "Get camera availableResultKeys failed!";
977 }
978
979 // Check availableCharacteristicKeys
980 retcode = find_camera_metadata_ro_entry(metadata,
981 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry);
982 if ((0 == retcode) && (entry.count > 0)) {
983 for (size_t i = 0; i < entry.count; i++) {
984 ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
985 ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
986 ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
987 ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
988 ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM1);
989 ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM2);
990 ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX1);
991 ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX2);
992 }
993 } else {
994 ADD_FAILURE() << "Get camera availableResultKeys failed!";
995 }
996
997 // Check blackLevelPattern
998 retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_BLACK_LEVEL_PATTERN, &entry);
999 if ((0 == retcode) && (entry.count > 0)) {
1000 ASSERT_EQ(entry.count, 4);
1001 for (size_t i = 1; i < entry.count; i++) {
1002 ASSERT_EQ(entry.data.i32[i], entry.data.i32[0]);
1003 }
1004 }
1005 }
1006
verifyRecommendedConfigs(const CameraMetadata & chars)1007 void CameraAidlTest::verifyRecommendedConfigs(const CameraMetadata& chars) {
1008 size_t CONFIG_ENTRY_SIZE = 5;
1009 size_t CONFIG_ENTRY_TYPE_OFFSET = 3;
1010 size_t CONFIG_ENTRY_BITFIELD_OFFSET = 4;
1011 uint32_t maxPublicUsecase =
1012 ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PUBLIC_END_3_8;
1013 uint32_t vendorUsecaseStart =
1014 ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VENDOR_START;
1015 uint32_t usecaseMask = (1 << vendorUsecaseStart) - 1;
1016 usecaseMask &= ~((1 << maxPublicUsecase) - 1);
1017
1018 const camera_metadata_t* metadata =
1019 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
1020
1021 camera_metadata_ro_entry recommendedConfigsEntry, recommendedDepthConfigsEntry, ioMapEntry;
1022 recommendedConfigsEntry.count = recommendedDepthConfigsEntry.count = ioMapEntry.count = 0;
1023 int retCode = find_camera_metadata_ro_entry(
1024 metadata, ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
1025 &recommendedConfigsEntry);
1026 int depthRetCode = find_camera_metadata_ro_entry(
1027 metadata, ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS,
1028 &recommendedDepthConfigsEntry);
1029 int ioRetCode = find_camera_metadata_ro_entry(
1030 metadata, ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP, &ioMapEntry);
1031 if ((0 != retCode) && (0 != depthRetCode)) {
1032 // In case both regular and depth recommended configurations are absent,
1033 // I/O should be absent as well.
1034 ASSERT_NE(ioRetCode, 0);
1035 return;
1036 }
1037
1038 camera_metadata_ro_entry availableKeysEntry;
1039 retCode = find_camera_metadata_ro_entry(
1040 metadata, ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &availableKeysEntry);
1041 ASSERT_TRUE((0 == retCode) && (availableKeysEntry.count > 0));
1042 std::vector<int32_t> availableKeys;
1043 availableKeys.reserve(availableKeysEntry.count);
1044 availableKeys.insert(availableKeys.end(), availableKeysEntry.data.i32,
1045 availableKeysEntry.data.i32 + availableKeysEntry.count);
1046
1047 if (recommendedConfigsEntry.count > 0) {
1048 ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
1049 ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS),
1050 availableKeys.end());
1051 ASSERT_EQ((recommendedConfigsEntry.count % CONFIG_ENTRY_SIZE), 0);
1052 for (size_t i = 0; i < recommendedConfigsEntry.count; i += CONFIG_ENTRY_SIZE) {
1053 int32_t entryType = recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET];
1054 uint32_t bitfield = recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET];
1055 ASSERT_TRUE((entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) ||
1056 (entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT));
1057 ASSERT_TRUE((bitfield & usecaseMask) == 0);
1058 }
1059 }
1060
1061 if (recommendedDepthConfigsEntry.count > 0) {
1062 ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
1063 ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS),
1064 availableKeys.end());
1065 ASSERT_EQ((recommendedDepthConfigsEntry.count % CONFIG_ENTRY_SIZE), 0);
1066 for (size_t i = 0; i < recommendedDepthConfigsEntry.count; i += CONFIG_ENTRY_SIZE) {
1067 int32_t entryType = recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET];
1068 uint32_t bitfield =
1069 recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET];
1070 ASSERT_TRUE((entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) ||
1071 (entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT));
1072 ASSERT_TRUE((bitfield & usecaseMask) == 0);
1073 }
1074
1075 if (recommendedConfigsEntry.count == 0) {
1076 // In case regular recommended configurations are absent but suggested depth
1077 // configurations are present, I/O should be absent.
1078 ASSERT_NE(ioRetCode, 0);
1079 }
1080 }
1081
1082 if ((ioRetCode == 0) && (ioMapEntry.count > 0)) {
1083 ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
1084 ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP),
1085 availableKeys.end());
1086 ASSERT_EQ(isZSLModeAvailable(metadata), Status::OK);
1087 }
1088 }
1089
1090 // Check whether ZSL is available using the static camera
1091 // characteristics.
isZSLModeAvailable(const camera_metadata_t * staticMeta)1092 Status CameraAidlTest::isZSLModeAvailable(const camera_metadata_t* staticMeta) {
1093 if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) {
1094 return Status::OK;
1095 } else {
1096 return isZSLModeAvailable(staticMeta, YUV_REPROCESS);
1097 }
1098 }
1099
isZSLModeAvailable(const camera_metadata_t * staticMeta,ReprocessType reprocType)1100 Status CameraAidlTest::isZSLModeAvailable(const camera_metadata_t* staticMeta,
1101 ReprocessType reprocType) {
1102 Status ret = Status::OPERATION_NOT_SUPPORTED;
1103 if (nullptr == staticMeta) {
1104 return Status::ILLEGAL_ARGUMENT;
1105 }
1106
1107 camera_metadata_ro_entry entry;
1108 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1109 &entry);
1110 if (0 != rc) {
1111 return Status::ILLEGAL_ARGUMENT;
1112 }
1113
1114 for (size_t i = 0; i < entry.count; i++) {
1115 if ((reprocType == PRIV_REPROCESS &&
1116 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING == entry.data.u8[i]) ||
1117 (reprocType == YUV_REPROCESS &&
1118 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING == entry.data.u8[i])) {
1119 ret = Status::OK;
1120 break;
1121 }
1122 }
1123
1124 return ret;
1125 }
1126
1127 // Verify logical or ultra high resolution camera static metadata
verifyLogicalOrUltraHighResCameraMetadata(const std::string & cameraName,const std::shared_ptr<ICameraDevice> & device,const CameraMetadata & chars,const std::vector<std::string> & deviceNames)1128 void CameraAidlTest::verifyLogicalOrUltraHighResCameraMetadata(
1129 const std::string& cameraName, const std::shared_ptr<ICameraDevice>& device,
1130 const CameraMetadata& chars, const std::vector<std::string>& deviceNames) {
1131 const camera_metadata_t* metadata =
1132 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data());
1133 ASSERT_NE(nullptr, metadata);
1134 SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
1135 Status retStatus = getSystemCameraKind(metadata, &systemCameraKind);
1136 ASSERT_EQ(retStatus, Status::OK);
1137 Status rc = isLogicalMultiCamera(metadata);
1138 ASSERT_TRUE(Status::OK == rc || Status::OPERATION_NOT_SUPPORTED == rc);
1139 bool isMultiCamera = (Status::OK == rc);
1140 bool isUltraHighResCamera = isUltraHighResolution(metadata);
1141 if (!isMultiCamera && !isUltraHighResCamera) {
1142 return;
1143 }
1144
1145 camera_metadata_ro_entry entry;
1146 int retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1147 bool hasZoomRatioRange = (0 == retcode && entry.count == 2);
1148 retcode = find_camera_metadata_ro_entry(
1149 metadata, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
1150 bool hasHalBufferManager =
1151 (0 == retcode && 1 == entry.count &&
1152 entry.data.i32[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
1153 retcode = find_camera_metadata_ro_entry(
1154 metadata, ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED, &entry);
1155 bool multiResolutionStreamSupported =
1156 (0 == retcode && 1 == entry.count &&
1157 entry.data.u8[0] == ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_TRUE);
1158 if (multiResolutionStreamSupported) {
1159 ASSERT_TRUE(hasHalBufferManager);
1160 }
1161
1162 std::string version, cameraId;
1163 ASSERT_TRUE(matchDeviceName(cameraName, mProviderType, &version, &cameraId));
1164 std::unordered_set<std::string> physicalIds;
1165 rc = getPhysicalCameraIds(metadata, &physicalIds);
1166 ASSERT_TRUE(isUltraHighResCamera || Status::OK == rc);
1167 for (const auto& physicalId : physicalIds) {
1168 ASSERT_NE(physicalId, cameraId);
1169 }
1170 if (physicalIds.size() == 0) {
1171 ASSERT_TRUE(isUltraHighResCamera && !isMultiCamera);
1172 physicalIds.insert(cameraId);
1173 }
1174
1175 std::unordered_set<int32_t> physicalRequestKeyIDs;
1176 rc = getSupportedKeys(const_cast<camera_metadata_t*>(metadata),
1177 ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
1178 &physicalRequestKeyIDs);
1179 ASSERT_TRUE(Status::OK == rc);
1180 bool hasTestPatternPhysicalRequestKey =
1181 physicalRequestKeyIDs.find(ANDROID_SENSOR_TEST_PATTERN_MODE) !=
1182 physicalRequestKeyIDs.end();
1183 std::unordered_set<int32_t> privacyTestPatternModes;
1184 getPrivacyTestPatternModes(metadata, &privacyTestPatternModes);
1185
1186 // Map from image format to number of multi-resolution sizes for that format
1187 std::unordered_map<int32_t, size_t> multiResOutputFormatCounterMap;
1188 std::unordered_map<int32_t, size_t> multiResInputFormatCounterMap;
1189 for (const auto& physicalId : physicalIds) {
1190 bool isPublicId = false;
1191 std::string fullPublicId;
1192 SystemCameraKind physSystemCameraKind = SystemCameraKind::PUBLIC;
1193 for (auto& deviceName : deviceNames) {
1194 std::string publicVersion, publicId;
1195 ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId));
1196 if (physicalId == publicId) {
1197 isPublicId = true;
1198 fullPublicId = deviceName;
1199 break;
1200 }
1201 }
1202
1203 camera_metadata_ro_entry physicalMultiResStreamConfigs;
1204 camera_metadata_ro_entry physicalStreamConfigs;
1205 camera_metadata_ro_entry physicalMaxResolutionStreamConfigs;
1206 CameraMetadata physChars;
1207 bool isUltraHighRes = false;
1208 std::unordered_set<int32_t> subCameraPrivacyTestPatterns;
1209 if (isPublicId) {
1210 std::shared_ptr<ICameraDevice> subDevice;
1211 ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(fullPublicId, &subDevice);
1212 ASSERT_TRUE(ret.isOk());
1213 ASSERT_NE(subDevice, nullptr);
1214
1215 ret = subDevice->getCameraCharacteristics(&physChars);
1216 ASSERT_TRUE(ret.isOk());
1217
1218 const camera_metadata_t* staticMetadata =
1219 reinterpret_cast<const camera_metadata_t*>(physChars.metadata.data());
1220 retStatus = getSystemCameraKind(staticMetadata, &physSystemCameraKind);
1221 ASSERT_EQ(retStatus, Status::OK);
1222
1223 // Make sure that the system camera kind of a non-hidden
1224 // physical cameras is the same as the logical camera associated
1225 // with it.
1226 ASSERT_EQ(physSystemCameraKind, systemCameraKind);
1227 retcode = find_camera_metadata_ro_entry(staticMetadata,
1228 ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1229 bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2);
1230 ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange);
1231
1232 getMultiResolutionStreamConfigurations(
1233 &physicalMultiResStreamConfigs, &physicalStreamConfigs,
1234 &physicalMaxResolutionStreamConfigs, staticMetadata);
1235 isUltraHighRes = isUltraHighResolution(staticMetadata);
1236
1237 getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns);
1238 } else {
1239 // Check camera characteristics for hidden camera id
1240 ndk::ScopedAStatus ret =
1241 device->getPhysicalCameraCharacteristics(physicalId, &physChars);
1242 ASSERT_TRUE(ret.isOk());
1243 verifyCameraCharacteristics(physChars);
1244 verifyMonochromeCharacteristics(physChars);
1245
1246 auto staticMetadata = (const camera_metadata_t*)physChars.metadata.data();
1247 retcode = find_camera_metadata_ro_entry(staticMetadata,
1248 ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry);
1249 bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2);
1250 ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange);
1251
1252 getMultiResolutionStreamConfigurations(
1253 &physicalMultiResStreamConfigs, &physicalStreamConfigs,
1254 &physicalMaxResolutionStreamConfigs, staticMetadata);
1255 isUltraHighRes = isUltraHighResolution(staticMetadata);
1256 getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns);
1257
1258 // Check calling getCameraDeviceInterface_V3_x() on hidden camera id returns
1259 // ILLEGAL_ARGUMENT.
1260 std::stringstream s;
1261 s << "device@" << version << "/" << mProviderType << "/" << physicalId;
1262 std::string fullPhysicalId(s.str());
1263 std::shared_ptr<ICameraDevice> subDevice;
1264 ret = mProvider->getCameraDeviceInterface(fullPhysicalId, &subDevice);
1265 ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) ==
1266 ret.getServiceSpecificError());
1267 ASSERT_EQ(subDevice, nullptr);
1268 }
1269
1270 if (hasTestPatternPhysicalRequestKey) {
1271 ASSERT_TRUE(privacyTestPatternModes == subCameraPrivacyTestPatterns);
1272 }
1273
1274 if (physicalMultiResStreamConfigs.count > 0) {
1275 ASSERT_EQ(physicalMultiResStreamConfigs.count % 4, 0);
1276
1277 // Each supported size must be max size for that format,
1278 for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4; i++) {
1279 int32_t multiResFormat = physicalMultiResStreamConfigs.data.i32[i * 4];
1280 int32_t multiResWidth = physicalMultiResStreamConfigs.data.i32[i * 4 + 1];
1281 int32_t multiResHeight = physicalMultiResStreamConfigs.data.i32[i * 4 + 2];
1282 int32_t multiResInput = physicalMultiResStreamConfigs.data.i32[i * 4 + 3];
1283
1284 // Check if the resolution is the max resolution in stream
1285 // configuration map
1286 bool supported = false;
1287 bool isMaxSize = true;
1288 for (size_t j = 0; j < physicalStreamConfigs.count / 4; j++) {
1289 int32_t format = physicalStreamConfigs.data.i32[j * 4];
1290 int32_t width = physicalStreamConfigs.data.i32[j * 4 + 1];
1291 int32_t height = physicalStreamConfigs.data.i32[j * 4 + 2];
1292 int32_t input = physicalStreamConfigs.data.i32[j * 4 + 3];
1293 if (format == multiResFormat && input == multiResInput) {
1294 if (width == multiResWidth && height == multiResHeight) {
1295 supported = true;
1296 } else if (width * height > multiResWidth * multiResHeight) {
1297 isMaxSize = false;
1298 }
1299 }
1300 }
1301 // Check if the resolution is the max resolution in max
1302 // resolution stream configuration map
1303 bool supportedUltraHighRes = false;
1304 bool isUltraHighResMaxSize = true;
1305 for (size_t j = 0; j < physicalMaxResolutionStreamConfigs.count / 4; j++) {
1306 int32_t format = physicalMaxResolutionStreamConfigs.data.i32[j * 4];
1307 int32_t width = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 1];
1308 int32_t height = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 2];
1309 int32_t input = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 3];
1310 if (format == multiResFormat && input == multiResInput) {
1311 if (width == multiResWidth && height == multiResHeight) {
1312 supportedUltraHighRes = true;
1313 } else if (width * height > multiResWidth * multiResHeight) {
1314 isUltraHighResMaxSize = false;
1315 }
1316 }
1317 }
1318
1319 if (isUltraHighRes) {
1320 // For ultra high resolution camera, the configuration must
1321 // be the maximum size in stream configuration map, or max
1322 // resolution stream configuration map
1323 ASSERT_TRUE((supported && isMaxSize) ||
1324 (supportedUltraHighRes && isUltraHighResMaxSize));
1325 } else {
1326 // The configuration must be the maximum size in stream
1327 // configuration map
1328 ASSERT_TRUE(supported && isMaxSize);
1329 ASSERT_FALSE(supportedUltraHighRes);
1330 }
1331
1332 // Increment the counter for the configuration's format.
1333 auto& formatCounterMap = multiResInput ? multiResInputFormatCounterMap
1334 : multiResOutputFormatCounterMap;
1335 if (formatCounterMap.count(multiResFormat) == 0) {
1336 formatCounterMap[multiResFormat] = 1;
1337 } else {
1338 formatCounterMap[multiResFormat]++;
1339 }
1340 }
1341
1342 // There must be no duplicates
1343 for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4 - 1; i++) {
1344 for (size_t j = i + 1; j < physicalMultiResStreamConfigs.count / 4; j++) {
1345 // Input/output doesn't match
1346 if (physicalMultiResStreamConfigs.data.i32[i * 4 + 3] !=
1347 physicalMultiResStreamConfigs.data.i32[j * 4 + 3]) {
1348 continue;
1349 }
1350 // Format doesn't match
1351 if (physicalMultiResStreamConfigs.data.i32[i * 4] !=
1352 physicalMultiResStreamConfigs.data.i32[j * 4]) {
1353 continue;
1354 }
1355 // Width doesn't match
1356 if (physicalMultiResStreamConfigs.data.i32[i * 4 + 1] !=
1357 physicalMultiResStreamConfigs.data.i32[j * 4 + 1]) {
1358 continue;
1359 }
1360 // Height doesn't match
1361 if (physicalMultiResStreamConfigs.data.i32[i * 4 + 2] !=
1362 physicalMultiResStreamConfigs.data.i32[j * 4 + 2]) {
1363 continue;
1364 }
1365 // input/output, format, width, and height all match
1366 ADD_FAILURE();
1367 }
1368 }
1369 }
1370 }
1371
1372 // If a multi-resolution stream is supported, there must be at least one
1373 // format with more than one resolution.
1374 if (multiResolutionStreamSupported) {
1375 size_t numMultiResFormats = 0;
1376 for (const auto& [format, sizeCount] : multiResOutputFormatCounterMap) {
1377 if (sizeCount >= 2) {
1378 numMultiResFormats++;
1379 }
1380 }
1381 for (const auto& [format, sizeCount] : multiResInputFormatCounterMap) {
1382 if (sizeCount >= 2) {
1383 numMultiResFormats++;
1384
1385 // If multi-resolution reprocessing is supported, the logical
1386 // camera or ultra-high resolution sensor camera must support
1387 // the corresponding reprocessing capability.
1388 if (format == static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)) {
1389 ASSERT_EQ(isZSLModeAvailable(metadata, PRIV_REPROCESS), Status::OK);
1390 } else if (format == static_cast<int32_t>(PixelFormat::YCBCR_420_888)) {
1391 ASSERT_EQ(isZSLModeAvailable(metadata, YUV_REPROCESS), Status::OK);
1392 }
1393 }
1394 }
1395 ASSERT_GT(numMultiResFormats, 0);
1396 }
1397
1398 // Make sure ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID is available in
1399 // result keys.
1400 if (isMultiCamera) {
1401 retcode = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
1402 &entry);
1403 if ((0 == retcode) && (entry.count > 0)) {
1404 ASSERT_NE(std::find(entry.data.i32, entry.data.i32 + entry.count,
1405 static_cast<int32_t>(
1406 CameraMetadataTag::
1407 ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID)),
1408 entry.data.i32 + entry.count);
1409 } else {
1410 ADD_FAILURE() << "Get camera availableResultKeys failed!";
1411 }
1412 }
1413 }
1414
1415 bool CameraAidlTest::isUltraHighResolution(const camera_metadata_t* staticMeta) {
1416 camera_metadata_ro_entry scalerEntry;
1417 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1418 &scalerEntry);
1419 if (rc == 0) {
1420 for (uint32_t i = 0; i < scalerEntry.count; i++) {
1421 if (scalerEntry.data.u8[i] ==
1422 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
1423 return true;
1424 }
1425 }
1426 }
1427 return false;
1428 }
1429
1430 Status CameraAidlTest::getSupportedKeys(camera_metadata_t* staticMeta, uint32_t tagId,
1431 std::unordered_set<int32_t>* requestIDs) {
1432 if ((nullptr == staticMeta) || (nullptr == requestIDs)) {
1433 return Status::ILLEGAL_ARGUMENT;
1434 }
1435
1436 camera_metadata_ro_entry entry;
1437 int rc = find_camera_metadata_ro_entry(staticMeta, tagId, &entry);
1438 if ((0 != rc) || (entry.count == 0)) {
1439 return Status::OK;
1440 }
1441
1442 requestIDs->insert(entry.data.i32, entry.data.i32 + entry.count);
1443
1444 return Status::OK;
1445 }
1446
1447 void CameraAidlTest::getPrivacyTestPatternModes(
1448 const camera_metadata_t* staticMetadata,
1449 std::unordered_set<int32_t>* privacyTestPatternModes) {
1450 ASSERT_NE(staticMetadata, nullptr);
1451 ASSERT_NE(privacyTestPatternModes, nullptr);
1452
1453 camera_metadata_ro_entry entry;
1454 int retcode = find_camera_metadata_ro_entry(
1455 staticMetadata, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &entry);
1456 ASSERT_TRUE(0 == retcode);
1457
1458 for (size_t i = 0; i < entry.count; i++) {
1459 if (entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR ||
1460 entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_BLACK) {
1461 privacyTestPatternModes->insert(entry.data.i32[i]);
1462 }
1463 }
1464 }
1465
1466 void CameraAidlTest::getMultiResolutionStreamConfigurations(
1467 camera_metadata_ro_entry* multiResStreamConfigs, camera_metadata_ro_entry* streamConfigs,
1468 camera_metadata_ro_entry* maxResolutionStreamConfigs,
1469 const camera_metadata_t* staticMetadata) {
1470 ASSERT_NE(multiResStreamConfigs, nullptr);
1471 ASSERT_NE(streamConfigs, nullptr);
1472 ASSERT_NE(maxResolutionStreamConfigs, nullptr);
1473 ASSERT_NE(staticMetadata, nullptr);
1474
1475 int retcode = find_camera_metadata_ro_entry(
1476 staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, streamConfigs);
1477 ASSERT_TRUE(0 == retcode);
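// ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS is mandatory, but the maximum-resolution
// and multi-resolution variants queried below are optional, so -ENOENT is tolerated for them.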
1478 retcode = find_camera_metadata_ro_entry(
1479 staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
1480 maxResolutionStreamConfigs);
1481 ASSERT_TRUE(-ENOENT == retcode || 0 == retcode);
1482 retcode = find_camera_metadata_ro_entry(
1483 staticMetadata, ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
1484 multiResStreamConfigs);
1485 ASSERT_TRUE(-ENOENT == retcode || 0 == retcode);
1486 }
1487
1488 bool CameraAidlTest::isTorchSupported(const camera_metadata_t* staticMeta) {
1489 camera_metadata_ro_entry torchEntry;
1490 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_FLASH_INFO_AVAILABLE, &torchEntry);
1491 if (rc != 0) {
1492 ALOGI("isTorchSupported: Failed to find entry for ANDROID_FLASH_INFO_AVAILABLE");
1493 return false;
1494 }
1495 if (torchEntry.count == 1 && !torchEntry.data.u8[0]) {
1496 ALOGI("isTorchSupported: Torch not supported");
1497 return false;
1498 }
1499 ALOGI("isTorchSupported: Torch supported");
1500 return true;
1501 }
1502
1503 bool CameraAidlTest::isTorchStrengthControlSupported(const camera_metadata_t* staticMeta) {
1504 int32_t maxLevel = 0;
1505 camera_metadata_ro_entry maxEntry;
1506 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
1507 &maxEntry);
1508 if (rc != 0) {
1509 ALOGI("isTorchStrengthControlSupported: Failed to find entry for "
1510 "ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL");
1511 return false;
1512 }
1513
1514 maxLevel = *maxEntry.data.i32;
1515 if (maxLevel > 1) {
1516 ALOGI("isTorchStrengthControlSupported: Torch strength control supported.");
1517 return true;
1518 }
1519 ALOGI("isTorchStrengthControlSupported: Torch strength control not supported.");
1520 return false;
1521 }
1522
1523 void CameraAidlTest::verifyRequestTemplate(const camera_metadata_t* metadata,
1524 RequestTemplate requestTemplate) {
1525 ASSERT_NE(nullptr, metadata);
1526 size_t entryCount = get_camera_metadata_entry_count(metadata);
1527 ALOGI("template %u metadata entry count is %zu", (int32_t)requestTemplate, entryCount);
1528 // TODO: we can do better than 0 here. Need to check how many required
1529 // request keys we've defined for each template
1530 ASSERT_GT(entryCount, 0u);
1531
1532 // Check zoomRatio
1533 camera_metadata_ro_entry zoomRatioEntry;
1534 int foundZoomRatio =
1535 find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO, &zoomRatioEntry);
1536 if (foundZoomRatio == 0) {
1537 ASSERT_EQ(zoomRatioEntry.count, 1);
1538 ASSERT_EQ(zoomRatioEntry.data.f[0], 1.0f);
1539 }
1540 }
1541
1542 void CameraAidlTest::openEmptyDeviceSession(const std::string& name,
1543 const std::shared_ptr<ICameraProvider>& provider,
1544 std::shared_ptr<ICameraDeviceSession>* session,
1545 CameraMetadata* staticMeta,
1546 std::shared_ptr<ICameraDevice>* device) {
1547 ASSERT_NE(nullptr, session);
1548 ASSERT_NE(nullptr, staticMeta);
1549 ASSERT_NE(nullptr, device);
1550
1551 ALOGI("configureStreams: Testing camera device %s", name.c_str());
1552 ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, device);
1553 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
1554 ret.getServiceSpecificError());
1555 ASSERT_TRUE(ret.isOk());
1556 ASSERT_NE(*device, nullptr);
1557
1558 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
1559 ret = (*device)->open(cb, session);
1560 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
1561 ret.getServiceSpecificError());
1562 ASSERT_TRUE(ret.isOk());
1563 ASSERT_NE(*session, nullptr);
1564
1565 ret = (*device)->getCameraCharacteristics(staticMeta);
1566 ASSERT_TRUE(ret.isOk());
1567 }
1568
1569 void CameraAidlTest::openEmptyInjectionSession(const std::string& name,
1570 const std::shared_ptr<ICameraProvider>& provider,
1571 std::shared_ptr<ICameraInjectionSession>* session,
1572 CameraMetadata* metadata,
1573 std::shared_ptr<ICameraDevice>* device) {
1574 ASSERT_NE(nullptr, session);
1575 ASSERT_NE(nullptr, metadata);
1576 ASSERT_NE(nullptr, device);
1577
1578 ALOGI("openEmptyInjectionSession: Testing camera device %s", name.c_str());
1579 ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, device);
1580 ALOGI("openEmptyInjectionSession: getCameraDeviceInterface returns status:%d:%d",
1581 ret.getExceptionCode(), ret.getServiceSpecificError());
1582 ASSERT_TRUE(ret.isOk());
1583 ASSERT_NE(*device, nullptr);
1584
1585 std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>();
1586 ret = (*device)->openInjectionSession(cb, session);
1587 ALOGI("device::openInjectionSession returns status:%d:%d", ret.getExceptionCode(),
1588 ret.getServiceSpecificError());
1589
1590 if (static_cast<Status>(ret.getServiceSpecificError()) == Status::OPERATION_NOT_SUPPORTED &&
1591 *session == nullptr) {
1592 return; // Injection session not supported. Caller will receive nullptr in *session
1593 }
1594
1595 ASSERT_TRUE(ret.isOk());
1596 ASSERT_NE(*session, nullptr);
1597
1598 ret = (*device)->getCameraCharacteristics(metadata);
1599 ASSERT_TRUE(ret.isOk());
1600 }
1601
1602 Status CameraAidlTest::getJpegBufferSize(camera_metadata_t* staticMeta, int32_t* outBufSize) {
1603 if (nullptr == staticMeta || nullptr == outBufSize) {
1604 return Status::ILLEGAL_ARGUMENT;
1605 }
1606
1607 camera_metadata_ro_entry entry;
1608 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_JPEG_MAX_SIZE, &entry);
1609 if ((0 != rc) || (1 != entry.count)) {
1610 return Status::ILLEGAL_ARGUMENT;
1611 }
1612
1613 *outBufSize = entry.data.i32[0];
1614 return Status::OK;
1615 }
1616
1617 Dataspace CameraAidlTest::getDataspace(PixelFormat format) {
1618 switch (format) {
1619 case PixelFormat::BLOB:
1620 return Dataspace::JFIF;
1621 case PixelFormat::Y16:
1622 return Dataspace::DEPTH;
1623 case PixelFormat::RAW16:
1624 case PixelFormat::RAW_OPAQUE:
1625 case PixelFormat::RAW10:
1626 case PixelFormat::RAW12:
1627 return Dataspace::ARBITRARY;
1628 default:
1629 return Dataspace::UNKNOWN;
1630 }
1631 }
1632
1633 void CameraAidlTest::createStreamConfiguration(std::vector<Stream>& streams,
1634 StreamConfigurationMode configMode,
1635 StreamConfiguration* config,
1636 int32_t jpegBufferSize) {
1637 ASSERT_NE(nullptr, config);
1638
1639 for (auto& stream : streams) {
1640 stream.bufferSize =
1641 (stream.format == PixelFormat::BLOB && stream.dataSpace == Dataspace::JFIF)
1642 ? jpegBufferSize
1643 : 0;
1644 }
1645
1646 // Caller is responsible for filling in a non-zero config->streamConfigCounter after this returns.
1647 config->streams = streams;
1648 config->operationMode = configMode;
1649 config->multiResolutionInputImage = false;
1650 }
1651
1652 void CameraAidlTest::verifyStreamCombination(const std::shared_ptr<ICameraDevice>& device,
1653 const StreamConfiguration& config, bool expectedStatus,
1654 bool expectStreamCombQuery) {
1655 if (device != nullptr) {
1656 bool streamCombinationSupported;
1657 ScopedAStatus ret =
1658 device->isStreamCombinationSupported(config, &streamCombinationSupported);
1659 // TODO: Check whether accepting EX_UNSUPPORTED_OPERATION here is correct.
1660 ASSERT_TRUE(ret.isOk() ||
1661 (expectStreamCombQuery && ret.getExceptionCode() == EX_UNSUPPORTED_OPERATION));
1662 if (ret.isOk()) {
1663 ASSERT_EQ(expectedStatus, streamCombinationSupported);
1664 }
1665 }
1666 }
1667
1668 std::vector<ConcurrentCameraIdCombination> CameraAidlTest::getConcurrentDeviceCombinations(
1669 std::shared_ptr<ICameraProvider>& provider) {
1670 std::vector<ConcurrentCameraIdCombination> combinations;
1671 ndk::ScopedAStatus ret = provider->getConcurrentCameraIds(&combinations);
1672 if (!ret.isOk()) {
1673 ADD_FAILURE();
1674 }
1675
1676 return combinations;
1677 }
1678
1679 Status CameraAidlTest::getMandatoryConcurrentStreams(const camera_metadata_t* staticMeta,
1680 std::vector<AvailableStream>* outputStreams) {
1681 if (nullptr == staticMeta || nullptr == outputStreams) {
1682 return Status::ILLEGAL_ARGUMENT;
1683 }
1684
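// The size caps below follow the mandatory concurrent stream guarantees: a depth-only
// camera only needs a single Y16 stream (capped at 640x480 here), while every other camera
// needs a YUV stream capped at 1280x720 plus a JPEG (BLOB) stream capped at 1920x1440.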
1685 if (isDepthOnly(staticMeta)) {
1686 Size y16MaxSize(640, 480);
1687 Size maxAvailableY16Size;
1688 getMaxOutputSizeForFormat(staticMeta, PixelFormat::Y16, &maxAvailableY16Size);
1689 Size y16ChosenSize = getMinSize(y16MaxSize, maxAvailableY16Size);
1690 AvailableStream y16Stream = {.width = y16ChosenSize.width,
1691 .height = y16ChosenSize.height,
1692 .format = static_cast<int32_t>(PixelFormat::Y16)};
1693 outputStreams->push_back(y16Stream);
1694 return Status::OK;
1695 }
1696
1697 Size yuvMaxSize(1280, 720);
1698 Size jpegMaxSize(1920, 1440);
1699 Size maxAvailableYuvSize;
1700 Size maxAvailableJpegSize;
1701 getMaxOutputSizeForFormat(staticMeta, PixelFormat::YCBCR_420_888, &maxAvailableYuvSize);
1702 getMaxOutputSizeForFormat(staticMeta, PixelFormat::BLOB, &maxAvailableJpegSize);
1703 Size yuvChosenSize = getMinSize(yuvMaxSize, maxAvailableYuvSize);
1704 Size jpegChosenSize = getMinSize(jpegMaxSize, maxAvailableJpegSize);
1705
1706 AvailableStream yuvStream = {.width = yuvChosenSize.width,
1707 .height = yuvChosenSize.height,
1708 .format = static_cast<int32_t>(PixelFormat::YCBCR_420_888)};
1709
1710 AvailableStream jpegStream = {.width = jpegChosenSize.width,
1711 .height = jpegChosenSize.height,
1712 .format = static_cast<int32_t>(PixelFormat::BLOB)};
1713 outputStreams->push_back(yuvStream);
1714 outputStreams->push_back(jpegStream);
1715
1716 return Status::OK;
1717 }
1718
1719 bool CameraAidlTest::isDepthOnly(const camera_metadata_t* staticMeta) {
1720 camera_metadata_ro_entry scalerEntry;
1721 camera_metadata_ro_entry depthEntry;
1722
1723 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1724 &scalerEntry);
1725 if (rc == 0) {
1726 for (uint32_t i = 0; i < scalerEntry.count; i++) {
1727 if (scalerEntry.data.u8[i] ==
1728 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) {
1729 return false;
1730 }
1731 }
1732 }
1733
1734 for (uint32_t i = 0; rc == 0 && i < scalerEntry.count; i++) {
1735 if (scalerEntry.data.u8[i] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) {
1736 rc = find_camera_metadata_ro_entry(
1737 staticMeta, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, &depthEntry);
1738 size_t idx = 0;
1739 if (rc == 0 && depthEntry.data.i32[idx] == static_cast<int32_t>(PixelFormat::Y16)) {
1740 // only Depth16 format is supported now
1741 return true;
1742 }
1743 break;
1744 }
1745 }
1746
1747 return false;
1748 }
1749
1750 Status CameraAidlTest::getMaxOutputSizeForFormat(const camera_metadata_t* staticMeta,
1751 PixelFormat format, Size* size,
1752 bool maxResolution) {
1753 std::vector<AvailableStream> outputStreams;
1754 if (size == nullptr ||
1755 getAvailableOutputStreams(staticMeta, outputStreams,
1756 /*threshold*/ nullptr, maxResolution) != Status::OK) {
1757 return Status::ILLEGAL_ARGUMENT;
1758 }
1759 Size maxSize;
1760 bool found = false;
1761 for (auto& outputStream : outputStreams) {
1762 if (static_cast<int32_t>(format) == outputStream.format &&
1763 (outputStream.width * outputStream.height > maxSize.width * maxSize.height)) {
1764 maxSize.width = outputStream.width;
1765 maxSize.height = outputStream.height;
1766 found = true;
1767 }
1768 }
1769 if (!found) {
1770 ALOGE("%s :chosen format %d not found", __FUNCTION__, static_cast<int32_t>(format));
1771 return Status::ILLEGAL_ARGUMENT;
1772 }
1773 *size = maxSize;
1774 return Status::OK;
1775 }
1776
1777 Size CameraAidlTest::getMinSize(Size a, Size b) {
1778 if (a.width * a.height < b.width * b.height) {
1779 return a;
1780 }
1781 return b;
1782 }
1783
1784 Status CameraAidlTest::getZSLInputOutputMap(camera_metadata_t* staticMeta,
1785 std::vector<AvailableZSLInputOutput>& inputOutputMap) {
1786 if (nullptr == staticMeta) {
1787 return Status::ILLEGAL_ARGUMENT;
1788 }
1789
1790 camera_metadata_ro_entry entry;
1791 int rc = find_camera_metadata_ro_entry(
1792 staticMeta, ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, &entry);
1793 if ((0 != rc) || (0 >= entry.count)) {
1794 return Status::ILLEGAL_ARGUMENT;
1795 }
1796
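// The map is stored as a flattened sequence of entries: an input format, the number of
// supported output formats N, then the N output formats (for example:
// IMPLEMENTATION_DEFINED, 2, YCBCR_420_888, BLOB). Expand each entry into individual
// input/output pairs below.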
1797 const int32_t* contents = &entry.data.i32[0];
1798 for (size_t i = 0; i < entry.count;) {
1799 int32_t inputFormat = contents[i++];
1800 int32_t length = contents[i++];
1801 for (int32_t j = 0; j < length; j++) {
1802 int32_t outputFormat = contents[i + j];
1803 AvailableZSLInputOutput zslEntry = {inputFormat, outputFormat};
1804 inputOutputMap.push_back(zslEntry);
1805 }
1806 i += length;
1807 }
1808
1809 return Status::OK;
1810 }
1811
1812 Status CameraAidlTest::findLargestSize(const std::vector<AvailableStream>& streamSizes,
1813 int32_t format, AvailableStream& result) {
1814 result = {0, 0, 0};
1815 for (auto& iter : streamSizes) {
1816 if (format == iter.format) {
1817 if ((result.width * result.height) < (iter.width * iter.height)) {
1818 result = iter;
1819 }
1820 }
1821 }
1822
1823 return (result.format == format) ? Status::OK : Status::ILLEGAL_ARGUMENT;
1824 }
1825
1826 void CameraAidlTest::constructFilteredSettings(
1827 const std::shared_ptr<ICameraDeviceSession>& session,
1828 const std::unordered_set<int32_t>& availableKeys, RequestTemplate reqTemplate,
1829 android::hardware::camera::common::V1_0::helper::CameraMetadata* defaultSettings,
1830 android::hardware::camera::common::V1_0::helper::CameraMetadata* filteredSettings) {
1831 ASSERT_NE(defaultSettings, nullptr);
1832 ASSERT_NE(filteredSettings, nullptr);
1833
1834 CameraMetadata req;
1835 auto ret = session->constructDefaultRequestSettings(reqTemplate, &req);
1836 ASSERT_TRUE(ret.isOk());
1837
1838 const camera_metadata_t* metadata =
1839 clone_camera_metadata(reinterpret_cast<const camera_metadata_t*>(req.metadata.data()));
1840 size_t expectedSize = req.metadata.size();
1841 int result = validate_camera_metadata_structure(metadata, &expectedSize);
1842 ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
1843
1844 size_t entryCount = get_camera_metadata_entry_count(metadata);
1845 ASSERT_GT(entryCount, 0u);
1846 *defaultSettings = metadata;
1847
1848 const android::hardware::camera::common::V1_0::helper::CameraMetadata& constSettings =
1849 *defaultSettings;
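// Copy into the filtered settings only those template entries whose tags appear in the
// caller-provided set of available keys.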
1850 for (const auto& keyIt : availableKeys) {
1851 camera_metadata_ro_entry entry = constSettings.find(keyIt);
1852 if (entry.count > 0) {
1853 filteredSettings->update(entry);
1854 }
1855 }
1856 }
1857
1858 void CameraAidlTest::verifySessionReconfigurationQuery(
1859 const std::shared_ptr<ICameraDeviceSession>& session, camera_metadata* oldSessionParams,
1860 camera_metadata* newSessionParams) {
1861 ASSERT_NE(nullptr, session);
1862 ASSERT_NE(nullptr, oldSessionParams);
1863 ASSERT_NE(nullptr, newSessionParams);
1864
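// Wrap both raw camera_metadata blobs into AIDL CameraMetadata parcels before asking the
// session whether a reconfiguration would be required.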
1865 std::vector<uint8_t> oldParams =
1866 std::vector(reinterpret_cast<uint8_t*>(oldSessionParams),
1867 reinterpret_cast<uint8_t*>(oldSessionParams) +
1868 get_camera_metadata_size(oldSessionParams));
1869 CameraMetadata oldMetadata = {oldParams};
1870
1871 std::vector<uint8_t> newParams =
1872 std::vector(reinterpret_cast<uint8_t*>(newSessionParams),
1873 reinterpret_cast<uint8_t*>(newSessionParams) +
1874 get_camera_metadata_size(newSessionParams));
1875 CameraMetadata newMetadata = {newParams};
1876
1877 bool reconfigReq;
1878 ndk::ScopedAStatus ret =
1879 session->isReconfigurationRequired(oldMetadata, newMetadata, &reconfigReq);
1880 ASSERT_TRUE(ret.isOk() || static_cast<Status>(ret.getServiceSpecificError()) ==
1881 Status::OPERATION_NOT_SUPPORTED);
1882 }
1883
1884 Status CameraAidlTest::isConstrainedModeAvailable(camera_metadata_t* staticMeta) {
1885 Status ret = Status::OPERATION_NOT_SUPPORTED;
1886 if (nullptr == staticMeta) {
1887 return Status::ILLEGAL_ARGUMENT;
1888 }
1889
1890 camera_metadata_ro_entry entry;
1891 int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
1892 &entry);
1893 if (0 != rc) {
1894 return Status::ILLEGAL_ARGUMENT;
1895 }
1896
1897 for (size_t i = 0; i < entry.count; i++) {
1898 if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO ==
1899 entry.data.u8[i]) {
1900 ret = Status::OK;
1901 break;
1902 }
1903 }
1904
1905 return ret;
1906 }
1907
1908 Status CameraAidlTest::pickConstrainedModeSize(camera_metadata_t* staticMeta,
1909 AvailableStream& hfrStream) {
1910 if (nullptr == staticMeta) {
1911 return Status::ILLEGAL_ARGUMENT;
1912 }
1913
1914 camera_metadata_ro_entry entry;
1915 int rc = find_camera_metadata_ro_entry(
1916 staticMeta, ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, &entry);
1917 if (0 != rc) {
1918 return Status::OPERATION_NOT_SUPPORTED;
1919 } else if (0 != (entry.count % 5)) {
1920 return Status::ILLEGAL_ARGUMENT;
1921 }
1922
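// Each high speed video configuration entry is a 5-tuple of
// (width, height, fpsMin, fpsMax, batchSizeMax); pick the largest advertised resolution.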
1923 hfrStream = {0, 0, static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1924 for (size_t i = 0; i < entry.count; i += 5) {
1925 int32_t w = entry.data.i32[i];
1926 int32_t h = entry.data.i32[i + 1];
1927 if ((hfrStream.width * hfrStream.height) < (w * h)) {
1928 hfrStream.width = w;
1929 hfrStream.height = h;
1930 }
1931 }
1932
1933 return Status::OK;
1934 }
1935
1936 void CameraAidlTest::processCaptureRequestInternal(uint64_t bufferUsage,
1937 RequestTemplate reqTemplate,
1938 bool useSecureOnlyCameras) {
1939 std::vector<std::string> cameraDeviceNames =
1940 getCameraDeviceNames(mProvider, useSecureOnlyCameras);
1941 AvailableStream streamThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
1942 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
1943 int64_t bufferId = 1;
1944 int32_t frameNumber = 1;
1945 CameraMetadata settings;
1946
1947 for (const auto& name : cameraDeviceNames) {
1948 Stream testStream;
1949 std::vector<HalStream> halStreams;
1950 std::shared_ptr<ICameraDeviceSession> session;
1951 std::shared_ptr<DeviceCb> cb;
1952 bool supportsPartialResults = false;
1953 bool useHalBufManager = false;
1954 int32_t partialResultCount = 0;
1955 configureSingleStream(name, mProvider, &streamThreshold, bufferUsage, reqTemplate,
1956 &session /*out*/, &testStream /*out*/, &halStreams /*out*/,
1957 &supportsPartialResults /*out*/, &partialResultCount /*out*/,
1958 &useHalBufManager /*out*/, &cb /*out*/);
1959
1960 ASSERT_NE(session, nullptr);
1961 ASSERT_NE(cb, nullptr);
1962 ASSERT_FALSE(halStreams.empty());
1963
1964 std::shared_ptr<ResultMetadataQueue> resultQueue;
1965 ::aidl::android::hardware::common::fmq::MQDescriptor<
1966 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
1967 descriptor;
1968 ndk::ScopedAStatus ret = session->getCaptureResultMetadataQueue(&descriptor);
1969 ASSERT_TRUE(ret.isOk());
1970
1971 resultQueue = std::make_shared<ResultMetadataQueue>(descriptor);
1972 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
1973 ALOGE("%s: HAL returns empty result metadata fmq,"
1974 " not use it",
1975 __func__);
1976 resultQueue = nullptr;
1977 // Don't use the queue onwards.
1978 }
1979
1980 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
1981 1, false, supportsPartialResults, partialResultCount, resultQueue);
1982
1983 CameraMetadata req;
1984 ret = session->constructDefaultRequestSettings(reqTemplate, &req);
1985 ASSERT_TRUE(ret.isOk());
1986 settings = req;
1987
1988 overrideRotateAndCrop(&settings);
1989
1990 std::vector<CaptureRequest> requests(1);
1991 CaptureRequest& request = requests[0];
1992 request.frameNumber = frameNumber;
1993 request.fmqSettingsSize = 0;
1994 request.settings = settings;
1995
1996 std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
1997 outputBuffers.resize(1);
1998 StreamBuffer& outputBuffer = outputBuffers[0];
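// With HAL-side buffer management the HAL requests output buffers on demand, so only the
// stream id is filled in here; otherwise allocate a gralloc buffer and attach it to the
// request up front.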
1999 if (useHalBufManager) {
2000 outputBuffer = {halStreams[0].id,
2001 /*bufferId*/ 0, NativeHandle(), BufferStatus::OK,
2002 NativeHandle(), NativeHandle()};
2003 } else {
2004 buffer_handle_t handle;
2005 allocateGraphicBuffer(
2006 testStream.width, testStream.height,
2007 /* We don't look at halStreams[0].consumerUsage
2008 * since that is 0 for output streams
2009 */
2010 android_convertGralloc1To0Usage(
2011 static_cast<uint64_t>(halStreams[0].producerUsage), bufferUsage),
2012 halStreams[0].overrideFormat, &handle);
2013
2014 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(handle),
2015 BufferStatus::OK, NativeHandle(), NativeHandle()};
2016 }
2017 request.inputBuffer = {-1,
2018 0,
2019 NativeHandle(),
2020 BufferStatus::ERROR,
2021 NativeHandle(),
2022 NativeHandle()}; // Empty Input Buffer
2023
2024 {
2025 std::unique_lock<std::mutex> l(mLock);
2026 mInflightMap.clear();
2027 mInflightMap.insert(std::make_pair(frameNumber, inflightReq));
2028 }
2029
2030 int32_t numRequestProcessed = 0;
2031 std::vector<BufferCache> cachesToRemove;
2032 ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2033 ALOGI("processCaptureRequestInternal: processCaptureRequest returns status: %d:%d",
2034 ret.getExceptionCode(), ret.getServiceSpecificError());
2035
2036 ASSERT_TRUE(ret.isOk());
2037 ASSERT_EQ(numRequestProcessed, 1u);
2038
2039 {
2040 std::unique_lock<std::mutex> l(mLock);
2041 while (!inflightReq->errorCodeValid &&
2042 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2043 auto timeout = std::chrono::system_clock::now() +
2044 std::chrono::seconds(kStreamBufferTimeoutSec);
2045 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2046 }
2047
2048 ASSERT_FALSE(inflightReq->errorCodeValid);
2049 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2050 ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2051
2052 // shutterReadoutTimestamp must be available, and it must
2053 // be >= shutterTimestamp + exposureTime,
2054 // and < shutterTimestamp + exposureTime + rollingShutterSkew / 2.
2055 ASSERT_TRUE(inflightReq->shutterReadoutTimestampValid);
2056 ASSERT_FALSE(inflightReq->collectedResult.isEmpty());
2057
2058 if (inflightReq->collectedResult.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
2059 camera_metadata_entry_t exposureTimeResult =
2060 inflightReq->collectedResult.find(ANDROID_SENSOR_EXPOSURE_TIME);
2061 nsecs_t exposureToReadout =
2062 inflightReq->shutterReadoutTimestamp - inflightReq->shutterTimestamp;
2063 ASSERT_GE(exposureToReadout, exposureTimeResult.data.i64[0]);
2064 if (inflightReq->collectedResult.exists(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW)) {
2065 camera_metadata_entry_t rollingShutterSkew =
2066 inflightReq->collectedResult.find(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
2067 ASSERT_LT(exposureToReadout,
2068 exposureTimeResult.data.i64[0] + rollingShutterSkew.data.i64[0] / 2);
2069 }
2070 }
2071
2072 request.frameNumber++;
2073 // Empty settings should be supported after the first call
2074 // for repeating requests.
2075 request.settings.metadata.clear();
2076 // The buffer has been registered to HAL by bufferId, so per
2077 // API contract we should send a null handle for this buffer
2078 request.outputBuffers[0].buffer = NativeHandle();
2079 mInflightMap.clear();
2080 inflightReq = std::make_shared<InFlightRequest>(1, false, supportsPartialResults,
2081 partialResultCount, resultQueue);
2082 mInflightMap.insert(std::make_pair(request.frameNumber, inflightReq));
2083 }
2084
2085 ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2086 ALOGI("processCaptureRequestInternal: processCaptureRequest returns status: %d:%d",
2087 ret.getExceptionCode(), ret.getServiceSpecificError());
2088 ASSERT_TRUE(ret.isOk());
2089 ASSERT_EQ(numRequestProcessed, 1u);
2090
2091 {
2092 std::unique_lock<std::mutex> l(mLock);
2093 while (!inflightReq->errorCodeValid &&
2094 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2095 auto timeout = std::chrono::system_clock::now() +
2096 std::chrono::seconds(kStreamBufferTimeoutSec);
2097 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2098 }
2099
2100 ASSERT_FALSE(inflightReq->errorCodeValid);
2101 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2102 ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2103 }
2104
2105 if (useHalBufManager) {
2106 verifyBuffersReturned(session, testStream.id, cb);
2107 }
2108
2109 ret = session->close();
2110 ASSERT_TRUE(ret.isOk());
2111 }
2112 }
2113
2114 void CameraAidlTest::configureSingleStream(
2115 const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
2116 const AvailableStream* previewThreshold, uint64_t bufferUsage, RequestTemplate reqTemplate,
2117 std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream,
2118 std::vector<HalStream>* halStreams, bool* supportsPartialResults,
2119 int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb,
2120 uint32_t streamConfigCounter) {
2121 ASSERT_NE(nullptr, session);
2122 ASSERT_NE(nullptr, previewStream);
2123 ASSERT_NE(nullptr, halStreams);
2124 ASSERT_NE(nullptr, supportsPartialResults);
2125 ASSERT_NE(nullptr, partialResultCount);
2126 ASSERT_NE(nullptr, useHalBufManager);
2127 ASSERT_NE(nullptr, cb);
2128
2129 std::vector<AvailableStream> outputPreviewStreams;
2130 std::shared_ptr<ICameraDevice> device;
2131 ALOGI("configureStreams: Testing camera device %s", name.c_str());
2132
2133 ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2134 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2135 ret.getServiceSpecificError());
2136 ASSERT_TRUE(ret.isOk());
2137 ASSERT_NE(device, nullptr);
2138
2139 camera_metadata_t* staticMeta;
2140 CameraMetadata chars;
2141 ret = device->getCameraCharacteristics(&chars);
2142 ASSERT_TRUE(ret.isOk());
2143 staticMeta = clone_camera_metadata(
2144 reinterpret_cast<const camera_metadata_t*>(chars.metadata.data()));
2145 ASSERT_NE(nullptr, staticMeta);
2146
2147 size_t expectedSize = chars.metadata.size();
2148 ALOGE("validate_camera_metadata_structure: %d",
2149 validate_camera_metadata_structure(staticMeta, &expectedSize));
2150
2151 camera_metadata_ro_entry entry;
2152 auto status =
2153 find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
2154 if ((0 == status) && (entry.count > 0)) {
2155 *partialResultCount = entry.data.i32[0];
2156 *supportsPartialResults = (*partialResultCount > 1);
2157 }
2158
2159 *cb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
2160
2161 ret = device->open(*cb, session);
2162 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
2163 ret.getServiceSpecificError());
2164 ASSERT_TRUE(ret.isOk());
2165 ASSERT_NE(*session, nullptr);
2166
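// Detect whether the device advertises session-level HAL buffer management, in which case
// output buffers are requested by the HAL through the callback instead of being supplied
// with each capture request.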
2167 *useHalBufManager = false;
2168 status = find_camera_metadata_ro_entry(
2169 staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
2170 if ((0 == status) && (entry.count == 1)) {
2171 *useHalBufManager = (entry.data.u8[0] ==
2172 ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
2173 }
2174
2175 outputPreviewStreams.clear();
2176 auto rc = getAvailableOutputStreams(staticMeta, outputPreviewStreams, previewThreshold);
2177
2178 int32_t jpegBufferSize = 0;
2179 ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize));
2180 ASSERT_NE(0u, jpegBufferSize);
2181
2182 ASSERT_EQ(Status::OK, rc);
2183 ASSERT_FALSE(outputPreviewStreams.empty());
2184
2185 Dataspace dataspace = Dataspace::UNKNOWN;
2186 switch (static_cast<PixelFormat>(outputPreviewStreams[0].format)) {
2187 case PixelFormat::Y16:
2188 dataspace = Dataspace::DEPTH;
2189 break;
2190 default:
2191 dataspace = Dataspace::UNKNOWN;
2192 }
2193
2194 std::vector<Stream> streams(1);
2195 streams[0] = {0,
2196 StreamType::OUTPUT,
2197 outputPreviewStreams[0].width,
2198 outputPreviewStreams[0].height,
2199 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2200 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(bufferUsage),
2201 dataspace,
2202 StreamRotation::ROTATION_0,
2203 "",
2204 0,
2205 /*groupId*/ -1,
2206 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2207 RequestAvailableDynamicRangeProfilesMap::
2208 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2209
2210 StreamConfiguration config;
2211 config.streams = streams;
2212 createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config,
2213 jpegBufferSize);
2214 if (*session != nullptr) {
2215 CameraMetadata sessionParams;
2216 ret = (*session)->constructDefaultRequestSettings(reqTemplate, &sessionParams);
2217 ASSERT_TRUE(ret.isOk());
2218 config.sessionParams = sessionParams;
2219 config.streamConfigCounter = (int32_t)streamConfigCounter;
2220
2221 bool supported = false;
2222 ret = device->isStreamCombinationSupported(config, &supported);
2223 ASSERT_TRUE(ret.isOk());
2224 ASSERT_EQ(supported, true);
2225
2226 std::vector<HalStream> halConfigs;
2227 ret = (*session)->configureStreams(config, &halConfigs);
2228 ALOGI("configureStreams returns status: %d:%d", ret.getExceptionCode(),
2229 ret.getServiceSpecificError());
2230 ASSERT_TRUE(ret.isOk());
2231 ASSERT_EQ(1u, halConfigs.size());
2232 halStreams->clear();
2233 halStreams->push_back(halConfigs[0]);
2234 if (*useHalBufManager) {
2235 std::vector<Stream> ss(1);
2236 std::vector<HalStream> hs(1);
2237 ss[0] = config.streams[0];
2238 hs[0] = halConfigs[0];
2239 (*cb)->setCurrentStreamConfig(ss, hs);
2240 }
2241 }
2242 *previewStream = config.streams[0];
2243 ASSERT_TRUE(ret.isOk());
2244 }
2245
2246 void CameraAidlTest::overrideRotateAndCrop(CameraMetadata* settings) {
2247 if (settings == nullptr) {
2248 return;
2249 }
2250
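// If the template leaves ANDROID_SCALER_ROTATE_AND_CROP at AUTO, override it to NONE so
// the HAL does not apply a rotate-and-crop transform to the buffers under test.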
2251 ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta =
2252 clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(settings->metadata.data()));
2253 auto entry = requestMeta.find(ANDROID_SCALER_ROTATE_AND_CROP);
2254 if ((entry.count > 0) && (entry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO)) {
2255 uint8_t disableRotateAndCrop = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
2256 requestMeta.update(ANDROID_SCALER_ROTATE_AND_CROP, &disableRotateAndCrop, 1);
2257 settings->metadata.clear();
2258 camera_metadata_t* metaBuffer = requestMeta.release();
2259 uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
2260 settings->metadata =
2261 std::vector(rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
2262 }
2263 }
2264
2265 void CameraAidlTest::verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session,
2266 int32_t streamId, const std::shared_ptr<DeviceCb>& cb,
2267 uint32_t streamConfigCounter) {
2268 ASSERT_NE(nullptr, session);
2269
2270 std::vector<int32_t> streamIds(1);
2271 streamIds[0] = streamId;
2272 session->signalStreamFlush(streamIds, /*streamConfigCounter*/ streamConfigCounter);
2273 cb->waitForBuffersReturned();
2274 }
2275
2276 void CameraAidlTest::processPreviewStabilizationCaptureRequestInternal(
2277 bool previewStabilizationOn,
2278 // Used as output when preview stabilization is off, and as input when it is on.
2279 std::unordered_map<std::string, nsecs_t>& cameraDeviceToTimeLag) {
2280 std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
2281 AvailableStream streamThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2282 static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
2283 int64_t bufferId = 1;
2284 int32_t frameNumber = 1;
2285 std::vector<uint8_t> settings;
2286
2287 for (const auto& name : cameraDeviceNames) {
2288 if (!supportsPreviewStabilization(name, mProvider)) {
2289 ALOGI(" %s Camera device %s doesn't support preview stabilization, skipping", __func__,
2290 name.c_str());
2291 continue;
2292 }
2293
2294 Stream testStream;
2295 std::vector<HalStream> halStreams;
2296 std::shared_ptr<ICameraDeviceSession> session;
2297 std::shared_ptr<DeviceCb> cb;
2298 bool supportsPartialResults = false;
2299 bool useHalBufManager = false;
2300 int32_t partialResultCount = 0;
2301 configureSingleStream(name, mProvider, &streamThreshold, GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
2302 RequestTemplate::PREVIEW, &session /*out*/, &testStream /*out*/,
2303 &halStreams /*out*/, &supportsPartialResults /*out*/,
2304 &partialResultCount /*out*/, &useHalBufManager /*out*/, &cb /*out*/);
2305
2306 ::aidl::android::hardware::common::fmq::MQDescriptor<
2307 int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
2308 descriptor;
2309 ndk::ScopedAStatus resultQueueRet = session->getCaptureResultMetadataQueue(&descriptor);
2310 ASSERT_TRUE(resultQueueRet.isOk());
2311
2312 std::shared_ptr<ResultMetadataQueue> resultQueue =
2313 std::make_shared<ResultMetadataQueue>(descriptor);
2314 if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
2315 ALOGE("%s: HAL returns empty result metadata fmq,"
2316 " not use it",
2317 __func__);
2318 resultQueue = nullptr;
2319 // Don't use the queue onwards.
2320 }
2321
2322 std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
2323 1, false, supportsPartialResults, partialResultCount, resultQueue);
2324
2325 CameraMetadata defaultMetadata;
2326 android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
2327 ndk::ScopedAStatus ret = session->constructDefaultRequestSettings(RequestTemplate::PREVIEW,
2328 &defaultMetadata);
2329 ASSERT_TRUE(ret.isOk());
2330
2331 const camera_metadata_t* metadata =
2332 reinterpret_cast<const camera_metadata_t*>(defaultMetadata.metadata.data());
2333 defaultSettings = metadata;
2334 android::status_t metadataRet = ::android::OK;
2335 uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
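// Request the stabilization mode under test: OFF for the baseline pass, and
// PREVIEW_STABILIZATION when measuring the stabilized time lag.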
2336 if (previewStabilizationOn) {
2337 videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION;
2338 metadataRet = defaultSettings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
2339 &videoStabilizationMode, 1);
2340 } else {
2341 metadataRet = defaultSettings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
2342 &videoStabilizationMode, 1);
2343 }
2344 ASSERT_EQ(metadataRet, ::android::OK);
2345
2346 camera_metadata_t* releasedMetadata = defaultSettings.release();
2347 uint8_t* rawMetadata = reinterpret_cast<uint8_t*>(releasedMetadata);
2348
2349 buffer_handle_t buffer_handle;
2350
2351 std::vector<CaptureRequest> requests(1);
2352 CaptureRequest& request = requests[0];
2353 request.frameNumber = frameNumber;
2354 request.fmqSettingsSize = 0;
2355 request.settings.metadata =
2356 std::vector(rawMetadata, rawMetadata + get_camera_metadata_size(releasedMetadata));
2357 overrideRotateAndCrop(&request.settings);
2358 request.outputBuffers = std::vector<StreamBuffer>(1);
2359 StreamBuffer& outputBuffer = request.outputBuffers[0];
2360 if (useHalBufManager) {
2361 outputBuffer = {halStreams[0].id,
2362 /*bufferId*/ 0, NativeHandle(), BufferStatus::OK,
2363 NativeHandle(), NativeHandle()};
2364 } else {
2365 allocateGraphicBuffer(testStream.width, testStream.height,
2366 /* We don't look at halStreams[0].consumerUsage
2367 * since that is 0 for output streams
2368 */
2369 android_convertGralloc1To0Usage(
2370 static_cast<uint64_t>(halStreams[0].producerUsage),
2371 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2372 halStreams[0].overrideFormat, &buffer_handle);
2373 outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle),
2374 BufferStatus::OK, NativeHandle(), NativeHandle()};
2375 }
2376 request.inputBuffer = {
2377 -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
2378
2379 {
2380 std::unique_lock<std::mutex> l(mLock);
2381 mInflightMap.clear();
2382 mInflightMap.insert(std::make_pair(frameNumber, inflightReq));
2383 }
2384
2385 int32_t numRequestProcessed = 0;
2386 std::vector<BufferCache> cachesToRemove;
2387 ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
2388 ASSERT_TRUE(ret.isOk());
2389 ASSERT_EQ(numRequestProcessed, 1u);
2390
2391 {
2392 std::unique_lock<std::mutex> l(mLock);
2393 while (!inflightReq->errorCodeValid &&
2394 ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
2395 auto timeout = std::chrono::system_clock::now() +
2396 std::chrono::seconds(kStreamBufferTimeoutSec);
2397 ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
2398 }
2399 waitForReleaseFence(inflightReq->resultOutputBuffers);
2400
2401 ASSERT_FALSE(inflightReq->errorCodeValid);
2402 ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
2403 ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId);
2404 ASSERT_TRUE(inflightReq->shutterReadoutTimestampValid);
2405 nsecs_t readoutTimestamp = inflightReq->shutterReadoutTimestamp;
2406
2407 if (previewStabilizationOn) {
2408 // Here we collect the time difference between the buffer ready
2409 // timestamp - notify readout timestamp.
2410 // timeLag = buffer ready timestamp - notify readout timestamp.
2411 // timeLag(previewStabilization) must be <=
2412 // timeLag(stabilization off) + 1 frame duration.
2413 auto it = cameraDeviceToTimeLag.find(name);
2414 camera_metadata_entry e;
2415 e = inflightReq->collectedResult.find(ANDROID_SENSOR_FRAME_DURATION);
2416 ASSERT_TRUE(e.count > 0);
2417 nsecs_t frameDuration = e.data.i64[0];
2418 ASSERT_TRUE(it != cameraDeviceToTimeLag.end());
2419
2420 nsecs_t previewStabOnLagTime =
2421 inflightReq->resultOutputBuffers[0].timeStamp - readoutTimestamp;
2422 ASSERT_TRUE(previewStabOnLagTime <= (it->second + frameDuration));
2423 } else {
2424 // Record the baseline lag: buffer ready timestamp - notify readout timestamp.
2425 cameraDeviceToTimeLag[std::string(name)] =
2426 inflightReq->resultOutputBuffers[0].timeStamp - readoutTimestamp;
2427 }
2428 }
2429
2430 if (useHalBufManager) {
2431 verifyBuffersReturned(session, testStream.id, cb);
2432 }
2433
2434 ret = session->close();
2435 ASSERT_TRUE(ret.isOk());
2436 }
2437 }
2438
2439 bool CameraAidlTest::supportsPreviewStabilization(
2440 const std::string& name, const std::shared_ptr<ICameraProvider>& provider) {
2441 std::shared_ptr<ICameraDevice> device;
2442 ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2443 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2444 ret.getServiceSpecificError());
2445 if (!ret.isOk() || device == nullptr) {
2446 ADD_FAILURE() << "Failed to get camera device interface for " << name;
return false;
2447 }
2448
2449 CameraMetadata metadata;
2450 ret = device->getCameraCharacteristics(&metadata);
2451 camera_metadata_t* staticMeta = clone_camera_metadata(
2452 reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
2453 if (!ret.isOk() || staticMeta == nullptr) {
2454 ADD_FAILURE() << "Failed to get camera characteristics for " << name;
return false;
2455 }
2456 // Go through the characteristics and see if video stabilization modes have
2457 // preview stabilization
2458 camera_metadata_ro_entry entry;
2459
2460 int retcode = find_camera_metadata_ro_entry(
2461 staticMeta, ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, &entry);
2462 if ((0 == retcode) && (entry.count > 0)) {
2463 for (size_t i = 0; i < entry.count; i++) {
2464 if (entry.data.u8[i] ==
2465 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION) {
2466 return true;
2467 }
2468 }
2469 }
2470 return false;
2471 }
2472
2473 void CameraAidlTest::configurePreviewStreams(
2474 const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
2475 const AvailableStream* previewThreshold, const std::unordered_set<std::string>& physicalIds,
2476 std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream,
2477 std::vector<HalStream>* halStreams, bool* supportsPartialResults,
2478 int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb,
2479 int32_t streamConfigCounter, bool allowUnsupport) {
2480 ASSERT_NE(nullptr, session);
2481 ASSERT_NE(nullptr, halStreams);
2482 ASSERT_NE(nullptr, previewStream);
2483 ASSERT_NE(nullptr, supportsPartialResults);
2484 ASSERT_NE(nullptr, partialResultCount);
2485 ASSERT_NE(nullptr, useHalBufManager);
2486 ASSERT_NE(nullptr, cb);
2487
2488 ASSERT_FALSE(physicalIds.empty());
2489
2490 std::vector<AvailableStream> outputPreviewStreams;
2491 std::shared_ptr<ICameraDevice> device;
2492 ALOGI("configureStreams: Testing camera device %s", name.c_str());
2493
2494 ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2495 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2496 ret.getServiceSpecificError());
2497 ASSERT_TRUE(ret.isOk());
2498 ASSERT_NE(device, nullptr);
2499
2500 CameraMetadata meta;
2501 ret = device->getCameraCharacteristics(&meta);
2502 ASSERT_TRUE(ret.isOk());
2503 camera_metadata_t* staticMeta =
2504 clone_camera_metadata(reinterpret_cast<const camera_metadata_t*>(meta.metadata.data()));
2505 ASSERT_NE(nullptr, staticMeta);
2506
2507 camera_metadata_ro_entry entry;
2508 auto status =
2509 find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
2510 if ((0 == status) && (entry.count > 0)) {
2511 *partialResultCount = entry.data.i32[0];
2512 *supportsPartialResults = (*partialResultCount > 1);
2513 }
2514
2515 *cb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
2516 ret = device->open(*cb, session);
2517 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
2518 ret.getServiceSpecificError());
2519 ASSERT_TRUE(ret.isOk());
2520 ASSERT_NE(*session, nullptr);
2521
2522 *useHalBufManager = false;
2523 status = find_camera_metadata_ro_entry(
2524 staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
2525 if ((0 == status) && (entry.count == 1)) {
2526 *useHalBufManager = (entry.data.u8[0] ==
2527 ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
2528 }
2529
2530 outputPreviewStreams.clear();
2531 Status rc = getAvailableOutputStreams(staticMeta, outputPreviewStreams, previewThreshold);
2532
2533 ASSERT_EQ(Status::OK, rc);
2534 ASSERT_FALSE(outputPreviewStreams.empty());
2535
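// Create one preview output stream per physical camera id, all sharing the size and
// format of the first available preview stream.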
2536 std::vector<Stream> streams(physicalIds.size());
2537 int32_t streamId = 0;
2538 for (auto const& physicalId : physicalIds) {
2539 streams[streamId] = {streamId,
2540 StreamType::OUTPUT,
2541 outputPreviewStreams[0].width,
2542 outputPreviewStreams[0].height,
2543 static_cast<PixelFormat>(outputPreviewStreams[0].format),
2544 static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
2545 GRALLOC1_CONSUMER_USAGE_HWCOMPOSER),
2546 Dataspace::UNKNOWN,
2547 StreamRotation::ROTATION_0,
2548 physicalId,
2549 0,
2550 -1,
2551 {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
2552 RequestAvailableDynamicRangeProfilesMap::
2553 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};
2554 streamId++;
2555 }
2556
2557 StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE, CameraMetadata()};
2558
2559 RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
2560 ret = (*session)->constructDefaultRequestSettings(reqTemplate, &config.sessionParams);
2561 ASSERT_TRUE(ret.isOk());
2562
2563 bool supported = false;
2564 ret = device->isStreamCombinationSupported(config, &supported);
2565 ASSERT_TRUE(ret.isOk());
2566 if (allowUnsupport && !supported) {
2567 // Stream combination not supported; return a null session to the caller.
2568 ret = (*session)->close();
2569 ASSERT_TRUE(ret.isOk());
2570 *session = nullptr;
2571 return;
2572 }
2573 ASSERT_TRUE(supported) << "Stream combination must be supported.";
2574
2575 config.streamConfigCounter = streamConfigCounter;
2576 std::vector<HalStream> halConfigs;
2577 ret = (*session)->configureStreams(config, &halConfigs);
2578 ASSERT_TRUE(ret.isOk());
2579 ASSERT_EQ(physicalIds.size(), halConfigs.size());
2580 *halStreams = halConfigs;
2581 if (*useHalBufManager) {
2582 std::vector<Stream> ss(physicalIds.size());
2583 std::vector<HalStream> hs(physicalIds.size());
2584 for (size_t i = 0; i < physicalIds.size(); i++) {
2585 ss[i] = streams[i];
2586 hs[i] = halConfigs[i];
2587 }
2588 (*cb)->setCurrentStreamConfig(ss, hs);
2589 }
2590 *previewStream = streams[0];
2591 ASSERT_TRUE(ret.isOk());
2592 }
2593
2594 void CameraAidlTest::verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session,
2595 const std::vector<int32_t>& streamIds,
2596 const std::shared_ptr<DeviceCb>& cb,
2597 uint32_t streamConfigCounter) {
2598 ndk::ScopedAStatus ret =
2599 session->signalStreamFlush(streamIds, /*streamConfigCounter*/ streamConfigCounter);
2600 ASSERT_TRUE(ret.isOk());
2601 cb->waitForBuffersReturned();
2602 }
2603
2604 void CameraAidlTest::configureStreams(const std::string& name,
2605 const std::shared_ptr<ICameraProvider>& provider,
2606 PixelFormat format,
2607 std::shared_ptr<ICameraDeviceSession>* session,
2608 Stream* previewStream, std::vector<HalStream>* halStreams,
2609 bool* supportsPartialResults, int32_t* partialResultCount,
2610 bool* useHalBufManager, std::shared_ptr<DeviceCb>* outCb,
2611 uint32_t streamConfigCounter, bool maxResolution,
2612 RequestAvailableDynamicRangeProfilesMap prof) {
2613 ASSERT_NE(nullptr, session);
2614 ASSERT_NE(nullptr, halStreams);
2615 ASSERT_NE(nullptr, previewStream);
2616 ASSERT_NE(nullptr, supportsPartialResults);
2617 ASSERT_NE(nullptr, partialResultCount);
2618 ASSERT_NE(nullptr, useHalBufManager);
2619 ASSERT_NE(nullptr, outCb);
2620
2621 ALOGI("configureStreams: Testing camera device %s", name.c_str());
2622
2623 std::vector<AvailableStream> outputStreams;
2624 std::shared_ptr<ICameraDevice> device;
2625
2626 ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device);
2627 ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
2628 ret.getServiceSpecificError());
2629 ASSERT_TRUE(ret.isOk());
2630 ASSERT_NE(device, nullptr);
2631
2632 CameraMetadata metadata;
2633 camera_metadata_t* staticMeta;
2634 ret = device->getCameraCharacteristics(&metadata);
2635 ASSERT_TRUE(ret.isOk());
2636 staticMeta = clone_camera_metadata(
2637 reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
2638 ASSERT_NE(staticMeta, nullptr);
2639
2640 camera_metadata_ro_entry entry;
2641 auto status =
2642 find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
2643 if ((0 == status) && (entry.count > 0)) {
2644 *partialResultCount = entry.data.i32[0];
2645 *supportsPartialResults = (*partialResultCount > 1);
2646 }
2647
2648 *outCb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
2649 ret = device->open(*outCb, session);
2650 ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
2651 ret.getServiceSpecificError());
2652 ASSERT_TRUE(ret.isOk());
2653 ASSERT_NE(*session, nullptr);
2654
2655 *useHalBufManager = false;
2656 status = find_camera_metadata_ro_entry(
2657 staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
2658 if ((0 == status) && (entry.count == 1)) {
2659 *useHalBufManager = (entry.data.u8[0] ==
2660 ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
2661 }
2662
2663 outputStreams.clear();
2664 Size maxSize;
2665 if (maxResolution) {
2666 auto rc = getMaxOutputSizeForFormat(staticMeta, format, &maxSize, maxResolution);
2667 ASSERT_EQ(Status::OK, rc);
2668 } else {
2669 AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
2670 static_cast<int32_t>(format)};
2671 auto rc = getAvailableOutputStreams(staticMeta, outputStreams, &previewThreshold);
2672
2673 ASSERT_EQ(Status::OK, rc);
2674 ASSERT_FALSE(outputStreams.empty());
2675 maxSize.width = outputStreams[0].width;
2676 maxSize.height = outputStreams[0].height;
2677 }
2678
2679
2680 std::vector<Stream> streams(1);
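    // Aggregate-initialize the output Stream parcelable; the fields are, in order: id,
    // streamType, width, height, format, usage, dataSpace, rotation, physicalCameraId,
    // bufferSize, groupId, sensorPixelModesUsed, and dynamicRangeProfile.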
    streams[0] = {0,
                  StreamType::OUTPUT,
                  maxSize.width,
                  maxSize.height,
                  format,
                  previewStream->usage,
                  previewStream->dataSpace,
                  StreamRotation::ROTATION_0,
                  "",
                  0,
                  -1,
                  {maxResolution ? SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION
                                 : SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  prof};

    StreamConfiguration config;
    config.streams = streams;
    config.operationMode = StreamConfigurationMode::NORMAL_MODE;
    config.streamConfigCounter = streamConfigCounter;
    config.multiResolutionInputImage = false;
    CameraMetadata req;
    RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE;
    ret = (*session)->constructDefaultRequestSettings(reqTemplate, &req);
    ASSERT_TRUE(ret.isOk());
    config.sessionParams = req;

    bool supported = false;
    ret = device->isStreamCombinationSupported(config, &supported);
    ASSERT_TRUE(ret.isOk());
    ASSERT_EQ(supported, true);

    ret = (*session)->configureStreams(config, halStreams);
    ASSERT_TRUE(ret.isOk());

    if (*useHalBufManager) {
        std::vector<Stream> ss(1);
        std::vector<HalStream> hs(1);
        ss[0] = streams[0];
        hs[0] = (*halStreams)[0];
        (*outCb)->setCurrentStreamConfig(ss, hs);
    }

    *previewStream = streams[0];
    ASSERT_TRUE(ret.isOk());
}

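// Returns true if the static metadata advertises the DYNAMIC_RANGE_TEN_BIT capability.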
bool CameraAidlTest::is10BitDynamicRangeCapable(const camera_metadata_t* staticMeta) {
    camera_metadata_ro_entry scalerEntry;
    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
                                           &scalerEntry);
    if (rc == 0) {
        for (uint32_t i = 0; i < scalerEntry.count; i++) {
            if (scalerEntry.data.u8[i] ==
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) {
                return true;
            }
        }
    }
    return false;
}

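// Collect all 10-bit dynamic range profiles advertised in the static metadata. The entries
// are reported in groups of three with the profile id first; STANDARD must not be listed,
// duplicates are not allowed, and HLG10 must be present whenever any profile is advertised.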
void CameraAidlTest::get10BitDynamicRangeProfiles(
        const camera_metadata_t* staticMeta,
        std::vector<RequestAvailableDynamicRangeProfilesMap>* profiles) {
    ASSERT_NE(nullptr, staticMeta);
    ASSERT_NE(nullptr, profiles);
    camera_metadata_ro_entry entry;
    std::unordered_set<int64_t> entries;
    int rc = find_camera_metadata_ro_entry(
            staticMeta, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP, &entry);
    ASSERT_EQ(rc, 0);
    ASSERT_TRUE(entry.count > 0);
    ASSERT_EQ(entry.count % 3, 0);

    for (uint32_t i = 0; i < entry.count; i += 3) {
        ASSERT_NE(entry.data.i64[i], ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
        ASSERT_EQ(entries.find(entry.data.i64[i]), entries.end());
        entries.insert(static_cast<int64_t>(entry.data.i64[i]));
        profiles->emplace_back(
                static_cast<RequestAvailableDynamicRangeProfilesMap>(entry.data.i64[i]));
    }

    if (!entries.empty()) {
        ASSERT_NE(entries.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10),
                  entries.end());
    }
}

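// Check that the output buffers of a completed request carry the static HDR metadata
// expected for the dynamic range profile they were captured with.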
void CameraAidlTest::verify10BitMetadata(
        HandleImporter& importer, const InFlightRequest& request,
        aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap
                profile) {
    for (auto b : request.resultOutputBuffers) {
        importer.importBuffer(b.buffer.buffer);
        bool smpte2086Present = importer.isSmpte2086Present(b.buffer.buffer);
        bool smpte2094_10Present = importer.isSmpte2094_10Present(b.buffer.buffer);
        bool smpte2094_40Present = importer.isSmpte2094_40Present(b.buffer.buffer);

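        // Expected static HDR metadata per profile: HLG10 carries none, HDR10 uses
        // SMPTE 2086, HDR10+ uses SMPTE 2094-40, and the Dolby Vision profiles use
        // SMPTE 2094-10.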
        switch (static_cast<int64_t>(profile)) {
            case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
                ASSERT_FALSE(smpte2086Present);
                ASSERT_FALSE(smpte2094_10Present);
                ASSERT_FALSE(smpte2094_40Present);
                break;
            case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
                ASSERT_TRUE(smpte2086Present);
                ASSERT_FALSE(smpte2094_10Present);
                ASSERT_FALSE(smpte2094_40Present);
                break;
            case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
                ASSERT_FALSE(smpte2094_10Present);
                ASSERT_TRUE(smpte2094_40Present);
                break;
            case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
            case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
            case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
            case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
            case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
            case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
            case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
            case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
                ASSERT_FALSE(smpte2086Present);
                ASSERT_TRUE(smpte2094_10Present);
                ASSERT_FALSE(smpte2094_40Present);
                break;
            default:
                // Cast explicitly so the logged value matches the PRId64 format specifier.
                ALOGE("%s: Unexpected 10-bit dynamic range profile: %" PRId64, __FUNCTION__,
                      static_cast<int64_t>(profile));
                ADD_FAILURE();
        }
        importer.freeBuffer(b.buffer.buffer);
    }
}

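// Convenience wrapper that configures a single preview stream with HW composer usage and
// the PREVIEW request template.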
void CameraAidlTest::configurePreviewStream(
        const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
        const AvailableStream* previewThreshold, std::shared_ptr<ICameraDeviceSession>* session,
        Stream* previewStream, std::vector<HalStream>* halStreams, bool* supportsPartialResults,
        int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb,
        uint32_t streamConfigCounter) {
    configureSingleStream(name, provider, previewThreshold, GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
                          RequestTemplate::PREVIEW, session, previewStream, halStreams,
                          supportsPartialResults, partialResultCount, useHalBufManager, cb,
                          streamConfigCounter);
}

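// Returns OK if the static metadata advertises the OFFLINE_PROCESSING capability,
// OPERATION_NOT_SUPPORTED if it does not, and ILLEGAL_ARGUMENT on invalid input.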
Status CameraAidlTest::isOfflineSessionSupported(const camera_metadata_t* staticMeta) {
    Status ret = Status::OPERATION_NOT_SUPPORTED;
    if (nullptr == staticMeta) {
        return Status::ILLEGAL_ARGUMENT;
    }

    camera_metadata_ro_entry entry;
    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
                                           &entry);
    if (0 != rc) {
        return Status::ILLEGAL_ARGUMENT;
    }

    for (size_t i = 0; i < entry.count; i++) {
        if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING == entry.data.u8[i]) {
            ret = Status::OK;
            break;
        }
    }

    return ret;
}

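// Open the camera device 'name' and configure a single still capture stream using the
// largest output size that passes the given threshold. The JPEG buffer size, partial-result
// info, and buffer-manager mode are returned through the out parameters.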
void CameraAidlTest::configureOfflineStillStream(
        const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
        const AvailableStream* threshold, std::shared_ptr<ICameraDeviceSession>* session,
        Stream* stream, std::vector<HalStream>* halStreams, bool* supportsPartialResults,
        int32_t* partialResultCount, std::shared_ptr<DeviceCb>* outCb, int32_t* jpegBufferSize,
        bool* useHalBufManager) {
    ASSERT_NE(nullptr, session);
    ASSERT_NE(nullptr, halStreams);
    ASSERT_NE(nullptr, stream);
    ASSERT_NE(nullptr, supportsPartialResults);
    ASSERT_NE(nullptr, partialResultCount);
    ASSERT_NE(nullptr, outCb);
    ASSERT_NE(nullptr, jpegBufferSize);
    ASSERT_NE(nullptr, useHalBufManager);

    std::vector<AvailableStream> outputStreams;
    std::shared_ptr<ICameraDevice> cameraDevice;
    ALOGI("configureStreams: Testing camera device %s", name.c_str());

    ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &cameraDevice);
    ASSERT_TRUE(ret.isOk());
    ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(),
          ret.getServiceSpecificError());
    ASSERT_NE(cameraDevice, nullptr);

    CameraMetadata metadata;
    ret = cameraDevice->getCameraCharacteristics(&metadata);
    ASSERT_TRUE(ret.isOk());
    camera_metadata_t* staticMeta = clone_camera_metadata(
            reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data()));
    ASSERT_NE(nullptr, staticMeta);

    camera_metadata_ro_entry entry;
    auto status =
            find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
    if ((0 == status) && (entry.count > 0)) {
        *partialResultCount = entry.data.i32[0];
        *supportsPartialResults = (*partialResultCount > 1);
    }

    *useHalBufManager = false;
    status = find_camera_metadata_ro_entry(
            staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
    if ((0 == status) && (entry.count == 1)) {
        *useHalBufManager = (entry.data.u8[0] ==
                             ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
    }

    auto st = getJpegBufferSize(staticMeta, jpegBufferSize);
    ASSERT_EQ(st, Status::OK);

    *outCb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta);
    ret = cameraDevice->open(*outCb, session);
    ASSERT_TRUE(ret.isOk());
    ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(),
          ret.getServiceSpecificError());
    ASSERT_NE(*session, nullptr);

    outputStreams.clear();
    auto rc = getAvailableOutputStreams(staticMeta, outputStreams, threshold);
    ASSERT_EQ(Status::OK, rc);
    ASSERT_FALSE(outputStreams.empty());

    // Pick the largest output size that passed the threshold filter.
    size_t idx = 0;
    int currLargest = outputStreams[0].width * outputStreams[0].height;
    for (size_t i = 0; i < outputStreams.size(); i++) {
        int area = outputStreams[i].width * outputStreams[i].height;
        if (area > currLargest) {
            idx = i;
            currLargest = area;
        }
    }

    Dataspace dataspace = getDataspace(static_cast<PixelFormat>(outputStreams[idx].format));

    std::vector<Stream> streams(/*size*/ 1);
    streams[0] = {/*id*/ 0,
                  StreamType::OUTPUT,
                  outputStreams[idx].width,
                  outputStreams[idx].height,
                  static_cast<PixelFormat>(outputStreams[idx].format),
                  static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
                          GRALLOC1_CONSUMER_USAGE_CPU_READ),
                  dataspace,
                  StreamRotation::ROTATION_0,
                  /*physicalId*/ std::string(),
                  *jpegBufferSize,
                  /*groupId*/ 0,
                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT},
                  RequestAvailableDynamicRangeProfilesMap::
                          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD};

    StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE, CameraMetadata()};

    ret = (*session)->configureStreams(config, halStreams);
    ASSERT_TRUE(ret.isOk());

    if (*useHalBufManager) {
        (*outCb)->setCurrentStreamConfig(streams, *halStreams);
    }

    *stream = streams[0];
}

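// Point all in-flight requests at the supplied result metadata queue.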
void CameraAidlTest::updateInflightResultQueue(
        const std::shared_ptr<ResultMetadataQueue>& resultQueue) {
    std::unique_lock<std::mutex> l(mLock);
    for (auto& it : mInflightMap) {
        it.second->resultQueue = resultQueue;
    }
}