/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCameraProviderHwlImpl"
#include "EmulatedCameraProviderHWLImpl.h"

#include <android-base/file.h>
#include <android-base/strings.h>
#include <cutils/properties.h>
#include <hardware/camera_common.h>
#include <log/log.h>

#include "EmulatedCameraDeviceHWLImpl.h"
#include "EmulatedCameraDeviceSessionHWLImpl.h"
#include "EmulatedLogicalRequestState.h"
#include "EmulatedSensor.h"
#include "EmulatedTorchState.h"
#include "utils/HWLUtils.h"
#include "vendor_tag_defs.h"

namespace android {

// Location of the camera configuration files.
constexpr std::string_view kCameraConfigBack = "emu_camera_back.json";
constexpr std::string_view kCameraConfigFront = "emu_camera_front.json";
constexpr std::string_view kCameraConfigExternal = "emu_camera_external.json";
constexpr std::string_view kCameraConfigDepth = "emu_camera_depth.json";
constexpr std::string_view kCameraConfigFiles[] = {
    kCameraConfigBack, kCameraConfigFront, kCameraConfigExternal,
    kCameraConfigDepth};

constexpr std::string_view kConfigurationFileDirVendor = "/vendor/etc/config/";
constexpr std::string_view kConfigurationFileDirApex =
    "/apex/com.google.emulated.camera.provider.hal/etc/config/";

constexpr StreamSize s240pStreamSize = std::pair(240, 180);
constexpr StreamSize s720pStreamSize = std::pair(1280, 720);
constexpr StreamSize s1440pStreamSize = std::pair(1920, 1440);

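// Factory method: allocates and initializes the provider HWL instance and
// returns nullptr if initialization fails.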
std::unique_ptr<EmulatedCameraProviderHwlImpl>
EmulatedCameraProviderHwlImpl::Create() {
  auto provider = std::unique_ptr<EmulatedCameraProviderHwlImpl>(
      new EmulatedCameraProviderHwlImpl());

  if (provider == nullptr) {
    ALOGE("%s: Creating EmulatedCameraProviderHwlImpl failed.", __FUNCTION__);
    return nullptr;
  }

  status_t res = provider->Initialize();
  if (res != OK) {
    ALOGE("%s: Initializing EmulatedCameraProviderHwlImpl failed: %s (%d).",
          __FUNCTION__, strerror(-res), res);
    return nullptr;
  }

  ALOGI("%s: Created EmulatedCameraProviderHwlImpl", __FUNCTION__);

  return provider;
}

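// Resolves a fully qualified metadata tag name (e.g. "android.lens.facing")
// to its numeric tag id by matching the longest section prefix and then the
// tag name within that section.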
status_t EmulatedCameraProviderHwlImpl::GetTagFromName(const char* name,
                                                       uint32_t* tag) {
  if (name == nullptr || tag == nullptr) {
    return BAD_VALUE;
  }

  size_t name_length = strlen(name);
  // First, find the section by the longest string match
  const char* section = NULL;
  size_t section_index = 0;
  size_t section_length = 0;
  for (size_t i = 0; i < ANDROID_SECTION_COUNT; ++i) {
    const char* str = camera_metadata_section_names[i];

    ALOGV("%s: Trying to match against section '%s'", __FUNCTION__, str);

    if (strstr(name, str) == name) {  // name begins with the section name
      size_t str_length = strlen(str);

      ALOGV("%s: Name begins with section name", __FUNCTION__);

      // section name is the longest we've found so far
      if (section == NULL || section_length < str_length) {
        section = str;
        section_index = i;
        section_length = str_length;

        ALOGV("%s: Found new best section (%s)", __FUNCTION__, section);
      }
    }
  }

  if (section == NULL) {
    return NAME_NOT_FOUND;
  } else {
    ALOGV("%s: Found matched section '%s' (%zu)", __FUNCTION__, section,
          section_index);
  }

  // Get the tag name component of the name
  const char* name_tag_name = name + section_length + 1;  // x.y.z -> z
  if (section_length + 1 >= name_length) {
    return BAD_VALUE;
  }

  // Match rest of name against the tag names in that section only
  uint32_t candidate_tag = 0;
  // Match built-in tags (typically android.*)
  uint32_t tag_begin, tag_end;  // [tag_begin, tag_end)
  tag_begin = camera_metadata_section_bounds[section_index][0];
  tag_end = camera_metadata_section_bounds[section_index][1];

  for (candidate_tag = tag_begin; candidate_tag < tag_end; ++candidate_tag) {
    const char* tag_name = get_camera_metadata_tag_name(candidate_tag);

    if (strcmp(name_tag_name, tag_name) == 0) {
      ALOGV("%s: Found matched tag '%s' (%d)", __FUNCTION__, tag_name,
            candidate_tag);
      break;
    }
  }

  if (candidate_tag == tag_end) {
    return NAME_NOT_FOUND;
  }

  *tag = candidate_tag;
  return OK;
}

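// Returns true if any size in 'stream_sizes' covers at least as many pixels
// as 'compare_size'.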
static bool IsMaxSupportedSizeGreaterThanOrEqual(
    const std::set<StreamSize>& stream_sizes, StreamSize compare_size) {
  for (const auto& stream_size : stream_sizes) {
    if (stream_size.first * stream_size.second >=
        compare_size.first * compare_size.second) {
      return true;
    }
  }
  return false;
}

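// Checks whether 'cap' is listed in the camera's
// ANDROID_REQUEST_AVAILABLE_CAPABILITIES static metadata entry.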
static bool SupportsCapability(const uint32_t camera_id,
                               const HalCameraMetadata& static_metadata,
                               uint8_t cap) {
  camera_metadata_ro_entry_t entry;
  auto ret = static_metadata.Get(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
  if (ret != OK || (entry.count == 0)) {
    ALOGE("Error getting capabilities for camera id %u", camera_id);
    return false;
  }
  for (size_t i = 0; i < entry.count; i++) {
    if (entry.data.u8[i] == cap) {
      return true;
    }
  }
  return false;
}

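// Verifies that the camera advertises the stream sizes (720p, and 1440p where
// applicable) for the YUV, PRIV and BLOB formats required for concurrent
// streaming. Depth-only cameras pass if they support at least a 240p DEPTH16
// output.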
bool EmulatedCameraProviderHwlImpl::SupportsMandatoryConcurrentStreams(
    uint32_t camera_id) {
  HalCameraMetadata& static_metadata = *(static_metadata_[camera_id]);
  auto map = std::make_unique<StreamConfigurationMap>(static_metadata);
  auto yuv_output_sizes = map->GetOutputSizes(HAL_PIXEL_FORMAT_YCBCR_420_888);
  auto blob_output_sizes = map->GetOutputSizes(HAL_PIXEL_FORMAT_BLOB);
  auto depth16_output_sizes = map->GetOutputSizes(HAL_PIXEL_FORMAT_Y16);
  auto priv_output_sizes =
      map->GetOutputSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);

  if (!SupportsCapability(
          camera_id, static_metadata,
          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) &&
      IsMaxSupportedSizeGreaterThanOrEqual(depth16_output_sizes,
                                           s240pStreamSize)) {
    ALOGI("%s: Depth only output supported by camera id %u", __FUNCTION__,
          camera_id);
    return true;
  }
  if (yuv_output_sizes.empty()) {
    ALOGW("%s: No YUV output supported by camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  if (priv_output_sizes.empty()) {
    ALOGW("No PRIV output supported by camera id %u", camera_id);
    return false;
  }

  if (blob_output_sizes.empty()) {
    ALOGW("No BLOB output supported by camera id %u", camera_id);
    return false;
  }

  // According to the HAL spec, if a device supports stream sizes of at least
  // 1440p, it must support both 1440p and 720p streams for the PRIV, JPEG and
  // YUV formats. Otherwise it must support 2 streams (YUV / PRIV + JPEG) of
  // the maximum supported size.

  // Check for YUV output sizes
  if (IsMaxSupportedSizeGreaterThanOrEqual(yuv_output_sizes, s1440pStreamSize) &&
      (yuv_output_sizes.find(s1440pStreamSize) == yuv_output_sizes.end() ||
       yuv_output_sizes.find(s720pStreamSize) == yuv_output_sizes.end())) {
    ALOGW("%s: 1440p + 720p YUV outputs not found for camera id %u",
          __FUNCTION__, camera_id);
    return false;
  } else if (IsMaxSupportedSizeGreaterThanOrEqual(yuv_output_sizes,
                                                  s720pStreamSize) &&
             yuv_output_sizes.find(s720pStreamSize) == yuv_output_sizes.end()) {
    ALOGW("%s: 720p YUV output not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  // Check for PRIV output sizes
  if (IsMaxSupportedSizeGreaterThanOrEqual(priv_output_sizes, s1440pStreamSize) &&
      (priv_output_sizes.find(s1440pStreamSize) == priv_output_sizes.end() ||
       priv_output_sizes.find(s720pStreamSize) == priv_output_sizes.end())) {
    ALOGW("%s: 1440p + 720p PRIV outputs not found for camera id %u",
          __FUNCTION__, camera_id);
    return false;
  } else if (IsMaxSupportedSizeGreaterThanOrEqual(priv_output_sizes,
                                                  s720pStreamSize) &&
             priv_output_sizes.find(s720pStreamSize) == priv_output_sizes.end()) {
    ALOGW("%s: 720p PRIV output not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  // Check for BLOB output sizes
  if (IsMaxSupportedSizeGreaterThanOrEqual(blob_output_sizes, s1440pStreamSize) &&
      (blob_output_sizes.find(s1440pStreamSize) == blob_output_sizes.end() ||
       blob_output_sizes.find(s720pStreamSize) == blob_output_sizes.end())) {
    ALOGW("%s: 1440p + 720p BLOB outputs not found for camera id %u",
          __FUNCTION__, camera_id);
    return false;
  } else if (IsMaxSupportedSizeGreaterThanOrEqual(blob_output_sizes,
                                                  s720pStreamSize) &&
             blob_output_sizes.find(s720pStreamSize) == blob_output_sizes.end()) {
    ALOGW("%s: 720p BLOB output not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  return true;
}

status_t EmulatedCameraProviderHwlImpl::GetConcurrentStreamingCameraIds(
    std::vector<std::unordered_set<uint32_t>>* combinations) {
  if (combinations == nullptr) {
    return BAD_VALUE;
  }
  // Collect all camera ids that support the guaranteed stream combinations
  // (720p YUV and IMPLEMENTATION_DEFINED) and put them in one set. We don't
  // make all possible combinations since it should be possible to stream all
  // of them at once in the emulated camera.
  std::unordered_set<uint32_t> candidate_ids;
  for (auto& entry : camera_id_map_) {
    if (SupportsMandatoryConcurrentStreams(entry.first)) {
      candidate_ids.insert(entry.first);
    }
  }
  combinations->emplace_back(std::move(candidate_ids));
  return OK;
}

status_t EmulatedCameraProviderHwlImpl::IsConcurrentStreamCombinationSupported(
    const std::vector<CameraIdAndStreamConfiguration>& configs,
    bool* is_supported) {
  *is_supported = false;

  // Go through the given camera ids, get their sensor characteristics, stream
  // config maps and call EmulatedSensor::IsStreamCombinationSupported()
  for (auto& config : configs) {
    // TODO: Consider caching sensor characteristics and StreamConfigurationMap
    if (camera_id_map_.find(config.camera_id) == camera_id_map_.end()) {
      ALOGE("%s: Camera id %u does not exist", __FUNCTION__, config.camera_id);
      return BAD_VALUE;
    }

    auto stream_configuration_map = std::make_unique<StreamConfigurationMap>(
        *(static_metadata_[config.camera_id]));
    auto stream_configuration_map_max_resolution =
        std::make_unique<StreamConfigurationMap>(
            *(static_metadata_[config.camera_id]), /*maxResolution*/ true);

    LogicalCharacteristics sensor_chars;
    status_t ret =
        GetSensorCharacteristics((static_metadata_[config.camera_id]).get(),
                                 &sensor_chars[config.camera_id]);
    if (ret != OK) {
      ALOGE("%s: Unable to extract sensor chars for camera id %u",
            __FUNCTION__, config.camera_id);
      return UNKNOWN_ERROR;
    }

    PhysicalStreamConfigurationMap physical_stream_configuration_map;
    PhysicalStreamConfigurationMap physical_stream_configuration_map_max_resolution;
    auto const& physicalCameraInfo = camera_id_map_[config.camera_id];
    for (size_t i = 0; i < physicalCameraInfo.size(); i++) {
      uint32_t physical_camera_id = physicalCameraInfo[i].second;
      physical_stream_configuration_map.emplace(
          physical_camera_id, std::make_unique<StreamConfigurationMap>(
                                  *(static_metadata_[physical_camera_id])));

      physical_stream_configuration_map_max_resolution.emplace(
          physical_camera_id,
          std::make_unique<StreamConfigurationMap>(
              *(static_metadata_[physical_camera_id]), /*maxResolution*/ true));

      ret = GetSensorCharacteristics(static_metadata_[physical_camera_id].get(),
                                     &sensor_chars[physical_camera_id]);
      if (ret != OK) {
        ALOGE("%s: Unable to extract camera %u sensor characteristics %s (%d)",
              __FUNCTION__, physical_camera_id, strerror(-ret), ret);
        return ret;
      }
    }

    if (!EmulatedSensor::IsStreamCombinationSupported(
            config.camera_id, config.stream_configuration,
            *stream_configuration_map, *stream_configuration_map_max_resolution,
            physical_stream_configuration_map,
            physical_stream_configuration_map_max_resolution, sensor_chars)) {
      return OK;
    }
  }

  *is_supported = true;
  return OK;
}

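// Returns true if 'value' consists only of digits and punctuation characters
// (e.g. a leading sign or a decimal point), i.e. it looks like a number.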
bool IsDigit(const std::string& value) {
  if (value.empty()) {
    return false;
  }

  for (const auto& c : value) {
    if (!std::isdigit(c) && (!std::ispunct(c))) {
      return false;
    }
  }

  return true;
}

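// Translates the string form of a metadata enum value into its numeric value
// for the given tag id.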
template <typename T>
status_t GetEnumValue(uint32_t tag_id, const char* cstring, T* result /*out*/) {
  if ((result == nullptr) || (cstring == nullptr)) {
    return BAD_VALUE;
  }

  uint32_t enum_value;
  auto ret =
      camera_metadata_enum_value(tag_id, cstring, strlen(cstring), &enum_value);
  if (ret != OK) {
    ALOGE("%s: Failed to match tag id: 0x%x value: %s", __FUNCTION__, tag_id,
          cstring);
    return ret;
  }
  *result = enum_value;

  return OK;
}

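// The Get*Value() helpers below parse a single JSON string into the
// corresponding metadata payload type. Numeric strings are converted
// directly; non-numeric strings are treated as enum names where applicable.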
status_t GetUInt8Value(const Json::Value& value, uint32_t tag_id,
                       uint8_t* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    if (IsDigit(value.asString())) {
      auto int_value = strtol(value.asCString(), nullptr, 10);
      if ((int_value >= 0) && (int_value <= UINT8_MAX) && (errno == 0)) {
        *result = int_value;
      } else {
        ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
        return BAD_VALUE;
      }
    } else {
      return GetEnumValue(tag_id, value.asCString(), result);
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetInt32Value(const Json::Value& value, uint32_t tag_id,
                       int32_t* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    if (IsDigit(value.asString())) {
      auto int_value = strtol(value.asCString(), nullptr, 10);
      if ((int_value >= INT32_MIN) && (int_value <= INT32_MAX) && (errno == 0)) {
        *result = int_value;
      } else {
        ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
        return BAD_VALUE;
      }
    } else {
      return GetEnumValue(tag_id, value.asCString(), result);
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetInt64Value(const Json::Value& value, uint32_t tag_id,
                       int64_t* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    auto int_value = strtoll(value.asCString(), nullptr, 10);
    if ((int_value >= INT64_MIN) && (int_value <= INT64_MAX) && (errno == 0)) {
      *result = int_value;
    } else {
      ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
      return BAD_VALUE;
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetFloatValue(const Json::Value& value, uint32_t tag_id,
                       float* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    auto float_value = strtof(value.asCString(), nullptr);
    if (errno == 0) {
      *result = float_value;
    } else {
      ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
      return BAD_VALUE;
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetDoubleValue(const Json::Value& value, uint32_t tag_id,
                        double* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    auto double_value = strtod(value.asCString(), nullptr);
    if (errno == 0) {
      *result = double_value;
    } else {
      ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
      return BAD_VALUE;
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

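// Removes vendor-defined tag ids from the available request/result/session/
// characteristics key lists, since this provider exposes no vendor tags.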
template <typename T>
void FilterVendorKeys(uint32_t tag_id, std::vector<T>* values) {
  if ((values == nullptr) || (values->empty())) {
    return;
  }

  switch (tag_id) {
    case ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS:
    case ANDROID_REQUEST_AVAILABLE_RESULT_KEYS:
    case ANDROID_REQUEST_AVAILABLE_SESSION_KEYS:
    case ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: {
      auto it = values->begin();
      while (it != values->end()) {
        // Per spec, the tags we are handling here will be "int32_t".
        // In this case all vendor defined values will be negative.
        if (*it < 0) {
          it = values->erase(it);
        } else {
          it++;
        }
      }
    } break;
    default:
      // no-op
      break;
  }
}

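// Parses a JSON array of values with 'get_val_func', filters out vendor keys
// and stores the result in 'meta' under 'tag_id'.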
template <typename T, typename func_type>
status_t InsertTag(const Json::Value& json_value, uint32_t tag_id,
                   func_type get_val_func, HalCameraMetadata* meta /*out*/) {
  if (meta == nullptr) {
    return BAD_VALUE;
  }

  std::vector<T> values;
  T result;
  status_t ret = -1;
  values.reserve(json_value.size());
  for (const auto& val : json_value) {
    ret = get_val_func(val, tag_id, &result);
    if (ret != OK) {
      break;
    }

    values.push_back(result);
  }

  if (ret == OK) {
    FilterVendorKeys(tag_id, &values);
    ret = meta->Set(tag_id, values.data(), values.size());
  }

  return ret;
}

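// Parses a JSON array with an even number of elements as (numerator,
// denominator) pairs and stores them in 'meta' as rational values.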
status_t InsertRationalTag(const Json::Value& json_value, uint32_t tag_id,
                           HalCameraMetadata* meta /*out*/) {
  if (meta == nullptr) {
    return BAD_VALUE;
  }

  std::vector<camera_metadata_rational_t> values;
  status_t ret = OK;
  if (json_value.isArray() && ((json_value.size() % 2) == 0)) {
    values.reserve(json_value.size() / 2);
    auto it = json_value.begin();
    while (it != json_value.end()) {
      camera_metadata_rational_t result;
      ret = GetInt32Value((*it), tag_id, &result.numerator);
      it++;
      ret |= GetInt32Value((*it), tag_id, &result.denominator);
      it++;
      if (ret != OK) {
        break;
      }
      values.push_back(result);
    }
  } else {
    ALOGE("%s: json type: %d doesn't match with rational tag type",
          __FUNCTION__, json_value.type());
    return BAD_VALUE;
  }

  if (ret == OK) {
    ret = meta->Set(tag_id, values.data(), values.size());
  }

  return ret;
}

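// Parses one JSON camera definition into a HalCameraMetadata instance and
// stores it in 'static_metadata_'. If 'id' is negative a new entry is
// appended, otherwise the entry at 'id' is replaced. Returns the id of the
// parsed camera on success or an error value on failure.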
uint32_t EmulatedCameraProviderHwlImpl::ParseCharacteristics(
    const Json::Value& value, ssize_t id) {
  if (!value.isObject()) {
    ALOGE("%s: Configuration root is not an object", __FUNCTION__);
    return BAD_VALUE;
  }

  auto static_meta = HalCameraMetadata::Create(1, 10);
  auto members = value.getMemberNames();
  for (const auto& member : members) {
    uint32_t tag_id;
    auto stat = GetTagFromName(member.c_str(), &tag_id);
    if (stat != OK) {
      ALOGE("%s: tag %s not supported, skipping!", __func__, member.c_str());
      continue;
    }

    auto tag_type = get_camera_metadata_tag_type(tag_id);
    auto tag_value = value[member.c_str()];
    switch (tag_type) {
      case TYPE_BYTE:
        InsertTag<uint8_t>(tag_value, tag_id, GetUInt8Value, static_meta.get());
        break;
      case TYPE_INT32:
        InsertTag<int32_t>(tag_value, tag_id, GetInt32Value, static_meta.get());
        break;
      case TYPE_INT64:
        InsertTag<int64_t>(tag_value, tag_id, GetInt64Value, static_meta.get());
        break;
      case TYPE_FLOAT:
        InsertTag<float>(tag_value, tag_id, GetFloatValue, static_meta.get());
        break;
      case TYPE_DOUBLE:
        InsertTag<double>(tag_value, tag_id, GetDoubleValue, static_meta.get());
        break;
      case TYPE_RATIONAL:
        InsertRationalTag(tag_value, tag_id, static_meta.get());
        break;
      default:
        ALOGE("%s: Unsupported tag type: %d!", __FUNCTION__, tag_type);
    }
  }

  SensorCharacteristics sensor_characteristics;
  auto ret =
      GetSensorCharacteristics(static_meta.get(), &sensor_characteristics);
  if (ret != OK) {
    ALOGE("%s: Unable to extract sensor characteristics!", __FUNCTION__);
    return ret;
  }

  if (!EmulatedSensor::AreCharacteristicsSupported(sensor_characteristics)) {
    ALOGE("%s: Sensor characteristics not supported!", __FUNCTION__);
    return BAD_VALUE;
  }

  // Although we don't support HdrPlus, this data is still required by the HWL.
  int32_t payload_frames = 0;
  static_meta->Set(google_camera_hal::kHdrplusPayloadFrames, &payload_frames, 1);

  if (id < 0) {
    static_metadata_.push_back(std::move(static_meta));
    id = static_metadata_.size() - 1;
  } else {
    static_metadata_[id] = std::move(static_meta);
  }

  return id;
}

status_t EmulatedCameraProviderHwlImpl::WaitForQemuSfFakeCameraPropertyAvailable() {
  // Camera service may start running before qemu-props sets
  // vendor.qemu.sf.fake_camera to any of the following four values:
  // "none,front,back,both"; so we need to wait.
  int num_attempts = 100;
  char prop[PROPERTY_VALUE_MAX];
  bool timeout = true;
  for (int i = 0; i < num_attempts; ++i) {
    if (property_get("vendor.qemu.sf.fake_camera", prop, nullptr) != 0) {
      timeout = false;
      break;
    }
    usleep(5000);
  }
  if (timeout) {
    ALOGE("timeout (%dms) waiting for property vendor.qemu.sf.fake_camera to be set\n",
          5 * num_attempts);
    return BAD_VALUE;
  }
  return OK;
}

status_t EmulatedCameraProviderHwlImpl::Initialize() {
  // GCH expects all physical ids to be bigger than the logical ones.
  // Resize 'static_metadata_' to fit all logical devices and insert them
  // accordingly, push any remaining physical cameras in the back.
  std::string config;
  size_t logical_id = 0;
  std::vector<std::string> config_file_locations;
  std::string config_dir = "";
  struct stat st;
  if (stat(kConfigurationFileDirApex.data(), &st) == 0) {
    config_dir += kConfigurationFileDirApex.data();
  } else {
    config_dir += kConfigurationFileDirVendor.data();
  }
  char prop[PROPERTY_VALUE_MAX];
  if (!property_get_bool("ro.boot.qemu", false)) {
    // Cuttlefish
    property_get("ro.vendor.camera.config", prop, nullptr);
    if (strcmp(prop, "external") == 0) {
      config_file_locations.emplace_back(config_dir +
                                         kCameraConfigExternal.data());
      logical_id = 1;
    } else {
      // Default phone layout.
      config_file_locations.emplace_back(config_dir + kCameraConfigBack.data());
      config_file_locations.emplace_back(config_dir + kCameraConfigFront.data());
      config_file_locations.emplace_back(config_dir + kCameraConfigDepth.data());
    }
  } else {
    // Android Studio Emulator
    if (!property_get_bool("ro.boot.qemu.legacy_fake_camera", false)) {
      if (WaitForQemuSfFakeCameraPropertyAvailable() == OK) {
        property_get("vendor.qemu.sf.fake_camera", prop, nullptr);
        if (strcmp(prop, "both") == 0) {
          config_file_locations.emplace_back(config_dir +
                                             kCameraConfigBack.data());
          config_file_locations.emplace_back(config_dir +
                                             kCameraConfigFront.data());
        } else if (strcmp(prop, "front") == 0) {
          config_file_locations.emplace_back(config_dir +
                                             kCameraConfigFront.data());
          logical_id = 1;
        } else if (strcmp(prop, "back") == 0) {
          config_file_locations.emplace_back(config_dir +
                                             kCameraConfigBack.data());
          logical_id = 1;
        }
      }
    }
  }
  static_metadata_.resize(ARRAY_SIZE(kCameraConfigFiles));

  for (const auto& config_path : config_file_locations) {
    if (!android::base::ReadFileToString(config_path, &config)) {
      ALOGW("%s: Could not open configuration file: %s", __FUNCTION__,
            config_path.c_str());
      continue;
    }

    Json::CharReaderBuilder builder;
    std::unique_ptr<Json::CharReader> config_reader(builder.newCharReader());
    Json::Value root;
    std::string error_message;
    if (!config_reader->parse(&*config.begin(), &*config.end(), &root,
                              &error_message)) {
      ALOGE("Could not parse configuration file: %s", error_message.c_str());
      return BAD_VALUE;
    }

    if (root.isArray()) {
      auto device_iter = root.begin();
      auto result_id = ParseCharacteristics(*device_iter, logical_id);
      if (logical_id != result_id) {
        return result_id;
      }
      device_iter++;

      // The first device entry is always the logical camera, followed by the
      // physical devices. There must be at least two physical devices.
      camera_id_map_.emplace(
          logical_id, std::vector<std::pair<CameraDeviceStatus, uint32_t>>());
      if (root.size() >= 3) {
        camera_id_map_[logical_id].reserve(root.size() - 1);
        size_t current_physical_device = 0;
        while (device_iter != root.end()) {
          auto physical_id = ParseCharacteristics(*device_iter, /*id*/ -1);
          if (physical_id < 0) {
            return physical_id;
          }
          // Only notify unavailable physical camera if there are more than 2
          // physical cameras backing the logical camera
          auto device_status = (current_physical_device < 2)
                                   ? CameraDeviceStatus::kPresent
                                   : CameraDeviceStatus::kNotPresent;
          camera_id_map_[logical_id].push_back(
              std::make_pair(device_status, physical_id));
          device_iter++;
          current_physical_device++;
        }

        auto physical_devices = std::make_unique<PhysicalDeviceMap>();
        for (const auto& physical_device : camera_id_map_[logical_id]) {
          physical_devices->emplace(
              physical_device.second,
              std::make_pair(physical_device.first,
                             HalCameraMetadata::Clone(
                                 static_metadata_[physical_device.second].get())));
        }
        auto updated_logical_chars =
            EmulatedLogicalRequestState::AdaptLogicalCharacteristics(
                HalCameraMetadata::Clone(static_metadata_[logical_id].get()),
                std::move(physical_devices));
        if (updated_logical_chars.get() != nullptr) {
          static_metadata_[logical_id].swap(updated_logical_chars);
        } else {
          ALOGE("%s: Failed to update logical camera characteristics!",
                __FUNCTION__);
          return BAD_VALUE;
        }
      }
    } else {
      auto result_id = ParseCharacteristics(root, logical_id);
      if (result_id != logical_id) {
        return result_id;
      }
      camera_id_map_.emplace(
          logical_id, std::vector<std::pair<CameraDeviceStatus, uint32_t>>());
    }

    logical_id++;
  }

  return OK;
}

status_t EmulatedCameraProviderHwlImpl::SetCallback(
    const HwlCameraProviderCallback& callback) {
  torch_cb_ = callback.torch_mode_status_change;
  physical_camera_status_cb_ = callback.physical_camera_device_status_change;

  return OK;
}

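// Kicks off an asynchronous notification of any physical cameras that were
// marked as not present during initialization. Returns early if a
// notification is already pending.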
status_t EmulatedCameraProviderHwlImpl::TriggerDeferredCallbacks() {
  std::lock_guard<std::mutex> lock(status_callback_future_lock_);
  if (status_callback_future_.valid()) {
    return OK;
  }

  status_callback_future_ = std::async(
      std::launch::async,
      &EmulatedCameraProviderHwlImpl::NotifyPhysicalCameraUnavailable, this);
  return OK;
}

void EmulatedCameraProviderHwlImpl::WaitForStatusCallbackFuture() {
  {
    std::lock_guard<std::mutex> lock(status_callback_future_lock_);
    if (!status_callback_future_.valid()) {
      // If there is no future pending, construct a dummy one.
      status_callback_future_ = std::async([]() { return; });
    }
  }
  status_callback_future_.wait();
}

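// Reports every physical camera flagged as kNotPresent to the camera service
// via the registered physical camera status callback.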
void EmulatedCameraProviderHwlImpl::NotifyPhysicalCameraUnavailable() {
  for (const auto& one_map : camera_id_map_) {
    for (const auto& physical_device : one_map.second) {
      if (physical_device.first != CameraDeviceStatus::kNotPresent) {
        continue;
      }

      uint32_t logical_camera_id = one_map.first;
      uint32_t physical_camera_id = physical_device.second;
      physical_camera_status_cb_(logical_camera_id, physical_camera_id,
                                 CameraDeviceStatus::kNotPresent);
    }
  }
}

status_t EmulatedCameraProviderHwlImpl::GetVendorTags(
    std::vector<VendorTagSection>* vendor_tag_sections) {
  if (vendor_tag_sections == nullptr) {
    ALOGE("%s: vendor_tag_sections is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  // No vendor specific tags as of now
  return OK;
}

status_t EmulatedCameraProviderHwlImpl::GetVisibleCameraIds(
    std::vector<std::uint32_t>* camera_ids) {
  if (camera_ids == nullptr) {
    ALOGE("%s: camera_ids is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  for (const auto& device : camera_id_map_) {
    camera_ids->push_back(device.first);
  }

  return OK;
}

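// Creates a camera device HWL for 'camera_id', cloning its static metadata,
// wiring up torch state when a flash unit is advertised and passing along the
// per-physical-camera characteristics.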
status_t EmulatedCameraProviderHwlImpl::CreateCameraDeviceHwl(
    uint32_t camera_id, std::unique_ptr<CameraDeviceHwl>* camera_device_hwl) {
  if (camera_device_hwl == nullptr) {
    ALOGE("%s: camera_device_hwl is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  if (camera_id_map_.find(camera_id) == camera_id_map_.end()) {
    ALOGE("%s: Invalid camera id: %u", __func__, camera_id);
    return BAD_VALUE;
  }

  std::unique_ptr<HalCameraMetadata> meta =
      HalCameraMetadata::Clone(static_metadata_[camera_id].get());

  std::shared_ptr<EmulatedTorchState> torch_state;
  camera_metadata_ro_entry entry;
  bool flash_supported = false;
  auto ret = meta->Get(ANDROID_FLASH_INFO_AVAILABLE, &entry);
  if ((ret == OK) && (entry.count == 1)) {
    if (entry.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_TRUE) {
      flash_supported = true;
    }
  }

  if (flash_supported) {
    torch_state = std::make_shared<EmulatedTorchState>(camera_id, torch_cb_);
  }

  auto physical_devices = std::make_unique<PhysicalDeviceMap>();
  for (const auto& physical_device : camera_id_map_[camera_id]) {
    physical_devices->emplace(
        physical_device.second,
        std::make_pair(physical_device.first,
                       HalCameraMetadata::Clone(
                           static_metadata_[physical_device.second].get())));
  }
  *camera_device_hwl = EmulatedCameraDeviceHwlImpl::Create(
      camera_id, std::move(meta), std::move(physical_devices), torch_state);
  if (*camera_device_hwl == nullptr) {
    ALOGE("%s: Cannot create EmulatedCameraDeviceHwlImpl.", __FUNCTION__);
    return BAD_VALUE;
  }

  return OK;
}

status_t EmulatedCameraProviderHwlImpl::CreateBufferAllocatorHwl(
    std::unique_ptr<CameraBufferAllocatorHwl>* camera_buffer_allocator_hwl) {
  if (camera_buffer_allocator_hwl == nullptr) {
    ALOGE("%s: camera_buffer_allocator_hwl is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  // Currently not supported
  return INVALID_OPERATION;
}

status_t EmulatedCameraProviderHwlImpl::NotifyDeviceStateChange(
    DeviceState /*device_state*/) {
  return OK;
}
}  // namespace android