/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCameraProviderHwlImpl"
#include "EmulatedCameraProviderHWLImpl.h"

#include <android-base/file.h>
#include <android-base/strings.h>
#include <cutils/properties.h>
#include <hardware/camera_common.h>
#include <log/log.h>

#include "EmulatedCameraDeviceHWLImpl.h"
#include "EmulatedCameraDeviceSessionHWLImpl.h"
#include "EmulatedLogicalRequestState.h"
#include "EmulatedSensor.h"
#include "EmulatedTorchState.h"
#include "utils/HWLUtils.h"
#include "vendor_tag_defs.h"

namespace android {

// Location of the camera configuration files.
constexpr std::string_view kConfigurationFileNames[] = {
    "emu_camera_back.json",
    "emu_camera_front.json",
    "emu_camera_depth.json",
};
constexpr std::string_view kConfigurationFileDirVendor = "/vendor/etc/config/";
constexpr std::string_view kConfigurationFileDirApex =
    "/apex/com.google.emulated.camera.provider.hal/etc/config/";

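// Each configuration file holds the static metadata for one logical camera:
// either a single JSON object, or a JSON array whose first element describes
// the logical camera and whose remaining elements describe its physical
// sub-cameras (see ParseCharacteristics() and Initialize() below). Member
// names are fully qualified metadata tag names and all values are encoded as
// strings, e.g. the following illustrative (not verbatim) snippet:
//   { "android.lens.facing": ["BACK"],
//     "android.sensor.orientation": ["90"] }
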
constexpr StreamSize s240pStreamSize = std::pair(240, 180);
constexpr StreamSize s720pStreamSize = std::pair(1280, 720);
constexpr StreamSize s1440pStreamSize = std::pair(1920, 1440);

std::unique_ptr<EmulatedCameraProviderHwlImpl>
EmulatedCameraProviderHwlImpl::Create() {
  auto provider = std::unique_ptr<EmulatedCameraProviderHwlImpl>(
      new EmulatedCameraProviderHwlImpl());

  if (provider == nullptr) {
    ALOGE("%s: Creating EmulatedCameraProviderHwlImpl failed.", __FUNCTION__);
    return nullptr;
  }

  status_t res = provider->Initialize();
  if (res != OK) {
    ALOGE("%s: Initializing EmulatedCameraProviderHwlImpl failed: %s (%d).",
          __FUNCTION__, strerror(-res), res);
    return nullptr;
  }

  ALOGI("%s: Created EmulatedCameraProviderHwlImpl", __FUNCTION__);

  return provider;
}

status_t EmulatedCameraProviderHwlImpl::GetTagFromName(const char* name,
                                                       uint32_t* tag) {
  if (name == nullptr || tag == nullptr) {
    return BAD_VALUE;
  }

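  // Resolution works on fully qualified names of the form "section.tagName",
  // e.g. "android.control.aeMode": the longest matching section prefix
  // ("android.control") selects the section and the remainder ("aeMode") is
  // then compared against the tag names registered for that section.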
  size_t name_length = strlen(name);
  // First, find the section by the longest string match
  const char* section = NULL;
  size_t section_index = 0;
  size_t section_length = 0;
  for (size_t i = 0; i < ANDROID_SECTION_COUNT; ++i) {
    const char* str = camera_metadata_section_names[i];

    ALOGV("%s: Trying to match against section '%s'", __FUNCTION__, str);

    if (strstr(name, str) == name) {  // name begins with the section name
      size_t str_length = strlen(str);

      ALOGV("%s: Name begins with section name", __FUNCTION__);

      // section name is the longest we've found so far
      if (section == NULL || section_length < str_length) {
        section = str;
        section_index = i;
        section_length = str_length;

        ALOGV("%s: Found new best section (%s)", __FUNCTION__, section);
      }
    }
  }

  if (section == NULL) {
    return NAME_NOT_FOUND;
  } else {
    ALOGV("%s: Found matched section '%s' (%zu)", __FUNCTION__, section,
          section_index);
  }

  // Get the tag name component of the name
  const char* name_tag_name = name + section_length + 1;  // x.y.z -> z
  if (section_length + 1 >= name_length) {
    return BAD_VALUE;
  }

  // Match rest of name against the tag names in that section only
  uint32_t candidate_tag = 0;
  // Match built-in tags (typically android.*)
  uint32_t tag_begin, tag_end;  // [tag_begin, tag_end)
  tag_begin = camera_metadata_section_bounds[section_index][0];
  tag_end = camera_metadata_section_bounds[section_index][1];

  for (candidate_tag = tag_begin; candidate_tag < tag_end; ++candidate_tag) {
    const char* tag_name = get_camera_metadata_tag_name(candidate_tag);

    if (strcmp(name_tag_name, tag_name) == 0) {
      ALOGV("%s: Found matched tag '%s' (%d)", __FUNCTION__, tag_name,
            candidate_tag);
      break;
    }
  }

  if (candidate_tag == tag_end) {
    return NAME_NOT_FOUND;
  }

  *tag = candidate_tag;
  return OK;
}

static bool IsMaxSupportedSizeGreaterThanOrEqual(
    const std::set<StreamSize>& stream_sizes, StreamSize compare_size) {
  for (const auto& stream_size : stream_sizes) {
    if (stream_size.first * stream_size.second >=
        compare_size.first * compare_size.second) {
      return true;
    }
  }
  return false;
}

static bool SupportsCapability(const uint32_t camera_id,
                               const HalCameraMetadata& static_metadata,
                               uint8_t cap) {
  camera_metadata_ro_entry_t entry;
  auto ret = static_metadata.Get(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
  if (ret != OK || (entry.count == 0)) {
    ALOGE("Error getting capabilities for camera id %u", camera_id);
    return false;
  }
  for (size_t i = 0; i < entry.count; i++) {
    if (entry.data.u8[i] == cap) {
      return true;
    }
  }
  return false;
}

bool EmulatedCameraProviderHwlImpl::SupportsMandatoryConcurrentStreams(
    uint32_t camera_id) {
  HalCameraMetadata& static_metadata = *(static_metadata_[camera_id]);
  auto map = std::make_unique<StreamConfigurationMap>(static_metadata);
  auto yuv_output_sizes = map->GetOutputSizes(HAL_PIXEL_FORMAT_YCBCR_420_888);
  auto blob_output_sizes = map->GetOutputSizes(HAL_PIXEL_FORMAT_BLOB);
  auto depth16_output_sizes = map->GetOutputSizes(HAL_PIXEL_FORMAT_Y16);
  auto priv_output_sizes =
      map->GetOutputSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);

  if (!SupportsCapability(
          camera_id, static_metadata,
          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) &&
      IsMaxSupportedSizeGreaterThanOrEqual(depth16_output_sizes,
                                           s240pStreamSize)) {
    ALOGI("%s: Depth only output supported by camera id %u", __FUNCTION__,
          camera_id);
    return true;
  }
  if (yuv_output_sizes.empty()) {
    ALOGW("%s: No YUV output supported by camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  if (priv_output_sizes.empty()) {
    ALOGW("No PRIV output supported by camera id %u", camera_id);
    return false;
  }

  if (blob_output_sizes.empty()) {
    ALOGW("No BLOB output supported by camera id %u", camera_id);
    return false;
  }

  // According to the HAL spec, if a device supports format sizes > 1440p and
  // 720p, it must support both 1440p and 720p streams for PRIV, JPEG and YUV
  // formats. Otherwise it must support 2 streams (YUV / PRIV + JPEG) of the max
  // supported size.
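  // For example, if the largest YUV output below is 1920x1440 or larger, the
  // YUV size list must also contain both 1920x1440 and 1280x720; otherwise,
  // if the largest output is at least 1280x720, just 1280x720 is required.
  // The same rule is applied to the PRIV and BLOB (JPEG) size lists.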

  // Check for YUV output sizes
  if (IsMaxSupportedSizeGreaterThanOrEqual(yuv_output_sizes, s1440pStreamSize) &&
      (yuv_output_sizes.find(s1440pStreamSize) == yuv_output_sizes.end() ||
       yuv_output_sizes.find(s720pStreamSize) == yuv_output_sizes.end())) {
    ALOGW("%s: 1440p+720p YUV outputs not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  } else if (IsMaxSupportedSizeGreaterThanOrEqual(yuv_output_sizes,
                                                  s720pStreamSize) &&
             yuv_output_sizes.find(s720pStreamSize) == yuv_output_sizes.end()) {
    ALOGW("%s: 720p YUV output not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  // Check for PRIV output sizes
  if (IsMaxSupportedSizeGreaterThanOrEqual(priv_output_sizes, s1440pStreamSize) &&
      (priv_output_sizes.find(s1440pStreamSize) == priv_output_sizes.end() ||
       priv_output_sizes.find(s720pStreamSize) == priv_output_sizes.end())) {
    ALOGW("%s: 1440p + 720p PRIV outputs not found for camera id %u",
          __FUNCTION__, camera_id);
    return false;
  } else if (IsMaxSupportedSizeGreaterThanOrEqual(priv_output_sizes,
                                                  s720pStreamSize) &&
             priv_output_sizes.find(s720pStreamSize) == priv_output_sizes.end()) {
    ALOGW("%s: 720p PRIV output not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  // Check for BLOB output sizes
  if (IsMaxSupportedSizeGreaterThanOrEqual(blob_output_sizes, s1440pStreamSize) &&
      (blob_output_sizes.find(s1440pStreamSize) == blob_output_sizes.end() ||
       blob_output_sizes.find(s720pStreamSize) == blob_output_sizes.end())) {
    ALOGW("%s: 1440p + 720p BLOB outputs not found for camera id %u",
          __FUNCTION__, camera_id);
    return false;
  } else if (IsMaxSupportedSizeGreaterThanOrEqual(blob_output_sizes,
                                                  s720pStreamSize) &&
             blob_output_sizes.find(s720pStreamSize) == blob_output_sizes.end()) {
    ALOGW("%s: 720p BLOB output not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  return true;
}

status_t EmulatedCameraProviderHwlImpl::GetConcurrentStreamingCameraIds(
    std::vector<std::unordered_set<uint32_t>>* combinations) {
  if (combinations == nullptr) {
    return BAD_VALUE;
  }
  // Collect all camera ids that support the guaranteed stream combinations
  // (720p YUV and IMPLEMENTATION_DEFINED) and put them in one set. We don't
  // make all possible combinations since it should be possible to stream all
  // of them at once in the emulated camera.
  std::unordered_set<uint32_t> candidate_ids;
  for (auto& entry : camera_id_map_) {
    if (SupportsMandatoryConcurrentStreams(entry.first)) {
      candidate_ids.insert(entry.first);
    }
  }
  combinations->emplace_back(std::move(candidate_ids));
  return OK;
}

status_t EmulatedCameraProviderHwlImpl::IsConcurrentStreamCombinationSupported(
    const std::vector<CameraIdAndStreamConfiguration>& configs,
    bool* is_supported) {
  *is_supported = false;

  // Go through the given camera ids, get their sensor characteristics, stream
  // config maps and call EmulatedSensor::IsStreamCombinationSupported()
  for (auto& config : configs) {
    // TODO: Consider caching sensor characteristics and StreamConfigurationMap
    if (camera_id_map_.find(config.camera_id) == camera_id_map_.end()) {
      ALOGE("%s: Camera id %u does not exist", __FUNCTION__, config.camera_id);
      return BAD_VALUE;
    }

    auto stream_configuration_map = std::make_unique<StreamConfigurationMap>(
        *(static_metadata_[config.camera_id]));
    auto stream_configuration_map_max_resolution =
        std::make_unique<StreamConfigurationMap>(
            *(static_metadata_[config.camera_id]), /*maxResolution*/ true);

    LogicalCharacteristics sensor_chars;
    status_t ret =
        GetSensorCharacteristics((static_metadata_[config.camera_id]).get(),
                                 &sensor_chars[config.camera_id]);
    if (ret != OK) {
      ALOGE("%s: Unable to extract sensor chars for camera id %u", __FUNCTION__,
            config.camera_id);
      return UNKNOWN_ERROR;
    }

    PhysicalStreamConfigurationMap physical_stream_configuration_map;
    PhysicalStreamConfigurationMap physical_stream_configuration_map_max_resolution;
    auto const& physicalCameraInfo = camera_id_map_[config.camera_id];
    for (size_t i = 0; i < physicalCameraInfo.size(); i++) {
      uint32_t physical_camera_id = physicalCameraInfo[i].second;
      physical_stream_configuration_map.emplace(
          physical_camera_id, std::make_unique<StreamConfigurationMap>(
                                  *(static_metadata_[physical_camera_id])));

      physical_stream_configuration_map_max_resolution.emplace(
          physical_camera_id,
          std::make_unique<StreamConfigurationMap>(
              *(static_metadata_[physical_camera_id]), /*maxResolution*/ true));

      ret = GetSensorCharacteristics(static_metadata_[physical_camera_id].get(),
                                     &sensor_chars[physical_camera_id]);
      if (ret != OK) {
        ALOGE("%s: Unable to extract camera %d sensor characteristics %s (%d)",
              __FUNCTION__, physical_camera_id, strerror(-ret), ret);
        return ret;
      }
    }

    if (!EmulatedSensor::IsStreamCombinationSupported(
            config.camera_id, config.stream_configuration,
            *stream_configuration_map, *stream_configuration_map_max_resolution,
            physical_stream_configuration_map,
            physical_stream_configuration_map_max_resolution, sensor_chars)) {
      return OK;
    }
  }

  *is_supported = true;
  return OK;
}

bool IsDigit(const std::string& value) {
  if (value.empty()) {
    return false;
  }

  for (const auto& c : value) {
    if (!std::isdigit(c) && (!std::ispunct(c))) {
      return false;
    }
  }

  return true;
}

template <typename T>
status_t GetEnumValue(uint32_t tag_id, const char* cstring, T* result /*out*/) {
  if ((result == nullptr) || (cstring == nullptr)) {
    return BAD_VALUE;
  }

  uint32_t enum_value;
  auto ret =
      camera_metadata_enum_value(tag_id, cstring, strlen(cstring), &enum_value);
  if (ret != OK) {
    ALOGE("%s: Failed to match tag id: 0x%x value: %s", __FUNCTION__, tag_id,
          cstring);
    return ret;
  }
  *result = enum_value;

  return OK;
}

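// The per-type helpers below share one convention: every value in the JSON
// configuration is a string. Strings consisting of digits and punctuation
// (the ispunct() allowance in IsDigit() covers signs and decimal points) are
// parsed as numbers; anything else is treated as a metadata enum name and
// resolved through camera_metadata_enum_value(), e.g. "BACK" for
// android.lens.facing.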
status_t GetUInt8Value(const Json::Value& value, uint32_t tag_id,
                       uint8_t* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    if (IsDigit(value.asString())) {
      auto int_value = strtol(value.asCString(), nullptr, 10);
      if ((int_value >= 0) && (int_value <= UINT8_MAX) && (errno == 0)) {
        *result = int_value;
      } else {
        ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
        return BAD_VALUE;
      }
    } else {
      return GetEnumValue(tag_id, value.asCString(), result);
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetInt32Value(const Json::Value& value, uint32_t tag_id,
                       int32_t* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    if (IsDigit(value.asString())) {
      auto int_value = strtol(value.asCString(), nullptr, 10);
      if ((int_value >= INT32_MIN) && (int_value <= INT32_MAX) && (errno == 0)) {
        *result = int_value;
      } else {
        ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
        return BAD_VALUE;
      }
    } else {
      return GetEnumValue(tag_id, value.asCString(), result);
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetInt64Value(const Json::Value& value, uint32_t tag_id,
                       int64_t* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    auto int_value = strtoll(value.asCString(), nullptr, 10);
    if ((int_value >= INT64_MIN) && (int_value <= INT64_MAX) && (errno == 0)) {
      *result = int_value;
    } else {
      ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
      return BAD_VALUE;
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetFloatValue(const Json::Value& value, uint32_t tag_id,
                       float* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    auto float_value = strtof(value.asCString(), nullptr);
    if (errno == 0) {
      *result = float_value;
    } else {
      ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
      return BAD_VALUE;
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetDoubleValue(const Json::Value& value, uint32_t tag_id,
                        double* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    auto double_value = strtod(value.asCString(), nullptr);
    if (errno == 0) {
      *result = double_value;
    } else {
      ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
      return BAD_VALUE;
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

template <typename T>
void FilterVendorKeys(uint32_t tag_id, std::vector<T>* values) {
  if ((values == nullptr) || (values->empty())) {
    return;
  }

  switch (tag_id) {
    case ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS:
    case ANDROID_REQUEST_AVAILABLE_RESULT_KEYS:
    case ANDROID_REQUEST_AVAILABLE_SESSION_KEYS:
    case ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: {
      auto it = values->begin();
      while (it != values->end()) {
        // Per spec. the tags we are handling here will be "int32_t".
        // In this case all vendor defined values will be negative.
        if (*it < 0) {
          it = values->erase(it);
        } else {
          it++;
        }
      }
    } break;
    default:
      // no-op
      break;
  }
}

template <typename T, typename func_type>
status_t InsertTag(const Json::Value& json_value, uint32_t tag_id,
                   func_type get_val_func, HalCameraMetadata* meta /*out*/) {
  if (meta == nullptr) {
    return BAD_VALUE;
  }

  std::vector<T> values;
  T result;
  status_t ret = -1;
  values.reserve(json_value.size());
  for (const auto& val : json_value) {
    ret = get_val_func(val, tag_id, &result);
    if (ret != OK) {
      break;
    }

    values.push_back(result);
  }

  if (ret == OK) {
    FilterVendorKeys(tag_id, &values);
    ret = meta->Set(tag_id, values.data(), values.size());
  }

  return ret;
}

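// Rational tags are encoded as a flat array of numerator/denominator string
// pairs, so the array length must be even; for instance (an illustrative
// value, not taken from a shipped config) ["30", "1"] decodes to the single
// rational 30/1.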
status_t InsertRationalTag(const Json::Value& json_value, uint32_t tag_id,
                           HalCameraMetadata* meta /*out*/) {
  if (meta == nullptr) {
    return BAD_VALUE;
  }

  std::vector<camera_metadata_rational_t> values;
  status_t ret = OK;
  if (json_value.isArray() && ((json_value.size() % 2) == 0)) {
    values.reserve(json_value.size() / 2);
    auto it = json_value.begin();
    while (it != json_value.end()) {
      camera_metadata_rational_t result;
      ret = GetInt32Value((*it), tag_id, &result.numerator);
      it++;
      ret |= GetInt32Value((*it), tag_id, &result.denominator);
      it++;
      if (ret != OK) {
        break;
      }
      values.push_back(result);
    }
  } else {
    ALOGE("%s: json type: %d doesn't match with rational tag type",
          __FUNCTION__, json_value.type());
    return BAD_VALUE;
  }

  if (ret == OK) {
    ret = meta->Set(tag_id, values.data(), values.size());
  }

  return ret;
}

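// Parses a single JSON device description into a HalCameraMetadata instance
// and stores it in 'static_metadata_'. On success the return value is the
// index (camera id) of the new entry: the caller-supplied 'id' when it is
// >= 0, otherwise the next free slot at the back of the vector. On failure a
// status code such as BAD_VALUE is returned instead.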
uint32_t EmulatedCameraProviderHwlImpl::ParseCharacteristics(
    const Json::Value& value, ssize_t id) {
  if (!value.isObject()) {
    ALOGE("%s: Configuration root is not an object", __FUNCTION__);
    return BAD_VALUE;
  }

  auto static_meta = HalCameraMetadata::Create(1, 10);
  auto members = value.getMemberNames();
  for (const auto& member : members) {
    uint32_t tag_id;
    auto stat = GetTagFromName(member.c_str(), &tag_id);
    if (stat != OK) {
      ALOGE("%s: tag %s not supported, skipping!", __func__, member.c_str());
      continue;
    }

    auto tag_type = get_camera_metadata_tag_type(tag_id);
    auto tag_value = value[member.c_str()];
    switch (tag_type) {
      case TYPE_BYTE:
        InsertTag<uint8_t>(tag_value, tag_id, GetUInt8Value, static_meta.get());
        break;
      case TYPE_INT32:
        InsertTag<int32_t>(tag_value, tag_id, GetInt32Value, static_meta.get());
        break;
      case TYPE_INT64:
        InsertTag<int64_t>(tag_value, tag_id, GetInt64Value, static_meta.get());
        break;
      case TYPE_FLOAT:
        InsertTag<float>(tag_value, tag_id, GetFloatValue, static_meta.get());
        break;
      case TYPE_DOUBLE:
        InsertTag<double>(tag_value, tag_id, GetDoubleValue, static_meta.get());
        break;
      case TYPE_RATIONAL:
        InsertRationalTag(tag_value, tag_id, static_meta.get());
        break;
      default:
        ALOGE("%s: Unsupported tag type: %d!", __FUNCTION__, tag_type);
    }
  }

  SensorCharacteristics sensor_characteristics;
  auto ret =
      GetSensorCharacteristics(static_meta.get(), &sensor_characteristics);
  if (ret != OK) {
    ALOGE("%s: Unable to extract sensor characteristics!", __FUNCTION__);
    return ret;
  }

  if (!EmulatedSensor::AreCharacteristicsSupported(sensor_characteristics)) {
    ALOGE("%s: Sensor characteristics not supported!", __FUNCTION__);
    return BAD_VALUE;
  }

  // Although we don't support HdrPlus, this data is still required by HWL
  int32_t payload_frames = 0;
  static_meta->Set(google_camera_hal::kHdrplusPayloadFrames, &payload_frames, 1);

  if (id < 0) {
    static_metadata_.push_back(std::move(static_meta));
    id = static_metadata_.size() - 1;
  } else {
    static_metadata_[id] = std::move(static_meta);
  }

  return id;
}

status_t EmulatedCameraProviderHwlImpl::WaitForQemuSfFakeCameraPropertyAvailable() {
  // Camera service may start running before qemu-props sets
  // vendor.qemu.sf.fake_camera to any of the following four values:
  // "none,front,back,both"; so we need to wait.
  int num_attempts = 100;
  char prop[PROPERTY_VALUE_MAX];
  bool timeout = true;
  for (int i = 0; i < num_attempts; ++i) {
    if (property_get("vendor.qemu.sf.fake_camera", prop, nullptr) != 0) {
      timeout = false;
      break;
    }
    usleep(5000);
  }
  if (timeout) {
    ALOGE("timeout (%dms) waiting for property vendor.qemu.sf.fake_camera to be set\n",
          5 * num_attempts);
    return BAD_VALUE;
  }
  return OK;
}

status_t EmulatedCameraProviderHwlImpl::Initialize() {
  // GCH expects all physical ids to be bigger than the logical ones.
  // Resize 'static_metadata_' to fit all logical devices and insert them
  // accordingly, push any remaining physical cameras in the back.
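  // For example, with both the back and front configuration files present the
  // logical cameras occupy ids 0 and 1, while physical sub-cameras parsed from
  // those files are appended starting at id 3, because 'static_metadata_' is
  // pre-sized below to hold one slot per possible configuration file.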
  std::string config;
  size_t logical_id = 0;
  std::vector<std::string> config_file_locations;
  std::string config_dir = "";
  struct stat st;
  if (stat(kConfigurationFileDirApex.data(), &st) == 0) {
    config_dir += kConfigurationFileDirApex.data();
  } else {
    config_dir += kConfigurationFileDirVendor.data();
  }
  char prop[PROPERTY_VALUE_MAX];
  if (!property_get_bool("ro.boot.qemu", false)) {
    for (const auto& iter : kConfigurationFileNames) {
      config_file_locations.emplace_back(config_dir + iter.data());
    }
  } else {
    // Android Studio Emulator
    if (!property_get_bool("ro.boot.qemu.legacy_fake_camera", false)) {
      if (WaitForQemuSfFakeCameraPropertyAvailable() == OK) {
        property_get("vendor.qemu.sf.fake_camera", prop, nullptr);
        if (strcmp(prop, "both") == 0) {
          config_file_locations.emplace_back(config_dir +
                                             kConfigurationFileNames[0].data());
          config_file_locations.emplace_back(config_dir +
                                             kConfigurationFileNames[1].data());
        } else if (strcmp(prop, "front") == 0) {
          config_file_locations.emplace_back(config_dir +
                                             kConfigurationFileNames[1].data());
          logical_id = 1;
        } else if (strcmp(prop, "back") == 0) {
          config_file_locations.emplace_back(config_dir +
                                             kConfigurationFileNames[0].data());
          logical_id = 1;
        }
      }
    }
  }
  static_metadata_.resize(ARRAY_SIZE(kConfigurationFileNames));

  for (const auto& config_path : config_file_locations) {
    if (!android::base::ReadFileToString(config_path, &config)) {
      ALOGW("%s: Could not open configuration file: %s", __FUNCTION__,
            config_path.c_str());
      continue;
    }

    Json::CharReaderBuilder builder;
    std::unique_ptr<Json::CharReader> config_reader(builder.newCharReader());
    Json::Value root;
    std::string error_message;
    if (!config_reader->parse(&*config.begin(), &*config.end(), &root,
                              &error_message)) {
      ALOGE("Could not parse configuration file: %s", error_message.c_str());
      return BAD_VALUE;
    }

    if (root.isArray()) {
      auto device_iter = root.begin();
      auto result_id = ParseCharacteristics(*device_iter, logical_id);
      if (logical_id != result_id) {
        return result_id;
      }
      device_iter++;

      // The first device entry is always the logical camera, followed by the
      // physical devices; there must be at least two physical devices.
      camera_id_map_.emplace(logical_id, std::vector<std::pair<CameraDeviceStatus, uint32_t>>());
      if (root.size() >= 3) {
        camera_id_map_[logical_id].reserve(root.size() - 1);
        size_t current_physical_device = 0;
        while (device_iter != root.end()) {
          auto physical_id = ParseCharacteristics(*device_iter, /*id*/ -1);
          if (physical_id < 0) {
            return physical_id;
          }
          // Only notify unavailable physical camera if there are more than 2
          // physical cameras backing the logical camera
          auto device_status = (current_physical_device < 2) ? CameraDeviceStatus::kPresent :
              CameraDeviceStatus::kNotPresent;
          camera_id_map_[logical_id].push_back(std::make_pair(device_status, physical_id));
          device_iter++; current_physical_device++;
        }

        auto physical_devices = std::make_unique<PhysicalDeviceMap>();
        for (const auto& physical_device : camera_id_map_[logical_id]) {
          physical_devices->emplace(
              physical_device.second, std::make_pair(physical_device.first,
              HalCameraMetadata::Clone(
                  static_metadata_[physical_device.second].get())));
        }
        auto updated_logical_chars =
            EmulatedLogicalRequestState::AdaptLogicalCharacteristics(
                HalCameraMetadata::Clone(static_metadata_[logical_id].get()),
                std::move(physical_devices));
        if (updated_logical_chars.get() != nullptr) {
          static_metadata_[logical_id].swap(updated_logical_chars);
        } else {
          ALOGE("%s: Failed to update logical camera characteristics!",
                __FUNCTION__);
          return BAD_VALUE;
        }
      }
    } else {
      auto result_id = ParseCharacteristics(root, logical_id);
      if (result_id != logical_id) {
        return result_id;
      }
      camera_id_map_.emplace(logical_id, std::vector<std::pair<CameraDeviceStatus, uint32_t>>());
    }

    logical_id++;
  }

  return OK;
}

status_t EmulatedCameraProviderHwlImpl::SetCallback(
    const HwlCameraProviderCallback& callback) {
  torch_cb_ = callback.torch_mode_status_change;
  physical_camera_status_cb_ = callback.physical_camera_device_status_change;

  return OK;
}

status_t EmulatedCameraProviderHwlImpl::TriggerDeferredCallbacks() {
  std::lock_guard<std::mutex> lock(status_callback_future_lock_);
  if (status_callback_future_.valid()) {
    return OK;
  }

  status_callback_future_ = std::async(
      std::launch::async,
      &EmulatedCameraProviderHwlImpl::NotifyPhysicalCameraUnavailable, this);
  return OK;
}

void EmulatedCameraProviderHwlImpl::WaitForStatusCallbackFuture() {
  {
    std::lock_guard<std::mutex> lock(status_callback_future_lock_);
    if (!status_callback_future_.valid()) {
      // If there is no future pending, construct a dummy one.
      status_callback_future_ = std::async([]() { return; });
    }
  }
  status_callback_future_.wait();
}

void EmulatedCameraProviderHwlImpl::NotifyPhysicalCameraUnavailable() {
  for (const auto& one_map : camera_id_map_) {
    for (const auto& physical_device : one_map.second) {
      if (physical_device.first != CameraDeviceStatus::kNotPresent) {
        continue;
      }

      uint32_t logical_camera_id = one_map.first;
      uint32_t physical_camera_id = physical_device.second;
      physical_camera_status_cb_(
          logical_camera_id, physical_camera_id,
          CameraDeviceStatus::kNotPresent);
    }
  }
}

status_t EmulatedCameraProviderHwlImpl::GetVendorTags(
    std::vector<VendorTagSection>* vendor_tag_sections) {
  if (vendor_tag_sections == nullptr) {
    ALOGE("%s: vendor_tag_sections is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  // No vendor specific tags as of now
  return OK;
}

status_t EmulatedCameraProviderHwlImpl::GetVisibleCameraIds(
    std::vector<std::uint32_t>* camera_ids) {
  if (camera_ids == nullptr) {
    ALOGE("%s: camera_ids is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  for (const auto& device : camera_id_map_) {
    camera_ids->push_back(device.first);
  }

  return OK;
}

status_t EmulatedCameraProviderHwlImpl::CreateCameraDeviceHwl(
    uint32_t camera_id, std::unique_ptr<CameraDeviceHwl>* camera_device_hwl) {
  if (camera_device_hwl == nullptr) {
    ALOGE("%s: camera_device_hwl is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  if (camera_id_map_.find(camera_id) == camera_id_map_.end()) {
    ALOGE("%s: Invalid camera id: %u", __func__, camera_id);
    return BAD_VALUE;
  }

  std::unique_ptr<HalCameraMetadata> meta =
      HalCameraMetadata::Clone(static_metadata_[camera_id].get());

  std::shared_ptr<EmulatedTorchState> torch_state;
  camera_metadata_ro_entry entry;
  bool flash_supported = false;
  auto ret = meta->Get(ANDROID_FLASH_INFO_AVAILABLE, &entry);
  if ((ret == OK) && (entry.count == 1)) {
    if (entry.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_TRUE) {
      flash_supported = true;
    }
  }

  if (flash_supported) {
    torch_state = std::make_shared<EmulatedTorchState>(camera_id, torch_cb_);
  }

  auto physical_devices = std::make_unique<PhysicalDeviceMap>();
  for (const auto& physical_device : camera_id_map_[camera_id]) {
      physical_devices->emplace(
          physical_device.second, std::make_pair(physical_device.first,
          HalCameraMetadata::Clone(static_metadata_[physical_device.second].get())));
  }
  *camera_device_hwl = EmulatedCameraDeviceHwlImpl::Create(
      camera_id, std::move(meta), std::move(physical_devices), torch_state);
  if (*camera_device_hwl == nullptr) {
    ALOGE("%s: Cannot create EmulatedCameraDeviceHWlImpl.", __FUNCTION__);
    return BAD_VALUE;
  }

  return OK;
}

status_t EmulatedCameraProviderHwlImpl::CreateBufferAllocatorHwl(
    std::unique_ptr<CameraBufferAllocatorHwl>* camera_buffer_allocator_hwl) {
  if (camera_buffer_allocator_hwl == nullptr) {
    ALOGE("%s: camera_buffer_allocator_hwl is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  // Currently not supported
  return INVALID_OPERATION;
}

status_t EmulatedCameraProviderHwlImpl::NotifyDeviceStateChange(
    DeviceState /*device_state*/) {
  return OK;
}
}  // namespace android