• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2013-2019 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
#define LOG_TAG "HWLUtils"
#include "HWLUtils.h"

#include <log/log.h>

#include <algorithm>
#include <map>
#include <unordered_set>

#include "utils.h"
24 
25 namespace android {
26 
27 using google_camera_hal::ColorSpaceProfile;
28 using google_camera_hal::DynamicRangeProfile;
29 using google_camera_hal::utils::HasCapability;
30 
GetLastStreamUseCase(const HalCameraMetadata * metadata)31 static int64_t GetLastStreamUseCase(const HalCameraMetadata* metadata) {
32   status_t ret = OK;
33   camera_metadata_ro_entry_t entry;
34   int64_t cropped_raw_use_case =
35       ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW;
36   int64_t video_call_use_case =
37       ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL;
38   ret = metadata->Get(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
39   if (ret != OK) {
40     return ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
41   }
42   if (std::find(entry.data.i64, entry.data.i64 + entry.count,
43                 cropped_raw_use_case) != entry.data.i64 + entry.count) {
44     return cropped_raw_use_case;
45   }
46   return video_call_use_case;
47 }
SupportsSessionHalBufManager(const HalCameraMetadata * metadata,bool * result)48 status_t SupportsSessionHalBufManager(const HalCameraMetadata* metadata,
49                                       bool* result /*out*/) {
50   if ((metadata == nullptr) || (result == nullptr)) {
51     return BAD_VALUE;
52   }
53 
54   status_t ret = OK;
55   camera_metadata_ro_entry_t entry;
56   *result = false;
57   ret = metadata->Get(ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry);
58   if (ret != OK) {
59     return OK;
60   }
61   if ((ret == OK) && (entry.count != 1)) {
62     ALOGE("%s: Invalid ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION!",
63           __FUNCTION__);
64     return BAD_VALUE;
65   }
66   *result =
67       (entry.data.u8[0] ==
68        ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_SESSION_CONFIGURABLE);
69   return OK;
70 }
71 
// Parses the static characteristics in |metadata| into |sensor_chars|.
//
// @param metadata     static camera characteristics; must be non-null.
// @param sensor_chars out-parameter populated on success; must be non-null.
// @return BAD_VALUE when a mandatory tag is missing or malformed (pixel array
//         size, max output streams, pipeline depth, orientation, lens facing,
//         plus the capability-gated tags below); OK otherwise.
status_t GetSensorCharacteristics(const HalCameraMetadata* metadata,
                                  SensorCharacteristics* sensor_chars /*out*/) {
  if ((metadata == nullptr) || (sensor_chars == nullptr)) {
    return BAD_VALUE;
  }

  status_t ret = OK;
  camera_metadata_ro_entry_t entry;
  // Mandatory pixel array size: {width, height}.
  ret = metadata->Get(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, &entry);
  if ((ret != OK) || (entry.count != 2)) {
    ALOGE("%s: Invalid ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE!", __FUNCTION__);
    return BAD_VALUE;
  }
  sensor_chars->width = entry.data.i32[0];
  sensor_chars->height = entry.data.i32[1];
  // Full-resolution defaults to the regular size; overridden below for
  // quad-bayer sensors that publish a maximum-resolution pixel array.
  sensor_chars->full_res_width = sensor_chars->width;
  sensor_chars->full_res_height = sensor_chars->height;

  // Optional maximum-resolution tag; its presence marks a quad-bayer sensor.
  ret = metadata->Get(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
                      &entry);
  if ((ret == OK) && (entry.count == 2)) {
    sensor_chars->full_res_width = entry.data.i32[0];
    sensor_chars->full_res_height = entry.data.i32[1];
    sensor_chars->quad_bayer_sensor = true;
  }

  // NOTE(review): this relies on quad_bayer_sensor defaulting to false in
  // SensorCharacteristics when the tag above is absent — confirm.
  if (sensor_chars->quad_bayer_sensor) {
    ret = metadata->Get(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &entry);
    if ((ret == OK) && (entry.count == 4)) {
      google_camera_hal::Rect rect;
      if (google_camera_hal::utils::GetSensorActiveArraySize(metadata, &rect) !=
          OK) {
        return BAD_VALUE;
      }
      // Unzoomed RAW crop covers the whole active array.
      sensor_chars->raw_crop_region_unzoomed[0] = rect.left;    // left
      sensor_chars->raw_crop_region_unzoomed[1] = rect.top;     // top
      sensor_chars->raw_crop_region_unzoomed[2] = rect.right;   // right
      sensor_chars->raw_crop_region_unzoomed[3] = rect.bottom;  // bottom

      // 2x zoom , raw crop width / height = 1/2 sensor width / height. top /
      // left edge = 1/4 sensor width. bottom / right edge = 1/2 + 1 /4 * sensor
      // width / height: Refer to case 1 in
      // https://developer.android.com/reference/android/hardware/camera2/CaptureRequest#SCALER_CROP_REGION
      // for a visual representation.
      sensor_chars->raw_crop_region_zoomed[0] =
          rect.left + (rect.right - rect.left) / 4;  // left
      sensor_chars->raw_crop_region_zoomed[1] =
          rect.top + (rect.bottom - rect.top) / 4;  // top
      sensor_chars->raw_crop_region_zoomed[2] =
          sensor_chars->raw_crop_region_zoomed[0] +
          (rect.right - rect.left) / 2;  // right
      sensor_chars->raw_crop_region_zoomed[3] =
          sensor_chars->raw_crop_region_zoomed[1] +
          (rect.bottom - rect.top) / 2;  // bottom
    }
  }

  // Mandatory stream-count limits: {raw, processed, stalling}.
  ret = metadata->Get(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, &entry);
  if ((ret != OK) || (entry.count != 3)) {
    ALOGE("%s: Invalid ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS!", __FUNCTION__);
    return BAD_VALUE;
  }

  sensor_chars->max_raw_streams = entry.data.i32[0];
  sensor_chars->max_processed_streams = entry.data.i32[1];
  sensor_chars->max_stalling_streams = entry.data.i32[2];

  // 10-bit dynamic range: the profile map is triples of
  // {profile, supported-constraint bitmask, extra-latency flag}; only the
  // first two fields are consumed here.
  if (HasCapability(
          metadata,
          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) {
    ret = metadata->Get(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
                        &entry);
    if ((ret != OK) || ((entry.count % 3) != 0)) {
      ALOGE("%s: Invalid ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP!",
            __FUNCTION__);
      return BAD_VALUE;
    }

    for (size_t i = 0; i < entry.count; i += 3) {
      sensor_chars->dynamic_range_profiles.emplace(
          static_cast<DynamicRangeProfile>(entry.data.i64[i]),
          std::unordered_set<DynamicRangeProfile>());
      // Walk every single-bit profile value up to (and including) the last
      // defined profile, collecting those set in the constraint bitmask.
      const auto profile_end =
          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
          << 1;
      uint64_t current_profile =
          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
      for (; current_profile != profile_end; current_profile <<= 1) {
        if (entry.data.i64[i + 1] & current_profile) {
          sensor_chars->dynamic_range_profiles
              .at(static_cast<DynamicRangeProfile>(entry.data.i64[i]))
              .emplace(static_cast<DynamicRangeProfile>(current_profile));
        }
      }
    }

    sensor_chars->is_10bit_dynamic_range_capable = true;
  }

  // Color space profiles: triples of
  // {color space, image format, dynamic-range-profile bitmask}.
  if (HasCapability(
          metadata,
          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
    ret = metadata->Get(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
                        &entry);
    if ((ret != OK) || ((entry.count % 3) != 0)) {
      ALOGE("%s: Invalid ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP!",
            __FUNCTION__);
      return BAD_VALUE;
    }

    for (size_t i = 0; i < entry.count; i += 3) {
      ColorSpaceProfile color_space =
          static_cast<ColorSpaceProfile>(entry.data.i64[i]);
      int image_format = static_cast<int>(entry.data.i64[i + 1]);

      // Build the nested map lazily: color space -> image format -> profiles.
      if (sensor_chars->color_space_profiles.find(color_space) ==
          sensor_chars->color_space_profiles.end()) {
        sensor_chars->color_space_profiles.emplace(
            color_space,
            std::unordered_map<int, std::unordered_set<DynamicRangeProfile>>());
      }

      std::unordered_map<int, std::unordered_set<DynamicRangeProfile>>&
          image_format_map = sensor_chars->color_space_profiles.at(color_space);

      if (image_format_map.find(image_format) == image_format_map.end()) {
        image_format_map.emplace(image_format,
                                 std::unordered_set<DynamicRangeProfile>());
      }

      // Same bit-walk as the dynamic-range section above, applied to the
      // per-(color space, format) profile bitmask.
      const auto profile_end =
          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
          << 1;
      uint64_t current_profile =
          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
      for (; current_profile != profile_end; current_profile <<= 1) {
        if (entry.data.i64[i + 2] & current_profile) {
          image_format_map.at(image_format)
              .emplace(static_cast<DynamicRangeProfile>(current_profile));
        }
      }
    }

    sensor_chars->support_color_space_profiles = true;
  }

  // Exposure/sensitivity/frame-duration: taken from metadata for
  // MANUAL_SENSOR devices, otherwise from EmulatedSensor defaults.
  if (HasCapability(metadata,
                    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
    ret = metadata->Get(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, &entry);
    if ((ret != OK) ||
        (entry.count != ARRAY_SIZE(sensor_chars->exposure_time_range))) {
      ALOGE("%s: Invalid ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE!",
            __FUNCTION__);
      return BAD_VALUE;
    }
    memcpy(sensor_chars->exposure_time_range, entry.data.i64,
           sizeof(sensor_chars->exposure_time_range));

    ret = metadata->Get(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &entry);
    if ((ret != OK) || (entry.count != 1)) {
      ALOGE("%s: Invalid ANDROID_SENSOR_INFO_MAX_FRAME_DURATION!", __FUNCTION__);
      return BAD_VALUE;
    }
    // Metadata only carries the maximum; the minimum comes from the
    // emulated sensor's supported range.
    sensor_chars->frame_duration_range[1] = entry.data.i64[0];
    sensor_chars->frame_duration_range[0] =
        EmulatedSensor::kSupportedFrameDurationRange[0];

    ret = metadata->Get(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, &entry);
    if ((ret != OK) ||
        (entry.count != ARRAY_SIZE(sensor_chars->sensitivity_range))) {
      ALOGE("%s: Invalid ANDROID_SENSOR_INFO_SENSITIVITY_RANGE!", __FUNCTION__);
      return BAD_VALUE;
    }
    memcpy(sensor_chars->sensitivity_range, entry.data.i64,
           sizeof(sensor_chars->sensitivity_range));
  } else {
    memcpy(sensor_chars->exposure_time_range,
           EmulatedSensor::kSupportedExposureTimeRange,
           sizeof(sensor_chars->exposure_time_range));
    memcpy(sensor_chars->frame_duration_range,
           EmulatedSensor::kSupportedFrameDurationRange,
           sizeof(sensor_chars->frame_duration_range));
    memcpy(sensor_chars->sensitivity_range,
           EmulatedSensor::kSupportedSensitivityRange,
           sizeof(sensor_chars->sensitivity_range));
  }

  // RAW-specific characteristics (CFA, levels, shading map, color matrices);
  // EmulatedSensor defaults are used for non-RAW devices.
  if (HasCapability(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
    ret = metadata->Get(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, &entry);
    if ((ret != OK) || (entry.count != 1)) {
      ALOGE("%s: Invalid ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT!",
            __FUNCTION__);
      return BAD_VALUE;
    }

    sensor_chars->color_arangement = static_cast<
        camera_metadata_enum_android_sensor_info_color_filter_arrangement>(
        entry.data.u8[0]);

    ret = metadata->Get(ANDROID_SENSOR_INFO_WHITE_LEVEL, &entry);
    if ((ret != OK) || (entry.count != 1)) {
      ALOGE("%s: Invalid ANDROID_SENSOR_INFO_WHITE_LEVEL!", __FUNCTION__);
      return BAD_VALUE;
    }
    sensor_chars->max_raw_value = entry.data.i32[0];

    ret = metadata->Get(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, &entry);
    if ((ret != OK) ||
        (entry.count != ARRAY_SIZE(sensor_chars->black_level_pattern))) {
      ALOGE("%s: Invalid ANDROID_SENSOR_BLACK_LEVEL_PATTERN!", __FUNCTION__);
      return BAD_VALUE;
    }

    memcpy(sensor_chars->black_level_pattern, entry.data.i32,
           sizeof(sensor_chars->black_level_pattern));

    ret = metadata->Get(ANDROID_LENS_INFO_SHADING_MAP_SIZE, &entry);
    if ((ret == OK) && (entry.count == 2)) {
      sensor_chars->lens_shading_map_size[0] = entry.data.i32[0];
      sensor_chars->lens_shading_map_size[1] = entry.data.i32[1];
    } else {
      ALOGE("%s: No available shading map size!", __FUNCTION__);
      return BAD_VALUE;
    }

    ret = metadata->Get(ANDROID_SENSOR_COLOR_TRANSFORM1, &entry);
    if ((ret != OK) || (entry.count != (3 * 3))) {  // 3x3 rational matrix
      ALOGE("%s: Invalid ANDROID_SENSOR_COLOR_TRANSFORM1!", __FUNCTION__);
      return BAD_VALUE;
    }

    sensor_chars->color_filter.rX = RAT_TO_FLOAT(entry.data.r[0]);
    sensor_chars->color_filter.rY = RAT_TO_FLOAT(entry.data.r[1]);
    sensor_chars->color_filter.rZ = RAT_TO_FLOAT(entry.data.r[2]);
    sensor_chars->color_filter.grX = RAT_TO_FLOAT(entry.data.r[3]);
    sensor_chars->color_filter.grY = RAT_TO_FLOAT(entry.data.r[4]);
    sensor_chars->color_filter.grZ = RAT_TO_FLOAT(entry.data.r[5]);
    // NOTE(review): gb reuses rational entries 3-5 (identical to gr) — looks
    // like both green channels are deliberately treated the same for the
    // emulated sensor; confirm this is intentional.
    sensor_chars->color_filter.gbX = RAT_TO_FLOAT(entry.data.r[3]);
    sensor_chars->color_filter.gbY = RAT_TO_FLOAT(entry.data.r[4]);
    sensor_chars->color_filter.gbZ = RAT_TO_FLOAT(entry.data.r[5]);
    sensor_chars->color_filter.bX = RAT_TO_FLOAT(entry.data.r[6]);
    sensor_chars->color_filter.bY = RAT_TO_FLOAT(entry.data.r[7]);
    sensor_chars->color_filter.bZ = RAT_TO_FLOAT(entry.data.r[8]);

    ret = metadata->Get(ANDROID_SENSOR_FORWARD_MATRIX1, &entry);
    if ((ret != OK) || (entry.count != (3 * 3))) {
      ALOGE("%s: Invalid ANDROID_SENSOR_FORWARD_MATRIX1!", __FUNCTION__);
      return BAD_VALUE;
    }

    // Forward matrix is read row-major into (r,g,b) x (X,Y,Z).
    sensor_chars->forward_matrix.rX = RAT_TO_FLOAT(entry.data.r[0]);
    sensor_chars->forward_matrix.gX = RAT_TO_FLOAT(entry.data.r[1]);
    sensor_chars->forward_matrix.bX = RAT_TO_FLOAT(entry.data.r[2]);
    sensor_chars->forward_matrix.rY = RAT_TO_FLOAT(entry.data.r[3]);
    sensor_chars->forward_matrix.gY = RAT_TO_FLOAT(entry.data.r[4]);
    sensor_chars->forward_matrix.bY = RAT_TO_FLOAT(entry.data.r[5]);
    sensor_chars->forward_matrix.rZ = RAT_TO_FLOAT(entry.data.r[6]);
    sensor_chars->forward_matrix.gZ = RAT_TO_FLOAT(entry.data.r[7]);
    sensor_chars->forward_matrix.bZ = RAT_TO_FLOAT(entry.data.r[8]);
  } else {
    sensor_chars->color_arangement = static_cast<
        camera_metadata_enum_android_sensor_info_color_filter_arrangement>(
        EmulatedSensor::kSupportedColorFilterArrangement);
    sensor_chars->max_raw_value = EmulatedSensor::kDefaultMaxRawValue;
    memcpy(sensor_chars->black_level_pattern,
           EmulatedSensor::kDefaultBlackLevelPattern,
           sizeof(sensor_chars->black_level_pattern));
  }

  // Input streams only matter for reprocessing-capable devices.
  if (HasCapability(
          metadata,
          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING) ||
      HasCapability(metadata,
                    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING)) {
    ret = metadata->Get(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, &entry);
    if ((ret != OK) || (entry.count != 1)) {
      ALOGE("%s: Invalid ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS!", __FUNCTION__);
      return BAD_VALUE;
    }

    sensor_chars->max_input_streams = entry.data.i32[0];
  }

  // Mandatory, non-zero pipeline depth.
  ret = metadata->Get(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &entry);
  if ((ret == OK) && (entry.count == 1)) {
    if (entry.data.u8[0] == 0) {
      ALOGE("%s: Maximum request pipeline must have a non zero value!",
            __FUNCTION__);
      return BAD_VALUE;
    }
    sensor_chars->max_pipeline_depth = entry.data.u8[0];
  } else {
    ALOGE("%s: Maximum request pipeline depth absent!", __FUNCTION__);
    return BAD_VALUE;
  }

  ret = metadata->Get(ANDROID_SENSOR_ORIENTATION, &entry);
  if ((ret == OK) && (entry.count == 1)) {
    sensor_chars->orientation = entry.data.i32[0];
  } else {
    ALOGE("%s: Sensor orientation absent!", __FUNCTION__);
    return BAD_VALUE;
  }

  ret = metadata->Get(ANDROID_LENS_FACING, &entry);
  if ((ret == OK) && (entry.count == 1)) {
    sensor_chars->is_front_facing = false;
    if (ANDROID_LENS_FACING_FRONT == entry.data.u8[0]) {
      sensor_chars->is_front_facing = true;
    }
  } else {
    ALOGE("%s: Lens facing absent!", __FUNCTION__);
    return BAD_VALUE;
  }

  if (HasCapability(metadata,
                    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE)) {
    sensor_chars->support_stream_use_case = true;
    sensor_chars->end_valid_stream_use_case = GetLastStreamUseCase(metadata);

  } else {
    sensor_chars->support_stream_use_case = false;
  }

  // All failure paths returned early, so ret is OK here.
  return ret;
}
398 
// Produces a deep copy of a physical device map: the map structure and the
// per-device class are copied, and each device's HalCameraMetadata is cloned.
PhysicalDeviceMapPtr ClonePhysicalDeviceMap(const PhysicalDeviceMapPtr& src) {
  auto clone = std::make_unique<PhysicalDeviceMap>();
  for (const auto& [device_id, device_entry] : *src) {
    const auto& [device_class, device_metadata] = device_entry;
    clone->emplace(device_id,
                   std::make_pair(device_class,
                                  HalCameraMetadata::Clone(
                                      device_metadata.get())));
  }
  return clone;
}
407 
408 }  // namespace android
409