/*
 * Copyright (C) 2013-2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>  // std::find
#include <unordered_set>
#define LOG_TAG "HWLUtils"
#include "HWLUtils.h"
#include <log/log.h>
#include "utils.h"

#include <map>

namespace android {

using google_camera_hal::ColorSpaceProfile;
using google_camera_hal::DynamicRangeProfile;
using google_camera_hal::utils::HasCapability;

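// Returns the last stream use case treated as valid by the HWL: CROPPED_RAW
// when the device advertises it, VIDEO_CALL otherwise, or DEFAULT if the
// available-stream-use-cases key is missing.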
static int64_t GetLastStreamUseCase(const HalCameraMetadata* metadata) {
  status_t ret = OK;
  camera_metadata_ro_entry_t entry;
  int64_t cropped_raw_use_case =
      ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW;
  int64_t video_call_use_case =
      ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL;
  ret = metadata->Get(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
  if (ret != OK) {
    return ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
  }
  if (std::find(entry.data.i64, entry.data.i64 + entry.count,
                cropped_raw_use_case) != entry.data.i64 + entry.count) {
    return cropped_raw_use_case;
  }
  return video_call_use_case;
}

status_t GetSensorCharacteristics(const HalCameraMetadata* metadata,
                                  SensorCharacteristics* sensor_chars /*out*/) {
  if ((metadata == nullptr) || (sensor_chars == nullptr)) {
    return BAD_VALUE;
  }

  status_t ret = OK;
  camera_metadata_ro_entry_t entry;
  ret = metadata->Get(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, &entry);
  if ((ret != OK) || (entry.count != 2)) {
    ALOGE("%s: Invalid ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE!", __FUNCTION__);
    return BAD_VALUE;
  }
  sensor_chars->width = entry.data.i32[0];
  sensor_chars->height = entry.data.i32[1];
  sensor_chars->full_res_width = sensor_chars->width;
  sensor_chars->full_res_height = sensor_chars->height;

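  // A separate maximum-resolution pixel array size is only reported by
  // quad-bayer sensors; its presence is used to flag the sensor as such.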
  ret = metadata->Get(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
                      &entry);
  if ((ret == OK) && (entry.count == 2)) {
    sensor_chars->full_res_width = entry.data.i32[0];
    sensor_chars->full_res_height = entry.data.i32[1];
    sensor_chars->quad_bayer_sensor = true;
  }

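  // For quad-bayer sensors, pre-compute the RAW crop regions: the full active
  // array ("unzoomed") and a centered 2x-zoom crop ("zoomed").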
  if (sensor_chars->quad_bayer_sensor) {
    ret = metadata->Get(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &entry);
    if ((ret == OK) && (entry.count == 4)) {
      google_camera_hal::Rect rect;
      if (google_camera_hal::utils::GetSensorActiveArraySize(metadata, &rect) !=
          OK) {
        return BAD_VALUE;
      }
      sensor_chars->raw_crop_region_unzoomed[0] = rect.left;    // left
      sensor_chars->raw_crop_region_unzoomed[1] = rect.top;     // top
      sensor_chars->raw_crop_region_unzoomed[2] = rect.right;   // right
      sensor_chars->raw_crop_region_unzoomed[3] = rect.bottom;  // bottom

      // 2x zoom: the raw crop width/height is 1/2 of the active array
      // width/height, the top/left edge sits at 1/4 of the width/height, and
      // the bottom/right edge at 1/4 + 1/2 = 3/4. Refer to case 1 in
      // https://developer.android.com/reference/android/hardware/camera2/CaptureRequest#SCALER_CROP_REGION
      // for a visual representation.
      sensor_chars->raw_crop_region_zoomed[0] =
          rect.left + (rect.right - rect.left) / 4;  // left
      sensor_chars->raw_crop_region_zoomed[1] =
          rect.top + (rect.bottom - rect.top) / 4;  // top
      sensor_chars->raw_crop_region_zoomed[2] =
          sensor_chars->raw_crop_region_zoomed[0] +
          (rect.right - rect.left) / 2;  // right
      sensor_chars->raw_crop_region_zoomed[3] =
          sensor_chars->raw_crop_region_zoomed[1] +
          (rect.bottom - rect.top) / 2;  // bottom
    }
  }

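  // The maximum number of output streams is required here and carries three
  // counts: raw, processed (non-stalling) and stalling streams.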
  ret = metadata->Get(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, &entry);
  if ((ret != OK) || (entry.count != 3)) {
    ALOGE("%s: Invalid ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS!", __FUNCTION__);
    return BAD_VALUE;
  }

  sensor_chars->max_raw_streams = entry.data.i32[0];
  sensor_chars->max_processed_streams = entry.data.i32[1];
  sensor_chars->max_stalling_streams = entry.data.i32[2];

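  // Devices advertising the 10-bit dynamic range capability must also publish
  // the dynamic range profiles map as a flat list of int64 triples. The first
  // element of each triple is a profile and the second a bitmask that is
  // decoded below into a set of DynamicRangeProfile values; the third element
  // is not consumed here.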
  if (HasCapability(
          metadata,
          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) {
    ret = metadata->Get(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
                        &entry);
    if ((ret != OK) || ((entry.count % 3) != 0)) {
      ALOGE("%s: Invalid ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP!",
            __FUNCTION__);
      return BAD_VALUE;
    }

    for (size_t i = 0; i < entry.count; i += 3) {
      sensor_chars->dynamic_range_profiles.emplace(
          static_cast<DynamicRangeProfile>(entry.data.i64[i]),
          std::unordered_set<DynamicRangeProfile>());
      const auto profile_end =
          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
          << 1;
      uint64_t current_profile =
          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
      for (; current_profile != profile_end; current_profile <<= 1) {
        if (entry.data.i64[i + 1] & current_profile) {
          sensor_chars->dynamic_range_profiles
              .at(static_cast<DynamicRangeProfile>(entry.data.i64[i]))
              .emplace(static_cast<DynamicRangeProfile>(current_profile));
        }
      }
    }

    sensor_chars->is_10bit_dynamic_range_capable = true;
  }

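  // The color space profiles map is parsed analogously: a flat list of int64
  // triples (color space, image format, dynamic range profile bitmask), nested
  // into a map of color space -> image format -> supported profiles.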
  if (HasCapability(
          metadata,
          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
    ret = metadata->Get(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
                        &entry);
    if ((ret != OK) || ((entry.count % 3) != 0)) {
      ALOGE("%s: Invalid ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP!",
            __FUNCTION__);
      return BAD_VALUE;
    }

    for (size_t i = 0; i < entry.count; i += 3) {
      ColorSpaceProfile color_space =
          static_cast<ColorSpaceProfile>(entry.data.i64[i]);
      int image_format = static_cast<int>(entry.data.i64[i + 1]);

      if (sensor_chars->color_space_profiles.find(color_space) ==
          sensor_chars->color_space_profiles.end()) {
        sensor_chars->color_space_profiles.emplace(
            color_space,
            std::unordered_map<int, std::unordered_set<DynamicRangeProfile>>());
      }

      std::unordered_map<int, std::unordered_set<DynamicRangeProfile>>&
          image_format_map = sensor_chars->color_space_profiles.at(color_space);

      if (image_format_map.find(image_format) == image_format_map.end()) {
        image_format_map.emplace(image_format,
                                 std::unordered_set<DynamicRangeProfile>());
      }

      const auto profile_end =
          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
          << 1;
      uint64_t current_profile =
          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
      for (; current_profile != profile_end; current_profile <<= 1) {
        if (entry.data.i64[i + 2] & current_profile) {
          image_format_map.at(image_format)
              .emplace(static_cast<DynamicRangeProfile>(current_profile));
        }
      }
    }

    sensor_chars->support_color_space_profiles = true;
  }

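  // MANUAL_SENSOR devices must advertise their exposure time, frame duration
  // and sensitivity ranges; without the capability, fall back to the
  // EmulatedSensor defaults.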
  if (HasCapability(metadata,
                    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
    ret = metadata->Get(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, &entry);
    if ((ret != OK) ||
        (entry.count != ARRAY_SIZE(sensor_chars->exposure_time_range))) {
      ALOGE("%s: Invalid ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE!",
            __FUNCTION__);
      return BAD_VALUE;
    }
    memcpy(sensor_chars->exposure_time_range, entry.data.i64,
           sizeof(sensor_chars->exposure_time_range));

    ret = metadata->Get(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &entry);
    if ((ret != OK) || (entry.count != 1)) {
      ALOGE("%s: Invalid ANDROID_SENSOR_INFO_MAX_FRAME_DURATION!", __FUNCTION__);
      return BAD_VALUE;
    }
    sensor_chars->frame_duration_range[1] = entry.data.i64[0];
    sensor_chars->frame_duration_range[0] =
        EmulatedSensor::kSupportedFrameDurationRange[0];

    ret = metadata->Get(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, &entry);
    if ((ret != OK) ||
        (entry.count != ARRAY_SIZE(sensor_chars->sensitivity_range))) {
      ALOGE("%s: Invalid ANDROID_SENSOR_INFO_SENSITIVITY_RANGE!", __FUNCTION__);
      return BAD_VALUE;
    }
    memcpy(sensor_chars->sensitivity_range, entry.data.i64,
           sizeof(sensor_chars->sensitivity_range));
  } else {
    memcpy(sensor_chars->exposure_time_range,
           EmulatedSensor::kSupportedExposureTimeRange,
           sizeof(sensor_chars->exposure_time_range));
    memcpy(sensor_chars->frame_duration_range,
           EmulatedSensor::kSupportedFrameDurationRange,
           sizeof(sensor_chars->frame_duration_range));
    memcpy(sensor_chars->sensitivity_range,
           EmulatedSensor::kSupportedSensitivityRange,
           sizeof(sensor_chars->sensitivity_range));
  }

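  // RAW-capable devices must publish the color filter arrangement, white
  // level, black level pattern, lens shading map size and the color transform
  // and forward matrices; otherwise use the EmulatedSensor defaults.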
  if (HasCapability(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
    ret = metadata->Get(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, &entry);
    if ((ret != OK) || (entry.count != 1)) {
      ALOGE("%s: Invalid ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT!",
            __FUNCTION__);
      return BAD_VALUE;
    }

    sensor_chars->color_arangement = static_cast<
        camera_metadata_enum_android_sensor_info_color_filter_arrangement>(
        entry.data.u8[0]);

    ret = metadata->Get(ANDROID_SENSOR_INFO_WHITE_LEVEL, &entry);
    if ((ret != OK) || (entry.count != 1)) {
      ALOGE("%s: Invalid ANDROID_SENSOR_INFO_WHITE_LEVEL!", __FUNCTION__);
      return BAD_VALUE;
    }
    sensor_chars->max_raw_value = entry.data.i32[0];

    ret = metadata->Get(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, &entry);
    if ((ret != OK) ||
        (entry.count != ARRAY_SIZE(sensor_chars->black_level_pattern))) {
      ALOGE("%s: Invalid ANDROID_SENSOR_BLACK_LEVEL_PATTERN!", __FUNCTION__);
      return BAD_VALUE;
    }

    memcpy(sensor_chars->black_level_pattern, entry.data.i32,
           sizeof(sensor_chars->black_level_pattern));

    ret = metadata->Get(ANDROID_LENS_INFO_SHADING_MAP_SIZE, &entry);
    if ((ret == OK) && (entry.count == 2)) {
      sensor_chars->lens_shading_map_size[0] = entry.data.i32[0];
      sensor_chars->lens_shading_map_size[1] = entry.data.i32[1];
    } else {
      ALOGE("%s: No available shading map size!", __FUNCTION__);
      return BAD_VALUE;
    }

    ret = metadata->Get(ANDROID_SENSOR_COLOR_TRANSFORM1, &entry);
    if ((ret != OK) || (entry.count != (3 * 3))) {  // 3x3 rational matrix
      ALOGE("%s: Invalid ANDROID_SENSOR_COLOR_TRANSFORM1!", __FUNCTION__);
      return BAD_VALUE;
    }

    sensor_chars->color_filter.rX = RAT_TO_FLOAT(entry.data.r[0]);
    sensor_chars->color_filter.rY = RAT_TO_FLOAT(entry.data.r[1]);
    sensor_chars->color_filter.rZ = RAT_TO_FLOAT(entry.data.r[2]);
    sensor_chars->color_filter.grX = RAT_TO_FLOAT(entry.data.r[3]);
    sensor_chars->color_filter.grY = RAT_TO_FLOAT(entry.data.r[4]);
    sensor_chars->color_filter.grZ = RAT_TO_FLOAT(entry.data.r[5]);
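    // The Gb channel coefficients mirror the Gr values above.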
    sensor_chars->color_filter.gbX = RAT_TO_FLOAT(entry.data.r[3]);
    sensor_chars->color_filter.gbY = RAT_TO_FLOAT(entry.data.r[4]);
    sensor_chars->color_filter.gbZ = RAT_TO_FLOAT(entry.data.r[5]);
    sensor_chars->color_filter.bX = RAT_TO_FLOAT(entry.data.r[6]);
    sensor_chars->color_filter.bY = RAT_TO_FLOAT(entry.data.r[7]);
    sensor_chars->color_filter.bZ = RAT_TO_FLOAT(entry.data.r[8]);

    ret = metadata->Get(ANDROID_SENSOR_FORWARD_MATRIX1, &entry);
    if ((ret != OK) || (entry.count != (3 * 3))) {
      ALOGE("%s: Invalid ANDROID_SENSOR_FORWARD_MATRIX1!", __FUNCTION__);
      return BAD_VALUE;
    }

    sensor_chars->forward_matrix.rX = RAT_TO_FLOAT(entry.data.r[0]);
    sensor_chars->forward_matrix.gX = RAT_TO_FLOAT(entry.data.r[1]);
    sensor_chars->forward_matrix.bX = RAT_TO_FLOAT(entry.data.r[2]);
    sensor_chars->forward_matrix.rY = RAT_TO_FLOAT(entry.data.r[3]);
    sensor_chars->forward_matrix.gY = RAT_TO_FLOAT(entry.data.r[4]);
    sensor_chars->forward_matrix.bY = RAT_TO_FLOAT(entry.data.r[5]);
    sensor_chars->forward_matrix.rZ = RAT_TO_FLOAT(entry.data.r[6]);
    sensor_chars->forward_matrix.gZ = RAT_TO_FLOAT(entry.data.r[7]);
    sensor_chars->forward_matrix.bZ = RAT_TO_FLOAT(entry.data.r[8]);
  } else {
    sensor_chars->color_arangement = static_cast<
        camera_metadata_enum_android_sensor_info_color_filter_arrangement>(
        EmulatedSensor::kSupportedColorFilterArrangement);
    sensor_chars->max_raw_value = EmulatedSensor::kDefaultMaxRawValue;
    memcpy(sensor_chars->black_level_pattern,
           EmulatedSensor::kDefaultBlackLevelPattern,
           sizeof(sensor_chars->black_level_pattern));
  }

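  // Private or YUV reprocessing implies at least one input stream; record the
  // advertised maximum.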
  if (HasCapability(
          metadata,
          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING) ||
      HasCapability(metadata,
                    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING)) {
    ret = metadata->Get(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, &entry);
    if ((ret != OK) || (entry.count != 1)) {
      ALOGE("%s: Invalid ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS!", __FUNCTION__);
      return BAD_VALUE;
    }

    sensor_chars->max_input_streams = entry.data.i32[0];
  }

  ret = metadata->Get(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &entry);
  if ((ret == OK) && (entry.count == 1)) {
    if (entry.data.u8[0] == 0) {
      ALOGE("%s: Maximum request pipeline must have a non zero value!",
            __FUNCTION__);
      return BAD_VALUE;
    }
    sensor_chars->max_pipeline_depth = entry.data.u8[0];
  } else {
    ALOGE("%s: Maximum request pipeline depth absent!", __FUNCTION__);
    return BAD_VALUE;
  }

  ret = metadata->Get(ANDROID_SENSOR_ORIENTATION, &entry);
  if ((ret == OK) && (entry.count == 1)) {
    sensor_chars->orientation = entry.data.i32[0];
  } else {
    ALOGE("%s: Sensor orientation absent!", __FUNCTION__);
    return BAD_VALUE;
  }

  ret = metadata->Get(ANDROID_LENS_FACING, &entry);
  if ((ret == OK) && (entry.count == 1)) {
    sensor_chars->is_front_facing = false;
    if (ANDROID_LENS_FACING_FRONT == entry.data.u8[0]) {
      sensor_chars->is_front_facing = true;
    }
  } else {
    ALOGE("%s: Lens facing absent!", __FUNCTION__);
    return BAD_VALUE;
  }

  if (HasCapability(metadata,
                    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE)) {
    sensor_chars->support_stream_use_case = true;
    sensor_chars->end_valid_stream_use_case = GetLastStreamUseCase(metadata);
  } else {
    sensor_chars->support_stream_use_case = false;
  }

  return ret;
}

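// Deep-copies a physical device map; each entry's metadata is cloned so the
// returned map owns independent HalCameraMetadata instances.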
PhysicalDeviceMapPtr ClonePhysicalDeviceMap(const PhysicalDeviceMapPtr& src) {
  auto ret = std::make_unique<PhysicalDeviceMap>();
  for (const auto& it : *src) {
    ret->emplace(it.first, std::make_pair(it.second.first,
        HalCameraMetadata::Clone(it.second.second.get())));
  }
  return ret;
}

}  // namespace android