/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "GCH_HalUtils"
#include "hal_utils.h"

#include <cutils/properties.h>
#include <inttypes.h>
#include <log/log.h>

#include <string>

#include "vendor_tag_defs.h"

namespace android {
namespace google_camera_hal {
namespace hal_utils {

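// Initializes a single HWL pipeline request from a capture request, cloning
// the request settings and the per-input-buffer metadata so the HWL owns its
// own copies.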
status_t CreateHwlPipelineRequest(HwlPipelineRequest* hwl_request,
                                  uint32_t pipeline_id,
                                  const CaptureRequest& request) {
  if (hwl_request == nullptr) {
    ALOGE("%s: hwl_request is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  hwl_request->pipeline_id = pipeline_id;
  hwl_request->settings = HalCameraMetadata::Clone(request.settings.get());
  hwl_request->input_buffers = request.input_buffers;
  hwl_request->output_buffers = request.output_buffers;

  for (auto& metadata : request.input_buffer_metadata) {
    hwl_request->input_buffer_metadata.push_back(
        HalCameraMetadata::Clone(metadata.get()));
  }

  return OK;
}

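// Builds one HWL pipeline request per process block request. pipeline_ids and
// requests must have the same size; pipeline_ids[i] is paired with requests[i].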
status_t CreateHwlPipelineRequests(
    std::vector<HwlPipelineRequest>* hwl_requests,
    const std::vector<uint32_t>& pipeline_ids,
    const std::vector<ProcessBlockRequest>& requests) {
  if (hwl_requests == nullptr) {
    ALOGE("%s: hwl_requests is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  if (pipeline_ids.size() != requests.size()) {
    ALOGE("%s: There are %zu pipeline IDs but %zu requests", __FUNCTION__,
          pipeline_ids.size(), requests.size());
    return BAD_VALUE;
  }

  status_t res;
  for (size_t i = 0; i < pipeline_ids.size(); i++) {
    HwlPipelineRequest hwl_request;
    res = CreateHwlPipelineRequest(&hwl_request, pipeline_ids[i],
                                   requests[i].request);
    if (res != OK) {
      ALOGE("%s: Creating a HWL pipeline request failed: %s(%d)", __FUNCTION__,
            strerror(-res), res);
      return res;
    }

    hwl_requests->push_back(std::move(hwl_request));
  }

  return OK;
}

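// Converts a HWL pipeline result into a framework-facing CaptureResult, moving
// the result metadata and buffers and cloning each physical camera's metadata.
// Returns nullptr if hwl_result is null.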
std::unique_ptr<CaptureResult> ConvertToCaptureResult(
    std::unique_ptr<HwlPipelineResult> hwl_result) {
  if (hwl_result == nullptr) {
    ALOGE("%s: hwl_result is nullptr", __FUNCTION__);
    return nullptr;
  }

  auto capture_result = std::make_unique<CaptureResult>();
  if (capture_result == nullptr) {
    ALOGE("%s: Creating capture_result failed.", __FUNCTION__);
    return nullptr;
  }

  capture_result->frame_number = hwl_result->frame_number;
  capture_result->result_metadata = std::move(hwl_result->result_metadata);
  capture_result->output_buffers = std::move(hwl_result->output_buffers);
  capture_result->input_buffers = std::move(hwl_result->input_buffers);
  capture_result->partial_result = hwl_result->partial_result;

  capture_result->physical_metadata.reserve(
      hwl_result->physical_camera_results.size());
  for (const auto& [camera_id, metadata] : hwl_result->physical_camera_results) {
    capture_result->physical_metadata.push_back(PhysicalCameraMetadata(
        {camera_id, HalCameraMetadata::Clone(metadata.get())}));
  }

  return capture_result;
}

bool ContainsOutputBuffer(const CaptureRequest& request,
                          const buffer_handle_t& buffer) {
  for (auto& request_buffer : request.output_buffers) {
    if (request_buffer.buffer == buffer) {
      return true;
    }
  }

  return false;
}

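// Returns true if every output buffer of remaining_session_request appears in
// at least one of the given process block requests; otherwise logs the first
// missing buffer and returns false.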
bool AreAllRemainingBuffersRequested(
    const std::vector<ProcessBlockRequest>& process_block_requests,
    const CaptureRequest& remaining_session_request) {
  for (auto& buffer : remaining_session_request.output_buffers) {
    bool found = false;

    for (auto& block_request : process_block_requests) {
      if (ContainsOutputBuffer(block_request.request, buffer.buffer)) {
        found = true;
        break;
      }
    }

    if (!found) {
      ALOGE("%s: A buffer %" PRIu64 " of stream %d is not requested.",
            __FUNCTION__, buffer.buffer_id, buffer.stream_id);
      return false;
    }
  }

  return true;
}

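// Reads ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT from the static
// characteristics. Used below to classify a camera as IR, mono, or Bayer.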
static status_t GetColorFilterArrangement(
    const HalCameraMetadata* characteristics, uint8_t* cfa) {
  if (characteristics == nullptr || cfa == nullptr) {
    ALOGE("%s: characteristics (%p) or cfa (%p) is nullptr", __FUNCTION__,
          characteristics, cfa);
    return BAD_VALUE;
  }

  camera_metadata_ro_entry entry;
  status_t res = characteristics->Get(
      ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, &entry);
  if (res != OK || entry.count != 1) {
    ALOGE("%s: Getting COLOR_FILTER_ARRANGEMENT failed: %s(%d) count: %zu",
          __FUNCTION__, strerror(-res), res, entry.count);
    return res;
  }

  *cfa = entry.data.u8[0];
  return OK;
}

bool IsIrCamera(const HalCameraMetadata* characteristics) {
  uint8_t cfa;
  status_t res = GetColorFilterArrangement(characteristics, &cfa);
  if (res != OK) {
    ALOGE("%s: Getting color filter arrangement failed: %s(%d)", __FUNCTION__,
          strerror(-res), res);
    return false;
  }

  return cfa == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR;
}

bool IsMonoCamera(const HalCameraMetadata* characteristics) {
  uint8_t cfa;
  status_t res = GetColorFilterArrangement(characteristics, &cfa);
  if (res != OK) {
    ALOGE("%s: Getting color filter arrangement failed: %s(%d)", __FUNCTION__,
          strerror(-res), res);
    return false;
  }

  return cfa == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO;
}

bool IsBayerCamera(const HalCameraMetadata* characteristics) {
  uint8_t cfa;
  status_t res = GetColorFilterArrangement(characteristics, &cfa);
  if (res != OK) {
    ALOGE("%s: Getting color filter arrangement failed: %s(%d)", __FUNCTION__,
          strerror(-res), res);
    return false;
  }

  if (cfa == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB ||
      cfa == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG ||
      cfa == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG ||
      cfa == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR) {
    return true;
  }

  return false;
}

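// A camera is treated as fixed-focus when its reported
// ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE is 0.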
bool IsFixedFocusCamera(const HalCameraMetadata* characteristics) {
  if (characteristics == nullptr) {
    ALOGE("%s: characteristics (%p) is nullptr", __FUNCTION__, characteristics);
    return false;
  }

  camera_metadata_ro_entry entry = {};
  status_t res =
      characteristics->Get(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, &entry);
  if (res != OK || entry.count != 1) {
    ALOGE("%s: Getting ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE failed: %s(%d)",
          __FUNCTION__, strerror(-res), res);
    return false;
  }

  return entry.data.f[0] == 0.0f;
}

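// Returns true if the capture request is eligible for HDR+: the capture intent
// must be STILL_CAPTURE with ZSL enabled, the post-processing controls (noise
// reduction, edge, aberration correction, tonemap) must be HIGH_QUALITY, 3A
// must be in auto modes with flash off and no pending AF trigger, and the
// request must target more than just the preview stream.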
bool IsRequestHdrplusCompatible(const CaptureRequest& request,
                                int32_t preview_stream_id) {
  if (request.settings == nullptr) {
    return false;
  }

  camera_metadata_ro_entry entry;
  if (request.settings->Get(ANDROID_CONTROL_CAPTURE_INTENT, &entry) != OK ||
      *entry.data.u8 != ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
    ALOGV("%s: ANDROID_CONTROL_CAPTURE_INTENT is not STILL_CAPTURE",
          __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_CONTROL_ENABLE_ZSL, &entry) != OK ||
      *entry.data.u8 != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
    ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true", __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_NOISE_REDUCTION_MODE, &entry) != OK ||
      *entry.data.u8 != ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
    ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ", __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_EDGE_MODE, &entry) != OK ||
      *entry.data.u8 != ANDROID_EDGE_MODE_HIGH_QUALITY) {
    ALOGV("%s: ANDROID_EDGE_MODE is not HQ", __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &entry) !=
          OK ||
      *entry.data.u8 != ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
    ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ",
          __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_CONTROL_AE_MODE, &entry) != OK ||
      (*entry.data.u8 != ANDROID_CONTROL_AE_MODE_ON &&
       *entry.data.u8 != ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
    ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH",
          __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_CONTROL_AWB_MODE, &entry) != OK ||
      *entry.data.u8 != ANDROID_CONTROL_AWB_MODE_AUTO) {
    ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO", __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_CONTROL_EFFECT_MODE, &entry) != OK ||
      *entry.data.u8 != ANDROID_CONTROL_EFFECT_MODE_OFF) {
    ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF", __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_CONTROL_MODE, &entry) != OK ||
      (*entry.data.u8 != ANDROID_CONTROL_MODE_AUTO &&
       *entry.data.u8 != ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
    ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE",
          __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_FLASH_MODE, &entry) != OK ||
      *entry.data.u8 != ANDROID_FLASH_MODE_OFF) {
    ALOGV("%s: ANDROID_FLASH_MODE is not OFF", __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_TONEMAP_MODE, &entry) != OK ||
      *entry.data.u8 != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
    ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ", __FUNCTION__);
    return false;
  }

  // For b/129798167 - AOSP camera AP can't trigger the snapshot
  if (request.settings->Get(ANDROID_CONTROL_AF_TRIGGER, &entry) != OK ||
      *entry.data.u8 != ANDROID_CONTROL_AF_TRIGGER_IDLE) {
    ALOGI("%s: (%d)ANDROID_CONTROL_AF_TRIGGER is not IDLE", __FUNCTION__,
          request.frame_number);
    return false;
  }

  // For b/130768200, treat the request as a non-HDR+ request
  // if it only requests one preview frame output.
  if (preview_stream_id != -1 && request.output_buffers.size() == 1 &&
      request.output_buffers[0].stream_id == preview_stream_id) {
    ALOGI("%s: (%d)Only request preview frame", __FUNCTION__,
          request.frame_number);
    return false;
  }

  return true;
}

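// Returns true if the stream configuration is eligible for HDR+. HDR+ needs a
// normal-mode session that includes a preview stream plus at least one JPEG or
// YUV snapshot stream, no depth/video/raw streams, no mix of logical and
// physical streams, and an AE target FPS range within the HDR+ sensor limit.
// Several system properties and the kHdrPlusDisabled session parameter can
// also disable it.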
bool IsStreamHdrplusCompatible(const StreamConfiguration& stream_config,
                               const HalCameraMetadata* characteristics) {
  static const uint32_t kHdrplusSensorMaxFps = 30;
  if (characteristics == nullptr) {
    ALOGE("%s: characteristics is nullptr", __FUNCTION__);
    return false;
  }

  if (property_get_bool("persist.camera.hdrplus.disable", false)) {
    ALOGI("%s: HDR+ is disabled by property", __FUNCTION__);
    return false;
  }

  camera_metadata_ro_entry entry;
  status_t res =
      characteristics->Get(VendorTagIds::kHdrplusPayloadFrames, &entry);
  if (res != OK || entry.data.i32[0] <= 0) {
    ALOGW("%s: Getting kHdrplusPayloadFrames failed or number <= 0",
          __FUNCTION__);
    return false;
  }

  if (stream_config.operation_mode != StreamConfigurationMode::kNormal) {
    ALOGI("%s: Only support normal mode. operation_mode = %d", __FUNCTION__,
          stream_config.operation_mode);
    return false;
  }

  if (property_get_bool("persist.camera.fatp.enable", false)) {
    ALOGI("%s: Do not use HDR+ for FATP mode", __FUNCTION__);
    return false;
  }

  if (stream_config.session_params != nullptr &&
      stream_config.session_params->Get(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                        &entry) == OK) {
    uint32_t max_fps = entry.data.i32[1];
    if (max_fps > kHdrplusSensorMaxFps) {
      ALOGI("%s: the fps (%d) is over HDR+ support.", __FUNCTION__, max_fps);
      return false;
    }
  }

  if (stream_config.session_params != nullptr) {
    camera_metadata_ro_entry entry;
    status_t result = stream_config.session_params->Get(
        VendorTagIds::kHdrPlusDisabled, &entry);

    if ((result == OK) && (entry.data.u8[0] == 1)) {
      ALOGI("%s: request.disable_hdrplus true", __FUNCTION__);
      return false;
    }
  }

  bool preview_stream = false;
  bool jpeg_stream = false;
  bool has_logical_stream = false;
  bool has_physical_stream = false;
  uint32_t yuv_num = 0;
  uint32_t last_physical_cam_id = 0;

  for (auto stream : stream_config.streams) {
    if (utils::IsPreviewStream(stream)) {
      preview_stream = true;
    } else if (utils::IsJPEGSnapshotStream(stream)) {
      jpeg_stream = true;
    } else if (utils::IsDepthStream(stream)) {
      ALOGI("%s: Don't support depth stream", __FUNCTION__);
      return false;
    } else if (utils::IsVideoStream(stream)) {
      ALOGI("%s: Don't support video stream", __FUNCTION__);
      return false;
    } else if (utils::IsArbitraryDataSpaceRawStream(stream)) {
      ALOGI("%s: Don't support raw stream", __FUNCTION__);
      return false;
    } else if (utils::IsYUVSnapshotStream(stream)) {
      yuv_num++;
    } else {
      ALOGE("%s: Unknown stream type %d, res %ux%u, format %d, usage %" PRIu64,
            __FUNCTION__, stream.stream_type, stream.width, stream.height,
            stream.format, stream.usage);
      return false;
    }

    if (stream.is_physical_camera_stream) {
      if (has_physical_stream &&
          stream.physical_camera_id != last_physical_cam_id) {
        // b/137721824, we don't support HDR+ if stream configuration contains
        // different physical camera id streams.
        ALOGI("%s: Don't support different physical camera id streams",
              __FUNCTION__);
        return false;
      }
      has_physical_stream = true;
      last_physical_cam_id = stream.physical_camera_id;
    } else {
      has_logical_stream = true;
    }
  }

  // Only preview is configured.
  if (preview_stream == true && jpeg_stream == false && yuv_num == 0) {
    ALOGI("%s: Only preview is configured.", __FUNCTION__);
    return false;
  }

  // No preview is configured.
  if (preview_stream == false) {
    ALOGI("%s: no preview is configured.", __FUNCTION__);
    return false;
  }

  // b/137721824, we don't support HDR+ if stream configuration contains
  // logical and physical streams.
  if (has_logical_stream == true && has_physical_stream == true) {
    ALOGI("%s: Don't support logical and physical combination", __FUNCTION__);
    return false;
  }

  // TODO(b/128633958): remove this after depth block is in place
  if (property_get_bool("persist.camera.rgbird.forceinternal", false)) {
    return false;
  }

  return true;
}

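// Writes ANDROID_CONTROL_ENABLE_ZSL into the given metadata, overwriting any
// existing value.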
status_t SetEnableZslMetadata(HalCameraMetadata* metadata, bool enable) {
  if (metadata == nullptr) {
    ALOGE("%s: metadata is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  uint8_t enable_zsl = enable ? 1 : 0;
  status_t res = metadata->Set(ANDROID_CONTROL_ENABLE_ZSL, &enable_zsl, 1);
  if (res != OK) {
    ALOGE("%s: Setting ANDROID_CONTROL_ENABLE_ZSL to %d failed", __FUNCTION__,
          enable_zsl);
    return res;
  }

  return OK;
}

status_t SetHybridAeMetadata(HalCameraMetadata* metadata, bool enable) {
  if (metadata == nullptr) {
    ALOGE("%s: metadata is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  status_t res;
  int32_t enable_hybrid_ae = enable ? 1 : 0;
  res = metadata->Set(VendorTagIds::kHybridAeEnabled, &enable_hybrid_ae,
                      /*data_count=*/1);
  if (res != OK) {
    ALOGE("%s: enable_hybrid_ae(%d) fail", __FUNCTION__, enable_hybrid_ae);
    return res;
  }

  return OK;
}

status_t ForceLensShadingMapModeOn(HalCameraMetadata* metadata) {
  if (metadata == nullptr) {
    ALOGE("%s: metadata is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  camera_metadata_ro_entry entry;
  if (metadata->Get(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &entry) == OK &&
      *entry.data.u8 == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
    // Force enabling LENS_SHADING_MAP_MODE_ON.
    uint8_t mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
    status_t result =
        metadata->Set(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &mode, 1);
    if (result != OK) {
      ALOGE("%s: Set LENS_SHADING_MAP_MODE on fail", __FUNCTION__);
      return result;
    }
  }

  return OK;
}

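// Adjusts a realtime request so its results can be consumed by HDR+: sets the
// hybrid AE vendor tag, bumps face detect mode from OFF to SIMPLE, and forces
// lens shading map mode to ON.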
status_t ModifyRealtimeRequestForHdrplus(HalCameraMetadata* metadata,
                                         const bool hybrid_ae_enable) {
  if (metadata == nullptr) {
    ALOGE("%s: metadata is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  // Update hybrid AE
  status_t result = SetHybridAeMetadata(metadata, hybrid_ae_enable);
  if (result != OK) {
    ALOGE("%s: SetHybridAeMetadata fail", __FUNCTION__);
    return result;
  }

  // Update FD mode
  camera_metadata_ro_entry entry;
  if (metadata->Get(ANDROID_STATISTICS_FACE_DETECT_MODE, &entry) == OK &&
      *entry.data.u8 == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
    // Force enabling face detect mode to simple.
    uint8_t mode = ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE;
    result = metadata->Set(ANDROID_STATISTICS_FACE_DETECT_MODE, &mode, 1);
    if (result != OK) {
      ALOGE("%s: update FD simple mode fail", __FUNCTION__);
      return result;
    }
  }

  // Force lens shading mode to on
  result = ForceLensShadingMapModeOn(metadata);
  if (result != OK) {
    ALOGE("%s: ForceLensShadingMapModeOn fail", __FUNCTION__);
    return result;
  }

  return OK;
}

status_t GetLensShadingMapMode(const CaptureRequest& request,
                               uint8_t* lens_shading_mode) {
  if (request.settings == nullptr || lens_shading_mode == nullptr) {
    ALOGE("%s: request.settings or lens_shading_mode is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  camera_metadata_ro_entry entry;
  status_t result =
      request.settings->Get(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &entry);
  if (result != OK) {
    ALOGV("%s: Get LENS_SHADING_MAP_MODE fail", __FUNCTION__);
    return result;
  }
  *lens_shading_mode = *entry.data.u8;

  return OK;
}

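// Strips lens shading information from a result: sets
// ANDROID_STATISTICS_LENS_SHADING_MAP_MODE back to OFF (if present) and erases
// the lens shading map entry.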
status_t RemoveLsInfoFromResult(HalCameraMetadata* metadata) {
  if (metadata == nullptr) {
    ALOGE("%s: metadata is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  camera_metadata_ro_entry entry;
  status_t res;
  if (metadata->Get(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &entry) == OK) {
    // Change lens shading map mode to OFF.
    uint8_t mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    res = metadata->Set(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &mode, 1);
    if (res != OK) {
      ALOGE("%s: Set LENS_SHADING_MAP_MODE off fail", __FUNCTION__);
      return res;
    }
  }

  // Erase lens shading map.
  res = metadata->Erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
  if (res != OK) {
    ALOGE("%s: erase LENS_SHADING_MAP fail", __FUNCTION__);
    return res;
  }

  return OK;
}

status_t GetFdMode(const CaptureRequest& request, uint8_t* face_detect_mode) {
  if (request.settings == nullptr || face_detect_mode == nullptr) {
    ALOGE("%s: request.settings or face_detect_mode is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  camera_metadata_ro_entry entry;
  status_t result =
      request.settings->Get(ANDROID_STATISTICS_FACE_DETECT_MODE, &entry);
  if (result != OK) {
    ALOGV("%s: Get FACE_DETECT_MODE fail", __FUNCTION__);
    return result;
  }
  *face_detect_mode = *entry.data.u8;

  return OK;
}

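// Strips face detection information from a result: sets
// ANDROID_STATISTICS_FACE_DETECT_MODE back to OFF (if present) and erases the
// face rectangles and face scores entries.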
status_t RemoveFdInfoFromResult(HalCameraMetadata* metadata) {
  if (metadata == nullptr) {
    ALOGE("%s: metadata is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  camera_metadata_ro_entry entry;
  status_t res;
  if (metadata->Get(ANDROID_STATISTICS_FACE_DETECT_MODE, &entry) == OK) {
    uint8_t mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    res = metadata->Set(ANDROID_STATISTICS_FACE_DETECT_MODE, &mode, 1);
    if (res != OK) {
      ALOGE("%s: update FD off mode fail", __FUNCTION__);
      return res;
    }
  }

  res = metadata->Erase(ANDROID_STATISTICS_FACE_RECTANGLES);
  if (res != OK) {
    ALOGE("%s: erase face rectangles fail", __FUNCTION__);
    return res;
  }

  res = metadata->Erase(ANDROID_STATISTICS_FACE_SCORES);
  if (res != OK) {
    ALOGE("%s: erase face scores fail", __FUNCTION__);
    return res;
  }

  return OK;
}

void DumpStreamConfiguration(const StreamConfiguration& stream_configuration,
                             std::string title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== stream num: %zu, operation_mode:%d",
        stream_configuration.streams.size(),
        stream_configuration.operation_mode);
  for (uint32_t i = 0; i < stream_configuration.streams.size(); i++) {
    auto& stream = stream_configuration.streams[i];
    ALOGI("==== [%u]stream_id %d, format %d, res %ux%u, usage %" PRIu64
          ", is_phy %d, phy_cam_id %u",
          i, stream.id, stream.format, stream.width, stream.height,
          stream.usage, stream.is_physical_camera_stream,
          stream.physical_camera_id);
  }
  ALOGI("%s", str.c_str());
}

void DumpHalConfiguredStreams(
    const std::vector<HalStream>& hal_configured_streams, std::string title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== stream num: %zu", hal_configured_streams.size());
  for (uint32_t i = 0; i < hal_configured_streams.size(); i++) {
    auto& stream = hal_configured_streams[i];
    ALOGI("==== [%u]stream_id:%5d override_format:%8x p_usage:%" PRIu64
          " c_usage:%" PRIu64 " max_buf:%u is_phy:%d",
          i, stream.id, stream.override_format, stream.producer_usage,
          stream.consumer_usage, stream.max_buffers,
          stream.is_physical_camera_stream);
  }
  ALOGI("%s", str.c_str());
}

void DumpCaptureRequest(const CaptureRequest& request, std::string title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== frame_number:%u", request.frame_number);
  ALOGI("== settings:%p", request.settings.get());
  ALOGI("== num_output_buffers:%zu", request.output_buffers.size());
  for (uint32_t i = 0; i < request.output_buffers.size(); i++) {
    ALOGI("==== buf[%d] stream_id:%d buf:%p", i,
          request.output_buffers[i].stream_id, request.output_buffers[i].buffer);
  }
  ALOGI("== num_input_buffers:%zu", request.input_buffers.size());
  for (uint32_t i = 0; i < request.input_buffers.size(); i++) {
    ALOGI("==== buf[%d] stream_id:%d buf:%p", i,
          request.input_buffers[i].stream_id, request.input_buffers[i].buffer);
  }
  ALOGI("%s", str.c_str());
}

void DumpCaptureResult(const ProcessBlockResult& result, std::string title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== frame_number:%u", result.result->frame_number);
  ALOGI("== num_output_buffers:%zu", result.result->output_buffers.size());
  for (uint32_t i = 0; i < result.result->output_buffers.size(); i++) {
    ALOGI("==== buf[%d] stream_id:%d buf_id:%" PRIu64 " handle: %p status: %d",
          i, result.result->output_buffers[i].stream_id,
          result.result->output_buffers[i].buffer_id,
          result.result->output_buffers[i].buffer,
          result.result->output_buffers[i].status);
  }
  ALOGI("== has_metadata:%d", result.result->result_metadata != nullptr);
  ALOGI("== request_id:%d", result.request_id);
  ALOGI("%s", str.c_str());
}

void DumpCaptureResult(const CaptureResult& result, std::string title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== frame_number:%u", result.frame_number);
  ALOGI("== num_output_buffers:%zu", result.output_buffers.size());
  for (uint32_t i = 0; i < result.output_buffers.size(); i++) {
    ALOGI("==== buf[%d] stream_id:%d buf_id:%" PRIu64 " handle: %p status: %d",
          i, result.output_buffers[i].stream_id,
          result.output_buffers[i].buffer_id, result.output_buffers[i].buffer,
          result.output_buffers[i].status);
  }
  ALOGI("== has_metadata:%d", result.result_metadata != nullptr);
  ALOGI("%s", str.c_str());
}

void DumpNotify(const NotifyMessage& message, std::string title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  if (message.type == MessageType::kShutter) {
    ALOGI("== frame_number:%u", message.message.shutter.frame_number);
    ALOGI("== time_stamp:%" PRIu64, message.message.shutter.timestamp_ns);
  } else if (message.type == MessageType::kError) {
    ALOGI("== frame_number:%u", message.message.error.frame_number);
    ALOGI("== error_code:%u", message.message.error.error_code);
  }
  ALOGI("%s", str.c_str());
}

void DumpStream(const Stream& stream, std::string title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== stream_id %d, format %d, res %ux%u, usage %" PRIu64
        ", is_phy %d, phy_cam_id %u",
        stream.id, stream.format, stream.width, stream.height, stream.usage,
        stream.is_physical_camera_stream, stream.physical_camera_id);
  ALOGI("%s", str.c_str());
}

// Dump HalStream
void DumpHalStream(const HalStream& hal_stream, std::string title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== id %d, override_format %d, producer_usage %" PRIu64 ", %" PRIu64
        ", max_buffers %u, override_data_space %u, is_phy %u, phy_cam_id %d",
        hal_stream.id, hal_stream.override_format, hal_stream.producer_usage,
        hal_stream.consumer_usage, hal_stream.max_buffers,
        hal_stream.override_data_space, hal_stream.is_physical_camera_stream,
        hal_stream.physical_camera_id);
  ALOGI("%s", str.c_str());
}

void DumpBufferReturn(const std::vector<StreamBuffer>& stream_buffers,
                      std::string title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  for (auto stream_buffer : stream_buffers) {
    ALOGI("== Strm id:%d, buf id:%" PRIu64, stream_buffer.stream_id,
          stream_buffer.buffer_id);
  }
  ALOGI("%s", str.c_str());
}

void DumpBufferRequest(const std::vector<BufferRequest>& hal_buffer_requests,
                       const std::vector<BufferReturn>* hal_buffer_returns,
                       std::string title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  for (const auto& buffer_request : hal_buffer_requests) {
    ALOGI("== Strm id:%d", buffer_request.stream_id);
  }
  ALOGI("===");
  for (const auto& buffer_return : *hal_buffer_returns) {
    for (const auto& stream_buffer : buffer_return.val.buffers) {
      ALOGI("== buf id:%" PRIu64 " stm id:%d buf:%p", stream_buffer.buffer_id,
            stream_buffer.stream_id, stream_buffer.buffer);
    }
  }
  ALOGI("%s", str.c_str());
}

}  // namespace hal_utils
}  // namespace google_camera_hal
}  // namespace android