/*
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dmetadata_processor.h"

#include "dbuffer_manager.h"
#include "dcamera.h"
#include "distributed_hardware_log.h"
#include "json/json.h"
#include "metadata_utils.h"

namespace OHOS {
namespace DistributedHardware {
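/*
 * Build the distributed camera ability metadata from the sink/source ability JSON strings.
 * The sink ability JSON parsed below is expected to look roughly like this illustrative sketch
 * (the key names come from the parsing code; the concrete values are examples only):
 * {
 *     "ProtocolVer": "1.0",
 *     "Position": "BACK",
 *     "MetaData": "<base64-encoded camera metadata blob>",
 *     "Photo":   { "OutputFormat": [4],    "Resolution": { "4": ["4160*3120", "1920*1080"] } },
 *     "Preview": { "OutputFormat": [2, 3], "Resolution": { "2": ["1920*1080", "1280*720"] } },
 *     "Video":   { "OutputFormat": [2, 3], "Resolution": { "3": ["1920*1080"] } }
 * }
 */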
DCamRetCode DMetadataProcessor::InitDCameraAbility(const std::string &sinkAbilityInfo,
    const std::string &sourceAbilityInfo)
{
    JSONCPP_STRING errs;
    Json::CharReaderBuilder readerBuilder;
    Json::Value rootValue;

    std::unique_ptr<Json::CharReader> const jsonReader(readerBuilder.newCharReader());
    if (jsonReader->parse(sinkAbilityInfo.c_str(), sinkAbilityInfo.c_str() + sinkAbilityInfo.length(),
        &rootValue, &errs) && rootValue.isObject()) {
        if (rootValue.isMember("MetaData") && rootValue["MetaData"].isString()) {
            std::string metadataStr = rootValue["MetaData"].asString();
            if (!metadataStr.empty()) {
                std::hash<std::string> h;
                DHLOGI("Decode distributed camera metadata from base64, hash: %zu, length: %zu",
                    h(metadataStr), metadataStr.length());
                std::string decodeString = Base64Decode(metadataStr);
                DHLOGI("Decode distributed camera metadata from string, hash: %zu, length: %zu",
                    h(decodeString), decodeString.length());
                dCameraAbility_ = OHOS::Camera::MetadataUtils::DecodeFromString(decodeString);
                DHLOGI("Decode distributed camera metadata from string success.");
            }
        }
    }

    if (dCameraAbility_ == nullptr) {
        DHLOGE("Metadata is null in ability set or failed to decode metadata ability from string.");
        dCameraAbility_ = std::make_shared<CameraAbility>(DEFAULT_ENTRY_CAPACITY, DEFAULT_DATA_CAPACITY);
    }

    if (OHOS::Camera::GetCameraMetadataItemCount(dCameraAbility_->get()) <= 0) {
        DCamRetCode ret = InitDCameraDefaultAbilityKeys(sinkAbilityInfo);
        if (ret != SUCCESS) {
            DHLOGE("Init distributed camera default ability keys failed.");
            dCameraAbility_ = nullptr;
            return ret;
        }
    }

    DCamRetCode ret = InitDCameraOutputAbilityKeys(sinkAbilityInfo, sourceAbilityInfo);
    if (ret != SUCCESS) {
        DHLOGE("Init distributed camera output ability keys failed.");
        dCameraAbility_ = nullptr;
        return ret;
    }

    camera_metadata_item_entry_t* itemEntry = OHOS::Camera::GetMetadataItems(dCameraAbility_->get());
    uint32_t count = dCameraAbility_->get()->item_count;
    for (uint32_t i = 0; i < count; i++, itemEntry++) {
        allResultSet_.insert((MetaType)(itemEntry->item));
    }
    return SUCCESS;
}

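// Populate baseline ability entries (camera type, 3A modes, face detect, histogram, compensation
// ranges and default FPS ranges) with conservative defaults when no decoded metadata supplies them.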
void DMetadataProcessor::InitDcameraBaseAbility()
{
    const uint8_t cameraType = OHOS_CAMERA_TYPE_LOGICAL;
    AddAbilityEntry(OHOS_ABILITY_CAMERA_TYPE, &cameraType, 1);

    const int64_t exposureTime = 0xFFFFFFFFFFFFFFFF;
    AddAbilityEntry(OHOS_SENSOR_EXPOSURE_TIME, &exposureTime, 1);

    const float correctionGain = 0.0;
    AddAbilityEntry(OHOS_SENSOR_COLOR_CORRECTION_GAINS, &correctionGain, 1);

    const uint8_t faceDetectMode = OHOS_CAMERA_FACE_DETECT_MODE_OFF;
    AddAbilityEntry(OHOS_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

    const uint8_t histogramMode = OHOS_CAMERA_HISTOGRAM_MODE_OFF;
    AddAbilityEntry(OHOS_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);

    const uint8_t aeAntibandingMode = OHOS_CAMERA_AE_ANTIBANDING_MODE_OFF;
    AddAbilityEntry(OHOS_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);

    int32_t aeExposureCompensation = 0xFFFFFFFF;
    AddAbilityEntry(OHOS_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExposureCompensation, 1);

    const uint8_t aeLock = OHOS_CAMERA_AE_LOCK_OFF;
    AddAbilityEntry(OHOS_CONTROL_AE_LOCK, &aeLock, 1);

    const uint8_t aeMode = OHOS_CAMERA_AE_MODE_OFF;
    AddAbilityEntry(OHOS_CONTROL_AE_MODE, &aeMode, 1);

    const uint8_t afMode = OHOS_CAMERA_AF_MODE_OFF;
    AddAbilityEntry(OHOS_CONTROL_AF_MODE, &afMode, 1);

    const uint8_t awbLock = OHOS_CAMERA_AWB_LOCK_OFF;
    AddAbilityEntry(OHOS_CONTROL_AWB_LOCK, &awbLock, 1);

    const uint8_t awbMode = OHOS_CAMERA_AWB_MODE_OFF;
    AddAbilityEntry(OHOS_CONTROL_AWB_MODE, &awbMode, 1);

    const uint8_t aeAntibandingModes = OHOS_CAMERA_AE_ANTIBANDING_MODE_AUTO;
    AddAbilityEntry(OHOS_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, &aeAntibandingModes, 1);

    const uint8_t aeAvailableModes = OHOS_CAMERA_AE_MODE_ON;
    AddAbilityEntry(OHOS_CONTROL_AE_AVAILABLE_MODES, &aeAvailableModes, 1);

    const int32_t compensationRange[] = { 0, 0 };
    AddAbilityEntry(OHOS_CONTROL_AE_COMPENSATION_RANGE, compensationRange,
        (sizeof(compensationRange) / sizeof(compensationRange[0])));

    const camera_rational_t compensationStep[] = { { 0, 1 } };
    AddAbilityEntry(OHOS_CONTROL_AE_COMPENSATION_STEP, compensationStep,
        (sizeof(compensationStep) / sizeof(compensationStep[0])));

    const uint8_t afAvailableModes[] = { OHOS_CAMERA_AF_MODE_AUTO, OHOS_CAMERA_AF_MODE_OFF };
    AddAbilityEntry(OHOS_CONTROL_AF_AVAILABLE_MODES, afAvailableModes,
        (sizeof(afAvailableModes) / sizeof(afAvailableModes[0])));

    const uint8_t awbAvailableModes = OHOS_CAMERA_AWB_MODE_AUTO;
    AddAbilityEntry(OHOS_CONTROL_AWB_AVAILABLE_MODES, &awbAvailableModes, 1);

    const uint8_t deviceExposureMode = OHOS_CAMERA_EXPOSURE_MODE_CONTINUOUS_AUTO;
    AddAbilityEntry(OHOS_ABILITY_DEVICE_AVAILABLE_EXPOSUREMODES, &deviceExposureMode, 1);

    const uint8_t controlExposureMode = OHOS_CAMERA_EXPOSURE_MODE_CONTINUOUS_AUTO;
    AddAbilityEntry(OHOS_CONTROL_EXPOSUREMODE, &controlExposureMode, 1);

    const uint8_t deviceFocusModes = OHOS_CAMERA_FOCUS_MODE_AUTO;
    AddAbilityEntry(OHOS_ABILITY_DEVICE_AVAILABLE_FOCUSMODES, &deviceFocusModes, 1);
    SetFpsRanges();
}

void DMetadataProcessor::SetFpsRanges()
{
    std::vector<int32_t> fpsRanges;
    fpsRanges.push_back(MIN_SUPPORT_DEFAULT_FPS);
    fpsRanges.push_back(MAX_SUPPORT_DEFAULT_FPS);
    AddAbilityEntry(OHOS_CONTROL_AE_TARGET_FPS_RANGE, fpsRanges.data(), fpsRanges.size());
    AddAbilityEntry(OHOS_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(), fpsRanges.size());
}

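// Read "ProtocolVer" and "Position" from the sink ability JSON, map the position to an
// OHOS_ABILITY_CAMERA_POSITION entry, and then fill in the remaining default ability keys.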
DCamRetCode DMetadataProcessor::InitDCameraDefaultAbilityKeys(const std::string &sinkAbilityInfo)
{
    JSONCPP_STRING errs;
    Json::CharReaderBuilder readerBuilder;
    Json::Value rootValue;

    std::unique_ptr<Json::CharReader> const jsonReader(readerBuilder.newCharReader());
    if (jsonReader->parse(sinkAbilityInfo.c_str(), sinkAbilityInfo.c_str() + sinkAbilityInfo.length(), &rootValue,
        &errs) && rootValue.isObject()) {
        if (rootValue.isMember("ProtocolVer") && rootValue["ProtocolVer"].isString()) {
            protocolVersion_ = rootValue["ProtocolVer"].asString();
        }
        if (rootValue.isMember("Position") && rootValue["Position"].isString()) {
            dCameraPosition_ = rootValue["Position"].asString();
        }
    }

    if (dCameraPosition_ == "BACK") {
        const uint8_t position = OHOS_CAMERA_POSITION_BACK;
        AddAbilityEntry(OHOS_ABILITY_CAMERA_POSITION, &position, 1);
    } else if (dCameraPosition_ == "FRONT") {
        const uint8_t position = OHOS_CAMERA_POSITION_FRONT;
        AddAbilityEntry(OHOS_ABILITY_CAMERA_POSITION, &position, 1);
    } else {
        const uint8_t position = OHOS_CAMERA_POSITION_OTHER;
        AddAbilityEntry(OHOS_ABILITY_CAMERA_POSITION, &position, 1);
    }

    InitDcameraBaseAbility();

    const uint8_t controlFocusMode = OHOS_CAMERA_FOCUS_MODE_AUTO;
    AddAbilityEntry(OHOS_CONTROL_FOCUSMODE, &controlFocusMode, 1);

    const uint8_t deviceFlashModes = OHOS_CAMERA_FLASH_MODE_AUTO;
    AddAbilityEntry(OHOS_ABILITY_DEVICE_AVAILABLE_FLASHMODES, &deviceFlashModes, 1);

    const uint8_t controlFlashMode = OHOS_CAMERA_FLASH_MODE_CLOSE;
    AddAbilityEntry(OHOS_CONTROL_FLASHMODE, &controlFlashMode, 1);

    float zoomRatioRange[1] = {1.0};
    AddAbilityEntry(OHOS_ABILITY_ZOOM_RATIO_RANGE, zoomRatioRange,
        (sizeof(zoomRatioRange) / sizeof(zoomRatioRange[0])));

    const float zoomRatio = 1.0;
    AddAbilityEntry(OHOS_CONTROL_ZOOM_RATIO, &zoomRatio, 1);

    int32_t activeArraySize[] = {0, 0, static_cast<int32_t>(maxPreviewResolution_.width_),
        static_cast<int32_t>(maxPreviewResolution_.height_)};
    AddAbilityEntry(OHOS_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArraySize,
        (sizeof(activeArraySize) / sizeof(activeArraySize[0])));

    int32_t pixelArraySize[] = {
        static_cast<int32_t>(maxPreviewResolution_.width_), static_cast<int32_t>(maxPreviewResolution_.height_)
    };
    AddAbilityEntry(OHOS_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArraySize,
        (sizeof(pixelArraySize) / sizeof(pixelArraySize[0])));

    const int32_t jpegThumbnailSizes[] = {0, 0, DEGREE_240, DEGREE_180};
    AddAbilityEntry(OHOS_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegThumbnailSizes,
        (sizeof(jpegThumbnailSizes) / sizeof(jpegThumbnailSizes[0])));

    return SUCCESS;
}

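// Derive the stream configuration abilities from the sink and source profiles:
// OHOS_ABILITY_STREAM_AVAILABLE_BASIC_CONFIGURATIONS holds flat (format, width, height) triples,
// while OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS groups entries per stream type
// (preview/video/photo) with FPS information, each section terminated by EXTEND_EOF.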
DCamRetCode DMetadataProcessor::InitDCameraOutputAbilityKeys(const std::string &sinkAbilityInfo,
    const std::string &sourceAbilityInfo)
{
    std::map<int, std::vector<DCResolution>> sinkSupportedFormats = GetDCameraSupportedFormats(sinkAbilityInfo, true);
    std::map<int, std::vector<DCResolution>> srcSupportedFormats = GetDCameraSupportedFormats(sourceAbilityInfo, false);

    std::vector<int32_t> sinkStreamConfigs;
    InitBasicConfigTag(sinkSupportedFormats, srcSupportedFormats, sinkStreamConfigs,
        sinkAbilityInfo, sourceAbilityInfo);
    std::vector<int32_t> sinkExtendStreamConfigs;
    camera_metadata_item_t item;
    int ret = OHOS::Camera::FindCameraMetadataItem(dCameraAbility_->get(),
        OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS, &item);
    if (ret == CAM_META_SUCCESS && item.count != 0) {
        sinkExtendStreamConfigs.push_back(item.data.i32[0]);
    }
    InitExtendConfigTag(sinkSupportedFormats, srcSupportedFormats, sinkExtendStreamConfigs,
        sinkAbilityInfo, sourceAbilityInfo);
    sinkExtendStreamConfigs.push_back(EXTEND_EOF); // mode eof

    UpdateAbilityEntry(OHOS_ABILITY_STREAM_AVAILABLE_BASIC_CONFIGURATIONS, sinkStreamConfigs.data(),
        sinkStreamConfigs.size());

    UpdateAbilityEntry(OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS, sinkExtendStreamConfigs.data(),
        sinkExtendStreamConfigs.size());

    UpdateAbilityEntry(OHOS_SENSOR_INFO_MAX_FRAME_DURATION, &MAX_FRAME_DURATION, 1);

    const int32_t jpegMaxSize = maxPhotoResolution_.width_ * maxPhotoResolution_.height_;
    UpdateAbilityEntry(OHOS_JPEG_MAX_SIZE, &jpegMaxSize, 1);

    const uint8_t connectionType = OHOS_CAMERA_CONNECTION_TYPE_REMOTE;
    UpdateAbilityEntry(OHOS_ABILITY_CAMERA_CONNECTION_TYPE, &connectionType, 1);

    return SUCCESS;
}

void DMetadataProcessor::InitBasicConfigTag(std::map<int, std::vector<DCResolution>> &sinkSupportedFormats,
    std::map<int, std::vector<DCResolution>> &srcSupportedFormats, std::vector<int32_t> &sinkStreamConfigs,
    const std::string &sinkAbilityInfo, const std::string &sourceAbilityInfo)
{
    std::map<int, std::vector<DCResolution>>::iterator iter;
    for (iter = sinkSupportedFormats.begin(); iter != sinkSupportedFormats.end(); ++iter) {
        std::vector<DCResolution> resolutionList = iter->second;
        for (auto resolution : resolutionList) {
            DHLOGI("DMetadataProcessor::sink supported formats: { format=%d, width=%d, height=%d }", iter->first,
                resolution.width_, resolution.height_);
            sinkStreamConfigs.push_back(iter->first);
            sinkStreamConfigs.push_back(resolution.width_);
            sinkStreamConfigs.push_back(resolution.height_);
        }
    }

    if (sinkAbilityInfo != sourceAbilityInfo) {
        AddSrcConfigToSinkOfBasicTag(sinkSupportedFormats, srcSupportedFormats, sinkStreamConfigs);
    }
}

void DMetadataProcessor::AddSrcConfigToSinkOfBasicTag(std::map<int, std::vector<DCResolution>> &sinkSupportedFormats,
    std::map<int, std::vector<DCResolution>> &srcSupportedFormats, std::vector<int32_t> &sinkStreamConfigs)
{
    std::vector<int> formats;
    for (const auto &format : srcSupportedFormats) {
        if (find(sinkPhotoFormats_.begin(), sinkPhotoFormats_.end(), format.first) == sinkPhotoFormats_.end()) {
            formats.push_back(format.first);
        }
    }

    for (const auto &format : formats) {
        for (const auto &resolution : srcSupportedFormats[format]) {
            std::vector<DCResolution> resolutionList;
            if (sinkSupportedFormats.count(format) > 0) {
                resolutionList = sinkSupportedFormats[format];
            }
            if (find(resolutionList.begin(), resolutionList.end(), resolution) != resolutionList.end()) {
                continue;
            }
            sinkStreamConfigs.push_back(format);
            sinkStreamConfigs.push_back(resolution.width_);
            sinkStreamConfigs.push_back(resolution.height_);
        }
    }
}

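// Assemble the extended stream configurations. For each stream type the layout produced here is
// (a sketch, inferred from AddConfigs below):
//   EXTEND_PREVIEW | EXTEND_VIDEO | EXTEND_PHOTO,
//   { format, width, height, fixedFps, minFps, maxFps, EXTEND_EOF } ...,
//   EXTEND_EOF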
void DMetadataProcessor::InitExtendConfigTag(std::map<int, std::vector<DCResolution>> &sinkSupportedFormats,
    std::map<int, std::vector<DCResolution>> &srcSupportedFormats, std::vector<int32_t> &sinkExtendStreamConfigs,
    const std::string &sinkAbilityInfo, const std::string &sourceAbilityInfo)
{
    sinkExtendStreamConfigs.push_back(EXTEND_PREVIEW); // preview
    std::map<int, std::vector<DCResolution>>::iterator previewIter;
    for (previewIter = sinkPreviewProfiles_.begin(); previewIter != sinkPreviewProfiles_.end(); ++previewIter) {
        std::vector<DCResolution> resolutionList = previewIter->second;
        for (auto resolution : resolutionList) {
            DHLOGI("sink extend supported preview formats: { format=%d, width=%d, height=%d }",
                previewIter->first, resolution.width_, resolution.height_);
            AddConfigs(sinkExtendStreamConfigs, previewIter->first, resolution.width_, resolution.height_, PREVIEW_FPS);
        }
    }
    sinkExtendStreamConfigs.push_back(EXTEND_EOF); // preview eof

    sinkExtendStreamConfigs.push_back(EXTEND_VIDEO); // video
    std::map<int, std::vector<DCResolution>>::iterator videoIter;
    for (videoIter = sinkVideoProfiles_.begin(); videoIter != sinkVideoProfiles_.end(); ++videoIter) {
        std::vector<DCResolution> resolutionList = videoIter->second;
        for (auto resolution : resolutionList) {
            DHLOGI("sink extend supported video formats: { format=%d, width=%d, height=%d }",
                videoIter->first, resolution.width_, resolution.height_);
            AddConfigs(sinkExtendStreamConfigs, videoIter->first, resolution.width_, resolution.height_, VIDEO_FPS);
        }
    }
    sinkExtendStreamConfigs.push_back(EXTEND_EOF); // video eof

    sinkExtendStreamConfigs.push_back(EXTEND_PHOTO); // photo
    std::map<int, std::vector<DCResolution>>::iterator photoIter;
    for (photoIter = sinkPhotoProfiles_.begin(); photoIter != sinkPhotoProfiles_.end(); ++photoIter) {
        std::vector<DCResolution> resolutionList = photoIter->second;
        for (auto resolution : resolutionList) {
            DHLOGI("sink extend supported photo formats: { format=%d, width=%d, height=%d }",
                photoIter->first, resolution.width_, resolution.height_);
            AddConfigs(sinkExtendStreamConfigs, photoIter->first, resolution.width_, resolution.height_, PHOTO_FPS);
        }
    }
    sinkExtendStreamConfigs.push_back(EXTEND_EOF); // photo eof

    if (sinkAbilityInfo != sourceAbilityInfo) {
        std::vector<int32_t> formats;
        for (const auto &format : srcSupportedFormats) {
            if (find(sinkPhotoFormats_.begin(), sinkPhotoFormats_.end(), format.first) == sinkPhotoFormats_.end()) {
                formats.push_back(format.first);
            }
        }
        for (const auto &format : formats) {
            AddSrcConfigsToSinkOfExtendTag(sinkExtendStreamConfigs, format);
        }
    }
}

void DMetadataProcessor::AddSrcConfigsToSinkOfExtendTag(std::vector<int32_t> &sinkExtendStreamConfigs, int32_t format)
{
    if (srcPreviewProfiles_.count(format) > 0) {
        sinkExtendStreamConfigs.push_back(EXTEND_PREVIEW); // preview
        for (const auto &resolution : srcPreviewProfiles_[format]) {
            std::vector<DCResolution> resolutionList;
            if (sinkPreviewProfiles_.count(format) > 0) {
                resolutionList = sinkPreviewProfiles_[format];
            }
            if (find(resolutionList.begin(), resolutionList.end(), resolution) != resolutionList.end()) {
                continue;
            }
            AddConfigs(sinkExtendStreamConfigs, format, resolution.width_, resolution.height_, PREVIEW_FPS);
        }
        sinkExtendStreamConfigs.push_back(EXTEND_EOF); // preview eof
    } else if (srcVideoProfiles_.count(format) > 0) {
        sinkExtendStreamConfigs.push_back(EXTEND_VIDEO); // video
        for (const auto &resolution : srcVideoProfiles_[format]) {
            std::vector<DCResolution> resolutionList;
            if (sinkVideoProfiles_.count(format) > 0) {
                resolutionList = sinkVideoProfiles_[format];
            }
            if (find(resolutionList.begin(), resolutionList.end(), resolution) != resolutionList.end()) {
                continue;
            }
            AddConfigs(sinkExtendStreamConfigs, format, resolution.width_, resolution.height_, VIDEO_FPS);
        }
        sinkExtendStreamConfigs.push_back(EXTEND_EOF); // video eof
    }
}

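// Append one extended stream entry: format, width, height, then the same value for the fixed,
// minimum and maximum FPS, followed by an EXTEND_EOF terminator for the entry.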
void DMetadataProcessor::AddConfigs(std::vector<int32_t> &sinkExtendStreamConfigs, int32_t format,
    int32_t width, int32_t height, int32_t fps)
{
    sinkExtendStreamConfigs.push_back(format);
    sinkExtendStreamConfigs.push_back(width);
    sinkExtendStreamConfigs.push_back(height);
    sinkExtendStreamConfigs.push_back(fps); // fixed fps
    sinkExtendStreamConfigs.push_back(fps); // min fps
    sinkExtendStreamConfigs.push_back(fps); // max fps
    sinkExtendStreamConfigs.push_back(EXTEND_EOF); // eof
}

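// AddAbilityEntry only adds a tag when it is not yet present in dCameraAbility_;
// UpdateAbilityEntry (below) only rewrites a tag that already exists.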
DCamRetCode DMetadataProcessor::AddAbilityEntry(uint32_t tag, const void *data, size_t size)
{
    if (dCameraAbility_ == nullptr) {
        DHLOGE("Distributed camera ability is null.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    camera_metadata_item_t item;
    int ret = OHOS::Camera::FindCameraMetadataItem(dCameraAbility_->get(), tag, &item);
    if (ret != CAM_META_SUCCESS) {
        if (!dCameraAbility_->addEntry(tag, data, size)) {
            DHLOGE("Add tag %u failed.", tag);
            return FAILED;
        }
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::UpdateAbilityEntry(uint32_t tag, const void *data, size_t size)
{
    if (dCameraAbility_ == nullptr) {
        DHLOGE("Distributed camera ability is null.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    camera_metadata_item_t item;
    int ret = OHOS::Camera::FindCameraMetadataItem(dCameraAbility_->get(), tag, &item);
    if (ret == CAM_META_SUCCESS) {
        if (!dCameraAbility_->updateEntry(tag, data, size)) {
            DHLOGE("Update tag %u failed.", tag);
            return FAILED;
        }
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::GetDCameraAbility(std::shared_ptr<CameraAbility> &ability)
{
    ability = dCameraAbility_;
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::SetMetadataResultMode(const ResultCallbackMode &mode)
{
    if (mode < ResultCallbackMode::PER_FRAME || mode > ResultCallbackMode::ON_CHANGED) {
        DHLOGE("Invalid result callback mode.");
        return DCamRetCode::INVALID_ARGUMENT;
    }
    metaResultMode_ = mode;
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::GetEnabledMetadataResults(std::vector<MetaType> &results)
{
    auto iter = enabledResultSet_.begin();
    while (iter != enabledResultSet_.end()) {
        results.push_back(*iter);
        iter++;
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::EnableMetadataResult(const std::vector<MetaType> &results)
{
    if (results.size() == 0) {
        DHLOGE("Enable metadata result list is empty.");
        return SUCCESS;
    }

    for (size_t i = 0; i < results.size(); i++) {
        auto iter = allResultSet_.find(results[i]);
        if (iter != allResultSet_.end()) {
            auto anoIter = enabledResultSet_.find(results[i]);
            if (anoIter == enabledResultSet_.end()) {
                enabledResultSet_.insert(results[i]);
            }
        } else {
            DHLOGE("Cannot find matching metatype.");
            return SUCCESS;
        }
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::DisableMetadataResult(const std::vector<MetaType> &results)
{
    if (results.size() == 0) {
        DHLOGE("Disable metadata result list is empty.");
        return SUCCESS;
    }

    for (size_t i = 0; i < results.size(); i++) {
        auto iter = allResultSet_.find(results[i]);
        if (iter != allResultSet_.end()) {
            auto anoIter = enabledResultSet_.find(results[i]);
            if (anoIter != enabledResultSet_.end()) {
                enabledResultSet_.erase(*iter);
            }
        } else {
            DHLOGE("Cannot find matching metatype.");
            return SUCCESS;
        }
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::ResetEnableResults()
{
    if (enabledResultSet_.size() < allResultSet_.size()) {
        for (auto result : allResultSet_) {
            enabledResultSet_.insert(result);
        }
    }
    return SUCCESS;
}

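// In PER_FRAME mode, push the latest producer metadata to the result callback for every frame
// timestamp; ON_CHANGED reporting is handled in SaveResultMetadata/UpdateOnChanged instead.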
void DMetadataProcessor::UpdateResultMetadata(const uint64_t &resultTimestamp)
{
    DHLOGD("DMetadataProcessor::UpdateResultMetadata result callback mode: %d", metaResultMode_);
    if (metaResultMode_ != ResultCallbackMode::PER_FRAME) {
        return;
    }

    std::lock_guard<std::mutex> autoLock(producerMutex_);
    if (latestProducerMetadataResult_ == nullptr) {
        DHLOGD("DMetadataProcessor::UpdateResultMetadata latest producer metadata result is null");
        return;
    }

    UpdateAllResult(resultTimestamp);
}

void DMetadataProcessor::SetResultCallback(
    std::function<void(uint64_t, std::shared_ptr<OHOS::Camera::CameraMetadata>)> &resultCbk)
{
    resultCallback_ = resultCbk;
}

void DMetadataProcessor::UpdateAllResult(const uint64_t &resultTimestamp)
{
    uint32_t itemCap = OHOS::Camera::GetCameraMetadataItemCapacity(latestProducerMetadataResult_->get());
    uint32_t dataSize = OHOS::Camera::GetCameraMetadataDataSize(latestProducerMetadataResult_->get());
    DHLOGD("DMetadataProcessor::UpdateAllResult itemCapacity: %u, dataSize: %u", itemCap, dataSize);
    std::shared_ptr<OHOS::Camera::CameraMetadata> result =
        std::make_shared<OHOS::Camera::CameraMetadata>(itemCap, dataSize);
    int32_t ret = OHOS::Camera::CopyCameraMetadataItems(result->get(), latestProducerMetadataResult_->get());
    if (ret != CAM_META_SUCCESS) {
        DHLOGE("DMetadataProcessor::UpdateAllResult copy metadata item failed, ret: %d", ret);
        return;
    }
    resultCallback_(resultTimestamp, result);
}

void DMetadataProcessor::UpdateOnChanged(const uint64_t &resultTimestamp)
{
    bool needReturn = false;
    uint32_t itemCap = OHOS::Camera::GetCameraMetadataItemCapacity(latestProducerMetadataResult_->get());
    uint32_t dataSize = OHOS::Camera::GetCameraMetadataDataSize(latestProducerMetadataResult_->get());
    DHLOGD("DMetadataProcessor::UpdateOnChanged itemCapacity: %u, dataSize: %u", itemCap, dataSize);
    std::shared_ptr<OHOS::Camera::CameraMetadata> result =
        std::make_shared<OHOS::Camera::CameraMetadata>(itemCap, dataSize);
    DHLOGD("DMetadataProcessor::UpdateOnChanged enabledResultSet size: %zu", enabledResultSet_.size());
    for (auto tag : enabledResultSet_) {
        DHLOGD("DMetadataProcessor::UpdateOnChanged camera device metadata tag: %d", tag);
        camera_metadata_item_t item;
        camera_metadata_item_t anoItem;
        int ret1 = OHOS::Camera::FindCameraMetadataItem(latestProducerMetadataResult_->get(), tag, &item);
        int ret2 = OHOS::Camera::FindCameraMetadataItem(latestConsumerMetadataResult_->get(), tag, &anoItem);
        DHLOGD("DMetadataProcessor::UpdateOnChanged find metadata item ret: %d, %d", ret1, ret2);
        if (ret1 != CAM_META_SUCCESS) {
            continue;
        }

        if (ret2 == CAM_META_SUCCESS) {
            if ((item.count != anoItem.count) || (item.data_type != anoItem.data_type)) {
                needReturn = true;
                result->addEntry(tag, GetMetadataItemData(item), item.count);
                continue;
            }
            uint32_t size = GetDataSize(item.data_type);
            DHLOGD("DMetadataProcessor::UpdateOnChanged data size: %u", size);
            for (uint32_t i = 0; i < (size * static_cast<uint32_t>(item.count)); i++) {
                if (*(item.data.u8 + i) != *(anoItem.data.u8 + i)) {
                    needReturn = true;
                    result->addEntry(tag, GetMetadataItemData(item), item.count);
                    break;
                }
            }
        } else {
            needReturn = true;
            result->addEntry(tag, GetMetadataItemData(item), item.count);
            continue;
        }
    }

    if (needReturn) {
        resultCallback_(resultTimestamp, result);
    }
}

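// Decode a base64-encoded metadata result reported by the sink, keep the previous result as the
// consumer snapshot, and in ON_CHANGED mode report only the entries that differ from that snapshot.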
DCamRetCode DMetadataProcessor::SaveResultMetadata(std::string resultStr)
{
    if (resultStr.empty()) {
        DHLOGE("Input result string is null.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    std::string metadataStr = Base64Decode(resultStr);
    std::lock_guard<std::mutex> autoLock(producerMutex_);
    latestConsumerMetadataResult_ = latestProducerMetadataResult_;
    latestProducerMetadataResult_ = OHOS::Camera::MetadataUtils::DecodeFromString(metadataStr);
    if (latestProducerMetadataResult_ == nullptr) {
        DHLOGE("Failed to decode metadata setting from string.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    if (!OHOS::Camera::GetCameraMetadataItemCount(latestProducerMetadataResult_->get())) {
        DHLOGE("Input result metadata item is empty.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    DHLOGD("DMetadataProcessor::SaveResultMetadata result callback mode: %d", metaResultMode_);
    if (metaResultMode_ != ResultCallbackMode::ON_CHANGED) {
        return SUCCESS;
    }

    uint64_t resultTimestamp = GetCurrentLocalTimeStamp();
    if (latestConsumerMetadataResult_ == nullptr) {
        UpdateAllResult(resultTimestamp);
        return SUCCESS;
    }

    camera_metadata_item_entry_t* itemEntry = OHOS::Camera::GetMetadataItems(latestProducerMetadataResult_->get());
    uint32_t count = latestProducerMetadataResult_->get()->item_count;
    for (uint32_t i = 0; i < count; i++, itemEntry++) {
        enabledResultSet_.insert((MetaType)(itemEntry->item));
    }
    UpdateOnChanged(resultTimestamp);
    return SUCCESS;
}

void DMetadataProcessor::ConvertToCameraMetadata(common_metadata_header_t *&input,
    std::shared_ptr<OHOS::Camera::CameraMetadata> &output)
{
    auto ret = OHOS::Camera::CopyCameraMetadataItems(output->get(), input);
    if (ret != CAM_META_SUCCESS) {
        DHLOGE("Failed to copy the old metadata to new metadata.");
        output = nullptr;
    }
}

void DMetadataProcessor::ResizeMetadataHeader(common_metadata_header_t *&header,
    uint32_t itemCapacity, uint32_t dataCapacity)
{
    if (header) {
        OHOS::Camera::FreeCameraMetadataBuffer(header);
    }
    header = OHOS::Camera::AllocateCameraMetadataBuffer(itemCapacity, dataCapacity);
}

uint32_t DMetadataProcessor::GetDataSize(uint32_t type)
{
    uint32_t size = 0;
    if (type == META_TYPE_BYTE) {
        size = sizeof(uint8_t);
    } else if (type == META_TYPE_INT32) {
        size = sizeof(int32_t);
    } else if (type == META_TYPE_UINT32) {
        size = sizeof(uint32_t);
    } else if (type == META_TYPE_FLOAT) {
        size = sizeof(float);
    } else if (type == META_TYPE_INT64) {
        size = sizeof(int64_t);
    } else if (type == META_TYPE_DOUBLE) {
        size = sizeof(double);
    } else if (type == META_TYPE_RATIONAL) {
        size = sizeof(camera_rational_t);
    } else {
        size = 0;
    }
    return size;
}

void* DMetadataProcessor::GetMetadataItemData(const camera_metadata_item_t &item)
{
    switch (item.data_type) {
        case META_TYPE_BYTE: {
            return item.data.u8;
        }
        case META_TYPE_INT32: {
            return item.data.i32;
        }
        case META_TYPE_UINT32: {
            return item.data.ui32;
        }
        case META_TYPE_FLOAT: {
            return item.data.f;
        }
        case META_TYPE_INT64: {
            return item.data.i64;
        }
        case META_TYPE_DOUBLE: {
            return item.data.d;
        }
        case META_TYPE_RATIONAL: {
            return item.data.r;
        }
        default: {
            DHLOGE("DMetadataProcessor::GetMetadataItemData invalid data type: %u", item.data_type);
            return nullptr;
        }
    }
}

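// Walk the "Resolution" node of a Photo/Preview/Video section. Each format key maps to an array of
// "width*height" strings, e.g. (illustrative) "Resolution": { "4": ["1920*1080", "1280*720"] }.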
void DMetadataProcessor::GetEachNodeSupportedResolution(std::vector<int>& formats, const std::string rootNode,
    std::map<int, std::vector<DCResolution>>& supportedFormats, Json::Value& rootValue, const bool isSink)
{
    for (const auto &format : formats) {
        std::string formatStr = std::to_string(format);
        if (!rootValue[rootNode].isMember("Resolution") || !rootValue[rootNode]["Resolution"].isMember(formatStr) ||
            !rootValue[rootNode]["Resolution"][formatStr].isArray() ||
            rootValue[rootNode]["Resolution"][formatStr].size() == 0 ||
            rootValue[rootNode]["Resolution"][formatStr].size() > JSON_ARRAY_MAX_SIZE) {
            DHLOGE("Resolution or %s error.", formatStr.c_str());
            continue;
        }
        GetNodeSupportedResolution(format, rootNode, supportedFormats, rootValue, isSink);
    }
}

void DMetadataProcessor::GetNodeSupportedResolution(int format, const std::string rootNode,
    std::map<int, std::vector<DCResolution>>& supportedFormats, Json::Value& rootValue, const bool isSink)
{
    std::vector<DCResolution> resolutionVec;
    std::string formatStr = std::to_string(format);
    uint32_t size = rootValue[rootNode]["Resolution"][formatStr].size();
    for (uint32_t i = 0; i < size; i++) {
        if (!rootValue[rootNode]["Resolution"][formatStr][i].isString()) {
            DHLOGE("Resolution %s %d is not a string.", formatStr.c_str(), i);
            continue;
        }
        std::string resoStr = rootValue[rootNode]["Resolution"][formatStr][i].asString();
        std::vector<std::string> reso;
        SplitString(resoStr, reso, STAR_SEPARATOR);
        if (reso.size() != SIZE_FMT_LEN) {
            continue;
        }
        uint32_t width = static_cast<uint32_t>(std::stoi(reso[0]));
        uint32_t height = static_cast<uint32_t>(std::stoi(reso[1]));
        if (height == 0 || width == 0 || ((rootNode == "Photo") &&
            ((width * height) > (MAX_SUPPORT_PHOTO_WIDTH * MAX_SUPPORT_PHOTO_HEIGHT))) ||
            ((rootNode != "Photo") && (width > MAX_SUPPORT_PREVIEW_WIDTH || height > MAX_SUPPORT_PREVIEW_HEIGHT))) {
            continue;
        }
        DCResolution resolution(width, height);
        resolutionVec.push_back(resolution);
    }
    if (!resolutionVec.empty()) {
        std::sort(resolutionVec.begin(), resolutionVec.end());
        supportedFormats[format] = resolutionVec;
        if ((rootNode != "Photo") && (maxPreviewResolution_ < resolutionVec[0])) {
            maxPreviewResolution_.width_ = resolutionVec[0].width_;
            maxPreviewResolution_.height_ = resolutionVec[0].height_;
        }
        if ((rootNode == "Photo") && (maxPhotoResolution_ < resolutionVec[0])) {
            maxPhotoResolution_.width_ = resolutionVec[0].width_;
            maxPhotoResolution_.height_ = resolutionVec[0].height_;
        }
        StoreSinkAndSrcConfig(format, rootNode, isSink, resolutionVec);
    }
}

void DMetadataProcessor::StoreSinkAndSrcConfig(int format, const std::string rootNode, const bool isSink,
    std::vector<DCResolution> &resolutionVec)
{
    if (isSink) {
        if (rootNode == "Photo") {
            sinkPhotoProfiles_[format] = resolutionVec;
        } else if (rootNode == "Preview") {
            sinkPreviewProfiles_[format] = resolutionVec;
        } else if (rootNode == "Video") {
            sinkVideoProfiles_[format] = resolutionVec;
        }
    } else {
        if (rootNode == "Photo") {
            srcPhotoProfiles_[format] = resolutionVec;
        } else if (rootNode == "Preview") {
            srcPreviewProfiles_[format] = resolutionVec;
        } else if (rootNode == "Video") {
            srcVideoProfiles_[format] = resolutionVec;
        }
    }
}

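// Parse the Photo/Preview/Video sections of an ability JSON string and return a map from output
// format to the supported resolutions, also caching the sink/source profiles per stream type.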
std::map<int, std::vector<DCResolution>> DMetadataProcessor::GetDCameraSupportedFormats(
    const std::string &abilityInfo, const bool isSink)
{
    std::map<int, std::vector<DCResolution>> supportedFormats;
    JSONCPP_STRING errs;
    Json::CharReaderBuilder readerBuilder;
    Json::Value rootValue;

    std::unique_ptr<Json::CharReader> const jsonReader(readerBuilder.newCharReader());
    if (!jsonReader->parse(abilityInfo.c_str(), abilityInfo.c_str() + abilityInfo.length(),
        &rootValue, &errs) || !rootValue.isObject()) {
        return supportedFormats;
    }
    ParsePhotoFormats(rootValue, supportedFormats, isSink);
    ParsePreviewFormats(rootValue, supportedFormats, isSink);
    ParseVideoFormats(rootValue, supportedFormats, isSink);
    return supportedFormats;
}

void DMetadataProcessor::ParsePhotoFormats(Json::Value& rootValue,
    std::map<int, std::vector<DCResolution>>& supportedFormats, const bool isSink)
{
    if (!rootValue.isMember("Photo") || !rootValue["Photo"].isMember("OutputFormat") ||
        !rootValue["Photo"]["OutputFormat"].isArray() || rootValue["Photo"]["OutputFormat"].size() == 0 ||
        rootValue["Photo"]["OutputFormat"].size() > JSON_ARRAY_MAX_SIZE) {
        DHLOGE("Photo or photo output format error.");
        return;
    }
    std::vector<int> photoFormats;
    uint32_t size = rootValue["Photo"]["OutputFormat"].size();
    for (uint32_t i = 0; i < size; i++) {
        if ((rootValue["Photo"]["OutputFormat"][i]).isInt()) {
            photoFormats.push_back((rootValue["Photo"]["OutputFormat"][i]).asInt());
        }
    }
    if (isSink) {
        sinkPhotoFormats_ = photoFormats;
    }
    GetEachNodeSupportedResolution(photoFormats, "Photo", supportedFormats, rootValue, isSink);
}

void DMetadataProcessor::ParsePreviewFormats(Json::Value& rootValue,
    std::map<int, std::vector<DCResolution>>& supportedFormats, const bool isSink)
{
    if (!rootValue.isMember("Preview") || !rootValue["Preview"].isMember("OutputFormat") ||
        !rootValue["Preview"]["OutputFormat"].isArray() || rootValue["Preview"]["OutputFormat"].size() == 0 ||
        rootValue["Preview"]["OutputFormat"].size() > JSON_ARRAY_MAX_SIZE) {
        DHLOGE("Preview or preview output format error.");
        return;
    }
    std::vector<int> previewFormats;
    uint32_t size = rootValue["Preview"]["OutputFormat"].size();
    for (uint32_t i = 0; i < size; i++) {
        if ((rootValue["Preview"]["OutputFormat"][i]).isInt()) {
            previewFormats.push_back((rootValue["Preview"]["OutputFormat"][i]).asInt());
        }
    }
    GetEachNodeSupportedResolution(previewFormats, "Preview", supportedFormats, rootValue, isSink);
}

void DMetadataProcessor::ParseVideoFormats(Json::Value& rootValue,
    std::map<int, std::vector<DCResolution>>& supportedFormats, const bool isSink)
{
    if (!rootValue.isMember("Video") || !rootValue["Video"].isMember("OutputFormat") ||
        !rootValue["Video"]["OutputFormat"].isArray() || rootValue["Video"]["OutputFormat"].size() == 0 ||
        rootValue["Video"]["OutputFormat"].size() > JSON_ARRAY_MAX_SIZE) {
        DHLOGE("Video or video output format error.");
        return;
    }
    std::vector<int> videoFormats;
    uint32_t size = rootValue["Video"]["OutputFormat"].size();
    for (uint32_t i = 0; i < size; i++) {
        if ((rootValue["Video"]["OutputFormat"][i]).isInt()) {
            videoFormats.push_back((rootValue["Video"]["OutputFormat"][i]).asInt());
        }
    }
    GetEachNodeSupportedResolution(videoFormats, "Video", supportedFormats, rootValue, isSink);
}

void DMetadataProcessor::PrintDCameraMetadata(const common_metadata_header_t *metadata)
{
    if (metadata == nullptr) {
        DHLOGE("Failed to print metadata, input metadata is null.");
        return;
    }

    uint32_t tagCount = OHOS::Camera::GetCameraMetadataItemCount(metadata);
    DHLOGD("DMetadataProcessor::PrintDCameraMetadata, input metadata item count = %d.", tagCount);
    for (uint32_t i = 0; i < tagCount; i++) {
        camera_metadata_item_t item;
        int ret = OHOS::Camera::GetCameraMetadataItem(metadata, i, &item);
        if (ret != 0) {
            continue;
        }

        const char *name = OHOS::Camera::GetCameraMetadataItemName(item.item);
        if (item.data_type == META_TYPE_BYTE) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%d", item.index, name, (uint8_t)(item.data.u8[k]));
            }
        } else if (item.data_type == META_TYPE_INT32) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%d", item.index, name, (int32_t)(item.data.i32[k]));
            }
        } else if (item.data_type == META_TYPE_UINT32) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%d", item.index, name, (uint32_t)(item.data.ui32[k]));
            }
        } else if (item.data_type == META_TYPE_FLOAT) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%f", item.index, name, (float)(item.data.f[k]));
            }
        } else if (item.data_type == META_TYPE_INT64) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%lld", item.index, name, (long long)(item.data.i64[k]));
            }
        } else if (item.data_type == META_TYPE_DOUBLE) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%lf", item.index, name, (double)(item.data.d[k]));
            }
        } else { // META_TYPE_RATIONAL
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%d/%d", item.index, name,
                    item.data.r[k].numerator, item.data.r[k].denominator);
            }
        }
    }
}
} // namespace DistributedHardware
} // namespace OHOS