/*
 * Copyright (c) 2021-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dmetadata_processor.h"

#include "dbuffer_manager.h"
#include "dcamera.h"
#include "distributed_hardware_log.h"
#include "cJSON.h"
#include "metadata_utils.h"

namespace OHOS {
namespace DistributedHardware {
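// Parses the sink ability JSON, decodes the base64 "MetaData" field into the camera ability,
// falls back to a default ability when decoding fails or yields no items, and records every
// metadata item type into allResultSet_.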
DCamRetCode DMetadataProcessor::InitDCameraAbility(const std::string &sinkAbilityInfo)
{
    cJSON *rootValue = cJSON_Parse(sinkAbilityInfo.c_str());
    CHECK_NULL_RETURN_LOG(rootValue, FAILED, "The sinkAbilityInfo is null.");
    CHECK_OBJECT_FREE_RETURN(rootValue, FAILED, "The sinkAbilityInfo is not object.");
    cJSON *metaObj = cJSON_GetObjectItemCaseSensitive(rootValue, "MetaData");
    if (metaObj == nullptr || !cJSON_IsString(metaObj) || (metaObj->valuestring == nullptr)) {
        cJSON_Delete(rootValue);
        return FAILED;
    }
    std::string metadataStr = std::string(metaObj->valuestring);
    if (!metadataStr.empty()) {
        std::hash<std::string> h;
        DHLOGI("Decode distributed camera metadata from base64, hash: %{public}zu, length: %{public}zu",
            h(metadataStr), metadataStr.length());
        std::string decodeString = Base64Decode(metadataStr);
        DHLOGI("Decode distributed camera metadata from string, hash: %{public}zu, length: %{public}zu",
            h(decodeString), decodeString.length());
        dCameraAbility_ = OHOS::Camera::MetadataUtils::DecodeFromString(decodeString);
        DHLOGI("Decode distributed camera metadata from string success.");
    }

    if (dCameraAbility_ == nullptr) {
        DHLOGE("Metadata is null in ability set or failed to decode metadata ability from string.");
        dCameraAbility_ = std::make_shared<CameraAbility>(DEFAULT_ENTRY_CAPACITY, DEFAULT_DATA_CAPACITY);
    }

    if (OHOS::Camera::GetCameraMetadataItemCount(dCameraAbility_->get()) <= 0) {
        DCamRetCode ret = InitDCameraDefaultAbilityKeys(sinkAbilityInfo);
        if (ret != SUCCESS) {
            DHLOGE("Init distributed camera default ability keys failed.");
            dCameraAbility_ = nullptr;
            cJSON_Delete(rootValue);
            return ret;
        }
    }
    DCamRetCode ret = InitDCameraOutputAbilityKeys(sinkAbilityInfo);
    if (ret != SUCCESS) {
        DHLOGE("Init distributed camera output ability keys failed.");
        dCameraAbility_ = nullptr;
        cJSON_Delete(rootValue);
        return ret;
    }

    camera_metadata_item_entry_t* itemEntry = OHOS::Camera::GetMetadataItems(dCameraAbility_->get());
    CHECK_AND_FREE_RETURN_RET_LOG(itemEntry == nullptr, FAILED, rootValue, "get itemEntry failed.");
    uint32_t count = dCameraAbility_->get()->item_count;
    for (uint32_t i = 0; i < count; i++, itemEntry++) {
        allResultSet_.insert((MetaType)(itemEntry->item));
    }
    cJSON_Delete(rootValue);
    return SUCCESS;
}

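// Populates the ability with baseline control and statistics entries (AE, AF, AWB, face detect,
// histogram, exposure and focus modes) plus the default FPS ranges shared by every sink device.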
void DMetadataProcessor::InitDcameraBaseAbility()
{
    const uint8_t cameraType = OHOS_CAMERA_TYPE_LOGICAL;
    AddAbilityEntry(OHOS_ABILITY_CAMERA_TYPE, &cameraType, 1);

    const int64_t exposureTime = 0xFFFFFFFFFFFFFFFF;
    AddAbilityEntry(OHOS_SENSOR_EXPOSURE_TIME, &exposureTime, 1);

    const float correctionGain = 0.0;
    AddAbilityEntry(OHOS_SENSOR_COLOR_CORRECTION_GAINS, &correctionGain, 1);

    const uint8_t faceDetectMode = OHOS_CAMERA_FACE_DETECT_MODE_OFF;
    AddAbilityEntry(OHOS_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

    const uint8_t histogramMode = OHOS_CAMERA_HISTOGRAM_MODE_OFF;
    AddAbilityEntry(OHOS_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);

    const uint8_t aeAntibandingMode = OHOS_CAMERA_AE_ANTIBANDING_MODE_OFF;
    AddAbilityEntry(OHOS_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);

    int32_t aeExposureCompensation = 0xFFFFFFFF;
    AddAbilityEntry(OHOS_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExposureCompensation, 1);

    const uint8_t aeLock = OHOS_CAMERA_AE_LOCK_OFF;
    AddAbilityEntry(OHOS_CONTROL_AE_LOCK, &aeLock, 1);

    const uint8_t aeMode = OHOS_CAMERA_AE_MODE_OFF;
    AddAbilityEntry(OHOS_CONTROL_AE_MODE, &aeMode, 1);

    const uint8_t afMode = OHOS_CAMERA_AF_MODE_OFF;
    AddAbilityEntry(OHOS_CONTROL_AF_MODE, &afMode, 1);

    const uint8_t awbLock = OHOS_CAMERA_AWB_LOCK_OFF;
    AddAbilityEntry(OHOS_CONTROL_AWB_LOCK, &awbLock, 1);

    const uint8_t awbMode = OHOS_CAMERA_AWB_MODE_OFF;
    AddAbilityEntry(OHOS_CONTROL_AWB_MODE, &awbMode, 1);

    const uint8_t aeAntibandingModes = OHOS_CAMERA_AE_ANTIBANDING_MODE_AUTO;
    AddAbilityEntry(OHOS_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, &aeAntibandingModes, 1);

    const uint8_t aeAvailableModes = OHOS_CAMERA_AE_MODE_ON;
    AddAbilityEntry(OHOS_CONTROL_AE_AVAILABLE_MODES, &aeAvailableModes, 1);

    const int32_t compensationRange[] = { 0, 0 };
    AddAbilityEntry(OHOS_ABILITY_AE_COMPENSATION_RANGE, compensationRange,
        (sizeof(compensationRange) / sizeof(compensationRange[0])));

    const camera_rational_t compensationStep[] = { { 0, 1 } };
    AddAbilityEntry(OHOS_ABILITY_AE_COMPENSATION_STEP, compensationStep,
        (sizeof(compensationStep) / sizeof(compensationStep[0])));

    const uint8_t afAvailableModes[] = { OHOS_CAMERA_AF_MODE_AUTO, OHOS_CAMERA_AF_MODE_OFF };
    AddAbilityEntry(OHOS_CONTROL_AF_AVAILABLE_MODES, afAvailableModes,
        (sizeof(afAvailableModes) / sizeof(afAvailableModes[0])));

    const uint8_t awbAvailableModes = OHOS_CAMERA_AWB_MODE_AUTO;
    AddAbilityEntry(OHOS_CONTROL_AWB_AVAILABLE_MODES, &awbAvailableModes, 1);

    const uint8_t deviceExposureMode = OHOS_CAMERA_EXPOSURE_MODE_CONTINUOUS_AUTO;
    AddAbilityEntry(OHOS_ABILITY_DEVICE_AVAILABLE_EXPOSUREMODES, &deviceExposureMode, 1);

    const uint8_t controlExposureMode = OHOS_CAMERA_EXPOSURE_MODE_CONTINUOUS_AUTO;
    AddAbilityEntry(OHOS_CONTROL_EXPOSUREMODE, &controlExposureMode, 1);

    const uint8_t deviceFocusModes = OHOS_CAMERA_FOCUS_MODE_AUTO;
    AddAbilityEntry(OHOS_ABILITY_DEVICE_AVAILABLE_FOCUSMODES, &deviceFocusModes, 1);
    SetFpsRanges();
}

void DMetadataProcessor::SetFpsRanges()
{
    std::vector<int32_t> fpsRanges;
    fpsRanges.push_back(MIN_SUPPORT_DEFAULT_FPS);
    fpsRanges.push_back(MAX_SUPPORT_DEFAULT_FPS);
    AddAbilityEntry(OHOS_CONTROL_AE_TARGET_FPS_RANGE, fpsRanges.data(), fpsRanges.size());
    AddAbilityEntry(OHOS_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(), fpsRanges.size());
}

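// Reads the "ProtocolVer" and "Position" string fields from the sink ability JSON into
// protocolVersion_ and dCameraPosition_; returns false if either field is missing or not a string.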
bool DMetadataProcessor::GetInfoFromJson(const std::string& sinkAbilityInfo)
{
    cJSON *rootValue = cJSON_Parse(sinkAbilityInfo.c_str());
    CHECK_NULL_RETURN_LOG(rootValue, false, "The sinkAbilityInfo is null.");
    CHECK_OBJECT_FREE_RETURN(rootValue, false, "The sinkAbilityInfo is not object.");
    cJSON *verObj = cJSON_GetObjectItemCaseSensitive(rootValue, "ProtocolVer");
    if (verObj == nullptr || !cJSON_IsString(verObj) || (verObj->valuestring == nullptr)) {
        cJSON_Delete(rootValue);
        return false;
    }
    protocolVersion_ = std::string(verObj->valuestring);

    cJSON *positionObj = cJSON_GetObjectItemCaseSensitive(rootValue, "Position");
    if (positionObj == nullptr || !cJSON_IsString(positionObj) || (positionObj->valuestring == nullptr)) {
        cJSON_Delete(rootValue);
        return false;
    }
    dCameraPosition_ = std::string(positionObj->valuestring);
    cJSON_Delete(rootValue);
    return true;
}

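// Fills in default ability keys when the sink did not provide decodable metadata: camera
// position, the base ability entries, default focus/flash/zoom values, sensor array sizes
// derived from the maximum preview resolution, and placeholder stream configurations.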
DCamRetCode DMetadataProcessor::InitDCameraDefaultAbilityKeys(const std::string &sinkAbilityInfo)
{
    if (!GetInfoFromJson(sinkAbilityInfo)) {
        return FAILED;
    }
    if (dCameraPosition_ == "BACK") {
        const uint8_t position = OHOS_CAMERA_POSITION_BACK;
        AddAbilityEntry(OHOS_ABILITY_CAMERA_POSITION, &position, 1);
    } else if (dCameraPosition_ == "FRONT") {
        const uint8_t position = OHOS_CAMERA_POSITION_FRONT;
        AddAbilityEntry(OHOS_ABILITY_CAMERA_POSITION, &position, 1);
    } else {
        const uint8_t position = OHOS_CAMERA_POSITION_OTHER;
        AddAbilityEntry(OHOS_ABILITY_CAMERA_POSITION, &position, 1);
    }

    InitDcameraBaseAbility();

    const uint8_t controlFocusMode = OHOS_CAMERA_FOCUS_MODE_AUTO;
    AddAbilityEntry(OHOS_CONTROL_FOCUSMODE, &controlFocusMode, 1);

    const uint8_t deviceFlashModes = OHOS_CAMERA_FLASH_MODE_AUTO;
    AddAbilityEntry(OHOS_ABILITY_DEVICE_AVAILABLE_FLASHMODES, &deviceFlashModes, 1);

    const uint8_t controlFlashMode = OHOS_CAMERA_FLASH_MODE_CLOSE;
    AddAbilityEntry(OHOS_CONTROL_FLASHMODE, &controlFlashMode, 1);

    float zoomRatioRange[1] = {1.0};
    AddAbilityEntry(OHOS_ABILITY_ZOOM_RATIO_RANGE, zoomRatioRange,
        (sizeof(zoomRatioRange) / sizeof(zoomRatioRange[0])));

    const float zoomRatio = 1.0;
    AddAbilityEntry(OHOS_CONTROL_ZOOM_RATIO, &zoomRatio, 1);

    int32_t activeArraySize[] = {0, 0, static_cast<int32_t>(maxPreviewResolution_.width_),
        static_cast<int32_t>(maxPreviewResolution_.height_)};
    AddAbilityEntry(OHOS_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArraySize,
        (sizeof(activeArraySize) / sizeof(activeArraySize[0])));

    int32_t pixelArraySize[] = {
        static_cast<int32_t>(maxPreviewResolution_.width_), static_cast<int32_t>(maxPreviewResolution_.height_)
    };
    AddAbilityEntry(OHOS_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArraySize,
        (sizeof(pixelArraySize) / sizeof(pixelArraySize[0])));

    const int32_t jpegThumbnailSizes[] = {0, 0, DEGREE_240, DEGREE_180};
    AddAbilityEntry(OHOS_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegThumbnailSizes,
        (sizeof(jpegThumbnailSizes) / sizeof(jpegThumbnailSizes[0])));

    std::vector<int32_t> streamDefault = {0};
    streamDefault.assign(DEFAULT_EXTEND_SIZE, 0);
    AddAbilityEntry(OHOS_ABILITY_STREAM_AVAILABLE_BASIC_CONFIGURATIONS, streamDefault.data(), streamDefault.size());

    AddAbilityEntry(OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS, streamDefault.data(), streamDefault.size());

    AddAbilityEntry(OHOS_SENSOR_INFO_MAX_FRAME_DURATION, &MAX_FRAME_DURATION, 1);

    const int32_t jpegMaxSizeDefault = MAX_SUPPORT_PREVIEW_WIDTH * MAX_SUPPORT_PREVIEW_HEIGHT;
    AddAbilityEntry(OHOS_JPEG_MAX_SIZE, &jpegMaxSizeDefault, 1);

    const uint8_t connectionTypeDefault = OHOS_CAMERA_CONNECTION_TYPE_REMOTE;
    AddAbilityEntry(OHOS_ABILITY_CAMERA_CONNECTION_TYPE, &connectionTypeDefault, 1);
    return SUCCESS;
}

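// Builds the basic and extended stream configurations when the sink ability JSON carries no
// per-mode section; the ADD_MODE loop keeps the layout compatible with the camera framework.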
void DMetadataProcessor::InitOutputAbilityWithoutMode(const std::string &sinkAbilityInfo)
{
    DHLOGI("InitOutputAbilityWithoutMode enter.");
    std::map<int, std::vector<DCResolution>> supportedFormats = GetDCameraSupportedFormats(sinkAbilityInfo);

    std::vector<int32_t> streamConfigs;
    std::vector<int32_t> extendStreamConfigs;
    if (dCameraAbility_ == nullptr) {
        DHLOGE("Distributed camera ability is null.");
        return;
    }
    for (uint32_t i = 0; i < ADD_MODE; i++) { // Compatible with camera framework modification
        camera_metadata_item_t item;
        int32_t ret = OHOS::Camera::FindCameraMetadataItem(dCameraAbility_->get(),
            OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS, &item);
        if (ret == CAM_META_SUCCESS && item.count != 0) {
            extendStreamConfigs.push_back(i);
        }
        InitBasicConfigTag(supportedFormats, streamConfigs);
        InitExtendConfigTag(supportedFormats, extendStreamConfigs);
        extendStreamConfigs.push_back(EXTEND_EOF); // mode eof
    }

    UpdateAbilityTag(streamConfigs, extendStreamConfigs);
}

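// Builds the stream configuration tags from the sink ability JSON. When the JSON contains a
// supported-mode array, each mode object is parsed separately and written into the extended
// configurations as "<mode> <streams...> EOF"; otherwise the mode-less path is used.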
DCamRetCode DMetadataProcessor::InitDCameraOutputAbilityKeys(const std::string &sinkAbilityInfo)
{
    cJSON *rootValue = cJSON_Parse(sinkAbilityInfo.c_str());
    CHECK_NULL_RETURN_LOG(rootValue, FAILED, "The sinkAbilityInfo is null.");
    CHECK_OBJECT_FREE_RETURN(rootValue, FAILED, "The sinkAbilityInfo is not object.");

    cJSON *modeArray = cJSON_GetObjectItemCaseSensitive(rootValue, CAMERA_SUPPORT_MODE.c_str());
    if (modeArray == nullptr || !cJSON_IsArray(modeArray)) {
        InitOutputAbilityWithoutMode(sinkAbilityInfo);
        cJSON_Delete(rootValue);
        return SUCCESS;
    }
    CHECK_AND_FREE_RETURN_RET_LOG(cJSON_GetArraySize(modeArray) == 0 || static_cast<uint32_t>(
        cJSON_GetArraySize(modeArray)) > JSON_ARRAY_MAX_SIZE, FAILED, rootValue, "modeArray create error.");

    std::vector<std::string> keys;
    int32_t arraySize = cJSON_GetArraySize(modeArray);
    for (int32_t i = 0; i < arraySize; ++i) {
        cJSON *number = cJSON_GetArrayItem(modeArray, i);
        if (number != nullptr && cJSON_IsNumber(number)) {
            keys.push_back(std::to_string(number->valueint));
        }
    }
    std::vector<int32_t> streamConfigs;
    std::vector<int32_t> extendStreamConfigs;
    CHECK_AND_FREE_RETURN_RET_LOG(dCameraAbility_ == nullptr, FAILED, rootValue, "dCameraAbility_ null.");
    for (std::string key : keys) {
        cJSON *value = cJSON_GetObjectItem(rootValue, key.c_str());
        CHECK_AND_FREE_RETURN_RET_LOG(value == nullptr || !cJSON_IsObject(value), FAILED, rootValue, "mode get error.");

        char *jsonValue = cJSON_Print(value);
        CHECK_AND_FREE_RETURN_RET_LOG(jsonValue == nullptr, FAILED, rootValue, "cJSON print value error.");
        std::string format(jsonValue);
        DHLOGI("the current mode: %{public}s, value: %{public}s", key.c_str(), format.c_str());
        std::map<int, std::vector<DCResolution>> supportedFormats = GetDCameraSupportedFormats(format);

        camera_metadata_item_t item;
        int32_t ret = OHOS::Camera::FindCameraMetadataItem(dCameraAbility_->get(),
            OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS, &item);
        if (ret == CAM_META_SUCCESS && item.count != 0) {
            extendStreamConfigs.push_back(std::atoi(key.c_str())); // mode
        }

        InitBasicConfigTag(supportedFormats, streamConfigs);
        InitExtendConfigTag(supportedFormats, extendStreamConfigs);
        extendStreamConfigs.push_back(EXTEND_EOF); // mode eof

        cJSON_free(jsonValue);
        sinkPhotoProfiles_.clear();
        sinkPreviewProfiles_.clear();
        sinkVideoProfiles_.clear();
    }
    UpdateAbilityTag(streamConfigs, extendStreamConfigs);

    cJSON_Delete(rootValue);
    return SUCCESS;
}

void DMetadataProcessor::UpdateAbilityTag(std::vector<int32_t> &streamConfigs,
    std::vector<int32_t> &extendStreamConfigs)
{
    UpdateAbilityEntry(OHOS_ABILITY_STREAM_AVAILABLE_BASIC_CONFIGURATIONS, streamConfigs.data(),
        streamConfigs.size());

    UpdateAbilityEntry(OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS, extendStreamConfigs.data(),
        extendStreamConfigs.size());

    UpdateAbilityEntry(OHOS_SENSOR_INFO_MAX_FRAME_DURATION, &MAX_FRAME_DURATION, 1);

    const int32_t jpegMaxSize = maxPhotoResolution_.width_ * maxPhotoResolution_.height_;
    UpdateAbilityEntry(OHOS_JPEG_MAX_SIZE, &jpegMaxSize, 1);

    const uint8_t connectionType = OHOS_CAMERA_CONNECTION_TYPE_REMOTE;
    UpdateAbilityEntry(OHOS_ABILITY_CAMERA_CONNECTION_TYPE, &connectionType, 1);
}

void DMetadataProcessor::InitBasicConfigTag(std::map<int, std::vector<DCResolution>> &supportedFormats,
    std::vector<int32_t> &streamConfigs)
{
    std::map<int, std::vector<DCResolution>>::iterator iter;
    for (iter = supportedFormats.begin(); iter != supportedFormats.end(); ++iter) {
        std::vector<DCResolution> resolutionList = iter->second;
        for (auto resolution : resolutionList) {
            DHLOGI("DMetadataProcessor::sink supported formats: { format=%{public}d, width=%{public}d, height="
                "%{public}d }", iter->first, resolution.width_, resolution.height_);
            streamConfigs.push_back(iter->first);
            streamConfigs.push_back(resolution.width_);
            streamConfigs.push_back(resolution.height_);
        }
    }
}

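// Appends the extended stream configurations for one mode: each section starts with a stream
// type marker (EXTEND_PREVIEW / EXTEND_VIDEO / EXTEND_PHOTO), lists its format/resolution/fps
// tuples, and ends with EXTEND_EOF. The photo section is omitted when no photo profile exists.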
void DMetadataProcessor::InitExtendConfigTag(std::map<int, std::vector<DCResolution>> &supportedFormats,
    std::vector<int32_t> &extendStreamConfigs)
{
    extendStreamConfigs.push_back(EXTEND_PREVIEW); // preview
    std::map<int, std::vector<DCResolution>>::iterator previewIter;
    for (previewIter = sinkPreviewProfiles_.begin(); previewIter != sinkPreviewProfiles_.end(); ++previewIter) {
        std::vector<DCResolution> resolutionList = previewIter->second;
        for (auto resolution : resolutionList) {
            DHLOGI("sink extend supported preview formats: { format=%{public}d, width=%{public}d, height=%{public}d }",
                previewIter->first, resolution.width_, resolution.height_);
            AddConfigs(extendStreamConfigs, previewIter->first, resolution.width_, resolution.height_, PREVIEW_FPS);
        }
    }
    extendStreamConfigs.push_back(EXTEND_EOF); // preview eof

    extendStreamConfigs.push_back(EXTEND_VIDEO); // video
    std::map<int, std::vector<DCResolution>>::iterator videoIter;
    for (videoIter = sinkVideoProfiles_.begin(); videoIter != sinkVideoProfiles_.end(); ++videoIter) {
        std::vector<DCResolution> resolutionList = videoIter->second;
        for (auto resolution : resolutionList) {
            DHLOGI("sink extend supported video formats: { format=%{public}d, width=%{public}d, height=%{public}d }",
                videoIter->first, resolution.width_, resolution.height_);
            AddConfigs(extendStreamConfigs, videoIter->first, resolution.width_, resolution.height_, VIDEO_FPS);
        }
    }
    extendStreamConfigs.push_back(EXTEND_EOF); // video eof

    if (!sinkPhotoProfiles_.empty()) {
        extendStreamConfigs.push_back(EXTEND_PHOTO); // photo
        std::map<int, std::vector<DCResolution>>::iterator photoIter;
        for (photoIter = sinkPhotoProfiles_.begin(); photoIter != sinkPhotoProfiles_.end(); ++photoIter) {
            std::vector<DCResolution> resolutionList = photoIter->second;
            for (auto resolution : resolutionList) {
                DHLOGI("sink extend supported photo formats: {format=%{public}d, width=%{public}d, height=%{public}d}",
                    photoIter->first, resolution.width_, resolution.height_);
                AddConfigs(extendStreamConfigs, photoIter->first, resolution.width_, resolution.height_, PHOTO_FPS);
            }
        }
        extendStreamConfigs.push_back(EXTEND_EOF); // photo eof
    }
}

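// Appends one extended stream entry as the seven-value tuple
// { format, width, height, fixed fps, min fps, max fps, EXTEND_EOF }.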
void DMetadataProcessor::AddConfigs(std::vector<int32_t> &sinkExtendStreamConfigs, int32_t format,
    int32_t width, int32_t height, int32_t fps)
{
    sinkExtendStreamConfigs.push_back(format);
    sinkExtendStreamConfigs.push_back(width);
    sinkExtendStreamConfigs.push_back(height);
    sinkExtendStreamConfigs.push_back(fps); // fixed fps
    sinkExtendStreamConfigs.push_back(fps); // min fps
    sinkExtendStreamConfigs.push_back(fps); // max fps
    sinkExtendStreamConfigs.push_back(EXTEND_EOF); // eof
}

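// Adds a metadata entry only if the tag is not already present in dCameraAbility_.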
DCamRetCode DMetadataProcessor::AddAbilityEntry(uint32_t tag, const void *data, size_t size)
{
    if (dCameraAbility_ == nullptr) {
        DHLOGE("Distributed camera ability is null.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    camera_metadata_item_t item;
    int ret = OHOS::Camera::FindCameraMetadataItem(dCameraAbility_->get(), tag, &item);
    if (ret != CAM_META_SUCCESS) {
        if (!dCameraAbility_->addEntry(tag, data, size)) {
            DHLOGE("Add tag %{public}u failed.", tag);
            return FAILED;
        }
    }
    return SUCCESS;
}

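// Updates a metadata entry only if the tag already exists in dCameraAbility_.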
DCamRetCode DMetadataProcessor::UpdateAbilityEntry(uint32_t tag, const void *data, size_t size)
{
    if (dCameraAbility_ == nullptr) {
        DHLOGE("Distributed camera ability is null.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    camera_metadata_item_t item;
    int ret = OHOS::Camera::FindCameraMetadataItem(dCameraAbility_->get(), tag, &item);
    if (ret == CAM_META_SUCCESS) {
        if (!dCameraAbility_->updateEntry(tag, data, size)) {
            DHLOGE("Update tag %{public}u failed.", tag);
            return FAILED;
        }
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::GetDCameraAbility(std::shared_ptr<CameraAbility> &ability)
{
    ability = dCameraAbility_;
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::SetMetadataResultMode(const ResultCallbackMode &mode)
{
    if (mode < ResultCallbackMode::PER_FRAME || mode > ResultCallbackMode::ON_CHANGED) {
        DHLOGE("Invalid result callback mode.");
        return DCamRetCode::INVALID_ARGUMENT;
    }
    metaResultMode_ = mode;
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::GetEnabledMetadataResults(std::vector<MetaType> &results)
{
    auto iter = enabledResultSet_.begin();
    while (iter != enabledResultSet_.end()) {
        results.push_back(*iter);
        iter++;
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::EnableMetadataResult(const std::vector<MetaType> &results)
{
    if (results.size() == 0) {
        DHLOGE("Enable metadata result list is empty.");
        return SUCCESS;
    }

    for (size_t i = 0; i < results.size(); i++) {
        auto iter = allResultSet_.find(results[i]);
        if (iter != allResultSet_.end()) {
            auto anoIter = enabledResultSet_.find(results[i]);
            if (anoIter == enabledResultSet_.end()) {
                enabledResultSet_.insert(results[i]);
            }
        } else {
            DHLOGE("Cannot find matching metatype.");
            return SUCCESS;
        }
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::DisableMetadataResult(const std::vector<MetaType> &results)
{
    if (results.size() == 0) {
        DHLOGE("Disable metadata result list is empty.");
        return SUCCESS;
    }

    for (size_t i = 0; i < results.size(); i++) {
        auto iter = allResultSet_.find(results[i]);
        if (iter != allResultSet_.end()) {
            auto anoIter = enabledResultSet_.find(results[i]);
            if (anoIter != enabledResultSet_.end()) {
                enabledResultSet_.erase(*iter);
            }
        } else {
            DHLOGE("Cannot find matching metatype.");
            return SUCCESS;
        }
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::ResetEnableResults()
{
    if (enabledResultSet_.size() < allResultSet_.size()) {
        for (auto result : allResultSet_) {
            enabledResultSet_.insert(result);
        }
    }
    return SUCCESS;
}

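// In PER_FRAME mode, reports the latest cached producer metadata for every frame timestamp;
// ON_CHANGED reporting is driven from SaveResultMetadata instead.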
void DMetadataProcessor::UpdateResultMetadata(const uint64_t &resultTimestamp)
{
    DHLOGD("DMetadataProcessor::UpdateResultMetadata result callback mode: %{public}d", metaResultMode_);
    if (metaResultMode_ != ResultCallbackMode::PER_FRAME) {
        return;
    }

    std::lock_guard<std::mutex> autoLock(producerMutex_);
    if (latestProducerMetadataResult_ == nullptr) {
        DHLOGD("DMetadataProcessor::UpdateResultMetadata latest producer metadata result is null");
        return;
    }

    UpdateAllResult(resultTimestamp);
}

void DMetadataProcessor::SetResultCallback(
    std::function<void(uint64_t, std::shared_ptr<OHOS::Camera::CameraMetadata>)> &resultCbk)
{
    resultCallback_ = resultCbk;
}

void DMetadataProcessor::UpdateAllResult(const uint64_t &resultTimestamp)
{
    uint32_t itemCap = OHOS::Camera::GetCameraMetadataItemCapacity(latestProducerMetadataResult_->get());
    uint32_t dataSize = OHOS::Camera::GetCameraMetadataDataSize(latestProducerMetadataResult_->get());
    DHLOGD("DMetadataProcessor::UpdateAllResult itemCapacity: %{public}u, dataSize: %{public}u", itemCap, dataSize);
    std::shared_ptr<OHOS::Camera::CameraMetadata> result =
        std::make_shared<OHOS::Camera::CameraMetadata>(itemCap, dataSize);
    int32_t ret = OHOS::Camera::CopyCameraMetadataItems(result->get(), latestProducerMetadataResult_->get());
    if (ret != CAM_META_SUCCESS) {
        DHLOGE("DMetadataProcessor::UpdateAllResult copy metadata item failed, ret: %{public}d", ret);
        return;
    }
    resultCallback_(resultTimestamp, result);
}

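// Compares each enabled tag between the latest producer and consumer metadata and reports a
// result containing only the entries whose count, type, or payload changed.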
void DMetadataProcessor::UpdateOnChanged(const uint64_t &resultTimestamp)
{
    bool needReturn = false;
    if (latestProducerMetadataResult_ == nullptr || latestConsumerMetadataResult_ == nullptr) {
        DHLOGD("DMetadataProcessor::UpdateOnChanged latest producer or consumer metadata result is null");
        return;
    }
    uint32_t itemCap = OHOS::Camera::GetCameraMetadataItemCapacity(latestProducerMetadataResult_->get());
    uint32_t dataSize = OHOS::Camera::GetCameraMetadataDataSize(latestProducerMetadataResult_->get());
    DHLOGD("DMetadataProcessor::UpdateOnChanged itemCapacity: %{public}u, dataSize: %{public}u", itemCap, dataSize);
    std::shared_ptr<OHOS::Camera::CameraMetadata> result =
        std::make_shared<OHOS::Camera::CameraMetadata>(itemCap, dataSize);
    DHLOGD("DMetadataProcessor::UpdateOnChanged enabledResultSet size: %{public}zu", enabledResultSet_.size());
    for (auto tag : enabledResultSet_) {
        DHLOGD("DMetadataProcessor::UpdateOnChanged camera device metadata tag: %{public}d", tag);
        camera_metadata_item_t item;
        camera_metadata_item_t anoItem;
        int ret1 = OHOS::Camera::FindCameraMetadataItem(latestProducerMetadataResult_->get(), tag, &item);
        int ret2 = OHOS::Camera::FindCameraMetadataItem(latestConsumerMetadataResult_->get(), tag, &anoItem);
        DHLOGD("DMetadataProcessor::UpdateOnChanged find metadata item ret: %{public}d, %{public}d", ret1, ret2);
        if (ret1 != CAM_META_SUCCESS) {
            continue;
        }

        if (ret2 == CAM_META_SUCCESS) {
            if ((item.count != anoItem.count) || (item.data_type != anoItem.data_type)) {
                needReturn = true;
                result->addEntry(tag, GetMetadataItemData(item), item.count);
                continue;
            }
            uint32_t size = GetDataSize(item.data_type);
            DHLOGD("DMetadataProcessor::UpdateOnChanged data size: %{public}u", size);
            for (uint32_t i = 0; i < (size * static_cast<uint32_t>(item.count)); i++) {
                if (*(item.data.u8 + i) != *(anoItem.data.u8 + i)) {
                    needReturn = true;
                    result->addEntry(tag, GetMetadataItemData(item), item.count);
                    break;
                }
            }
        } else {
            needReturn = true;
            result->addEntry(tag, GetMetadataItemData(item), item.count);
            continue;
        }
    }

    if (needReturn) {
        resultCallback_(resultTimestamp, result);
    }
}

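// Decodes a base64 result string from the sink, rotates it into the producer/consumer slots,
// and, in ON_CHANGED mode, reports either the whole result (first frame) or only the changed tags.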
DCamRetCode DMetadataProcessor::SaveResultMetadata(std::string resultStr)
{
    if (resultStr.empty()) {
        DHLOGE("Input result string is null.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    std::string metadataStr = Base64Decode(resultStr);
    std::lock_guard<std::mutex> autoLock(producerMutex_);
    latestConsumerMetadataResult_ = latestProducerMetadataResult_;
    latestProducerMetadataResult_ = OHOS::Camera::MetadataUtils::DecodeFromString(metadataStr);
    if (latestProducerMetadataResult_ == nullptr) {
        DHLOGE("Failed to decode metadata setting from string.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    if (!OHOS::Camera::GetCameraMetadataItemCount(latestProducerMetadataResult_->get())) {
        DHLOGE("Input result metadata item is empty.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    DHLOGD("DMetadataProcessor::SaveResultMetadata result callback mode: %{public}d", metaResultMode_);
    if (metaResultMode_ != ResultCallbackMode::ON_CHANGED) {
        return SUCCESS;
    }

    uint64_t resultTimestamp = GetCurrentLocalTimeStamp();
    if (latestConsumerMetadataResult_ == nullptr) {
        UpdateAllResult(resultTimestamp);
        return SUCCESS;
    }

    camera_metadata_item_entry_t* itemEntry = OHOS::Camera::GetMetadataItems(latestProducerMetadataResult_->get());
    if (itemEntry == nullptr) {
        DHLOGE("Failed to get metadata items from latest producer metadata result.");
        return DCamRetCode::INVALID_ARGUMENT;
    }
    uint32_t count = latestProducerMetadataResult_->get()->item_count;
    for (uint32_t i = 0; i < count; i++, itemEntry++) {
        enabledResultSet_.insert((MetaType)(itemEntry->item));
    }
    UpdateOnChanged(resultTimestamp);
    return SUCCESS;
}

void DMetadataProcessor::ConvertToCameraMetadata(common_metadata_header_t *&input,
    std::shared_ptr<OHOS::Camera::CameraMetadata> &output)
{
    CHECK_AND_RETURN_LOG(output == nullptr, "output is nullptr");
    auto ret = OHOS::Camera::CopyCameraMetadataItems(output->get(), input);
    if (ret != CAM_META_SUCCESS) {
        DHLOGE("Failed to copy the old metadata to new metadata.");
        output = nullptr;
    }
}

void DMetadataProcessor::ResizeMetadataHeader(common_metadata_header_t *&header,
    uint32_t itemCapacity, uint32_t dataCapacity)
{
    if (header) {
        OHOS::Camera::FreeCameraMetadataBuffer(header);
    }
    header = OHOS::Camera::AllocateCameraMetadataBuffer(itemCapacity, dataCapacity);
}

uint32_t DMetadataProcessor::GetDataSize(uint32_t type)
{
    uint32_t size = 0;
    if (type == META_TYPE_BYTE) {
        size = sizeof(uint8_t);
    } else if (type == META_TYPE_INT32) {
        size = sizeof(int32_t);
    } else if (type == META_TYPE_UINT32) {
        size = sizeof(uint32_t);
    } else if (type == META_TYPE_FLOAT) {
        size = sizeof(float);
    } else if (type == META_TYPE_INT64) {
        size = sizeof(int64_t);
    } else if (type == META_TYPE_DOUBLE) {
        size = sizeof(double);
    } else if (type == META_TYPE_RATIONAL) {
        size = sizeof(camera_rational_t);
    } else {
        size = 0;
    }
    return size;
}

void* DMetadataProcessor::GetMetadataItemData(const camera_metadata_item_t &item)
{
    switch (item.data_type) {
        case META_TYPE_BYTE: {
            return item.data.u8;
        }
        case META_TYPE_INT32: {
            return item.data.i32;
        }
        case META_TYPE_UINT32: {
            return item.data.ui32;
        }
        case META_TYPE_FLOAT: {
            return item.data.f;
        }
        case META_TYPE_INT64: {
            return item.data.i64;
        }
        case META_TYPE_DOUBLE: {
            return item.data.d;
        }
        case META_TYPE_RATIONAL: {
            return item.data.r;
        }
        default: {
            DHLOGE("DMetadataProcessor::GetMetadataItemData invalid data type: %{public}u", item.data_type);
            return nullptr;
        }
    }
}

cJSON* DMetadataProcessor::GetFormatObj(const std::string rootNode, cJSON* rootValue, std::string& formatStr)
{
    cJSON* nodeObj = cJSON_GetObjectItemCaseSensitive(rootValue, rootNode.c_str());
    if (nodeObj == nullptr || !cJSON_IsObject(nodeObj)) {
        return nullptr;
    }

    cJSON* resObj = cJSON_GetObjectItemCaseSensitive(nodeObj, "Resolution");
    if (resObj == nullptr || !cJSON_IsObject(resObj)) {
        return nullptr;
    }
    cJSON *formatObj = cJSON_GetObjectItemCaseSensitive(resObj, formatStr.c_str());
    if (formatObj == nullptr || !cJSON_IsArray(formatObj) || cJSON_GetArraySize(formatObj) == 0 ||
        static_cast<uint32_t>(cJSON_GetArraySize(formatObj)) > JSON_ARRAY_MAX_SIZE) {
        return nullptr;
    }
    return formatObj;
}

void DMetadataProcessor::GetEachNodeSupportedResolution(std::vector<int>& formats, const std::string rootNode,
    std::map<int, std::vector<DCResolution>>& supportedFormats, cJSON* rootValue)
{
    for (const auto &format : formats) {
        std::string formatStr = std::to_string(format);
        cJSON *formatObj = GetFormatObj(rootNode, rootValue, formatStr);
        if (formatObj == nullptr) {
            DHLOGE("Resolution or %{public}s error.", formatStr.c_str());
            continue;
        }
        GetNodeSupportedResolution(format, rootNode, supportedFormats, rootValue);
    }
}

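// Parses the "width*height" resolution strings for one format under the given node, drops
// entries above the supported photo/preview limits, records the sorted list, and tracks the
// maximum photo and preview resolutions.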
void DMetadataProcessor::GetNodeSupportedResolution(int format, const std::string rootNode,
    std::map<int, std::vector<DCResolution>>& supportedFormats, cJSON* rootValue)
{
    std::vector<DCResolution> resolutionVec;
    std::string formatStr = std::to_string(format);
    cJSON* formatObj = GetFormatObj(rootNode, rootValue, formatStr);
    if (formatObj == nullptr) {
        return;
    }
    int32_t size = cJSON_GetArraySize(formatObj);
    for (int32_t i = 0; i < size; i++) {
        cJSON *item = cJSON_GetArrayItem(formatObj, i);
        if (item == nullptr || !cJSON_IsString(item)) {
            DHLOGE("Resolution %{public}s %{public}d is not a string.", formatStr.c_str(), i);
            continue;
        }
        std::string resoStr = std::string(item->valuestring);
        std::vector<std::string> reso;
        SplitString(resoStr, reso, STAR_SEPARATOR);
        if (reso.size() != SIZE_FMT_LEN) {
            continue;
        }
        uint32_t width = static_cast<uint32_t>(std::atoi(reso[0].c_str()));
        uint32_t height = static_cast<uint32_t>(std::atoi(reso[1].c_str()));
        if (height == 0 || width == 0 || ((rootNode == "Photo") &&
            ((width * height) > (MAX_SUPPORT_PHOTO_WIDTH * MAX_SUPPORT_PHOTO_HEIGHT))) ||
            ((rootNode != "Photo") && (width > MAX_SUPPORT_PREVIEW_WIDTH || height > MAX_SUPPORT_PREVIEW_HEIGHT))) {
            continue;
        }
        DCResolution resolution(width, height);
        resolutionVec.push_back(resolution);
    }
    if (!resolutionVec.empty()) {
        std::sort(resolutionVec.begin(), resolutionVec.end());
        supportedFormats[format] = resolutionVec;
        if ((rootNode != "Photo") && (maxPreviewResolution_ < resolutionVec[0])) {
            maxPreviewResolution_.width_ = resolutionVec[0].width_;
            maxPreviewResolution_.height_ = resolutionVec[0].height_;
        }
        if ((rootNode == "Photo") && (maxPhotoResolution_ < resolutionVec[0])) {
            maxPhotoResolution_.width_ = resolutionVec[0].width_;
            maxPhotoResolution_.height_ = resolutionVec[0].height_;
        }
        StoreSinkAndSrcConfig(format, rootNode, resolutionVec);
    }
}

void DMetadataProcessor::StoreSinkAndSrcConfig(int format, const std::string rootNode,
    std::vector<DCResolution> &resolutionVec)
{
    if (rootNode == "Photo") {
        sinkPhotoProfiles_[format] = resolutionVec;
    } else if (rootNode == "Preview") {
        sinkPreviewProfiles_[format] = resolutionVec;
    } else if (rootNode == "Video") {
        sinkVideoProfiles_[format] = resolutionVec;
    }
}

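// Parses the "Photo", "Preview", and "Video" nodes of the ability JSON and returns a map from
// output format to its supported resolutions.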
std::map<int, std::vector<DCResolution>> DMetadataProcessor::GetDCameraSupportedFormats(
    const std::string &abilityInfo)
{
    std::map<int, std::vector<DCResolution>> supportedFormats;
    cJSON *rootValue = cJSON_Parse(abilityInfo.c_str());
    CHECK_NULL_RETURN_LOG(rootValue, supportedFormats, "The sinkAbilityInfo is null.");
    CHECK_OBJECT_FREE_RETURN(rootValue, supportedFormats, "The sinkAbilityInfo is not object.");
    ParsePhotoFormats(rootValue, supportedFormats);
    ParsePreviewFormats(rootValue, supportedFormats);
    ParseVideoFormats(rootValue, supportedFormats);
    cJSON_Delete(rootValue);
    return supportedFormats;
}

void DMetadataProcessor::ParsePhotoFormats(cJSON* rootValue,
    std::map<int, std::vector<DCResolution>>& supportedFormats)
{
    cJSON *photoObj = cJSON_GetObjectItemCaseSensitive(rootValue, "Photo");
    if (photoObj == nullptr || !cJSON_IsObject(photoObj)) {
        DHLOGE("Input Photo info is null.");
        return;
    }

    cJSON *formatObj = cJSON_GetObjectItemCaseSensitive(photoObj, "OutputFormat");
    if (formatObj == nullptr || !cJSON_IsArray(formatObj) || cJSON_GetArraySize(formatObj) == 0 ||
        static_cast<uint32_t>(cJSON_GetArraySize(formatObj)) > JSON_ARRAY_MAX_SIZE) {
        DHLOGE("Photo output format error.");
        return;
    }

    std::vector<int> photoFormats;
    int32_t size = cJSON_GetArraySize(formatObj);
    for (int32_t i = 0; i < size; i++) {
        cJSON *item = cJSON_GetArrayItem(formatObj, i);
        if (item != nullptr && cJSON_IsNumber(item)) {
            photoFormats.push_back(item->valueint);
        }
    }
    sinkPhotoFormats_ = photoFormats;
    GetEachNodeSupportedResolution(photoFormats, "Photo", supportedFormats, rootValue);
}

void DMetadataProcessor::ParsePreviewFormats(cJSON* rootValue,
    std::map<int, std::vector<DCResolution>>& supportedFormats)
{
    cJSON *previewObj = cJSON_GetObjectItemCaseSensitive(rootValue, "Preview");
    if (previewObj == nullptr || !cJSON_IsObject(previewObj)) {
        DHLOGE("Preview error.");
        return;
    }
    cJSON *formatObj = cJSON_GetObjectItemCaseSensitive(previewObj, "OutputFormat");
    if (formatObj == nullptr || !cJSON_IsArray(formatObj) || cJSON_GetArraySize(formatObj) == 0 ||
        static_cast<uint32_t>(cJSON_GetArraySize(formatObj)) > JSON_ARRAY_MAX_SIZE) {
        DHLOGE("Preview output format error.");
        return;
    }
    std::vector<int> previewFormats;
    int32_t size = cJSON_GetArraySize(formatObj);
    for (int32_t i = 0; i < size; i++) {
        cJSON *item = cJSON_GetArrayItem(formatObj, i);
        if (item != nullptr && cJSON_IsNumber(item)) {
            previewFormats.push_back(item->valueint);
        }
    }
    GetEachNodeSupportedResolution(previewFormats, "Preview", supportedFormats, rootValue);
}

void DMetadataProcessor::ParseVideoFormats(cJSON* rootValue,
    std::map<int, std::vector<DCResolution>>& supportedFormats)
{
    cJSON *videoObj = cJSON_GetObjectItemCaseSensitive(rootValue, "Video");
    if (videoObj == nullptr || !cJSON_IsObject(videoObj)) {
        DHLOGE("Video error.");
        return;
    }
    cJSON *formatObj = cJSON_GetObjectItemCaseSensitive(videoObj, "OutputFormat");
    if (formatObj == nullptr || !cJSON_IsArray(formatObj) || cJSON_GetArraySize(formatObj) == 0 ||
        static_cast<uint32_t>(cJSON_GetArraySize(formatObj)) > JSON_ARRAY_MAX_SIZE) {
        DHLOGE("Video output format error.");
        return;
    }
    std::vector<int> videoFormats;
    int32_t size = cJSON_GetArraySize(formatObj);
    for (int32_t i = 0; i < size; i++) {
        cJSON *item = cJSON_GetArrayItem(formatObj, i);
        if (item != nullptr && cJSON_IsNumber(item)) {
            videoFormats.push_back(item->valueint);
        }
    }
    GetEachNodeSupportedResolution(videoFormats, "Video", supportedFormats, rootValue);
}

void DMetadataProcessor::PrintDCameraMetadata(const common_metadata_header_t *metadata)
{
    if (metadata == nullptr) {
        DHLOGE("Failed to print metadata, input metadata is null.");
        return;
    }

    uint32_t tagCount = OHOS::Camera::GetCameraMetadataItemCount(metadata);
    DHLOGD("DMetadataProcessor::PrintDCameraMetadata, input metadata item count = %{public}d.", tagCount);
    for (uint32_t i = 0; i < tagCount; i++) {
        camera_metadata_item_t item;
        int ret = OHOS::Camera::GetCameraMetadataItem(metadata, i, &item);
        if (ret != 0) {
            continue;
        }

        const char *name = OHOS::Camera::GetCameraMetadataItemName(item.item);
        if (item.data_type == META_TYPE_BYTE) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%d", item.index, name, (uint8_t)(item.data.u8[k]));
            }
        } else if (item.data_type == META_TYPE_INT32) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%d", item.index, name, (int32_t)(item.data.i32[k]));
            }
        } else if (item.data_type == META_TYPE_UINT32) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%d", item.index, name, (uint32_t)(item.data.ui32[k]));
            }
        } else if (item.data_type == META_TYPE_FLOAT) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%f", item.index, name, (float)(item.data.f[k]));
            }
        } else if (item.data_type == META_TYPE_INT64) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%lld", item.index, name, (long long)(item.data.i64[k]));
            }
        } else if (item.data_type == META_TYPE_DOUBLE) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%lf", item.index, name, (double)(item.data.d[k]));
            }
        } else {
            DHLOGI("tag index:%d, name:%s", item.index, name);
        }
    }
}
} // namespace DistributedHardware
} // namespace OHOS