/*
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dmetadata_processor.h"

#include "dbuffer_manager.h"
#include "dcamera.h"
#include "distributed_hardware_log.h"
#include "cJSON.h"
#include "metadata_utils.h"

namespace OHOS {
namespace DistributedHardware {
DCamRetCode DMetadataProcessor::InitDCameraAbility(const std::string &sinkAbilityInfo)
{
    cJSON *rootValue = cJSON_Parse(sinkAbilityInfo.c_str());
    CHECK_NULL_RETURN_LOG(rootValue, FAILED, "The sinkAbilityInfo is null.");
    CHECK_OBJECT_FREE_RETURN(rootValue, FAILED, "The sinkAbilityInfo is not object.");
    cJSON *metaObj = cJSON_GetObjectItemCaseSensitive(rootValue, "MetaData");
    if (metaObj == nullptr || !cJSON_IsString(metaObj) || (metaObj->valuestring == nullptr)) {
        cJSON_Delete(rootValue);
        return FAILED;
    }
    std::string metadataStr = std::string(metaObj->valuestring);
    if (!metadataStr.empty()) {
        std::hash<std::string> h;
        DHLOGI("Decode distributed camera metadata from base64, hash: %{public}zu, length: %{public}zu",
            h(metadataStr), metadataStr.length());
        std::string decodeString = Base64Decode(metadataStr);
        DHLOGI("Decode distributed camera metadata from string, hash: %{public}zu, length: %{public}zu",
            h(decodeString), decodeString.length());
        dCameraAbility_ = OHOS::Camera::MetadataUtils::DecodeFromString(decodeString);
        DHLOGI("Decode distributed camera metadata from string success.");
    }

    if (dCameraAbility_ == nullptr) {
        DHLOGE("Metadata is null in ability set or failed to decode metadata ability from string.");
        dCameraAbility_ = std::make_shared<CameraAbility>(DEFAULT_ENTRY_CAPACITY, DEFAULT_DATA_CAPACITY);
    }

    if (OHOS::Camera::GetCameraMetadataItemCount(dCameraAbility_->get()) <= 0) {
        DCamRetCode ret = InitDCameraDefaultAbilityKeys(sinkAbilityInfo);
        if (ret != SUCCESS) {
            DHLOGE("Init distributed camera default ability keys failed.");
            dCameraAbility_ = nullptr;
            cJSON_Delete(rootValue);
            return ret;
        }
    }
    DCamRetCode ret = InitDCameraOutputAbilityKeys(sinkAbilityInfo);
    if (ret != SUCCESS) {
        DHLOGE("Init distributed camera output ability keys failed.");
        dCameraAbility_ = nullptr;
        cJSON_Delete(rootValue);
        return ret;
    }

    camera_metadata_item_entry_t* itemEntry = OHOS::Camera::GetMetadataItems(dCameraAbility_->get());
    uint32_t count = dCameraAbility_->get()->item_count;
    for (uint32_t i = 0; i < count; i++, itemEntry++) {
        allResultSet_.insert((MetaType)(itemEntry->item));
    }
    cJSON_Delete(rootValue);
    return SUCCESS;
}

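/*
 * Populate baseline ability entries (camera type, 3A modes, AE compensation range/step,
 * exposure/focus modes and default FPS ranges). AddAbilityEntry() only adds a tag when
 * it is not already present, so values decoded from the sink metadata are not
 * overwritten here.
 */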
void DMetadataProcessor::InitDcameraBaseAbility()
{
    const uint8_t cameraType = OHOS_CAMERA_TYPE_LOGICAL;
    AddAbilityEntry(OHOS_ABILITY_CAMERA_TYPE, &cameraType, 1);

    const int64_t exposureTime = 0xFFFFFFFFFFFFFFFF;
    AddAbilityEntry(OHOS_SENSOR_EXPOSURE_TIME, &exposureTime, 1);

    const float correctionGain = 0.0;
    AddAbilityEntry(OHOS_SENSOR_COLOR_CORRECTION_GAINS, &correctionGain, 1);

    const uint8_t faceDetectMode = OHOS_CAMERA_FACE_DETECT_MODE_OFF;
    AddAbilityEntry(OHOS_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

    const uint8_t histogramMode = OHOS_CAMERA_HISTOGRAM_MODE_OFF;
    AddAbilityEntry(OHOS_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);

    const uint8_t aeAntibandingMode = OHOS_CAMERA_AE_ANTIBANDING_MODE_OFF;
    AddAbilityEntry(OHOS_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);

    int32_t aeExposureCompensation = 0xFFFFFFFF;
    AddAbilityEntry(OHOS_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExposureCompensation, 1);

    const uint8_t aeLock = OHOS_CAMERA_AE_LOCK_OFF;
    AddAbilityEntry(OHOS_CONTROL_AE_LOCK, &aeLock, 1);

    const uint8_t aeMode = OHOS_CAMERA_AE_MODE_OFF;
    AddAbilityEntry(OHOS_CONTROL_AE_MODE, &aeMode, 1);

    const uint8_t afMode = OHOS_CAMERA_AF_MODE_OFF;
    AddAbilityEntry(OHOS_CONTROL_AF_MODE, &afMode, 1);

    const uint8_t awbLock = OHOS_CAMERA_AWB_LOCK_OFF;
    AddAbilityEntry(OHOS_CONTROL_AWB_LOCK, &awbLock, 1);

    const uint8_t awbMode = OHOS_CAMERA_AWB_MODE_OFF;
    AddAbilityEntry(OHOS_CONTROL_AWB_MODE, &awbMode, 1);

    const uint8_t aeAntibandingModes = OHOS_CAMERA_AE_ANTIBANDING_MODE_AUTO;
    AddAbilityEntry(OHOS_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, &aeAntibandingModes, 1);

    const uint8_t aeAvailableModes = OHOS_CAMERA_AE_MODE_ON;
    AddAbilityEntry(OHOS_CONTROL_AE_AVAILABLE_MODES, &aeAvailableModes, 1);

    const int32_t compensationRange[] = { 0, 0 };
    AddAbilityEntry(OHOS_ABILITY_AE_COMPENSATION_RANGE, compensationRange,
        (sizeof(compensationRange) / sizeof(compensationRange[0])));

    const camera_rational_t compensationStep[] = { { 0, 1 } };
    AddAbilityEntry(OHOS_ABILITY_AE_COMPENSATION_STEP, compensationStep,
        (sizeof(compensationStep) / sizeof(compensationStep[0])));

    const uint8_t afAvailableModes[] = { OHOS_CAMERA_AF_MODE_AUTO, OHOS_CAMERA_AF_MODE_OFF };
    AddAbilityEntry(OHOS_CONTROL_AF_AVAILABLE_MODES, afAvailableModes,
        (sizeof(afAvailableModes) / sizeof(afAvailableModes[0])));

    const uint8_t awbAvailableModes = OHOS_CAMERA_AWB_MODE_AUTO;
    AddAbilityEntry(OHOS_CONTROL_AWB_AVAILABLE_MODES, &awbAvailableModes, 1);

    const uint8_t deviceExposureMode = OHOS_CAMERA_EXPOSURE_MODE_CONTINUOUS_AUTO;
    AddAbilityEntry(OHOS_ABILITY_DEVICE_AVAILABLE_EXPOSUREMODES, &deviceExposureMode, 1);

    const uint8_t controlExposureMode = OHOS_CAMERA_EXPOSURE_MODE_CONTINUOUS_AUTO;
    AddAbilityEntry(OHOS_CONTROL_EXPOSUREMODE, &controlExposureMode, 1);

    const uint8_t deviceFocusModes = OHOS_CAMERA_FOCUS_MODE_AUTO;
    AddAbilityEntry(OHOS_ABILITY_DEVICE_AVAILABLE_FOCUSMODES, &deviceFocusModes, 1);
    SetFpsRanges();
}

void DMetadataProcessor::SetFpsRanges()
{
    std::vector<int32_t> fpsRanges;
    fpsRanges.push_back(MIN_SUPPORT_DEFAULT_FPS);
    fpsRanges.push_back(MAX_SUPPORT_DEFAULT_FPS);
    AddAbilityEntry(OHOS_CONTROL_AE_TARGET_FPS_RANGE, fpsRanges.data(), fpsRanges.size());
    AddAbilityEntry(OHOS_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(), fpsRanges.size());
}

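/*
 * Parse the protocol version ("ProtocolVer") and camera position ("Position") fields
 * from the sink ability JSON. Both fields must be present as strings; otherwise the
 * default-ability initialization fails.
 */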
bool DMetadataProcessor::GetInfoFromJson(const std::string& sinkAbilityInfo)
{
    cJSON *rootValue = cJSON_Parse(sinkAbilityInfo.c_str());
    CHECK_NULL_RETURN_LOG(rootValue, false, "The sinkAbilityInfo is null.");
    CHECK_OBJECT_FREE_RETURN(rootValue, false, "The sinkAbilityInfo is not object.");
    cJSON *verObj = cJSON_GetObjectItemCaseSensitive(rootValue, "ProtocolVer");
    if (verObj == nullptr || !cJSON_IsString(verObj) || (verObj->valuestring == nullptr)) {
        cJSON_Delete(rootValue);
        return false;
    }
    protocolVersion_ = std::string(verObj->valuestring);

    cJSON *positionObj = cJSON_GetObjectItemCaseSensitive(rootValue, "Position");
    if (positionObj == nullptr || !cJSON_IsString(positionObj) || (positionObj->valuestring == nullptr)) {
        cJSON_Delete(rootValue);
        return false;
    }
    dCameraPosition_ = std::string(positionObj->valuestring);
    cJSON_Delete(rootValue);
    return true;
}

DCamRetCode DMetadataProcessor::InitDCameraDefaultAbilityKeys(const std::string &sinkAbilityInfo)
{
    if (!GetInfoFromJson(sinkAbilityInfo)) {
        return FAILED;
    }
    if (dCameraPosition_ == "BACK") {
        const uint8_t position = OHOS_CAMERA_POSITION_BACK;
        AddAbilityEntry(OHOS_ABILITY_CAMERA_POSITION, &position, 1);
    } else if (dCameraPosition_ == "FRONT") {
        const uint8_t position = OHOS_CAMERA_POSITION_FRONT;
        AddAbilityEntry(OHOS_ABILITY_CAMERA_POSITION, &position, 1);
    } else {
        const uint8_t position = OHOS_CAMERA_POSITION_OTHER;
        AddAbilityEntry(OHOS_ABILITY_CAMERA_POSITION, &position, 1);
    }

    InitDcameraBaseAbility();

    const uint8_t controlFocusMode = OHOS_CAMERA_FOCUS_MODE_AUTO;
    AddAbilityEntry(OHOS_CONTROL_FOCUSMODE, &controlFocusMode, 1);

    const uint8_t deviceFlashModes = OHOS_CAMERA_FLASH_MODE_AUTO;
    AddAbilityEntry(OHOS_ABILITY_DEVICE_AVAILABLE_FLASHMODES, &deviceFlashModes, 1);

    const uint8_t controlFlashMode = OHOS_CAMERA_FLASH_MODE_CLOSE;
    AddAbilityEntry(OHOS_CONTROL_FLASHMODE, &controlFlashMode, 1);

    float zoomRatioRange[1] = {1.0};
    AddAbilityEntry(OHOS_ABILITY_ZOOM_RATIO_RANGE, zoomRatioRange,
        (sizeof(zoomRatioRange) / sizeof(zoomRatioRange[0])));

    const float zoomRatio = 1.0;
    AddAbilityEntry(OHOS_CONTROL_ZOOM_RATIO, &zoomRatio, 1);

    int32_t activeArraySize[] = {0, 0, static_cast<int32_t>(maxPreviewResolution_.width_),
        static_cast<int32_t>(maxPreviewResolution_.height_)};
    AddAbilityEntry(OHOS_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArraySize,
        (sizeof(activeArraySize) / sizeof(activeArraySize[0])));

    int32_t pixelArraySize[] = {
        static_cast<int32_t>(maxPreviewResolution_.width_), static_cast<int32_t>(maxPreviewResolution_.height_)
    };
    AddAbilityEntry(OHOS_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArraySize,
        (sizeof(pixelArraySize) / sizeof(pixelArraySize[0])));

    const int32_t jpegThumbnailSizes[] = {0, 0, DEGREE_240, DEGREE_180};
    AddAbilityEntry(OHOS_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegThumbnailSizes,
        (sizeof(jpegThumbnailSizes) / sizeof(jpegThumbnailSizes[0])));
    return SUCCESS;
}

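/*
 * Build the stream configuration tags when the sink ability JSON carries no mode
 * array: the supported format/resolution list is registered once per mode index in
 * [0, ADD_MODE) to match the camera framework's mode-based extend-configuration
 * layout.
 */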
void DMetadataProcessor::InitOutputAbilityWithoutMode(const std::string &sinkAbilityInfo)
{
    DHLOGI("InitOutputAbilityWithoutMode enter.");
    std::map<int, std::vector<DCResolution>> supportedFormats = GetDCameraSupportedFormats(sinkAbilityInfo);

    std::vector<int32_t> streamConfigs;
    std::vector<int32_t> extendStreamConfigs;
    for (uint32_t i = 0; i < ADD_MODE; i++) { // Compatible with the camera framework's mode handling
        camera_metadata_item_t item;
        int32_t ret = OHOS::Camera::FindCameraMetadataItem(dCameraAbility_->get(),
            OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS, &item);
        if (ret == CAM_META_SUCCESS && item.count != 0) {
            extendStreamConfigs.push_back(i);
        }
        InitBasicConfigTag(supportedFormats, streamConfigs);
        InitExtendConfigTag(supportedFormats, extendStreamConfigs);
        extendStreamConfigs.push_back(EXTEND_EOF); // mode eof
    }

    UpdateAbilityTag(streamConfigs, extendStreamConfigs);
}

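/*
 * Build the basic and extend stream configuration tags. If the sink reports a mode
 * array (CAMERA_SUPPORT_MODE), each mode's ability object is parsed in turn and the
 * mode id is written ahead of its extend configuration block (when the extend
 * configuration tag is already present); otherwise the mode-less path above is used.
 */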
DCamRetCode DMetadataProcessor::InitDCameraOutputAbilityKeys(const std::string &sinkAbilityInfo)
{
    cJSON *rootValue = cJSON_Parse(sinkAbilityInfo.c_str());
    CHECK_NULL_RETURN_LOG(rootValue, FAILED, "The sinkAbilityInfo is null.");
    CHECK_OBJECT_FREE_RETURN(rootValue, FAILED, "The sinkAbilityInfo is not object.");

    cJSON *modeArray = cJSON_GetObjectItemCaseSensitive(rootValue, CAMERA_SUPPORT_MODE.c_str());
    if (modeArray == nullptr || !cJSON_IsArray(modeArray)) {
        InitOutputAbilityWithoutMode(sinkAbilityInfo);
        cJSON_Delete(rootValue);
        return SUCCESS;
    }
    CHECK_AND_FREE_RETURN_RET_LOG(cJSON_GetArraySize(modeArray) == 0 || static_cast<uint32_t>(
        cJSON_GetArraySize(modeArray)) > JSON_ARRAY_MAX_SIZE, FAILED, rootValue, "modeArray size error.");

    std::vector<std::string> keys;
    int32_t arraySize = cJSON_GetArraySize(modeArray);
    for (int32_t i = 0; i < arraySize; ++i) {
        cJSON *number = cJSON_GetArrayItem(modeArray, i);
        if (number != nullptr && cJSON_IsNumber(number)) {
            keys.push_back(std::to_string(number->valueint));
        }
    }
    std::vector<int32_t> streamConfigs;
    std::vector<int32_t> extendStreamConfigs;
    for (const std::string &key : keys) {
        cJSON *value = cJSON_GetObjectItem(rootValue, key.c_str());
        CHECK_AND_FREE_RETURN_RET_LOG(value == nullptr || !cJSON_IsObject(value), FAILED, rootValue, "mode get error.");

        char *jsonValue = cJSON_Print(value);
        CHECK_AND_FREE_RETURN_RET_LOG(jsonValue == nullptr, FAILED, rootValue, "print mode value failed.");
        std::string format(jsonValue);
        DHLOGI("the current mode :%{public}s. value :%{public}s", key.c_str(), format.c_str());
        std::map<int, std::vector<DCResolution>> supportedFormats = GetDCameraSupportedFormats(format);

        camera_metadata_item_t item;
        int32_t ret = OHOS::Camera::FindCameraMetadataItem(dCameraAbility_->get(),
            OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS, &item);
        if (ret == CAM_META_SUCCESS && item.count != 0) {
            extendStreamConfigs.push_back(std::stoi(key)); // mode
        }

        InitBasicConfigTag(supportedFormats, streamConfigs);
        InitExtendConfigTag(supportedFormats, extendStreamConfigs);
        extendStreamConfigs.push_back(EXTEND_EOF); // mode eof

        cJSON_free(jsonValue);
        sinkPhotoProfiles_.clear();
        sinkPreviewProfiles_.clear();
        sinkVideoProfiles_.clear();
    }
    UpdateAbilityTag(streamConfigs, extendStreamConfigs);

    cJSON_Delete(rootValue);
    return SUCCESS;
}

void DMetadataProcessor::UpdateAbilityTag(std::vector<int32_t> &streamConfigs,
    std::vector<int32_t> &extendStreamConfigs)
{
    UpdateAbilityEntry(OHOS_ABILITY_STREAM_AVAILABLE_BASIC_CONFIGURATIONS, streamConfigs.data(),
        streamConfigs.size());

    UpdateAbilityEntry(OHOS_ABILITY_STREAM_AVAILABLE_EXTEND_CONFIGURATIONS, extendStreamConfigs.data(),
        extendStreamConfigs.size());

    UpdateAbilityEntry(OHOS_SENSOR_INFO_MAX_FRAME_DURATION, &MAX_FRAME_DURATION, 1);

    const int32_t jpegMaxSize = maxPhotoResolution_.width_ * maxPhotoResolution_.height_;
    UpdateAbilityEntry(OHOS_JPEG_MAX_SIZE, &jpegMaxSize, 1);

    const uint8_t connectionType = OHOS_CAMERA_CONNECTION_TYPE_REMOTE;
    UpdateAbilityEntry(OHOS_ABILITY_CAMERA_CONNECTION_TYPE, &connectionType, 1);
}

void DMetadataProcessor::InitBasicConfigTag(std::map<int, std::vector<DCResolution>> &supportedFormats,
    std::vector<int32_t> &streamConfigs)
{
    std::map<int, std::vector<DCResolution>>::iterator iter;
    for (iter = supportedFormats.begin(); iter != supportedFormats.end(); ++iter) {
        std::vector<DCResolution> resolutionList = iter->second;
        for (auto resolution : resolutionList) {
            DHLOGI("DMetadataProcessor::sink supported formats: { format=%{public}d, width=%{public}d, height="
                "%{public}d }", iter->first, resolution.width_, resolution.height_);
            streamConfigs.push_back(iter->first);
            streamConfigs.push_back(resolution.width_);
            streamConfigs.push_back(resolution.height_);
        }
    }
}

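/*
 * Extend stream configurations are written as a flat int32 sequence. For each stream
 * type (EXTEND_PREVIEW, EXTEND_VIDEO and, when photo profiles exist, EXTEND_PHOTO) the
 * layout is:
 *   <stream type>, { format, width, height, fixed fps, min fps, max fps, EXTEND_EOF }..., EXTEND_EOF
 * where each braced group is produced by AddConfigs() below.
 */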
void DMetadataProcessor::InitExtendConfigTag(std::map<int, std::vector<DCResolution>> &supportedFormats,
    std::vector<int32_t> &extendStreamConfigs)
{
    extendStreamConfigs.push_back(EXTEND_PREVIEW); // preview
    std::map<int, std::vector<DCResolution>>::iterator previewIter;
    for (previewIter = sinkPreviewProfiles_.begin(); previewIter != sinkPreviewProfiles_.end(); ++previewIter) {
        std::vector<DCResolution> resolutionList = previewIter->second;
        for (auto resolution : resolutionList) {
            DHLOGI("sink extend supported preview formats: { format=%{public}d, width=%{public}d, height=%{public}d }",
                previewIter->first, resolution.width_, resolution.height_);
            AddConfigs(extendStreamConfigs, previewIter->first, resolution.width_, resolution.height_, PREVIEW_FPS);
        }
    }
    extendStreamConfigs.push_back(EXTEND_EOF); // preview eof

    extendStreamConfigs.push_back(EXTEND_VIDEO); // video
    std::map<int, std::vector<DCResolution>>::iterator videoIter;
    for (videoIter = sinkVideoProfiles_.begin(); videoIter != sinkVideoProfiles_.end(); ++videoIter) {
        std::vector<DCResolution> resolutionList = videoIter->second;
        for (auto resolution : resolutionList) {
            DHLOGI("sink extend supported video formats: { format=%{public}d, width=%{public}d, height=%{public}d }",
                videoIter->first, resolution.width_, resolution.height_);
            AddConfigs(extendStreamConfigs, videoIter->first, resolution.width_, resolution.height_, VIDEO_FPS);
        }
    }
    extendStreamConfigs.push_back(EXTEND_EOF); // video eof

    if (!sinkPhotoProfiles_.empty()) {
        extendStreamConfigs.push_back(EXTEND_PHOTO); // photo
        std::map<int, std::vector<DCResolution>>::iterator photoIter;
        for (photoIter = sinkPhotoProfiles_.begin(); photoIter != sinkPhotoProfiles_.end(); ++photoIter) {
            std::vector<DCResolution> resolutionList = photoIter->second;
            for (auto resolution : resolutionList) {
                DHLOGI("sink extend supported photo formats: {format=%{public}d, width=%{public}d, height=%{public}d}",
                    photoIter->first, resolution.width_, resolution.height_);
                AddConfigs(extendStreamConfigs, photoIter->first, resolution.width_, resolution.height_, PHOTO_FPS);
            }
        }
        extendStreamConfigs.push_back(EXTEND_EOF); // photo eof
    }
}

void DMetadataProcessor::AddConfigs(std::vector<int32_t> &sinkExtendStreamConfigs, int32_t format,
    int32_t width, int32_t height, int32_t fps)
{
    sinkExtendStreamConfigs.push_back(format);
    sinkExtendStreamConfigs.push_back(width);
    sinkExtendStreamConfigs.push_back(height);
    sinkExtendStreamConfigs.push_back(fps); // fixed fps
    sinkExtendStreamConfigs.push_back(fps); // min fps
    sinkExtendStreamConfigs.push_back(fps); // max fps
    sinkExtendStreamConfigs.push_back(EXTEND_EOF); // eof
}

DCamRetCode DMetadataProcessor::AddAbilityEntry(uint32_t tag, const void *data, size_t size)
{
    if (dCameraAbility_ == nullptr) {
        DHLOGE("Distributed camera ability is null.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    camera_metadata_item_t item;
    int ret = OHOS::Camera::FindCameraMetadataItem(dCameraAbility_->get(), tag, &item);
    if (ret != CAM_META_SUCCESS) {
        if (!dCameraAbility_->addEntry(tag, data, size)) {
            DHLOGE("Add tag %{public}u failed.", tag);
            return FAILED;
        }
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::UpdateAbilityEntry(uint32_t tag, const void *data, size_t size)
{
    if (dCameraAbility_ == nullptr) {
        DHLOGE("Distributed camera ability is null.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    camera_metadata_item_t item;
    int ret = OHOS::Camera::FindCameraMetadataItem(dCameraAbility_->get(), tag, &item);
    if (ret == CAM_META_SUCCESS) {
        if (!dCameraAbility_->updateEntry(tag, data, size)) {
            DHLOGE("Update tag %{public}u failed.", tag);
            return FAILED;
        }
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::GetDCameraAbility(std::shared_ptr<CameraAbility> &ability)
{
    ability = dCameraAbility_;
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::SetMetadataResultMode(const ResultCallbackMode &mode)
{
    if (mode < ResultCallbackMode::PER_FRAME || mode > ResultCallbackMode::ON_CHANGED) {
        DHLOGE("Invalid result callback mode.");
        return DCamRetCode::INVALID_ARGUMENT;
    }
    metaResultMode_ = mode;
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::GetEnabledMetadataResults(std::vector<MetaType> &results)
{
    auto iter = enabledResultSet_.begin();
    while (iter != enabledResultSet_.end()) {
        results.push_back(*iter);
        iter++;
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::EnableMetadataResult(const std::vector<MetaType> &results)
{
    if (results.size() == 0) {
        DHLOGE("Enable metadata result list is empty.");
        return SUCCESS;
    }

    for (size_t i = 0; i < results.size(); i++) {
        auto iter = allResultSet_.find(results[i]);
        if (iter != allResultSet_.end()) {
            auto anoIter = enabledResultSet_.find(results[i]);
            if (anoIter == enabledResultSet_.end()) {
                enabledResultSet_.insert(results[i]);
            }
        } else {
            DHLOGE("Cannot find matching metatype.");
            return SUCCESS;
        }
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::DisableMetadataResult(const std::vector<MetaType> &results)
{
    if (results.size() == 0) {
        DHLOGE("Disable metadata result list is empty.");
        return SUCCESS;
    }

    for (size_t i = 0; i < results.size(); i++) {
        auto iter = allResultSet_.find(results[i]);
        if (iter != allResultSet_.end()) {
            auto anoIter = enabledResultSet_.find(results[i]);
            if (anoIter != enabledResultSet_.end()) {
                enabledResultSet_.erase(*iter);
            }
        } else {
            DHLOGE("Cannot find matching metatype.");
            return SUCCESS;
        }
    }
    return SUCCESS;
}

DCamRetCode DMetadataProcessor::ResetEnableResults()
{
    if (enabledResultSet_.size() < allResultSet_.size()) {
        for (auto result : allResultSet_) {
            enabledResultSet_.insert(result);
        }
    }
    return SUCCESS;
}

void DMetadataProcessor::UpdateResultMetadata(const uint64_t &resultTimestamp)
{
    DHLOGD("DMetadataProcessor::UpdateResultMetadata result callback mode: %{public}d", metaResultMode_);
    if (metaResultMode_ != ResultCallbackMode::PER_FRAME) {
        return;
    }

    std::lock_guard<std::mutex> autoLock(producerMutex_);
    if (latestProducerMetadataResult_ == nullptr) {
        DHLOGD("DMetadataProcessor::UpdateResultMetadata latest producer metadata result is null");
        return;
    }

    UpdateAllResult(resultTimestamp);
}

void DMetadataProcessor::SetResultCallback(
    std::function<void(uint64_t, std::shared_ptr<OHOS::Camera::CameraMetadata>)> &resultCbk)
{
    resultCallback_ = resultCbk;
}

void DMetadataProcessor::UpdateAllResult(const uint64_t &resultTimestamp)
{
    uint32_t itemCap = OHOS::Camera::GetCameraMetadataItemCapacity(latestProducerMetadataResult_->get());
    uint32_t dataSize = OHOS::Camera::GetCameraMetadataDataSize(latestProducerMetadataResult_->get());
    DHLOGD("DMetadataProcessor::UpdateAllResult itemCapacity: %{public}u, dataSize: %{public}u", itemCap, dataSize);
    std::shared_ptr<OHOS::Camera::CameraMetadata> result =
        std::make_shared<OHOS::Camera::CameraMetadata>(itemCap, dataSize);
    int32_t ret = OHOS::Camera::CopyCameraMetadataItems(result->get(), latestProducerMetadataResult_->get());
    if (ret != CAM_META_SUCCESS) {
        DHLOGE("DMetadataProcessor::UpdateAllResult copy metadata item failed, ret: %{public}d", ret);
        return;
    }
    resultCallback_(resultTimestamp, result);
}

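/*
 * Report only the metadata entries that changed since the last result: each enabled
 * tag is compared between the latest producer and consumer snapshots byte by byte,
 * and the callback fires only when at least one entry differs or is newly present.
 */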
void DMetadataProcessor::UpdateOnChanged(const uint64_t &resultTimestamp)
{
    bool needReturn = false;
    uint32_t itemCap = OHOS::Camera::GetCameraMetadataItemCapacity(latestProducerMetadataResult_->get());
    uint32_t dataSize = OHOS::Camera::GetCameraMetadataDataSize(latestProducerMetadataResult_->get());
    DHLOGD("DMetadataProcessor::UpdateOnChanged itemCapacity: %{public}u, dataSize: %{public}u", itemCap, dataSize);
    std::shared_ptr<OHOS::Camera::CameraMetadata> result =
        std::make_shared<OHOS::Camera::CameraMetadata>(itemCap, dataSize);
    DHLOGD("DMetadataProcessor::UpdateOnChanged enabledResultSet size: %{public}zu", enabledResultSet_.size());
    for (auto tag : enabledResultSet_) {
        DHLOGD("DMetadataProcessor::UpdateOnChanged camera device metadata tag: %{public}d", tag);
        camera_metadata_item_t item;
        camera_metadata_item_t anoItem;
        int ret1 = OHOS::Camera::FindCameraMetadataItem(latestProducerMetadataResult_->get(), tag, &item);
        int ret2 = OHOS::Camera::FindCameraMetadataItem(latestConsumerMetadataResult_->get(), tag, &anoItem);
        DHLOGD("DMetadataProcessor::UpdateOnChanged find metadata item ret: %{public}d, %{public}d", ret1, ret2);
        if (ret1 != CAM_META_SUCCESS) {
            continue;
        }

        if (ret2 == CAM_META_SUCCESS) {
            if ((item.count != anoItem.count) || (item.data_type != anoItem.data_type)) {
                needReturn = true;
                result->addEntry(tag, GetMetadataItemData(item), item.count);
                continue;
            }
            uint32_t size = GetDataSize(item.data_type);
            DHLOGD("DMetadataProcessor::UpdateOnChanged data size: %{public}u", size);
            for (uint32_t i = 0; i < (size * static_cast<uint32_t>(item.count)); i++) {
                if (*(item.data.u8 + i) != *(anoItem.data.u8 + i)) {
                    needReturn = true;
                    result->addEntry(tag, GetMetadataItemData(item), item.count);
                    break;
                }
            }
        } else {
            needReturn = true;
            result->addEntry(tag, GetMetadataItemData(item), item.count);
            continue;
        }
    }

    if (needReturn) {
        resultCallback_(resultTimestamp, result);
    }
}

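/*
 * Decode a base64-encoded metadata result reported by the sink and keep the previous
 * result as the consumer snapshot. In ON_CHANGED mode the first result is forwarded in
 * full; afterwards every reported tag is marked enabled and only the entries that
 * differ from the previous snapshot are forwarded via UpdateOnChanged().
 */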
DCamRetCode DMetadataProcessor::SaveResultMetadata(std::string resultStr)
{
    if (resultStr.empty()) {
        DHLOGE("Input result string is empty.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    std::string metadataStr = Base64Decode(resultStr);
    std::lock_guard<std::mutex> autoLock(producerMutex_);
    latestConsumerMetadataResult_ = latestProducerMetadataResult_;
    latestProducerMetadataResult_ = OHOS::Camera::MetadataUtils::DecodeFromString(metadataStr);
    if (latestProducerMetadataResult_ == nullptr) {
        DHLOGE("Failed to decode metadata setting from string.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    if (!OHOS::Camera::GetCameraMetadataItemCount(latestProducerMetadataResult_->get())) {
        DHLOGE("Input result metadata item is empty.");
        return DCamRetCode::INVALID_ARGUMENT;
    }

    DHLOGD("DMetadataProcessor::SaveResultMetadata result callback mode: %{public}d", metaResultMode_);
    if (metaResultMode_ != ResultCallbackMode::ON_CHANGED) {
        return SUCCESS;
    }

    uint64_t resultTimestamp = GetCurrentLocalTimeStamp();
    if (latestConsumerMetadataResult_ == nullptr) {
        UpdateAllResult(resultTimestamp);
        return SUCCESS;
    }

    camera_metadata_item_entry_t* itemEntry = OHOS::Camera::GetMetadataItems(latestProducerMetadataResult_->get());
    uint32_t count = latestProducerMetadataResult_->get()->item_count;
    for (uint32_t i = 0; i < count; i++, itemEntry++) {
        enabledResultSet_.insert((MetaType)(itemEntry->item));
    }
    UpdateOnChanged(resultTimestamp);
    return SUCCESS;
}

void DMetadataProcessor::ConvertToCameraMetadata(common_metadata_header_t *&input,
    std::shared_ptr<OHOS::Camera::CameraMetadata> &output)
{
    CHECK_AND_RETURN_LOG(output == nullptr, "output is nullptr");
    auto ret = OHOS::Camera::CopyCameraMetadataItems(output->get(), input);
    if (ret != CAM_META_SUCCESS) {
        DHLOGE("Failed to copy the old metadata to new metadata.");
        output = nullptr;
    }
}

void DMetadataProcessor::ResizeMetadataHeader(common_metadata_header_t *&header,
    uint32_t itemCapacity, uint32_t dataCapacity)
{
    if (header) {
        OHOS::Camera::FreeCameraMetadataBuffer(header);
    }
    header = OHOS::Camera::AllocateCameraMetadataBuffer(itemCapacity, dataCapacity);
}

uint32_t DMetadataProcessor::GetDataSize(uint32_t type)
{
    uint32_t size = 0;
    if (type == META_TYPE_BYTE) {
        size = sizeof(uint8_t);
    } else if (type == META_TYPE_INT32) {
        size = sizeof(int32_t);
    } else if (type == META_TYPE_UINT32) {
        size = sizeof(uint32_t);
    } else if (type == META_TYPE_FLOAT) {
        size = sizeof(float);
    } else if (type == META_TYPE_INT64) {
        size = sizeof(int64_t);
    } else if (type == META_TYPE_DOUBLE) {
        size = sizeof(double);
    } else if (type == META_TYPE_RATIONAL) {
        size = sizeof(camera_rational_t);
    } else {
        size = 0;
    }
    return size;
}

void* DMetadataProcessor::GetMetadataItemData(const camera_metadata_item_t &item)
{
    switch (item.data_type) {
        case META_TYPE_BYTE: {
            return item.data.u8;
        }
        case META_TYPE_INT32: {
            return item.data.i32;
        }
        case META_TYPE_UINT32: {
            return item.data.ui32;
        }
        case META_TYPE_FLOAT: {
            return item.data.f;
        }
        case META_TYPE_INT64: {
            return item.data.i64;
        }
        case META_TYPE_DOUBLE: {
            return item.data.d;
        }
        case META_TYPE_RATIONAL: {
            return item.data.r;
        }
        default: {
            DHLOGE("DMetadataProcessor::GetMetadataItemData invalid data type: %{public}u", item.data_type);
            return nullptr;
        }
    }
}

cJSON* DMetadataProcessor::GetFormatObj(const std::string rootNode, cJSON* rootValue, std::string& formatStr)
{
    cJSON* nodeObj = cJSON_GetObjectItemCaseSensitive(rootValue, rootNode.c_str());
    if (nodeObj == nullptr || !cJSON_IsObject(nodeObj)) {
        return nullptr;
    }

    cJSON* resObj = cJSON_GetObjectItemCaseSensitive(nodeObj, "Resolution");
    if (resObj == nullptr || !cJSON_IsObject(resObj)) {
        return nullptr;
    }
    cJSON *formatObj = cJSON_GetObjectItemCaseSensitive(resObj, formatStr.c_str());
    if (formatObj == nullptr || !cJSON_IsArray(formatObj) || cJSON_GetArraySize(formatObj) == 0 ||
        static_cast<uint32_t>(cJSON_GetArraySize(formatObj)) > JSON_ARRAY_MAX_SIZE) {
        return nullptr;
    }
    return formatObj;
}

void DMetadataProcessor::GetEachNodeSupportedResolution(std::vector<int>& formats, const std::string rootNode,
    std::map<int, std::vector<DCResolution>>& supportedFormats, cJSON* rootValue)
{
    for (const auto &format : formats) {
        std::string formatStr = std::to_string(format);
        cJSON *formatObj = GetFormatObj(rootNode, rootValue, formatStr);
        if (formatObj == nullptr) {
            DHLOGE("Resolution or %{public}s error.", formatStr.c_str());
            continue;
        }
        GetNodeSupportedResolution(format, rootNode, supportedFormats, rootValue);
    }
}

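/*
 * Each resolution entry is a string of width and height joined by STAR_SEPARATOR
 * (split into SIZE_FMT_LEN fields). Entries that are zero-sized or exceed
 * MAX_SUPPORT_PHOTO_WIDTH/HEIGHT (photo) or MAX_SUPPORT_PREVIEW_WIDTH/HEIGHT
 * (preview/video) are skipped; the remainder is sorted and the first element of the
 * sorted list refreshes maxPhotoResolution_ / maxPreviewResolution_ before the
 * profile is stored per format.
 */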
void DMetadataProcessor::GetNodeSupportedResolution(int format, const std::string rootNode,
    std::map<int, std::vector<DCResolution>>& supportedFormats, cJSON* rootValue)
{
    std::vector<DCResolution> resolutionVec;
    std::string formatStr = std::to_string(format);
    cJSON* formatObj = GetFormatObj(rootNode, rootValue, formatStr);
    if (formatObj == nullptr) {
        return;
    }
    int32_t size = cJSON_GetArraySize(formatObj);
    for (int32_t i = 0; i < size; i++) {
        cJSON *item = cJSON_GetArrayItem(formatObj, i);
        if (item == nullptr || !cJSON_IsString(item)) {
            DHLOGE("Resolution %{public}s index %{public}d is not a string.", formatStr.c_str(), i);
            continue;
        }
        std::string resoStr = std::string(item->valuestring);
        std::vector<std::string> reso;
        SplitString(resoStr, reso, STAR_SEPARATOR);
        if (reso.size() != SIZE_FMT_LEN) {
            continue;
        }
        uint32_t width = static_cast<uint32_t>(std::stoi(reso[0]));
        uint32_t height = static_cast<uint32_t>(std::stoi(reso[1]));
        if (height == 0 || width == 0 || ((rootNode == "Photo") &&
            ((width * height) > (MAX_SUPPORT_PHOTO_WIDTH * MAX_SUPPORT_PHOTO_HEIGHT))) ||
            ((rootNode != "Photo") && (width > MAX_SUPPORT_PREVIEW_WIDTH || height > MAX_SUPPORT_PREVIEW_HEIGHT))) {
            continue;
        }
        DCResolution resolution(width, height);
        resolutionVec.push_back(resolution);
    }
    if (!resolutionVec.empty()) {
        std::sort(resolutionVec.begin(), resolutionVec.end());
        supportedFormats[format] = resolutionVec;
        if ((rootNode != "Photo") && (maxPreviewResolution_ < resolutionVec[0])) {
            maxPreviewResolution_.width_ = resolutionVec[0].width_;
            maxPreviewResolution_.height_ = resolutionVec[0].height_;
        }
        if ((rootNode == "Photo") && (maxPhotoResolution_ < resolutionVec[0])) {
            maxPhotoResolution_.width_ = resolutionVec[0].width_;
            maxPhotoResolution_.height_ = resolutionVec[0].height_;
        }
        StoreSinkAndSrcConfig(format, rootNode, resolutionVec);
    }
}

void DMetadataProcessor::StoreSinkAndSrcConfig(int format, const std::string rootNode,
    std::vector<DCResolution> &resolutionVec)
{
    if (rootNode == "Photo") {
        sinkPhotoProfiles_[format] = resolutionVec;
    } else if (rootNode == "Preview") {
        sinkPreviewProfiles_[format] = resolutionVec;
    } else if (rootNode == "Video") {
        sinkVideoProfiles_[format] = resolutionVec;
    }
}

std::map<int, std::vector<DCResolution>> DMetadataProcessor::GetDCameraSupportedFormats(
    const std::string &abilityInfo)
{
    std::map<int, std::vector<DCResolution>> supportedFormats;
    cJSON *rootValue = cJSON_Parse(abilityInfo.c_str());
    CHECK_NULL_RETURN_LOG(rootValue, supportedFormats, "The abilityInfo is null.");
    CHECK_OBJECT_FREE_RETURN(rootValue, supportedFormats, "The abilityInfo is not object.");
    ParsePhotoFormats(rootValue, supportedFormats);
    ParsePreviewFormats(rootValue, supportedFormats);
    ParseVideoFormats(rootValue, supportedFormats);
    cJSON_Delete(rootValue);
    return supportedFormats;
}

void DMetadataProcessor::ParsePhotoFormats(cJSON* rootValue,
    std::map<int, std::vector<DCResolution>>& supportedFormats)
{
    cJSON *photoObj = cJSON_GetObjectItemCaseSensitive(rootValue, "Photo");
    if (photoObj == nullptr || !cJSON_IsObject(photoObj)) {
        DHLOGE("Input Photo info is null.");
        return;
    }

    cJSON *formatObj = cJSON_GetObjectItemCaseSensitive(photoObj, "OutputFormat");
    if (formatObj == nullptr || !cJSON_IsArray(formatObj) || cJSON_GetArraySize(formatObj) == 0 ||
        static_cast<uint32_t>(cJSON_GetArraySize(formatObj)) > JSON_ARRAY_MAX_SIZE) {
        DHLOGE("Photo output format error.");
        return;
    }

    std::vector<int> photoFormats;
    int32_t size = cJSON_GetArraySize(formatObj);
    for (int32_t i = 0; i < size; i++) {
        cJSON *item = cJSON_GetArrayItem(formatObj, i);
        if (item != nullptr && cJSON_IsNumber(item)) {
            photoFormats.push_back(item->valueint);
        }
    }
    sinkPhotoFormats_ = photoFormats;
    GetEachNodeSupportedResolution(photoFormats, "Photo", supportedFormats, rootValue);
}

void DMetadataProcessor::ParsePreviewFormats(cJSON* rootValue,
    std::map<int, std::vector<DCResolution>>& supportedFormats)
{
    cJSON *previewObj = cJSON_GetObjectItemCaseSensitive(rootValue, "Preview");
    if (previewObj == nullptr || !cJSON_IsObject(previewObj)) {
        DHLOGE("Preview error.");
        return;
    }
    cJSON *formatObj = cJSON_GetObjectItemCaseSensitive(previewObj, "OutputFormat");
    if (formatObj == nullptr || !cJSON_IsArray(formatObj) || cJSON_GetArraySize(formatObj) == 0 ||
        static_cast<uint32_t>(cJSON_GetArraySize(formatObj)) > JSON_ARRAY_MAX_SIZE) {
        DHLOGE("Preview output format error.");
        return;
    }
    std::vector<int> previewFormats;
    int32_t size = cJSON_GetArraySize(formatObj);
    for (int32_t i = 0; i < size; i++) {
        cJSON *item = cJSON_GetArrayItem(formatObj, i);
        if (item != nullptr && cJSON_IsNumber(item)) {
            previewFormats.push_back(item->valueint);
        }
    }
    GetEachNodeSupportedResolution(previewFormats, "Preview", supportedFormats, rootValue);
}

void DMetadataProcessor::ParseVideoFormats(cJSON* rootValue,
    std::map<int, std::vector<DCResolution>>& supportedFormats)
{
    cJSON *videoObj = cJSON_GetObjectItemCaseSensitive(rootValue, "Video");
    if (videoObj == nullptr || !cJSON_IsObject(videoObj)) {
        DHLOGE("Video error.");
        return;
    }
    cJSON *formatObj = cJSON_GetObjectItemCaseSensitive(videoObj, "OutputFormat");
    if (formatObj == nullptr || !cJSON_IsArray(formatObj) || cJSON_GetArraySize(formatObj) == 0 ||
        static_cast<uint32_t>(cJSON_GetArraySize(formatObj)) > JSON_ARRAY_MAX_SIZE) {
        DHLOGE("Video output format error.");
        return;
    }
    std::vector<int> videoFormats;
    int32_t size = cJSON_GetArraySize(formatObj);
    for (int32_t i = 0; i < size; i++) {
        cJSON *item = cJSON_GetArrayItem(formatObj, i);
        if (item != nullptr && cJSON_IsNumber(item)) {
            videoFormats.push_back(item->valueint);
        }
    }
    GetEachNodeSupportedResolution(videoFormats, "Video", supportedFormats, rootValue);
}

void DMetadataProcessor::PrintDCameraMetadata(const common_metadata_header_t *metadata)
{
    if (metadata == nullptr) {
        DHLOGE("Failed to print metadata, input metadata is null.");
        return;
    }

    uint32_t tagCount = OHOS::Camera::GetCameraMetadataItemCount(metadata);
    DHLOGD("DMetadataProcessor::PrintDCameraMetadata, input metadata item count = %{public}d.", tagCount);
    for (uint32_t i = 0; i < tagCount; i++) {
        camera_metadata_item_t item;
        int ret = OHOS::Camera::GetCameraMetadataItem(metadata, i, &item);
        if (ret != 0) {
            continue;
        }

        const char *name = OHOS::Camera::GetCameraMetadataItemName(item.item);
        if (item.data_type == META_TYPE_BYTE) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%d", item.index, name, (uint8_t)(item.data.u8[k]));
            }
        } else if (item.data_type == META_TYPE_INT32) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%d", item.index, name, (int32_t)(item.data.i32[k]));
            }
        } else if (item.data_type == META_TYPE_UINT32) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%u", item.index, name, (uint32_t)(item.data.ui32[k]));
            }
        } else if (item.data_type == META_TYPE_FLOAT) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%f", item.index, name, (float)(item.data.f[k]));
            }
        } else if (item.data_type == META_TYPE_INT64) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%lld", item.index, name, (long long)(item.data.i64[k]));
            }
        } else if (item.data_type == META_TYPE_DOUBLE) {
            for (size_t k = 0; k < item.count; k++) {
                DHLOGI("tag index:%d, name:%s, value:%lf", item.index, name, (double)(item.data.d[k]));
            }
        } else {
            DHLOGI("tag index:%d, name:%s", item.index, name);
        }
    }
}
} // namespace DistributedHardware
} // namespace OHOS