/*
 * Copyright (C) 2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <fstream>
#include <filesystem>
#include <iomanip>
#include <iostream>
#include <vector>
#include <string>
#include <sstream>
#include <ctime>
#include <unistd.h>
#include "video_sample.h"
#include "sync_fence.h"
#include "nlohmann/json.hpp"
#include "securec.h"
using namespace OHOS;
using namespace std;
using namespace nlohmann;

constexpr int64_t NANOS_IN_SECOND = 1000000000L;
constexpr int64_t NANOS_IN_MICRO = 1000L;
constexpr int32_t THREE = 3;
constexpr int32_t FOUR = 4;
constexpr int32_t SIXTEEN = 16;

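// Returns the current CLOCK_BOOTTIME timestamp in microseconds.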
static int64_t GetSystemTimeUs()
{
    struct timespec now;
    (void)clock_gettime(CLOCK_BOOTTIME, &now);
    int64_t nanoTime = static_cast<int64_t>(now.tv_sec) * NANOS_IN_SECOND + now.tv_nsec;
    return nanoTime / NANOS_IN_MICRO;
}

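// Parses the expected SHA-512 digest of the current input file from the JSON hash table.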
vector<uint8_t> VideoSample::LoadHashFile()
{
    ifstream f(hashValJsonPath, ios::in);
    vector<uint8_t> ret;
    if (f) {
        json data = json::parse(f);
        filesystem::path filePath = inputFilePath;
        string fileName = filePath.filename();
        string hashValue = data[fileName.c_str()][defaultPixelFormat][convertType];
        stringstream ss(hashValue);
        string item;
        while (getline(ss, item, ',')) {
            if (!item.empty()) {
                ret.push_back(stol(item, nullptr, SIXTEEN));
            }
        }
    }
    return ret;
}

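// Prints a SHA-512 digest as comma-separated, zero-padded hex bytes.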
void VideoSample::ShowHashValue(uint8_t source[])
{
    for (int32_t i = 0; i < SHA512_DIGEST_LENGTH; i++) {
        cout << std::hex << std::setfill('0') << std::setw(sizeof(uint16_t)) << int(source[i]);
        if (i != SHA512_DIGEST_LENGTH - 1) {
            cout << ",";
        }
    }
    cout << std::dec;
    cout << endl;
}

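// Compares the computed output digest against the expected value loaded from the hash file.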
bool VideoSample::MdCompare(uint8_t source[])
{
    std::vector<uint8_t> srcHashVal = LoadHashFile();
    if (srcHashVal.size() != SHA512_DIGEST_LENGTH) {
        cout << "get hash value failed, size " << srcHashVal.size() << endl;
        return false;
    }
    for (int32_t i = 0; i < SHA512_DIGEST_LENGTH; i++) {
        if (source[i] != srcHashVal[i]) {
            cout << "decoded hash value mismatch" << endl;
            ShowHashValue(source);
            return false;
        }
    }
    return true;
}

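// Copies one processed frame out of the surface buffer, writes it to the output file and feeds it into the SHA-512 context.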
void VideoSample::ProcessOutputData(SurfaceBuffer *buffer)
{
    int32_t picWidth = buffer->GetWidth();
    int32_t picHeight = buffer->GetHeight();
    uint8_t *bufferAddr = reinterpret_cast<uint8_t *>(buffer->GetVirAddr());
    uint32_t cropSize = buffer->GetSize();
    if (param_.outFmt == NATIVEBUFFER_PIXEL_FMT_YCBCR_P010 ||
        param_.outFmt == NATIVEBUFFER_PIXEL_FMT_YCRCB_P010) {
        cropSize = picWidth * picHeight * THREE;
    } else if (param_.outFmt == NATIVEBUFFER_PIXEL_FMT_RGBA_1010102 ||
        param_.outFmt == NATIVEBUFFER_PIXEL_FMT_RGBA_8888) {
        cropSize = picWidth * picHeight * FOUR;
    } else if (param_.outFmt == NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP ||
        param_.outFmt == NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP) {
        cropSize = (picWidth * picHeight * THREE) >> 1;
    }
    uint8_t *cropBuffer = new uint8_t[cropSize];
    if (param_.outFmt == NATIVEBUFFER_PIXEL_FMT_RGBA_1010102 ||
        param_.outFmt == NATIVEBUFFER_PIXEL_FMT_RGBA_8888) {
        memcpy_s(cropBuffer, cropSize, bufferAddr, picWidth * picHeight * FOUR);
    } else if (param_.outFmt == NATIVEBUFFER_PIXEL_FMT_YCBCR_P010 ||
        param_.outFmt == NATIVEBUFFER_PIXEL_FMT_YCRCB_P010) {
        memcpy_s(cropBuffer, cropSize, bufferAddr, picWidth * picHeight * THREE);
    } else if (param_.outFmt == NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP ||
        param_.outFmt == NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP) {
        memcpy_s(cropBuffer, cropSize, bufferAddr, (picWidth * picHeight * THREE) >> 1);
    }
    outFile->write(reinterpret_cast<char *>(cropBuffer), cropSize);
    outFile->close();
    SHA512_Update(&ctx, cropBuffer, cropSize);
    delete[] cropBuffer;
}

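// Callback: a processing error stops the sample loop and is counted as a failure.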
static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData)
{
    VideoSample *sample = reinterpret_cast<VideoSample*>(userData);
    sample->isRunning = false;
    sample->errCount++;
    std::cout << "OnError callback received error code: " << error << std::endl;
}

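// Callback: logs state transitions of the video processor.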
static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData)
{
    std::cout << "OnState callback called, new state is " << state << std::endl;
}

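// Callback: renders each newly processed buffer so it is delivered to the output (consumer) surface.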
static void OnNewOutputBuffer(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData)
{
    VideoSample *sample = reinterpret_cast<VideoSample*>(userData);
    VideoProcessing_ErrorCode ret = OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index);
    if (ret != VIDEO_PROCESSING_SUCCESS) {
        sample->errCount++;
        std::cout << "Render output buffer failed, error code: " << ret << std::endl;
    }
}

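// Consumer-side listener: acquires processed buffers from the consumer surface and queues them for the output thread.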
class VPEConsumerListener : public IBufferConsumerListener {
public:
    explicit VPEConsumerListener(sptr<Surface> cs, VideoSample *sample) : sample(sample), cs(cs) {}
    ~VPEConsumerListener() {}
    void OnBufferAvailable() override
    {
        sptr<SurfaceBuffer> buffer;
        cs->AcquireBuffer(buffer, flushFence, timestamp, damage);
        unique_lock<mutex> lock(sample->mutex2_);
        if (buffer) {
            sample->outputBufferAvilQue_.push(buffer);
        } else {
            cout << "acquire buffer failed" << endl;
        }
    }

private:
    int64_t timestamp = 0;
    Rect damage = {};
    VideoSample *sample = nullptr;
    int32_t flushFence = -1;
    sptr<Surface> cs {nullptr};
};

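// Releases files, callbacks, native windows and any buffers still queued on the consumer surface.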
VideoSample::~VideoSample()
{
    if (inFile != nullptr) {
        if (inFile->is_open()) {
            inFile->close();
        }
        inFile.reset();
        inFile = nullptr;
    }
    if (callback) {
        OH_VideoProcessingCallback_Destroy(callback);
        callback = nullptr;
    }
    if (rect) {
        delete rect;
        rect = nullptr;
    }
    if (metaData) {
        delete[] metaData;
        metaData = nullptr;
    }
    if (metaDataFile != nullptr) {
        if (metaDataFile->is_open()) {
            metaDataFile->close();
        }
        metaDataFile.reset();
        metaDataFile = nullptr;
    }
    OH_VideoProcessing_Destroy(videoProcessor);
    OH_NativeWindow_DestroyNativeWindow(outWindow);
    OH_NativeWindow_DestroyNativeWindow(inWindow);
    if (cs) {
        for (; !outputBufferAvilQue_.empty(); outputBufferAvilQue_.pop()) {
            cs->ReleaseBuffer(outputBufferAvilQue_.front(), -1);
        }
        cs->UnregisterConsumerListener();
    }
}

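// Creates the video processor, sets up the producer/consumer surfaces, output format, color space and HDR metadata, and registers the callbacks.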
int32_t VideoSample::InitVideoSample(const int32_t type, int32_t width, int32_t height, VideoProcessParam param)
{
    width_ = width;
    height_ = height;
    isRunning = true;
    param_ = param;
    if (type == VIDEO_PROCESSING_TYPE_METADATA_GENERATION) {
        isMetadataGen = true;
    }
    if (OH_VideoProcessing_Create(&videoProcessor, type) != VIDEO_PROCESSING_SUCCESS) {
        return -1;
    }
    cs = Surface::CreateSurfaceAsConsumer();
    sptr<IBufferConsumerListener> listener = new VPEConsumerListener(cs, this);
    cs->RegisterConsumerListener(listener);
    auto p = cs->GetProducer();
    sptr<Surface> ps = Surface::CreateSurfaceAsProducer(p);
    outWindow = CreateNativeWindowFromSurface(&ps);
    (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt);
    if (!isMetadataGen) {
        (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow, SET_FORMAT, param_.outFmt);
        OH_NativeWindow_SetColorSpace(outWindow, param_.outColorSpace);
    }
    if (isHDRVividOut) {
        uint8_t val = OH_VIDEO_HDR_VIVID;
        int32_t ret = OH_NativeWindow_SetMetadataValue(outWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val);
        if (ret != 0) {
            cout << "set metadata failed" << endl;
        }
    }
    if (!isHDRVivid && param_.inColorSpace == OH_COLORSPACE_BT2020_PQ_LIMIT) {
        uint8_t outMeta = OH_VIDEO_HDR_HLG;
        OH_NativeWindow_SetMetadataValue(outWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &outMeta);
    }
    if (OH_VideoProcessing_SetSurface(videoProcessor, outWindow) != VIDEO_PROCESSING_SUCCESS) {
        return -1;
    }
    if (OH_VideoProcessing_GetSurface(videoProcessor, &inWindow) != VIDEO_PROCESSING_SUCCESS) {
        return -1;
    }
    SetInputWindowParam();
    OH_VideoProcessingCallback_Create(&callback);
    OH_VideoProcessingCallback_BindOnError(callback, OnError);
    OH_VideoProcessingCallback_BindOnState(callback, OnState);
    OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback, OnNewOutputBuffer);
    if (OH_VideoProcessing_RegisterCallback(videoProcessor, callback, this) != VIDEO_PROCESSING_SUCCESS) {
        return -1;
    }
    return VIDEO_PROCESSING_SUCCESS;
}

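// Configures the producer (input) window: buffer geometry, pixel format, color space and HDR metadata;
// when HDR Vivid input is used, loads the dynamic metadata file into memory for later injection.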
void VideoSample::SetInputWindowParam()
{
    (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_BUFFER_GEOMETRY, width_, height_);
    (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow, SET_FORMAT, param_.inFmt);
    OH_NativeWindow_SetColorSpace(inWindow, param_.inColorSpace);
    int32_t err = 0;
    if (isMetadataGen) {
        uint8_t val = OH_VIDEO_HDR_VIVID;
        err = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val);
        if (err != 0) {
            cout << "set metadata type failed" << endl;
        }
    }
    if (isHDRVivid) {
        metaDataFile = std::make_unique<std::ifstream>(inputMetaPath);
        metaDataFile->seekg(0, ios::end);
        metadataSize = metaDataFile->tellg();
        metaDataFile->seekg(0, ios::beg);
        metaData = new uint8_t[metadataSize];
        metaDataFile->read(reinterpret_cast<char*>(metaData), metadataSize);
        uint8_t val = OH_VIDEO_HDR_VIVID;
        err = OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &val);
        if (err != 0) {
            cout << "set metadata type failed" << endl;
        }
    } else if (!isHDRVivid && param_.inColorSpace == OH_COLORSPACE_BT2020_PQ_LIMIT) {
        uint8_t inMeta = OH_VIDEO_HDR_HDR10;
        OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_METADATA_TYPE, sizeof(uint8_t), &inMeta);
    }
    rect = new Region::Rect();
    rect->x = 0;
    rect->y = 0;
    rect->w = width_;
    rect->h = height_;
    region.rects = rect;
}

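// Reads static and dynamic HDR metadata from a native buffer and dumps the dynamic metadata to the output file.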
void VideoSample::GetMetadata(OH_NativeBuffer *buffer)
{
    uint8_t *staticMetadata = nullptr;
    int32_t staticSize = 0;
    uint8_t *dynamicMetadata = nullptr;
    int32_t dynamicSize = 0;
    int32_t ret = OH_NativeBuffer_GetMetadataValue(buffer, OH_HDR_STATIC_METADATA, &staticSize, &staticMetadata);
    if (ret) {
        cout << "get static metadata failed " << ret << endl;
        errCount++;
    }
    ret = OH_NativeBuffer_GetMetadataValue(buffer, OH_HDR_DYNAMIC_METADATA, &dynamicSize, &dynamicMetadata);
    if (ret) {
        cout << "get dynamic metadata failed " << ret << endl;
        errCount++;
        return;
    }
    outFile->write(reinterpret_cast<char *>(dynamicMetadata), dynamicSize);
    outFile->close();
}

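// Producer thread body: requests a buffer from the input window, fills it with one frame from the input file and flushes it to the processor.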
int32_t VideoSample::InputFunc()
{
    inFile = std::make_unique<std::ifstream>(inputFilePath);
    if (!inFile->is_open()) {
        return 1;
    }
    int fenceFd = -1;
    OHNativeWindowBuffer *ohNativeWindowBuffer = nullptr;
    if (isHDRVivid) {
        if (OH_NativeWindow_SetMetadataValue(inWindow, OH_HDR_DYNAMIC_METADATA, metadataSize, metaData) != 0) {
            return -1;
        }
    }
    if (OH_NativeWindow_NativeWindowRequestBuffer(inWindow, &ohNativeWindowBuffer, &fenceFd) != 0) {
        return -1;
    }
    if (fenceFd > 0) {
        close(fenceFd);
    }
    OH_NativeBuffer *nativeBuffer = nullptr;
    if (OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer) != 0) {
        return -1;
    }
    void *virAddr = nullptr;
    OH_NativeBuffer_Config config;
    OH_NativeBuffer_GetConfig(nativeBuffer, &config);
    if (OH_NativeBuffer_Map(nativeBuffer, &virAddr) != 0) {
        return -1;
    }
    if (param_.inFmt == NATIVEBUFFER_PIXEL_FMT_YCBCR_P010 ||
        param_.inFmt == NATIVEBUFFER_PIXEL_FMT_YCRCB_P010) {
        ReadOneFrameP010(reinterpret_cast<uint8_t *>(virAddr), config);
    } else if (param_.inFmt == NATIVEBUFFER_PIXEL_FMT_RGBA_1010102 ||
        param_.inFmt == NATIVEBUFFER_PIXEL_FMT_RGBA_8888) {
        ReadOneFrameRGBA10(reinterpret_cast<uint8_t *>(virAddr), config);
    } else if (param_.inFmt == NATIVEBUFFER_PIXEL_FMT_YCRCB_420_SP ||
        param_.inFmt == NATIVEBUFFER_PIXEL_FMT_YCBCR_420_SP) {
        ReadOneFrameYUV420SP(reinterpret_cast<uint8_t *>(virAddr), config);
    }
    NativeWindowHandleOpt(inWindow, SET_UI_TIMESTAMP, GetSystemTimeUs());
    if (OH_NativeBuffer_Unmap(nativeBuffer) != 0) {
        return -1;
    }
    if (OH_NativeWindow_NativeWindowFlushBuffer(inWindow, ohNativeWindowBuffer, -1, region) != 0) {
        return -1;
    }
    inCount++;
    inputFuncFinished = true;
    inFile->close();
    return 0;
}

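// Consumer thread body: drains processed buffers, hashes the output data and verifies the digest once the input is exhausted.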
void VideoSample::OutputThread()
{
    if (isMetadataGen) {
        outFile = std::make_unique<std::ofstream>(outputMetaPath);
    } else {
        outFile = std::make_unique<std::ofstream>(outputFilePath);
    }
    SHA512_Init(&ctx);
    while (isRunning) {
        if (outputBufferAvilQue_.size() > 0) {
            unique_lock<mutex> lock(mutex2_);
            auto buffer = outputBufferAvilQue_.front();
            ProcessOutputData(buffer);
            outCount++;
            outputBufferAvilQue_.pop();
            lock.unlock();
            cs->ReleaseBuffer(buffer, -1);
        }
        if (outputBufferAvilQue_.size() == 0 && inputFuncFinished && (inCount == outCount)) {
            break;
        }
        usleep(NANOS_IN_MICRO);
    }
    SHA512_Final(md, &ctx);
    OPENSSL_cleanse(&ctx, sizeof(ctx));
    if (!MdCompare(md)) {
        md5Equal = false;
    }
}

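// Starts the processor and launches the input and output worker threads.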
int32_t VideoSample::StartProcess()
{
    if (OH_VideoProcessing_Start(videoProcessor) != VIDEO_PROCESSING_SUCCESS) {
        return -1;
    }
    inputLoop_ = make_unique<thread>(&VideoSample::InputFunc, this);
    outputLoop_ = make_unique<thread>(&VideoSample::OutputThread, this);
    return VIDEO_PROCESSING_SUCCESS;
}

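// Waits for both worker threads to finish and then stops the processor.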
int32_t VideoSample::WaitAndStopSample()
{
    inputLoop_->join();
    outputLoop_->join();
    if (OH_VideoProcessing_Stop(videoProcessor) != VIDEO_PROCESSING_SUCCESS) {
        return -1;
    }
    return 0;
}

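// Reads one 8-bit 4:2:0 semi-planar (NV12/NV21) frame from the input file, honoring the buffer's row stride.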
int32_t VideoSample::ReadOneFrameYUV420SP(uint8_t *addr, OH_NativeBuffer_Config config)
{
    uint8_t *start = addr;
    // copy Y
    for (uint32_t i = 0; i < config.height; i++) {
        inFile->read(reinterpret_cast<char *>(addr), width_);
        addr += config.stride;
    }
    // copy UV
    for (uint32_t i = 0; i < config.height / sizeof(uint16_t); i++) {
        inFile->read(reinterpret_cast<char *>(addr), width_);
        addr += config.stride;
    }
    return addr - start;
}

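// Reads one P010 (10-bit 4:2:0 semi-planar, 16 bits per sample) frame from the input file, honoring the buffer's row stride.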
int32_t VideoSample::ReadOneFrameP010(uint8_t *addr, OH_NativeBuffer_Config config)
{
    uint8_t *start = addr;
    // copy Y
    for (uint32_t i = 0; i < config.height; i++) {
        inFile->read(reinterpret_cast<char *>(addr), width_ * sizeof(uint16_t));
        addr += config.stride;
    }
    // copy UV
    for (uint32_t i = 0; i < config.height / sizeof(uint16_t); i++) {
        inFile->read(reinterpret_cast<char *>(addr), width_ * sizeof(uint16_t));
        addr += config.stride;
    }
    return addr - start;
}

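// Reads one RGBA frame (4 bytes per pixel; used for both RGBA8888 and RGBA1010102) from the input file, honoring the buffer's row stride.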
int32_t VideoSample::ReadOneFrameRGBA10(uint8_t *addr, OH_NativeBuffer_Config config)
{
    uint8_t *start = addr;
    for (uint32_t i = 0; i < height_; i++) {
        inFile->read(reinterpret_cast<char *>(addr), width_ * sizeof(uint32_t));
        addr += config.stride;
    }
    return addr - start;
}