/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "emul/VideoCapture.h"

#include <assert.h>
#include <errno.h>
#include <error.h>
#include <fcntl.h>
#include <memory.h>
#include <processgroup/sched_policy.h>
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>

#include <fstream>
#include <iomanip>

#include <android-base/logging.h>
#include <png.h>

using namespace std;

namespace {
    const char* kPngFileExtension = ".png";
    const char* kDumpFileExtension = ".bin";

    bool validatePng(std::ifstream& source) {
        const int kSigSize = 8;
        png_byte header[kSigSize] = {0};
        source.read((char*)header, kSigSize);

        return source.good() &&
               (png_sig_cmp(header, 0, kSigSize) == 0);
    }


    void readPngDataFromStream(png_structp pngPtr,
                               png_bytep data,
                               png_size_t length) {
        png_voidp p = png_get_io_ptr(pngPtr);
        ((std::ifstream*)p)->read((char*)data, length);
    }

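    // Decode a PNG file into a newly allocated pixel buffer and fill |info|
    // with the frame geometry and the matching V4L2 pixel format.  The caller
    // takes ownership of the returned buffer and must release it with delete[].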
    char* fillBufferFromPng(const string& filename,
                            imageMetadata& info) {
        // Open the PNG file
        std::ifstream source(filename, ios::in | ios::binary);
        if (!source.is_open()) {
            LOG(ERROR) << "Failed to open " << filename;
            return nullptr;
        }

        // Validate the input PNG file
        if (!validatePng(source)) {
            LOG(ERROR) << filename << " is not a valid PNG file";
            source.close();
            return nullptr;
        }

        // Prepare a control structure
        png_structp pngPtr = png_create_read_struct(PNG_LIBPNG_VER_STRING, NULL, NULL, NULL);
        if (!pngPtr) {
            LOG(ERROR) << "Failed to create a control structure";
            source.close();
            return nullptr;
        }

        // Set up the image info structure
        png_infop infoPtr = png_create_info_struct(pngPtr);
        if (!infoPtr) {
            LOG(ERROR) << "Failed to initialize a png_info";
            png_destroy_read_struct(&pngPtr, nullptr, nullptr);
            source.close();
            return nullptr;
        }

        // Set up an error handler
        if (setjmp(png_jmpbuf(pngPtr))) {
            png_destroy_read_struct(&pngPtr, &infoPtr, nullptr);
            source.close();
            return nullptr;
        }

        // Set up the PNG reader and fetch the remaining header bytes
        png_set_read_fn(pngPtr, (png_voidp)&source, readPngDataFromStream);
        const int kSigSize = 8;
        png_set_sig_bytes(pngPtr, kSigSize);
        png_read_info(pngPtr, infoPtr);

        // Get basic image information
        png_uint_32 width = png_get_image_width(pngPtr, infoPtr);
        png_uint_32 height = png_get_image_height(pngPtr, infoPtr);
        png_uint_32 bitdepth = png_get_bit_depth(pngPtr, infoPtr);
        png_uint_32 channels = png_get_channels(pngPtr, infoPtr);
        png_uint_32 colorType = png_get_color_type(pngPtr, infoPtr);

        // Record video device info
        info.width = width;
        info.height = height;
        switch (colorType) {
            case PNG_COLOR_TYPE_GRAY:
                png_set_expand_gray_1_2_4_to_8(pngPtr);
                bitdepth = 8;
                info.format = V4L2_PIX_FMT_GREY;
                break;

            case PNG_COLOR_TYPE_RGB:
                info.format = V4L2_PIX_FMT_XBGR32;
                break;

            case PNG_COLOR_TYPE_RGB_ALPHA:
                info.format = V4L2_PIX_FMT_ABGR32;
                break;

            default:
                LOG(ERROR) << "Unsupported PNG color type: " << colorType;
                // Release libpng resources before bailing out
                png_destroy_read_struct(&pngPtr, &infoPtr, nullptr);
                source.close();
                return nullptr;
        }

        // If the image has a transparency chunk, convert it to a full alpha channel
        if (png_get_valid(pngPtr, infoPtr, PNG_INFO_tRNS)) {
            png_set_tRNS_to_alpha(pngPtr);
            channels += 1;
            info.format = V4L2_PIX_FMT_ABGR32;
        }

        // Refresh PNG info
        png_read_update_info(pngPtr, infoPtr);

        // Allocate a buffer to contain pixel data.  This buffer will be managed
        // by the caller.
        const int stride = png_get_rowbytes(pngPtr, infoPtr);
        info.stride = stride;
        LOG(DEBUG) << "width = " << width
                   << ", height = " << height
                   << ", bitdepth = " << bitdepth
                   << ", channels = " << channels
                   << ", colorType = " << colorType
                   << ", stride = " << stride;

        char* buffer = new char[info.stride * height];
        png_bytep* rowPtrs = new png_bytep[height];
        for (png_uint_32 r = 0; r < height; ++r) {
            rowPtrs[r] = reinterpret_cast<unsigned char*>(buffer) + r * stride;
        }

        // Read the image
        png_read_image(pngPtr, rowPtrs);
        png_read_end(pngPtr, nullptr);

        // Clean up
        png_destroy_read_struct(&pngPtr, &infoPtr, nullptr);
        delete[] rowPtrs;
        source.close();

        return buffer;
    }
} // namespace


namespace android {
namespace automotive {
namespace evs {
namespace V1_1 {
namespace implementation {

VideoCapture::~VideoCapture() {
    // Stop active stream
    stopStream();

    // Close the device
    close();
}


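// Open a "virtual" capture device that replays frames from the image files
// found in |path|, pacing them at the requested frame interval.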
bool VideoCapture::open(const std::string& path,
                        const std::chrono::nanoseconds interval) {
    // Report device properties
    LOG(INFO) << "Open a virtual video stream with data from " << path;

    // Store the source location
    if (!filesystem::exists(path) || !filesystem::is_directory(path)) {
        LOG(ERROR) << path << " does not exist or is not a directory.";
        return false;
    }

    // Set up a directory iterator
    LOG(INFO) << "directory_iterator is set to " << path;
    mSrcIter = filesystem::directory_iterator(path);
    mSourceDir = path;

    // Set the frame rate
    mDesiredFrameInterval = interval;

    // Make sure we're initialized to the STOPPED state
    mRunMode = STOPPED;
    mFrameReady = false;
    mVideoReady = true;

    // Ready to go!
    return true;
}


void VideoCapture::close() {
    LOG(DEBUG) << __FUNCTION__;

    // Stream must be stopped first!
    assert(mRunMode == STOPPED);

    // Record that the device is now closed
    mVideoReady = false;

    // Free allocated resources
    delete[] mPixelBuffer;
}


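// Fire up a background thread that owns an android::Looper; frame delivery is
// driven by PERIODIC messages that handleMessage() re-arms at the desired
// frame interval until stopStream() is called.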
bool VideoCapture::startStream(
        std::function<void(VideoCapture*, imageBufferDesc*, void*)> callback) {
    // Set the state of our background thread
    int prevRunMode = mRunMode.fetch_or(RUN);
    if (prevRunMode & RUN) {
        // The background thread is already running, so we can't start a new stream
        LOG(ERROR) << "Already in RUN state, so we can't start a new streaming thread";
        return false;
    }

    // Remember whom to tell about new frames as they arrive
    mCallback = callback;

    // Fire up a thread to generate and dispatch the video frames
    mCaptureThread = std::thread([&]() {
        if (mCurrentStreamEvent != StreamEvent::INIT) {
            LOG(ERROR) << "Not in the right state to start a video stream.  Current state is "
                       << mCurrentStreamEvent;
            return;
        }

        // We'll periodically send a new frame
        mCurrentStreamEvent = StreamEvent::PERIODIC;

        // Set a background scheduling priority
        if (set_sched_policy(0, SP_BACKGROUND) != 0) {
            PLOG(WARNING) << "Failed to set background scheduling priority";
        }

        // Set up a looper for communication
        if (android::Looper::getForThread() != nullptr) {
            LOG(DEBUG) << "Use existing looper thread";
        }

        mLooper = android::Looper::prepare(/*opts=*/0);
        if (mLooper == nullptr) {
            LOG(ERROR) << "Failed to initialize the looper.  Exiting the thread.";
            return;
        }

        // Request to start generating frames periodically
        mLooper->sendMessage(this, StreamEvent::PERIODIC);

        // Poll messages until the stream stops
        while (mRunMode == RUN) {
            mLooper->pollAll(/*timeoutMillis=*/-1);
        }

        LOG(INFO) << "Capture thread is exiting.";
    });

    LOG(DEBUG) << "Stream started.";
    return true;
}


void VideoCapture::stopStream() {
    // Tell the background thread to stop
    int prevRunMode = mRunMode.fetch_or(STOPPING);
    if (prevRunMode == STOPPED) {
        // The background thread wasn't running, so set the flag back to STOPPED
        mRunMode = STOPPED;
    } else if (prevRunMode & STOPPING) {
        LOG(ERROR) << "stopStream called while stream is already stopping.  "
                   << "Reentrancy is not supported!";
        return;
    } else {
        // Block until the background thread is stopped
        if (mCaptureThread.joinable()) {
            // Remove all pending messages and wake the looper
            mLooper->removeMessages(this, StreamEvent::PERIODIC);
            mLooper->wake();
            mCaptureThread.join();
        } else {
            LOG(ERROR) << "Capture thread is not joinable";
        }

        mRunMode = STOPPED;
        LOG(DEBUG) << "Capture thread stopped.";
    }

    // Drop our reference to the frame delivery callback interface
    mCallback = nullptr;
}


void VideoCapture::markFrameReady() {
    mFrameReady = true;
}


bool VideoCapture::returnFrame() {
    // We're using a single buffer synchronously, so we just need to reset
    // mFrameReady to false.
    mFrameReady = false;

    return true;
}


// This runs on a background thread to receive and dispatch video frames
void VideoCapture::collectFrames() {
    const filesystem::directory_iterator end_iter;
    imageMetadata header = {};
    static uint64_t sequence = 0; // counting frames

    while (mPixelBuffer == nullptr && mSrcIter != end_iter) {
        LOG(INFO) << "Synthesizing a frame from " << mSrcIter->path();
        auto ext = mSrcIter->path().extension();
        if (ext == kPngFileExtension) {
            // Read a PNG image; a buffer will be allocated inside
            mPixelBuffer = fillBufferFromPng(mSrcIter->path(), header);

            // Update frame info
            mPixelBufferSize = header.stride * header.height;
        } else if (ext == kDumpFileExtension) {
            // Read files dumped by the reference EVS HAL implementation
            std::ifstream fin(mSrcIter->path(), ios::in | ios::binary);
            if (fin.is_open()) {
                // Read a header
                fin.read((char*)&header, sizeof(header));
                const size_t length = header.stride * header.height;

                // Allocate memory for pixel data
                mPixelBuffer = new char[length];
                mPixelBufferSize = length;

                // Read pixels
                fin.read(mPixelBuffer, length);
                if (fin.gcount() != static_cast<std::streamsize>(length)) {
                    LOG(WARNING) << mSrcIter->path() << " contains fewer bytes than expected.";
                }
                fin.close();
            } else {
                PLOG(ERROR) << "Failed to open " << mSrcIter->path();
            }
        } else {
            LOG(DEBUG) << "Unsupported file extension; ignoring "
                       << mSrcIter->path().filename();
        }

        // Move to the next file
        ++mSrcIter;
    }

    // Fill the buffer metadata
    mBufferInfo.info = header;
    mBufferInfo.sequence = sequence++;

    int64_t now = nanoseconds_to_milliseconds(systemTime(SYSTEM_TIME_MONOTONIC));
    mBufferInfo.timestamp.tv_sec = (time_t)(now / 1000LL);
    mBufferInfo.timestamp.tv_usec = (suseconds_t)((now % 1000LL) * 1000LL);

    if (mCallback != nullptr) {
        mCallback(this, &mBufferInfo, mPixelBuffer);
    }

    // Delete the consumed pixel buffer
    delete[] mPixelBuffer;
    mPixelBuffer = nullptr;
    mPixelBufferSize = 0;

    // If the last file has been processed, rewind the iterator to the first file.
    if (mSrcIter == end_iter) {
        LOG(DEBUG) << "Rewinding the iterator to the beginning.";
        mSrcIter = filesystem::directory_iterator(mSourceDir);
    }
}


int VideoCapture::setParameter(v4l2_control& /*control*/) {
    // Not implemented yet.
    return -ENOSYS;
}


int VideoCapture::getParameter(v4l2_control& /*control*/) {
    // Not implemented yet.
    return -ENOSYS;
}


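// Looper message handler; a PERIODIC event synthesizes one frame and schedules
// the next PERIODIC message at mDesiredFrameInterval after the current time.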
void VideoCapture::handleMessage(const android::Message& message) {
    const auto received = static_cast<StreamEvent>(message.what);
    switch (received) {
        case StreamEvent::PERIODIC: {
            // Generate a new frame and send it
            collectFrames();

            // Update the timestamp and arm a message for the next frame
            mLastTimeFrameSent = systemTime(SYSTEM_TIME_MONOTONIC);
            const auto next = mLastTimeFrameSent + mDesiredFrameInterval.count();
            mLooper->sendMessageAtTime(next, this, received);
            break;
        }

        case StreamEvent::STOP: {
            // Stop generating frames
            LOG(INFO) << "Stop generating frames";
            break;
        }

        default:
            LOG(WARNING) << "Unknown event received: " << received;
            break;
    }
}

} // namespace implementation
} // namespace V1_1
} // namespace evs
} // namespace automotive
} // namespace android