/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_

#include <string>

#include "webrtc/base/checks.h"
#include "webrtc/common_video/libyuv/include/scaler.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/test/packet_manipulator.h"
#include "webrtc/modules/video_coding/codecs/test/stats.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/frame_reader.h"
#include "webrtc/test/testsupport/frame_writer.h"
#include "webrtc/video_frame.h"

namespace webrtc {
namespace test {

// Defines which frame types shall be excluded from packet loss and when.
enum ExcludeFrameTypes {
  // Will exclude the first keyframe in the video sequence from packet loss.
  // Following keyframes will be targeted for packet loss.
  kExcludeOnlyFirstKeyFrame,
  // Exclude all keyframes from packet loss, no matter where in the video
  // sequence they occur.
  kExcludeAllKeyFrames
};

// Returns a string representation of the enum value.
const char* ExcludeFrameTypesToStr(ExcludeFrameTypes e);

// Test configuration for a test run.
struct TestConfig {
  TestConfig();
  ~TestConfig();

  // Name of the test. This is purely metadata and does not affect the test
  // in any way.
  std::string name;

  // More detailed description of the test. This is purely metadata and does
  // not affect the test in any way.
  std::string description;

  // Number of this test. Useful if multiple runs of the same test with
  // different configurations shall be managed.
  int test_number;

  // File to process for the test. This must be a video file in YUV format.
  std::string input_filename;

  // File to write to during processing for the test. Will be a video file
  // in YUV format.
  std::string output_filename;

  // Path to the directory where encoded files will be put
  // (absolute or relative to the executable). Default: "out".
  std::string output_dir;

  // Configurations related to networking.
  NetworkingConfig networking_config;

  // Decides how the packet loss simulations shall exclude certain frames
  // from packet loss. Default: kExcludeOnlyFirstKeyFrame.
  ExcludeFrameTypes exclude_frame_types;

  // The length of a single frame of the input video file, in bytes. This
  // value is calculated from the width and height according to the video
  // format specification. Must be set before processing.
  size_t frame_length_in_bytes;

  // Force the encoder and decoder to use a single core for processing.
  // Using a single core is necessary to get deterministic behavior for the
  // encoded frames - using multiple cores will produce different encoded
  // frames, since the cores compete to consume the byte budget for each
  // frame in parallel.
  // If set to false, the maximum number of available cores will be used.
  // Default: false.
  bool use_single_core;

  // If set to a value > 0, this setting forces the encoder to create a
  // keyframe every Nth frame. Note that the encoder may create keyframes at
  // other locations in addition to the interval set by this parameter.
  // Forcing key frames may also hurt encoder planning optimizations, since
  // the encoder is suddenly forced to produce an expensive key frame.
  // Default: 0.
  int keyframe_interval;

  // The codec settings to use for the test (target bitrate, video size,
  // framerate and so on). This struct must be created and filled in using
  // the VideoCodingModule::Codec() method.
  webrtc::VideoCodec* codec_settings;

  // Whether to print information to stdout during processing.
  bool verbose;
};
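
// Illustrative sketch (not part of the original header): one way a test could
// fill in a TestConfig before handing it to a VideoProcessor. The file names
// and the 352x288 resolution below are assumptions made purely for
// illustration; codec_settings is assumed to point at a webrtc::VideoCodec
// populated via VideoCodingModule::Codec(), as required above.
//
//   webrtc::VideoCodec codec_settings;             // populated elsewhere via
//                                                  // VideoCodingModule::Codec()
//   webrtc::test::TestConfig config;
//   config.name = "example_run";                   // hypothetical name
//   config.input_filename = "input_cif.yuv";       // hypothetical 352x288 clip
//   config.output_filename = "output_cif.yuv";     // hypothetical output file
//   config.frame_length_in_bytes =
//       webrtc::CalcBufferSize(webrtc::kI420, 352, 288);
//   config.codec_settings = &codec_settings;
//   config.verbose = false;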

// Returns a string representation of the enum value.
const char* VideoCodecTypeToStr(webrtc::VideoCodecType e);

// Handles encoding/decoding of video using the VideoEncoder/VideoDecoder
// interfaces. This is done sequentially in order to be able to measure times
// properly.
// The class processes one frame at a time for the configured input file.
// It maintains state of where in the source input file the processing is at.
//
// Regarding packet loss: Note that keyframes are excluded (the first one or
// all of them, depending on the ExcludeFrameTypes setting). This is because
// if key frames were altered, all the following delta frames would be pretty
// much worthless. VP8 has an error-resilience feature that makes it able to
// handle packet loss in non-first keyframes, which is why only the first is
// excluded by default.
// Packet loss in such important frames is handled on a higher level in the
// Video Engine, where signaling would request a retransmit of the lost
// packets, since they're so important.
//
// Note this class is not thread safe in any way and is meant for simple
// testing purposes.
class VideoProcessor {
 public:
  virtual ~VideoProcessor() {}

  // Performs initial calculations about frame size, sets up callbacks, etc.
  // Returns false if an error has occurred, in addition to printing to stderr.
  virtual bool Init() = 0;

  // Processes a single frame. Returns true as long as there are more frames
  // available in the source clip.
  // The frame number must be an integer >= 0.
  virtual bool ProcessFrame(int frame_number) = 0;

  // Updates the encoder with the target bit rate and the frame rate.
  virtual void SetRates(int bit_rate, int frame_rate) = 0;

  // Return the size of the encoded frame in bytes. Frames dropped by the
  // encoder are regarded as having zero size.
  virtual size_t EncodedFrameSize() = 0;

  // Return the encoded frame type (key or delta).
  virtual FrameType EncodedFrameType() = 0;

  // Return the number of dropped frames.
  virtual int NumberDroppedFrames() = 0;

  // Return the number of spatial resizes.
  virtual int NumberSpatialResizes() = 0;
};
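
// Illustrative sketch (not part of the original header): the driver loop this
// interface is designed for, assuming |processor| points at a concrete
// VideoProcessor and that the bit rate unit matches what the underlying
// encoder's rate-control API expects (an assumption; check the
// implementation).
//
//   if (!processor->Init())
//     return;                        // Init() already reported the error.
//   processor->SetRates(500, 30);    // hypothetical target bit rate and fps
//   int frame_number = 0;
//   while (processor->ProcessFrame(frame_number)) {
//     ++frame_number;                // loop until the source clip is exhausted
//   }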

class VideoProcessorImpl : public VideoProcessor {
 public:
  VideoProcessorImpl(webrtc::VideoEncoder* encoder,
                     webrtc::VideoDecoder* decoder,
                     FrameReader* frame_reader,
                     FrameWriter* frame_writer,
                     PacketManipulator* packet_manipulator,
                     const TestConfig& config,
                     Stats* stats);
  virtual ~VideoProcessorImpl();
  bool Init() override;
  bool ProcessFrame(int frame_number) override;

 private:
  // Invoked by the callback when a frame has completed encoding.
  void FrameEncoded(const webrtc::EncodedImage& encodedImage);
  // Invoked by the callback when a frame has completed decoding.
  void FrameDecoded(const webrtc::VideoFrame& image);
  // Used for getting a 32-bit integer representing the elapsed time
  // (checks that the value is within signed 32-bit bounds before casting it).
  int GetElapsedTimeMicroseconds(const webrtc::TickTime& start,
                                 const webrtc::TickTime& stop);
  // Updates the encoder with the target bit rate and the frame rate.
  void SetRates(int bit_rate, int frame_rate) override;
  // Return the size of the encoded frame in bytes.
  size_t EncodedFrameSize() override;
  // Return the encoded frame type (key or delta).
  FrameType EncodedFrameType() override;
  // Return the number of dropped frames.
  int NumberDroppedFrames() override;
  // Return the number of spatial resizes.
  int NumberSpatialResizes() override;

  webrtc::VideoEncoder* encoder_;
  webrtc::VideoDecoder* decoder_;
  FrameReader* frame_reader_;
  FrameWriter* frame_writer_;
  PacketManipulator* packet_manipulator_;
  const TestConfig& config_;
  Stats* stats_;

  EncodedImageCallback* encode_callback_;
  DecodedImageCallback* decode_callback_;
  // Buffer used for reading the source video file.
  uint8_t* source_buffer_;
  // Keep track of the last successful frame, since we need to write that
  // when decoding fails.
  uint8_t* last_successful_frame_buffer_;
  webrtc::VideoFrame source_frame_;
  // To keep track of whether we have excluded the first key frame from
  // packet loss.
  bool first_key_frame_has_been_excluded_;
  // To tell the decoder that the previous frame has been dropped due to
  // packet loss.
  bool last_frame_missing_;
  // Whether Init() has executed successfully.
  bool initialized_;
  size_t encoded_frame_size_;
  FrameType encoded_frame_type_;
  int prev_time_stamp_;
  int num_dropped_frames_;
  int num_spatial_resizes_;
  int last_encoder_frame_width_;
  int last_encoder_frame_height_;
  Scaler scaler_;

  // Statistics.
  double bit_rate_factor_;  // Multiply frame length by this to get bit rate.
  webrtc::TickTime encode_start_;
  webrtc::TickTime decode_start_;

  // Callback class required to implement according to the VideoEncoder API.
  class VideoProcessorEncodeCompleteCallback
      : public webrtc::EncodedImageCallback {
   public:
    explicit VideoProcessorEncodeCompleteCallback(VideoProcessorImpl* vp)
        : video_processor_(vp) {}
    int32_t Encoded(
        const webrtc::EncodedImage& encoded_image,
        const webrtc::CodecSpecificInfo* codec_specific_info,
        const webrtc::RTPFragmentationHeader* fragmentation) override;

   private:
    VideoProcessorImpl* video_processor_;
  };

  // Callback class required to implement according to the VideoDecoder API.
  class VideoProcessorDecodeCompleteCallback
      : public webrtc::DecodedImageCallback {
   public:
    explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
        : video_processor_(vp) {}
    int32_t Decoded(webrtc::VideoFrame& image) override;
    int32_t Decoded(webrtc::VideoFrame& image,
                    int64_t decode_time_ms) override {
      RTC_NOTREACHED();
      return -1;
    }

   private:
    VideoProcessorImpl* video_processor_;
  };
};
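
// Illustrative sketch (not part of the original header): how the pieces above
// could be wired together. The concrete encoder/decoder, frame reader/writer,
// packet manipulator and stats objects are assumed to be created and owned by
// the surrounding test code; the names below are placeholders.
//
//   VideoProcessorImpl processor(encoder, decoder, &frame_reader,
//                                &frame_writer, &packet_manipulator, config,
//                                &stats);
//   // The processor is then driven with Init()/SetRates()/ProcessFrame() as
//   // sketched after the VideoProcessor interface above.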

}  // namespace test
}  // namespace webrtc

#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_