/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_render/incoming_video_stream.h"

#include <assert.h>

#if defined(_WIN32)
#include <windows.h>
#elif defined(WEBRTC_LINUX)
#include <sys/time.h>
#include <time.h>
#else
#include <sys/time.h>
#endif

#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_render/video_render_frames.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/system_wrappers/interface/trace.h"

namespace webrtc {

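// IncomingVideoStream accepts decoded frames via RenderFrame() on whichever
// thread delivers them, buffers them in render_buffers_, and hands them out
// from its own render thread (IncomingVideoStreamProcess) when their render
// time is due. Three locks are used: stream_critsect_ guards configuration
// and state, thread_critsect_ guards the render thread and the start/timeout
// images, and buffer_critsect_ guards the frame buffer.
//
// Typical driving sequence (a sketch only; names are illustrative and in
// practice the owning video render module performs this wiring):
//
//   IncomingVideoStream stream(module_id, stream_id);
//   stream.SetRenderCallback(platform_renderer);  // or SetExternalCallback().
//   stream.Start();
//   // For every decoded frame, on the delivering thread:
//   stream.RenderFrame(stream_id, decoded_frame);
//   ...
//   stream.Stop();
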
IncomingVideoStream::IncomingVideoStream(const int32_t module_id,
                                         const uint32_t stream_id)
    : module_id_(module_id),
      stream_id_(stream_id),
      stream_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
      thread_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
      buffer_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
      incoming_render_thread_(),
      deliver_buffer_event_(*EventWrapper::Create()),
      running_(false),
      external_callback_(NULL),
      render_callback_(NULL),
      render_buffers_(*(new VideoRenderFrames)),
      callbackVideoType_(kVideoI420),
      callbackWidth_(0),
      callbackHeight_(0),
      incoming_rate_(0),
      last_rate_calculation_time_ms_(0),
      num_frames_since_last_calculation_(0),
      last_rendered_frame_(),
      temp_frame_(),
      start_image_(),
      timeout_image_(),
      timeout_time_(),
      mirror_frames_enabled_(false),
      mirroring_(),
      transformed_video_frame_() {
  WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, module_id_,
               "%s created for stream %d", __FUNCTION__, stream_id);
}

IncomingVideoStream::~IncomingVideoStream() {
  WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, module_id_,
               "%s deleted for stream %d", __FUNCTION__, stream_id_);

  Stop();

  // incoming_render_thread_ is deleted in Stop().
  delete &render_buffers_;
  delete &stream_critsect_;
  delete &buffer_critsect_;
  delete &thread_critsect_;
  delete &deliver_buffer_event_;
}

int32_t IncomingVideoStream::ChangeModuleId(const int32_t id) {
  CriticalSectionScoped cs(&stream_critsect_);
  module_id_ = id;
  return 0;
}

VideoRenderCallback* IncomingVideoStream::ModuleCallback() {
  CriticalSectionScoped cs(&stream_critsect_);
  return this;
}

int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id,
                                         I420VideoFrame& video_frame) {
  CriticalSectionScoped csS(&stream_critsect_);
  WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
               "%s for stream %d, render time: %u", __FUNCTION__, stream_id_,
               video_frame.render_time_ms());

  if (!running_) {
    WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
                 "%s: Not running", __FUNCTION__);
    return -1;
  }

  // Mirroring is not supported if the frame is backed by a texture.
  if (mirror_frames_enabled_ && video_frame.native_handle() == NULL) {
    transformed_video_frame_.CreateEmptyFrame(video_frame.width(),
                                              video_frame.height(),
                                              video_frame.stride(kYPlane),
                                              video_frame.stride(kUPlane),
                                              video_frame.stride(kVPlane));
    if (mirroring_.mirror_x_axis) {
      MirrorI420UpDown(&video_frame, &transformed_video_frame_);
      video_frame.SwapFrame(&transformed_video_frame_);
    }
    if (mirroring_.mirror_y_axis) {
      MirrorI420LeftRight(&video_frame, &transformed_video_frame_);
      video_frame.SwapFrame(&transformed_video_frame_);
    }
  }

  // Rate statistics.
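  // The incoming frame rate is recomputed over a window of KFrameRatePeriodMs:
  // 1000 * frames_in_window / window_length_ms. For example, 30 frames
  // arriving over a 1000 ms window gives an incoming_rate_ of 30 fps
  // (integer truncated).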
  num_frames_since_last_calculation_++;
  int64_t now_ms = TickTime::MillisecondTimestamp();
  if (now_ms >= last_rate_calculation_time_ms_ + KFrameRatePeriodMs) {
    incoming_rate_ =
        static_cast<uint32_t>(1000 * num_frames_since_last_calculation_ /
                              (now_ms - last_rate_calculation_time_ms_));
    num_frames_since_last_calculation_ = 0;
    last_rate_calculation_time_ms_ = now_ms;
  }

  // Insert frame.
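  // AddFrame() appears to return the number of frames now queued; when the
  // queue goes from empty to one frame, wake the render thread immediately
  // instead of waiting for its timer to expire.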
  CriticalSectionScoped csB(&buffer_critsect_);
  if (render_buffers_.AddFrame(&video_frame) == 1)
    deliver_buffer_event_.Set();

  return 0;
}

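// The start image is rendered until the first real frame has been delivered;
// the timeout image is rendered if more than timeout_time_ ms pass without a
// new frame (see IncomingVideoStreamProcess below).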
int32_t IncomingVideoStream::SetStartImage(
    const I420VideoFrame& video_frame) {
  CriticalSectionScoped csS(&thread_critsect_);
  return start_image_.CopyFrame(video_frame);
}

int32_t IncomingVideoStream::SetTimeoutImage(
    const I420VideoFrame& video_frame, const uint32_t timeout) {
  CriticalSectionScoped csS(&thread_critsect_);
  timeout_time_ = timeout;
  return timeout_image_.CopyFrame(video_frame);
}

int32_t IncomingVideoStream::SetRenderCallback(
    VideoRenderCallback* render_callback) {
  CriticalSectionScoped cs(&stream_critsect_);

  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
               "%s(%x) for stream %d", __FUNCTION__, render_callback,
               stream_id_);
  render_callback_ = render_callback;
  return 0;
}

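// Mirroring only applies to frames held in CPU memory; texture-backed frames
// (native_handle() != NULL) are passed through unmirrored in RenderFrame().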
int32_t IncomingVideoStream::EnableMirroring(const bool enable,
                                             const bool mirror_x_axis,
                                             const bool mirror_y_axis) {
  CriticalSectionScoped cs(&stream_critsect_);
  mirror_frames_enabled_ = enable;
  mirroring_.mirror_x_axis = mirror_x_axis;
  mirroring_.mirror_y_axis = mirror_y_axis;

  return 0;
}

int32_t IncomingVideoStream::SetExpectedRenderDelay(
    int32_t delay_ms) {
  CriticalSectionScoped csS(&stream_critsect_);
  if (running_) {
    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_,
                 "%s(%d) not allowed while running, for stream %d",
                 __FUNCTION__, delay_ms, stream_id_);
    return -1;
  }
  CriticalSectionScoped cs(&buffer_critsect_);
  return render_buffers_.SetRenderDelay(delay_ms);
}

int32_t IncomingVideoStream::SetExternalCallback(
    VideoRenderCallback* external_callback) {
  CriticalSectionScoped cs(&stream_critsect_);
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
               "%s(%x) for stream %d", __FUNCTION__, external_callback,
               stream_id_);
  external_callback_ = external_callback;
  callbackVideoType_ = kVideoI420;
  callbackWidth_ = 0;
  callbackHeight_ = 0;
  return 0;
}

int32_t IncomingVideoStream::Start() {
  CriticalSectionScoped csS(&stream_critsect_);
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
               "%s for stream %d", __FUNCTION__, stream_id_);
  if (running_) {
    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_,
                 "%s: Already running", __FUNCTION__);
    return 0;
  }

  CriticalSectionScoped csT(&thread_critsect_);
  assert(incoming_render_thread_ == NULL);

  incoming_render_thread_ = ThreadWrapper::CreateThread(
      IncomingVideoStreamThreadFun, this, kRealtimePriority,
      "IncomingVideoStreamThread");
  if (!incoming_render_thread_) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, module_id_,
                 "%s: No thread", __FUNCTION__);
    return -1;
  }

  unsigned int t_id = 0;
  if (incoming_render_thread_->Start(t_id)) {
    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
                 "%s: thread started: %u", __FUNCTION__, t_id);
  } else {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, module_id_,
                 "%s: Could not start render thread", __FUNCTION__);
    return -1;
  }
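  // Arm the delivery event as a timer so the render thread wakes up after
  // KEventStartupTimeMS even before the first frame arrives; the thread
  // re-arms it on every pass through IncomingVideoStreamProcess(). The first
  // StartTimer() argument is assumed to select one-shot (false) vs. periodic.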
  deliver_buffer_event_.StartTimer(false, KEventStartupTimeMS);

  running_ = true;
  return 0;
}

int32_t IncomingVideoStream::Stop() {
  CriticalSectionScoped cs_stream(&stream_critsect_);
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
               "%s for stream %d", __FUNCTION__, stream_id_);

  if (!running_) {
    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_,
                 "%s: Not running", __FUNCTION__);
    return 0;
  }

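  // Shutdown handshake: clear incoming_render_thread_ while holding
  // thread_critsect_ so that IncomingVideoStreamProcess() sees NULL and
  // returns false, then release the lock before joining and deleting the
  // thread so the render loop is not blocked on the same lock.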
  thread_critsect_.Enter();
  if (incoming_render_thread_) {
    ThreadWrapper* thread = incoming_render_thread_;
    incoming_render_thread_ = NULL;
    thread->SetNotAlive();
#ifndef _WIN32
    deliver_buffer_event_.StopTimer();
#endif
    thread_critsect_.Leave();
    if (thread->Stop()) {
      delete thread;
    } else {
      assert(false);
      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_,
                   "%s: Not able to stop thread, leaking", __FUNCTION__);
    }
  } else {
    thread_critsect_.Leave();
  }
  running_ = false;
  return 0;
}

int32_t IncomingVideoStream::Reset() {
  CriticalSectionScoped cs_stream(&stream_critsect_);
  CriticalSectionScoped cs_buffer(&buffer_critsect_);
  render_buffers_.ReleaseAllFrames();
  return 0;
}

uint32_t IncomingVideoStream::StreamId() const {
  CriticalSectionScoped cs_stream(&stream_critsect_);
  return stream_id_;
}

uint32_t IncomingVideoStream::IncomingRate() const {
  CriticalSectionScoped cs(&stream_critsect_);
  return incoming_rate_;
}

bool IncomingVideoStream::IncomingVideoStreamThreadFun(void* obj) {
  return static_cast<IncomingVideoStream*>(obj)->IncomingVideoStreamProcess();
}

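// Render-thread loop body. Each pass waits on deliver_buffer_event_ (bounded
// by KEventMaxWaitTimeMs), exits if Stop() has cleared incoming_render_thread_,
// pops the next due frame from render_buffers_, re-arms the timer for the
// following frame, and hands the frame to the external or platform-specific
// render callback. Returning true keeps the thread loop running.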
bool IncomingVideoStream::IncomingVideoStreamProcess() {
  if (kEventError != deliver_buffer_event_.Wait(KEventMaxWaitTimeMs)) {
    thread_critsect_.Enter();
    if (incoming_render_thread_ == NULL) {
      // Terminating.
      thread_critsect_.Leave();
      return false;
    }

    I420VideoFrame* frame_to_render = NULL;

    // Get a new frame to render and the time for the frame after this one.
    buffer_critsect_.Enter();
    frame_to_render = render_buffers_.FrameToRender();
    uint32_t wait_time = render_buffers_.TimeToNextFrameRelease();
    buffer_critsect_.Leave();

    // Set timer for next frame to render.
    if (wait_time > KEventMaxWaitTimeMs) {
      wait_time = KEventMaxWaitTimeMs;
    }
    deliver_buffer_event_.StartTimer(false, wait_time);

    if (!frame_to_render) {
      if (render_callback_) {
        if (last_rendered_frame_.render_time_ms() == 0 &&
            !start_image_.IsZeroSize()) {
          // We have not rendered anything yet and have a start image.
          temp_frame_.CopyFrame(start_image_);
          render_callback_->RenderFrame(stream_id_, temp_frame_);
        } else if (!timeout_image_.IsZeroSize() &&
                   last_rendered_frame_.render_time_ms() + timeout_time_ <
                       TickTime::MillisecondTimestamp()) {
          // Render a timeout image.
          temp_frame_.CopyFrame(timeout_image_);
          render_callback_->RenderFrame(stream_id_, temp_frame_);
        }
      }

      // No frame.
      thread_critsect_.Leave();
      return true;
    }

    // Send frame for rendering.
    if (external_callback_) {
      WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
                   "%s: executing external renderer callback to deliver frame,"
                   " render time: %u",
                   __FUNCTION__, frame_to_render->render_time_ms());
      external_callback_->RenderFrame(stream_id_, *frame_to_render);
    } else {
      if (render_callback_) {
        WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
                     "%s: Render frame, time: %u", __FUNCTION__,
                     frame_to_render->render_time_ms());
        render_callback_->RenderFrame(stream_id_, *frame_to_render);
      }
    }

    // Release critsect before calling the module user.
    thread_critsect_.Leave();

    // We are done with this frame; return it to the buffer.
    if (frame_to_render) {
      CriticalSectionScoped cs(&buffer_critsect_);
      last_rendered_frame_.SwapFrame(frame_to_render);
      render_buffers_.ReturnFrame(frame_to_render);
    }
  }
  return true;
}

int32_t IncomingVideoStream::GetLastRenderedFrame(
    I420VideoFrame& video_frame) const {
  CriticalSectionScoped cs(&buffer_critsect_);
  return video_frame.CopyFrame(last_rendered_frame_);
}

}  // namespace webrtc