/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

11 #include "webrtc/modules/video_render/android/video_render_android_impl.h"
12
13 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
14 #include "webrtc/system_wrappers/interface/event_wrapper.h"
15 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
16 #include "webrtc/system_wrappers/interface/tick_util.h"
17
#ifdef ANDROID
#include <android/log.h>
#include <stdio.h>

#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a, b, c, ...) \
  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
#else
#include "webrtc/system_wrappers/interface/trace.h"
#endif

namespace webrtc {

JavaVM* VideoRenderAndroid::g_jvm = NULL;

#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
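// Stores the Java VM pointer that the render thread later uses to attach
// itself to the VM (see JavaRenderThreadProcess). It must be called before
// StartRender(). A minimal sketch of the expected call site, assuming the
// embedding application forwards its VM from JNI_OnLoad (illustrative only,
// not part of this file):
//
//   jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/) {
//     webrtc::SetRenderAndroidVM(vm);
//     return JNI_VERSION_1_6;
//   }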
int32_t SetRenderAndroidVM(void* javaVM) {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
  VideoRenderAndroid::g_jvm = (JavaVM*)javaVM;
  return 0;
}
#endif

VideoRenderAndroid::VideoRenderAndroid(
    const int32_t id,
    const VideoRenderType videoRenderType,
    void* window,
    const bool /*fullscreen*/):
    _id(id),
    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _renderType(videoRenderType),
    _ptrWindow((jobject)(window)),
    _javaShutDownFlag(false),
    _javaShutdownEvent(*EventWrapper::Create()),
    _javaRenderEvent(*EventWrapper::Create()),
    _lastJavaRenderEvent(0),
    _javaRenderJniEnv(NULL),
    _javaRenderThread(NULL) {
}

VideoRenderAndroid::~VideoRenderAndroid() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
               "VideoRenderAndroid dtor");

  if (_javaRenderThread)
    StopRender();

  for (AndroidStreamMap::iterator it = _streamsMap.begin();
       it != _streamsMap.end();
       ++it) {
    delete it->second;
  }
  delete &_javaShutdownEvent;
  delete &_javaRenderEvent;
  delete &_critSect;
}

int32_t VideoRenderAndroid::ChangeUniqueId(const int32_t id) {
  CriticalSectionScoped cs(&_critSect);
  _id = id;

  return 0;
}

int32_t VideoRenderAndroid::ChangeWindow(void* /*window*/) {
  return -1;
}

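// Creates a platform-specific render channel for |streamId| through
// CreateAndroidRenderChannel() and stores it in _streamsMap. Returns the new
// stream as a VideoRenderCallback so decoded frames can be delivered to it,
// or NULL if the stream already exists or the channel could not be created.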
VideoRenderCallback*
VideoRenderAndroid::AddIncomingRenderStream(const uint32_t streamId,
                                            const uint32_t zOrder,
                                            const float left, const float top,
                                            const float right,
                                            const float bottom) {
  CriticalSectionScoped cs(&_critSect);

  AndroidStream* renderStream = NULL;
  AndroidStreamMap::iterator item = _streamsMap.find(streamId);
  if (item != _streamsMap.end() && item->second != NULL) {
    WEBRTC_TRACE(kTraceInfo,
                 kTraceVideoRenderer,
                 -1,
                 "%s: Render stream already exists",
                 __FUNCTION__);
    return renderStream;
  }

  renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
                                            right, bottom, *this);
  if (renderStream) {
    _streamsMap[streamId] = renderStream;
  } else {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
    return NULL;
  }
  return renderStream;
}

int32_t VideoRenderAndroid::DeleteIncomingRenderStream(
    const uint32_t streamId) {
  CriticalSectionScoped cs(&_critSect);

  AndroidStreamMap::iterator item = _streamsMap.find(streamId);
  if (item == _streamsMap.end()) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "(%s:%d): renderStream not found", __FUNCTION__, __LINE__);
    return -1;
  }
  delete item->second;
  _streamsMap.erase(item);
  return 0;
}

int32_t VideoRenderAndroid::GetIncomingRenderStreamProperties(
    const uint32_t streamId,
    uint32_t& zOrder,
    float& left,
    float& top,
    float& right,
    float& bottom) const {
  return -1;
}

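// Creates and starts the shared Java render thread if it is not already
// running. The thread serves all streams of this renderer and is only torn
// down by StopRender(), typically from the destructor.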
int32_t VideoRenderAndroid::StartRender() {
  CriticalSectionScoped cs(&_critSect);

  if (_javaRenderThread) {
    // StartRender is called when this stream should start rendering.
    // However, StopRender is not called when the streams stop rendering.
    // Thus the thread is only deleted when the renderer is removed.
    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
                 "%s, Render thread already exists", __FUNCTION__);
    return 0;
  }

  _javaRenderThread = ThreadWrapper::CreateThread(JavaRenderThreadFun, this,
                                                  kRealtimePriority,
                                                  "AndroidRenderThread");
  if (!_javaRenderThread) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: No thread", __FUNCTION__);
    return -1;
  }

  unsigned int tId = 0;
  if (_javaRenderThread->Start(tId)) {
    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
                 "%s: thread started: %u", __FUNCTION__, tId);
  } else {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: Could not start render thread", __FUNCTION__);
    return -1;
  }
  return 0;
}

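// Signals the render thread to shut down, waits up to three seconds for it
// to detach from the JVM and signal _javaShutdownEvent, and then stops and
// deletes the thread. If the thread cannot be stopped it is deliberately
// leaked instead of risking a crash.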
int32_t VideoRenderAndroid::StopRender() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
  {
    CriticalSectionScoped cs(&_critSect);
    if (!_javaRenderThread) {
      return -1;
    }
    _javaShutDownFlag = true;
    _javaRenderEvent.Set();
  }

  _javaShutdownEvent.Wait(3000);
  CriticalSectionScoped cs(&_critSect);
  _javaRenderThread->SetNotAlive();
  if (_javaRenderThread->Stop()) {
    delete _javaRenderThread;
    _javaRenderThread = NULL;
  } else {
    assert(false);
    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                 "%s: Not able to stop thread, leaking", __FUNCTION__);
    _javaRenderThread = NULL;
  }
  return 0;
}

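// Wakes the render thread so that all streams are redrawn. Redraws are
// throttled to at most one every 20 ms (roughly 50 per second).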
void VideoRenderAndroid::ReDraw() {
  CriticalSectionScoped cs(&_critSect);
  // Allow redraw if it was more than 20 ms since the last one.
  if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) {
    _lastJavaRenderEvent = TickTime::MillisecondTimestamp();
    _javaRenderEvent.Set();
  }
}

bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) {
  return static_cast<VideoRenderAndroid*>(obj)->JavaRenderThreadProcess();
}

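// Body of the render thread. Each pass waits (up to one second) for a render
// event, attaches the thread to the JVM on first use, and calls
// DeliverFrame() on every registered stream. When _javaShutDownFlag is set,
// the thread detaches from the JVM, signals _javaShutdownEvent and returns
// false so that it is not scheduled again.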
bool VideoRenderAndroid::JavaRenderThreadProcess() {
  _javaRenderEvent.Wait(1000);

  CriticalSectionScoped cs(&_critSect);
  if (!_javaRenderJniEnv) {
    // Attach this thread to the JVM and get the JNI env for it.
    jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL);

    if ((res < 0) || !_javaRenderJniEnv) {
      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__, res, _javaRenderJniEnv);
      return false;
    }
  }

  for (AndroidStreamMap::iterator it = _streamsMap.begin();
       it != _streamsMap.end();
       ++it) {
    it->second->DeliverFrame(_javaRenderJniEnv);
  }

  if (_javaShutDownFlag) {
    if (g_jvm->DetachCurrentThread() < 0) {
      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
                   "%s: Java thread detached", __FUNCTION__);
    }
    _javaRenderJniEnv = NULL;
    _javaShutDownFlag = false;
    _javaShutdownEvent.Set();
    return false;  // Do not run this thread again.
  }
  return true;
}

VideoRenderType VideoRenderAndroid::RenderType() {
  return _renderType;
}

RawVideoType VideoRenderAndroid::PerferedVideoType() {
  return kVideoI420;
}

bool VideoRenderAndroid::FullScreen() {
  return false;
}

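// The remaining configuration and query methods are not supported on
// Android; each one just logs an error and returns -1 (for RenderFrameRate
// the value is returned through a uint32_t).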
GetGraphicsMemory(uint64_t &,uint64_t &) const269 int32_t VideoRenderAndroid::GetGraphicsMemory(
270 uint64_t& /*totalGraphicsMemory*/,
271 uint64_t& /*availableGraphicsMemory*/) const {
272 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
273 "%s - not supported on Android", __FUNCTION__);
274 return -1;
275 }
276
GetScreenResolution(uint32_t &,uint32_t &) const277 int32_t VideoRenderAndroid::GetScreenResolution(
278 uint32_t& /*screenWidth*/,
279 uint32_t& /*screenHeight*/) const {
280 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
281 "%s - not supported on Android", __FUNCTION__);
282 return -1;
283 }
284
RenderFrameRate(const uint32_t)285 uint32_t VideoRenderAndroid::RenderFrameRate(
286 const uint32_t /*streamId*/) {
287 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
288 "%s - not supported on Android", __FUNCTION__);
289 return -1;
290 }
291
SetStreamCropping(const uint32_t,const float,const float,const float,const float)292 int32_t VideoRenderAndroid::SetStreamCropping(
293 const uint32_t /*streamId*/,
294 const float /*left*/,
295 const float /*top*/,
296 const float /*right*/,
297 const float /*bottom*/) {
298 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
299 "%s - not supported on Android", __FUNCTION__);
300 return -1;
301 }
302
SetTransparentBackground(const bool enable)303 int32_t VideoRenderAndroid::SetTransparentBackground(const bool enable) {
304 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
305 "%s - not supported on Android", __FUNCTION__);
306 return -1;
307 }
308
ConfigureRenderer(const uint32_t streamId,const unsigned int zOrder,const float left,const float top,const float right,const float bottom)309 int32_t VideoRenderAndroid::ConfigureRenderer(
310 const uint32_t streamId,
311 const unsigned int zOrder,
312 const float left,
313 const float top,
314 const float right,
315 const float bottom) {
316 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
317 "%s - not supported on Android", __FUNCTION__);
318 return -1;
319 }
320
SetText(const uint8_t textId,const uint8_t * text,const int32_t textLength,const uint32_t textColorRef,const uint32_t backgroundColorRef,const float left,const float top,const float rigth,const float bottom)321 int32_t VideoRenderAndroid::SetText(
322 const uint8_t textId,
323 const uint8_t* text,
324 const int32_t textLength,
325 const uint32_t textColorRef,
326 const uint32_t backgroundColorRef,
327 const float left, const float top,
328 const float rigth, const float bottom) {
329 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
330 "%s - not supported on Android", __FUNCTION__);
331 return -1;
332 }
333
SetBitmap(const void * bitMap,const uint8_t pictureId,const void * colorKey,const float left,const float top,const float right,const float bottom)334 int32_t VideoRenderAndroid::SetBitmap(const void* bitMap,
335 const uint8_t pictureId,
336 const void* colorKey,
337 const float left, const float top,
338 const float right,
339 const float bottom) {
340 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
341 "%s - not supported on Android", __FUNCTION__);
342 return -1;
343 }
344
345 } // namespace webrtc
346