1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include "webrtc/engine_configurations.h"
12
13 #if defined(CARBON_RENDERING)
14
15 #include "webrtc/modules/video_render/mac/video_render_agl.h"
16
17 // includes
18 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
19 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
20 #include "webrtc/system_wrappers/interface/event_wrapper.h"
21 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
22 #include "webrtc/system_wrappers/interface/trace.h"
23
24 namespace webrtc {
25
26 /*
27 *
28 * VideoChannelAGL
29 *
30 */
31
32 #pragma mark VideoChannelAGL constructor
33
34 VideoChannelAGL::VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner) :
35 _aglContext( aglContext),
36 _id( iId),
37 _owner( owner),
38 _width( 0),
39 _height( 0),
40 _stretchedWidth( 0),
41 _stretchedHeight( 0),
42 _startWidth( 0.0f),
43 _startHeight( 0.0f),
44 _stopWidth( 0.0f),
45 _stopHeight( 0.0f),
46 _xOldWidth( 0),
47 _yOldHeight( 0),
48 _oldStretchedHeight(0),
49 _oldStretchedWidth( 0),
50 _buffer( 0),
51 _bufferSize( 0),
52 _incommingBufferSize(0),
53 _bufferIsUpdated( false),
54 _sizeInitialized( false),
55 _numberOfStreams( 0),
56 _bVideoSizeStartedChanging(false),
57 _pixelFormat( GL_RGBA),
58 _pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
59 _texture( 0)
60
61 {
62 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Constructor", __FUNCTION__, __LINE__);
63 }
64
65 VideoChannelAGL::~VideoChannelAGL()
66 {
67 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Destructor", __FUNCTION__, __LINE__);
68 if (_buffer)
69 {
70 delete [] _buffer;
71 _buffer = NULL;
72 }
73
74 aglSetCurrentContext(_aglContext);
75
76 if (_texture != 0)
77 {
78 glDeleteTextures(1, (const GLuint*) &_texture);
79 _texture = 0;
80 }
81 }
82
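// Overview of the per-frame flow (see the implementations below): RenderFrame()
// re-creates the texture via FrameSizeChange() whenever the incoming resolution
// changes and then hands the frame to DeliverFrame(), which converts it to BGRA and
// uploads it into the texture. The actual drawing happens later, on the screen
// update thread, in RenderOffScreenBuffer().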
83 int32_t VideoChannelAGL::RenderFrame(const uint32_t streamId,
84 I420VideoFrame& videoFrame) {
85 _owner->LockAGLCntx();
86 if (_width != videoFrame.width() ||
87 _height != videoFrame.height()) {
88 if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
89 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d FrameSizeChange returned an error", __FUNCTION__, __LINE__);
91 _owner->UnlockAGLCntx();
92 return -1;
93 }
94 }
95
96 _owner->UnlockAGLCntx();
97 return DeliverFrame(videoFrame);
98 }
99
100 int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/)
101 {
102 _owner->LockAGLCntx();
103 _owner->UnlockAGLCntx();
104 return 0;
105 }
106
107 int VideoChannelAGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
108 {
109
110 _owner->LockAGLCntx();
111 _stretchedHeight = stretchHeight;
112 _stretchedWidth = stretchWidth;
113 _owner->UnlockAGLCntx();
114 return 0;
115 }
116
117 int VideoChannelAGL::FrameSizeChange(int width, int height, int numberOfStreams)
118 {
119 // We'll get a new frame size from VideoAPI, prepare the buffer
120
121 _owner->LockAGLCntx();
122
123 if (width == _width && _height == height)
124 {
125 // We already have a correct buffer size
126 _numberOfStreams = numberOfStreams;
127 _owner->UnlockAGLCntx();
128 return 0;
129 }
130
131 _width = width;
132 _height = height;
133
134 // Delete the old buffer, create a new one with correct size.
135 if (_buffer)
136 {
137 delete [] _buffer;
138 _bufferSize = 0;
139 }
140
141 _incommingBufferSize = CalcBufferSize(kI420, _width, _height);
142 _bufferSize = CalcBufferSize(kARGB, _width, _height);//_width * _height * bytesPerPixel;
143 _buffer = new unsigned char [_bufferSize];
144 memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
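// For example (illustrative numbers only): with a 640x480 frame the incoming I420
// buffer is 640 * 480 * 3 / 2 = 460800 bytes, while the ARGB/BGRA render buffer
// allocated above is 640 * 480 * 4 = 1228800 bytes.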
145
146 if (aglSetCurrentContext(_aglContext) == false)
147 {
148 _owner->UnlockAGLCntx();
149 return -1;
150 }
151
152 // Delete a possible old texture
153 if (_texture != 0)
154 {
155 glDeleteTextures(1, (const GLuint*) &_texture);
156 _texture = 0;
157 }
158
159 // Create a new texture
160 glGenTextures(1, (GLuint *) &_texture);
161
162 GLenum glErr = glGetError();
163
164 if (glErr != GL_NO_ERROR)
165 {
166 }
167
168 // Do the setup for both textures
169 // Note: we set up two textures even if we're not running full screen
170 glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
171
172 // Set texture parameters
173 glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
174
175 glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
176 glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
177
178 glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
179 glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
180 //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
181 //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
182
183 glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
184
185 glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
186
187 glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
188
189 // Maximum width/height for a texture
190 GLint texSize;
191 glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);
192
193 if (texSize < _width || texSize < _height)
194 {
195 // Image too big for memory
196 _owner->UnlockAGLCntx();
197 return -1;
198 }
199
200 // Set up the texture type and size
201 glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
202 0, // level
203 GL_RGBA, // internal format
204 _width, // width
205 _height, // height
206 0, // border 0/1 = off/on
207 _pixelFormat, // format, GL_BGRA
208 _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
209 _buffer); // pixel data
210
211 glErr = glGetError();
212 if (glErr != GL_NO_ERROR)
213 {
214 _owner->UnlockAGLCntx();
215 return -1;
216 }
217
218 _owner->UnlockAGLCntx();
219 return 0;
220 }
221
222 // Called from video engine when a new frame should be rendered.
223 int VideoChannelAGL::DeliverFrame(const I420VideoFrame& videoFrame) {
224 _owner->LockAGLCntx();
225
226 if (_texture == 0) {
227 _owner->UnlockAGLCntx();
228 return 0;
229 }
230
231 int length = CalcBufferSize(kI420, videoFrame.width(), videoFrame.height());
232 if (length != _incommingBufferSize) {
233 _owner->UnlockAGLCntx();
234 return -1;
235 }
236
237 // Setting stride = width.
238 int rgbret = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer);
239 if (rgbret < 0) {
240 _owner->UnlockAGLCntx();
241 return -1;
242 }
243
244 aglSetCurrentContext(_aglContext);
245
246 // Put the new frame into the graphic card texture.
247 // Make sure this texture is the active one
248 glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
249 GLenum glErr = glGetError();
250 if (glErr != GL_NO_ERROR) {
251 _owner->UnlockAGLCntx();
252 return -1;
253 }
254
255 // Copy buffer to texture
256 glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
257 0, // level, not used
258 0, // start point x (lower left of picture)
259 0, // start point y
260 _width, // width
261 _height, // height
262 _pixelFormat, // picture format of _buffer
263 _pixelDataType, // data type of _buffer
264 (const GLvoid*) _buffer); // the pixel data
265
266 if (glGetError() != GL_NO_ERROR) {
267 _owner->UnlockAGLCntx();
268 return -1;
269 }
270
271 _bufferIsUpdated = true;
272 _owner->UnlockAGLCntx();
273
274 return 0;
275 }
276
277 int VideoChannelAGL::RenderOffScreenBuffer()
278 {
279
280 _owner->LockAGLCntx();
281
282 if (_texture == 0)
283 {
284 _owner->UnlockAGLCntx();
285 return 0;
286 }
287
288 GLfloat xStart = 2.0f * _startWidth - 1.0f;
289 GLfloat xStop = 2.0f * _stopWidth - 1.0f;
290 GLfloat yStart = 1.0f - 2.0f * _stopHeight;
291 GLfloat yStop = 1.0f - 2.0f * _startHeight;
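// Worked example (illustrative): a stream meant to fill the whole window has
// _startWidth = _startHeight = 0.0 and _stopWidth = _stopHeight = 1.0, giving
// xStart = -1, xStop = 1, yStart = -1, yStop = 1, i.e. the full normalized device
// coordinate range. The y terms are inverted because the stream coordinates grow
// downwards from the top-left while GL's y axis grows upwards.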
292
293 aglSetCurrentContext(_aglContext);
294 glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
295
296 if(_stretchedWidth != _oldStretchedWidth || _stretchedHeight != _oldStretchedHeight)
297 {
298 glViewport(0, 0, _stretchedWidth, _stretchedHeight);
299 }
300 _oldStretchedHeight = _stretchedHeight;
301 _oldStretchedWidth = _stretchedWidth;
302
303 // Now really put the texture into the framebuffer
304 glLoadIdentity();
305
306 glEnable(GL_TEXTURE_RECTANGLE_EXT);
307
308 glBegin(GL_POLYGON);
309 {
310 glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
311 glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
312 glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
313 glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
314 }
315 glEnd();
316
317 glDisable(GL_TEXTURE_RECTANGLE_EXT);
318
319 _bufferIsUpdated = false;
320
321 _owner->UnlockAGLCntx();
322 return 0;
323 }
324
325 int VideoChannelAGL::IsUpdated(bool& isUpdated)
326 {
327 _owner->LockAGLCntx();
328 isUpdated = _bufferIsUpdated;
329 _owner->UnlockAGLCntx();
330
331 return 0;
332 }
333
334 int VideoChannelAGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
335 {
336
337 _owner->LockAGLCntx();
338
339 _startWidth = startWidth;
340 _stopWidth = stopWidth;
341 _startHeight = startHeight;
342 _stopHeight = stopHeight;
343
344 int oldWidth = _width;
345 int oldHeight = _height;
346 int oldNumberOfStreams = _numberOfStreams;
347
348 _width = 0;
349 _height = 0;
350
351 int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);
352
353 _owner->UnlockAGLCntx();
354
355 return retVal;
356 }
357
358 int VideoChannelAGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
359 {
360 return -1;
361 }
362
363 #pragma mark VideoRenderAGL WindowRef constructor
364
365 VideoRenderAGL::VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId) :
366 _hiviewRef( 0),
367 _windowRef( windowRef),
368 _fullScreen( fullscreen),
369 _id( iId),
370 _renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
371 _screenUpdateThread( 0),
372 _screenUpdateEvent( 0),
373 _isHIViewRef( false),
374 _aglContext( 0),
375 _windowWidth( 0),
376 _windowHeight( 0),
377 _lastWindowWidth( -1),
378 _lastWindowHeight( -1),
379 _lastHiViewWidth( -1),
380 _lastHiViewHeight( -1),
381 _currentParentWindowHeight( 0),
382 _currentParentWindowWidth( 0),
383 _currentParentWindowBounds( ),
384 _windowHasResized( false),
385 _lastParentWindowBounds( ),
386 _currentHIViewBounds( ),
387 _lastHIViewBounds( ),
388 _windowRect( ),
389 _aglChannels( ),
390 _zOrderToChannel( ),
391 _hiviewEventHandlerRef( NULL),
392 _windowEventHandlerRef( NULL),
393 _currentViewBounds( ),
394 _lastViewBounds( ),
395 _renderingIsPaused( false),
396 _threadID( )
397
398 {
399 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s");
400
401 _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc, this, kRealtimePriority);
402 _screenUpdateEvent = EventWrapper::Create();
403
404 if(!IsValidWindowPtr(_windowRef))
405 {
406 //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef:0x%x", __FUNCTION__, __LINE__, _windowRef);
407 }
408 else
409 {
410 //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d WindowRef 0x%x is valid", __FUNCTION__, __LINE__, _windowRef);
411 }
412
413 GetWindowRect(_windowRect);
414
415 _lastViewBounds.origin.x = 0;
416 _lastViewBounds.origin.y = 0;
417 _lastViewBounds.size.width = 0;
418 _lastViewBounds.size.height = 0;
419
420 }
421
422 // This is a static function. It has been registered (in the class constructor) to be called on various window redraw or resize events.
423 // Since it is a static method, "this" is passed in as the userData (the one and only allowed) parameter, and member methods are then called on it. A minimal sketch of the registration follows below.
424 #pragma mark WindowRef Event Handler
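// Sketch of the registration that wires this handler up (it mirrors the actual
// InstallWindowEventHandler() call in the HIViewRef constructor further down and is
// shown here only to illustrate how "this" travels through userData):
//
//   InstallWindowEventHandler(parentWindow,
//                             NewEventHandlerUPP(sHandleWindowResized),
//                             GetEventTypeCount(windowEventTypes),
//                             windowEventTypes,
//                             (void*)this,               // arrives below as userData
//                             &_windowEventHandlerRef);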
425 pascal OSStatus VideoRenderAGL::sHandleWindowResized (EventHandlerCallRef /*nextHandler*/,
426 EventRef theEvent,
427 void* userData)
428 {
429 WindowRef windowRef = NULL;
430
431 int eventType = GetEventKind(theEvent);
432
433 // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
434 GetEventParameter (theEvent,
435 kEventParamDirectObject,
436 typeWindowRef,
437 NULL,
438 sizeof (WindowRef),
439 NULL,
440 &windowRef);
441
442 VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
443
444 bool updateUI = true;
445 if(kEventWindowBoundsChanged == eventType)
446 {
447 }
448 else if(kEventWindowBoundsChanging == eventType)
449 {
450 }
451 else if(kEventWindowZoomed == eventType)
452 {
453 }
454 else if(kEventWindowExpanding == eventType)
455 {
456 }
457 else if(kEventWindowExpanded == eventType)
458 {
459 }
460 else if(kEventWindowClickResizeRgn == eventType)
461 {
462 }
463 else if(kEventWindowClickDragRgn == eventType)
464 {
465 }
466 else
467 {
468 updateUI = false;
469 }
470
471 if(true == updateUI)
472 {
473 obj->ParentWindowResized(windowRef);
474 obj->UpdateClipping();
475 obj->RenderOffScreenBuffers();
476 }
477
478 return noErr;
479 }
480
481 #pragma mark VideoRenderAGL HIViewRef constructor
482
483 VideoRenderAGL::VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId) :
484 _hiviewRef( windowRef),
485 _windowRef( 0),
486 _fullScreen( fullscreen),
487 _id( iId),
488 _renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
489 _screenUpdateThread( 0),
490 _screenUpdateEvent( 0),
491 _isHIViewRef( false),
492 _aglContext( 0),
493 _windowWidth( 0),
494 _windowHeight( 0),
495 _lastWindowWidth( -1),
496 _lastWindowHeight( -1),
497 _lastHiViewWidth( -1),
498 _lastHiViewHeight( -1),
499 _currentParentWindowHeight( 0),
500 _currentParentWindowWidth( 0),
501 _currentParentWindowBounds( ),
502 _windowHasResized( false),
503 _lastParentWindowBounds( ),
504 _currentHIViewBounds( ),
505 _lastHIViewBounds( ),
506 _windowRect( ),
507 _aglChannels( ),
508 _zOrderToChannel( ),
509 _hiviewEventHandlerRef( NULL),
510 _windowEventHandlerRef( NULL),
511 _currentViewBounds( ),
512 _lastViewBounds( ),
513 _renderingIsPaused( false),
514 _threadID( )
515 {
516 //WEBRTC_TRACE(kTraceDebug, "%s:%d Constructor", __FUNCTION__, __LINE__);
517 // _renderCritSec = CriticalSectionWrapper::CreateCriticalSection();
518
519 _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc, this, kRealtimePriority);
520 _screenUpdateEvent = EventWrapper::Create();
521
522 GetWindowRect(_windowRect);
523
524 _lastViewBounds.origin.x = 0;
525 _lastViewBounds.origin.y = 0;
526 _lastViewBounds.size.width = 0;
527 _lastViewBounds.size.height = 0;
528
529 #ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
530 // This gets the parent window of the HIViewRef that's passed in and installs a WindowRef event handler on it
531 // The event handler looks for window resize events and adjusts the offset of the controls.
532
533 //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef's parent window", __FUNCTION__, __LINE__);
534
535
536 static const EventTypeSpec windowEventTypes[] =
537 {
538 kEventClassWindow, kEventWindowBoundsChanged,
539 kEventClassWindow, kEventWindowBoundsChanging,
540 kEventClassWindow, kEventWindowZoomed,
541 kEventClassWindow, kEventWindowExpanded,
542 kEventClassWindow, kEventWindowClickResizeRgn,
543 kEventClassWindow, kEventWindowClickDragRgn
544 };
545
546 WindowRef parentWindow = HIViewGetWindow(windowRef);
547
548 InstallWindowEventHandler (parentWindow,
549 NewEventHandlerUPP (sHandleWindowResized),
550 GetEventTypeCount(windowEventTypes),
551 windowEventTypes,
552 (void *) this, // this is an arbitrary parameter that will be passed on to your event handler when it is called later
553 &_windowEventHandlerRef);
554
555 #endif
556
557 #ifdef NEW_HIVIEW_EVENT_HANDLER
558 //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef", __FUNCTION__, __LINE__);
559
560 static const EventTypeSpec hiviewEventTypes[] =
561 {
562 kEventClassControl, kEventControlBoundsChanged,
563 kEventClassControl, kEventControlDraw
564 // kEventControlDragLeave
565 // kEventControlDragReceive
566 // kEventControlGetFocusPart
567 // kEventControlApplyBackground
568 // kEventControlDraw
569 // kEventControlHit
570
571 };
572
573 HIViewInstallEventHandler(_hiviewRef,
574 NewEventHandlerUPP(sHandleHiViewResized),
575 GetEventTypeCount(hiviewEventTypes),
576 hiviewEventTypes,
577 (void *) this,
578 &_hiviewEventHandlerRef);
579
580 #endif
581 }
582
583 // This is a static function. It has been registered (in the constructor) to be called on various window redraw or resize events.
584 // Since it is a static method, "this" is passed in as the userData (the one and only allowed) parameter, and member methods are then called on it.
585 #pragma mark HIViewRef Event Handler
586 pascal OSStatus VideoRenderAGL::sHandleHiViewResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData)
587 {
588 //static int callbackCounter = 1;
589 HIViewRef hiviewRef = NULL;
590
591 // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
592 int eventType = GetEventKind(theEvent);
593 OSStatus status = noErr;
594 status = GetEventParameter (theEvent,
595 kEventParamDirectObject,
596 typeControlRef,
597 NULL,
598 sizeof (ControlRef),
599 NULL,
600 &hiviewRef);
601
602 VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
603 WindowRef parentWindow = HIViewGetWindow(hiviewRef);
604 bool updateUI = true;
605
606 if(kEventControlBoundsChanged == eventType)
607 {
608 }
609 else if(kEventControlDraw == eventType)
610 {
611 }
612 else
613 {
614 updateUI = false;
615 }
616
617 if(true == updateUI)
618 {
619 obj->ParentWindowResized(parentWindow);
620 obj->UpdateClipping();
621 obj->RenderOffScreenBuffers();
622 }
623
624 return status;
625 }
626
627 VideoRenderAGL::~VideoRenderAGL()
628 {
629
630 //WEBRTC_TRACE(kTraceDebug, "%s:%d Destructor", __FUNCTION__, __LINE__);
631
632
633 #ifdef USE_EVENT_HANDLERS
634 // remove event handlers
635 OSStatus status;
636 if(_isHIViewRef)
637 {
638 status = RemoveEventHandler(_hiviewEventHandlerRef);
639 }
640 else
641 {
642 status = RemoveEventHandler(_windowEventHandlerRef);
643 }
644 if(noErr != status)
645 {
646 if(_isHIViewRef)
647 {
648
649 //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
650 }
651 else
652 {
653 //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
654 }
655 }
656
657 #endif
658
659 OSStatus status;
660 #ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
661 if(_windowEventHandlerRef)
662 {
663 status = RemoveEventHandler(_windowEventHandlerRef);
664 if(status != noErr)
665 {
666 //WEBRTC_TRACE(kTraceDebug, "%s:%d failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
667 }
668 }
669 #endif
670
671 #ifdef NEW_HIVIEW_EVENT_HANDLER
672 if(_hiviewEventHandlerRef)
673 {
674 status = RemoveEventHandler(_hiviewEventHandlerRef);
675 if(status != noErr)
676 {
677 //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
678 }
679 }
680 #endif
681
682 // Signal event to exit thread, then delete it
683 ThreadWrapper* tmpPtr = _screenUpdateThread;
684 _screenUpdateThread = NULL;
685
686 if (tmpPtr)
687 {
688 tmpPtr->SetNotAlive();
689 _screenUpdateEvent->Set();
690 _screenUpdateEvent->StopTimer();
691
692 if (tmpPtr->Stop())
693 {
694 delete tmpPtr;
695 }
696 delete _screenUpdateEvent;
697 _screenUpdateEvent = NULL;
698 }
699
700 if (_aglContext != 0)
701 {
702 aglSetCurrentContext(_aglContext);
703 aglDestroyContext(_aglContext);
704 _aglContext = 0;
705 }
706
707 // Delete all channels
708 std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
709 while (it!= _aglChannels.end())
710 {
711 delete it->second;
712 _aglChannels.erase(it);
713 it = _aglChannels.begin();
714 }
715 _aglChannels.clear();
716
717 // Clean the zOrder map
718 std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
719 while(zIt != _zOrderToChannel.end())
720 {
721 _zOrderToChannel.erase(zIt);
722 zIt = _zOrderToChannel.begin();
723 }
724 _zOrderToChannel.clear();
725
726 //delete _renderCritSec;
727
728
729 }
730
731 int VideoRenderAGL::GetOpenGLVersion(int& aglMajor, int& aglMinor)
732 {
733 aglGetVersion((GLint *) &aglMajor, (GLint *) &aglMinor);
734 return 0;
735 }
736
737 int VideoRenderAGL::Init()
738 {
739 LockAGLCntx();
740
741 // Start rendering thread...
742 if (!_screenUpdateThread)
743 {
744 UnlockAGLCntx();
745 //WEBRTC_TRACE(kTraceError, "%s:%d Thread not created", __FUNCTION__, __LINE__);
746 return -1;
747 }
748 unsigned int threadId;
749 _screenUpdateThread->Start(threadId);
750
751 // Start the event triggering the render process
752 unsigned int monitorFreq = 60;
753 _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
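// 1000 / 60 truncates to a 16 ms period, so the render loop is woken roughly once
// per refresh of a 60 Hz display.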
754
755 // Create mixing textures
756 if (CreateMixingContext() == -1)
757 {
758 //WEBRTC_TRACE(kTraceError, "%s:%d Could not create a mixing context", __FUNCTION__, __LINE__);
759 UnlockAGLCntx();
760 return -1;
761 }
762
763 UnlockAGLCntx();
764 return 0;
765 }
766
767 VideoChannelAGL* VideoRenderAGL::CreateAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
768 {
769
770 LockAGLCntx();
771
772 //WEBRTC_TRACE(kTraceInfo, "%s:%d Creating AGL channel: %d", __FUNCTION__, __LINE__, channel);
773
774 if (HasChannel(channel))
775 {
776 //WEBRTC_TRACE(kTraceError, "%s:%d Channel already exists", __FUNCTION__, __LINE__);
777 UnlockAGLCntx();
778 return NULL;
779 }
780
781 if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
782 {
783 // There is already a channel using this zOrder
784 // TODO: Allow multiple channels with the same zOrder
785 }
786
787 VideoChannelAGL* newAGLChannel = new VideoChannelAGL(_aglContext, _id, this);
788
789 if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
790 {
791 if (newAGLChannel)
792 {
793 delete newAGLChannel;
794 newAGLChannel = NULL;
795 }
796 //WEBRTC_LOG(kTraceError, "Could not create AGL channel");
797 //WEBRTC_TRACE(kTraceError, "%s:%d Could not create AGL channel", __FUNCTION__, __LINE__);
798 UnlockAGLCntx();
799 return NULL;
800 }
802 _aglChannels[channel] = newAGLChannel;
803 _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
804
805 UnlockAGLCntx();
806 return newAGLChannel;
807 }
808
809 int VideoRenderAGL::DeleteAllAGLChannels()
810 {
811 CriticalSectionScoped cs(&_renderCritSec);
812
813 //WEBRTC_TRACE(kTraceInfo, "%s:%d Deleting all AGL channels", __FUNCTION__, __LINE__);
814 //int i = 0 ;
815 std::map<int, VideoChannelAGL*>::iterator it;
816 it = _aglChannels.begin();
817
818 while (it != _aglChannels.end())
819 {
820 VideoChannelAGL* channel = it->second;
821 if (channel)
822 delete channel;
823
824 _aglChannels.erase(it);
825 it = _aglChannels.begin();
826 }
827 _aglChannels.clear();
828 return 0;
829 }
830
831 int VideoRenderAGL::DeleteAGLChannel(int channel)
832 {
833 CriticalSectionScoped cs(&_renderCritSec);
834 //WEBRTC_TRACE(kTraceDebug, "%s:%d Deleting AGL channel %d", __FUNCTION__, __LINE__, channel);
835
836 std::map<int, VideoChannelAGL*>::iterator it;
837 it = _aglChannels.find(channel);
838 if (it != _aglChannels.end())
839 {
840 delete it->second;
841 _aglChannels.erase(it);
842 }
843 else
844 {
845 //WEBRTC_TRACE(kTraceWarning, "%s:%d Channel not found", __FUNCTION__, __LINE__);
846 return -1;
847 }
848
849 std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
850 while( zIt != _zOrderToChannel.end())
851 {
852 if (zIt->second == channel)
853 {
854 _zOrderToChannel.erase(zIt);
855 break;
856 }
857 zIt++;// = _zOrderToChannel.begin();
858 }
859
860 return 0;
861 }
862
863 int VideoRenderAGL::StopThread()
864 {
865 CriticalSectionScoped cs(&_renderCritSec);
866 ThreadWrapper* tmpPtr = _screenUpdateThread;
867 //_screenUpdateThread = NULL;
868
869 if (tmpPtr)
870 {
871 tmpPtr->SetNotAlive();
872 _screenUpdateEvent->Set();
873 if (tmpPtr->Stop())
874 {
875 delete tmpPtr;
876 }
877 }
878
879 delete _screenUpdateEvent;
880 _screenUpdateEvent = NULL;
881
882 return 0;
883 }
884
885 bool VideoRenderAGL::IsFullScreen()
886 {
887 CriticalSectionScoped cs(&_renderCritSec);
888 return _fullScreen;
889 }
890
891 bool VideoRenderAGL::HasChannels()
892 {
893
894 CriticalSectionScoped cs(&_renderCritSec);
895
896 if (_aglChannels.begin() != _aglChannels.end())
897 {
898 return true;
899 }
900
901 return false;
902 }
903
904 bool VideoRenderAGL::HasChannel(int channel)
905 {
906 CriticalSectionScoped cs(&_renderCritSec);
907
908 std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
909 if (it != _aglChannels.end())
910 {
911 return true;
912 }
913
914 return false;
915 }
916
917 int VideoRenderAGL::GetChannels(std::list<int>& channelList)
918 {
919
920 CriticalSectionScoped cs(&_renderCritSec);
921 std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
922
923 while (it != _aglChannels.end())
924 {
925 channelList.push_back(it->first);
926 it++;
927 }
928
929 return 0;
930 }
931
932 VideoChannelAGL* VideoRenderAGL::ConfigureAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
933 {
934
935 CriticalSectionScoped cs(&_renderCritSec);
936
937 std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
938
939 if (it != _aglChannels.end())
940 {
941 VideoChannelAGL* aglChannel = it->second;
942 if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
943 {
944 return NULL;
945 }
946
947 std::multimap<int, int>::iterator it = _zOrderToChannel.begin();
948 while(it != _zOrderToChannel.end())
949 {
950 if (it->second == channel)
951 {
952 if (it->first != zOrder)
953 {
954 _zOrderToChannel.erase(it);
955 _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
956 }
957 break;
958 }
959 it++;
960 }
961 return aglChannel;
962 }
963
964 return NULL;
965 }
966
967 bool VideoRenderAGL::ScreenUpdateThreadProc(void* obj)
968 {
969 return static_cast<VideoRenderAGL*>(obj)->ScreenUpdateProcess();
970 }
971
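// Note on the return value: ThreadWrapper keeps calling this function for as long as
// it returns true, so ScreenUpdateProcess() only returns false once
// _screenUpdateThread has been cleared (see the destructor), which ends the thread.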
972 bool VideoRenderAGL::ScreenUpdateProcess()
973 {
974 _screenUpdateEvent->Wait(100);
975
976 LockAGLCntx();
977
978 if (!_screenUpdateThread)
979 {
980 UnlockAGLCntx();
981 return false;
982 }
983
984 if (aglSetCurrentContext(_aglContext) == GL_FALSE)
985 {
986 UnlockAGLCntx();
987 return true;
988 }
989
990 if (GetWindowRect(_windowRect) == -1)
991 {
992 UnlockAGLCntx();
993 return true;
994 }
995
996 if (_windowWidth != (_windowRect.right - _windowRect.left)
997 || _windowHeight != (_windowRect.bottom - _windowRect.top))
998 {
999 // We have a new window size, update the context.
1000 if (aglUpdateContext(_aglContext) == GL_FALSE)
1001 {
1002 UnlockAGLCntx();
1003 return true;
1004 }
1005 _windowWidth = _windowRect.right - _windowRect.left;
1006 _windowHeight = _windowRect.bottom - _windowRect.top;
1007 }
1008
1009 // This section polls to see if the window size has changed.
1010 // It was causing problems with an invalid windowRef, so the code has been
1011 // modified and now also exists in the window event handler.
1012 #ifndef NEW_HIVIEW_PARENT_EVENT_HANDLER
1013 if (_isHIViewRef)
1014 {
1015
1016 if(FALSE == HIViewIsValid(_hiviewRef))
1017 {
1018
1019 //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalid windowRef", __FUNCTION__, __LINE__);
1020 UnlockAGLCntx();
1021 return true;
1022 }
1023 WindowRef window = HIViewGetWindow(_hiviewRef);
1024
1025 if(FALSE == IsValidWindowPtr(window))
1026 {
1027 //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalide hiviewRef", __FUNCTION__, __LINE__);
1028 UnlockAGLCntx();
1029 return true;
1030 }
1031 if (window == NULL)
1032 {
1033 //WEBRTC_TRACE(kTraceDebug, "%s:%d WindowRef = NULL", __FUNCTION__, __LINE__);
1034 UnlockAGLCntx();
1035 return true;
1036 }
1037
1038 if(FALSE == MacIsWindowVisible(window))
1039 {
1040 //WEBRTC_TRACE(kTraceDebug, "%s:%d MacIsWindowVisible == FALSE. Returning early", __FUNCTION__, __LINE__);
1041 UnlockAGLCntx();
1042 return true;
1043 }
1044
1045 HIRect viewBounds; // Placement and size for HIView
1046 int windowWidth = 0; // Parent window width
1047 int windowHeight = 0; // Parent window height
1048
1049 // NOTE: Calling GetWindowBounds with kWindowStructureRgn will crash intermittently if the OS decides it needs to push the window into the back for a moment.
1050 // To counter this, we get the titlebar height on class construction and then add it to the content region here. Content regions seem not to crash.
1051 Rect contentBounds =
1052 { 0, 0, 0, 0}; // The bounds for the parent window
1053
1054 #if defined(USE_CONTENT_RGN)
1055 GetWindowBounds(window, kWindowContentRgn, &contentBounds);
1056 #elif defined(USE_STRUCT_RGN)
1057 GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
1058 #endif
1059
1060 Rect globalBounds =
1061 { 0, 0, 0, 0}; // The bounds for the parent window
1062 globalBounds.top = contentBounds.top;
1063 globalBounds.right = contentBounds.right;
1064 globalBounds.bottom = contentBounds.bottom;
1065 globalBounds.left = contentBounds.left;
1066
1067 windowHeight = globalBounds.bottom - globalBounds.top;
1068 windowWidth = globalBounds.right - globalBounds.left;
1069
1070 // Get the size of the HIViewRef
1071 HIViewGetBounds(_hiviewRef, &viewBounds);
1072 HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
1073
1074 // Check if this is the first call..
1075 if (_lastWindowHeight == -1 &&
1076 _lastWindowWidth == -1)
1077 {
1078 _lastWindowWidth = windowWidth;
1079 _lastWindowHeight = windowHeight;
1080
1081 _lastViewBounds.origin.x = viewBounds.origin.x;
1082 _lastViewBounds.origin.y = viewBounds.origin.y;
1083 _lastViewBounds.size.width = viewBounds.size.width;
1084 _lastViewBounds.size.height = viewBounds.size.height;
1085 }
1087
1088 bool resized = false;
1089
1090 // Check if parent window size has changed
1091 if (windowHeight != _lastWindowHeight ||
1092 windowWidth != _lastWindowWidth)
1093 {
1094 resized = true;
1095 }
1096
1097 // Check if the HIView has new size or is moved in the parent window
1098 if (_lastViewBounds.origin.x != viewBounds.origin.x ||
1099 _lastViewBounds.origin.y != viewBounds.origin.y ||
1100 _lastViewBounds.size.width != viewBounds.size.width ||
1101 _lastViewBounds.size.height != viewBounds.size.height)
1102 {
1103 // The HiView is resized or has moved.
1104 resized = true;
1105 }
1106
1107 if (resized)
1108 {
1109
1110 //WEBRTC_TRACE(kTraceDebug, "%s:%d Window has resized", __FUNCTION__, __LINE__);
1111
1112 // Calculate offset between the windows
1113 // {x, y, width, height}, x,y = lower left corner
1114 const GLint offs[4] =
1115 { (int)(0.5f + viewBounds.origin.x),
1116 (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
1117 viewBounds.size.width, viewBounds.size.height};
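// Worked example (illustrative numbers): for a 600 px tall parent window and a view
// at viewBounds.origin.y = 100 with height 200, offs[1] becomes
// 600 - (100 + 200) = 300, i.e. the view's distance from the bottom of the window,
// since AGL_BUFFER_RECT takes its origin in the lower-left corner while HIView
// coordinates start at the top-left.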
1118
1119 //WEBRTC_TRACE(kTraceDebug, "%s:%d contentBounds t:%d r:%d b:%d l:%d", __FUNCTION__, __LINE__,
1120 // contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
1121 //WEBRTC_TRACE(kTraceDebug, "%s:%d windowHeight=%d", __FUNCTION__, __LINE__, windowHeight);
1122 //WEBRTC_TRACE(kTraceDebug, "%s:%d offs[4] = %d, %d, %d, %d", __FUNCTION__, __LINE__, offs[0], offs[1], offs[2], offs[3]);
1123
1124 aglSetDrawable (_aglContext, GetWindowPort(window));
1125 aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
1126 aglEnable(_aglContext, AGL_BUFFER_RECT);
1127
1128 // We need to change the viewport too if the HIView size has changed
1129 glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
1130
1131 }
1132 _lastWindowWidth = windowWidth;
1133 _lastWindowHeight = windowHeight;
1134
1135 _lastViewBounds.origin.x = viewBounds.origin.x;
1136 _lastViewBounds.origin.y = viewBounds.origin.y;
1137 _lastViewBounds.size.width = viewBounds.size.width;
1138 _lastViewBounds.size.height = viewBounds.size.height;
1139
1140 }
1141 #endif
1142 if (_fullScreen)
1143 {
1144 // TODO
1145 // We use double buffers, must always update
1146 //RenderOffScreenBuffersToBackBuffer();
1147 }
1148 else
1149 {
1150 // Check if there are any updated buffers
1151 bool updated = false;
1152
1153 // TODO: check if window size is updated!
1154 // TODO Improvement: Walk through the zOrder Map to only render the ones in need of update
1155 std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
1156 while (it != _aglChannels.end())
1157 {
1158
1159 VideoChannelAGL* aglChannel = it->second;
1160 aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
1161 aglChannel->IsUpdated(updated);
1162 if (updated)
1163 {
1164 break;
1165 }
1166 it++;
1167 }
1168
1169 if (updated)
1170 {
1171 // At least one buffer is updated, we need to repaint the texture
1172 if (RenderOffScreenBuffers() != -1)
1173 {
1174 // MF
1175 //SwapAndDisplayBuffers();
1176 }
1177 else
1178 {
1179 // Error updating the mixing texture, don't swap.
1180 }
1181 }
1182 }
1183
1184 UnlockAGLCntx();
1185
1186 //WEBRTC_LOG(kTraceDebug, "Leaving ScreenUpdateProcess()");
1187 return true;
1188 }
1189
1190 void VideoRenderAGL::ParentWindowResized(WindowRef window)
1191 {
1192 //WEBRTC_LOG(kTraceDebug, "%s HIViewRef:%d owner window has resized", __FUNCTION__, (int)_hiviewRef);
1193
1194 LockAGLCntx();
1196 // set flag
1197 _windowHasResized = false;
1198
1199 if(FALSE == HIViewIsValid(_hiviewRef))
1200 {
1201 //WEBRTC_LOG(kTraceDebug, "invalid windowRef");
1202 UnlockAGLCntx();
1203 return;
1204 }
1205
1206 if(FALSE == IsValidWindowPtr(window))
1207 {
1208 //WEBRTC_LOG(kTraceError, "invalid windowRef");
1209 UnlockAGLCntx();
1210 return;
1211 }
1212
1213 if (window == NULL)
1214 {
1215 //WEBRTC_LOG(kTraceError, "windowRef = NULL");
1216 UnlockAGLCntx();
1217 return;
1218 }
1219
1220 if(FALSE == MacIsWindowVisible(window))
1221 {
1222 //WEBRTC_LOG(kTraceDebug, "MacIsWindowVisible = FALSE. Returning early.");
1223 UnlockAGLCntx();
1224 return;
1225 }
1226
1227 Rect contentBounds =
1228 { 0, 0, 0, 0};
1229
1230 #if defined(USE_CONTENT_RGN)
1231 GetWindowBounds(window, kWindowContentRgn, &contentBounds);
1232 #elif defined(USE_STRUCT_RGN)
1233 GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
1234 #endif
1235
1236 //WEBRTC_LOG(kTraceDebug, "%s contentBounds t:%d r:%d b:%d l:%d", __FUNCTION__, contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
1237
1238 // update global vars
1239 _currentParentWindowBounds.top = contentBounds.top;
1240 _currentParentWindowBounds.left = contentBounds.left;
1241 _currentParentWindowBounds.bottom = contentBounds.bottom;
1242 _currentParentWindowBounds.right = contentBounds.right;
1243
1244 _currentParentWindowWidth = _currentParentWindowBounds.right - _currentParentWindowBounds.left;
1245 _currentParentWindowHeight = _currentParentWindowBounds.bottom - _currentParentWindowBounds.top;
1246
1247 _windowHasResized = true;
1248
1249 // ********* update AGL offsets
1250 HIRect viewBounds;
1251 HIViewGetBounds(_hiviewRef, &viewBounds);
1252 HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
1253
1254 const GLint offs[4] =
1255 { (int)(0.5f + viewBounds.origin.x),
1256 (int)(0.5f + _currentParentWindowHeight - (viewBounds.origin.y + viewBounds.size.height)),
1257 viewBounds.size.width, viewBounds.size.height};
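// Same top-left to lower-left conversion as in ScreenUpdateProcess(): the y offset
// is the view's distance from the bottom of the parent window.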
1258 //WEBRTC_LOG(kTraceDebug, "%s _currentParentWindowHeight=%d", __FUNCTION__, _currentParentWindowHeight);
1259 //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
1260
1261 aglSetCurrentContext(_aglContext);
1262 aglSetDrawable (_aglContext, GetWindowPort(window));
1263 aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
1264 aglEnable(_aglContext, AGL_BUFFER_RECT);
1265
1266 // We need to change the viewport too if the HIView size has changed
1267 glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
1268
1269 UnlockAGLCntx();
1270
1271 return;
1272 }
1273
1274 int VideoRenderAGL::CreateMixingContext()
1275 {
1276
1277 LockAGLCntx();
1278
1279 //WEBRTC_LOG(kTraceDebug, "Entering CreateMixingContext()");
1280
1281 // Use both AGL_ACCELERATED and AGL_NO_RECOVERY to make sure
1282 // a hardware renderer is used and not a software renderer.
1283
1284 GLint attributes[] =
1285 {
1286 AGL_DOUBLEBUFFER,
1287 AGL_WINDOW,
1288 AGL_RGBA,
1289 AGL_NO_RECOVERY,
1290 AGL_ACCELERATED,
1291 AGL_RED_SIZE, 8,
1292 AGL_GREEN_SIZE, 8,
1293 AGL_BLUE_SIZE, 8,
1294 AGL_ALPHA_SIZE, 8,
1295 AGL_DEPTH_SIZE, 24,
1296 AGL_NONE,
1297 };
1298
1299 AGLPixelFormat aglPixelFormat;
1300
1301 // ***** Set up the OpenGL Context *****
1302
1303 // Get a pixel format for the attributes above
1304 aglPixelFormat = aglChoosePixelFormat(NULL, 0, attributes);
1305 if (NULL == aglPixelFormat)
1306 {
1307 //WEBRTC_LOG(kTraceError, "Could not create pixel format");
1308 UnlockAGLCntx();
1309 return -1;
1310 }
1311
1312 // Create an AGL context
1313 _aglContext = aglCreateContext(aglPixelFormat, NULL);
1314 if (_aglContext == NULL)
1315 {
1316 //WEBRTC_LOG(kTraceError, "Could not create AGL context");
1317 UnlockAGLCntx();
1318 return -1;
1319 }
1320
1321 // Release the pixel format memory
1322 aglDestroyPixelFormat(aglPixelFormat);
1323
1324 // Set the current AGL context for the rest of the settings
1325 if (aglSetCurrentContext(_aglContext) == false)
1326 {
1327 //WEBRTC_LOG(kTraceError, "Could not set current context: %d", aglGetError());
1328 UnlockAGLCntx();
1329 return -1;
1330 }
1331
1332 if (_isHIViewRef)
1333 {
1334 //---------------------------
1335 // BEGIN: new test code
1336 #if 0
1337 // Don't use this one!
1338 // There seems to be an OS X bug that can't handle
1339 // movements and resizing of the parent window
1340 // and or the HIView
1341 if (aglSetHIViewRef(_aglContext,_hiviewRef) == false)
1342 {
1343 //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
1344 UnlockAGLCntx();
1345 return -1;
1346 }
1347 #else
1348
1349 // Get the parent window for this control
1350 WindowRef window = GetControlOwner(_hiviewRef);
1351
1352 Rect globalBounds =
1353 { 0,0,0,0}; // The bounds for the parent window
1354 HIRect viewBounds; // Placement in the parent window and size.
1355 int windowHeight = 0;
1356
1357 // Rect titleBounds = {0,0,0,0};
1358 // GetWindowBounds(window, kWindowTitleBarRgn, &titleBounds);
1359 // _titleBarHeight = titleBounds.top - titleBounds.bottom;
1360 // if(0 == _titleBarHeight)
1361 // {
1362 // //WEBRTC_LOG(kTraceError, "Titlebar height = 0");
1363 // //return -1;
1364 // }
1365
1366
1367 // Get the bounds for the parent window
1368 #if defined(USE_CONTENT_RGN)
1369 GetWindowBounds(window, kWindowContentRgn, &globalBounds);
1370 #elif defined(USE_STRUCT_RGN)
1371 GetWindowBounds(window, kWindowStructureRgn, &globalBounds);
1372 #endif
1373 windowHeight = globalBounds.bottom - globalBounds.top;
1374
1375 // Get the bounds for the HIView
1376 HIViewGetBounds(_hiviewRef, &viewBounds);
1377
1378 HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
1379
1380 const GLint offs[4] =
1381 { (int)(0.5f + viewBounds.origin.x),
1382 (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
1383 viewBounds.size.width, viewBounds.size.height};
1384
1385 //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
1386
1387
1388 aglSetDrawable (_aglContext, GetWindowPort(window));
1389 aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
1390 aglEnable(_aglContext, AGL_BUFFER_RECT);
1391
1392 GLint surfaceOrder = 1; // 1: above window, -1 below.
1393 //OSStatus status = aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
1394 aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
1395
1396 glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
1397 #endif
1398
1399 }
1400 else
1401 {
1402 if(GL_FALSE == aglSetDrawable (_aglContext, GetWindowPort(_windowRef)))
1403 {
1404 //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
1405 UnlockAGLCntx();
1406 return -1;
1407 }
1408 }
1409
1410 _windowWidth = _windowRect.right - _windowRect.left;
1411 _windowHeight = _windowRect.bottom - _windowRect.top;
1412
1413 // opaque surface
1414 int surfaceOpacity = 1;
1415 if (aglSetInteger(_aglContext, AGL_SURFACE_OPACITY, (const GLint *) &surfaceOpacity) == false)
1416 {
1417 //WEBRTC_LOG(kTraceError, "Could not set surface opacity: %d", aglGetError());
1418 UnlockAGLCntx();
1419 return -1;
1420 }
1421
1422 // Swap interval: 1 -> sync to the screen refresh rate (slower).
1423 // 0 -> don't sync with the vertical retrace.
1424 int swapInterval = 0;
1425 if (aglSetInteger(_aglContext, AGL_SWAP_INTERVAL, (const GLint *) &swapInterval) == false)
1426 {
1427 //WEBRTC_LOG(kTraceError, "Could not set swap interval: %d", aglGetError());
1428 UnlockAGLCntx();
1429 return -1;
1430 }
1431
1432 // Update the rect with the current size
1433 if (GetWindowRect(_windowRect) == -1)
1434 {
1435 //WEBRTC_LOG(kTraceError, "Could not get window size");
1436 UnlockAGLCntx();
1437 return -1;
1438 }
1439
1440 // Disable not needed functionality to increase performance
1441 glDisable(GL_DITHER);
1442 glDisable(GL_ALPHA_TEST);
1443 glDisable(GL_STENCIL_TEST);
1444 glDisable(GL_FOG);
1445 glDisable(GL_TEXTURE_2D);
1446 glPixelZoom(1.0, 1.0);
1447
1448 glDisable(GL_BLEND);
1449 glDisable(GL_DEPTH_TEST);
1450 glDepthMask(GL_FALSE);
1451 glDisable(GL_CULL_FACE);
1452
1453 glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
1454 glClear(GL_COLOR_BUFFER_BIT);
1455
1456 GLenum glErr = glGetError();
1457
1458 if (glErr)
1459 {
1460 }
1461
1462 UpdateClipping();
1463
1464 //WEBRTC_LOG(kTraceDebug, "Leaving CreateMixingContext()");
1465
1466 UnlockAGLCntx();
1467 return 0;
1468 }
1469
1470 int VideoRenderAGL::RenderOffScreenBuffers()
1471 {
1472 LockAGLCntx();
1473
1474 // Get the current window size, it might have changed since last render.
1475 if (GetWindowRect(_windowRect) == -1)
1476 {
1477 //WEBRTC_LOG(kTraceError, "Could not get window rect");
1478 UnlockAGLCntx();
1479 return -1;
1480 }
1481
1482 if (aglSetCurrentContext(_aglContext) == false)
1483 {
1484 //WEBRTC_LOG(kTraceError, "Could not set current context for rendering");
1485 UnlockAGLCntx();
1486 return -1;
1487 }
1488
1489 // TODO: only clear when a buffer has actually been updated.
1490 glClear(GL_COLOR_BUFFER_BIT);
1491
1492 // Loop through all channels starting highest zOrder ending with lowest.
1493 for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
1494 rIt != _zOrderToChannel.rend();
1495 rIt++)
1496 {
1497 int channelId = rIt->second;
1498 std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channelId);
1499
1500 VideoChannelAGL* aglChannel = it->second;
1501
1502 aglChannel->RenderOffScreenBuffer();
1503 }
1504
1505 SwapAndDisplayBuffers();
1506
1507 UnlockAGLCntx();
1508 return 0;
1509 }
1510
1511 int VideoRenderAGL::SwapAndDisplayBuffers()
1512 {
1513
1514 LockAGLCntx();
1515 if (_fullScreen)
1516 {
1517 // TODO:
1518 // Swap front and back buffers; rendering is taken care of in the same call
1519 //aglSwapBuffers(_aglContext);
1520 // Update buffer index to the idx for the next rendering!
1521 //_textureIdx = (_textureIdx + 1) & 1;
1522 }
1523 else
1524 {
1525 // Single buffer rendering, only update context.
1526 glFlush();
1527 aglSwapBuffers(_aglContext);
1528 HIViewSetNeedsDisplay(_hiviewRef, true);
1529 }
1530
1531 UnlockAGLCntx();
1532 return 0;
1533 }
1534
1535 int VideoRenderAGL::GetWindowRect(Rect& rect)
1536 {
1537
1538 LockAGLCntx();
1539
1540 if (_isHIViewRef)
1541 {
1542 if (_hiviewRef)
1543 {
1544 HIRect HIViewRect1;
1545 if(FALSE == HIViewIsValid(_hiviewRef))
1546 {
1547 rect.top = 0;
1548 rect.left = 0;
1549 rect.right = 0;
1550 rect.bottom = 0;
1551 //WEBRTC_LOG(kTraceError,"GetWindowRect() HIViewIsValid() returned false");
1552 UnlockAGLCntx();
return -1;
1553 }
1554 HIViewGetBounds(_hiviewRef,&HIViewRect1);
1555 HIRectConvert(&HIViewRect1, 1, NULL, 2, NULL);
1556 if(HIViewRect1.origin.x < 0)
1557 {
1558 rect.top = 0;
1559 //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.top = 0");
1560 }
1561 else
1562 {
1563 rect.top = HIViewRect1.origin.x;
1564 }
1565
1566 if(HIViewRect1.origin.y < 0)
1567 {
1568 rect.left = 0;
1569 //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.left = 0");
1570 }
1571 else
1572 {
1573 rect.left = HIViewRect1.origin.y;
1574 }
1575
1576 if(HIViewRect1.size.width < 0)
1577 {
1578 rect.right = 0;
1579 //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.right = 0");
1580 }
1581 else
1582 {
1583 rect.right = HIViewRect1.size.width;
1584 }
1585
1586 if(HIViewRect1.size.height < 0)
1587 {
1588 rect.bottom = 0;
1589 //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.bottom = 0");
1590 }
1591 else
1592 {
1593 rect.bottom = HIViewRect1.size.height;
1594 }
1595
1596 ////WEBRTC_LOG(kTraceDebug,"GetWindowRect() HIViewRef: rect.top = %d, rect.left = %d, rect.right = %d, rect.bottom =%d in GetWindowRect", rect.top,rect.left,rect.right,rect.bottom);
1597 UnlockAGLCntx();
1598 }
1599 else
1600 {
1601 //WEBRTC_LOG(kTraceError, "invalid HIViewRef");
1602 UnlockAGLCntx();
1603 }
1604 }
1605 else
1606 {
1607 if (_windowRef)
1608 {
1609 GetWindowBounds(_windowRef, kWindowContentRgn, &rect);
1610 UnlockAGLCntx();
1611 }
1612 else
1613 {
1614 //WEBRTC_LOG(kTraceError, "No WindowRef");
1615 UnlockAGLCntx();
1616 }
1617 }
return 0;
1618 }
1619
1620 int VideoRenderAGL::UpdateClipping()
1621 {
1622 //WEBRTC_LOG(kTraceDebug, "Entering UpdateClipping()");
1623 LockAGLCntx();
1624
1625 if(_isHIViewRef)
1626 {
1627 if(FALSE == HIViewIsValid(_hiviewRef))
1628 {
1629 //WEBRTC_LOG(kTraceError, "UpdateClipping() _isHIViewRef is invalid. Returning -1");
1630 UnlockAGLCntx();
1631 return -1;
1632 }
1633
1634 RgnHandle visibleRgn = NewRgn();
1635 SetEmptyRgn (visibleRgn);
1636
1637 if(-1 == CalculateVisibleRegion((ControlRef)_hiviewRef, visibleRgn, true))
1638 {
1639 }
1640
1641 if(GL_FALSE == aglSetCurrentContext(_aglContext))
1642 {
1643 GLenum glErr = aglGetError();
1644 //WEBRTC_LOG(kTraceError, "aglSetCurrentContext returned FALSE with error code %d at line %d", glErr, __LINE__);
1645 }
1646
1647 if(GL_FALSE == aglEnable(_aglContext, AGL_CLIP_REGION))
1648 {
1649 GLenum glErr = aglGetError();
1650 //WEBRTC_LOG(kTraceError, "aglEnable returned FALSE with error code %d at line %d\n", glErr, __LINE__);
1651 }
1652
1653 if(GL_FALSE == aglSetInteger(_aglContext, AGL_CLIP_REGION, (const GLint*)visibleRgn))
1654 {
1655 GLenum glErr = aglGetError();
1656 //WEBRTC_LOG(kTraceError, "aglSetInteger returned FALSE with error code %d at line %d\n", glErr, __LINE__);
1657 }
1658
1659 DisposeRgn(visibleRgn);
1660 }
1661 else
1662 {
1663 //WEBRTC_LOG(kTraceDebug, "Not using a hiviewref!\n");
1664 }
1665
1666 //WEBRTC_LOG(kTraceDebug, "Leaving UpdateClipping()");
1667 UnlockAGLCntx();
1668 return 0;
1669 }
1670
1671 int VideoRenderAGL::CalculateVisibleRegion(ControlRef control, RgnHandle &visibleRgn, bool clipChildren)
1672 {
1673
1674 // LockAGLCntx();
1675
1676 //WEBRTC_LOG(kTraceDebug, "Entering CalculateVisibleRegion()");
1677 OSStatus osStatus = 0;
1678 OSErr osErr = 0;
1679
1680 RgnHandle tempRgn = NewRgn();
1681 if (IsControlVisible(control))
1682 {
1683 RgnHandle childRgn = NewRgn();
1684 WindowRef window = GetControlOwner(control);
1685 ControlRef rootControl;
1686 GetRootControl(window, &rootControl); // 'wvnc'
1687 ControlRef masterControl;
1688 osStatus = GetSuperControl(rootControl, &masterControl);
1689 // //WEBRTC_LOG(kTraceDebug, "IBM GetSuperControl=%d", osStatus);
1690
1691 if (masterControl != NULL)
1692 {
1693 CheckValidRegion(visibleRgn);
1694 // init visibleRgn with region of 'wvnc'
1695 osStatus = GetControlRegion(rootControl, kControlStructureMetaPart, visibleRgn);
1696 // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
1697 //GetSuperControl(rootControl, &rootControl);
1698 ControlRef tempControl = control, lastControl = 0;
1699 while (tempControl != masterControl) // current control != master
1700
1701 {
1702 CheckValidRegion(tempRgn);
1703
1704 // //WEBRTC_LOG(kTraceDebug, "IBM tempControl=%d masterControl=%d", tempControl, masterControl);
1705 ControlRef subControl;
1706
1707 osStatus = GetControlRegion(tempControl, kControlStructureMetaPart, tempRgn); // intersect the region of the current control with visibleRgn
1708 // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
1709 CheckValidRegion(tempRgn);
1710
1711 osErr = HIViewConvertRegion(tempRgn, tempControl, rootControl);
1712 // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d : %d", osErr, __LINE__);
1713 CheckValidRegion(tempRgn);
1714
1715 SectRgn(tempRgn, visibleRgn, visibleRgn);
1716 CheckValidRegion(tempRgn);
1717 CheckValidRegion(visibleRgn);
1718 if (EmptyRgn(visibleRgn)) // if the region is empty, bail
1719 break;
1720
1721 if (clipChildren || tempControl != control) // clip children if true, cut out the tempControl if it's not one passed to this function
1722
1723 {
1724 UInt16 numChildren;
1725 osStatus = CountSubControls(tempControl, &numChildren); // count the subcontrols
1726 // //WEBRTC_LOG(kTraceDebug, "IBM CountSubControls=%d : %d", osStatus, __LINE__);
1727
1728 // //WEBRTC_LOG(kTraceDebug, "IBM numChildren=%d", numChildren);
1729 for (int i = 0; i < numChildren; i++)
1730 {
1731 osErr = GetIndexedSubControl(tempControl, numChildren - i, &subControl); // retrieve the subcontrol in order by zorder
1732 // //WEBRTC_LOG(kTraceDebug, "IBM GetIndexedSubControls=%d : %d", osErr, __LINE__);
1733 if ( subControl == lastControl ) // break because of zorder
1734
1735 {
1736 // //WEBRTC_LOG(kTraceDebug, "IBM breaking because of zorder %d", __LINE__);
1737 break;
1738 }
1739
1740 if (!IsControlVisible(subControl)) // dont' clip invisible controls
1741
1742 {
1743 // //WEBRTC_LOG(kTraceDebug, "IBM continue. Control is not visible %d", __LINE__);
1744 continue;
1745 }
1746
1747 if(!subControl) continue;
1748
1749 osStatus = GetControlRegion(subControl, kControlStructureMetaPart, tempRgn); // get the region of the current control and union it into childRgn
1750 // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d %d", osStatus, __LINE__);
1751 CheckValidRegion(tempRgn);
1752 if(osStatus != 0)
1753 {
1754 // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
1755 continue;
1756 }
1757 if(!tempRgn)
1758 {
1759 // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !tempRgn %d", osStatus, __LINE__);
1760 continue;
1761 }
1762
1763 osStatus = HIViewConvertRegion(tempRgn, subControl, rootControl);
1764 CheckValidRegion(tempRgn);
1765 // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d %d", osStatus, __LINE__);
1766 if(osStatus != 0)
1767 {
1768 // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
1769 continue;
1770 }
1771 if(!rootControl)
1772 {
1773 // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !rootControl %d", osStatus, __LINE__);
1774 continue;
1775 }
1776
1777 UnionRgn(tempRgn, childRgn, childRgn);
1778 CheckValidRegion(tempRgn);
1779 CheckValidRegion(childRgn);
1780 CheckValidRegion(visibleRgn);
1781 if(!childRgn)
1782 {
1783 // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !childRgn %d", osStatus, __LINE__);
1784 continue;
1785 }
1786
1787 } // next child control
1788 }
1789 lastControl = tempControl;
1790 GetSuperControl(tempControl, &subControl);
1791 tempControl = subControl;
1792 }
1793
1794 DiffRgn(visibleRgn, childRgn, visibleRgn);
1795 CheckValidRegion(visibleRgn);
1796 CheckValidRegion(childRgn);
1797 DisposeRgn(childRgn);
1798 }
1799 else
1800 {
1801 CopyRgn(tempRgn, visibleRgn);
1802 CheckValidRegion(tempRgn);
1803 CheckValidRegion(visibleRgn);
1804 }
1805 DisposeRgn(tempRgn);
1806 }
1807
1808 //WEBRTC_LOG(kTraceDebug, "Leaving CalculateVisibleRegion()");
1809 //_aglCritPtr->Leave();
1810 return 0;
1811 }
1812
1813 bool VideoRenderAGL::CheckValidRegion(RgnHandle rHandle)
1814 {
1815
1816 Handle hndSize = (Handle)rHandle;
1817 long size = GetHandleSize(hndSize);
1818 if(0 == size)
1819 {
1820
1821 OSErr memErr = MemError();
1822 if(noErr != memErr)
1823 {
1824 // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle. MemError() returned %d", memErr);
1825 }
1826 else
1827 {
1828 // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle yet MemError() returned noErr");
1829 }
1830
1831 }
1832 else
1833 {
1834 // //WEBRTC_LOG(kTraceDebug, "IBM handleSize = %d", size);
1835 }
1836
1837 if(false == IsValidRgnHandle(rHandle))
1838 {
1839 // //WEBRTC_LOG(kTraceError, "IBM ERROR Invalid Region found : $%d", rHandle);
1840 assert(false);
1841 }
1842
1843 int err = QDError();
1844 switch(err)
1845 {
1846 case 0:
1847 break;
1848 case -147:
1849 //WEBRTC_LOG(kTraceError, "ERROR region too big");
1850 assert(false);
1851 break;
1852
1853 case -149:
1854 //WEBRTC_LOG(kTraceError, "ERROR not enough stack");
1855 assert(false);
1856 break;
1857
1858 default:
1859 //WEBRTC_LOG(kTraceError, "ERROR Unknown QDError %d", err);
1860 assert(false);
1861 break;
1862 }
1863
1864 return true;
1865 }
1866
1867 int VideoRenderAGL::ChangeWindow(void* newWindowRef)
1868 {
1869
1870 LockAGLCntx();
1871
1872 UnlockAGLCntx();
1873 return -1;
1874 }
1875 int32_t VideoRenderAGL::ChangeUniqueID(int32_t id)
1876 {
1877 LockAGLCntx();
1878
1879 UnlockAGLCntx();
1880 return -1;
1881 }
1882
1883 int32_t VideoRenderAGL::StartRender()
1884 {
1885
1886 LockAGLCntx();
1887 const unsigned int MONITOR_FREQ = 60;
1888 if(TRUE == _renderingIsPaused)
1889 {
1890 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Rendering is paused. Restarting now", __FUNCTION__, __LINE__);
1891
1892 // We already have the thread. Most likely StopRender() was called and rendering was paused.
1893 if(FALSE == _screenUpdateThread->Start(_threadID))
1894 {
1895 //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
1896 UnlockAGLCntx();
1897 return -1;
1898 }
1899 if(FALSE == _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ))
1900 {
1901 //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateEvent", __FUNCTION__, __LINE__);
1902 UnlockAGLCntx();
1903 return -1;
1904 }
1905
1906 return 0;
1907 }
1908
1909 _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc, this, kRealtimePriority);
1910 _screenUpdateEvent = EventWrapper::Create();
1911
1912 if (!_screenUpdateThread)
1913 {
1914 //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
1915 UnlockAGLCntx();
1916 return -1;
1917 }
1918
1919 _screenUpdateThread->Start(_threadID);
1920 _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ);
1921
1922 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Started screenUpdateThread", __FUNCTION__, __LINE__);
1923
1924 UnlockAGLCntx();
1925 return 0;
1926
1927 }
1928
1929 int32_t VideoRenderAGL::StopRender()
1930 {
1931 LockAGLCntx();
1932
1933 if(!_screenUpdateThread || !_screenUpdateEvent)
1934 {
1935 _renderingIsPaused = TRUE;
1936 UnlockAGLCntx();
1937 return 0;
1938 }
1939
1940 if(FALSE == _screenUpdateThread->Stop() || FALSE == _screenUpdateEvent->StopTimer())
1941 {
1942 _renderingIsPaused = FALSE;
1943 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Could not stop either: screenUpdateThread or screenUpdateEvent", __FUNCTION__, __LINE__);
1944 UnlockAGLCntx();
1945 return -1;
1946 }
1947
1948 _renderingIsPaused = TRUE;
1949
1950 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Stopped screenUpdateThread", __FUNCTION__, __LINE__);
1951 UnlockAGLCntx();
1952 return 0;
1953 }
1954
1955 int32_t VideoRenderAGL::DeleteAGLChannel(const uint32_t streamID)
1956 {
1957
1958 LockAGLCntx();
1959
1960 std::map<int, VideoChannelAGL*>::iterator it;
1961 it = _aglChannels.begin();
1962
1963 while (it != _aglChannels.end())
1964 {
1965 VideoChannelAGL* channel = it->second;
1966 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Deleting channel %d", __FUNCTION__, __LINE__, streamID);
1967 delete channel;
1968 it++;
1969 }
1970 _aglChannels.clear();
1971
1972 UnlockAGLCntx();
1973 return 0;
1974 }
1975
1976 int32_t VideoRenderAGL::GetChannelProperties(const uint16_t streamId,
1977 uint32_t& zOrder,
1978 float& left,
1979 float& top,
1980 float& right,
1981 float& bottom)
1982 {
1983
1984 LockAGLCntx();
1985 UnlockAGLCntx();
1986 return -1;
1987
1988 }
1989
1990 void VideoRenderAGL::LockAGLCntx()
1991 {
1992 _renderCritSec.Enter();
1993 }
1994 void VideoRenderAGL::UnlockAGLCntx()
1995 {
1996 _renderCritSec.Leave();
1997 }
1998
1999 } // namespace webrtc
2000
2001 #endif // CARBON_RENDERING
2002