1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include "webrtc/engine_configurations.h"
12
13 #if defined(CARBON_RENDERING)
14
15 #include "webrtc/modules/video_render/mac/video_render_agl.h"
16
17 // includes
18 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
19 #include "webrtc/system_wrappers/include/critical_section_wrapper.h"
20 #include "webrtc/system_wrappers/include/event_wrapper.h"
21 #include "webrtc/system_wrappers/include/trace.h"
22
23 namespace webrtc {
24
25 /*
26 *
27 * VideoChannelAGL
28 *
29 */
30
31 #pragma mark VideoChannelAGL constructor
32
33 VideoChannelAGL::VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner) :
34 _aglContext( aglContext),
35 _id( iId),
36 _owner( owner),
37 _width( 0),
38 _height( 0),
39 _stretchedWidth( 0),
40 _stretchedHeight( 0),
41 _startWidth( 0.0f),
42 _startHeight( 0.0f),
43 _stopWidth( 0.0f),
44 _stopHeight( 0.0f),
45 _xOldWidth( 0),
46 _yOldHeight( 0),
47 _oldStretchedHeight(0),
48 _oldStretchedWidth( 0),
49 _buffer( 0),
50 _bufferSize( 0),
51 _incomingBufferSize(0),
52 _bufferIsUpdated( false),
53 _sizeInitialized( false),
54 _numberOfStreams( 0),
55 _bVideoSizeStartedChanging(false),
56 _pixelFormat( GL_RGBA),
57 _pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
58 _texture( 0)
59
60 {
61 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Constructor", __FUNCTION__, __LINE__);
62 }
63
64 VideoChannelAGL::~VideoChannelAGL()
65 {
66 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Destructor", __FUNCTION__, __LINE__);
67 if (_buffer)
68 {
69 delete [] _buffer;
70 _buffer = NULL;
71 }
72
73 aglSetCurrentContext(_aglContext);
74
75 if (_texture != 0)
76 {
77 glDeleteTextures(1, (const GLuint*) &_texture);
78 _texture = 0;
79 }
80 }
81
82 int32_t VideoChannelAGL::RenderFrame(const uint32_t streamId,
83 VideoFrame& videoFrame) {
84 _owner->LockAGLCntx();
85 if (_width != videoFrame.width() ||
86 _height != videoFrame.height()) {
87 if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
88 WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
89 "%s:%d FrameSizeChange returned an error", __FUNCTION__, __LINE__);
90 _owner->UnlockAGLCntx();
91 return -1;
92 }
93 }
94
95 _owner->UnlockAGLCntx();
96 return DeliverFrame(videoFrame);
97 }
98
99 int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/)
100 {
101 _owner->LockAGLCntx();
102 _owner->UnlockAGLCntx();
103 return 0;
104 }
105
106 int VideoChannelAGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
107 {
108
109 _owner->LockAGLCntx();
110 _stretchedHeight = stretchHeight;
111 _stretchedWidth = stretchWidth;
112 _owner->UnlockAGLCntx();
113 return 0;
114 }
115
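// Reallocates the RGBA conversion buffer and recreates the GL texture when the incoming frame size changes.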
116 int VideoChannelAGL::FrameSizeChange(int width, int height, int numberOfStreams)
117 {
118 // We'll get a new frame size from VideoAPI, prepare the buffer
119
120 _owner->LockAGLCntx();
121
122 if (width == _width && _height == height)
123 {
124 // We already have a correct buffer size
125 _numberOfStreams = numberOfStreams;
126 _owner->UnlockAGLCntx();
127 return 0;
128 }
129
130 _width = width;
131 _height = height;
132
133 // Delete the old buffer, create a new one with correct size.
134 if (_buffer)
135 {
136 delete [] _buffer;
137 _bufferSize = 0;
138 }
139
140 _incomingBufferSize = CalcBufferSize(kI420, _width, _height);
141 _bufferSize = CalcBufferSize(kARGB, _width, _height);//_width * _height * bytesPerPixel;
142 _buffer = new unsigned char [_bufferSize];
143 memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
144
145 if (aglSetCurrentContext(_aglContext) == false)
146 {
147 _owner->UnlockAGLCntx();
148 return -1;
149 }
150
151 // Delete a possible old texture
152 if (_texture != 0)
153 {
154 glDeleteTextures(1, (const GLuint*) &_texture);
155 _texture = 0;
156 }
157
158 // Create a new texture
159 glGenTextures(1, (GLuint *) &_texture);
160
161 GLenum glErr = glGetError();
162
163 if (glErr != GL_NO_ERROR)
164 {
165 }
166
167 // Do the setup for both textures
168 // Note: we setup two textures even if we're not running full screen
169 glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
170
171 // Set texture parameters
172 glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
173
174 glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
175 glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
176
177 glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
178 glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
179 //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
180 //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
181
182 glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
183
184 glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
185
186 glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
187
188 // Maximum width/height for a texture
189 GLint texSize;
190 glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);
191
192 if (texSize < _width || texSize < _height)
193 {
194 // Image too big for memory
195 _owner->UnlockAGLCntx();
196 return -1;
197 }
198
199 // Set up the texture type and size
200 glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
201 0, // level
202 GL_RGBA, // internal format
203 _width, // width
204 _height, // height
205 0, // border 0/1 = off/on
206 _pixelFormat, // format, GL_BGRA
207 _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
208 _buffer); // pixel data
209
210 glErr = glGetError();
211 if (glErr != GL_NO_ERROR)
212 {
213 _owner->UnlockAGLCntx();
214 return -1;
215 }
216
217 _owner->UnlockAGLCntx();
218 return 0;
219 }
220
221 // Called from video engine when a new frame should be rendered.
222 int VideoChannelAGL::DeliverFrame(const VideoFrame& videoFrame) {
223 _owner->LockAGLCntx();
224
225 if (_texture == 0) {
226 _owner->UnlockAGLCntx();
227 return 0;
228 }
229
230 if (CalcBufferSize(kI420, videoFrame.width(), videoFrame.height()) !=
231 _incomingBufferSize) {
232 _owner->UnlockAGLCntx();
233 return -1;
234 }
235
236 // Setting stride = width.
237 int rgbret = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer);
238 if (rgbret < 0) {
239 _owner->UnlockAGLCntx();
240 return -1;
241 }
242
243 aglSetCurrentContext(_aglContext);
244
245 // Put the new frame into the graphic card texture.
246 // Make sure this texture is the active one
247 glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
248 GLenum glErr = glGetError();
249 if (glErr != GL_NO_ERROR) {
250 _owner->UnlockAGLCntx();
251 return -1;
252 }
253
254 // Copy buffer to texture
255 glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
256 0, // Level, not used
257 0, // start point x, (low left of pic)
258 0, // start point y,
259 _width, // width
260 _height, // height
261 _pixelFormat, // picture format for _buffer
262 _pixelDataType, // data type of _buffer
263 (const GLvoid*) _buffer); // the pixel data
264
265 if (glGetError() != GL_NO_ERROR) {
266 _owner->UnlockAGLCntx();
267 return -1;
268 }
269
270 _bufferIsUpdated = true;
271 _owner->UnlockAGLCntx();
272
273 return 0;
274 }
275
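// Draws this channel's texture as a textured quad into the current AGL context, using the normalized start/stop coordinates set by SetStreamSettings().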
276 int VideoChannelAGL::RenderOffScreenBuffer()
277 {
278
279 _owner->LockAGLCntx();
280
281 if (_texture == 0)
282 {
283 _owner->UnlockAGLCntx();
284 return 0;
285 }
286
287 GLfloat xStart = 2.0f * _startWidth - 1.0f;
288 GLfloat xStop = 2.0f * _stopWidth - 1.0f;
289 GLfloat yStart = 1.0f - 2.0f * _stopHeight;
290 GLfloat yStop = 1.0f - 2.0f * _startHeight;
291
292 aglSetCurrentContext(_aglContext);
293 glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
294
295 if(_stretchedWidth != _oldStretchedWidth || _stretchedHeight != _oldStretchedHeight)
296 {
297 glViewport(0, 0, _stretchedWidth, _stretchedHeight);
298 }
299 _oldStretchedHeight = _stretchedHeight;
300 _oldStretchedWidth = _stretchedWidth;
301
302 // Now really put the texture into the framebuffer
303 glLoadIdentity();
304
305 glEnable(GL_TEXTURE_RECTANGLE_EXT);
306
307 glBegin(GL_POLYGON);
308 {
309 glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
310 glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
311 glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
312 glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
313 }
314 glEnd();
315
316 glDisable(GL_TEXTURE_RECTANGLE_EXT);
317
318 _bufferIsUpdated = false;
319
320 _owner->UnlockAGLCntx();
321 return 0;
322 }
323
324 int VideoChannelAGL::IsUpdated(bool& isUpdated)
325 {
326 _owner->LockAGLCntx();
327 isUpdated = _bufferIsUpdated;
328 _owner->UnlockAGLCntx();
329
330 return 0;
331 }
332
333 int VideoChannelAGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
334 {
335
336 _owner->LockAGLCntx();
337
338 _startWidth = startWidth;
339 _stopWidth = stopWidth;
340 _startHeight = startHeight;
341 _stopHeight = stopHeight;
342
343 int oldWidth = _width;
344 int oldHeight = _height;
345 int oldNumberOfStreams = _numberOfStreams;
346
347 _width = 0;
348 _height = 0;
349
350 int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);
351
352 _owner->UnlockAGLCntx();
353
354 return retVal;
355 }
356
357 int VideoChannelAGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
358 {
359 return -1;
360 }
361
362 #pragma mark VideoRenderAGL WindowRef constructor
363
364 VideoRenderAGL::VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId) :
365 _hiviewRef( 0),
366 _windowRef( windowRef),
367 _fullScreen( fullscreen),
368 _id( iId),
369 _renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
370 _screenUpdateEvent( 0),
371 _isHIViewRef( false),
372 _aglContext( 0),
373 _windowWidth( 0),
374 _windowHeight( 0),
375 _lastWindowWidth( -1),
376 _lastWindowHeight( -1),
377 _lastHiViewWidth( -1),
378 _lastHiViewHeight( -1),
379 _currentParentWindowHeight( 0),
380 _currentParentWindowWidth( 0),
381 _currentParentWindowBounds( ),
382 _windowHasResized( false),
383 _lastParentWindowBounds( ),
384 _currentHIViewBounds( ),
385 _lastHIViewBounds( ),
386 _windowRect( ),
387 _aglChannels( ),
388 _zOrderToChannel( ),
389 _hiviewEventHandlerRef( NULL),
390 _windowEventHandlerRef( NULL),
391 _currentViewBounds( ),
392 _lastViewBounds( ),
393 _renderingIsPaused( false)
394
395 {
396 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s");
397
398 _screenUpdateThread.reset(
399 new rtc::PlatformThread(ScreenUpdateThreadProc, this, "ScreenUpdate"));
400 _screenUpdateEvent = EventWrapper::Create();
401
402 if(!IsValidWindowPtr(_windowRef))
403 {
404 //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef:0x%x", __FUNCTION__, __LINE__, _windowRef);
405 }
406 else
407 {
408 //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d WindowRef 0x%x is valid", __FUNCTION__, __LINE__, _windowRef);
409 }
410
411 GetWindowRect(_windowRect);
412
413 _lastViewBounds.origin.x = 0;
414 _lastViewBounds.origin.y = 0;
415 _lastViewBounds.size.width = 0;
416 _lastViewBounds.size.height = 0;
417
418 }
419
420 // This is a static function. It has been registered (in the class constructor) to be called on various window redrawing or resizing events.
421 // Since it is a static method, "this" is passed in as the userData parameter (the one and only allowed), and member methods are then called on it.
422 #pragma mark WindowRef Event Handler
423 pascal OSStatus VideoRenderAGL::sHandleWindowResized (EventHandlerCallRef /*nextHandler*/,
424 EventRef theEvent,
425 void* userData)
426 {
427 WindowRef windowRef = NULL;
428
429 int eventType = GetEventKind(theEvent);
430
431 // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
432 GetEventParameter (theEvent,
433 kEventParamDirectObject,
434 typeWindowRef,
435 NULL,
436 sizeof (WindowRef),
437 NULL,
438 &windowRef);
439
440 VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
441
442 bool updateUI = true;
443 if(kEventWindowBoundsChanged == eventType)
444 {
445 }
446 else if(kEventWindowBoundsChanging == eventType)
447 {
448 }
449 else if(kEventWindowZoomed == eventType)
450 {
451 }
452 else if(kEventWindowExpanding == eventType)
453 {
454 }
455 else if(kEventWindowExpanded == eventType)
456 {
457 }
458 else if(kEventWindowClickResizeRgn == eventType)
459 {
460 }
461 else if(kEventWindowClickDragRgn == eventType)
462 {
463 }
464 else
465 {
466 updateUI = false;
467 }
468
469 if(true == updateUI)
470 {
471 obj->ParentWindowResized(windowRef);
472 obj->UpdateClipping();
473 obj->RenderOffScreenBuffers();
474 }
475
476 return noErr;
477 }
478
479 #pragma mark VideoRenderAGL HIViewRef constructor
480
481 VideoRenderAGL::VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId) :
482 _hiviewRef( windowRef),
483 _windowRef( 0),
484 _fullScreen( fullscreen),
485 _id( iId),
486 _renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
487 _screenUpdateEvent( 0),
488 _isHIViewRef( false),
489 _aglContext( 0),
490 _windowWidth( 0),
491 _windowHeight( 0),
492 _lastWindowWidth( -1),
493 _lastWindowHeight( -1),
494 _lastHiViewWidth( -1),
495 _lastHiViewHeight( -1),
496 _currentParentWindowHeight( 0),
497 _currentParentWindowWidth( 0),
498 _currentParentWindowBounds( ),
499 _windowHasResized( false),
500 _lastParentWindowBounds( ),
501 _currentHIViewBounds( ),
502 _lastHIViewBounds( ),
503 _windowRect( ),
504 _aglChannels( ),
505 _zOrderToChannel( ),
506 _hiviewEventHandlerRef( NULL),
507 _windowEventHandlerRef( NULL),
508 _currentViewBounds( ),
509 _lastViewBounds( ),
510 _renderingIsPaused( false)
511 {
512 //WEBRTC_TRACE(kTraceDebug, "%s:%d Constructor", __FUNCTION__, __LINE__);
513 // _renderCritSec = CriticalSectionWrapper::CreateCriticalSection();
514
515 _screenUpdateThread.reset(new rtc::PlatformThread(
516 ScreenUpdateThreadProc, this, "ScreenUpdateThread"));
517 _screenUpdateEvent = EventWrapper::Create();
518
519 GetWindowRect(_windowRect);
520
521 _lastViewBounds.origin.x = 0;
522 _lastViewBounds.origin.y = 0;
523 _lastViewBounds.size.width = 0;
524 _lastViewBounds.size.height = 0;
525
526 #ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
527 // This gets the parent window of the HIViewRef that's passed in and installs a WindowRef event handler on it
528 // The event handler looks for window resize events and adjusts the offset of the controls.
529
530 //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef's parent window", __FUNCTION__, __LINE__);
531
532
533 static const EventTypeSpec windowEventTypes[] =
534 {
535 kEventClassWindow, kEventWindowBoundsChanged,
536 kEventClassWindow, kEventWindowBoundsChanging,
537 kEventClassWindow, kEventWindowZoomed,
538 kEventClassWindow, kEventWindowExpanded,
539 kEventClassWindow, kEventWindowClickResizeRgn,
540 kEventClassWindow, kEventWindowClickDragRgn
541 };
542
543 WindowRef parentWindow = HIViewGetWindow(windowRef);
544
545 InstallWindowEventHandler (parentWindow,
546 NewEventHandlerUPP (sHandleWindowResized),
547 GetEventTypeCount(windowEventTypes),
548 windowEventTypes,
549 (void *) this, // this is an arbitrary parameter that will be passed on to your event handler when it is called later
550 &_windowEventHandlerRef);
551
552 #endif
553
554 #ifdef NEW_HIVIEW_EVENT_HANDLER
555 //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef", __FUNCTION__, __LINE__);
556
557 static const EventTypeSpec hiviewEventTypes[] =
558 {
559 kEventClassControl, kEventControlBoundsChanged,
560 kEventClassControl, kEventControlDraw
561 // kEventControlDragLeave
562 // kEventControlDragReceive
563 // kEventControlGetFocusPart
564 // kEventControlApplyBackground
565 // kEventControlDraw
566 // kEventControlHit
567
568 };
569
570 HIViewInstallEventHandler(_hiviewRef,
571 NewEventHandlerUPP(sHandleHiViewResized),
572 GetEventTypeCount(hiviewEventTypes),
573 hiviewEventTypes,
574 (void *) this,
575 &_hiviewEventHandlerRef);
576
577 #endif
578 }
579
580 // This is a static function. It has been registered (in the constructor) to be called on various window redrawing or resizing events.
581 // Since it is a static method, "this" is passed in as the userData parameter (the one and only allowed), and member methods are then called on it.
582 #pragma mark HIViewRef Event Handler
583 pascal OSStatus VideoRenderAGL::sHandleHiViewResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData)
584 {
585 //static int callbackCounter = 1;
586 HIViewRef hiviewRef = NULL;
587
588 // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
589 int eventType = GetEventKind(theEvent);
590 OSStatus status = noErr;
591 status = GetEventParameter (theEvent,
592 kEventParamDirectObject,
593 typeControlRef,
594 NULL,
595 sizeof (ControlRef),
596 NULL,
597 &hiviewRef);
598
599 VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
600 WindowRef parentWindow = HIViewGetWindow(hiviewRef);
601 bool updateUI = true;
602
603 if(kEventControlBoundsChanged == eventType)
604 {
605 }
606 else if(kEventControlDraw == eventType)
607 {
608 }
609 else
610 {
611 updateUI = false;
612 }
613
614 if(true == updateUI)
615 {
616 obj->ParentWindowResized(parentWindow);
617 obj->UpdateClipping();
618 obj->RenderOffScreenBuffers();
619 }
620
621 return status;
622 }
623
624 VideoRenderAGL::~VideoRenderAGL()
625 {
626
627 //WEBRTC_TRACE(kTraceDebug, "%s:%d Destructor", __FUNCTION__, __LINE__);
628
629
630 #ifdef USE_EVENT_HANDLERS
631 // remove event handlers
632 OSStatus status;
633 if(_isHIViewRef)
634 {
635 status = RemoveEventHandler(_hiviewEventHandlerRef);
636 }
637 else
638 {
639 status = RemoveEventHandler(_windowEventHandlerRef);
640 }
641 if(noErr != status)
642 {
643 if(_isHIViewRef)
644 {
645
646 //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
647 }
648 else
649 {
650 //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
651 }
652 }
653
654 #endif
655
656 OSStatus status;
657 #ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
658 if(_windowEventHandlerRef)
659 {
660 status = RemoveEventHandler(_windowEventHandlerRef);
661 if(status != noErr)
662 {
663 //WEBRTC_TRACE(kTraceDebug, "%s:%d failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
664 }
665 }
666 #endif
667
668 #ifdef NEW_HIVIEW_EVENT_HANDLER
669 if(_hiviewEventHandlerRef)
670 {
671 status = RemoveEventHandler(_hiviewEventHandlerRef);
672 if(status != noErr)
673 {
674 //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
675 }
676 }
677 #endif
678
679 // Signal event to exit thread, then delete it
680 rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
681
682 if (tmpPtr)
683 {
684 _screenUpdateEvent->Set();
685 _screenUpdateEvent->StopTimer();
686
687 tmpPtr->Stop();
688 delete tmpPtr;
689 delete _screenUpdateEvent;
690 _screenUpdateEvent = NULL;
691 }
692
693 if (_aglContext != 0)
694 {
695 aglSetCurrentContext(_aglContext);
696 aglDestroyContext(_aglContext);
697 _aglContext = 0;
698 }
699
700 // Delete all channels
701 std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
702 while (it!= _aglChannels.end())
703 {
704 delete it->second;
705 _aglChannels.erase(it);
706 it = _aglChannels.begin();
707 }
708 _aglChannels.clear();
709
710 // Clean the zOrder map
711 std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
712 while(zIt != _zOrderToChannel.end())
713 {
714 _zOrderToChannel.erase(zIt);
715 zIt = _zOrderToChannel.begin();
716 }
717 _zOrderToChannel.clear();
718
719 //delete _renderCritSec;
720
721
722 }
723
724 int VideoRenderAGL::GetOpenGLVersion(int& aglMajor, int& aglMinor)
725 {
726 aglGetVersion((GLint *) &aglMajor, (GLint *) &aglMinor);
727 return 0;
728 }
729
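// Starts the screen update thread and its render timer, and creates the mixing context.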
730 int VideoRenderAGL::Init()
731 {
732 LockAGLCntx();
733
734 // Start rendering thread...
735 if (!_screenUpdateThread)
736 {
737 UnlockAGLCntx();
738 //WEBRTC_TRACE(kTraceError, "%s:%d Thread not created", __FUNCTION__, __LINE__);
739 return -1;
740 }
741 _screenUpdateThread->Start();
742 _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
743
744 // Start the event triggering the render process
745 unsigned int monitorFreq = 60;
746 _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
747
748 // Create the mixing context
749 if (CreateMixingContext() == -1)
750 {
751 //WEBRTC_TRACE(kTraceError, "%s:%d Could not create a mixing context", __FUNCTION__, __LINE__);
752 UnlockAGLCntx();
753 return -1;
754 }
755
756 UnlockAGLCntx();
757 return 0;
758 }
759
760 VideoChannelAGL* VideoRenderAGL::CreateAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
761 {
762
763 LockAGLCntx();
764
765 //WEBRTC_TRACE(kTraceInfo, "%s:%d Creating AGL channel: %d", __FUNCTION__, __LINE__, channel);
766
767 if (HasChannel(channel))
768 {
769 //WEBRTC_TRACE(kTraceError, "%s:%d Channel already exists", __FUNCTION__, __LINE__);
770 UnlockAGLCntx();
771 return NULL;
772 }
773
774 if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
775 {
776 // There is already one channel using this zOrder
777 // TODO: Allow multiple channels with same zOrder
778 }
779
780 VideoChannelAGL* newAGLChannel = new VideoChannelAGL(_aglContext, _id, this);
781
782 if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
783 {
784 if (newAGLChannel)
785 {
786 delete newAGLChannel;
787 newAGLChannel = NULL;
788 }
789 //WEBRTC_LOG(kTraceError, "Could not create AGL channel");
790 //WEBRTC_TRACE(kTraceError, "%s:%d Could not create AGL channel", __FUNCTION__, __LINE__);
791 UnlockAGLCntx();
792 return NULL;
793 }
794
795 _aglChannels[channel] = newAGLChannel;
796 _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
797
798 UnlockAGLCntx();
799 return newAGLChannel;
800 }
801
802 int VideoRenderAGL::DeleteAllAGLChannels()
803 {
804 CriticalSectionScoped cs(&_renderCritSec);
805
806 //WEBRTC_TRACE(kTraceInfo, "%s:%d Deleting all AGL channels", __FUNCTION__, __LINE__);
807 //int i = 0 ;
808 std::map<int, VideoChannelAGL*>::iterator it;
809 it = _aglChannels.begin();
810
811 while (it != _aglChannels.end())
812 {
813 VideoChannelAGL* channel = it->second;
814 if (channel)
815 delete channel;
816
817 _aglChannels.erase(it);
818 it = _aglChannels.begin();
819 }
820 _aglChannels.clear();
821 return 0;
822 }
823
824 int VideoRenderAGL::DeleteAGLChannel(int channel)
825 {
826 CriticalSectionScoped cs(&_renderCritSec);
827 //WEBRTC_TRACE(kTraceDebug, "%s:%d Deleting AGL channel %d", __FUNCTION__, __LINE__, channel);
828
829 std::map<int, VideoChannelAGL*>::iterator it;
830 it = _aglChannels.find(channel);
831 if (it != _aglChannels.end())
832 {
833 delete it->second;
834 _aglChannels.erase(it);
835 }
836 else
837 {
838 //WEBRTC_TRACE(kTraceWarning, "%s:%d Channel not found", __FUNCTION__, __LINE__);
839 return -1;
840 }
841
842 std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
843 while( zIt != _zOrderToChannel.end())
844 {
845 if (zIt->second == channel)
846 {
847 _zOrderToChannel.erase(zIt);
848 break;
849 }
850 zIt++;// = _zOrderToChannel.begin();
851 }
852
853 return 0;
854 }
855
856 int VideoRenderAGL::StopThread()
857 {
858 CriticalSectionScoped cs(&_renderCritSec);
859 rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
860
861 if (tmpPtr)
862 {
863 _screenUpdateEvent->Set();
864 _renderCritSec.Leave();
865 tmpPtr->Stop();
866 delete tmpPtr;
867 _renderCritSec.Enter();
868 }
869
870 delete _screenUpdateEvent;
871 _screenUpdateEvent = NULL;
872
873 return 0;
874 }
875
876 bool VideoRenderAGL::IsFullScreen()
877 {
878 CriticalSectionScoped cs(&_renderCritSec);
879 return _fullScreen;
880 }
881
882 bool VideoRenderAGL::HasChannels()
883 {
884
885 CriticalSectionScoped cs(&_renderCritSec);
886
887 if (_aglChannels.begin() != _aglChannels.end())
888 {
889 return true;
890 }
891
892 return false;
893 }
894
895 bool VideoRenderAGL::HasChannel(int channel)
896 {
897 CriticalSectionScoped cs(&_renderCritSec);
898
899 std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
900 if (it != _aglChannels.end())
901 {
902 return true;
903 }
904
905 return false;
906 }
907
908 int VideoRenderAGL::GetChannels(std::list<int>& channelList)
909 {
910
911 CriticalSectionScoped cs(&_renderCritSec);
912 std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
913
914 while (it != _aglChannels.end())
915 {
916 channelList.push_back(it->first);
917 it++;
918 }
919
920 return 0;
921 }
922
923 VideoChannelAGL* VideoRenderAGL::ConfigureAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
924 {
925
926 CriticalSectionScoped cs(&_renderCritSec);
927
928 std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
929
930 if (it != _aglChannels.end())
931 {
932 VideoChannelAGL* aglChannel = it->second;
933 if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
934 {
935 return NULL;
936 }
937
938 std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
939 while(zIt != _zOrderToChannel.end())
940 {
941 if (zIt->second == channel)
942 {
943 if (zIt->first != zOrder)
944 {
945 _zOrderToChannel.erase(zIt);
946 _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
947 }
948 break;
949 }
950 zIt++;
951 }
952 return aglChannel;
953 }
954
955 return NULL;
956 }
957
958 bool VideoRenderAGL::ScreenUpdateThreadProc(void* obj)
959 {
960 return static_cast<VideoRenderAGL*>(obj)->ScreenUpdateProcess();
961 }
962
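// One pass of the render loop: waits for the render timer event, updates the AGL context if the window has been resized, and repaints the off-screen buffers when a channel has a new frame.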
963 bool VideoRenderAGL::ScreenUpdateProcess()
964 {
965 _screenUpdateEvent->Wait(100);
966
967 LockAGLCntx();
968
969 if (!_screenUpdateThread)
970 {
971 UnlockAGLCntx();
972 return false;
973 }
974
975 if (aglSetCurrentContext(_aglContext) == GL_FALSE)
976 {
977 UnlockAGLCntx();
978 return true;
979 }
980
981 if (GetWindowRect(_windowRect) == -1)
982 {
983 UnlockAGLCntx();
984 return true;
985 }
986
987 if (_windowWidth != (_windowRect.right - _windowRect.left)
988 || _windowHeight != (_windowRect.bottom - _windowRect.top))
989 {
990 // We have a new window size, update the context.
991 if (aglUpdateContext(_aglContext) == GL_FALSE)
992 {
993 UnlockAGLCntx();
994 return true;
995 }
996 _windowWidth = _windowRect.right - _windowRect.left;
997 _windowHeight = _windowRect.bottom - _windowRect.top;
998 }
999
1000 // This section polls to see if the window size has changed.
1001 // It was causing problems with an invalid windowRef;
1002 // this code has been modified and now exists in the window event handler.
1003 #ifndef NEW_HIVIEW_PARENT_EVENT_HANDLER
1004 if (_isHIViewRef)
1005 {
1006
1007 if(FALSE == HIViewIsValid(_hiviewRef))
1008 {
1009
1010 //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalid windowRef", __FUNCTION__, __LINE__);
1011 UnlockAGLCntx();
1012 return true;
1013 }
1014 WindowRef window = HIViewGetWindow(_hiviewRef);
1015
1016 if(FALSE == IsValidWindowPtr(window))
1017 {
1018 //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalide hiviewRef", __FUNCTION__, __LINE__);
1019 UnlockAGLCntx();
1020 return true;
1021 }
1022 if (window == NULL)
1023 {
1024 //WEBRTC_TRACE(kTraceDebug, "%s:%d WindowRef = NULL", __FUNCTION__, __LINE__);
1025 UnlockAGLCntx();
1026 return true;
1027 }
1028
1029 if(FALSE == MacIsWindowVisible(window))
1030 {
1031 //WEBRTC_TRACE(kTraceDebug, "%s:%d MacIsWindowVisible == FALSE. Returning early", __FUNCTION__, __LINE__);
1032 UnlockAGLCntx();
1033 return true;
1034 }
1035
1036 HIRect viewBounds; // Placement and size for HIView
1037 int windowWidth = 0; // Parent window width
1038 int windowHeight = 0; // Parent window height
1039
1040 // NOTE: Calling GetWindowBounds with kWindowStructureRgn will crash intermittently if the OS decides it needs to push it into the back for a moment.
1041 // To counter this, we get the titlebar height on class construction and then add it to the content region here. Content regions seem not to crash.
1042 Rect contentBounds =
1043 { 0, 0, 0, 0}; // The bounds for the parent window
1044
1045 #if defined(USE_CONTENT_RGN)
1046 GetWindowBounds(window, kWindowContentRgn, &contentBounds);
1047 #elif defined(USE_STRUCT_RGN)
1048 GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
1049 #endif
1050
1051 Rect globalBounds =
1052 { 0, 0, 0, 0}; // The bounds for the parent window
1053 globalBounds.top = contentBounds.top;
1054 globalBounds.right = contentBounds.right;
1055 globalBounds.bottom = contentBounds.bottom;
1056 globalBounds.left = contentBounds.left;
1057
1058 windowHeight = globalBounds.bottom - globalBounds.top;
1059 windowWidth = globalBounds.right - globalBounds.left;
1060
1061 // Get the size of the HIViewRef
1062 HIViewGetBounds(_hiviewRef, &viewBounds);
1063 HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
1064
1065 // Check if this is the first call..
1066 if (_lastWindowHeight == -1 &&
1067 _lastWindowWidth == -1)
1068 {
1069 _lastWindowWidth = windowWidth;
1070 _lastWindowHeight = windowHeight;
1071
1072 _lastViewBounds.origin.x = viewBounds.origin.x;
1073 _lastViewBounds.origin.y = viewBounds.origin.y;
1074 _lastViewBounds.size.width = viewBounds.size.width;
1075 _lastViewBounds.size.height = viewBounds.size.height;
1076 }
1077
1078
1079 bool resized = false;
1080
1081 // Check if parent window size has changed
1082 if (windowHeight != _lastWindowHeight ||
1083 windowWidth != _lastWindowWidth)
1084 {
1085 resized = true;
1086 }
1087
1088 // Check if the HIView has new size or is moved in the parent window
1089 if (_lastViewBounds.origin.x != viewBounds.origin.x ||
1090 _lastViewBounds.origin.y != viewBounds.origin.y ||
1091 _lastViewBounds.size.width != viewBounds.size.width ||
1092 _lastViewBounds.size.height != viewBounds.size.height)
1093 {
1094 // The HiView is resized or has moved.
1095 resized = true;
1096 }
1097
1098 if (resized)
1099 {
1100
1101 //WEBRTC_TRACE(kTraceDebug, "%s:%d Window has resized", __FUNCTION__, __LINE__);
1102
1103 // Calculate offset between the windows
1104 // {x, y, width, height}, x,y = lower left corner
1105 const GLint offs[4] =
1106 { (int)(0.5f + viewBounds.origin.x),
1107 (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
1108 viewBounds.size.width, viewBounds.size.height};
1109
1110 //WEBRTC_TRACE(kTraceDebug, "%s:%d contentBounds t:%d r:%d b:%d l:%d", __FUNCTION__, __LINE__,
1111 //              contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
1112 //WEBRTC_TRACE(kTraceDebug, "%s:%d windowHeight=%d", __FUNCTION__, __LINE__, windowHeight);
1113 //WEBRTC_TRACE(kTraceDebug, "%s:%d offs[4] = %d, %d, %d, %d", __FUNCTION__, __LINE__, offs[0], offs[1], offs[2], offs[3]);
1114
1115 aglSetDrawable (_aglContext, GetWindowPort(window));
1116 aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
1117 aglEnable(_aglContext, AGL_BUFFER_RECT);
1118
1119 // We need to change the viewport too if the HIView size has changed
1120 glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
1121
1122 }
1123 _lastWindowWidth = windowWidth;
1124 _lastWindowHeight = windowHeight;
1125
1126 _lastViewBounds.origin.x = viewBounds.origin.x;
1127 _lastViewBounds.origin.y = viewBounds.origin.y;
1128 _lastViewBounds.size.width = viewBounds.size.width;
1129 _lastViewBounds.size.height = viewBounds.size.height;
1130
1131 }
1132 #endif
1133 if (_fullScreen)
1134 {
1135 // TODO
1136 // We use double buffers, must always update
1137 //RenderOffScreenBuffersToBackBuffer();
1138 }
1139 else
1140 {
1141 // Check if there are any updated buffers
1142 bool updated = false;
1143
1144 // TODO: check if window size is updated!
1145 // TODO Improvement: Walk through the zOrder Map to only render the ones in need of update
1146 std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
1147 while (it != _aglChannels.end())
1148 {
1149
1150 VideoChannelAGL* aglChannel = it->second;
1151 aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
1152 aglChannel->IsUpdated(updated);
1153 if (updated)
1154 {
1155 break;
1156 }
1157 it++;
1158 }
1159
1160 if (updated)
1161 {
1162 // At least one buffer is updated, we need to repaint the texture
1163 if (RenderOffScreenBuffers() != -1)
1164 {
1165 // MF
1166 //SwapAndDisplayBuffers();
1167 }
1168 else
1169 {
1170 // Error updating the mixing texture, don't swap.
1171 }
1172 }
1173 }
1174
1175 UnlockAGLCntx();
1176
1177 //WEBRTC_LOG(kTraceDebug, "Leaving ScreenUpdateProcess()");
1178 return true;
1179 }
1180
1181 void VideoRenderAGL::ParentWindowResized(WindowRef window)
1182 {
1183 //WEBRTC_LOG(kTraceDebug, "%s HIViewRef:%d owner window has resized", __FUNCTION__, (int)_hiviewRef);
1184
1185 LockAGLCntx();
1186
1187 // set flag
1188 _windowHasResized = false;
1189
1190 if(FALSE == HIViewIsValid(_hiviewRef))
1191 {
1192 //WEBRTC_LOG(kTraceDebug, "invalid windowRef");
1193 UnlockAGLCntx();
1194 return;
1195 }
1196
1197 if(FALSE == IsValidWindowPtr(window))
1198 {
1199 //WEBRTC_LOG(kTraceError, "invalid windowRef");
1200 UnlockAGLCntx();
1201 return;
1202 }
1203
1204 if (window == NULL)
1205 {
1206 //WEBRTC_LOG(kTraceError, "windowRef = NULL");
1207 UnlockAGLCntx();
1208 return;
1209 }
1210
1211 if(FALSE == MacIsWindowVisible(window))
1212 {
1213 //WEBRTC_LOG(kTraceDebug, "MacIsWindowVisible = FALSE. Returning early.");
1214 UnlockAGLCntx();
1215 return;
1216 }
1217
1218 Rect contentBounds =
1219 { 0, 0, 0, 0};
1220
1221 #if defined(USE_CONTENT_RGN)
1222 GetWindowBounds(window, kWindowContentRgn, &contentBounds);
1223 #elif defined(USE_STRUCT_RGN)
1224 GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
1225 #endif
1226
1227 //WEBRTC_LOG(kTraceDebug, "%s contentBounds t:%d r:%d b:%d l:%d", __FUNCTION__, contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
1228
1229 // update global vars
1230 _currentParentWindowBounds.top = contentBounds.top;
1231 _currentParentWindowBounds.left = contentBounds.left;
1232 _currentParentWindowBounds.bottom = contentBounds.bottom;
1233 _currentParentWindowBounds.right = contentBounds.right;
1234
1235 _currentParentWindowWidth = _currentParentWindowBounds.right - _currentParentWindowBounds.left;
1236 _currentParentWindowHeight = _currentParentWindowBounds.bottom - _currentParentWindowBounds.top;
1237
1238 _windowHasResized = true;
1239
1240 // ********* update AGL offsets
1241 HIRect viewBounds;
1242 HIViewGetBounds(_hiviewRef, &viewBounds);
1243 HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
1244
1245 const GLint offs[4] =
1246 { (int)(0.5f + viewBounds.origin.x),
1247 (int)(0.5f + _currentParentWindowHeight - (viewBounds.origin.y + viewBounds.size.height)),
1248 viewBounds.size.width, viewBounds.size.height};
1249 //WEBRTC_LOG(kTraceDebug, "%s _currentParentWindowHeight=%d", __FUNCTION__, _currentParentWindowHeight);
1250 //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
1251
1252 aglSetCurrentContext(_aglContext);
1253 aglSetDrawable (_aglContext, GetWindowPort(window));
1254 aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
1255 aglEnable(_aglContext, AGL_BUFFER_RECT);
1256
1257 // We need to change the viewport too if the HIView size has changed
1258 glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
1259
1260 UnlockAGLCntx();
1261
1262 return;
1263 }
1264
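// Creates the hardware-accelerated, double-buffered AGL context and attaches it to the target window or HIView.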
1265 int VideoRenderAGL::CreateMixingContext()
1266 {
1267
1268 LockAGLCntx();
1269
1270 //WEBRTC_LOG(kTraceDebug, "Entering CreateMixingContext()");
1271
1272 // Use both AGL_ACCELERATED and AGL_NO_RECOVERY to make sure
1273 // a hardware renderer is used and not a software renderer.
1274
1275 GLint attributes[] =
1276 {
1277 AGL_DOUBLEBUFFER,
1278 AGL_WINDOW,
1279 AGL_RGBA,
1280 AGL_NO_RECOVERY,
1281 AGL_ACCELERATED,
1282 AGL_RED_SIZE, 8,
1283 AGL_GREEN_SIZE, 8,
1284 AGL_BLUE_SIZE, 8,
1285 AGL_ALPHA_SIZE, 8,
1286 AGL_DEPTH_SIZE, 24,
1287 AGL_NONE,
1288 };
1289
1290 AGLPixelFormat aglPixelFormat;
1291
1292 // ***** Set up the OpenGL Context *****
1293
1294 // Get a pixel format for the attributes above
1295 aglPixelFormat = aglChoosePixelFormat(NULL, 0, attributes);
1296 if (NULL == aglPixelFormat)
1297 {
1298 //WEBRTC_LOG(kTraceError, "Could not create pixel format");
1299 UnlockAGLCntx();
1300 return -1;
1301 }
1302
1303 // Create an AGL context
1304 _aglContext = aglCreateContext(aglPixelFormat, NULL);
1305 if (_aglContext == NULL)
1306 {
1307 //WEBRTC_LOG(kTraceError, "Could no create AGL context");
1308 UnlockAGLCntx();
1309 return -1;
1310 }
1311
1312 // Release the pixel format memory
1313 aglDestroyPixelFormat(aglPixelFormat);
1314
1315 // Set the current AGL context for the rest of the settings
1316 if (aglSetCurrentContext(_aglContext) == false)
1317 {
1318 //WEBRTC_LOG(kTraceError, "Could not set current context: %d", aglGetError());
1319 UnlockAGLCntx();
1320 return -1;
1321 }
1322
1323 if (_isHIViewRef)
1324 {
1325 //---------------------------
1326 // BEGIN: new test code
1327 #if 0
1328 // Don't use this one!
1329 // There seems to be an OS X bug that can't handle
1330 // movements and resizing of the parent window
1331 // and or the HIView
1332 if (aglSetHIViewRef(_aglContext,_hiviewRef) == false)
1333 {
1334 //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
1335 UnlockAGLCntx();
1336 return -1;
1337 }
1338 #else
1339
1340 // Get the parent window for this control
1341 WindowRef window = GetControlOwner(_hiviewRef);
1342
1343 Rect globalBounds =
1344 { 0,0,0,0}; // The bounds for the parent window
1345 HIRect viewBounds; // Placement and size in the parent window.
1346 int windowHeight = 0;
1347
1348 // Rect titleBounds = {0,0,0,0};
1349 // GetWindowBounds(window, kWindowTitleBarRgn, &titleBounds);
1350 // _titleBarHeight = titleBounds.top - titleBounds.bottom;
1351 // if(0 == _titleBarHeight)
1352 // {
1353 // //WEBRTC_LOG(kTraceError, "Titlebar height = 0");
1354 // //return -1;
1355 // }
1356
1357
1358 // Get the bounds for the parent window
1359 #if defined(USE_CONTENT_RGN)
1360 GetWindowBounds(window, kWindowContentRgn, &globalBounds);
1361 #elif defined(USE_STRUCT_RGN)
1362 GetWindowBounds(window, kWindowStructureRgn, &globalBounds);
1363 #endif
1364 windowHeight = globalBounds.bottom - globalBounds.top;
1365
1366 // Get the bounds for the HIView
1367 HIViewGetBounds(_hiviewRef, &viewBounds);
1368
1369 HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
1370
1371 const GLint offs[4] =
1372 { (int)(0.5f + viewBounds.origin.x),
1373 (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
1374 viewBounds.size.width, viewBounds.size.height};
1375
1376 //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
1377
1378
1379 aglSetDrawable (_aglContext, GetWindowPort(window));
1380 aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
1381 aglEnable(_aglContext, AGL_BUFFER_RECT);
1382
1383 GLint surfaceOrder = 1; // 1: above window, -1 below.
1384 //OSStatus status = aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
1385 aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
1386
1387 glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
1388 #endif
1389
1390 }
1391 else
1392 {
1393 if(GL_FALSE == aglSetDrawable (_aglContext, GetWindowPort(_windowRef)))
1394 {
1395 //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
1396 UnlockAGLCntx();
1397 return -1;
1398 }
1399 }
1400
1401 _windowWidth = _windowRect.right - _windowRect.left;
1402 _windowHeight = _windowRect.bottom - _windowRect.top;
1403
1404 // opaque surface
1405 int surfaceOpacity = 1;
1406 if (aglSetInteger(_aglContext, AGL_SURFACE_OPACITY, (const GLint *) &surfaceOpacity) == false)
1407 {
1408 //WEBRTC_LOG(kTraceError, "Could not set surface opacity: %d", aglGetError());
1409 UnlockAGLCntx();
1410 return -1;
1411 }
1412
1413 // 1 -> sync to screen refresh rate, slow...
1414 //int swapInterval = 1; // 1: sync with vertical retrace
1415 int swapInterval = 0; // 0: don't sync with vertical retrace
1416 if (aglSetInteger(_aglContext, AGL_SWAP_INTERVAL, (const GLint *) &swapInterval) == false)
1417 {
1418 //WEBRTC_LOG(kTraceError, "Could not set swap interval: %d", aglGetError());
1419 UnlockAGLCntx();
1420 return -1;
1421 }
1422
1423 // Update the rect with the current size
1424 if (GetWindowRect(_windowRect) == -1)
1425 {
1426 //WEBRTC_LOG(kTraceError, "Could not get window size");
1427 UnlockAGLCntx();
1428 return -1;
1429 }
1430
1431 // Disable not needed functionality to increase performance
1432 glDisable(GL_DITHER);
1433 glDisable(GL_ALPHA_TEST);
1434 glDisable(GL_STENCIL_TEST);
1435 glDisable(GL_FOG);
1436 glDisable(GL_TEXTURE_2D);
1437 glPixelZoom(1.0, 1.0);
1438
1439 glDisable(GL_BLEND);
1440 glDisable(GL_DEPTH_TEST);
1441 glDepthMask(GL_FALSE);
1442 glDisable(GL_CULL_FACE);
1443
1444 glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
1445 glClear(GL_COLOR_BUFFER_BIT);
1446
1447 GLenum glErr = glGetError();
1448
1449 if (glErr)
1450 {
1451 }
1452
1453 UpdateClipping();
1454
1455 //WEBRTC_LOG(kTraceDebug, "Leaving CreateMixingContext()");
1456
1457 UnlockAGLCntx();
1458 return 0;
1459 }
1460
1461 int VideoRenderAGL::RenderOffScreenBuffers()
1462 {
1463 LockAGLCntx();
1464
1465 // Get the current window size, it might have changed since last render.
1466 if (GetWindowRect(_windowRect) == -1)
1467 {
1468 //WEBRTC_LOG(kTraceError, "Could not get window rect");
1469 UnlockAGLCntx();
1470 return -1;
1471 }
1472
1473 if (aglSetCurrentContext(_aglContext) == false)
1474 {
1475 //WEBRTC_LOG(kTraceError, "Could not set current context for rendering");
1476 UnlockAGLCntx();
1477 return -1;
1478 }
1479
1480 // HERE - only if updated!
1481 glClear(GL_COLOR_BUFFER_BIT);
1482
1483 // Loop through all channels starting highest zOrder ending with lowest.
1484 for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
1485 rIt != _zOrderToChannel.rend();
1486 rIt++)
1487 {
1488 int channelId = rIt->second;
1489 std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channelId);
1490
1491 VideoChannelAGL* aglChannel = it->second;
1492
1493 aglChannel->RenderOffScreenBuffer();
1494 }
1495
1496 SwapAndDisplayBuffers();
1497
1498 UnlockAGLCntx();
1499 return 0;
1500 }
1501
1502 int VideoRenderAGL::SwapAndDisplayBuffers()
1503 {
1504
1505 LockAGLCntx();
1506 if (_fullScreen)
1507 {
1508 // TODO:
1509 // Swap front and back buffers; rendering is taken care of in the same call
1510 //aglSwapBuffers(_aglContext);
1511 // Update buffer index to the idx for the next rendering!
1512 //_textureIdx = (_textureIdx + 1) & 1;
1513 }
1514 else
1515 {
1516 // Single buffer rendering, only update context.
1517 glFlush();
1518 aglSwapBuffers(_aglContext);
1519 HIViewSetNeedsDisplay(_hiviewRef, true);
1520 }
1521
1522 UnlockAGLCntx();
1523 return 0;
1524 }
1525
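// Retrieves the current bounds of the rendering target (HIView or window content region) into |rect|.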
1526 int VideoRenderAGL::GetWindowRect(Rect& rect)
1527 {
1528
1529 LockAGLCntx();
1530
1531 if (_isHIViewRef)
1532 {
1533 if (_hiviewRef)
1534 {
1535 HIRect HIViewRect1;
1536 if(FALSE == HIViewIsValid(_hiviewRef))
1537 {
1538 rect.top = 0;
1539 rect.left = 0;
1540 rect.right = 0;
1541 rect.bottom = 0;
1542 //WEBRTC_LOG(kTraceError,"GetWindowRect() HIViewIsValid() returned false");
1543 UnlockAGLCntx();
return -1;
1544 }
1545 HIViewGetBounds(_hiviewRef,&HIViewRect1);
1546 HIRectConvert(&HIViewRect1, 1, NULL, 2, NULL);
1547 if(HIViewRect1.origin.x < 0)
1548 {
1549 rect.top = 0;
1550 //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.top = 0");
1551 }
1552 else
1553 {
1554 rect.top = HIViewRect1.origin.x;
1555 }
1556
1557 if(HIViewRect1.origin.y < 0)
1558 {
1559 rect.left = 0;
1560 //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.left = 0");
1561 }
1562 else
1563 {
1564 rect.left = HIViewRect1.origin.y;
1565 }
1566
1567 if(HIViewRect1.size.width < 0)
1568 {
1569 rect.right = 0;
1570 //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.right = 0");
1571 }
1572 else
1573 {
1574 rect.right = HIViewRect1.size.width;
1575 }
1576
1577 if(HIViewRect1.size.height < 0)
1578 {
1579 rect.bottom = 0;
1580 //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.bottom = 0");
1581 }
1582 else
1583 {
1584 rect.bottom = HIViewRect1.size.height;
1585 }
1586
1587 ////WEBRTC_LOG(kTraceDebug,"GetWindowRect() HIViewRef: rect.top = %d, rect.left = %d, rect.right = %d, rect.bottom =%d in GetWindowRect", rect.top,rect.left,rect.right,rect.bottom);
1588 UnlockAGLCntx();
return 0;
1589 }
1590 else
1591 {
1592 //WEBRTC_LOG(kTraceError, "invalid HIViewRef");
1593 UnlockAGLCntx();
return -1;
1594 }
1595 }
1596 else
1597 {
1598 if (_windowRef)
1599 {
1600 GetWindowBounds(_windowRef, kWindowContentRgn, &rect);
1601 UnlockAGLCntx();
return 0;
1602 }
1603 else
1604 {
1605 //WEBRTC_LOG(kTraceError, "No WindowRef");
1606 UnlockAGLCntx();
return -1;
1607 }
1608 }
1609 }
1610
1611 int VideoRenderAGL::UpdateClipping()
1612 {
1613 //WEBRTC_LOG(kTraceDebug, "Entering UpdateClipping()");
1614 LockAGLCntx();
1615
1616 if(_isHIViewRef)
1617 {
1618 if(FALSE == HIViewIsValid(_hiviewRef))
1619 {
1620 //WEBRTC_LOG(kTraceError, "UpdateClipping() _isHIViewRef is invalid. Returning -1");
1621 UnlockAGLCntx();
1622 return -1;
1623 }
1624
1625 RgnHandle visibleRgn = NewRgn();
1626 SetEmptyRgn (visibleRgn);
1627
1628 if(-1 == CalculateVisibleRegion((ControlRef)_hiviewRef, visibleRgn, true))
1629 {
1630 }
1631
1632 if(GL_FALSE == aglSetCurrentContext(_aglContext))
1633 {
1634 GLenum glErr = aglGetError();
1635 //WEBRTC_LOG(kTraceError, "aglSetCurrentContext returned FALSE with error code %d at line %d", glErr, __LINE__);
1636 }
1637
1638 if(GL_FALSE == aglEnable(_aglContext, AGL_CLIP_REGION))
1639 {
1640 GLenum glErr = aglGetError();
1641 //WEBRTC_LOG(kTraceError, "aglEnable returned FALSE with error code %d at line %d\n", glErr, __LINE__);
1642 }
1643
1644 if(GL_FALSE == aglSetInteger(_aglContext, AGL_CLIP_REGION, (const GLint*)visibleRgn))
1645 {
1646 GLenum glErr = aglGetError();
1647 //WEBRTC_LOG(kTraceError, "aglSetInteger returned FALSE with error code %d at line %d\n", glErr, __LINE__);
1648 }
1649
1650 DisposeRgn(visibleRgn);
1651 }
1652 else
1653 {
1654 //WEBRTC_LOG(kTraceDebug, "Not using a hiviewref!\n");
1655 }
1656
1657 //WEBRTC_LOG(kTraceDebug, "Leaving UpdateClipping()");
1658 UnlockAGLCntx();
1659 return 0;
1660 }
1661
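// Computes the part of |control| that is actually visible by walking up to the root control and subtracting overlapping sibling/child controls; the result is used as the AGL clip region.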
1662 int VideoRenderAGL::CalculateVisibleRegion(ControlRef control, RgnHandle &visibleRgn, bool clipChildren)
1663 {
1664
1665 // LockAGLCntx();
1666
1667 //WEBRTC_LOG(kTraceDebug, "Entering CalculateVisibleRegion()");
1668 OSStatus osStatus = 0;
1669 OSErr osErr = 0;
1670
1671 RgnHandle tempRgn = NewRgn();
1672 if (IsControlVisible(control))
1673 {
1674 RgnHandle childRgn = NewRgn();
1675 WindowRef window = GetControlOwner(control);
1676 ControlRef rootControl;
1677 GetRootControl(window, &rootControl); // 'wvnc'
1678 ControlRef masterControl;
1679 osStatus = GetSuperControl(rootControl, &masterControl);
1680 // //WEBRTC_LOG(kTraceDebug, "IBM GetSuperControl=%d", osStatus);
1681
1682 if (masterControl != NULL)
1683 {
1684 CheckValidRegion(visibleRgn);
1685 // init visibleRgn with region of 'wvnc'
1686 osStatus = GetControlRegion(rootControl, kControlStructureMetaPart, visibleRgn);
1687 // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
1688 //GetSuperControl(rootControl, &rootControl);
1689 ControlRef tempControl = control, lastControl = 0;
1690 while (tempControl != masterControl) // current control != master
1691
1692 {
1693 CheckValidRegion(tempRgn);
1694
1695 // //WEBRTC_LOG(kTraceDebug, "IBM tempControl=%d masterControl=%d", tempControl, masterControl);
1696 ControlRef subControl;
1697
1698 osStatus = GetControlRegion(tempControl, kControlStructureMetaPart, tempRgn); // intersect the region of the current control with visibleRgn
1699 // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
1700 CheckValidRegion(tempRgn);
1701
1702 osErr = HIViewConvertRegion(tempRgn, tempControl, rootControl);
1703 // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d : %d", osErr, __LINE__);
1704 CheckValidRegion(tempRgn);
1705
1706 SectRgn(tempRgn, visibleRgn, visibleRgn);
1707 CheckValidRegion(tempRgn);
1708 CheckValidRegion(visibleRgn);
1709 if (EmptyRgn(visibleRgn)) // if the region is empty, bail
1710 break;
1711
1712 if (clipChildren || tempControl != control) // clip children if true, cut out the tempControl if it's not one passed to this function
1713
1714 {
1715 UInt16 numChildren;
1716 osStatus = CountSubControls(tempControl, &numChildren); // count the subcontrols
1717 // //WEBRTC_LOG(kTraceDebug, "IBM CountSubControls=%d : %d", osStatus, __LINE__);
1718
1719 // //WEBRTC_LOG(kTraceDebug, "IBM numChildren=%d", numChildren);
1720 for (int i = 0; i < numChildren; i++)
1721 {
1722 osErr = GetIndexedSubControl(tempControl, numChildren - i, &subControl); // retrieve the subcontrol in order by zorder
1723 // //WEBRTC_LOG(kTraceDebug, "IBM GetIndexedSubControls=%d : %d", osErr, __LINE__);
1724 if ( subControl == lastControl ) // break because of zorder
1725
1726 {
1727 // //WEBRTC_LOG(kTraceDebug, "IBM breaking because of zorder %d", __LINE__);
1728 break;
1729 }
1730
1731 if (!IsControlVisible(subControl)) // dont' clip invisible controls
1732
1733 {
1734 // //WEBRTC_LOG(kTraceDebug, "IBM continue. Control is not visible %d", __LINE__);
1735 continue;
1736 }
1737
1738 if(!subControl) continue;
1739
1740 osStatus = GetControlRegion(subControl, kControlStructureMetaPart, tempRgn); //get the region of the current control and union to childrg
1741 // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d %d", osStatus, __LINE__);
1742 CheckValidRegion(tempRgn);
1743 if(osStatus != 0)
1744 {
1745 // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
1746 continue;
1747 }
1748 if(!tempRgn)
1749 {
1750 // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !tempRgn %d", osStatus, __LINE__);
1751 continue;
1752 }
1753
1754 osStatus = HIViewConvertRegion(tempRgn, subControl, rootControl);
1755 CheckValidRegion(tempRgn);
1756 // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d %d", osStatus, __LINE__);
1757 if(osStatus != 0)
1758 {
1759 // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
1760 continue;
1761 }
1762 if(!rootControl)
1763 {
1764 // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !rootControl %d", osStatus, __LINE__);
1765 continue;
1766 }
1767
1768 UnionRgn(tempRgn, childRgn, childRgn);
1769 CheckValidRegion(tempRgn);
1770 CheckValidRegion(childRgn);
1771 CheckValidRegion(visibleRgn);
1772 if(!childRgn)
1773 {
1774 // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !childRgn %d", osStatus, __LINE__);
1775 continue;
1776 }
1777
1778 } // next child control
1779 }
1780 lastControl = tempControl;
1781 GetSuperControl(tempControl, &subControl);
1782 tempControl = subControl;
1783 }
1784
1785 DiffRgn(visibleRgn, childRgn, visibleRgn);
1786 CheckValidRegion(visibleRgn);
1787 CheckValidRegion(childRgn);
1788 DisposeRgn(childRgn);
1789 }
1790 else
1791 {
1792 CopyRgn(tempRgn, visibleRgn);
1793 CheckValidRegion(tempRgn);
1794 CheckValidRegion(visibleRgn);
1795 }
1796 DisposeRgn(tempRgn);
1797 }
1798
1799 //WEBRTC_LOG(kTraceDebug, "Leaving CalculateVisibleRegion()");
1800 //_aglCritPtr->Leave();
1801 return 0;
1802 }
1803
1804 bool VideoRenderAGL::CheckValidRegion(RgnHandle rHandle)
1805 {
1806
1807 Handle hndSize = (Handle)rHandle;
1808 long size = GetHandleSize(hndSize);
1809 if(0 == size)
1810 {
1811
1812 OSErr memErr = MemError();
1813 if(noErr != memErr)
1814 {
1815 // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle. MemError() returned %d", memErr);
1816 }
1817 else
1818 {
1819 // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle yet MemError() returned noErr");
1820 }
1821
1822 }
1823 else
1824 {
1825 // //WEBRTC_LOG(kTraceDebug, "IBM handleSize = %d", size);
1826 }
1827
1828 if(false == IsValidRgnHandle(rHandle))
1829 {
1830 // //WEBRTC_LOG(kTraceError, "IBM ERROR Invalid Region found : $%d", rHandle);
1831 assert(false);
1832 }
1833
1834 int err = QDError();
1835 switch(err)
1836 {
1837 case 0:
1838 break;
1839 case -147:
1840 //WEBRTC_LOG(kTraceError, "ERROR region too big");
1841 assert(false);
1842 break;
1843
1844 case -149:
1845 //WEBRTC_LOG(kTraceError, "ERROR not enough stack");
1846 assert(false);
1847 break;
1848
1849 default:
1850 //WEBRTC_LOG(kTraceError, "ERROR Unknown QDError %d", err);
1851 assert(false);
1852 break;
1853 }
1854
1855 return true;
1856 }
1857
1858 int VideoRenderAGL::ChangeWindow(void* newWindowRef)
1859 {
1860
1861 LockAGLCntx();
1862
1863 UnlockAGLCntx();
1864 return -1;
1865 }
1866
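// Starts, or resumes after StopRender(), the screen update thread and its render timer.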
1867 int32_t VideoRenderAGL::StartRender()
1868 {
1869
1870 LockAGLCntx();
1871 const unsigned int MONITOR_FREQ = 60;
1872 if(TRUE == _renderingIsPaused)
1873 {
1874 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Rendering is paused. Restarting now", __FUNCTION__, __LINE__);
1875
1876 // We already have the thread. Most likely StopRender() was called and rendering was paused.
1877 if(FALSE == _screenUpdateThread->Start())
1878 {
1879 //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
1880 UnlockAGLCntx();
1881 return -1;
1882 }
1883 _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
1884 if(FALSE == _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ))
1885 {
1886 //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateEvent", __FUNCTION__, __LINE__);
1887 UnlockAGLCntx();
1888 return -1;
1889 }
1890
UnlockAGLCntx();
1891 return 0;
1892 }
1893
1894 _screenUpdateThread.reset(
1895 new rtc::PlatformThread(ScreenUpdateThreadProc, this, "ScreenUpdate"));
1896 _screenUpdateEvent = EventWrapper::Create();
1897
1898 if (!_screenUpdateThread)
1899 {
1900 //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
1901 UnlockAGLCntx();
1902 return -1;
1903 }
1904
1905 _screenUpdateThread->Start();
1906 _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
1907 _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ);
1908
1909 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Started screenUpdateThread", __FUNCTION__, __LINE__);
1910
1911 UnlockAGLCntx();
1912 return 0;
1913 }
1914
1915 int32_t VideoRenderAGL::StopRender()
1916 {
1917 LockAGLCntx();
1918
1919 if(!_screenUpdateThread || !_screenUpdateEvent)
1920 {
1921 _renderingIsPaused = TRUE;
1922 UnlockAGLCntx();
1923 return 0;
1924 }
1925
1926 if(FALSE == _screenUpdateThread->Stop() || FALSE == _screenUpdateEvent->StopTimer())
1927 {
1928 _renderingIsPaused = FALSE;
1929 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Could not stop either: screenUpdateThread or screenUpdateEvent", __FUNCTION__, __LINE__);
1930 UnlockAGLCntx();
1931 return -1;
1932 }
1933
1934 _renderingIsPaused = TRUE;
1935
1936 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Stopped screenUpdateThread", __FUNCTION__, __LINE__);
1937 UnlockAGLCntx();
1938 return 0;
1939 }
1940
1941 int32_t VideoRenderAGL::DeleteAGLChannel(const uint32_t streamID)
1942 {
1943
1944 LockAGLCntx();
1945
1946 std::map<int, VideoChannelAGL*>::iterator it;
1947 it = _aglChannels.begin();
1948
1949 while (it != _aglChannels.end())
1950 {
1951 VideoChannelAGL* channel = it->second;
1952 //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Deleting channel %d", __FUNCTION__, __LINE__, streamID);
1953 delete channel;
1954 it++;
1955 }
1956 _aglChannels.clear();
1957
1958 UnlockAGLCntx();
1959 return 0;
1960 }
1961
1962 int32_t VideoRenderAGL::GetChannelProperties(const uint16_t streamId,
1963 uint32_t& zOrder,
1964 float& left,
1965 float& top,
1966 float& right,
1967 float& bottom)
1968 {
1969
1970 LockAGLCntx();
1971 UnlockAGLCntx();
1972 return -1;
1973
1974 }
1975
1976 void VideoRenderAGL::LockAGLCntx()
1977 {
1978 _renderCritSec.Enter();
1979 }
1980 void VideoRenderAGL::UnlockAGLCntx()
1981 {
1982 _renderCritSec.Leave();
1983 }
1984
1985 } // namespace webrtc
1986
1987 #endif // CARBON_RENDERING
1988