/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/engine_configurations.h"
#if defined(COCOA_RENDERING)

#include "webrtc/base/platform_thread.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_render/mac/video_render_nsopengl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"

namespace webrtc {

VideoChannelNSOpenGL::VideoChannelNSOpenGL(NSOpenGLContext *nsglContext,
                                           int iId,
                                           VideoRenderNSOpenGL* owner) :
_nsglContext( nsglContext),
_id( iId),
_owner( owner),
_width( 0),
_height( 0),
_startWidth( 0.0f),
_startHeight( 0.0f),
_stopWidth( 0.0f),
_stopHeight( 0.0f),
_stretchedWidth( 0),
_stretchedHeight( 0),
_oldStretchedHeight( 0),
_oldStretchedWidth( 0),
_buffer( 0),
_bufferSize( 0),
_incomingBufferSize( 0),
_bufferIsUpdated( false),
_numberOfStreams( 0),
_pixelFormat( GL_RGBA),
_pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
_texture( 0)
{
}

VideoChannelNSOpenGL::~VideoChannelNSOpenGL()
{
    if (_buffer)
    {
        delete [] _buffer;
        _buffer = NULL;
    }

    if (_texture != 0)
    {
        [_nsglContext makeCurrentContext];
        glDeleteTextures(1, (const GLuint*) &_texture);
        _texture = 0;
    }
}

int VideoChannelNSOpenGL::ChangeContext(NSOpenGLContext *nsglContext)
{
    _owner->LockAGLCntx();

    _nsglContext = nsglContext;
    [_nsglContext makeCurrentContext];

    _owner->UnlockAGLCntx();
    return 0;
}
76 | |
77 int32_t VideoChannelNSOpenGL::GetChannelProperties(float& left, float& top, | |
78 float& right, float& bottom) | |
79 { | |
80 | |
81 _owner->LockAGLCntx(); | |
82 | |
83 left = _startWidth; | |
84 top = _startHeight; | |
85 right = _stopWidth; | |
86 bottom = _stopHeight; | |
87 | |
88 _owner->UnlockAGLCntx(); | |
89 return 0; | |
90 } | |
91 | |
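// RenderFrame: called per decoded frame. Reallocates the texture and the
// conversion buffer via FrameSizeChange() whenever the frame size changes,
// then uploads the frame with DeliverFrame().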
int32_t VideoChannelNSOpenGL::RenderFrame(const uint32_t /*streamId*/,
                                          const VideoFrame& videoFrame) {
  _owner->LockAGLCntx();

  if(_width != videoFrame.width() ||
     _height != videoFrame.height()) {
    if(FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
      _owner->UnlockAGLCntx();
      return -1;
    }
  }
  int ret = DeliverFrame(videoFrame);

  _owner->UnlockAGLCntx();
  return ret;
}

int VideoChannelNSOpenGL::UpdateSize(int width, int height)
{
    _owner->LockAGLCntx();
    _width = width;
    _height = height;
    _owner->UnlockAGLCntx();
    return 0;
}

int VideoChannelNSOpenGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
{
    _owner->LockAGLCntx();
    _stretchedHeight = stretchHeight;
    _stretchedWidth = stretchWidth;
    _owner->UnlockAGLCntx();
    return 0;
}

int VideoChannelNSOpenGL::FrameSizeChange(int width, int height,
                                          int numberOfStreams)
{
    // We got a new frame size from VideoAPI, prepare the buffer

    _owner->LockAGLCntx();

    if (width == _width && _height == height)
    {
        // We already have a correct buffer size
        _numberOfStreams = numberOfStreams;
        _owner->UnlockAGLCntx();
        return 0;
    }

    _width = width;
    _height = height;

    // Delete the old buffer, create a new one with correct size.
    if (_buffer)
    {
        delete [] _buffer;
        _bufferSize = 0;
    }

    _incomingBufferSize = CalcBufferSize(kI420, _width, _height);
    _bufferSize = CalcBufferSize(kARGB, _width, _height);
    _buffer = new unsigned char [_bufferSize];
    memset(_buffer, 0, _bufferSize * sizeof(unsigned char));

    [_nsglContext makeCurrentContext];

    if(glIsTexture(_texture))
    {
        glDeleteTextures(1, (const GLuint*) &_texture);
        _texture = 0;
    }

    // Create a new texture
    glGenTextures(1, (GLuint *) &_texture);

    GLenum glErr = glGetError();

    if (glErr != GL_NO_ERROR)
    {
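        // Note: an error from glGenTextures is not treated as fatal here; the
        // glGetError() check after glTexImage2D below is the one that aborts.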
    }

    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);

    GLint texSize;
    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);

    if (texSize < _width || texSize < _height)
    {
        _owner->UnlockAGLCntx();
        return -1;
    }

    // Set up the texture type and size
    glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
                 0, // level
                 GL_RGBA, // internal format
                 _width, // width
                 _height, // height
                 0, // border 0/1 = off/on
                 _pixelFormat, // format, GL_RGBA
                 _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
                 _buffer); // pixel data

    glErr = glGetError();
    if (glErr != GL_NO_ERROR)
    {
        _owner->UnlockAGLCntx();
        return -1;
    }

    _owner->UnlockAGLCntx();
    return 0;
}

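// DeliverFrame: validates that the incoming frame still matches the expected
// I420 size, converts it to BGRA into _buffer, and copies the result into the
// rectangle texture with glTexSubImage2D.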
int VideoChannelNSOpenGL::DeliverFrame(const VideoFrame& videoFrame) {
  _owner->LockAGLCntx();

  if (_texture == 0) {
    _owner->UnlockAGLCntx();
    return 0;
  }

  if (CalcBufferSize(kI420, videoFrame.width(), videoFrame.height()) !=
      _incomingBufferSize) {
    _owner->UnlockAGLCntx();
    return -1;
  }

  // Using the VideoFrame for YV12: YV12 is YVU; I420 assumes
  // YUV.
  // TODO(mikhal) : Use appropriate functionality.
  // TODO(wu): See if we are using glTexSubImage2D correctly.
  int rgbRet = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer);
  if (rgbRet < 0) {
    _owner->UnlockAGLCntx();
    return -1;
  }

  [_nsglContext makeCurrentContext];

  // Make sure this texture is the active one
  glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
  GLenum glErr = glGetError();
  if (glErr != GL_NO_ERROR) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "ERROR %d while calling glBindTexture", glErr);
    _owner->UnlockAGLCntx();
    return -1;
  }

  glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
                  0, // level, not used
                  0, // x start point (lower left of picture)
                  0, // y start point
                  _width, // width
                  _height, // height
                  _pixelFormat, // picture format of _buffer
                  _pixelDataType, // data type of _buffer
                  (const GLvoid*) _buffer); // the pixel data

  glErr = glGetError();
  if (glErr != GL_NO_ERROR) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "ERROR %d while calling glTexSubImage2d", glErr);
    _owner->UnlockAGLCntx();
    return -1;
  }

  _bufferIsUpdated = true;

  _owner->UnlockAGLCntx();
  return 0;
}

int VideoChannelNSOpenGL::RenderOffScreenBuffer()
{
    _owner->LockAGLCntx();

    if (_texture == 0)
    {
        _owner->UnlockAGLCntx();
        return 0;
    }

    //  if(_fullscreen)
    //  {
    //      NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
    //      _width = mainDisplayRect.size.width;
    //      _height = mainDisplayRect.size.height;
    //      glViewport(0, 0, mainDisplayRect.size.width, mainDisplayRect.size.height);
    //      float newX = mainDisplayRect.size.width/_width;
    //      float newY = mainDisplayRect.size.height/_height;

    // Convert from 0.0 <= size <= 1.0 to
    // OpenGL world -1.0 < size < 1.0
    GLfloat xStart = 2.0f * _startWidth - 1.0f;
    GLfloat xStop = 2.0f * _stopWidth - 1.0f;
    GLfloat yStart = 1.0f - 2.0f * _stopHeight;
    GLfloat yStop = 1.0f - 2.0f * _startHeight;

    [_nsglContext makeCurrentContext];

    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
    _oldStretchedHeight = _stretchedHeight;
    _oldStretchedWidth = _stretchedWidth;

    glLoadIdentity();
    glEnable(GL_TEXTURE_RECTANGLE_EXT);
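    // GL_TEXTURE_RECTANGLE_EXT uses non-normalized texel coordinates, so the
    // texture coordinates below run from 0.._width and 0.._height rather than
    // the 0..1 range used by GL_TEXTURE_2D.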
    glBegin(GL_POLYGON);
    {
        glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
        glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
        glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
        glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
    }
    glEnd();

    glDisable(GL_TEXTURE_RECTANGLE_EXT);

    _bufferIsUpdated = false;

    _owner->UnlockAGLCntx();
    return 0;
}

int VideoChannelNSOpenGL::IsUpdated(bool& isUpdated)
{
    _owner->LockAGLCntx();

    isUpdated = _bufferIsUpdated;

    _owner->UnlockAGLCntx();
    return 0;
}

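// SetStreamSettings: stores the new normalized placement, then forces the
// texture and conversion buffer to be rebuilt by clearing the cached size
// before re-running FrameSizeChange() with the old dimensions.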
int VideoChannelNSOpenGL::SetStreamSettings(int /*streamId*/, float startWidth,
                                            float startHeight, float stopWidth,
                                            float stopHeight)
{
    _owner->LockAGLCntx();

    _startWidth = startWidth;
    _stopWidth = stopWidth;
    _startHeight = startHeight;
    _stopHeight = stopHeight;

    int oldWidth = _width;
    int oldHeight = _height;
    int oldNumberOfStreams = _numberOfStreams;

    _width = 0;
    _height = 0;

    int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);

    _owner->UnlockAGLCntx();
    return retVal;
}

int VideoChannelNSOpenGL::SetStreamCropSettings(int /*streamId*/,
                                                float /*startWidth*/,
                                                float /*startHeight*/,
                                                float /*stopWidth*/,
                                                float /*stopHeight*/)
{
    return -1;
}

/*
 *
 *    VideoRenderNSOpenGL
 *
 */

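// Typical call sequence, as a sketch inferred from this file (the actual
// driver is the platform video render module, not shown here; variable names
// are illustrative):
//
//   VideoRenderNSOpenGL* renderer =
//       new VideoRenderNSOpenGL(cocoaView, false, moduleId);
//   renderer->Init();  // creates the mixing context, starts the update thread
//   VideoChannelNSOpenGL* channel =
//       renderer->CreateNSGLChannel(0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
//   renderer->StartRender();
//   // Per decoded frame:
//   channel->RenderFrame(0, videoFrame);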
VideoRenderNSOpenGL::VideoRenderNSOpenGL(CocoaRenderView *windowRef,
                                         bool fullScreen, int iId) :
_windowRef( (CocoaRenderView*)windowRef),
_fullScreen( fullScreen),
_id( iId),
_nsglContextCritSec( *CriticalSectionWrapper::CreateCriticalSection()),
_screenUpdateEvent(EventTimerWrapper::Create()),
_nsglContext( 0),
_nsglFullScreenContext( 0),
_fullScreenWindow( nil),
_windowRect( ),
_windowWidth( 0),
_windowHeight( 0),
_nsglChannels( ),
_zOrderToChannel( ),
_renderingIsPaused (FALSE),
_windowRefSuperView(NULL),
_windowRefSuperViewFrame(NSMakeRect(0,0,0,0))
{
    _screenUpdateThread.reset(new rtc::PlatformThread(
        ScreenUpdateThreadProc, this, "ScreenUpdateNSOpenGL"));
}

int VideoRenderNSOpenGL::ChangeWindow(CocoaRenderView* newWindowRef)
{
    LockAGLCntx();

    _windowRef = newWindowRef;

    if(CreateMixingContext() == -1)
    {
        UnlockAGLCntx();
        return -1;
    }

    int error = 0;
    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
    while (it!= _nsglChannels.end())
    {
        error |= (it->second)->ChangeContext(_nsglContext);
        it++;
    }
    if(error != 0)
    {
        UnlockAGLCntx();
        return -1;
    }

    UnlockAGLCntx();
    return 0;
}

/* Check if the thread and event already exist.
 * If so, they are simply restarted.
 * If not, return an error; they are created in the constructor and Init().
 */
int32_t VideoRenderNSOpenGL::StartRender()
{
    LockAGLCntx();

    const unsigned int MONITOR_FREQ = 60;
    if(TRUE == _renderingIsPaused)
    {
        WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
                     "Restarting screenUpdateThread");

        // We already have the thread and the event; most likely StopRender()
        // was called and they were paused.
        _screenUpdateThread->Start();
        if (FALSE ==
            _screenUpdateEvent->StartTimer(true, 1000 / MONITOR_FREQ)) {
            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                         "Failed to restart screenUpdateThread or "
                         "screenUpdateEvent");
            UnlockAGLCntx();
            return -1;
        }

        _screenUpdateThread->SetPriority(rtc::kRealtimePriority);

        UnlockAGLCntx();
        return 0;
    }

    if (!_screenUpdateThread)
    {
        WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
                     "Failed to start screenUpdateThread");
        UnlockAGLCntx();
        return -1;
    }

    UnlockAGLCntx();
    return 0;
}

int32_t VideoRenderNSOpenGL::StopRender()
{
    LockAGLCntx();

    /* The code below is functional
     * but it pauses for several seconds
     */

    // Pause the update thread and the event timer
    if(!_screenUpdateThread || !_screenUpdateEvent)
    {
        _renderingIsPaused = TRUE;

        UnlockAGLCntx();
        return 0;
    }

    _screenUpdateThread->Stop();
    if (FALSE == _screenUpdateEvent->StopTimer()) {
        _renderingIsPaused = FALSE;

        UnlockAGLCntx();
        return -1;
    }

    _renderingIsPaused = TRUE;

    UnlockAGLCntx();
    return 0;
}

int VideoRenderNSOpenGL::configureNSOpenGLView()
{
    return 0;
}

int VideoRenderNSOpenGL::configureNSOpenGLEngine()
{
    LockAGLCntx();

    // Disable not needed functionality to increase performance
    glDisable(GL_DITHER);
    glDisable(GL_ALPHA_TEST);
    glDisable(GL_STENCIL_TEST);
    glDisable(GL_FOG);
    glDisable(GL_TEXTURE_2D);
    glPixelZoom(1.0, 1.0);
    glDisable(GL_BLEND);
    glDisable(GL_DEPTH_TEST);
    glDepthMask(GL_FALSE);
    glDisable(GL_CULL_FACE);

    // Set texture parameters
    glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S,
                    GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T,
                    GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE,
                    GL_STORAGE_SHARED_APPLE);

    if (GetWindowRect(_windowRect) == -1)
    {
        UnlockAGLCntx();
        return -1;
    }

    if (_windowWidth != (_windowRect.right - _windowRect.left)
        || _windowHeight != (_windowRect.bottom - _windowRect.top))
    {
        _windowWidth = _windowRect.right - _windowRect.left;
        _windowHeight = _windowRect.bottom - _windowRect.top;
    }
    glViewport(0, 0, _windowWidth, _windowHeight);

    // Synchronize buffer swaps with vertical refresh rate
    GLint swapInt = 1;
    [_nsglContext setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];

    UnlockAGLCntx();
    return 0;
}

int VideoRenderNSOpenGL::setRenderTargetWindow()
{
    LockAGLCntx();

    GLuint attribs[] =
    {
        NSOpenGLPFAColorSize, 24,
        NSOpenGLPFAAlphaSize, 8,
        NSOpenGLPFADepthSize, 16,
        NSOpenGLPFAAccelerated,
        0
    };

    NSOpenGLPixelFormat* fmt = [[[NSOpenGLPixelFormat alloc] initWithAttributes:
        (NSOpenGLPixelFormatAttribute*) attribs] autorelease];

    if(_windowRef)
    {
        [_windowRef initCocoaRenderView:fmt];
    }
    else
    {
        UnlockAGLCntx();
        return -1;
    }

    _nsglContext = [_windowRef nsOpenGLContext];
    [_nsglContext makeCurrentContext];

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    DisplayBuffers();

    UnlockAGLCntx();
    return 0;
}

int VideoRenderNSOpenGL::setRenderTargetFullScreen()
{
    LockAGLCntx();

    GLuint attribs[] =
    {
        NSOpenGLPFAColorSize, 24,
        NSOpenGLPFAAlphaSize, 8,
        NSOpenGLPFADepthSize, 16,
        NSOpenGLPFAAccelerated,
        0
    };

    NSOpenGLPixelFormat* fmt = [[[NSOpenGLPixelFormat alloc] initWithAttributes:
        (NSOpenGLPixelFormatAttribute*) attribs] autorelease];

    // Store the original superview and frame for use when exiting full screen
    _windowRefSuperViewFrame = [_windowRef frame];
    _windowRefSuperView = [_windowRef superview];

    // Create a new fullscreen window
    NSRect screenRect = [[NSScreen mainScreen] frame];
    [_windowRef setFrame:screenRect];
    [_windowRef setBounds:screenRect];

    _fullScreenWindow = [[CocoaFullScreenWindow alloc] init];
    [_fullScreenWindow grabFullScreen];
    [[[_fullScreenWindow window] contentView] addSubview:_windowRef];

    if(_windowRef)
    {
        [_windowRef initCocoaRenderViewFullScreen:fmt];
    }
    else
    {
        UnlockAGLCntx();
        return -1;
    }

    _nsglContext = [_windowRef nsOpenGLContext];
    [_nsglContext makeCurrentContext];

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    DisplayBuffers();

    UnlockAGLCntx();
    return 0;
}

VideoRenderNSOpenGL::~VideoRenderNSOpenGL()
{
    if(_fullScreen)
    {
        if(_fullScreenWindow)
        {
            // Detach CocoaRenderView from the full screen view back to
            // its original parent.
            [_windowRef removeFromSuperview];
            if(_windowRefSuperView)
            {
                [_windowRefSuperView addSubview:_windowRef];
                [_windowRef setFrame:_windowRefSuperViewFrame];
            }

            WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, 0,
                         "%s:%d Attempting to release fullscreen window",
                         __FUNCTION__, __LINE__);
            [_fullScreenWindow releaseFullScreen];
        }
    }

    // Signal the event to exit the thread, then delete it
    rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();

    if (tmpPtr)
    {
        _screenUpdateEvent->Set();
        _screenUpdateEvent->StopTimer();

        tmpPtr->Stop();
        delete tmpPtr;
        delete _screenUpdateEvent;
        _screenUpdateEvent = NULL;
    }

    if (_nsglContext != 0)
    {
        [_nsglContext makeCurrentContext];
        _nsglContext = nil;
    }

    // Delete all channels
    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
    while (it!= _nsglChannels.end())
    {
        delete it->second;
        _nsglChannels.erase(it);
        it = _nsglChannels.begin();
    }
    _nsglChannels.clear();

    // Clean the zOrder map
    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
    while(zIt != _zOrderToChannel.end())
    {
        _zOrderToChannel.erase(zIt);
        zIt = _zOrderToChannel.begin();
    }
    _zOrderToChannel.clear();
}

/* static */
int VideoRenderNSOpenGL::GetOpenGLVersion(int& /*nsglMajor*/,
                                          int& /*nsglMinor*/)
{
    return -1;
}

int VideoRenderNSOpenGL::Init()
{
    LockAGLCntx();
    if (!_screenUpdateThread)
    {
        UnlockAGLCntx();
        return -1;
    }

    _screenUpdateThread->Start();
    _screenUpdateThread->SetPriority(rtc::kRealtimePriority);

    // Start the event triggering the render process
    unsigned int monitorFreq = 60;
    _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);

    if (CreateMixingContext() == -1)
    {
        UnlockAGLCntx();
        return -1;
    }

    UnlockAGLCntx();
    return 0;
}

VideoChannelNSOpenGL* VideoRenderNSOpenGL::CreateNSGLChannel(int channel,
    int zOrder, float startWidth, float startHeight, float stopWidth,
    float stopHeight)
{
    CriticalSectionScoped cs(&_nsglContextCritSec);

    if (HasChannel(channel))
    {
        return NULL;
    }

    if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
    {
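        // A channel with this zOrder already exists; it is left in place and
        // the new channel is still inserted alongside it in the multimap.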
    }

    VideoChannelNSOpenGL* newAGLChannel =
        new VideoChannelNSOpenGL(_nsglContext, _id, this);
    if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight,
                                         stopWidth, stopHeight) == -1)
    {
        if (newAGLChannel)
        {
            delete newAGLChannel;
            newAGLChannel = NULL;
        }

        return NULL;
    }

    _nsglChannels[channel] = newAGLChannel;
    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));

    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
                 "%s successfully created NSGL channel number %d",
                 __FUNCTION__, channel);

    return newAGLChannel;
}

int VideoRenderNSOpenGL::DeleteAllNSGLChannels()
{
    CriticalSectionScoped cs(&_nsglContextCritSec);

    std::map<int, VideoChannelNSOpenGL*>::iterator it;
    it = _nsglChannels.begin();

    while (it != _nsglChannels.end())
    {
        VideoChannelNSOpenGL* channel = it->second;
        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
                     "%s Deleting channel %d", __FUNCTION__, it->first);
        delete channel;
        it++;
    }
    _nsglChannels.clear();
    return 0;
}

int32_t VideoRenderNSOpenGL::DeleteNSGLChannel(const uint32_t channel)
{
    CriticalSectionScoped cs(&_nsglContextCritSec);

    std::map<int, VideoChannelNSOpenGL*>::iterator it;
    it = _nsglChannels.find(channel);
    if (it != _nsglChannels.end())
    {
        delete it->second;
        _nsglChannels.erase(it);
    }
    else
    {
        return -1;
    }

    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
    while( zIt != _zOrderToChannel.end())
    {
        if (zIt->second == (int)channel)
        {
            _zOrderToChannel.erase(zIt);
            break;
        }
        zIt++;
    }

    return 0;
}

int32_t VideoRenderNSOpenGL::GetChannelProperties(const uint16_t streamId,
                                                  uint32_t& zOrder,
                                                  float& left,
                                                  float& top,
                                                  float& right,
                                                  float& bottom)
{
    CriticalSectionScoped cs(&_nsglContextCritSec);

    bool channelFound = false;

    // Loop through all channels until we find a match.
    // From that, get the zOrder.
    // From that, get T, L, R, B.
    for (std::multimap<int, int>::reverse_iterator rIt =
             _zOrderToChannel.rbegin();
         rIt != _zOrderToChannel.rend();
         rIt++)
    {
        if(streamId == rIt->second)
        {
            channelFound = true;

            // The multimap is keyed on zOrder and maps to the channel id.
            zOrder = rIt->first;

            std::map<int, VideoChannelNSOpenGL*>::iterator cIt =
                _nsglChannels.find(streamId);
            VideoChannelNSOpenGL* tempChannel = cIt->second;

            if(-1 == tempChannel->GetChannelProperties(left, top, right,
                                                       bottom))
            {
                return -1;
            }
            break;
        }
    }

    if(false == channelFound)
    {
        return -1;
    }

    return 0;
}

int VideoRenderNSOpenGL::StopThread()
{
    rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
                 "%s Stopping thread %p", __FUNCTION__, tmpPtr);

    if (tmpPtr)
    {
        _screenUpdateEvent->Set();
        tmpPtr->Stop();
        delete tmpPtr;
    }

    delete _screenUpdateEvent;
    _screenUpdateEvent = NULL;

    return 0;
}

bool VideoRenderNSOpenGL::IsFullScreen()
{
    CriticalSectionScoped cs(&_nsglContextCritSec);
    return _fullScreen;
}

bool VideoRenderNSOpenGL::HasChannels()
{
    CriticalSectionScoped cs(&_nsglContextCritSec);

    if (_nsglChannels.begin() != _nsglChannels.end())
    {
        return true;
    }
    return false;
}

bool VideoRenderNSOpenGL::HasChannel(int channel)
{
    CriticalSectionScoped cs(&_nsglContextCritSec);

    std::map<int, VideoChannelNSOpenGL*>::iterator it =
        _nsglChannels.find(channel);

    if (it != _nsglChannels.end())
    {
        return true;
    }
    return false;
}

int VideoRenderNSOpenGL::GetChannels(std::list<int>& channelList)
{
    CriticalSectionScoped cs(&_nsglContextCritSec);

    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();

    while (it != _nsglChannels.end())
    {
        channelList.push_back(it->first);
        it++;
    }

    return 0;
}

VideoChannelNSOpenGL* VideoRenderNSOpenGL::ConfigureNSGLChannel(int channel,
    int zOrder, float startWidth, float startHeight, float stopWidth,
    float stopHeight)
{
    CriticalSectionScoped cs(&_nsglContextCritSec);

    std::map<int, VideoChannelNSOpenGL*>::iterator it =
        _nsglChannels.find(channel);

    if (it != _nsglChannels.end())
    {
        VideoChannelNSOpenGL* aglChannel = it->second;
        if (aglChannel->SetStreamSettings(0, startWidth, startHeight,
                                          stopWidth, stopHeight) == -1)
        {
            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                         "%s failed to set stream settings: channel=%d "
                         "zOrder=%d startWidth=%f startHeight=%f stopWidth=%f "
                         "stopHeight=%f",
                         __FUNCTION__, channel, zOrder, startWidth,
                         startHeight, stopWidth, stopHeight);
            return NULL;
        }
        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
                     "%s configuring channel=%d zOrder=%d startWidth=%f "
                     "startHeight=%f stopWidth=%f stopHeight=%f",
                     __FUNCTION__, channel, zOrder, startWidth, startHeight,
                     stopWidth, stopHeight);

        std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
        while(zIt != _zOrderToChannel.end())
        {
            if (zIt->second == channel)
            {
                if (zIt->first != zOrder)
                {
                    _zOrderToChannel.erase(zIt);
                    _zOrderToChannel.insert(
                        std::pair<int, int>(zOrder, channel));
                }
                break;
            }
            zIt++;
        }
        return aglChannel;
    }

    return NULL;
}

/*
 *
 *    Rendering process
 *
 */

bool VideoRenderNSOpenGL::ScreenUpdateThreadProc(void* obj)
{
    return static_cast<VideoRenderNSOpenGL*>(obj)->ScreenUpdateProcess();
}

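// One pass of the render loop: wait for the timer tick, track window size
// changes, and repaint the off-screen buffers if any channel has delivered a
// new frame since the last pass.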
bool VideoRenderNSOpenGL::ScreenUpdateProcess()
{
    _screenUpdateEvent->Wait(10);
    LockAGLCntx();

    if (!_screenUpdateThread)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                     "%s no screen update thread", __FUNCTION__);
        UnlockAGLCntx();
        return false;
    }

    [_nsglContext makeCurrentContext];

    if (GetWindowRect(_windowRect) == -1)
    {
        UnlockAGLCntx();
        return true;
    }

    if (_windowWidth != (_windowRect.right - _windowRect.left)
        || _windowHeight != (_windowRect.bottom - _windowRect.top))
    {
        _windowWidth = _windowRect.right - _windowRect.left;
        _windowHeight = _windowRect.bottom - _windowRect.top;
        glViewport(0, 0, _windowWidth, _windowHeight);
    }

    // Check if there are any updated buffers
    bool updated = false;
    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
    while (it != _nsglChannels.end())
    {
        VideoChannelNSOpenGL* aglChannel = it->second;
        aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
        aglChannel->IsUpdated(updated);
        if (updated)
        {
            break;
        }
        it++;
    }

    if (updated)
    {
        // At least one buffer is updated, we need to repaint the texture
        if (RenderOffScreenBuffers() != -1)
        {
            UnlockAGLCntx();
            return true;
        }
    }

    UnlockAGLCntx();
    return true;
}

/*
 *
 *    Functions for creating mixing buffers and screen settings
 *
 */

int VideoRenderNSOpenGL::CreateMixingContext()
{
    CriticalSectionScoped cs(&_nsglContextCritSec);

    if(_fullScreen)
    {
        if(-1 == setRenderTargetFullScreen())
        {
            return -1;
        }
    }
    else
    {
        if(-1 == setRenderTargetWindow())
        {
            return -1;
        }
    }

    configureNSOpenGLEngine();

    DisplayBuffers();

    GLenum glErr = glGetError();
    if (glErr)
    {
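        // GL errors from the mixing-context setup are read here but currently
        // ignored.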
    }

    return 0;
}

/*
 *
 *    Rendering functions
 *
 */

int VideoRenderNSOpenGL::RenderOffScreenBuffers()
{
    LockAGLCntx();

    // Get the current window size, it might have changed since last render.
    if (GetWindowRect(_windowRect) == -1)
    {
        UnlockAGLCntx();
        return -1;
    }

    [_nsglContext makeCurrentContext];
    glClear(GL_COLOR_BUFFER_BIT);

    // Loop through all channels, starting with the highest zOrder and ending
    // with the lowest.
    for (std::multimap<int, int>::reverse_iterator rIt =
             _zOrderToChannel.rbegin();
         rIt != _zOrderToChannel.rend();
         rIt++)
    {
        int channelId = rIt->second;
        std::map<int, VideoChannelNSOpenGL*>::iterator it =
            _nsglChannels.find(channelId);

        VideoChannelNSOpenGL* aglChannel = it->second;

        aglChannel->RenderOffScreenBuffer();
    }

    DisplayBuffers();

    UnlockAGLCntx();
    return 0;
}

/*
 *
 *    Help functions
 *
 *    All help functions assume external protection.
 *
 */

int VideoRenderNSOpenGL::DisplayBuffers()
{
    LockAGLCntx();

    glFinish();
    [_nsglContext flushBuffer];

    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
                 "%s glFinish and [_nsglContext flushBuffer]", __FUNCTION__);

    UnlockAGLCntx();
    return 0;
}

int VideoRenderNSOpenGL::GetWindowRect(Rect& rect)
{
    CriticalSectionScoped cs(&_nsglContextCritSec);

    if (_windowRef)
    {
        if(_fullScreen)
        {
            NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
            rect.bottom = 0;
            rect.left = 0;
            rect.right = mainDisplayRect.size.width;
            rect.top = mainDisplayRect.size.height;
        }
        else
        {
            rect.top = [_windowRef frame].origin.y;
            rect.left = [_windowRef frame].origin.x;
            rect.bottom = [_windowRef frame].origin.y +
                          [_windowRef frame].size.height;
            rect.right = [_windowRef frame].origin.x +
                         [_windowRef frame].size.width;
        }

        return 0;
    }
    else
    {
        return -1;
    }
}

int32_t VideoRenderNSOpenGL::SetText(const uint8_t /*textId*/,
                                     const uint8_t* /*text*/,
                                     const int32_t /*textLength*/,
                                     const uint32_t /*textColorRef*/,
                                     const uint32_t /*backgroundColorRef*/,
                                     const float /*left*/,
                                     const float /*top*/,
                                     const float /*right*/,
                                     const float /*bottom*/)
{
    return 0;
}

void VideoRenderNSOpenGL::LockAGLCntx()
{
    _nsglContextCritSec.Enter();
}

void VideoRenderNSOpenGL::UnlockAGLCntx()
{
    _nsglContextCritSec.Leave();
}

/*

bool VideoRenderNSOpenGL::SetFullScreen(bool fullscreen)
{
    NSRect mainDisplayRect, viewRect;

    // Create a screen-sized window on the display you want to take over.
    // Note, mainDisplayRect has a non-zero origin if the key window is on a
    // secondary display.
    mainDisplayRect = [[NSScreen mainScreen] frame];
    fullScreenWindow = [[NSWindow alloc] initWithContentRect:mainDisplayRect
                        styleMask:NSBorderlessWindowMask
                        backing:NSBackingStoreBuffered defer:YES];

    // Set the window level to be above the menu bar
    [fullScreenWindow setLevel:NSMainMenuWindowLevel+1];

    // Perform any other window configuration you desire
    [fullScreenWindow setOpaque:YES];
    [fullScreenWindow setHidesOnDeactivate:YES];

    // Create a view with a double-buffered OpenGL context and attach it to
    // the window. By specifying the non-fullscreen context as the
    // shareContext, we automatically inherit the OpenGL objects (textures,
    // etc) it has defined.
    viewRect = NSMakeRect(0.0, 0.0, mainDisplayRect.size.width,
                          mainDisplayRect.size.height);
    fullScreenView = [[MyOpenGLView alloc] initWithFrame:viewRect
                      shareContext:[openGLView openGLContext]];
    [fullScreenWindow setContentView:fullScreenView];

    // Show the window
    [fullScreenWindow makeKeyAndOrderFront:self];

    // Set the scene with the full-screen viewport and viewing transformation
    [scene setViewportRect:viewRect];

    // Assign the view's MainController to self
    [fullScreenView setMainController:self];

    if (!isAnimating) {
        // Mark the view as needing drawing to initialize its contents
        [fullScreenView setNeedsDisplay:YES];
    }
    else {
        // Start playing the animation
        [fullScreenView startAnimation];
    }
}

*/

}  // namespace webrtc

#endif  // COCOA_RENDERING