OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 363 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
374 } | 374 } |
375 | 375 |
376 if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0) { | 376 if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0) { |
377 return -1; | 377 return -1; |
378 } | 378 } |
379 WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, | 379 WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, |
380 "%s: AndroidNativeOpenGl2Channel done", __FUNCTION__); | 380 "%s: AndroidNativeOpenGl2Channel done", __FUNCTION__); |
381 return 0; | 381 return 0; |
382 } | 382 } |
383 | 383 |
384 int32_t AndroidNativeOpenGl2Channel::RenderFrame(const uint32_t /*streamId*/, | 384 void AndroidNativeOpenGl2Channel::OnFrame(const VideoFrame& videoFrame) { |
385 const VideoFrame& videoFrame) { | |
386 // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__); | 385 // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__); |
387 _renderCritSect.Enter(); | 386 _renderCritSect.Enter(); |
388 _bufferToRender = videoFrame; | 387 _bufferToRender = videoFrame; |
389 _renderCritSect.Leave(); | 388 _renderCritSect.Leave(); |
390 _renderer.ReDraw(); | 389 _renderer.ReDraw(); |
391 return 0; | |
392 } | 390 } |
393 | 391 |
394 /*Implements AndroidStream | 392 /*Implements AndroidStream |
395 * Calls the Java object and renders the buffer in _bufferToRender | 393 * Calls the Java object and renders the buffer in _bufferToRender |
396 */ | 394 */ |
397 void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) { | 395 void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) { |
398 //TickTime timeNow=TickTime::Now(); | 396 //TickTime timeNow=TickTime::Now(); |
399 | 397 |
400 //Draw the Surface | 398 //Draw the Surface |
401 jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid); | 399 jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid); |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
441 WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__); | 439 WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__); |
442 return renderChannel->CreateOpenGLNative(width, height); | 440 return renderChannel->CreateOpenGLNative(width, height); |
443 } | 441 } |
444 | 442 |
445 jint AndroidNativeOpenGl2Channel::CreateOpenGLNative( | 443 jint AndroidNativeOpenGl2Channel::CreateOpenGLNative( |
446 int width, int height) { | 444 int width, int height) { |
447 return _openGLRenderer.Setup(width, height); | 445 return _openGLRenderer.Setup(width, height); |
448 } | 446 } |
449 | 447 |
450 } // namespace webrtc | 448 } // namespace webrtc |
OLD | NEW |