OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2013 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2013 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 package org.webrtc; | 11 package org.webrtc; |
12 | 12 |
13 import android.annotation.TargetApi; | 13 import android.annotation.TargetApi; |
| 14 import android.graphics.Matrix; |
14 import android.media.MediaCodec; | 15 import android.media.MediaCodec; |
15 import android.media.MediaCodecInfo; | 16 import android.media.MediaCodecInfo; |
16 import android.media.MediaCodecInfo.CodecCapabilities; | 17 import android.media.MediaCodecInfo.CodecCapabilities; |
17 import android.media.MediaCodecList; | 18 import android.media.MediaCodecList; |
18 import android.media.MediaFormat; | 19 import android.media.MediaFormat; |
19 import android.opengl.GLES20; | 20 import android.opengl.GLES20; |
20 import android.os.Build; | 21 import android.os.Build; |
21 import android.os.Bundle; | 22 import android.os.Bundle; |
22 import android.view.Surface; | 23 import android.view.Surface; |
23 import java.nio.ByteBuffer; | 24 import java.nio.ByteBuffer; |
(...skipping 567 matching lines...)
591 GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); | 592 GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); |
592 drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height); | 593 drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height); |
593 eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs)); | 594 eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs)); |
594 return true; | 595 return true; |
595 } catch (RuntimeException e) { | 596 } catch (RuntimeException e) { |
596 Logging.e(TAG, "encodeTexture failed", e); | 597 Logging.e(TAG, "encodeTexture failed", e); |
597 return false; | 598 return false; |
598 } | 599 } |
599 } | 600 } |
600 | 601 |
| 602 /** |
| 603 * Encodes a new style VideoFrame. Called by JNI. |bufferIndex| is -1 if we are encoding in |
| 604 * surface mode. |
| 605 */ |
| 606 boolean encodeFrame(long nativeEncoder, boolean isKeyframe, VideoFrame frame, int bufferIndex) { |
| 607 checkOnMediaCodecThread(); |
| 608 try { |
| 609 long presentationTimestampUs = TimeUnit.NANOSECONDS.toMicros(frame.getTimestampNs()); |
| 610 checkKeyFrameRequired(isKeyframe, presentationTimestampUs); |
| 611 |
| 612 VideoFrame.Buffer buffer = frame.getBuffer(); |
| 613 if (buffer instanceof VideoFrame.TextureBuffer) { |
| 614 VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) buffer; |
| 615 eglBase.makeCurrent(); |
| 616 // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway, |
| 617 // but it's a workaround for bug webrtc:5147. |
| 618 GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); |
| 619 drawer.drawOes(textureBuffer.getTextureId(), |
| 620 RendererCommon.convertMatrixFromAndroidGraphicsMatrix( |
| 621 textureBuffer.getTransformMatrix()), |
| 622 width, height, 0, 0, width, height); |
| 623 eglBase.swapBuffers(frame.getTimestampNs()); |
| 624 } else { |
| 625 VideoFrame.I420Buffer i420Buffer = buffer.toI420(); |
| 626 nativeFillBuffer(nativeEncoder, bufferIndex, i420Buffer.getDataY(), i420Buffer.getStrideY(), |
| 627 i420Buffer.getDataU(), i420Buffer.getStrideU(), i420Buffer.getDataV(), |
| 628 i420Buffer.getStrideV()); |
| 629 i420Buffer.release(); |
| 630 // I420 consists of one full-resolution and two half-resolution planes. |
| 631 // 1 + 1 / 4 + 1 / 4 = 3 / 2 |
| 632 int yuvSize = width * height * 3 / 2; |
| 633 mediaCodec.queueInputBuffer(bufferIndex, 0, yuvSize, presentationTimestampUs, 0); |
| 634 } |
| 635 return true; |
| 636 } catch (RuntimeException e) { |
| 637 Logging.e(TAG, "encodeFrame failed", e); |
| 638 return false; |
| 639 } |
| 640 } |
| 641 |
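A minimal sketch (not part of this patch) of the I420 size arithmetic used in the new encodeFrame() above, assuming even width and height: the Y plane is full resolution and the U and V planes are each a quarter of it, so the total is width * height * 3 / 2 bytes. The helper name below is hypothetical.

    // Hypothetical helper, for illustration only.
    static int i420BufferSize(int width, int height) {
      int ySize = width * height;                  // full-resolution luma plane
      int chromaSize = (width / 2) * (height / 2); // one quarter-resolution chroma plane
      return ySize + 2 * chromaSize;               // == width * height * 3 / 2
    }
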
601 void release() { | 642 void release() { |
602 Logging.d(TAG, "Java releaseEncoder"); | 643 Logging.d(TAG, "Java releaseEncoder"); |
603 checkOnMediaCodecThread(); | 644 checkOnMediaCodecThread(); |
604 | 645 |
605 class CaughtException { | 646 class CaughtException { |
606 Exception e; | 647 Exception e; |
607 } | 648 } |
608 final CaughtException caughtException = new CaughtException(); | 649 final CaughtException caughtException = new CaughtException(); |
609 boolean stopHung = false; | 650 boolean stopHung = false; |
610 | 651 |
(...skipping 263 matching lines...)
874 boolean releaseOutputBuffer(int index) { | 915 boolean releaseOutputBuffer(int index) { |
875 checkOnMediaCodecThread(); | 916 checkOnMediaCodecThread(); |
876 try { | 917 try { |
877 mediaCodec.releaseOutputBuffer(index, false); | 918 mediaCodec.releaseOutputBuffer(index, false); |
878 return true; | 919 return true; |
879 } catch (IllegalStateException e) { | 920 } catch (IllegalStateException e) { |
880 Logging.e(TAG, "releaseOutputBuffer failed", e); | 921 Logging.e(TAG, "releaseOutputBuffer failed", e); |
881 return false; | 922 return false; |
882 } | 923 } |
883 } | 924 } |
| 925 |
| 926 /** Fills the input buffer at the given index with data from the given byte buffers. */ |
| 927 private static native void nativeFillBuffer(long nativeEncoder, int inputBuffer, ByteBuffer dataY, |
| 928 int strideY, ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV); |
884 } | 929 } |
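The texture path in encodeFrame() converts the frame's android.graphics.Matrix into the 4x4 column-major array that drawOes() expects via RendererCommon.convertMatrixFromAndroidGraphicsMatrix(). A rough sketch of that kind of conversion (an illustration under the usual column-major GL convention, not WebRTC's actual implementation; the helper name is made up):

    // Illustration only: embed a row-major 3x3 graphics matrix into a
    // column-major 4x4 GL matrix, passing the z axis through unchanged.
    static float[] toGlMatrix(android.graphics.Matrix matrix) {
      float[] v = new float[9];
      matrix.getValues(v); // row-major: scaleX, skewX, transX, skewY, scaleY, transY, persp0, persp1, persp2
      return new float[] {
          v[0], v[3], 0, v[6], // column 0
          v[1], v[4], 0, v[7], // column 1
          0,    0,    1, 0,    // column 2
          v[2], v[5], 0, v[8], // column 3
      };
    }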