| Index: webrtc/api/android/java/src/org/webrtc/VideoFileRenderer.java
|
| diff --git a/webrtc/api/android/java/src/org/webrtc/VideoFileRenderer.java b/webrtc/api/android/java/src/org/webrtc/VideoFileRenderer.java
|
| new file mode 100644
|
| index 0000000000000000000000000000000000000000..379d1f4105412732b28b98694706b244122280d1
|
| --- /dev/null
|
| +++ b/webrtc/api/android/java/src/org/webrtc/VideoFileRenderer.java
|
| @@ -0,0 +1,136 @@
|
| +/*
|
| + * Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
| + *
|
| + * Use of this source code is governed by a BSD-style license
|
| + * that can be found in the LICENSE file in the root of the source
|
| + * tree. An additional intellectual property rights grant can be found
|
| + * in the file PATENTS. All contributing project authors may
|
| + * be found in the AUTHORS file in the root of the source tree.
|
| + */
|
| +package org.webrtc;
|
| +
|
| +import android.os.Handler;
|
| +import android.os.HandlerThread;
|
| +
|
| +import java.nio.ByteBuffer;
|
| +import java.io.FileOutputStream;
|
| +import java.io.IOException;
|
| +
|
| +/**
|
| + * Can be used to save the video frames to file.
|
| + */
|
| +public class VideoFileRenderer implements VideoRenderer.Callbacks {
|
| + private static final String TAG = "VideoFileRenderer";
|
| +
|
| + private final SurfaceTextureHelper.YuvConverter yuvConverter;
|
| + private final HandlerThread renderThread;
|
| + private final Handler renderThreadHandler;
|
| + private final FileOutputStream videoOutFile;
|
| + private final int outputFileWidth;
|
| + private final int outputFileHeight;
|
| + private final int outputFrameSize;
|
| + private final ByteBuffer outputFrameBuffer;
|
| +
|
| + public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight,
|
| + EglBase.Context sharedContext) throws IOException {
|
| + if ((outputFileWidth % 2) == 1 || (outputFileHeight % 2) == 1) {
|
| + throw new IllegalArgumentException("Does not support uneven width or height");
|
| + }
|
| + yuvConverter = new SurfaceTextureHelper.YuvConverter(sharedContext);
|
| +
|
| + this.outputFileWidth = outputFileWidth;
|
| + this.outputFileHeight = outputFileHeight;
|
| +
|
| + outputFrameSize = outputFileWidth * outputFileHeight * 3 / 2;
|
| + outputFrameBuffer = ByteBuffer.allocateDirect(outputFrameSize);
|
| +
|
| + videoOutFile = new FileOutputStream(outputFile);
|
| + videoOutFile.write(
|
| + ("YUV4MPEG2 C420 W" + outputFileWidth + " H" + outputFileHeight + " Ip F30:1 A1:1\n")
|
| + .getBytes());
|
| +
|
| + renderThread = new HandlerThread(TAG);
|
| + renderThread.start();
|
| + renderThreadHandler = new Handler(renderThread.getLooper());
|
| + }
|
| +
|
| + @Override
|
| + public void renderFrame(final VideoRenderer.I420Frame frame) {
|
| + renderThreadHandler.post(new Runnable() {
|
| + @Override
|
| + public void run() {
|
| + renderFrameOnRenderThread(frame);
|
| + }
|
| + });
|
| + }
|
| +
|
| + private void renderFrameOnRenderThread(VideoRenderer.I420Frame frame) {
|
| + float frameAspectRatio = (frame.rotationDegree % 180 == 0) ? (float) frame.width / frame.height
|
| + : (float) frame.height / frame.width;
|
| +
|
| + final float[] rotatedSamplingMatrix =
|
| + RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
|
| + final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
|
| + false, frameAspectRatio, (float) outputFileWidth / outputFileHeight);
|
| + final float[] texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
|
| +
|
| + if (!frame.yuvFrame) {
|
| + yuvConverter.convert(outputFrameBuffer, outputFileWidth, outputFileHeight, outputFileWidth,
|
| + frame.textureId, texMatrix);
|
| +
|
| + VideoRenderer.renderFrameDone(frame);
|
| +
|
| + try {
|
| + videoOutFile.write("FRAME\n".getBytes());
|
| +
|
| + int stride = outputFileWidth;
|
| + byte[] data = outputFrameBuffer.array();
|
| + int offset = outputFrameBuffer.arrayOffset();
|
| +
|
| + Logging.d(TAG, "arrayOffset(): " + outputFrameBuffer.arrayOffset() + " hasArray: "
|
| + + outputFrameBuffer.hasArray());
|
| + // Write Y
|
| + videoOutFile.write(data, offset, outputFileWidth * outputFileHeight);
|
| +
|
| + // Write U
|
| + for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
|
| + videoOutFile.write(data, offset + r * stride, stride / 2);
|
| + }
|
| +
|
| + // Write V
|
| + for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
|
| + videoOutFile.write(data, offset + r * stride + stride / 2, stride / 2);
|
| + }
|
| + } catch (IOException e) {
|
| + Logging.e(TAG, "Failed to write to file for video out");
|
| + }
|
| + } else {
|
| + try {
|
| + videoOutFile.write("FRAME\n".getBytes());
|
| +
|
| + nativeI420Scale(frame.yuvPlanes[0], frame.yuvStrides[0], frame.yuvPlanes[1],
|
| + frame.yuvStrides[1], frame.yuvPlanes[2], frame.yuvStrides[2], frame.width, frame.height,
|
| + outputFrameBuffer, outputFileWidth, outputFileHeight);
|
| + videoOutFile.write(
|
| + outputFrameBuffer.array(), outputFrameBuffer.arrayOffset(), outputFrameSize);
|
| + } catch (IOException e) {
|
| + Logging.e(TAG, "Failed to write to file for video out");
|
| + throw new RuntimeException(e);
|
| + } finally {
|
| + VideoRenderer.renderFrameDone(frame);
|
| + }
|
| + }
|
| + }
|
| +
|
| + public void release() {
|
| + try {
|
| + videoOutFile.close();
|
| + } catch (IOException e) {
|
| + Logging.d(TAG, "Error closing output video file");
|
| + }
|
| + }
|
| +
|
| + public static native void nativeI420Scale(ByteBuffer srcY, int strideY, ByteBuffer srcU,
|
| + int strideU, ByteBuffer srcV, int strideV, int width, int height, ByteBuffer dst,
|
| + int dstWidth, int dstHeight);
|
| +}
|
|
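For context, a minimal wiring sketch (not part of this change): it assumes the org.webrtc Java API in this tree, an already-created EglBase, and a remote VideoTrack; the output path and the 640x480 dimensions are placeholders.

    // Sketch only; not part of this patch. Path and dimensions are examples.
    import org.webrtc.EglBase;
    import org.webrtc.VideoFileRenderer;
    import org.webrtc.VideoRenderer;
    import org.webrtc.VideoTrack;

    import java.io.IOException;

    class RemoteTrackRecorder {
      private final VideoTrack track;
      private final VideoFileRenderer fileRenderer;
      private final VideoRenderer renderer;

      RemoteTrackRecorder(VideoTrack remoteVideoTrack, EglBase.Context sharedContext)
          throws IOException {
        track = remoteVideoTrack;
        // Incoming frames are scaled to 640x480 and appended to the .y4m file.
        fileRenderer =
            new VideoFileRenderer("/sdcard/remote_video.y4m", 640, 480, sharedContext);
        renderer = new VideoRenderer(fileRenderer);
        track.addRenderer(renderer);
      }

      void stop() {
        track.removeRenderer(renderer);
        fileRenderer.release();
      }
    }

Note that the constructor throws IOException if the output file cannot be opened, and that release() only closes the file; the HandlerThread started in the constructor is left running.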
|
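Also for reference: the output is YUV4MPEG2 (.y4m). With the hypothetical 640x480 configuration above, the constructor allocates 640 * 480 * 3 / 2 = 460800 bytes per frame and writes the stream header

    YUV4MPEG2 C420 W640 H480 Ip F30:1 A1:1

after which every frame is the ASCII line "FRAME" followed by the raw I420 planes in Y, U, V order. The header always declares 30 fps (F30:1), independent of how often renderFrame() is actually called.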