Index: webrtc/api/android/java/src/org/webrtc/ScreenCapturerAndroid.java |
diff --git a/webrtc/api/android/java/src/org/webrtc/ScreenCapturerAndroid.java b/webrtc/api/android/java/src/org/webrtc/ScreenCapturerAndroid.java |
new file mode 100644 |
index 0000000000000000000000000000000000000000..299e3ecf7fab32fe3269310cf0d1f7eb9b1a2712 |
--- /dev/null |
+++ b/webrtc/api/android/java/src/org/webrtc/ScreenCapturerAndroid.java |
@@ -0,0 +1,163 @@ |
+/* |
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved. |
+ * |
+ * Use of this source code is governed by a BSD-style license |
+ * that can be found in the LICENSE file in the root of the source |
+ * tree. An additional intellectual property rights grant can be found |
+ * in the file PATENTS. All contributing project authors may |
+ * be found in the AUTHORS file in the root of the source tree. |
+ */ |
+ |
+package org.webrtc; |
+ |
+import android.content.Context; |
+import android.hardware.display.VirtualDisplay; |
+import android.view.Surface; |
+ |
+import java.util.ArrayList; |
+import java.util.List; |
+ |
+/** |
+ * An implementation of VideoCapturer to capture the screen content as a video stream. |
+ * Capturing is done by {@code MediaProjection} on a {@code SurfaceTexture}. We interact with this |
+ * {@code SurfaceTexture} using a {@code SurfaceTextureHelper}. |
+ * The {@code SurfaceTextureHelper} is created by the native code and passed to this capturer in |
+ * {@code VideoCapturer.initialize()}. On receiving a new frame, this capturer passes it |
+ * as a texture to the native code via {@code CapturerObserver.onTextureFrameCaptured()}. This takes |
+ * place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame, |
+ * the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new |
+ * frames. At any time, at most one frame is being processed. |
+ * |
+ * Note that startCapture(), stopCapture(), and dispose() are called from native code. |
+ * Normally, a Java application should interact with the {@code VideoSource} API, which indirectly calls |
+ * these methods on the underlying {@code VideoCapturer}. |
+ */ |
magjed_webrtc
2016/08/24 12:19:21
Add @TargetApi(20) here. You need to import androi
arsany
2016/08/26 02:07:36
We actually need API 21 for the media projection.
|
+public class ScreenCapturerAndroid implements |
+ VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener { |
+ |
+ private final static String TAG = "ScreenCapturerAndroid"; |
magjed_webrtc
2016/08/24 12:19:22
This is currently not used, so you can remove it.
arsany
2016/08/26 02:07:36
Done.
|
+ |
+ private int width; |
+ private int height; |
+ private VirtualDisplay virtualDisplay; |
+ private SurfaceTextureHelper surfaceTextureHelper; |
+ private CapturerObserver capturerObserver; |
+ private long numCapturedFrames = 0; |
+ |
+  // True once the capturer has been disposed. No methods should be called after that; |
+  // callers must create a new instance instead. |
+ private boolean isDisposed; |
+ |
+ |
+ /** |
+ * Constructs a new Screen Capturer. |
+ **/ |
+ public ScreenCapturerAndroid() { |
+ isDisposed = false; |
+ } |
+ |
+ /** |
+ * Setup the capturer with the given VirtualDisplay and capture dimensions. |
+ * This has to be called before the capturer is started the first time. |
+ * If the dimensions of the captured screen change (e.g. phone orientation changed), |
+ * stop the video source, call this method with the new dimensions, and restart the source again. |
+ */ |
+ public void setup(VirtualDisplay virtualDisplay, int width, int height) { |
magjed_webrtc
2016/08/24 12:19:22
What's the benefit of creating the VirtualDisplay
arsany
2016/08/26 02:07:36
Originally I made this class create and manage the
magjed_webrtc
2016/08/26 10:23:49
I see. I agree that we should keep the class self-
|
+ checkNotDisposed(); |
+ this.width = width; |
+ this.height = height; |
+ this.virtualDisplay = virtualDisplay; |
+ } |
+ |
+ private void checkNotDisposed() { |
+ if (isDisposed) { |
+ throw new RuntimeException("capturer is disposed."); |
+ } |
+ } |
+ |
+ @Override |
+ public synchronized List<CameraEnumerationAndroid.CaptureFormat> getSupportedFormats() { |
+ List<CameraEnumerationAndroid.CaptureFormat> supportedFormats = new ArrayList<>(); |
+ supportedFormats.add(new CameraEnumerationAndroid.CaptureFormat( |
+ width, height, 1 /* minFrameRate */, 30 /* maxFrameRate */)); |
+ return supportedFormats; |
+ } |
+ |
+ @Override |
+ public synchronized void initialize( |
+ final SurfaceTextureHelper surfaceTextureHelper, |
+ final Context ignored_applicationContext, |
+ final VideoCapturer.CapturerObserver capturerObserver) { |
+ checkNotDisposed(); |
+ |
+ if (capturerObserver == null) { |
+ throw new RuntimeException("capturerObserver not set."); |
+ } |
+ this.capturerObserver = capturerObserver; |
+ |
+ if (surfaceTextureHelper == null) { |
+ throw new RuntimeException("surfaceTextureHelper not set."); |
+ } |
+ this.surfaceTextureHelper = surfaceTextureHelper; |
+ } |
+ |
+ // Initially called by native code. This can also be called from native code when the |
+ // enclosing VideoSource is "restarted" after being stopped. |
+ @Override |
+ public synchronized void startCapture( |
+ final int ignored_width, |
+ final int ignored_height, |
+ final int ignored_framerate) { |
+ checkNotDisposed(); |
+ |
+ this.surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height); |
+ virtualDisplay.setSurface(new Surface(surfaceTextureHelper.getSurfaceTexture())); |
magjed_webrtc
2016/08/24 12:19:22
This function requires API 20. If we create the Vi
arsany
2016/08/26 02:07:36
I removed this function since I made this class cr
|
+ capturerObserver.onCapturerStarted(true); |
+ surfaceTextureHelper.startListening(ScreenCapturerAndroid.this); |
+ } |
+ |
+ // Called by native code to pause capturing. |
+ @Override |
+ public synchronized void stopCapture() { |
+ checkNotDisposed(); |
+ |
+ ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() { |
+ @Override |
+ public void run() { |
+ surfaceTextureHelper.stopListening(); |
+ } |
+ }); |
+ } |
+ |
+ // Called from native code to dispose the capturer when the enclosing VideoSource is disposed. |
+ // The native code calls stopCapture() before calling this method. |
+ @Override |
+ public synchronized void dispose() { |
+ isDisposed = true; |
+ surfaceTextureHelper = null; |
+ capturerObserver = null; |
+ } |
+ |
+ @Override |
+ public void onOutputFormatRequest(int width, int height, int framerate) {} |
magjed_webrtc
2016/08/24 12:19:22
You can implement this with:
surfaceTextureHelper.
arsany
2016/08/26 02:07:36
Done.
|
+ |
+ @Override |
+ public void changeCaptureFormat(int width, int height, int framerate) {} |
magjed_webrtc
2016/08/24 12:19:21
Does this function make sense for screencast? We s
arsany
2016/08/26 02:07:36
We actually need this functionality, especially wh
|
+ |
+  // This is called on the internal looper thread of {@code SurfaceTextureHelper}. |
+ @Override |
+ public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) { |
+ numCapturedFrames++; |
+ capturerObserver.onTextureFrameCaptured(width, height, oesTextureId, transformMatrix, |
+ 0 /* rotation */, timestampNs); |
+ } |
+ |
+ @Override |
+ public boolean isScreencast() { |
+ return true; |
+ } |
+ |
+ public long getNumCapturedFrames() { |
+ return numCapturedFrames; |
+ } |
+} |