Index: webrtc/api/android/java/src/org/webrtc/VideoCapturer.java |
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java b/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java |
index 452009b3aceb301f63d95757aaab1b60a5100736..afa3a0563b8dd3211b7c43de121540f4eb793b8b 100644 |
--- a/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java |
+++ b/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java |
@@ -88,14 +88,21 @@ public interface VideoCapturer { |
List<CameraEnumerationAndroid.CaptureFormat> getSupportedFormats(); |
/** |
+ * This function is used to initialize the camera thread, the Android application context, and the |
+ * capture observer. It will be called only once and before any startCapture() request. The |
+ * camera thread is guaranteed to be valid until dispose() is called. If the VideoCapturer wants |
+ * to deliver texture frames, it should do this by rendering on the SurfaceTexture in |
+ * |surfaceTextureHelper|, registering itself as a listener, and forwarding the texture frames to |
+ * CapturerObserver.onTextureFrameCaptured(). |
+ */ |
+ void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext, |
+ CapturerObserver capturerObserver); |
+ |
+ /** |
* Start capturing frames in a format that is as close as possible to |width| x |height| and |
- * |framerate|. If the VideoCapturer wants to deliver texture frames, it should do this by |
- * rendering on the SurfaceTexture in |surfaceTextureHelper|, register itself as a listener, |
- * and forward the texture frames to CapturerObserver.onTextureFrameCaptured(). |
+ * |framerate|. |
*/ |
- void startCapture( |
- int width, int height, int framerate, SurfaceTextureHelper surfaceTextureHelper, |
- Context applicationContext, CapturerObserver frameObserver); |
+ void startCapture(int width, int height, int framerate); |
/** |
* Stop capturing. This function should block until capture is actually stopped. |