Index: webrtc/api/java/jni/androidvideocapturer_jni.cc
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.cc b/webrtc/api/java/jni/androidvideocapturer_jni.cc
index 615fcb32d80c30895e4958215e96be5ef5ed9dda..2b2df1602213a489b561f755c9b56b7954fcc1d9 100644
--- a/webrtc/api/java/jni/androidvideocapturer_jni.cc
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.cc
@@ -30,17 +30,16 @@ int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
   return 0;
 }
 
-AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni,
-                                                 jobject j_video_capturer,
-                                                 jobject j_egl_context)
+AndroidVideoCapturerJni::AndroidVideoCapturerJni(
+    JNIEnv* jni,
+    jobject j_video_capturer,
+    rtc::scoped_refptr<SurfaceTextureHelper> helper)
     : j_video_capturer_(jni, j_video_capturer),
       j_video_capturer_class_(jni, FindClass(jni, "org/webrtc/VideoCapturer")),
       j_observer_class_(
           jni,
-          FindClass(jni,
-                    "org/webrtc/VideoCapturer$NativeObserver")),
-      surface_texture_helper_(SurfaceTextureHelper::create(
-          jni, "Camera SurfaceTextureHelper", j_egl_context)),
+          FindClass(jni, "org/webrtc/VideoCapturer$NativeObserver")),
+      surface_texture_helper_(helper),
       capturer_(nullptr) {
   LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
   thread_checker_.DetachFromThread();
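
The constructor no longer creates the SurfaceTextureHelper itself, so the removed
SurfaceTextureHelper::create() call has to move to whatever now constructs
AndroidVideoCapturerJni. That call site is not part of this patch; the following is
only a minimal sketch of it, and the RefCountedObject allocation is an assumption
about how the delegate is created there.

    // Sketch only, not part of the patch. SurfaceTextureHelper::create() is
    // the call removed from the constructor above; everything else here is an
    // assumed call-site shape.
    rtc::scoped_refptr<SurfaceTextureHelper> helper = SurfaceTextureHelper::create(
        jni, "Camera SurfaceTextureHelper", j_egl_context);
    rtc::scoped_refptr<AndroidVideoCapturerJni> delegate(
        new rtc::RefCountedObject<AndroidVideoCapturerJni>(jni, j_video_capturer,
                                                           helper));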
@@ -169,21 +168,13 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
                                                   int height,
                                                   int rotation,
                                                   int64_t timestamp_ns) {
-  const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
-  const uint8_t* vu_plane = y_plane + width * height;
-
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
-      buffer_pool_.CreateBuffer(width, height);
-  libyuv::NV21ToI420(
-      y_plane, width,
-      vu_plane, width,
-      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
-      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
-      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
-      width, height);
-  AsyncCapturerInvoke("OnIncomingFrame",
-                      &webrtc::AndroidVideoCapturer::OnIncomingFrame,
-                      buffer, rotation, timestamp_ns);
+  RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+             rotation == 270);
+  rtc::CritScope cs(&capturer_lock_);
+  capturer_->OnNV21Frame(static_cast<uint8_t*>(video_frame), length,
+                         width, height,
+                         static_cast<webrtc::VideoRotation>(rotation),
+                         timestamp_ns);
 }
 
 void AndroidVideoCapturerJni::OnTextureFrame(int width,
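
For review context, the NV21-to-I420 conversion that the deleted block performed now
has to happen behind webrtc::AndroidVideoCapturer::OnNV21Frame, which this patch does
not show. The sketch below is lifted from the removed lines, with comments added;
buffer_pool_ is the frame-buffer pool member this class used before the change.

    // Conversion removed above, shown for reference only.
    const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
    const uint8_t* vu_plane = y_plane + width * height;  // NV21: interleaved VU after Y.
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
        buffer_pool_.CreateBuffer(width, height);
    libyuv::NV21ToI420(y_plane, width,   // source Y plane and stride
                       vu_plane, width,  // source VU plane and stride
                       buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
                       buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
                       buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
                       width, height);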
@@ -191,12 +182,14 @@ void AndroidVideoCapturerJni::OnTextureFrame(int width,
                                              int rotation,
                                              int64_t timestamp_ns,
                                              const NativeHandleImpl& handle) {
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
-      surface_texture_helper_->CreateTextureFrame(width, height, handle));
-
-  AsyncCapturerInvoke("OnIncomingFrame",
-                      &webrtc::AndroidVideoCapturer::OnIncomingFrame,
-                      buffer, rotation, timestamp_ns);
+  RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+             rotation == 270);
+  rtc::CritScope cs(&capturer_lock_);
+  // TODO(nisse): Give the capturer access to the SurfaceTextureHelper
+  // in some better way.
+  capturer_->OnTextureFrame(width, height,
+                            static_cast<webrtc::VideoRotation>(rotation),
+                            timestamp_ns, handle);
 }
 
 void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
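
The texture path used to wrap the native handle into a VideoFrameBuffer right here
before dispatching to the capturer; per the TODO in the new code, the capturer now
needs its own access to the SurfaceTextureHelper to do the equivalent. For reference,
the removed wrapping and dispatch were:

    // Removed code, shown for reference only; where the equivalent lives after
    // this patch is left open by the TODO above.
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
        surface_texture_helper_->CreateTextureFrame(width, height, handle));
    AsyncCapturerInvoke("OnIncomingFrame",
                        &webrtc::AndroidVideoCapturer::OnIncomingFrame,
                        buffer, rotation, timestamp_ns);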