Index: webrtc/api/androidvideocapturer.cc
diff --git a/webrtc/api/androidvideocapturer.cc b/webrtc/api/androidvideocapturer.cc
index 71a94fedb060b584c02c85469889558128856ac3..56168a9b1f472734033d3b65d1c42b74c82319cf 100644
--- a/webrtc/api/androidvideocapturer.cc
+++ b/webrtc/api/androidvideocapturer.cc
@@ -13,109 +13,19 @@
 #include <memory>
 #include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/api/java/jni/surfacetexturehelper_jni.h"
 #include "webrtc/base/common.h"
 #include "webrtc/base/timeutils.h"
 #include "webrtc/media/engine/webrtcvideoframe.h"
 namespace webrtc {
-// A hack for avoiding deep frame copies in
-// cricket::VideoCapturer.SignalFrameCaptured() using a custom FrameFactory.
-// A frame is injected using UpdateCapturedFrame(), and converted into a
-// cricket::VideoFrame with CreateAliasedFrame(). UpdateCapturedFrame() should
-// be called before CreateAliasedFrame() for every frame.
-// TODO(magjed): Add an interface cricket::VideoCapturer::OnFrameCaptured()
-// for ref counted I420 frames instead of this hack.
-class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
- public:
-  explicit FrameFactory(
-      const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
-      : delegate_(delegate) {
-    // Create a CapturedFrame that only contains header information, not the
-    // actual pixel data.
-    captured_frame_.pixel_height = 1;
-    captured_frame_.pixel_width = 1;
-    captured_frame_.data = nullptr;
-    captured_frame_.data_size = cricket::CapturedFrame::kUnknownDataSize;
-    captured_frame_.fourcc = static_cast<uint32_t>(cricket::FOURCC_ANY);
-  }
-
-  void UpdateCapturedFrame(
-      const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
-      int rotation,
-      int64_t time_stamp_in_ns) {
-    RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
-               rotation == 270);
-    buffer_ = buffer;
-    captured_frame_.width = buffer->width();
-    captured_frame_.height = buffer->height();
-    captured_frame_.time_stamp = time_stamp_in_ns;
-    captured_frame_.rotation = static_cast<webrtc::VideoRotation>(rotation);
-  }
-
-  void ClearCapturedFrame() {
-    buffer_ = nullptr;
-    captured_frame_.width = 0;
-    captured_frame_.height = 0;
-    captured_frame_.time_stamp = 0;
-  }
-
-  const cricket::CapturedFrame* GetCapturedFrame() const {
-    return &captured_frame_;
-  }
-
-  cricket::VideoFrame* CreateAliasedFrame(
-      const cricket::CapturedFrame* captured_frame,
-      int dst_width,
-      int dst_height) const override {
-    // Check that captured_frame is actually our frame.
-    RTC_CHECK(captured_frame == &captured_frame_);
-    RTC_CHECK(buffer_->native_handle() == nullptr);
-
-    std::unique_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
-        ShallowCenterCrop(buffer_, dst_width, dst_height),
-        captured_frame->time_stamp, captured_frame->rotation));
-    // Caller takes ownership.
-    // TODO(magjed): Change CreateAliasedFrame() to return a std::unique_ptr.
-    return apply_rotation_ ? frame->GetCopyWithRotationApplied()->Copy()
-                           : frame.release();
-  }
-
-  cricket::VideoFrame* CreateAliasedFrame(
-      const cricket::CapturedFrame* input_frame,
-      int cropped_input_width,
-      int cropped_input_height,
-      int output_width,
-      int output_height) const override {
-    if (buffer_->native_handle() != nullptr) {
-      rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
-          static_cast<webrtc_jni::AndroidTextureBuffer*>(buffer_.get())
-              ->CropScaleAndRotate(cropped_input_width, cropped_input_height,
-                                   output_width, output_height,
-                                   apply_rotation_ ? input_frame->rotation
-                                                   : webrtc::kVideoRotation_0));
-      return new cricket::WebRtcVideoFrame(
-          scaled_buffer, input_frame->time_stamp,
-          apply_rotation_ ? webrtc::kVideoRotation_0 : input_frame->rotation);
-    }
-    return VideoFrameFactory::CreateAliasedFrame(input_frame,
-                                                 cropped_input_width,
-                                                 cropped_input_height,
-                                                 output_width,
-                                                 output_height);
-  }
-
- private:
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer_;
-  cricket::CapturedFrame captured_frame_;
-  rtc::scoped_refptr<AndroidVideoCapturerDelegate> delegate_;
-};
-
 AndroidVideoCapturer::AndroidVideoCapturer(
-    const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
+    const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate,
+    rtc::scoped_refptr<webrtc_jni::SurfaceTextureHelper> helper)
     : running_(false),
       delegate_(delegate),
-      frame_factory_(NULL),
+      surface_texture_helper_(helper),
       current_state_(cricket::CS_STOPPED) {
   thread_checker_.DetachFromThread();
   SetSupportedFormats(delegate_->GetSupportedFormats());
@@ -133,9 +43,6 @@ cricket::CaptureState AndroidVideoCapturer::Start(
   LOG(LS_INFO) << " AndroidVideoCapturer::Start " << capture_format.width << "x"
                << capture_format.height << "@" << fps;
-  frame_factory_ = new AndroidVideoCapturer::FrameFactory(delegate_.get());
-  set_frame_factory(frame_factory_);
-
   running_ = true;
   delegate_->Start(capture_format.width, capture_format.height, fps, this);
   SetCaptureFormat(&capture_format);
@@ -176,14 +83,107 @@ void AndroidVideoCapturer::OnCapturerStarted(bool success) {
   SetCaptureState(new_state);
 }
-void AndroidVideoCapturer::OnIncomingFrame(
-    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
-    int rotation,
-    int64_t time_stamp) {
-  RTC_CHECK(thread_checker_.CalledOnValidThread());
-  frame_factory_->UpdateCapturedFrame(buffer, rotation, time_stamp);
-  SignalFrameCaptured(this, frame_factory_->GetCapturedFrame());
-  frame_factory_->ClearCapturedFrame();
+void AndroidVideoCapturer::OnNV21Frame(const uint8_t* video_frame,
+                                       int length,
+                                       int width,
+                                       int height,
+                                       webrtc::VideoRotation rotation,
+                                       int64_t timestamp_ns) {
+  int adapted_width;
+  int adapted_height;
+  int crop_width;
+  int crop_height;
+  int crop_x;
+  int crop_y;
+
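+  // Ask the base class how this frame should be adapted: on success it
+  // returns the source crop rectangle (crop_width x crop_height at
+  // crop_x, crop_y) and the output size (adapted_width x adapted_height);
+  // on failure the frame is dropped.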
+  if (!AdaptFrame(width, height, &adapted_width, &adapted_height,
+                  &crop_width, &crop_height, &crop_x, &crop_y)) {
+    return;
+  }
+
+  int rotated_width = crop_width;
+  int rotated_height = crop_height;
+
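+  // If rotation is applied here (rather than left to the renderer), a 90 or
+  // 270 degree rotation swaps width and height of both the rotated source
+  // crop and the adapted output.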
+  if (apply_rotation() && (rotation == webrtc::kVideoRotation_90 ||
+                           rotation == webrtc::kVideoRotation_270)) {
+    std::swap(adapted_width, adapted_height);
+    std::swap(rotated_width, rotated_height);
+  }
+
+  // TODO(nisse): Move buffer_pool_ here (from AndroidVideoCapturerJni)
+  rtc::scoped_refptr<webrtc::I420Buffer> buffer(
+      new rtc::RefCountedObject<I420Buffer>(rotated_width, rotated_height));
+
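+  // NV21 layout: a full-resolution Y plane followed by an interleaved VU
+  // plane at half vertical and half horizontal resolution. Cropping is done
+  // by offsetting the source pointers into those planes.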
+  const uint8_t* y_plane = video_frame;
+  const uint8_t* uv_plane = video_frame + width * height;
+
+  // Can only crop at even pixels.
+  crop_x &= ~1;
+  crop_y &= ~1;
+
+  libyuv::NV12ToI420Rotate(
+      y_plane + width * crop_y + crop_x, width,         // Y plane.
+      uv_plane + width * (crop_y / 2) + crop_x, width,  // VU plane.
+      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+      // Swap U and V, since we have NV21, not NV12.
+      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+      crop_width, crop_height,
+      static_cast<libyuv::RotationMode>(
+          apply_rotation() ? rotation : webrtc::kVideoRotation_0));
+
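+  // The conversion above produced the cropped (and possibly rotated) size;
+  // scale to the adapted size if the adapter requested a different one.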
+  if (adapted_width != rotated_width || adapted_height != rotated_height) {
+    buffer = I420Buffer::CropAndScale(
+        buffer, 0, 0, rotated_width, rotated_height,
+        adapted_width, adapted_height);
+  }
+  // TODO(nisse): Use microsecond time instead.
+  OnFrame(cricket::WebRtcVideoFrame(
+              buffer, timestamp_ns,
+              apply_rotation() ? webrtc::kVideoRotation_0 : rotation),
+          width, height);
+}
+
+void AndroidVideoCapturer::OnTextureFrame(
+    int width,
+    int height,
+    webrtc::VideoRotation rotation,
+    int64_t timestamp_ns,
+    const webrtc_jni::NativeHandleImpl& handle) {
+  int adapted_width;
+  int adapted_height;
+  int crop_width;
+  int crop_height;
+  int crop_x;
+  int crop_y;
+
+  if (!AdaptFrame(width, height, &adapted_width, &adapted_height,
+                  &crop_width, &crop_height, &crop_x, &crop_y)) {
+    return;
+  }
+
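+  // Texture frames are not copied; cropping and rotation are expressed by
+  // adjusting the texture sampling matrix instead of touching pixel data.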
+  webrtc_jni::NativeHandleImpl::Matrix matrix = handle.sampling_matrix;
+
+  matrix.Crop(crop_width / static_cast<float>(width),
+              crop_height / static_cast<float>(height),
+              crop_x / static_cast<float>(width),
+              crop_y / static_cast<float>(height));
+
+  if (apply_rotation()) {
+    if (rotation == webrtc::kVideoRotation_90 ||
+        rotation == webrtc::kVideoRotation_270) {
+      std::swap(adapted_width, adapted_height);
+    }
+    matrix.Rotate(rotation);
+  }
+
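+  // CreateTextureFrame wraps the same OES texture in a new buffer of the
+  // adapted size, carrying the updated sampling matrix.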
+  // TODO(nisse): Use microsecond time instead.
+  OnFrame(cricket::WebRtcVideoFrame(
+              surface_texture_helper_->CreateTextureFrame(
+                  adapted_width, adapted_height,
+                  webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)),
+              timestamp_ns,
+              apply_rotation() ? webrtc::kVideoRotation_0 : rotation),
+          width, height);
+}
 void AndroidVideoCapturer::OnOutputFormatRequest(