Chromium Code Reviews
Index: webrtc/api/androidvideotracksource.cc
diff --git a/webrtc/api/androidvideotracksource.cc b/webrtc/api/androidvideotracksource.cc
index f49cc30c8c975189485005cbb2ad7e4d87c44f0b..e43f95dae167731b745b1ebc613ac3511b457d6d 100644
--- a/webrtc/api/androidvideotracksource.cc
+++ b/webrtc/api/androidvideotracksource.cc
@@ -25,21 +25,9 @@ AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
           j_egl_context)),
       is_screencast_(is_screencast) {
   LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
-  worker_thread_checker_.DetachFromThread();
   camera_thread_checker_.DetachFromThread();
 }
-bool AndroidVideoTrackSource::GetStats(AndroidVideoTrackSource::Stats* stats) {
-  rtc::CritScope lock(&stats_crit_);
-
-  if (!stats_) {
-    return false;
-  }
-
-  *stats = *stats_;
-  return true;
-}
-
 void AndroidVideoTrackSource::SetState(SourceState state) {
   if (rtc::Thread::Current() != signaling_thread_) {
     invoker_.AsyncInvoke<void>(
@@ -54,34 +42,6 @@ void AndroidVideoTrackSource::SetState(SourceState state) {
   }
 }
-void AndroidVideoTrackSource::AddOrUpdateSink(
-    rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
-    const rtc::VideoSinkWants& wants) {
-  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
-
-  broadcaster_.AddOrUpdateSink(sink, wants);
-  OnSinkWantsChanged(broadcaster_.wants());
-}
-
-void AndroidVideoTrackSource::RemoveSink(
-    rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
-  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
-
-  broadcaster_.RemoveSink(sink);
-  OnSinkWantsChanged(broadcaster_.wants());
-}
-
-void AndroidVideoTrackSource::OnSinkWantsChanged(
-    const rtc::VideoSinkWants& wants) {
-  {
-    rtc::CritScope lock(&apply_rotation_crit_);
-    apply_rotation_ = wants.rotation_applied;
-  }
-
-  video_adapter_.OnResolutionRequest(wants.max_pixel_count,
-                                     wants.max_pixel_count_step_up);
-}
-
 void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
                                                         int length,
                                                         int width,
@@ -92,25 +52,35 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
   RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
              rotation == 270);
+  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
+  int64_t translated_camera_time_us =
+      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
+
   int adapted_width;
   int adapted_height;
   int crop_width;
   int crop_height;
   int crop_x;
   int crop_y;
-  int64_t translated_camera_time_us;
-  if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
+  if (!AdaptFrame(width, height, camera_time_us,
                   &adapted_width, &adapted_height, &crop_width, &crop_height,
-                  &crop_x, &crop_y, &translated_camera_time_us)) {
+                  &crop_x, &crop_y)) {
     return;
   }
   int rotated_width = crop_width;
   int rotated_height = crop_height;
-  rtc::CritScope lock(&apply_rotation_crit_);
-  if (apply_rotation_ && (rotation == 90 || rotation == 270)) {
+  // Make a local copy, since value of apply_rotation() may change
+  // under our feet.
+
+  // TODO(nisse, magjed): Don't rotate here, leave it to the base
+  // class instead. It's a fallback case and doesn't need to be well
+  // optimized.

magjed_webrtc 2016/09/19 09:43:26:
  You need to rebase this function (I changed it rec…
nisse-webrtc 2016/09/20 08:23:42:
  Done, hope I got the rebasing right.

+  bool do_rotate = apply_rotation();
+
+  if (do_rotate && (rotation == 90 || rotation == 270)) {
     std::swap(adapted_width, adapted_height);
     std::swap(rotated_width, rotated_height);
   }
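
Note on the timestamp handling above: the capture callbacks now convert the camera's nanosecond timestamp to microseconds and map it onto the local monotonic clock themselves, instead of receiving translated_camera_time_us back from AdaptFrame(). A minimal sketch of that pattern, not part of this CL; the class name is illustrative and the include paths are approximate for this revision of the tree:

// Illustrative sketch only (not part of this CL). Header paths are
// approximate for this era of the WebRTC tree.
#include "webrtc/base/timestampaligner.h"  // rtc::TimestampAligner
#include "webrtc/base/timeutils.h"  // rtc::TimeMicros, rtc::kNumNanosecsPerMicrosec

// Hypothetical helper showing the translation pattern used above.
class TimestampExample {
 public:
  int64_t TranslateCameraTimestamp(int64_t timestamp_ns) {
    const int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
    // The second argument is "now" on the local monotonic clock; the aligner
    // estimates and removes the offset between the camera clock and
    // rtc::TimeMicros() over successive frames.
    return timestamp_aligner_.TranslateTimestamp(camera_time_us,
                                                 rtc::TimeMicros());
  }

 private:
  rtc::TimestampAligner timestamp_aligner_;
};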
@@ -135,7 +105,7 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
       // Swap U and V, since we have NV21, not NV12.
       buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
       buffer->StrideU(), crop_width, crop_height,
-      static_cast<libyuv::RotationMode>(apply_rotation_ ? rotation : 0));
+      static_cast<libyuv::RotationMode>(do_rotate ? rotation : 0));
   if (adapted_width != buffer->width() || adapted_height != buffer->height()) {
     rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
@@ -145,9 +115,8 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
   }
   OnFrame(cricket::WebRtcVideoFrame(
-              buffer,
-              apply_rotation_ ? webrtc::kVideoRotation_0
-                              : static_cast<webrtc::VideoRotation>(rotation),
+              buffer, do_rotate ? webrtc::kVideoRotation_0
+                                : static_cast<webrtc::VideoRotation>(rotation),
               translated_camera_time_us, 0),
           width, height);
 }
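
Note on the rotation handling above: apply_rotation() is read once into do_rotate, the output dimensions are swapped for 90- and 270-degree rotations, and the same value selects the libyuv rotation mode. A standalone sketch of that logic; the helper name is hypothetical and not part of the CL:

// Illustrative sketch only; the helper name is hypothetical.
#include <utility>

#include "libyuv/rotate.h"  // libyuv::RotationMode

struct RotationPlan {
  int out_width;
  int out_height;
  libyuv::RotationMode mode;
};

RotationPlan PlanRotation(int width, int height, int rotation, bool do_rotate) {
  RotationPlan plan = {width, height, libyuv::kRotate0};
  if (do_rotate) {
    // Rotating by 90 or 270 degrees swaps the output dimensions.
    if (rotation == 90 || rotation == 270) {
      std::swap(plan.out_width, plan.out_height);
    }
    // libyuv's RotationMode values equal the rotation in degrees
    // (kRotate0/90/180/270), so the cast is well defined for
    // rotation in {0, 90, 180, 270}.
    plan.mode = static_cast<libyuv::RotationMode>(rotation);
  }
  return plan;
}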
@@ -162,17 +131,20 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured(
   RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
              rotation == 270);
+  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
+  int64_t translated_camera_time_us =
+      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
+
   int adapted_width;
   int adapted_height;
   int crop_width;
   int crop_height;
   int crop_x;
   int crop_y;
-  int64_t translated_camera_time_us;
-  if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
+  if (!AdaptFrame(width, height, camera_time_us,
                   &adapted_width, &adapted_height, &crop_width, &crop_height,
-                  &crop_x, &crop_y, &translated_camera_time_us)) {
+                  &crop_x, &crop_y)) {
     surface_texture_helper_->ReturnTextureFrame();
     return;
   }
@@ -184,8 +156,11 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured(
               crop_x / static_cast<float>(width),
               crop_y / static_cast<float>(height));
-  rtc::CritScope lock(&apply_rotation_crit_);
-  if (apply_rotation_) {
+  // Make a local copy, since value of apply_rotation() may change
+  // under our feet.
+  bool do_rotate = apply_rotation();
+
+  if (do_rotate) {
     if (rotation == webrtc::kVideoRotation_90 ||
         rotation == webrtc::kVideoRotation_270) {
       std::swap(adapted_width, adapted_height);
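
Note on the texture path above: cropping is done through the sampling matrix, so the pixel crop rectangle is converted to normalized [0, 1] texture coordinates before being applied, as in the crop call visible above. A small sketch of that conversion; the struct and function names are illustrative, not part of the CL:

// Illustrative sketch only; names are hypothetical.
struct NormalizedCrop {
  float width;   // Fraction of the full width to keep.
  float height;  // Fraction of the full height to keep.
  float x;       // Left edge as a fraction of the full width.
  float y;       // Top edge as a fraction of the full height.
};

NormalizedCrop ToNormalizedCrop(int crop_width, int crop_height,
                                int crop_x, int crop_y,
                                int width, int height) {
  return {crop_width / static_cast<float>(width),
          crop_height / static_cast<float>(height),
          crop_x / static_cast<float>(width),
          crop_y / static_cast<float>(height)};
}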
@@ -197,23 +172,12 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured(
               surface_texture_helper_->CreateTextureFrame(
                   adapted_width, adapted_height,
                   webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)),
-              apply_rotation_ ? webrtc::kVideoRotation_0
-                              : static_cast<webrtc::VideoRotation>(rotation),
+              do_rotate ? webrtc::kVideoRotation_0
+                        : static_cast<webrtc::VideoRotation>(rotation),
               translated_camera_time_us, 0),
           width, height);
 }
-void AndroidVideoTrackSource::OnFrame(const cricket::VideoFrame& frame,
-                                      int width,
-                                      int height) {
-  {
-    rtc::CritScope lock(&stats_crit_);
-    stats_ = rtc::Optional<AndroidVideoTrackSource::Stats>({width, height});
-  }
-
-  broadcaster_.OnFrame(frame);
-}
-
 void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
                                                     int height,
                                                     int fps) {
@@ -221,39 +185,7 @@ void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
   cricket::VideoFormat format(width, height,
                               cricket::VideoFormat::FpsToInterval(fps), 0);
-  video_adapter_.OnOutputFormatRequest(format);
-}
-
-bool AndroidVideoTrackSource::AdaptFrame(int width,
-                                         int height,
-                                         int64_t camera_time_us,
-                                         int* out_width,
-                                         int* out_height,
-                                         int* crop_width,
-                                         int* crop_height,
-                                         int* crop_x,
-                                         int* crop_y,
-                                         int64_t* translated_camera_time_us) {
-  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
-
-  int64_t system_time_us = rtc::TimeMicros();
-  *translated_camera_time_us =
-      timestamp_aligner_.TranslateTimestamp(camera_time_us, system_time_us);
-
-  if (!broadcaster_.frame_wanted()) {
-    return false;
-  }
-
-  if (!video_adapter_.AdaptFrameResolution(
-          width, height, camera_time_us * rtc::kNumNanosecsPerMicrosec,
-          crop_width, crop_height, out_width, out_height)) {
-    // VideoAdapter dropped the frame.
-    return false;
-  }
-  *crop_x = (width - *crop_width) / 2;
-  *crop_y = (height - *crop_height) / 2;
-
-  return true;
+  video_adapter()->OnOutputFormatRequest(format);
 }
 } // namespace webrtc
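
Note on the removed AdaptFrame() above: besides translating timestamps, it centered the crop rectangle returned by the video adapter inside the captured frame; the base-class AdaptFrame() now called by this class (not shown in this diff) is assumed to do the equivalent. A standalone sketch of the centering arithmetic with a worked example:

// Illustrative sketch only, not part of this CL.
#include <cstdio>

void CenterCrop(int width, int height, int crop_width, int crop_height,
                int* crop_x, int* crop_y) {
  // Offsets place the crop rectangle in the middle of the full frame.
  *crop_x = (width - crop_width) / 2;
  *crop_y = (height - crop_height) / 2;
}

int main() {
  int crop_x = 0;
  int crop_y = 0;
  // E.g. cropping a 1280x720 capture to 960x720 trims 160 columns from
  // each side: crop_x == 160, crop_y == 0.
  CenterCrop(1280, 720, 960, 720, &crop_x, &crop_y);
  std::printf("crop_x=%d crop_y=%d\n", crop_x, crop_y);
  return 0;
}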