Index: webrtc/api/androidvideotracksource.cc |
diff --git a/webrtc/api/androidvideotracksource.cc b/webrtc/api/androidvideotracksource.cc |
index f49cc30c8c975189485005cbb2ad7e4d87c44f0b..658d338c0e29f4f15157895d7348e3ef83c77e0f 100644 |
--- a/webrtc/api/androidvideotracksource.cc |
+++ b/webrtc/api/androidvideotracksource.cc |
@@ -25,21 +25,9 @@ AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread, |
j_egl_context)), |
is_screencast_(is_screencast) { |
LOG(LS_INFO) << "AndroidVideoTrackSource ctor"; |
- worker_thread_checker_.DetachFromThread(); |
camera_thread_checker_.DetachFromThread(); |
} |
-bool AndroidVideoTrackSource::GetStats(AndroidVideoTrackSource::Stats* stats) { |
- rtc::CritScope lock(&stats_crit_); |
- |
- if (!stats_) { |
- return false; |
- } |
- |
- *stats = *stats_; |
- return true; |
-} |
- |
void AndroidVideoTrackSource::SetState(SourceState state) { |
if (rtc::Thread::Current() != signaling_thread_) { |
invoker_.AsyncInvoke<void>( |
@@ -54,34 +42,6 @@ void AndroidVideoTrackSource::SetState(SourceState state) { |
} |
} |
-void AndroidVideoTrackSource::AddOrUpdateSink( |
- rtc::VideoSinkInterface<cricket::VideoFrame>* sink, |
- const rtc::VideoSinkWants& wants) { |
- RTC_DCHECK(worker_thread_checker_.CalledOnValidThread()); |
- |
- broadcaster_.AddOrUpdateSink(sink, wants); |
- OnSinkWantsChanged(broadcaster_.wants()); |
-} |
- |
-void AndroidVideoTrackSource::RemoveSink( |
- rtc::VideoSinkInterface<cricket::VideoFrame>* sink) { |
- RTC_DCHECK(worker_thread_checker_.CalledOnValidThread()); |
- |
- broadcaster_.RemoveSink(sink); |
- OnSinkWantsChanged(broadcaster_.wants()); |
-} |
- |
-void AndroidVideoTrackSource::OnSinkWantsChanged( |
- const rtc::VideoSinkWants& wants) { |
- { |
- rtc::CritScope lock(&apply_rotation_crit_); |
- apply_rotation_ = wants.rotation_applied; |
- } |
- |
- video_adapter_.OnResolutionRequest(wants.max_pixel_count, |
- wants.max_pixel_count_step_up); |
-} |
- |
void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data, |
int length, |
int width, |
@@ -92,25 +52,28 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data, |
RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 || |
rotation == 270); |
+ int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec; |
+ int64_t translated_camera_time_us = |
+ timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros()); |
+ |
int adapted_width; |
int adapted_height; |
int crop_width; |
int crop_height; |
int crop_x; |
int crop_y; |
- int64_t translated_camera_time_us; |
- if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec, |
+ if (!AdaptFrame(width, height, camera_time_us, |
&adapted_width, &adapted_height, &crop_width, &crop_height, |
- &crop_x, &crop_y, &translated_camera_time_us)) { |
+ &crop_x, &crop_y)) { |
return; |
} |
int rotated_width = crop_width; |
int rotated_height = crop_height; |
- rtc::CritScope lock(&apply_rotation_crit_); |
- if (apply_rotation_ && (rotation == 90 || rotation == 270)) { |
+ rtc::CritScope lock(&apply_rotation_.lock); |
+ if (apply_rotation_.value && (rotation == 90 || rotation == 270)) { |
magjed_webrtc 2016/09/14 13:30:56: The |apply_rotation_| variable is not defined?
nisse-webrtc 2016/09/14 14:10:33: It's a protected variable in the base class. Maybe
nisse-webrtc 2016/09/15 07:39:17: Added now. I wonder if there's some other way to
std::swap(adapted_width, adapted_height); |
std::swap(rotated_width, rotated_height); |
} |
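
The apply_rotation_.lock / apply_rotation_.value accesses above (and the review thread) imply that the flag now lives in a small lock-plus-value member of the base class. A minimal sketch of what such a protected member could look like, assuming the 2016-era rtc::CriticalSection; the names here are illustrative, the real definition is in the base class and not part of this diff.

#include "webrtc/base/criticalsection.h"

// Hypothetical shape of the protected base-class member implied by the
// apply_rotation_.lock / apply_rotation_.value accesses in this file.
struct ApplyRotation {
  rtc::CriticalSection lock;
  bool value = false;  // Read and written only while |lock| is held.
};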
@@ -135,7 +98,7 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data, |
// Swap U and V, since we have NV21, not NV12. |
buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(), |
buffer->StrideU(), crop_width, crop_height, |
- static_cast<libyuv::RotationMode>(apply_rotation_ ? rotation : 0)); |
+ static_cast<libyuv::RotationMode>(apply_rotation_.value ? rotation : 0)); |
if (adapted_width != buffer->width() || adapted_height != buffer->height()) { |
rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer( |
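
The "Swap U and V, since we have NV21, not NV12" comment above refers to feeding libyuv's NV12 converter with the destination chroma planes in swapped order. A sketch of that trick, assuming libyuv::NV12ToI420Rotate from libyuv/rotate.h (the actual call site is in unchanged context and not shown in this hunk).

#include <stdint.h>
#include "libyuv/rotate.h"

// NV21 stores interleaved chroma as VU instead of NV12's UV, so the NV12
// converter can be reused by handing it the U and V destinations swapped.
int Nv21ToI420Rotate(const uint8_t* src_y, int src_stride_y,
                     const uint8_t* src_vu, int src_stride_vu,
                     uint8_t* dst_y, int dst_stride_y,
                     uint8_t* dst_u, int dst_stride_u,
                     uint8_t* dst_v, int dst_stride_v,
                     int width, int height, libyuv::RotationMode mode) {
  return libyuv::NV12ToI420Rotate(src_y, src_stride_y, src_vu, src_stride_vu,
                                  dst_y, dst_stride_y,
                                  dst_v, dst_stride_v,  // V where U is expected
                                  dst_u, dst_stride_u,  // U where V is expected
                                  width, height, mode);
}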
@@ -145,9 +108,9 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data, |
} |
OnFrame(cricket::WebRtcVideoFrame( |
- buffer, |
- apply_rotation_ ? webrtc::kVideoRotation_0 |
- : static_cast<webrtc::VideoRotation>(rotation), |
+ buffer, apply_rotation_.value |
+ ? webrtc::kVideoRotation_0 |
+ : static_cast<webrtc::VideoRotation>(rotation), |
translated_camera_time_us, 0), |
width, height); |
} |
@@ -162,17 +125,20 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured( |
RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 || |
rotation == 270); |
+ int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec; |
+ int64_t translated_camera_time_us = |
+ timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros()); |
+ |
int adapted_width; |
int adapted_height; |
int crop_width; |
int crop_height; |
int crop_x; |
int crop_y; |
- int64_t translated_camera_time_us; |
- if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec, |
+ if (!AdaptFrame(width, height, camera_time_us, |
&adapted_width, &adapted_height, &crop_width, &crop_height, |
- &crop_x, &crop_y, &translated_camera_time_us)) { |
+ &crop_x, &crop_y)) { |
surface_texture_helper_->ReturnTextureFrame(); |
return; |
} |
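
Both capture paths now translate the camera timestamp up front, before the AdaptFrame() drop decision, instead of receiving it back from AdaptFrame(). A minimal sketch of that translation, assuming the 2016-era include paths webrtc/base/timestampaligner.h and webrtc/base/timeutils.h.

#include <stdint.h>
#include "webrtc/base/timestampaligner.h"
#include "webrtc/base/timeutils.h"

// Convert a camera timestamp (nanoseconds, camera clock) to microseconds on
// the local monotonic clock. The aligner smooths drift between the two
// clocks, as done at the top of OnByteBufferFrameCaptured and
// OnTextureFrameCaptured above.
int64_t TranslateCaptureTimeUs(rtc::TimestampAligner* aligner,
                               int64_t timestamp_ns) {
  const int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
  return aligner->TranslateTimestamp(camera_time_us, rtc::TimeMicros());
}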
@@ -184,8 +150,8 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured( |
crop_x / static_cast<float>(width), |
crop_y / static_cast<float>(height)); |
- rtc::CritScope lock(&apply_rotation_crit_); |
- if (apply_rotation_) { |
+ rtc::CritScope lock(&apply_rotation_.lock); |
+ if (apply_rotation_.value) { |
if (rotation == webrtc::kVideoRotation_90 || |
rotation == webrtc::kVideoRotation_270) { |
std::swap(adapted_width, adapted_height); |
@@ -193,25 +159,15 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured( |
matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation)); |
} |
- OnFrame(cricket::WebRtcVideoFrame( |
- surface_texture_helper_->CreateTextureFrame( |
- adapted_width, adapted_height, |
- webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)), |
- apply_rotation_ ? webrtc::kVideoRotation_0 |
- : static_cast<webrtc::VideoRotation>(rotation), |
- translated_camera_time_us, 0), |
- width, height); |
-} |
- |
-void AndroidVideoTrackSource::OnFrame(const cricket::VideoFrame& frame, |
- int width, |
- int height) { |
- { |
- rtc::CritScope lock(&stats_crit_); |
- stats_ = rtc::Optional<AndroidVideoTrackSource::Stats>({width, height}); |
- } |
- |
- broadcaster_.OnFrame(frame); |
+ OnFrame( |
+ cricket::WebRtcVideoFrame( |
+ surface_texture_helper_->CreateTextureFrame( |
+ adapted_width, adapted_height, |
+ webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)), |
+ apply_rotation_.value ? webrtc::kVideoRotation_0 |
+ : static_cast<webrtc::VideoRotation>(rotation), |
+ translated_camera_time_us, 0), |
+ width, height); |
} |
void AndroidVideoTrackSource::OnOutputFormatRequest(int width, |
@@ -224,36 +180,4 @@ void AndroidVideoTrackSource::OnOutputFormatRequest(int width, |
video_adapter_.OnOutputFormatRequest(format); |
} |
-bool AndroidVideoTrackSource::AdaptFrame(int width, |
- int height, |
- int64_t camera_time_us, |
- int* out_width, |
- int* out_height, |
- int* crop_width, |
- int* crop_height, |
- int* crop_x, |
- int* crop_y, |
- int64_t* translated_camera_time_us) { |
- RTC_DCHECK(camera_thread_checker_.CalledOnValidThread()); |
- |
- int64_t system_time_us = rtc::TimeMicros(); |
- *translated_camera_time_us = |
- timestamp_aligner_.TranslateTimestamp(camera_time_us, system_time_us); |
- |
- if (!broadcaster_.frame_wanted()) { |
- return false; |
- } |
- |
- if (!video_adapter_.AdaptFrameResolution( |
- width, height, camera_time_us * rtc::kNumNanosecsPerMicrosec, |
- crop_width, crop_height, out_width, out_height)) { |
- // VideoAdapter dropped the frame. |
- return false; |
- } |
- *crop_x = (width - *crop_width) / 2; |
- *crop_y = (height - *crop_height) / 2; |
- |
- return true; |
-} |
- |
} // namespace webrtc |
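
The AdaptFrame() helper deleted above no longer returns a translated timestamp; the updated call sites earlier in the diff pass nine arguments. The replacement presumably lives in the (not shown) base class with a signature along these lines, inferred from the call sites rather than taken from the base-class header.

// Signature implied by the AdaptFrame() calls in this diff; the timestamp
// translation is now done by the callers via rtc::TimestampAligner, so the
// |translated_camera_time_us| out-parameter is gone.
bool AdaptFrame(int width,
                int height,
                int64_t camera_time_us,
                int* out_width,
                int* out_height,
                int* crop_width,
                int* crop_height,
                int* crop_x,
                int* crop_y);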