Index: webrtc/video/video_receive_stream.cc |
diff --git a/webrtc/video/video_receive_stream.cc b/webrtc/video/video_receive_stream.cc |
index 183f72b537fcf9e6f75afb6bfebe44eaf648275a..f7dc613eea759202b1b7b91ef71a86ea4c6dd13f 100644 |
--- a/webrtc/video/video_receive_stream.cc |
+++ b/webrtc/video/video_receive_stream.cc |
@@ -22,6 +22,8 @@ |
#include "webrtc/common_video/h264/profile_level_id.h" |
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
#include "webrtc/modules/congestion_controller/include/congestion_controller.h" |
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h" |
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h" |
#include "webrtc/modules/utility/include/process_thread.h" |
#include "webrtc/modules/video_coding/frame_object.h" |
#include "webrtc/modules/video_coding/include/video_coding.h" |
@@ -33,7 +35,6 @@ |
#include "webrtc/video/call_stats.h" |
#include "webrtc/video/receive_statistics_proxy.h" |
#include "webrtc/video_receive_stream.h" |
-#include "webrtc/voice_engine/include/voe_video_sync.h" |
namespace webrtc { |
@@ -191,7 +192,6 @@ VideoReceiveStream::VideoReceiveStream( |
CongestionController* congestion_controller, |
PacketRouter* packet_router, |
VideoReceiveStream::Config config, |
- webrtc::VoiceEngine* voice_engine, |
ProcessThread* process_thread, |
CallStats* call_stats, |
VieRemb* remb) |
@@ -223,7 +223,7 @@ VideoReceiveStream::VideoReceiveStream( |
this, // KeyFrameRequestSender |
this, // OnCompleteFrameCallback |
timing_.get()), |
- rtp_stream_sync_(&video_receiver_, &rtp_stream_receiver_), |
+ rtp_stream_sync_(this), |
jitter_buffer_experiment_( |
field_trial::FindFullName("WebRTC-NewVideoJitterBuffer") == |
"Enabled") { |
@@ -364,15 +364,8 @@ void VideoReceiveStream::Stop() { |
transport_adapter_.Disable(); |
} |
-void VideoReceiveStream::SetSyncChannel(VoiceEngine* voice_engine, |
- int audio_channel_id) { |
- if (voice_engine && audio_channel_id != -1) { |
- VoEVideoSync* voe_sync_interface = VoEVideoSync::GetInterface(voice_engine); |
- rtp_stream_sync_.ConfigureSync(audio_channel_id, voe_sync_interface); |
- voe_sync_interface->Release(); |
- } else { |
- rtp_stream_sync_.ConfigureSync(-1, nullptr); |
- } |
+void VideoReceiveStream::SetSync(Syncable* audio_syncable) { |
+ rtp_stream_sync_.ConfigureSync(audio_syncable); |
} |
VideoReceiveStream::Stats VideoReceiveStream::GetStats() const { |
@@ -492,5 +485,42 @@ void VideoReceiveStream::RequestKeyFrame() { |
rtp_stream_receiver_.RequestKeyFrame(); |
} |
+int VideoReceiveStream::id() const { |
+  // Syncable id: use the remote SSRC, which identifies the incoming RTP |
+  // stream being synchronized (rtp.local_ssrc here was flagged in review |
+  // as wrong — see thread below). |
  [review thread on the line above]
  stefan-webrtc 2017/01/26 08:40:50:
  I think that sounds correct. Feel free to change,
  the sun 2017/01/30 15:43:12:
  Done.
  [end review thread]
+  return config_.rtp.remote_ssrc; |
+} |
+ |
+rtc::Optional<Syncable::Info> VideoReceiveStream::GetInfo() const { |
+ // Called on Call's module_process_thread_. |
+ Syncable::Info info; |
+ |
+ RtpReceiver* rtp_receiver = rtp_stream_receiver_.GetRtpReceiver(); |
+ RTC_DCHECK(rtp_receiver); |
+ if (!rtp_receiver->Timestamp(&info.latest_receive_timestamp)) |
+ return rtc::Optional<Syncable::Info>(); |
+ if (!rtp_receiver->LastReceivedTimeMs(&info.latest_receive_time_ms)) |
+ return rtc::Optional<Syncable::Info>(); |
+ |
+ RtpRtcp* rtp_rtcp = rtp_stream_receiver_.rtp_rtcp(); |
+ RTC_DCHECK(rtp_rtcp); |
+ if (rtp_rtcp->RemoteNTP(&info.ntp_secs, &info.ntp_frac, nullptr, nullptr, |
+ &info.rtp_timestamp) != 0) { |
+ return rtc::Optional<Syncable::Info>(); |
+ } |
+ |
+ info.current_delay_ms = video_receiver_.Delay(); |
+ return rtc::Optional<Syncable::Info>(info); |
+} |
+ |
+uint32_t VideoReceiveStream::GetPlayoutTimestamp() const { |
+  // Playout timestamps are an audio-side concept; this is never expected |
+  // to be called on a video stream. Return a dummy value after the assert |
+  // so a non-void function does not flow off the end (undefined behavior, |
+  // and RTC_NOTREACHED() compiles away in release builds). |
+  RTC_NOTREACHED(); |
+  return 0; |
+} |
+ |
+void VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { |
+ // Called on Call's module_process_thread_. |
+ video_receiver_.SetMinimumPlayoutDelay(delay_ms); |
+} |
+ |
} // namespace internal |
} // namespace webrtc |