Index: webrtc/video/video_receive_stream.cc |
diff --git a/webrtc/video/video_receive_stream.cc b/webrtc/video/video_receive_stream.cc |
index 7835e779b0dca5aa11fe5c79f20a3976bd47b9c5..6450e59d880c563e3116c6a9dd891922d382bfbd 100644 |
--- a/webrtc/video/video_receive_stream.cc |
+++ b/webrtc/video/video_receive_stream.cc |
@@ -22,6 +22,8 @@ |
#include "webrtc/common_video/h264/profile_level_id.h" |
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
#include "webrtc/modules/congestion_controller/include/congestion_controller.h" |
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h" |
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h" |
#include "webrtc/modules/utility/include/process_thread.h" |
#include "webrtc/modules/video_coding/frame_object.h" |
#include "webrtc/modules/video_coding/include/video_coding.h" |
@@ -33,7 +35,6 @@ |
#include "webrtc/video/call_stats.h" |
#include "webrtc/video/receive_statistics_proxy.h" |
#include "webrtc/video_receive_stream.h" |
-#include "webrtc/voice_engine/include/voe_video_sync.h" |
namespace webrtc { |
@@ -191,7 +192,6 @@ VideoReceiveStream::VideoReceiveStream( |
CongestionController* congestion_controller, |
PacketRouter* packet_router, |
VideoReceiveStream::Config config, |
- webrtc::VoiceEngine* voice_engine, |
ProcessThread* process_thread, |
CallStats* call_stats, |
VieRemb* remb) |
@@ -223,7 +223,7 @@ VideoReceiveStream::VideoReceiveStream( |
this, // KeyFrameRequestSender |
this, // OnCompleteFrameCallback |
timing_.get()), |
- rtp_stream_sync_(&video_receiver_, &rtp_stream_receiver_) { |
+ rtp_stream_sync_(this) { |
LOG(LS_INFO) << "VideoReceiveStream: " << config_.ToString(); |
RTC_DCHECK(process_thread_); |
@@ -357,15 +357,8 @@ void VideoReceiveStream::Stop() { |
transport_adapter_.Disable(); |
} |
-void VideoReceiveStream::SetSyncChannel(VoiceEngine* voice_engine, |
- int audio_channel_id) { |
- if (voice_engine && audio_channel_id != -1) { |
- VoEVideoSync* voe_sync_interface = VoEVideoSync::GetInterface(voice_engine); |
- rtp_stream_sync_.ConfigureSync(audio_channel_id, voe_sync_interface); |
- voe_sync_interface->Release(); |
- } else { |
- rtp_stream_sync_.ConfigureSync(-1, nullptr); |
- } |
+void VideoReceiveStream::SetSync(Syncable* audio_syncable) { |
+  // |audio_syncable| may be nullptr to disable A/V sync for this stream; |
+  // this replaces the removed ConfigureSync(-1, nullptr) disable path. |
+  rtp_stream_sync_.ConfigureSync(audio_syncable); |
+} |
VideoReceiveStream::Stats VideoReceiveStream::GetStats() const { |
@@ -480,5 +473,42 @@ void VideoReceiveStream::RequestKeyFrame() { |
rtp_stream_receiver_.RequestKeyFrame(); |
} |
+int VideoReceiveStream::id() const { |
+  // Stream identifier used by the Syncable interface; returns the local SSRC. |
+  // TODO(solenberg): This appears to be what the current code does, but I |
+  // believe we should be using remote_ssrc instead? |
+  return config_.rtp.local_ssrc; |
+} |
+ |
+rtc::Optional<Syncable::Info> VideoReceiveStream::GetInfo() const { |
+ // Called on Call's module_process_thread_. |
stefan-webrtc (2017/01/26 08:40:50):
I wouldn't mind if thread checks were added here too.

the sun (2017/01/30 15:43:12):
Happy to oblige.
|
+ Syncable::Info info; |
+ |
+ RtpReceiver* rtp_receiver = rtp_stream_receiver_.GetRtpReceiver(); |
+ RTC_DCHECK(rtp_receiver); |
+ if (!rtp_receiver->Timestamp(&info.latest_receive_timestamp)) |
+ return rtc::Optional<Syncable::Info>(); |
+ if (!rtp_receiver->LastReceivedTimeMs(&info.latest_receive_time_ms)) |
+ return rtc::Optional<Syncable::Info>(); |
+ |
+ RtpRtcp* rtp_rtcp = rtp_stream_receiver_.rtp_rtcp(); |
+ RTC_DCHECK(rtp_rtcp); |
+ if (rtp_rtcp->RemoteNTP(&info.ntp_secs, &info.ntp_frac, nullptr, nullptr, |
+ &info.rtp_timestamp) != 0) { |
+ return rtc::Optional<Syncable::Info>(); |
+ } |
+ |
+ info.current_delay_ms = video_receiver_.Delay(); |
+ return rtc::Optional<Syncable::Info>(info); |
+} |
+ |
+uint32_t VideoReceiveStream::GetPlayoutTimestamp() const { |
+  // Not expected to be called for video streams (playout timestamps are an |
+  // audio-side concept in the Syncable interface). |
+  RTC_NOTREACHED(); |
+  // RTC_NOTREACHED() is a no-op in release builds; without this return, |
+  // control would flow off the end of a value-returning function (UB). |
+  return 0; |
+} |
+ |
+void VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { |
+  // Called on Call's module_process_thread_. |
+  // Forwards the sync-imposed minimum playout delay to the video receiver. |
+  video_receiver_.SetMinimumPlayoutDelay(delay_ms); |
+} |
+ |
} // namespace internal |
} // namespace webrtc |