| OLD | NEW |
| (Empty) |
| 1 /* | |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | |
| 3 * | |
| 4 * Use of this source code is governed by a BSD-style license | |
| 5 * that can be found in the LICENSE file in the root of the source | |
| 6 * tree. An additional intellectual property rights grant can be found | |
| 7 * in the file PATENTS. All contributing project authors may | |
| 8 * be found in the AUTHORS file in the root of the source tree. | |
| 9 */ | |
| 10 | |
| 11 #include "webrtc/video/vie_sync_module.h" | |
| 12 | |
| 13 #include "webrtc/base/checks.h" | |
| 14 #include "webrtc/base/logging.h" | |
| 15 #include "webrtc/base/timeutils.h" | |
| 16 #include "webrtc/base/trace_event.h" | |
| 17 #include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h" | |
| 18 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h" | |
| 19 #include "webrtc/modules/video_coding/video_coding_impl.h" | |
| 20 #include "webrtc/system_wrappers/include/clock.h" | |
| 21 #include "webrtc/video/stream_synchronization.h" | |
| 22 #include "webrtc/video_frame.h" | |
| 23 #include "webrtc/voice_engine/include/voe_video_sync.h" | |
| 24 | |
| 25 namespace webrtc { | |
| 26 namespace { | |
| 27 int UpdateMeasurements(StreamSynchronization::Measurements* stream, | |
| 28 const RtpRtcp& rtp_rtcp, const RtpReceiver& receiver) { | |
| 29 if (!receiver.Timestamp(&stream->latest_timestamp)) | |
| 30 return -1; | |
| 31 if (!receiver.LastReceivedTimeMs(&stream->latest_receive_time_ms)) | |
| 32 return -1; | |
| 33 | |
| 34 uint32_t ntp_secs = 0; | |
| 35 uint32_t ntp_frac = 0; | |
| 36 uint32_t rtp_timestamp = 0; | |
| 37 if (rtp_rtcp.RemoteNTP(&ntp_secs, &ntp_frac, nullptr, nullptr, | |
| 38 &rtp_timestamp) != 0) { | |
| 39 return -1; | |
| 40 } | |
| 41 | |
| 42 bool new_rtcp_sr = false; | |
| 43 if (!UpdateRtcpList( | |
| 44 ntp_secs, ntp_frac, rtp_timestamp, &stream->rtcp, &new_rtcp_sr)) { | |
| 45 return -1; | |
| 46 } | |
| 47 | |
| 48 return 0; | |
| 49 } | |
| 50 } // namespace | |
| 51 | |
// Constructs the sync module around |video_receiver| (not owned).
// Audio-side state (channel id, VoE interface, RTP modules) stays unset
// until ConfigureSync() is called; -1/nullptr mark "no audio configured".
ViESyncModule::ViESyncModule(vcm::VideoReceiver* video_receiver)
    : video_receiver_(video_receiver),
      clock_(Clock::GetRealTimeClock()),
      rtp_receiver_(nullptr),
      video_rtp_rtcp_(nullptr),
      voe_channel_id_(-1),
      voe_sync_interface_(nullptr),
      // Seed with "now" so TimeUntilNextProcess() starts a full interval out.
      last_sync_time_(rtc::TimeNanos()),
      sync_() {}
| 61 | |
| 62 ViESyncModule::~ViESyncModule() { | |
| 63 } | |
| 64 | |
| 65 void ViESyncModule::ConfigureSync(int voe_channel_id, | |
| 66 VoEVideoSync* voe_sync_interface, | |
| 67 RtpRtcp* video_rtcp_module, | |
| 68 RtpReceiver* rtp_receiver) { | |
| 69 if (voe_channel_id != -1) | |
| 70 RTC_DCHECK(voe_sync_interface); | |
| 71 rtc::CritScope lock(&data_cs_); | |
| 72 // Prevent expensive no-ops. | |
| 73 if (voe_channel_id_ == voe_channel_id && | |
| 74 voe_sync_interface_ == voe_sync_interface && | |
| 75 rtp_receiver_ == rtp_receiver && video_rtp_rtcp_ == video_rtcp_module) { | |
| 76 return; | |
| 77 } | |
| 78 voe_channel_id_ = voe_channel_id; | |
| 79 voe_sync_interface_ = voe_sync_interface; | |
| 80 rtp_receiver_ = rtp_receiver; | |
| 81 video_rtp_rtcp_ = video_rtcp_module; | |
| 82 sync_.reset( | |
| 83 new StreamSynchronization(video_rtp_rtcp_->SSRC(), voe_channel_id)); | |
| 84 } | |
| 85 | |
| 86 int64_t ViESyncModule::TimeUntilNextProcess() { | |
| 87 const int64_t kSyncIntervalMs = 1000; | |
| 88 return kSyncIntervalMs - | |
| 89 (rtc::TimeNanos() - last_sync_time_) / rtc::kNumNanosecsPerMillisec; | |
| 90 } | |
| 91 | |
| 92 void ViESyncModule::Process() { | |
| 93 rtc::CritScope lock(&data_cs_); | |
| 94 last_sync_time_ = rtc::TimeNanos(); | |
| 95 | |
| 96 const int current_video_delay_ms = video_receiver_->Delay(); | |
| 97 | |
| 98 if (voe_channel_id_ == -1) { | |
| 99 return; | |
| 100 } | |
| 101 assert(video_rtp_rtcp_ && voe_sync_interface_); | |
| 102 assert(sync_.get()); | |
| 103 | |
| 104 int audio_jitter_buffer_delay_ms = 0; | |
| 105 int playout_buffer_delay_ms = 0; | |
| 106 if (voe_sync_interface_->GetDelayEstimate(voe_channel_id_, | |
| 107 &audio_jitter_buffer_delay_ms, | |
| 108 &playout_buffer_delay_ms) != 0) { | |
| 109 return; | |
| 110 } | |
| 111 const int current_audio_delay_ms = audio_jitter_buffer_delay_ms + | |
| 112 playout_buffer_delay_ms; | |
| 113 | |
| 114 RtpRtcp* voice_rtp_rtcp = nullptr; | |
| 115 RtpReceiver* voice_receiver = nullptr; | |
| 116 if (voe_sync_interface_->GetRtpRtcp(voe_channel_id_, &voice_rtp_rtcp, | |
| 117 &voice_receiver) != 0) { | |
| 118 return; | |
| 119 } | |
| 120 assert(voice_rtp_rtcp); | |
| 121 assert(voice_receiver); | |
| 122 | |
| 123 if (UpdateMeasurements(&video_measurement_, *video_rtp_rtcp_, | |
| 124 *rtp_receiver_) != 0) { | |
| 125 return; | |
| 126 } | |
| 127 | |
| 128 if (UpdateMeasurements(&audio_measurement_, *voice_rtp_rtcp, | |
| 129 *voice_receiver) != 0) { | |
| 130 return; | |
| 131 } | |
| 132 | |
| 133 int relative_delay_ms; | |
| 134 // Calculate how much later or earlier the audio stream is compared to video. | |
| 135 if (!sync_->ComputeRelativeDelay(audio_measurement_, video_measurement_, | |
| 136 &relative_delay_ms)) { | |
| 137 return; | |
| 138 } | |
| 139 | |
| 140 TRACE_COUNTER1("webrtc", "SyncCurrentVideoDelay", current_video_delay_ms); | |
| 141 TRACE_COUNTER1("webrtc", "SyncCurrentAudioDelay", current_audio_delay_ms); | |
| 142 TRACE_COUNTER1("webrtc", "SyncRelativeDelay", relative_delay_ms); | |
| 143 int target_audio_delay_ms = 0; | |
| 144 int target_video_delay_ms = current_video_delay_ms; | |
| 145 // Calculate the necessary extra audio delay and desired total video | |
| 146 // delay to get the streams in sync. | |
| 147 if (!sync_->ComputeDelays(relative_delay_ms, | |
| 148 current_audio_delay_ms, | |
| 149 &target_audio_delay_ms, | |
| 150 &target_video_delay_ms)) { | |
| 151 return; | |
| 152 } | |
| 153 | |
| 154 if (voe_sync_interface_->SetMinimumPlayoutDelay( | |
| 155 voe_channel_id_, target_audio_delay_ms) == -1) { | |
| 156 LOG(LS_ERROR) << "Error setting voice delay."; | |
| 157 } | |
| 158 video_receiver_->SetMinimumPlayoutDelay(target_video_delay_ms); | |
| 159 } | |
| 160 | |
| 161 bool ViESyncModule::GetStreamSyncOffsetInMs(const VideoFrame& frame, | |
| 162 int64_t* stream_offset_ms) const { | |
| 163 rtc::CritScope lock(&data_cs_); | |
| 164 if (voe_channel_id_ == -1) | |
| 165 return false; | |
| 166 | |
| 167 uint32_t playout_timestamp = 0; | |
| 168 if (voe_sync_interface_->GetPlayoutTimestamp(voe_channel_id_, | |
| 169 playout_timestamp) != 0) { | |
| 170 return false; | |
| 171 } | |
| 172 | |
| 173 int64_t latest_audio_ntp; | |
| 174 if (!RtpToNtpMs(playout_timestamp, audio_measurement_.rtcp, | |
| 175 &latest_audio_ntp)) { | |
| 176 return false; | |
| 177 } | |
| 178 | |
| 179 int64_t latest_video_ntp; | |
| 180 if (!RtpToNtpMs(frame.timestamp(), video_measurement_.rtcp, | |
| 181 &latest_video_ntp)) { | |
| 182 return false; | |
| 183 } | |
| 184 | |
| 185 int64_t time_to_render_ms = | |
| 186 frame.render_time_ms() - clock_->TimeInMilliseconds(); | |
| 187 if (time_to_render_ms > 0) | |
| 188 latest_video_ntp += time_to_render_ms; | |
| 189 | |
| 190 *stream_offset_ms = latest_audio_ntp - latest_video_ntp; | |
| 191 return true; | |
| 192 } | |
| 193 | |
| 194 } // namespace webrtc | |
| OLD | NEW |