| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 16 matching lines...) |
| 27 enum { kDefaultVideoFrequency = 90000 }; | 27 enum { kDefaultVideoFrequency = 90000 }; |
| 28 const double kNtpFracPerMs = 4.294967296E6; | 28 const double kNtpFracPerMs = 4.294967296E6; |
| 29 static const int kSmoothingFilter = 4 * 2; | 29 static const int kSmoothingFilter = 4 * 2; |
| 30 | 30 |
| 31 class Time { | 31 class Time { |
| 32 public: | 32 public: |
| 33 explicit Time(int64_t offset) | 33 explicit Time(int64_t offset) |
| 34 : kNtpJan1970(2208988800UL), | 34 : kNtpJan1970(2208988800UL), |
| 35 time_now_ms_(offset) {} | 35 time_now_ms_(offset) {} |
| 36 | 36 |
| 37 RtcpMeasurement GenerateRtcp(int frequency, uint32_t offset) const { | |
| 38 RtcpMeasurement rtcp; | |
| 39 rtcp.ntp_time = GetNowNtp(); | |
| 40 rtcp.rtp_timestamp = GetNowRtp(frequency, offset); | |
| 41 return rtcp; | |
| 42 } | |
| 43 | |
| 44 NtpTime GetNowNtp() const { | 37 NtpTime GetNowNtp() const { |
| 45 uint32_t ntp_secs = time_now_ms_ / 1000 + kNtpJan1970; | 38 uint32_t ntp_secs = time_now_ms_ / 1000 + kNtpJan1970; |
| 46 int64_t remainder_ms = time_now_ms_ % 1000; | 39 int64_t remainder_ms = time_now_ms_ % 1000; |
| 47 uint32_t ntp_frac = static_cast<uint32_t>( | 40 uint32_t ntp_frac = static_cast<uint32_t>( |
| 48 static_cast<double>(remainder_ms) * kNtpFracPerMs + 0.5); | 41 static_cast<double>(remainder_ms) * kNtpFracPerMs + 0.5); |
| 49 return NtpTime(ntp_secs, ntp_frac); | 42 return NtpTime(ntp_secs, ntp_frac); |
| 50 } | 43 } |
| 51 | 44 |
| 52 uint32_t GetNowRtp(int frequency, uint32_t offset) const { | 45 uint32_t GetNowRtp(int frequency, uint32_t offset) const { |
| 53 return frequency * time_now_ms_ / 1000 + offset; | 46 return frequency * time_now_ms_ / 1000 + offset; |
| (...skipping 43 matching lines...) |
| 97 int audio_frequency = static_cast<int>(kDefaultAudioFrequency * | 90 int audio_frequency = static_cast<int>(kDefaultAudioFrequency * |
| 98 audio_clock_drift_ + 0.5); | 91 audio_clock_drift_ + 0.5); |
| 99 int audio_offset = 0; | 92 int audio_offset = 0; |
| 100 int video_frequency = static_cast<int>(kDefaultVideoFrequency * | 93 int video_frequency = static_cast<int>(kDefaultVideoFrequency * |
| 101 video_clock_drift_ + 0.5); | 94 video_clock_drift_ + 0.5); |
| 102 bool new_sr; | 95 bool new_sr; |
| 103 int video_offset = 0; | 96 int video_offset = 0; |
| 104 StreamSynchronization::Measurements audio; | 97 StreamSynchronization::Measurements audio; |
| 105 StreamSynchronization::Measurements video; | 98 StreamSynchronization::Measurements video; |
| 106 // Generate NTP/RTP timestamp pair for both streams corresponding to RTCP. | 99 // Generate NTP/RTP timestamp pair for both streams corresponding to RTCP. |
| 107 RtcpMeasurement rtcp = | 100 NtpTime ntp_time = send_time_->GetNowNtp(); |
| 108 send_time_->GenerateRtcp(audio_frequency, audio_offset); | 101 uint32_t rtp_timestamp = |
| 109 EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_time.seconds(), | 102 send_time_->GetNowRtp(audio_frequency, audio_offset); |
| 110 rtcp.ntp_time.fractions(), rtcp.rtp_timestamp, | 103 EXPECT_TRUE(audio.rtp_to_ntp.UpdateMeasurements( |
| 111 &audio.rtcp, &new_sr)); | 104 ntp_time.seconds(), ntp_time.fractions(), rtp_timestamp, &new_sr)); |
| 112 send_time_->IncreaseTimeMs(100); | 105 send_time_->IncreaseTimeMs(100); |
| 113 receive_time_->IncreaseTimeMs(100); | 106 receive_time_->IncreaseTimeMs(100); |
| 114 rtcp = send_time_->GenerateRtcp(video_frequency, video_offset); | 107 ntp_time = send_time_->GetNowNtp(); |
| 115 EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_time.seconds(), | 108 rtp_timestamp = send_time_->GetNowRtp(video_frequency, video_offset); |
| 116 rtcp.ntp_time.fractions(), rtcp.rtp_timestamp, | 109 EXPECT_TRUE(video.rtp_to_ntp.UpdateMeasurements( |
| 117 &video.rtcp, &new_sr)); | 110 ntp_time.seconds(), ntp_time.fractions(), rtp_timestamp, &new_sr)); |
| 118 send_time_->IncreaseTimeMs(900); | 111 send_time_->IncreaseTimeMs(900); |
| 119 receive_time_->IncreaseTimeMs(900); | 112 receive_time_->IncreaseTimeMs(900); |
| 120 rtcp = send_time_->GenerateRtcp(audio_frequency, audio_offset); | 113 ntp_time = send_time_->GetNowNtp(); |
| 121 EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_time.seconds(), | 114 rtp_timestamp = send_time_->GetNowRtp(audio_frequency, audio_offset); |
| 122 rtcp.ntp_time.fractions(), rtcp.rtp_timestamp, | 115 EXPECT_TRUE(audio.rtp_to_ntp.UpdateMeasurements( |
| 123 &audio.rtcp, &new_sr)); | 116 ntp_time.seconds(), ntp_time.fractions(), rtp_timestamp, &new_sr)); |
| 124 send_time_->IncreaseTimeMs(100); | 117 send_time_->IncreaseTimeMs(100); |
| 125 receive_time_->IncreaseTimeMs(100); | 118 receive_time_->IncreaseTimeMs(100); |
| 126 rtcp = send_time_->GenerateRtcp(video_frequency, video_offset); | 119 ntp_time = send_time_->GetNowNtp(); |
| 127 EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_time.seconds(), | 120 rtp_timestamp = send_time_->GetNowRtp(video_frequency, video_offset); |
| 128 rtcp.ntp_time.fractions(), rtcp.rtp_timestamp, | 121 EXPECT_TRUE(video.rtp_to_ntp.UpdateMeasurements( |
| 129 &video.rtcp, &new_sr)); | 122 ntp_time.seconds(), ntp_time.fractions(), rtp_timestamp, &new_sr)); |
| 130 | 123 |
| 131 send_time_->IncreaseTimeMs(900); | 124 send_time_->IncreaseTimeMs(900); |
| 132 receive_time_->IncreaseTimeMs(900); | 125 receive_time_->IncreaseTimeMs(900); |
| 133 | 126 |
| 134 // Capture an audio and a video frame at the same time. | 127 // Capture an audio and a video frame at the same time. |
| 135 audio.latest_timestamp = | 128 audio.latest_timestamp = |
| 136 send_time_->GetNowRtp(audio_frequency, audio_offset); | 129 send_time_->GetNowRtp(audio_frequency, audio_offset); |
| 137 video.latest_timestamp = | 130 video.latest_timestamp = |
| 138 send_time_->GetNowRtp(video_frequency, video_offset); | 131 send_time_->GetNowRtp(video_frequency, video_offset); |
| 139 | 132 |
| (...skipping 426 matching lines...) |
| 566 | 559 |
| 567 TEST_F(StreamSynchronizationTest, | 560 TEST_F(StreamSynchronizationTest, |
| 568 BothDelayedVideoLaterVideoClockDriftWithBaseDelay) { | 561 BothDelayedVideoLaterVideoClockDriftWithBaseDelay) { |
| 569 int base_target_delay_ms = 2000; | 562 int base_target_delay_ms = 2000; |
| 570 video_clock_drift_ = 1.05; | 563 video_clock_drift_ = 1.05; |
| 571 sync_->SetTargetBufferingDelay(base_target_delay_ms); | 564 sync_->SetTargetBufferingDelay(base_target_delay_ms); |
| 572 BothDelayedVideoLaterTest(base_target_delay_ms); | 565 BothDelayedVideoLaterTest(base_target_delay_ms); |
| 573 } | 566 } |
| 574 | 567 |
| 575 } // namespace webrtc | 568 } // namespace webrtc |
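Side note on the NTP math used by Time::GetNowNtp() above: kNtpJan1970 (2208988800) is the offset in seconds between the NTP epoch (1900-01-01) and the Unix epoch (1970-01-01), and kNtpFracPerMs is 2^32 / 1000, so a millisecond remainder maps linearly onto the 32-bit NTP fraction field. Below is a minimal, standalone sketch of that same conversion; the constant values and the arithmetic are copied from the test, while the MsToNtp helper and the main() driver are added only for illustration.

```cpp
// Standalone sketch of the millisecond-to-NTP conversion performed by
// Time::GetNowNtp() in the test above.
#include <cstdint>
#include <cstdio>

namespace {
constexpr uint32_t kNtpJan1970 = 2208988800UL;   // Seconds from 1900 to 1970.
constexpr double kNtpFracPerMs = 4.294967296E6;  // 2^32 / 1000.

// Converts a time in milliseconds (Unix epoch) to an NTP seconds/fractions
// pair, mirroring the body of Time::GetNowNtp().
void MsToNtp(int64_t time_ms, uint32_t* ntp_secs, uint32_t* ntp_frac) {
  *ntp_secs = static_cast<uint32_t>(time_ms / 1000) + kNtpJan1970;
  int64_t remainder_ms = time_ms % 1000;
  *ntp_frac = static_cast<uint32_t>(
      static_cast<double>(remainder_ms) * kNtpFracPerMs + 0.5);
}
}  // namespace

int main() {
  uint32_t secs = 0;
  uint32_t frac = 0;
  // 1500 ms: one full second plus a 500 ms remainder.
  MsToNtp(1500, &secs, &frac);
  std::printf("ntp_secs=%u ntp_frac=%u\n", secs, frac);
  return 0;
}
```

For example, 1500 ms yields ntp_secs = 2208988801 and ntp_frac = 2147483648, i.e. exactly half of the 32-bit fractional range for the 500 ms remainder.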
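The NEW column repeats the same four-line pattern for every RTCP report: take an NTP timestamp from the send clock, derive the matching RTP timestamp, and feed both into the stream's rtp_to_ntp estimator via UpdateMeasurements(ntp_secs, ntp_frac, rtp_timestamp, &new_sr). A possible follow-up cleanup would be to factor that into a fixture helper; the sketch below is only a suggestion and assumes the types shown in the diff (Time, NtpTime, StreamSynchronization::Measurements with an rtp_to_ntp member). The helper name and its placement are hypothetical.

```cpp
// Hypothetical test-fixture helper (not part of the diff): generates one
// NTP/RTP timestamp pair from the send clock and records it in the stream's
// rtp_to_ntp estimator, mirroring the repeated block in the NEW column.
void ReceiveRtcpSr(const Time& send_time,
                   int frequency,
                   uint32_t rtp_offset,
                   StreamSynchronization::Measurements* stream) {
  bool new_sr = false;
  NtpTime ntp_time = send_time.GetNowNtp();
  uint32_t rtp_timestamp = send_time.GetNowRtp(frequency, rtp_offset);
  EXPECT_TRUE(stream->rtp_to_ntp.UpdateMeasurements(
      ntp_time.seconds(), ntp_time.fractions(), rtp_timestamp, &new_sr));
}

// Usage inside the test would then shrink to, e.g.:
//   ReceiveRtcpSr(*send_time_, audio_frequency, audio_offset, &audio);
//   ReceiveRtcpSr(*send_time_, video_frequency, video_offset, &video);
```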