OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 18 matching lines...) |
29 static const int kSmoothingFilter = 4 * 2; | 29 static const int kSmoothingFilter = 4 * 2; |
30 | 30 |
31 class Time { | 31 class Time { |
32 public: | 32 public: |
33 explicit Time(int64_t offset) | 33 explicit Time(int64_t offset) |
34 : kNtpJan1970(2208988800UL), | 34 : kNtpJan1970(2208988800UL), |
35 time_now_ms_(offset) {} | 35 time_now_ms_(offset) {} |
36 | 36 |
37 RtcpMeasurement GenerateRtcp(int frequency, uint32_t offset) const { | 37 RtcpMeasurement GenerateRtcp(int frequency, uint32_t offset) const { |
38 RtcpMeasurement rtcp; | 38 RtcpMeasurement rtcp; |
39 NowNtp(&rtcp.ntp_secs, &rtcp.ntp_frac); | 39 rtcp.ntp_time = GetNowNtp(); |
40 rtcp.rtp_timestamp = NowRtp(frequency, offset); | 40 rtcp.rtp_timestamp = GetNowRtp(frequency, offset); |
41 return rtcp; | 41 return rtcp; |
42 } | 42 } |
43 | 43 |
44 void NowNtp(uint32_t* ntp_secs, uint32_t* ntp_frac) const { | 44 NtpTime GetNowNtp() const { |
45 *ntp_secs = time_now_ms_ / 1000 + kNtpJan1970; | 45 uint32_t ntp_secs = time_now_ms_ / 1000 + kNtpJan1970; |
46 int64_t remainder_ms = time_now_ms_ % 1000; | 46 int64_t remainder_ms = time_now_ms_ % 1000; |
47 *ntp_frac = static_cast<uint32_t>( | 47 uint32_t ntp_frac = static_cast<uint32_t>( |
48 static_cast<double>(remainder_ms) * kNtpFracPerMs + 0.5); | 48 static_cast<double>(remainder_ms) * kNtpFracPerMs + 0.5); |
| 49 return NtpTime(ntp_secs, ntp_frac); |
49 } | 50 } |
50 | 51 |
51 uint32_t NowRtp(int frequency, uint32_t offset) const { | 52 uint32_t GetNowRtp(int frequency, uint32_t offset) const { |
52 return frequency * time_now_ms_ / 1000 + offset; | 53 return frequency * time_now_ms_ / 1000 + offset; |
53 } | 54 } |
54 | 55 |
55 void IncreaseTimeMs(int64_t inc) { | 56 void IncreaseTimeMs(int64_t inc) { |
56 time_now_ms_ += inc; | 57 time_now_ms_ += inc; |
57 } | 58 } |
58 | 59 |
59 int64_t time_now_ms() const { | 60 int64_t time_now_ms() const { |
60 return time_now_ms_; | 61 return time_now_ms_; |
61 } | 62 } |
(...skipping 36 matching lines...) |
98 int audio_offset = 0; | 99 int audio_offset = 0; |
99 int video_frequency = static_cast<int>(kDefaultVideoFrequency * | 100 int video_frequency = static_cast<int>(kDefaultVideoFrequency * |
100 video_clock_drift_ + 0.5); | 101 video_clock_drift_ + 0.5); |
101 bool new_sr; | 102 bool new_sr; |
102 int video_offset = 0; | 103 int video_offset = 0; |
103 StreamSynchronization::Measurements audio; | 104 StreamSynchronization::Measurements audio; |
104 StreamSynchronization::Measurements video; | 105 StreamSynchronization::Measurements video; |
105 // Generate NTP/RTP timestamp pair for both streams corresponding to RTCP. | 106 // Generate NTP/RTP timestamp pair for both streams corresponding to RTCP. |
106 RtcpMeasurement rtcp = | 107 RtcpMeasurement rtcp = |
107 send_time_->GenerateRtcp(audio_frequency, audio_offset); | 108 send_time_->GenerateRtcp(audio_frequency, audio_offset); |
108 EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_secs, rtcp.ntp_frac, rtcp.rtp_timestamp, | 109 EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_time.seconds(), |
| 110 rtcp.ntp_time.fractions(), rtcp.rtp_timestamp, |
109 &audio.rtcp, &new_sr)); | 111 &audio.rtcp, &new_sr)); |
110 send_time_->IncreaseTimeMs(100); | 112 send_time_->IncreaseTimeMs(100); |
111 receive_time_->IncreaseTimeMs(100); | 113 receive_time_->IncreaseTimeMs(100); |
112 rtcp = send_time_->GenerateRtcp(video_frequency, video_offset); | 114 rtcp = send_time_->GenerateRtcp(video_frequency, video_offset); |
113 EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_secs, rtcp.ntp_frac, rtcp.rtp_timestamp, | 115 EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_time.seconds(), |
| 116 rtcp.ntp_time.fractions(), rtcp.rtp_timestamp, |
114 &video.rtcp, &new_sr)); | 117 &video.rtcp, &new_sr)); |
115 send_time_->IncreaseTimeMs(900); | 118 send_time_->IncreaseTimeMs(900); |
116 receive_time_->IncreaseTimeMs(900); | 119 receive_time_->IncreaseTimeMs(900); |
117 rtcp = send_time_->GenerateRtcp(audio_frequency, audio_offset); | 120 rtcp = send_time_->GenerateRtcp(audio_frequency, audio_offset); |
118 EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_secs, rtcp.ntp_frac, rtcp.rtp_timestamp, | 121 EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_time.seconds(), |
| 122 rtcp.ntp_time.fractions(), rtcp.rtp_timestamp, |
119 &audio.rtcp, &new_sr)); | 123 &audio.rtcp, &new_sr)); |
120 send_time_->IncreaseTimeMs(100); | 124 send_time_->IncreaseTimeMs(100); |
121 receive_time_->IncreaseTimeMs(100); | 125 receive_time_->IncreaseTimeMs(100); |
122 rtcp = send_time_->GenerateRtcp(video_frequency, video_offset); | 126 rtcp = send_time_->GenerateRtcp(video_frequency, video_offset); |
123 EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_secs, rtcp.ntp_frac, rtcp.rtp_timestamp, | 127 EXPECT_TRUE(UpdateRtcpList(rtcp.ntp_time.seconds(), |
| 128 rtcp.ntp_time.fractions(), rtcp.rtp_timestamp, |
124 &video.rtcp, &new_sr)); | 129 &video.rtcp, &new_sr)); |
| 130 |
125 send_time_->IncreaseTimeMs(900); | 131 send_time_->IncreaseTimeMs(900); |
126 receive_time_->IncreaseTimeMs(900); | 132 receive_time_->IncreaseTimeMs(900); |
127 | 133 |
128 // Capture an audio and a video frame at the same time. | 134 // Capture an audio and a video frame at the same time. |
129 audio.latest_timestamp = send_time_->NowRtp(audio_frequency, | 135 audio.latest_timestamp = |
130 audio_offset); | 136 send_time_->GetNowRtp(audio_frequency, audio_offset); |
131 video.latest_timestamp = send_time_->NowRtp(video_frequency, | 137 video.latest_timestamp = |
132 video_offset); | 138 send_time_->GetNowRtp(video_frequency, video_offset); |
133 | 139 |
134 if (audio_delay_ms > video_delay_ms) { | 140 if (audio_delay_ms > video_delay_ms) { |
135 // Audio later than video. | 141 // Audio later than video. |
136 receive_time_->IncreaseTimeMs(video_delay_ms); | 142 receive_time_->IncreaseTimeMs(video_delay_ms); |
137 video.latest_receive_time_ms = receive_time_->time_now_ms(); | 143 video.latest_receive_time_ms = receive_time_->time_now_ms(); |
138 receive_time_->IncreaseTimeMs(audio_delay_ms - video_delay_ms); | 144 receive_time_->IncreaseTimeMs(audio_delay_ms - video_delay_ms); |
139 audio.latest_receive_time_ms = receive_time_->time_now_ms(); | 145 audio.latest_receive_time_ms = receive_time_->time_now_ms(); |
140 } else { | 146 } else { |
141 // Video later than audio. | 147 // Video later than audio. |
142 receive_time_->IncreaseTimeMs(audio_delay_ms); | 148 receive_time_->IncreaseTimeMs(audio_delay_ms); |
(...skipping 417 matching lines...) |
560 | 566 |
561 TEST_F(StreamSynchronizationTest, | 567 TEST_F(StreamSynchronizationTest, |
562 BothDelayedVideoLaterVideoClockDriftWithBaseDelay) { | 568 BothDelayedVideoLaterVideoClockDriftWithBaseDelay) { |
563 int base_target_delay_ms = 2000; | 569 int base_target_delay_ms = 2000; |
564 video_clock_drift_ = 1.05; | 570 video_clock_drift_ = 1.05; |
565 sync_->SetTargetBufferingDelay(base_target_delay_ms); | 571 sync_->SetTargetBufferingDelay(base_target_delay_ms); |
566 BothDelayedVideoLaterTest(base_target_delay_ms); | 572 BothDelayedVideoLaterTest(base_target_delay_ms); |
567 } | 573 } |
568 | 574 |
569 } // namespace webrtc | 575 } // namespace webrtc |
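
Note on the GetNowNtp() change above: it replaces the NowNtp(uint32_t*, uint32_t*) out-parameter pair with an NtpTime return value whose seconds()/fractions() accessors the call sites now pass to UpdateRtcpList(). The sketch below is a minimal, self-contained illustration of that conversion, assuming an NtpTime value type shaped like the one the patch uses and a kNtpFracPerMs constant of 2^32 / 1000 (NTP fractional units per millisecond); the names here are illustrative, not quoted from the patch.

#include <cstdint>

namespace {

// One NTP second is 2^32 fractional units, so one millisecond is 2^32 / 1000.
constexpr double kNtpFracPerMs = 4294967296.0 / 1000.0;

// Assumed shape of the NtpTime value type: an immutable (seconds, fractions) pair.
class NtpTime {
 public:
  NtpTime(uint32_t seconds, uint32_t fractions)
      : seconds_(seconds), fractions_(fractions) {}

  uint32_t seconds() const { return seconds_; }
  uint32_t fractions() const { return fractions_; }

 private:
  uint32_t seconds_;
  uint32_t fractions_;
};

// Mirrors what Time::GetNowNtp() does: split a millisecond clock into whole
// NTP seconds (shifted to the NTP epoch via the 1900-to-1970 offset) and a
// rounded fractional part.
NtpTime MsToNtp(int64_t time_now_ms, uint32_t ntp_jan_1970_secs) {
  uint32_t ntp_secs =
      static_cast<uint32_t>(time_now_ms / 1000) + ntp_jan_1970_secs;
  int64_t remainder_ms = time_now_ms % 1000;
  uint32_t ntp_frac = static_cast<uint32_t>(
      static_cast<double>(remainder_ms) * kNtpFracPerMs + 0.5);
  return NtpTime(ntp_secs, ntp_frac);
}

}  // namespace

With this shape, a caller that previously unpacked two out-parameters now reads the pair through the returned object, e.g. rtcp.ntp_time.seconds() and rtcp.ntp_time.fractions(), exactly as the updated UpdateRtcpList() calls in the test do.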