/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_coding/main/source/receiver.h"

#include <assert.h>

#include <cstdlib>

#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
#include "webrtc/system_wrappers/include/clock.h"

namespace webrtc {

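// Upper bound, in milliseconds, on the receiver delay that can be requested
// through SetMinReceiverDelay().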
enum { kMaxReceiverDelayMs = 10000 };

VCMReceiver::VCMReceiver(VCMTiming* timing,
                         Clock* clock,
                         EventFactory* event_factory)
    : VCMReceiver(timing,
                  clock,
                  rtc::scoped_ptr<EventWrapper>(event_factory->CreateEvent()),
                  rtc::scoped_ptr<EventWrapper>(event_factory->CreateEvent())) {
}

VCMReceiver::VCMReceiver(VCMTiming* timing,
                         Clock* clock,
                         rtc::scoped_ptr<EventWrapper> receiver_event,
                         rtc::scoped_ptr<EventWrapper> jitter_buffer_event)
    : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
      clock_(clock),
      jitter_buffer_(clock_, jitter_buffer_event.Pass()),
      timing_(timing),
      render_wait_event_(receiver_event.Pass()),
      max_video_delay_ms_(kMaxVideoDelayMs) {
  Reset();
}

VCMReceiver::~VCMReceiver() {
  render_wait_event_->Set();
  delete crit_sect_;
}

void VCMReceiver::Reset() {
  CriticalSectionScoped cs(crit_sect_);
  if (!jitter_buffer_.Running()) {
    jitter_buffer_.Start();
  } else {
    jitter_buffer_.Flush();
  }
}

void VCMReceiver::UpdateRtt(int64_t rtt) {
  jitter_buffer_.UpdateRtt(rtt);
}

int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
                                  uint16_t frame_width,
                                  uint16_t frame_height) {
  // Insert the packet into the jitter buffer. The packet can either be empty
  // or contain media at this point.
  bool retransmitted = false;
  const VCMFrameBufferEnum ret = jitter_buffer_.InsertPacket(packet,
                                                             &retransmitted);
  if (ret == kOldPacket) {
    return VCM_OK;
  } else if (ret == kFlushIndicator) {
    return VCM_FLUSH_INDICATOR;
  } else if (ret < 0) {
    return VCM_JITTER_BUFFER_ERROR;
  }
  if (ret == kCompleteSession && !retransmitted) {
    // We don't want to include timestamps which have suffered from
    // retransmission here, since we compensate with extra retransmission
    // delay within the jitter estimate.
    timing_->IncomingTimestamp(packet.timestamp, clock_->TimeInMilliseconds());
  }
  return VCM_OK;
}

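// Stops the jitter buffer and wakes any thread blocked in FrameForDecoding()
// so that the decoder thread can shut down promptly.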
void VCMReceiver::TriggerDecoderShutdown() {
  jitter_buffer_.Stop();
  render_wait_event_->Set();
}

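// Waits up to max_wait_time_ms for a complete frame, falling back to a
// possibly incomplete frame if the jitter buffer allows it. Returns NULL when
// no decodable frame is available in time or when a timing error forces the
// jitter buffer to be flushed; otherwise returns the frame with its render
// time set and updates next_render_time_ms.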
VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms,
                                               int64_t& next_render_time_ms,
                                               bool render_timing) {
  const int64_t start_time_ms = clock_->TimeInMilliseconds();
  uint32_t frame_timestamp = 0;
  // Exhaust wait time to get a complete frame for decoding.
  bool found_frame = jitter_buffer_.NextCompleteTimestamp(
      max_wait_time_ms, &frame_timestamp);

  if (!found_frame)
    found_frame = jitter_buffer_.NextMaybeIncompleteTimestamp(&frame_timestamp);

  if (!found_frame)
    return NULL;

  // We have a frame. Set timing and render timestamp.
  timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
  const int64_t now_ms = clock_->TimeInMilliseconds();
  timing_->UpdateCurrentDelay(frame_timestamp);
  next_render_time_ms = timing_->RenderTimeMs(frame_timestamp, now_ms);
  // Check render timing.
  bool timing_error = false;
  // Assume that render timing errors are due to changes in the video stream.
  if (next_render_time_ms < 0) {
    timing_error = true;
  } else if (std::abs(next_render_time_ms - now_ms) > max_video_delay_ms_) {
    int frame_delay = static_cast<int>(std::abs(next_render_time_ms - now_ms));
    LOG(LS_WARNING) << "A frame about to be decoded is out of the configured "
                    << "delay bounds (" << frame_delay << " > "
                    << max_video_delay_ms_
                    << "). Resetting the video jitter buffer.";
    timing_error = true;
  } else if (static_cast<int>(timing_->TargetVideoDelay()) >
             max_video_delay_ms_) {
    LOG(LS_WARNING) << "The video target delay has grown larger than "
                    << max_video_delay_ms_ << " ms. Resetting jitter buffer.";
    timing_error = true;
  }

  if (timing_error) {
    // Timing error => reset timing and flush the jitter buffer.
    jitter_buffer_.Flush();
    timing_->Reset();
    return NULL;
  }

  if (!render_timing) {
    // Decode frame as close as possible to the render timestamp.
    const int32_t available_wait_time = max_wait_time_ms -
        static_cast<int32_t>(clock_->TimeInMilliseconds() - start_time_ms);
    uint16_t new_max_wait_time = static_cast<uint16_t>(
        VCM_MAX(available_wait_time, 0));
    uint32_t wait_time_ms = timing_->MaxWaitingTime(
        next_render_time_ms, clock_->TimeInMilliseconds());
    if (new_max_wait_time < wait_time_ms) {
      // We're not allowed to wait until the frame is supposed to be rendered;
      // wait as long as we're allowed (to avoid busy looping) and then return
      // NULL. The next call to this function might return the frame.
      render_wait_event_->Wait(new_max_wait_time);
      return NULL;
    }
    // Wait until it's time to render.
    render_wait_event_->Wait(wait_time_ms);
  }

  // Extract the frame from the jitter buffer and set the render time.
  VCMEncodedFrame* frame = jitter_buffer_.ExtractAndSetDecode(frame_timestamp);
  if (frame == NULL) {
    return NULL;
  }
  frame->SetRenderTime(next_render_time_ms);
  TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", frame->TimeStamp(),
                          "SetRenderTS", "render_time", next_render_time_ms);
  if (!frame->Complete()) {
    // Update stats for incomplete frames.
    bool retransmitted = false;
    const int64_t last_packet_time_ms =
        jitter_buffer_.LastPacketTime(frame, &retransmitted);
    if (last_packet_time_ms >= 0 && !retransmitted) {
      // We don't want to include timestamps which have suffered from
      // retransmission here, since we compensate with extra retransmission
      // delay within the jitter estimate.
      timing_->IncomingTimestamp(frame_timestamp, last_packet_time_ms);
    }
  }
  return frame;
}

void VCMReceiver::ReleaseFrame(VCMEncodedFrame* frame) {
  jitter_buffer_.ReleaseFrame(frame);
}

void VCMReceiver::ReceiveStatistics(uint32_t* bitrate,
                                    uint32_t* framerate) {
  assert(bitrate);
  assert(framerate);
  jitter_buffer_.IncomingRateStatistics(framerate, bitrate);
}

uint32_t VCMReceiver::DiscardedPackets() const {
  return jitter_buffer_.num_discarded_packets();
}

void VCMReceiver::SetNackMode(VCMNackMode nackMode,
                              int64_t low_rtt_nack_threshold_ms,
                              int64_t high_rtt_nack_threshold_ms) {
  CriticalSectionScoped cs(crit_sect_);
  // Default to always having NACK enabled in hybrid mode.
  jitter_buffer_.SetNackMode(nackMode, low_rtt_nack_threshold_ms,
                             high_rtt_nack_threshold_ms);
}

void VCMReceiver::SetNackSettings(size_t max_nack_list_size,
                                  int max_packet_age_to_nack,
                                  int max_incomplete_time_ms) {
  jitter_buffer_.SetNackSettings(max_nack_list_size,
                                 max_packet_age_to_nack,
                                 max_incomplete_time_ms);
}

VCMNackMode VCMReceiver::NackMode() const {
  CriticalSectionScoped cs(crit_sect_);
  return jitter_buffer_.nack_mode();
}

std::vector<uint16_t> VCMReceiver::NackList(bool* request_key_frame) {
  return jitter_buffer_.GetNackList(request_key_frame);
}

void VCMReceiver::SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode) {
  jitter_buffer_.SetDecodeErrorMode(decode_error_mode);
}

VCMDecodeErrorMode VCMReceiver::DecodeErrorMode() const {
  return jitter_buffer_.decode_error_mode();
}

int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
  CriticalSectionScoped cs(crit_sect_);
  if (desired_delay_ms < 0 || desired_delay_ms > kMaxReceiverDelayMs) {
    return -1;
  }
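  // Extend the delay bound that FrameForDecoding() uses to detect timing
  // errors by the requested delay.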
  max_video_delay_ms_ = desired_delay_ms + kMaxVideoDelayMs;
  // Initialize timing to the desired delay.
  timing_->set_min_playout_delay(desired_delay_ms);
  return 0;
}

int VCMReceiver::RenderBufferSizeMs() {
  uint32_t timestamp_start = 0u;
  uint32_t timestamp_end = 0u;
  // Render timestamps are computed just prior to decoding. Therefore this is
  // only an estimate based on frames' timestamps and current timing state.
  jitter_buffer_.RenderBufferSize(&timestamp_start, &timestamp_end);
  if (timestamp_start == timestamp_end) {
    return 0;
  }
  // Update timing.
  const int64_t now_ms = clock_->TimeInMilliseconds();
  timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
  // Get render timestamps.
  uint32_t render_start = timing_->RenderTimeMs(timestamp_start, now_ms);
  uint32_t render_end = timing_->RenderTimeMs(timestamp_end, now_ms);
  return render_end - render_start;
}

void VCMReceiver::RegisterStatsCallback(
    VCMReceiveStatisticsCallback* callback) {
  jitter_buffer_.RegisterStatsCallback(callback);
}

}  // namespace webrtc