OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "webrtc/video/vie_encoder.h" | 11 #include "webrtc/video/vie_encoder.h" |
12 | 12 |
13 #include <algorithm> | 13 #include <algorithm> |
14 #include <limits> | 14 #include <limits> |
15 | 15 |
16 #include "webrtc/base/checks.h" | 16 #include "webrtc/base/checks.h" |
17 #include "webrtc/base/logging.h" | 17 #include "webrtc/base/logging.h" |
18 #include "webrtc/base/trace_event.h" | 18 #include "webrtc/base/trace_event.h" |
19 #include "webrtc/base/timeutils.h" | 19 #include "webrtc/base/timeutils.h" |
20 #include "webrtc/modules/pacing/paced_sender.h" | 20 #include "webrtc/modules/pacing/paced_sender.h" |
21 #include "webrtc/modules/video_coding/include/video_coding.h" | 21 #include "webrtc/modules/video_coding/include/video_coding.h" |
22 #include "webrtc/modules/video_coding/include/video_coding_defines.h" | 22 #include "webrtc/modules/video_coding/include/video_coding_defines.h" |
23 #include "webrtc/system_wrappers/include/metrics.h" | 23 #include "webrtc/system_wrappers/include/metrics.h" |
24 #include "webrtc/video/overuse_frame_detector.h" | 24 #include "webrtc/video/overuse_frame_detector.h" |
25 #include "webrtc/video/send_statistics_proxy.h" | 25 #include "webrtc/video/send_statistics_proxy.h" |
26 #include "webrtc/video_frame.h" | 26 #include "webrtc/video_frame.h" |
27 | 27 |
28 namespace webrtc { | 28 namespace webrtc { |
29 | 29 |
| 30 namespace { |
| 31 |
| 32 VideoCodecType PayloadNameToCodecType(const std::string& payload_name) { |
| 33 if (payload_name == "VP8") |
| 34 return kVideoCodecVP8; |
| 35 if (payload_name == "VP9") |
| 36 return kVideoCodecVP9; |
| 37 if (payload_name == "H264") |
| 38 return kVideoCodecH264; |
| 39 return kVideoCodecGeneric; |
| 40 } |
| 41 |
| 42 VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config, |
| 43 const std::string& payload_name, |
| 44 int payload_type) { |
| 45 const std::vector<VideoStream>& streams = config.streams; |
| 46 static const int kEncoderMinBitrateKbps = 30; |
| 47 RTC_DCHECK(!streams.empty()); |
| 48 RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0); |
| 49 |
| 50 VideoCodec video_codec; |
| 51 memset(&video_codec, 0, sizeof(video_codec)); |
| 52 video_codec.codecType = PayloadNameToCodecType(payload_name); |
| 53 |
| 54 switch (config.content_type) { |
| 55 case VideoEncoderConfig::ContentType::kRealtimeVideo: |
| 56 video_codec.mode = kRealtimeVideo; |
| 57 break; |
| 58 case VideoEncoderConfig::ContentType::kScreen: |
| 59 video_codec.mode = kScreensharing; |
| 60 if (config.streams.size() == 1 && |
| 61 config.streams[0].temporal_layer_thresholds_bps.size() == 1) { |
| 62 video_codec.targetBitrate = |
| 63 config.streams[0].temporal_layer_thresholds_bps[0] / 1000; |
| 64 } |
| 65 break; |
| 66 } |
| 67 |
| 68 switch (video_codec.codecType) { |
| 69 case kVideoCodecVP8: { |
| 70 if (config.encoder_specific_settings) { |
| 71 video_codec.codecSpecific.VP8 = *reinterpret_cast<const VideoCodecVP8*>( |
| 72 config.encoder_specific_settings); |
| 73 } else { |
| 74 video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings(); |
| 75 } |
| 76 video_codec.codecSpecific.VP8.numberOfTemporalLayers = |
| 77 static_cast<unsigned char>( |
| 78 streams.back().temporal_layer_thresholds_bps.size() + 1); |
| 79 break; |
| 80 } |
| 81 case kVideoCodecVP9: { |
| 82 if (config.encoder_specific_settings) { |
| 83 video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>( |
| 84 config.encoder_specific_settings); |
| 85 if (video_codec.mode == kScreensharing) { |
| 86 video_codec.codecSpecific.VP9.flexibleMode = true; |
| 87 // For now, VP9 screensharing uses 1 temporal and 2 spatial layers. |
| 88 RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers, |
| 89 1); |
| 90 RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2); |
| 91 } |
| 92 } else { |
| 93 video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings(); |
| 94 } |
| 95 video_codec.codecSpecific.VP9.numberOfTemporalLayers = |
| 96 static_cast<unsigned char>( |
| 97 streams.back().temporal_layer_thresholds_bps.size() + 1); |
| 98 break; |
| 99 } |
| 100 case kVideoCodecH264: { |
| 101 if (config.encoder_specific_settings) { |
| 102 video_codec.codecSpecific.H264 = |
| 103 *reinterpret_cast<const VideoCodecH264*>( |
| 104 config.encoder_specific_settings); |
| 105 } else { |
| 106 video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings(); |
| 107 } |
| 108 break; |
| 109 } |
| 110 default: |
| 111 // TODO(pbos): Support encoder_settings codec-agnostically. |
| 112 RTC_DCHECK(!config.encoder_specific_settings) |
| 113 << "Encoder-specific settings for codec type not wired up."; |
| 114 break; |
| 115 } |
| 116 |
| 117 strncpy(video_codec.plName, payload_name.c_str(), kPayloadNameSize - 1); |
| 118 video_codec.plName[kPayloadNameSize - 1] = '\0'; |
| 119 video_codec.plType = payload_type; |
| 120 video_codec.numberOfSimulcastStreams = |
| 121 static_cast<unsigned char>(streams.size()); |
| 122 video_codec.minBitrate = streams[0].min_bitrate_bps / 1000; |
| 123 if (video_codec.minBitrate < kEncoderMinBitrateKbps) |
| 124 video_codec.minBitrate = kEncoderMinBitrateKbps; |
| 125 RTC_DCHECK_LE(streams.size(), static_cast<size_t>(kMaxSimulcastStreams)); |
| 126 if (video_codec.codecType == kVideoCodecVP9) { |
| 127 // If the vector is empty, bitrates will be configured automatically. |
| 128 RTC_DCHECK(config.spatial_layers.empty() || |
| 129 config.spatial_layers.size() == |
| 130 video_codec.codecSpecific.VP9.numberOfSpatialLayers); |
| 131 RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers, |
| 132 kMaxSimulcastStreams); |
| 133 for (size_t i = 0; i < config.spatial_layers.size(); ++i) |
| 134 video_codec.spatialLayers[i] = config.spatial_layers[i]; |
| 135 } |
| 136 for (size_t i = 0; i < streams.size(); ++i) { |
| 137 SimulcastStream* sim_stream = &video_codec.simulcastStream[i]; |
| 138 RTC_DCHECK_GT(streams[i].width, 0u); |
| 139 RTC_DCHECK_GT(streams[i].height, 0u); |
| 140 RTC_DCHECK_GT(streams[i].max_framerate, 0); |
| 141 // Different framerates are not supported per stream at the moment. |
| 142 RTC_DCHECK_EQ(streams[i].max_framerate, streams[0].max_framerate); |
| 143 RTC_DCHECK_GE(streams[i].min_bitrate_bps, 0); |
| 144 RTC_DCHECK_GE(streams[i].target_bitrate_bps, streams[i].min_bitrate_bps); |
| 145 RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps); |
| 146 RTC_DCHECK_GE(streams[i].max_qp, 0); |
| 147 |
| 148 sim_stream->width = static_cast<uint16_t>(streams[i].width); |
| 149 sim_stream->height = static_cast<uint16_t>(streams[i].height); |
| 150 sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000; |
| 151 sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000; |
| 152 sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000; |
| 153 sim_stream->qpMax = streams[i].max_qp; |
| 154 sim_stream->numberOfTemporalLayers = static_cast<unsigned char>( |
| 155 streams[i].temporal_layer_thresholds_bps.size() + 1); |
| 156 |
| 157 video_codec.width = |
| 158 std::max(video_codec.width, static_cast<uint16_t>(streams[i].width)); |
| 159 video_codec.height = |
| 160 std::max(video_codec.height, static_cast<uint16_t>(streams[i].height)); |
| 161 video_codec.minBitrate = |
| 162 std::min(static_cast<uint16_t>(video_codec.minBitrate), |
| 163 static_cast<uint16_t>(streams[i].min_bitrate_bps / 1000)); |
| 164 video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000; |
| 165 video_codec.qpMax = std::max(video_codec.qpMax, |
| 166 static_cast<unsigned int>(streams[i].max_qp)); |
| 167 } |
| 168 |
| 169 if (video_codec.maxBitrate == 0) { |
| 170 // Unset max bitrate -> cap to one bit per pixel. |
| 171 video_codec.maxBitrate = |
| 172 (video_codec.width * video_codec.height * video_codec.maxFramerate) / |
| 173 1000; |
| 174 } |
| 175 if (video_codec.maxBitrate < kEncoderMinBitrateKbps) |
| 176 video_codec.maxBitrate = kEncoderMinBitrateKbps; |
| 177 |
| 178 RTC_DCHECK_GT(streams[0].max_framerate, 0); |
| 179 video_codec.maxFramerate = streams[0].max_framerate; |
| 180 video_codec.expect_encode_from_texture = config.expect_encode_from_texture; |
| 181 |
| 182 return video_codec; |
| 183 } |
| 184 |
| 185 // TODO(pbos): Lower these thresholds (to closer to 100%) when we handle |
| 186 // pipelining encoders better (multiple input frames before something comes |
| 187 // out). This should effectively turn off CPU adaptations for systems that |
| 188 // remotely cope with the load right now. |
| 189 CpuOveruseOptions GetCpuOveruseOptions(bool full_overuse_time) { |
| 190 CpuOveruseOptions options; |
| 191 if (full_overuse_time) { |
| 192 options.low_encode_usage_threshold_percent = 150; |
| 193 options.high_encode_usage_threshold_percent = 200; |
| 194 } |
| 195 return options; |
| 196 } |
| 197 |
| 198 } // namespace |
| 199 |
| 200 class ViEEncoder::EncodeTask : public rtc::QueuedTask { |
| 201 public: |
| 202 EncodeTask(const VideoFrame& frame, ViEEncoder* vie_encoder) |
| 203 : vie_encoder_(vie_encoder) { |
| 204 frame_.ShallowCopy(frame); |
| 205 ++vie_encoder_->posted_frames_waiting_for_encode_; |
| 206 } |
| 207 |
| 208 private: |
| 209 bool Run() override { |
| 210 RTC_DCHECK_GT(vie_encoder_->posted_frames_waiting_for_encode_.Value(), 0); |
| 211 if (--vie_encoder_->posted_frames_waiting_for_encode_ == 0) { |
| 212 vie_encoder_->EncodeVideoFrame(frame_); |
| 213 } else { |
| 214 // There is a newer frame in flight. Do not encode this frame. |
| 215 LOG(LS_VERBOSE) |
| 216 << "Incoming frame dropped due to that the encoder is blocked."; |
| 217 } |
| 218 return true; |
| 219 } |
| 220 VideoFrame frame_; |
| 221 ViEEncoder* vie_encoder_; |
| 222 }; |
| 223 |
30 ViEEncoder::ViEEncoder(uint32_t number_of_cores, | 224 ViEEncoder::ViEEncoder(uint32_t number_of_cores, |
31 ProcessThread* module_process_thread, | |
32 SendStatisticsProxy* stats_proxy, | 225 SendStatisticsProxy* stats_proxy, |
33 OveruseFrameDetector* overuse_detector, | 226 const VideoSendStream::Config::EncoderSettings& settings, |
34 EncodedImageCallback* sink) | 227 rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback, |
35 : number_of_cores_(number_of_cores), | 228 LoadObserver* overuse_callback, |
36 sink_(sink), | 229 EncodedFrameObserver* encoder_timing) |
| 230 : shutdown_event_(true /* manual_reset */, false), |
| 231 number_of_cores_(number_of_cores), |
| 232 settings_(settings), |
37 vp_(VideoProcessing::Create()), | 233 vp_(VideoProcessing::Create()), |
38 video_sender_(Clock::GetRealTimeClock(), this, this), | 234 video_sender_(Clock::GetRealTimeClock(), this, this), |
| 235 overuse_detector_(Clock::GetRealTimeClock(), |
| 236 GetCpuOveruseOptions(settings.full_overuse_time), |
| 237 this, |
| 238 encoder_timing, |
| 239 stats_proxy), |
| 240 load_observer_(overuse_callback), |
39 stats_proxy_(stats_proxy), | 241 stats_proxy_(stats_proxy), |
40 overuse_detector_(overuse_detector), | 242 pre_encode_callback_(pre_encode_callback), |
41 time_of_last_frame_activity_ms_(std::numeric_limits<int64_t>::max()), | 243 module_process_thread_(nullptr), |
42 encoder_config_(), | 244 encoder_config_(), |
| 245 encoder_start_bitrate_bps_(0), |
43 last_observed_bitrate_bps_(0), | 246 last_observed_bitrate_bps_(0), |
44 encoder_paused_and_dropped_frame_(false), | 247 encoder_paused_and_dropped_frame_(false), |
45 module_process_thread_(module_process_thread), | |
46 has_received_sli_(false), | 248 has_received_sli_(false), |
47 picture_id_sli_(0), | 249 picture_id_sli_(0), |
48 has_received_rpsi_(false), | 250 has_received_rpsi_(false), |
49 picture_id_rpsi_(0), | 251 picture_id_rpsi_(0), |
50 video_suspended_(false) { | 252 clock_(Clock::GetRealTimeClock()), |
| 253 last_captured_timestamp_(0), |
| 254 delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() - |
| 255 clock_->TimeInMilliseconds()), |
| 256 encoder_queue_("EncoderQueue") { |
| 257 vp_->EnableTemporalDecimation(false); |
| 258 |
| 259 encoder_queue_.PostTask([this] { |
| 260 RTC_DCHECK_RUN_ON(&encoder_queue_); |
| 261 video_sender_.RegisterExternalEncoder( |
| 262 settings_.encoder, settings_.payload_type, settings_.internal_source); |
| 263 }); |
| 264 } |
| 265 |
| 266 ViEEncoder::~ViEEncoder() { |
| 267 RTC_DCHECK(shutdown_event_.Wait(0)) |
| 268 << "Must call ::Stop() before destruction."; |
| 269 } |
| 270 |
| 271 void ViEEncoder::Stop() { |
| 272 if (!encoder_queue_.IsCurrent()) { |
| 273 encoder_queue_.PostTask([this] { Stop(); }); |
| 274 shutdown_event_.Wait(rtc::Event::kForever); |
| 275 return; |
| 276 } |
| 277 RTC_DCHECK_RUN_ON(&encoder_queue_); |
| 278 video_sender_.RegisterExternalEncoder(nullptr, settings_.payload_type, false); |
| 279 shutdown_event_.Set(); |
| 280 } |
| 281 |
| 282 void ViEEncoder::RegisterProcessThread(ProcessThread* module_process_thread) { |
| 283 RTC_DCHECK(!module_process_thread_); |
| 284 module_process_thread_ = module_process_thread; |
| 285 module_process_thread_->RegisterModule(&overuse_detector_); |
51 module_process_thread_->RegisterModule(&video_sender_); | 286 module_process_thread_->RegisterModule(&video_sender_); |
52 vp_->EnableTemporalDecimation(true); | 287 module_process_thread_checker_.DetachFromThread(); |
53 } | 288 } |
54 | 289 |
55 vcm::VideoSender* ViEEncoder::video_sender() { | 290 void ViEEncoder::DeRegisterProcessThread() { |
56 return &video_sender_; | 291 module_process_thread_->DeRegisterModule(&overuse_detector_); |
57 } | |
58 | |
59 ViEEncoder::~ViEEncoder() { | |
60 module_process_thread_->DeRegisterModule(&video_sender_); | 292 module_process_thread_->DeRegisterModule(&video_sender_); |
61 } | 293 } |
62 | 294 |
63 int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder, | 295 void ViEEncoder::SetSink(EncodedImageCallback* sink) { |
64 uint8_t pl_type, | 296 encoder_queue_.PostTask([this, sink] { |
65 bool internal_source) { | 297 RTC_DCHECK_RUN_ON(&encoder_queue_); |
66 video_sender_.RegisterExternalEncoder(encoder, pl_type, internal_source); | 298 sink_ = sink; |
67 return 0; | 299 }); |
68 } | 300 } |
69 | 301 |
70 int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) { | 302 void ViEEncoder::SetStartBitrate(int start_bitrate_bps) { |
71 video_sender_.RegisterExternalEncoder(nullptr, pl_type, false); | 303 encoder_queue_.PostTask([this, start_bitrate_bps] { |
72 return 0; | 304 RTC_DCHECK_RUN_ON(&encoder_queue_); |
73 } | 305 encoder_start_bitrate_bps_ = start_bitrate_bps; |
74 | 306 }); |
75 void ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec, | 307 } |
76 size_t max_data_payload_length) { | 308 |
| 309 void ViEEncoder::ConfigureEncoder(const VideoEncoderConfig& config, |
| 310 size_t max_data_payload_length) { |
| 311 VideoCodec video_codec = VideoEncoderConfigToVideoCodec( |
| 312 config, settings_.payload_name, settings_.payload_type); |
| 313 encoder_queue_.PostTask([this, video_codec, max_data_payload_length] { |
| 314 ConfigureEncoderInternal(video_codec, max_data_payload_length); |
| 315 }); |
| 316 return; |
| 317 } |
| 318 |
| 319 void ViEEncoder::ConfigureEncoderInternal(const VideoCodec& video_codec, |
| 320 size_t max_data_payload_length) { |
| 321 RTC_DCHECK_RUN_ON(&encoder_queue_); |
| 322 RTC_DCHECK_GE(encoder_start_bitrate_bps_, 0); |
| 323 RTC_DCHECK(sink_); |
| 324 |
77 // Setting target width and height for VPM. | 325 // Setting target width and height for VPM. |
78 RTC_CHECK_EQ(VPM_OK, | 326 RTC_CHECK_EQ(VPM_OK, |
79 vp_->SetTargetResolution(video_codec.width, video_codec.height, | 327 vp_->SetTargetResolution(video_codec.width, video_codec.height, |
80 video_codec.maxFramerate)); | 328 video_codec.maxFramerate)); |
81 { | 329 |
82 rtc::CritScope lock(&data_cs_); | 330 encoder_config_ = video_codec; |
83 encoder_config_ = video_codec; | 331 encoder_config_.startBitrate = encoder_start_bitrate_bps_ / 1000; |
84 } | 332 encoder_config_.startBitrate = |
| 333 std::max(encoder_config_.startBitrate, video_codec.minBitrate); |
| 334 encoder_config_.startBitrate = |
| 335 std::min(encoder_config_.startBitrate, video_codec.maxBitrate); |
85 | 336 |
86 bool success = video_sender_.RegisterSendCodec( | 337 bool success = video_sender_.RegisterSendCodec( |
87 &video_codec, number_of_cores_, | 338 &encoder_config_, number_of_cores_, |
88 static_cast<uint32_t>(max_data_payload_length)) == VCM_OK; | 339 static_cast<uint32_t>(max_data_payload_length)) == VCM_OK; |
89 | 340 |
90 if (!success) { | 341 if (!success) { |
91 LOG(LS_ERROR) << "Failed to configure encoder."; | 342 LOG(LS_ERROR) << "Failed to configure encoder."; |
92 RTC_DCHECK(success); | 343 RTC_DCHECK(success); |
93 } | 344 } |
94 | 345 |
95 if (stats_proxy_) { | 346 if (stats_proxy_) { |
96 VideoEncoderConfig::ContentType content_type = | 347 VideoEncoderConfig::ContentType content_type = |
97 VideoEncoderConfig::ContentType::kRealtimeVideo; | 348 VideoEncoderConfig::ContentType::kRealtimeVideo; |
98 switch (video_codec.mode) { | 349 switch (video_codec.mode) { |
99 case kRealtimeVideo: | 350 case kRealtimeVideo: |
100 content_type = VideoEncoderConfig::ContentType::kRealtimeVideo; | 351 content_type = VideoEncoderConfig::ContentType::kRealtimeVideo; |
101 break; | 352 break; |
102 case kScreensharing: | 353 case kScreensharing: |
103 content_type = VideoEncoderConfig::ContentType::kScreen; | 354 content_type = VideoEncoderConfig::ContentType::kScreen; |
104 break; | 355 break; |
105 default: | 356 default: |
106 RTC_NOTREACHED(); | 357 RTC_NOTREACHED(); |
107 break; | 358 break; |
108 } | 359 } |
109 stats_proxy_->SetContentType(content_type); | 360 stats_proxy_->SetContentType(content_type); |
110 } | 361 } |
111 } | 362 } |
112 | 363 |
| 364 void ViEEncoder::IncomingCapturedFrame(const VideoFrame& video_frame) { |
| 365 RTC_DCHECK_RUNS_SERIALIZED(&incoming_frame_race_checker_); |
| 366 stats_proxy_->OnIncomingFrame(video_frame.width(), video_frame.height()); |
| 367 |
| 368 VideoFrame incoming_frame = video_frame; |
| 369 |
| 370 // Local time in webrtc time base. |
| 371 int64_t current_time = clock_->TimeInMilliseconds(); |
| 372 incoming_frame.set_render_time_ms(current_time); |
| 373 |
| 374 // Capture time may come from a clock with an offset and drift relative to clock_. |
| 375 int64_t capture_ntp_time_ms; |
| 376 if (video_frame.ntp_time_ms() != 0) { |
| 377 capture_ntp_time_ms = video_frame.ntp_time_ms(); |
| 378 } else if (video_frame.render_time_ms() != 0) { |
| 379 capture_ntp_time_ms = video_frame.render_time_ms() + delta_ntp_internal_ms_; |
| 380 } else { |
| 381 capture_ntp_time_ms = current_time + delta_ntp_internal_ms_; |
| 382 } |
| 383 incoming_frame.set_ntp_time_ms(capture_ntp_time_ms); |
| 384 |
| 385 // Convert NTP time, in ms, to RTP timestamp. |
| 386 const int kMsToRtpTimestamp = 90; |
| 387 incoming_frame.set_timestamp( |
| 388 kMsToRtpTimestamp * static_cast<uint32_t>(incoming_frame.ntp_time_ms())); |
| 389 |
| 390 if (incoming_frame.ntp_time_ms() <= last_captured_timestamp_) { |
| 391 // We don't allow the same capture time for two frames; drop this one. |
| 392 LOG(LS_WARNING) << "Same/old NTP timestamp (" |
| 393 << incoming_frame.ntp_time_ms() |
| 394 << " <= " << last_captured_timestamp_ |
| 395 << ") for incoming frame. Dropping."; |
| 396 return; |
| 397 } |
| 398 |
| 399 last_captured_timestamp_ = incoming_frame.ntp_time_ms(); |
| 400 overuse_detector_.FrameCaptured(incoming_frame); |
| 401 encoder_queue_.PostTask( |
| 402 std::unique_ptr<rtc::QueuedTask>(new EncodeTask(incoming_frame, this))); |
| 403 } |
| 404 |
113 bool ViEEncoder::EncoderPaused() const { | 405 bool ViEEncoder::EncoderPaused() const { |
| 406 RTC_DCHECK_RUN_ON(&encoder_queue_); |
114 // Pause video if paused by caller or as long as the network is down or the | 407 // Pause video if paused by caller or as long as the network is down or the |
115 // pacer queue has grown too large in buffered mode. | 408 // pacer queue has grown too large in buffered mode. |
116 // If the pacer queue has grown too large or the network is down, | 409 // If the pacer queue has grown too large or the network is down, |
117 // last_observed_bitrate_bps_ will be 0. | 410 // last_observed_bitrate_bps_ will be 0. |
118 return video_suspended_ || last_observed_bitrate_bps_ == 0; | 411 return last_observed_bitrate_bps_ == 0; |
119 } | 412 } |
120 | 413 |
121 void ViEEncoder::TraceFrameDropStart() { | 414 void ViEEncoder::TraceFrameDropStart() { |
| 415 RTC_DCHECK_RUN_ON(&encoder_queue_); |
122 // Start trace event only on the first frame after encoder is paused. | 416 // Start trace event only on the first frame after encoder is paused. |
123 if (!encoder_paused_and_dropped_frame_) { | 417 if (!encoder_paused_and_dropped_frame_) { |
124 TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this); | 418 TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this); |
125 } | 419 } |
126 encoder_paused_and_dropped_frame_ = true; | 420 encoder_paused_and_dropped_frame_ = true; |
127 return; | 421 return; |
128 } | 422 } |
129 | 423 |
130 void ViEEncoder::TraceFrameDropEnd() { | 424 void ViEEncoder::TraceFrameDropEnd() { |
| 425 RTC_DCHECK_RUN_ON(&encoder_queue_); |
131 // End trace event on first frame after encoder resumes, if frame was dropped. | 426 // End trace event on first frame after encoder resumes, if frame was dropped. |
132 if (encoder_paused_and_dropped_frame_) { | 427 if (encoder_paused_and_dropped_frame_) { |
133 TRACE_EVENT_ASYNC_END0("webrtc", "EncoderPaused", this); | 428 TRACE_EVENT_ASYNC_END0("webrtc", "EncoderPaused", this); |
134 } | 429 } |
135 encoder_paused_and_dropped_frame_ = false; | 430 encoder_paused_and_dropped_frame_ = false; |
136 } | 431 } |
137 | 432 |
138 void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) { | 433 void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) { |
139 VideoCodecType codec_type; | 434 RTC_DCHECK_RUN_ON(&encoder_queue_); |
140 { | 435 if (pre_encode_callback_) |
141 rtc::CritScope lock(&data_cs_); | 436 pre_encode_callback_->OnFrame(video_frame); |
142 time_of_last_frame_activity_ms_ = rtc::TimeMillis(); | 437 |
143 if (EncoderPaused()) { | 438 if (EncoderPaused()) { |
144 TraceFrameDropStart(); | 439 TraceFrameDropStart(); |
145 return; | 440 return; |
146 } | |
147 TraceFrameDropEnd(); | |
148 codec_type = encoder_config_.codecType; | |
149 } | 441 } |
| 442 TraceFrameDropEnd(); |
150 | 443 |
151 TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(), | 444 TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(), |
152 "Encode"); | 445 "Encode"); |
153 const VideoFrame* frame_to_send = &video_frame; | 446 const VideoFrame* frame_to_send = &video_frame; |
154 // TODO(wuchengli): support texture frames. | 447 // TODO(wuchengli): support texture frames. |
155 if (!video_frame.video_frame_buffer()->native_handle()) { | 448 if (!video_frame.video_frame_buffer()->native_handle()) { |
156 // Pass frame via preprocessor. | 449 // Pass frame via preprocessor. |
157 frame_to_send = vp_->PreprocessFrame(video_frame); | 450 frame_to_send = vp_->PreprocessFrame(video_frame); |
158 if (!frame_to_send) { | 451 if (!frame_to_send) { |
159 // The preprocessor dropped this frame, or an error occurred while processing it. | 452 // The preprocessor dropped this frame, or an error occurred while processing it. |
160 return; | 453 return; |
161 } | 454 } |
162 } | 455 } |
163 | 456 |
164 if (codec_type == webrtc::kVideoCodecVP8) { | 457 if (encoder_config_.codecType == webrtc::kVideoCodecVP8) { |
165 webrtc::CodecSpecificInfo codec_specific_info; | 458 webrtc::CodecSpecificInfo codec_specific_info; |
166 codec_specific_info.codecType = webrtc::kVideoCodecVP8; | 459 codec_specific_info.codecType = webrtc::kVideoCodecVP8; |
167 { | 460 |
168 rtc::CritScope lock(&data_cs_); | |
169 codec_specific_info.codecSpecific.VP8.hasReceivedRPSI = | 461 codec_specific_info.codecSpecific.VP8.hasReceivedRPSI = |
170 has_received_rpsi_; | 462 has_received_rpsi_; |
171 codec_specific_info.codecSpecific.VP8.hasReceivedSLI = | 463 codec_specific_info.codecSpecific.VP8.hasReceivedSLI = |
172 has_received_sli_; | 464 has_received_sli_; |
173 codec_specific_info.codecSpecific.VP8.pictureIdRPSI = | 465 codec_specific_info.codecSpecific.VP8.pictureIdRPSI = |
174 picture_id_rpsi_; | 466 picture_id_rpsi_; |
175 codec_specific_info.codecSpecific.VP8.pictureIdSLI = | 467 codec_specific_info.codecSpecific.VP8.pictureIdSLI = |
176 picture_id_sli_; | 468 picture_id_sli_; |
177 has_received_sli_ = false; | 469 has_received_sli_ = false; |
178 has_received_rpsi_ = false; | 470 has_received_rpsi_ = false; |
179 } | |
180 | 471 |
181 video_sender_.AddVideoFrame(*frame_to_send, &codec_specific_info); | 472 video_sender_.AddVideoFrame(*frame_to_send, &codec_specific_info); |
182 return; | 473 return; |
183 } | 474 } |
184 video_sender_.AddVideoFrame(*frame_to_send, nullptr); | 475 video_sender_.AddVideoFrame(*frame_to_send, nullptr); |
185 } | 476 } |
186 | 477 |
187 void ViEEncoder::SendKeyFrame() { | 478 void ViEEncoder::SendKeyFrame() { |
| 479 if (!encoder_queue_.IsCurrent()) { |
| 480 encoder_queue_.PostTask([this] { SendKeyFrame(); }); |
| 481 return; |
| 482 } |
| 483 RTC_DCHECK_RUN_ON(&encoder_queue_); |
188 video_sender_.IntraFrameRequest(0); | 484 video_sender_.IntraFrameRequest(0); |
189 } | 485 } |
190 | 486 |
191 int64_t ViEEncoder::time_of_last_frame_activity_ms() { | |
192 rtc::CritScope lock(&data_cs_); | |
193 return time_of_last_frame_activity_ms_; | |
194 } | |
195 | |
196 EncodedImageCallback::Result ViEEncoder::OnEncodedImage( | 487 EncodedImageCallback::Result ViEEncoder::OnEncodedImage( |
197 const EncodedImage& encoded_image, | 488 const EncodedImage& encoded_image, |
198 const CodecSpecificInfo* codec_specific_info, | 489 const CodecSpecificInfo* codec_specific_info, |
199 const RTPFragmentationHeader* fragmentation) { | 490 const RTPFragmentationHeader* fragmentation) { |
200 { | 491 // Encoded is called on whatever thread the real encoder implementation run |
201 rtc::CritScope lock(&data_cs_); | 492 // on. In the case of hardware encoders, there might be several encoders |
202 time_of_last_frame_activity_ms_ = rtc::TimeMillis(); | 493 // running in parallel on different threads. |
203 } | |
204 if (stats_proxy_) { | 494 if (stats_proxy_) { |
205 stats_proxy_->OnSendEncodedImage(encoded_image, codec_specific_info); | 495 stats_proxy_->OnSendEncodedImage(encoded_image, codec_specific_info); |
206 } | 496 } |
207 | 497 |
208 EncodedImageCallback::Result result = | 498 EncodedImageCallback::Result result = |
209 sink_->OnEncodedImage(encoded_image, codec_specific_info, fragmentation); | 499 sink_->OnEncodedImage(encoded_image, codec_specific_info, fragmentation); |
210 | 500 |
211 overuse_detector_->FrameSent(encoded_image._timeStamp); | 501 overuse_detector_.FrameSent(encoded_image._timeStamp); |
212 return result; | 502 return result; |
213 } | 503 } |
214 | 504 |
215 void ViEEncoder::SendStatistics(uint32_t bit_rate, uint32_t frame_rate) { | 505 void ViEEncoder::SendStatistics(uint32_t bit_rate, uint32_t frame_rate) { |
| 506 RTC_DCHECK(module_process_thread_checker_.CalledOnValidThread()); |
216 if (stats_proxy_) | 507 if (stats_proxy_) |
217 stats_proxy_->OnEncoderStatsUpdate(frame_rate, bit_rate); | 508 stats_proxy_->OnEncoderStatsUpdate(frame_rate, bit_rate); |
218 } | 509 } |
219 | 510 |
220 void ViEEncoder::OnReceivedSLI(uint8_t picture_id) { | 511 void ViEEncoder::OnReceivedSLI(uint8_t picture_id) { |
221 rtc::CritScope lock(&data_cs_); | 512 if (!encoder_queue_.IsCurrent()) { |
| 513 encoder_queue_.PostTask([this, picture_id] { OnReceivedSLI(picture_id); }); |
| 514 return; |
| 515 } |
| 516 RTC_DCHECK_RUN_ON(&encoder_queue_); |
222 picture_id_sli_ = picture_id; | 517 picture_id_sli_ = picture_id; |
223 has_received_sli_ = true; | 518 has_received_sli_ = true; |
224 } | 519 } |
225 | 520 |
226 void ViEEncoder::OnReceivedRPSI(uint64_t picture_id) { | 521 void ViEEncoder::OnReceivedRPSI(uint64_t picture_id) { |
227 rtc::CritScope lock(&data_cs_); | 522 if (!encoder_queue_.IsCurrent()) { |
| 523 encoder_queue_.PostTask([this, picture_id] { OnReceivedRPSI(picture_id); }); |
| 524 return; |
| 525 } |
| 526 RTC_DCHECK_RUN_ON(&encoder_queue_); |
228 picture_id_rpsi_ = picture_id; | 527 picture_id_rpsi_ = picture_id; |
229 has_received_rpsi_ = true; | 528 has_received_rpsi_ = true; |
230 } | 529 } |
231 | 530 |
232 void ViEEncoder::OnReceivedIntraFrameRequest(size_t stream_index) { | 531 void ViEEncoder::OnReceivedIntraFrameRequest(size_t stream_index) { |
| 532 if (!encoder_queue_.IsCurrent()) { |
| 533 encoder_queue_.PostTask( |
| 534 [this, stream_index] { OnReceivedIntraFrameRequest(stream_index); }); |
| 535 return; |
| 536 } |
| 537 RTC_DCHECK_RUN_ON(&encoder_queue_); |
233 // Key frame request from remote side, signal to VCM. | 538 // Key frame request from remote side, signal to VCM. |
234 TRACE_EVENT0("webrtc", "OnKeyFrameRequest"); | 539 TRACE_EVENT0("webrtc", "OnKeyFrameRequest"); |
235 video_sender_.IntraFrameRequest(stream_index); | 540 video_sender_.IntraFrameRequest(stream_index); |
236 } | 541 } |
237 | 542 |
238 void ViEEncoder::OnBitrateUpdated(uint32_t bitrate_bps, | 543 void ViEEncoder::OnBitrateUpdated(uint32_t bitrate_bps, |
239 uint8_t fraction_lost, | 544 uint8_t fraction_lost, |
240 int64_t round_trip_time_ms) { | 545 int64_t round_trip_time_ms) { |
| 546 if (!encoder_queue_.IsCurrent()) { |
| 547 encoder_queue_.PostTask( |
| 548 [this, bitrate_bps, fraction_lost, round_trip_time_ms] { |
| 549 OnBitrateUpdated(bitrate_bps, fraction_lost, round_trip_time_ms); |
| 550 }); |
| 551 return; |
| 552 } |
| 553 RTC_DCHECK_RUN_ON(&encoder_queue_); |
| 554 RTC_DCHECK(sink_) << "sink_ must be set before the encoder is active."; |
| 555 |
241 LOG(LS_VERBOSE) << "OnBitrateUpdated, bitrate " << bitrate_bps | 556 LOG(LS_VERBOSE) << "OnBitrateUpdated, bitrate " << bitrate_bps |
242 << " packet loss " << static_cast<int>(fraction_lost) | 557 << " packet loss " << static_cast<int>(fraction_lost) |
243 << " rtt " << round_trip_time_ms; | 558 << " rtt " << round_trip_time_ms; |
| 559 |
244 video_sender_.SetChannelParameters(bitrate_bps, fraction_lost, | 560 video_sender_.SetChannelParameters(bitrate_bps, fraction_lost, |
245 round_trip_time_ms); | 561 round_trip_time_ms); |
246 bool video_suspension_changed; | 562 |
| 563 encoder_start_bitrate_bps_ = |
| 564 bitrate_bps != 0 ? bitrate_bps : encoder_start_bitrate_bps_; |
247 bool video_is_suspended = bitrate_bps == 0; | 565 bool video_is_suspended = bitrate_bps == 0; |
248 { | 566 bool video_suspension_changed = |
249 rtc::CritScope lock(&data_cs_); | 567 video_is_suspended != (last_observed_bitrate_bps_ == 0); |
250 last_observed_bitrate_bps_ = bitrate_bps; | 568 last_observed_bitrate_bps_ = bitrate_bps; |
251 video_suspension_changed = video_suspended_ != video_is_suspended; | |
252 video_suspended_ = video_is_suspended; | |
253 // Set |time_of_last_frame_activity_ms_| to now if this is the first time | |
254 // the encoder is supposed to produce encoded frames. | |
255 // TODO(perkj): Remove this hack. It is here to avoid a race where the | |
256 // encoder reports that it has timed out before it has processed the first | |
257 // frame. | |
258 if (last_observed_bitrate_bps_ != 0 && | |
259 time_of_last_frame_activity_ms_ == | |
260 std::numeric_limits<int64_t>::max()) { | |
261 time_of_last_frame_activity_ms_ = rtc::TimeMillis(); | |
262 } | |
263 } | |
264 | 569 |
265 if (stats_proxy_ && video_suspension_changed) { | 570 if (stats_proxy_ && video_suspension_changed) { |
266 LOG(LS_INFO) << "Video suspend state changed to: " | 571 LOG(LS_INFO) << "Video suspend state changed to: " |
267 << (video_is_suspended ? "suspended" : "not suspended"); | 572 << (video_is_suspended ? "suspended" : "not suspended"); |
268 stats_proxy_->OnSuspendChange(video_is_suspended); | 573 stats_proxy_->OnSuspendChange(video_is_suspended); |
269 } | 574 } |
270 } | 575 } |
271 | 576 |
| 577 void ViEEncoder::OveruseDetected() { |
| 578 RTC_DCHECK_RUN_ON(&module_process_thread_checker_); |
| 579 // TODO(perkj): When ViEEncoder inherits rtc::VideoSink instead of |
| 580 // VideoCaptureInput, |load_observer_| should be removed and overuse |
| 581 // expressed as rtc::VideoSinkWants instead. |
| 582 if (load_observer_) |
| 583 load_observer_->OnLoadUpdate(LoadObserver::kOveruse); |
| 584 } |
| 585 |
| 586 void ViEEncoder::NormalUsage() { |
| 587 RTC_DCHECK_RUN_ON(&module_process_thread_checker_); |
| 588 if (load_observer_) |
| 589 load_observer_->OnLoadUpdate(LoadObserver::kUnderuse); |
| 590 } |
| 591 |
272 } // namespace webrtc | 592 } // namespace webrtc |