OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | |
11 #include "webrtc/video/video_send_stream.h" | 10 #include "webrtc/video/video_send_stream.h" |
12 | 11 |
13 #include <algorithm> | 12 #include <algorithm> |
14 #include <sstream> | 13 #include <sstream> |
15 #include <string> | 14 #include <string> |
16 #include <utility> | 15 #include <utility> |
17 #include <vector> | 16 #include <vector> |
18 | 17 |
19 #include "webrtc/base/checks.h" | 18 #include "webrtc/base/checks.h" |
20 #include "webrtc/base/logging.h" | 19 #include "webrtc/base/logging.h" |
21 #include "webrtc/base/trace_event.h" | 20 #include "webrtc/base/trace_event.h" |
22 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" | |
23 #include "webrtc/modules/bitrate_controller/include/bitrate_controller.h" | 21 #include "webrtc/modules/bitrate_controller/include/bitrate_controller.h" |
24 #include "webrtc/modules/congestion_controller/include/congestion_controller.h" | 22 #include "webrtc/modules/congestion_controller/include/congestion_controller.h" |
25 #include "webrtc/modules/pacing/packet_router.h" | 23 #include "webrtc/modules/pacing/packet_router.h" |
26 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h" | 24 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h" |
27 #include "webrtc/modules/utility/include/process_thread.h" | 25 #include "webrtc/modules/utility/include/process_thread.h" |
28 #include "webrtc/modules/video_coding/utility/ivf_file_writer.h" | 26 #include "webrtc/modules/video_coding/utility/ivf_file_writer.h" |
29 #include "webrtc/video/call_stats.h" | 27 #include "webrtc/video/call_stats.h" |
30 #include "webrtc/video/video_capture_input.h" | |
31 #include "webrtc/video/vie_remb.h" | 28 #include "webrtc/video/vie_remb.h" |
32 #include "webrtc/video_send_stream.h" | 29 #include "webrtc/video_send_stream.h" |
33 | 30 |
34 namespace webrtc { | 31 namespace webrtc { |
35 | 32 |
36 class RtcpIntraFrameObserver; | |
37 class TransportFeedbackObserver; | |
38 | |
39 static const int kMinSendSidePacketHistorySize = 600; | 33 static const int kMinSendSidePacketHistorySize = 600; |
40 static const int kEncoderTimeOutMs = 2000; | |
41 | |
42 namespace { | 34 namespace { |
43 | 35 |
44 std::vector<RtpRtcp*> CreateRtpRtcpModules( | 36 std::vector<RtpRtcp*> CreateRtpRtcpModules( |
45 Transport* outgoing_transport, | 37 Transport* outgoing_transport, |
46 RtcpIntraFrameObserver* intra_frame_callback, | 38 RtcpIntraFrameObserver* intra_frame_callback, |
47 RtcpBandwidthObserver* bandwidth_callback, | 39 RtcpBandwidthObserver* bandwidth_callback, |
48 TransportFeedbackObserver* transport_feedback_callback, | 40 TransportFeedbackObserver* transport_feedback_callback, |
49 RtcpRttStats* rtt_stats, | 41 RtcpRttStats* rtt_stats, |
50 RtpPacketSender* paced_sender, | 42 RtpPacketSender* paced_sender, |
51 TransportSequenceNumberAllocator* transport_sequence_number_allocator, | 43 TransportSequenceNumberAllocator* transport_sequence_number_allocator, |
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
143 } | 135 } |
144 | 136 |
145 std::string VideoSendStream::Config::ToString() const { | 137 std::string VideoSendStream::Config::ToString() const { |
146 std::stringstream ss; | 138 std::stringstream ss; |
147 ss << "{encoder_settings: " << encoder_settings.ToString(); | 139 ss << "{encoder_settings: " << encoder_settings.ToString(); |
148 ss << ", rtp: " << rtp.ToString(); | 140 ss << ", rtp: " << rtp.ToString(); |
149 ss << ", pre_encode_callback: " | 141 ss << ", pre_encode_callback: " |
150 << (pre_encode_callback ? "(I420FrameCallback)" : "nullptr"); | 142 << (pre_encode_callback ? "(I420FrameCallback)" : "nullptr"); |
151 ss << ", post_encode_callback: " | 143 ss << ", post_encode_callback: " |
152 << (post_encode_callback ? "(EncodedFrameObserver)" : "nullptr"); | 144 << (post_encode_callback ? "(EncodedFrameObserver)" : "nullptr"); |
153 ss << ", local_renderer: " | |
154 << (local_renderer ? "(VideoRenderer)" : "nullptr"); | |
155 ss << ", render_delay_ms: " << render_delay_ms; | 145 ss << ", render_delay_ms: " << render_delay_ms; |
156 ss << ", target_delay_ms: " << target_delay_ms; | 146 ss << ", target_delay_ms: " << target_delay_ms; |
157 ss << ", suspend_below_min_bitrate: " << (suspend_below_min_bitrate ? "on" | 147 ss << ", suspend_below_min_bitrate: " << (suspend_below_min_bitrate ? "on" |
158 : "off"); | 148 : "off"); |
159 ss << '}'; | 149 ss << '}'; |
160 return ss.str(); | 150 return ss.str(); |
161 } | 151 } |
162 | 152 |
163 namespace { | 153 namespace { |
164 | 154 |
165 VideoCodecType PayloadNameToCodecType(const std::string& payload_name) { | |
166 if (payload_name == "VP8") | |
167 return kVideoCodecVP8; | |
168 if (payload_name == "VP9") | |
169 return kVideoCodecVP9; | |
170 if (payload_name == "H264") | |
171 return kVideoCodecH264; | |
172 return kVideoCodecGeneric; | |
173 } | |
174 | |
175 bool PayloadTypeSupportsSkippingFecPackets(const std::string& payload_name) { | 155 bool PayloadTypeSupportsSkippingFecPackets(const std::string& payload_name) { |
176 switch (PayloadNameToCodecType(payload_name)) { | 156 if (payload_name == "VP8" || payload_name == "VP9") |
177 case kVideoCodecVP8: | 157 return true; |
178 case kVideoCodecVP9: | 158 RTC_DCHECK(payload_name == "H264" || payload_name == "FAKE") |
179 return true; | 159 << "unknown payload_name " << payload_name; |
180 case kVideoCodecH264: | |
181 case kVideoCodecGeneric: | |
182 return false; | |
183 case kVideoCodecI420: | |
184 case kVideoCodecRED: | |
185 case kVideoCodecULPFEC: | |
186 case kVideoCodecUnknown: | |
187 RTC_NOTREACHED(); | |
188 return false; | |
189 } | |
190 RTC_NOTREACHED(); | |
191 return false; | 160 return false; |
192 } | 161 } |
193 | 162 |
194 // TODO(pbos): Lower these thresholds (to closer to 100%) when we handle | 163 int CalculateMaxPadBitrateBps(const VideoEncoderConfig& config, |
195 // pipelining encoders better (multiple input frames before something comes | |
196 // out). This should effectively turn off CPU adaptations for systems that | |
197 // remotely cope with the load right now. | |
198 CpuOveruseOptions GetCpuOveruseOptions(bool full_overuse_time) { | |
199 CpuOveruseOptions options; | |
200 if (full_overuse_time) { | |
201 options.low_encode_usage_threshold_percent = 150; | |
202 options.high_encode_usage_threshold_percent = 200; | |
203 } | |
204 return options; | |
205 } | |
206 | |
207 VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config, | |
208 const std::string& payload_name, | |
209 int payload_type) { | |
210 const std::vector<VideoStream>& streams = config.streams; | |
211 static const int kEncoderMinBitrateKbps = 30; | |
212 RTC_DCHECK(!streams.empty()); | |
213 RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0); | |
214 | |
215 VideoCodec video_codec; | |
216 memset(&video_codec, 0, sizeof(video_codec)); | |
217 video_codec.codecType = PayloadNameToCodecType(payload_name); | |
218 | |
219 switch (config.content_type) { | |
220 case VideoEncoderConfig::ContentType::kRealtimeVideo: | |
221 video_codec.mode = kRealtimeVideo; | |
222 break; | |
223 case VideoEncoderConfig::ContentType::kScreen: | |
224 video_codec.mode = kScreensharing; | |
225 if (config.streams.size() == 1 && | |
226 config.streams[0].temporal_layer_thresholds_bps.size() == 1) { | |
227 video_codec.targetBitrate = | |
228 config.streams[0].temporal_layer_thresholds_bps[0] / 1000; | |
229 } | |
230 break; | |
231 } | |
232 | |
233 switch (video_codec.codecType) { | |
234 case kVideoCodecVP8: { | |
235 if (config.encoder_specific_settings) { | |
236 video_codec.codecSpecific.VP8 = *reinterpret_cast<const VideoCodecVP8*>( | |
237 config.encoder_specific_settings); | |
238 } else { | |
239 video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings(); | |
240 } | |
241 video_codec.codecSpecific.VP8.numberOfTemporalLayers = | |
242 static_cast<unsigned char>( | |
243 streams.back().temporal_layer_thresholds_bps.size() + 1); | |
244 break; | |
245 } | |
246 case kVideoCodecVP9: { | |
247 if (config.encoder_specific_settings) { | |
248 video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>( | |
249 config.encoder_specific_settings); | |
250 if (video_codec.mode == kScreensharing) { | |
251 video_codec.codecSpecific.VP9.flexibleMode = true; | |
252 // For now VP9 screensharing use 1 temporal and 2 spatial layers. | |
253 RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers, | |
254 1); | |
255 RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2); | |
256 } | |
257 } else { | |
258 video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings(); | |
259 } | |
260 video_codec.codecSpecific.VP9.numberOfTemporalLayers = | |
261 static_cast<unsigned char>( | |
262 streams.back().temporal_layer_thresholds_bps.size() + 1); | |
263 break; | |
264 } | |
265 case kVideoCodecH264: { | |
266 if (config.encoder_specific_settings) { | |
267 video_codec.codecSpecific.H264 = | |
268 *reinterpret_cast<const VideoCodecH264*>( | |
269 config.encoder_specific_settings); | |
270 } else { | |
271 video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings(); | |
272 } | |
273 break; | |
274 } | |
275 default: | |
276 // TODO(pbos): Support encoder_settings codec-agnostically. | |
277 RTC_DCHECK(!config.encoder_specific_settings) | |
278 << "Encoder-specific settings for codec type not wired up."; | |
279 break; | |
280 } | |
281 | |
282 strncpy(video_codec.plName, payload_name.c_str(), kPayloadNameSize - 1); | |
283 video_codec.plName[kPayloadNameSize - 1] = '\0'; | |
284 video_codec.plType = payload_type; | |
285 video_codec.numberOfSimulcastStreams = | |
286 static_cast<unsigned char>(streams.size()); | |
287 video_codec.minBitrate = streams[0].min_bitrate_bps / 1000; | |
288 if (video_codec.minBitrate < kEncoderMinBitrateKbps) | |
289 video_codec.minBitrate = kEncoderMinBitrateKbps; | |
290 RTC_DCHECK_LE(streams.size(), static_cast<size_t>(kMaxSimulcastStreams)); | |
291 if (video_codec.codecType == kVideoCodecVP9) { | |
292 // If the vector is empty, bitrates will be configured automatically. | |
293 RTC_DCHECK(config.spatial_layers.empty() || | |
294 config.spatial_layers.size() == | |
295 video_codec.codecSpecific.VP9.numberOfSpatialLayers); | |
296 RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers, | |
297 kMaxSimulcastStreams); | |
298 for (size_t i = 0; i < config.spatial_layers.size(); ++i) | |
299 video_codec.spatialLayers[i] = config.spatial_layers[i]; | |
300 } | |
301 for (size_t i = 0; i < streams.size(); ++i) { | |
302 SimulcastStream* sim_stream = &video_codec.simulcastStream[i]; | |
303 RTC_DCHECK_GT(streams[i].width, 0u); | |
304 RTC_DCHECK_GT(streams[i].height, 0u); | |
305 RTC_DCHECK_GT(streams[i].max_framerate, 0); | |
306 // Different framerates not supported per stream at the moment. | |
307 RTC_DCHECK_EQ(streams[i].max_framerate, streams[0].max_framerate); | |
308 RTC_DCHECK_GE(streams[i].min_bitrate_bps, 0); | |
309 RTC_DCHECK_GE(streams[i].target_bitrate_bps, streams[i].min_bitrate_bps); | |
310 RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps); | |
311 RTC_DCHECK_GE(streams[i].max_qp, 0); | |
312 | |
313 sim_stream->width = static_cast<uint16_t>(streams[i].width); | |
314 sim_stream->height = static_cast<uint16_t>(streams[i].height); | |
315 sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000; | |
316 sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000; | |
317 sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000; | |
318 sim_stream->qpMax = streams[i].max_qp; | |
319 sim_stream->numberOfTemporalLayers = static_cast<unsigned char>( | |
320 streams[i].temporal_layer_thresholds_bps.size() + 1); | |
321 | |
322 video_codec.width = std::max(video_codec.width, | |
323 static_cast<uint16_t>(streams[i].width)); | |
324 video_codec.height = std::max( | |
325 video_codec.height, static_cast<uint16_t>(streams[i].height)); | |
326 video_codec.minBitrate = | |
327 std::min(static_cast<uint16_t>(video_codec.minBitrate), | |
328 static_cast<uint16_t>(streams[i].min_bitrate_bps / 1000)); | |
329 video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000; | |
330 video_codec.qpMax = std::max(video_codec.qpMax, | |
331 static_cast<unsigned int>(streams[i].max_qp)); | |
332 } | |
333 | |
334 if (video_codec.maxBitrate == 0) { | |
335 // Unset max bitrate -> cap to one bit per pixel. | |
336 video_codec.maxBitrate = | |
337 (video_codec.width * video_codec.height * video_codec.maxFramerate) / | |
338 1000; | |
339 } | |
340 if (video_codec.maxBitrate < kEncoderMinBitrateKbps) | |
341 video_codec.maxBitrate = kEncoderMinBitrateKbps; | |
342 | |
343 RTC_DCHECK_GT(streams[0].max_framerate, 0); | |
344 video_codec.maxFramerate = streams[0].max_framerate; | |
345 video_codec.expect_encode_from_texture = config.expect_encode_from_texture; | |
346 | |
347 return video_codec; | |
348 } | |
349 | |
350 int CalulcateMaxPadBitrateBps(const VideoEncoderConfig& config, | |
351 bool pad_to_min_bitrate) { | 164 bool pad_to_min_bitrate) { |
352 int pad_up_to_bitrate_bps = 0; | 165 int pad_up_to_bitrate_bps = 0; |
353 // Calculate max padding bitrate for a multi layer codec. | 166 // Calculate max padding bitrate for a multi layer codec. |
354 if (config.streams.size() > 1) { | 167 if (config.streams.size() > 1) { |
355 // Pad to min bitrate of the highest layer. | 168 // Pad to min bitrate of the highest layer. |
356 pad_up_to_bitrate_bps = | 169 pad_up_to_bitrate_bps = |
357 config.streams[config.streams.size() - 1].min_bitrate_bps; | 170 config.streams[config.streams.size() - 1].min_bitrate_bps; |
358 // Add target_bitrate_bps of the lower layers. | 171 // Add target_bitrate_bps of the lower layers. |
359 for (size_t i = 0; i < config.streams.size() - 1; ++i) | 172 for (size_t i = 0; i < config.streams.size() - 1; ++i) |
360 pad_up_to_bitrate_bps += config.streams[i].target_bitrate_bps; | 173 pad_up_to_bitrate_bps += config.streams[i].target_bitrate_bps; |
361 } else if (pad_to_min_bitrate) { | 174 } else if (pad_to_min_bitrate) { |
362 pad_up_to_bitrate_bps = config.streams[0].min_bitrate_bps; | 175 pad_up_to_bitrate_bps = config.streams[0].min_bitrate_bps; |
363 } | 176 } |
364 | 177 |
365 pad_up_to_bitrate_bps = | 178 pad_up_to_bitrate_bps = |
366 std::max(pad_up_to_bitrate_bps, config.min_transmit_bitrate_bps); | 179 std::max(pad_up_to_bitrate_bps, config.min_transmit_bitrate_bps); |
367 | 180 |
368 return pad_up_to_bitrate_bps; | 181 return pad_up_to_bitrate_bps; |
369 } | 182 } |
370 | 183 |
371 } // namespace | 184 } // namespace |
372 | 185 |
373 namespace internal { | 186 namespace internal { |
187 | |
188 // TODO(tommi): See if there's a more elegant way to create a task that creates | |
189 // an object on the correct task queue. | |
190 class VideoSendStream::ConstructionTask : public rtc::QueuedTask { | |
stefan-webrtc
2016/07/08 15:56:42
How much value does it add to construct/destruct t
perkj_webrtc
2016/07/11 11:41:08
The ctor still calls a few constructors and other
| |
191 public: | |
192 ConstructionTask(std::unique_ptr<VideoSendStreamInternal>* send_stream, | |
193 rtc::Event* done_event, | |
194 SendStatisticsProxy* stats_proxy, | |
195 ViEEncoder* vie_encoder, | |
196 ProcessThread* module_process_thread, | |
197 CallStats* call_stats, | |
198 CongestionController* congestion_controller, | |
199 BitrateAllocator* bitrate_allocator, | |
200 SendDelayStats* send_delay_stats, | |
201 VieRemb* remb, | |
202 RtcEventLog* event_log, | |
203 const VideoSendStream::Config* config, | |
204 const std::map<uint32_t, RtpState>& suspended_ssrcs) | |
205 : send_stream_(send_stream), | |
206 done_event_(done_event), | |
207 stats_proxy_(stats_proxy), | |
208 vie_encoder_(vie_encoder), | |
209 call_stats_(call_stats), | |
210 congestion_controller_(congestion_controller), | |
211 bitrate_allocator_(bitrate_allocator), | |
212 send_delay_stats_(send_delay_stats), | |
213 remb_(remb), | |
214 event_log_(event_log), | |
215 config_(config), | |
216 suspended_ssrcs_(suspended_ssrcs) {} | |
217 ~ConstructionTask() override { done_event_->Set(); } | |
stefan-webrtc
2016/07/08 15:56:42
Empty line above
perkj_webrtc
2016/07/11 11:41:08
Done.
| |
218 | |
219 private: | |
220 bool Run() override { | |
221 send_stream_->reset(new VideoSendStreamInternal( | |
222 stats_proxy_, rtc::TaskQueue::Current(), call_stats_, | |
223 congestion_controller_, bitrate_allocator_, send_delay_stats_, remb_, | |
224 vie_encoder_, event_log_, config_, std::move(suspended_ssrcs_))); | |
225 return true; | |
226 } | |
227 | |
228 std::unique_ptr<VideoSendStreamInternal>* send_stream_; | |
229 rtc::Event* done_event_; | |
230 SendStatisticsProxy* const stats_proxy_; | |
231 ViEEncoder* const vie_encoder_; | |
232 CallStats* const call_stats_; | |
233 CongestionController* const congestion_controller_; | |
234 BitrateAllocator* const bitrate_allocator_; | |
235 SendDelayStats* const send_delay_stats_; | |
236 VieRemb* const remb_; | |
237 RtcEventLog* const event_log_; | |
238 const VideoSendStream::Config* config_; | |
239 std::map<uint32_t, RtpState> suspended_ssrcs_; | |
240 }; | |
241 | |
242 class VideoSendStream::DestructAndGetRTPStateTask : public rtc::QueuedTask { | |
stefan-webrtc
2016/07/08 15:56:42
Rtp
perkj_webrtc
2016/07/11 11:41:08
Done.
| |
243 public: | |
244 DestructAndGetRTPStateTask( | |
245 VideoSendStream::RtpStateMap* state_map, | |
246 std::unique_ptr<VideoSendStreamInternal> send_stream, | |
247 rtc::Event* done_event) | |
248 : state_map_(state_map), | |
249 send_stream_(std::move(send_stream)), | |
250 done_event_(done_event) {} | |
251 ~DestructAndGetRTPStateTask() override { | |
stefan-webrtc
2016/07/08 15:56:42
Empty line above
perkj_webrtc
2016/07/11 11:41:07
Done.
| |
252 send_stream_.reset(); | |
253 done_event_->Set(); | |
254 } | |
255 | |
256 private: | |
257 bool Run() override { | |
258 send_stream_->Stop(); | |
259 *state_map_ = send_stream_->GetRtpStates(); | |
260 send_stream_.reset(); | |
261 return true; | |
262 } | |
263 | |
264 VideoSendStream::RtpStateMap* state_map_; | |
265 std::unique_ptr<VideoSendStreamInternal> send_stream_; | |
266 rtc::Event* done_event_; | |
267 }; | |
268 | |
269 // CheckEncoderActivityTask is used for tracking when the encoder last produced | |
270 // an encoded video frame. If the encoder has not produced anything during | 
271 // the last kEncoderTimeOutMs we also want to stop sending padding. | 
272 class VideoSendStreamInternal::CheckEncoderActivityTask | |
273 : public rtc::QueuedTask { | |
274 public: | |
275 static const int kEncoderTimeOutMs = 2000; | |
276 explicit CheckEncoderActivityTask(VideoSendStreamInternal* send_stream) | |
277 : activity_(0), send_stream_(send_stream), timed_out_(false) { | |
278 encoder_thread_checker_.DetachFromThread(); | |
279 } | |
280 | |
281 void Stop() { | |
282 RTC_DCHECK_RUN_ON(&thread_checker_); | |
283 send_stream_ = nullptr; | |
284 } | |
285 | |
286 void UpdateEncoderActivity() { | |
287 RTC_DCHECK_RUN_ON(&encoder_thread_checker_); | |
288 rtc::AtomicOps::ReleaseStore(&activity_, 1); | |
289 } | |
290 | |
291 private: | |
292 bool Run() override { | |
293 RTC_DCHECK_RUN_ON(&thread_checker_); | |
294 if (!send_stream_) | |
295 return true; | |
296 if (!rtc::AtomicOps::AcquireLoad(&activity_)) { | |
297 if (!timed_out_) { | |
298 send_stream_->SignalEncoderTimedOut(); | |
299 } | |
300 timed_out_ = true; | |
301 } else if (timed_out_) { | |
302 send_stream_->SignalEncoderActive(); | |
303 timed_out_ = false; | |
304 } | |
305 rtc::AtomicOps::ReleaseStore(&activity_, 0); | |
306 | |
307 rtc::TaskQueue::Current()->PostDelayedTask( | |
308 std::unique_ptr<rtc::QueuedTask>(this), kEncoderTimeOutMs); | |
309 // Return false to prevent this task from being deleted. Ownership has been | |
310 // transferred to the task queue when PostDelayedTask was called. | |
311 return false; | |
312 } | |
313 | |
314 rtc::ThreadChecker encoder_thread_checker_; | |
315 volatile int activity_; | |
316 | |
317 rtc::ThreadChecker thread_checker_; | |
318 VideoSendStreamInternal* send_stream_; | |
319 bool timed_out_; | |
320 }; | |
321 | |
322 class ReconfigureVideoEncoderTask : public rtc::QueuedTask { | |
323 public: | |
324 ReconfigureVideoEncoderTask(VideoSendStreamInternal* send_stream, | |
325 VideoEncoderConfig config) | |
326 : send_stream_(send_stream), config_(std::move(config)) {} | |
327 | |
328 private: | |
329 bool Run() override { | |
330 send_stream_->ReconfigureVideoEncoder(std::move(config_)); | |
331 return true; | |
332 } | |
333 | |
334 VideoSendStreamInternal* send_stream_; | |
335 VideoEncoderConfig config_; | |
336 }; | |
337 | |
374 VideoSendStream::VideoSendStream( | 338 VideoSendStream::VideoSendStream( |
375 int num_cpu_cores, | 339 int num_cpu_cores, |
376 ProcessThread* module_process_thread, | 340 ProcessThread* module_process_thread, |
341 rtc::TaskQueue* worker_queue, | |
377 CallStats* call_stats, | 342 CallStats* call_stats, |
378 CongestionController* congestion_controller, | 343 CongestionController* congestion_controller, |
379 BitrateAllocator* bitrate_allocator, | 344 BitrateAllocator* bitrate_allocator, |
380 SendDelayStats* send_delay_stats, | 345 SendDelayStats* send_delay_stats, |
381 VieRemb* remb, | 346 VieRemb* remb, |
382 RtcEventLog* event_log, | 347 RtcEventLog* event_log, |
383 const VideoSendStream::Config& config, | 348 VideoSendStream::Config config, |
384 const VideoEncoderConfig& encoder_config, | 349 VideoEncoderConfig encoder_config, |
385 const std::map<uint32_t, RtpState>& suspended_ssrcs) | 350 const std::map<uint32_t, RtpState>& suspended_ssrcs) |
386 : stats_proxy_(Clock::GetRealTimeClock(), | 351 : worker_queue_(worker_queue), |
352 thread_sync_event_(false /* manual_reset */, false), | |
353 stats_proxy_(Clock::GetRealTimeClock(), | |
387 config, | 354 config, |
388 encoder_config.content_type), | 355 encoder_config.content_type), |
356 config_(std::move(config)) { | |
357 vie_encoder_.reset( | |
358 new ViEEncoder(num_cpu_cores, &stats_proxy_, config_.encoder_settings, | |
359 config_.pre_encode_callback, config_.overuse_callback, | |
360 config_.post_encode_callback)); | |
361 | |
362 worker_queue_->PostTask(std::unique_ptr<rtc::QueuedTask>(new ConstructionTask( | |
363 &send_stream_, &thread_sync_event_, &stats_proxy_, vie_encoder_.get(), | |
364 module_process_thread, call_stats, congestion_controller, | |
365 bitrate_allocator, send_delay_stats, remb, event_log, &config_, | |
366 suspended_ssrcs))); | |
367 | |
368 // Wait for the posted ConstructionTask to complete so that | 
369 // |module_process_thread| can be registered. | 
stefan-webrtc
2016/07/08 15:56:42
I think a better comment would be:
"Wait for Const
perkj_webrtc
2016/07/11 11:41:08
Added description to VideoSendStreamImpl.
| |
370 thread_sync_event_.Wait(rtc::Event::kForever); | |
371 send_stream_->RegisterProcessThread(module_process_thread); | |
372 | |
373 vie_encoder_->RegisterProcessThread(module_process_thread); | |
374 | |
375 ReconfigureVideoEncoder(std::move(encoder_config)); | |
376 } | |
377 | |
378 VideoSendStream::~VideoSendStream() { | |
379 RTC_DCHECK_RUN_ON(&thread_checker_); | |
380 RTC_DCHECK(!send_stream_); | |
381 } | |
382 | |
383 void VideoSendStream::Start() { | |
384 RTC_DCHECK_RUN_ON(&thread_checker_); | |
385 LOG(LS_INFO) << "VideoSendStream::Start"; | |
386 VideoSendStreamInternal* send_stream = send_stream_.get(); | |
387 worker_queue_->PostTask([this, send_stream] { | |
388 send_stream->Start(); | |
389 thread_sync_event_.Set(); | |
390 }); | |
391 | |
392 // This is needed for synchronizing with the WebRtcVideoChannel2BaseTest | 
393 // tests in webrtcvideosendstream. | 
394 // I.e., if VideoSendStreamInternal::Start has not completed before the first | 
395 // frame is forwarded to the encoder, the frame will be dropped. | |
stefan-webrtc
2016/07/08 15:56:42
Does this mean the tests should be changed, or is
perkj_webrtc
2016/07/11 11:41:08
I guess we should decide. But it make sense that a
| |
396 thread_sync_event_.Wait(rtc::Event::kForever); | |
397 } | |
398 | |
399 void VideoSendStream::Stop() { | |
400 RTC_DCHECK_RUN_ON(&thread_checker_); | |
401 LOG(LS_INFO) << "VideoSendStream::Stop"; | |
402 VideoSendStreamInternal* send_stream = send_stream_.get(); | |
403 worker_queue_->PostTask([send_stream] { send_stream->Stop(); }); | |
404 } | |
405 | |
406 VideoCaptureInput* VideoSendStream::Input() { | |
407 // Input() will be called on the thread that delivers video frames from | 
408 // libjingle. | |
409 // TODO(perkj): Refactor ViEEncoder to register directly as a VideoSink to the | |
410 // VideoSource. | |
411 return vie_encoder_.get(); | |
412 } | |
413 | |
414 void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) { | |
415 // ReconfigureVideoEncoder will be called on the thread that delivers video | 
416 // frames. We must change the encoder settings immediately so that | |
417 // the codec settings matches the next frame. | |
418 // TODO(perkj): Move logic for reconfiguration the encoder due to frame size | |
419 // change from WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame to | |
420 // be internally handled by ViEEncoder. | |
421 vie_encoder_->ConfigureEncoder( | |
422 config, | |
423 config_.rtp.max_packet_size - 20); // - 20 for RTP header size. | |
stefan-webrtc
2016/07/08 15:56:42
20 should be a named constant.
Also, I'm not sure
perkj_webrtc
2016/07/11 11:41:08
use static const size_t kRtpPacketSizeOverhead = 2
perkj_webrtc
2016/07/11 12:10:53
Actually - I removed -20 altogether. It turns out
| |
424 | |
425 worker_queue_->PostTask(std::unique_ptr<rtc::QueuedTask>( | |
426 new ReconfigureVideoEncoderTask(send_stream_.get(), std::move(config)))); | |
427 } | |
428 | |
429 VideoSendStream::Stats VideoSendStream::GetStats() { | |
430 // TODO(perkj, solenberg): Some test cases in EndToEndTest call GetStats from | |
431 // a network thread. See comment in Call::GetStats(). | |
432 // RTC_DCHECK_RUN_ON(&thread_checker_); | |
433 return stats_proxy_.GetStats(); | |
434 } | |
435 | |
436 void VideoSendStream::SignalNetworkState(NetworkState state) { | |
437 RTC_DCHECK_RUN_ON(&thread_checker_); | |
438 VideoSendStreamInternal* send_stream = send_stream_.get(); | |
439 worker_queue_->PostTask( | |
440 [send_stream, state] { send_stream->SignalNetworkState(state); }); | |
441 } | |
442 | |
443 VideoSendStream::RtpStateMap VideoSendStream::StopPermanentlyAndGetRtpStates() { | |
444 RTC_DCHECK_RUN_ON(&thread_checker_); | |
445 vie_encoder_->Stop(); | |
446 vie_encoder_->DeRegisterProcessThread(); | |
447 VideoSendStream::RtpStateMap state_map; | |
448 send_stream_->DeRegisterProcessThread(); | |
449 worker_queue_->PostTask( | |
450 std::unique_ptr<rtc::QueuedTask>(new DestructAndGetRTPStateTask( | |
451 &state_map, std::move(send_stream_), &thread_sync_event_))); | |
452 thread_sync_event_.Wait(rtc::Event::kForever); | |
453 return state_map; | |
454 } | |
455 | |
456 bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { | |
457 // Called on a network thread. | |
458 return send_stream_->DeliverRtcp(packet, length); | |
459 } | |
460 | |
461 VideoSendStreamInternal::VideoSendStreamInternal( | |
462 SendStatisticsProxy* stats_proxy, | |
463 rtc::TaskQueue* worker_queue, | |
464 CallStats* call_stats, | |
465 CongestionController* congestion_controller, | |
466 BitrateAllocator* bitrate_allocator, | |
467 SendDelayStats* send_delay_stats, | |
468 VieRemb* remb, | |
469 ViEEncoder* vie_encoder, | |
470 RtcEventLog* event_log, | |
471 const VideoSendStream::Config* config, | |
472 std::map<uint32_t, RtpState> suspended_ssrcs) | |
473 : stats_proxy_(stats_proxy), | |
389 config_(config), | 474 config_(config), |
390 suspended_ssrcs_(suspended_ssrcs), | 475 suspended_ssrcs_(std::move(suspended_ssrcs)), |
391 module_process_thread_(module_process_thread), | 476 module_process_thread_(nullptr), |
477 worker_queue_(worker_queue), | |
478 check_encoder_activity_task_(nullptr), | |
392 call_stats_(call_stats), | 479 call_stats_(call_stats), |
393 congestion_controller_(congestion_controller), | 480 congestion_controller_(congestion_controller), |
394 bitrate_allocator_(bitrate_allocator), | 481 bitrate_allocator_(bitrate_allocator), |
395 remb_(remb), | 482 remb_(remb), |
396 encoder_thread_(EncoderThreadFunction, this, "EncoderThread"), | 483 max_padding_bitrate_(0), |
397 encoder_wakeup_event_(false, false), | 484 encoder_min_bitrate_bps_(0), |
398 stop_encoder_thread_(0), | |
399 encoder_max_bitrate_bps_(0), | 485 encoder_max_bitrate_bps_(0), |
400 encoder_target_rate_bps_(0), | 486 encoder_target_rate_bps_(0), |
401 state_(State::kStopped), | 487 vie_encoder_(vie_encoder), |
402 overuse_detector_( | |
403 Clock::GetRealTimeClock(), | |
404 GetCpuOveruseOptions(config.encoder_settings.full_overuse_time), | |
405 this, | |
406 config.post_encode_callback, | |
407 &stats_proxy_), | |
408 vie_encoder_(num_cpu_cores, | |
409 module_process_thread_, | |
410 &stats_proxy_, | |
411 &overuse_detector_, | |
412 this), | |
413 encoder_feedback_(Clock::GetRealTimeClock(), | 488 encoder_feedback_(Clock::GetRealTimeClock(), |
414 config.rtp.ssrcs, | 489 config_->rtp.ssrcs, |
415 &vie_encoder_), | 490 vie_encoder), |
416 protection_bitrate_calculator_(Clock::GetRealTimeClock(), this), | 491 protection_bitrate_calculator_(Clock::GetRealTimeClock(), this), |
417 video_sender_(vie_encoder_.video_sender()), | |
418 bandwidth_observer_(congestion_controller_->GetBitrateController() | 492 bandwidth_observer_(congestion_controller_->GetBitrateController() |
419 ->CreateRtcpBandwidthObserver()), | 493 ->CreateRtcpBandwidthObserver()), |
420 rtp_rtcp_modules_(CreateRtpRtcpModules( | 494 rtp_rtcp_modules_(CreateRtpRtcpModules( |
421 config.send_transport, | 495 config_->send_transport, |
422 &encoder_feedback_, | 496 &encoder_feedback_, |
423 bandwidth_observer_.get(), | 497 bandwidth_observer_.get(), |
424 congestion_controller_->GetTransportFeedbackObserver(), | 498 congestion_controller_->GetTransportFeedbackObserver(), |
425 call_stats_->rtcp_rtt_stats(), | 499 call_stats_->rtcp_rtt_stats(), |
426 congestion_controller_->pacer(), | 500 congestion_controller_->pacer(), |
427 congestion_controller_->packet_router(), | 501 congestion_controller_->packet_router(), |
428 &stats_proxy_, | 502 stats_proxy_, |
429 send_delay_stats, | 503 send_delay_stats, |
430 event_log, | 504 event_log, |
431 config_.rtp.ssrcs.size())), | 505 config_->rtp.ssrcs.size())), |
432 payload_router_(rtp_rtcp_modules_, config.encoder_settings.payload_type), | 506 payload_router_(rtp_rtcp_modules_, |
433 input_(&encoder_wakeup_event_, | 507 config_->encoder_settings.payload_type) { |
434 config_.local_renderer, | 508 RTC_DCHECK_RUN_ON(worker_queue_); |
435 &stats_proxy_, | 509 LOG(LS_INFO) << "VideoSendStreamInternal: " << config_->ToString(); |
436 &overuse_detector_) { | 510 module_process_thread_checker_.DetachFromThread(); |
437 LOG(LS_INFO) << "VideoSendStream: " << config_.ToString(); | 511 |
438 | 512 RTC_DCHECK(!config_->rtp.ssrcs.empty()); |
439 RTC_DCHECK(!config_.rtp.ssrcs.empty()); | |
440 RTC_DCHECK(module_process_thread_); | |
441 RTC_DCHECK(call_stats_); | 513 RTC_DCHECK(call_stats_); |
442 RTC_DCHECK(congestion_controller_); | 514 RTC_DCHECK(congestion_controller_); |
443 RTC_DCHECK(remb_); | 515 RTC_DCHECK(remb_); |
444 | 516 |
445 // RTP/RTCP initialization. | 517 // RTP/RTCP initialization. |
446 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 518 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
447 module_process_thread_->RegisterModule(rtp_rtcp); | |
448 congestion_controller_->packet_router()->AddRtpModule(rtp_rtcp); | 519 congestion_controller_->packet_router()->AddRtpModule(rtp_rtcp); |
449 } | 520 } |
450 | 521 |
451 for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) { | 522 for (size_t i = 0; i < config_->rtp.extensions.size(); ++i) { |
452 const std::string& extension = config_.rtp.extensions[i].uri; | 523 const std::string& extension = config_->rtp.extensions[i].uri; |
453 int id = config_.rtp.extensions[i].id; | 524 int id = config_->rtp.extensions[i].id; |
454 // One-byte-extension local identifiers are in the range 1-14 inclusive. | 525 // One-byte-extension local identifiers are in the range 1-14 inclusive. |
455 RTC_DCHECK_GE(id, 1); | 526 RTC_DCHECK_GE(id, 1); |
456 RTC_DCHECK_LE(id, 14); | 527 RTC_DCHECK_LE(id, 14); |
457 RTC_DCHECK(RtpExtension::IsSupportedForVideo(extension)); | 528 RTC_DCHECK(RtpExtension::IsSupportedForVideo(extension)); |
458 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 529 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
459 RTC_CHECK_EQ(0, rtp_rtcp->RegisterSendRtpHeaderExtension( | 530 RTC_CHECK_EQ(0, rtp_rtcp->RegisterSendRtpHeaderExtension( |
460 StringToRtpExtensionType(extension), id)); | 531 StringToRtpExtensionType(extension), id)); |
461 } | 532 } |
462 } | 533 } |
463 | 534 |
464 remb_->AddRembSender(rtp_rtcp_modules_[0]); | 535 remb_->AddRembSender(rtp_rtcp_modules_[0]); |
465 rtp_rtcp_modules_[0]->SetREMBStatus(true); | 536 rtp_rtcp_modules_[0]->SetREMBStatus(true); |
466 | 537 |
467 ConfigureProtection(); | 538 ConfigureProtection(); |
468 ConfigureSsrcs(); | 539 ConfigureSsrcs(); |
469 | 540 |
470 // TODO(pbos): Should we set CNAME on all RTP modules? | 541 // TODO(pbos): Should we set CNAME on all RTP modules? |
471 rtp_rtcp_modules_.front()->SetCNAME(config_.rtp.c_name.c_str()); | 542 rtp_rtcp_modules_.front()->SetCNAME(config_->rtp.c_name.c_str()); |
472 // 28 to match packet overhead in ModuleRtpRtcpImpl. | 543 // 28 to match packet overhead in ModuleRtpRtcpImpl. |
473 static const size_t kRtpPacketSizeOverhead = 28; | 544 static const size_t kRtpPacketSizeOverhead = 28; |
474 RTC_DCHECK_LE(config_.rtp.max_packet_size, 0xFFFFu + kRtpPacketSizeOverhead); | 545 RTC_DCHECK_LE(config_->rtp.max_packet_size, 0xFFFFu + kRtpPacketSizeOverhead); |
475 const uint16_t mtu = static_cast<uint16_t>(config_.rtp.max_packet_size + | 546 const uint16_t mtu = static_cast<uint16_t>(config_->rtp.max_packet_size + |
476 kRtpPacketSizeOverhead); | 547 kRtpPacketSizeOverhead); |
477 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 548 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
478 rtp_rtcp->RegisterRtcpStatisticsCallback(&stats_proxy_); | 549 rtp_rtcp->RegisterRtcpStatisticsCallback(stats_proxy_); |
479 rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(&stats_proxy_); | 550 rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(stats_proxy_); |
480 rtp_rtcp->SetMaxTransferUnit(mtu); | 551 rtp_rtcp->SetMaxTransferUnit(mtu); |
481 rtp_rtcp->RegisterVideoSendPayload( | 552 rtp_rtcp->RegisterVideoSendPayload( |
482 config_.encoder_settings.payload_type, | 553 config_->encoder_settings.payload_type, |
483 config_.encoder_settings.payload_name.c_str()); | 554 config_->encoder_settings.payload_name.c_str()); |
484 } | 555 } |
485 | 556 |
486 RTC_DCHECK(config.encoder_settings.encoder); | 557 RTC_DCHECK(config_->encoder_settings.encoder); |
487 RTC_DCHECK_GE(config.encoder_settings.payload_type, 0); | 558 RTC_DCHECK_GE(config_->encoder_settings.payload_type, 0); |
488 RTC_DCHECK_LE(config.encoder_settings.payload_type, 127); | 559 RTC_DCHECK_LE(config_->encoder_settings.payload_type, 127); |
489 ReconfigureVideoEncoder(encoder_config); | |
490 | 560 |
491 module_process_thread_->RegisterModule(&overuse_detector_); | 561 vie_encoder_->SetStartBitrate(bitrate_allocator_->GetStartBitrate(this)); |
492 | 562 vie_encoder_->SetSink(this); |
493 encoder_thread_checker_.DetachFromThread(); | |
494 encoder_thread_.Start(); | |
495 encoder_thread_.SetPriority(rtc::kHighPriority); | |
496 } | 563 } |
497 | 564 |
498 VideoSendStream::~VideoSendStream() { | 565 void VideoSendStreamInternal::RegisterProcessThread( |
499 LOG(LS_INFO) << "~VideoSendStream: " << config_.ToString(); | 566 ProcessThread* module_process_thread) { |
567 RTC_DCHECK_RUN_ON(&module_process_thread_checker_); | |
568 RTC_DCHECK(!module_process_thread_); | |
569 module_process_thread_ = module_process_thread; | |
500 | 570 |
501 Stop(); | 571 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
stefan-webrtc
2016/07/08 15:56:42
No {}
perkj_webrtc
2016/07/11 11:41:08
Done.
| |
572 module_process_thread_->RegisterModule(rtp_rtcp); | |
573 } | |
574 } | |
502 | 575 |
503 // Stop the encoder thread permanently. | 576 void VideoSendStreamInternal::DeRegisterProcessThread() { |
504 rtc::AtomicOps::ReleaseStore(&stop_encoder_thread_, 1); | 577 RTC_DCHECK_RUN_ON(&module_process_thread_checker_); |
505 encoder_wakeup_event_.Set(); | 578 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
stefan-webrtc
2016/07/08 15:56:42
Same here.
perkj_webrtc
2016/07/11 11:41:07
Done.
| |
506 encoder_thread_.Stop(); | 579 module_process_thread_->DeRegisterModule(rtp_rtcp); |
580 } | |
581 } | |
507 | 582 |
508 // This needs to happen after stopping the encoder thread, | 583 VideoSendStreamInternal::~VideoSendStreamInternal() { |
509 // since the encoder thread calls AddObserver. | 584 RTC_DCHECK_RUN_ON(worker_queue_); |
585 LOG(LS_INFO) << "~VideoSendStreamInternal: " << config_->ToString(); | |
586 | |
510 bitrate_allocator_->RemoveObserver(this); | 587 bitrate_allocator_->RemoveObserver(this); |
511 | |
512 module_process_thread_->DeRegisterModule(&overuse_detector_); | |
513 | |
514 rtp_rtcp_modules_[0]->SetREMBStatus(false); | 588 rtp_rtcp_modules_[0]->SetREMBStatus(false); |
515 remb_->RemoveRembSender(rtp_rtcp_modules_[0]); | 589 remb_->RemoveRembSender(rtp_rtcp_modules_[0]); |
516 | 590 |
517 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 591 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
518 congestion_controller_->packet_router()->RemoveRtpModule(rtp_rtcp); | 592 congestion_controller_->packet_router()->RemoveRtpModule(rtp_rtcp); |
519 module_process_thread_->DeRegisterModule(rtp_rtcp); | |
520 delete rtp_rtcp; | 593 delete rtp_rtcp; |
521 } | 594 } |
522 } | 595 } |
523 | 596 |
524 bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { | 597 bool VideoSendStreamInternal::DeliverRtcp(const uint8_t* packet, |
598 size_t length) { | |
599 // Runs on a network thread. | |
600 RTC_DCHECK(!worker_queue_->IsCurrent()); | |
525 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) | 601 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) |
526 rtp_rtcp->IncomingRtcpPacket(packet, length); | 602 rtp_rtcp->IncomingRtcpPacket(packet, length); |
527 return true; | 603 return true; |
528 } | 604 } |
529 | 605 |
530 void VideoSendStream::Start() { | 606 void VideoSendStreamInternal::Start() { |
607 RTC_DCHECK_RUN_ON(worker_queue_); | |
531 LOG(LS_INFO) << "VideoSendStream::Start"; | 608 LOG(LS_INFO) << "VideoSendStream::Start"; |
532 if (payload_router_.active()) | 609 if (payload_router_.active()) |
533 return; | 610 return; |
534 TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Start"); | 611 TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Start"); |
535 payload_router_.set_active(true); | 612 payload_router_.set_active(true); |
613 | |
614 // Add our self as bitrate observer. | |
stefan-webrtc
2016/07/08 15:56:42
I think you can remove this comment
perkj_webrtc
2016/07/11 11:41:08
Done.
| |
615 bitrate_allocator_->AddObserver( | |
616 this, encoder_min_bitrate_bps_, encoder_max_bitrate_bps_, | |
617 max_padding_bitrate_, !config_->suspend_below_min_bitrate); | |
618 | |
619 // Start monitoring encoder activity. | |
536 { | 620 { |
537 rtc::CritScope lock(&encoder_settings_crit_); | 621 rtc::CritScope lock(&encoder_activity_crit_sect_); |
538 pending_state_change_ = rtc::Optional<State>(State::kStarted); | 622 RTC_DCHECK(!check_encoder_activity_task_); |
623 check_encoder_activity_task_ = new CheckEncoderActivityTask(this); | |
624 worker_queue_->PostDelayedTask( | |
625 std::unique_ptr<rtc::QueuedTask>(check_encoder_activity_task_), | |
626 CheckEncoderActivityTask::kEncoderTimeOutMs); | |
539 } | 627 } |
540 encoder_wakeup_event_.Set(); | 628 |
629 vie_encoder_->SendKeyFrame(); | |
541 } | 630 } |
542 | 631 |
543 void VideoSendStream::Stop() { | 632 void VideoSendStreamInternal::Stop() { |
633 RTC_DCHECK_RUN_ON(worker_queue_); | |
544 LOG(LS_INFO) << "VideoSendStream::Stop"; | 634 LOG(LS_INFO) << "VideoSendStream::Stop"; |
545 if (!payload_router_.active()) | 635 if (!payload_router_.active()) |
546 return; | 636 return; |
547 TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop"); | 637 TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop"); |
548 payload_router_.set_active(false); | 638 payload_router_.set_active(false); |
639 bitrate_allocator_->RemoveObserver(this); | |
549 { | 640 { |
550 rtc::CritScope lock(&encoder_settings_crit_); | 641 rtc::CritScope lock(&encoder_activity_crit_sect_); |
551 pending_state_change_ = rtc::Optional<State>(State::kStopped); | 642 check_encoder_activity_task_->Stop(); |
643 check_encoder_activity_task_ = nullptr; | |
552 } | 644 } |
553 encoder_wakeup_event_.Set(); | 645 vie_encoder_->OnBitrateUpdated(0, 0, 0); |
646 stats_proxy_->OnSetEncoderTargetRate(0); | |
554 } | 647 } |
555 | 648 |
556 VideoCaptureInput* VideoSendStream::Input() { | 649 void VideoSendStreamInternal::SignalEncoderTimedOut() { |
557 return &input_; | 650 RTC_DCHECK_RUN_ON(worker_queue_); |
651 // If the encoder has not produced anything the last kEncoderTimeOutMs and it | |
652 // is supposed to, deregister as BitrateAllocatorObserver. This can happen | |
653 // if a camera stop producing frames, temporary or permanently during a call. | |
stefan-webrtc
2016/07/08 15:56:42
this should be "if a camera stops producing frames
perkj_webrtc
2016/07/11 11:41:07
Done.
| |
654 if (encoder_target_rate_bps_ > 0) { | |
655 LOG_F(LS_INFO) << "Encoder timed out."; | |
656 bitrate_allocator_->RemoveObserver(this); | |
657 } | |
558 } | 658 } |
559 | 659 |
560 bool VideoSendStream::EncoderThreadFunction(void* obj) { | 660 void VideoSendStreamInternal::SignalEncoderActive() { |
561 static_cast<VideoSendStream*>(obj)->EncoderProcess(); | 661 RTC_DCHECK_RUN_ON(worker_queue_); |
562 // We're done, return false to abort. | 662 LOG_F(LS_INFO) << "Encoder is active."; |
563 return false; | 663 bitrate_allocator_->AddObserver( |
664 this, encoder_min_bitrate_bps_, encoder_max_bitrate_bps_, | |
665 max_padding_bitrate_, !config_->suspend_below_min_bitrate); | |
564 } | 666 } |
565 | 667 |
566 void VideoSendStream::EncoderProcess() { | 668 void VideoSendStreamInternal::ReconfigureVideoEncoder( |
567 RTC_CHECK_EQ(0, vie_encoder_.RegisterExternalEncoder( | 669 const VideoEncoderConfig& config) { |
568 config_.encoder_settings.encoder, | 670 RTC_DCHECK_GE(config_->rtp.ssrcs.size(), config.streams.size()); |
569 config_.encoder_settings.payload_type, | 671 TRACE_EVENT0("webrtc", "VideoSendStream::(Re)configureVideoEncoder"); |
570 config_.encoder_settings.internal_source)); | 672 LOG(LS_INFO) << "(Re)configureVideoEncoder: " << config.ToString(); |
571 RTC_DCHECK_RUN_ON(&encoder_thread_checker_); | 673 RTC_DCHECK_GE(config_->rtp.ssrcs.size(), config.streams.size()); |
572 while (true) { | 674 RTC_DCHECK_RUN_ON(worker_queue_); |
573 // Wake up every kEncodeCheckForActivityPeriodMs to check if the encoder is | |
574 // active. If not, deregister as BitrateAllocatorObserver. | |
575 const int kEncodeCheckForActivityPeriodMs = 1000; | |
576 encoder_wakeup_event_.Wait(kEncodeCheckForActivityPeriodMs); | |
577 if (rtc::AtomicOps::AcquireLoad(&stop_encoder_thread_)) | |
578 break; | |
579 bool change_settings = false; | |
580 rtc::Optional<State> pending_state_change; | |
581 { | |
582 rtc::CritScope lock(&encoder_settings_crit_); | |
583 if (pending_encoder_settings_) { | |
584 std::swap(current_encoder_settings_, pending_encoder_settings_); | |
585 pending_encoder_settings_.reset(); | |
586 change_settings = true; | |
587 } else if (pending_state_change_) { | |
588 swap(pending_state_change, pending_state_change_); | |
589 } | |
590 } | |
591 if (change_settings) { | |
592 current_encoder_settings_->video_codec.startBitrate = std::max( | |
593 bitrate_allocator_->GetStartBitrate(this) / 1000, | |
594 static_cast<int>(current_encoder_settings_->video_codec.minBitrate)); | |
595 | 675 |
596 if (state_ == State::kStarted) { | 676 const int kEncoderMinBitrateBps = 30000; |
597 bitrate_allocator_->AddObserver( | 677 encoder_min_bitrate_bps_ = |
598 this, current_encoder_settings_->video_codec.minBitrate * 1000, | 678 std::max(config.streams[0].min_bitrate_bps, kEncoderMinBitrateBps); |
599 current_encoder_settings_->video_codec.maxBitrate * 1000, | 679 encoder_max_bitrate_bps_ = 0; |
600 CalulcateMaxPadBitrateBps(current_encoder_settings_->config, | 680 for (const auto& stream : config.streams) |
601 config_.suspend_below_min_bitrate), | 681 encoder_max_bitrate_bps_ += stream.max_bitrate_bps; |
602 !config_.suspend_below_min_bitrate); | 682 max_padding_bitrate_ = |
603 } | 683 CalculateMaxPadBitrateBps(config, config_->suspend_below_min_bitrate); |
604 | 684 |
605 payload_router_.SetSendStreams(current_encoder_settings_->config.streams); | 685 payload_router_.SetSendStreams(config.streams); |
606 vie_encoder_.SetEncoder(current_encoder_settings_->video_codec, | |
607 payload_router_.MaxPayloadLength()); | |
608 | 686 |
609 // Clear stats for disabled layers. | 687 // Clear stats for disabled layers. |
610 for (size_t i = current_encoder_settings_->config.streams.size(); | 688 for (size_t i = config.streams.size(); i < config_->rtp.ssrcs.size(); ++i) { |
611 i < config_.rtp.ssrcs.size(); ++i) { | 689 stats_proxy_->OnInactiveSsrc(config_->rtp.ssrcs[i]); |
612 stats_proxy_.OnInactiveSsrc(config_.rtp.ssrcs[i]); | 690 } |
613 } | |
614 | 691 |
615 size_t number_of_temporal_layers = | 692 size_t number_of_temporal_layers = |
616 current_encoder_settings_->config.streams.back() | 693 config.streams.back().temporal_layer_thresholds_bps.size() + 1; |
617 .temporal_layer_thresholds_bps.size() + | 694 protection_bitrate_calculator_.SetEncodingData( |
618 1; | 695 config.streams[0].width, config.streams[0].height, |
619 protection_bitrate_calculator_.SetEncodingData( | 696 number_of_temporal_layers, config_->rtp.max_packet_size); |
620 current_encoder_settings_->video_codec.width, | |
621 current_encoder_settings_->video_codec.height, | |
622 number_of_temporal_layers, payload_router_.MaxPayloadLength()); | |
623 | 697 |
624 // We might've gotten new settings while configuring the encoder settings, | 698 if (payload_router_.active()) { |
625 // restart from the top to see if that's the case before trying to encode | 699 // The send stream is started already. Update the allocator with new bitrate |
626 // a frame (which might correspond to the last frame size). | 700 // limits. |
627 encoder_wakeup_event_.Set(); | 701 bitrate_allocator_->AddObserver( |
628 continue; | 702 this, encoder_min_bitrate_bps_, encoder_max_bitrate_bps_, |
629 } | 703 max_padding_bitrate_, !config_->suspend_below_min_bitrate); |
630 | |
631 if (pending_state_change) { | |
632 if (*pending_state_change == State::kStarted && | |
633 state_ == State::kStopped) { | |
634 bitrate_allocator_->AddObserver( | |
635 this, current_encoder_settings_->video_codec.minBitrate * 1000, | |
636 current_encoder_settings_->video_codec.maxBitrate * 1000, | |
637 CalulcateMaxPadBitrateBps(current_encoder_settings_->config, | |
638 config_.suspend_below_min_bitrate), | |
639 !config_.suspend_below_min_bitrate); | |
640 vie_encoder_.SendKeyFrame(); | |
641 state_ = State::kStarted; | |
642 LOG_F(LS_INFO) << "Encoder started."; | |
643 } else if (*pending_state_change == State::kStopped) { | |
644 bitrate_allocator_->RemoveObserver(this); | |
645 vie_encoder_.OnBitrateUpdated(0, 0, 0); | |
646 stats_proxy_.OnSetEncoderTargetRate(0); | |
647 state_ = State::kStopped; | |
648 LOG_F(LS_INFO) << "Encoder stopped."; | |
649 } | |
650 encoder_wakeup_event_.Set(); | |
651 continue; | |
652 } | |
653 | |
654 // Check if the encoder has produced anything the last kEncoderTimeOutMs. | |
655 // If not, deregister as BitrateAllocatorObserver. | |
656 if (state_ == State::kStarted && | |
657 vie_encoder_.time_of_last_frame_activity_ms() < | |
658 rtc::TimeMillis() - kEncoderTimeOutMs) { | |
659 // The encoder has timed out. | |
660 LOG_F(LS_INFO) << "Encoder timed out."; | |
661 bitrate_allocator_->RemoveObserver(this); | |
662 state_ = State::kEncoderTimedOut; | |
663 } | |
664 if (state_ == State::kEncoderTimedOut && | |
665 vie_encoder_.time_of_last_frame_activity_ms() > | |
666 rtc::TimeMillis() - kEncoderTimeOutMs) { | |
667 LOG_F(LS_INFO) << "Encoder is active."; | |
668 bitrate_allocator_->AddObserver( | |
669 this, current_encoder_settings_->video_codec.minBitrate * 1000, | |
670 current_encoder_settings_->video_codec.maxBitrate * 1000, | |
671 CalulcateMaxPadBitrateBps(current_encoder_settings_->config, | |
672 config_.suspend_below_min_bitrate), | |
673 !config_.suspend_below_min_bitrate); | |
674 state_ = State::kStarted; | |
675 } | |
676 | |
677 VideoFrame frame; | |
678 if (input_.GetVideoFrame(&frame)) { | |
679 // TODO(perkj): |pre_encode_callback| is only used by tests. Tests should | |
680 // register as a sink to the VideoSource instead. | |
681 if (config_.pre_encode_callback) { | |
682 config_.pre_encode_callback->OnFrame(frame); | |
683 } | |
684 vie_encoder_.EncodeVideoFrame(frame); | |
685 } | |
686 } | 704 } |
687 vie_encoder_.DeRegisterExternalEncoder(config_.encoder_settings.payload_type); | |
688 } | 705 } |
689 | 706 |
690 void VideoSendStream::ReconfigureVideoEncoder( | 707 int32_t VideoSendStreamInternal::Encoded( |
691 const VideoEncoderConfig& config) { | 708 const EncodedImage& encoded_image, |
692 TRACE_EVENT0("webrtc", "VideoSendStream::(Re)configureVideoEncoder"); | 709 const CodecSpecificInfo* codec_specific_info, |
693 LOG(LS_INFO) << "(Re)configureVideoEncoder: " << config.ToString(); | 710 const RTPFragmentationHeader* fragmentation) { |
694 RTC_DCHECK_GE(config_.rtp.ssrcs.size(), config.streams.size()); | 711 if (config_->post_encode_callback) { |
695 VideoCodec video_codec = VideoEncoderConfigToVideoCodec( | 712 config_->post_encode_callback->EncodedFrameCallback( |
696 config, config_.encoder_settings.payload_name, | |
697 config_.encoder_settings.payload_type); | |
698 { | |
699 rtc::CritScope lock(&encoder_settings_crit_); | |
700 encoder_max_bitrate_bps_ = video_codec.maxBitrate * 1000; | |
701 pending_encoder_settings_.reset(new EncoderSettings({video_codec, config})); | |
702 } | |
703 encoder_wakeup_event_.Set(); | |
704 } | |
705 | |
706 VideoSendStream::Stats VideoSendStream::GetStats() { | |
707 return stats_proxy_.GetStats(); | |
708 } | |
709 | |
710 void VideoSendStream::OveruseDetected() { | |
711 if (config_.overuse_callback) | |
712 config_.overuse_callback->OnLoadUpdate(LoadObserver::kOveruse); | |
713 } | |
714 | |
715 void VideoSendStream::NormalUsage() { | |
716 if (config_.overuse_callback) | |
717 config_.overuse_callback->OnLoadUpdate(LoadObserver::kUnderuse); | |
718 } | |
719 | |
720 int32_t VideoSendStream::Encoded(const EncodedImage& encoded_image, | |
721 const CodecSpecificInfo* codec_specific_info, | |
722 const RTPFragmentationHeader* fragmentation) { | |
723 if (config_.post_encode_callback) { | |
724 config_.post_encode_callback->EncodedFrameCallback( | |
725 EncodedFrame(encoded_image._buffer, encoded_image._length, | 713 EncodedFrame(encoded_image._buffer, encoded_image._length, |
726 encoded_image._frameType)); | 714 encoded_image._frameType)); |
727 } | 715 } |
716 { | |
717 rtc::CritScope lock(&encoder_activity_crit_sect_); | |
718 if (check_encoder_activity_task_) | |
719 check_encoder_activity_task_->UpdateEncoderActivity(); | |
720 } | |
728 | 721 |
729 protection_bitrate_calculator_.UpdateWithEncodedData(encoded_image); | 722 protection_bitrate_calculator_.UpdateWithEncodedData(encoded_image); |
730 int32_t return_value = payload_router_.Encoded( | 723 int32_t return_value = payload_router_.Encoded( |
731 encoded_image, codec_specific_info, fragmentation); | 724 encoded_image, codec_specific_info, fragmentation); |
732 | 725 |
733 if (kEnableFrameRecording) { | 726 if (kEnableFrameRecording) { |
734 int layer = codec_specific_info->codecType == kVideoCodecVP8 | 727 int layer = codec_specific_info->codecType == kVideoCodecVP8 |
735 ? codec_specific_info->codecSpecific.VP8.simulcastIdx | 728 ? codec_specific_info->codecSpecific.VP8.simulcastIdx |
736 : 0; | 729 : 0; |
737 IvfFileWriter* file_writer; | 730 IvfFileWriter* file_writer; |
738 { | 731 { |
739 if (file_writers_[layer] == nullptr) { | 732 if (file_writers_[layer] == nullptr) { |
740 std::ostringstream oss; | 733 std::ostringstream oss; |
741 oss << "send_bitstream_ssrc"; | 734 oss << "send_bitstream_ssrc"; |
742 for (uint32_t ssrc : config_.rtp.ssrcs) | 735 for (uint32_t ssrc : config_->rtp.ssrcs) |
743 oss << "_" << ssrc; | 736 oss << "_" << ssrc; |
744 oss << "_layer" << layer << ".ivf"; | 737 oss << "_layer" << layer << ".ivf"; |
745 file_writers_[layer] = | 738 file_writers_[layer] = |
746 IvfFileWriter::Open(oss.str(), codec_specific_info->codecType); | 739 IvfFileWriter::Open(oss.str(), codec_specific_info->codecType); |
747 } | 740 } |
748 file_writer = file_writers_[layer].get(); | 741 file_writer = file_writers_[layer].get(); |
749 } | 742 } |
750 if (file_writer) { | 743 if (file_writer) { |
751 bool ok = file_writer->WriteFrame(encoded_image); | 744 bool ok = file_writer->WriteFrame(encoded_image); |
752 RTC_DCHECK(ok); | 745 RTC_DCHECK(ok); |
753 } | 746 } |
754 } | 747 } |
755 | 748 |
756 return return_value; | 749 return return_value; |
757 } | 750 } |
758 | 751 |
759 void VideoSendStream::ConfigureProtection() { | 752 void VideoSendStreamInternal::ConfigureProtection() { |
753 RTC_DCHECK_RUN_ON(worker_queue_); | |
760 // Enable NACK, FEC or both. | 754 // Enable NACK, FEC or both. |
761 const bool enable_protection_nack = config_.rtp.nack.rtp_history_ms > 0; | 755 const bool enable_protection_nack = config_->rtp.nack.rtp_history_ms > 0; |
762 bool enable_protection_fec = config_.rtp.fec.ulpfec_payload_type != -1; | 756 bool enable_protection_fec = config_->rtp.fec.ulpfec_payload_type != -1; |
763 // Payload types without picture ID cannot determine that a stream is complete | 757 // Payload types without picture ID cannot determine that a stream is complete |
764 // without retransmitting FEC, so using FEC + NACK for H.264 (for instance) is | 758 // without retransmitting FEC, so using FEC + NACK for H.264 (for instance) is |
765 // a waste of bandwidth since FEC packets still have to be transmitted. Note | 759 // a waste of bandwidth since FEC packets still have to be transmitted. Note |
766 // that this is not the case with FLEXFEC. | 760 // that this is not the case with FLEXFEC. |
767 if (enable_protection_nack && | 761 if (enable_protection_nack && |
768 !PayloadTypeSupportsSkippingFecPackets( | 762 !PayloadTypeSupportsSkippingFecPackets( |
769 config_.encoder_settings.payload_name)) { | 763 config_->encoder_settings.payload_name)) { |
770 LOG(LS_WARNING) << "Transmitting payload type without picture ID using" | 764 LOG(LS_WARNING) << "Transmitting payload type without picture ID using" |
771 "NACK+FEC is a waste of bandwidth since FEC packets " | 765 "NACK+FEC is a waste of bandwidth since FEC packets " |
772 "also have to be retransmitted. Disabling FEC."; | 766 "also have to be retransmitted. Disabling FEC."; |
773 enable_protection_fec = false; | 767 enable_protection_fec = false; |
774 } | 768 } |
775 | 769 |
776 // Set to valid uint8_ts to be castable later without signed overflows. | 770 // Set to valid uint8_ts to be castable later without signed overflows. |
777 uint8_t payload_type_red = 0; | 771 uint8_t payload_type_red = 0; |
778 uint8_t payload_type_fec = 0; | 772 uint8_t payload_type_fec = 0; |
779 | 773 |
780 // TODO(changbin): Should set RTX for RED mapping in RTP sender in future. | 774 // TODO(changbin): Should set RTX for RED mapping in RTP sender in future. |
781 // Validate payload types. If either RED or FEC payload types are set then | 775 // Validate payload types. If either RED or FEC payload types are set then |
782 // both should be. If FEC is enabled then they both have to be set. | 776 // both should be. If FEC is enabled then they both have to be set. |
783 if (config_.rtp.fec.red_payload_type != -1) { | 777 if (config_->rtp.fec.red_payload_type != -1) { |
784 RTC_DCHECK_GE(config_.rtp.fec.red_payload_type, 0); | 778 RTC_DCHECK_GE(config_->rtp.fec.red_payload_type, 0); |
785 RTC_DCHECK_LE(config_.rtp.fec.red_payload_type, 127); | 779 RTC_DCHECK_LE(config_->rtp.fec.red_payload_type, 127); |
786 // TODO(holmer): We should only enable red if ulpfec is also enabled, but | 780 // TODO(holmer): We should only enable red if ulpfec is also enabled, but |
787 // but due to an incompatibility issue with previous versions the receiver | 781 // but due to an incompatibility issue with previous versions the receiver |
788 // assumes rtx packets are containing red if it has been configured to | 782 // assumes rtx packets are containing red if it has been configured to |
789 // receive red. Remove this in a few versions once the incompatibility | 783 // receive red. Remove this in a few versions once the incompatibility |
790 // issue is resolved (M53 timeframe). | 784 // issue is resolved (M53 timeframe). |
791 payload_type_red = static_cast<uint8_t>(config_.rtp.fec.red_payload_type); | 785 payload_type_red = static_cast<uint8_t>(config_->rtp.fec.red_payload_type); |
792 } | 786 } |
793 if (config_.rtp.fec.ulpfec_payload_type != -1) { | 787 if (config_->rtp.fec.ulpfec_payload_type != -1) { |
794 RTC_DCHECK_GE(config_.rtp.fec.ulpfec_payload_type, 0); | 788 RTC_DCHECK_GE(config_->rtp.fec.ulpfec_payload_type, 0); |
795 RTC_DCHECK_LE(config_.rtp.fec.ulpfec_payload_type, 127); | 789 RTC_DCHECK_LE(config_->rtp.fec.ulpfec_payload_type, 127); |
796 payload_type_fec = | 790 payload_type_fec = |
797 static_cast<uint8_t>(config_.rtp.fec.ulpfec_payload_type); | 791 static_cast<uint8_t>(config_->rtp.fec.ulpfec_payload_type); |
798 } | 792 } |
799 | 793 |
800 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 794 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
801 // Set NACK. | 795 // Set NACK. |
802 rtp_rtcp->SetStorePacketsStatus( | 796 rtp_rtcp->SetStorePacketsStatus( |
803 enable_protection_nack || congestion_controller_->pacer(), | 797 enable_protection_nack || congestion_controller_->pacer(), |
804 kMinSendSidePacketHistorySize); | 798 kMinSendSidePacketHistorySize); |
805 // Set FEC. | 799 // Set FEC. |
806 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 800 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
807 rtp_rtcp->SetGenericFECStatus(enable_protection_fec, payload_type_red, | 801 rtp_rtcp->SetGenericFECStatus(enable_protection_fec, payload_type_red, |
808 payload_type_fec); | 802 payload_type_fec); |
809 } | 803 } |
810 } | 804 } |
811 | 805 |
812 protection_bitrate_calculator_.SetProtectionMethod(enable_protection_fec, | 806 protection_bitrate_calculator_.SetProtectionMethod(enable_protection_fec, |
813 enable_protection_nack); | 807 enable_protection_nack); |
814 } | 808 } |
815 | 809 |
816 void VideoSendStream::ConfigureSsrcs() { | 810 void VideoSendStreamInternal::ConfigureSsrcs() { |
811 RTC_DCHECK_RUN_ON(worker_queue_); | |
817 // Configure regular SSRCs. | 812 // Configure regular SSRCs. |
818 for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) { | 813 for (size_t i = 0; i < config_->rtp.ssrcs.size(); ++i) { |
819 uint32_t ssrc = config_.rtp.ssrcs[i]; | 814 uint32_t ssrc = config_->rtp.ssrcs[i]; |
820 RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i]; | 815 RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i]; |
821 rtp_rtcp->SetSSRC(ssrc); | 816 rtp_rtcp->SetSSRC(ssrc); |
822 | 817 |
823 // Restore RTP state if previous existed. | 818 // Restore RTP state if previous existed. |
824 RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc); | 819 VideoSendStream::RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc); |
825 if (it != suspended_ssrcs_.end()) | 820 if (it != suspended_ssrcs_.end()) |
826 rtp_rtcp->SetRtpState(it->second); | 821 rtp_rtcp->SetRtpState(it->second); |
827 } | 822 } |
828 | 823 |
829 // Set up RTX if available. | 824 // Set up RTX if available. |
830 if (config_.rtp.rtx.ssrcs.empty()) | 825 if (config_->rtp.rtx.ssrcs.empty()) |
831 return; | 826 return; |
832 | 827 |
833 // Configure RTX SSRCs. | 828 // Configure RTX SSRCs. |
834 RTC_DCHECK_EQ(config_.rtp.rtx.ssrcs.size(), config_.rtp.ssrcs.size()); | 829 RTC_DCHECK_EQ(config_->rtp.rtx.ssrcs.size(), config_->rtp.ssrcs.size()); |
835 for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) { | 830 for (size_t i = 0; i < config_->rtp.rtx.ssrcs.size(); ++i) { |
836 uint32_t ssrc = config_.rtp.rtx.ssrcs[i]; | 831 uint32_t ssrc = config_->rtp.rtx.ssrcs[i]; |
837 RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i]; | 832 RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i]; |
838 rtp_rtcp->SetRtxSsrc(ssrc); | 833 rtp_rtcp->SetRtxSsrc(ssrc); |
839 RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc); | 834 VideoSendStream::RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc); |
840 if (it != suspended_ssrcs_.end()) | 835 if (it != suspended_ssrcs_.end()) |
841 rtp_rtcp->SetRtxState(it->second); | 836 rtp_rtcp->SetRtxState(it->second); |
842 } | 837 } |
843 | 838 |
844 // Configure RTX payload types. | 839 // Configure RTX payload types. |
845 RTC_DCHECK_GE(config_.rtp.rtx.payload_type, 0); | 840 RTC_DCHECK_GE(config_->rtp.rtx.payload_type, 0); |
846 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 841 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
847 rtp_rtcp->SetRtxSendPayloadType(config_.rtp.rtx.payload_type, | 842 rtp_rtcp->SetRtxSendPayloadType(config_->rtp.rtx.payload_type, |
848 config_.encoder_settings.payload_type); | 843 config_->encoder_settings.payload_type); |
849 rtp_rtcp->SetRtxSendStatus(kRtxRetransmitted | kRtxRedundantPayloads); | 844 rtp_rtcp->SetRtxSendStatus(kRtxRetransmitted | kRtxRedundantPayloads); |
850 } | 845 } |
851 if (config_.rtp.fec.red_payload_type != -1 && | 846 if (config_->rtp.fec.red_payload_type != -1 && |
852 config_.rtp.fec.red_rtx_payload_type != -1) { | 847 config_->rtp.fec.red_rtx_payload_type != -1) { |
853 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 848 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
854 rtp_rtcp->SetRtxSendPayloadType(config_.rtp.fec.red_rtx_payload_type, | 849 rtp_rtcp->SetRtxSendPayloadType(config_->rtp.fec.red_rtx_payload_type, |
855 config_.rtp.fec.red_payload_type); | 850 config_->rtp.fec.red_payload_type); |
856 } | 851 } |
857 } | 852 } |
858 } | 853 } |
859 | 854 |
860 std::map<uint32_t, RtpState> VideoSendStream::GetRtpStates() const { | 855 std::map<uint32_t, RtpState> VideoSendStreamInternal::GetRtpStates() const { |
856 RTC_DCHECK_RUN_ON(worker_queue_); | |
861 std::map<uint32_t, RtpState> rtp_states; | 857 std::map<uint32_t, RtpState> rtp_states; |
862 for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) { | 858 for (size_t i = 0; i < config_->rtp.ssrcs.size(); ++i) { |
863 uint32_t ssrc = config_.rtp.ssrcs[i]; | 859 uint32_t ssrc = config_->rtp.ssrcs[i]; |
864 RTC_DCHECK_EQ(ssrc, rtp_rtcp_modules_[i]->SSRC()); | 860 RTC_DCHECK_EQ(ssrc, rtp_rtcp_modules_[i]->SSRC()); |
865 rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtpState(); | 861 rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtpState(); |
866 } | 862 } |
867 | 863 |
868 for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) { | 864 for (size_t i = 0; i < config_->rtp.rtx.ssrcs.size(); ++i) { |
869 uint32_t ssrc = config_.rtp.rtx.ssrcs[i]; | 865 uint32_t ssrc = config_->rtp.rtx.ssrcs[i]; |
870 rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtxState(); | 866 rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtxState(); |
871 } | 867 } |
872 | 868 |
873 return rtp_states; | 869 return rtp_states; |
874 } | 870 } |
875 | 871 |
876 void VideoSendStream::SignalNetworkState(NetworkState state) { | 872 void VideoSendStreamInternal::SignalNetworkState(NetworkState state) { |
873 RTC_DCHECK_RUN_ON(worker_queue_); | |
877 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 874 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
878 rtp_rtcp->SetRTCPStatus(state == kNetworkUp ? config_.rtp.rtcp_mode | 875 rtp_rtcp->SetRTCPStatus(state == kNetworkUp ? config_->rtp.rtcp_mode |
879 : RtcpMode::kOff); | 876 : RtcpMode::kOff); |
880 } | 877 } |
881 } | 878 } |
882 | 879 |
883 uint32_t VideoSendStream::OnBitrateUpdated(uint32_t bitrate_bps, | 880 uint32_t VideoSendStreamInternal::OnBitrateUpdated(uint32_t bitrate_bps, |
884 uint8_t fraction_loss, | 881 uint8_t fraction_loss, |
885 int64_t rtt) { | 882 int64_t rtt) { |
883 RTC_DCHECK_RUN_ON(worker_queue_); | |
884 RTC_DCHECK(payload_router_.active()) | |
885 << "VideoSendStream::Start has not been called."; | |
886 payload_router_.SetTargetSendBitrate(bitrate_bps); | 886 payload_router_.SetTargetSendBitrate(bitrate_bps); |
887 // Get the encoder target rate. It is the estimated network rate - | 887 // Get the encoder target rate. It is the estimated network rate - |
888 // protection overhead. | 888 // protection overhead. |
889 uint32_t encoder_target_rate_bps = | 889 encoder_target_rate_bps_ = protection_bitrate_calculator_.SetTargetRates( |
890 protection_bitrate_calculator_.SetTargetRates( | 890 bitrate_bps, stats_proxy_->GetSendFrameRate(), fraction_loss, rtt); |
891 bitrate_bps, stats_proxy_.GetSendFrameRate(), fraction_loss, rtt); | 891 uint32_t protection_bitrate = bitrate_bps - encoder_target_rate_bps_; |
892 | 892 |
893 uint32_t protection_bitrate = bitrate_bps - encoder_target_rate_bps; | 893 encoder_target_rate_bps_ = |
894 { | 894 std::min(encoder_max_bitrate_bps_, encoder_target_rate_bps_); |
895 // Limit the target bitrate to the configured max bitrate. | 895 vie_encoder_->OnBitrateUpdated(encoder_target_rate_bps_, fraction_loss, rtt); |
896 rtc::CritScope lock(&encoder_settings_crit_); | 896 stats_proxy_->OnSetEncoderTargetRate(encoder_target_rate_bps_); |
897 encoder_target_rate_bps = | |
898 std::min(encoder_max_bitrate_bps_, encoder_target_rate_bps); | |
899 if ((encoder_target_rate_bps_ == 0 && encoder_target_rate_bps > 0) || | |
900 (encoder_target_rate_bps_ > 0 && encoder_target_rate_bps == 0)) { | |
901 LOG(LS_INFO) | |
902 << "OnBitrateUpdated: Encoder state changed, target bitrate " | |
903 << encoder_target_rate_bps << " bps."; | |
904 } | |
905 encoder_target_rate_bps_ = encoder_target_rate_bps; | |
906 } | |
907 vie_encoder_.OnBitrateUpdated(encoder_target_rate_bps, fraction_loss, rtt); | |
908 stats_proxy_.OnSetEncoderTargetRate(encoder_target_rate_bps); | |
909 | |
910 return protection_bitrate; | 897 return protection_bitrate; |
911 } | 898 } |
912 | 899 |
913 int VideoSendStream::ProtectionRequest(const FecProtectionParams* delta_params, | 900 int VideoSendStreamInternal::ProtectionRequest( |
914 const FecProtectionParams* key_params, | 901 const FecProtectionParams* delta_params, |
915 uint32_t* sent_video_rate_bps, | 902 const FecProtectionParams* key_params, |
916 uint32_t* sent_nack_rate_bps, | 903 uint32_t* sent_video_rate_bps, |
917 uint32_t* sent_fec_rate_bps) { | 904 uint32_t* sent_nack_rate_bps, |
905 uint32_t* sent_fec_rate_bps) { | |
906 RTC_DCHECK_RUN_ON(worker_queue_); | |
918 *sent_video_rate_bps = 0; | 907 *sent_video_rate_bps = 0; |
919 *sent_nack_rate_bps = 0; | 908 *sent_nack_rate_bps = 0; |
920 *sent_fec_rate_bps = 0; | 909 *sent_fec_rate_bps = 0; |
921 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 910 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
922 uint32_t not_used = 0; | 911 uint32_t not_used = 0; |
923 uint32_t module_video_rate = 0; | 912 uint32_t module_video_rate = 0; |
924 uint32_t module_fec_rate = 0; | 913 uint32_t module_fec_rate = 0; |
925 uint32_t module_nack_rate = 0; | 914 uint32_t module_nack_rate = 0; |
926 rtp_rtcp->SetFecParameters(delta_params, key_params); | 915 rtp_rtcp->SetFecParameters(delta_params, key_params); |
927 rtp_rtcp->BitrateSent(¬_used, &module_video_rate, &module_fec_rate, | 916 rtp_rtcp->BitrateSent(¬_used, &module_video_rate, &module_fec_rate, |
928 &module_nack_rate); | 917 &module_nack_rate); |
929 *sent_video_rate_bps += module_video_rate; | 918 *sent_video_rate_bps += module_video_rate; |
930 *sent_nack_rate_bps += module_nack_rate; | 919 *sent_nack_rate_bps += module_nack_rate; |
931 *sent_fec_rate_bps += module_fec_rate; | 920 *sent_fec_rate_bps += module_fec_rate; |
932 } | 921 } |
933 return 0; | 922 return 0; |
934 } | 923 } |
935 | 924 |
936 } // namespace internal | 925 } // namespace internal |
937 } // namespace webrtc | 926 } // namespace webrtc |
OLD | NEW |