OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | |
11 #include "webrtc/video/video_send_stream.h" | 10 #include "webrtc/video/video_send_stream.h" |
12 | 11 |
13 #include <algorithm> | 12 #include <algorithm> |
14 #include <sstream> | 13 #include <sstream> |
15 #include <string> | 14 #include <string> |
16 #include <utility> | 15 #include <utility> |
17 #include <vector> | 16 #include <vector> |
18 | 17 |
19 #include "webrtc/base/checks.h" | 18 #include "webrtc/base/checks.h" |
20 #include "webrtc/base/logging.h" | 19 #include "webrtc/base/logging.h" |
21 #include "webrtc/base/trace_event.h" | 20 #include "webrtc/base/trace_event.h" |
22 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" | |
23 #include "webrtc/modules/bitrate_controller/include/bitrate_controller.h" | 21 #include "webrtc/modules/bitrate_controller/include/bitrate_controller.h" |
24 #include "webrtc/modules/congestion_controller/include/congestion_controller.h" | 22 #include "webrtc/modules/congestion_controller/include/congestion_controller.h" |
25 #include "webrtc/modules/pacing/packet_router.h" | 23 #include "webrtc/modules/pacing/packet_router.h" |
26 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h" | 24 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h" |
27 #include "webrtc/modules/utility/include/process_thread.h" | 25 #include "webrtc/modules/utility/include/process_thread.h" |
28 #include "webrtc/modules/video_coding/utility/ivf_file_writer.h" | 26 #include "webrtc/modules/video_coding/utility/ivf_file_writer.h" |
29 #include "webrtc/video/call_stats.h" | 27 #include "webrtc/video/call_stats.h" |
30 #include "webrtc/video/video_capture_input.h" | |
31 #include "webrtc/video/vie_remb.h" | 28 #include "webrtc/video/vie_remb.h" |
32 #include "webrtc/video_send_stream.h" | 29 #include "webrtc/video_send_stream.h" |
33 | 30 |
34 namespace webrtc { | 31 namespace webrtc { |
35 | 32 |
36 class RtcpIntraFrameObserver; | |
37 class TransportFeedbackObserver; | |
38 | |
39 static const int kMinSendSidePacketHistorySize = 600; | 33 static const int kMinSendSidePacketHistorySize = 600; |
40 static const int kEncoderTimeOutMs = 2000; | |
41 | |
42 namespace { | 34 namespace { |
43 | 35 |
44 std::vector<RtpRtcp*> CreateRtpRtcpModules( | 36 std::vector<RtpRtcp*> CreateRtpRtcpModules( |
45 Transport* outgoing_transport, | 37 Transport* outgoing_transport, |
46 RtcpIntraFrameObserver* intra_frame_callback, | 38 RtcpIntraFrameObserver* intra_frame_callback, |
47 RtcpBandwidthObserver* bandwidth_callback, | 39 RtcpBandwidthObserver* bandwidth_callback, |
48 TransportFeedbackObserver* transport_feedback_callback, | 40 TransportFeedbackObserver* transport_feedback_callback, |
49 RtcpRttStats* rtt_stats, | 41 RtcpRttStats* rtt_stats, |
50 RtpPacketSender* paced_sender, | 42 RtpPacketSender* paced_sender, |
51 TransportSequenceNumberAllocator* transport_sequence_number_allocator, | 43 TransportSequenceNumberAllocator* transport_sequence_number_allocator, |
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
143 } | 135 } |
144 | 136 |
145 std::string VideoSendStream::Config::ToString() const { | 137 std::string VideoSendStream::Config::ToString() const { |
146 std::stringstream ss; | 138 std::stringstream ss; |
147 ss << "{encoder_settings: " << encoder_settings.ToString(); | 139 ss << "{encoder_settings: " << encoder_settings.ToString(); |
148 ss << ", rtp: " << rtp.ToString(); | 140 ss << ", rtp: " << rtp.ToString(); |
149 ss << ", pre_encode_callback: " | 141 ss << ", pre_encode_callback: " |
150 << (pre_encode_callback ? "(I420FrameCallback)" : "nullptr"); | 142 << (pre_encode_callback ? "(I420FrameCallback)" : "nullptr"); |
151 ss << ", post_encode_callback: " | 143 ss << ", post_encode_callback: " |
152 << (post_encode_callback ? "(EncodedFrameObserver)" : "nullptr"); | 144 << (post_encode_callback ? "(EncodedFrameObserver)" : "nullptr"); |
153 ss << ", local_renderer: " | |
154 << (local_renderer ? "(VideoRenderer)" : "nullptr"); | |
155 ss << ", render_delay_ms: " << render_delay_ms; | 145 ss << ", render_delay_ms: " << render_delay_ms; |
156 ss << ", target_delay_ms: " << target_delay_ms; | 146 ss << ", target_delay_ms: " << target_delay_ms; |
157 ss << ", suspend_below_min_bitrate: " << (suspend_below_min_bitrate ? "on" | 147 ss << ", suspend_below_min_bitrate: " << (suspend_below_min_bitrate ? "on" |
158 : "off"); | 148 : "off"); |
159 ss << '}'; | 149 ss << '}'; |
160 return ss.str(); | 150 return ss.str(); |
161 } | 151 } |
162 | 152 |
163 namespace { | 153 namespace { |
164 | 154 |
165 VideoCodecType PayloadNameToCodecType(const std::string& payload_name) { | |
166 if (payload_name == "VP8") | |
167 return kVideoCodecVP8; | |
168 if (payload_name == "VP9") | |
169 return kVideoCodecVP9; | |
170 if (payload_name == "H264") | |
171 return kVideoCodecH264; | |
172 return kVideoCodecGeneric; | |
173 } | |
174 | |
175 bool PayloadTypeSupportsSkippingFecPackets(const std::string& payload_name) { | 155 bool PayloadTypeSupportsSkippingFecPackets(const std::string& payload_name) { |
176 switch (PayloadNameToCodecType(payload_name)) { | 156 if (payload_name == "VP8" || payload_name == "VP9") |
177 case kVideoCodecVP8: | 157 return true; |
178 case kVideoCodecVP9: | 158 RTC_DCHECK(payload_name == "H264" || payload_name == "FAKE") |
179 return true; | 159 << "unknown payload_name " << payload_name; |
180 case kVideoCodecH264: | |
181 case kVideoCodecGeneric: | |
182 return false; | |
183 case kVideoCodecI420: | |
184 case kVideoCodecRED: | |
185 case kVideoCodecULPFEC: | |
186 case kVideoCodecUnknown: | |
187 RTC_NOTREACHED(); | |
188 return false; | |
189 } | |
190 RTC_NOTREACHED(); | |
191 return false; | 160 return false; |
192 } | 161 } |
193 | 162 |
194 // TODO(pbos): Lower these thresholds (to closer to 100%) when we handle | 163 int CalculateMaxPadBitrateBps(const VideoEncoderConfig& config, |
195 // pipelining encoders better (multiple input frames before something comes | |
196 // out). This should effectively turn off CPU adaptations for systems that | |
197 // remotely cope with the load right now. | |
198 CpuOveruseOptions GetCpuOveruseOptions(bool full_overuse_time) { | |
199 CpuOveruseOptions options; | |
200 if (full_overuse_time) { | |
201 options.low_encode_usage_threshold_percent = 150; | |
202 options.high_encode_usage_threshold_percent = 200; | |
203 } | |
204 return options; | |
205 } | |
206 | |
207 VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config, | |
208 const std::string& payload_name, | |
209 int payload_type) { | |
210 const std::vector<VideoStream>& streams = config.streams; | |
211 static const int kEncoderMinBitrateKbps = 30; | |
212 RTC_DCHECK(!streams.empty()); | |
213 RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0); | |
214 | |
215 VideoCodec video_codec; | |
216 memset(&video_codec, 0, sizeof(video_codec)); | |
217 video_codec.codecType = PayloadNameToCodecType(payload_name); | |
218 | |
219 switch (config.content_type) { | |
220 case VideoEncoderConfig::ContentType::kRealtimeVideo: | |
221 video_codec.mode = kRealtimeVideo; | |
222 break; | |
223 case VideoEncoderConfig::ContentType::kScreen: | |
224 video_codec.mode = kScreensharing; | |
225 if (config.streams.size() == 1 && | |
226 config.streams[0].temporal_layer_thresholds_bps.size() == 1) { | |
227 video_codec.targetBitrate = | |
228 config.streams[0].temporal_layer_thresholds_bps[0] / 1000; | |
229 } | |
230 break; | |
231 } | |
232 | |
233 switch (video_codec.codecType) { | |
234 case kVideoCodecVP8: { | |
235 if (config.encoder_specific_settings) { | |
236 video_codec.codecSpecific.VP8 = *reinterpret_cast<const VideoCodecVP8*>( | |
237 config.encoder_specific_settings); | |
238 } else { | |
239 video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings(); | |
240 } | |
241 video_codec.codecSpecific.VP8.numberOfTemporalLayers = | |
242 static_cast<unsigned char>( | |
243 streams.back().temporal_layer_thresholds_bps.size() + 1); | |
244 break; | |
245 } | |
246 case kVideoCodecVP9: { | |
247 if (config.encoder_specific_settings) { | |
248 video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>( | |
249 config.encoder_specific_settings); | |
250 if (video_codec.mode == kScreensharing) { | |
251 video_codec.codecSpecific.VP9.flexibleMode = true; | |
252 // For now VP9 screensharing use 1 temporal and 2 spatial layers. | |
253 RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers, | |
254 1); | |
255 RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2); | |
256 } | |
257 } else { | |
258 video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings(); | |
259 } | |
260 video_codec.codecSpecific.VP9.numberOfTemporalLayers = | |
261 static_cast<unsigned char>( | |
262 streams.back().temporal_layer_thresholds_bps.size() + 1); | |
263 break; | |
264 } | |
265 case kVideoCodecH264: { | |
266 if (config.encoder_specific_settings) { | |
267 video_codec.codecSpecific.H264 = | |
268 *reinterpret_cast<const VideoCodecH264*>( | |
269 config.encoder_specific_settings); | |
270 } else { | |
271 video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings(); | |
272 } | |
273 break; | |
274 } | |
275 default: | |
276 // TODO(pbos): Support encoder_settings codec-agnostically. | |
277 RTC_DCHECK(!config.encoder_specific_settings) | |
278 << "Encoder-specific settings for codec type not wired up."; | |
279 break; | |
280 } | |
281 | |
282 strncpy(video_codec.plName, payload_name.c_str(), kPayloadNameSize - 1); | |
283 video_codec.plName[kPayloadNameSize - 1] = '\0'; | |
284 video_codec.plType = payload_type; | |
285 video_codec.numberOfSimulcastStreams = | |
286 static_cast<unsigned char>(streams.size()); | |
287 video_codec.minBitrate = streams[0].min_bitrate_bps / 1000; | |
288 if (video_codec.minBitrate < kEncoderMinBitrateKbps) | |
289 video_codec.minBitrate = kEncoderMinBitrateKbps; | |
290 RTC_DCHECK_LE(streams.size(), static_cast<size_t>(kMaxSimulcastStreams)); | |
291 if (video_codec.codecType == kVideoCodecVP9) { | |
292 // If the vector is empty, bitrates will be configured automatically. | |
293 RTC_DCHECK(config.spatial_layers.empty() || | |
294 config.spatial_layers.size() == | |
295 video_codec.codecSpecific.VP9.numberOfSpatialLayers); | |
296 RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers, | |
297 kMaxSimulcastStreams); | |
298 for (size_t i = 0; i < config.spatial_layers.size(); ++i) | |
299 video_codec.spatialLayers[i] = config.spatial_layers[i]; | |
300 } | |
301 for (size_t i = 0; i < streams.size(); ++i) { | |
302 SimulcastStream* sim_stream = &video_codec.simulcastStream[i]; | |
303 RTC_DCHECK_GT(streams[i].width, 0u); | |
304 RTC_DCHECK_GT(streams[i].height, 0u); | |
305 RTC_DCHECK_GT(streams[i].max_framerate, 0); | |
306 // Different framerates not supported per stream at the moment. | |
307 RTC_DCHECK_EQ(streams[i].max_framerate, streams[0].max_framerate); | |
308 RTC_DCHECK_GE(streams[i].min_bitrate_bps, 0); | |
309 RTC_DCHECK_GE(streams[i].target_bitrate_bps, streams[i].min_bitrate_bps); | |
310 RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps); | |
311 RTC_DCHECK_GE(streams[i].max_qp, 0); | |
312 | |
313 sim_stream->width = static_cast<uint16_t>(streams[i].width); | |
314 sim_stream->height = static_cast<uint16_t>(streams[i].height); | |
315 sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000; | |
316 sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000; | |
317 sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000; | |
318 sim_stream->qpMax = streams[i].max_qp; | |
319 sim_stream->numberOfTemporalLayers = static_cast<unsigned char>( | |
320 streams[i].temporal_layer_thresholds_bps.size() + 1); | |
321 | |
322 video_codec.width = std::max(video_codec.width, | |
323 static_cast<uint16_t>(streams[i].width)); | |
324 video_codec.height = std::max( | |
325 video_codec.height, static_cast<uint16_t>(streams[i].height)); | |
326 video_codec.minBitrate = | |
327 std::min(static_cast<uint16_t>(video_codec.minBitrate), | |
328 static_cast<uint16_t>(streams[i].min_bitrate_bps / 1000)); | |
329 video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000; | |
330 video_codec.qpMax = std::max(video_codec.qpMax, | |
331 static_cast<unsigned int>(streams[i].max_qp)); | |
332 } | |
333 | |
334 if (video_codec.maxBitrate == 0) { | |
335 // Unset max bitrate -> cap to one bit per pixel. | |
336 video_codec.maxBitrate = | |
337 (video_codec.width * video_codec.height * video_codec.maxFramerate) / | |
338 1000; | |
339 } | |
340 if (video_codec.maxBitrate < kEncoderMinBitrateKbps) | |
341 video_codec.maxBitrate = kEncoderMinBitrateKbps; | |
342 | |
343 RTC_DCHECK_GT(streams[0].max_framerate, 0); | |
344 video_codec.maxFramerate = streams[0].max_framerate; | |
345 video_codec.expect_encode_from_texture = config.expect_encode_from_texture; | |
346 | |
347 return video_codec; | |
348 } | |
349 | |
350 int CalulcateMaxPadBitrateBps(const VideoEncoderConfig& config, | |
351 bool pad_to_min_bitrate) { | 164 bool pad_to_min_bitrate) { |
352 int pad_up_to_bitrate_bps = 0; | 165 int pad_up_to_bitrate_bps = 0; |
353 // Calculate max padding bitrate for a multi layer codec. | 166 // Calculate max padding bitrate for a multi layer codec. |
354 if (config.streams.size() > 1) { | 167 if (config.streams.size() > 1) { |
355 // Pad to min bitrate of the highest layer. | 168 // Pad to min bitrate of the highest layer. |
356 pad_up_to_bitrate_bps = | 169 pad_up_to_bitrate_bps = |
357 config.streams[config.streams.size() - 1].min_bitrate_bps; | 170 config.streams[config.streams.size() - 1].min_bitrate_bps; |
358 // Add target_bitrate_bps of the lower layers. | 171 // Add target_bitrate_bps of the lower layers. |
359 for (size_t i = 0; i < config.streams.size() - 1; ++i) | 172 for (size_t i = 0; i < config.streams.size() - 1; ++i) |
360 pad_up_to_bitrate_bps += config.streams[i].target_bitrate_bps; | 173 pad_up_to_bitrate_bps += config.streams[i].target_bitrate_bps; |
361 } else if (pad_to_min_bitrate) { | 174 } else if (pad_to_min_bitrate) { |
362 pad_up_to_bitrate_bps = config.streams[0].min_bitrate_bps; | 175 pad_up_to_bitrate_bps = config.streams[0].min_bitrate_bps; |
363 } | 176 } |
364 | 177 |
365 pad_up_to_bitrate_bps = | 178 pad_up_to_bitrate_bps = |
366 std::max(pad_up_to_bitrate_bps, config.min_transmit_bitrate_bps); | 179 std::max(pad_up_to_bitrate_bps, config.min_transmit_bitrate_bps); |
367 | 180 |
368 return pad_up_to_bitrate_bps; | 181 return pad_up_to_bitrate_bps; |
369 } | 182 } |
370 | 183 |
371 } // namespace | 184 } // namespace |
372 | 185 |
373 namespace internal { | 186 namespace internal { |
187 | |
188 // TODO(tommi): See if there's a more elegant way to create a task that creates | |
189 // an object on the correct task queue. | |
190 class VideoSendStream::ConstructionTask : public rtc::QueuedTask { | |
191 public: | |
192 ConstructionTask(std::unique_ptr<VideoSendStreamInternal>* send_stream, | |
193 rtc::Event* done_event, | |
194 SendStatisticsProxy* stats_proxy, | |
195 ViEEncoder* vie_encoder, | |
196 ProcessThread* module_process_thread, | |
197 CallStats* call_stats, | |
198 CongestionController* congestion_controller, | |
199 BitrateAllocator* bitrate_allocator, | |
200 SendDelayStats* send_delay_stats, | |
201 VieRemb* remb, | |
202 RtcEventLog* event_log, | |
203 const VideoSendStream::Config* config, | |
204 const std::map<uint32_t, RtpState>& suspended_ssrcs) | |
205 : send_stream_(send_stream), | |
206 done_event_(done_event), | |
207 stats_proxy_(stats_proxy), | |
208 vie_encoder_(vie_encoder), | |
209 call_stats_(call_stats), | |
210 congestion_controller_(congestion_controller), | |
211 bitrate_allocator_(bitrate_allocator), | |
212 send_delay_stats_(send_delay_stats), | |
213 remb_(remb), | |
214 event_log_(event_log), | |
215 config_(config), | |
216 suspended_ssrcs_(suspended_ssrcs) {} | |
217 ~ConstructionTask() { done_event_->Set(); } | |
tommi
2016/07/06 11:15:02
override
perkj_webrtc
2016/07/06 13:08:54
Done.
| |
218 | |
219 bool Run() override { | |
tommi
2016/07/06 11:15:02
nit: make the implementation of Run() private
perkj_webrtc
2016/07/06 13:08:54
Done.
| |
220 send_stream_->reset(new VideoSendStreamInternal( | |
221 stats_proxy_, rtc::TaskQueue::Current(), call_stats_, | |
222 congestion_controller_, bitrate_allocator_, send_delay_stats_, remb_, | |
223 vie_encoder_, event_log_, config_, std::move(suspended_ssrcs_))); | |
224 return true; | |
225 } | |
226 | |
227 private: | |
228 std::unique_ptr<VideoSendStreamInternal>* send_stream_; | |
229 rtc::Event* done_event_; | |
230 SendStatisticsProxy* const stats_proxy_; | |
231 ViEEncoder* const vie_encoder_; | |
232 CallStats* const call_stats_; | |
233 CongestionController* const congestion_controller_; | |
234 BitrateAllocator* const bitrate_allocator_; | |
235 SendDelayStats* const send_delay_stats_; | |
236 VieRemb* const remb_; | |
237 RtcEventLog* const event_log_; | |
238 const VideoSendStream::Config* config_; | |
239 std::map<uint32_t, RtpState> suspended_ssrcs_; | |
240 }; | |
241 | |
242 class VideoSendStream::DestructAndGetRTPStateTask : public rtc::QueuedTask { | |
243 public: | |
244 DestructAndGetRTPStateTask( | |
245 VideoSendStream::RtpStateMap* state_map, | |
246 std::unique_ptr<VideoSendStreamInternal> send_stream, | |
247 rtc::Event* done_event) | |
248 : state_map_(state_map), | |
249 send_stream_(std::move(send_stream)), | |
250 done_event_(done_event) {} | |
251 ~DestructAndGetRTPStateTask() { | |
tommi
2016/07/06 11:15:02
override
perkj_webrtc
2016/07/06 13:08:54
Done.
| |
252 send_stream_.reset(); | |
253 done_event_->Set(); | |
254 } | |
255 | |
256 bool Run() override { | |
tommi
2016/07/06 11:15:02
private
perkj_webrtc
2016/07/06 13:08:54
Done.
| |
257 send_stream_->Stop(); | |
258 *state_map_ = send_stream_->GetRtpStates(); | |
259 send_stream_.reset(); | |
260 return true; | |
261 } | |
262 | |
263 private: | |
264 VideoSendStream::RtpStateMap* state_map_; | |
265 std::unique_ptr<VideoSendStreamInternal> send_stream_; | |
266 rtc::Event* done_event_; | |
267 }; | |
268 | |
 269 // CheckEncoderActivityTask is used for tracking when the encoder last produced | 
 270 // an encoded video frame. If the encoder has not produced anything in the last | 
 271 // kEncoderTimeOutMs we also want to stop sending padding. | 
272 class VideoSendStreamInternal::CheckEncoderActivityTask | |
273 : public rtc::QueuedTask { | |
274 public: | |
275 static const int kEncoderTimeOutMs = 2000; | |
sprang_webrtc
2016/07/06 15:02:51
Maybe we can use PacedSender::kMaxQueueLengthMs, w
perkj_webrtc
2016/07/07 08:37:12
But I don't think the pacer queue length is relate
sprang_webrtc
2016/07/07 09:25:10
It is at least a lower bound. What I'm worried is
perkj_webrtc
2016/07/08 06:36:28
I see. The timeout happens to be the same as Paced
sprang_webrtc
2016/07/08 07:18:21
Acknowledged.
| |
276 explicit CheckEncoderActivityTask(VideoSendStreamInternal* send_stream) | |
277 : activity_(0), send_stream_(send_stream), timed_out_(false) { | |
278 encoder_thread_checker_.DetachFromThread(); | |
279 } | |
280 | |
281 void Stop() { | |
282 RTC_DCHECK_RUN_ON(&thread_checker_); | |
283 send_stream_ = nullptr; | |
284 } | |
285 | |
286 void UpdateEncoderActivity() { | |
287 RTC_DCHECK_RUN_ON(&encoder_thread_checker_); | |
288 rtc::AtomicOps::ReleaseStore(&activity_, 1); | |
289 } | |
290 | |
291 private: | |
292 bool Run() override { | |
293 RTC_DCHECK_RUN_ON(&thread_checker_); | |
294 if (!send_stream_) | |
295 return true; | |
296 if (!rtc::AtomicOps::AcquireLoad(&activity_)) { | |
297 if (!timed_out_) { | |
298 send_stream_->EncoderTimedOut(); | |
299 } | |
300 timed_out_ = true; | |
301 } else if (timed_out_) { | |
302 send_stream_->EncoderIsActive(); | |
303 timed_out_ = false; | |
304 } | |
305 rtc::AtomicOps::ReleaseStore(&activity_, 0); | |
306 | |
307 rtc::TaskQueue::Current()->PostDelayedTask( | |
308 std::unique_ptr<rtc::QueuedTask>(this), kEncoderTimeOutMs); | |
309 // Return false to prevent this task from being deleted. Ownership has been | |
310 // transferred to the task queue when PostDelayedTask was called. | |
311 return false; | |
312 } | |
313 | |
314 rtc::ThreadChecker encoder_thread_checker_; | |
315 volatile int activity_; | |
316 | |
317 rtc::ThreadChecker thread_checker_; | |
318 VideoSendStreamInternal* send_stream_; | |
319 bool timed_out_; | |
320 }; | |
321 | |
322 class ReconfigureVideoEncoderTask : public rtc::QueuedTask { | |
323 public: | |
324 ReconfigureVideoEncoderTask(VideoSendStreamInternal* send_stream, | |
325 VideoEncoderConfig config) | |
326 : send_stream_(send_stream), config_(std::move(config)) {} | |
327 | |
328 private: | |
329 bool Run() override { | |
330 send_stream_->ReconfigureVideoEncoder(std::move(config_)); | |
331 return true; | |
332 } | |
333 | |
334 VideoSendStreamInternal* send_stream_; | |
335 VideoEncoderConfig config_; | |
336 }; | |
337 | |
374 VideoSendStream::VideoSendStream( | 338 VideoSendStream::VideoSendStream( |
375 int num_cpu_cores, | 339 int num_cpu_cores, |
376 ProcessThread* module_process_thread, | 340 ProcessThread* module_process_thread, |
341 rtc::TaskQueue* worker_queue, | |
377 CallStats* call_stats, | 342 CallStats* call_stats, |
378 CongestionController* congestion_controller, | 343 CongestionController* congestion_controller, |
379 BitrateAllocator* bitrate_allocator, | 344 BitrateAllocator* bitrate_allocator, |
380 SendDelayStats* send_delay_stats, | 345 SendDelayStats* send_delay_stats, |
381 VieRemb* remb, | 346 VieRemb* remb, |
382 RtcEventLog* event_log, | 347 RtcEventLog* event_log, |
383 const VideoSendStream::Config& config, | 348 VideoSendStream::Config config, |
384 const VideoEncoderConfig& encoder_config, | 349 VideoEncoderConfig encoder_config, |
385 const std::map<uint32_t, RtpState>& suspended_ssrcs) | 350 const std::map<uint32_t, RtpState>& suspended_ssrcs) |
386 : stats_proxy_(Clock::GetRealTimeClock(), | 351 : worker_queue_(worker_queue), |
352 thread_sync_event_(false /* manual_reset */, false), | |
353 stats_proxy_(Clock::GetRealTimeClock(), | |
387 config, | 354 config, |
388 encoder_config.content_type), | 355 encoder_config.content_type), |
356 config_(std::move(config)) { | |
357 vie_encoder_.reset( | |
358 new ViEEncoder(num_cpu_cores, &stats_proxy_, config_.encoder_settings, | |
359 config_.pre_encode_callback, config_.overuse_callback, | |
360 config_.post_encode_callback)); | |
361 | |
362 worker_queue_->PostTask(std::unique_ptr<rtc::QueuedTask>(new ConstructionTask( | |
363 &send_stream_, &thread_sync_event_, &stats_proxy_, vie_encoder_.get(), | |
364 module_process_thread, call_stats, congestion_controller, | |
365 bitrate_allocator, send_delay_stats, remb, event_log, &config_, | |
366 suspended_ssrcs))); | |
367 | |
368 // Wait for |construction_task| to complete so that |module_process_thread| | |
369 // can be registered. | |
370 thread_sync_event_.Wait(rtc::Event::kForever); | |
371 send_stream_->RegisterProcessThread(module_process_thread); | |
372 | |
373 vie_encoder_->RegisterProcessThread(module_process_thread); | |
374 | |
375 ReconfigureVideoEncoder(std::move(encoder_config)); | |
376 } | |
377 | |
378 VideoSendStream::~VideoSendStream() { | |
379 RTC_DCHECK_RUN_ON(&thread_checker_); | |
380 LOG(LS_INFO) << "~VideoSendStream: "; | |
sprang_webrtc
2016/07/06 15:02:51
Is this useful? I see there was a log like this be
perkj_webrtc
2016/07/07 08:37:12
removed
| |
381 RTC_DCHECK(!send_stream_); | |
382 } | |
383 | |
384 void VideoSendStream::Start() { | |
385 RTC_DCHECK_RUN_ON(&thread_checker_); | |
386 VideoSendStreamInternal* send_stream = send_stream_.get(); | |
387 worker_queue_->PostTask([this, send_stream] { | |
388 send_stream->Start(); | |
389 thread_sync_event_.Set(); | |
390 }); | |
391 | |
392 // This is needed for synchronizing with tests WebRtcVideoChannel2BaseTest. | |
393 // in webrtcvideosendstream... | |
 394 // I.e., if VideoSendStreamInternal::Start has not completed before the first | 
395 // frame is forwarded to the encoder, the frame will be dropped. | |
396 thread_sync_event_.Wait(rtc::Event::kForever); | |
397 } | |
398 | |
399 void VideoSendStream::Stop() { | |
400 RTC_DCHECK_RUN_ON(&thread_checker_); | |
401 VideoSendStreamInternal* send_stream = send_stream_.get(); | |
402 worker_queue_->PostTask([send_stream] { send_stream->Stop(); }); | |
403 } | |
404 | |
405 VideoCaptureInput* VideoSendStream::Input() { | |
 406 // Input() will be called on the thread that delivers video frames from | 
407 // libjingle. | |
408 // TODO(perkj): Refactor ViEEncoder to register directly as a VideoSink to the | |
409 // VideoSource. | |
410 return vie_encoder_.get(); | |
411 } | |
412 | |
413 void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) { | |
 414 // ReconfigureVideoEncoder will be called on the thread that delivers video | 
 415 // frames. We must change the encoder settings immediately so that | 
 416 // the codec settings match the next frame. | 
417 // TODO(perkj): Move logic for reconfiguration the encoder due to frame size | |
418 // change from WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame to | |
419 // be internally handled by ViEEncoder. | |
420 vie_encoder_->ConfigureEncoder( | |
421 config, | |
422 config_.rtp.max_packet_size - 20); // - 20 for RTP header size. | |
423 | |
424 worker_queue_->PostTask(std::unique_ptr<rtc::QueuedTask>( | |
425 new ReconfigureVideoEncoderTask(send_stream_.get(), std::move(config)))); | |
426 } | |
427 | |
428 VideoSendStream::Stats VideoSendStream::GetStats() { | |
429 // TODO(perkj, solenberg): Some test cases in EndToEndTest call GetStats from | |
430 // a network thread. See comment in Call::GetStats(). | |
431 // RTC_DCHECK_RUN_ON(&thread_checker_); | |
432 return stats_proxy_.GetStats(); | |
433 } | |
434 | |
435 void VideoSendStream::SignalNetworkState(NetworkState state) { | |
436 RTC_DCHECK_RUN_ON(&thread_checker_); | |
437 VideoSendStreamInternal* send_stream = send_stream_.get(); | |
438 worker_queue_->PostTask( | |
439 [send_stream, state] { send_stream->SignalNetworkState(state); }); | |
440 } | |
441 | |
442 VideoSendStream::RtpStateMap VideoSendStream::StopPermanentlyAndGetRtpStates() { | |
443 RTC_DCHECK_RUN_ON(&thread_checker_); | |
444 vie_encoder_->Stop(); | |
445 vie_encoder_->DeRegisterProcessThread(); | |
446 VideoSendStream::RtpStateMap state_map; | |
447 send_stream_->DeRegisterProcessThread(); | |
448 worker_queue_->PostTask( | |
449 std::unique_ptr<rtc::QueuedTask>(new DestructAndGetRTPStateTask( | |
450 &state_map, std::move(send_stream_), &thread_sync_event_))); | |
451 thread_sync_event_.Wait(rtc::Event::kForever); | |
452 return state_map; | |
453 } | |
454 | |
455 bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { | |
456 // Called on a network thread. | |
sprang_webrtc
2016/07/06 15:02:51
Do we wanna dcheck that?
perkj_webrtc
2016/07/07 08:37:12
It is not that easy. It seems like it the libjingl
sprang_webrtc
2016/07/07 09:25:10
Acknowledged.
| |
457 return send_stream_->DeliverRtcp(packet, length); | |
458 } | |
459 | |
460 VideoSendStreamInternal::VideoSendStreamInternal( | |
461 SendStatisticsProxy* stats_proxy, | |
462 rtc::TaskQueue* worker_queue, | |
463 CallStats* call_stats, | |
464 CongestionController* congestion_controller, | |
465 BitrateAllocator* bitrate_allocator, | |
466 SendDelayStats* send_delay_stats, | |
467 VieRemb* remb, | |
468 ViEEncoder* vie_encoder, | |
469 RtcEventLog* event_log, | |
470 const VideoSendStream::Config* config, | |
471 std::map<uint32_t, RtpState> suspended_ssrcs) | |
472 : stats_proxy_(stats_proxy), | |
389 config_(config), | 473 config_(config), |
390 suspended_ssrcs_(suspended_ssrcs), | 474 suspended_ssrcs_(std::move(suspended_ssrcs)), |
391 module_process_thread_(module_process_thread), | 475 module_process_thread_(nullptr), |
476 worker_queue_(worker_queue), | |
477 check_encoder_activity_task_(nullptr), | |
392 call_stats_(call_stats), | 478 call_stats_(call_stats), |
393 congestion_controller_(congestion_controller), | 479 congestion_controller_(congestion_controller), |
394 bitrate_allocator_(bitrate_allocator), | 480 bitrate_allocator_(bitrate_allocator), |
395 remb_(remb), | 481 remb_(remb), |
396 encoder_thread_(EncoderThreadFunction, this, "EncoderThread"), | 482 max_padding_bitrate_(0), |
397 encoder_wakeup_event_(false, false), | 483 encoder_min_bitrate_bps_(0), |
398 stop_encoder_thread_(0), | 484 encoder_max_bitrate_bps_(0), |
399 state_(State::kStopped), | 485 encoder_target_rate_(0), |
400 overuse_detector_( | 486 vie_encoder_(vie_encoder), |
401 Clock::GetRealTimeClock(), | |
402 GetCpuOveruseOptions(config.encoder_settings.full_overuse_time), | |
403 this, | |
404 config.post_encode_callback, | |
405 &stats_proxy_), | |
406 vie_encoder_(num_cpu_cores, | |
407 module_process_thread_, | |
408 &stats_proxy_, | |
409 &overuse_detector_, | |
410 this), | |
411 encoder_feedback_(Clock::GetRealTimeClock(), | 487 encoder_feedback_(Clock::GetRealTimeClock(), |
412 config.rtp.ssrcs, | 488 config_->rtp.ssrcs, |
413 &vie_encoder_), | 489 vie_encoder), |
414 protection_bitrate_calculator_(Clock::GetRealTimeClock(), this), | 490 protection_bitrate_calculator_(Clock::GetRealTimeClock(), this), |
415 video_sender_(vie_encoder_.video_sender()), | |
416 bandwidth_observer_(congestion_controller_->GetBitrateController() | 491 bandwidth_observer_(congestion_controller_->GetBitrateController() |
417 ->CreateRtcpBandwidthObserver()), | 492 ->CreateRtcpBandwidthObserver()), |
418 rtp_rtcp_modules_(CreateRtpRtcpModules( | 493 rtp_rtcp_modules_(CreateRtpRtcpModules( |
419 config.send_transport, | 494 config_->send_transport, |
420 &encoder_feedback_, | 495 &encoder_feedback_, |
421 bandwidth_observer_.get(), | 496 bandwidth_observer_.get(), |
422 congestion_controller_->GetTransportFeedbackObserver(), | 497 congestion_controller_->GetTransportFeedbackObserver(), |
423 call_stats_->rtcp_rtt_stats(), | 498 call_stats_->rtcp_rtt_stats(), |
424 congestion_controller_->pacer(), | 499 congestion_controller_->pacer(), |
425 congestion_controller_->packet_router(), | 500 congestion_controller_->packet_router(), |
426 &stats_proxy_, | 501 stats_proxy_, |
427 send_delay_stats, | 502 send_delay_stats, |
428 event_log, | 503 event_log, |
429 config_.rtp.ssrcs.size())), | 504 config_->rtp.ssrcs.size())), |
430 payload_router_(rtp_rtcp_modules_, config.encoder_settings.payload_type), | 505 payload_router_(rtp_rtcp_modules_, |
431 input_(&encoder_wakeup_event_, | 506 config_->encoder_settings.payload_type) { |
432 config_.local_renderer, | 507 RTC_DCHECK_RUN_ON(worker_queue_); |
433 &stats_proxy_, | 508 LOG(LS_INFO) << "VideoSendStreamInternal: " << config_->ToString(); |
434 &overuse_detector_) { | 509 module_process_thread_checker_.DetachFromThread(); |
435 LOG(LS_INFO) << "VideoSendStream: " << config_.ToString(); | 510 |
436 | 511 RTC_DCHECK(!config_->rtp.ssrcs.empty()); |
437 RTC_DCHECK(!config_.rtp.ssrcs.empty()); | |
438 RTC_DCHECK(module_process_thread_); | |
439 RTC_DCHECK(call_stats_); | 512 RTC_DCHECK(call_stats_); |
440 RTC_DCHECK(congestion_controller_); | 513 RTC_DCHECK(congestion_controller_); |
441 RTC_DCHECK(remb_); | 514 RTC_DCHECK(remb_); |
442 | 515 |
443 // RTP/RTCP initialization. | 516 // RTP/RTCP initialization. |
444 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 517 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
445 module_process_thread_->RegisterModule(rtp_rtcp); | |
446 congestion_controller_->packet_router()->AddRtpModule(rtp_rtcp); | 518 congestion_controller_->packet_router()->AddRtpModule(rtp_rtcp); |
447 } | 519 } |
448 | 520 |
449 for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) { | 521 for (size_t i = 0; i < config_->rtp.extensions.size(); ++i) { |
450 const std::string& extension = config_.rtp.extensions[i].uri; | 522 const std::string& extension = config_->rtp.extensions[i].uri; |
451 int id = config_.rtp.extensions[i].id; | 523 int id = config_->rtp.extensions[i].id; |
452 // One-byte-extension local identifiers are in the range 1-14 inclusive. | 524 // One-byte-extension local identifiers are in the range 1-14 inclusive. |
453 RTC_DCHECK_GE(id, 1); | 525 RTC_DCHECK_GE(id, 1); |
454 RTC_DCHECK_LE(id, 14); | 526 RTC_DCHECK_LE(id, 14); |
455 RTC_DCHECK(RtpExtension::IsSupportedForVideo(extension)); | 527 RTC_DCHECK(RtpExtension::IsSupportedForVideo(extension)); |
456 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 528 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
457 RTC_CHECK_EQ(0, rtp_rtcp->RegisterSendRtpHeaderExtension( | 529 RTC_CHECK_EQ(0, rtp_rtcp->RegisterSendRtpHeaderExtension( |
458 StringToRtpExtensionType(extension), id)); | 530 StringToRtpExtensionType(extension), id)); |
459 } | 531 } |
460 } | 532 } |
461 | 533 |
462 remb_->AddRembSender(rtp_rtcp_modules_[0]); | 534 remb_->AddRembSender(rtp_rtcp_modules_[0]); |
463 rtp_rtcp_modules_[0]->SetREMBStatus(true); | 535 rtp_rtcp_modules_[0]->SetREMBStatus(true); |
464 | 536 |
465 ConfigureProtection(); | 537 ConfigureProtection(); |
466 ConfigureSsrcs(); | 538 ConfigureSsrcs(); |
467 | 539 |
468 // TODO(pbos): Should we set CNAME on all RTP modules? | 540 // TODO(pbos): Should we set CNAME on all RTP modules? |
469 rtp_rtcp_modules_.front()->SetCNAME(config_.rtp.c_name.c_str()); | 541 rtp_rtcp_modules_.front()->SetCNAME(config_->rtp.c_name.c_str()); |
470 // 28 to match packet overhead in ModuleRtpRtcpImpl. | 542 // 28 to match packet overhead in ModuleRtpRtcpImpl. |
471 static const size_t kRtpPacketSizeOverhead = 28; | 543 static const size_t kRtpPacketSizeOverhead = 28; |
472 RTC_DCHECK_LE(config_.rtp.max_packet_size, 0xFFFFu + kRtpPacketSizeOverhead); | 544 RTC_DCHECK_LE(config_->rtp.max_packet_size, 0xFFFFu + kRtpPacketSizeOverhead); |
473 const uint16_t mtu = static_cast<uint16_t>(config_.rtp.max_packet_size + | 545 const uint16_t mtu = static_cast<uint16_t>(config_->rtp.max_packet_size + |
474 kRtpPacketSizeOverhead); | 546 kRtpPacketSizeOverhead); |
475 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 547 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
476 rtp_rtcp->RegisterRtcpStatisticsCallback(&stats_proxy_); | 548 rtp_rtcp->RegisterRtcpStatisticsCallback(stats_proxy_); |
477 rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(&stats_proxy_); | 549 rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(stats_proxy_); |
478 rtp_rtcp->SetMaxTransferUnit(mtu); | 550 rtp_rtcp->SetMaxTransferUnit(mtu); |
479 rtp_rtcp->RegisterVideoSendPayload( | 551 rtp_rtcp->RegisterVideoSendPayload( |
480 config_.encoder_settings.payload_type, | 552 config_->encoder_settings.payload_type, |
481 config_.encoder_settings.payload_name.c_str()); | 553 config_->encoder_settings.payload_name.c_str()); |
482 } | 554 } |
483 | 555 |
484 RTC_DCHECK(config.encoder_settings.encoder); | 556 RTC_DCHECK(config_->encoder_settings.encoder); |
485 RTC_DCHECK_GE(config.encoder_settings.payload_type, 0); | 557 RTC_DCHECK_GE(config_->encoder_settings.payload_type, 0); |
486 RTC_DCHECK_LE(config.encoder_settings.payload_type, 127); | 558 RTC_DCHECK_LE(config_->encoder_settings.payload_type, 127); |
487 ReconfigureVideoEncoder(encoder_config); | |
488 | 559 |
489 module_process_thread_->RegisterModule(&overuse_detector_); | 560 vie_encoder_->SetStartBitrate(bitrate_allocator_->GetStartBitrate(this)); |
490 | 561 vie_encoder_->SetSink(this); |
491 encoder_thread_checker_.DetachFromThread(); | |
492 encoder_thread_.Start(); | |
493 encoder_thread_.SetPriority(rtc::kHighPriority); | |
494 } | 562 } |
495 | 563 |
496 VideoSendStream::~VideoSendStream() { | 564 void VideoSendStreamInternal::RegisterProcessThread( |
497 LOG(LS_INFO) << "~VideoSendStream: " << config_.ToString(); | 565 ProcessThread* module_process_thread) { |
566 RTC_DCHECK_RUN_ON(&module_process_thread_checker_); | |
567 RTC_DCHECK(!module_process_thread_); | |
568 module_process_thread_ = module_process_thread; | |
498 | 569 |
499 Stop(); | 570 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
571 module_process_thread_->RegisterModule(rtp_rtcp); | |
572 } | |
573 } | |
500 | 574 |
501 // Stop the encoder thread permanently. | 575 void VideoSendStreamInternal::DeRegisterProcessThread() { |
502 rtc::AtomicOps::ReleaseStore(&stop_encoder_thread_, 1); | 576 RTC_DCHECK_RUN_ON(&module_process_thread_checker_); |
503 encoder_wakeup_event_.Set(); | 577 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
504 encoder_thread_.Stop(); | 578 module_process_thread_->DeRegisterModule(rtp_rtcp); |
579 } | |
580 } | |
505 | 581 |
506 // This needs to happen after stopping the encoder thread, | 582 VideoSendStreamInternal::~VideoSendStreamInternal() { |
507 // since the encoder thread calls AddObserver. | 583 RTC_DCHECK_RUN_ON(worker_queue_); |
584 LOG(LS_INFO) << "~VideoSendStreamInternal: " << config_->ToString(); | |
585 | |
508 bitrate_allocator_->RemoveObserver(this); | 586 bitrate_allocator_->RemoveObserver(this); |
509 | |
510 module_process_thread_->DeRegisterModule(&overuse_detector_); | |
511 | |
512 rtp_rtcp_modules_[0]->SetREMBStatus(false); | 587 rtp_rtcp_modules_[0]->SetREMBStatus(false); |
513 remb_->RemoveRembSender(rtp_rtcp_modules_[0]); | 588 remb_->RemoveRembSender(rtp_rtcp_modules_[0]); |
514 | 589 |
515 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 590 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
516 congestion_controller_->packet_router()->RemoveRtpModule(rtp_rtcp); | 591 congestion_controller_->packet_router()->RemoveRtpModule(rtp_rtcp); |
517 module_process_thread_->DeRegisterModule(rtp_rtcp); | |
518 delete rtp_rtcp; | 592 delete rtp_rtcp; |
519 } | 593 } |
520 } | 594 } |
521 | 595 |
522 bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { | 596 bool VideoSendStreamInternal::DeliverRtcp(const uint8_t* packet, |
597 size_t length) { | |
523 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) | 598 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) |
524 rtp_rtcp->IncomingRtcpPacket(packet, length); | 599 rtp_rtcp->IncomingRtcpPacket(packet, length); |
525 return true; | 600 return true; |
526 } | 601 } |
527 | 602 |
528 void VideoSendStream::Start() { | 603 void VideoSendStreamInternal::Start() { |
604 RTC_DCHECK_RUN_ON(worker_queue_); | |
605 LOG_F(LS_INFO) << "Start"; | |
sprang_webrtc
2016/07/06 15:02:51
Was this left intentionally?
perkj_webrtc
2016/07/07 08:37:12
yes, but it should be LOG...
| |
529 if (payload_router_.active()) | 606 if (payload_router_.active()) |
530 return; | 607 return; |
531 TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Start"); | 608 TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Start"); |
532 payload_router_.set_active(true); | 609 payload_router_.set_active(true); |
610 | |
611 // Add our self as bitrate observer. | |
612 bitrate_allocator_->AddObserver( | |
613 this, encoder_min_bitrate_bps_, encoder_max_bitrate_bps_, | |
614 max_padding_bitrate_, !config_->suspend_below_min_bitrate); | |
615 | |
616 // Start monitoring encoder activity. | |
533 { | 617 { |
534 rtc::CritScope lock(&encoder_settings_crit_); | 618 rtc::CritScope lock(&encoder_activity_crit_sect_); |
535 pending_state_change_ = rtc::Optional<State>(State::kStarted); | 619 RTC_DCHECK(!check_encoder_activity_task_); |
620 check_encoder_activity_task_ = new CheckEncoderActivityTask(this); | |
621 worker_queue_->PostDelayedTask( | |
622 std::unique_ptr<rtc::QueuedTask>(check_encoder_activity_task_), | |
623 CheckEncoderActivityTask::kEncoderTimeOutMs); | |
536 } | 624 } |
537 encoder_wakeup_event_.Set(); | 625 |
626 vie_encoder_->SendKeyFrame(); | |
538 } | 627 } |
539 | 628 |
540 void VideoSendStream::Stop() { | 629 void VideoSendStreamInternal::Stop() { |
630 RTC_DCHECK_RUN_ON(worker_queue_); | |
541 if (!payload_router_.active()) | 631 if (!payload_router_.active()) |
542 return; | 632 return; |
543 TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop"); | 633 TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop"); |
544 payload_router_.set_active(false); | 634 payload_router_.set_active(false); |
635 bitrate_allocator_->RemoveObserver(this); | |
545 { | 636 { |
546 rtc::CritScope lock(&encoder_settings_crit_); | 637 rtc::CritScope lock(&encoder_activity_crit_sect_); |
547 pending_state_change_ = rtc::Optional<State>(State::kStopped); | 638 check_encoder_activity_task_->Stop(); |
639 check_encoder_activity_task_ = nullptr; | |
548 } | 640 } |
549 encoder_wakeup_event_.Set(); | 641 vie_encoder_->OnBitrateUpdated(0, 0, 0); |
550 } | 642 } |
551 | 643 |
552 VideoCaptureInput* VideoSendStream::Input() { | 644 void VideoSendStreamInternal::EncoderTimedOut() { |
553 return &input_; | 645 RTC_DCHECK_RUN_ON(worker_queue_); |
646 // If the encoder has not produced anything the last kEncoderTimeOutMs and it | |
647 // is supposed to, deregister as BitrateAllocatorObserver. This can happen | |
648 // if a camera stop producing frames, temporary or permanently during a call. | |
649 if (encoder_target_rate_ > 0) { | |
650 LOG_F(LS_INFO) << "Encoder timed out."; | |
651 bitrate_allocator_->RemoveObserver(this); | |
652 } | |
554 } | 653 } |
555 | 654 |
556 bool VideoSendStream::EncoderThreadFunction(void* obj) { | 655 void VideoSendStreamInternal::EncoderIsActive() { |
sprang_webrtc
2016/07/06 15:02:51
Can we rename this SignalEncoderActive() or something?
perkj_webrtc
2016/07/07 08:37:12
Done.
| |
557 static_cast<VideoSendStream*>(obj)->EncoderProcess(); | 656 RTC_DCHECK_RUN_ON(worker_queue_); |
558 // We're done, return false to abort. | 657 LOG_F(LS_INFO) << "Encoder is active."; |
559 return false; | 658 bitrate_allocator_->AddObserver( |
659 this, encoder_min_bitrate_bps_, encoder_max_bitrate_bps_, | |
660 max_padding_bitrate_, !config_->suspend_below_min_bitrate); | |
560 } | 661 } |
561 | 662 |
562 void VideoSendStream::EncoderProcess() { | 663 void VideoSendStreamInternal::ReconfigureVideoEncoder( |
563 RTC_CHECK_EQ(0, vie_encoder_.RegisterExternalEncoder( | 664 const VideoEncoderConfig& config) { |
564 config_.encoder_settings.encoder, | 665 RTC_DCHECK_GE(config_->rtp.ssrcs.size(), config.streams.size()); |
565 config_.encoder_settings.payload_type, | 666 TRACE_EVENT0("webrtc", "VideoSendStream::(Re)configureVideoEncoder"); |
566 config_.encoder_settings.internal_source)); | 667 LOG(LS_INFO) << "(Re)configureVideoEncoder: " << config.ToString(); |
567 RTC_DCHECK_RUN_ON(&encoder_thread_checker_); | 668 RTC_DCHECK_GE(config_->rtp.ssrcs.size(), config.streams.size()); |
568 while (true) { | 669 RTC_DCHECK_RUN_ON(worker_queue_); |
569 // Wake up every kEncodeCheckForActivityPeriodMs to check if the encoder is | |
570 // active. If not, deregister as BitrateAllocatorObserver. | |
571 const int kEncodeCheckForActivityPeriodMs = 1000; | |
572 encoder_wakeup_event_.Wait(kEncodeCheckForActivityPeriodMs); | |
573 if (rtc::AtomicOps::AcquireLoad(&stop_encoder_thread_)) | |
574 break; | |
575 bool change_settings = false; | |
576 rtc::Optional<State> pending_state_change; | |
577 { | |
578 rtc::CritScope lock(&encoder_settings_crit_); | |
579 if (pending_encoder_settings_) { | |
580 std::swap(current_encoder_settings_, pending_encoder_settings_); | |
581 pending_encoder_settings_.reset(); | |
582 change_settings = true; | |
583 } else if (pending_state_change_) { | |
584 swap(pending_state_change, pending_state_change_); | |
585 } | |
586 } | |
587 if (change_settings) { | |
588 current_encoder_settings_->video_codec.startBitrate = std::max( | |
589 bitrate_allocator_->GetStartBitrate(this) / 1000, | |
590 static_cast<int>(current_encoder_settings_->video_codec.minBitrate)); | |
591 payload_router_.SetSendStreams(current_encoder_settings_->config.streams); | |
592 vie_encoder_.SetEncoder(current_encoder_settings_->video_codec, | |
593 payload_router_.MaxPayloadLength()); | |
594 | 670 |
595 // Clear stats for disabled layers. | 671 const int kEncoderMinBitrateBps = 30000; |
596 for (size_t i = current_encoder_settings_->config.streams.size(); | 672 encoder_min_bitrate_bps_ = |
597 i < config_.rtp.ssrcs.size(); ++i) { | 673 std::max(config.streams[0].min_bitrate_bps, kEncoderMinBitrateBps); |
598 stats_proxy_.OnInactiveSsrc(config_.rtp.ssrcs[i]); | 674 encoder_max_bitrate_bps_ = 0; |
599 } | 675 for (const auto& stream : config.streams) |
676 encoder_max_bitrate_bps_ += stream.max_bitrate_bps; | |
677 max_padding_bitrate_ = | |
678 CalculateMaxPadBitrateBps(config, config_->suspend_below_min_bitrate); | |
600 | 679 |
601 size_t number_of_temporal_layers = | 680 payload_router_.SetSendStreams(config.streams); |
602 current_encoder_settings_->config.streams.back() | |
603 .temporal_layer_thresholds_bps.size() + | |
604 1; | |
605 protection_bitrate_calculator_.SetEncodingData( | |
606 current_encoder_settings_->video_codec.width, | |
607 current_encoder_settings_->video_codec.height, | |
608 number_of_temporal_layers, payload_router_.MaxPayloadLength()); | |
609 | 681 |
610 // We might've gotten new settings while configuring the encoder settings, | 682 // Clear stats for disabled layers. |
611 // restart from the top to see if that's the case before trying to encode | 683 for (size_t i = config.streams.size(); i < config_->rtp.ssrcs.size(); ++i) { |
612 // a frame (which might correspond to the last frame size). | 684 stats_proxy_->OnInactiveSsrc(config_->rtp.ssrcs[i]); |
613 encoder_wakeup_event_.Set(); | 685 } |
614 continue; | |
615 } | |
616 | 686 |
617 if (pending_state_change) { | 687 size_t number_of_temporal_layers = |
618 if (*pending_state_change == State::kStarted && | 688 config.streams.back().temporal_layer_thresholds_bps.size() + 1; |
619 state_ == State::kStopped) { | 689 protection_bitrate_calculator_.SetEncodingData( |
620 bitrate_allocator_->AddObserver( | 690 config.streams[0].width, config.streams[0].height, |
621 this, current_encoder_settings_->video_codec.minBitrate * 1000, | 691 number_of_temporal_layers, config_->rtp.max_packet_size); |
622 current_encoder_settings_->video_codec.maxBitrate * 1000, | |
623 CalulcateMaxPadBitrateBps(current_encoder_settings_->config, | |
624 config_.suspend_below_min_bitrate), | |
625 !config_.suspend_below_min_bitrate); | |
626 vie_encoder_.SendKeyFrame(); | |
627 state_ = State::kStarted; | |
628 LOG_F(LS_INFO) << "Encoder started."; | |
629 } else if (*pending_state_change == State::kStopped) { | |
630 bitrate_allocator_->RemoveObserver(this); | |
631 vie_encoder_.OnBitrateUpdated(0, 0, 0); | |
632 state_ = State::kStopped; | |
633 LOG_F(LS_INFO) << "Encoder stopped."; | |
634 } | |
635 encoder_wakeup_event_.Set(); | |
636 continue; | |
637 } | |
638 | 692 |
639 // Check if the encoder has produced anything the last kEncoderTimeOutMs. | 693 if (payload_router_.active()) { |
640 // If not, deregister as BitrateAllocatorObserver. | 694 // The send stream is started already. Update the allocator with new bitrate |
641 if (state_ == State::kStarted && | 695 // limits. |
642 vie_encoder_.time_of_last_frame_activity_ms() < | 696 bitrate_allocator_->AddObserver( |
643 rtc::TimeMillis() - kEncoderTimeOutMs) { | 697 this, encoder_min_bitrate_bps_, encoder_max_bitrate_bps_, |
644 // The encoder has timed out. | 698 max_padding_bitrate_, !config_->suspend_below_min_bitrate); |
645 LOG_F(LS_INFO) << "Encoder timed out."; | |
646 bitrate_allocator_->RemoveObserver(this); | |
647 state_ = State::kEncoderTimedOut; | |
648 } | |
649 if (state_ == State::kEncoderTimedOut && | |
650 vie_encoder_.time_of_last_frame_activity_ms() > | |
651 rtc::TimeMillis() - kEncoderTimeOutMs) { | |
652 LOG_F(LS_INFO) << "Encoder is active."; | |
653 bitrate_allocator_->AddObserver( | |
654 this, current_encoder_settings_->video_codec.minBitrate * 1000, | |
655 current_encoder_settings_->video_codec.maxBitrate * 1000, | |
656 CalulcateMaxPadBitrateBps(current_encoder_settings_->config, | |
657 config_.suspend_below_min_bitrate), | |
658 !config_.suspend_below_min_bitrate); | |
659 state_ = State::kStarted; | |
660 } | |
661 | |
662 VideoFrame frame; | |
663 if (input_.GetVideoFrame(&frame)) { | |
664 // TODO(perkj): |pre_encode_callback| is only used by tests. Tests should | |
665 // register as a sink to the VideoSource instead. | |
666 if (config_.pre_encode_callback) { | |
667 config_.pre_encode_callback->OnFrame(frame); | |
668 } | |
669 vie_encoder_.EncodeVideoFrame(frame); | |
670 } | |
671 } | 699 } |
672 vie_encoder_.DeRegisterExternalEncoder(config_.encoder_settings.payload_type); | |
673 } | 700 } |
674 | 701 |
675 void VideoSendStream::ReconfigureVideoEncoder( | 702 int32_t VideoSendStreamInternal::Encoded( |
676 const VideoEncoderConfig& config) { | 703 const EncodedImage& encoded_image, |
677 TRACE_EVENT0("webrtc", "VideoSendStream::(Re)configureVideoEncoder"); | 704 const CodecSpecificInfo* codec_specific_info, |
678 LOG(LS_INFO) << "(Re)configureVideoEncoder: " << config.ToString(); | 705 const RTPFragmentationHeader* fragmentation) { |
679 RTC_DCHECK_GE(config_.rtp.ssrcs.size(), config.streams.size()); | 706 if (config_->post_encode_callback) { |
680 VideoCodec video_codec = VideoEncoderConfigToVideoCodec( | 707 config_->post_encode_callback->EncodedFrameCallback( |
681 config, config_.encoder_settings.payload_name, | |
682 config_.encoder_settings.payload_type); | |
683 { | |
684 rtc::CritScope lock(&encoder_settings_crit_); | |
685 pending_encoder_settings_.reset(new EncoderSettings({video_codec, config})); | |
686 } | |
687 encoder_wakeup_event_.Set(); | |
688 } | |
689 | |
690 VideoSendStream::Stats VideoSendStream::GetStats() { | |
691 return stats_proxy_.GetStats(); | |
692 } | |
693 | |
694 void VideoSendStream::OveruseDetected() { | |
695 if (config_.overuse_callback) | |
696 config_.overuse_callback->OnLoadUpdate(LoadObserver::kOveruse); | |
697 } | |
698 | |
699 void VideoSendStream::NormalUsage() { | |
700 if (config_.overuse_callback) | |
701 config_.overuse_callback->OnLoadUpdate(LoadObserver::kUnderuse); | |
702 } | |
703 | |
704 int32_t VideoSendStream::Encoded(const EncodedImage& encoded_image, | |
705 const CodecSpecificInfo* codec_specific_info, | |
706 const RTPFragmentationHeader* fragmentation) { | |
707 if (config_.post_encode_callback) { | |
708 config_.post_encode_callback->EncodedFrameCallback( | |
709 EncodedFrame(encoded_image._buffer, encoded_image._length, | 708 EncodedFrame(encoded_image._buffer, encoded_image._length, |
710 encoded_image._frameType)); | 709 encoded_image._frameType)); |
711 } | 710 } |
711 { | |
712 rtc::CritScope lock(&encoder_activity_crit_sect_); | |
713 if (check_encoder_activity_task_) | |
714 check_encoder_activity_task_->UpdateEncoderActivity(); | |
715 } | |
712 | 716 |
713 protection_bitrate_calculator_.UpdateWithEncodedData(encoded_image); | 717 protection_bitrate_calculator_.UpdateWithEncodedData(encoded_image); |
714 int32_t return_value = payload_router_.Encoded( | 718 int32_t return_value = payload_router_.Encoded( |
715 encoded_image, codec_specific_info, fragmentation); | 719 encoded_image, codec_specific_info, fragmentation); |
716 | 720 |
717 if (kEnableFrameRecording) { | 721 if (kEnableFrameRecording) { |
718 int layer = codec_specific_info->codecType == kVideoCodecVP8 | 722 int layer = codec_specific_info->codecType == kVideoCodecVP8 |
719 ? codec_specific_info->codecSpecific.VP8.simulcastIdx | 723 ? codec_specific_info->codecSpecific.VP8.simulcastIdx |
720 : 0; | 724 : 0; |
721 IvfFileWriter* file_writer; | 725 IvfFileWriter* file_writer; |
722 { | 726 { |
723 if (file_writers_[layer] == nullptr) { | 727 if (file_writers_[layer] == nullptr) { |
724 std::ostringstream oss; | 728 std::ostringstream oss; |
725 oss << "send_bitstream_ssrc"; | 729 oss << "send_bitstream_ssrc"; |
726 for (uint32_t ssrc : config_.rtp.ssrcs) | 730 for (uint32_t ssrc : config_->rtp.ssrcs) |
727 oss << "_" << ssrc; | 731 oss << "_" << ssrc; |
728 oss << "_layer" << layer << ".ivf"; | 732 oss << "_layer" << layer << ".ivf"; |
729 file_writers_[layer] = | 733 file_writers_[layer] = |
730 IvfFileWriter::Open(oss.str(), codec_specific_info->codecType); | 734 IvfFileWriter::Open(oss.str(), codec_specific_info->codecType); |
731 } | 735 } |
732 file_writer = file_writers_[layer].get(); | 736 file_writer = file_writers_[layer].get(); |
733 } | 737 } |
734 if (file_writer) { | 738 if (file_writer) { |
735 bool ok = file_writer->WriteFrame(encoded_image); | 739 bool ok = file_writer->WriteFrame(encoded_image); |
736 RTC_DCHECK(ok); | 740 RTC_DCHECK(ok); |
737 } | 741 } |
738 } | 742 } |
739 | 743 |
740 return return_value; | 744 return return_value; |
741 } | 745 } |
742 | 746 |
743 void VideoSendStream::ConfigureProtection() { | 747 void VideoSendStreamInternal::ConfigureProtection() { |
748 RTC_DCHECK_RUN_ON(worker_queue_); | |
744 // Enable NACK, FEC or both. | 749 // Enable NACK, FEC or both. |
745 const bool enable_protection_nack = config_.rtp.nack.rtp_history_ms > 0; | 750 const bool enable_protection_nack = config_->rtp.nack.rtp_history_ms > 0; |
746 bool enable_protection_fec = config_.rtp.fec.ulpfec_payload_type != -1; | 751 bool enable_protection_fec = config_->rtp.fec.ulpfec_payload_type != -1; |
747 // Payload types without picture ID cannot determine that a stream is complete | 752 // Payload types without picture ID cannot determine that a stream is complete |
748 // without retransmitting FEC, so using FEC + NACK for H.264 (for instance) is | 753 // without retransmitting FEC, so using FEC + NACK for H.264 (for instance) is |
749 // a waste of bandwidth since FEC packets still have to be transmitted. Note | 754 // a waste of bandwidth since FEC packets still have to be transmitted. Note |
750 // that this is not the case with FLEXFEC. | 755 // that this is not the case with FLEXFEC. |
751 if (enable_protection_nack && | 756 if (enable_protection_nack && |
752 !PayloadTypeSupportsSkippingFecPackets( | 757 !PayloadTypeSupportsSkippingFecPackets( |
753 config_.encoder_settings.payload_name)) { | 758 config_->encoder_settings.payload_name)) { |
754 LOG(LS_WARNING) << "Transmitting payload type without picture ID using" | 759 LOG(LS_WARNING) << "Transmitting payload type without picture ID using" |
755 "NACK+FEC is a waste of bandwidth since FEC packets " | 760 "NACK+FEC is a waste of bandwidth since FEC packets " |
756 "also have to be retransmitted. Disabling FEC."; | 761 "also have to be retransmitted. Disabling FEC."; |
757 enable_protection_fec = false; | 762 enable_protection_fec = false; |
758 } | 763 } |
759 | 764 |
760 // Set to valid uint8_ts to be castable later without signed overflows. | 765 // Set to valid uint8_ts to be castable later without signed overflows. |
761 uint8_t payload_type_red = 0; | 766 uint8_t payload_type_red = 0; |
762 uint8_t payload_type_fec = 0; | 767 uint8_t payload_type_fec = 0; |
763 | 768 |
764 // TODO(changbin): Should set RTX for RED mapping in RTP sender in future. | 769 // TODO(changbin): Should set RTX for RED mapping in RTP sender in future. |
765 // Validate payload types. If either RED or FEC payload types are set then | 770 // Validate payload types. If either RED or FEC payload types are set then |
766 // both should be. If FEC is enabled then they both have to be set. | 771 // both should be. If FEC is enabled then they both have to be set. |
767 if (config_.rtp.fec.red_payload_type != -1) { | 772 if (config_->rtp.fec.red_payload_type != -1) { |
768 RTC_DCHECK_GE(config_.rtp.fec.red_payload_type, 0); | 773 RTC_DCHECK_GE(config_->rtp.fec.red_payload_type, 0); |
769 RTC_DCHECK_LE(config_.rtp.fec.red_payload_type, 127); | 774 RTC_DCHECK_LE(config_->rtp.fec.red_payload_type, 127); |
770 // TODO(holmer): We should only enable red if ulpfec is also enabled, but | 775 // TODO(holmer): We should only enable red if ulpfec is also enabled, but |
771 // but due to an incompatibility issue with previous versions the receiver | 776 // but due to an incompatibility issue with previous versions the receiver |
772 // assumes rtx packets are containing red if it has been configured to | 777 // assumes rtx packets are containing red if it has been configured to |
773 // receive red. Remove this in a few versions once the incompatibility | 778 // receive red. Remove this in a few versions once the incompatibility |
774 // issue is resolved (M53 timeframe). | 779 // issue is resolved (M53 timeframe). |
775 payload_type_red = static_cast<uint8_t>(config_.rtp.fec.red_payload_type); | 780 payload_type_red = static_cast<uint8_t>(config_->rtp.fec.red_payload_type); |
776 } | 781 } |
777 if (config_.rtp.fec.ulpfec_payload_type != -1) { | 782 if (config_->rtp.fec.ulpfec_payload_type != -1) { |
778 RTC_DCHECK_GE(config_.rtp.fec.ulpfec_payload_type, 0); | 783 RTC_DCHECK_GE(config_->rtp.fec.ulpfec_payload_type, 0); |
779 RTC_DCHECK_LE(config_.rtp.fec.ulpfec_payload_type, 127); | 784 RTC_DCHECK_LE(config_->rtp.fec.ulpfec_payload_type, 127); |
780 payload_type_fec = | 785 payload_type_fec = |
781 static_cast<uint8_t>(config_.rtp.fec.ulpfec_payload_type); | 786 static_cast<uint8_t>(config_->rtp.fec.ulpfec_payload_type); |
782 } | 787 } |
783 | 788 |
784 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 789 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
785 // Set NACK. | 790 // Set NACK. |
786 rtp_rtcp->SetStorePacketsStatus( | 791 rtp_rtcp->SetStorePacketsStatus( |
787 enable_protection_nack || congestion_controller_->pacer(), | 792 enable_protection_nack || congestion_controller_->pacer(), |
788 kMinSendSidePacketHistorySize); | 793 kMinSendSidePacketHistorySize); |
789 // Set FEC. | 794 // Set FEC. |
790 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 795 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
791 rtp_rtcp->SetGenericFECStatus(enable_protection_fec, payload_type_red, | 796 rtp_rtcp->SetGenericFECStatus(enable_protection_fec, payload_type_red, |
792 payload_type_fec); | 797 payload_type_fec); |
793 } | 798 } |
794 } | 799 } |
795 | 800 |
796 protection_bitrate_calculator_.SetProtectionMethod(enable_protection_fec, | 801 protection_bitrate_calculator_.SetProtectionMethod(enable_protection_fec, |
797 enable_protection_nack); | 802 enable_protection_nack); |
798 } | 803 } |
799 | 804 |
800 void VideoSendStream::ConfigureSsrcs() { | 805 void VideoSendStreamInternal::ConfigureSsrcs() { |
806 RTC_DCHECK_RUN_ON(worker_queue_); | |
801 // Configure regular SSRCs. | 807 // Configure regular SSRCs. |
802 for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) { | 808 for (size_t i = 0; i < config_->rtp.ssrcs.size(); ++i) { |
803 uint32_t ssrc = config_.rtp.ssrcs[i]; | 809 uint32_t ssrc = config_->rtp.ssrcs[i]; |
804 RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i]; | 810 RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i]; |
805 rtp_rtcp->SetSSRC(ssrc); | 811 rtp_rtcp->SetSSRC(ssrc); |
806 | 812 |
807 // Restore RTP state if previous existed. | 813 // Restore RTP state if previous existed. |
808 RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc); | 814 VideoSendStream::RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc); |
809 if (it != suspended_ssrcs_.end()) | 815 if (it != suspended_ssrcs_.end()) |
810 rtp_rtcp->SetRtpState(it->second); | 816 rtp_rtcp->SetRtpState(it->second); |
811 } | 817 } |
812 | 818 |
813 // Set up RTX if available. | 819 // Set up RTX if available. |
814 if (config_.rtp.rtx.ssrcs.empty()) | 820 if (config_->rtp.rtx.ssrcs.empty()) |
815 return; | 821 return; |
816 | 822 |
817 // Configure RTX SSRCs. | 823 // Configure RTX SSRCs. |
818 RTC_DCHECK_EQ(config_.rtp.rtx.ssrcs.size(), config_.rtp.ssrcs.size()); | 824 RTC_DCHECK_EQ(config_->rtp.rtx.ssrcs.size(), config_->rtp.ssrcs.size()); |
819 for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) { | 825 for (size_t i = 0; i < config_->rtp.rtx.ssrcs.size(); ++i) { |
820 uint32_t ssrc = config_.rtp.rtx.ssrcs[i]; | 826 uint32_t ssrc = config_->rtp.rtx.ssrcs[i]; |
821 RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i]; | 827 RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i]; |
822 rtp_rtcp->SetRtxSsrc(ssrc); | 828 rtp_rtcp->SetRtxSsrc(ssrc); |
823 RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc); | 829 VideoSendStream::RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc); |
824 if (it != suspended_ssrcs_.end()) | 830 if (it != suspended_ssrcs_.end()) |
825 rtp_rtcp->SetRtxState(it->second); | 831 rtp_rtcp->SetRtxState(it->second); |
826 } | 832 } |
827 | 833 |
828 // Configure RTX payload types. | 834 // Configure RTX payload types. |
829 RTC_DCHECK_GE(config_.rtp.rtx.payload_type, 0); | 835 RTC_DCHECK_GE(config_->rtp.rtx.payload_type, 0); |
830 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 836 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
831 rtp_rtcp->SetRtxSendPayloadType(config_.rtp.rtx.payload_type, | 837 rtp_rtcp->SetRtxSendPayloadType(config_->rtp.rtx.payload_type, |
832 config_.encoder_settings.payload_type); | 838 config_->encoder_settings.payload_type); |
833 rtp_rtcp->SetRtxSendStatus(kRtxRetransmitted | kRtxRedundantPayloads); | 839 rtp_rtcp->SetRtxSendStatus(kRtxRetransmitted | kRtxRedundantPayloads); |
834 } | 840 } |
835 if (config_.rtp.fec.red_payload_type != -1 && | 841 if (config_->rtp.fec.red_payload_type != -1 && |
836 config_.rtp.fec.red_rtx_payload_type != -1) { | 842 config_->rtp.fec.red_rtx_payload_type != -1) { |
837 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 843 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
838 rtp_rtcp->SetRtxSendPayloadType(config_.rtp.fec.red_rtx_payload_type, | 844 rtp_rtcp->SetRtxSendPayloadType(config_->rtp.fec.red_rtx_payload_type, |
839 config_.rtp.fec.red_payload_type); | 845 config_->rtp.fec.red_payload_type); |
840 } | 846 } |
841 } | 847 } |
842 } | 848 } |
843 | 849 |
844 std::map<uint32_t, RtpState> VideoSendStream::GetRtpStates() const { | 850 std::map<uint32_t, RtpState> VideoSendStreamInternal::GetRtpStates() const { |
851 RTC_DCHECK_RUN_ON(worker_queue_); | |
845 std::map<uint32_t, RtpState> rtp_states; | 852 std::map<uint32_t, RtpState> rtp_states; |
846 for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) { | 853 for (size_t i = 0; i < config_->rtp.ssrcs.size(); ++i) { |
847 uint32_t ssrc = config_.rtp.ssrcs[i]; | 854 uint32_t ssrc = config_->rtp.ssrcs[i]; |
848 RTC_DCHECK_EQ(ssrc, rtp_rtcp_modules_[i]->SSRC()); | 855 RTC_DCHECK_EQ(ssrc, rtp_rtcp_modules_[i]->SSRC()); |
849 rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtpState(); | 856 rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtpState(); |
850 } | 857 } |
851 | 858 |
852 for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) { | 859 for (size_t i = 0; i < config_->rtp.rtx.ssrcs.size(); ++i) { |
853 uint32_t ssrc = config_.rtp.rtx.ssrcs[i]; | 860 uint32_t ssrc = config_->rtp.rtx.ssrcs[i]; |
854 rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtxState(); | 861 rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtxState(); |
855 } | 862 } |
856 | 863 |
857 return rtp_states; | 864 return rtp_states; |
858 } | 865 } |
859 | 866 |
860 void VideoSendStream::SignalNetworkState(NetworkState state) { | 867 void VideoSendStreamInternal::SignalNetworkState(NetworkState state) { |
868 RTC_DCHECK_RUN_ON(worker_queue_); | |
861 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 869 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
862 rtp_rtcp->SetRTCPStatus(state == kNetworkUp ? config_.rtp.rtcp_mode | 870 rtp_rtcp->SetRTCPStatus(state == kNetworkUp ? config_->rtp.rtcp_mode |
863 : RtcpMode::kOff); | 871 : RtcpMode::kOff); |
864 } | 872 } |
865 } | 873 } |
866 | 874 |
867 uint32_t VideoSendStream::OnBitrateUpdated(uint32_t bitrate_bps, | 875 uint32_t VideoSendStreamInternal::OnBitrateUpdated(uint32_t bitrate_bps, |
868 uint8_t fraction_loss, | 876 uint8_t fraction_loss, |
869 int64_t rtt) { | 877 int64_t rtt) { |
878 RTC_DCHECK_RUN_ON(worker_queue_); | |
870 payload_router_.SetTargetSendBitrate(bitrate_bps); | 879 payload_router_.SetTargetSendBitrate(bitrate_bps); |
871 // Get the encoder target rate. It is the estimated network rate - | 880 // Get the encoder target rate. It is the estimated network rate - |
872 // protection overhead. | 881 // protection overhead. |
873 uint32_t encoder_target_rate_bps = | 882 encoder_target_rate_ = protection_bitrate_calculator_.SetTargetRates( |
874 protection_bitrate_calculator_.SetTargetRates( | 883 bitrate_bps, stats_proxy_->GetSendFrameRate(), fraction_loss, rtt); |
875 bitrate_bps, stats_proxy_.GetSendFrameRate(), fraction_loss, rtt); | 884 uint32_t result = bitrate_bps - encoder_target_rate_; |
876 vie_encoder_.OnBitrateUpdated(encoder_target_rate_bps, fraction_loss, rtt); | 885 if (!payload_router_.active()) |
877 | 886 return result; // The send stream is currently not started. |
878 return bitrate_bps - encoder_target_rate_bps; | 887 vie_encoder_->OnBitrateUpdated(encoder_target_rate_, fraction_loss, rtt); |
888 return result; | |
879 } | 889 } |
880 | 890 |
881 int VideoSendStream::ProtectionRequest(const FecProtectionParams* delta_params, | 891 int VideoSendStreamInternal::ProtectionRequest( |
882 const FecProtectionParams* key_params, | 892 const FecProtectionParams* delta_params, |
883 uint32_t* sent_video_rate_bps, | 893 const FecProtectionParams* key_params, |
884 uint32_t* sent_nack_rate_bps, | 894 uint32_t* sent_video_rate_bps, |
885 uint32_t* sent_fec_rate_bps) { | 895 uint32_t* sent_nack_rate_bps, |
896 uint32_t* sent_fec_rate_bps) { | |
897 RTC_DCHECK_RUN_ON(worker_queue_); | |
886 *sent_video_rate_bps = 0; | 898 *sent_video_rate_bps = 0; |
887 *sent_nack_rate_bps = 0; | 899 *sent_nack_rate_bps = 0; |
888 *sent_fec_rate_bps = 0; | 900 *sent_fec_rate_bps = 0; |
889 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { | 901 for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) { |
890 uint32_t not_used = 0; | 902 uint32_t not_used = 0; |
891 uint32_t module_video_rate = 0; | 903 uint32_t module_video_rate = 0; |
892 uint32_t module_fec_rate = 0; | 904 uint32_t module_fec_rate = 0; |
893 uint32_t module_nack_rate = 0; | 905 uint32_t module_nack_rate = 0; |
894 rtp_rtcp->SetFecParameters(delta_params, key_params); | 906 rtp_rtcp->SetFecParameters(delta_params, key_params); |
895 rtp_rtcp->BitrateSent(¬_used, &module_video_rate, &module_fec_rate, | 907 rtp_rtcp->BitrateSent(¬_used, &module_video_rate, &module_fec_rate, |
896 &module_nack_rate); | 908 &module_nack_rate); |
897 *sent_video_rate_bps += module_video_rate; | 909 *sent_video_rate_bps += module_video_rate; |
898 *sent_nack_rate_bps += module_nack_rate; | 910 *sent_nack_rate_bps += module_nack_rate; |
899 *sent_fec_rate_bps += module_fec_rate; | 911 *sent_fec_rate_bps += module_fec_rate; |
900 } | 912 } |
901 return 0; | 913 return 0; |
902 } | 914 } |
903 | 915 |
904 } // namespace internal | 916 } // namespace internal |
905 } // namespace webrtc | 917 } // namespace webrtc |
OLD | NEW |