OLD | NEW |
| (Empty) |
1 /* | |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | |
3 * | |
4 * Use of this source code is governed by a BSD-style license | |
5 * that can be found in the LICENSE file in the root of the source | |
6 * tree. An additional intellectual property rights grant can be found | |
7 * in the file PATENTS. All contributing project authors may | |
8 * be found in the AUTHORS file in the root of the source tree. | |
9 */ | |
10 | |
11 #include "webrtc/video/rtp_stream_receiver.h" | |
12 | |
13 #include <vector> | |
14 #include <utility> | |
15 | |
16 #include "webrtc/base/checks.h" | |
17 #include "webrtc/base/location.h" | |
18 #include "webrtc/base/logging.h" | |
19 #include "webrtc/common_types.h" | |
20 #include "webrtc/config.h" | |
21 #include "webrtc/media/base/mediaconstants.h" | |
22 #include "webrtc/modules/pacing/packet_router.h" | |
23 #include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimat
or.h" | |
24 #include "webrtc/modules/rtp_rtcp/include/receive_statistics.h" | |
25 #include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h" | |
26 #include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h" | |
27 #include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h" | |
28 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h" | |
29 #include "webrtc/modules/rtp_rtcp/include/ulpfec_receiver.h" | |
30 #include "webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h" | |
31 #include "webrtc/modules/rtp_rtcp/source/rtp_packet_received.h" | |
32 #include "webrtc/modules/video_coding/frame_object.h" | |
33 #include "webrtc/modules/video_coding/h264_sprop_parameter_sets.h" | |
34 #include "webrtc/modules/video_coding/h264_sps_pps_tracker.h" | |
35 #include "webrtc/modules/video_coding/packet_buffer.h" | |
36 #include "webrtc/modules/video_coding/video_coding_impl.h" | |
37 #include "webrtc/system_wrappers/include/field_trial.h" | |
38 #include "webrtc/system_wrappers/include/metrics.h" | |
39 #include "webrtc/system_wrappers/include/timestamp_extrapolator.h" | |
40 #include "webrtc/video/receive_statistics_proxy.h" | |
41 | |
42 namespace webrtc { | |
43 | |
namespace {
// Initial capacity, in packets, of the video packet buffer.
constexpr int kPacketBufferStartSize = 32;
// Upper bound on packet buffer growth, in packets.
// NOTE(review): name is misspelled ("Sixe" -> "Size"); rename together with
// its single use in the RtpStreamReceiver constructor.
constexpr int kPacketBufferMaxSixe = 2048;
}  // namespace
48 | |
49 std::unique_ptr<RtpRtcp> CreateRtpRtcpModule( | |
50 ReceiveStatistics* receive_statistics, | |
51 Transport* outgoing_transport, | |
52 RtcpRttStats* rtt_stats, | |
53 RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, | |
54 TransportSequenceNumberAllocator* transport_sequence_number_allocator) { | |
55 RtpRtcp::Configuration configuration; | |
56 configuration.audio = false; | |
57 configuration.receiver_only = true; | |
58 configuration.receive_statistics = receive_statistics; | |
59 configuration.outgoing_transport = outgoing_transport; | |
60 configuration.intra_frame_callback = nullptr; | |
61 configuration.rtt_stats = rtt_stats; | |
62 configuration.rtcp_packet_type_counter_observer = | |
63 rtcp_packet_type_counter_observer; | |
64 configuration.transport_sequence_number_allocator = | |
65 transport_sequence_number_allocator; | |
66 configuration.send_bitrate_observer = nullptr; | |
67 configuration.send_frame_count_observer = nullptr; | |
68 configuration.send_side_delay_observer = nullptr; | |
69 configuration.send_packet_observer = nullptr; | |
70 configuration.bandwidth_callback = nullptr; | |
71 configuration.transport_feedback_callback = nullptr; | |
72 | |
73 std::unique_ptr<RtpRtcp> rtp_rtcp(RtpRtcp::CreateRtpRtcp(configuration)); | |
74 rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound); | |
75 | |
76 return rtp_rtcp; | |
77 } | |
78 | |
// Minimum interval between log lines describing an incoming RTP packet header.
static const int kPacketLogIntervalMs = 10000;
80 | |
// Wires together the RTP receive pipeline for one video stream: RTP/RTCP
// module, receive statistics, ULPFEC/RED handling, NACK module and the
// packet buffer / frame reference finder feeding |complete_frame_callback|.
// All configuration is taken from |*config| (copied into |config_|).
RtpStreamReceiver::RtpStreamReceiver(
    Transport* transport,
    RtcpRttStats* rtt_stats,
    PacketRouter* packet_router,
    const VideoReceiveStream::Config* config,
    ReceiveStatisticsProxy* receive_stats_proxy,
    ProcessThread* process_thread,
    NackSender* nack_sender,
    KeyFrameRequestSender* keyframe_request_sender,
    video_coding::OnCompleteFrameCallback* complete_frame_callback,
    VCMTiming* timing)
    : clock_(Clock::GetRealTimeClock()),
      config_(*config),
      packet_router_(packet_router),
      process_thread_(process_thread),
      ntp_estimator_(clock_),
      rtp_header_parser_(RtpHeaderParser::Create()),
      // |this| serves as both the data and feedback callback for the receiver.
      rtp_receiver_(RtpReceiver::CreateVideoReceiver(clock_,
                                                     this,
                                                     this,
                                                     &rtp_payload_registry_)),
      rtp_receive_statistics_(ReceiveStatistics::Create(clock_)),
      ulpfec_receiver_(UlpfecReceiver::Create(this)),
      receiving_(false),
      restored_packet_in_use_(false),
      last_packet_log_ms_(-1),
      rtp_rtcp_(CreateRtpRtcpModule(rtp_receive_statistics_.get(),
                                    transport,
                                    rtt_stats,
                                    receive_stats_proxy,
                                    packet_router)),
      complete_frame_callback_(complete_frame_callback),
      keyframe_request_sender_(keyframe_request_sender),
      timing_(timing),
      has_received_frame_(false) {
  packet_router_->AddReceiveRtpModule(rtp_rtcp_.get());
  rtp_receive_statistics_->RegisterRtpStatisticsCallback(receive_stats_proxy);
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(receive_stats_proxy);

  RTC_DCHECK(config_.rtp.rtcp_mode != RtcpMode::kOff)
      << "A stream should not be configured with RTCP disabled. This value is "
         "reserved for internal usage.";
  RTC_DCHECK(config_.rtp.remote_ssrc != 0);
  // TODO(pbos): What's an appropriate local_ssrc for receive-only streams?
  RTC_DCHECK(config_.rtp.local_ssrc != 0);
  RTC_DCHECK(config_.rtp.remote_ssrc != config_.rtp.local_ssrc);

  rtp_rtcp_->SetRTCPStatus(config_.rtp.rtcp_mode);
  rtp_rtcp_->SetSSRC(config_.rtp.local_ssrc);
  // Keyframes are requested via RTCP PLI messages.
  rtp_rtcp_->SetKeyFrameRequestMethod(kKeyFrameReqPliRtcp);

  // Register every negotiated RTP header extension for parsing.
  for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) {
    EnableReceiveRtpHeaderExtension(config_.rtp.extensions[i].uri,
                                    config_.rtp.extensions[i].id);
  }

  // With NACK enabled, allow a much larger reordering window so late
  // retransmissions are not counted as losses.
  static const int kMaxPacketAgeToNack = 450;
  const int max_reordering_threshold = (config_.rtp.nack.rtp_history_ms > 0)
                                           ? kMaxPacketAgeToNack
                                           : kDefaultMaxReorderingThreshold;
  rtp_receive_statistics_->SetMaxReorderingThreshold(max_reordering_threshold);

  if (config_.rtp.rtx_ssrc) {
    rtp_payload_registry_.SetRtxSsrc(config_.rtp.rtx_ssrc);

    // Map each RTX payload type (value) back to its media payload type (key).
    for (const auto& kv : config_.rtp.rtx_payload_types) {
      RTC_DCHECK(kv.second != 0);
      rtp_payload_registry_.SetRtxPayloadType(kv.second, kv.first);
    }
  }

  // Register pseudo-codecs for ULPFEC and RED so their payload types are
  // recognized by the payload registry.
  if (IsUlpfecEnabled()) {
    VideoCodec ulpfec_codec = {};
    ulpfec_codec.codecType = kVideoCodecULPFEC;
    strncpy(ulpfec_codec.plName, "ulpfec", sizeof(ulpfec_codec.plName));
    ulpfec_codec.plType = config_.rtp.ulpfec.ulpfec_payload_type;
    RTC_CHECK(AddReceiveCodec(ulpfec_codec));
  }

  if (IsRedEnabled()) {
    VideoCodec red_codec = {};
    red_codec.codecType = kVideoCodecRED;
    strncpy(red_codec.plName, "red", sizeof(red_codec.plName));
    red_codec.plType = config_.rtp.ulpfec.red_payload_type;
    RTC_CHECK(AddReceiveCodec(red_codec));
    if (config_.rtp.ulpfec.red_rtx_payload_type != -1) {
      rtp_payload_registry_.SetRtxPayloadType(
          config_.rtp.ulpfec.red_rtx_payload_type,
          config_.rtp.ulpfec.red_payload_type);
    }
  }

  if (config_.rtp.rtcp_xr.receiver_reference_time_report)
    rtp_rtcp_->SetRtcpXrRrtrStatus(true);

  // Stats callback for CNAME changes.
  rtp_rtcp_->RegisterRtcpStatisticsCallback(receive_stats_proxy);

  process_thread_->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE);

  // NACK is enabled iff a nonzero history is configured.
  if (config_.rtp.nack.rtp_history_ms != 0) {
    nack_module_.reset(
        new NackModule(clock_, nack_sender, keyframe_request_sender));
    process_thread_->RegisterModule(nack_module_.get(), RTC_FROM_HERE);
  }

  packet_buffer_ = video_coding::PacketBuffer::Create(
      clock_, kPacketBufferStartSize, kPacketBufferMaxSixe, this);
  reference_finder_.reset(new video_coding::RtpFrameReferenceFinder(this));
}
191 | |
192 RtpStreamReceiver::~RtpStreamReceiver() { | |
193 if (nack_module_) { | |
194 process_thread_->DeRegisterModule(nack_module_.get()); | |
195 } | |
196 | |
197 process_thread_->DeRegisterModule(rtp_rtcp_.get()); | |
198 | |
199 packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get()); | |
200 UpdateHistograms(); | |
201 } | |
202 | |
203 bool RtpStreamReceiver::AddReceiveCodec( | |
204 const VideoCodec& video_codec, | |
205 const std::map<std::string, std::string>& codec_params) { | |
206 pt_codec_params_.insert(make_pair(video_codec.plType, codec_params)); | |
207 return AddReceiveCodec(video_codec); | |
208 } | |
209 | |
210 bool RtpStreamReceiver::AddReceiveCodec(const VideoCodec& video_codec) { | |
211 int8_t old_pltype = -1; | |
212 if (rtp_payload_registry_.ReceivePayloadType(video_codec, &old_pltype) != | |
213 -1) { | |
214 rtp_payload_registry_.DeRegisterReceivePayload(old_pltype); | |
215 } | |
216 return rtp_payload_registry_.RegisterReceivePayload(video_codec) == 0; | |
217 } | |
218 | |
219 uint32_t RtpStreamReceiver::GetRemoteSsrc() const { | |
220 return rtp_receiver_->SSRC(); | |
221 } | |
222 | |
223 int RtpStreamReceiver::GetCsrcs(uint32_t* csrcs) const { | |
224 return rtp_receiver_->CSRCs(csrcs); | |
225 } | |
226 | |
227 RtpReceiver* RtpStreamReceiver::GetRtpReceiver() const { | |
228 return rtp_receiver_.get(); | |
229 } | |
230 | |
// Depacketized payload callback from the RtpReceiver. Stamps the packet with
// an estimated NTP capture time and NACK count, fixes up H264 bitstreams via
// the SPS/PPS tracker, and inserts the packet into the packet buffer.
// Always returns 0.
int32_t RtpStreamReceiver::OnReceivedPayloadData(
    const uint8_t* payload_data,
    size_t payload_size,
    const WebRtcRTPHeader* rtp_header) {
  WebRtcRTPHeader rtp_header_with_ntp = *rtp_header;
  rtp_header_with_ntp.ntp_time_ms =
      ntp_estimator_.Estimate(rtp_header->header.timestamp);
  VCMPacket packet(payload_data, payload_size, rtp_header_with_ntp);
  // -1 marks "NACK disabled" as opposed to "never NACKed" (0).
  packet.timesNacked =
      nack_module_ ? nack_module_->OnReceivedPacket(packet) : -1;

  // In the case of a video stream without picture ids and no rtx the
  // RtpFrameReferenceFinder will need to know about padding to
  // correctly calculate frame references.
  if (packet.sizeBytes == 0) {
    reference_finder_->PaddingReceived(packet.seqNum);
    return 0;
  }

  if (packet.codec == kVideoCodecH264) {
    // Only when we start to receive packets will we know what payload type
    // that will be used. When we know the payload type insert the correct
    // sps/pps into the tracker.
    if (packet.payloadType != last_payload_type_) {
      last_payload_type_ = packet.payloadType;
      InsertSpsPpsIntoTracker(packet.payloadType);
    }

    // CopyAndFixBitstream allocates a new buffer for the packet; kDrop and
    // kRequestKeyframe leave the packet unusable, so bail out without
    // inserting it.
    switch (tracker_.CopyAndFixBitstream(&packet)) {
      case video_coding::H264SpsPpsTracker::kRequestKeyframe:
        keyframe_request_sender_->RequestKeyFrame();
        FALLTHROUGH();
      case video_coding::H264SpsPpsTracker::kDrop:
        return 0;
      case video_coding::H264SpsPpsTracker::kInsert:
        break;
    }

  } else {
    // Non-H264: copy the payload into a heap buffer; ownership of |data| is
    // transferred to the packet buffer together with |packet|.
    uint8_t* data = new uint8_t[packet.sizeBytes];
    memcpy(data, packet.dataPtr, packet.sizeBytes);
    packet.dataPtr = data;
  }

  packet_buffer_->InsertPacket(&packet);
  return 0;
}
278 | |
279 // TODO(nisse): Try to delete this method. Obstacles: It is used by | |
280 // ParseAndHandleEncapsulatingHeader, for handling Rtx packets, and | |
281 // for callbacks from |ulpfec_receiver_|. | |
282 void RtpStreamReceiver::OnRecoveredPacket(const uint8_t* rtp_packet, | |
283 size_t rtp_packet_length) { | |
284 RTPHeader header; | |
285 if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) { | |
286 return; | |
287 } | |
288 header.payload_type_frequency = kVideoPayloadTypeFrequency; | |
289 bool in_order = IsPacketInOrder(header); | |
290 ReceivePacket(rtp_packet, rtp_packet_length, header, in_order); | |
291 } | |
292 | |
293 // TODO(pbos): Remove as soon as audio can handle a changing payload type | |
294 // without this callback. | |
295 int32_t RtpStreamReceiver::OnInitializeDecoder( | |
296 const int8_t payload_type, | |
297 const char payload_name[RTP_PAYLOAD_NAME_SIZE], | |
298 const int frequency, | |
299 const size_t channels, | |
300 const uint32_t rate) { | |
301 RTC_NOTREACHED(); | |
302 return 0; | |
303 } | |
304 | |
305 void RtpStreamReceiver::OnIncomingSSRCChanged(const uint32_t ssrc) { | |
306 rtp_rtcp_->SetRemoteSSRC(ssrc); | |
307 } | |
308 | |
309 // This method handles both regular RTP packets and packets recovered | |
310 // via FlexFEC. | |
void RtpStreamReceiver::OnRtpPacket(const RtpPacketReceived& packet) {
  {
    // |receiving_| and |last_packet_log_ms_| are guarded by |receive_cs_|;
    // the lock is intentionally released before the packet is processed.
    rtc::CritScope lock(&receive_cs_);
    if (!receiving_) {
      return;
    }

    if (!packet.recovered()) {
      int64_t now_ms = clock_->TimeInMilliseconds();

      // Periodically log the RTP header of incoming packets.
      if (now_ms - last_packet_log_ms_ > kPacketLogIntervalMs) {
        std::stringstream ss;
        ss << "Packet received on SSRC: " << packet.Ssrc()
           << " with payload type: " << static_cast<int>(packet.PayloadType())
           << ", timestamp: " << packet.Timestamp()
           << ", sequence number: " << packet.SequenceNumber()
           << ", arrival time: " << packet.arrival_time_ms();
        int32_t time_offset;
        if (packet.GetExtension<TransmissionOffset>(&time_offset)) {
          ss << ", toffset: " << time_offset;
        }
        uint32_t send_time;
        if (packet.GetExtension<AbsoluteSendTime>(&send_time)) {
          ss << ", abs send time: " << send_time;
        }
        LOG(LS_INFO) << ss.str();
        last_packet_log_ms_ = now_ms;
      }
    }
  }

  // TODO(nisse): Delete use of GetHeader, but needs refactoring of
  // ReceivePacket and IncomingPacket methods below.
  RTPHeader header;
  packet.GetHeader(&header);

  header.payload_type_frequency = kVideoPayloadTypeFrequency;

  bool in_order = IsPacketInOrder(header);
  if (!packet.recovered()) {
    // TODO(nisse): Why isn't this done for recovered packets?
    rtp_payload_registry_.SetIncomingPayloadType(header);
  }
  ReceivePacket(packet.data(), packet.size(), header, in_order);
  // Update receive statistics after ReceivePacket.
  // Receive statistics will be reset if the payload type changes (make sure
  // that the first packet is included in the stats).
  if (!packet.recovered()) {
    // TODO(nisse): We should pass a recovered flag to stats, to aid
    // fixing bug bugs.webrtc.org/6339.
    rtp_receive_statistics_->IncomingPacket(
        header, packet.size(), IsPacketRetransmitted(header, in_order));
  }
}
366 | |
367 int32_t RtpStreamReceiver::RequestKeyFrame() { | |
368 return rtp_rtcp_->RequestKeyFrame(); | |
369 } | |
370 | |
371 bool RtpStreamReceiver::IsUlpfecEnabled() const { | |
372 return config_.rtp.ulpfec.ulpfec_payload_type != -1; | |
373 } | |
374 | |
375 bool RtpStreamReceiver::IsRedEnabled() const { | |
376 return config_.rtp.ulpfec.red_payload_type != -1; | |
377 } | |
378 | |
379 bool RtpStreamReceiver::IsRetransmissionsEnabled() const { | |
380 return config_.rtp.nack.rtp_history_ms > 0; | |
381 } | |
382 | |
383 void RtpStreamReceiver::RequestPacketRetransmit( | |
384 const std::vector<uint16_t>& sequence_numbers) { | |
385 rtp_rtcp_->SendNack(sequence_numbers); | |
386 } | |
387 | |
388 int32_t RtpStreamReceiver::ResendPackets(const uint16_t* sequence_numbers, | |
389 uint16_t length) { | |
390 return rtp_rtcp_->SendNACK(sequence_numbers, length); | |
391 } | |
392 | |
393 void RtpStreamReceiver::OnReceivedFrame( | |
394 std::unique_ptr<video_coding::RtpFrameObject> frame) { | |
395 | |
396 if (!has_received_frame_) { | |
397 has_received_frame_ = true; | |
398 if (frame->FrameType() != kVideoFrameKey) | |
399 keyframe_request_sender_->RequestKeyFrame(); | |
400 } | |
401 | |
402 if (!frame->delayed_by_retransmission()) | |
403 timing_->IncomingTimestamp(frame->timestamp, clock_->TimeInMilliseconds()); | |
404 reference_finder_->ManageFrame(std::move(frame)); | |
405 } | |
406 | |
407 void RtpStreamReceiver::OnCompleteFrame( | |
408 std::unique_ptr<video_coding::FrameObject> frame) { | |
409 { | |
410 rtc::CritScope lock(&last_seq_num_cs_); | |
411 video_coding::RtpFrameObject* rtp_frame = | |
412 static_cast<video_coding::RtpFrameObject*>(frame.get()); | |
413 last_seq_num_for_pic_id_[rtp_frame->picture_id] = rtp_frame->last_seq_num(); | |
414 } | |
415 complete_frame_callback_->OnCompleteFrame(std::move(frame)); | |
416 } | |
417 | |
418 void RtpStreamReceiver::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) { | |
419 if (nack_module_) | |
420 nack_module_->UpdateRtt(max_rtt_ms); | |
421 } | |
422 | |
423 rtc::Optional<int64_t> RtpStreamReceiver::LastReceivedPacketMs() const { | |
424 return packet_buffer_->LastReceivedPacketMs(); | |
425 } | |
426 | |
427 rtc::Optional<int64_t> RtpStreamReceiver::LastReceivedKeyframePacketMs() const { | |
428 return packet_buffer_->LastReceivedKeyframePacketMs(); | |
429 } | |
430 | |
431 void RtpStreamReceiver::ReceivePacket(const uint8_t* packet, | |
432 size_t packet_length, | |
433 const RTPHeader& header, | |
434 bool in_order) { | |
435 if (rtp_payload_registry_.IsEncapsulated(header)) { | |
436 ParseAndHandleEncapsulatingHeader(packet, packet_length, header); | |
437 return; | |
438 } | |
439 const uint8_t* payload = packet + header.headerLength; | |
440 assert(packet_length >= header.headerLength); | |
441 size_t payload_length = packet_length - header.headerLength; | |
442 PayloadUnion payload_specific; | |
443 if (!rtp_payload_registry_.GetPayloadSpecifics(header.payloadType, | |
444 &payload_specific)) { | |
445 return; | |
446 } | |
447 rtp_receiver_->IncomingRtpPacket(header, payload, payload_length, | |
448 payload_specific, in_order); | |
449 } | |
450 | |
// Unwraps RED (ULPFEC) and RTX encapsulation. RED packets are handed to the
// ULPFEC receiver; RTX packets have their original header restored and are
// re-dispatched through OnRecoveredPacket().
void RtpStreamReceiver::ParseAndHandleEncapsulatingHeader(
    const uint8_t* packet, size_t packet_length, const RTPHeader& header) {
  if (rtp_payload_registry_.IsRed(header)) {
    int8_t ulpfec_pt = rtp_payload_registry_.ulpfec_payload_type();
    // First byte after the RTP header is the RED header (encapsulated PT).
    if (packet[header.headerLength] == ulpfec_pt) {
      rtp_receive_statistics_->FecPacketReceived(header, packet_length);
      // Notify video_receiver about received FEC packets to avoid NACKing these
      // packets.
      NotifyReceiverOfFecPacket(header);
    }
    if (ulpfec_receiver_->AddReceivedRedPacket(header, packet, packet_length,
                                               ulpfec_pt) != 0) {
      return;
    }
    ulpfec_receiver_->ProcessReceivedFec();
  } else if (rtp_payload_registry_.IsRtx(header)) {
    if (header.headerLength + header.paddingLength == packet_length) {
      // This is an empty packet and should be silently dropped before trying to
      // parse the RTX header.
      return;
    }
    // Remove the RTX header and parse the original RTP header.
    if (packet_length < header.headerLength)
      return;
    // |restored_packet_| is a fixed-size buffer; oversized packets are dropped.
    if (packet_length > sizeof(restored_packet_))
      return;
    rtc::CritScope lock(&receive_cs_);
    // Guard against re-entrancy: OnRecoveredPacket() below may end up back
    // here, and |restored_packet_| can hold only one packet at a time.
    if (restored_packet_in_use_) {
      LOG(LS_WARNING) << "Multiple RTX headers detected, dropping packet.";
      return;
    }
    if (!rtp_payload_registry_.RestoreOriginalPacket(
            restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
            header)) {
      LOG(LS_WARNING) << "Incoming RTX packet: Invalid RTP header ssrc: "
                      << header.ssrc << " payload type: "
                      << static_cast<int>(header.payloadType);
      return;
    }
    restored_packet_in_use_ = true;
    OnRecoveredPacket(restored_packet_, packet_length);
    restored_packet_in_use_ = false;
  }
}
495 | |
// Synthesizes an empty media packet from a received FEC packet's header and
// forwards it to OnReceivedPayloadData() so the NACK module sees the sequence
// number and does not request retransmission of FEC packets.
void RtpStreamReceiver::NotifyReceiverOfFecPacket(const RTPHeader& header) {
  int8_t last_media_payload_type =
      rtp_payload_registry_.last_received_media_payload_type();
  if (last_media_payload_type < 0) {
    LOG(LS_WARNING) << "Failed to get last media payload type.";
    return;
  }
  // Fake an empty media packet.
  WebRtcRTPHeader rtp_header = {};
  rtp_header.header = header;
  // Masquerade as the last-seen media payload type so the packet is accepted.
  rtp_header.header.payloadType = last_media_payload_type;
  rtp_header.header.paddingLength = 0;
  PayloadUnion payload_specific;
  if (!rtp_payload_registry_.GetPayloadSpecifics(last_media_payload_type,
                                                 &payload_specific)) {
    LOG(LS_WARNING) << "Failed to get payload specifics.";
    return;
  }
  rtp_header.type.Video.codec = payload_specific.Video.videoCodecType;
  // Copy optional header-extension data, defaulting when absent.
  rtp_header.type.Video.rotation = kVideoRotation_0;
  if (header.extension.hasVideoRotation) {
    rtp_header.type.Video.rotation = header.extension.videoRotation;
  }
  rtp_header.type.Video.content_type = VideoContentType::UNSPECIFIED;
  if (header.extension.hasVideoContentType) {
    rtp_header.type.Video.content_type = header.extension.videoContentType;
  }
  rtp_header.type.Video.playout_delay = header.extension.playout_delay;

  // Zero-length payload: recorded for sequencing only.
  OnReceivedPayloadData(nullptr, 0, &rtp_header);
}
527 | |
528 bool RtpStreamReceiver::DeliverRtcp(const uint8_t* rtcp_packet, | |
529 size_t rtcp_packet_length) { | |
530 { | |
531 rtc::CritScope lock(&receive_cs_); | |
532 if (!receiving_) { | |
533 return false; | |
534 } | |
535 } | |
536 | |
537 rtp_rtcp_->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length); | |
538 | |
539 int64_t rtt = 0; | |
540 rtp_rtcp_->RTT(rtp_receiver_->SSRC(), &rtt, nullptr, nullptr, nullptr); | |
541 if (rtt == 0) { | |
542 // Waiting for valid rtt. | |
543 return true; | |
544 } | |
545 uint32_t ntp_secs = 0; | |
546 uint32_t ntp_frac = 0; | |
547 uint32_t rtp_timestamp = 0; | |
548 if (rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, nullptr, nullptr, | |
549 &rtp_timestamp) != 0) { | |
550 // Waiting for RTCP. | |
551 return true; | |
552 } | |
553 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); | |
554 | |
555 return true; | |
556 } | |
557 | |
558 void RtpStreamReceiver::FrameContinuous(uint16_t picture_id) { | |
559 if (!nack_module_) | |
560 return; | |
561 | |
562 int seq_num = -1; | |
563 { | |
564 rtc::CritScope lock(&last_seq_num_cs_); | |
565 auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id); | |
566 if (seq_num_it != last_seq_num_for_pic_id_.end()) | |
567 seq_num = seq_num_it->second; | |
568 } | |
569 if (seq_num != -1) | |
570 nack_module_->ClearUpTo(seq_num); | |
571 } | |
572 | |
// Called when the frame with |picture_id| has been decoded. Clears the
// packet buffer and reference finder up to the frame's last sequence number
// and drops bookkeeping for it and all earlier picture ids.
void RtpStreamReceiver::FrameDecoded(uint16_t picture_id) {
  int seq_num = -1;
  {
    rtc::CritScope lock(&last_seq_num_cs_);
    auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
    if (seq_num_it != last_seq_num_for_pic_id_.end()) {
      seq_num = seq_num_it->second;
      // Erase all entries up to and including |picture_id| (the iterator is
      // pre-incremented so the found entry itself is removed too).
      last_seq_num_for_pic_id_.erase(last_seq_num_for_pic_id_.begin(),
                                     ++seq_num_it);
    }
  }
  if (seq_num != -1) {
    packet_buffer_->ClearTo(seq_num);
    reference_finder_->ClearTo(seq_num);
  }
}
589 | |
590 void RtpStreamReceiver::SignalNetworkState(NetworkState state) { | |
591 rtp_rtcp_->SetRTCPStatus(state == kNetworkUp ? config_.rtp.rtcp_mode | |
592 : RtcpMode::kOff); | |
593 } | |
594 | |
595 void RtpStreamReceiver::StartReceive() { | |
596 rtc::CritScope lock(&receive_cs_); | |
597 receiving_ = true; | |
598 } | |
599 | |
600 void RtpStreamReceiver::StopReceive() { | |
601 rtc::CritScope lock(&receive_cs_); | |
602 receiving_ = false; | |
603 } | |
604 | |
605 bool RtpStreamReceiver::IsPacketInOrder(const RTPHeader& header) const { | |
606 StreamStatistician* statistician = | |
607 rtp_receive_statistics_->GetStatistician(header.ssrc); | |
608 if (!statistician) | |
609 return false; | |
610 return statistician->IsPacketInOrder(header.sequenceNumber); | |
611 } | |
612 | |
613 bool RtpStreamReceiver::IsPacketRetransmitted(const RTPHeader& header, | |
614 bool in_order) const { | |
615 // Retransmissions are handled separately if RTX is enabled. | |
616 if (rtp_payload_registry_.RtxEnabled()) | |
617 return false; | |
618 StreamStatistician* statistician = | |
619 rtp_receive_statistics_->GetStatistician(header.ssrc); | |
620 if (!statistician) | |
621 return false; | |
622 // Check if this is a retransmission. | |
623 int64_t min_rtt = 0; | |
624 rtp_rtcp_->RTT(rtp_receiver_->SSRC(), nullptr, nullptr, &min_rtt, nullptr); | |
625 return !in_order && | |
626 statistician->IsRetransmitOfOldPacket(header, min_rtt); | |
627 } | |
628 | |
629 void RtpStreamReceiver::UpdateHistograms() { | |
630 FecPacketCounter counter = ulpfec_receiver_->GetPacketCounter(); | |
631 if (counter.first_packet_time_ms == -1) | |
632 return; | |
633 | |
634 int64_t elapsed_sec = | |
635 (clock_->TimeInMilliseconds() - counter.first_packet_time_ms) / 1000; | |
636 if (elapsed_sec < metrics::kMinRunTimeInSeconds) | |
637 return; | |
638 | |
639 if (counter.num_packets > 0) { | |
640 RTC_HISTOGRAM_PERCENTAGE( | |
641 "WebRTC.Video.ReceivedFecPacketsInPercent", | |
642 static_cast<int>(counter.num_fec_packets * 100 / counter.num_packets)); | |
643 } | |
644 if (counter.num_fec_packets > 0) { | |
645 RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.RecoveredMediaPacketsInPercentOfFec", | |
646 static_cast<int>(counter.num_recovered_packets * | |
647 100 / counter.num_fec_packets)); | |
648 } | |
649 } | |
650 | |
651 void RtpStreamReceiver::EnableReceiveRtpHeaderExtension( | |
652 const std::string& extension, int id) { | |
653 // One-byte-extension local identifiers are in the range 1-14 inclusive. | |
654 RTC_DCHECK_GE(id, 1); | |
655 RTC_DCHECK_LE(id, 14); | |
656 RTC_DCHECK(RtpExtension::IsSupportedForVideo(extension)); | |
657 RTC_CHECK(rtp_header_parser_->RegisterRtpHeaderExtension( | |
658 StringToRtpExtensionType(extension), id)); | |
659 } | |
660 | |
661 void RtpStreamReceiver::InsertSpsPpsIntoTracker(uint8_t payload_type) { | |
662 auto codec_params_it = pt_codec_params_.find(payload_type); | |
663 if (codec_params_it == pt_codec_params_.end()) | |
664 return; | |
665 | |
666 LOG(LS_INFO) << "Found out of band supplied codec parameters for" | |
667 << " payload type: " << static_cast<int>(payload_type); | |
668 | |
669 H264SpropParameterSets sprop_decoder; | |
670 auto sprop_base64_it = | |
671 codec_params_it->second.find(cricket::kH264FmtpSpropParameterSets); | |
672 | |
673 if (sprop_base64_it == codec_params_it->second.end()) | |
674 return; | |
675 | |
676 if (!sprop_decoder.DecodeSprop(sprop_base64_it->second.c_str())) | |
677 return; | |
678 | |
679 tracker_.InsertSpsPpsNalus(sprop_decoder.sps_nalu(), | |
680 sprop_decoder.pps_nalu()); | |
681 } | |
682 | |
683 } // namespace webrtc | |
OLD | NEW |