OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h" | 11 #include "webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h" |
12 | 12 |
13 #include <string.h> | 13 #include <string.h> |
14 | 14 |
15 #include "webrtc/base/logging.h" | 15 #include "webrtc/base/logging.h" |
16 #include "webrtc/base/trace_event.h" | 16 #include "webrtc/base/trace_event.h" |
17 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h" | 17 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h" |
18 #include "webrtc/modules/rtp_rtcp/source/byte_io.h" | 18 #include "webrtc/modules/rtp_rtcp/source/byte_io.h" |
| 19 #include "webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h" |
19 #include "webrtc/system_wrappers/include/tick_util.h" | 20 #include "webrtc/system_wrappers/include/tick_util.h" |
20 | 21 |
21 namespace webrtc { | 22 namespace webrtc { |
22 | 23 |
23 static const int kDtmfFrequencyHz = 8000; | 24 static const int kDtmfFrequencyHz = 8000; |
24 | 25 |
25 RTPSenderAudio::RTPSenderAudio(Clock* clock, RTPSender* rtpSender) | 26 RTPSenderAudio::RTPSenderAudio(Clock* clock, RTPSender* rtpSender) |
26 : _clock(clock), | 27 : _clock(clock), |
27 _rtpSender(rtpSender), | 28 _rtpSender(rtpSender), |
28 _packetSizeSamples(160), | 29 _packetSizeSamples(160), |
(...skipping 117 matching lines...)
146 return markerBit; | 147 return markerBit; |
147 } | 148 } |
148 | 149 |
149 int32_t RTPSenderAudio::SendAudio(FrameType frameType, | 150 int32_t RTPSenderAudio::SendAudio(FrameType frameType, |
150 int8_t payloadType, | 151 int8_t payloadType, |
151 uint32_t captureTimeStamp, | 152 uint32_t captureTimeStamp, |
152 const uint8_t* payloadData, | 153 const uint8_t* payloadData, |
153 size_t dataSize, | 154 size_t dataSize, |
154 const RTPFragmentationHeader* fragmentation) { | 155 const RTPFragmentationHeader* fragmentation) { |
155 // TODO(pwestin) Breakup function in smaller functions. | 156 // TODO(pwestin) Breakup function in smaller functions. |
156 size_t payloadSize = dataSize; | 157 size_t payload_size = dataSize; |
157 size_t maxPayloadLength = _rtpSender->MaxPayloadLength(); | 158 size_t maxPayloadLength = _rtpSender->MaxPayloadLength(); |
158 uint16_t dtmfLengthMS = 0; | 159 uint16_t dtmfLengthMS = 0; |
159 uint8_t key = 0; | 160 uint8_t key = 0; |
160 int red_payload_type; | 161 int red_payload_type; |
161 uint8_t audio_level_dbov; | 162 uint8_t audio_level_dbov; |
162 int8_t dtmf_payload_type; | 163 int8_t dtmf_payload_type; |
163 uint16_t packet_size_samples; | 164 uint16_t packet_size_samples; |
164 { | 165 { |
165 rtc::CritScope cs(&_sendAudioCritsect); | 166 rtc::CritScope cs(&_sendAudioCritsect); |
166 red_payload_type = _REDPayloadType; | 167 red_payload_type = _REDPayloadType; |
(...skipping 65 matching lines...)
232 static_cast<uint16_t>(dtmfDurationSamples), | 233 static_cast<uint16_t>(dtmfDurationSamples), |
233 !_dtmfEventFirstPacketSent) != 0) { | 234 !_dtmfEventFirstPacketSent) != 0) { |
234 return -1; | 235 return -1; |
235 } | 236 } |
236 _dtmfEventFirstPacketSent = true; | 237 _dtmfEventFirstPacketSent = true; |
237 return 0; | 238 return 0; |
238 } | 239 } |
239 } | 240 } |
240 return 0; | 241 return 0; |
241 } | 242 } |
242 if (payloadSize == 0 || payloadData == NULL) { | 243 if (payload_size == 0 || payloadData == NULL) { |
243 if (frameType == kEmptyFrame) { | 244 if (frameType == kEmptyFrame) { |
244 // we don't send empty audio RTP packets | 245 // we don't send empty audio RTP packets |
245 // no error since we use it to drive DTMF when we use VAD | 246 // no error since we use it to drive DTMF when we use VAD |
246 return 0; | 247 return 0; |
247 } | 248 } |
248 return -1; | 249 return -1; |
249 } | 250 } |
250 uint8_t dataBuffer[IP_PACKET_SIZE]; | 251 std::unique_ptr<RtpPacketToSend> packet = _rtpSender->CreatePacket(); |
251 bool markerBit = MarkerBit(frameType, payloadType); | 252 bool markerBit = MarkerBit(frameType, payloadType); |
252 | 253 |
253 int32_t rtpHeaderLength = 0; | |
254 uint16_t timestampOffset = 0; | 254 uint16_t timestampOffset = 0; |
255 | 255 |
256 if (red_payload_type >= 0 && fragmentation && !markerBit && | 256 if (red_payload_type >= 0 && fragmentation && !markerBit && |
257 fragmentation->fragmentationVectorSize > 1) { | 257 fragmentation->fragmentationVectorSize > 1) { |
258 // have we configured RED? use its payload type | 258 // have we configured RED? use its payload type |
259 // we need to get the current timestamp to calc the diff | 259 // we need to get the current timestamp to calc the diff |
260 uint32_t oldTimeStamp = _rtpSender->Timestamp(); | 260 uint32_t oldTimeStamp = _rtpSender->Timestamp(); |
261 rtpHeaderLength = _rtpSender->BuildRTPheader(dataBuffer, red_payload_type, | 261 _rtpSender->BuildRtpHeader(packet.get(), captureTimeStamp, true); |
262 markerBit, captureTimeStamp, | 262 timestampOffset = uint16_t(_rtpSender->Timestamp() - oldTimeStamp); |
263 _clock->TimeInMilliseconds()); | 263 packet->SetPayloadType(red_payload_type); |
| 264 } else { |
| 265 _rtpSender->BuildRtpHeader(packet.get(), captureTimeStamp, true); |
| 266 packet->SetPayloadType(payloadType); |
| 267 } |
| 268 packet->SetMarker(markerBit); |
| 269 // Update audio level extension, if included. |
| 270 packet->SetExtension<AudioLevel>(frameType == kAudioFrameSpeech, |
| 271 audio_level_dbov); |
264 | 272 |
265 timestampOffset = uint16_t(_rtpSender->Timestamp() - oldTimeStamp); | 273 if (maxPayloadLength < (packet->headers_size() + payload_size)) { |
266 } else { | |
267 rtpHeaderLength = _rtpSender->BuildRTPheader(dataBuffer, payloadType, | |
268 markerBit, captureTimeStamp, | |
269 _clock->TimeInMilliseconds()); | |
270 } | |
271 if (rtpHeaderLength <= 0) { | |
272 return -1; | |
273 } | |
274 if (maxPayloadLength < (rtpHeaderLength + payloadSize)) { | |
275 // Too large payload buffer. | 274 // Too large payload buffer. |
276 return -1; | 275 return -1; |
277 } | 276 } |
| 277 size_t red_header_size = 0; |
278 if (red_payload_type >= 0 && // Have we configured RED? | 278 if (red_payload_type >= 0 && // Have we configured RED? |
279 fragmentation && fragmentation->fragmentationVectorSize > 1 && | 279 fragmentation && fragmentation->fragmentationVectorSize > 1 && |
280 !markerBit) { | 280 !markerBit) { |
281 if (timestampOffset <= 0x3fff) { | 281 if (timestampOffset <= 0x3fff) { |
282 if (fragmentation->fragmentationVectorSize != 2) { | 282 if (fragmentation->fragmentationVectorSize != 2) { |
283 // we only support 2 codecs when using RED | 283 // we only support 2 codecs when using RED |
284 return -1; | 284 return -1; |
285 } | 285 } |
| 286 red_header_size = 5; |
| 287 payload_size = fragmentation->fragmentationLength[0] + |
| 288 fragmentation->fragmentationLength[1]; |
| 289 |
| 290 uint8_t* payload = |
| 291 packet->AllocatePayload(red_header_size + payload_size); |
286 // only 0x80 if we have multiple blocks | 292 // only 0x80 if we have multiple blocks |
287 dataBuffer[rtpHeaderLength++] = | 293 payload[0] = 0x80 + fragmentation->fragmentationPlType[1]; |
288 0x80 + fragmentation->fragmentationPlType[1]; | |
289 size_t blockLength = fragmentation->fragmentationLength[1]; | 294 size_t blockLength = fragmentation->fragmentationLength[1]; |
290 | 295 |
291 // sanity blockLength | 296 // sanity blockLength |
292 if (blockLength > 0x3ff) { // block length 10 bits 1023 bytes | 297 if (blockLength > 0x3ff) { // block length 10 bits 1023 bytes |
293 return -1; | 298 return -1; |
294 } | 299 } |
295 uint32_t REDheader = (timestampOffset << 10) + blockLength; | 300 uint32_t REDheader = (timestampOffset << 10) + blockLength; |
296 ByteWriter<uint32_t, 3>::WriteBigEndian(dataBuffer + rtpHeaderLength, | 301 ByteWriter<uint32_t, 3>::WriteBigEndian(payload + 1, REDheader); |
297 REDheader); | |
298 rtpHeaderLength += 3; | |
299 | 302 |
300 dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0]; | 303 payload[4] = fragmentation->fragmentationPlType[0]; |
301 // copy the RED data | 304 // copy the RED data |
302 memcpy(dataBuffer + rtpHeaderLength, | 305 memcpy(payload + red_header_size, |
303 payloadData + fragmentation->fragmentationOffset[1], | 306 payloadData + fragmentation->fragmentationOffset[1], |
304 fragmentation->fragmentationLength[1]); | 307 fragmentation->fragmentationLength[1]); |
305 | 308 |
306 // copy the normal data | 309 // copy the normal data |
307 memcpy( | 310 memcpy(payload + red_header_size + fragmentation->fragmentationLength[1], |
308 dataBuffer + rtpHeaderLength + fragmentation->fragmentationLength[1], | |
309 payloadData + fragmentation->fragmentationOffset[0], | |
310 fragmentation->fragmentationLength[0]); | |
311 | |
312 payloadSize = fragmentation->fragmentationLength[0] + | |
313 fragmentation->fragmentationLength[1]; | |
314 } else { | |
315 // silence for too long send only new data | |
316 dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0]; | |
317 memcpy(dataBuffer + rtpHeaderLength, | |
318 payloadData + fragmentation->fragmentationOffset[0], | 311 payloadData + fragmentation->fragmentationOffset[0], |
319 fragmentation->fragmentationLength[0]); | 312 fragmentation->fragmentationLength[0]); |
320 | 313 } else { |
321 payloadSize = fragmentation->fragmentationLength[0]; | 314 red_header_size = 1; |
| 315 payload_size = fragmentation->fragmentationLength[0]; |
| 316 uint8_t* payload = |
| 317 packet->AllocatePayload(red_header_size + payload_size); |
| 318 // silence for too long send only new data |
| 319 payload[0] = fragmentation->fragmentationPlType[0]; |
| 320 memcpy(payload + red_header_size, |
| 321 payloadData + fragmentation->fragmentationOffset[0], |
| 322 fragmentation->fragmentationLength[0]); |
322 } | 323 } |
323 } else { | 324 } else { |
324 if (fragmentation && fragmentation->fragmentationVectorSize > 0) { | 325 if (fragmentation && fragmentation->fragmentationVectorSize > 0) { |
325 // use the fragment info if we have one | 326 // use the fragment info if we have one |
326 dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0]; | 327 red_header_size = 1; |
327 memcpy(dataBuffer + rtpHeaderLength, | 328 payload_size = fragmentation->fragmentationLength[0]; |
| 329 uint8_t* payload = |
| 330 packet->AllocatePayload(red_header_size + payload_size); |
| 331 payload[0] = fragmentation->fragmentationPlType[0]; |
| 332 memcpy(payload + red_header_size, |
328 payloadData + fragmentation->fragmentationOffset[0], | 333 payloadData + fragmentation->fragmentationOffset[0], |
329 fragmentation->fragmentationLength[0]); | 334 fragmentation->fragmentationLength[0]); |
330 | |
331 payloadSize = fragmentation->fragmentationLength[0]; | |
332 } else { | 335 } else { |
333 memcpy(dataBuffer + rtpHeaderLength, payloadData, payloadSize); | 336 uint8_t* payload = packet->AllocatePayload(payload_size); |
| 337 memcpy(payload, payloadData, payload_size); |
334 } | 338 } |
335 } | 339 } |
336 | 340 |
337 { | 341 { |
338 rtc::CritScope cs(&_sendAudioCritsect); | 342 rtc::CritScope cs(&_sendAudioCritsect); |
339 _lastPayloadType = payloadType; | 343 _lastPayloadType = payloadType; |
340 } | 344 } |
341 // Update audio level extension, if included. | |
342 size_t packetSize = payloadSize + rtpHeaderLength; | |
343 RtpUtility::RtpHeaderParser rtp_parser(dataBuffer, packetSize); | |
344 RTPHeader rtp_header; | |
345 rtp_parser.Parse(&rtp_header); | |
346 _rtpSender->UpdateAudioLevel(dataBuffer, packetSize, rtp_header, | |
347 (frameType == kAudioFrameSpeech), | |
348 audio_level_dbov); | |
349 TRACE_EVENT_ASYNC_END2("webrtc", "Audio", captureTimeStamp, "timestamp", | 345 TRACE_EVENT_ASYNC_END2("webrtc", "Audio", captureTimeStamp, "timestamp", |
350 _rtpSender->Timestamp(), "seqnum", | 346 packet->Timestamp(), "seqnum", |
351 _rtpSender->SequenceNumber()); | 347 packet->SequenceNumber()); |
352 int32_t send_result = _rtpSender->SendToNetwork( | 348 return _rtpSender->SendToNetwork( |
353 dataBuffer, payloadSize, rtpHeaderLength, | 349 std::move(packet), payload_size, TickTime::MillisecondTimestamp(), |
354 TickTime::MillisecondTimestamp(), kAllowRetransmission, | 350 kAllowRetransmission, RtpPacketSender::kHighPriority); |
355 RtpPacketSender::kHighPriority); | |
356 if (first_packet_sent_()) { | |
357 LOG(LS_INFO) << "First audio RTP packet sent to pacer"; | |
358 } | |
359 return send_result; | |
360 } | 351 } |
361 | 352 |
362 // Audio level magnitude and voice activity flag are set for each RTP packet | 353 // Audio level magnitude and voice activity flag are set for each RTP packet |
363 int32_t RTPSenderAudio::SetAudioLevel(uint8_t level_dBov) { | 354 int32_t RTPSenderAudio::SetAudioLevel(uint8_t level_dBov) { |
364 if (level_dBov > 127) { | 355 if (level_dBov > 127) { |
365 return -1; | 356 return -1; |
366 } | 357 } |
367 rtc::CritScope cs(&_sendAudioCritsect); | 358 rtc::CritScope cs(&_sendAudioCritsect); |
368 _audioLevel_dBov = level_dBov; | 359 _audioLevel_dBov = level_dBov; |
369 return 0; | 360 return 0; |
(...skipping 32 matching lines...)
402 } | 393 } |
403 } | 394 } |
404 return AddDTMF(key, time_ms, level); | 395 return AddDTMF(key, time_ms, level); |
405 } | 396 } |
406 | 397 |
407 int32_t RTPSenderAudio::SendTelephoneEventPacket(bool ended, | 398 int32_t RTPSenderAudio::SendTelephoneEventPacket(bool ended, |
408 int8_t dtmf_payload_type, | 399 int8_t dtmf_payload_type, |
409 uint32_t dtmfTimeStamp, | 400 uint32_t dtmfTimeStamp, |
410 uint16_t duration, | 401 uint16_t duration, |
411 bool markerBit) { | 402 bool markerBit) { |
412 uint8_t dtmfbuffer[IP_PACKET_SIZE]; | |
413 uint8_t sendCount = 1; | 403 uint8_t sendCount = 1; |
414 int32_t retVal = 0; | 404 int32_t retVal = 0; |
415 | 405 |
416 if (ended) { | 406 if (ended) { |
417 // resend last packet in an event 3 times | 407 // resend last packet in an event 3 times |
418 sendCount = 3; | 408 sendCount = 3; |
419 } | 409 } |
420 do { | 410 do { |
| 411 std::unique_ptr<RtpPacketToSend> packet = _rtpSender->CreatePacket(); |
421 // Send DTMF data | 412 // Send DTMF data |
422 _rtpSender->BuildRTPheader(dtmfbuffer, dtmf_payload_type, markerBit, | 413 _rtpSender->BuildRtpHeader(packet.get(), dtmfTimeStamp, false); |
423 dtmfTimeStamp, _clock->TimeInMilliseconds()); | 414 packet->SetMarker(markerBit); |
424 | 415 packet->SetPayloadType(dtmf_payload_type); |
425 // reset CSRC and X bit | |
426 dtmfbuffer[0] &= 0xe0; | |
427 | |
428 // Create DTMF data | 416 // Create DTMF data |
429 /* From RFC 2833: | 417 /* From RFC 2833: |
430 | 418 |
431 0 1 2 3 | 419 0 1 2 3 |
432 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 | 420 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 |
433 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | 421 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |
434 | event |E|R| volume | duration | | 422 | event |E|R| volume | duration | |
435 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | 423 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |
436 */ | 424 */ |
437 // R bit always cleared | 425 // R bit always cleared |
438 uint8_t R = 0x00; | 426 uint8_t R = 0x00; |
439 uint8_t volume = _dtmfLevel; | 427 uint8_t volume = _dtmfLevel; |
440 | 428 |
441 // First packet un-ended | 429 // First packet un-ended |
442 uint8_t E = ended ? 0x80 : 0x00; | 430 uint8_t E = ended ? 0x80 : 0x00; |
| 431 uint8_t* dtmfbuffer = packet->AllocatePayload(4); |
443 | 432 |
444 // First byte is Event number, equals key number | 433 // First byte is Event number, equals key number |
445 dtmfbuffer[12] = _dtmfKey; | 434 dtmfbuffer[0] = _dtmfKey; |
446 dtmfbuffer[13] = E | R | volume; | 435 dtmfbuffer[1] = E | R | volume; |
447 ByteWriter<uint16_t>::WriteBigEndian(dtmfbuffer + 14, duration); | 436 ByteWriter<uint16_t>::WriteBigEndian(dtmfbuffer + 2, duration); |
448 | 437 |
449 TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), | 438 TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), |
450 "Audio::SendTelephoneEvent", "timestamp", | 439 "Audio::SendTelephoneEvent", "timestamp", |
451 dtmfTimeStamp, "seqnum", _rtpSender->SequenceNumber()); | 440 dtmfTimeStamp, "seqnum", _rtpSender->SequenceNumber()); |
452 retVal = _rtpSender->SendToNetwork( | 441 retVal = _rtpSender->SendToNetwork( |
453 dtmfbuffer, 4, 12, TickTime::MillisecondTimestamp(), | 442 std::move(packet), 4, TickTime::MillisecondTimestamp(), |
454 kAllowRetransmission, RtpPacketSender::kHighPriority); | 443 kAllowRetransmission, RtpPacketSender::kHighPriority); |
455 sendCount--; | 444 sendCount--; |
456 } while (sendCount > 0 && retVal == 0); | 445 } while (sendCount > 0 && retVal == 0); |
457 | 446 |
458 return retVal; | 447 return retVal; |
459 } | 448 } |
460 } // namespace webrtc | 449 } // namespace webrtc |
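For reference, the two-block branch above writes a RED payload in the RFC 2198 layout: a 4-byte block header for the redundant block (F bit set, payload type, 14-bit timestamp offset, 10-bit block length) followed by a 1-byte final header carrying only the primary payload type. A minimal sketch of that packing follows; WriteRedHeader and its parameter names are illustrative only, not part of the WebRTC API.

  #include <cstddef>
  #include <cstdint>

  // Sketch of the 5-byte RED header built in the two-block branch above
  // (RFC 2198). timestamp_offset must fit in 14 bits and block_length
  // (size of the redundant block) in 10 bits.
  size_t WriteRedHeader(uint8_t* payload,
                        uint8_t redundant_pt,
                        uint8_t primary_pt,
                        uint16_t timestamp_offset,
                        uint16_t block_length) {
    // First block header: F bit set (another block follows) plus redundant PT.
    payload[0] = 0x80 | (redundant_pt & 0x7f);
    // 14-bit timestamp offset and 10-bit block length, big-endian over 3 bytes.
    uint32_t red_header = (uint32_t{timestamp_offset} << 10) | block_length;
    payload[1] = (red_header >> 16) & 0xff;
    payload[2] = (red_header >> 8) & 0xff;
    payload[3] = red_header & 0xff;
    // Final block header: F bit cleared, primary payload type only.
    payload[4] = primary_pt & 0x7f;
    return 5;  // The redundant block and then the primary block follow.
  }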
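Similarly, SendTelephoneEventPacket above fills in the 4-byte telephone-event payload shown in the RFC 2833 diagram in the code. A hedged sketch of that encoding, again with an illustrative helper name rather than an actual WebRTC function:

  #include <cstdint>

  // Sketch of the 4-byte RFC 2833 telephone-event payload written by
  // SendTelephoneEventPacket above.
  void WriteDtmfPayload(uint8_t* payload,
                        uint8_t event,       // DTMF key / event code
                        bool end_of_event,   // E bit, set on the final packets
                        uint8_t volume,      // 0..63, expressed in -dBm0
                        uint16_t duration) { // in timestamp units (8 kHz here)
    payload[0] = event;
    // E bit, reserved R bit (always 0) and 6-bit volume share the second byte.
    payload[1] = (end_of_event ? 0x80 : 0x00) | (volume & 0x3f);
    // 16-bit duration, big-endian.
    payload[2] = static_cast<uint8_t>(duration >> 8);
    payload[3] = static_cast<uint8_t>(duration & 0xff);
  }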