Chromium Code Reviews

Side by Side Diff: webrtc/voice_engine/channel.cc

Issue 1644633005: clang-format -i -style=file webrtc/voice_engine/channel.* (Closed)
Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Created 4 years, 10 months ago
OLD | NEW
1 /* 1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
(...skipping 98 matching lines...)
109 109
110 private: 110 private:
111 rtc::CriticalSection crit_; 111 rtc::CriticalSection crit_;
112 rtc::ThreadChecker thread_checker_; 112 rtc::ThreadChecker thread_checker_;
113 rtc::ThreadChecker pacer_thread_; 113 rtc::ThreadChecker pacer_thread_;
114 TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_); 114 TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
115 }; 115 };
116 116
117 class RtpPacketSenderProxy : public RtpPacketSender { 117 class RtpPacketSenderProxy : public RtpPacketSender {
118 public: 118 public:
119 RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) { 119 RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}
120 }
121 120
122 void SetPacketSender(RtpPacketSender* rtp_packet_sender) { 121 void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
123 RTC_DCHECK(thread_checker_.CalledOnValidThread()); 122 RTC_DCHECK(thread_checker_.CalledOnValidThread());
124 rtc::CritScope lock(&crit_); 123 rtc::CritScope lock(&crit_);
125 rtp_packet_sender_ = rtp_packet_sender; 124 rtp_packet_sender_ = rtp_packet_sender;
126 } 125 }
127 126
128 // Implements RtpPacketSender. 127 // Implements RtpPacketSender.
129 void InsertPacket(Priority priority, 128 void InsertPacket(Priority priority,
130 uint32_t ssrc, 129 uint32_t ssrc,
(...skipping 92 matching lines...)
223 number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second; 222 number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
224 } 223 }
225 fraction_lost_aggregate += number_of_packets * block_it->fractionLost; 224 fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
226 total_number_of_packets += number_of_packets; 225 total_number_of_packets += number_of_packets;
227 226
228 extended_max_sequence_number_[block_it->sourceSSRC] = 227 extended_max_sequence_number_[block_it->sourceSSRC] =
229 block_it->extendedHighSeqNum; 228 block_it->extendedHighSeqNum;
230 } 229 }
231 int weighted_fraction_lost = 0; 230 int weighted_fraction_lost = 0;
232 if (total_number_of_packets > 0) { 231 if (total_number_of_packets > 0) {
233 weighted_fraction_lost = (fraction_lost_aggregate + 232 weighted_fraction_lost =
234 total_number_of_packets / 2) / total_number_of_packets; 233 (fraction_lost_aggregate + total_number_of_packets / 2) /
234 total_number_of_packets;
235 } 235 }
236 owner_->OnIncomingFractionLoss(weighted_fraction_lost); 236 owner_->OnIncomingFractionLoss(weighted_fraction_lost);
237 } 237 }
238 238
239 private: 239 private:
240 Channel* owner_; 240 Channel* owner_;
241 // Maps remote side ssrc to extended highest sequence number received. 241 // Maps remote side ssrc to extended highest sequence number received.
242 std::map<uint32_t, uint32_t> extended_max_sequence_number_; 242 std::map<uint32_t, uint32_t> extended_max_sequence_number_;
243 }; 243 };
244 244
245 int32_t 245 int32_t Channel::SendData(FrameType frameType,
246 Channel::SendData(FrameType frameType, 246 uint8_t payloadType,
247 uint8_t payloadType, 247 uint32_t timeStamp,
248 uint32_t timeStamp, 248 const uint8_t* payloadData,
249 const uint8_t* payloadData, 249 size_t payloadSize,
250 size_t payloadSize, 250 const RTPFragmentationHeader* fragmentation) {
251 const RTPFragmentationHeader* fragmentation) 251 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
252 { 252 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
253 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), 253 " payloadSize=%" PRIuS ", fragmentation=0x%x)",
254 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u," 254 frameType, payloadType, timeStamp, payloadSize, fragmentation);
255 " payloadSize=%" PRIuS ", fragmentation=0x%x)",
256 frameType, payloadType, timeStamp,
257 payloadSize, fragmentation);
258 255
259 if (_includeAudioLevelIndication) 256 if (_includeAudioLevelIndication) {
260 { 257 // Store current audio level in the RTP/RTCP module.
261 // Store current audio level in the RTP/RTCP module. 258 // The level will be used in combination with voice-activity state
262 // The level will be used in combination with voice-activity state 259 // (frameType) to add an RTP header extension
263 // (frameType) to add an RTP header extension 260 _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
264 _rtpRtcpModule->SetAudioLevel(rms_level_.RMS()); 261 }
265 }
266 262
267 // Push data from ACM to RTP/RTCP-module to deliver audio frame for 263 // Push data from ACM to RTP/RTCP-module to deliver audio frame for
268 // packetization. 264 // packetization.
269 // This call will trigger Transport::SendPacket() from the RTP/RTCP module. 265 // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
270 if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType, 266 if (_rtpRtcpModule->SendOutgoingData(
271 payloadType, 267 (FrameType&)frameType, payloadType, timeStamp,
272 timeStamp, 268 // Leaving the time when this frame was
273 // Leaving the time when this frame was 269 // received from the capture device as
274 // received from the capture device as 270 // undefined for voice for now.
275 // undefined for voice for now. 271 -1, payloadData, payloadSize, fragmentation) == -1) {
276 -1, 272 _engineStatisticsPtr->SetLastError(
277 payloadData, 273 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
278 payloadSize, 274 "Channel::SendData() failed to send data to RTP/RTCP module");
279 fragmentation) == -1) 275 return -1;
280 { 276 }
281 _engineStatisticsPtr->SetLastError(
282 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
283 "Channel::SendData() failed to send data to RTP/RTCP module");
284 return -1;
285 }
286 277
287 _lastLocalTimeStamp = timeStamp; 278 _lastLocalTimeStamp = timeStamp;
288 _lastPayloadType = payloadType; 279 _lastPayloadType = payloadType;
289 280
290 return 0; 281 return 0;
291 } 282 }
292 283
293 int32_t 284 int32_t Channel::InFrameType(FrameType frame_type) {
294 Channel::InFrameType(FrameType frame_type) 285 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
295 { 286 "Channel::InFrameType(frame_type=%d)", frame_type);
296 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
297 "Channel::InFrameType(frame_type=%d)", frame_type);
298 287
299 rtc::CritScope cs(&_callbackCritSect); 288 rtc::CritScope cs(&_callbackCritSect);
300 _sendFrameType = (frame_type == kAudioFrameSpeech); 289 _sendFrameType = (frame_type == kAudioFrameSpeech);
301 return 0; 290 return 0;
302 } 291 }
303 292
304 int32_t 293 int32_t Channel::OnRxVadDetected(int vadDecision) {
305 Channel::OnRxVadDetected(int vadDecision) 294 rtc::CritScope cs(&_callbackCritSect);
306 { 295 if (_rxVadObserverPtr) {
307 rtc::CritScope cs(&_callbackCritSect); 296 _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
308 if (_rxVadObserverPtr) 297 }
309 {
310 _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
311 }
312 298
313 return 0; 299 return 0;
314 } 300 }
315 301
316 bool Channel::SendRtp(const uint8_t* data, 302 bool Channel::SendRtp(const uint8_t* data,
317 size_t len, 303 size_t len,
318 const PacketOptions& options) { 304 const PacketOptions& options) {
319 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), 305 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
320 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len); 306 "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);
321 307
322 rtc::CritScope cs(&_callbackCritSect); 308 rtc::CritScope cs(&_callbackCritSect);
323 309
324 if (_transportPtr == NULL) 310 if (_transportPtr == NULL) {
325 { 311 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
326 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId), 312 "Channel::SendPacket() failed to send RTP packet due to"
327 "Channel::SendPacket() failed to send RTP packet due to" 313 " invalid transport object");
328 " invalid transport object"); 314 return false;
329 return false; 315 }
330 }
331 316
332 uint8_t* bufferToSendPtr = (uint8_t*)data; 317 uint8_t* bufferToSendPtr = (uint8_t*)data;
333 size_t bufferLength = len; 318 size_t bufferLength = len;
334 319
335 if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) { 320 if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
336 std::string transport_name = 321 std::string transport_name =
337 _externalTransport ? "external transport" : "WebRtc sockets"; 322 _externalTransport ? "external transport" : "WebRtc sockets";
338 WEBRTC_TRACE(kTraceError, kTraceVoice, 323 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
339 VoEId(_instanceId,_channelId), 324 "Channel::SendPacket() RTP transmission using %s failed",
340 "Channel::SendPacket() RTP transmission using %s failed", 325 transport_name.c_str());
341 transport_name.c_str()); 326 return false;
342 return false; 327 }
343 } 328 return true;
344 return true;
345 } 329 }
346 330
347 bool 331 bool Channel::SendRtcp(const uint8_t* data, size_t len) {
348 Channel::SendRtcp(const uint8_t *data, size_t len) 332 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
349 { 333 "Channel::SendRtcp(len=%" PRIuS ")", len);
350 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
351 "Channel::SendRtcp(len=%" PRIuS ")", len);
352 334
353 rtc::CritScope cs(&_callbackCritSect); 335 rtc::CritScope cs(&_callbackCritSect);
354 if (_transportPtr == NULL) 336 if (_transportPtr == NULL) {
355 { 337 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
356 WEBRTC_TRACE(kTraceError, kTraceVoice, 338 "Channel::SendRtcp() failed to send RTCP packet"
357 VoEId(_instanceId,_channelId), 339 " due to invalid transport object");
358 "Channel::SendRtcp() failed to send RTCP packet" 340 return false;
359 " due to invalid transport object"); 341 }
360 return false;
361 }
362 342
363 uint8_t* bufferToSendPtr = (uint8_t*)data; 343 uint8_t* bufferToSendPtr = (uint8_t*)data;
364 size_t bufferLength = len; 344 size_t bufferLength = len;
365 345
366 int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength); 346 int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
367 if (n < 0) { 347 if (n < 0) {
368 std::string transport_name = 348 std::string transport_name =
369 _externalTransport ? "external transport" : "WebRtc sockets"; 349 _externalTransport ? "external transport" : "WebRtc sockets";
370 WEBRTC_TRACE(kTraceInfo, kTraceVoice, 350 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
371 VoEId(_instanceId,_channelId), 351 "Channel::SendRtcp() transmission using %s failed",
372 "Channel::SendRtcp() transmission using %s failed", 352 transport_name.c_str());
373 transport_name.c_str()); 353 return false;
374 return false; 354 }
375 } 355 return true;
376 return true;
377 } 356 }
378 357
379 void Channel::OnPlayTelephoneEvent(uint8_t event, 358 void Channel::OnPlayTelephoneEvent(uint8_t event,
380 uint16_t lengthMs, 359 uint16_t lengthMs,
381 uint8_t volume) { 360 uint8_t volume) {
382 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), 361 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
383 "Channel::OnPlayTelephoneEvent(event=%u, lengthMs=%u," 362 "Channel::OnPlayTelephoneEvent(event=%u, lengthMs=%u,"
384 " volume=%u)", event, lengthMs, volume); 363 " volume=%u)",
364 event, lengthMs, volume);
385 365
386 if (!_playOutbandDtmfEvent || (event > 15)) 366 if (!_playOutbandDtmfEvent || (event > 15)) {
387 { 367 // Ignore callback since feedback is disabled or event is not a
388 // Ignore callback since feedback is disabled or event is not a 368 // Dtmf tone event.
389 // Dtmf tone event. 369 return;
390 return; 370 }
391 }
392 371
393 assert(_outputMixerPtr != NULL); 372 assert(_outputMixerPtr != NULL);
394 373
395 // Start playing out the Dtmf tone (if playout is enabled). 374 // Start playing out the Dtmf tone (if playout is enabled).
396 // Reduce length of tone with 80ms to the reduce risk of echo. 375 // Reduce length of tone with 80ms to the reduce risk of echo.
397 _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume); 376 _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
398 } 377 }
399 378
400 void 379 void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
401 Channel::OnIncomingSSRCChanged(uint32_t ssrc) 380 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
402 { 381 "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);
403 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
404 "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);
405 382
406 // Update ssrc so that NTP for AV sync can be updated. 383 // Update ssrc so that NTP for AV sync can be updated.
407 _rtpRtcpModule->SetRemoteSSRC(ssrc); 384 _rtpRtcpModule->SetRemoteSSRC(ssrc);
408 } 385 }
409 386
410 void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) { 387 void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
411 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), 388 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
412 "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC, 389 "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
413 added); 390 added);
414 } 391 }
415 392
416 int32_t Channel::OnInitializeDecoder( 393 int32_t Channel::OnInitializeDecoder(
417 int8_t payloadType, 394 int8_t payloadType,
418 const char payloadName[RTP_PAYLOAD_NAME_SIZE], 395 const char payloadName[RTP_PAYLOAD_NAME_SIZE],
419 int frequency, 396 int frequency,
420 size_t channels, 397 size_t channels,
421 uint32_t rate) { 398 uint32_t rate) {
422 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 399 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
423 "Channel::OnInitializeDecoder(payloadType=%d, " 400 "Channel::OnInitializeDecoder(payloadType=%d, "
424 "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)", 401 "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
425 payloadType, payloadName, frequency, channels, rate); 402 payloadType, payloadName, frequency, channels, rate);
426 403
427 CodecInst receiveCodec = {0}; 404 CodecInst receiveCodec = {0};
428 CodecInst dummyCodec = {0}; 405 CodecInst dummyCodec = {0};
429 406
430 receiveCodec.pltype = payloadType; 407 receiveCodec.pltype = payloadType;
431 receiveCodec.plfreq = frequency; 408 receiveCodec.plfreq = frequency;
432 receiveCodec.channels = channels; 409 receiveCodec.channels = channels;
433 receiveCodec.rate = rate; 410 receiveCodec.rate = rate;
434 strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1); 411 strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
435 412
436 audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels); 413 audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
437 receiveCodec.pacsize = dummyCodec.pacsize; 414 receiveCodec.pacsize = dummyCodec.pacsize;
438 415
439 // Register the new codec to the ACM 416 // Register the new codec to the ACM
440 if (audio_coding_->RegisterReceiveCodec(receiveCodec) == -1) 417 if (audio_coding_->RegisterReceiveCodec(receiveCodec) == -1) {
441 { 418 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
442 WEBRTC_TRACE(kTraceWarning, kTraceVoice, 419 "Channel::OnInitializeDecoder() invalid codec ("
443 VoEId(_instanceId, _channelId), 420 "pt=%d, name=%s) received - 1",
444 "Channel::OnInitializeDecoder() invalid codec (" 421 payloadType, payloadName);
445 "pt=%d, name=%s) received - 1", payloadType, payloadName); 422 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
446 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR); 423 return -1;
447 return -1; 424 }
448 }
449 425
450 return 0; 426 return 0;
451 } 427 }
452 428
453 int32_t 429 int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
454 Channel::OnReceivedPayloadData(const uint8_t* payloadData, 430 size_t payloadSize,
455 size_t payloadSize, 431 const WebRtcRTPHeader* rtpHeader) {
456 const WebRtcRTPHeader* rtpHeader) 432 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
457 { 433 "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
458 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), 434 ","
459 "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS "," 435 " payloadType=%u, audioChannel=%" PRIuS ")",
460 " payloadType=%u, audioChannel=%" PRIuS ")", 436 payloadSize, rtpHeader->header.payloadType,
461 payloadSize, 437 rtpHeader->type.Audio.channel);
462 rtpHeader->header.payloadType,
463 rtpHeader->type.Audio.channel);
464 438
465 if (!channel_state_.Get().playing) 439 if (!channel_state_.Get().playing) {
466 { 440 // Avoid inserting into NetEQ when we are not playing. Count the
467 // Avoid inserting into NetEQ when we are not playing. Count the 441 // packet as discarded.
468 // packet as discarded. 442 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
469 WEBRTC_TRACE(kTraceStream, kTraceVoice, 443 "received packet is discarded since playing is not"
470 VoEId(_instanceId, _channelId), 444 " activated");
471 "received packet is discarded since playing is not" 445 _numberOfDiscardedPackets++;
472 " activated"); 446 return 0;
473 _numberOfDiscardedPackets++; 447 }
474 return 0;
475 }
476 448
477 // Push the incoming payload (parsed and ready for decoding) into the ACM 449 // Push the incoming payload (parsed and ready for decoding) into the ACM
478 if (audio_coding_->IncomingPacket(payloadData, 450 if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
479 payloadSize, 451 0) {
480 *rtpHeader) != 0) 452 _engineStatisticsPtr->SetLastError(
481 { 453 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
482 _engineStatisticsPtr->SetLastError( 454 "Channel::OnReceivedPayloadData() unable to push data to the ACM");
483 VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning, 455 return -1;
484 "Channel::OnReceivedPayloadData() unable to push data to the ACM"); 456 }
485 return -1;
486 }
487 457
488 // Update the packet delay. 458 // Update the packet delay.
489 UpdatePacketDelay(rtpHeader->header.timestamp, 459 UpdatePacketDelay(rtpHeader->header.timestamp,
490 rtpHeader->header.sequenceNumber); 460 rtpHeader->header.sequenceNumber);
491 461
492 int64_t round_trip_time = 0; 462 int64_t round_trip_time = 0;
493 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, 463 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
494 NULL, NULL, NULL); 464 NULL);
495 465
496 std::vector<uint16_t> nack_list = audio_coding_->GetNackList( 466 std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
497 round_trip_time); 467 if (!nack_list.empty()) {
498 if (!nack_list.empty()) { 468 // Can't use nack_list.data() since it's not supported by all
499 // Can't use nack_list.data() since it's not supported by all 469 // compilers.
500 // compilers. 470 ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
501 ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size())); 471 }
502 } 472 return 0;
503 return 0;
504 } 473 }
505 474
506 bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet, 475 bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
507 size_t rtp_packet_length) { 476 size_t rtp_packet_length) {
508 RTPHeader header; 477 RTPHeader header;
509 if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) { 478 if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
510 WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId, 479 WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
511 "IncomingPacket invalid RTP header"); 480 "IncomingPacket invalid RTP header");
512 return false; 481 return false;
513 } 482 }
514 header.payload_type_frequency = 483 header.payload_type_frequency =
515 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType); 484 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
516 if (header.payload_type_frequency < 0) 485 if (header.payload_type_frequency < 0)
517 return false; 486 return false;
518 return ReceivePacket(rtp_packet, rtp_packet_length, header, false); 487 return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
519 } 488 }
520 489
521 int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame) 490 int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame) {
522 { 491 if (event_log_) {
523 if (event_log_) { 492 unsigned int ssrc;
524 unsigned int ssrc; 493 RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
525 RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0); 494 event_log_->LogAudioPlayout(ssrc);
526 event_log_->LogAudioPlayout(ssrc); 495 }
496 // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
497 if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame) ==
498 -1) {
499 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
500 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
501 // In all likelihood, the audio in this frame is garbage. We return an
502 // error so that the audio mixer module doesn't add it to the mix. As
503 // a result, it won't be played out and the actions skipped here are
504 // irrelevant.
505 return -1;
506 }
507
508 if (_RxVadDetection) {
509 UpdateRxVadDetection(*audioFrame);
510 }
511
512 // Convert module ID to internal VoE channel ID
513 audioFrame->id_ = VoEChannelId(audioFrame->id_);
514 // Store speech type for dead-or-alive detection
515 _outputSpeechType = audioFrame->speech_type_;
516
517 ChannelState::State state = channel_state_.Get();
518
519 if (state.rx_apm_is_enabled) {
520 int err = rx_audioproc_->ProcessStream(audioFrame);
521 if (err) {
522 LOG(LS_ERROR) << "ProcessStream() error: " << err;
523 assert(false);
527 } 524 }
528 // Get 10ms raw PCM data from the ACM (mixer limits output frequency) 525 }
529 if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, 526
530 audioFrame) == -1) 527 {
528 // Pass the audio buffers to an optional sink callback, before applying
529 // scaling/panning, as that applies to the mix operation.
530 // External recipients of the audio (e.g. via AudioTrack), will do their
531 // own mixing/dynamic processing.
532 rtc::CritScope cs(&_callbackCritSect);
533 if (audio_sink_) {
534 AudioSinkInterface::Data data(
535 &audioFrame->data_[0], audioFrame->samples_per_channel_,
536 audioFrame->sample_rate_hz_, audioFrame->num_channels_,
537 audioFrame->timestamp_);
538 audio_sink_->OnData(data);
539 }
540 }
541
542 float output_gain = 1.0f;
543 float left_pan = 1.0f;
544 float right_pan = 1.0f;
545 {
546 rtc::CritScope cs(&volume_settings_critsect_);
547 output_gain = _outputGain;
548 left_pan = _panLeft;
549 right_pan = _panRight;
550 }
551
552 // Output volume scaling
553 if (output_gain < 0.99f || output_gain > 1.01f) {
554 AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
555 }
556
557 // Scale left and/or right channel(s) if stereo and master balance is
558 // active
559
560 if (left_pan != 1.0f || right_pan != 1.0f) {
561 if (audioFrame->num_channels_ == 1) {
562 // Emulate stereo mode since panning is active.
563 // The mono signal is copied to both left and right channels here.
564 AudioFrameOperations::MonoToStereo(audioFrame);
565 }
566 // For true stereo mode (when we are receiving a stereo signal), no
567 // action is needed.
568
569 // Do the panning operation (the audio frame contains stereo at this
570 // stage)
571 AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
572 }
573
574 // Mix decoded PCM output with file if file mixing is enabled
575 if (state.output_file_playing) {
576 MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
577 }
578
579 // External media
580 if (_outputExternalMedia) {
581 rtc::CritScope cs(&_callbackCritSect);
582 const bool isStereo = (audioFrame->num_channels_ == 2);
583 if (_outputExternalMediaCallbackPtr) {
584 _outputExternalMediaCallbackPtr->Process(
585 _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
586 audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
587 isStereo);
588 }
589 }
590
591 // Record playout if enabled
592 {
593 rtc::CritScope cs(&_fileCritSect);
594
595 if (_outputFileRecording && _outputFileRecorderPtr) {
596 _outputFileRecorderPtr->RecordAudioToFile(*audioFrame);
597 }
598 }
599
600 // Measure audio level (0-9)
601 _outputAudioLevel.ComputeLevel(*audioFrame);
602
603 if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
604 // The first frame with a valid rtp timestamp.
605 capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
606 }
607
608 if (capture_start_rtp_time_stamp_ >= 0) {
609 // audioFrame.timestamp_ should be valid from now on.
610
611 // Compute elapsed time.
612 int64_t unwrap_timestamp =
613 rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
614 audioFrame->elapsed_time_ms_ =
615 (unwrap_timestamp - capture_start_rtp_time_stamp_) /
616 (GetPlayoutFrequency() / 1000);
617
531 { 618 {
532 WEBRTC_TRACE(kTraceError, kTraceVoice, 619 rtc::CritScope lock(&ts_stats_lock_);
533 VoEId(_instanceId,_channelId), 620 // Compute ntp time.
534 "Channel::GetAudioFrame() PlayoutData10Ms() failed!"); 621 audioFrame->ntp_time_ms_ =
535 // In all likelihood, the audio in this frame is garbage. We return an 622 ntp_estimator_.Estimate(audioFrame->timestamp_);
536 // error so that the audio mixer module doesn't add it to the mix. As 623 // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
537 // a result, it won't be played out and the actions skipped here are 624 if (audioFrame->ntp_time_ms_ > 0) {
538 // irrelevant. 625 // Compute |capture_start_ntp_time_ms_| so that
539 return -1; 626 // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
540 } 627 capture_start_ntp_time_ms_ =
541 628 audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
542 if (_RxVadDetection)
543 {
544 UpdateRxVadDetection(*audioFrame);
545 }
546
547 // Convert module ID to internal VoE channel ID
548 audioFrame->id_ = VoEChannelId(audioFrame->id_);
549 // Store speech type for dead-or-alive detection
550 _outputSpeechType = audioFrame->speech_type_;
551
552 ChannelState::State state = channel_state_.Get();
553
554 if (state.rx_apm_is_enabled) {
555 int err = rx_audioproc_->ProcessStream(audioFrame);
556 if (err) {
557 LOG(LS_ERROR) << "ProcessStream() error: " << err;
558 assert(false);
559 } 629 }
560 } 630 }
631 }
561 632
562 { 633 return 0;
563 // Pass the audio buffers to an optional sink callback, before applying 634 }
564 // scaling/panning, as that applies to the mix operation. 635
565 // External recipients of the audio (e.g. via AudioTrack), will do their 636 int32_t Channel::NeededFrequency(int32_t id) const {
566 // own mixing/dynamic processing. 637 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
567 rtc::CritScope cs(&_callbackCritSect); 638 "Channel::NeededFrequency(id=%d)", id);
568 if (audio_sink_) { 639
569 AudioSinkInterface::Data data( 640 int highestNeeded = 0;
570 &audioFrame->data_[0], 641
571 audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_, 642 // Determine highest needed receive frequency
572 audioFrame->num_channels_, audioFrame->timestamp_); 643 int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
573 audio_sink_->OnData(data); 644
645 // Return the bigger of playout and receive frequency in the ACM.
646 if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
647 highestNeeded = audio_coding_->PlayoutFrequency();
648 } else {
649 highestNeeded = receiveFrequency;
650 }
651
652 // Special case, if we're playing a file on the playout side
653 // we take that frequency into consideration as well
654 // This is not needed on sending side, since the codec will
655 // limit the spectrum anyway.
656 if (channel_state_.Get().output_file_playing) {
657 rtc::CritScope cs(&_fileCritSect);
658 if (_outputFilePlayerPtr) {
659 if (_outputFilePlayerPtr->Frequency() > highestNeeded) {
660 highestNeeded = _outputFilePlayerPtr->Frequency();
574 } 661 }
575 } 662 }
663 }
576 664
577 float output_gain = 1.0f; 665 return (highestNeeded);
578 float left_pan = 1.0f;
579 float right_pan = 1.0f;
580 {
581 rtc::CritScope cs(&volume_settings_critsect_);
582 output_gain = _outputGain;
583 left_pan = _panLeft;
584 right_pan= _panRight;
585 }
586
587 // Output volume scaling
588 if (output_gain < 0.99f || output_gain > 1.01f)
589 {
590 AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
591 }
592
593 // Scale left and/or right channel(s) if stereo and master balance is
594 // active
595
596 if (left_pan != 1.0f || right_pan != 1.0f)
597 {
598 if (audioFrame->num_channels_ == 1)
599 {
600 // Emulate stereo mode since panning is active.
601 // The mono signal is copied to both left and right channels here.
602 AudioFrameOperations::MonoToStereo(audioFrame);
603 }
604 // For true stereo mode (when we are receiving a stereo signal), no
605 // action is needed.
606
607 // Do the panning operation (the audio frame contains stereo at this
608 // stage)
609 AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
610 }
611
612 // Mix decoded PCM output with file if file mixing is enabled
613 if (state.output_file_playing)
614 {
615 MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
616 }
617
618 // External media
619 if (_outputExternalMedia)
620 {
621 rtc::CritScope cs(&_callbackCritSect);
622 const bool isStereo = (audioFrame->num_channels_ == 2);
623 if (_outputExternalMediaCallbackPtr)
624 {
625 _outputExternalMediaCallbackPtr->Process(
626 _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
627 audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
628 isStereo);
629 }
630 }
631
632 // Record playout if enabled
633 {
634 rtc::CritScope cs(&_fileCritSect);
635
636 if (_outputFileRecording && _outputFileRecorderPtr)
637 {
638 _outputFileRecorderPtr->RecordAudioToFile(*audioFrame);
639 }
640 }
641
642 // Measure audio level (0-9)
643 _outputAudioLevel.ComputeLevel(*audioFrame);
644
645 if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
646 // The first frame with a valid rtp timestamp.
647 capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
648 }
649
650 if (capture_start_rtp_time_stamp_ >= 0) {
651 // audioFrame.timestamp_ should be valid from now on.
652
653 // Compute elapsed time.
654 int64_t unwrap_timestamp =
655 rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
656 audioFrame->elapsed_time_ms_ =
657 (unwrap_timestamp - capture_start_rtp_time_stamp_) /
658 (GetPlayoutFrequency() / 1000);
659
660 {
661 rtc::CritScope lock(&ts_stats_lock_);
662 // Compute ntp time.
663 audioFrame->ntp_time_ms_ = ntp_estimator_.Estimate(
664 audioFrame->timestamp_);
665 // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
666 if (audioFrame->ntp_time_ms_ > 0) {
667 // Compute |capture_start_ntp_time_ms_| so that
668 // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
669 capture_start_ntp_time_ms_ =
670 audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
671 }
672 }
673 }
674
675 return 0;
676 }
677
678 int32_t
679 Channel::NeededFrequency(int32_t id) const
680 {
681 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
682 "Channel::NeededFrequency(id=%d)", id);
683
684 int highestNeeded = 0;
685
686 // Determine highest needed receive frequency
687 int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
688
689 // Return the bigger of playout and receive frequency in the ACM.
690 if (audio_coding_->PlayoutFrequency() > receiveFrequency)
691 {
692 highestNeeded = audio_coding_->PlayoutFrequency();
693 }
694 else
695 {
696 highestNeeded = receiveFrequency;
697 }
698
699 // Special case, if we're playing a file on the playout side
700 // we take that frequency into consideration as well
701 // This is not needed on sending side, since the codec will
702 // limit the spectrum anyway.
703 if (channel_state_.Get().output_file_playing)
704 {
705 rtc::CritScope cs(&_fileCritSect);
706 if (_outputFilePlayerPtr)
707 {
708 if(_outputFilePlayerPtr->Frequency()>highestNeeded)
709 {
710 highestNeeded=_outputFilePlayerPtr->Frequency();
711 }
712 }
713 }
714
715 return(highestNeeded);
716 } 666 }
717 667
718 int32_t Channel::CreateChannel(Channel*& channel, 668 int32_t Channel::CreateChannel(Channel*& channel,
719 int32_t channelId, 669 int32_t channelId,
720 uint32_t instanceId, 670 uint32_t instanceId,
721 RtcEventLog* const event_log, 671 RtcEventLog* const event_log,
722 const Config& config) { 672 const Config& config) {
723 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId), 673 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
724 "Channel::CreateChannel(channelId=%d, instanceId=%d)", 674 "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
725 channelId, instanceId); 675 instanceId);
726 676
727 channel = new Channel(channelId, instanceId, event_log, config); 677 channel = new Channel(channelId, instanceId, event_log, config);
728 if (channel == NULL) 678 if (channel == NULL) {
729 { 679 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
730 WEBRTC_TRACE(kTraceMemory, kTraceVoice, 680 "Channel::CreateChannel() unable to allocate memory for"
731 VoEId(instanceId,channelId), 681 " channel");
732 "Channel::CreateChannel() unable to allocate memory for" 682 return -1;
733 " channel"); 683 }
734 return -1; 684 return 0;
735 }
736 return 0;
737 } 685 }
738 686
739 void 687 void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
740 Channel::PlayNotification(int32_t id, uint32_t durationMs) 688 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
741 { 689 "Channel::PlayNotification(id=%d, durationMs=%d)", id,
742 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), 690 durationMs);
743 "Channel::PlayNotification(id=%d, durationMs=%d)",
744 id, durationMs);
745 691
746 // Not implement yet 692 // Not implement yet
747 } 693 }
748 694
749 void 695 void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
750 Channel::RecordNotification(int32_t id, uint32_t durationMs) 696 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
751 { 697 "Channel::RecordNotification(id=%d, durationMs=%d)", id,
752 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), 698 durationMs);
753 "Channel::RecordNotification(id=%d, durationMs=%d)",
754 id, durationMs);
755 699
756 // Not implement yet 700 // Not implement yet
757 } 701 }
758 702
759 void 703 void Channel::PlayFileEnded(int32_t id) {
760 Channel::PlayFileEnded(int32_t id) 704 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
761 { 705 "Channel::PlayFileEnded(id=%d)", id);
762 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
763 "Channel::PlayFileEnded(id=%d)", id);
764 706
765 if (id == _inputFilePlayerId) 707 if (id == _inputFilePlayerId) {
766 { 708 channel_state_.SetInputFilePlaying(false);
767 channel_state_.SetInputFilePlaying(false); 709 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
768 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, 710 "Channel::PlayFileEnded() => input file player module is"
769 VoEId(_instanceId,_channelId), 711 " shutdown");
770 "Channel::PlayFileEnded() => input file player module is" 712 } else if (id == _outputFilePlayerId) {
771 " shutdown"); 713 channel_state_.SetOutputFilePlaying(false);
772 } 714 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
773 else if (id == _outputFilePlayerId) 715 "Channel::PlayFileEnded() => output file player module is"
774 { 716 " shutdown");
775 channel_state_.SetOutputFilePlaying(false); 717 }
776 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
777 VoEId(_instanceId,_channelId),
778 "Channel::PlayFileEnded() => output file player module is"
779 " shutdown");
780 }
781 } 718 }
782 719
783 void 720 void Channel::RecordFileEnded(int32_t id) {
784 Channel::RecordFileEnded(int32_t id) 721 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
785 { 722 "Channel::RecordFileEnded(id=%d)", id);
786 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
787 "Channel::RecordFileEnded(id=%d)", id);
788 723
789 assert(id == _outputFileRecorderId); 724 assert(id == _outputFileRecorderId);
790 725
791 rtc::CritScope cs(&_fileCritSect); 726 rtc::CritScope cs(&_fileCritSect);
792 727
793 _outputFileRecording = false; 728 _outputFileRecording = false;
794 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, 729 WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
795 VoEId(_instanceId,_channelId), 730 "Channel::RecordFileEnded() => output file recorder module is"
796 "Channel::RecordFileEnded() => output file recorder module is" 731 " shutdown");
797 " shutdown");
798 } 732 }
799 733
800 Channel::Channel(int32_t channelId, 734 Channel::Channel(int32_t channelId,
801 uint32_t instanceId, 735 uint32_t instanceId,
802 RtcEventLog* const event_log, 736 RtcEventLog* const event_log,
803 const Config& config) 737 const Config& config)
804 : _instanceId(instanceId), 738 : _instanceId(instanceId),
805 _channelId(channelId), 739 _channelId(channelId),
806 event_log_(event_log), 740 event_log_(event_log),
807 rtp_header_parser_(RtpHeaderParser::Create()), 741 rtp_header_parser_(RtpHeaderParser::Create()),
(...skipping 70 matching lines...)
878 rtcp_observer_(new VoERtcpObserver(this)), 812 rtcp_observer_(new VoERtcpObserver(this)),
879 network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())), 813 network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
880 associate_send_channel_(ChannelOwner(nullptr)), 814 associate_send_channel_(ChannelOwner(nullptr)),
881 pacing_enabled_(config.Get<VoicePacing>().enabled), 815 pacing_enabled_(config.Get<VoicePacing>().enabled),
882 feedback_observer_proxy_(pacing_enabled_ ? new TransportFeedbackProxy() 816 feedback_observer_proxy_(pacing_enabled_ ? new TransportFeedbackProxy()
883 : nullptr), 817 : nullptr),
884 seq_num_allocator_proxy_( 818 seq_num_allocator_proxy_(
885 pacing_enabled_ ? new TransportSequenceNumberProxy() : nullptr), 819 pacing_enabled_ ? new TransportSequenceNumberProxy() : nullptr),
886 rtp_packet_sender_proxy_(pacing_enabled_ ? new RtpPacketSenderProxy() 820 rtp_packet_sender_proxy_(pacing_enabled_ ? new RtpPacketSenderProxy()
887 : nullptr) { 821 : nullptr) {
888 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId), 822 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
889 "Channel::Channel() - ctor"); 823 "Channel::Channel() - ctor");
890 AudioCodingModule::Config acm_config; 824 AudioCodingModule::Config acm_config;
891 acm_config.id = VoEModuleId(instanceId, channelId); 825 acm_config.id = VoEModuleId(instanceId, channelId);
892 if (config.Get<NetEqCapacityConfig>().enabled) { 826 if (config.Get<NetEqCapacityConfig>().enabled) {
893 // Clamping the buffer capacity at 20 packets. While going lower will 827 // Clamping the buffer capacity at 20 packets. While going lower will
894 // probably work, it makes little sense. 828 // probably work, it makes little sense.
895 acm_config.neteq_config.max_packets_in_buffer = 829 acm_config.neteq_config.max_packets_in_buffer =
896 std::max(20, config.Get<NetEqCapacityConfig>().capacity); 830 std::max(20, config.Get<NetEqCapacityConfig>().capacity);
897 } 831 }
898 acm_config.neteq_config.enable_fast_accelerate = 832 acm_config.neteq_config.enable_fast_accelerate =
899 config.Get<NetEqFastAccelerate>().enabled; 833 config.Get<NetEqFastAccelerate>().enabled;
900 audio_coding_.reset(AudioCodingModule::Create(acm_config)); 834 audio_coding_.reset(AudioCodingModule::Create(acm_config));
901 835
902 _inbandDtmfQueue.ResetDtmf(); 836 _inbandDtmfQueue.ResetDtmf();
903 _inbandDtmfGenerator.Init(); 837 _inbandDtmfGenerator.Init();
904 _outputAudioLevel.Clear(); 838 _outputAudioLevel.Clear();
905 839
906 RtpRtcp::Configuration configuration; 840 RtpRtcp::Configuration configuration;
907 configuration.audio = true; 841 configuration.audio = true;
908 configuration.outgoing_transport = this; 842 configuration.outgoing_transport = this;
909 configuration.audio_messages = this; 843 configuration.audio_messages = this;
910 configuration.receive_statistics = rtp_receive_statistics_.get(); 844 configuration.receive_statistics = rtp_receive_statistics_.get();
911 configuration.bandwidth_callback = rtcp_observer_.get(); 845 configuration.bandwidth_callback = rtcp_observer_.get();
912 configuration.paced_sender = rtp_packet_sender_proxy_.get(); 846 configuration.paced_sender = rtp_packet_sender_proxy_.get();
913 configuration.transport_sequence_number_allocator = 847 configuration.transport_sequence_number_allocator =
914 seq_num_allocator_proxy_.get(); 848 seq_num_allocator_proxy_.get();
915 configuration.transport_feedback_callback = feedback_observer_proxy_.get(); 849 configuration.transport_feedback_callback = feedback_observer_proxy_.get();
916 configuration.event_log = event_log; 850 configuration.event_log = event_log;
917 851
918 _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration)); 852 _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
919 853
920 statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC())); 854 statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
921 rtp_receive_statistics_->RegisterRtcpStatisticsCallback( 855 rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
922 statistics_proxy_.get()); 856 statistics_proxy_.get());
923 857
924 Config audioproc_config; 858 Config audioproc_config;
925 audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false)); 859 audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
926 rx_audioproc_.reset(AudioProcessing::Create(audioproc_config)); 860 rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
927 } 861 }
928 862
929 Channel::~Channel() 863 Channel::~Channel() {
930 { 864 rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
931 rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL); 865 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
932 WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId), 866 "Channel::~Channel() - dtor");
933 "Channel::~Channel() - dtor"); 867
934 868 if (_outputExternalMedia) {
935 if (_outputExternalMedia) 869 DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
936 { 870 }
937 DeRegisterExternalMediaProcessing(kPlaybackPerChannel); 871 if (channel_state_.Get().input_external_media) {
938 } 872 DeRegisterExternalMediaProcessing(kRecordingPerChannel);
939 if (channel_state_.Get().input_external_media) 873 }
940 { 874 StopSend();
941 DeRegisterExternalMediaProcessing(kRecordingPerChannel); 875 StopPlayout();
942 } 876
943 StopSend(); 877 {
944 StopPlayout(); 878 rtc::CritScope cs(&_fileCritSect);
945 879 if (_inputFilePlayerPtr) {
946 { 880 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
947 rtc::CritScope cs(&_fileCritSect); 881 _inputFilePlayerPtr->StopPlayingFile();
948 if (_inputFilePlayerPtr) 882 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
949 { 883 _inputFilePlayerPtr = NULL;
950 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL); 884 }
951 _inputFilePlayerPtr->StopPlayingFile(); 885 if (_outputFilePlayerPtr) {
952 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr); 886 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
953 _inputFilePlayerPtr = NULL; 887 _outputFilePlayerPtr->StopPlayingFile();
954 } 888 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
955 if (_outputFilePlayerPtr) 889 _outputFilePlayerPtr = NULL;
956 { 890 }
957 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); 891 if (_outputFileRecorderPtr) {
958 _outputFilePlayerPtr->StopPlayingFile(); 892 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
959 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); 893 _outputFileRecorderPtr->StopRecording();
960 _outputFilePlayerPtr = NULL; 894 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
961 } 895 _outputFileRecorderPtr = NULL;
962 if (_outputFileRecorderPtr) 896 }
963 { 897 }
964 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); 898
965 _outputFileRecorderPtr->StopRecording(); 899 // The order to safely shutdown modules in a channel is:
966 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); 900 // 1. De-register callbacks in modules
967 _outputFileRecorderPtr = NULL; 901 // 2. De-register modules in process thread
968 } 902 // 3. Destroy modules
969 } 903 if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
970 904 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
971 // The order to safely shutdown modules in a channel is: 905 "~Channel() failed to de-register transport callback"
972 // 1. De-register callbacks in modules 906 " (Audio coding module)");
973 // 2. De-register modules in process thread 907 }
974 // 3. Destroy modules 908 if (audio_coding_->RegisterVADCallback(NULL) == -1) {
975 if (audio_coding_->RegisterTransportCallback(NULL) == -1) 909 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
976 { 910 "~Channel() failed to de-register VAD callback"
977 WEBRTC_TRACE(kTraceWarning, kTraceVoice, 911 " (Audio coding module)");
978 VoEId(_instanceId,_channelId), 912 }
979 "~Channel() failed to de-register transport callback" 913 // De-register modules in process thread
980 " (Audio coding module)"); 914 _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());
981 } 915
982 if (audio_coding_->RegisterVADCallback(NULL) == -1) 916 // End of modules shutdown
983 { 917 }
984 WEBRTC_TRACE(kTraceWarning, kTraceVoice, 918
985 VoEId(_instanceId,_channelId), 919 int32_t Channel::Init() {
986 "~Channel() failed to de-register VAD callback" 920 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
987 " (Audio coding module)"); 921 "Channel::Init()");
988 } 922
989 // De-register modules in process thread 923 channel_state_.Reset();
990 _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()); 924
991 925 // --- Initial sanity
992 // End of modules shutdown 926
993 } 927 if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
994 928 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
995 int32_t 929 "Channel::Init() must call SetEngineInformation() first");
996 Channel::Init() 930 return -1;
997 { 931 }
998 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 932
999 "Channel::Init()"); 933 // --- Add modules to process thread (for periodic schedulation)
1000 934
1001 channel_state_.Reset(); 935 _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());
1002 936
1003 // --- Initial sanity 937 // --- ACM initialization
1004 938
1005 if ((_engineStatisticsPtr == NULL) || 939 if (audio_coding_->InitializeReceiver() == -1) {
1006 (_moduleProcessThreadPtr == NULL)) 940 _engineStatisticsPtr->SetLastError(
1007 { 941 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1008 WEBRTC_TRACE(kTraceError, kTraceVoice, 942 "Channel::Init() unable to initialize the ACM - 1");
1009 VoEId(_instanceId,_channelId), 943 return -1;
1010 "Channel::Init() must call SetEngineInformation() first"); 944 }
1011 return -1; 945
1012 } 946 // --- RTP/RTCP module initialization
1013 947
1014 // --- Add modules to process thread (for periodic schedulation) 948 // Ensure that RTCP is enabled by default for the created channel.
1015 949 // Note that, the module will keep generating RTCP until it is explicitly
1016 _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()); 950 // disabled by the user.
1017 951 // After StopListen (when no sockets exists), RTCP packets will no longer
1018 // --- ACM initialization 952 // be transmitted since the Transport object will then be invalid.
1019 953 telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
1020 if (audio_coding_->InitializeReceiver() == -1) { 954 // RTCP is enabled by default.
1021 _engineStatisticsPtr->SetLastError( 955 _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
1022 VE_AUDIO_CODING_MODULE_ERROR, kTraceError, 956 // --- Register all permanent callbacks
1023 "Channel::Init() unable to initialize the ACM - 1"); 957 const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
1024 return -1; 958 (audio_coding_->RegisterVADCallback(this) == -1);
1025 } 959
1026 960 if (fail) {
1027 // --- RTP/RTCP module initialization 961 _engineStatisticsPtr->SetLastError(
1028 962 VE_CANNOT_INIT_CHANNEL, kTraceError,
1029 // Ensure that RTCP is enabled by default for the created channel. 963 "Channel::Init() callbacks not registered");
1030 // Note that, the module will keep generating RTCP until it is explicitly 964 return -1;
1031 // disabled by the user. 965 }
1032 // After StopListen (when no sockets exists), RTCP packets will no longer 966
1033 // be transmitted since the Transport object will then be invalid. 967 // --- Register all supported codecs to the receiving side of the
1034 telephone_event_handler_->SetTelephoneEventForwardToDecoder(true); 968 // RTP/RTCP module
1035 // RTCP is enabled by default. 969
1036 _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound); 970 CodecInst codec;
1037 // --- Register all permanent callbacks 971 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
1038 const bool fail = 972
1039 (audio_coding_->RegisterTransportCallback(this) == -1) || 973 for (int idx = 0; idx < nSupportedCodecs; idx++) {
1040 (audio_coding_->RegisterVADCallback(this) == -1); 974 // Open up the RTP/RTCP receiver for all supported codecs
1041 975 if ((audio_coding_->Codec(idx, &codec) == -1) ||
1042 if (fail) 976 (rtp_receiver_->RegisterReceivePayload(
1043 { 977 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1044 _engineStatisticsPtr->SetLastError( 978 (codec.rate < 0) ? 0 : codec.rate) == -1)) {
1045 VE_CANNOT_INIT_CHANNEL, kTraceError, 979 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
1046 "Channel::Init() callbacks not registered"); 980 "Channel::Init() unable to register %s "
1047 return -1; 981 "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
1048 } 982 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1049 983 codec.rate);
1050 // --- Register all supported codecs to the receiving side of the 984 } else {
1051 // RTP/RTCP module 985 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1052 986 "Channel::Init() %s (%d/%d/%" PRIuS
1053 CodecInst codec; 987 "/%d) has been "
1054 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs(); 988 "added to the RTP/RTCP receiver",
1055 989 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1056 for (int idx = 0; idx < nSupportedCodecs; idx++) 990 codec.rate);
1057 { 991 }
1058 // Open up the RTP/RTCP receiver for all supported codecs 992
1059 if ((audio_coding_->Codec(idx, &codec) == -1) || 993 // Ensure that PCMU is used as default codec on the sending side
1060 (rtp_receiver_->RegisterReceivePayload( 994 if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
1061 codec.plname, 995 SetSendCodec(codec);
1062 codec.pltype, 996 }
1063 codec.plfreq, 997
1064 codec.channels, 998 // Register default PT for outband 'telephone-event'
1065 (codec.rate < 0) ? 0 : codec.rate) == -1)) 999 if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
1066 { 1000 if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
1067 WEBRTC_TRACE(kTraceWarning, kTraceVoice, 1001 (audio_coding_->RegisterReceiveCodec(codec) == -1)) {
1068 VoEId(_instanceId,_channelId), 1002 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
1069 "Channel::Init() unable to register %s " 1003 "Channel::Init() failed to register outband "
1070 "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver", 1004 "'telephone-event' (%d/%d) correctly",
1071 codec.plname, codec.pltype, codec.plfreq, 1005 codec.pltype, codec.plfreq);
1072 codec.channels, codec.rate); 1006 }
1073 } 1007 }
1074 else 1008
1075 { 1009 if (!STR_CASE_CMP(codec.plname, "CN")) {
1076 WEBRTC_TRACE(kTraceInfo, kTraceVoice, 1010 if ((audio_coding_->RegisterSendCodec(codec) == -1) ||
1077 VoEId(_instanceId,_channelId), 1011 (audio_coding_->RegisterReceiveCodec(codec) == -1) ||
1078 "Channel::Init() %s (%d/%d/%" PRIuS "/%d) has been " 1012 (_rtpRtcpModule->RegisterSendPayload(codec) == -1)) {
1079 "added to the RTP/RTCP receiver", 1013 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
1080 codec.plname, codec.pltype, codec.plfreq, 1014 "Channel::Init() failed to register CN (%d/%d) "
1081 codec.channels, codec.rate); 1015 "correctly - 1",
1082 } 1016 codec.pltype, codec.plfreq);
1083 1017 }
1084 // Ensure that PCMU is used as default codec on the sending side 1018 }
1085 if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
1086 {
1087 SetSendCodec(codec);
1088 }
1089
1090 // Register default PT for outband 'telephone-event'
1091 if (!STR_CASE_CMP(codec.plname, "telephone-event"))
1092 {
1093 if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
1094 (audio_coding_->RegisterReceiveCodec(codec) == -1))
1095 {
1096 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1097 VoEId(_instanceId,_channelId),
1098 "Channel::Init() failed to register outband "
1099 "'telephone-event' (%d/%d) correctly",
1100 codec.pltype, codec.plfreq);
1101 }
1102 }
1103
1104 if (!STR_CASE_CMP(codec.plname, "CN"))
1105 {
1106 if ((audio_coding_->RegisterSendCodec(codec) == -1) ||
1107 (audio_coding_->RegisterReceiveCodec(codec) == -1) ||
1108 (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
1109 {
1110 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1111 VoEId(_instanceId,_channelId),
1112 "Channel::Init() failed to register CN (%d/%d) "
1113 "correctly - 1",
1114 codec.pltype, codec.plfreq);
1115 }
1116 }
1117 #ifdef WEBRTC_CODEC_RED 1019 #ifdef WEBRTC_CODEC_RED
1118 // Register RED to the receiving side of the ACM. 1020 // Register RED to the receiving side of the ACM.
1119 // We will not receive an OnInitializeDecoder() callback for RED. 1021 // We will not receive an OnInitializeDecoder() callback for RED.
1120 if (!STR_CASE_CMP(codec.plname, "RED")) 1022 if (!STR_CASE_CMP(codec.plname, "RED")) {
1121 { 1023 if (audio_coding_->RegisterReceiveCodec(codec) == -1) {
1122 if (audio_coding_->RegisterReceiveCodec(codec) == -1) 1024 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
1123 { 1025 "Channel::Init() failed to register RED (%d/%d) "
1124 WEBRTC_TRACE(kTraceWarning, kTraceVoice, 1026 "correctly",
1125 VoEId(_instanceId,_channelId), 1027 codec.pltype, codec.plfreq);
1126 "Channel::Init() failed to register RED (%d/%d) " 1028 }
1127 "correctly", 1029 }
1128 codec.pltype, codec.plfreq);
1129 }
1130 }
1131 #endif 1030 #endif
1132 } 1031 }
1133 1032
1134 if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) { 1033 if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
1135 LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed."; 1034 LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
1136 return -1; 1035 return -1;
1137 } 1036 }
1138 if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) { 1037 if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
1139 LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed."; 1038 LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
1140 return -1; 1039 return -1;
1141 } 1040 }
1142 1041
1143 return 0; 1042 return 0;
1144 } 1043 }
1145 1044
1146 int32_t 1045 int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
1147 Channel::SetEngineInformation(Statistics& engineStatistics, 1046 OutputMixer& outputMixer,
1148 OutputMixer& outputMixer, 1047 voe::TransmitMixer& transmitMixer,
1149 voe::TransmitMixer& transmitMixer, 1048 ProcessThread& moduleProcessThread,
1150 ProcessThread& moduleProcessThread, 1049 AudioDeviceModule& audioDeviceModule,
1151 AudioDeviceModule& audioDeviceModule, 1050 VoiceEngineObserver* voiceEngineObserver,
1152 VoiceEngineObserver* voiceEngineObserver, 1051 rtc::CriticalSection* callbackCritSect) {
1153 rtc::CriticalSection* callbackCritSect) 1052 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1154 { 1053 "Channel::SetEngineInformation()");
1155 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1054 _engineStatisticsPtr = &engineStatistics;
1156 "Channel::SetEngineInformation()"); 1055 _outputMixerPtr = &outputMixer;
1157 _engineStatisticsPtr = &engineStatistics; 1056 _transmitMixerPtr = &transmitMixer,
1158 _outputMixerPtr = &outputMixer; 1057 _moduleProcessThreadPtr = &moduleProcessThread;
1159 _transmitMixerPtr = &transmitMixer, 1058 _audioDeviceModulePtr = &audioDeviceModule;
1160 _moduleProcessThreadPtr = &moduleProcessThread; 1059 _voiceEngineObserverPtr = voiceEngineObserver;
1161 _audioDeviceModulePtr = &audioDeviceModule; 1060 _callbackCritSectPtr = callbackCritSect;
1162 _voiceEngineObserverPtr = voiceEngineObserver; 1061 return 0;
1163 _callbackCritSectPtr = callbackCritSect; 1062 }
1164 return 0; 1063
1165 } 1064 int32_t Channel::UpdateLocalTimeStamp() {
1166 1065 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
1167 int32_t 1066 return 0;
1168 Channel::UpdateLocalTimeStamp()
1169 {
1170
1171 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
1172 return 0;
1173 } 1067 }
1174 1068
1175 void Channel::SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) { 1069 void Channel::SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) {
1176 rtc::CritScope cs(&_callbackCritSect); 1070 rtc::CritScope cs(&_callbackCritSect);
1177 audio_sink_ = std::move(sink); 1071 audio_sink_ = std::move(sink);
1178 } 1072 }
1179 1073
1180 int32_t 1074 int32_t Channel::StartPlayout() {
1181 Channel::StartPlayout() 1075 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1182 { 1076 "Channel::StartPlayout()");
1183 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1077 if (channel_state_.Get().playing) {
1184 "Channel::StartPlayout()"); 1078 return 0;
1185 if (channel_state_.Get().playing) 1079 }
1186 { 1080
1187 return 0; 1081 if (!_externalMixing) {
1082 // Add participant as candidates for mixing.
1083 if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
1084 _engineStatisticsPtr->SetLastError(
1085 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1086 "StartPlayout() failed to add participant to mixer");
1087 return -1;
1188 } 1088 }
1089 }
1189 1090
1190 if (!_externalMixing) { 1091 channel_state_.SetPlaying(true);
1191 // Add participant as candidates for mixing. 1092 if (RegisterFilePlayingToMixer() != 0)
1192 if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) 1093 return -1;
1193 {
1194 _engineStatisticsPtr->SetLastError(
1195 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1196 "StartPlayout() failed to add participant to mixer");
1197 return -1;
1198 }
1199 }
1200 1094
1201 channel_state_.SetPlaying(true); 1095 return 0;
1202 if (RegisterFilePlayingToMixer() != 0)
1203 return -1;
1204
1205 return 0;
1206 } 1096 }
1207 1097
1208 int32_t 1098 int32_t Channel::StopPlayout() {
1209 Channel::StopPlayout() 1099 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1210 { 1100 "Channel::StopPlayout()");
1211 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1101 if (!channel_state_.Get().playing) {
1212 "Channel::StopPlayout()"); 1102 return 0;
1213 if (!channel_state_.Get().playing) 1103 }
1214 { 1104
1215 return 0; 1105 if (!_externalMixing) {
1106 // Remove participant as candidates for mixing
1107 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
1108 _engineStatisticsPtr->SetLastError(
1109 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1110 "StopPlayout() failed to remove participant from mixer");
1111 return -1;
1216 } 1112 }
1113 }
1217 1114
1218 if (!_externalMixing) { 1115 channel_state_.SetPlaying(false);
1219 // Remove participant as candidates for mixing 1116 _outputAudioLevel.Clear();
1220 if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
1221 {
1222 _engineStatisticsPtr->SetLastError(
1223 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1224 "StopPlayout() failed to remove participant from mixer");
1225 return -1;
1226 }
1227 }
1228 1117
1229 channel_state_.SetPlaying(false); 1118 return 0;
1230 _outputAudioLevel.Clear();
1231
1232 return 0;
1233 } 1119 }
1234 1120
1235 int32_t 1121 int32_t Channel::StartSend() {
1236 Channel::StartSend() 1122 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1237 { 1123 "Channel::StartSend()");
1238 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1124 // Resume the previous sequence number which was reset by StopSend().
1239 "Channel::StartSend()"); 1125 // This needs to be done before |sending| is set to true.
1240 // Resume the previous sequence number which was reset by StopSend(). 1126 if (send_sequence_number_)
1241 // This needs to be done before |sending| is set to true. 1127 SetInitSequenceNumber(send_sequence_number_);
1242 if (send_sequence_number_)
1243 SetInitSequenceNumber(send_sequence_number_);
1244 1128
1245 if (channel_state_.Get().sending) 1129 if (channel_state_.Get().sending) {
1246 { 1130 return 0;
1247 return 0; 1131 }
1248 } 1132 channel_state_.SetSending(true);
1249 channel_state_.SetSending(true);
1250 1133
1251 if (_rtpRtcpModule->SetSendingStatus(true) != 0) 1134 if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
1252 { 1135 _engineStatisticsPtr->SetLastError(
1253 _engineStatisticsPtr->SetLastError( 1136 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1254 VE_RTP_RTCP_MODULE_ERROR, kTraceError, 1137 "StartSend() RTP/RTCP failed to start sending");
1255 "StartSend() RTP/RTCP failed to start sending"); 1138 rtc::CritScope cs(&_callbackCritSect);
1256 rtc::CritScope cs(&_callbackCritSect); 1139 channel_state_.SetSending(false);
1257 channel_state_.SetSending(false); 1140 return -1;
1258 return -1; 1141 }
1259 }
1260 1142
1261 return 0; 1143 return 0;
1262 } 1144 }
1263 1145
1264 int32_t 1146 int32_t Channel::StopSend() {
1265 Channel::StopSend() 1147 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1266 { 1148 "Channel::StopSend()");
1267 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1149 if (!channel_state_.Get().sending) {
1268 "Channel::StopSend()"); 1150 return 0;
1269 if (!channel_state_.Get().sending) 1151 }
1270 { 1152 channel_state_.SetSending(false);
1271 return 0;
1272 }
1273 channel_state_.SetSending(false);
1274 1153
1275 // Store the sequence number to be able to pick up the same sequence for 1154 // Store the sequence number to be able to pick up the same sequence for
1276 // the next StartSend(). This is needed for restarting device, otherwise 1155 // the next StartSend(). This is needed for restarting device, otherwise
1277 // it might cause libSRTP to complain about packets being replayed. 1156 // it might cause libSRTP to complain about packets being replayed.
1278 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring 1157 // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1279 // CL is landed. See issue 1158 // CL is landed. See issue
1280 // https://code.google.com/p/webrtc/issues/detail?id=2111 . 1159 // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1281 send_sequence_number_ = _rtpRtcpModule->SequenceNumber(); 1160 send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1282 1161
1283 // Reset sending SSRC and sequence number and triggers direct transmission 1162 // Reset sending SSRC and sequence number and triggers direct transmission
1284 // of RTCP BYE 1163 // of RTCP BYE
1285 if (_rtpRtcpModule->SetSendingStatus(false) == -1) 1164 if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
1286 { 1165 _engineStatisticsPtr->SetLastError(
1287 _engineStatisticsPtr->SetLastError( 1166 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1288             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning, 1167         "StopSend() RTP/RTCP failed to stop sending");
1289             "StopSend() RTP/RTCP failed to stop sending"); 1168   }
1290 }
1291 1169
1292 return 0; 1170 return 0;
1293 } 1171 }
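
The comments in StopSend() above explain why the send sequence number is carried over to the next StartSend(). As an illustration only (FakeRtpSender below is a hypothetical stand-in, not the real RtpRtcp module), the save/resume idiom looks roughly like this:

// Minimal sketch of preserving the RTP sequence number across a send
// stop/start cycle so that a restarted stream is not mistaken for a replay.
// FakeRtpSender is a hypothetical stand-in, not a WebRTC class.
#include <cstdint>
#include <iostream>

class FakeRtpSender {
 public:
  void SetInitSequenceNumber(uint16_t seq) { seq_ = seq; }
  uint16_t SequenceNumber() const { return seq_; }
  void SendPacket() { ++seq_; }  // Each sent packet advances the sequence number.

 private:
  uint16_t seq_ = 0;
};

int main() {
  FakeRtpSender sender;
  sender.SetInitSequenceNumber(1000);
  for (int i = 0; i < 5; ++i)
    sender.SendPacket();

  uint16_t saved = sender.SequenceNumber();  // StopSend(): remember the position.

  FakeRtpSender restarted;                   // StartSend(): resume from the stored
  restarted.SetInitSequenceNumber(saved);    // value instead of a default one.
  std::cout << "resumed at " << restarted.SequenceNumber() << "\n";  // prints 1005
  return 0;
}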
1294 1172
1295 int32_t 1173 int32_t Channel::StartReceiving() {
1296 Channel::StartReceiving() 1174 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1297 { 1175 "Channel::StartReceiving()");
1298 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1176 if (channel_state_.Get().receiving) {
1299 "Channel::StartReceiving()");
1300 if (channel_state_.Get().receiving)
1301 {
1302 return 0;
1303 }
1304 channel_state_.SetReceiving(true);
1305 _numberOfDiscardedPackets = 0;
1306 return 0; 1177 return 0;
1178 }
1179 channel_state_.SetReceiving(true);
1180 _numberOfDiscardedPackets = 0;
1181 return 0;
1307 } 1182 }
1308 1183
1309 int32_t 1184 int32_t Channel::StopReceiving() {
1310 Channel::StopReceiving() 1185 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1311 { 1186 "Channel::StopReceiving()");
1312 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1187 if (!channel_state_.Get().receiving) {
1313 "Channel::StopReceiving()"); 1188 return 0;
1314 if (!channel_state_.Get().receiving) 1189 }
1315 {
1316 return 0;
1317 }
1318 1190
1319 channel_state_.SetReceiving(false); 1191 channel_state_.SetReceiving(false);
1320 return 0; 1192 return 0;
1321 } 1193 }
1322 1194
1323 int32_t 1195 int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
1324 Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) 1196 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1325 { 1197 "Channel::RegisterVoiceEngineObserver()");
1326 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1198 rtc::CritScope cs(&_callbackCritSect);
1327 "Channel::RegisterVoiceEngineObserver()");
1328 rtc::CritScope cs(&_callbackCritSect);
1329 1199
1330 if (_voiceEngineObserverPtr) 1200 if (_voiceEngineObserverPtr) {
1331 { 1201 _engineStatisticsPtr->SetLastError(
1332 _engineStatisticsPtr->SetLastError( 1202 VE_INVALID_OPERATION, kTraceError,
1333 VE_INVALID_OPERATION, kTraceError, 1203 "RegisterVoiceEngineObserver() observer already enabled");
1334 "RegisterVoiceEngineObserver() observer already enabled"); 1204 return -1;
1335 return -1; 1205 }
1336 } 1206 _voiceEngineObserverPtr = &observer;
1337 _voiceEngineObserverPtr = &observer; 1207 return 0;
1338 return 0;
1339 } 1208 }
1340 1209
1341 int32_t 1210 int32_t Channel::DeRegisterVoiceEngineObserver() {
1342 Channel::DeRegisterVoiceEngineObserver() 1211 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1343 { 1212 "Channel::DeRegisterVoiceEngineObserver()");
1344 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1213 rtc::CritScope cs(&_callbackCritSect);
1345 "Channel::DeRegisterVoiceEngineObserver()");
1346 rtc::CritScope cs(&_callbackCritSect);
1347 1214
1348 if (!_voiceEngineObserverPtr) 1215 if (!_voiceEngineObserverPtr) {
1349 { 1216 _engineStatisticsPtr->SetLastError(
1350 _engineStatisticsPtr->SetLastError( 1217 VE_INVALID_OPERATION, kTraceWarning,
1351 VE_INVALID_OPERATION, kTraceWarning, 1218 "DeRegisterVoiceEngineObserver() observer already disabled");
1352 "DeRegisterVoiceEngineObserver() observer already disabled");
1353 return 0;
1354 }
1355 _voiceEngineObserverPtr = NULL;
1356 return 0; 1219 return 0;
1220 }
1221 _voiceEngineObserverPtr = NULL;
1222 return 0;
1357 } 1223 }
1358 1224
1359 int32_t 1225 int32_t Channel::GetSendCodec(CodecInst& codec) {
1360 Channel::GetSendCodec(CodecInst& codec)
1361 {
1362 auto send_codec = audio_coding_->SendCodec(); 1226 auto send_codec = audio_coding_->SendCodec();
1363 if (send_codec) { 1227 if (send_codec) {
1364 codec = *send_codec; 1228 codec = *send_codec;
1365 return 0; 1229 return 0;
1366 } 1230 }
1367 return -1; 1231 return -1;
1368 } 1232 }
1369 1233
1370 int32_t 1234 int32_t Channel::GetRecCodec(CodecInst& codec) {
1371 Channel::GetRecCodec(CodecInst& codec) 1235 return (audio_coding_->ReceiveCodec(&codec));
1372 {
1373 return (audio_coding_->ReceiveCodec(&codec));
1374 } 1236 }
1375 1237
1376 int32_t 1238 int32_t Channel::SetSendCodec(const CodecInst& codec) {
1377 Channel::SetSendCodec(const CodecInst& codec) 1239 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1378 { 1240 "Channel::SetSendCodec()");
1379 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1380 "Channel::SetSendCodec()");
1381 1241
1382 if (audio_coding_->RegisterSendCodec(codec) != 0) 1242 if (audio_coding_->RegisterSendCodec(codec) != 0) {
1383 { 1243 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1384 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId), 1244 "SetSendCodec() failed to register codec to ACM");
1385 "SetSendCodec() failed to register codec to ACM"); 1245 return -1;
1386 return -1; 1246 }
1247
1248 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1249 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1250 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1251 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1252 "SetSendCodec() failed to register codec to"
1253 " RTP/RTCP module");
1254 return -1;
1387 } 1255 }
1256 }
1388 1257
1389 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) 1258 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) {
1390 { 1259 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
1391 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype); 1260 "SetSendCodec() failed to set audio packet size");
1392 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) 1261 return -1;
1393 { 1262 }
1394 WEBRTC_TRACE(
1395 kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1396 "SetSendCodec() failed to register codec to"
1397 " RTP/RTCP module");
1398 return -1;
1399 }
1400 }
1401 1263
1402 if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0) 1264 return 0;
1403 {
1404 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1405 "SetSendCodec() failed to set audio packet size");
1406 return -1;
1407 }
1408
1409 return 0;
1410 } 1265 }
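
SetSendCodec() above (like SetRecPayloadType() and SetSendCNPayloadType() further down) uses a register / de-register / retry-once idiom when the payload type is already taken. A minimal standalone sketch of that idiom, with hypothetical RegisterPayload()/DeRegisterPayload() helpers standing in for the module calls:

// Sketch of "register; on failure de-register the stale payload type and try
// exactly once more". The registry and helpers are hypothetical stand-ins.
#include <cstdint>
#include <map>

static std::map<int8_t, bool> g_registry;  // payload type -> registered

bool RegisterPayload(int8_t pltype) {
  // Fails if the payload type is already registered, mirroring the modules.
  return g_registry.emplace(pltype, true).second;
}

void DeRegisterPayload(int8_t pltype) {
  g_registry.erase(pltype);
}

// Returns 0 on success, -1 if even the retry failed.
int RegisterWithRetry(int8_t pltype) {
  if (RegisterPayload(pltype))
    return 0;
  DeRegisterPayload(pltype);                // Drop the stale registration...
  return RegisterPayload(pltype) ? 0 : -1;  // ...and try once more.
}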
1411 1266
1412 void Channel::SetBitRate(int bitrate_bps) { 1267 void Channel::SetBitRate(int bitrate_bps) {
1413 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), 1268 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1414 "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps); 1269 "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
1415 audio_coding_->SetBitRate(bitrate_bps); 1270 audio_coding_->SetBitRate(bitrate_bps);
1416 } 1271 }
1417 1272
1418 void Channel::OnIncomingFractionLoss(int fraction_lost) { 1273 void Channel::OnIncomingFractionLoss(int fraction_lost) {
1419 network_predictor_->UpdatePacketLossRate(fraction_lost); 1274 network_predictor_->UpdatePacketLossRate(fraction_lost);
1420 uint8_t average_fraction_loss = network_predictor_->GetLossRate(); 1275 uint8_t average_fraction_loss = network_predictor_->GetLossRate();
1421 1276
1422 // Normalizes rate to 0 - 100. 1277 // Normalizes rate to 0 - 100.
1423 if (audio_coding_->SetPacketLossRate( 1278 if (audio_coding_->SetPacketLossRate(100 * average_fraction_loss / 255) !=
1424 100 * average_fraction_loss / 255) != 0) { 1279 0) {
1425 assert(false); // This should not happen. 1280 assert(false); // This should not happen.
1426 } 1281 }
1427 } 1282 }
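
OnIncomingFractionLoss() above converts the 8-bit RTCP fraction-lost value (0-255) into the 0-100 percentage expected by the ACM. The same arithmetic in isolation, as a small sketch:

// RTCP carries fraction lost as an 8-bit value; the coder wants a percentage.
#include <cstdint>

int FractionLostToPercent(uint8_t fraction_lost) {
  return 100 * fraction_lost / 255;  // 0 -> 0, 128 -> 50, 255 -> 100
}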
1428 1283
1429 int32_t 1284 int32_t Channel::SetVADStatus(bool enableVAD,
1430 Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX) 1285 ACMVADMode mode,
1431 { 1286 bool disableDTX) {
1432 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1287 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1433 "Channel::SetVADStatus(mode=%d)", mode); 1288 "Channel::SetVADStatus(mode=%d)", mode);
1434 assert(!(disableDTX && enableVAD)); // disableDTX mode is deprecated. 1289 assert(!(disableDTX && enableVAD)); // disableDTX mode is deprecated.
1435 // To disable VAD, DTX must be disabled too 1290 // To disable VAD, DTX must be disabled too
1436 disableDTX = ((enableVAD == false) ? true : disableDTX); 1291 disableDTX = ((enableVAD == false) ? true : disableDTX);
1437 if (audio_coding_->SetVAD(!disableDTX, enableVAD, mode) != 0) 1292 if (audio_coding_->SetVAD(!disableDTX, enableVAD, mode) != 0) {
1438 { 1293 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1439 _engineStatisticsPtr->SetLastError( 1294 kTraceError,
1440 VE_AUDIO_CODING_MODULE_ERROR, kTraceError, 1295 "SetVADStatus() failed to set VAD");
1441 "SetVADStatus() failed to set VAD"); 1296 return -1;
1442 return -1; 1297 }
1298 return 0;
1299 }
1300
1301 int32_t Channel::GetVADStatus(bool& enabledVAD,
1302 ACMVADMode& mode,
1303 bool& disabledDTX) {
1304 if (audio_coding_->VAD(&disabledDTX, &enabledVAD, &mode) != 0) {
1305 _engineStatisticsPtr->SetLastError(
1306 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1307 "GetVADStatus() failed to get VAD status");
1308 return -1;
1309 }
1310 disabledDTX = !disabledDTX;
1311 return 0;
1312 }
1313
1314 int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
1315 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1316 "Channel::SetRecPayloadType()");
1317
1318 if (channel_state_.Get().playing) {
1319 _engineStatisticsPtr->SetLastError(
1320 VE_ALREADY_PLAYING, kTraceError,
1321 "SetRecPayloadType() unable to set PT while playing");
1322 return -1;
1323 }
1324 if (channel_state_.Get().receiving) {
1325 _engineStatisticsPtr->SetLastError(
1326 VE_ALREADY_LISTENING, kTraceError,
1327 "SetRecPayloadType() unable to set PT while listening");
1328 return -1;
1329 }
1330
1331 if (codec.pltype == -1) {
1332 // De-register the selected codec (RTP/RTCP module and ACM)
1333
1334 int8_t pltype(-1);
1335 CodecInst rxCodec = codec;
1336
1337 // Get payload type for the given codec
1338 rtp_payload_registry_->ReceivePayloadType(
1339 rxCodec.plname, rxCodec.plfreq, rxCodec.channels,
1340 (rxCodec.rate < 0) ? 0 : rxCodec.rate, &pltype);
1341 rxCodec.pltype = pltype;
1342
1343 if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
1344 _engineStatisticsPtr->SetLastError(
1345 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1346 "SetRecPayloadType() RTP/RTCP-module deregistration "
1347 "failed");
1348 return -1;
1349 }
1350 if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
1351 _engineStatisticsPtr->SetLastError(
1352 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1353 "SetRecPayloadType() ACM deregistration failed - 1");
1354 return -1;
1443 } 1355 }
1444 return 0; 1356 return 0;
1357 }
1358
1359 if (rtp_receiver_->RegisterReceivePayload(
1360 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1361 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1362 // First attempt to register failed => de-register and try again
1363 rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
1364 if (rtp_receiver_->RegisterReceivePayload(
1365 codec.plname, codec.pltype, codec.plfreq, codec.channels,
1366 (codec.rate < 0) ? 0 : codec.rate) != 0) {
1367 _engineStatisticsPtr->SetLastError(
1368 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1369 "SetRecPayloadType() RTP/RTCP-module registration failed");
1370 return -1;
1371 }
1372 }
1373 if (audio_coding_->RegisterReceiveCodec(codec) != 0) {
1374 audio_coding_->UnregisterReceiveCodec(codec.pltype);
1375 if (audio_coding_->RegisterReceiveCodec(codec) != 0) {
1376 _engineStatisticsPtr->SetLastError(
1377 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1378 "SetRecPayloadType() ACM registration failed - 1");
1379 return -1;
1380 }
1381 }
1382 return 0;
1445 } 1383 }
1446 1384
1447 int32_t 1385 int32_t Channel::GetRecPayloadType(CodecInst& codec) {
1448 Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX) 1386 int8_t payloadType(-1);
1449 { 1387 if (rtp_payload_registry_->ReceivePayloadType(
1450 if (audio_coding_->VAD(&disabledDTX, &enabledVAD, &mode) != 0) 1388 codec.plname, codec.plfreq, codec.channels,
1451 { 1389 (codec.rate < 0) ? 0 : codec.rate, &payloadType) != 0) {
1452 _engineStatisticsPtr->SetLastError( 1390 _engineStatisticsPtr->SetLastError(
1453 VE_AUDIO_CODING_MODULE_ERROR, kTraceError, 1391 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1454 "GetVADStatus() failed to get VAD status"); 1392 "GetRecPayloadType() failed to retrieve RX payload type");
1455 return -1; 1393 return -1;
1456 } 1394 }
1457 disabledDTX = !disabledDTX; 1395 codec.pltype = payloadType;
1458 return 0; 1396 return 0;
1459 } 1397 }
1460 1398
1461 int32_t 1399 int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
1462 Channel::SetRecPayloadType(const CodecInst& codec) 1400 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1463 { 1401 "Channel::SetSendCNPayloadType()");
1464 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1465 "Channel::SetRecPayloadType()");
1466 1402
1467 if (channel_state_.Get().playing) 1403 CodecInst codec;
1468 { 1404 int32_t samplingFreqHz(-1);
1469 _engineStatisticsPtr->SetLastError( 1405 const size_t kMono = 1;
1470 VE_ALREADY_PLAYING, kTraceError, 1406 if (frequency == kFreq32000Hz)
1471 "SetRecPayloadType() unable to set PT while playing"); 1407 samplingFreqHz = 32000;
1472 return -1; 1408 else if (frequency == kFreq16000Hz)
1409 samplingFreqHz = 16000;
1410
1411 if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
1412 _engineStatisticsPtr->SetLastError(
1413 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1414 "SetSendCNPayloadType() failed to retrieve default CN codec "
1415 "settings");
1416 return -1;
1417 }
1418
1419 // Modify the payload type (must be set to dynamic range)
1420 codec.pltype = type;
1421
1422 if (audio_coding_->RegisterSendCodec(codec) != 0) {
1423 _engineStatisticsPtr->SetLastError(
1424 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1425 "SetSendCNPayloadType() failed to register CN to ACM");
1426 return -1;
1427 }
1428
1429 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1430 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1431 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
1432 _engineStatisticsPtr->SetLastError(
1433 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1434 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1435 "module");
1436 return -1;
1473 } 1437 }
1474 if (channel_state_.Get().receiving) 1438 }
1475 { 1439 return 0;
1476 _engineStatisticsPtr->SetLastError(
1477 VE_ALREADY_LISTENING, kTraceError,
1478 "SetRecPayloadType() unable to set PT while listening");
1479 return -1;
1480 }
1481
1482 if (codec.pltype == -1)
1483 {
1484 // De-register the selected codec (RTP/RTCP module and ACM)
1485
1486 int8_t pltype(-1);
1487 CodecInst rxCodec = codec;
1488
1489 // Get payload type for the given codec
1490 rtp_payload_registry_->ReceivePayloadType(
1491 rxCodec.plname,
1492 rxCodec.plfreq,
1493 rxCodec.channels,
1494 (rxCodec.rate < 0) ? 0 : rxCodec.rate,
1495 &pltype);
1496 rxCodec.pltype = pltype;
1497
1498 if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0)
1499 {
1500 _engineStatisticsPtr->SetLastError(
1501 VE_RTP_RTCP_MODULE_ERROR,
1502 kTraceError,
1503 "SetRecPayloadType() RTP/RTCP-module deregistration "
1504 "failed");
1505 return -1;
1506 }
1507 if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0)
1508 {
1509 _engineStatisticsPtr->SetLastError(
1510 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1511 "SetRecPayloadType() ACM deregistration failed - 1");
1512 return -1;
1513 }
1514 return 0;
1515 }
1516
1517 if (rtp_receiver_->RegisterReceivePayload(
1518 codec.plname,
1519 codec.pltype,
1520 codec.plfreq,
1521 codec.channels,
1522 (codec.rate < 0) ? 0 : codec.rate) != 0)
1523 {
1524 // First attempt to register failed => de-register and try again
1525 rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
1526 if (rtp_receiver_->RegisterReceivePayload(
1527 codec.plname,
1528 codec.pltype,
1529 codec.plfreq,
1530 codec.channels,
1531 (codec.rate < 0) ? 0 : codec.rate) != 0)
1532 {
1533 _engineStatisticsPtr->SetLastError(
1534 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1535 "SetRecPayloadType() RTP/RTCP-module registration failed");
1536 return -1;
1537 }
1538 }
1539 if (audio_coding_->RegisterReceiveCodec(codec) != 0)
1540 {
1541 audio_coding_->UnregisterReceiveCodec(codec.pltype);
1542 if (audio_coding_->RegisterReceiveCodec(codec) != 0)
1543 {
1544 _engineStatisticsPtr->SetLastError(
1545 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1546 "SetRecPayloadType() ACM registration failed - 1");
1547 return -1;
1548 }
1549 }
1550 return 0;
1551 }
1552
1553 int32_t
1554 Channel::GetRecPayloadType(CodecInst& codec)
1555 {
1556 int8_t payloadType(-1);
1557 if (rtp_payload_registry_->ReceivePayloadType(
1558 codec.plname,
1559 codec.plfreq,
1560 codec.channels,
1561 (codec.rate < 0) ? 0 : codec.rate,
1562 &payloadType) != 0)
1563 {
1564 _engineStatisticsPtr->SetLastError(
1565 VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1566 "GetRecPayloadType() failed to retrieve RX payload type");
1567 return -1;
1568 }
1569 codec.pltype = payloadType;
1570 return 0;
1571 }
1572
1573 int32_t
1574 Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1575 {
1576 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1577 "Channel::SetSendCNPayloadType()");
1578
1579 CodecInst codec;
1580 int32_t samplingFreqHz(-1);
1581 const size_t kMono = 1;
1582 if (frequency == kFreq32000Hz)
1583 samplingFreqHz = 32000;
1584 else if (frequency == kFreq16000Hz)
1585 samplingFreqHz = 16000;
1586
1587 if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1)
1588 {
1589 _engineStatisticsPtr->SetLastError(
1590 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1591 "SetSendCNPayloadType() failed to retrieve default CN codec "
1592 "settings");
1593 return -1;
1594 }
1595
1596 // Modify the payload type (must be set to dynamic range)
1597 codec.pltype = type;
1598
1599 if (audio_coding_->RegisterSendCodec(codec) != 0)
1600 {
1601 _engineStatisticsPtr->SetLastError(
1602 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1603 "SetSendCNPayloadType() failed to register CN to ACM");
1604 return -1;
1605 }
1606
1607 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1608 {
1609 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1610 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1611 {
1612 _engineStatisticsPtr->SetLastError(
1613 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1614 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1615 "module");
1616 return -1;
1617 }
1618 }
1619 return 0;
1620 } 1440 }
1621 1441
1622 int Channel::SetOpusMaxPlaybackRate(int frequency_hz) { 1442 int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
1623 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), 1443 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1624 "Channel::SetOpusMaxPlaybackRate()"); 1444 "Channel::SetOpusMaxPlaybackRate()");
1625 1445
1626 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) { 1446 if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
1627 _engineStatisticsPtr->SetLastError( 1447 _engineStatisticsPtr->SetLastError(
1628 VE_AUDIO_CODING_MODULE_ERROR, kTraceError, 1448 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1629 "SetOpusMaxPlaybackRate() failed to set maximum playback rate"); 1449 "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
1630 return -1; 1450 return -1;
1631 } 1451 }
1632 return 0; 1452 return 0;
1633 } 1453 }
1634 1454
1635 int Channel::SetOpusDtx(bool enable_dtx) { 1455 int Channel::SetOpusDtx(bool enable_dtx) {
1636 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), 1456 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1637 "Channel::SetOpusDtx(%d)", enable_dtx); 1457 "Channel::SetOpusDtx(%d)", enable_dtx);
1638 int ret = enable_dtx ? audio_coding_->EnableOpusDtx() 1458 int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
1639 : audio_coding_->DisableOpusDtx(); 1459 : audio_coding_->DisableOpusDtx();
1640 if (ret != 0) { 1460 if (ret != 0) {
1641 _engineStatisticsPtr->SetLastError( 1461 _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1642 VE_AUDIO_CODING_MODULE_ERROR, kTraceError, "SetOpusDtx() failed"); 1462 kTraceError, "SetOpusDtx() failed");
1643 return -1; 1463 return -1;
1644 } 1464 }
1645 return 0; 1465 return 0;
1646 } 1466 }
1647 1467
1648 int32_t Channel::RegisterExternalTransport(Transport& transport) 1468 int32_t Channel::RegisterExternalTransport(Transport& transport) {
1649 { 1469 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1650 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1651 "Channel::RegisterExternalTransport()"); 1470 "Channel::RegisterExternalTransport()");
1652 1471
1653 rtc::CritScope cs(&_callbackCritSect); 1472 rtc::CritScope cs(&_callbackCritSect);
1654 1473
1655 if (_externalTransport) 1474 if (_externalTransport) {
1656 { 1475 _engineStatisticsPtr->SetLastError(
1657 _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, 1476 VE_INVALID_OPERATION, kTraceError,
1658 kTraceError, 1477 "RegisterExternalTransport() external transport already enabled");
1659 "RegisterExternalTransport() external transport already enabled"); 1478 return -1;
1660 return -1; 1479 }
1661 } 1480 _externalTransport = true;
1662 _externalTransport = true; 1481 _transportPtr = &transport;
1663 _transportPtr = &transport; 1482 return 0;
1664 return 0;
1665 } 1483 }
1666 1484
1667 int32_t 1485 int32_t Channel::DeRegisterExternalTransport() {
1668 Channel::DeRegisterExternalTransport() 1486 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1669 { 1487 "Channel::DeRegisterExternalTransport()");
1670 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1671 "Channel::DeRegisterExternalTransport()");
1672 1488
1673 rtc::CritScope cs(&_callbackCritSect); 1489 rtc::CritScope cs(&_callbackCritSect);
1674 1490
1675 if (!_transportPtr) 1491 if (!_transportPtr) {
1676 { 1492 _engineStatisticsPtr->SetLastError(
1677 _engineStatisticsPtr->SetLastError( 1493 VE_INVALID_OPERATION, kTraceWarning,
1678 VE_INVALID_OPERATION, kTraceWarning, 1494 "DeRegisterExternalTransport() external transport already "
1679 "DeRegisterExternalTransport() external transport already " 1495 "disabled");
1680 "disabled");
1681 return 0;
1682 }
1683 _externalTransport = false;
1684 _transportPtr = NULL;
1685 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1686 "DeRegisterExternalTransport() all transport is disabled");
1687 return 0; 1496 return 0;
1497 }
1498 _externalTransport = false;
1499 _transportPtr = NULL;
1500 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1501 "DeRegisterExternalTransport() all transport is disabled");
1502 return 0;
1688 } 1503 }
1689 1504
1690 int32_t Channel::ReceivedRTPPacket(const int8_t* data, size_t length, 1505 int32_t Channel::ReceivedRTPPacket(const int8_t* data,
1506 size_t length,
1691 const PacketTime& packet_time) { 1507 const PacketTime& packet_time) {
1692 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), 1508 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
1693 "Channel::ReceivedRTPPacket()"); 1509 "Channel::ReceivedRTPPacket()");
1694 1510
1695 // Store playout timestamp for the received RTP packet 1511 // Store playout timestamp for the received RTP packet
1696 UpdatePlayoutTimestamp(false); 1512 UpdatePlayoutTimestamp(false);
1697 1513
1698 const uint8_t* received_packet = reinterpret_cast<const uint8_t*>(data); 1514 const uint8_t* received_packet = reinterpret_cast<const uint8_t*>(data);
1699 RTPHeader header; 1515 RTPHeader header;
1700 if (!rtp_header_parser_->Parse(received_packet, length, &header)) { 1516 if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
1701 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId, 1517 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1702 "Incoming packet: invalid RTP header"); 1518 "Incoming packet: invalid RTP header");
1703 return -1; 1519 return -1;
1704 } 1520 }
1705 header.payload_type_frequency = 1521 header.payload_type_frequency =
1706 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType); 1522 rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
1707 if (header.payload_type_frequency < 0) 1523 if (header.payload_type_frequency < 0)
1708 return -1; 1524 return -1;
1709 bool in_order = IsPacketInOrder(header); 1525 bool in_order = IsPacketInOrder(header);
1710 rtp_receive_statistics_->IncomingPacket(header, length, 1526 rtp_receive_statistics_->IncomingPacket(
1711 IsPacketRetransmitted(header, in_order)); 1527 header, length, IsPacketRetransmitted(header, in_order));
1712 rtp_payload_registry_->SetIncomingPayloadType(header); 1528 rtp_payload_registry_->SetIncomingPayloadType(header);
1713 1529
1714 return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1; 1530 return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
1715 } 1531 }
1716 1532
1717 bool Channel::ReceivePacket(const uint8_t* packet, 1533 bool Channel::ReceivePacket(const uint8_t* packet,
1718 size_t packet_length, 1534 size_t packet_length,
1719 const RTPHeader& header, 1535 const RTPHeader& header,
1720 bool in_order) { 1536 bool in_order) {
1721 if (rtp_payload_registry_->IsRtx(header)) { 1537 if (rtp_payload_registry_->IsRtx(header)) {
(...skipping 53 matching lines...)
1775 // Retransmissions are handled separately if RTX is enabled. 1591 // Retransmissions are handled separately if RTX is enabled.
1776 if (rtp_payload_registry_->RtxEnabled()) 1592 if (rtp_payload_registry_->RtxEnabled())
1777 return false; 1593 return false;
1778 StreamStatistician* statistician = 1594 StreamStatistician* statistician =
1779 rtp_receive_statistics_->GetStatistician(header.ssrc); 1595 rtp_receive_statistics_->GetStatistician(header.ssrc);
1780 if (!statistician) 1596 if (!statistician)
1781 return false; 1597 return false;
1782 // Check if this is a retransmission. 1598 // Check if this is a retransmission.
1783 int64_t min_rtt = 0; 1599 int64_t min_rtt = 0;
1784 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL); 1600 _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
1785 return !in_order && 1601 return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
1786 statistician->IsRetransmitOfOldPacket(header, min_rtt);
1787 } 1602 }
1788 1603
1789 int32_t Channel::ReceivedRTCPPacket(const int8_t* data, size_t length) { 1604 int32_t Channel::ReceivedRTCPPacket(const int8_t* data, size_t length) {
1790 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), 1605 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
1791 "Channel::ReceivedRTCPPacket()"); 1606 "Channel::ReceivedRTCPPacket()");
1792 // Store playout timestamp for the received RTCP packet 1607 // Store playout timestamp for the received RTCP packet
1793 UpdatePlayoutTimestamp(true); 1608 UpdatePlayoutTimestamp(true);
1794 1609
1795 // Deliver RTCP packet to RTP/RTCP module for parsing 1610 // Deliver RTCP packet to RTP/RTCP module for parsing
1796 if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data, length) == -1) { 1611 if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data, length) == -1) {
1797 _engineStatisticsPtr->SetLastError( 1612 _engineStatisticsPtr->SetLastError(
1798 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning, 1613 VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
1799 "Channel::IncomingRTPPacket() RTCP packet is invalid"); 1614 "Channel::IncomingRTPPacket() RTCP packet is invalid");
1800 } 1615 }
1801 1616
1802 int64_t rtt = GetRTT(true); 1617 int64_t rtt = GetRTT(true);
1803 if (rtt == 0) { 1618 if (rtt == 0) {
1804 // Waiting for valid RTT. 1619 // Waiting for valid RTT.
1805 return 0; 1620 return 0;
1806 } 1621 }
1807 uint32_t ntp_secs = 0; 1622 uint32_t ntp_secs = 0;
1808 uint32_t ntp_frac = 0; 1623 uint32_t ntp_frac = 0;
1809 uint32_t rtp_timestamp = 0; 1624 uint32_t rtp_timestamp = 0;
1810 if (0 != _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL, 1625 if (0 !=
1811 &rtp_timestamp)) { 1626 _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
1627 &rtp_timestamp)) {
1812 // Waiting for RTCP. 1628 // Waiting for RTCP.
1813 return 0; 1629 return 0;
1814 } 1630 }
1815 1631
1816 { 1632 {
1817 rtc::CritScope lock(&ts_stats_lock_); 1633 rtc::CritScope lock(&ts_stats_lock_);
1818 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); 1634 ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
1819 } 1635 }
1820 return 0; 1636 return 0;
1821 } 1637 }
1822 1638
1823 int Channel::StartPlayingFileLocally(const char* fileName, 1639 int Channel::StartPlayingFileLocally(const char* fileName,
1824 bool loop, 1640 bool loop,
1825 FileFormats format, 1641 FileFormats format,
1826 int startPosition, 1642 int startPosition,
1827 float volumeScaling, 1643 float volumeScaling,
1828 int stopPosition, 1644 int stopPosition,
1829 const CodecInst* codecInst) 1645 const CodecInst* codecInst) {
1830 { 1646 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1831 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1647 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
1832 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d," 1648 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
1833 " format=%d, volumeScaling=%5.3f, startPosition=%d, " 1649 "stopPosition=%d)",
1834 "stopPosition=%d)", fileName, loop, format, volumeScaling, 1650 fileName, loop, format, volumeScaling, startPosition,
1835 startPosition, stopPosition); 1651 stopPosition);
1836 1652
1837 if (channel_state_.Get().output_file_playing) 1653 if (channel_state_.Get().output_file_playing) {
1838 { 1654 _engineStatisticsPtr->SetLastError(
1839 _engineStatisticsPtr->SetLastError( 1655 VE_ALREADY_PLAYING, kTraceError,
1840 VE_ALREADY_PLAYING, kTraceError, 1656 "StartPlayingFileLocally() is already playing");
1841 "StartPlayingFileLocally() is already playing"); 1657 return -1;
1842 return -1; 1658 }
1659
1660 {
1661 rtc::CritScope cs(&_fileCritSect);
1662
1663 if (_outputFilePlayerPtr) {
1664 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1665 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1666 _outputFilePlayerPtr = NULL;
1843 } 1667 }
1844 1668
1845 { 1669 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1846 rtc::CritScope cs(&_fileCritSect); 1670 _outputFilePlayerId, (const FileFormats)format);
1847 1671
1848 if (_outputFilePlayerPtr) 1672 if (_outputFilePlayerPtr == NULL) {
1849 { 1673 _engineStatisticsPtr->SetLastError(
1850 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); 1674 VE_INVALID_ARGUMENT, kTraceError,
1851 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); 1675 "StartPlayingFileLocally() filePlayer format is not correct");
1852 _outputFilePlayerPtr = NULL; 1676 return -1;
1853 }
1854
1855 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1856 _outputFilePlayerId, (const FileFormats)format);
1857
1858 if (_outputFilePlayerPtr == NULL)
1859 {
1860 _engineStatisticsPtr->SetLastError(
1861 VE_INVALID_ARGUMENT, kTraceError,
1862 "StartPlayingFileLocally() filePlayer format is not correct");
1863 return -1;
1864 }
1865
1866 const uint32_t notificationTime(0);
1867
1868 if (_outputFilePlayerPtr->StartPlayingFile(
1869 fileName,
1870 loop,
1871 startPosition,
1872 volumeScaling,
1873 notificationTime,
1874 stopPosition,
1875 (const CodecInst*)codecInst) != 0)
1876 {
1877 _engineStatisticsPtr->SetLastError(
1878 VE_BAD_FILE, kTraceError,
1879 "StartPlayingFile() failed to start file playout");
1880 _outputFilePlayerPtr->StopPlayingFile();
1881 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1882 _outputFilePlayerPtr = NULL;
1883 return -1;
1884 }
1885 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1886 channel_state_.SetOutputFilePlaying(true);
1887 } 1677 }
1888 1678
1889 if (RegisterFilePlayingToMixer() != 0) 1679 const uint32_t notificationTime(0);
1890 return -1;
1891 1680
1892 return 0; 1681 if (_outputFilePlayerPtr->StartPlayingFile(
1682 fileName, loop, startPosition, volumeScaling, notificationTime,
1683 stopPosition, (const CodecInst*)codecInst) != 0) {
1684 _engineStatisticsPtr->SetLastError(
1685 VE_BAD_FILE, kTraceError,
1686 "StartPlayingFile() failed to start file playout");
1687 _outputFilePlayerPtr->StopPlayingFile();
1688 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1689 _outputFilePlayerPtr = NULL;
1690 return -1;
1691 }
1692 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1693 channel_state_.SetOutputFilePlaying(true);
1694 }
1695
1696 if (RegisterFilePlayingToMixer() != 0)
1697 return -1;
1698
1699 return 0;
1893 } 1700 }
1894 1701
1895 int Channel::StartPlayingFileLocally(InStream* stream, 1702 int Channel::StartPlayingFileLocally(InStream* stream,
1896 FileFormats format, 1703 FileFormats format,
1897 int startPosition, 1704 int startPosition,
1898 float volumeScaling, 1705 float volumeScaling,
1899 int stopPosition, 1706 int stopPosition,
1900 const CodecInst* codecInst) 1707 const CodecInst* codecInst) {
1901 { 1708 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1902 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1709 "Channel::StartPlayingFileLocally(format=%d,"
1903 "Channel::StartPlayingFileLocally(format=%d," 1710 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1904 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)", 1711 format, volumeScaling, startPosition, stopPosition);
1905 format, volumeScaling, startPosition, stopPosition);
1906 1712
1907 if(stream == NULL) 1713 if (stream == NULL) {
1908 { 1714 _engineStatisticsPtr->SetLastError(
1909 _engineStatisticsPtr->SetLastError( 1715 VE_BAD_FILE, kTraceError,
1910 VE_BAD_FILE, kTraceError, 1716 "StartPlayingFileLocally() NULL as input stream");
1911 "StartPlayingFileLocally() NULL as input stream"); 1717 return -1;
1912 return -1; 1718 }
1719
1720 if (channel_state_.Get().output_file_playing) {
1721 _engineStatisticsPtr->SetLastError(
1722 VE_ALREADY_PLAYING, kTraceError,
1723 "StartPlayingFileLocally() is already playing");
1724 return -1;
1725 }
1726
1727 {
1728 rtc::CritScope cs(&_fileCritSect);
1729
1730 // Destroy the old instance
1731 if (_outputFilePlayerPtr) {
1732 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1733 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1734 _outputFilePlayerPtr = NULL;
1913 } 1735 }
1914 1736
1737 // Create the instance
1738 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1739 _outputFilePlayerId, (const FileFormats)format);
1915 1740
1916 if (channel_state_.Get().output_file_playing) 1741 if (_outputFilePlayerPtr == NULL) {
1917 { 1742 _engineStatisticsPtr->SetLastError(
1918 _engineStatisticsPtr->SetLastError( 1743 VE_INVALID_ARGUMENT, kTraceError,
1919         VE_ALREADY_PLAYING, kTraceError, 1744         "StartPlayingFileLocally() filePlayer format is not correct");
1920 "StartPlayingFileLocally() is already playing"); 1745 return -1;
1921 return -1;
1922 } 1746 }
1923 1747
1924 { 1748 const uint32_t notificationTime(0);
1925 rtc::CritScope cs(&_fileCritSect);
1926 1749
1927 // Destroy the old instance 1750 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1928 if (_outputFilePlayerPtr) 1751 volumeScaling, notificationTime,
1929 { 1752 stopPosition, codecInst) != 0) {
1930 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL); 1753 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1931 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); 1754 "StartPlayingFile() failed to "
1932 _outputFilePlayerPtr = NULL; 1755 "start file playout");
1933 } 1756 _outputFilePlayerPtr->StopPlayingFile();
1757 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1758 _outputFilePlayerPtr = NULL;
1759 return -1;
1760 }
1761 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1762 channel_state_.SetOutputFilePlaying(true);
1763 }
1934 1764
1935 // Create the instance 1765 if (RegisterFilePlayingToMixer() != 0)
1936 _outputFilePlayerPtr = FilePlayer::CreateFilePlayer( 1766 return -1;
1937 _outputFilePlayerId,
1938 (const FileFormats)format);
1939 1767
1940 if (_outputFilePlayerPtr == NULL) 1768 return 0;
1941 {
1942 _engineStatisticsPtr->SetLastError(
1943 VE_INVALID_ARGUMENT, kTraceError,
1944             "StartPlayingFileLocally() filePlayer format is not correct");
1945 return -1;
1946 }
1947
1948 const uint32_t notificationTime(0);
1949
1950 if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1951 volumeScaling,
1952 notificationTime,
1953 stopPosition, codecInst) != 0)
1954 {
1955 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1956 "StartPlayingFile() failed to "
1957 "start file playout");
1958 _outputFilePlayerPtr->StopPlayingFile();
1959 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1960 _outputFilePlayerPtr = NULL;
1961 return -1;
1962 }
1963 _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1964 channel_state_.SetOutputFilePlaying(true);
1965 }
1966
1967 if (RegisterFilePlayingToMixer() != 0)
1968 return -1;
1969
1970 return 0;
1971 } 1769 }
1972 1770
1973 int Channel::StopPlayingFileLocally() 1771 int Channel::StopPlayingFileLocally() {
1974 { 1772 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1975 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1773 "Channel::StopPlayingFileLocally()");
1976 "Channel::StopPlayingFileLocally()");
1977 1774
1978 if (!channel_state_.Get().output_file_playing) 1775 if (!channel_state_.Get().output_file_playing) {
1979 { 1776 return 0;
1980 return 0; 1777 }
1778
1779 {
1780 rtc::CritScope cs(&_fileCritSect);
1781
1782 if (_outputFilePlayerPtr->StopPlayingFile() != 0) {
1783 _engineStatisticsPtr->SetLastError(
1784 VE_STOP_RECORDING_FAILED, kTraceError,
1785 "StopPlayingFile() could not stop playing");
1786 return -1;
1981 } 1787 }
1788 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1789 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1790 _outputFilePlayerPtr = NULL;
1791 channel_state_.SetOutputFilePlaying(false);
1792 }
1793 // _fileCritSect cannot be taken while calling
1794   // SetAnonymousMixabilityStatus. Refer to comments in
1795 // StartPlayingFileLocally(const char* ...) for more details.
1796 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
1797 _engineStatisticsPtr->SetLastError(
1798 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1799         "StopPlayingFile() failed to stop participant from playing as "
1800 "file in the mixer");
1801 return -1;
1802 }
1982 1803
1983 { 1804 return 0;
1984 rtc::CritScope cs(&_fileCritSect);
1985
1986 if (_outputFilePlayerPtr->StopPlayingFile() != 0)
1987 {
1988 _engineStatisticsPtr->SetLastError(
1989 VE_STOP_RECORDING_FAILED, kTraceError,
1990 "StopPlayingFile() could not stop playing");
1991 return -1;
1992 }
1993 _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1994 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1995 _outputFilePlayerPtr = NULL;
1996 channel_state_.SetOutputFilePlaying(false);
1997 }
1998 // _fileCritSect cannot be taken while calling
1999     // SetAnonymousMixabilityStatus. Refer to comments in
2000 // StartPlayingFileLocally(const char* ...) for more details.
2001 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2002 {
2003 _engineStatisticsPtr->SetLastError(
2004 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2005             "StopPlayingFile() failed to stop participant from playing as "
2006 "file in the mixer");
2007 return -1;
2008 }
2009
2010 return 0;
2011 } 1805 }
2012 1806
2013 int Channel::IsPlayingFileLocally() const 1807 int Channel::IsPlayingFileLocally() const {
2014 { 1808 return channel_state_.Get().output_file_playing;
2015 return channel_state_.Get().output_file_playing;
2016 } 1809 }
2017 1810
2018 int Channel::RegisterFilePlayingToMixer() 1811 int Channel::RegisterFilePlayingToMixer() {
2019 { 1812 // Return success for not registering for file playing to mixer if:
2020 // Return success for not registering for file playing to mixer if: 1813 // 1. playing file before playout is started on that channel.
2021 // 1. playing file before playout is started on that channel. 1814 // 2. starting playout without file playing on that channel.
2022 // 2. starting playout without file playing on that channel. 1815 if (!channel_state_.Get().playing ||
2023 if (!channel_state_.Get().playing || 1816 !channel_state_.Get().output_file_playing) {
2024 !channel_state_.Get().output_file_playing) 1817 return 0;
2025 { 1818 }
2026 return 0;
2027 }
2028 1819
2029 // |_fileCritSect| cannot be taken while calling 1820 // |_fileCritSect| cannot be taken while calling
2030 // SetAnonymousMixabilityStatus() since as soon as the participant is added 1821 // SetAnonymousMixabilityStatus() since as soon as the participant is added
2031 // frames can be pulled by the mixer. Since the frames are generated from 1822 // frames can be pulled by the mixer. Since the frames are generated from
2032 // the file, _fileCritSect will be taken. This would result in a deadlock. 1823 // the file, _fileCritSect will be taken. This would result in a deadlock.
2033 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) 1824 if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
2034 { 1825 channel_state_.SetOutputFilePlaying(false);
2035 channel_state_.SetOutputFilePlaying(false); 1826 rtc::CritScope cs(&_fileCritSect);
2036 rtc::CritScope cs(&_fileCritSect); 1827 _engineStatisticsPtr->SetLastError(
2037 _engineStatisticsPtr->SetLastError( 1828 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2038 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError, 1829 "StartPlayingFile() failed to add participant as file to mixer");
2039 "StartPlayingFile() failed to add participant as file to mixer"); 1830 _outputFilePlayerPtr->StopPlayingFile();
2040 _outputFilePlayerPtr->StopPlayingFile(); 1831 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2041 FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr); 1832 _outputFilePlayerPtr = NULL;
2042 _outputFilePlayerPtr = NULL; 1833 return -1;
2043 return -1; 1834 }
2044 }
2045 1835
2046 return 0; 1836 return 0;
2047 } 1837 }
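
The comment in RegisterFilePlayingToMixer() above describes why |_fileCritSect| must not be held while calling SetAnonymousMixabilityStatus(): the mixer may immediately pull frames on another thread, and generating them takes the same lock. A minimal sketch of that lock-scoping rule, using plain std::mutex and hypothetical names rather than the VoiceEngine types:

// Keep the file lock in a narrow scope; call across the module boundary only
// after it is released, since the callee may re-enter and need the same lock.
#include <mutex>

std::mutex file_mutex;              // plays the role of _fileCritSect
bool output_file_playing = false;   // guarded by file_mutex

void CallIntoMixer() {
  // May synchronously cause another thread to pull audio frames, which would
  // need file_mutex; therefore file_mutex must not be held while this runs.
}

void RegisterToMixerSafely() {
  {
    std::lock_guard<std::mutex> lock(file_mutex);
    output_file_playing = true;     // state change done under the lock
  }                                 // lock released here
  CallIntoMixer();                  // cross-module call made without the lock
}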
2048 1838
2049 int Channel::StartPlayingFileAsMicrophone(const char* fileName, 1839 int Channel::StartPlayingFileAsMicrophone(const char* fileName,
2050 bool loop, 1840 bool loop,
2051 FileFormats format, 1841 FileFormats format,
2052 int startPosition, 1842 int startPosition,
2053 float volumeScaling, 1843 float volumeScaling,
2054 int stopPosition, 1844 int stopPosition,
2055 const CodecInst* codecInst) 1845 const CodecInst* codecInst) {
2056 { 1846 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2057 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1847 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2058 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, " 1848 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2059 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, " 1849 "stopPosition=%d)",
2060 "stopPosition=%d)", fileName, loop, format, volumeScaling, 1850 fileName, loop, format, volumeScaling, startPosition,
2061 startPosition, stopPosition); 1851 stopPosition);
2062 1852
2063 rtc::CritScope cs(&_fileCritSect); 1853 rtc::CritScope cs(&_fileCritSect);
2064 1854
2065 if (channel_state_.Get().input_file_playing) 1855 if (channel_state_.Get().input_file_playing) {
2066 { 1856 _engineStatisticsPtr->SetLastError(
2067 _engineStatisticsPtr->SetLastError( 1857 VE_ALREADY_PLAYING, kTraceWarning,
2068 VE_ALREADY_PLAYING, kTraceWarning, 1858 "StartPlayingFileAsMicrophone() filePlayer is playing");
2069 "StartPlayingFileAsMicrophone() filePlayer is playing"); 1859 return 0;
2070 return 0; 1860 }
2071 }
2072 1861
2073 // Destroy the old instance 1862 // Destroy the old instance
2074 if (_inputFilePlayerPtr) 1863 if (_inputFilePlayerPtr) {
2075 { 1864 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2076 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL); 1865 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2077 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr); 1866 _inputFilePlayerPtr = NULL;
2078 _inputFilePlayerPtr = NULL; 1867 }
2079 }
2080 1868
2081 // Create the instance 1869 // Create the instance
2082 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer( 1870 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
2083 _inputFilePlayerId, (const FileFormats)format); 1871 (const FileFormats)format);
2084 1872
2085 if (_inputFilePlayerPtr == NULL) 1873 if (_inputFilePlayerPtr == NULL) {
2086 { 1874 _engineStatisticsPtr->SetLastError(
2087 _engineStatisticsPtr->SetLastError( 1875 VE_INVALID_ARGUMENT, kTraceError,
2088             VE_INVALID_ARGUMENT, kTraceError, 1876         "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2089             "StartPlayingFileAsMicrophone() filePlayer format is not correct"); 1877     return -1;
2090 return -1; 1878 }
2091 }
2092 1879
2093 const uint32_t notificationTime(0); 1880 const uint32_t notificationTime(0);
2094 1881
2095 if (_inputFilePlayerPtr->StartPlayingFile( 1882 if (_inputFilePlayerPtr->StartPlayingFile(
2096 fileName, 1883 fileName, loop, startPosition, volumeScaling, notificationTime,
2097 loop, 1884 stopPosition, (const CodecInst*)codecInst) != 0) {
2098 startPosition, 1885 _engineStatisticsPtr->SetLastError(
2099 volumeScaling, 1886 VE_BAD_FILE, kTraceError,
2100 notificationTime, 1887 "StartPlayingFile() failed to start file playout");
2101 stopPosition, 1888 _inputFilePlayerPtr->StopPlayingFile();
2102 (const CodecInst*)codecInst) != 0) 1889 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2103 { 1890 _inputFilePlayerPtr = NULL;
2104 _engineStatisticsPtr->SetLastError( 1891 return -1;
2105 VE_BAD_FILE, kTraceError, 1892 }
2106 "StartPlayingFile() failed to start file playout"); 1893 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2107 _inputFilePlayerPtr->StopPlayingFile(); 1894 channel_state_.SetInputFilePlaying(true);
2108 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2109 _inputFilePlayerPtr = NULL;
2110 return -1;
2111 }
2112 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2113 channel_state_.SetInputFilePlaying(true);
2114 1895
2115 return 0; 1896 return 0;
2116 } 1897 }
2117 1898
2118 int Channel::StartPlayingFileAsMicrophone(InStream* stream, 1899 int Channel::StartPlayingFileAsMicrophone(InStream* stream,
2119 FileFormats format, 1900 FileFormats format,
2120 int startPosition, 1901 int startPosition,
2121 float volumeScaling, 1902 float volumeScaling,
2122 int stopPosition, 1903 int stopPosition,
2123 const CodecInst* codecInst) 1904 const CodecInst* codecInst) {
2124 { 1905 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2125 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1906 "Channel::StartPlayingFileAsMicrophone(format=%d, "
2126 "Channel::StartPlayingFileAsMicrophone(format=%d, " 1907 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2127 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)", 1908 format, volumeScaling, startPosition, stopPosition);
2128 format, volumeScaling, startPosition, stopPosition); 1909
2129 1910 if (stream == NULL) {
2130 if(stream == NULL) 1911 _engineStatisticsPtr->SetLastError(
2131 { 1912 VE_BAD_FILE, kTraceError,
2132 _engineStatisticsPtr->SetLastError( 1913 "StartPlayingFileAsMicrophone NULL as input stream");
2133 VE_BAD_FILE, kTraceError, 1914 return -1;
2134 "StartPlayingFileAsMicrophone NULL as input stream"); 1915 }
2135 return -1; 1916
2136 } 1917 rtc::CritScope cs(&_fileCritSect);
2137 1918
2138 rtc::CritScope cs(&_fileCritSect); 1919 if (channel_state_.Get().input_file_playing) {
2139 1920 _engineStatisticsPtr->SetLastError(
2140 if (channel_state_.Get().input_file_playing) 1921 VE_ALREADY_PLAYING, kTraceWarning,
2141 { 1922 "StartPlayingFileAsMicrophone() is playing");
2142 _engineStatisticsPtr->SetLastError(
2143 VE_ALREADY_PLAYING, kTraceWarning,
2144 "StartPlayingFileAsMicrophone() is playing");
2145 return 0;
2146 }
2147
2148 // Destroy the old instance
2149 if (_inputFilePlayerPtr)
2150 {
2151 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2152 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2153 _inputFilePlayerPtr = NULL;
2154 }
2155
2156 // Create the instance
2157 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2158 _inputFilePlayerId, (const FileFormats)format);
2159
2160 if (_inputFilePlayerPtr == NULL)
2161 {
2162 _engineStatisticsPtr->SetLastError(
2163 VE_INVALID_ARGUMENT, kTraceError,
2164 "StartPlayingInputFile() filePlayer format isnot correct");
2165 return -1;
2166 }
2167
2168 const uint32_t notificationTime(0);
2169
2170 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2171 volumeScaling, notificationTime,
2172 stopPosition, codecInst) != 0)
2173 {
2174 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2175 "StartPlayingFile() failed to start "
2176 "file playout");
2177 _inputFilePlayerPtr->StopPlayingFile();
2178 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2179 _inputFilePlayerPtr = NULL;
2180 return -1;
2181 }
2182
2183 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2184 channel_state_.SetInputFilePlaying(true);
2185
2186 return 0; 1923 return 0;
2187 } 1924 }
2188 1925
2189 int Channel::StopPlayingFileAsMicrophone() 1926 // Destroy the old instance
2190 { 1927 if (_inputFilePlayerPtr) {
2191 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2192 "Channel::StopPlayingFileAsMicrophone()");
2193
2194 rtc::CritScope cs(&_fileCritSect);
2195
2196 if (!channel_state_.Get().input_file_playing)
2197 {
2198 return 0;
2199 }
2200
2201 if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2202 {
2203 _engineStatisticsPtr->SetLastError(
2204 VE_STOP_RECORDING_FAILED, kTraceError,
2205 "StopPlayingFile() could not stop playing");
2206 return -1;
2207 }
2208 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL); 1928 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2209 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr); 1929 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2210 _inputFilePlayerPtr = NULL; 1930 _inputFilePlayerPtr = NULL;
2211 channel_state_.SetInputFilePlaying(false); 1931 }
2212 1932
1933 // Create the instance
1934 _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
1935 (const FileFormats)format);
1936
1937 if (_inputFilePlayerPtr == NULL) {
1938 _engineStatisticsPtr->SetLastError(
1939 VE_INVALID_ARGUMENT, kTraceError,
1940 "StartPlayingInputFile() filePlayer format isnot correct");
1941 return -1;
1942 }
1943
1944 const uint32_t notificationTime(0);
1945
1946 if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1947 volumeScaling, notificationTime,
1948 stopPosition, codecInst) != 0) {
1949 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1950 "StartPlayingFile() failed to start "
1951 "file playout");
1952 _inputFilePlayerPtr->StopPlayingFile();
1953 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1954 _inputFilePlayerPtr = NULL;
1955 return -1;
1956 }
1957
1958 _inputFilePlayerPtr->RegisterModuleFileCallback(this);
1959 channel_state_.SetInputFilePlaying(true);
1960
1961 return 0;
1962 }
1963
1964 int Channel::StopPlayingFileAsMicrophone() {
1965 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1966 "Channel::StopPlayingFileAsMicrophone()");
1967
1968 rtc::CritScope cs(&_fileCritSect);
1969
1970 if (!channel_state_.Get().input_file_playing) {
2213 return 0; 1971 return 0;
2214 } 1972 }
2215 1973
2216 int Channel::IsPlayingFileAsMicrophone() const 1974 if (_inputFilePlayerPtr->StopPlayingFile() != 0) {
2217 { 1975 _engineStatisticsPtr->SetLastError(
2218 return channel_state_.Get().input_file_playing; 1976 VE_STOP_RECORDING_FAILED, kTraceError,
1977 "StopPlayingFile() could not stop playing");
1978 return -1;
1979 }
1980 _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1981 FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
1982 _inputFilePlayerPtr = NULL;
1983 channel_state_.SetInputFilePlaying(false);
1984
1985 return 0;
1986 }
1987
1988 int Channel::IsPlayingFileAsMicrophone() const {
1989 return channel_state_.Get().input_file_playing;
2219 } 1990 }
2220 1991
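Both StartPlayingFileAsMicrophone() overloads above follow the same guarded lifecycle: take _fileCritSect, tear down any existing player, create a fresh one, and on a failed start undo everything before returning -1, so the channel never keeps a half-initialized player around. The stand-alone sketch below only models that ordering with a hypothetical MockPlayer type; it is not the WebRTC FilePlayer API.

#include <memory>
#include <mutex>
#include <string>

// Hypothetical stand-in for a file player; only the call ordering matters here.
struct MockPlayer {
  bool Start(const std::string& file) { return !file.empty(); }
  void Stop() {}
};

class FileAsMicSource {
 public:
  // Mirrors the start sequence: replace any old player, start, or roll back.
  int Start(const std::string& file) {
    std::lock_guard<std::mutex> lock(file_lock_);
    if (playing_) return 0;           // already playing is treated as success
    player_.reset(new MockPlayer());  // destroys any previous instance
    if (!player_->Start(file)) {      // start failed: roll back completely
      player_->Stop();
      player_.reset();
      return -1;
    }
    playing_ = true;
    return 0;
  }

  int Stop() {
    std::lock_guard<std::mutex> lock(file_lock_);
    if (!playing_) return 0;
    player_->Stop();
    player_.reset();
    playing_ = false;
    return 0;
  }

 private:
  std::mutex file_lock_;
  std::unique_ptr<MockPlayer> player_;
  bool playing_ = false;
};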
2221 int Channel::StartRecordingPlayout(const char* fileName, 1992 int Channel::StartRecordingPlayout(const char* fileName,
2222 const CodecInst* codecInst) 1993 const CodecInst* codecInst) {
2223 { 1994 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2224 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 1995 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2225 "Channel::StartRecordingPlayout(fileName=%s)", fileName); 1996
2226 1997 if (_outputFileRecording) {
2227 if (_outputFileRecording) 1998 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
2228 { 1999 "StartRecordingPlayout() is already recording");
2229 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2230 "StartRecordingPlayout() is already recording");
2231 return 0;
2232 }
2233
2234 FileFormats format;
2235 const uint32_t notificationTime(0); // Not supported in VoE
2236 CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2237
2238 if ((codecInst != NULL) &&
2239 ((codecInst->channels < 1) || (codecInst->channels > 2)))
2240 {
2241 _engineStatisticsPtr->SetLastError(
2242 VE_BAD_ARGUMENT, kTraceError,
2243 "StartRecordingPlayout() invalid compression");
2244 return(-1);
2245 }
2246 if(codecInst == NULL)
2247 {
2248 format = kFileFormatPcm16kHzFile;
2249 codecInst=&dummyCodec;
2250 }
2251 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2252 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2253 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2254 {
2255 format = kFileFormatWavFile;
2256 }
2257 else
2258 {
2259 format = kFileFormatCompressedFile;
2260 }
2261
2262 rtc::CritScope cs(&_fileCritSect);
2263
2264 // Destroy the old instance
2265 if (_outputFileRecorderPtr)
2266 {
2267 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2268 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2269 _outputFileRecorderPtr = NULL;
2270 }
2271
2272 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2273 _outputFileRecorderId, (const FileFormats)format);
2274 if (_outputFileRecorderPtr == NULL)
2275 {
2276 _engineStatisticsPtr->SetLastError(
2277 VE_INVALID_ARGUMENT, kTraceError,
2278 "StartRecordingPlayout() fileRecorder format isnot correct");
2279 return -1;
2280 }
2281
2282 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2283 fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2284 {
2285 _engineStatisticsPtr->SetLastError(
2286 VE_BAD_FILE, kTraceError,
2287 "StartRecordingAudioFile() failed to start file recording");
2288 _outputFileRecorderPtr->StopRecording();
2289 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2290 _outputFileRecorderPtr = NULL;
2291 return -1;
2292 }
2293 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2294 _outputFileRecording = true;
2295
2296 return 0; 2000 return 0;
2297 } 2001 }
2298 2002
2299 int Channel::StartRecordingPlayout(OutStream* stream, 2003 FileFormats format;
2300 const CodecInst* codecInst) 2004 const uint32_t notificationTime(0); // Not supported in VoE
2301 { 2005 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2302 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 2006
2303 "Channel::StartRecordingPlayout()"); 2007 if ((codecInst != NULL) &&
2304 2008 ((codecInst->channels < 1) || (codecInst->channels > 2))) {
2305 if (_outputFileRecording) 2009 _engineStatisticsPtr->SetLastError(
2306 { 2010 VE_BAD_ARGUMENT, kTraceError,
2307 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1), 2011 "StartRecordingPlayout() invalid compression");
2308 "StartRecordingPlayout() is already recording"); 2012 return (-1);
2309 return 0; 2013 }
2310 } 2014 if (codecInst == NULL) {
2311 2015 format = kFileFormatPcm16kHzFile;
2312 FileFormats format; 2016 codecInst = &dummyCodec;
2313 const uint32_t notificationTime(0); // Not supported in VoE 2017 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2314 CodecInst dummyCodec={100,"L16",16000,320,1,320000}; 2018 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2315 2019 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2316 if (codecInst != NULL && codecInst->channels != 1) 2020 format = kFileFormatWavFile;
2317 { 2021 } else {
2318 _engineStatisticsPtr->SetLastError( 2022 format = kFileFormatCompressedFile;
2319 VE_BAD_ARGUMENT, kTraceError, 2023 }
2320 "StartRecordingPlayout() invalid compression"); 2024
2321 return(-1); 2025 rtc::CritScope cs(&_fileCritSect);
2322 } 2026
2323 if(codecInst == NULL) 2027 // Destroy the old instance
2324 { 2028 if (_outputFileRecorderPtr) {
2325 format = kFileFormatPcm16kHzFile;
2326 codecInst=&dummyCodec;
2327 }
2328 else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2329 (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2330 (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2331 {
2332 format = kFileFormatWavFile;
2333 }
2334 else
2335 {
2336 format = kFileFormatCompressedFile;
2337 }
2338
2339 rtc::CritScope cs(&_fileCritSect);
2340
2341 // Destroy the old instance
2342 if (_outputFileRecorderPtr)
2343 {
2344 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2345 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2346 _outputFileRecorderPtr = NULL;
2347 }
2348
2349 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2350 _outputFileRecorderId, (const FileFormats)format);
2351 if (_outputFileRecorderPtr == NULL)
2352 {
2353 _engineStatisticsPtr->SetLastError(
2354 VE_INVALID_ARGUMENT, kTraceError,
2355 "StartRecordingPlayout() fileRecorder format isnot correct");
2356 return -1;
2357 }
2358
2359 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2360 notificationTime) != 0)
2361 {
2362 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2363 "StartRecordingPlayout() failed to "
2364 "start file recording");
2365 _outputFileRecorderPtr->StopRecording();
2366 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2367 _outputFileRecorderPtr = NULL;
2368 return -1;
2369 }
2370
2371 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2372 _outputFileRecording = true;
2373
2374 return 0;
2375 }
2376
2377 int Channel::StopRecordingPlayout()
2378 {
2379 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2380 "Channel::StopRecordingPlayout()");
2381
2382 if (!_outputFileRecording)
2383 {
2384 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2385 "StopRecordingPlayout() isnot recording");
2386 return -1;
2387 }
2388
2389
2390 rtc::CritScope cs(&_fileCritSect);
2391
2392 if (_outputFileRecorderPtr->StopRecording() != 0)
2393 {
2394 _engineStatisticsPtr->SetLastError(
2395 VE_STOP_RECORDING_FAILED, kTraceError,
2396 "StopRecording() could not stop recording");
2397 return(-1);
2398 }
2399 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL); 2029 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2400 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr); 2030 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2401 _outputFileRecorderPtr = NULL; 2031 _outputFileRecorderPtr = NULL;
2402 _outputFileRecording = false; 2032 }
2403 2033
2034 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2035 _outputFileRecorderId, (const FileFormats)format);
2036 if (_outputFileRecorderPtr == NULL) {
2037 _engineStatisticsPtr->SetLastError(
2038 VE_INVALID_ARGUMENT, kTraceError,
2039 "StartRecordingPlayout() fileRecorder format isnot correct");
2040 return -1;
2041 }
2042
2043 if (_outputFileRecorderPtr->StartRecordingAudioFile(
2044 fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
2045 _engineStatisticsPtr->SetLastError(
2046 VE_BAD_FILE, kTraceError,
2047 "StartRecordingAudioFile() failed to start file recording");
2048 _outputFileRecorderPtr->StopRecording();
2049 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2050 _outputFileRecorderPtr = NULL;
2051 return -1;
2052 }
2053 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2054 _outputFileRecording = true;
2055
2056 return 0;
2057 }
2058
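StartRecordingPlayout() above picks the output container from the codec name: no codec means 16 kHz PCM, L16/PCMU/PCMA are written as WAV, and everything else goes to the compressed-file path. A small portable sketch of that decision (my own enum and case-insensitive compare, not the STR_CASE_CMP macro or WebRTC's FileFormats) looks like this:

#include <cctype>
#include <string>

enum class RecordingFormat { kPcm16kHz, kWav, kCompressed };

// ASCII case-insensitive comparison, a stand-in for STR_CASE_CMP.
static bool EqualsIgnoreCase(const std::string& a, const char* b) {
  const std::string rhs(b);
  if (a.size() != rhs.size()) return false;
  for (size_t i = 0; i < a.size(); ++i) {
    if (std::tolower(static_cast<unsigned char>(a[i])) !=
        std::tolower(static_cast<unsigned char>(rhs[i]))) return false;
  }
  return true;
}

// No codec name -> default PCM; linear/G.711 names -> WAV; everything else -> compressed.
RecordingFormat PickFormat(const char* codec_name) {
  if (codec_name == nullptr) return RecordingFormat::kPcm16kHz;
  const std::string name(codec_name);
  if (EqualsIgnoreCase(name, "L16") || EqualsIgnoreCase(name, "PCMU") ||
      EqualsIgnoreCase(name, "PCMA")) {
    return RecordingFormat::kWav;
  }
  return RecordingFormat::kCompressed;
}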
2059 int Channel::StartRecordingPlayout(OutStream* stream,
2060 const CodecInst* codecInst) {
2061 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2062 "Channel::StartRecordingPlayout()");
2063
2064 if (_outputFileRecording) {
2065 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
2066 "StartRecordingPlayout() is already recording");
2404 return 0; 2067 return 0;
2405 } 2068 }
2406 2069
2407 void 2070 FileFormats format;
2408 Channel::SetMixWithMicStatus(bool mix) 2071 const uint32_t notificationTime(0); // Not supported in VoE
2409 { 2072 CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2410 rtc::CritScope cs(&_fileCritSect); 2073
2411 _mixFileWithMicrophone=mix; 2074 if (codecInst != NULL && codecInst->channels != 1) {
2412 } 2075 _engineStatisticsPtr->SetLastError(
2413 2076 VE_BAD_ARGUMENT, kTraceError,
2414 int 2077 "StartRecordingPlayout() invalid compression");
2415 Channel::GetSpeechOutputLevel(uint32_t& level) const 2078 return (-1);
2416 { 2079 }
2417 int8_t currentLevel = _outputAudioLevel.Level(); 2080 if (codecInst == NULL) {
2418 level = static_cast<int32_t> (currentLevel); 2081 format = kFileFormatPcm16kHzFile;
2419 return 0; 2082 codecInst = &dummyCodec;
2420 } 2083 } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
2421 2084 (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
2422 int 2085 (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
2423 Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const 2086 format = kFileFormatWavFile;
2424 { 2087 } else {
2425 int16_t currentLevel = _outputAudioLevel.LevelFullRange(); 2088 format = kFileFormatCompressedFile;
2426 level = static_cast<int32_t> (currentLevel); 2089 }
2427 return 0; 2090
2428 } 2091 rtc::CritScope cs(&_fileCritSect);
2429 2092
2430 int 2093 // Destroy the old instance
2431 Channel::SetMute(bool enable) 2094 if (_outputFileRecorderPtr) {
2432 { 2095 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2433 rtc::CritScope cs(&volume_settings_critsect_); 2096 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2434 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 2097 _outputFileRecorderPtr = NULL;
2098 }
2099
2100 _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2101 _outputFileRecorderId, (const FileFormats)format);
2102 if (_outputFileRecorderPtr == NULL) {
2103 _engineStatisticsPtr->SetLastError(
2104 VE_INVALID_ARGUMENT, kTraceError,
2105 "StartRecordingPlayout() fileRecorder format isnot correct");
2106 return -1;
2107 }
2108
2109 if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2110 notificationTime) != 0) {
2111 _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2112 "StartRecordingPlayout() failed to "
2113 "start file recording");
2114 _outputFileRecorderPtr->StopRecording();
2115 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2116 _outputFileRecorderPtr = NULL;
2117 return -1;
2118 }
2119
2120 _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2121 _outputFileRecording = true;
2122
2123 return 0;
2124 }
2125
2126 int Channel::StopRecordingPlayout() {
2127 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
2128 "Channel::StopRecordingPlayout()");
2129
2130 if (!_outputFileRecording) {
2131 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
2132 "StopRecordingPlayout() isnot recording");
2133 return -1;
2134 }
2135
2136 rtc::CritScope cs(&_fileCritSect);
2137
2138 if (_outputFileRecorderPtr->StopRecording() != 0) {
2139 _engineStatisticsPtr->SetLastError(
2140 VE_STOP_RECORDING_FAILED, kTraceError,
2141 "StopRecording() could not stop recording");
2142 return (-1);
2143 }
2144 _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2145 FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2146 _outputFileRecorderPtr = NULL;
2147 _outputFileRecording = false;
2148
2149 return 0;
2150 }
2151
2152 void Channel::SetMixWithMicStatus(bool mix) {
2153 rtc::CritScope cs(&_fileCritSect);
2154 _mixFileWithMicrophone = mix;
2155 }
2156
2157 int Channel::GetSpeechOutputLevel(uint32_t& level) const {
2158 int8_t currentLevel = _outputAudioLevel.Level();
2159 level = static_cast<int32_t>(currentLevel);
2160 return 0;
2161 }
2162
2163 int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
2164 int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2165 level = static_cast<int32_t>(currentLevel);
2166 return 0;
2167 }
2168
2169 int Channel::SetMute(bool enable) {
2170 rtc::CritScope cs(&volume_settings_critsect_);
2171 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2435 "Channel::SetMute(enable=%d)", enable); 2172 "Channel::SetMute(enable=%d)", enable);
2436 _mute = enable; 2173 _mute = enable;
2437 return 0; 2174 return 0;
2438 } 2175 }
2439 2176
2440 bool 2177 bool Channel::Mute() const {
2441 Channel::Mute() const 2178 rtc::CritScope cs(&volume_settings_critsect_);
2442 { 2179 return _mute;
2443 rtc::CritScope cs(&volume_settings_critsect_); 2180 }
2444 return _mute; 2181
2445 } 2182 int Channel::SetOutputVolumePan(float left, float right) {
2446 2183 rtc::CritScope cs(&volume_settings_critsect_);
2447 int 2184 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2448 Channel::SetOutputVolumePan(float left, float right)
2449 {
2450 rtc::CritScope cs(&volume_settings_critsect_);
2451 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2452 "Channel::SetOutputVolumePan()"); 2185 "Channel::SetOutputVolumePan()");
2453 _panLeft = left; 2186 _panLeft = left;
2454 _panRight = right; 2187 _panRight = right;
2455 return 0; 2188 return 0;
2456 } 2189 }
2457 2190
2458 int 2191 int Channel::GetOutputVolumePan(float& left, float& right) const {
2459 Channel::GetOutputVolumePan(float& left, float& right) const 2192 rtc::CritScope cs(&volume_settings_critsect_);
2460 { 2193 left = _panLeft;
2461 rtc::CritScope cs(&volume_settings_critsect_); 2194 right = _panRight;
2462 left = _panLeft; 2195 return 0;
2463 right = _panRight; 2196 }
2464 return 0; 2197
2465 } 2198 int Channel::SetChannelOutputVolumeScaling(float scaling) {
2466 2199 rtc::CritScope cs(&volume_settings_critsect_);
2467 int 2200 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2468 Channel::SetChannelOutputVolumeScaling(float scaling)
2469 {
2470 rtc::CritScope cs(&volume_settings_critsect_);
2471 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2472 "Channel::SetChannelOutputVolumeScaling()"); 2201 "Channel::SetChannelOutputVolumeScaling()");
2473 _outputGain = scaling; 2202 _outputGain = scaling;
2474 return 0; 2203 return 0;
2475 } 2204 }
2476 2205
2477 int 2206 int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
2478 Channel::GetChannelOutputVolumeScaling(float& scaling) const 2207 rtc::CritScope cs(&volume_settings_critsect_);
2479 { 2208 scaling = _outputGain;
2480 rtc::CritScope cs(&volume_settings_critsect_); 2209 return 0;
2481 scaling = _outputGain;
2482 return 0;
2483 } 2210 }
2484 2211
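SetOutputVolumePan() and SetChannelOutputVolumeScaling() above only store _panLeft, _panRight and _outputGain under volume_settings_critsect_; the values are consumed later in the playout path. Purely as an illustration of what such settings usually mean (an assumption about typical mixing, not the Channel code itself), applying a scalar gain plus a left/right pan to interleaved 16-bit stereo could look like:

#include <algorithm>
#include <cstddef>
#include <cstdint>

// samples: interleaved L/R int16 audio with samples_per_channel frames.
// gain scales both channels, pan_left/pan_right scale each channel (0.0..1.0).
void ApplyGainAndPan(int16_t* samples, size_t samples_per_channel,
                     float gain, float pan_left, float pan_right) {
  for (size_t i = 0; i < samples_per_channel; ++i) {
    const float left = samples[2 * i] * gain * pan_left;
    const float right = samples[2 * i + 1] * gain * pan_right;
    // Clamp to the int16 range to avoid wrap-around on overflow.
    samples[2 * i] =
        static_cast<int16_t>(std::max(-32768.0f, std::min(32767.0f, left)));
    samples[2 * i + 1] =
        static_cast<int16_t>(std::max(-32768.0f, std::min(32767.0f, right)));
  }
}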
2485 int Channel::SendTelephoneEventOutband(unsigned char eventCode, 2212 int Channel::SendTelephoneEventOutband(unsigned char eventCode,
2486 int lengthMs, int attenuationDb, 2213 int lengthMs,
2487 bool playDtmfEvent) 2214 int attenuationDb,
2488 { 2215 bool playDtmfEvent) {
2489 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), 2216 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2490 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)", 2217 "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
2491 playDtmfEvent); 2218 playDtmfEvent);
2492 if (!Sending()) { 2219 if (!Sending()) {
2220 return -1;
2221 }
2222
2223 _playOutbandDtmfEvent = playDtmfEvent;
2224
2225 if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
2226 attenuationDb) != 0) {
2227 _engineStatisticsPtr->SetLastError(
2228 VE_SEND_DTMF_FAILED, kTraceWarning,
2229 "SendTelephoneEventOutband() failed to send event");
2230 return -1;
2231 }
2232 return 0;
2233 }
2234
2235 int Channel::SendTelephoneEventInband(unsigned char eventCode,
2236 int lengthMs,
2237 int attenuationDb,
2238 bool playDtmfEvent) {
2239 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2240 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
2241 playDtmfEvent);
2242
2243 _playInbandDtmfEvent = playDtmfEvent;
2244 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
2245
2246 return 0;
2247 }
2248
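SetSendTelephoneEventPayloadType(), just below on the new side, rejects values above 127 because an RTP payload type is a 7-bit field; the telephone-event payload it registers uses an 8 kHz clock as in RFC 4733, and dynamic payload types conventionally come from the 96..127 range. A trivial stand-alone check of the same constraint (the constants here are illustrative, not enforced by the module):

// RTP payload type is 7 bits: 0..127. Dynamic assignments usually use 96..127.
constexpr int kMaxPayloadType = 127;
constexpr int kMinDynamicType = 96;  // convention only, not checked below

bool IsValidTelephoneEventType(int type) {
  return type >= 0 && type <= kMaxPayloadType;  // mirrors the (type > 127) guard
}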
2249 int Channel::SetSendTelephoneEventPayloadType(unsigned char type) {
2250 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2251 "Channel::SetSendTelephoneEventPayloadType()");
2252 if (type > 127) {
2253 _engineStatisticsPtr->SetLastError(
2254 VE_INVALID_ARGUMENT, kTraceError,
2255 "SetSendTelephoneEventPayloadType() invalid type");
2256 return -1;
2257 }
2258 CodecInst codec = {};
2259 codec.plfreq = 8000;
2260 codec.pltype = type;
2261 memcpy(codec.plname, "telephone-event", 16);
2262 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2263 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
2264 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2265 _engineStatisticsPtr->SetLastError(
2266 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2267 "SetSendTelephoneEventPayloadType() failed to register send"
2268 "payload type");
2493 return -1; 2269 return -1;
2494 } 2270 }
2495 2271 }
2496 _playOutbandDtmfEvent = playDtmfEvent; 2272 _sendTelephoneEventPayloadType = type;
2497 2273 return 0;
2498 if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs, 2274 }
2499 attenuationDb) != 0) 2275
2500 { 2276 int Channel::GetSendTelephoneEventPayloadType(unsigned char& type) {
2501 _engineStatisticsPtr->SetLastError( 2277 type = _sendTelephoneEventPayloadType;
2502 VE_SEND_DTMF_FAILED, 2278 return 0;
2503 kTraceWarning, 2279 }
2504 "SendTelephoneEventOutband() failed to send event"); 2280
2505 return -1; 2281 int Channel::UpdateRxVadDetection(AudioFrame& audioFrame) {
2506 } 2282 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2283 "Channel::UpdateRxVadDetection()");
2284
2285 int vadDecision = 1;
2286
2287 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
2288
2289 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr) {
2290 OnRxVadDetected(vadDecision);
2291 _oldVadDecision = vadDecision;
2292 }
2293
2294 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
2295 "Channel::UpdateRxVadDetection() => vadDecision=%d",
2296 vadDecision);
2297 return 0;
2298 }
2299
2300 int Channel::RegisterRxVadObserver(VoERxVadCallback& observer) {
2301 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2302 "Channel::RegisterRxVadObserver()");
2303 rtc::CritScope cs(&_callbackCritSect);
2304
2305 if (_rxVadObserverPtr) {
2306 _engineStatisticsPtr->SetLastError(
2307 VE_INVALID_OPERATION, kTraceError,
2308 "RegisterRxVadObserver() observer already enabled");
2309 return -1;
2310 }
2311 _rxVadObserverPtr = &observer;
2312 _RxVadDetection = true;
2313 return 0;
2314 }
2315
2316 int Channel::DeRegisterRxVadObserver() {
2317 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2318 "Channel::DeRegisterRxVadObserver()");
2319 rtc::CritScope cs(&_callbackCritSect);
2320
2321 if (!_rxVadObserverPtr) {
2322 _engineStatisticsPtr->SetLastError(
2323 VE_INVALID_OPERATION, kTraceWarning,
2324 "DeRegisterRxVadObserver() observer already disabled");
2507 return 0; 2325 return 0;
2508 } 2326 }
2509 2327 _rxVadObserverPtr = NULL;
2510 int Channel::SendTelephoneEventInband(unsigned char eventCode, 2328 _RxVadDetection = false;
2511 int lengthMs, 2329 return 0;
2512 int attenuationDb, 2330 }
2513 bool playDtmfEvent) 2331
2514 { 2332 int Channel::VoiceActivityIndicator(int& activity) {
2515 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), 2333 activity = _sendFrameType;
2516 "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)", 2334 return 0;
2517 playDtmfEvent);
2518
2519 _playInbandDtmfEvent = playDtmfEvent;
2520 _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
2521
2522 return 0;
2523 }
2524
2525 int
2526 Channel::SetSendTelephoneEventPayloadType(unsigned char type)
2527 {
2528 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2529 "Channel::SetSendTelephoneEventPayloadType()");
2530 if (type > 127)
2531 {
2532 _engineStatisticsPtr->SetLastError(
2533 VE_INVALID_ARGUMENT, kTraceError,
2534 "SetSendTelephoneEventPayloadType() invalid type");
2535 return -1;
2536 }
2537 CodecInst codec = {};
2538 codec.plfreq = 8000;
2539 codec.pltype = type;
2540 memcpy(codec.plname, "telephone-event", 16);
2541 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
2542 {
2543 _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
2544 if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2545 _engineStatisticsPtr->SetLastError(
2546 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2547 "SetSendTelephoneEventPayloadType() failed to register send"
2548 "payload type");
2549 return -1;
2550 }
2551 }
2552 _sendTelephoneEventPayloadType = type;
2553 return 0;
2554 }
2555
2556 int
2557 Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
2558 {
2559 type = _sendTelephoneEventPayloadType;
2560 return 0;
2561 }
2562
2563 int
2564 Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
2565 {
2566 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2567 "Channel::UpdateRxVadDetection()");
2568
2569 int vadDecision = 1;
2570
2571 vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive)? 1 : 0;
2572
2573 if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
2574 {
2575 OnRxVadDetected(vadDecision);
2576 _oldVadDecision = vadDecision;
2577 }
2578
2579 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2580 "Channel::UpdateRxVadDetection() => vadDecision=%d",
2581 vadDecision);
2582 return 0;
2583 }
2584
2585 int
2586 Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
2587 {
2588 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2589 "Channel::RegisterRxVadObserver()");
2590 rtc::CritScope cs(&_callbackCritSect);
2591
2592 if (_rxVadObserverPtr)
2593 {
2594 _engineStatisticsPtr->SetLastError(
2595 VE_INVALID_OPERATION, kTraceError,
2596 "RegisterRxVadObserver() observer already enabled");
2597 return -1;
2598 }
2599 _rxVadObserverPtr = &observer;
2600 _RxVadDetection = true;
2601 return 0;
2602 }
2603
2604 int
2605 Channel::DeRegisterRxVadObserver()
2606 {
2607 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2608 "Channel::DeRegisterRxVadObserver()");
2609 rtc::CritScope cs(&_callbackCritSect);
2610
2611 if (!_rxVadObserverPtr)
2612 {
2613 _engineStatisticsPtr->SetLastError(
2614 VE_INVALID_OPERATION, kTraceWarning,
2615 "DeRegisterRxVadObserver() observer already disabled");
2616 return 0;
2617 }
2618 _rxVadObserverPtr = NULL;
2619 _RxVadDetection = false;
2620 return 0;
2621 }
2622
2623 int
2624 Channel::VoiceActivityIndicator(int &activity)
2625 {
2626 activity = _sendFrameType;
2627 return 0;
2628 } 2335 }
2629 2336
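UpdateRxVadDetection() above reduces the frame's vad_activity_ to a 0/1 decision and only calls the observer when that decision changes, so a registered VoERxVadCallback sees edges rather than one callback per frame. A self-contained sketch of that edge-triggered pattern (ToyVadObserver is a hypothetical interface, not the VoE one):

// Notifies an observer only when the voice-activity decision flips.
struct ToyVadObserver {
  virtual void OnVadChanged(int decision) = 0;
  virtual ~ToyVadObserver() = default;
};

class VadEdgeDetector {
 public:
  explicit VadEdgeDetector(ToyVadObserver* observer) : observer_(observer) {}

  // frame_is_active would come from the per-frame VAD flag.
  void Update(bool frame_is_active) {
    const int decision = frame_is_active ? 1 : 0;
    if (decision != last_decision_ && observer_ != nullptr) {
      observer_->OnVadChanged(decision);  // edge, not level, triggers the callback
      last_decision_ = decision;
    }
  }

 private:
  ToyVadObserver* observer_;
  int last_decision_ = -1;  // forces a callback on the first frame
};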
2630 #ifdef WEBRTC_VOICE_ENGINE_AGC 2337 #ifdef WEBRTC_VOICE_ENGINE_AGC
2631 2338
2632 int 2339 int Channel::SetRxAgcStatus(bool enable, AgcModes mode) {
2633 Channel::SetRxAgcStatus(bool enable, AgcModes mode) 2340 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2634 { 2341 "Channel::SetRxAgcStatus(enable=%d, mode=%d)", (int)enable,
2635 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 2342 (int)mode);
2636 "Channel::SetRxAgcStatus(enable=%d, mode=%d)", 2343
2637 (int)enable, (int)mode); 2344 GainControl::Mode agcMode = kDefaultRxAgcMode;
2638 2345 switch (mode) {
2639 GainControl::Mode agcMode = kDefaultRxAgcMode; 2346 case kAgcDefault:
2640 switch (mode) 2347 break;
2641 { 2348 case kAgcUnchanged:
2642 case kAgcDefault: 2349 agcMode = rx_audioproc_->gain_control()->mode();
2643 break; 2350 break;
2644 case kAgcUnchanged: 2351 case kAgcFixedDigital:
2645 agcMode = rx_audioproc_->gain_control()->mode(); 2352 agcMode = GainControl::kFixedDigital;
2646 break; 2353 break;
2647 case kAgcFixedDigital: 2354 case kAgcAdaptiveDigital:
2648 agcMode = GainControl::kFixedDigital; 2355 agcMode = GainControl::kAdaptiveDigital;
2649 break; 2356 break;
2650 case kAgcAdaptiveDigital: 2357 default:
2651 agcMode =GainControl::kAdaptiveDigital; 2358 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
2652 break; 2359 "SetRxAgcStatus() invalid Agc mode");
2653 default: 2360 return -1;
2654 _engineStatisticsPtr->SetLastError( 2361 }
2655 VE_INVALID_ARGUMENT, kTraceError, 2362
2656 "SetRxAgcStatus() invalid Agc mode"); 2363 if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) {
2657 return -1; 2364 _engineStatisticsPtr->SetLastError(
2658 } 2365 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc mode");
2659 2366 return -1;
2660 if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) 2367 }
2661 { 2368 if (rx_audioproc_->gain_control()->Enable(enable) != 0) {
2662 _engineStatisticsPtr->SetLastError( 2369 _engineStatisticsPtr->SetLastError(
2663 VE_APM_ERROR, kTraceError, 2370 VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc state");
2664 "SetRxAgcStatus() failed to set Agc mode"); 2371 return -1;
2665 return -1; 2372 }
2666 } 2373
2667 if (rx_audioproc_->gain_control()->Enable(enable) != 0) 2374 _rxAgcIsEnabled = enable;
2668 { 2375 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
2669 _engineStatisticsPtr->SetLastError( 2376
2670 VE_APM_ERROR, kTraceError, 2377 return 0;
2671 "SetRxAgcStatus() failed to set Agc state"); 2378 }
2672 return -1; 2379
2673 } 2380 int Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) {
2674 2381 bool enable = rx_audioproc_->gain_control()->is_enabled();
2675 _rxAgcIsEnabled = enable; 2382 GainControl::Mode agcMode = rx_audioproc_->gain_control()->mode();
2676 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled); 2383
2677 2384 enabled = enable;
2678 return 0; 2385
2679 } 2386 switch (agcMode) {
2680 2387 case GainControl::kFixedDigital:
2681 int 2388 mode = kAgcFixedDigital;
2682 Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) 2389 break;
2683 { 2390 case GainControl::kAdaptiveDigital:
2684 bool enable = rx_audioproc_->gain_control()->is_enabled(); 2391 mode = kAgcAdaptiveDigital;
2685 GainControl::Mode agcMode = 2392 break;
2686 rx_audioproc_->gain_control()->mode(); 2393 default:
2687 2394 _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
2688 enabled = enable; 2395 "GetRxAgcStatus() invalid Agc mode");
2689 2396 return -1;
2690 switch (agcMode) 2397 }
2691 { 2398
2692 case GainControl::kFixedDigital: 2399 return 0;
2693 mode = kAgcFixedDigital; 2400 }
2694 break; 2401
2695 case GainControl::kAdaptiveDigital: 2402 int Channel::SetRxAgcConfig(AgcConfig config) {
2696 mode = kAgcAdaptiveDigital; 2403 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2697 break; 2404 "Channel::SetRxAgcConfig()");
2698 default: 2405
2699 _engineStatisticsPtr->SetLastError( 2406 if (rx_audioproc_->gain_control()->set_target_level_dbfs(
2700 VE_APM_ERROR, kTraceError, 2407 config.targetLeveldBOv) != 0) {
2701 "GetRxAgcStatus() invalid Agc mode"); 2408 _engineStatisticsPtr->SetLastError(
2702 return -1; 2409 VE_APM_ERROR, kTraceError,
2703 } 2410 "SetRxAgcConfig() failed to set target peak |level|"
2704 2411 "(or envelope) of the Agc");
2705 return 0; 2412 return -1;
2706 } 2413 }
2707 2414 if (rx_audioproc_->gain_control()->set_compression_gain_db(
2708 int 2415 config.digitalCompressionGaindB) != 0) {
2709 Channel::SetRxAgcConfig(AgcConfig config) 2416 _engineStatisticsPtr->SetLastError(
2710 { 2417 VE_APM_ERROR, kTraceError,
2711 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 2418 "SetRxAgcConfig() failed to set the range in |gain| the"
2712 "Channel::SetRxAgcConfig()"); 2419 " digital compression stage may apply");
2713 2420 return -1;
2714 if (rx_audioproc_->gain_control()->set_target_level_dbfs( 2421 }
2715 config.targetLeveldBOv) != 0) 2422 if (rx_audioproc_->gain_control()->enable_limiter(config.limiterEnable) !=
2716 { 2423 0) {
2717 _engineStatisticsPtr->SetLastError( 2424 _engineStatisticsPtr->SetLastError(
2718 VE_APM_ERROR, kTraceError, 2425 VE_APM_ERROR, kTraceError,
2719 "SetRxAgcConfig() failed to set target peak |level|" 2426 "SetRxAgcConfig() failed to set hard limiter to the signal");
2720 "(or envelope) of the Agc"); 2427 return -1;
2721 return -1; 2428 }
2722 } 2429
2723 if (rx_audioproc_->gain_control()->set_compression_gain_db( 2430 return 0;
2724 config.digitalCompressionGaindB) != 0) 2431 }
2725 { 2432
2726 _engineStatisticsPtr->SetLastError( 2433 int Channel::GetRxAgcConfig(AgcConfig& config) {
2727 VE_APM_ERROR, kTraceError, 2434 config.targetLeveldBOv = rx_audioproc_->gain_control()->target_level_dbfs();
2728 "SetRxAgcConfig() failed to set the range in |gain| the" 2435 config.digitalCompressionGaindB =
2729 " digital compression stage may apply"); 2436 rx_audioproc_->gain_control()->compression_gain_db();
2730 return -1; 2437 config.limiterEnable = rx_audioproc_->gain_control()->is_limiter_enabled();
2731 } 2438
2732 if (rx_audioproc_->gain_control()->enable_limiter( 2439 return 0;
2733 config.limiterEnable) != 0) 2440 }
2734 { 2441
2735 _engineStatisticsPtr->SetLastError( 2442 #endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
2736 VE_APM_ERROR, kTraceError,
2737 "SetRxAgcConfig() failed to set hard limiter to the signal");
2738 return -1;
2739 }
2740
2741 return 0;
2742 }
2743
2744 int
2745 Channel::GetRxAgcConfig(AgcConfig& config)
2746 {
2747 config.targetLeveldBOv =
2748 rx_audioproc_->gain_control()->target_level_dbfs();
2749 config.digitalCompressionGaindB =
2750 rx_audioproc_->gain_control()->compression_gain_db();
2751 config.limiterEnable =
2752 rx_audioproc_->gain_control()->is_limiter_enabled();
2753
2754 return 0;
2755 }
2756
2757 #endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
2758 2443
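In SetRxAgcConfig() above, targetLeveldBOv is the desired peak level expressed in dB below digital full scale and digitalCompressionGaindB is the fixed gain the digital compression stage may add. As a worked example of the unit (an illustration of dBov arithmetic, not of the APM internals), a target 3 dB below full scale corresponds to a linear amplitude of 10^(-3/20), about 0.708, i.e. a peak of roughly 23200 on an int16 scale:

#include <cmath>
#include <cstdint>
#include <cstdio>

// Convert a target level given in dB below full scale (dBov) to a linear
// amplitude fraction and to the corresponding int16 peak value.
void PrintTargetLevel(int target_level_dbov) {
  const double linear = std::pow(10.0, -target_level_dbov / 20.0);
  const int16_t peak = static_cast<int16_t>(32767.0 * linear);
  std::printf("%d dBov -> %.3f of full scale (int16 peak ~%d)\n",
              target_level_dbov, linear, peak);
}
// PrintTargetLevel(3) prints: 3 dBov -> 0.708 of full scale (int16 peak ~23197)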
2759 #ifdef WEBRTC_VOICE_ENGINE_NR 2444 #ifdef WEBRTC_VOICE_ENGINE_NR
2760 2445
2761 int 2446 int Channel::SetRxNsStatus(bool enable, NsModes mode) {
2762 Channel::SetRxNsStatus(bool enable, NsModes mode) 2447 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2763 { 2448 "Channel::SetRxNsStatus(enable=%d, mode=%d)", (int)enable,
2764 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 2449 (int)mode);
2765 "Channel::SetRxNsStatus(enable=%d, mode=%d)", 2450
2766 (int)enable, (int)mode); 2451 NoiseSuppression::Level nsLevel = kDefaultNsMode;
2767 2452 switch (mode) {
2768 NoiseSuppression::Level nsLevel = kDefaultNsMode; 2453 case kNsDefault:
2769 switch (mode) 2454 break;
2770 { 2455 case kNsUnchanged:
2771 2456 nsLevel = rx_audioproc_->noise_suppression()->level();
2772 case kNsDefault: 2457 break;
2773 break; 2458 case kNsConference:
2774 case kNsUnchanged: 2459 nsLevel = NoiseSuppression::kHigh;
2775 nsLevel = rx_audioproc_->noise_suppression()->level(); 2460 break;
2776 break; 2461 case kNsLowSuppression:
2777 case kNsConference: 2462 nsLevel = NoiseSuppression::kLow;
2778 nsLevel = NoiseSuppression::kHigh; 2463 break;
2779 break; 2464 case kNsModerateSuppression:
2780 case kNsLowSuppression: 2465 nsLevel = NoiseSuppression::kModerate;
2781 nsLevel = NoiseSuppression::kLow; 2466 break;
2782 break; 2467 case kNsHighSuppression:
2783 case kNsModerateSuppression: 2468 nsLevel = NoiseSuppression::kHigh;
2784 nsLevel = NoiseSuppression::kModerate; 2469 break;
2785 break; 2470 case kNsVeryHighSuppression:
2786 case kNsHighSuppression: 2471 nsLevel = NoiseSuppression::kVeryHigh;
2787 nsLevel = NoiseSuppression::kHigh; 2472 break;
2788 break; 2473 }
2789 case kNsVeryHighSuppression: 2474
2790 nsLevel = NoiseSuppression::kVeryHigh; 2475 if (rx_audioproc_->noise_suppression()->set_level(nsLevel) != 0) {
2791 break; 2476 _engineStatisticsPtr->SetLastError(
2792 } 2477 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS level");
2793 2478 return -1;
2794 if (rx_audioproc_->noise_suppression()->set_level(nsLevel) 2479 }
2795 != 0) 2480 if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) {
2796 { 2481 _engineStatisticsPtr->SetLastError(
2797 _engineStatisticsPtr->SetLastError( 2482 VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS state");
2798 VE_APM_ERROR, kTraceError, 2483 return -1;
2799 "SetRxNsStatus() failed to set NS level"); 2484 }
2800 return -1; 2485
2801 } 2486 _rxNsIsEnabled = enable;
2802 if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) 2487 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
2803 { 2488
2804 _engineStatisticsPtr->SetLastError( 2489 return 0;
2805 VE_APM_ERROR, kTraceError, 2490 }
2806 "SetRxNsStatus() failed to set NS state"); 2491
2807 return -1; 2492 int Channel::GetRxNsStatus(bool& enabled, NsModes& mode) {
2808 } 2493 bool enable = rx_audioproc_->noise_suppression()->is_enabled();
2809 2494 NoiseSuppression::Level ncLevel = rx_audioproc_->noise_suppression()->level();
2810 _rxNsIsEnabled = enable; 2495
2811 channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled); 2496 enabled = enable;
2812 2497
2813 return 0; 2498 switch (ncLevel) {
2814 } 2499 case NoiseSuppression::kLow:
2815 2500 mode = kNsLowSuppression;
2816 int 2501 break;
2817 Channel::GetRxNsStatus(bool& enabled, NsModes& mode) 2502 case NoiseSuppression::kModerate:
2818 { 2503 mode = kNsModerateSuppression;
2819 bool enable = 2504 break;
2820 rx_audioproc_->noise_suppression()->is_enabled(); 2505 case NoiseSuppression::kHigh:
2821 NoiseSuppression::Level ncLevel = 2506 mode = kNsHighSuppression;
2822 rx_audioproc_->noise_suppression()->level(); 2507 break;
2823 2508 case NoiseSuppression::kVeryHigh:
2824 enabled = enable; 2509 mode = kNsVeryHighSuppression;
2825 2510 break;
2826 switch (ncLevel) 2511 }
2827 { 2512
2828 case NoiseSuppression::kLow: 2513 return 0;
2829 mode = kNsLowSuppression; 2514 }
2830 break; 2515
2831 case NoiseSuppression::kModerate: 2516 #endif // #ifdef WEBRTC_VOICE_ENGINE_NR
2832 mode = kNsModerateSuppression; 2517
2833 break; 2518 int Channel::SetLocalSSRC(unsigned int ssrc) {
2834 case NoiseSuppression::kHigh: 2519 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2835 mode = kNsHighSuppression; 2520 "Channel::SetLocalSSRC()");
2836 break; 2521 if (channel_state_.Get().sending) {
2837 case NoiseSuppression::kVeryHigh: 2522 _engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
2838 mode = kNsVeryHighSuppression; 2523 "SetLocalSSRC() already sending");
2839 break; 2524 return -1;
2840 } 2525 }
2841 2526 _rtpRtcpModule->SetSSRC(ssrc);
2842 return 0; 2527 return 0;
2843 } 2528 }
2844 2529
2845 #endif // #ifdef WEBRTC_VOICE_ENGINE_NR 2530 int Channel::GetLocalSSRC(unsigned int& ssrc) {
2846 2531 ssrc = _rtpRtcpModule->SSRC();
2847 int 2532 return 0;
2848 Channel::SetLocalSSRC(unsigned int ssrc) 2533 }
2849 { 2534
2850 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), 2535 int Channel::GetRemoteSSRC(unsigned int& ssrc) {
2851 "Channel::SetLocalSSRC()"); 2536 ssrc = rtp_receiver_->SSRC();
2852 if (channel_state_.Get().sending) 2537 return 0;
2853 {
2854 _engineStatisticsPtr->SetLastError(
2855 VE_ALREADY_SENDING, kTraceError,
2856 "SetLocalSSRC() already sending");
2857 return -1;
2858 }
2859 _rtpRtcpModule->SetSSRC(ssrc);
2860 return 0;
2861 }
2862
2863 int
2864 Channel::GetLocalSSRC(unsigned int& ssrc)
2865 {
2866 ssrc = _rtpRtcpModule->SSRC();
2867 return 0;
2868 }
2869
2870 int
2871 Channel::GetRemoteSSRC(unsigned int& ssrc)
2872 {
2873 ssrc = rtp_receiver_->SSRC();
2874 return 0;
2875 } 2538 }
2876 2539
2877 int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) { 2540 int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
2878 _includeAudioLevelIndication = enable; 2541 _includeAudioLevelIndication = enable;
2879 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id); 2542 return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
2880 } 2543 }
2881 2544
2882 int Channel::SetReceiveAudioLevelIndicationStatus(bool enable, 2545 int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2883 unsigned char id) { 2546 unsigned char id) {
2884 rtp_header_parser_->DeregisterRtpHeaderExtension( 2547 rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
2885 kRtpExtensionAudioLevel); 2548 if (enable &&
2886 if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension( 2549 !rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
2887 kRtpExtensionAudioLevel, id)) { 2550 id)) {
2888 return -1; 2551 return -1;
2889 } 2552 }
2890 return 0; 2553 return 0;
2891 } 2554 }
2892 2555
2893 int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) { 2556 int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2894 return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id); 2557 return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
2895 } 2558 }
2896 2559
2897 int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) { 2560 int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2898 rtp_header_parser_->DeregisterRtpHeaderExtension( 2561 rtp_header_parser_->DeregisterRtpHeaderExtension(
2899 kRtpExtensionAbsoluteSendTime); 2562 kRtpExtensionAbsoluteSendTime);
2900 if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension( 2563 if (enable &&
2901 kRtpExtensionAbsoluteSendTime, id)) { 2564 !rtp_header_parser_->RegisterRtpHeaderExtension(
2565 kRtpExtensionAbsoluteSendTime, id)) {
2902 return -1; 2566 return -1;
2903 } 2567 }
2904 return 0; 2568 return 0;
2905 } 2569 }
2906 2570
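The setters above toggle the audio-level and absolute-send-time RTP header extensions on the send and receive sides. The value carried in the audio-level extension is the sample level in -dBov, clamped to 0..127 as in RFC 6464; the sketch below derives such a value from 16-bit samples using an RMS measurement (the exact measurement WebRTC uses may differ, this only illustrates the dBov formula):

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <cstdint>

// Returns the level as used by the RTP audio-level extension:
// 0 = digital full scale, 127 = silence (the value is -dBov, clamped to 7 bits).
uint8_t AudioLevelDbov(const int16_t* samples, size_t count) {
  if (count == 0) return 127;
  double sum_squares = 0.0;
  for (size_t i = 0; i < count; ++i) {
    const double s = samples[i] / 32768.0;  // normalize to [-1, 1)
    sum_squares += s * s;
  }
  const double rms = std::sqrt(sum_squares / count);
  if (rms <= 0.0) return 127;
  const double dbov = 20.0 * std::log10(rms);  // <= 0 for in-range signals
  const int level = static_cast<int>(std::lround(-dbov));
  return static_cast<uint8_t>(std::min(127, std::max(0, level)));
}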
2907 void Channel::EnableSendTransportSequenceNumber(int id) { 2571 void Channel::EnableSendTransportSequenceNumber(int id) {
2908 int ret = 2572 int ret =
2909 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id); 2573 SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
2910 RTC_DCHECK_EQ(0, ret); 2574 RTC_DCHECK_EQ(0, ret);
2911 } 2575 }
(...skipping 31 matching lines...)
2943 } 2607 }
2944 packet_router_ = packet_router; 2608 packet_router_ = packet_router;
2945 } 2609 }
2946 2610
2947 void Channel::SetRTCPStatus(bool enable) { 2611 void Channel::SetRTCPStatus(bool enable) {
2948 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), 2612 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2949 "Channel::SetRTCPStatus()"); 2613 "Channel::SetRTCPStatus()");
2950 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff); 2614 _rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
2951 } 2615 }
2952 2616
2953 int 2617 int Channel::GetRTCPStatus(bool& enabled) {
2954 Channel::GetRTCPStatus(bool& enabled)
2955 {
2956 RtcpMode method = _rtpRtcpModule->RTCP(); 2618 RtcpMode method = _rtpRtcpModule->RTCP();
2957 enabled = (method != RtcpMode::kOff); 2619 enabled = (method != RtcpMode::kOff);
2958 return 0; 2620 return 0;
2959 } 2621 }
2960 2622
2961 int 2623 int Channel::SetRTCP_CNAME(const char cName[256]) {
2962 Channel::SetRTCP_CNAME(const char cName[256]) 2624 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2963 { 2625 "Channel::SetRTCP_CNAME()");
2964 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), 2626 if (_rtpRtcpModule->SetCNAME(cName) != 0) {
2965 "Channel::SetRTCP_CNAME()"); 2627 _engineStatisticsPtr->SetLastError(
2966 if (_rtpRtcpModule->SetCNAME(cName) != 0) 2628 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2967 { 2629 "SetRTCP_CNAME() failed to set RTCP CNAME");
2968 _engineStatisticsPtr->SetLastError( 2630 return -1;
2969 VE_RTP_RTCP_MODULE_ERROR, kTraceError, 2631 }
2970 "SetRTCP_CNAME() failed to set RTCP CNAME"); 2632 return 0;
2971 return -1;
2972 }
2973 return 0;
2974 } 2633 }
2975 2634
2976 int 2635 int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
2977 Channel::GetRemoteRTCP_CNAME(char cName[256]) 2636 if (cName == NULL) {
2978 { 2637 _engineStatisticsPtr->SetLastError(
2979 if (cName == NULL) 2638 VE_INVALID_ARGUMENT, kTraceError,
2980 { 2639 "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
2981 _engineStatisticsPtr->SetLastError( 2640 return -1;
2982 VE_INVALID_ARGUMENT, kTraceError, 2641 }
2983 "GetRemoteRTCP_CNAME() invalid CNAME input buffer"); 2642 char cname[RTCP_CNAME_SIZE];
2984 return -1; 2643 const uint32_t remoteSSRC = rtp_receiver_->SSRC();
2985 } 2644 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
2986 char cname[RTCP_CNAME_SIZE]; 2645 _engineStatisticsPtr->SetLastError(
2987 const uint32_t remoteSSRC = rtp_receiver_->SSRC(); 2646 VE_CANNOT_RETRIEVE_CNAME, kTraceError,
2988 if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) 2647 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
2989 { 2648 return -1;
2990 _engineStatisticsPtr->SetLastError( 2649 }
2991 VE_CANNOT_RETRIEVE_CNAME, kTraceError, 2650 strcpy(cName, cname);
2992 "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME"); 2651 return 0;
2993 return -1;
2994 }
2995 strcpy(cName, cname);
2996 return 0;
2997 } 2652 }
2998 2653
2999 int 2654 int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
3000 Channel::GetRemoteRTCPData( 2655 unsigned int& NTPLow,
3001 unsigned int& NTPHigh, 2656 unsigned int& timestamp,
3002 unsigned int& NTPLow, 2657 unsigned int& playoutTimestamp,
3003 unsigned int& timestamp, 2658 unsigned int* jitter,
3004 unsigned int& playoutTimestamp, 2659 unsigned short* fractionLost) {
3005 unsigned int* jitter, 2660 // --- Information from sender info in received Sender Reports
3006 unsigned short* fractionLost)
3007 {
3008 // --- Information from sender info in received Sender Reports
3009 2661
3010 RTCPSenderInfo senderInfo; 2662 RTCPSenderInfo senderInfo;
3011 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) 2663 if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
3012 { 2664 _engineStatisticsPtr->SetLastError(
3013 _engineStatisticsPtr->SetLastError( 2665 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3014 VE_RTP_RTCP_MODULE_ERROR, kTraceError, 2666 "GetRemoteRTCPData() failed to retrieve sender info for remote "
3015 "GetRemoteRTCPData() failed to retrieve sender info for remote " 2667 "side");
3016 "side"); 2668 return -1;
3017 return -1; 2669 }
2670
2671 // We only utilize 12 out of 20 bytes in the sender info (ignores packet
2672 // and octet count)
2673 NTPHigh = senderInfo.NTPseconds;
2674 NTPLow = senderInfo.NTPfraction;
2675 timestamp = senderInfo.RTPtimeStamp;
2676
2677 // --- Locally derived information
2678
2679 // This value is updated on each incoming RTCP packet (0 when no packet
2680 // has been received)
2681 playoutTimestamp = playout_timestamp_rtcp_;
2682
2683 if (NULL != jitter || NULL != fractionLost) {
2684 // Get all RTCP receiver report blocks that have been received on this
2685 // channel. If we receive RTP packets from a remote source we know the
2686 // remote SSRC and use the report block from him.
2687 // Otherwise use the first report block.
2688 std::vector<RTCPReportBlock> remote_stats;
2689 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
2690 remote_stats.empty()) {
2691 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
2692 "GetRemoteRTCPData() failed to measure statistics due"
2693 " to lack of received RTP and/or RTCP packets");
2694 return -1;
3018 } 2695 }
3019 2696
3020 // We only utilize 12 out of 20 bytes in the sender info (ignores packet 2697 uint32_t remoteSSRC = rtp_receiver_->SSRC();
3021 // and octet count) 2698 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3022 NTPHigh = senderInfo.NTPseconds; 2699 for (; it != remote_stats.end(); ++it) {
3023 NTPLow = senderInfo.NTPfraction; 2700 if (it->remoteSSRC == remoteSSRC)
3024 timestamp = senderInfo.RTPtimeStamp; 2701 break;
2702 }
3025 2703
3026 // --- Locally derived information 2704 if (it == remote_stats.end()) {
2705 // If we have not received any RTCP packets from this SSRC it probably
2706 // means that we have not received any RTP packets.
2707 // Use the first received report block instead.
2708 it = remote_stats.begin();
2709 remoteSSRC = it->remoteSSRC;
2710 }
3027 2711
3028 // This value is updated on each incoming RTCP packet (0 when no packet 2712 if (jitter) {
3029 // has been received) 2713 *jitter = it->jitter;
3030 playoutTimestamp = playout_timestamp_rtcp_; 2714 }
3031 2715
3032 if (NULL != jitter || NULL != fractionLost) 2716 if (fractionLost) {
3033 { 2717 *fractionLost = it->fractionLost;
3034 // Get all RTCP receiver report blocks that have been received on this
3035 // channel. If we receive RTP packets from a remote source we know the
3036 // remote SSRC and use the report block from him.
3037 // Otherwise use the first report block.
3038 std::vector<RTCPReportBlock> remote_stats;
3039 if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
3040 remote_stats.empty()) {
3041 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3042 VoEId(_instanceId, _channelId),
3043 "GetRemoteRTCPData() failed to measure statistics due"
3044 " to lack of received RTP and/or RTCP packets");
3045 return -1;
3046 }
3047
3048 uint32_t remoteSSRC = rtp_receiver_->SSRC();
3049 std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3050 for (; it != remote_stats.end(); ++it) {
3051 if (it->remoteSSRC == remoteSSRC)
3052 break;
3053 }
3054
3055 if (it == remote_stats.end()) {
3056 // If we have not received any RTCP packets from this SSRC it probably
3057 // means that we have not received any RTP packets.
3058 // Use the first received report block instead.
3059 it = remote_stats.begin();
3060 remoteSSRC = it->remoteSSRC;
3061 }
3062
3063 if (jitter) {
3064 *jitter = it->jitter;
3065 }
3066
3067 if (fractionLost) {
3068 *fractionLost = it->fractionLost;
3069 }
3070 } 2718 }
3071 return 0; 2719 }
2720 return 0;
3072 } 2721 }
3073 2722
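GetRemoteRTCPData() above returns the sender-report NTP timestamp as a 32-bit seconds part (NTPHigh) and a 32-bit fractional part (NTPLow), where the fraction is in units of 1/2^32 second. Converting that pair to milliseconds is a small worked helper (not a WebRTC API):

#include <cstdint>

// The NTP fraction is 1/2^32 of a second; scale it to milliseconds with 64-bit math.
uint64_t NtpToMs(uint32_t ntp_seconds, uint32_t ntp_fraction) {
  const uint64_t frac_ms = (static_cast<uint64_t>(ntp_fraction) * 1000) >> 32;
  return static_cast<uint64_t>(ntp_seconds) * 1000 + frac_ms;
}
// Example: a fraction of 0x80000000 is half a second, so NtpToMs(10, 0x80000000) == 10500.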
3074 int 2723 int Channel::SendApplicationDefinedRTCPPacket(
3075 Channel::SendApplicationDefinedRTCPPacket(unsigned char subType, 2724 unsigned char subType,
3076 unsigned int name, 2725 unsigned int name,
3077 const char* data, 2726 const char* data,
3078 unsigned short dataLengthInBytes) 2727 unsigned short dataLengthInBytes) {
3079 { 2728 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3080 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), 2729 "Channel::SendApplicationDefinedRTCPPacket()");
3081 "Channel::SendApplicationDefinedRTCPPacket()"); 2730 if (!channel_state_.Get().sending) {
3082 if (!channel_state_.Get().sending) 2731 _engineStatisticsPtr->SetLastError(
3083 { 2732 VE_NOT_SENDING, kTraceError,
3084 _engineStatisticsPtr->SetLastError( 2733 "SendApplicationDefinedRTCPPacket() not sending");
3085 VE_NOT_SENDING, kTraceError, 2734 return -1;
3086 "SendApplicationDefinedRTCPPacket() not sending"); 2735 }
3087 return -1; 2736 if (NULL == data) {
3088 } 2737 _engineStatisticsPtr->SetLastError(
3089 if (NULL == data) 2738 VE_INVALID_ARGUMENT, kTraceError,
3090 { 2739 "SendApplicationDefinedRTCPPacket() invalid data value");
3091 _engineStatisticsPtr->SetLastError( 2740 return -1;
3092 VE_INVALID_ARGUMENT, kTraceError, 2741 }
3093 "SendApplicationDefinedRTCPPacket() invalid data value"); 2742 if (dataLengthInBytes % 4 != 0) {
3094 return -1; 2743 _engineStatisticsPtr->SetLastError(
3095 } 2744 VE_INVALID_ARGUMENT, kTraceError,
3096 if (dataLengthInBytes % 4 != 0) 2745 "SendApplicationDefinedRTCPPacket() invalid length value");
3097 { 2746 return -1;
3098 _engineStatisticsPtr->SetLastError( 2747 }
3099 VE_INVALID_ARGUMENT, kTraceError, 2748 RtcpMode status = _rtpRtcpModule->RTCP();
3100 "SendApplicationDefinedRTCPPacket() invalid length value"); 2749 if (status == RtcpMode::kOff) {
3101 return -1; 2750 _engineStatisticsPtr->SetLastError(
3102 } 2751 VE_RTCP_ERROR, kTraceError,
3103 RtcpMode status = _rtpRtcpModule->RTCP(); 2752 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3104 if (status == RtcpMode::kOff) { 2753 return -1;
3105 _engineStatisticsPtr->SetLastError( 2754 }
3106 VE_RTCP_ERROR, kTraceError,
3107 "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3108 return -1;
3109 }
3110 2755
3111 // Create and schedule the RTCP APP packet for transmission 2756 // Create and schedule the RTCP APP packet for transmission
3112 if (_rtpRtcpModule->SetRTCPApplicationSpecificData( 2757 if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
3113 subType, 2758 subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
3114 name, 2759 _engineStatisticsPtr->SetLastError(
3115 (const unsigned char*) data, 2760 VE_SEND_ERROR, kTraceError,
3116 dataLengthInBytes) != 0) 2761 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3117 { 2762 return -1;
3118 _engineStatisticsPtr->SetLastError( 2763 }
3119 VE_SEND_ERROR, kTraceError, 2764 return 0;
3120 "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3121 return -1;
3122 }
3123 return 0;
3124 } 2765 }
3125 2766
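SendApplicationDefinedRTCPPacket() above rejects a dataLengthInBytes that is not a multiple of 4 because RTCP APP payloads are carried in whole 32-bit words (RFC 3550). A trivial helper for zero-padding application data up to the next word boundary (illustrative only, not part of the module) would be:

#include <cstddef>
#include <cstring>
#include <vector>

// RTCP APP data must occupy whole 32-bit words; zero-pad up to the boundary.
std::vector<unsigned char> PadToRtcpWord(const unsigned char* data, size_t len) {
  const size_t padded_len = (len + 3) & ~static_cast<size_t>(3);
  std::vector<unsigned char> out(padded_len, 0);
  if (data != nullptr && len > 0) {
    std::memcpy(out.data(), data, len);
  }
  return out;
}
// Example: len 5 pads to 8 bytes; len 4 stays at 4.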
3126 int 2767 int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
3127 Channel::GetRTPStatistics( 2768 unsigned int& maxJitterMs,
3128 unsigned int& averageJitterMs, 2769 unsigned int& discardedPackets) {
3129 unsigned int& maxJitterMs, 2770 // The jitter statistics is updated for each received RTP packet and is
3130 unsigned int& discardedPackets) 2771 // based on received packets.
3131 { 2772 if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
3132 // The jitter statistics is updated for each received RTP packet and is 2773 // If RTCP is off, there is no timed thread in the RTCP module regularly
3133 // based on received packets. 2774 // generating new stats, trigger the update manually here instead.
3134 if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) { 2775 StreamStatistician* statistician =
3135 // If RTCP is off, there is no timed thread in the RTCP module regularly 2776 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
3136 // generating new stats, trigger the update manually here instead. 2777 if (statistician) {
3137 StreamStatistician* statistician = 2778 // Don't use returned statistics, use data from proxy instead so that
3138 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC()); 2779 // max jitter can be fetched atomically.
3139 if (statistician) { 2780 RtcpStatistics s;
3140 // Don't use returned statistics, use data from proxy instead so that 2781 statistician->GetStatistics(&s, true);
3141 // max jitter can be fetched atomically.
3142 RtcpStatistics s;
3143 statistician->GetStatistics(&s, true);
3144 }
3145 } 2782 }
2783 }
3146 2784
3147 ChannelStatistics stats = statistics_proxy_->GetStats(); 2785 ChannelStatistics stats = statistics_proxy_->GetStats();
3148 const int32_t playoutFrequency = audio_coding_->PlayoutFrequency(); 2786 const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
3149 if (playoutFrequency > 0) { 2787 if (playoutFrequency > 0) {
3150 // Scale RTP statistics given the current playout frequency 2788 // Scale RTP statistics given the current playout frequency
3151 maxJitterMs = stats.max_jitter / (playoutFrequency / 1000); 2789 maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
3152 averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000); 2790 averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
3153 } 2791 }
3154 2792
3155 discardedPackets = _numberOfDiscardedPackets; 2793 discardedPackets = _numberOfDiscardedPackets;
3156 2794
3157 return 0; 2795 return 0;
3158 } 2796 }
3159 2797
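GetRTPStatistics() above divides the interarrival jitter by playoutFrequency/1000 because RTCP jitter is measured in RTP timestamp units, so the divisor is simply ticks per millisecond. As a worked example with illustrative numbers: at a 48000 Hz playout clock, a reported jitter of 960 timestamp units is 960 / 48 = 20 ms.

#include <cstdint>

// Convert RTCP interarrival jitter from RTP timestamp units to milliseconds.
// playout_frequency_hz is the RTP clock rate used for the stream.
uint32_t JitterToMs(uint32_t jitter_units, int32_t playout_frequency_hz) {
  if (playout_frequency_hz <= 0) return 0;  // guard against a missing clock rate
  const int32_t ticks_per_ms = playout_frequency_hz / 1000;
  if (ticks_per_ms == 0) return 0;
  return jitter_units / ticks_per_ms;       // e.g. 960 / 48 == 20
}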
3160 int Channel::GetRemoteRTCPReportBlocks( 2798 int Channel::GetRemoteRTCPReportBlocks(
3161 std::vector<ReportBlock>* report_blocks) { 2799 std::vector<ReportBlock>* report_blocks) {
3162 if (report_blocks == NULL) { 2800 if (report_blocks == NULL) {
3163 _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError, 2801 _engineStatisticsPtr->SetLastError(
3164 "GetRemoteRTCPReportBlock()s invalid report_blocks."); 2802 VE_INVALID_ARGUMENT, kTraceError,
2803 "GetRemoteRTCPReportBlock()s invalid report_blocks.");
3165 return -1; 2804 return -1;
3166 } 2805 }
3167 2806
3168 // Get the report blocks from the latest received RTCP Sender or Receiver 2807 // Get the report blocks from the latest received RTCP Sender or Receiver
3169 // Report. Each element in the vector contains the sender's SSRC and a 2808 // Report. Each element in the vector contains the sender's SSRC and a
3170 // report block according to RFC 3550. 2809 // report block according to RFC 3550.
3171 std::vector<RTCPReportBlock> rtcp_report_blocks; 2810 std::vector<RTCPReportBlock> rtcp_report_blocks;
3172 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) { 2811 if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
3173 return -1; 2812 return -1;
3174 } 2813 }
(...skipping 10 matching lines...)
3185 report_block.cumulative_num_packets_lost = it->cumulativeLost; 2824 report_block.cumulative_num_packets_lost = it->cumulativeLost;
3186 report_block.extended_highest_sequence_number = it->extendedHighSeqNum; 2825 report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
3187 report_block.interarrival_jitter = it->jitter; 2826 report_block.interarrival_jitter = it->jitter;
3188 report_block.last_SR_timestamp = it->lastSR; 2827 report_block.last_SR_timestamp = it->lastSR;
3189 report_block.delay_since_last_SR = it->delaySinceLastSR; 2828 report_block.delay_since_last_SR = it->delaySinceLastSR;
3190 report_blocks->push_back(report_block); 2829 report_blocks->push_back(report_block);
3191 } 2830 }
3192 return 0; 2831 return 0;
3193 } 2832 }
3194 2833
3195 int 2834 int Channel::GetRTPStatistics(CallStatistics& stats) {
3196 Channel::GetRTPStatistics(CallStatistics& stats) 2835 // --- RtcpStatistics
3197 {
3198 // --- RtcpStatistics
3199 2836
3200 // The jitter statistics is updated for each received RTP packet and is 2837 // The jitter statistics is updated for each received RTP packet and is
3201 // based on received packets. 2838 // based on received packets.
3202 RtcpStatistics statistics; 2839 RtcpStatistics statistics;
3203 StreamStatistician* statistician = 2840 StreamStatistician* statistician =
3204 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC()); 2841 rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
3205 if (!statistician || 2842 if (!statistician ||
3206 !statistician->GetStatistics( 2843 !statistician->GetStatistics(&statistics,
3207 &statistics, _rtpRtcpModule->RTCP() == RtcpMode::kOff)) { 2844 _rtpRtcpModule->RTCP() == RtcpMode::kOff)) {
3208 _engineStatisticsPtr->SetLastError( 2845 _engineStatisticsPtr->SetLastError(
3209 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning, 2846 VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
3210 "GetRTPStatistics() failed to read RTP statistics from the " 2847 "GetRTPStatistics() failed to read RTP statistics from the "
3211 "RTP/RTCP module"); 2848 "RTP/RTCP module");
3212 } 2849 }
3213 2850
3214 stats.fractionLost = statistics.fraction_lost; 2851 stats.fractionLost = statistics.fraction_lost;
3215 stats.cumulativeLost = statistics.cumulative_lost; 2852 stats.cumulativeLost = statistics.cumulative_lost;
3216 stats.extendedMax = statistics.extended_max_sequence_number; 2853 stats.extendedMax = statistics.extended_max_sequence_number;
3217 stats.jitterSamples = statistics.jitter; 2854 stats.jitterSamples = statistics.jitter;
3218 2855
3219 // --- RTT 2856 // --- RTT
3220 stats.rttMs = GetRTT(true); 2857 stats.rttMs = GetRTT(true);
3221 2858
3222 // --- Data counters 2859 // --- Data counters
3223 2860
3224 size_t bytesSent(0); 2861 size_t bytesSent(0);
3225 uint32_t packetsSent(0); 2862 uint32_t packetsSent(0);
3226 size_t bytesReceived(0); 2863 size_t bytesReceived(0);
3227 uint32_t packetsReceived(0); 2864 uint32_t packetsReceived(0);
3228 2865
3229 if (statistician) { 2866 if (statistician) {
3230 statistician->GetDataCounters(&bytesReceived, &packetsReceived); 2867 statistician->GetDataCounters(&bytesReceived, &packetsReceived);
3231 } 2868 }
3232 2869
3233 if (_rtpRtcpModule->DataCountersRTP(&bytesSent, 2870 if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
3234 &packetsSent) != 0) 2871 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3235 { 2872 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
3236 WEBRTC_TRACE(kTraceWarning, kTraceVoice, 2873 " output will not be complete");
3237 VoEId(_instanceId, _channelId), 2874 }
3238 "GetRTPStatistics() failed to retrieve RTP datacounters =>"
3239 " output will not be complete");
3240 }
3241 2875
3242 stats.bytesSent = bytesSent; 2876 stats.bytesSent = bytesSent;
3243 stats.packetsSent = packetsSent; 2877 stats.packetsSent = packetsSent;
3244 stats.bytesReceived = bytesReceived; 2878 stats.bytesReceived = bytesReceived;
3245 stats.packetsReceived = packetsReceived; 2879 stats.packetsReceived = packetsReceived;
3246 2880
3247 // --- Timestamps 2881 // --- Timestamps
3248 { 2882 {
3249 rtc::CritScope lock(&ts_stats_lock_); 2883 rtc::CritScope lock(&ts_stats_lock_);
3250 stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_; 2884 stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
3251 } 2885 }
3252 return 0; 2886 return 0;
3253 } 2887 }
3254 2888
3255 int Channel::SetREDStatus(bool enable, int redPayloadtype) { 2889 int Channel::SetREDStatus(bool enable, int redPayloadtype) {
3256 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), 2890 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3257 "Channel::SetREDStatus()"); 2891 "Channel::SetREDStatus()");
3258 2892
3259 if (enable) { 2893 if (enable) {
3260 if (redPayloadtype < 0 || redPayloadtype > 127) { 2894 if (redPayloadtype < 0 || redPayloadtype > 127) {
3261 _engineStatisticsPtr->SetLastError( 2895 _engineStatisticsPtr->SetLastError(
3262 VE_PLTYPE_ERROR, kTraceError, 2896 VE_PLTYPE_ERROR, kTraceError,
(...skipping 11 matching lines...)
3274 2908
3275 if (audio_coding_->SetREDStatus(enable) != 0) { 2909 if (audio_coding_->SetREDStatus(enable) != 0) {
3276 _engineStatisticsPtr->SetLastError( 2910 _engineStatisticsPtr->SetLastError(
3277 VE_AUDIO_CODING_MODULE_ERROR, kTraceError, 2911 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3278 "SetREDStatus() failed to set RED state in the ACM"); 2912 "SetREDStatus() failed to set RED state in the ACM");
3279 return -1; 2913 return -1;
3280 } 2914 }
3281 return 0; 2915 return 0;
3282 } 2916 }
3283 2917
3284 int 2918 int Channel::GetREDStatus(bool& enabled, int& redPayloadtype) {
3285 Channel::GetREDStatus(bool& enabled, int& redPayloadtype) 2919 enabled = audio_coding_->REDStatus();
3286 { 2920 if (enabled) {
3287 enabled = audio_coding_->REDStatus(); 2921 int8_t payloadType = 0;
3288 if (enabled) 2922 if (_rtpRtcpModule->SendREDPayloadType(&payloadType) != 0) {
3289 { 2923 _engineStatisticsPtr->SetLastError(
3290 int8_t payloadType = 0; 2924 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3291 if (_rtpRtcpModule->SendREDPayloadType(&payloadType) != 0) { 2925 "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
3292 _engineStatisticsPtr->SetLastError( 2926 "module");
3293 VE_RTP_RTCP_MODULE_ERROR, kTraceError, 2927 return -1;
3294 "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
3295 "module");
3296 return -1;
3297 }
3298 redPayloadtype = payloadType;
3299 return 0;
3300 } 2928 }
2929 redPayloadtype = payloadType;
3301 return 0; 2930 return 0;
2931 }
2932 return 0;
3302 } 2933 }
3303 2934
3304 int Channel::SetCodecFECStatus(bool enable) { 2935 int Channel::SetCodecFECStatus(bool enable) {
3305 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), 2936 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3306 "Channel::SetCodecFECStatus()"); 2937 "Channel::SetCodecFECStatus()");
3307 2938
3308 if (audio_coding_->SetCodecFEC(enable) != 0) { 2939 if (audio_coding_->SetCodecFEC(enable) != 0) {
3309 _engineStatisticsPtr->SetLastError( 2940 _engineStatisticsPtr->SetLastError(
3310 VE_AUDIO_CODING_MODULE_ERROR, kTraceError, 2941 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3311 "SetCodecFECStatus() failed to set FEC state"); 2942 "SetCodecFECStatus() failed to set FEC state");
(...skipping 18 matching lines...)
3330 audio_coding_->EnableNack(maxNumberOfPackets); 2961 audio_coding_->EnableNack(maxNumberOfPackets);
3331 else 2962 else
3332 audio_coding_->DisableNack(); 2963 audio_coding_->DisableNack();
3333 } 2964 }
3334 2965
3335 // Called when we are missing one or more packets. 2966 // Called when we are missing one or more packets.
3336 int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) { 2967 int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
3337 return _rtpRtcpModule->SendNACK(sequence_numbers, length); 2968 return _rtpRtcpModule->SendNACK(sequence_numbers, length);
3338 } 2969 }
3339 2970
3340 uint32_t 2971 uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
3341 Channel::Demultiplex(const AudioFrame& audioFrame) 2972 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3342 { 2973 "Channel::Demultiplex()");
3343 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), 2974 _audioFrame.CopyFrom(audioFrame);
3344 "Channel::Demultiplex()"); 2975 _audioFrame.id_ = _channelId;
3345 _audioFrame.CopyFrom(audioFrame); 2976 return 0;
3346 _audioFrame.id_ = _channelId;
3347 return 0;
3348 } 2977 }
3349 2978
3350 void Channel::Demultiplex(const int16_t* audio_data, 2979 void Channel::Demultiplex(const int16_t* audio_data,
3351 int sample_rate, 2980 int sample_rate,
3352 size_t number_of_frames, 2981 size_t number_of_frames,
3353 size_t number_of_channels) { 2982 size_t number_of_channels) {
3354 CodecInst codec; 2983 CodecInst codec;
3355 GetSendCodec(codec); 2984 GetSendCodec(codec);
3356 2985
3357 // Never upsample or upmix the capture signal here. This should be done at the 2986 // Never upsample or upmix the capture signal here. This should be done at the
3358 // end of the send chain. 2987 // end of the send chain.
3359 _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate); 2988 _audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
3360 _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels); 2989 _audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
3361 RemixAndResample(audio_data, number_of_frames, number_of_channels, 2990 RemixAndResample(audio_data, number_of_frames, number_of_channels,
3362 sample_rate, &input_resampler_, &_audioFrame); 2991 sample_rate, &input_resampler_, &_audioFrame);
3363 } 2992 }
3364 2993
3365 uint32_t 2994 uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
3366 Channel::PrepareEncodeAndSend(int mixingFrequency) 2995 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3367 { 2996 "Channel::PrepareEncodeAndSend()");
3368 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3369 "Channel::PrepareEncodeAndSend()");
3370 2997
3371 if (_audioFrame.samples_per_channel_ == 0) 2998 if (_audioFrame.samples_per_channel_ == 0) {
3372 { 2999 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3373 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId), 3000 "Channel::PrepareEncodeAndSend() invalid audio frame");
3374 "Channel::PrepareEncodeAndSend() invalid audio frame"); 3001 return 0xFFFFFFFF;
3375 return 0xFFFFFFFF; 3002 }
3003
3004 if (channel_state_.Get().input_file_playing) {
3005 MixOrReplaceAudioWithFile(mixingFrequency);
3006 }
3007
3008 bool is_muted = Mute(); // Cache locally as Mute() takes a lock.
3009 if (is_muted) {
3010 AudioFrameOperations::Mute(_audioFrame);
3011 }
3012
3013 if (channel_state_.Get().input_external_media) {
3014 rtc::CritScope cs(&_callbackCritSect);
3015 const bool isStereo = (_audioFrame.num_channels_ == 2);
3016 if (_inputExternalMediaCallbackPtr) {
3017 _inputExternalMediaCallbackPtr->Process(
3018 _channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
3019 _audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
3020 isStereo);
3376 } 3021 }
3022 }
3377 3023
3378 if (channel_state_.Get().input_file_playing) 3024 InsertInbandDtmfTone();
3379 { 3025
3380 MixOrReplaceAudioWithFile(mixingFrequency); 3026 if (_includeAudioLevelIndication) {
3027 size_t length =
3028 _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
3029 if (is_muted) {
3030 rms_level_.ProcessMuted(length);
3031 } else {
3032 rms_level_.Process(_audioFrame.data_, length);
3381 } 3033 }
3034 }
3382 3035
3383 bool is_muted = Mute(); // Cache locally as Mute() takes a lock. 3036 return 0;
3384 if (is_muted) {
3385 AudioFrameOperations::Mute(_audioFrame);
3386 }
3387
3388 if (channel_state_.Get().input_external_media)
3389 {
3390 rtc::CritScope cs(&_callbackCritSect);
3391 const bool isStereo = (_audioFrame.num_channels_ == 2);
3392 if (_inputExternalMediaCallbackPtr)
3393 {
3394 _inputExternalMediaCallbackPtr->Process(
3395 _channelId,
3396 kRecordingPerChannel,
3397 (int16_t*)_audioFrame.data_,
3398 _audioFrame.samples_per_channel_,
3399 _audioFrame.sample_rate_hz_,
3400 isStereo);
3401 }
3402 }
3403
3404 InsertInbandDtmfTone();
3405
3406 if (_includeAudioLevelIndication) {
3407 size_t length =
3408 _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
3409 if (is_muted) {
3410 rms_level_.ProcessMuted(length);
3411 } else {
3412 rms_level_.Process(_audioFrame.data_, length);
3413 }
3414 }
3415
3416 return 0;
3417 } 3037 }
3418 3038
3419 uint32_t 3039 uint32_t Channel::EncodeAndSend() {
3420 Channel::EncodeAndSend() 3040 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3421 { 3041 "Channel::EncodeAndSend()");
3422 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3423 "Channel::EncodeAndSend()");
3424 3042
3425 assert(_audioFrame.num_channels_ <= 2); 3043 assert(_audioFrame.num_channels_ <= 2);
3426 if (_audioFrame.samples_per_channel_ == 0) 3044 if (_audioFrame.samples_per_channel_ == 0) {
3427 { 3045 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3428 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId), 3046 "Channel::EncodeAndSend() invalid audio frame");
3429 "Channel::EncodeAndSend() invalid audio frame"); 3047 return 0xFFFFFFFF;
3430 return 0xFFFFFFFF; 3048 }
3431 }
3432 3049
3433 _audioFrame.id_ = _channelId; 3050 _audioFrame.id_ = _channelId;
3434 3051
3435 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz. 3052 // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
3436 3053
3437 // The ACM resamples internally. 3054 // The ACM resamples internally.
3438 _audioFrame.timestamp_ = _timeStamp; 3055 _audioFrame.timestamp_ = _timeStamp;
3439 // This call will trigger AudioPacketizationCallback::SendData if encoding 3056 // This call will trigger AudioPacketizationCallback::SendData if encoding
3440 // is done and payload is ready for packetization and transmission. 3057 // is done and payload is ready for packetization and transmission.
3441 // Otherwise, it will return without invoking the callback. 3058 // Otherwise, it will return without invoking the callback.
3442 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) 3059 if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
3443 { 3060 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
3444 WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId), 3061 "Channel::EncodeAndSend() ACM encoding failed");
3445 "Channel::EncodeAndSend() ACM encoding failed"); 3062 return 0xFFFFFFFF;
3446 return 0xFFFFFFFF; 3063 }
3447 }
3448 3064
3449 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_); 3065 _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
3450 return 0; 3066 return 0;
3451 } 3067 }
3452 3068
3453 void Channel::DisassociateSendChannel(int channel_id) { 3069 void Channel::DisassociateSendChannel(int channel_id) {
3454 rtc::CritScope lock(&assoc_send_channel_lock_); 3070 rtc::CritScope lock(&assoc_send_channel_lock_);
3455 Channel* channel = associate_send_channel_.channel(); 3071 Channel* channel = associate_send_channel_.channel();
3456 if (channel && channel->ChannelId() == channel_id) { 3072 if (channel && channel->ChannelId() == channel_id) {
3457 // If this channel is associated with a send channel of the specified 3073 // If this channel is associated with a send channel of the specified
3458 // Channel ID, disassociate with it. 3074 // Channel ID, disassociate with it.
3459 ChannelOwner ref(NULL); 3075 ChannelOwner ref(NULL);
3460 associate_send_channel_ = ref; 3076 associate_send_channel_ = ref;
3461 } 3077 }
3462 } 3078 }
3463 3079
3464 int Channel::RegisterExternalMediaProcessing( 3080 int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
3465 ProcessingTypes type, 3081 VoEMediaProcess& processObject) {
3466 VoEMediaProcess& processObject) 3082 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3467 { 3083 "Channel::RegisterExternalMediaProcessing()");
3468 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3469 "Channel::RegisterExternalMediaProcessing()");
3470 3084
3471 rtc::CritScope cs(&_callbackCritSect); 3085 rtc::CritScope cs(&_callbackCritSect);
3472 3086
3473 if (kPlaybackPerChannel == type) 3087 if (kPlaybackPerChannel == type) {
3474 { 3088 if (_outputExternalMediaCallbackPtr) {
3475 if (_outputExternalMediaCallbackPtr) 3089 _engineStatisticsPtr->SetLastError(
3476 { 3090 VE_INVALID_OPERATION, kTraceError,
3477 _engineStatisticsPtr->SetLastError( 3091 "Channel::RegisterExternalMediaProcessing() "
3478 VE_INVALID_OPERATION, kTraceError, 3092 "output external media already enabled");
3479 "Channel::RegisterExternalMediaProcessing() " 3093 return -1;
3480 "output external media already enabled");
3481 return -1;
3482 }
3483 _outputExternalMediaCallbackPtr = &processObject;
3484 _outputExternalMedia = true;
3485 } 3094 }
3486 else if (kRecordingPerChannel == type) 3095 _outputExternalMediaCallbackPtr = &processObject;
3487 { 3096 _outputExternalMedia = true;
3488 if (_inputExternalMediaCallbackPtr) 3097 } else if (kRecordingPerChannel == type) {
3489 { 3098 if (_inputExternalMediaCallbackPtr) {
3490 _engineStatisticsPtr->SetLastError( 3099 _engineStatisticsPtr->SetLastError(
3491 VE_INVALID_OPERATION, kTraceError, 3100 VE_INVALID_OPERATION, kTraceError,
3492 "Channel::RegisterExternalMediaProcessing() " 3101 "Channel::RegisterExternalMediaProcessing() "
3493 "output external media already enabled"); 3102 "output external media already enabled");
3494 return -1; 3103 return -1;
3495 }
3496 _inputExternalMediaCallbackPtr = &processObject;
3497 channel_state_.SetInputExternalMedia(true);
3498 } 3104 }
3499 return 0; 3105 _inputExternalMediaCallbackPtr = &processObject;
3106 channel_state_.SetInputExternalMedia(true);
3107 }
3108 return 0;
3500 } 3109 }
3501 3110
3502 int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) 3111 int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
3503 { 3112 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3504 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 3113 "Channel::DeRegisterExternalMediaProcessing()");
3505 "Channel::DeRegisterExternalMediaProcessing()");
3506 3114
3507 rtc::CritScope cs(&_callbackCritSect); 3115 rtc::CritScope cs(&_callbackCritSect);
3508 3116
3509 if (kPlaybackPerChannel == type) 3117 if (kPlaybackPerChannel == type) {
3510 { 3118 if (!_outputExternalMediaCallbackPtr) {
3511 if (!_outputExternalMediaCallbackPtr) 3119 _engineStatisticsPtr->SetLastError(
3512 { 3120 VE_INVALID_OPERATION, kTraceWarning,
3513 _engineStatisticsPtr->SetLastError( 3121 "Channel::DeRegisterExternalMediaProcessing() "
3514 VE_INVALID_OPERATION, kTraceWarning, 3122 "output external media already disabled");
3515 "Channel::DeRegisterExternalMediaProcessing() " 3123 return 0;
3516 "output external media already disabled");
3517 return 0;
3518 }
3519 _outputExternalMedia = false;
3520 _outputExternalMediaCallbackPtr = NULL;
3521 } 3124 }
3522 else if (kRecordingPerChannel == type) 3125 _outputExternalMedia = false;
3523 { 3126 _outputExternalMediaCallbackPtr = NULL;
3524 if (!_inputExternalMediaCallbackPtr) 3127 } else if (kRecordingPerChannel == type) {
3525 { 3128 if (!_inputExternalMediaCallbackPtr) {
3526 _engineStatisticsPtr->SetLastError( 3129 _engineStatisticsPtr->SetLastError(
3527 VE_INVALID_OPERATION, kTraceWarning, 3130 VE_INVALID_OPERATION, kTraceWarning,
3528 "Channel::DeRegisterExternalMediaProcessing() " 3131 "Channel::DeRegisterExternalMediaProcessing() "
3529 "input external media already disabled"); 3132 "input external media already disabled");
3530 return 0; 3133 return 0;
3531 }
3532 channel_state_.SetInputExternalMedia(false);
3533 _inputExternalMediaCallbackPtr = NULL;
3534 } 3134 }
3135 channel_state_.SetInputExternalMedia(false);
3136 _inputExternalMediaCallbackPtr = NULL;
3137 }
3535 3138
3536 return 0; 3139 return 0;
3537 } 3140 }
3538 3141
3539 int Channel::SetExternalMixing(bool enabled) { 3142 int Channel::SetExternalMixing(bool enabled) {
3540 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 3143 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3541 "Channel::SetExternalMixing(enabled=%d)", enabled); 3144 "Channel::SetExternalMixing(enabled=%d)", enabled);
3542 3145
3543 if (channel_state_.Get().playing) 3146 if (channel_state_.Get().playing) {
3544 { 3147 _engineStatisticsPtr->SetLastError(
3545 _engineStatisticsPtr->SetLastError( 3148 VE_INVALID_OPERATION, kTraceError,
3546 VE_INVALID_OPERATION, kTraceError, 3149 "Channel::SetExternalMixing() "
3547 "Channel::SetExternalMixing() " 3150 "external mixing cannot be changed while playing.");
3548 "external mixing cannot be changed while playing."); 3151 return -1;
3549 return -1; 3152 }
3550 }
3551 3153
3552 _externalMixing = enabled; 3154 _externalMixing = enabled;
3553 3155
3554 return 0; 3156 return 0;
3555 } 3157 }
3556 3158
3557 int 3159 int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
3558 Channel::GetNetworkStatistics(NetworkStatistics& stats) 3160 return audio_coding_->GetNetworkStatistics(&stats);
3559 {
3560 return audio_coding_->GetNetworkStatistics(&stats);
3561 } 3161 }
3562 3162
3563 void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const { 3163 void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
3564 audio_coding_->GetDecodingCallStatistics(stats); 3164 audio_coding_->GetDecodingCallStatistics(stats);
3565 } 3165 }
3566 3166
3567 bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms, 3167 bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
3568 int* playout_buffer_delay_ms) const { 3168 int* playout_buffer_delay_ms) const {
3569 rtc::CritScope lock(&video_sync_lock_); 3169 rtc::CritScope lock(&video_sync_lock_);
3570 if (_average_jitter_buffer_delay_us == 0) { 3170 if (_average_jitter_buffer_delay_us == 0) {
3571 return false; 3171 return false;
3572 } 3172 }
3573 *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 + 3173 *jitter_buffer_delay_ms =
3574 _recPacketDelayMs; 3174 (_average_jitter_buffer_delay_us + 500) / 1000 + _recPacketDelayMs;
3575 *playout_buffer_delay_ms = playout_delay_ms_; 3175 *playout_buffer_delay_ms = playout_delay_ms_;
3576 return true; 3176 return true;
3577 } 3177 }
3578 3178
3579 uint32_t Channel::GetDelayEstimate() const { 3179 uint32_t Channel::GetDelayEstimate() const {
3580 int jitter_buffer_delay_ms = 0; 3180 int jitter_buffer_delay_ms = 0;
3581 int playout_buffer_delay_ms = 0; 3181 int playout_buffer_delay_ms = 0;
3582 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms); 3182 GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
3583 return jitter_buffer_delay_ms + playout_buffer_delay_ms; 3183 return jitter_buffer_delay_ms + playout_buffer_delay_ms;
3584 } 3184 }
3585 3185
3586 int Channel::LeastRequiredDelayMs() const { 3186 int Channel::LeastRequiredDelayMs() const {
3587 return audio_coding_->LeastRequiredDelayMs(); 3187 return audio_coding_->LeastRequiredDelayMs();
3588 } 3188 }
3589 3189
3590 int 3190 int Channel::SetMinimumPlayoutDelay(int delayMs) {
3591 Channel::SetMinimumPlayoutDelay(int delayMs) 3191 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3592 { 3192 "Channel::SetMinimumPlayoutDelay()");
3593 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), 3193 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
3594 "Channel::SetMinimumPlayoutDelay()"); 3194 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
3595 if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) || 3195 _engineStatisticsPtr->SetLastError(
3596 (delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) 3196 VE_INVALID_ARGUMENT, kTraceError,
3597 { 3197 "SetMinimumPlayoutDelay() invalid min delay");
3598 _engineStatisticsPtr->SetLastError( 3198 return -1;
3599 VE_INVALID_ARGUMENT, kTraceError, 3199 }
3600 "SetMinimumPlayoutDelay() invalid min delay"); 3200 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
3601 return -1; 3201 _engineStatisticsPtr->SetLastError(
3602 } 3202 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3603 if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) 3203 "SetMinimumPlayoutDelay() failed to set min playout delay");
3604 { 3204 return -1;
3605 _engineStatisticsPtr->SetLastError( 3205 }
3606 VE_AUDIO_CODING_MODULE_ERROR, kTraceError, 3206 return 0;
3607 "SetMinimumPlayoutDelay() failed to set min playout delay");
3608 return -1;
3609 }
3610 return 0;
3611 } 3207 }
3612 3208
3613 int Channel::GetPlayoutTimestamp(unsigned int& timestamp) { 3209 int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
3614 uint32_t playout_timestamp_rtp = 0; 3210 uint32_t playout_timestamp_rtp = 0;
3615 { 3211 {
3616 rtc::CritScope lock(&video_sync_lock_); 3212 rtc::CritScope lock(&video_sync_lock_);
3617 playout_timestamp_rtp = playout_timestamp_rtp_; 3213 playout_timestamp_rtp = playout_timestamp_rtp_;
3618 } 3214 }
3619 if (playout_timestamp_rtp == 0) { 3215 if (playout_timestamp_rtp == 0) {
3620 _engineStatisticsPtr->SetLastError( 3216 _engineStatisticsPtr->SetLastError(
3621 VE_CANNOT_RETRIEVE_VALUE, kTraceError, 3217 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3622 "GetPlayoutTimestamp() failed to retrieve timestamp"); 3218 "GetPlayoutTimestamp() failed to retrieve timestamp");
3623 return -1; 3219 return -1;
3624 } 3220 }
3625 timestamp = playout_timestamp_rtp; 3221 timestamp = playout_timestamp_rtp;
3626 return 0; 3222 return 0;
3627 } 3223 }
3628 3224
3629 int Channel::SetInitTimestamp(unsigned int timestamp) { 3225 int Channel::SetInitTimestamp(unsigned int timestamp) {
(...skipping 13 matching lines...)
3643 "Channel::SetInitSequenceNumber()"); 3239 "Channel::SetInitSequenceNumber()");
3644 if (channel_state_.Get().sending) { 3240 if (channel_state_.Get().sending) {
3645 _engineStatisticsPtr->SetLastError( 3241 _engineStatisticsPtr->SetLastError(
3646 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending"); 3242 VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
3647 return -1; 3243 return -1;
3648 } 3244 }
3649 _rtpRtcpModule->SetSequenceNumber(sequenceNumber); 3245 _rtpRtcpModule->SetSequenceNumber(sequenceNumber);
3650 return 0; 3246 return 0;
3651 } 3247 }
3652 3248
3653 int 3249 int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
3654 Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const 3250 RtpReceiver** rtp_receiver) const {
3655 { 3251 *rtpRtcpModule = _rtpRtcpModule.get();
3656 *rtpRtcpModule = _rtpRtcpModule.get(); 3252 *rtp_receiver = rtp_receiver_.get();
3657 *rtp_receiver = rtp_receiver_.get(); 3253 return 0;
3658 return 0;
3659 } 3254 }
3660 3255
3661 // TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use 3256 // TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
3662 // a shared helper. 3257 // a shared helper.
3663 int32_t 3258 int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
3664 Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
3665 {
3666 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]); 3259 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
3667 size_t fileSamples(0); 3260 size_t fileSamples(0);
3668 3261
3669 { 3262 {
3670 rtc::CritScope cs(&_fileCritSect); 3263 rtc::CritScope cs(&_fileCritSect);
3671 3264
3672 if (_inputFilePlayerPtr == NULL) 3265 if (_inputFilePlayerPtr == NULL) {
3673 { 3266 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3674 WEBRTC_TRACE(kTraceWarning, kTraceVoice, 3267 "Channel::MixOrReplaceAudioWithFile() fileplayer"
3675 VoEId(_instanceId, _channelId), 3268 " doesnt exist");
3676 "Channel::MixOrReplaceAudioWithFile() fileplayer" 3269 return -1;
3677 " doesnt exist");
3678 return -1;
3679 }
3680
3681 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
3682 fileSamples,
3683 mixingFrequency) == -1)
3684 {
3685 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3686 VoEId(_instanceId, _channelId),
3687 "Channel::MixOrReplaceAudioWithFile() file mixing "
3688 "failed");
3689 return -1;
3690 }
3691 if (fileSamples == 0)
3692 {
3693 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3694 VoEId(_instanceId, _channelId),
3695 "Channel::MixOrReplaceAudioWithFile() file is ended");
3696 return 0;
3697 }
3698 } 3270 }
3699 3271
3700 assert(_audioFrame.samples_per_channel_ == fileSamples); 3272 if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(), fileSamples,
3273 mixingFrequency) == -1) {
3274 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3275 "Channel::MixOrReplaceAudioWithFile() file mixing "
3276 "failed");
3277 return -1;
3278 }
3279 if (fileSamples == 0) {
3280 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3281 "Channel::MixOrReplaceAudioWithFile() file is ended");
3282 return 0;
3283 }
3284 }
3701 3285
3702 if (_mixFileWithMicrophone) 3286 assert(_audioFrame.samples_per_channel_ == fileSamples);
3703 {
3704 // Currently file stream is always mono.
3705 // TODO(xians): Change the code when FilePlayer supports real stereo.
3706 MixWithSat(_audioFrame.data_,
3707 _audioFrame.num_channels_,
3708 fileBuffer.get(),
3709 1,
3710 fileSamples);
3711 }
3712 else
3713 {
3714 // Replace ACM audio with file.
3715 // Currently file stream is always mono.
3716 // TODO(xians): Change the code when FilePlayer supports real stereo.
3717 _audioFrame.UpdateFrame(_channelId,
3718 0xFFFFFFFF,
3719 fileBuffer.get(),
3720 fileSamples,
3721 mixingFrequency,
3722 AudioFrame::kNormalSpeech,
3723 AudioFrame::kVadUnknown,
3724 1);
3725 3287
3726 } 3288 if (_mixFileWithMicrophone) {
3727 return 0; 3289 // Currently file stream is always mono.
3290 // TODO(xians): Change the code when FilePlayer supports real stereo.
3291 MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
3292 1, fileSamples);
3293 } else {
3294 // Replace ACM audio with file.
3295 // Currently file stream is always mono.
3296 // TODO(xians): Change the code when FilePlayer supports real stereo.
3297 _audioFrame.UpdateFrame(
3298 _channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
3299 AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
3300 }
3301 return 0;
3728 } 3302 }
3729 3303
3730 int32_t 3304 int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
3731 Channel::MixAudioWithFile(AudioFrame& audioFrame, 3305 assert(mixingFrequency <= 48000);
3732 int mixingFrequency)
3733 {
3734 assert(mixingFrequency <= 48000);
3735 3306
3736 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[960]); 3307 rtc::scoped_ptr<int16_t[]> fileBuffer(new int16_t[960]);
3737 size_t fileSamples(0); 3308 size_t fileSamples(0);
3738 3309
3739 { 3310 {
3740 rtc::CritScope cs(&_fileCritSect); 3311 rtc::CritScope cs(&_fileCritSect);
3741 3312
3742 if (_outputFilePlayerPtr == NULL) 3313 if (_outputFilePlayerPtr == NULL) {
3743 { 3314 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3744 WEBRTC_TRACE(kTraceWarning, kTraceVoice, 3315 "Channel::MixAudioWithFile() file mixing failed");
3745 VoEId(_instanceId, _channelId), 3316 return -1;
3746 "Channel::MixAudioWithFile() file mixing failed");
3747 return -1;
3748 }
3749
3750 // We should get the frequency we ask for.
3751 if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
3752 fileSamples,
3753 mixingFrequency) == -1)
3754 {
3755 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3756 VoEId(_instanceId, _channelId),
3757 "Channel::MixAudioWithFile() file mixing failed");
3758 return -1;
3759 }
3760 } 3317 }
3761 3318
3762 if (audioFrame.samples_per_channel_ == fileSamples) 3319 // We should get the frequency we ask for.
3763 { 3320 if (_outputFilePlayerPtr->Get10msAudioFromFile(
3764 // Currently file stream is always mono. 3321 fileBuffer.get(), fileSamples, mixingFrequency) == -1) {
3765 // TODO(xians): Change the code when FilePlayer supports real stereo. 3322 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3766 MixWithSat(audioFrame.data_, 3323 "Channel::MixAudioWithFile() file mixing failed");
3767 audioFrame.num_channels_, 3324 return -1;
3768 fileBuffer.get(),
3769 1,
3770 fileSamples);
3771 } 3325 }
3772 else 3326 }
3773 { 3327
3774 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId), 3328 if (audioFrame.samples_per_channel_ == fileSamples) {
3775 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS ") != " 3329 // Currently file stream is always mono.
3776 "fileSamples(%" PRIuS ")", 3330 // TODO(xians): Change the code when FilePlayer supports real stereo.
3777 audioFrame.samples_per_channel_, fileSamples); 3331 MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
3778 return -1; 3332 fileSamples);
3333 } else {
3334 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3335 "Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
3336 ") != "
3337 "fileSamples(%" PRIuS ")",
3338 audioFrame.samples_per_channel_, fileSamples);
3339 return -1;
3340 }
3341
3342 return 0;
3343 }
3344
3345 int Channel::InsertInbandDtmfTone() {
3346 // Check if we should start a new tone.
3347 if (_inbandDtmfQueue.PendingDtmf() && !_inbandDtmfGenerator.IsAddingTone() &&
3348 _inbandDtmfGenerator.DelaySinceLastTone() >
3349 kMinTelephoneEventSeparationMs) {
3350 int8_t eventCode(0);
3351 uint16_t lengthMs(0);
3352 uint8_t attenuationDb(0);
3353
3354 eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
3355 _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
3356 if (_playInbandDtmfEvent) {
3357 // Add tone to output mixer using a reduced length to minimize
3358 // risk of echo.
3359 _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80, attenuationDb);
3360 }
3361 }
3362
3363 if (_inbandDtmfGenerator.IsAddingTone()) {
3364 uint16_t frequency(0);
3365 _inbandDtmfGenerator.GetSampleRate(frequency);
3366
3367 if (frequency != _audioFrame.sample_rate_hz_) {
3368 // Update sample rate of Dtmf tone since the mixing frequency
3369 // has changed.
3370 _inbandDtmfGenerator.SetSampleRate(
3371 (uint16_t)(_audioFrame.sample_rate_hz_));
3372 // Reset the tone to be added taking the new sample rate into
3373 // account.
3374 _inbandDtmfGenerator.ResetTone();
3779 } 3375 }
3780 3376
3781 return 0; 3377 int16_t toneBuffer[320];
3782 } 3378 uint16_t toneSamples(0);
3783 3379 // Get 10ms tone segment and set time since last tone to zero
3784 int 3380 if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1) {
3785 Channel::InsertInbandDtmfTone() 3381 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3786 { 3382 "Channel::EncodeAndSend() inserting Dtmf failed");
3787 // Check if we should start a new tone. 3383 return -1;
3788 if (_inbandDtmfQueue.PendingDtmf() &&
3789 !_inbandDtmfGenerator.IsAddingTone() &&
3790 _inbandDtmfGenerator.DelaySinceLastTone() >
3791 kMinTelephoneEventSeparationMs)
3792 {
3793 int8_t eventCode(0);
3794 uint16_t lengthMs(0);
3795 uint8_t attenuationDb(0);
3796
3797 eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
3798 _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
3799 if (_playInbandDtmfEvent)
3800 {
3801 // Add tone to output mixer using a reduced length to minimize
3802 // risk of echo.
3803 _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
3804 attenuationDb);
3805 }
3806 } 3384 }
3807 3385
3808 if (_inbandDtmfGenerator.IsAddingTone()) 3386 // Replace mixed audio with DTMF tone.
3809 { 3387 for (size_t sample = 0; sample < _audioFrame.samples_per_channel_;
3810 uint16_t frequency(0); 3388 sample++) {
3811 _inbandDtmfGenerator.GetSampleRate(frequency); 3389 for (size_t channel = 0; channel < _audioFrame.num_channels_; channel++) {
3390 const size_t index = sample * _audioFrame.num_channels_ + channel;
3391 _audioFrame.data_[index] = toneBuffer[sample];
3392 }
3393 }
3812 3394
3813 if (frequency != _audioFrame.sample_rate_hz_) 3395 assert(_audioFrame.samples_per_channel_ == toneSamples);
3814 { 3396 } else {
3815 // Update sample rate of Dtmf tone since the mixing frequency 3397 // Add 10ms to "delay-since-last-tone" counter
3816 // has changed. 3398 _inbandDtmfGenerator.UpdateDelaySinceLastTone();
3817 _inbandDtmfGenerator.SetSampleRate( 3399 }
3818 (uint16_t) (_audioFrame.sample_rate_hz_)); 3400 return 0;
3819 // Reset the tone to be added taking the new sample rate into
3820 // account.
3821 _inbandDtmfGenerator.ResetTone();
3822 }
3823
3824 int16_t toneBuffer[320];
3825 uint16_t toneSamples(0);
3826 // Get 10ms tone segment and set time since last tone to zero
3827 if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
3828 {
3829 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3830 VoEId(_instanceId, _channelId),
3831 "Channel::EncodeAndSend() inserting Dtmf failed");
3832 return -1;
3833 }
3834
3835 // Replace mixed audio with DTMF tone.
3836 for (size_t sample = 0;
3837 sample < _audioFrame.samples_per_channel_;
3838 sample++)
3839 {
3840 for (size_t channel = 0;
3841 channel < _audioFrame.num_channels_;
3842 channel++)
3843 {
3844 const size_t index =
3845 sample * _audioFrame.num_channels_ + channel;
3846 _audioFrame.data_[index] = toneBuffer[sample];
3847 }
3848 }
3849
3850 assert(_audioFrame.samples_per_channel_ == toneSamples);
3851 } else
3852 {
3853 // Add 10ms to "delay-since-last-tone" counter
3854 _inbandDtmfGenerator.UpdateDelaySinceLastTone();
3855 }
3856 return 0;
3857 } 3401 }
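Note: the replacement loop above duplicates each mono tone sample across every channel of the interleaved frame, using the usual index = sample * num_channels_ + channel layout. A small sketch of the same idea with illustrative names, not part of this change:

    #include <cstddef>
    #include <cstdint>

    // Overwrite an interleaved audio buffer with a mono tone, copying each
    // tone sample into all channels (same indexing as the DTMF insertion
    // above).
    static void ReplaceWithMonoTone(int16_t* interleaved,
                                    size_t samples_per_channel,
                                    size_t num_channels,
                                    const int16_t* tone) {
      for (size_t sample = 0; sample < samples_per_channel; ++sample) {
        for (size_t channel = 0; channel < num_channels; ++channel) {
          interleaved[sample * num_channels + channel] = tone[sample];
        }
      }
    }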
3858 3402
3859 void Channel::UpdatePlayoutTimestamp(bool rtcp) { 3403 void Channel::UpdatePlayoutTimestamp(bool rtcp) {
3860 uint32_t playout_timestamp = 0; 3404 uint32_t playout_timestamp = 0;
3861 3405
3862 if (audio_coding_->PlayoutTimestamp(&playout_timestamp) == -1) { 3406 if (audio_coding_->PlayoutTimestamp(&playout_timestamp) == -1) {
3863 // This can happen if this channel has not been received any RTP packet. In 3407 // This can happen if this channel has not been received any RTP packet. In
3864 // this case, NetEq is not capable of computing playout timestamp. 3408 // this case, NetEq is not capable of computing playout timestamp.
3865 return; 3409 return;
3866 } 3410 }
3867 3411
3868 uint16_t delay_ms = 0; 3412 uint16_t delay_ms = 0;
3869 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) { 3413 if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
3870 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId), 3414 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3871 "Channel::UpdatePlayoutTimestamp() failed to read playout" 3415 "Channel::UpdatePlayoutTimestamp() failed to read playout"
3872 " delay from the ADM"); 3416 " delay from the ADM");
3873 _engineStatisticsPtr->SetLastError( 3417 _engineStatisticsPtr->SetLastError(
3874 VE_CANNOT_RETRIEVE_VALUE, kTraceError, 3418 VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3875 "UpdatePlayoutTimestamp() failed to retrieve playout delay"); 3419 "UpdatePlayoutTimestamp() failed to retrieve playout delay");
3876 return; 3420 return;
3877 } 3421 }
3878 3422
3879 jitter_buffer_playout_timestamp_ = playout_timestamp; 3423 jitter_buffer_playout_timestamp_ = playout_timestamp;
3880 3424
3881 // Remove the playout delay. 3425 // Remove the playout delay.
3882 playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000)); 3426 playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));
3883 3427
3884 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), 3428 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3885 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu", 3429 "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
3886 playout_timestamp); 3430 playout_timestamp);
3887 3431
3888 { 3432 {
3889 rtc::CritScope lock(&video_sync_lock_); 3433 rtc::CritScope lock(&video_sync_lock_);
3890 if (rtcp) { 3434 if (rtcp) {
3891 playout_timestamp_rtcp_ = playout_timestamp; 3435 playout_timestamp_rtcp_ = playout_timestamp;
3892 } else { 3436 } else {
3893 playout_timestamp_rtp_ = playout_timestamp; 3437 playout_timestamp_rtp_ = playout_timestamp;
3894 } 3438 }
3895 playout_delay_ms_ = delay_ms; 3439 playout_delay_ms_ = delay_ms;
3896 } 3440 }
3897 } 3441 }
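Note: "removing the playout delay" above means converting the device delay from milliseconds to RTP ticks at the playout frequency and subtracting it from the NetEq playout timestamp. A hypothetical helper showing just that arithmetic (not part of the WebRTC API):

    #include <cstdint>

    // Subtract a playout delay, given in milliseconds, from an RTP timestamp
    // by first converting it to timestamp ticks at the playout frequency.
    static uint32_t RemovePlayoutDelay(uint32_t playout_timestamp,
                                       uint16_t delay_ms,
                                       int32_t playout_frequency_hz) {
      return playout_timestamp -
             static_cast<uint32_t>(delay_ms) *
                 static_cast<uint32_t>(playout_frequency_hz / 1000);
    }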
3898 3442
3899 // Called for incoming RTP packets after successful RTP header parsing. 3443 // Called for incoming RTP packets after successful RTP header parsing.
3900 void Channel::UpdatePacketDelay(uint32_t rtp_timestamp, 3444 void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
3901 uint16_t sequence_number) { 3445 uint16_t sequence_number) {
3902 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), 3446 WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
3903 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)", 3447 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
3904 rtp_timestamp, sequence_number); 3448 rtp_timestamp, sequence_number);
3905 3449
3906 // Get frequency of last received payload 3450 // Get frequency of last received payload
3907 int rtp_receive_frequency = GetPlayoutFrequency(); 3451 int rtp_receive_frequency = GetPlayoutFrequency();
3908 3452
3909 // |jitter_buffer_playout_timestamp_| updated in UpdatePlayoutTimestamp for 3453 // |jitter_buffer_playout_timestamp_| updated in UpdatePlayoutTimestamp for
3910 // every incoming packet. 3454 // every incoming packet.
3911 uint32_t timestamp_diff_ms = (rtp_timestamp - 3455 uint32_t timestamp_diff_ms =
3912 jitter_buffer_playout_timestamp_) / (rtp_receive_frequency / 1000); 3456 (rtp_timestamp - jitter_buffer_playout_timestamp_) /
3457 (rtp_receive_frequency / 1000);
3913 if (!IsNewerTimestamp(rtp_timestamp, jitter_buffer_playout_timestamp_) || 3458 if (!IsNewerTimestamp(rtp_timestamp, jitter_buffer_playout_timestamp_) ||
3914 timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) { 3459 timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
3915 // If |jitter_buffer_playout_timestamp_| is newer than the incoming RTP 3460 // If |jitter_buffer_playout_timestamp_| is newer than the incoming RTP
3916 // timestamp, the resulting difference is negative, but is set to zero. 3461 // timestamp, the resulting difference is negative, but is set to zero.
3917 // This can happen when a network glitch causes a packet to arrive late, 3462 // This can happen when a network glitch causes a packet to arrive late,
3918 // and during long comfort noise periods with clock drift. 3463 // and during long comfort noise periods with clock drift.
3919 timestamp_diff_ms = 0; 3464 timestamp_diff_ms = 0;
3920 } 3465 }
3921 3466
3922 uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) / 3467 uint16_t packet_delay_ms =
3923 (rtp_receive_frequency / 1000); 3468 (rtp_timestamp - _previousTimestamp) / (rtp_receive_frequency / 1000);
3924 3469
3925 _previousTimestamp = rtp_timestamp; 3470 _previousTimestamp = rtp_timestamp;
3926 3471
3927 if (timestamp_diff_ms == 0) return; 3472 if (timestamp_diff_ms == 0)
3473 return;
3928 3474
3929 { 3475 {
3930 rtc::CritScope lock(&video_sync_lock_); 3476 rtc::CritScope lock(&video_sync_lock_);
3931 3477
3932 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) { 3478 if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
3933 _recPacketDelayMs = packet_delay_ms; 3479 _recPacketDelayMs = packet_delay_ms;
3934 } 3480 }
3935 3481
3936 if (_average_jitter_buffer_delay_us == 0) { 3482 if (_average_jitter_buffer_delay_us == 0) {
3937 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000; 3483 _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
3938 return; 3484 return;
3939 } 3485 }
3940 3486
3941 // Filter average delay value using exponential filter (alpha is 3487 // Filter average delay value using exponential filter (alpha is
3942 // 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces 3488 // 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces
3943 // risk of rounding error) and compensate for it in GetDelayEstimate() 3489 // risk of rounding error) and compensate for it in GetDelayEstimate()
3944 // later. 3490 // later.
3945 _average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 + 3491 _average_jitter_buffer_delay_us =
3946 1000 * timestamp_diff_ms + 500) / 8; 3492 (_average_jitter_buffer_delay_us * 7 + 1000 * timestamp_diff_ms + 500) /
3493 8;
3947 } 3494 }
3948 } 3495 }
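Note: the averaging at the end of UpdatePacketDelay() is an exponential moving average with alpha = 7/8, kept in microseconds (with +500 for rounding) to reduce rounding error. A minimal sketch of one filter step, with an illustrative helper name:

    #include <cstdint>

    // One update of the exponential filter used for the average jitter
    // buffer delay: avg_us <- (7 * avg_us + 1000 * diff_ms + 500) / 8.
    static int64_t FilterAverageDelayUs(int64_t average_delay_us,
                                        uint32_t timestamp_diff_ms) {
      if (average_delay_us == 0)
        return static_cast<int64_t>(timestamp_diff_ms) * 1000;  // First sample.
      return (average_delay_us * 7 +
              1000 * static_cast<int64_t>(timestamp_diff_ms) + 500) / 8;
    }

With alpha = 7/8 the previous estimate dominates, so a single late packet only nudges the reported average delay rather than resetting it.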
3949 3496
3950 void 3497 void Channel::RegisterReceiveCodecsToRTPModule() {
3951 Channel::RegisterReceiveCodecsToRTPModule() 3498 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3952 { 3499 "Channel::RegisterReceiveCodecsToRTPModule()");
3953 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3954 "Channel::RegisterReceiveCodecsToRTPModule()");
3955 3500
3956 CodecInst codec; 3501 CodecInst codec;
3957 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs(); 3502 const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
3958 3503
3959 for (int idx = 0; idx < nSupportedCodecs; idx++) 3504 for (int idx = 0; idx < nSupportedCodecs; idx++) {
3960 { 3505 // Open up the RTP/RTCP receiver for all supported codecs
3961 // Open up the RTP/RTCP receiver for all supported codecs 3506 if ((audio_coding_->Codec(idx, &codec) == -1) ||
3962 if ((audio_coding_->Codec(idx, &codec) == -1) || 3507 (rtp_receiver_->RegisterReceivePayload(
3963 (rtp_receiver_->RegisterReceivePayload( 3508 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3964 codec.plname, 3509 (codec.rate < 0) ? 0 : codec.rate) == -1)) {
3965 codec.pltype, 3510 WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
3966 codec.plfreq, 3511 "Channel::RegisterReceiveCodecsToRTPModule() unable"
3967 codec.channels, 3512 " to register %s (%d/%d/%" PRIuS
3968 (codec.rate < 0) ? 0 : codec.rate) == -1)) 3513 "/%d) to RTP/RTCP "
3969 { 3514 "receiver",
3970 WEBRTC_TRACE(kTraceWarning, 3515 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3971 kTraceVoice, 3516 codec.rate);
3972 VoEId(_instanceId, _channelId), 3517 } else {
3973 "Channel::RegisterReceiveCodecsToRTPModule() unable" 3518 WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3974 " to register %s (%d/%d/%" PRIuS "/%d) to RTP/RTCP " 3519 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3975 "receiver", 3520 "(%d/%d/%" PRIuS
3976 codec.plname, codec.pltype, codec.plfreq, 3521 "/%d) has been added to the RTP/RTCP "
3977 codec.channels, codec.rate); 3522 "receiver",
3978 } 3523 codec.plname, codec.pltype, codec.plfreq, codec.channels,
3979 else 3524 codec.rate);
3980 {
3981 WEBRTC_TRACE(kTraceInfo,
3982 kTraceVoice,
3983 VoEId(_instanceId, _channelId),
3984 "Channel::RegisterReceiveCodecsToRTPModule() %s "
3985 "(%d/%d/%" PRIuS "/%d) has been added to the RTP/RTCP "
3986 "receiver",
3987 codec.plname, codec.pltype, codec.plfreq,
3988 codec.channels, codec.rate);
3989 }
3990 } 3525 }
3526 }
3991 } 3527 }
3992 3528
3993 // Assuming this method is called with valid payload type. 3529 // Assuming this method is called with valid payload type.
3994 int Channel::SetRedPayloadType(int red_payload_type) { 3530 int Channel::SetRedPayloadType(int red_payload_type) {
3995 CodecInst codec; 3531 CodecInst codec;
3996 bool found_red = false; 3532 bool found_red = false;
3997 3533
3998 // Get default RED settings from the ACM database 3534 // Get default RED settings from the ACM database
3999 const int num_codecs = AudioCodingModule::NumberOfCodecs(); 3535 const int num_codecs = AudioCodingModule::NumberOfCodecs();
4000 for (int idx = 0; idx < num_codecs; idx++) { 3536 for (int idx = 0; idx < num_codecs; idx++) {
(...skipping 21 matching lines...)
4022 3558
4023 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) { 3559 if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
4024 _engineStatisticsPtr->SetLastError( 3560 _engineStatisticsPtr->SetLastError(
4025 VE_RTP_RTCP_MODULE_ERROR, kTraceError, 3561 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4026 "SetRedPayloadType() RED registration in RTP/RTCP module failed"); 3562 "SetRedPayloadType() RED registration in RTP/RTCP module failed");
4027 return -1; 3563 return -1;
4028 } 3564 }
4029 return 0; 3565 return 0;
4030 } 3566 }
4031 3567
4032 int Channel::SetSendRtpHeaderExtension(bool enable, RTPExtensionType type, 3568 int Channel::SetSendRtpHeaderExtension(bool enable,
3569 RTPExtensionType type,
4033 unsigned char id) { 3570 unsigned char id) {
4034 int error = 0; 3571 int error = 0;
4035 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type); 3572 _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
4036 if (enable) { 3573 if (enable) {
4037 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id); 3574 error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
4038 } 3575 }
4039 return error; 3576 return error;
4040 } 3577 }
4041 3578
4042 int32_t Channel::GetPlayoutFrequency() { 3579 int32_t Channel::GetPlayoutFrequency() {
(...skipping 50 matching lines...)
4093 } 3630 }
4094 if (it == report_blocks.end()) { 3631 if (it == report_blocks.end()) {
4095 // We have not received packets with SSRC matching the report blocks. 3632 // We have not received packets with SSRC matching the report blocks.
4096 // To calculate RTT we try with the SSRC of the first report block. 3633 // To calculate RTT we try with the SSRC of the first report block.
4097 // This is very important for send-only channels where we don't know 3634 // This is very important for send-only channels where we don't know
4098 // the SSRC of the other end. 3635 // the SSRC of the other end.
4099 remoteSSRC = report_blocks[0].remoteSSRC; 3636 remoteSSRC = report_blocks[0].remoteSSRC;
4100 } 3637 }
4101 3638
4102 int64_t avg_rtt = 0; 3639 int64_t avg_rtt = 0;
4103 int64_t max_rtt= 0; 3640 int64_t max_rtt = 0;
4104 int64_t min_rtt = 0; 3641 int64_t min_rtt = 0;
4105 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) 3642 if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
4106 != 0) { 3643 0) {
4107 return 0; 3644 return 0;
4108 } 3645 }
4109 return rtt; 3646 return rtt;
4110 } 3647 }
4111 3648
4112 } // namespace voe 3649 } // namespace voe
4113 } // namespace webrtc 3650 } // namespace webrtc