Chromium Code Reviews

Unified Diff: webrtc/modules/audio_processing/audio_buffer.cc

Issue 2750783004: Add mute state field to AudioFrame. (Closed)
Patch Set: Fix num_channels check in UpMix() Created 3 years, 9 months ago
 /*
  * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */

(...skipping 375 matching lines...)
          new IFChannelBuffer(input_num_frames_, num_proc_channels_));
   }
   activity_ = frame->vad_activity_;

   int16_t* const* deinterleaved;
   if (input_num_frames_ == proc_num_frames_) {
     deinterleaved = data_->ibuf()->channels();
   } else {
     deinterleaved = input_buffer_->ibuf()->channels();
   }
+  // TODO(yujo): handle muted frames more efficiently.
   if (num_proc_channels_ == 1) {
     // Downmix and deinterleave simultaneously.
-    DownmixInterleavedToMono(frame->data_, input_num_frames_,
+    DownmixInterleavedToMono(frame->data(), input_num_frames_,
                              num_input_channels_, deinterleaved[0]);
   } else {
     RTC_DCHECK_EQ(num_proc_channels_, num_input_channels_);
-    Deinterleave(frame->data_,
+    Deinterleave(frame->data(),
                  input_num_frames_,
                  num_proc_channels_,
                  deinterleaved);
   }

   // Resample.
   if (input_num_frames_ != proc_num_frames_) {
     for (size_t i = 0; i < num_proc_channels_; ++i) {
       input_resamplers_[i]->Resample(input_buffer_->fbuf_const()->channels()[i],
                                      input_num_frames_,
(...skipping 20 matching lines...)
           new IFChannelBuffer(output_num_frames_, num_channels_));
     }
     for (size_t i = 0; i < num_channels_; ++i) {
       output_resamplers_[i]->Resample(
           data_->fbuf()->channels()[i], proc_num_frames_,
           output_buffer_->fbuf()->channels()[i], output_num_frames_);
     }
     data_ptr = output_buffer_.get();
   }

+  // TODO(yujo): handle muted frames more efficiently.
   if (frame->num_channels_ == num_channels_) {
     Interleave(data_ptr->ibuf()->channels(), output_num_frames_, num_channels_,
-               frame->data_);
+               frame->mutable_data());
   } else {
     UpmixMonoToInterleaved(data_ptr->ibuf()->channels()[0], output_num_frames_,
-                           frame->num_channels_, frame->data_);
+                           frame->num_channels_, frame->mutable_data());
   }
 }

 void AudioBuffer::CopyLowPassToReference() {
   reference_copied_ = true;
   if (!low_pass_reference_channels_.get() ||
       low_pass_reference_channels_->num_channels() != num_channels_) {
     low_pass_reference_channels_.reset(
         new ChannelBuffer<int16_t>(num_split_frames_,
                                    num_proc_channels_));
   }
   for (size_t i = 0; i < num_proc_channels_; i++) {
     memcpy(low_pass_reference_channels_->channels()[i],
            split_bands_const(i)[kBand0To8kHz],
            low_pass_reference_channels_->num_frames_per_band() *
                sizeof(split_bands_const(i)[kBand0To8kHz][0]));
   }
 }

 void AudioBuffer::SplitIntoFrequencyBands() {
   splitting_filter_->Analysis(data_.get(), split_data_.get());
 }

 void AudioBuffer::MergeFrequencyBands() {
   splitting_filter_->Synthesis(split_data_.get(), data_.get());
 }

 }  // namespace webrtc
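
Note on the accessor change: the diff replaces direct reads and writes of frame->data_ with the data() and mutable_data() accessors that this issue introduces alongside the mute state field. Below is a minimal sketch of how such accessors could back a mute flag. The class name AudioFrameSketch, the kMaxDataSizeSamples value, and the zeroed-buffer behavior are illustrative assumptions, not code taken from this patch.

// Minimal sketch, assuming the accessor pattern implied by this diff.
// Everything here is illustrative; it is not the AudioFrame implementation.
#include <cstddef>
#include <cstdint>
#include <cstring>

class AudioFrameSketch {
 public:
  // Assumed capacity; the real constant lives in the AudioFrame header.
  static constexpr size_t kMaxDataSizeSamples = 3840;

  // Read access: a muted frame presents silence without having to clear its
  // own samples, so callers like DownmixInterleavedToMono()/Deinterleave()
  // keep taking a plain const pointer.
  const int16_t* data() const { return muted_ ? zeroed_data() : data_; }

  // Write access: callers that fill the frame (e.g. the Interleave() call in
  // this file) need real storage, so the frame leaves the muted state and its
  // buffer is cleared before being handed out.
  int16_t* mutable_data() {
    if (muted_) {
      std::memset(data_, 0, sizeof(data_));
      muted_ = false;
    }
    return data_;
  }

  void Mute() { muted_ = true; }
  bool muted() const { return muted_; }

 private:
  // Shared, always-zero buffer returned for muted frames.
  static const int16_t* zeroed_data() {
    static const int16_t kZeroData[kMaxDataSizeSamples] = {};
    return kZeroData;
  }

  int16_t data_[kMaxDataSizeSamples] = {};
  bool muted_ = false;
};

Under this pattern, the two TODO(yujo) comments in the diff mark places where a muted frame's silent buffer is still downmixed, deinterleaved, or interleaved; a follow-up could presumably short-circuit that work when the frame reports itself as muted.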
