| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 376 matching lines...) |
| 387 new IFChannelBuffer(input_num_frames_, num_proc_channels_)); | 387 new IFChannelBuffer(input_num_frames_, num_proc_channels_)); |
| 388 } | 388 } |
| 389 activity_ = frame->vad_activity_; | 389 activity_ = frame->vad_activity_; |
| 390 | 390 |
| 391 int16_t* const* deinterleaved; | 391 int16_t* const* deinterleaved; |
| 392 if (input_num_frames_ == proc_num_frames_) { | 392 if (input_num_frames_ == proc_num_frames_) { |
| 393 deinterleaved = data_->ibuf()->channels(); | 393 deinterleaved = data_->ibuf()->channels(); |
| 394 } else { | 394 } else { |
| 395 deinterleaved = input_buffer_->ibuf()->channels(); | 395 deinterleaved = input_buffer_->ibuf()->channels(); |
| 396 } | 396 } |
| 397 // TODO(yujo): handle muted frames more efficiently. |
| 397 if (num_proc_channels_ == 1) { | 398 if (num_proc_channels_ == 1) { |
| 398 // Downmix and deinterleave simultaneously. | 399 // Downmix and deinterleave simultaneously. |
| 399 DownmixInterleavedToMono(frame->data_, input_num_frames_, | 400 DownmixInterleavedToMono(frame->data(), input_num_frames_, |
| 400 num_input_channels_, deinterleaved[0]); | 401 num_input_channels_, deinterleaved[0]); |
| 401 } else { | 402 } else { |
| 402 RTC_DCHECK_EQ(num_proc_channels_, num_input_channels_); | 403 RTC_DCHECK_EQ(num_proc_channels_, num_input_channels_); |
| 403 Deinterleave(frame->data_, | 404 Deinterleave(frame->data(), |
| 404 input_num_frames_, | 405 input_num_frames_, |
| 405 num_proc_channels_, | 406 num_proc_channels_, |
| 406 deinterleaved); | 407 deinterleaved); |
| 407 } | 408 } |
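The mono branch above folds all input channels into one while deinterleaving; the multi-channel branch requires matching channel counts and only deinterleaves. A minimal sketch of what the downmix step computes, assuming a straightforward per-frame average (an illustration, not WebRTC's actual DownmixInterleavedToMono):

#include <cstddef>
#include <cstdint>

// Illustrative downmix: average each interleaved frame's channels into
// one mono sample. The name and the rounding behavior are assumptions.
void DownmixInterleavedToMonoSketch(const int16_t* interleaved,
                                    size_t num_frames,
                                    size_t num_channels,
                                    int16_t* mono_out) {
  for (size_t i = 0; i < num_frames; ++i) {
    int32_t sum = 0;
    for (size_t ch = 0; ch < num_channels; ++ch)
      sum += interleaved[i * num_channels + ch];
    mono_out[i] =
        static_cast<int16_t>(sum / static_cast<int32_t>(num_channels));
  }
}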
| 408 | 409 |
| 409 // Resample. | 410 // Resample. |
| 410 if (input_num_frames_ != proc_num_frames_) { | 411 if (input_num_frames_ != proc_num_frames_) { |
| 411 for (size_t i = 0; i < num_proc_channels_; ++i) { | 412 for (size_t i = 0; i < num_proc_channels_; ++i) { |
| 412 input_resamplers_[i]->Resample(input_buffer_->fbuf_const()->channels()[i], | 413 input_resamplers_[i]->Resample(input_buffer_->fbuf_const()->channels()[i], |
| 413 input_num_frames_, | 414 input_num_frames_, |
| (...skipping 16 matching lines...) |
| 430 IFChannelBuffer* data_ptr = data_.get(); | 431 IFChannelBuffer* data_ptr = data_.get(); |
| 431 if (proc_num_frames_ != output_num_frames_) { | 432 if (proc_num_frames_ != output_num_frames_) { |
| 432 for (size_t i = 0; i < num_channels_; ++i) { | 433 for (size_t i = 0; i < num_channels_; ++i) { |
| 433 output_resamplers_[i]->Resample( | 434 output_resamplers_[i]->Resample( |
| 434 data_->fbuf()->channels()[i], proc_num_frames_, | 435 data_->fbuf()->channels()[i], proc_num_frames_, |
| 435 output_buffer_->fbuf()->channels()[i], output_num_frames_); | 436 output_buffer_->fbuf()->channels()[i], output_num_frames_); |
| 436 } | 437 } |
| 437 data_ptr = output_buffer_.get(); | 438 data_ptr = output_buffer_.get(); |
| 438 } | 439 } |
| 439 | 440 |
| 441 // TODO(yujo): handle muted frames more efficiently. |
| 440 if (frame->num_channels_ == num_channels_) { | 442 if (frame->num_channels_ == num_channels_) { |
| 441 Interleave(data_ptr->ibuf()->channels(), output_num_frames_, num_channels_, | 443 Interleave(data_ptr->ibuf()->channels(), output_num_frames_, num_channels_, |
| 442 frame->data_); | 444 frame->mutable_data()); |
| 443 } else { | 445 } else { |
| 444 UpmixMonoToInterleaved(data_ptr->ibuf()->channels()[0], output_num_frames_, | 446 UpmixMonoToInterleaved(data_ptr->ibuf()->channels()[0], output_num_frames_, |
| 445 frame->num_channels_, frame->data_); | 447 frame->num_channels_, frame->mutable_data()); |
| 446 } | 448 } |
| 447 } | 449 } |
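Both hunks in this function swap direct access to frame->data_ for frame->mutable_data(), and DeinterleaveFrom above now reads through frame->data(); together with the TODO(yujo) notes on muted frames, this suggests AudioFrame gained a const read accessor that can serve silence for muted frames and a mutating accessor that un-mutes. A minimal sketch of that assumed accessor pair (the details are guesses, not the real webrtc::AudioFrame):

#include <cstddef>
#include <cstdint>
#include <cstring>

class AudioFrameSketch {
 public:
  static constexpr size_t kMaxSamples = 960;  // hypothetical capacity

  // Read path: a muted frame yields zeros without touching the buffer.
  const int16_t* data() const {
    static const int16_t kZeroes[kMaxSamples] = {};
    return muted_ ? kZeroes : data_;
  }

  // Write path: clear and un-mute before handing out writable storage.
  int16_t* mutable_data() {
    if (muted_) {
      std::memset(data_, 0, sizeof(data_));
      muted_ = false;
    }
    return data_;
  }

 private:
  int16_t data_[kMaxSamples] = {};
  bool muted_ = true;
};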
| 448 | 450 |
| 449 void AudioBuffer::CopyLowPassToReference() { | 451 void AudioBuffer::CopyLowPassToReference() { |
| 450 reference_copied_ = true; | 452 reference_copied_ = true; |
| 451 if (!low_pass_reference_channels_.get() || | 453 if (!low_pass_reference_channels_.get() || |
| 452 low_pass_reference_channels_->num_channels() != num_channels_) { | 454 low_pass_reference_channels_->num_channels() != num_channels_) { |
| 453 low_pass_reference_channels_.reset( | 455 low_pass_reference_channels_.reset( |
| 454 new ChannelBuffer<int16_t>(num_split_frames_, | 456 new ChannelBuffer<int16_t>(num_split_frames_, |
| 455 num_proc_channels_)); | 457 num_proc_channels_)); |
| 456 } | 458 } |
| 457 for (size_t i = 0; i < num_proc_channels_; i++) { | 459 for (size_t i = 0; i < num_proc_channels_; i++) { |
| 458 memcpy(low_pass_reference_channels_->channels()[i], | 460 memcpy(low_pass_reference_channels_->channels()[i], |
| 459 split_bands_const(i)[kBand0To8kHz], | 461 split_bands_const(i)[kBand0To8kHz], |
| 460 low_pass_reference_channels_->num_frames_per_band() * | 462 low_pass_reference_channels_->num_frames_per_band() * |
| 461 sizeof(split_bands_const(i)[kBand0To8kHz][0])); | 463 sizeof(split_bands_const(i)[kBand0To8kHz][0])); |
| 462 } | 464 } |
| 463 } | 465 } |
| 464 | 466 |
| 465 void AudioBuffer::SplitIntoFrequencyBands() { | 467 void AudioBuffer::SplitIntoFrequencyBands() { |
| 466 splitting_filter_->Analysis(data_.get(), split_data_.get()); | 468 splitting_filter_->Analysis(data_.get(), split_data_.get()); |
| 467 } | 469 } |
| 468 | 470 |
| 469 void AudioBuffer::MergeFrequencyBands() { | 471 void AudioBuffer::MergeFrequencyBands() { |
| 470 splitting_filter_->Synthesis(split_data_.get(), data_.get()); | 472 splitting_filter_->Synthesis(split_data_.get(), data_.get()); |
| 471 } | 473 } |
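Analysis and Synthesis are inverse passes through the same splitting filter, so the usual call pattern is split, process each band, merge. A hypothetical usage sketch (the per-band processing step is a placeholder, not part of this file):

// Hypothetical call pattern around the two methods above.
void ProcessInBandsSketch(webrtc::AudioBuffer* buffer, size_t num_channels) {
  buffer->SplitIntoFrequencyBands();  // Analysis: full band -> sub-bands.
  for (size_t ch = 0; ch < num_channels; ++ch) {
    // e.g. operate on buffer->split_bands(ch)[kBand0To8kHz] here.
  }
  buffer->MergeFrequencyBands();  // Synthesis: sub-bands -> full band.
}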
| 472 | 474 |
| 473 } // namespace webrtc | 475 } // namespace webrtc |