Chromium Code Reviews

Unified Diff: webrtc/audio/utility/audio_frame_operations_unittest.cc

Issue 2750783004: Add mute state field to AudioFrame. (Closed)
Patch Set: don't return from Add() too early (created 3 years, 9 months ago)
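
For context: the diff below migrates the unit test from writing AudioFrame::data_ directly to the accessor pair AudioFrame::data() / AudioFrame::mutable_data(), which is what lets the frame carry a mute state. A minimal sketch of that accessor pattern, assuming a muted flag and a shared zero buffer; this is illustrative only, not the actual webrtc::AudioFrame implementation, and the class name and buffer size here are made up:

#include <cstddef>
#include <cstdint>
#include <cstring>

class AudioFrameSketch {
 public:
  static constexpr size_t kMaxDataSizeSamples = 3840;

  // Read access: a muted frame presents silence without ever having been
  // written to.
  const int16_t* data() const { return muted_ ? zeroed_data() : data_; }

  // Write access: handing out a writable pointer clears the buffer and
  // un-mutes the frame, since the caller is about to fill in real samples.
  int16_t* mutable_data() {
    if (muted_) {
      std::memset(data_, 0, sizeof(data_));
      muted_ = false;
    }
    return data_;
  }

  bool muted() const { return muted_; }

 private:
  // Shared all-zero buffer returned for muted frames.
  static const int16_t* zeroed_data() {
    static const int16_t zeros[kMaxDataSizeSamples] = {};
    return zeros;
  }

  int16_t data_[kMaxDataSizeSamples];
  bool muted_ = true;
};

With this split, test helpers that fill a frame take the mutable_data() path, while read-only checks go through data(), which is exactly the substitution the patch makes below.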
Index: webrtc/audio/utility/audio_frame_operations_unittest.cc
diff --git a/webrtc/audio/utility/audio_frame_operations_unittest.cc b/webrtc/audio/utility/audio_frame_operations_unittest.cc
index 096ea38d9d52855d94802bf7d3186e8912da6ed2..889a27c6b9567c3579e03f96620f2eca731d455a 100644
--- a/webrtc/audio/utility/audio_frame_operations_unittest.cc
+++ b/webrtc/audio/utility/audio_frame_operations_unittest.cc
@@ -32,24 +32,27 @@ void SetFrameData(int16_t ch1,
int16_t ch3,
int16_t ch4,
AudioFrame* frame) {
+ int16_t* frame_data = frame->mutable_data();
for (size_t i = 0; i < frame->samples_per_channel_ * 4; i += 4) {
- frame->data_[i] = ch1;
- frame->data_[i + 1] = ch2;
- frame->data_[i + 2] = ch3;
- frame->data_[i + 3] = ch4;
+ frame_data[i] = ch1;
+ frame_data[i + 1] = ch2;
+ frame_data[i + 2] = ch3;
+ frame_data[i + 3] = ch4;
}
}
void SetFrameData(int16_t left, int16_t right, AudioFrame* frame) {
+ int16_t* frame_data = frame->mutable_data();
for (size_t i = 0; i < frame->samples_per_channel_ * 2; i += 2) {
- frame->data_[i] = left;
- frame->data_[i + 1] = right;
+ frame_data[i] = left;
+ frame_data[i + 1] = right;
}
}
void SetFrameData(int16_t data, AudioFrame* frame) {
+ int16_t* frame_data = frame->mutable_data();
for (size_t i = 0; i < frame->samples_per_channel_; i++) {
- frame->data_[i] = data;
+ frame_data[i] = data;
}
}
@@ -57,9 +60,11 @@ void VerifyFramesAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) {
EXPECT_EQ(frame1.num_channels_, frame2.num_channels_);
EXPECT_EQ(frame1.samples_per_channel_,
frame2.samples_per_channel_);
+ const int16_t* frame1_data = frame1.data();
+ const int16_t* frame2_data = frame2.data();
for (size_t i = 0; i < frame1.samples_per_channel_ * frame1.num_channels_;
i++) {
- EXPECT_EQ(frame1.data_[i], frame2.data_[i]);
+ EXPECT_EQ(frame1_data[i], frame2_data[i]);
}
}
@@ -81,7 +86,7 @@ void InitFrame(AudioFrame* frame, size_t channels, size_t samples_per_channel,
int16_t GetChannelData(const AudioFrame& frame, size_t channel, size_t index) {
RTC_DCHECK_LT(channel, frame.num_channels_);
RTC_DCHECK_LT(index, frame.samples_per_channel_);
- return frame.data_[index * frame.num_channels_ + channel];
+ return frame.data()[index * frame.num_channels_ + channel];
}
void VerifyFrameDataBounds(const AudioFrame& frame, size_t channel, int16_t max,
@@ -122,8 +127,8 @@ TEST_F(AudioFrameOperationsTest, MonoToStereoBufferSucceeds) {
target_frame.num_channels_ = 2;
target_frame.samples_per_channel_ = frame_.samples_per_channel_;
- AudioFrameOperations::MonoToStereo(frame_.data_, frame_.samples_per_channel_,
- target_frame.data_);
+ AudioFrameOperations::MonoToStereo(frame_.data(), frame_.samples_per_channel_,
+ target_frame.mutable_data());
AudioFrame stereo_frame;
stereo_frame.samples_per_channel_ = 320;
@@ -155,8 +160,8 @@ TEST_F(AudioFrameOperationsTest, StereoToMonoBufferSucceeds) {
target_frame.num_channels_ = 1;
target_frame.samples_per_channel_ = frame_.samples_per_channel_;
- AudioFrameOperations::StereoToMono(frame_.data_, frame_.samples_per_channel_,
- target_frame.data_);
+ AudioFrameOperations::StereoToMono(frame_.data(), frame_.samples_per_channel_,
+ target_frame.mutable_data());
AudioFrame mono_frame;
mono_frame.samples_per_channel_ = 320;
@@ -204,8 +209,8 @@ TEST_F(AudioFrameOperationsTest, QuadToMonoBufferSucceeds) {
target_frame.num_channels_ = 1;
target_frame.samples_per_channel_ = frame_.samples_per_channel_;
- AudioFrameOperations::QuadToMono(frame_.data_, frame_.samples_per_channel_,
- target_frame.data_);
+ AudioFrameOperations::QuadToMono(frame_.data(), frame_.samples_per_channel_,
+ target_frame.mutable_data());
AudioFrame mono_frame;
mono_frame.samples_per_channel_ = 320;
mono_frame.num_channels_ = 1;
@@ -252,8 +257,8 @@ TEST_F(AudioFrameOperationsTest, QuadToStereoBufferSucceeds) {
target_frame.num_channels_ = 2;
target_frame.samples_per_channel_ = frame_.samples_per_channel_;
- AudioFrameOperations::QuadToStereo(frame_.data_, frame_.samples_per_channel_,
- target_frame.data_);
+ AudioFrameOperations::QuadToStereo(frame_.data(), frame_.samples_per_channel_,
+ target_frame.mutable_data());
AudioFrame stereo_frame;
stereo_frame.samples_per_channel_ = 320;
stereo_frame.num_channels_ = 2;
@@ -510,7 +515,7 @@ TEST_F(AudioFrameOperationsTest, AddingTwoFramesProducesTheirSum) {
SetFrameData(1000, &frame_to_add_to);
AudioFrameOperations::Add(frame_, &frame_to_add_to);
- SetFrameData(frame_.data_[0] + 1000, &frame_);
+ SetFrameData(frame_.data()[0] + 1000, &frame_);
VerifyFramesAreEqual(frame_, frame_to_add_to);
}
hlundin-webrtc 2017/03/16 14:47:48 It would be great if you could add some tests for
yujo 2017/03/16 23:37:21 Yep, planning to. Let me know whether you want to
yujo 2017/03/17 23:55:54 Done.
} // namespace
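
Regarding the comment thread above about adding tests for muted frames: a rough sketch of one such test, assuming AudioFrame exposes Mute(), muted(), and CopyFrom(), and assuming Add() treats a muted source frame as silence (which is how the patch set description reads). This is illustrative only, not necessarily the test that actually landed:

TEST_F(AudioFrameOperationsTest, AddingMutedFrameLeavesSumUnchanged) {
  AudioFrame frame_to_add_to;
  frame_to_add_to.samples_per_channel_ = frame_.samples_per_channel_;
  frame_to_add_to.num_channels_ = frame_.num_channels_;
  SetFrameData(1000, &frame_to_add_to);

  // Keep a copy of the expected result before the Add().
  AudioFrame expected;
  expected.CopyFrom(frame_to_add_to);

  // A muted frame should contribute silence, so adding it changes nothing.
  frame_.Mute();
  AudioFrameOperations::Add(frame_, &frame_to_add_to);

  EXPECT_FALSE(frame_to_add_to.muted());
  VerifyFramesAreEqual(expected, frame_to_add_to);
}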
