Index: webrtc/modules/video_processing/test/video_processing_unittest.cc
diff --git a/webrtc/modules/video_processing/test/video_processing_unittest.cc b/webrtc/modules/video_processing/test/video_processing_unittest.cc
index 0d18d0a4c891a47381e359994818dd3bb9330efb..b935e38c3c3d26ec14fcb9ee6ab9440a5f806910 100644
--- a/webrtc/modules/video_processing/test/video_processing_unittest.cc
+++ b/webrtc/modules/video_processing/test/video_processing_unittest.cc
@@ -17,6 +17,7 @@
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/test/frame_utils.h"
 #include "webrtc/test/testsupport/fileutils.h"
 
 namespace webrtc {
@@ -51,8 +52,6 @@ static void TestSize(const VideoFrame& source_frame,
                      int target_height,
                      double expected_psnr,
                      VideoProcessing* vpm);
-static bool CompareFrames(const webrtc::VideoFrame& frame1,
-                          const webrtc::VideoFrame& frame2);
 static void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
                                                    const VideoFrame& processed);
@@ -73,9 +72,12 @@ void VideoProcessingTest::SetUp() {
   video_frame_.CreateEmptyFrame(width_, height_, width_,
                                 half_width_, half_width_);
   // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
-  memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane));
-  memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane));
-  memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane));
+  memset(video_frame_.video_frame_buffer()->MutableDataY(), 0,
+         video_frame_.allocated_size(kYPlane));
+  memset(video_frame_.video_frame_buffer()->MutableDataU(), 0,
+         video_frame_.allocated_size(kUPlane));
+  memset(video_frame_.video_frame_buffer()->MutableDataV(), 0,
+         video_frame_.allocated_size(kVPlane));
   const std::string video_file =
       webrtc::test::ResourcePath("foreman_cif", "yuv");
   source_file_ = fopen(video_file.c_str(), "rb");
@@ -150,7 +152,7 @@ TEST_F(VideoProcessingTest, IdenticalResultsAfterReset) {
   vp_->GetFrameStats(video_frame2, &stats);
   EXPECT_GT(stats.num_pixels, 0u);
   ASSERT_EQ(0, vp_->Deflickering(&video_frame2, &stats));
-  EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
+  EXPECT_TRUE(webrtc::test::FramesEqual(video_frame_, video_frame2));
 
   ASSERT_EQ(frame_length_,
             fread(video_buffer.get(), 1, frame_length_, source_file_));
@@ -162,7 +164,7 @@ TEST_F(VideoProcessingTest, IdenticalResultsAfterReset) {
   ASSERT_EQ(0, vp_->BrightnessDetection(video_frame_, stats));
   ASSERT_EQ(0, vp_->BrightnessDetection(video_frame2, stats));
-  EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
+  EXPECT_TRUE(webrtc::test::FramesEqual(video_frame_, video_frame2));
 }
 
 #if defined(WEBRTC_IOS)
@@ -378,22 +380,6 @@ void TestSize(const VideoFrame& source_frame,
          target_height);
 }
 
-bool CompareFrames(const webrtc::VideoFrame& frame1,
-                   const webrtc::VideoFrame& frame2) {
-  for (int plane = 0; plane < webrtc::kNumOfPlanes; plane++) {
-    webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
-    int allocated_size1 = frame1.allocated_size(plane_type);
-    int allocated_size2 = frame2.allocated_size(plane_type);
-    if (allocated_size1 != allocated_size2)
-      return false;
-    const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
-    const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
-    if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
-      return false;
-  }
-  return true;
-}
-
 void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
                                             const VideoFrame& processed) {
   // Skip if writing to files is not enabled.
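
The EXPECT_TRUE checks above now rely on webrtc::test::FramesEqual() from webrtc/test/frame_utils.h, whose implementation is not part of this diff. As a rough, hypothetical sketch only, a comparison equivalent to the deleted CompareFrames() could be written against the const VideoFrameBuffer accessors that correspond to the MutableDataY/U/V calls this patch introduces; the names FramesEqualSketch and PlanesMatch, the include paths, and the row-by-row memcmp over DataY()/DataU()/DataV() with their strides are assumptions, not the actual frame_utils.h code.

// Hypothetical sketch only -- not the actual webrtc/test/frame_utils.h code.
// Include paths are approximate for this WebRTC revision.
#include <stdint.h>
#include <string.h>

#include "webrtc/video_frame.h"

namespace {

// Compare one plane row by row so that differing strides between the two
// buffers do not cause false mismatches.
bool PlanesMatch(const uint8_t* a, int stride_a,
                 const uint8_t* b, int stride_b,
                 int width, int height) {
  for (int row = 0; row < height; ++row) {
    if (memcmp(a + row * stride_a, b + row * stride_b, width) != 0)
      return false;
  }
  return true;
}

// Rough equivalent of the deleted CompareFrames(): a same-size check followed
// by a per-plane comparison of Y, U and V (U/V are half resolution for I420).
bool FramesEqualSketch(const webrtc::VideoFrame& f1,
                       const webrtc::VideoFrame& f2) {
  if (f1.width() != f2.width() || f1.height() != f2.height())
    return false;
  const rtc::scoped_refptr<webrtc::VideoFrameBuffer> b1 =
      f1.video_frame_buffer();
  const rtc::scoped_refptr<webrtc::VideoFrameBuffer> b2 =
      f2.video_frame_buffer();
  const int half_width = (f1.width() + 1) / 2;
  const int half_height = (f1.height() + 1) / 2;
  return PlanesMatch(b1->DataY(), b1->StrideY(), b2->DataY(), b2->StrideY(),
                     f1.width(), f1.height()) &&
         PlanesMatch(b1->DataU(), b1->StrideU(), b2->DataU(), b2->StrideU(),
                     half_width, half_height) &&
         PlanesMatch(b1->DataV(), b1->StrideV(), b2->DataV(), b2->StrideV(),
                     half_width, half_height);
}

}  // namespace

With the shared helper in place, webrtc::test::FramesEqual(video_frame_, video_frame2) is a drop-in replacement in the tests above, so the unit test no longer needs its file-local CompareFrames().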