| Index: webrtc/modules/video_processing/test/video_processing_unittest.cc
|
| diff --git a/webrtc/modules/video_processing/test/video_processing_unittest.cc b/webrtc/modules/video_processing/test/video_processing_unittest.cc
|
| index 8c1154acad379fe92746110866e92c6e71e1f00c..9e61b51884f75a41532eaf242319314ee2481b38 100644
|
| --- a/webrtc/modules/video_processing/test/video_processing_unittest.cc
|
| +++ b/webrtc/modules/video_processing/test/video_processing_unittest.cc
|
| @@ -15,7 +15,6 @@
|
| #include <memory>
|
| #include <string>
|
|
|
| -#include "webrtc/base/keep_ref_until_done.h"
|
| #include "webrtc/base/timeutils.h"
|
| #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
|
| #include "webrtc/test/testsupport/fileutils.h"
|
| @@ -34,25 +33,24 @@
|
| int target_height,
|
| VideoProcessing* vpm,
|
| const VideoFrame* out_frame);
|
| -rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
|
| - const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
|
| - int source_width,
|
| - int source_height,
|
| - int offset_x,
|
| - int offset_y,
|
| - int cropped_width,
|
| - int cropped_height);
|
| +static void CropFrame(const uint8_t* source_data,
|
| + int source_width,
|
| + int source_height,
|
| + int offset_x,
|
| + int offset_y,
|
| + int cropped_width,
|
| + int cropped_height,
|
| + VideoFrame* cropped_frame);
|
| // The |source_data| is cropped and scaled to |target_width| x |target_height|,
|
| // and then scaled back to the expected cropped size. |expected_psnr| is used to
|
| // verify basic quality, and is set to be ~0.1/0.05dB lower than actual PSNR
|
| // verified under the same conditions.
|
| -static void TestSize(
|
| - const VideoFrame& source_frame,
|
| - const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
|
| - int target_width,
|
| - int target_height,
|
| - double expected_psnr,
|
| - VideoProcessing* vpm);
|
| +static void TestSize(const VideoFrame& source_frame,
|
| + const VideoFrame& cropped_source_frame,
|
| + int target_width,
|
| + int target_height,
|
| + double expected_psnr,
|
| + VideoProcessing* vpm);
|
| static void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
|
| const VideoFrame& processed);
|
|
|
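Note on the hunk above: the helpers move from buffer-based to frame-based cropping, and the comment on TestSize describes the round trip (crop the source to the target aspect ratio, scale to target_width x target_height, scale back, then compare against the cropped source with a PSNR threshold). As a worked example of the center-crop geometry using numbers from this very patch: a 100x50 target has a 2:1 aspect, so the largest center-aligned crop of the 352x288 source is 352x176 with a vertical offset of (288 - 176) / 2 = 56. The sketch below is a hypothetical helper of mine (CenterCropForAspect is not part of the patch); the test hard-codes its crop rectangles and keeps offsets even, so a couple of its calls differ from this formula by one pixel.

    // Hypothetical helper: largest center-aligned crop of |src_w| x |src_h|
    // matching the aspect ratio of |dst_w| x |dst_h|.
    struct Crop { int offset_x; int offset_y; int width; int height; };

    Crop CenterCropForAspect(int src_w, int src_h, int dst_w, int dst_h) {
      // Compare aspect ratios without floating point: src_w/src_h vs dst_w/dst_h.
      if (src_w * dst_h > dst_w * src_h) {
        // Source is wider than the target: crop the sides.
        int w = src_h * dst_w / dst_h;
        return {(src_w - w) / 2, 0, w, src_h};
      }
      // Source is taller than the target: crop top and bottom.
      int h = src_w * dst_h / dst_w;
      return {0, (src_h - h) / 2, src_w, h};
    }
    // CenterCropForAspect(352, 288, 100, 50)  -> {0, 56, 352, 176}
    // CenterCropForAspect(352, 288, 480, 640) -> {68, 0, 216, 288}
    // Both match CropFrame(...) calls further down in this patch.
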
| @@ -70,6 +68,15 @@
|
| vp_ = VideoProcessing::Create();
|
| ASSERT_TRUE(vp_ != NULL);
|
|
|
| + video_frame_.CreateEmptyFrame(width_, height_, width_,
|
| + half_width_, half_width_);
|
| + // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
|
| + memset(video_frame_.video_frame_buffer()->MutableDataY(), 0,
|
| + video_frame_.allocated_size(kYPlane));
|
| + memset(video_frame_.video_frame_buffer()->MutableDataU(), 0,
|
| + video_frame_.allocated_size(kUPlane));
|
| + memset(video_frame_.video_frame_buffer()->MutableDataV(), 0,
|
| + video_frame_.allocated_size(kVPlane));
|
| const std::string video_file =
|
| webrtc::test::ResourcePath("foreman_cif", "yuv");
|
| source_file_ = fopen(video_file.c_str(), "rb");
|
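For reference on the fixture setup above: the I420 plane arithmetic behind width_, half_width_, allocated_size(kYPlane/kUPlane/kVPlane) and the frame_length_ used later, assuming the usual foreman_cif dimensions of 352x288 and tightly packed planes. The constant names below are mine, not the fixture's (its member initializers are outside this hunk).

    #include <cstddef>

    // Plane sizes for a packed I420 CIF frame (sketch).
    constexpr int kWidth = 352;
    constexpr int kHeight = 288;
    constexpr int kHalfWidth = (kWidth + 1) / 2;     // 176, chroma stride
    constexpr int kHalfHeight = (kHeight + 1) / 2;   // 144
    constexpr size_t kSizeY = kWidth * kHeight;           // 101376 bytes
    constexpr size_t kSizeUV = kHalfWidth * kHalfHeight;  //  25344 bytes
    constexpr size_t kFrameLength = kSizeY + 2 * kSizeUV; // 152064 = w*h*3/2
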
| @@ -102,18 +109,11 @@
|
| VideoFrame* out_frame = NULL;
|
| // Set rescaling => output frame != NULL.
|
| vp_->SetInputFrameResampleMode(kFastRescaling);
|
| -
|
| - rtc::scoped_refptr<webrtc::I420Buffer> buffer =
|
| - I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
|
| -
|
| - // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
|
| - buffer->InitializeData();
|
| - VideoFrame video_frame(buffer, 0, 0, webrtc::kVideoRotation_0);
|
| -
|
| - PreprocessFrameAndVerify(video_frame, resolution, resolution, vp_, out_frame);
|
| + PreprocessFrameAndVerify(video_frame_, resolution, resolution, vp_,
|
| + out_frame);
|
| // No rescaling=> output frame = NULL.
|
| vp_->SetInputFrameResampleMode(kNoRescaling);
|
| - EXPECT_TRUE(vp_->PreprocessFrame(video_frame) != nullptr);
|
| + EXPECT_TRUE(vp_->PreprocessFrame(video_frame_) != nullptr);
|
| }
|
|
|
| #if defined(WEBRTC_IOS)
|
| @@ -133,15 +133,15 @@
|
| vp_->EnableTemporalDecimation(false);
|
|
|
| // Reading test frame
|
| - rtc::scoped_refptr<webrtc::I420Buffer> buffer =
|
| - I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
|
| -
|
| - ASSERT_EQ(static_cast<size_t>(size_y_),
|
| - fread(buffer->MutableDataY(), 1, size_y_, source_file_));
|
| - ASSERT_EQ(static_cast<size_t>(size_uv_),
|
| - fread(buffer->MutableDataU(), 1, size_uv_, source_file_));
|
| - ASSERT_EQ(static_cast<size_t>(size_uv_),
|
| - fread(buffer->MutableDataV(), 1, size_uv_, source_file_));
|
| + std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
|
| + ASSERT_EQ(frame_length_,
|
| + fread(video_buffer.get(), 1, frame_length_, source_file_));
|
| + // Using ConvertToI420 to add stride to the image.
|
| + EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
|
| + 0, kVideoRotation_0, &video_frame_));
|
| + // Cropped source frame that will contain the expected visible region.
|
| + VideoFrame cropped_source_frame;
|
| + cropped_source_frame.CopyFrame(video_frame_);
|
|
|
| for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
|
| // Initiate test timer.
|
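On "Using ConvertToI420 to add stride to the image": the raw .yuv file is tightly packed (stride equals width), while a VideoFrame's buffer may use larger, aligned strides, so the conversion copies the packed planes into the strided layout. A minimal sketch of what that copy does for a single plane, assuming a hypothetical helper of mine (the test itself goes through the libyuv-backed ConvertToI420):

    #include <string.h>
    #include <stdint.h>

    // Copy a tightly packed plane (stride == width) into a destination plane
    // that may have a larger stride, row by row.
    void CopyPackedPlaneToStrided(const uint8_t* src, int width, int height,
                                  uint8_t* dst, int dst_stride) {
      for (int row = 0; row < height; ++row) {
        memcpy(dst + row * dst_stride, src + row * width, width);
      }
    }
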
| @@ -149,37 +149,48 @@
|
|
|
| // Init the sourceFrame with a timestamp.
|
| int64_t time_start_ms = time_start / rtc::kNumNanosecsPerMillisec;
|
| - VideoFrame video_frame(buffer, time_start_ms * 90, time_start_ms,
|
| - webrtc::kVideoRotation_0);
|
| + video_frame_.set_render_time_ms(time_start_ms);
|
| + video_frame_.set_timestamp(time_start_ms * 90);
|
|
|
| // Test scaling to different sizes: source is of |width|/|height| = 352/288.
|
| // Pure scaling:
|
| - TestSize(video_frame, buffer, width_ / 4, height_ / 4, 25.2, vp_);
|
| - TestSize(video_frame, buffer, width_ / 2, height_ / 2, 28.1, vp_);
|
| + TestSize(video_frame_, video_frame_, width_ / 4, height_ / 4, 25.2, vp_);
|
| + TestSize(video_frame_, video_frame_, width_ / 2, height_ / 2, 28.1, vp_);
|
| // No resampling:
|
| - TestSize(video_frame, buffer, width_, height_, -1, vp_);
|
| - TestSize(video_frame, buffer, 2 * width_, 2 * height_, 32.2, vp_);
|
| + TestSize(video_frame_, video_frame_, width_, height_, -1, vp_);
|
| + TestSize(video_frame_, video_frame_, 2 * width_, 2 * height_, 32.2, vp_);
|
|
|
| // Scaling and cropping. The cropped source frame is the largest center
|
| // aligned region that can be used from the source while preserving aspect
|
| // ratio.
|
| - TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 56, 352, 176),
|
| - 100, 50, 24.0, vp_);
|
| - TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 30, 352, 225),
|
| - 400, 256, 31.3, vp_);
|
| - TestSize(video_frame, CropBuffer(buffer, width_, height_, 68, 0, 216, 288),
|
| - 480, 640, 32.15, vp_);
|
| - TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 12, 352, 264),
|
| - 960, 720, 32.2, vp_);
|
| - TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 44, 352, 198),
|
| - 1280, 720, 32.15, vp_);
|
| + CropFrame(video_buffer.get(), width_, height_, 0, 56, 352, 176,
|
| + &cropped_source_frame);
|
| + TestSize(video_frame_, cropped_source_frame, 100, 50, 24.0, vp_);
|
| +
|
| + CropFrame(video_buffer.get(), width_, height_, 0, 30, 352, 225,
|
| + &cropped_source_frame);
|
| + TestSize(video_frame_, cropped_source_frame, 400, 256, 31.3, vp_);
|
| +
|
| + CropFrame(video_buffer.get(), width_, height_, 68, 0, 216, 288,
|
| + &cropped_source_frame);
|
| + TestSize(video_frame_, cropped_source_frame, 480, 640, 32.15, vp_);
|
| +
|
| + CropFrame(video_buffer.get(), width_, height_, 0, 12, 352, 264,
|
| + &cropped_source_frame);
|
| + TestSize(video_frame_, cropped_source_frame, 960, 720, 32.2, vp_);
|
| +
|
| + CropFrame(video_buffer.get(), width_, height_, 0, 44, 352, 198,
|
| + &cropped_source_frame);
|
| + TestSize(video_frame_, cropped_source_frame, 1280, 720, 32.15, vp_);
|
|
|
| // Upsampling to odd size.
|
| - TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 26, 352, 233),
|
| - 501, 333, 32.05, vp_);
|
| + CropFrame(video_buffer.get(), width_, height_, 0, 26, 352, 233,
|
| + &cropped_source_frame);
|
| + TestSize(video_frame_, cropped_source_frame, 501, 333, 32.05, vp_);
|
| // Downsample to odd size.
|
| - TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 34, 352, 219),
|
| - 281, 175, 29.3, vp_);
|
| + CropFrame(video_buffer.get(), width_, height_, 0, 34, 352, 219,
|
| + &cropped_source_frame);
|
| + TestSize(video_frame_, cropped_source_frame, 281, 175, 29.3, vp_);
|
|
|
| // Stop timer.
|
| const int64_t runtime =
|
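On the timestamp lines in this hunk: set_render_time_ms takes wall-clock milliseconds, while set_timestamp takes an RTP timestamp, and video RTP runs on a 90 kHz clock, hence the `* 90` (90 ticks per millisecond). A tiny illustration with an arbitrary example value:

    #include <stdint.h>

    // 90 kHz RTP clock: 90 ticks per millisecond.
    const int64_t time_start_ms = 1234;  // example wall-clock value in ms
    const uint32_t rtp_timestamp =
        static_cast<uint32_t>(time_start_ms * 90);  // 111060
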
| @@ -218,32 +229,24 @@
|
| EXPECT_EQ(target_height, (out_frame)->height());
|
| }
|
|
|
| -rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
|
| - const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
|
| - int source_width,
|
| - int source_height,
|
| - int offset_x,
|
| - int offset_y,
|
| - int cropped_width,
|
| - int cropped_height) {
|
| - // Force even.
|
| - offset_x &= 1;
|
| - offset_y &= 1;
|
| -
|
| - size_t y_start = offset_x + offset_y * source_buffer->StrideY();
|
| - size_t u_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU();
|
| - size_t v_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU();
|
| -
|
| - return rtc::scoped_refptr<VideoFrameBuffer>(
|
| - new rtc::RefCountedObject<WrappedI420Buffer>(
|
| - cropped_width, cropped_height, source_buffer->DataY() + y_start,
|
| - source_buffer->StrideY(), source_buffer->DataU() + u_start,
|
| - source_buffer->StrideU(), source_buffer->DataV() + v_start,
|
| - source_buffer->StrideV(), rtc::KeepRefUntilDone(source_buffer)));
|
| +void CropFrame(const uint8_t* source_data,
|
| + int source_width,
|
| + int source_height,
|
| + int offset_x,
|
| + int offset_y,
|
| + int cropped_width,
|
| + int cropped_height,
|
| + VideoFrame* cropped_frame) {
|
| + cropped_frame->CreateEmptyFrame(cropped_width, cropped_height, cropped_width,
|
| + (cropped_width + 1) / 2,
|
| + (cropped_width + 1) / 2);
|
| + EXPECT_EQ(0,
|
| + ConvertToI420(kI420, source_data, offset_x, offset_y, source_width,
|
| + source_height, 0, kVideoRotation_0, cropped_frame));
|
| }
|
|
|
| void TestSize(const VideoFrame& source_frame,
|
| - const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
|
| + const VideoFrame& cropped_source_frame,
|
| int target_width,
|
| int target_height,
|
| double expected_psnr,
|
| @@ -260,14 +263,12 @@
|
| // Scale |resampled_source_frame| back to the source scale.
|
| VideoFrame resampled_source_frame;
|
| resampled_source_frame.CopyFrame(*out_frame);
|
| - PreprocessFrameAndVerify(resampled_source_frame,
|
| - cropped_source_buffer->width(),
|
| - cropped_source_buffer->height(), vpm, out_frame);
|
| + PreprocessFrameAndVerify(resampled_source_frame, cropped_source_frame.width(),
|
| + cropped_source_frame.height(), vpm, out_frame);
|
| WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame);
|
|
|
| // Compute PSNR against the cropped source frame and check expectation.
|
| - double psnr =
|
| - I420PSNR(*cropped_source_buffer, *out_frame->video_frame_buffer());
|
| + double psnr = I420PSNR(&cropped_source_frame, out_frame);
|
| EXPECT_GT(psnr, expected_psnr);
|
| printf(
|
| "PSNR: %f. PSNR is between source of size %d %d, and a modified "
|
|
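The final hunk switches the quality check from the buffer-based I420PSNR overload to the frame-based one; the threshold semantics are unchanged (per the comment near the top of the file, expected_psnr is set ~0.1/0.05 dB below the PSNR measured under the same conditions, and the -1 cases effectively disable the check since any PSNR exceeds it). For reference, PSNR here is the usual 8-bit definition, PSNR = 10 * log10(255^2 / MSE). A minimal single-plane sketch with a helper name of my own (the test's I420PSNR combines all three planes and presumably applies its own cap for identical content):

    #include <cmath>
    #include <stdint.h>

    // PSNR between two 8-bit planes of identical, packed dimensions.
    double PlanePsnr(const uint8_t* a, const uint8_t* b, int width, int height) {
      double sse = 0.0;
      for (int i = 0; i < width * height; ++i) {
        const double diff = static_cast<double>(a[i]) - b[i];
        sse += diff * diff;
      }
      if (sse == 0.0)
        return 128.0;  // arbitrary large value for identical planes (MSE == 0)
      const double mse = sse / (width * height);
      return 10.0 * std::log10(255.0 * 255.0 / mse);
    }
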
|