| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 33 matching lines...) |
| 44 // The |source_data| is cropped and scaled to |target_width| x |target_height|, | 44 // The |source_data| is cropped and scaled to |target_width| x |target_height|, |
| 45 // and then scaled back to the expected cropped size. |expected_psnr| is used to | 45 // and then scaled back to the expected cropped size. |expected_psnr| is used to |
| 46 // verify basic quality, and is set to be ~0.1/0.05dB lower than actual PSNR | 46 // verify basic quality, and is set to be ~0.1/0.05dB lower than actual PSNR |
| 47 // verified under the same conditions. | 47 // verified under the same conditions. |
| 48 static void TestSize(const VideoFrame& source_frame, | 48 static void TestSize(const VideoFrame& source_frame, |
| 49 const VideoFrame& cropped_source_frame, | 49 const VideoFrame& cropped_source_frame, |
| 50 int target_width, | 50 int target_width, |
| 51 int target_height, | 51 int target_height, |
| 52 double expected_psnr, | 52 double expected_psnr, |
| 53 VideoProcessing* vpm); | 53 VideoProcessing* vpm); |
| 54 static bool CompareFrames(const webrtc::VideoFrame& frame1, | |
| 55 const webrtc::VideoFrame& frame2); | |
| 56 static void WriteProcessedFrameForVisualInspection(const VideoFrame& source, | 54 static void WriteProcessedFrameForVisualInspection(const VideoFrame& source, |
| 57 const VideoFrame& processed); | 55 const VideoFrame& processed); |
| 58 | 56 |
| 59 VideoProcessingTest::VideoProcessingTest() | 57 VideoProcessingTest::VideoProcessingTest() |
| 60 : vp_(NULL), | 58 : vp_(NULL), |
| 61 source_file_(NULL), | 59 source_file_(NULL), |
| 62 width_(352), | 60 width_(352), |
| 63 half_width_((width_ + 1) / 2), | 61 half_width_((width_ + 1) / 2), |
| 64 height_(288), | 62 height_(288), |
| 65 size_y_(width_ * height_), | 63 size_y_(width_ * height_), |
| (...skipping 20 matching lines...) |
| 86 void VideoProcessingTest::TearDown() { | 84 void VideoProcessingTest::TearDown() { |
| 87 if (source_file_ != NULL) { | 85 if (source_file_ != NULL) { |
| 88 ASSERT_EQ(0, fclose(source_file_)); | 86 ASSERT_EQ(0, fclose(source_file_)); |
| 89 } | 87 } |
| 90 source_file_ = NULL; | 88 source_file_ = NULL; |
| 91 delete vp_; | 89 delete vp_; |
| 92 vp_ = NULL; | 90 vp_ = NULL; |
| 93 } | 91 } |
| 94 | 92 |
| 95 #if defined(WEBRTC_IOS) | 93 #if defined(WEBRTC_IOS) |
| 96 TEST_F(VideoProcessingTest, DISABLED_HandleNullBuffer) { | |
| 97 #else | |
| 98 TEST_F(VideoProcessingTest, HandleNullBuffer) { | |
| 99 #endif | |
| 100 // TODO(mikhal/stefan): Do we need this one? | |
| 101 VideoProcessing::FrameStats stats; | |
| 102 // Video frame with unallocated buffer. | |
| 103 VideoFrame videoFrame; | |
| 104 | |
| 105 vp_->GetFrameStats(videoFrame, &stats); | |
| 106 EXPECT_EQ(stats.num_pixels, 0u); | |
| 107 | |
| 108 EXPECT_EQ(-1, vp_->Deflickering(&videoFrame, &stats)); | |
| 109 | |
| 110 EXPECT_EQ(-3, vp_->BrightnessDetection(videoFrame, stats)); | |
| 111 } | |
| 112 | |
| 113 #if defined(WEBRTC_IOS) | |
| 114 TEST_F(VideoProcessingTest, DISABLED_HandleBadStats) { | |
| 115 #else | |
| 116 TEST_F(VideoProcessingTest, HandleBadStats) { | |
| 117 #endif | |
| 118 VideoProcessing::FrameStats stats; | |
| 119 vp_->ClearFrameStats(&stats); | |
| 120 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | |
| 121 ASSERT_EQ(frame_length_, | |
| 122 fread(video_buffer.get(), 1, frame_length_, source_file_)); | |
| 123 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | |
| 124 0, kVideoRotation_0, &video_frame_)); | |
| 125 | |
| 126 EXPECT_EQ(-1, vp_->Deflickering(&video_frame_, &stats)); | |
| 127 | |
| 128 EXPECT_EQ(-3, vp_->BrightnessDetection(video_frame_, stats)); | |
| 129 } | |
| 130 | |
| 131 #if defined(WEBRTC_IOS) | |
| 132 TEST_F(VideoProcessingTest, DISABLED_IdenticalResultsAfterReset) { | |
| 133 #else | |
| 134 TEST_F(VideoProcessingTest, IdenticalResultsAfterReset) { | |
| 135 #endif | |
| 136 VideoFrame video_frame2; | |
| 137 VideoProcessing::FrameStats stats; | |
| 138 // Only testing non-static functions here. | |
| 139 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | |
| 140 ASSERT_EQ(frame_length_, | |
| 141 fread(video_buffer.get(), 1, frame_length_, source_file_)); | |
| 142 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | |
| 143 0, kVideoRotation_0, &video_frame_)); | |
| 144 vp_->GetFrameStats(video_frame_, &stats); | |
| 145 EXPECT_GT(stats.num_pixels, 0u); | |
| 146 video_frame2.CopyFrame(video_frame_); | |
| 147 ASSERT_EQ(0, vp_->Deflickering(&video_frame_, &stats)); | |
| 148 | |
| 149 // Retrieve frame stats again in case Deflickering() has zeroed them. | |
| 150 vp_->GetFrameStats(video_frame2, &stats); | |
| 151 EXPECT_GT(stats.num_pixels, 0u); | |
| 152 ASSERT_EQ(0, vp_->Deflickering(&video_frame2, &stats)); | |
| 153 EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); | |
| 154 | |
| 155 ASSERT_EQ(frame_length_, | |
| 156 fread(video_buffer.get(), 1, frame_length_, source_file_)); | |
| 157 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | |
| 158 0, kVideoRotation_0, &video_frame_)); | |
| 159 vp_->GetFrameStats(video_frame_, &stats); | |
| 160 EXPECT_GT(stats.num_pixels, 0u); | |
| 161 video_frame2.CopyFrame(video_frame_); | |
| 162 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame_, stats)); | |
| 163 | |
| 164 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame2, stats)); | |
| 165 EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); | |
| 166 } | |
| 167 | |
| 168 #if defined(WEBRTC_IOS) | |
| 169 TEST_F(VideoProcessingTest, DISABLED_FrameStats) { | |
| 170 #else | |
| 171 TEST_F(VideoProcessingTest, FrameStats) { | |
| 172 #endif | |
| 173 VideoProcessing::FrameStats stats; | |
| 174 vp_->ClearFrameStats(&stats); | |
| 175 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | |
| 176 ASSERT_EQ(frame_length_, | |
| 177 fread(video_buffer.get(), 1, frame_length_, source_file_)); | |
| 178 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | |
| 179 0, kVideoRotation_0, &video_frame_)); | |
| 180 | |
| 181 EXPECT_FALSE(vp_->ValidFrameStats(stats)); | |
| 182 vp_->GetFrameStats(video_frame_, &stats); | |
| 183 EXPECT_GT(stats.num_pixels, 0u); | |
| 184 EXPECT_TRUE(vp_->ValidFrameStats(stats)); | |
| 185 | |
| 186 printf("\nFrameStats\n"); | |
| 187 printf("mean: %u\nnum_pixels: %u\nsubSamplFactor: %u\nsum: %u\n\n", | |
| 188 static_cast<unsigned int>(stats.mean), | |
| 189 static_cast<unsigned int>(stats.num_pixels), | |
| 190 static_cast<unsigned int>(stats.sub_sampling_factor), | |
| 191 static_cast<unsigned int>(stats.sum)); | |
| 192 | |
| 193 vp_->ClearFrameStats(&stats); | |
| 194 EXPECT_FALSE(vp_->ValidFrameStats(stats)); | |
| 195 } | |
| 196 | |
| 197 #if defined(WEBRTC_IOS) | |
| 198 TEST_F(VideoProcessingTest, DISABLED_PreprocessorLogic) { | 94 TEST_F(VideoProcessingTest, DISABLED_PreprocessorLogic) { |
| 199 #else | 95 #else |
| 200 TEST_F(VideoProcessingTest, PreprocessorLogic) { | 96 TEST_F(VideoProcessingTest, PreprocessorLogic) { |
| 201 #endif | 97 #endif |
| 202 // Disable temporal sampling (frame dropping). | 98 // Disable temporal sampling (frame dropping). |
| 203 vp_->EnableTemporalDecimation(false); | 99 vp_->EnableTemporalDecimation(false); |
| 204 int resolution = 100; | 100 int resolution = 100; |
| 205 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 15)); | 101 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 15)); |
| 206 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); | 102 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); |
| 207 // Disable spatial sampling. | 103 // Disable spatial sampling. |
| (...skipping 163 matching lines...) |
| 371 // Compute PSNR against the cropped source frame and check expectation. | 267 // Compute PSNR against the cropped source frame and check expectation. |
| 372 double psnr = I420PSNR(&cropped_source_frame, out_frame); | 268 double psnr = I420PSNR(&cropped_source_frame, out_frame); |
| 373 EXPECT_GT(psnr, expected_psnr); | 269 EXPECT_GT(psnr, expected_psnr); |
| 374 printf( | 270 printf( |
| 375 "PSNR: %f. PSNR is between source of size %d %d, and a modified " | 271 "PSNR: %f. PSNR is between source of size %d %d, and a modified " |
| 376 "source which is scaled down/up to: %d %d, and back to source size \n", | 272 "source which is scaled down/up to: %d %d, and back to source size \n", |
| 377 psnr, source_frame.width(), source_frame.height(), target_width, | 273 psnr, source_frame.width(), source_frame.height(), target_width, |
| 378 target_height); | 274 target_height); |
| 379 } | 275 } |
| 380 | 276 |
| 381 bool CompareFrames(const webrtc::VideoFrame& frame1, | |
| 382 const webrtc::VideoFrame& frame2) { | |
| 383 for (int plane = 0; plane < webrtc::kNumOfPlanes; plane++) { | |
| 384 webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane); | |
| 385 int allocated_size1 = frame1.allocated_size(plane_type); | |
| 386 int allocated_size2 = frame2.allocated_size(plane_type); | |
| 387 if (allocated_size1 != allocated_size2) | |
| 388 return false; | |
| 389 const uint8_t* plane_buffer1 = frame1.buffer(plane_type); | |
| 390 const uint8_t* plane_buffer2 = frame2.buffer(plane_type); | |
| 391 if (memcmp(plane_buffer1, plane_buffer2, allocated_size1)) | |
| 392 return false; | |
| 393 } | |
| 394 return true; | |
| 395 } | |
| 396 | |
| 397 void WriteProcessedFrameForVisualInspection(const VideoFrame& source, | 277 void WriteProcessedFrameForVisualInspection(const VideoFrame& source, |
| 398 const VideoFrame& processed) { | 278 const VideoFrame& processed) { |
| 399 // Skip if writing to files is not enabled. | 279 // Skip if writing to files is not enabled. |
| 400 if (!FLAGS_gen_files) | 280 if (!FLAGS_gen_files) |
| 401 return; | 281 return; |
| 402 // Write the processed frame to file for visual inspection. | 282 // Write the processed frame to file for visual inspection. |
| 403 std::ostringstream filename; | 283 std::ostringstream filename; |
| 404 filename << webrtc::test::OutputPath() << "Resampler_from_" << source.width() | 284 filename << webrtc::test::OutputPath() << "Resampler_from_" << source.width() |
| 405 << "x" << source.height() << "_to_" << processed.width() << "x" | 285 << "x" << source.height() << "_to_" << processed.width() << "x" |
| 406 << processed.height() << "_30Hz_P420.yuv"; | 286 << processed.height() << "_30Hz_P420.yuv"; |
| 407 std::cout << "Watch " << filename.str() << " and verify that it is okay." | 287 std::cout << "Watch " << filename.str() << " and verify that it is okay." |
| 408 << std::endl; | 288 << std::endl; |
| 409 FILE* stand_alone_file = fopen(filename.str().c_str(), "wb"); | 289 FILE* stand_alone_file = fopen(filename.str().c_str(), "wb"); |
| 410 if (PrintVideoFrame(processed, stand_alone_file) < 0) | 290 if (PrintVideoFrame(processed, stand_alone_file) < 0) |
| 411 std::cerr << "Failed to write: " << filename.str() << std::endl; | 291 std::cerr << "Failed to write: " << filename.str() << std::endl; |
| 412 if (stand_alone_file) | 292 if (stand_alone_file) |
| 413 fclose(stand_alone_file); | 293 fclose(stand_alone_file); |
| 414 } | 294 } |
| 415 | 295 |
| 416 } // namespace webrtc | 296 } // namespace webrtc |
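
For readers unfamiliar with the PSNR gate at the end of TestSize() above (`EXPECT_GT(psnr, expected_psnr)`): the test scales the cropped source to the target size, scales it back, and requires the round-trip PSNR to stay above a threshold set slightly below the empirically measured value. Below is a minimal sketch of the underlying PSNR computation, assuming plain 8-bit sample buffers rather than the WebRTC `I420PSNR()` helper used in the test; the function name is illustrative and not part of this CL.

```cpp
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <limits>

// Illustrative PSNR over two equally sized 8-bit sample buffers.
// The test uses webrtc's I420PSNR() helper, which aggregates the error over
// the Y, U and V planes of a frame; this sketch only shows the underlying
// formula: PSNR = 10 * log10(255^2 / MSE).
double SamplePsnr(const uint8_t* a, const uint8_t* b, size_t len) {
  double squared_error = 0.0;
  for (size_t i = 0; i < len; ++i) {
    const double diff = static_cast<double>(a[i]) - static_cast<double>(b[i]);
    squared_error += diff * diff;
  }
  const double mse = squared_error / static_cast<double>(len);
  if (mse == 0.0) {
    // Identical buffers; the real helper caps this case at a finite value.
    return std::numeric_limits<double>::infinity();
  }
  return 10.0 * std::log10(255.0 * 255.0 / mse);
}
```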