OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 41 matching lines...)
52 double expected_psnr, | 52 double expected_psnr, |
53 VideoProcessing* vpm); | 53 VideoProcessing* vpm); |
54 static bool CompareFrames(const webrtc::VideoFrame& frame1, | 54 static bool CompareFrames(const webrtc::VideoFrame& frame1, |
55 const webrtc::VideoFrame& frame2); | 55 const webrtc::VideoFrame& frame2); |
56 static void WriteProcessedFrameForVisualInspection(const VideoFrame& source, | 56 static void WriteProcessedFrameForVisualInspection(const VideoFrame& source, |
57 const VideoFrame& processed); | 57 const VideoFrame& processed); |
58 | 58 |
59 VideoProcessingTest::VideoProcessingTest() | 59 VideoProcessingTest::VideoProcessingTest() |
60 : vp_(NULL), | 60 : vp_(NULL), |
61 source_file_(NULL), | 61 source_file_(NULL), |
62 vtt_(GetParam()), | 62 width_(352), |
63 width_(vtt_.width), | |
64 half_width_((width_ + 1) / 2), | 63 half_width_((width_ + 1) / 2), |
65 height_(vtt_.height), | 64 height_(288), |
66 size_y_(width_ * height_), | 65 size_y_(width_ * height_), |
67 size_uv_(half_width_ * ((height_ + 1) / 2)), | 66 size_uv_(half_width_ * ((height_ + 1) / 2)), |
68 frame_length_(CalcBufferSize(kI420, width_, height_)) {} | 67 frame_length_(CalcBufferSize(kI420, width_, height_)) {} |
69 | 68 |
70 void VideoProcessingTest::SetUp() { | 69 void VideoProcessingTest::SetUp() { |
71 vp_ = VideoProcessing::Create(); | 70 vp_ = VideoProcessing::Create(); |
72 ASSERT_TRUE(vp_ != NULL); | 71 ASSERT_TRUE(vp_ != NULL); |
| 72 |
73 video_frame_.CreateEmptyFrame(width_, height_, width_, | 73 video_frame_.CreateEmptyFrame(width_, height_, width_, |
74 half_width_, half_width_); | 74 half_width_, half_width_); |
75 // Clear video frame so DrMemory/Valgrind will allow reads of the buffer. | 75 // Clear video frame so DrMemory/Valgrind will allow reads of the buffer. |
76 memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane)); | 76 memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane)); |
77 memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane)); | 77 memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane)); |
78 memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane)); | 78 memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane)); |
79 source_file_ = fopen(vtt_.file_name.c_str(), "rb"); | 79 const std::string video_file = |
| 80 webrtc::test::ResourcePath("foreman_cif", "yuv"); |
| 81 source_file_ = fopen(video_file.c_str(), "rb"); |
80 ASSERT_TRUE(source_file_ != NULL) | 82 ASSERT_TRUE(source_file_ != NULL) |
81 << "Cannot read source file: " + vtt_.file_name + "\n"; | 83 << "Cannot read source file: " + video_file + "\n"; |
82 } | 84 } |
83 | 85 |
84 void VideoProcessingTest::TearDown() { | 86 void VideoProcessingTest::TearDown() { |
85 if (source_file_ != NULL) { | 87 if (source_file_ != NULL) { |
86 ASSERT_EQ(0, fclose(source_file_)); | 88 ASSERT_EQ(0, fclose(source_file_)); |
87 } | 89 } |
88 source_file_ = NULL; | 90 source_file_ = NULL; |
89 delete vp_; | 91 delete vp_; |
90 vp_ = NULL; | 92 vp_ = NULL; |
91 } | 93 } |
92 | 94 |
93 #if defined(WEBRTC_IOS) | 95 #if defined(WEBRTC_IOS) |
94 TEST_P(VideoProcessingTest, DISABLED_HandleNullBuffer) { | 96 TEST_F(VideoProcessingTest, DISABLED_HandleNullBuffer) { |
95 #else | 97 #else |
96 TEST_P(VideoProcessingTest, HandleNullBuffer) { | 98 TEST_F(VideoProcessingTest, HandleNullBuffer) { |
97 #endif | 99 #endif |
98 // TODO(mikhal/stefan): Do we need this one? | 100 // TODO(mikhal/stefan): Do we need this one? |
99 VideoProcessing::FrameStats stats; | 101 VideoProcessing::FrameStats stats; |
100 // Video frame with unallocated buffer. | 102 // Video frame with unallocated buffer. |
101 VideoFrame videoFrame; | 103 VideoFrame videoFrame; |
102 | 104 |
103 vp_->GetFrameStats(videoFrame, &stats); | 105 vp_->GetFrameStats(videoFrame, &stats); |
104 EXPECT_EQ(stats.num_pixels, 0u); | 106 EXPECT_EQ(stats.num_pixels, 0u); |
105 | 107 |
106 EXPECT_EQ(-1, vp_->Deflickering(&videoFrame, &stats)); | 108 EXPECT_EQ(-1, vp_->Deflickering(&videoFrame, &stats)); |
107 | 109 |
108 EXPECT_EQ(-3, vp_->BrightnessDetection(videoFrame, stats)); | 110 EXPECT_EQ(-3, vp_->BrightnessDetection(videoFrame, stats)); |
109 } | 111 } |
110 | 112 |
111 #if defined(WEBRTC_IOS) | 113 #if defined(WEBRTC_IOS) |
112 TEST_P(VideoProcessingTest, DISABLED_HandleBadStats) { | 114 TEST_F(VideoProcessingTest, DISABLED_HandleBadStats) { |
113 #else | 115 #else |
114 TEST_P(VideoProcessingTest, HandleBadStats) { | 116 TEST_F(VideoProcessingTest, HandleBadStats) { |
115 #endif | 117 #endif |
116 VideoProcessing::FrameStats stats; | 118 VideoProcessing::FrameStats stats; |
117 vp_->ClearFrameStats(&stats); | 119 vp_->ClearFrameStats(&stats); |
118 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | 120 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); |
119 ASSERT_EQ(frame_length_, | 121 ASSERT_EQ(frame_length_, |
120 fread(video_buffer.get(), 1, frame_length_, source_file_)); | 122 fread(video_buffer.get(), 1, frame_length_, source_file_)); |
121 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | 123 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, |
122 0, kVideoRotation_0, &video_frame_)); | 124 0, kVideoRotation_0, &video_frame_)); |
123 | 125 |
124 EXPECT_EQ(-1, vp_->Deflickering(&video_frame_, &stats)); | 126 EXPECT_EQ(-1, vp_->Deflickering(&video_frame_, &stats)); |
125 | 127 |
126 EXPECT_EQ(-3, vp_->BrightnessDetection(video_frame_, stats)); | 128 EXPECT_EQ(-3, vp_->BrightnessDetection(video_frame_, stats)); |
127 } | 129 } |
128 | 130 |
129 #if defined(WEBRTC_IOS) | 131 #if defined(WEBRTC_IOS) |
130 TEST_P(VideoProcessingTest, DISABLED_IdenticalResultsAfterReset) { | 132 TEST_F(VideoProcessingTest, DISABLED_IdenticalResultsAfterReset) { |
131 #else | 133 #else |
132 TEST_P(VideoProcessingTest, IdenticalResultsAfterReset) { | 134 TEST_F(VideoProcessingTest, IdenticalResultsAfterReset) { |
133 #endif | 135 #endif |
134 VideoFrame video_frame2; | 136 VideoFrame video_frame2; |
135 VideoProcessing::FrameStats stats; | 137 VideoProcessing::FrameStats stats; |
136 // Only testing non-static functions here. | 138 // Only testing non-static functions here. |
137 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | 139 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); |
138 ASSERT_EQ(frame_length_, | 140 ASSERT_EQ(frame_length_, |
139 fread(video_buffer.get(), 1, frame_length_, source_file_)); | 141 fread(video_buffer.get(), 1, frame_length_, source_file_)); |
140 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | 142 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, |
141 0, kVideoRotation_0, &video_frame_)); | 143 0, kVideoRotation_0, &video_frame_)); |
142 vp_->GetFrameStats(video_frame_, &stats); | 144 vp_->GetFrameStats(video_frame_, &stats); |
(...skipping 14 matching lines...)
157 vp_->GetFrameStats(video_frame_, &stats); | 159 vp_->GetFrameStats(video_frame_, &stats); |
158 EXPECT_GT(stats.num_pixels, 0u); | 160 EXPECT_GT(stats.num_pixels, 0u); |
159 video_frame2.CopyFrame(video_frame_); | 161 video_frame2.CopyFrame(video_frame_); |
160 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame_, stats)); | 162 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame_, stats)); |
161 | 163 |
162 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame2, stats)); | 164 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame2, stats)); |
163 EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); | 165 EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); |
164 } | 166 } |
165 | 167 |
166 #if defined(WEBRTC_IOS) | 168 #if defined(WEBRTC_IOS) |
167 TEST_P(VideoProcessingTest, DISABLED_FrameStats) { | 169 TEST_F(VideoProcessingTest, DISABLED_FrameStats) { |
168 #else | 170 #else |
169 TEST_P(VideoProcessingTest, FrameStats) { | 171 TEST_F(VideoProcessingTest, FrameStats) { |
170 #endif | 172 #endif |
171 VideoProcessing::FrameStats stats; | 173 VideoProcessing::FrameStats stats; |
172 vp_->ClearFrameStats(&stats); | 174 vp_->ClearFrameStats(&stats); |
173 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | 175 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); |
174 ASSERT_EQ(frame_length_, | 176 ASSERT_EQ(frame_length_, |
175 fread(video_buffer.get(), 1, frame_length_, source_file_)); | 177 fread(video_buffer.get(), 1, frame_length_, source_file_)); |
176 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | 178 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, |
177 0, kVideoRotation_0, &video_frame_)); | 179 0, kVideoRotation_0, &video_frame_)); |
178 | 180 |
179 EXPECT_FALSE(vp_->ValidFrameStats(stats)); | 181 EXPECT_FALSE(vp_->ValidFrameStats(stats)); |
180 vp_->GetFrameStats(video_frame_, &stats); | 182 vp_->GetFrameStats(video_frame_, &stats); |
181 EXPECT_GT(stats.num_pixels, 0u); | 183 EXPECT_GT(stats.num_pixels, 0u); |
182 EXPECT_TRUE(vp_->ValidFrameStats(stats)); | 184 EXPECT_TRUE(vp_->ValidFrameStats(stats)); |
183 | 185 |
184 printf("\nFrameStats\n"); | 186 printf("\nFrameStats\n"); |
185 printf("mean: %u\nnum_pixels: %u\nsubSamplFactor: %u\nsum: %u\n\n", | 187 printf("mean: %u\nnum_pixels: %u\nsubSamplFactor: %u\nsum: %u\n\n", |
186 static_cast<unsigned int>(stats.mean), | 188 static_cast<unsigned int>(stats.mean), |
187 static_cast<unsigned int>(stats.num_pixels), | 189 static_cast<unsigned int>(stats.num_pixels), |
188 static_cast<unsigned int>(stats.sub_sampling_factor), | 190 static_cast<unsigned int>(stats.sub_sampling_factor), |
189 static_cast<unsigned int>(stats.sum)); | 191 static_cast<unsigned int>(stats.sum)); |
190 | 192 |
191 vp_->ClearFrameStats(&stats); | 193 vp_->ClearFrameStats(&stats); |
192 EXPECT_FALSE(vp_->ValidFrameStats(stats)); | 194 EXPECT_FALSE(vp_->ValidFrameStats(stats)); |
193 } | 195 } |
194 | 196 |
195 #if defined(WEBRTC_IOS) | 197 #if defined(WEBRTC_IOS) |
196 TEST_P(VideoProcessingTest, DISABLED_PreprocessorLogic) { | 198 TEST_F(VideoProcessingTest, DISABLED_PreprocessorLogic) { |
197 #else | 199 #else |
198 TEST_P(VideoProcessingTest, PreprocessorLogic) { | 200 TEST_F(VideoProcessingTest, PreprocessorLogic) { |
199 #endif | 201 #endif |
200 // Disable temporal sampling (frame dropping). | 202 // Disable temporal sampling (frame dropping). |
201 vp_->EnableTemporalDecimation(false); | 203 vp_->EnableTemporalDecimation(false); |
202 int resolution = 100; | 204 int resolution = 100; |
203 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 15)); | 205 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 15)); |
204 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); | 206 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); |
205 // Disable spatial sampling. | 207 // Disable spatial sampling. |
206 vp_->SetInputFrameResampleMode(kNoRescaling); | 208 vp_->SetInputFrameResampleMode(kNoRescaling); |
207 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); | 209 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); |
208 VideoFrame* out_frame = NULL; | 210 VideoFrame* out_frame = NULL; |
209 // Set rescaling => output frame != NULL. | 211 // Set rescaling => output frame != NULL. |
210 vp_->SetInputFrameResampleMode(kFastRescaling); | 212 vp_->SetInputFrameResampleMode(kFastRescaling); |
211 PreprocessFrameAndVerify(video_frame_, resolution, resolution, vp_, | 213 PreprocessFrameAndVerify(video_frame_, resolution, resolution, vp_, |
212 out_frame); | 214 out_frame); |
213 // No rescaling=> output frame = NULL. | 215 // No rescaling=> output frame = NULL. |
214 vp_->SetInputFrameResampleMode(kNoRescaling); | 216 vp_->SetInputFrameResampleMode(kNoRescaling); |
215 EXPECT_TRUE(vp_->PreprocessFrame(video_frame_) != nullptr); | 217 EXPECT_TRUE(vp_->PreprocessFrame(video_frame_) != nullptr); |
216 } | 218 } |
217 | 219 |
218 #if defined(WEBRTC_IOS) | 220 #if defined(WEBRTC_IOS) |
219 TEST_P(VideoProcessingTest, DISABLED_Resampler) { | 221 TEST_F(VideoProcessingTest, DISABLED_Resampler) { |
220 #else | 222 #else |
221 TEST_P(VideoProcessingTest, Resampler) { | 223 TEST_F(VideoProcessingTest, Resampler) { |
222 #endif | 224 #endif |
223 enum { NumRuns = 1 }; | 225 enum { NumRuns = 1 }; |
224 | 226 |
225 int64_t min_runtime = 0; | 227 int64_t min_runtime = 0; |
226 int64_t total_runtime = 0; | 228 int64_t total_runtime = 0; |
227 | 229 |
228 rewind(source_file_); | 230 rewind(source_file_); |
229 ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file \n"; | 231 ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file \n"; |
230 | 232 |
231 // CA not needed here | 233 // CA not needed here |
(...skipping 66 matching lines...)
298 min_runtime = runtime; | 300 min_runtime = runtime; |
299 } | 301 } |
300 total_runtime += runtime; | 302 total_runtime += runtime; |
301 } | 303 } |
302 | 304 |
303 printf("\nAverage run time = %d us / frame\n", | 305 printf("\nAverage run time = %d us / frame\n", |
304 static_cast<int>(total_runtime)); | 306 static_cast<int>(total_runtime)); |
305 printf("Min run time = %d us / frame\n\n", static_cast<int>(min_runtime)); | 307 printf("Min run time = %d us / frame\n\n", static_cast<int>(min_runtime)); |
306 } | 308 } |
307 | 309 |
308 INSTANTIATE_TEST_CASE_P(ForemanCif, | |
309 VideoProcessingTest, | |
310 ::testing::Values(VideoToTest( | |
311 {webrtc::test::ResourcePath("foreman_cif", "yuv"), | |
312 352, 288}))); | |
313 | |
314 void PreprocessFrameAndVerify(const VideoFrame& source, | 310 void PreprocessFrameAndVerify(const VideoFrame& source, |
315 int target_width, | 311 int target_width, |
316 int target_height, | 312 int target_height, |
317 VideoProcessing* vpm, | 313 VideoProcessing* vpm, |
318 const VideoFrame* out_frame) { | 314 const VideoFrame* out_frame) { |
319 ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30)); | 315 ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30)); |
320 out_frame = vpm->PreprocessFrame(source); | 316 out_frame = vpm->PreprocessFrame(source); |
321 EXPECT_TRUE(out_frame != nullptr); | 317 EXPECT_TRUE(out_frame != nullptr); |
322 | 318 |
323 // If no resizing is needed, expect the original frame. | 319 // If no resizing is needed, expect the original frame. |
(...skipping 87 matching lines...)
411 std::cout << "Watch " << filename.str() << " and verify that it is okay." | 407 std::cout << "Watch " << filename.str() << " and verify that it is okay." |
412 << std::endl; | 408 << std::endl; |
413 FILE* stand_alone_file = fopen(filename.str().c_str(), "wb"); | 409 FILE* stand_alone_file = fopen(filename.str().c_str(), "wb"); |
414 if (PrintVideoFrame(processed, stand_alone_file) < 0) | 410 if (PrintVideoFrame(processed, stand_alone_file) < 0) |
415 std::cerr << "Failed to write: " << filename.str() << std::endl; | 411 std::cerr << "Failed to write: " << filename.str() << std::endl; |
416 if (stand_alone_file) | 412 if (stand_alone_file) |
417 fclose(stand_alone_file); | 413 fclose(stand_alone_file); |
418 } | 414 } |
419 | 415 |
420 } // namespace webrtc | 416 } // namespace webrtc |
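
Summary of the change shown above: the value-parameterized harness is removed. Every TEST_P becomes a TEST_F, the clip parameter (file name plus dimensions, read via GetParam() in the constructor) is dropped along with its INSTANTIATE_TEST_CASE_P, and the fixture now hard-codes the foreman_cif CIF clip (352x288) and loads it directly in SetUp(). A minimal sketch of the two gtest patterns involved; the ClipParam, ParamClipTest, and FixedClipTest names below are hypothetical stand-ins, not the real WebRTC types:

    #include <string>
    #include "gtest/gtest.h"

    // Hypothetical parameter type, mirroring the removed VideoToTest struct.
    struct ClipParam {
      std::string file_name;
      int width;
      int height;
    };

    // Old pattern: value-parameterized fixture, one instantiation per clip.
    class ParamClipTest : public ::testing::TestWithParam<ClipParam> {
     protected:
      ParamClipTest() : width_(GetParam().width), height_(GetParam().height) {}
      int width_;
      int height_;
    };

    TEST_P(ParamClipTest, HasValidSize) {
      EXPECT_GT(width_ * height_, 0);
    }

    INSTANTIATE_TEST_CASE_P(
        ForemanCif,
        ParamClipTest,
        ::testing::Values(ClipParam{"foreman_cif.yuv", 352, 288}));

    // New pattern: plain fixture with the single clip hard-coded, as in this CL.
    class FixedClipTest : public ::testing::Test {
     protected:
      FixedClipTest() : width_(352), height_(288) {}
      int width_;
      int height_;
    };

    TEST_F(FixedClipTest, HasValidSize) {
      EXPECT_GT(width_ * height_, 0);
    }

Since only one clip was ever instantiated, dropping the parameterization removes boilerplate without losing coverage; if more clips are needed later, the parameterized form can be reintroduced.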