OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 41 matching lines...)
52 double expected_psnr, | 52 double expected_psnr, |
53 VideoProcessing* vpm); | 53 VideoProcessing* vpm); |
54 static bool CompareFrames(const webrtc::VideoFrame& frame1, | 54 static bool CompareFrames(const webrtc::VideoFrame& frame1, |
55 const webrtc::VideoFrame& frame2); | 55 const webrtc::VideoFrame& frame2); |
56 static void WriteProcessedFrameForVisualInspection(const VideoFrame& source, | 56 static void WriteProcessedFrameForVisualInspection(const VideoFrame& source, |
57 const VideoFrame& processed); | 57 const VideoFrame& processed); |
58 | 58 |
59 VideoProcessingTest::VideoProcessingTest() | 59 VideoProcessingTest::VideoProcessingTest() |
60 : vp_(NULL), | 60 : vp_(NULL), |
61 source_file_(NULL), | 61 source_file_(NULL), |
62 width_(352), | 62 vtt_(GetParam()), |
| 63 width_(vtt_.width), |
63 half_width_((width_ + 1) / 2), | 64 half_width_((width_ + 1) / 2), |
64 height_(288), | 65 height_(vtt_.height), |
65 size_y_(width_ * height_), | 66 size_y_(width_ * height_), |
66 size_uv_(half_width_ * ((height_ + 1) / 2)), | 67 size_uv_(half_width_ * ((height_ + 1) / 2)), |
67 frame_length_(CalcBufferSize(kI420, width_, height_)) {} | 68 frame_length_(CalcBufferSize(kI420, width_, height_)) {} |
68 | 69 |
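Note on the constructor change above: the new initializer list pulls its configuration from the test parameter via GetParam(), which means the fixture must now be value-parameterized. The VideoToTest definition and the fixture's base class are in the lines skipped above; a minimal sketch consistent with the usage in this hunk (field names taken from vtt_.file_name, vtt_.width and vtt_.height; everything else is assumed, not part of this change) would be:

// Sketch only -- the real declarations live in the skipped lines above.
struct VideoToTest {
  std::string file_name;  // path to a raw I420 (.yuv) clip
  int width;
  int height;
};

// GetParam() is available because the fixture derives from
// ::testing::TestWithParam<VideoToTest> instead of ::testing::Test.
class VideoProcessingTest : public ::testing::TestWithParam<VideoToTest> {
  // ... vp_, source_file_, vtt_, width_, height_, etc. ...
};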
69 void VideoProcessingTest::SetUp() { | 70 void VideoProcessingTest::SetUp() { |
70 vp_ = VideoProcessing::Create(); | 71 vp_ = VideoProcessing::Create(); |
71 ASSERT_TRUE(vp_ != NULL); | 72 ASSERT_TRUE(vp_ != NULL); |
72 | |
73 video_frame_.CreateEmptyFrame(width_, height_, width_, | 73 video_frame_.CreateEmptyFrame(width_, height_, width_, |
74 half_width_, half_width_); | 74 half_width_, half_width_); |
75 // Clear video frame so DrMemory/Valgrind will allow reads of the buffer. | 75 // Clear video frame so DrMemory/Valgrind will allow reads of the buffer. |
76 memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane)); | 76 memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane)); |
77 memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane)); | 77 memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane)); |
78 memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane)); | 78 memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane)); |
79 const std::string video_file = | 79 source_file_ = fopen(vtt_.file_name.c_str(), "rb"); |
80 webrtc::test::ResourcePath("foreman_cif", "yuv"); | |
81 source_file_ = fopen(video_file.c_str(), "rb"); | |
82 ASSERT_TRUE(source_file_ != NULL) | 80 ASSERT_TRUE(source_file_ != NULL) |
83 << "Cannot read source file: " + video_file + "\n"; | 81 << "Cannot read source file: " + vtt_.file_name + "\n"; |
84 } | 82 } |
85 | 83 |
86 void VideoProcessingTest::TearDown() { | 84 void VideoProcessingTest::TearDown() { |
87 if (source_file_ != NULL) { | 85 if (source_file_ != NULL) { |
88 ASSERT_EQ(0, fclose(source_file_)); | 86 ASSERT_EQ(0, fclose(source_file_)); |
89 } | 87 } |
90 source_file_ = NULL; | 88 source_file_ = NULL; |
91 delete vp_; | 89 delete vp_; |
92 vp_ = NULL; | 90 vp_ = NULL; |
93 } | 91 } |
94 | 92 |
95 #if defined(WEBRTC_IOS) | 93 #if defined(WEBRTC_IOS) |
96 TEST_F(VideoProcessingTest, DISABLED_HandleNullBuffer) { | 94 TEST_P(VideoProcessingTest, DISABLED_HandleNullBuffer) { |
97 #else | 95 #else |
98 TEST_F(VideoProcessingTest, HandleNullBuffer) { | 96 TEST_P(VideoProcessingTest, HandleNullBuffer) { |
99 #endif | 97 #endif |
100 // TODO(mikhal/stefan): Do we need this one? | 98 // TODO(mikhal/stefan): Do we need this one? |
101 VideoProcessing::FrameStats stats; | 99 VideoProcessing::FrameStats stats; |
102 // Video frame with unallocated buffer. | 100 // Video frame with unallocated buffer. |
103 VideoFrame videoFrame; | 101 VideoFrame videoFrame; |
104 | 102 |
105 vp_->GetFrameStats(videoFrame, &stats); | 103 vp_->GetFrameStats(videoFrame, &stats); |
106 EXPECT_EQ(stats.num_pixels, 0u); | 104 EXPECT_EQ(stats.num_pixels, 0u); |
107 | 105 |
108 EXPECT_EQ(-1, vp_->Deflickering(&videoFrame, &stats)); | 106 EXPECT_EQ(-1, vp_->Deflickering(&videoFrame, &stats)); |
109 | 107 |
110 EXPECT_EQ(-3, vp_->BrightnessDetection(videoFrame, stats)); | 108 EXPECT_EQ(-3, vp_->BrightnessDetection(videoFrame, stats)); |
111 } | 109 } |
112 | 110 |
113 #if defined(WEBRTC_IOS) | 111 #if defined(WEBRTC_IOS) |
114 TEST_F(VideoProcessingTest, DISABLED_HandleBadStats) { | 112 TEST_P(VideoProcessingTest, DISABLED_HandleBadStats) { |
115 #else | 113 #else |
116 TEST_F(VideoProcessingTest, HandleBadStats) { | 114 TEST_P(VideoProcessingTest, HandleBadStats) { |
117 #endif | 115 #endif |
118 VideoProcessing::FrameStats stats; | 116 VideoProcessing::FrameStats stats; |
119 vp_->ClearFrameStats(&stats); | 117 vp_->ClearFrameStats(&stats); |
120 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | 118 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); |
121 ASSERT_EQ(frame_length_, | 119 ASSERT_EQ(frame_length_, |
122 fread(video_buffer.get(), 1, frame_length_, source_file_)); | 120 fread(video_buffer.get(), 1, frame_length_, source_file_)); |
123 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | 121 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, |
124 0, kVideoRotation_0, &video_frame_)); | 122 0, kVideoRotation_0, &video_frame_)); |
125 | 123 |
126 EXPECT_EQ(-1, vp_->Deflickering(&video_frame_, &stats)); | 124 EXPECT_EQ(-1, vp_->Deflickering(&video_frame_, &stats)); |
127 | 125 |
128 EXPECT_EQ(-3, vp_->BrightnessDetection(video_frame_, stats)); | 126 EXPECT_EQ(-3, vp_->BrightnessDetection(video_frame_, stats)); |
129 } | 127 } |
130 | 128 |
131 #if defined(WEBRTC_IOS) | 129 #if defined(WEBRTC_IOS) |
132 TEST_F(VideoProcessingTest, DISABLED_IdenticalResultsAfterReset) { | 130 TEST_P(VideoProcessingTest, DISABLED_IdenticalResultsAfterReset) { |
133 #else | 131 #else |
134 TEST_F(VideoProcessingTest, IdenticalResultsAfterReset) { | 132 TEST_P(VideoProcessingTest, IdenticalResultsAfterReset) { |
135 #endif | 133 #endif |
136 VideoFrame video_frame2; | 134 VideoFrame video_frame2; |
137 VideoProcessing::FrameStats stats; | 135 VideoProcessing::FrameStats stats; |
138 // Only testing non-static functions here. | 136 // Only testing non-static functions here. |
139 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | 137 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); |
140 ASSERT_EQ(frame_length_, | 138 ASSERT_EQ(frame_length_, |
141 fread(video_buffer.get(), 1, frame_length_, source_file_)); | 139 fread(video_buffer.get(), 1, frame_length_, source_file_)); |
142 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | 140 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, |
143 0, kVideoRotation_0, &video_frame_)); | 141 0, kVideoRotation_0, &video_frame_)); |
144 vp_->GetFrameStats(video_frame_, &stats); | 142 vp_->GetFrameStats(video_frame_, &stats); |
(...skipping 14 matching lines...)
159 vp_->GetFrameStats(video_frame_, &stats); | 157 vp_->GetFrameStats(video_frame_, &stats); |
160 EXPECT_GT(stats.num_pixels, 0u); | 158 EXPECT_GT(stats.num_pixels, 0u); |
161 video_frame2.CopyFrame(video_frame_); | 159 video_frame2.CopyFrame(video_frame_); |
162 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame_, stats)); | 160 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame_, stats)); |
163 | 161 |
164 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame2, stats)); | 162 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame2, stats)); |
165 EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); | 163 EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); |
166 } | 164 } |
167 | 165 |
168 #if defined(WEBRTC_IOS) | 166 #if defined(WEBRTC_IOS) |
169 TEST_F(VideoProcessingTest, DISABLED_FrameStats) { | 167 TEST_P(VideoProcessingTest, DISABLED_FrameStats) { |
170 #else | 168 #else |
171 TEST_F(VideoProcessingTest, FrameStats) { | 169 TEST_P(VideoProcessingTest, FrameStats) { |
172 #endif | 170 #endif |
173 VideoProcessing::FrameStats stats; | 171 VideoProcessing::FrameStats stats; |
174 vp_->ClearFrameStats(&stats); | 172 vp_->ClearFrameStats(&stats); |
175 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | 173 std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); |
176 ASSERT_EQ(frame_length_, | 174 ASSERT_EQ(frame_length_, |
177 fread(video_buffer.get(), 1, frame_length_, source_file_)); | 175 fread(video_buffer.get(), 1, frame_length_, source_file_)); |
178 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | 176 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, |
179 0, kVideoRotation_0, &video_frame_)); | 177 0, kVideoRotation_0, &video_frame_)); |
180 | 178 |
181 EXPECT_FALSE(vp_->ValidFrameStats(stats)); | 179 EXPECT_FALSE(vp_->ValidFrameStats(stats)); |
182 vp_->GetFrameStats(video_frame_, &stats); | 180 vp_->GetFrameStats(video_frame_, &stats); |
183 EXPECT_GT(stats.num_pixels, 0u); | 181 EXPECT_GT(stats.num_pixels, 0u); |
184 EXPECT_TRUE(vp_->ValidFrameStats(stats)); | 182 EXPECT_TRUE(vp_->ValidFrameStats(stats)); |
185 | 183 |
186 printf("\nFrameStats\n"); | 184 printf("\nFrameStats\n"); |
187 printf("mean: %u\nnum_pixels: %u\nsubSamplFactor: %u\nsum: %u\n\n", | 185 printf("mean: %u\nnum_pixels: %u\nsubSamplFactor: %u\nsum: %u\n\n", |
188 static_cast<unsigned int>(stats.mean), | 186 static_cast<unsigned int>(stats.mean), |
189 static_cast<unsigned int>(stats.num_pixels), | 187 static_cast<unsigned int>(stats.num_pixels), |
190 static_cast<unsigned int>(stats.sub_sampling_factor), | 188 static_cast<unsigned int>(stats.sub_sampling_factor), |
191 static_cast<unsigned int>(stats.sum)); | 189 static_cast<unsigned int>(stats.sum)); |
192 | 190 |
193 vp_->ClearFrameStats(&stats); | 191 vp_->ClearFrameStats(&stats); |
194 EXPECT_FALSE(vp_->ValidFrameStats(stats)); | 192 EXPECT_FALSE(vp_->ValidFrameStats(stats)); |
195 } | 193 } |
196 | 194 |
197 #if defined(WEBRTC_IOS) | 195 #if defined(WEBRTC_IOS) |
198 TEST_F(VideoProcessingTest, DISABLED_PreprocessorLogic) { | 196 TEST_P(VideoProcessingTest, DISABLED_PreprocessorLogic) { |
199 #else | 197 #else |
200 TEST_F(VideoProcessingTest, PreprocessorLogic) { | 198 TEST_P(VideoProcessingTest, PreprocessorLogic) { |
201 #endif | 199 #endif |
202 // Disable temporal sampling (frame dropping). | 200 // Disable temporal sampling (frame dropping). |
203 vp_->EnableTemporalDecimation(false); | 201 vp_->EnableTemporalDecimation(false); |
204 int resolution = 100; | 202 int resolution = 100; |
205 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 15)); | 203 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 15)); |
206 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); | 204 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); |
207 // Disable spatial sampling. | 205 // Disable spatial sampling. |
208 vp_->SetInputFrameResampleMode(kNoRescaling); | 206 vp_->SetInputFrameResampleMode(kNoRescaling); |
209 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); | 207 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); |
210 VideoFrame* out_frame = NULL; | 208 VideoFrame* out_frame = NULL; |
211 // Set rescaling => output frame != NULL. | 209 // Set rescaling => output frame != NULL. |
212 vp_->SetInputFrameResampleMode(kFastRescaling); | 210 vp_->SetInputFrameResampleMode(kFastRescaling); |
213 PreprocessFrameAndVerify(video_frame_, resolution, resolution, vp_, | 211 PreprocessFrameAndVerify(video_frame_, resolution, resolution, vp_, |
214 out_frame); | 212 out_frame); |
215 // No rescaling=> output frame = NULL. | 213 // No rescaling=> output frame = NULL. |
216 vp_->SetInputFrameResampleMode(kNoRescaling); | 214 vp_->SetInputFrameResampleMode(kNoRescaling); |
217 EXPECT_TRUE(vp_->PreprocessFrame(video_frame_) != nullptr); | 215 EXPECT_TRUE(vp_->PreprocessFrame(video_frame_) != nullptr); |
218 } | 216 } |
219 | 217 |
220 #if defined(WEBRTC_IOS) | 218 #if defined(WEBRTC_IOS) |
221 TEST_F(VideoProcessingTest, DISABLED_Resampler) { | 219 TEST_P(VideoProcessingTest, DISABLED_Resampler) { |
222 #else | 220 #else |
223 TEST_F(VideoProcessingTest, Resampler) { | 221 TEST_P(VideoProcessingTest, Resampler) { |
224 #endif | 222 #endif |
225 enum { NumRuns = 1 }; | 223 enum { NumRuns = 1 }; |
226 | 224 |
227 int64_t min_runtime = 0; | 225 int64_t min_runtime = 0; |
228 int64_t total_runtime = 0; | 226 int64_t total_runtime = 0; |
229 | 227 |
230 rewind(source_file_); | 228 rewind(source_file_); |
231 ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file \n"; | 229 ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file \n"; |
232 | 230 |
233 // CA not needed here | 231 // CA not needed here |
(...skipping 66 matching lines...)
300 min_runtime = runtime; | 298 min_runtime = runtime; |
301 } | 299 } |
302 total_runtime += runtime; | 300 total_runtime += runtime; |
303 } | 301 } |
304 | 302 |
305 printf("\nAverage run time = %d us / frame\n", | 303 printf("\nAverage run time = %d us / frame\n", |
306 static_cast<int>(total_runtime)); | 304 static_cast<int>(total_runtime)); |
307 printf("Min run time = %d us / frame\n\n", static_cast<int>(min_runtime)); | 305 printf("Min run time = %d us / frame\n\n", static_cast<int>(min_runtime)); |
308 } | 306 } |
309 | 307 |
| 308 INSTANTIATE_TEST_CASE_P(ForemanCif, |
| 309 VideoProcessingTest, |
| 310 ::testing::Values(VideoToTest( |
| 311 {webrtc::test::ResourcePath("foreman_cif", "yuv"), |
| 312 352, 288}))); |
| 313 |
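With the instantiation above, every TEST_P in the fixture runs once per registered VideoToTest value, so the former hard-coded foreman_cif/352x288 setup becomes just one parameter set. Further clips could be registered the same way; the snippet below is purely illustrative (the QCIF file name and dimensions are hypothetical and not part of this change):

// Hypothetical second instantiation, shown only to illustrate the pattern.
INSTANTIATE_TEST_CASE_P(ForemanQcif,
                        VideoProcessingTest,
                        ::testing::Values(VideoToTest(
                            {webrtc::test::ResourcePath("foreman_qcif", "yuv"),
                             176, 144})));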
310 void PreprocessFrameAndVerify(const VideoFrame& source, | 314 void PreprocessFrameAndVerify(const VideoFrame& source, |
311 int target_width, | 315 int target_width, |
312 int target_height, | 316 int target_height, |
313 VideoProcessing* vpm, | 317 VideoProcessing* vpm, |
314 const VideoFrame* out_frame) { | 318 const VideoFrame* out_frame) { |
315 ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30)); | 319 ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30)); |
316 out_frame = vpm->PreprocessFrame(source); | 320 out_frame = vpm->PreprocessFrame(source); |
317 EXPECT_TRUE(out_frame != nullptr); | 321 EXPECT_TRUE(out_frame != nullptr); |
318 | 322 |
319 // If no resizing is needed, expect the original frame. | 323 // If no resizing is needed, expect the original frame. |
(...skipping 87 matching lines...)
407 std::cout << "Watch " << filename.str() << " and verify that it is okay." | 411 std::cout << "Watch " << filename.str() << " and verify that it is okay." |
408 << std::endl; | 412 << std::endl; |
409 FILE* stand_alone_file = fopen(filename.str().c_str(), "wb"); | 413 FILE* stand_alone_file = fopen(filename.str().c_str(), "wb"); |
410 if (PrintVideoFrame(processed, stand_alone_file) < 0) | 414 if (PrintVideoFrame(processed, stand_alone_file) < 0) |
411 std::cerr << "Failed to write: " << filename.str() << std::endl; | 415 std::cerr << "Failed to write: " << filename.str() << std::endl; |
412 if (stand_alone_file) | 416 if (stand_alone_file) |
413 fclose(stand_alone_file); | 417 fclose(stand_alone_file); |
414 } | 418 } |
415 | 419 |
416 } // namespace webrtc | 420 } // namespace webrtc |