OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "webrtc/modules/video_processing/test/video_processing_unittest.h" | 11 #include "webrtc/modules/video_processing/test/video_processing_unittest.h" |
12 | 12 |
13 #include <gflags/gflags.h> | 13 #include <gflags/gflags.h> |
14 | 14 |
15 #include <string> | 15 #include <string> |
16 | 16 |
17 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" | 17 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
18 #include "webrtc/system_wrappers/include/tick_util.h" | 18 #include "webrtc/system_wrappers/include/tick_util.h" |
19 #include "webrtc/test/testsupport/fileutils.h" | 19 #include "webrtc/test/testsupport/fileutils.h" |
20 #include "webrtc/test/testsupport/gtest_disable.h" | |
21 | 20 |
22 namespace webrtc { | 21 namespace webrtc { |
23 | 22 |
24 namespace { | 23 namespace { |
25 | 24 |
26 // Define command line flag 'gen_files' (default value: false). | 25 // Define command line flag 'gen_files' (default value: false). |
27 DEFINE_bool(gen_files, false, "Output files for visual inspection."); | 26 DEFINE_bool(gen_files, false, "Output files for visual inspection."); |
28 | 27 |
29 } // namespace | 28 } // namespace |
30 | 29 |
(...skipping 54 matching lines...) |
85 | 84 |
86 void VideoProcessingTest::TearDown() { | 85 void VideoProcessingTest::TearDown() { |
87 if (source_file_ != NULL) { | 86 if (source_file_ != NULL) { |
88 ASSERT_EQ(0, fclose(source_file_)); | 87 ASSERT_EQ(0, fclose(source_file_)); |
89 } | 88 } |
90 source_file_ = NULL; | 89 source_file_ = NULL; |
91 delete vp_; | 90 delete vp_; |
92 vp_ = NULL; | 91 vp_ = NULL; |
93 } | 92 } |
94 | 93 |
95 TEST_F(VideoProcessingTest, DISABLED_ON_IOS(HandleNullBuffer)) { | 94 #if defined(WEBRTC_IOS) |
| 95 TEST_F(VideoProcessingTest, DISABLED_HandleNullBuffer) { |
| 96 #else |
| 97 TEST_F(VideoProcessingTest, HandleNullBuffer) { |
| 98 #endif |
96 // TODO(mikhal/stefan): Do we need this one? | 99 // TODO(mikhal/stefan): Do we need this one? |
97 VideoProcessing::FrameStats stats; | 100 VideoProcessing::FrameStats stats; |
98 // Video frame with unallocated buffer. | 101 // Video frame with unallocated buffer. |
99 VideoFrame videoFrame; | 102 VideoFrame videoFrame; |
100 | 103 |
101 vp_->GetFrameStats(videoFrame, &stats); | 104 vp_->GetFrameStats(videoFrame, &stats); |
102 EXPECT_EQ(stats.num_pixels, 0u); | 105 EXPECT_EQ(stats.num_pixels, 0u); |
103 | 106 |
104 EXPECT_EQ(-1, vp_->Deflickering(&videoFrame, &stats)); | 107 EXPECT_EQ(-1, vp_->Deflickering(&videoFrame, &stats)); |
105 | 108 |
106 EXPECT_EQ(-3, vp_->BrightnessDetection(videoFrame, stats)); | 109 EXPECT_EQ(-3, vp_->BrightnessDetection(videoFrame, stats)); |
107 } | 110 } |
108 | 111 |
109 TEST_F(VideoProcessingTest, DISABLED_ON_IOS(HandleBadStats)) { | 112 #if defined(WEBRTC_IOS) |
| 113 TEST_F(VideoProcessingTest, DISABLED_HandleBadStats) { |
| 114 #else |
| 115 TEST_F(VideoProcessingTest, HandleBadStats) { |
| 116 #endif |
110 VideoProcessing::FrameStats stats; | 117 VideoProcessing::FrameStats stats; |
111 vp_->ClearFrameStats(&stats); | 118 vp_->ClearFrameStats(&stats); |
112 rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | 119 rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); |
113 ASSERT_EQ(frame_length_, | 120 ASSERT_EQ(frame_length_, |
114 fread(video_buffer.get(), 1, frame_length_, source_file_)); | 121 fread(video_buffer.get(), 1, frame_length_, source_file_)); |
115 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | 122 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, |
116 0, kVideoRotation_0, &video_frame_)); | 123 0, kVideoRotation_0, &video_frame_)); |
117 | 124 |
118 EXPECT_EQ(-1, vp_->Deflickering(&video_frame_, &stats)); | 125 EXPECT_EQ(-1, vp_->Deflickering(&video_frame_, &stats)); |
119 | 126 |
120 EXPECT_EQ(-3, vp_->BrightnessDetection(video_frame_, stats)); | 127 EXPECT_EQ(-3, vp_->BrightnessDetection(video_frame_, stats)); |
121 } | 128 } |
122 | 129 |
123 TEST_F(VideoProcessingTest, DISABLED_ON_IOS(IdenticalResultsAfterReset)) { | 130 #if defined(WEBRTC_IOS) |
| 131 TEST_F(VideoProcessingTest, DISABLED_IdenticalResultsAfterReset) { |
| 132 #else |
| 133 TEST_F(VideoProcessingTest, IdenticalResultsAfterReset) { |
| 134 #endif |
124 VideoFrame video_frame2; | 135 VideoFrame video_frame2; |
125 VideoProcessing::FrameStats stats; | 136 VideoProcessing::FrameStats stats; |
126 // Only testing non-static functions here. | 137 // Only testing non-static functions here. |
127 rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | 138 rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); |
128 ASSERT_EQ(frame_length_, | 139 ASSERT_EQ(frame_length_, |
129 fread(video_buffer.get(), 1, frame_length_, source_file_)); | 140 fread(video_buffer.get(), 1, frame_length_, source_file_)); |
130 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | 141 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, |
131 0, kVideoRotation_0, &video_frame_)); | 142 0, kVideoRotation_0, &video_frame_)); |
132 vp_->GetFrameStats(video_frame_, &stats); | 143 vp_->GetFrameStats(video_frame_, &stats); |
133 EXPECT_GT(stats.num_pixels, 0u); | 144 EXPECT_GT(stats.num_pixels, 0u); |
(...skipping 12 matching lines...) |
146 0, kVideoRotation_0, &video_frame_)); | 157 0, kVideoRotation_0, &video_frame_)); |
147 vp_->GetFrameStats(video_frame_, &stats); | 158 vp_->GetFrameStats(video_frame_, &stats); |
148 EXPECT_GT(stats.num_pixels, 0u); | 159 EXPECT_GT(stats.num_pixels, 0u); |
149 video_frame2.CopyFrame(video_frame_); | 160 video_frame2.CopyFrame(video_frame_); |
150 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame_, stats)); | 161 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame_, stats)); |
151 | 162 |
152 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame2, stats)); | 163 ASSERT_EQ(0, vp_->BrightnessDetection(video_frame2, stats)); |
153 EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); | 164 EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); |
154 } | 165 } |
155 | 166 |
156 TEST_F(VideoProcessingTest, DISABLED_ON_IOS(FrameStats)) { | 167 #if defined(WEBRTC_IOS) |
| 168 TEST_F(VideoProcessingTest, DISABLED_FrameStats) { |
| 169 #else |
| 170 TEST_F(VideoProcessingTest, FrameStats) { |
| 171 #endif |
157 VideoProcessing::FrameStats stats; | 172 VideoProcessing::FrameStats stats; |
158 vp_->ClearFrameStats(&stats); | 173 vp_->ClearFrameStats(&stats); |
159 rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | 174 rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); |
160 ASSERT_EQ(frame_length_, | 175 ASSERT_EQ(frame_length_, |
161 fread(video_buffer.get(), 1, frame_length_, source_file_)); | 176 fread(video_buffer.get(), 1, frame_length_, source_file_)); |
162 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | 177 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, |
163 0, kVideoRotation_0, &video_frame_)); | 178 0, kVideoRotation_0, &video_frame_)); |
164 | 179 |
165 EXPECT_FALSE(vp_->ValidFrameStats(stats)); | 180 EXPECT_FALSE(vp_->ValidFrameStats(stats)); |
166 vp_->GetFrameStats(video_frame_, &stats); | 181 vp_->GetFrameStats(video_frame_, &stats); |
167 EXPECT_GT(stats.num_pixels, 0u); | 182 EXPECT_GT(stats.num_pixels, 0u); |
168 EXPECT_TRUE(vp_->ValidFrameStats(stats)); | 183 EXPECT_TRUE(vp_->ValidFrameStats(stats)); |
169 | 184 |
170 printf("\nFrameStats\n"); | 185 printf("\nFrameStats\n"); |
171 printf("mean: %u\nnum_pixels: %u\nsubSamplFactor: %u\nsum: %u\n\n", | 186 printf("mean: %u\nnum_pixels: %u\nsubSamplFactor: %u\nsum: %u\n\n", |
172 static_cast<unsigned int>(stats.mean), | 187 static_cast<unsigned int>(stats.mean), |
173 static_cast<unsigned int>(stats.num_pixels), | 188 static_cast<unsigned int>(stats.num_pixels), |
174 static_cast<unsigned int>(stats.sub_sampling_factor), | 189 static_cast<unsigned int>(stats.sub_sampling_factor), |
175 static_cast<unsigned int>(stats.sum)); | 190 static_cast<unsigned int>(stats.sum)); |
176 | 191 |
177 vp_->ClearFrameStats(&stats); | 192 vp_->ClearFrameStats(&stats); |
178 EXPECT_FALSE(vp_->ValidFrameStats(stats)); | 193 EXPECT_FALSE(vp_->ValidFrameStats(stats)); |
179 } | 194 } |
180 | 195 |
181 TEST_F(VideoProcessingTest, DISABLED_ON_IOS(PreprocessorLogic)) { | 196 #if defined(WEBRTC_IOS) |
| 197 TEST_F(VideoProcessingTest, DISABLED_PreprocessorLogic) { |
| 198 #else |
| 199 TEST_F(VideoProcessingTest, PreprocessorLogic) { |
| 200 #endif |
182 // Disable temporal sampling (frame dropping). | 201 // Disable temporal sampling (frame dropping). |
183 vp_->EnableTemporalDecimation(false); | 202 vp_->EnableTemporalDecimation(false); |
184 int resolution = 100; | 203 int resolution = 100; |
185 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 15)); | 204 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 15)); |
186 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); | 205 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); |
187 // Disable spatial sampling. | 206 // Disable spatial sampling. |
188 vp_->SetInputFrameResampleMode(kNoRescaling); | 207 vp_->SetInputFrameResampleMode(kNoRescaling); |
189 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); | 208 EXPECT_EQ(VPM_OK, vp_->SetTargetResolution(resolution, resolution, 30)); |
190 VideoFrame* out_frame = NULL; | 209 VideoFrame* out_frame = NULL; |
191 // Set rescaling => output frame != NULL. | 210 // Set rescaling => output frame != NULL. |
192 vp_->SetInputFrameResampleMode(kFastRescaling); | 211 vp_->SetInputFrameResampleMode(kFastRescaling); |
193 PreprocessFrameAndVerify(video_frame_, resolution, resolution, vp_, | 212 PreprocessFrameAndVerify(video_frame_, resolution, resolution, vp_, |
194 out_frame); | 213 out_frame); |
195 // No rescaling => the frame is passed through, still non-null. | 214 // No rescaling => the frame is passed through, still non-null. |
196 vp_->SetInputFrameResampleMode(kNoRescaling); | 215 vp_->SetInputFrameResampleMode(kNoRescaling); |
197 EXPECT_TRUE(vp_->PreprocessFrame(video_frame_) != nullptr); | 216 EXPECT_TRUE(vp_->PreprocessFrame(video_frame_) != nullptr); |
198 } | 217 } |
199 | 218 |
200 TEST_F(VideoProcessingTest, DISABLED_ON_IOS(Resampler)) { | 219 #if defined(WEBRTC_IOS) |
| 220 TEST_F(VideoProcessingTest, DISABLED_Resampler) { |
| 221 #else |
| 222 TEST_F(VideoProcessingTest, Resampler) { |
| 223 #endif |
201 enum { NumRuns = 1 }; | 224 enum { NumRuns = 1 }; |
202 | 225 |
203 int64_t min_runtime = 0; | 226 int64_t min_runtime = 0; |
204 int64_t total_runtime = 0; | 227 int64_t total_runtime = 0; |
205 | 228 |
206 rewind(source_file_); | 229 rewind(source_file_); |
207 ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file \n"; | 230 ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file \n"; |
208 | 231 |
209 // CA not needed here | 232 // CA not needed here |
210 vp_->EnableContentAnalysis(false); | 233 vp_->EnableContentAnalysis(false); |
(...skipping 172 matching lines...) |
383 std::cout << "Watch " << filename.str() << " and verify that it is okay." | 406 std::cout << "Watch " << filename.str() << " and verify that it is okay." |
384 << std::endl; | 407 << std::endl; |
385 FILE* stand_alone_file = fopen(filename.str().c_str(), "wb"); | 408 FILE* stand_alone_file = fopen(filename.str().c_str(), "wb"); |
386 if (PrintVideoFrame(processed, stand_alone_file) < 0) | 409 if (PrintVideoFrame(processed, stand_alone_file) < 0) |
387 std::cerr << "Failed to write: " << filename.str() << std::endl; | 410 std::cerr << "Failed to write: " << filename.str() << std::endl; |
388 if (stand_alone_file) | 411 if (stand_alone_file) |
389 fclose(stand_alone_file); | 412 fclose(stand_alone_file); |
390 } | 413 } |
391 | 414 |
392 } // namespace webrtc | 415 } // namespace webrtc |
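
For reference, a minimal sketch of the conditional test-naming pattern the NEW side uses in place of the DISABLED_ON_IOS() macro from the removed gtest_disable.h include. It relies only on gtest's documented convention that any test whose name starts with "DISABLED_" is compiled but not run; the fixture and test names below (ExampleTest, RunsEverywhereButIOS) are placeholders, not part of this change.

#include <gtest/gtest.h>

// Guard the test name itself: on iOS builds the test is registered as
// DISABLED_..., so gtest skips it; elsewhere it runs normally. The body
// compiles on every platform either way.
#if defined(WEBRTC_IOS)
TEST(ExampleTest, DISABLED_RunsEverywhereButIOS) {
#else
TEST(ExampleTest, RunsEverywhereButIOS) {
#endif
  EXPECT_EQ(2, 1 + 1);
}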