OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "webrtc/modules/video_processing/main/test/unit_test/video_processing_u
nittest.h" | 11 #include "webrtc/modules/video_processing/main/test/unit_test/video_processing_u
nittest.h" |
12 | 12 |
13 #include <string> | 13 #include <string> |
14 | 14 |
15 #include <gflags/gflags.h> | 15 #include <gflags/gflags.h> |
16 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" | 16 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
17 #include "webrtc/system_wrappers/interface/tick_util.h" | 17 #include "webrtc/system_wrappers/interface/tick_util.h" |
18 #include "webrtc/test/testsupport/fileutils.h" | 18 #include "webrtc/test/testsupport/fileutils.h" |
| 19 #include "webrtc/test/testsupport/gtest_disable.h" |
19 | 20 |
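The DISABLED_ON_IOS() wrappers added below come from the newly included webrtc/test/testsupport/gtest_disable.h. As a rough sketch of the usual pattern (the real header may differ in detail), the macro token-pastes gtest's DISABLED_ prefix onto the test name when building for iOS, so the tests still compile everywhere but are skipped by default on that platform:

    // Sketch only; the actual gtest_disable.h may define this (and companions
    // such as DISABLED_ON_ANDROID) slightly differently.
    #ifdef WEBRTC_IOS
    #define DISABLED_ON_IOS(test) DISABLED_##test  // gtest skips DISABLED_* tests.
    #else
    #define DISABLED_ON_IOS(test) test             // Runs normally elsewhere.
    #endif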
20 namespace webrtc { | 21 namespace webrtc { |
21 | 22 |
22 namespace { | 23 namespace { |
23 | 24 |
24 // Define command line flag 'gen_files' (default value: false). | 25 // Define command line flag 'gen_files' (default value: false). |
25 DEFINE_bool(gen_files, false, "Output files for visual inspection."); | 26 DEFINE_bool(gen_files, false, "Output files for visual inspection."); |
26 | 27 |
27 } // namespace | 28 } // namespace |
28 | 29 |
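For context, DEFINE_bool(gen_files, ...) makes the flag available as FLAGS_gen_files once the test binary parses its command line. A minimal, hypothetical standalone example (not part of this diff; depending on the gflags version the parsing function lives in the google:: or gflags:: namespace):

    #include <gflags/gflags.h>

    DEFINE_bool(gen_files, false, "Output files for visual inspection.");

    int main(int argc, char* argv[]) {
      // Parses --gen_files (and any other defined flags) from argv.
      google::ParseCommandLineFlags(&argc, &argv, true);
      if (FLAGS_gen_files) {
        // Write frames to disk for manual inspection, as the tests below do.
      }
      return 0;
    }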
(...skipping 57 matching lines...)
86 ASSERT_EQ(0, fclose(source_file_)); | 87 ASSERT_EQ(0, fclose(source_file_)); |
87 } | 88 } |
88 source_file_ = NULL; | 89 source_file_ = NULL; |
89 | 90 |
90 if (vpm_ != NULL) { | 91 if (vpm_ != NULL) { |
91 VideoProcessingModule::Destroy(vpm_); | 92 VideoProcessingModule::Destroy(vpm_); |
92 } | 93 } |
93 vpm_ = NULL; | 94 vpm_ = NULL; |
94 } | 95 } |
95 | 96 |
96 TEST_F(VideoProcessingModuleTest, HandleNullBuffer) { | 97 TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(HandleNullBuffer)) { |
97 // TODO(mikhal/stefan): Do we need this one? | 98 // TODO(mikhal/stefan): Do we need this one? |
98 VideoProcessingModule::FrameStats stats; | 99 VideoProcessingModule::FrameStats stats; |
99 // Video frame with unallocated buffer. | 100 // Video frame with unallocated buffer. |
100 VideoFrame videoFrame; | 101 VideoFrame videoFrame; |
101 | 102 |
102 EXPECT_EQ(-3, vpm_->GetFrameStats(&stats, videoFrame)); | 103 EXPECT_EQ(-3, vpm_->GetFrameStats(&stats, videoFrame)); |
103 | 104 |
104 EXPECT_EQ(-1, vpm_->Deflickering(&videoFrame, &stats)); | 105 EXPECT_EQ(-1, vpm_->Deflickering(&videoFrame, &stats)); |
105 | 106 |
106 EXPECT_EQ(-3, vpm_->BrightnessDetection(videoFrame, stats)); | 107 EXPECT_EQ(-3, vpm_->BrightnessDetection(videoFrame, stats)); |
107 } | 108 } |
108 | 109 |
109 TEST_F(VideoProcessingModuleTest, HandleBadStats) { | 110 TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(HandleBadStats)) { |
110 VideoProcessingModule::FrameStats stats; | 111 VideoProcessingModule::FrameStats stats; |
111 rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | 112 rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); |
112 ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, | 113 ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, |
113 source_file_)); | 114 source_file_)); |
114 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | 115 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, |
115 0, kVideoRotation_0, &video_frame_)); | 116 0, kVideoRotation_0, &video_frame_)); |
116 | 117 |
117 EXPECT_EQ(-1, vpm_->Deflickering(&video_frame_, &stats)); | 118 EXPECT_EQ(-1, vpm_->Deflickering(&video_frame_, &stats)); |
118 | 119 |
119 EXPECT_EQ(-3, vpm_->BrightnessDetection(video_frame_, stats)); | 120 EXPECT_EQ(-3, vpm_->BrightnessDetection(video_frame_, stats)); |
120 } | 121 } |
121 | 122 |
122 TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) { | 123 TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(IdenticalResultsAfterReset)) { |
123 VideoFrame video_frame2; | 124 VideoFrame video_frame2; |
124 VideoProcessingModule::FrameStats stats; | 125 VideoProcessingModule::FrameStats stats; |
125 // Only testing non-static functions here. | 126 // Only testing non-static functions here. |
126 rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | 127 rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); |
127 ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, | 128 ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, |
128 source_file_)); | 129 source_file_)); |
129 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | 130 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, |
130 0, kVideoRotation_0, &video_frame_)); | 131 0, kVideoRotation_0, &video_frame_)); |
131 ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_)); | 132 ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_)); |
132 ASSERT_EQ(0, video_frame2.CopyFrame(video_frame_)); | 133 ASSERT_EQ(0, video_frame2.CopyFrame(video_frame_)); |
133 ASSERT_EQ(0, vpm_->Deflickering(&video_frame_, &stats)); | 134 ASSERT_EQ(0, vpm_->Deflickering(&video_frame_, &stats)); |
134 vpm_->Reset(); | 135 vpm_->Reset(); |
135 // Retrieve frame stats again in case Deflickering() has zeroed them. | 136 // Retrieve frame stats again in case Deflickering() has zeroed them. |
136 ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame2)); | 137 ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame2)); |
137 ASSERT_EQ(0, vpm_->Deflickering(&video_frame2, &stats)); | 138 ASSERT_EQ(0, vpm_->Deflickering(&video_frame2, &stats)); |
138 EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); | 139 EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); |
139 | 140 |
140 ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, | 141 ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, |
141 source_file_)); | 142 source_file_)); |
142 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | 143 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, |
143 0, kVideoRotation_0, &video_frame_)); | 144 0, kVideoRotation_0, &video_frame_)); |
144 ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_)); | 145 ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_)); |
145 video_frame2.CopyFrame(video_frame_); | 146 video_frame2.CopyFrame(video_frame_); |
146 ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame_, stats)); | 147 ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame_, stats)); |
147 vpm_->Reset(); | 148 vpm_->Reset(); |
148 ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame2, stats)); | 149 ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame2, stats)); |
149 EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); | 150 EXPECT_TRUE(CompareFrames(video_frame_, video_frame2)); |
150 } | 151 } |
151 | 152 |
152 TEST_F(VideoProcessingModuleTest, FrameStats) { | 153 TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(FrameStats)) { |
153 VideoProcessingModule::FrameStats stats; | 154 VideoProcessingModule::FrameStats stats; |
154 rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); | 155 rtc::scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]); |
155 ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, | 156 ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_, |
156 source_file_)); | 157 source_file_)); |
157 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, | 158 EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_, |
158 0, kVideoRotation_0, &video_frame_)); | 159 0, kVideoRotation_0, &video_frame_)); |
159 | 160 |
160 EXPECT_FALSE(vpm_->ValidFrameStats(stats)); | 161 EXPECT_FALSE(vpm_->ValidFrameStats(stats)); |
161 EXPECT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_)); | 162 EXPECT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_)); |
162 EXPECT_TRUE(vpm_->ValidFrameStats(stats)); | 163 EXPECT_TRUE(vpm_->ValidFrameStats(stats)); |
163 | 164 |
164 printf("\nFrameStats\n"); | 165 printf("\nFrameStats\n"); |
165 printf("mean: %u\nnum_pixels: %u\nsubSamplWidth: " | 166 printf("mean: %u\nnum_pixels: %u\nsubSamplWidth: " |
166 "%u\nsumSamplHeight: %u\nsum: %u\n\n", | 167 "%u\nsumSamplHeight: %u\nsum: %u\n\n", |
167 static_cast<unsigned int>(stats.mean), | 168 static_cast<unsigned int>(stats.mean), |
168 static_cast<unsigned int>(stats.num_pixels), | 169 static_cast<unsigned int>(stats.num_pixels), |
169 static_cast<unsigned int>(stats.subSamplWidth), | 170 static_cast<unsigned int>(stats.subSamplWidth), |
170 static_cast<unsigned int>(stats.subSamplHeight), | 171 static_cast<unsigned int>(stats.subSamplHeight), |
171 static_cast<unsigned int>(stats.sum)); | 172 static_cast<unsigned int>(stats.sum)); |
172 | 173 |
173 vpm_->ClearFrameStats(&stats); | 174 vpm_->ClearFrameStats(&stats); |
174 EXPECT_FALSE(vpm_->ValidFrameStats(stats)); | 175 EXPECT_FALSE(vpm_->ValidFrameStats(stats)); |
175 } | 176 } |
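For reference, the fields printed above suggest roughly the following shape for VideoProcessingModule::FrameStats; this is an inferred sketch, not the real declaration, and the actual struct in video_processing.h likely holds more state (for instance a luminance histogram):

    #include <stdint.h>

    // Inferred sketch based on the printf() above; member names match the
    // printed labels, everything else is an assumption.
    struct FrameStats {
      uint32_t mean;           // Mean luminance of the sampled pixels.
      uint32_t sum;            // Sum of luminance values.
      uint32_t num_pixels;     // Number of pixels sampled.
      uint8_t subSamplWidth;   // Horizontal subsampling factor.
      uint8_t subSamplHeight;  // Vertical subsampling factor.
    };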
176 | 177 |
177 TEST_F(VideoProcessingModuleTest, PreprocessorLogic) { | 178 TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(PreprocessorLogic)) { |
178 // Disable temporal sampling (frame dropping). | 179 // Disable temporal sampling (frame dropping). |
179 vpm_->EnableTemporalDecimation(false); | 180 vpm_->EnableTemporalDecimation(false); |
180 int resolution = 100; | 181 int resolution = 100; |
181 EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 15)); | 182 EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 15)); |
182 EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30)); | 183 EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30)); |
183 // Disable spatial sampling. | 184 // Disable spatial sampling. |
184 vpm_->SetInputFrameResampleMode(kNoRescaling); | 185 vpm_->SetInputFrameResampleMode(kNoRescaling); |
185 EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30)); | 186 EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30)); |
186 VideoFrame* out_frame = NULL; | 187 VideoFrame* out_frame = NULL; |
187 // Set rescaling => output frame != NULL. | 188 // Set rescaling => output frame != NULL. |
188 vpm_->SetInputFrameResampleMode(kFastRescaling); | 189 vpm_->SetInputFrameResampleMode(kFastRescaling); |
189 PreprocessFrameAndVerify(video_frame_, resolution, resolution, vpm_, | 190 PreprocessFrameAndVerify(video_frame_, resolution, resolution, vpm_, |
190 &out_frame); | 191 &out_frame); |
191 // No rescaling => output frame = NULL. | 192 // No rescaling => output frame = NULL. |
192 vpm_->SetInputFrameResampleMode(kNoRescaling); | 193 vpm_->SetInputFrameResampleMode(kNoRescaling); |
193 EXPECT_EQ(VPM_OK, vpm_->PreprocessFrame(video_frame_, &out_frame)); | 194 EXPECT_EQ(VPM_OK, vpm_->PreprocessFrame(video_frame_, &out_frame)); |
194 EXPECT_TRUE(out_frame == NULL); | 195 EXPECT_TRUE(out_frame == NULL); |
195 } | 196 } |
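The NULL/non-NULL contract exercised above implies the usual caller pattern: a NULL output frame means no resampling was performed and the input frame should be used directly. A hedged sketch of such a caller (EncodeFrame() is hypothetical; only PreprocessFrame() and VPM_OK come from the code under test):

    // Illustrative only; mirrors the contract the PreprocessorLogic test checks.
    int32_t PreprocessAndEncode(VideoProcessingModule* vpm,
                                const VideoFrame& input) {
      VideoFrame* decimated = NULL;
      int32_t ret = vpm->PreprocessFrame(input, &decimated);
      if (ret != VPM_OK)
        return ret;
      // NULL means the frame was not rescaled or dropped: use the input as-is.
      const VideoFrame& frame_to_encode = (decimated != NULL) ? *decimated : input;
      // EncodeFrame(frame_to_encode);  // Hypothetical downstream step.
      return VPM_OK;
    }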
196 | 197 |
197 TEST_F(VideoProcessingModuleTest, Resampler) { | 198 TEST_F(VideoProcessingModuleTest, DISABLED_ON_IOS(Resampler)) { |
198 enum { NumRuns = 1 }; | 199 enum { NumRuns = 1 }; |
199 | 200 |
200 int64_t min_runtime = 0; | 201 int64_t min_runtime = 0; |
201 int64_t total_runtime = 0; | 202 int64_t total_runtime = 0; |
202 | 203 |
203 rewind(source_file_); | 204 rewind(source_file_); |
204 ASSERT_TRUE(source_file_ != NULL) << | 205 ASSERT_TRUE(source_file_ != NULL) << |
205 "Cannot read input file \n"; | 206 "Cannot read input file \n"; |
206 | 207 |
207 // Content analysis (CA) not needed here. | 208 // Content analysis (CA) not needed here. |
(...skipping 172 matching lines...)
380 std::cout << "Watch " << filename.str() << " and verify that it is okay." | 381 std::cout << "Watch " << filename.str() << " and verify that it is okay." |
381 << std::endl; | 382 << std::endl; |
382 FILE* stand_alone_file = fopen(filename.str().c_str(), "wb"); | 383 FILE* stand_alone_file = fopen(filename.str().c_str(), "wb"); |
383 if (PrintVideoFrame(processed, stand_alone_file) < 0) | 384 if (PrintVideoFrame(processed, stand_alone_file) < 0) |
384 std::cerr << "Failed to write: " << filename.str() << std::endl; | 385 std::cerr << "Failed to write: " << filename.str() << std::endl; |
385 if (stand_alone_file) | 386 if (stand_alone_file) |
386 fclose(stand_alone_file); | 387 fclose(stand_alone_file); |
387 } | 388 } |
388 | 389 |
389 } // namespace webrtc | 390 } // namespace webrtc |