Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 #include "webrtc/video/video_capture_input.h" | 10 #include "webrtc/video/video_capture_input.h" |
| 11 | 11 |
| 12 #include <memory> | 12 #include <memory> |
| 13 #include <vector> | 13 #include <vector> |
| 14 | 14 |
| 15 #include "testing/gtest/include/gtest/gtest.h" | 15 #include "testing/gtest/include/gtest/gtest.h" |
| 16 #include "webrtc/base/event.h" | 16 #include "webrtc/base/event.h" |
| 17 #include "webrtc/base/refcount.h" | 17 #include "webrtc/base/refcount.h" |
| 18 #include "webrtc/test/fake_texture_frame.h" | 18 #include "webrtc/test/fake_texture_frame.h" |
| 19 #include "webrtc/test/frame_utils.h" | |
| 19 #include "webrtc/video/send_statistics_proxy.h" | 20 #include "webrtc/video/send_statistics_proxy.h" |
| 20 | 21 |
| 21 // If an output frame does not arrive in 500ms, the test will fail. | 22 // If an output frame does not arrive in 500ms, the test will fail. |
| 22 #define FRAME_TIMEOUT_MS 500 | 23 #define FRAME_TIMEOUT_MS 500 |
| 23 | 24 |
| 24 namespace webrtc { | 25 namespace webrtc { |
| 25 | 26 |
| 26 bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2); | |
| 27 bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2); | |
| 28 bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2); | |
| 29 bool EqualFramesVector(const std::vector<std::unique_ptr<VideoFrame>>& frames1, | 27 bool EqualFramesVector(const std::vector<std::unique_ptr<VideoFrame>>& frames1, |
| 30 const std::vector<std::unique_ptr<VideoFrame>>& frames2); | 28 const std::vector<std::unique_ptr<VideoFrame>>& frames2); |
| 31 std::unique_ptr<VideoFrame> CreateVideoFrame(uint8_t length); | 29 std::unique_ptr<VideoFrame> CreateVideoFrame(uint8_t length); |
| 32 | 30 |
| 33 class VideoCaptureInputTest : public ::testing::Test { | 31 class VideoCaptureInputTest : public ::testing::Test { |
| 34 protected: | 32 protected: |
| 35 VideoCaptureInputTest() | 33 VideoCaptureInputTest() |
| 36 : stats_proxy_(Clock::GetRealTimeClock(), | 34 : stats_proxy_(Clock::GetRealTimeClock(), |
| 37 webrtc::VideoSendStream::Config(nullptr), | 35 webrtc::VideoSendStream::Config(nullptr), |
| 38 webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo), | 36 webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo), |
| 39 capture_event_(false, false) {} | 37 capture_event_(false, false) {} |
| 40 | 38 |
| 41 virtual void SetUp() { | 39 virtual void SetUp() { |
| 42 overuse_detector_.reset( | 40 overuse_detector_.reset( |
| 43 new OveruseFrameDetector(Clock::GetRealTimeClock(), CpuOveruseOptions(), | 41 new OveruseFrameDetector(Clock::GetRealTimeClock(), CpuOveruseOptions(), |
| 44 nullptr, nullptr, &stats_proxy_)); | 42 nullptr, nullptr, &stats_proxy_)); |
| 45 input_.reset(new internal::VideoCaptureInput( | 43 input_.reset(new internal::VideoCaptureInput( |
| 46 &capture_event_, nullptr, &stats_proxy_, overuse_detector_.get())); | 44 &capture_event_, nullptr, &stats_proxy_, overuse_detector_.get())); |
| 47 } | 45 } |
| 48 | 46 |
| 49 void AddInputFrame(VideoFrame* frame) { | 47 void AddInputFrame(VideoFrame* frame) { |
| 50 input_->IncomingCapturedFrame(*frame); | 48 input_->IncomingCapturedFrame(*frame); |
| 51 } | 49 } |
| 52 | 50 |
| 53 void WaitOutputFrame() { | 51 void WaitOutputFrame() { |
| 54 EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS)); | 52 EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS)); |
| 55 VideoFrame frame; | 53 VideoFrame frame; |
| 56 EXPECT_TRUE(input_->GetVideoFrame(&frame)); | 54 EXPECT_TRUE(input_->GetVideoFrame(&frame)); |
| 57 if (!frame.native_handle()) { | 55 ASSERT_TRUE(frame.video_frame_buffer()); |
| 56 if (!frame.video_frame_buffer()->native_handle()) { | |
| 58 output_frame_ybuffers_.push_back( | 57 output_frame_ybuffers_.push_back( |
| 59 static_cast<const VideoFrame*>(&frame)->buffer(kYPlane)); | 58 static_cast<const VideoFrame*>(&frame)->buffer(kYPlane)); |
| 60 } | 59 } |
| 61 output_frames_.push_back( | 60 output_frames_.push_back( |
| 62 std::unique_ptr<VideoFrame>(new VideoFrame(frame))); | 61 std::unique_ptr<VideoFrame>(new VideoFrame(frame))); |
| 63 } | 62 } |
| 64 | 63 |
| 65 SendStatisticsProxy stats_proxy_; | 64 SendStatisticsProxy stats_proxy_; |
| 66 | 65 |
| 67 rtc::Event capture_event_; | 66 rtc::Event capture_event_; |
| (...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 161 TEST_F(VideoCaptureInputTest, TestTextureFrames) { | 160 TEST_F(VideoCaptureInputTest, TestTextureFrames) { |
| 162 const int kNumFrame = 3; | 161 const int kNumFrame = 3; |
| 163 for (int i = 0 ; i < kNumFrame; ++i) { | 162 for (int i = 0 ; i < kNumFrame; ++i) { |
| 164 test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle(); | 163 test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle(); |
| 165 // Add one to |i| so that width/height > 0. | 164 // Add one to |i| so that width/height > 0. |
| 166 input_frames_.push_back(std::unique_ptr<VideoFrame>(new VideoFrame( | 165 input_frames_.push_back(std::unique_ptr<VideoFrame>(new VideoFrame( |
| 167 test::FakeNativeHandle::CreateFrame(dummy_handle, i + 1, i + 1, i + 1, | 166 test::FakeNativeHandle::CreateFrame(dummy_handle, i + 1, i + 1, i + 1, |
| 168 i + 1, webrtc::kVideoRotation_0)))); | 167 i + 1, webrtc::kVideoRotation_0)))); |
| 169 AddInputFrame(input_frames_[i].get()); | 168 AddInputFrame(input_frames_[i].get()); |
| 170 WaitOutputFrame(); | 169 WaitOutputFrame(); |
| 171 EXPECT_EQ(dummy_handle, output_frames_[i]->native_handle()); | 170 ASSERT_TRUE(output_frames_[i]->video_frame_buffer()); |
| 171 EXPECT_EQ(dummy_handle, | |
| 172 output_frames_[i]->video_frame_buffer()->native_handle()); | |
| 172 } | 173 } |
| 173 | 174 |
| 174 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); | 175 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
| 175 } | 176 } |
| 176 | 177 |
| 177 TEST_F(VideoCaptureInputTest, TestI420Frames) { | 178 TEST_F(VideoCaptureInputTest, TestI420Frames) { |
| 178 const int kNumFrame = 4; | 179 const int kNumFrame = 4; |
| 179 std::vector<const uint8_t*> ybuffer_pointers; | 180 std::vector<const uint8_t*> ybuffer_pointers; |
| 180 for (int i = 0; i < kNumFrame; ++i) { | 181 for (int i = 0; i < kNumFrame; ++i) { |
| 181 input_frames_.push_back(CreateVideoFrame(static_cast<uint8_t>(i + 1))); | 182 input_frames_.push_back(CreateVideoFrame(static_cast<uint8_t>(i + 1))); |
| 182 const VideoFrame* const_input_frame = input_frames_[i].get(); | 183 const VideoFrame* const_input_frame = input_frames_[i].get(); |
| 183 ybuffer_pointers.push_back(const_input_frame->buffer(kYPlane)); | 184 ybuffer_pointers.push_back(const_input_frame->buffer(kYPlane)); |
| 184 AddInputFrame(input_frames_[i].get()); | 185 AddInputFrame(input_frames_[i].get()); |
| 185 WaitOutputFrame(); | 186 WaitOutputFrame(); |
| 186 } | 187 } |
| 187 | 188 |
| 188 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); | 189 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
| 189 // Make sure the buffer is not copied. | 190 // Make sure the buffer is not copied. |
| 190 for (int i = 0; i < kNumFrame; ++i) | 191 for (int i = 0; i < kNumFrame; ++i) |
| 191 EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]); | 192 EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]); |
| 192 } | 193 } |
| 193 | 194 |
| 194 TEST_F(VideoCaptureInputTest, TestI420FrameAfterTextureFrame) { | 195 TEST_F(VideoCaptureInputTest, TestI420FrameAfterTextureFrame) { |
| 195 test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle(); | 196 test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle(); |
| 196 input_frames_.push_back(std::unique_ptr<VideoFrame>( | 197 input_frames_.push_back(std::unique_ptr<VideoFrame>( |
| 197 new VideoFrame(test::FakeNativeHandle::CreateFrame( | 198 new VideoFrame(test::FakeNativeHandle::CreateFrame( |
| 198 dummy_handle, 1, 1, 1, 1, webrtc::kVideoRotation_0)))); | 199 dummy_handle, 1, 1, 1, 1, webrtc::kVideoRotation_0)))); |
| 199 AddInputFrame(input_frames_[0].get()); | 200 AddInputFrame(input_frames_[0].get()); |
| 200 WaitOutputFrame(); | 201 WaitOutputFrame(); |
| 201 EXPECT_EQ(dummy_handle, output_frames_[0]->native_handle()); | 202 ASSERT_TRUE(output_frames_[0]->video_frame_buffer()); |
| 203 EXPECT_EQ(dummy_handle, | |
| 204 output_frames_[0]->video_frame_buffer()->native_handle()); | |
| 202 | 205 |
| 203 input_frames_.push_back(CreateVideoFrame(2)); | 206 input_frames_.push_back(CreateVideoFrame(2)); |
| 204 AddInputFrame(input_frames_[1].get()); | 207 AddInputFrame(input_frames_[1].get()); |
| 205 WaitOutputFrame(); | 208 WaitOutputFrame(); |
| 206 | 209 |
| 207 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); | 210 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
| 208 } | 211 } |
| 209 | 212 |
| 210 TEST_F(VideoCaptureInputTest, TestTextureFrameAfterI420Frame) { | 213 TEST_F(VideoCaptureInputTest, TestTextureFrameAfterI420Frame) { |
| 211 input_frames_.push_back(CreateVideoFrame(1)); | 214 input_frames_.push_back(CreateVideoFrame(1)); |
| 212 AddInputFrame(input_frames_[0].get()); | 215 AddInputFrame(input_frames_[0].get()); |
| 213 WaitOutputFrame(); | 216 WaitOutputFrame(); |
| 214 | 217 |
| 215 test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle(); | 218 test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle(); |
| 216 input_frames_.push_back(std::unique_ptr<VideoFrame>( | 219 input_frames_.push_back(std::unique_ptr<VideoFrame>( |
| 217 new VideoFrame(test::FakeNativeHandle::CreateFrame( | 220 new VideoFrame(test::FakeNativeHandle::CreateFrame( |
| 218 dummy_handle, 1, 1, 2, 2, webrtc::kVideoRotation_0)))); | 221 dummy_handle, 1, 1, 2, 2, webrtc::kVideoRotation_0)))); |
| 219 AddInputFrame(input_frames_[1].get()); | 222 AddInputFrame(input_frames_[1].get()); |
| 220 WaitOutputFrame(); | 223 WaitOutputFrame(); |
| 221 | 224 |
| 222 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); | 225 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
| 223 } | 226 } |
| 224 | 227 |
| 225 bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) { | |
| 226 if (frame1.native_handle() || frame2.native_handle()) | |
| 227 return EqualTextureFrames(frame1, frame2); | |
| 228 return EqualBufferFrames(frame1, frame2); | |
| 229 } | |
| 230 | |
| 231 bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2) { | |
| 232 return ((frame1.native_handle() == frame2.native_handle()) && | |
| 233 (frame1.width() == frame2.width()) && | |
| 234 (frame1.height() == frame2.height())); | |
| 235 } | |
| 236 | |
| 237 bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2) { | |
| 238 return ((frame1.width() == frame2.width()) && | |
| 239 (frame1.height() == frame2.height()) && | |
| 240 (frame1.stride(kYPlane) == frame2.stride(kYPlane)) && | |
| 241 (frame1.stride(kUPlane) == frame2.stride(kUPlane)) && | |
| 242 (frame1.stride(kVPlane) == frame2.stride(kVPlane)) && | |
| 243 (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) && | |
| 244 (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) && | |
| 245 (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)) && | |
| 246 (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane), | |
| 247 frame1.allocated_size(kYPlane)) == 0) && | |
| 248 (memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane), | |
| 249 frame1.allocated_size(kUPlane)) == 0) && | |
| 250 (memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane), | |
| 251 frame1.allocated_size(kVPlane)) == 0)); | |
| 252 } | |
| 253 | |
| 254 bool EqualFramesVector( | 228 bool EqualFramesVector( |
| 255 const std::vector<std::unique_ptr<VideoFrame>>& frames1, | 229 const std::vector<std::unique_ptr<VideoFrame>>& frames1, |
| 256 const std::vector<std::unique_ptr<VideoFrame>>& frames2) { | 230 const std::vector<std::unique_ptr<VideoFrame>>& frames2) { |
| 257 if (frames1.size() != frames2.size()) | 231 if (frames1.size() != frames2.size()) |
| 258 return false; | 232 return false; |
| 259 for (size_t i = 0; i < frames1.size(); ++i) { | 233 for (size_t i = 0; i < frames1.size(); ++i) { |
| 260 if (!EqualFrames(*frames1[i], *frames2[i])) | 234 // Compare frame buffers, since we don't care about differing timestamps. |
| 235 if (!test::FrameBufsEqual(frames1[i]->video_frame_buffer(), | |
| 236 frames2[i]->video_frame_buffer())) | |
|
pbos-webrtc (2016/04/14 14:50:54): {}s
nisse-webrtc (2016/04/15 07:15:09): Done.
| |
| 261 return false; | 237 return false; |
| 262 } | 238 } |
| 263 return true; | 239 return true; |
| 264 } | 240 } |
| 265 | 241 |
| 266 std::unique_ptr<VideoFrame> CreateVideoFrame(uint8_t data) { | 242 std::unique_ptr<VideoFrame> CreateVideoFrame(uint8_t data) { |
| 267 std::unique_ptr<VideoFrame> frame(new VideoFrame()); | 243 std::unique_ptr<VideoFrame> frame(new VideoFrame()); |
| 268 const int width = 36; | 244 const int width = 36; |
| 269 const int height = 24; | 245 const int height = 24; |
| 270 const int kSizeY = width * height * 2; | 246 const int kSizeY = width * height * 2; |
| 271 uint8_t buffer[kSizeY]; | 247 uint8_t buffer[kSizeY]; |
| 272 memset(buffer, data, kSizeY); | 248 memset(buffer, data, kSizeY); |
| 273 frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2, | 249 frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2, |
| 274 width / 2, kVideoRotation_0); | 250 width / 2, kVideoRotation_0); |
| 275 frame->set_render_time_ms(data); | 251 frame->set_render_time_ms(data); |
| 276 return frame; | 252 return frame; |
| 277 } | 253 } |
| 278 | 254 |
| 279 } // namespace webrtc | 255 } // namespace webrtc |
| OLD | NEW |