OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 #include "webrtc/video/video_capture_input.h" | 10 #include "webrtc/video/video_capture_input.h" |
(...skipping 36 matching lines...)
47 void AddInputFrame(VideoFrame* frame) { | 47 void AddInputFrame(VideoFrame* frame) { |
48 input_->IncomingCapturedFrame(*frame); | 48 input_->IncomingCapturedFrame(*frame); |
49 } | 49 } |
50 | 50 |
51 void WaitOutputFrame() { | 51 void WaitOutputFrame() { |
52 EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS)); | 52 EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS)); |
53 VideoFrame frame; | 53 VideoFrame frame; |
54 EXPECT_TRUE(input_->GetVideoFrame(&frame)); | 54 EXPECT_TRUE(input_->GetVideoFrame(&frame)); |
55 ASSERT_TRUE(frame.video_frame_buffer()); | 55 ASSERT_TRUE(frame.video_frame_buffer()); |
56 if (!frame.video_frame_buffer()->native_handle()) { | 56 if (!frame.video_frame_buffer()->native_handle()) { |
57 output_frame_ybuffers_.push_back( | 57 output_frame_ybuffers_.push_back(frame.video_frame_buffer()->DataY()); |
58 static_cast<const VideoFrame*>(&frame)->buffer(kYPlane)); | |
59 } | 58 } |
60 output_frames_.push_back( | 59 output_frames_.push_back( |
61 std::unique_ptr<VideoFrame>(new VideoFrame(frame))); | 60 std::unique_ptr<VideoFrame>(new VideoFrame(frame))); |
62 } | 61 } |
63 | 62 |
64 SendStatisticsProxy stats_proxy_; | 63 SendStatisticsProxy stats_proxy_; |
65 | 64 |
66 rtc::Event capture_event_; | 65 rtc::Event capture_event_; |
67 | 66 |
68 std::unique_ptr<OveruseFrameDetector> overuse_detector_; | 67 std::unique_ptr<OveruseFrameDetector> overuse_detector_; |
(...skipping 104 matching lines...)
173 } | 172 } |
174 | 173 |
175 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); | 174 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
176 } | 175 } |
177 | 176 |
178 TEST_F(VideoCaptureInputTest, TestI420Frames) { | 177 TEST_F(VideoCaptureInputTest, TestI420Frames) { |
179 const int kNumFrame = 4; | 178 const int kNumFrame = 4; |
180 std::vector<const uint8_t*> ybuffer_pointers; | 179 std::vector<const uint8_t*> ybuffer_pointers; |
181 for (int i = 0; i < kNumFrame; ++i) { | 180 for (int i = 0; i < kNumFrame; ++i) { |
182 input_frames_.push_back(CreateVideoFrame(static_cast<uint8_t>(i + 1))); | 181 input_frames_.push_back(CreateVideoFrame(static_cast<uint8_t>(i + 1))); |
183 const VideoFrame* const_input_frame = input_frames_[i].get(); | 182 ybuffer_pointers.push_back(input_frames_[i]->video_frame_buffer()->DataY()); |
184 ybuffer_pointers.push_back(const_input_frame->buffer(kYPlane)); | |
185 AddInputFrame(input_frames_[i].get()); | 183 AddInputFrame(input_frames_[i].get()); |
186 WaitOutputFrame(); | 184 WaitOutputFrame(); |
187 } | 185 } |
188 | 186 |
189 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); | 187 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
190 // Make sure the buffer is not copied. | 188 // Make sure the buffer is not copied. |
191 for (int i = 0; i < kNumFrame; ++i) | 189 for (int i = 0; i < kNumFrame; ++i) |
192 EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]); | 190 EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]); |
193 } | 191 } |
194 | 192 |
(...skipping 52 matching lines...)
247 const int kSizeY = width * height * 2; | 245 const int kSizeY = width * height * 2; |
248 uint8_t buffer[kSizeY]; | 246 uint8_t buffer[kSizeY]; |
249 memset(buffer, data, kSizeY); | 247 memset(buffer, data, kSizeY); |
250 frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2, | 248 frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2, |
251 width / 2, kVideoRotation_0); | 249 width / 2, kVideoRotation_0); |
252 frame->set_render_time_ms(data); | 250 frame->set_render_time_ms(data); |
253 return frame; | 251 return frame; |
254 } | 252 } |
255 | 253 |
256 } // namespace webrtc | 254 } // namespace webrtc |
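The substance of these hunks is the switch from the deprecated per-plane accessor VideoFrame::buffer(kYPlane) to reading plane data through the frame's pixel buffer via VideoFrameBuffer::DataY(); the pointer comparison in TestI420Frames still proves the capture path is zero-copy because both sides now fetch the Y-plane pointer the same way. A minimal sketch of the new pattern, assuming the webrtc/video_frame.h header of this revision (the helper name YPlanePointer is illustrative, not part of the CL):

  #include "webrtc/video_frame.h"

  // Returns the start of the Y plane for an I420 (non-texture) frame.
  const uint8_t* YPlanePointer(const webrtc::VideoFrame& frame) {
    // Old API, removed by this CL:
    //   return frame.buffer(webrtc::kYPlane);
    // New API: go through the frame's ref-counted VideoFrameBuffer instead.
    return frame.video_frame_buffer()->DataY();
  }

Because the VideoFrame copy made in WaitOutputFrame() shares the underlying ref-counted buffer rather than cloning the pixels, the pointer recorded on the input side and the one read back after GetVideoFrame() compare equal, which is exactly what the EXPECT_EQ loop at the end of TestI420Frames checks.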