OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 #include "webrtc/video/video_capture_input.h" | 10 #include "webrtc/video/video_capture_input.h" |
(...skipping 36 matching lines...)
47 void AddInputFrame(VideoFrame* frame) { | 47 void AddInputFrame(VideoFrame* frame) { |
48 input_->IncomingCapturedFrame(*frame); | 48 input_->IncomingCapturedFrame(*frame); |
49 } | 49 } |
50 | 50 |
51 void WaitOutputFrame() { | 51 void WaitOutputFrame() { |
52 EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS)); | 52 EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS)); |
53 VideoFrame frame; | 53 VideoFrame frame; |
54 EXPECT_TRUE(input_->GetVideoFrame(&frame)); | 54 EXPECT_TRUE(input_->GetVideoFrame(&frame)); |
55 ASSERT_TRUE(frame.video_frame_buffer()); | 55 ASSERT_TRUE(frame.video_frame_buffer()); |
56 if (!frame.video_frame_buffer()->native_handle()) { | 56 if (!frame.video_frame_buffer()->native_handle()) { |
57 output_frame_ybuffers_.push_back( | 57 output_frame_ybuffers_.push_back(frame.video_frame_buffer()->DataY()); |
58 static_cast<const VideoFrame*>(&frame)->buffer(kYPlane)); | |
59 } | 58 } |
60 output_frames_.push_back( | 59 output_frames_.push_back( |
61 std::unique_ptr<VideoFrame>(new VideoFrame(frame))); | 60 std::unique_ptr<VideoFrame>(new VideoFrame(frame))); |
62 } | 61 } |
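Note on the change in WaitOutputFrame(): the substantive edit is the switch from the deprecated VideoFrame::buffer(kYPlane) accessor to reading pixels through the VideoFrameBuffer returned by video_frame_buffer(). A minimal sketch of the new read path, assuming only the accessors already used in this diff (video_frame_buffer(), native_handle(), DataY()) and the include path used by the file under review:

  #include "webrtc/video_frame.h"

  // Sketch: fetch the Y-plane pointer through the VideoFrameBuffer path.
  // Texture-backed frames expose a native handle instead of CPU-accessible
  // pixel data, so they yield nullptr here, mirroring the branch above.
  const uint8_t* YPlaneOrNull(const webrtc::VideoFrame& frame) {
    if (frame.video_frame_buffer()->native_handle() != nullptr)
      return nullptr;  // Texture frame: no DataY() to read.
    return frame.video_frame_buffer()->DataY();
  }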
63 | 62 |
64 SendStatisticsProxy stats_proxy_; | 63 SendStatisticsProxy stats_proxy_; |
65 | 64 |
66 rtc::Event capture_event_; | 65 rtc::Event capture_event_; |
67 | 66 |
68 std::unique_ptr<OveruseFrameDetector> overuse_detector_; | 67 std::unique_ptr<OveruseFrameDetector> overuse_detector_; |
(...skipping 105 matching lines...)
174 } | 173 } |
175 | 174 |
176 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); | 175 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
177 } | 176 } |
178 | 177 |
179 TEST_F(VideoCaptureInputTest, TestI420Frames) { | 178 TEST_F(VideoCaptureInputTest, TestI420Frames) { |
180 const int kNumFrame = 4; | 179 const int kNumFrame = 4; |
181 std::vector<const uint8_t*> ybuffer_pointers; | 180 std::vector<const uint8_t*> ybuffer_pointers; |
182 for (int i = 0; i < kNumFrame; ++i) { | 181 for (int i = 0; i < kNumFrame; ++i) { |
183 input_frames_.push_back(CreateVideoFrame(static_cast<uint8_t>(i + 1))); | 182 input_frames_.push_back(CreateVideoFrame(static_cast<uint8_t>(i + 1))); |
184 const VideoFrame* const_input_frame = input_frames_[i].get(); | 183 ybuffer_pointers.push_back(input_frames_[i]->video_frame_buffer()->DataY()); |
185 ybuffer_pointers.push_back(const_input_frame->buffer(kYPlane)); | |
186 AddInputFrame(input_frames_[i].get()); | 184 AddInputFrame(input_frames_[i].get()); |
187 WaitOutputFrame(); | 185 WaitOutputFrame(); |
188 } | 186 } |
189 | 187 |
190 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); | 188 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
191 // Make sure the buffer is not copied. | 189 // Make sure the buffer is not copied. |
192 for (int i = 0; i < kNumFrame; ++i) | 190 for (int i = 0; i < kNumFrame; ++i) |
193 EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]); | 191 EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]); |
194 } | 192 } |
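The pointer comparison at the end of TestI420Frames is what actually proves the "not copied" claim: if the capture input deep-copied the pixels, the output frame's DataY() would point at freshly allocated memory and the EXPECT_EQ would fail. A standalone sketch of the same idea, using only accessors shown in this diff (the helper name is hypothetical):

  // Sketch: pointer identity on the Y plane detects an unwanted deep copy.
  // Equal pointers mean both frames share the same underlying buffer.
  bool SharesPixelBuffer(const webrtc::VideoFrame& in,
                         const webrtc::VideoFrame& out) {
    return in.video_frame_buffer()->DataY() ==
           out.video_frame_buffer()->DataY();
  }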
195 | 193 |
(...skipping 52 matching lines...)
248 const int kSizeY = width * height * 2; | 246 const int kSizeY = width * height * 2; |
249 uint8_t buffer[kSizeY]; | 247 uint8_t buffer[kSizeY]; |
250 memset(buffer, data, kSizeY); | 248 memset(buffer, data, kSizeY); |
251 frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2, | 249 frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2, |
252 width / 2, kVideoRotation_0); | 250 width / 2, kVideoRotation_0); |
253 frame->set_render_time_ms(data); | 251 frame->set_render_time_ms(data); |
254 return frame; | 252 return frame; |
255 } | 253 } |
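CreateVideoFrame() fills a local scratch buffer and hands it to CreateFrame(), which copies the bytes into frame-owned I420 storage; that is why TestI420Frames records the pointer returned by the frame's own DataY() rather than the scratch array. A hedged sketch of the same construction with a std::vector instead of a fixed array (the helper name is hypothetical; CreateFrame/set_render_time_ms are assumed to behave as in the diff above):

  #include <vector>
  #include "webrtc/video_frame.h"

  // Sketch: build a solid-color test frame. CreateFrame() copies the scratch
  // bytes, so the vector may be destroyed as soon as the call returns.
  webrtc::VideoFrame MakeSolidFrame(uint8_t value, int width, int height) {
    std::vector<uint8_t> scratch(width * height * 2, value);
    webrtc::VideoFrame frame;
    frame.CreateFrame(scratch.data(), scratch.data(), scratch.data(),
                      width, height, width, width / 2, width / 2,
                      webrtc::kVideoRotation_0);
    frame.set_render_time_ms(value);
    return frame;
  }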
256 | 254 |
257 } // namespace webrtc | 255 } // namespace webrtc |