| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 #include "webrtc/video/video_capture_input.h" | 10 #include "webrtc/video/video_capture_input.h" |
| (...skipping 36 matching lines...) |
| 47 void AddInputFrame(VideoFrame* frame) { | 47 void AddInputFrame(VideoFrame* frame) { |
| 48 input_->IncomingCapturedFrame(*frame); | 48 input_->IncomingCapturedFrame(*frame); |
| 49 } | 49 } |
| 50 | 50 |
| 51 void WaitOutputFrame() { | 51 void WaitOutputFrame() { |
| 52 EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS)); | 52 EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS)); |
| 53 VideoFrame frame; | 53 VideoFrame frame; |
| 54 EXPECT_TRUE(input_->GetVideoFrame(&frame)); | 54 EXPECT_TRUE(input_->GetVideoFrame(&frame)); |
| 55 ASSERT_TRUE(frame.video_frame_buffer()); | 55 ASSERT_TRUE(frame.video_frame_buffer()); |
| 56 if (!frame.video_frame_buffer()->native_handle()) { | 56 if (!frame.video_frame_buffer()->native_handle()) { |
| 57 output_frame_ybuffers_.push_back(frame.video_frame_buffer()->DataY()); | 57 output_frame_ybuffers_.push_back( |
| | 58 static_cast<const VideoFrame*>(&frame)->buffer(kYPlane)); |
| 58 } | 59 } |
| 59 output_frames_.push_back( | 60 output_frames_.push_back( |
| 60 std::unique_ptr<VideoFrame>(new VideoFrame(frame))); | 61 std::unique_ptr<VideoFrame>(new VideoFrame(frame))); |
| 61 } | 62 } |
| 62 | 63 |
| 63 SendStatisticsProxy stats_proxy_; | 64 SendStatisticsProxy stats_proxy_; |
| 64 | 65 |
| 65 rtc::Event capture_event_; | 66 rtc::Event capture_event_; |
| 66 | 67 |
| 67 std::unique_ptr<OveruseFrameDetector> overuse_detector_; | 68 std::unique_ptr<OveruseFrameDetector> overuse_detector_; |
| (...skipping 105 matching lines...) |
| 173 } | 174 } |
| 174 | 175 |
| 175 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); | 176 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
| 176 } | 177 } |
| 177 | 178 |
| 178 TEST_F(VideoCaptureInputTest, TestI420Frames) { | 179 TEST_F(VideoCaptureInputTest, TestI420Frames) { |
| 179 const int kNumFrame = 4; | 180 const int kNumFrame = 4; |
| 180 std::vector<const uint8_t*> ybuffer_pointers; | 181 std::vector<const uint8_t*> ybuffer_pointers; |
| 181 for (int i = 0; i < kNumFrame; ++i) { | 182 for (int i = 0; i < kNumFrame; ++i) { |
| 182 input_frames_.push_back(CreateVideoFrame(static_cast<uint8_t>(i + 1))); | 183 input_frames_.push_back(CreateVideoFrame(static_cast<uint8_t>(i + 1))); |
| 183 ybuffer_pointers.push_back(input_frames_[i]->video_frame_buffer()->DataY()); | 184 const VideoFrame* const_input_frame = input_frames_[i].get(); |
| | 185 ybuffer_pointers.push_back(const_input_frame->buffer(kYPlane)); |
| 184 AddInputFrame(input_frames_[i].get()); | 186 AddInputFrame(input_frames_[i].get()); |
| 185 WaitOutputFrame(); | 187 WaitOutputFrame(); |
| 186 } | 188 } |
| 187 | 189 |
| 188 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); | 190 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
| 189 // Make sure the buffer is not copied. | 191 // Make sure the buffer is not copied. |
| 190 for (int i = 0; i < kNumFrame; ++i) | 192 for (int i = 0; i < kNumFrame; ++i) |
| 191 EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]); | 193 EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]); |
| 192 } | 194 } |
| 193 | 195 |
| (...skipping 52 matching lines...) |
| 246 const int kSizeY = width * height * 2; | 248 const int kSizeY = width * height * 2; |
| 247 uint8_t buffer[kSizeY]; | 249 uint8_t buffer[kSizeY]; |
| 248 memset(buffer, data, kSizeY); | 250 memset(buffer, data, kSizeY); |
| 249 frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2, | 251 frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2, |
| 250 width / 2, kVideoRotation_0); | 252 width / 2, kVideoRotation_0); |
| 251 frame->set_render_time_ms(data); | 253 frame->set_render_time_ms(data); |
| 252 return frame; | 254 return frame; |
| 253 } | 255 } |
| 254 | 256 |
| 255 } // namespace webrtc | 257 } // namespace webrtc |
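The NEW side above reads the Y plane through the const overload of VideoFrame::buffer(kYPlane) (via a cast to const VideoFrame*), where the OLD side used video_frame_buffer()->DataY(). The pointer comparisons in TestI420Frames only prove "the buffer is not copied" if fetching the pointer itself can never trigger a copy. The minimal sketch below uses a hypothetical ToyFrame class, not the real webrtc::VideoFrame, to show how a copy-on-write frame type could break such a check when the non-const accessor is used; whether that is the exact motivation here is an assumption.

```cpp
// Hypothetical copy-on-write frame type (not webrtc::VideoFrame) illustrating
// why pointer-identity checks should go through a const accessor.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <memory>
#include <vector>

class ToyFrame {
 public:
  explicit ToyFrame(std::size_t size)
      : buffer_(std::make_shared<std::vector<uint8_t>>(size)) {}

  // Const accessor: never copies, so it is safe for pointer-identity checks.
  const uint8_t* data() const { return buffer_->data(); }

  // Non-const accessor: detaches (copies) the storage when it is shared,
  // so the returned pointer can change even though the pixels do not.
  uint8_t* data() {
    if (buffer_.use_count() > 1)
      buffer_ = std::make_shared<std::vector<uint8_t>>(*buffer_);
    return buffer_->data();
  }

 private:
  std::shared_ptr<std::vector<uint8_t>> buffer_;
};

int main() {
  ToyFrame input(64);
  ToyFrame output = input;  // Shares the underlying buffer with input.

  // Forcing the const overload on both sides leaves the shared buffer intact,
  // so equal pointers really do mean "no copy was made".
  const ToyFrame* const_output = &output;
  assert(const_output->data() == static_cast<const ToyFrame&>(input).data());

  // Mutable access detaches the copy-on-write buffer; the pointers now differ
  // even though the frame contents are identical.
  assert(output.data() != static_cast<const ToyFrame&>(input).data());
  return 0;
}
```

Under that assumption, casting to const VideoFrame* before calling buffer(kYPlane) keeps the shared buffer intact on both sides of the comparison, which is what the "Make sure the buffer is not copied" loop relies on.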