OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 #include "webrtc/video/video_capture_input.h" | 10 #include "webrtc/video/video_capture_input.h" |
(...skipping 205 matching lines...)
216 input_frames_.push_back(std::unique_ptr<VideoFrame>( | 216 input_frames_.push_back(std::unique_ptr<VideoFrame>( |
217 new VideoFrame(test::FakeNativeHandle::CreateFrame( | 217 new VideoFrame(test::FakeNativeHandle::CreateFrame( |
218 dummy_handle, 1, 1, 2, 2, webrtc::kVideoRotation_0)))); | 218 dummy_handle, 1, 1, 2, 2, webrtc::kVideoRotation_0)))); |
219 AddInputFrame(input_frames_[1].get()); | 219 AddInputFrame(input_frames_[1].get()); |
220 WaitOutputFrame(); | 220 WaitOutputFrame(); |
221 | 221 |
222 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); | 222 EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
223 } | 223 } |
224 | 224 |
225 bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) { | 225 bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) { |
226 if (frame1.native_handle() != NULL || frame2.native_handle() != NULL) | 226 if (frame1.native_handle() || frame2.native_handle()) |
227 return EqualTextureFrames(frame1, frame2); | 227 return EqualTextureFrames(frame1, frame2); |
228 return EqualBufferFrames(frame1, frame2); | 228 return EqualBufferFrames(frame1, frame2); |
229 } | 229 } |
230 | 230 |
231 bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2) { | 231 bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2) { |
232 return ((frame1.native_handle() == frame2.native_handle()) && | 232 return ((frame1.native_handle() == frame2.native_handle()) && |
233 (frame1.width() == frame2.width()) && | 233 (frame1.width() == frame2.width()) && |
234 (frame1.height() == frame2.height())); | 234 (frame1.height() == frame2.height())); |
235 } | 235 } |
236 | 236 |
(...skipping 33 matching lines...)
270 const int kSizeY = width * height * 2; | 270 const int kSizeY = width * height * 2; |
271 uint8_t buffer[kSizeY]; | 271 uint8_t buffer[kSizeY]; |
272 memset(buffer, data, kSizeY); | 272 memset(buffer, data, kSizeY); |
273 frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2, | 273 frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2, |
274 width / 2, kVideoRotation_0); | 274 width / 2, kVideoRotation_0); |
275 frame->set_render_time_ms(data); | 275 frame->set_render_time_ms(data); |
276 return frame; | 276 return frame; |
277 } | 277 } |
278 | 278 |
279 } // namespace webrtc | 279 } // namespace webrtc |
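
The only functional change in this hunk is on line 226: the explicit != NULL comparisons are dropped in favor of the pointers' implicit conversion to bool, and the texture-vs-buffer dispatch is otherwise unchanged. Below is a minimal standalone sketch of that dispatch; Frame here is a hypothetical stand-in type, not the real webrtc::VideoFrame (which exposes native_handle(), width() and height() as accessors), and the buffer comparison lives in the hunk skipped above, so it is omitted.

    // Sketch only: Frame is a simplified stand-in for webrtc::VideoFrame.
    struct Frame {
      void* native_handle = nullptr;  // non-null for texture-backed frames
      int width = 0;
      int height = 0;
    };

    // Texture frames are compared by handle and dimensions only.
    bool EqualTextureFrames(const Frame& a, const Frame& b) {
      return a.native_handle == b.native_handle && a.width == b.width &&
             a.height == b.height;
    }

    bool EqualFrames(const Frame& a, const Frame& b) {
      // The implicit pointer-to-bool conversion below is equivalent to the
      // removed "!= NULL" checks: either frame being texture-backed routes
      // the comparison to EqualTextureFrames.
      if (a.native_handle || b.native_handle)
        return EqualTextureFrames(a, b);
      return false;  // buffer (I420) comparison omitted in this sketch
    }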