OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 331 matching lines...)
342 } | 342 } |
343 return true; | 343 return true; |
344 } | 344 } |
345 | 345 |
346 void WebRtcVideoCapturer::OnIncomingCapturedFrame( | 346 void WebRtcVideoCapturer::OnIncomingCapturedFrame( |
347 const int32_t id, | 347 const int32_t id, |
348 const webrtc::VideoFrame& sample) { | 348 const webrtc::VideoFrame& sample) { |
349 // This can only happen between Start() and Stop(). | 349 // This can only happen between Start() and Stop(). |
350 RTC_DCHECK(start_thread_); | 350 RTC_DCHECK(start_thread_); |
351 RTC_DCHECK(async_invoker_); | 351 RTC_DCHECK(async_invoker_); |
352 if (start_thread_->IsCurrent()) { | 352 |
353 SignalFrameCapturedOnStartThread(sample); | 353 ++captured_frames_; |
354 } else { | 354 // Log the size and pixel aspect ratio of the first captured frame. |
355 // This currently happens with at least VideoCaptureModuleV4L2 and | 355 if (1 == captured_frames_) { |
356 // possibly other implementations of WebRTC's VideoCaptureModule. | 356 LOG(LS_INFO) << "Captured frame size " |
357 // In order to maintain the threading contract with the upper layers and | 357 << sample.width() << "x" << sample.height() |
358 // consistency with other capturers such as in Chrome, we need to do a | 358 << ". Expected format " << GetCaptureFormat()->ToString(); |
359 // thread hop. | |
360 // Note that Stop() can cause the async invoke call to be cancelled. | |
361 async_invoker_->AsyncInvoke<void>( | |
362 RTC_FROM_HERE, start_thread_, | |
363 // Note that Bind captures by value, so there's an intermediate copy | |
364 // of sample. | |
365 rtc::Bind(&WebRtcVideoCapturer::SignalFrameCapturedOnStartThread, this, | |
366 sample)); | |
367 } | 359 } |
| 360 |
| 361 OnFrame(cricket::WebRtcVideoFrame( |
| 362 sample.video_frame_buffer(), sample.rotation(), |
| 363 sample.render_time_ms() * rtc::kNumMicrosecsPerMillisec, 0), |
| 364 sample.width(), sample.height()); |
368 } | 365 } |
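
Note on the removed thread hop: the old path checked start_thread_->IsCurrent() and, when the callback arrived on a capture thread (e.g. from VideoCaptureModuleV4L2), used async_invoker_->AsyncInvoke with rtc::Bind to re-deliver a copy of the frame on the start thread; the new code calls OnFrame() directly from whichever thread the capture module uses. The sketch below is only a generic, standalone illustration of that kind of hop (a mutex-guarded task queue drained by the start thread), not the rtc::AsyncInvoker API, and all names in it are hypothetical.

    // Generic sketch of "post work back to the start thread", assuming the
    // start thread periodically drains a task queue. Not a WebRTC API.
    #include <functional>
    #include <mutex>
    #include <queue>
    #include <utility>

    class StartThreadTaskQueue {
     public:
      // Capture thread: enqueue a task (e.g. a lambda holding a frame copy).
      void Post(std::function<void()> task) {
        std::lock_guard<std::mutex> lock(mutex_);
        tasks_.push(std::move(task));
      }

      // Start thread: run everything queued so far, outside the lock.
      void RunPending() {
        std::queue<std::function<void()>> pending;
        {
          std::lock_guard<std::mutex> lock(mutex_);
          tasks_.swap(pending);
        }
        for (; !pending.empty(); pending.pop())
          pending.front()();
      }

     private:
      std::mutex mutex_;
      std::queue<std::function<void()>> tasks_;
    };
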
369 | 366 |
370 void WebRtcVideoCapturer::OnCaptureDelayChanged(const int32_t id, | 367 void WebRtcVideoCapturer::OnCaptureDelayChanged(const int32_t id, |
371 const int32_t delay) { | 368 const int32_t delay) { |
372 LOG(LS_INFO) << "Capture delay changed to " << delay << " ms"; | 369 LOG(LS_INFO) << "Capture delay changed to " << delay << " ms"; |
373 } | 370 } |
374 | 371 |
375 void WebRtcVideoCapturer::SignalFrameCapturedOnStartThread( | |
376 const webrtc::VideoFrame& frame) { | |
377 // This can only happen between Start() and Stop(). | |
378 RTC_DCHECK(start_thread_); | |
379 RTC_DCHECK(start_thread_->IsCurrent()); | |
380 RTC_DCHECK(async_invoker_); | |
381 | |
382 ++captured_frames_; | |
383 // Log the size and pixel aspect ratio of the first captured frame. | |
384 if (1 == captured_frames_) { | |
385 LOG(LS_INFO) << "Captured frame size " | |
386 << frame.width() << "x" << frame.height() | |
387 << ". Expected format " << GetCaptureFormat()->ToString(); | |
388 } | |
389 | |
390 // Signal downstream components of the captured frame. |
391 // The CapturedFrame class doesn't support planes. We have to ExtractBuffer | |
392 // to one block for it. | |
393 size_t length = | |
394 webrtc::CalcBufferSize(webrtc::kI420, frame.width(), frame.height()); | |
395 capture_buffer_.resize(length); | |
396 // TODO(magjed): Refactor the WebRtcCapturedFrame to avoid memory copy or | |
397 // take over ownership of the buffer held by |frame| if that's possible. | |
398 webrtc::ExtractBuffer(frame, length, &capture_buffer_[0]); | |
399 WebRtcCapturedFrame webrtc_frame(frame, &capture_buffer_[0], length); | |
400 SignalFrameCaptured(this, &webrtc_frame); | |
401 } | |
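
The ExtractBuffer step in the removed code exists because cricket::CapturedFrame carries one contiguous data block rather than separate I420 planes, so the planar frame is flattened into capture_buffer_, sized by webrtc::CalcBufferSize(webrtc::kI420, ...). Below is a minimal standalone sketch of that size computation (full-resolution Y plane plus two half-resolution chroma planes); the helper name is hypothetical and the formula is the usual I420 layout, assumed to match what CalcBufferSize returns here.

    #include <cstddef>

    // Bytes needed for one contiguous I420 block: Y at full resolution,
    // U and V at half resolution in each dimension (rounded up).
    size_t I420BufferSize(int width, int height) {
      const size_t y_size = static_cast<size_t>(width) * height;
      const size_t chroma_size =
          static_cast<size_t>((width + 1) / 2) * ((height + 1) / 2);
      return y_size + 2 * chroma_size;  // e.g. 640x480 -> 460800 bytes
    }
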
402 | |
403 // WebRtcCapturedFrame | |
404 WebRtcCapturedFrame::WebRtcCapturedFrame(const webrtc::VideoFrame& sample, | |
405 void* buffer, | |
406 size_t length) { | |
407 width = sample.width(); | |
408 height = sample.height(); | |
409 fourcc = FOURCC_I420; | |
410 // TODO(hellner): Support pixel aspect ratio (for OSX). | |
411 pixel_width = 1; | |
412 pixel_height = 1; | |
413 // Convert units from VideoFrame RenderTimeMs to CapturedFrame (nanoseconds). | |
414 time_stamp = sample.render_time_ms() * rtc::kNumNanosecsPerMillisec; | |
415 data_size = rtc::checked_cast<uint32_t>(length); | |
416 data = buffer; | |
417 rotation = sample.rotation(); | |
418 } | |
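
The two frame wrappers carry the same capture time in different units: the removed CapturedFrame stores nanoseconds (render_time_ms() * rtc::kNumNanosecsPerMillisec, per the comment above), while the new OnFrame() path passes microseconds via rtc::kNumMicrosecsPerMillisec. A minimal standalone sketch of the two conversions, assuming the usual values of those constants (1 ms = 1,000 us = 1,000,000 ns) and a hypothetical render time:

    #include <cassert>
    #include <cstdint>

    // Assumed to match rtc::kNumMicrosecsPerMillisec / kNumNanosecsPerMillisec.
    constexpr int64_t kMicrosecsPerMillisec = 1000;
    constexpr int64_t kNanosecsPerMillisec = 1000000;

    int main() {
      const int64_t render_time_ms = 66;  // hypothetical render_time_ms() value
      const int64_t ns = render_time_ms * kNanosecsPerMillisec;   // old CapturedFrame::time_stamp
      const int64_t us = render_time_ms * kMicrosecsPerMillisec;  // new WebRtcVideoFrame timestamp
      assert(ns == 66000000 && us == 66000);
      return 0;
    }
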
419 | |
420 } // namespace cricket | 372 } // namespace cricket |