| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 95 apply_rotation_(true) { | 95 apply_rotation_(true) { |
| 96 Construct(); | 96 Construct(); |
| 97 } | 97 } |
| 98 | 98 |
| 99 void VideoCapturer::Construct() { | 99 void VideoCapturer::Construct() { |
| 100 ClearAspectRatio(); | 100 ClearAspectRatio(); |
| 101 enable_camera_list_ = false; | 101 enable_camera_list_ = false; |
| 102 square_pixel_aspect_ratio_ = false; | 102 square_pixel_aspect_ratio_ = false; |
| 103 capture_state_ = CS_STOPPED; | 103 capture_state_ = CS_STOPPED; |
| 104 SignalFrameCaptured.connect(this, &VideoCapturer::OnFrameCaptured); | 104 SignalFrameCaptured.connect(this, &VideoCapturer::OnFrameCaptured); |
| 105 // TODO(perkj) SignalVideoFrame is used directly by Chrome remoting. |
| 106 // Before that is refactored, SignalVideoFrame must forward frames to the |
| 107 // |VideoBroadcaster|. |
| 108 SignalVideoFrame.connect(this, &VideoCapturer::OnFrame); |
| 105 scaled_width_ = 0; | 109 scaled_width_ = 0; |
| 106 scaled_height_ = 0; | 110 scaled_height_ = 0; |
| 107 muted_ = false; | 111 muted_ = false; |
| 108 black_frame_count_down_ = kNumBlackFramesOnMute; | 112 black_frame_count_down_ = kNumBlackFramesOnMute; |
| 109 enable_video_adapter_ = true; | 113 enable_video_adapter_ = true; |
| 110 adapt_frame_drops_ = 0; | 114 adapt_frame_drops_ = 0; |
| 111 previous_frame_time_ = 0.0; | 115 previous_frame_time_ = 0.0; |
| 112 #ifdef HAVE_WEBRTC_VIDEO | 116 #ifdef HAVE_WEBRTC_VIDEO |
| 113 // There are lots of video capturers out there that don't call | 117 // There are lots of video capturers out there that don't call |
| 114 // set_frame_factory. We can either go change all of them, or we | 118 // set_frame_factory. We can either go change all of them, or we |
| (...skipping 104 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 219 black_frame_count_down_ = kNumBlackFramesOnMute; | 223 black_frame_count_down_ = kNumBlackFramesOnMute; |
| 220 // Following frames will be overwritten with black, then the camera will be | 224 // Following frames will be overwritten with black, then the camera will be |
| 221 // paused. | 225 // paused. |
| 222 return true; | 226 return true; |
| 223 } | 227 } |
| 224 // Start the camera. | 228 // Start the camera. |
| 225 thread_->Clear(this, MSG_DO_PAUSE); | 229 thread_->Clear(this, MSG_DO_PAUSE); |
| 226 return Pause(false); | 230 return Pause(false); |
| 227 } | 231 } |
| 228 | 232 |
| 229 // Note that the last caller decides whether rotation should be applied if there | |
| 230 // are multiple send streams using the same camera. | |
| 231 bool VideoCapturer::SetApplyRotation(bool enable) { | |
| 232 apply_rotation_ = enable; | |
| 233 if (frame_factory_) { | |
| 234 frame_factory_->SetApplyRotation(apply_rotation_); | |
| 235 } | |
| 236 return true; | |
| 237 } | |
| 238 | |
| 239 void VideoCapturer::SetSupportedFormats( | 233 void VideoCapturer::SetSupportedFormats( |
| 240 const std::vector<VideoFormat>& formats) { | 234 const std::vector<VideoFormat>& formats) { |
| 241 supported_formats_ = formats; | 235 supported_formats_ = formats; |
| 242 UpdateFilteredSupportedFormats(); | 236 UpdateFilteredSupportedFormats(); |
| 243 } | 237 } |
| 244 | 238 |
| 245 bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format, | 239 bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format, |
| 246 VideoFormat* best_format) { | 240 VideoFormat* best_format) { |
| 247 // TODO(fbarchard): Directly support max_format. | 241 // TODO(fbarchard): Directly support max_format. |
| 248 UpdateFilteredSupportedFormats(); | 242 UpdateFilteredSupportedFormats(); |
| (...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 316 VideoFormat* last_captured_frame_format) { | 310 VideoFormat* last_captured_frame_format) { |
| 317 rtc::CritScope cs(&frame_stats_crit_); | 311 rtc::CritScope cs(&frame_stats_crit_); |
| 318 GetVariableSnapshot(adapt_frame_drops_data_, adapt_drops_stats); | 312 GetVariableSnapshot(adapt_frame_drops_data_, adapt_drops_stats); |
| 319 GetVariableSnapshot(frame_time_data_, frame_time_stats); | 313 GetVariableSnapshot(frame_time_data_, frame_time_stats); |
| 320 *last_captured_frame_format = last_captured_frame_format_; | 314 *last_captured_frame_format = last_captured_frame_format_; |
| 321 | 315 |
| 322 adapt_frame_drops_data_.Reset(); | 316 adapt_frame_drops_data_.Reset(); |
| 323 frame_time_data_.Reset(); | 317 frame_time_data_.Reset(); |
| 324 } | 318 } |
| 325 | 319 |
| 320 void VideoCapturer::RemoveSink( |
| 321 rtc::VideoSinkInterface<cricket::VideoFrame>* sink) { |
| 322 broadcaster_.RemoveSink(sink); |
| 323 } |
| 324 |
| 325 void VideoCapturer::AddOrUpdateSink( |
| 326 rtc::VideoSinkInterface<cricket::VideoFrame>* sink, |
| 327 const rtc::VideoSinkWants& wants) { |
| 328 broadcaster_.AddOrUpdateSink(sink, wants); |
| 329 OnSinkWantsChanged(broadcaster_.wants()); |
| 330 } |
| 331 |
| 332 void VideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) { |
| 333 apply_rotation_ = wants.rotation_applied; |
| 334 if (frame_factory_) { |
| 335 frame_factory_->SetApplyRotation(apply_rotation_); |
| 336 } |
| 337 } |
| 338 |
| 326 void VideoCapturer::OnFrameCaptured(VideoCapturer*, | 339 void VideoCapturer::OnFrameCaptured(VideoCapturer*, |
| 327 const CapturedFrame* captured_frame) { | 340 const CapturedFrame* captured_frame) { |
| 328 if (muted_) { | 341 if (muted_) { |
| 329 if (black_frame_count_down_ == 0) { | 342 if (black_frame_count_down_ == 0) { |
| 330 thread_->Post(this, MSG_DO_PAUSE, NULL); | 343 thread_->Post(this, MSG_DO_PAUSE, NULL); |
| 331 } else { | 344 } else { |
| 332 --black_frame_count_down_; | 345 --black_frame_count_down_; |
| 333 } | 346 } |
| 334 } | 347 } |
| 335 | 348 |
| 336 if (SignalVideoFrame.is_empty()) { | 349 if (!broadcaster_.frame_wanted()) { |
| 337 return; | 350 return; |
| 338 } | 351 } |
| 339 | 352 |
| 340 // Use a temporary buffer to scale | 353 // Use a temporary buffer to scale |
| 341 rtc::scoped_ptr<uint8_t[]> scale_buffer; | 354 rtc::scoped_ptr<uint8_t[]> scale_buffer; |
| 342 | 355 |
| 343 if (IsScreencast()) { | 356 if (IsScreencast()) { |
| 344 int scaled_width, scaled_height; | 357 int scaled_width, scaled_height; |
| 345 int desired_screencast_fps = capture_format_.get() ? | 358 int desired_screencast_fps = capture_format_.get() ? |
| 346 VideoFormat::IntervalToFps(capture_format_->interval) : | 359 VideoFormat::IntervalToFps(capture_format_->interval) : |
| (...skipping 163 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 510 << "From " << ToString(captured_frame) << " To " | 523 << "From " << ToString(captured_frame) << " To " |
| 511 << cropped_width << " x " << cropped_height; | 524 << cropped_width << " x " << cropped_height; |
| 512 return; | 525 return; |
| 513 } | 526 } |
| 514 | 527 |
| 515 if (muted_) { | 528 if (muted_) { |
| 516 // TODO(pthatcher): Use frame_factory_->CreateBlackFrame() instead. | 529 // TODO(pthatcher): Use frame_factory_->CreateBlackFrame() instead. |
| 517 adapted_frame->SetToBlack(); | 530 adapted_frame->SetToBlack(); |
| 518 } | 531 } |
| 519 SignalVideoFrame(this, adapted_frame.get()); | 532 SignalVideoFrame(this, adapted_frame.get()); |
| 533 UpdateStats(captured_frame); |
| 534 } |
| 520 | 535 |
| 521 UpdateStats(captured_frame); | 536 void VideoCapturer::OnFrame(VideoCapturer* capturer, const VideoFrame* frame) { |
| 537 broadcaster_.OnFrame(*frame); |
| 522 } | 538 } |
| 523 | 539 |
| 524 void VideoCapturer::SetCaptureState(CaptureState state) { | 540 void VideoCapturer::SetCaptureState(CaptureState state) { |
| 525 if (state == capture_state_) { | 541 if (state == capture_state_) { |
| 526 // Don't trigger a state changed callback if the state hasn't changed. | 542 // Don't trigger a state changed callback if the state hasn't changed. |
| 527 return; | 543 return; |
| 528 } | 544 } |
| 529 StateChangeParams* state_params = new StateChangeParams(state); | 545 StateChangeParams* state_params = new StateChangeParams(state); |
| 530 capture_state_ = state; | 546 capture_state_ = state; |
| 531 thread_->Post(this, MSG_STATE_CHANGE, state_params); | 547 thread_->Post(this, MSG_STATE_CHANGE, state_params); |
| (...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 702 void VideoCapturer::GetVariableSnapshot( | 718 void VideoCapturer::GetVariableSnapshot( |
| 703 const rtc::RollingAccumulator<T>& data, | 719 const rtc::RollingAccumulator<T>& data, |
| 704 VariableInfo<T>* stats) { | 720 VariableInfo<T>* stats) { |
| 705 stats->max_val = data.ComputeMax(); | 721 stats->max_val = data.ComputeMax(); |
| 706 stats->mean = data.ComputeMean(); | 722 stats->mean = data.ComputeMean(); |
| 707 stats->min_val = data.ComputeMin(); | 723 stats->min_val = data.ComputeMin(); |
| 708 stats->variance = data.ComputeVariance(); | 724 stats->variance = data.ComputeVariance(); |
| 709 } | 725 } |
| 710 | 726 |
| 711 } // namespace cricket | 727 } // namespace cricket |
| OLD | NEW |