OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 208 matching lines...)
219 black_frame_count_down_ = kNumBlackFramesOnMute; | 219 black_frame_count_down_ = kNumBlackFramesOnMute; |
220 // Following frames will be overwritten with black, then the camera will be | 220 // Following frames will be overwritten with black, then the camera will be |
221 // paused. | 221 // paused. |
222 return true; | 222 return true; |
223 } | 223 } |
224 // Start the camera. | 224 // Start the camera. |
225 thread_->Clear(this, MSG_DO_PAUSE); | 225 thread_->Clear(this, MSG_DO_PAUSE); |
226 return Pause(false); | 226 return Pause(false); |
227 } | 227 } |
228 | 228 |
229 // Note that the last caller decides whether rotation should be applied if there | |
230 // are multiple send streams using the same camera. | |
231 bool VideoCapturer::SetApplyRotation(bool enable) { | |
232 apply_rotation_ = enable; | |
233 if (frame_factory_) { | |
234 frame_factory_->SetApplyRotation(apply_rotation_); | |
235 } | |
236 return true; | |
237 } | |
238 | |
239 void VideoCapturer::SetSupportedFormats( | 229 void VideoCapturer::SetSupportedFormats( |
240 const std::vector<VideoFormat>& formats) { | 230 const std::vector<VideoFormat>& formats) { |
241 supported_formats_ = formats; | 231 supported_formats_ = formats; |
242 UpdateFilteredSupportedFormats(); | 232 UpdateFilteredSupportedFormats(); |
243 } | 233 } |
244 | 234 |
245 bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format, | 235 bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format, |
246 VideoFormat* best_format) { | 236 VideoFormat* best_format) { |
247 // TODO(fbarchard): Directly support max_format. | 237 // TODO(fbarchard): Directly support max_format. |
248 UpdateFilteredSupportedFormats(); | 238 UpdateFilteredSupportedFormats(); |
(...skipping 67 matching lines...)
316 VideoFormat* last_captured_frame_format) { | 306 VideoFormat* last_captured_frame_format) { |
317 rtc::CritScope cs(&frame_stats_crit_); | 307 rtc::CritScope cs(&frame_stats_crit_); |
318 GetVariableSnapshot(adapt_frame_drops_data_, adapt_drops_stats); | 308 GetVariableSnapshot(adapt_frame_drops_data_, adapt_drops_stats); |
319 GetVariableSnapshot(frame_time_data_, frame_time_stats); | 309 GetVariableSnapshot(frame_time_data_, frame_time_stats); |
320 *last_captured_frame_format = last_captured_frame_format_; | 310 *last_captured_frame_format = last_captured_frame_format_; |
321 | 311 |
322 adapt_frame_drops_data_.Reset(); | 312 adapt_frame_drops_data_.Reset(); |
323 frame_time_data_.Reset(); | 313 frame_time_data_.Reset(); |
324 } | 314 } |
325 | 315 |
| 316 void VideoCapturer::AddSink( |
| 317 rtc::VideoSinkInterface<cricket::VideoFrame>* sink) { |
| 318 broadcaster_.AddSink(sink); |
| 319 } |
| 320 |
| 321 void VideoCapturer::RemoveSink( |
| 322 rtc::VideoSinkInterface<cricket::VideoFrame>* sink) { |
| 323 broadcaster_.RemoveSink(sink); |
| 324 } |
| 325 |
| 326 void VideoCapturer::AddOrUpdateSink( |
| 327 rtc::VideoSinkInterface<cricket::VideoFrame>* sink, |
| 328 const rtc::VideoSinkHints& hints) { |
| 329 broadcaster_.AddOrUpdateSink(sink, hints); |
| 330 OnSourceChangeRequested(broadcaster_.DerivedHints()); |
| 331 } |
| 332 |
| 333 void VideoCapturer::OnSourceChangeRequested(const rtc::VideoSinkHints& hints) { |
| 334 apply_rotation_ = !hints.can_apply_rotation; |
| 335 if (frame_factory_) { |
| 336 frame_factory_->SetApplyRotation(apply_rotation_); |
| 337 } |
| 338 } |
| 339 |
326 void VideoCapturer::OnFrameCaptured(VideoCapturer*, | 340 void VideoCapturer::OnFrameCaptured(VideoCapturer*, |
327 const CapturedFrame* captured_frame) { | 341 const CapturedFrame* captured_frame) { |
328 if (muted_) { | 342 if (muted_) { |
329 if (black_frame_count_down_ == 0) { | 343 if (black_frame_count_down_ == 0) { |
330 thread_->Post(this, MSG_DO_PAUSE, NULL); | 344 thread_->Post(this, MSG_DO_PAUSE, NULL); |
331 } else { | 345 } else { |
332 --black_frame_count_down_; | 346 --black_frame_count_down_; |
333 } | 347 } |
334 } | 348 } |
335 | 349 |
336 if (SignalVideoFrame.is_empty()) { | 350 if (!broadcaster_.FrameWillBeDelivered()) { |
337 return; | 351 return; |
338 } | 352 } |
339 | 353 |
340 // Use a temporary buffer to scale | 354 // Use a temporary buffer to scale |
341 rtc::scoped_ptr<uint8_t[]> scale_buffer; | 355 rtc::scoped_ptr<uint8_t[]> scale_buffer; |
342 | 356 |
343 if (IsScreencast()) { | 357 if (IsScreencast()) { |
344 int scaled_width, scaled_height; | 358 int scaled_width, scaled_height; |
345 int desired_screencast_fps = capture_format_.get() ? | 359 int desired_screencast_fps = capture_format_.get() ? |
346 VideoFormat::IntervalToFps(capture_format_->interval) : | 360 VideoFormat::IntervalToFps(capture_format_->interval) : |
(...skipping 162 matching lines...)
509 LOG(LS_ERROR) << "Couldn't convert to I420! " | 523 LOG(LS_ERROR) << "Couldn't convert to I420! " |
510 << "From " << ToString(captured_frame) << " To " | 524 << "From " << ToString(captured_frame) << " To " |
511 << cropped_width << " x " << cropped_height; | 525 << cropped_width << " x " << cropped_height; |
512 return; | 526 return; |
513 } | 527 } |
514 | 528 |
515 if (muted_) { | 529 if (muted_) { |
516 // TODO(pthatcher): Use frame_factory_->CreateBlackFrame() instead. | 530 // TODO(pthatcher): Use frame_factory_->CreateBlackFrame() instead. |
517 adapted_frame->SetToBlack(); | 531 adapted_frame->SetToBlack(); |
518 } | 532 } |
519 SignalVideoFrame(this, adapted_frame.get()); | 533 broadcaster_.OnFrame(*adapted_frame.get()); |
520 | |
521 UpdateStats(captured_frame); | 534 UpdateStats(captured_frame); |
522 } | 535 } |
523 | 536 |
524 void VideoCapturer::SetCaptureState(CaptureState state) { | 537 void VideoCapturer::SetCaptureState(CaptureState state) { |
525 if (state == capture_state_) { | 538 if (state == capture_state_) { |
526 // Don't trigger a state changed callback if the state hasn't changed. | 539 // Don't trigger a state changed callback if the state hasn't changed. |
527 return; | 540 return; |
528 } | 541 } |
529 StateChangeParams* state_params = new StateChangeParams(state); | 542 StateChangeParams* state_params = new StateChangeParams(state); |
530 capture_state_ = state; | 543 capture_state_ = state; |
(...skipping 171 matching lines...)
702 void VideoCapturer::GetVariableSnapshot( | 715 void VideoCapturer::GetVariableSnapshot( |
703 const rtc::RollingAccumulator<T>& data, | 716 const rtc::RollingAccumulator<T>& data, |
704 VariableInfo<T>* stats) { | 717 VariableInfo<T>* stats) { |
705 stats->max_val = data.ComputeMax(); | 718 stats->max_val = data.ComputeMax(); |
706 stats->mean = data.ComputeMean(); | 719 stats->mean = data.ComputeMean(); |
707 stats->min_val = data.ComputeMin(); | 720 stats->min_val = data.ComputeMin(); |
708 stats->variance = data.ComputeVariance(); | 721 stats->variance = data.ComputeVariance(); |
709 } | 722 } |
710 | 723 |
711 } // namespace cricket | 724 } // namespace cricket |
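
For reviewers following along, here is a minimal sketch of how a consumer might attach to the new sink-based delivery path added in this patch, rather than connecting to the removed SignalVideoFrame signal. It assumes rtc::VideoSinkInterface<cricket::VideoFrame> declares a single OnFrame(const cricket::VideoFrame&) override, consistent with the broadcaster_.OnFrame(*adapted_frame.get()) call above; the FrameCountingSink name is illustrative and not part of this CL, and include paths are omitted because they depend on the checkout layout.

// Illustrative sink; assumes the rtc::VideoSinkInterface and cricket
// headers are included per the local tree layout.
class FrameCountingSink : public rtc::VideoSinkInterface<cricket::VideoFrame> {
 public:
  // Called via the capturer's broadcaster_ for every delivered frame.
  void OnFrame(const cricket::VideoFrame& frame) override { ++frame_count_; }
  int frame_count() const { return frame_count_; }

 private:
  int frame_count_ = 0;
};

// Usage sketch, using only the methods added in this patch:
//   FrameCountingSink sink;
//   capturer->AddSink(&sink);
//   ... capture runs; frames fan out to sink.OnFrame() ...
//   capturer->RemoveSink(&sink);

Note the design shift: the AddOrUpdateSink/OnSourceChangeRequested pair replaces the removed SetApplyRotation(), so whether the capturer applies rotation now follows the hints derived from all registered sinks (apply_rotation_ = !hints.can_apply_rotation) instead of whichever caller set it last.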