| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 // Declaration of abstract class VideoCapturer | 11 // Declaration of abstract class VideoCapturer |
| 12 | 12 |
| 13 #ifndef WEBRTC_MEDIA_BASE_VIDEOCAPTURER_H_ | 13 #ifndef WEBRTC_MEDIA_BASE_VIDEOCAPTURER_H_ |
| 14 #define WEBRTC_MEDIA_BASE_VIDEOCAPTURER_H_ | 14 #define WEBRTC_MEDIA_BASE_VIDEOCAPTURER_H_ |
| 15 | 15 |
| 16 #include <algorithm> | 16 #include <algorithm> |
| 17 #include <string> | 17 #include <string> |
| 18 #include <vector> | 18 #include <vector> |
| 19 | 19 |
| 20 #include "webrtc/base/basictypes.h" | 20 #include "webrtc/base/basictypes.h" |
| 21 #include "webrtc/base/criticalsection.h" | 21 #include "webrtc/base/criticalsection.h" |
| 22 #include "webrtc/media/base/videosourceinterface.h" |
| 22 #include "webrtc/base/messagehandler.h" | 23 #include "webrtc/base/messagehandler.h" |
| 23 #include "webrtc/base/rollingaccumulator.h" | 24 #include "webrtc/base/rollingaccumulator.h" |
| 24 #include "webrtc/base/scoped_ptr.h" | 25 #include "webrtc/base/scoped_ptr.h" |
| 25 #include "webrtc/base/sigslot.h" | 26 #include "webrtc/base/sigslot.h" |
| 26 #include "webrtc/base/thread.h" | 27 #include "webrtc/base/thread.h" |
| 27 #include "webrtc/base/timing.h" | 28 #include "webrtc/base/timing.h" |
| 28 #include "webrtc/media/base/mediachannel.h" | 29 #include "webrtc/media/base/mediachannel.h" |
| 29 #include "webrtc/media/base/videoadapter.h" | 30 #include "webrtc/media/base/videoadapter.h" |
| 31 #include "webrtc/media/base/videobroadcaster.h" |
| 30 #include "webrtc/media/base/videocommon.h" | 32 #include "webrtc/media/base/videocommon.h" |
| 31 #include "webrtc/media/base/videoframefactory.h" | 33 #include "webrtc/media/base/videoframefactory.h" |
| 32 #include "webrtc/media/devices/devicemanager.h" | 34 #include "webrtc/media/devices/devicemanager.h" |
| 33 | 35 |
| 34 | 36 |
| 35 namespace cricket { | 37 namespace cricket { |
| 36 | 38 |
| 37 // Current state of the capturer. | 39 // Current state of the capturer. |
| 38 // TODO(hellner): CS_NO_DEVICE is an error code, not a capture state. Separate | 40 // TODO(hellner): CS_NO_DEVICE is an error code, not a capture state. Separate |
| 39 // error codes and states. | 41 // error codes and states. |
| (...skipping 26 matching lines...) |
| 66 // fourcc, pixel_width, and pixel_height should remain the same across frames. | 68 // fourcc, pixel_width, and pixel_height should remain the same across frames. |
| 67 int width; // in number of pixels | 69 int width; // in number of pixels |
| 68 int height; // in number of pixels | 70 int height; // in number of pixels |
| 69 uint32_t fourcc; // compression | 71 uint32_t fourcc; // compression |
| 70 uint32_t pixel_width; // width of a pixel, default is 1 | 72 uint32_t pixel_width; // width of a pixel, default is 1 |
| 71 uint32_t pixel_height; // height of a pixel, default is 1 | 73 uint32_t pixel_height; // height of a pixel, default is 1 |
| 72 int64_t time_stamp; // timestamp of when the frame was captured, in unix | 74 int64_t time_stamp; // timestamp of when the frame was captured, in unix |
| 73 // time with nanosecond units. | 75 // time with nanosecond units. |
| 74 uint32_t data_size; // number of bytes of the frame data | 76 uint32_t data_size; // number of bytes of the frame data |
| 75 | 77 |
| 76 webrtc::VideoRotation rotation; // rotation in degrees of the frame. | 78 webrtc::VideoRotation rotation; // rotation in degrees of the frame. |
| 77 | 79 |
| 78 void* data; // pointer to the frame data. This object allocates the | 80 void* data; // pointer to the frame data. This object allocates the |
| 79 // memory or points to an existing memory. | 81 // memory or points to an existing memory. |
| 80 | 82 |
| 81 private: | 83 private: |
| 82 RTC_DISALLOW_COPY_AND_ASSIGN(CapturedFrame); | 84 RTC_DISALLOW_COPY_AND_ASSIGN(CapturedFrame); |
| 83 }; | 85 }; |
| 84 | 86 |
| 85 // VideoCapturer is an abstract class that defines the interfaces for video | 87 // VideoCapturer is an abstract class that defines the interfaces for video |
| 86 // capturing. The subclasses implement the video capturer for various types of | 88 // capturing. The subclasses implement the video capturer for various types of |
| (...skipping 14 matching lines...) |
| 101 // video_adapter()->OnOutputFormatRequest(desired_encoding_format) | 103 // video_adapter()->OnOutputFormatRequest(desired_encoding_format) |
| 102 // Start() | 104 // Start() |
| 103 // GetCaptureFormat() optionally | 105 // GetCaptureFormat() optionally |
| 104 // Stop() | 106 // Stop() |
| 105 // | 107 // |
| 106 // Assumption: | 108 // Assumption: |
| 107 // The Start() and Stop() methods are called by a single thread (e.g., the | 109 // The Start() and Stop() methods are called by a single thread (e.g., the |
| 108 // media engine thread). Hence, the VideoCapturer subclasses don't need to be | 110 // media engine thread). Hence, the VideoCapturer subclasses don't need to be |
| 109 // thread safe. | 111 // thread safe. |
| 110 // | 112 // |
| 111 class VideoCapturer | 113 class VideoCapturer : public sigslot::has_slots<>, |
| 112 : public sigslot::has_slots<>, | 114 public rtc::MessageHandler, |
| 113 public rtc::MessageHandler { | 115 public rtc::VideoSourceInterface<cricket::VideoFrame> { |
| 114 public: | 116 public: |
| 115 // All signals are marshalled to |thread| or the creating thread if | 117 // All signals are marshalled to |thread| or the creating thread if |
| 116 // none is provided. | 118 // none is provided. |
| 117 VideoCapturer(); | 119 VideoCapturer(); |
| 118 explicit VideoCapturer(rtc::Thread* thread); | 120 explicit VideoCapturer(rtc::Thread* thread); |
| 119 virtual ~VideoCapturer() {} | 121 virtual ~VideoCapturer() {} |
| 120 | 122 |
| 121 // Gets the id of the underlying device, which is available after the capturer | 123 // Gets the id of the underlying device, which is available after the capturer |
| 122 // is initialized. Can be used to determine if two capturers reference the | 124 // is initialized. Can be used to determine if two capturers reference the |
| 123 // same device. | 125 // same device. |
| (...skipping 66 matching lines...) |
| 190 // When muting, produce black frames then pause the camera. | 192 // When muting, produce black frames then pause the camera. |
| 191 // When unmuting, start the camera. Camera starts unmuted. | 193 // When unmuting, start the camera. Camera starts unmuted. |
| 192 virtual bool MuteToBlackThenPause(bool muted); | 194 virtual bool MuteToBlackThenPause(bool muted); |
| 193 virtual bool IsMuted() const { | 195 virtual bool IsMuted() const { |
| 194 return muted_; | 196 return muted_; |
| 195 } | 197 } |
| 196 CaptureState capture_state() const { | 198 CaptureState capture_state() const { |
| 197 return capture_state_; | 199 return capture_state_; |
| 198 } | 200 } |
| 199 | 201 |
| 200 // Tells videocapturer whether to apply the pending rotation. By default, the | |
| 201 // rotation is applied and the generated frame is up right. When set to false, | |
| 202 // generated frames will carry the rotation information from | |
| 203 // SetCaptureRotation. Return value indicates whether this operation succeeds. | |
| 204 virtual bool SetApplyRotation(bool enable); | |
| 205 virtual bool GetApplyRotation() { return apply_rotation_; } | 202 virtual bool GetApplyRotation() { return apply_rotation_; } |
| 206 | 203 |
| 207 // Returns true if the capturer is screencasting. This can be used to | 204 // Returns true if the capturer is screencasting. This can be used to |
| 208 // implement screencast specific behavior. | 205 // implement screencast specific behavior. |
| 209 virtual bool IsScreencast() const = 0; | 206 virtual bool IsScreencast() const = 0; |
| 210 | 207 |
| 211 // Caps the VideoCapturer's format according to max_format. It can e.g. be | 208 // Caps the VideoCapturer's format according to max_format. It can e.g. be |
| 212 // used to prevent cameras from capturing at a resolution or framerate that | 209 // used to prevent cameras from capturing at a resolution or framerate that |
| 213 // the capturer is capable of but not performing satisfactorily at. | 210 // the capturer is capable of but not performing satisfactorily at. |
| 214 // The capping is an upper bound for each component of the capturing format. | 211 // The capping is an upper bound for each component of the capturing format. |
| (...skipping 18 matching lines...) |
| 233 // Signal all capture state changes that are not a direct result of calling | 230 // Signal all capture state changes that are not a direct result of calling |
| 234 // Start(). | 231 // Start(). |
| 235 sigslot::signal2<VideoCapturer*, CaptureState> SignalStateChange; | 232 sigslot::signal2<VideoCapturer*, CaptureState> SignalStateChange; |
| 236 // Frame callbacks are multithreaded to allow disconnect and connect to be | 233 // Frame callbacks are multithreaded to allow disconnect and connect to be |
| 237 // called concurrently. It also ensures that it is safe to call disconnect | 234 // called concurrently. It also ensures that it is safe to call disconnect |
| 238 // at any time, which is needed since the signal may be called from an | 235 // at any time, which is needed since the signal may be called from an |
| 239 // unmarshalled thread owned by the VideoCapturer. | 236 // unmarshalled thread owned by the VideoCapturer. |
| 240 // Signal the captured frame to downstream. | 237 // Signal the captured frame to downstream. |
| 241 sigslot::signal2<VideoCapturer*, const CapturedFrame*, | 238 sigslot::signal2<VideoCapturer*, const CapturedFrame*, |
| 242 sigslot::multi_threaded_local> SignalFrameCaptured; | 239 sigslot::multi_threaded_local> SignalFrameCaptured; |
| 243 // Signal the captured and possibly adapted frame to downstream consumers | |
| 244 // such as the encoder. | |
| 245 sigslot::signal2<VideoCapturer*, const VideoFrame*, | |
| 246 sigslot::multi_threaded_local> SignalVideoFrame; | |
| 247 | 240 |
| 248 // If true, run video adaptation. By default, video adaptation is enabled | 241 // If true, run video adaptation. By default, video adaptation is enabled |
| 249 // and users must call video_adapter()->OnOutputFormatRequest() | 242 // and users must call video_adapter()->OnOutputFormatRequest() |
| 250 // to receive frames. | 243 // to receive frames. |
| 251 bool enable_video_adapter() const { return enable_video_adapter_; } | 244 bool enable_video_adapter() const { return enable_video_adapter_; } |
| 252 void set_enable_video_adapter(bool enable_video_adapter) { | 245 void set_enable_video_adapter(bool enable_video_adapter) { |
| 253 enable_video_adapter_ = enable_video_adapter; | 246 enable_video_adapter_ = enable_video_adapter; |
| 254 } | 247 } |
| 255 | 248 |
| 256 CoordinatedVideoAdapter* video_adapter() { return &video_adapter_; } | 249 CoordinatedVideoAdapter* video_adapter() { return &video_adapter_; } |
| 257 const CoordinatedVideoAdapter* video_adapter() const { | 250 const CoordinatedVideoAdapter* video_adapter() const { |
| 258 return &video_adapter_; | 251 return &video_adapter_; |
| 259 } | 252 } |
| 260 | 253 |
| 261 // Takes ownership. | 254 // Takes ownership. |
| 262 void set_frame_factory(VideoFrameFactory* frame_factory); | 255 void set_frame_factory(VideoFrameFactory* frame_factory); |
| 263 | 256 |
| 264 // Gets statistics for tracked variables recorded since the last call to | 257 // Gets statistics for tracked variables recorded since the last call to |
| 265 // GetStats. Note that calling GetStats resets any gathered data so it | 258 // GetStats. Note that calling GetStats resets any gathered data so it |
| 266 // should be called only periodically to log statistics. | 259 // should be called only periodically to log statistics. |
| 267 void GetStats(VariableInfo<int>* adapt_drop_stats, | 260 void GetStats(VariableInfo<int>* adapt_drop_stats, |
| 268 VariableInfo<int>* effect_drop_stats, | 261 VariableInfo<int>* effect_drop_stats, |
| 269 VariableInfo<double>* frame_time_stats, | 262 VariableInfo<double>* frame_time_stats, |
| 270 VideoFormat* last_captured_frame_format); | 263 VideoFormat* last_captured_frame_format); |
| 271 | 264 |
| 265 // Implements VideoSourceInterface |
| 266 void AddOrUpdateSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink, |
| 267 const rtc::VideoSinkWants& wants) override; |
| 268 void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override; |
| 269 |
| 272 protected: | 270 protected: |
| 271 // OnSinkWantsChanged can be overridden to change the default behavior |
| 272 // when a sink changes its VideoSinkWants by calling AddOrUpdateSink. |
| 273 virtual void OnSinkWantsChanged(const rtc::VideoSinkWants& wants); |
| 274 |
| 273 // Callback attached to SignalFrameCaptured where SignalVideoFrames is called. | 275 // Callback attached to SignalFrameCaptured where SignalVideoFrames is called. |
| 274 void OnFrameCaptured(VideoCapturer* video_capturer, | 276 void OnFrameCaptured(VideoCapturer* video_capturer, |
| 275 const CapturedFrame* captured_frame); | 277 const CapturedFrame* captured_frame); |
| 276 void SetCaptureState(CaptureState state); | 278 void SetCaptureState(CaptureState state); |
| 277 | 279 |
| 278 // Marshals SignalStateChange onto thread_. | 280 // Marshals SignalStateChange onto thread_. |
| 279 void OnMessage(rtc::Message* message); | 281 void OnMessage(rtc::Message* message); |
| 280 | 282 |
| 281 // subclasses override this virtual method to provide a vector of fourccs, in | 283 // subclasses override this virtual method to provide a vector of fourccs, in |
| 282 // order of preference, that are expected by the media engine. | 284 // order of preference, that are expected by the media engine. |
| (...skipping 54 matching lines...) |
| 337 | 339 |
| 338 int ratio_w_; // View resolution. e.g. 1280 x 720. | 340 int ratio_w_; // View resolution. e.g. 1280 x 720. |
| 339 int ratio_h_; | 341 int ratio_h_; |
| 340 bool enable_camera_list_; | 342 bool enable_camera_list_; |
| 341 bool square_pixel_aspect_ratio_; // Enable scaling to square pixels. | 343 bool square_pixel_aspect_ratio_; // Enable scaling to square pixels. |
| 342 int scaled_width_; // Current output size from ComputeScale. | 344 int scaled_width_; // Current output size from ComputeScale. |
| 343 int scaled_height_; | 345 int scaled_height_; |
| 344 bool muted_; | 346 bool muted_; |
| 345 int black_frame_count_down_; | 347 int black_frame_count_down_; |
| 346 | 348 |
| 349 rtc::VideoBroadcaster broadcaster_; |
| 347 bool enable_video_adapter_; | 350 bool enable_video_adapter_; |
| 348 CoordinatedVideoAdapter video_adapter_; | 351 CoordinatedVideoAdapter video_adapter_; |
| 349 | 352 |
| 350 rtc::Timing frame_length_time_reporter_; | 353 rtc::Timing frame_length_time_reporter_; |
| 351 rtc::CriticalSection frame_stats_crit_; | 354 rtc::CriticalSection frame_stats_crit_; |
| 352 | 355 |
| 353 int adapt_frame_drops_; | 356 int adapt_frame_drops_; |
| 354 rtc::RollingAccumulator<int> adapt_frame_drops_data_; | 357 rtc::RollingAccumulator<int> adapt_frame_drops_data_; |
| 355 double previous_frame_time_; | 358 double previous_frame_time_; |
| 356 rtc::RollingAccumulator<double> frame_time_data_; | 359 rtc::RollingAccumulator<double> frame_time_data_; |
| 357 // The captured frame format before potential adaptation. | 360 // The captured frame format before potential adaptation. |
| 358 VideoFormat last_captured_frame_format_; | 361 VideoFormat last_captured_frame_format_; |
| 359 | 362 |
| 360 // Whether capturer should apply rotation to the frame before signaling it. | 363 // Whether capturer should apply rotation to the frame before signaling it. |
| 361 bool apply_rotation_; | 364 bool apply_rotation_; |
| 362 | 365 |
| 363 RTC_DISALLOW_COPY_AND_ASSIGN(VideoCapturer); | 366 RTC_DISALLOW_COPY_AND_ASSIGN(VideoCapturer); |
| 364 }; | 367 }; |
| 365 | 368 |
| 366 } // namespace cricket | 369 } // namespace cricket |
| 367 | 370 |
| 368 #endif // WEBRTC_MEDIA_BASE_VIDEOCAPTURER_H_ | 371 #endif // WEBRTC_MEDIA_BASE_VIDEOCAPTURER_H_ |
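
For context on the CapturedFrame struct and SignalFrameCaptured shown above, a minimal sketch of the producer side follows. FakePlatformCapturer, DeliverFrame, and every literal value are illustrative assumptions, not part of WebRTC; the VideoCapturer members used here are in this header (some, such as SetCaptureFormat and GetPreferredFourccs, sit in the elided regions of the diff).

#include "webrtc/media/base/videocapturer.h"

// Hypothetical minimal subclass: just enough to show how a platform capturer
// could hand a raw I420 buffer to the base class. Values are illustrative.
class FakePlatformCapturer : public cricket::VideoCapturer {
 public:
  cricket::CaptureState Start(const cricket::VideoFormat& format) override {
    SetCaptureFormat(&format);
    running_ = true;
    SetCaptureState(cricket::CS_RUNNING);
    return cricket::CS_RUNNING;
  }
  void Stop() override {
    SetCaptureFormat(nullptr);
    running_ = false;
  }
  bool IsRunning() override { return running_; }
  bool IsScreencast() const override { return false; }

  // Called by the (hypothetical) platform capture callback.
  void DeliverFrame(uint8_t* i420_buffer, size_t size, int64_t time_ns) {
    cricket::CapturedFrame frame;
    frame.width = 640;
    frame.height = 480;
    frame.fourcc = cricket::FOURCC_I420;           // compression
    frame.time_stamp = time_ns;                    // Unix time, nanoseconds
    frame.data_size = static_cast<uint32_t>(size);
    frame.rotation = webrtc::kVideoRotation_0;     // no pending rotation
    frame.data = i420_buffer;                      // not owned by the struct
    // OnFrameCaptured() is connected to this signal in the base class; it
    // converts and adapts the frame before handing it downstream.
    SignalFrameCaptured(this, &frame);
  }

 protected:
  bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override {
    fourccs->push_back(cricket::FOURCC_I420);
    return true;
  }

 private:
  bool running_ = false;
};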
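The class comment's usage sequence (OnOutputFormatRequest, Start, GetCaptureFormat, Stop) can likewise be illustrated with a short, hedged sketch. SelectAndStart and the 640x480/30 fps I420 format are assumptions; GetBestCaptureFormat comes from an elided part of this header, and the exact CaptureState returned by Start() varies by implementation.

#include "webrtc/media/base/videocapturer.h"

// Minimal sketch of the usage sequence from the class comment. The helper
// name and the chosen format are illustrative, not part of WebRTC.
void SelectAndStart(cricket::VideoCapturer* capturer) {
  // Tell the built-in adapter which encoding format downstream wants; with
  // video adaptation enabled (the default) this is required to get frames.
  cricket::VideoFormat desired(640, 480,
                               cricket::VideoFormat::FpsToInterval(30),
                               cricket::FOURCC_I420);
  capturer->video_adapter()->OnOutputFormatRequest(desired);

  // Pick the closest supported capture format, then start capturing.
  cricket::VideoFormat capture_format;
  if (!capturer->GetBestCaptureFormat(desired, &capture_format))
    return;
  if (capturer->Start(capture_format) == cricket::CS_FAILED)
    return;

  // Optionally inspect the format that is actually in use.
  const cricket::VideoFormat* current = capturer->GetCaptureFormat();
  (void)current;

  // Frames are delivered asynchronously; stop when done.
  capturer->Stop();
}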
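The core of this change is that VideoCapturer now implements rtc::VideoSourceInterface&lt;cricket::VideoFrame&gt;, fans frames out through the new rtc::VideoBroadcaster member, and derives apply_rotation_ from sink wants, which is why SetApplyRotation and SignalVideoFrame are removed. Below is a sketch of the consumer side under the assumption of a hypothetical LoggingSink; only AddOrUpdateSink, RemoveSink, OnSinkWantsChanged, and rtc::VideoSinkWants come from the patch, and the rotation_applied field is assumed from that interface.

#include "webrtc/media/base/videocapturer.h"

// Hypothetical sink; a real consumer would be a renderer or the encoder.
class LoggingSink : public rtc::VideoSinkInterface<cricket::VideoFrame> {
 public:
  void OnFrame(const cricket::VideoFrame& frame) override {
    // Consume the (possibly adapted and rotated) frame here.
  }
};

void AttachSink(cricket::VideoCapturer* capturer, LoggingSink* sink) {
  rtc::VideoSinkWants wants;
  // With SetApplyRotation() gone, rotation is requested per sink; the
  // capturer aggregates all sinks' wants via OnSinkWantsChanged().
  wants.rotation_applied = true;
  capturer->AddOrUpdateSink(sink, wants);

  // Frames now reach LoggingSink::OnFrame through the internal broadcaster.

  capturer->RemoveSink(sink);
}

A subclass that needs to react when the aggregated wants change (for example, to let the camera driver rotate frames instead of rotating in software) can override the protected OnSinkWantsChanged() hook added in this patch.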