Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(383)

Side by Side Diff: webrtc/media/base/videocapturer.cc

Issue 1733673002: Removed unused cricket::VideoCapturer methods (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Fixed thread checker for android. SetCaptureFormat is called on the thread where the capturer is cr… Created 4 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « webrtc/media/base/videocapturer.h ('k') | webrtc/media/base/videocapturer_unittest.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 /* 1 /*
2 * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved. 2 * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 // Implementation file of class VideoCapturer. 11 // Implementation file of class VideoCapturer.
12 12
13 #include "webrtc/media/base/videocapturer.h" 13 #include "webrtc/media/base/videocapturer.h"
14 14
15 #include <algorithm> 15 #include <algorithm>
16 16
17 #include "libyuv/scale_argb.h" 17 #include "libyuv/scale_argb.h"
18 #include "webrtc/base/common.h" 18 #include "webrtc/base/common.h"
19 #include "webrtc/base/logging.h" 19 #include "webrtc/base/logging.h"
20 #include "webrtc/base/systeminfo.h" 20 #include "webrtc/base/systeminfo.h"
21 #include "webrtc/media/base/videoframefactory.h" 21 #include "webrtc/media/base/videoframefactory.h"
22
23 #if defined(HAVE_WEBRTC_VIDEO)
24 #include "webrtc/media/engine/webrtcvideoframe.h" 22 #include "webrtc/media/engine/webrtcvideoframe.h"
25 #include "webrtc/media/engine/webrtcvideoframefactory.h" 23 #include "webrtc/media/engine/webrtcvideoframefactory.h"
26 #endif // HAVE_WEBRTC_VIDEO
27 24
28 namespace cricket { 25 namespace cricket {
29 26
30 namespace { 27 namespace {
31 28
32 // TODO(thorcarpenter): This is a BIG hack to flush the system with black
33 // frames. Frontends should coordinate to update the video state of a muted
35 // user. When all frontends do this, consider removing the black frame business.
35 const int kNumBlackFramesOnMute = 30;
36
37 // MessageHandler constants.
38 enum {
39 MSG_DO_PAUSE = 0,
40 MSG_DO_UNPAUSE,
41 MSG_STATE_CHANGE
42 };
43
44 static const int64_t kMaxDistance = ~(static_cast<int64_t>(1) << 63); 29 static const int64_t kMaxDistance = ~(static_cast<int64_t>(1) << 63);
45 #ifdef WEBRTC_LINUX 30 #ifdef WEBRTC_LINUX
46 static const int kYU12Penalty = 16; // Needs to be higher than MJPG index. 31 static const int kYU12Penalty = 16; // Needs to be higher than MJPG index.
47 #endif 32 #endif
48 static const int kDefaultScreencastFps = 5; 33 static const int kDefaultScreencastFps = 5;
49 typedef rtc::TypedMessageData<CaptureState> StateChangeParams;
50 34
51 // Limit stats data collections to ~20 seconds of 30fps data before dropping 35 // Limit stats data collections to ~20 seconds of 30fps data before dropping
52 // old data in case stats aren't reset for long periods of time. 36 // old data in case stats aren't reset for long periods of time.
53 static const size_t kMaxAccumulatorSize = 600; 37 static const size_t kMaxAccumulatorSize = 600;
54 38
55 } // namespace 39 } // namespace
56 40
57 ///////////////////////////////////////////////////////////////////// 41 /////////////////////////////////////////////////////////////////////
58 // Implementation of struct CapturedFrame 42 // Implementation of struct CapturedFrame
59 ///////////////////////////////////////////////////////////////////// 43 /////////////////////////////////////////////////////////////////////
(...skipping 14 matching lines...) Expand all
74 return false; 58 return false;
75 } 59 }
76 *size = data_size; 60 *size = data_size;
77 return true; 61 return true;
78 } 62 }
79 63
80 ///////////////////////////////////////////////////////////////////// 64 /////////////////////////////////////////////////////////////////////
81 // Implementation of class VideoCapturer 65 // Implementation of class VideoCapturer
82 ///////////////////////////////////////////////////////////////////// 66 /////////////////////////////////////////////////////////////////////
83 VideoCapturer::VideoCapturer() 67 VideoCapturer::VideoCapturer()
84 : thread_(rtc::Thread::Current()), 68 : adapt_frame_drops_data_(kMaxAccumulatorSize),
85 adapt_frame_drops_data_(kMaxAccumulatorSize),
86 frame_time_data_(kMaxAccumulatorSize), 69 frame_time_data_(kMaxAccumulatorSize),
87 apply_rotation_(true) { 70 apply_rotation_(true) {
88 Construct(); 71 thread_checker_.DetachFromThread();
89 }
90
91 VideoCapturer::VideoCapturer(rtc::Thread* thread)
92 : thread_(thread),
93 adapt_frame_drops_data_(kMaxAccumulatorSize),
94 frame_time_data_(kMaxAccumulatorSize),
95 apply_rotation_(true) {
96 Construct(); 72 Construct();
97 } 73 }
98 74
99 void VideoCapturer::Construct() { 75 void VideoCapturer::Construct() {
100 ClearAspectRatio(); 76 ratio_w_ = 0;
77 ratio_h_ = 0;
101 enable_camera_list_ = false; 78 enable_camera_list_ = false;
102 square_pixel_aspect_ratio_ = false; 79 square_pixel_aspect_ratio_ = false;
103 capture_state_ = CS_STOPPED; 80 capture_state_ = CS_STOPPED;
104 SignalFrameCaptured.connect(this, &VideoCapturer::OnFrameCaptured); 81 SignalFrameCaptured.connect(this, &VideoCapturer::OnFrameCaptured);
105 // TODO(perkj) SignalVideoFrame is used directly by Chrome remoting. 82 // TODO(perkj) SignalVideoFrame is used directly by Chrome remoting.
106 // Before that is refactored, SignalVideoFrame must forward frames to the 83 // Before that is refactored, SignalVideoFrame must forward frames to the
107 // |VideoBroadcaster|; 84 // |VideoBroadcaster|;
108 SignalVideoFrame.connect(this, &VideoCapturer::OnFrame); 85 SignalVideoFrame.connect(this, &VideoCapturer::OnFrame);
109 scaled_width_ = 0; 86 scaled_width_ = 0;
110 scaled_height_ = 0; 87 scaled_height_ = 0;
111 muted_ = false;
112 black_frame_count_down_ = kNumBlackFramesOnMute;
113 enable_video_adapter_ = true; 88 enable_video_adapter_ = true;
114 adapt_frame_drops_ = 0; 89 adapt_frame_drops_ = 0;
115 previous_frame_time_ = 0.0; 90 previous_frame_time_ = 0.0;
116 #ifdef HAVE_WEBRTC_VIDEO
117 // There are lots of video capturers out there that don't call 91 // There are lots of video capturers out there that don't call
118 // set_frame_factory. We can either go change all of them, or we 92 // set_frame_factory. We can either go change all of them, or we
119 // can set this default. 93 // can set this default.
120 // TODO(pthatcher): Remove this hack and require the frame factory 94 // TODO(pthatcher): Remove this hack and require the frame factory
121 // to be passed in the constructor. 95 // to be passed in the constructor.
122 set_frame_factory(new WebRtcVideoFrameFactory()); 96 set_frame_factory(new WebRtcVideoFrameFactory());
123 #endif
124 } 97 }
125 98
126 const std::vector<VideoFormat>* VideoCapturer::GetSupportedFormats() const { 99 const std::vector<VideoFormat>* VideoCapturer::GetSupportedFormats() const {
127 return &filtered_supported_formats_; 100 return &filtered_supported_formats_;
128 } 101 }
129 102
130 bool VideoCapturer::StartCapturing(const VideoFormat& capture_format) { 103 bool VideoCapturer::StartCapturing(const VideoFormat& capture_format) {
104 RTC_DCHECK(thread_checker_.CalledOnValidThread());
131 previous_frame_time_ = frame_length_time_reporter_.TimerNow(); 105 previous_frame_time_ = frame_length_time_reporter_.TimerNow();
132 CaptureState result = Start(capture_format); 106 CaptureState result = Start(capture_format);
133 const bool success = (result == CS_RUNNING) || (result == CS_STARTING); 107 const bool success = (result == CS_RUNNING) || (result == CS_STARTING);
134 if (!success) { 108 if (!success) {
135 return false; 109 return false;
136 } 110 }
137 if (result == CS_RUNNING) { 111 if (result == CS_RUNNING) {
138 SetCaptureState(result); 112 SetCaptureState(result);
139 } 113 }
140 return true; 114 return true;
141 } 115 }
142 116
143 void VideoCapturer::UpdateAspectRatio(int ratio_w, int ratio_h) {
144 if (ratio_w == 0 || ratio_h == 0) {
145 LOG(LS_WARNING) << "UpdateAspectRatio ignored invalid ratio: "
146 << ratio_w << "x" << ratio_h;
147 return;
148 }
149 ratio_w_ = ratio_w;
150 ratio_h_ = ratio_h;
151 }
152
153 void VideoCapturer::ClearAspectRatio() {
154 ratio_w_ = 0;
155 ratio_h_ = 0;
156 }
157
158 // Override this to have more control of how your device is started/stopped.
159 bool VideoCapturer::Pause(bool pause) {
160 if (pause) {
161 if (capture_state() == CS_PAUSED) {
162 return true;
163 }
164 bool is_running = capture_state() == CS_STARTING ||
165 capture_state() == CS_RUNNING;
166 if (!is_running) {
167 LOG(LS_ERROR) << "Cannot pause a stopped camera.";
168 return false;
169 }
170 LOG(LS_INFO) << "Pausing a camera.";
171 rtc::scoped_ptr<VideoFormat> capture_format_when_paused(
172 capture_format_ ? new VideoFormat(*capture_format_) : NULL);
173 Stop();
174 SetCaptureState(CS_PAUSED);
175 // If you override this function be sure to restore the capture format
176 // after calling Stop().
177 SetCaptureFormat(capture_format_when_paused.get());
178 } else { // Unpause.
179 if (capture_state() != CS_PAUSED) {
180 LOG(LS_WARNING) << "Cannot unpause a camera that hasn't been paused.";
181 return false;
182 }
183 if (!capture_format_) {
184 LOG(LS_ERROR) << "Missing capture_format_, cannot unpause a camera.";
185 return false;
186 }
187 if (muted_) {
188 LOG(LS_WARNING) << "Camera cannot be unpaused while muted.";
189 return false;
190 }
191 LOG(LS_INFO) << "Unpausing a camera.";
192 if (!Start(*capture_format_)) {
193 LOG(LS_ERROR) << "Camera failed to start when unpausing.";
194 return false;
195 }
196 }
197 return true;
198 }
199
200 bool VideoCapturer::Restart(const VideoFormat& capture_format) {
201 if (!IsRunning()) {
202 return StartCapturing(capture_format);
203 }
204
205 if (GetCaptureFormat() != NULL && *GetCaptureFormat() == capture_format) {
206 // The requested format is the same; nothing to do. 107 return true;
207 return true;
208 }
209
210 Stop();
211 return StartCapturing(capture_format);
212 }
213
214 bool VideoCapturer::MuteToBlackThenPause(bool muted) {
215 if (muted == IsMuted()) {
216 return true;
217 }
218
219 LOG(LS_INFO) << (muted ? "Muting" : "Unmuting") << " this video capturer.";
220 muted_ = muted; // Do this before calling Pause().
221 if (muted) {
222 // Reset black frame count down.
223 black_frame_count_down_ = kNumBlackFramesOnMute;
224 // Following frames will be overwritten with black, then the camera will be
225 // paused.
226 return true;
227 }
228 // Start the camera.
229 thread_->Clear(this, MSG_DO_PAUSE);
230 return Pause(false);
231 }
232
233 void VideoCapturer::SetSupportedFormats( 117 void VideoCapturer::SetSupportedFormats(
234 const std::vector<VideoFormat>& formats) { 118 const std::vector<VideoFormat>& formats) {
119 // This method is OK to call during initialization on a separate thread.
120 RTC_DCHECK(capture_state_ == CS_STOPPED ||
121 thread_checker_.CalledOnValidThread());
235 supported_formats_ = formats; 122 supported_formats_ = formats;
236 UpdateFilteredSupportedFormats(); 123 UpdateFilteredSupportedFormats();
237 } 124 }
238 125
239 bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format, 126 bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format,
240 VideoFormat* best_format) { 127 VideoFormat* best_format) {
128 RTC_DCHECK(thread_checker_.CalledOnValidThread());
241 // TODO(fbarchard): Directly support max_format. 129 // TODO(fbarchard): Directly support max_format.
242 UpdateFilteredSupportedFormats(); 130 UpdateFilteredSupportedFormats();
243 const std::vector<VideoFormat>* supported_formats = GetSupportedFormats(); 131 const std::vector<VideoFormat>* supported_formats = GetSupportedFormats();
244 132
245 if (supported_formats->empty()) { 133 if (supported_formats->empty()) {
246 return false; 134 return false;
247 } 135 }
248 LOG(LS_INFO) << " Capture Requested " << format.ToString(); 136 LOG(LS_INFO) << " Capture Requested " << format.ToString();
249 int64_t best_distance = kMaxDistance; 137 int64_t best_distance = kMaxDistance;
250 std::vector<VideoFormat>::const_iterator best = supported_formats->end(); 138 std::vector<VideoFormat>::const_iterator best = supported_formats->end();
(...skipping 18 matching lines...) Expand all
269 best_format->height = best->height; 157 best_format->height = best->height;
270 best_format->fourcc = best->fourcc; 158 best_format->fourcc = best->fourcc;
271 best_format->interval = best->interval; 159 best_format->interval = best->interval;
272 LOG(LS_INFO) << " Best " << best_format->ToString() << " Interval " 160 LOG(LS_INFO) << " Best " << best_format->ToString() << " Interval "
273 << best_format->interval << " distance " << best_distance; 161 << best_format->interval << " distance " << best_distance;
274 } 162 }
275 return true; 163 return true;
276 } 164 }
277 165
278 void VideoCapturer::ConstrainSupportedFormats(const VideoFormat& max_format) { 166 void VideoCapturer::ConstrainSupportedFormats(const VideoFormat& max_format) {
167 RTC_DCHECK(thread_checker_.CalledOnValidThread());
279 max_format_.reset(new VideoFormat(max_format)); 168 max_format_.reset(new VideoFormat(max_format));
280 LOG(LS_VERBOSE) << " ConstrainSupportedFormats " << max_format.ToString(); 169 LOG(LS_VERBOSE) << " ConstrainSupportedFormats " << max_format.ToString();
281 UpdateFilteredSupportedFormats(); 170 UpdateFilteredSupportedFormats();
282 } 171 }
283 172
284 std::string VideoCapturer::ToString(const CapturedFrame* captured_frame) const { 173 std::string VideoCapturer::ToString(const CapturedFrame* captured_frame) const {
285 std::string fourcc_name = GetFourccName(captured_frame->fourcc) + " "; 174 std::string fourcc_name = GetFourccName(captured_frame->fourcc) + " ";
286 for (std::string::const_iterator i = fourcc_name.begin(); 175 for (std::string::const_iterator i = fourcc_name.begin();
287 i < fourcc_name.end(); ++i) { 176 i < fourcc_name.end(); ++i) {
288 // Test character is printable; Avoid isprint() which asserts on negatives. 177 // Test character is printable; Avoid isprint() which asserts on negatives.
(...skipping 23 matching lines...) Expand all
312 GetVariableSnapshot(adapt_frame_drops_data_, adapt_drops_stats); 201 GetVariableSnapshot(adapt_frame_drops_data_, adapt_drops_stats);
313 GetVariableSnapshot(frame_time_data_, frame_time_stats); 202 GetVariableSnapshot(frame_time_data_, frame_time_stats);
314 *last_captured_frame_format = last_captured_frame_format_; 203 *last_captured_frame_format = last_captured_frame_format_;
315 204
316 adapt_frame_drops_data_.Reset(); 205 adapt_frame_drops_data_.Reset();
317 frame_time_data_.Reset(); 206 frame_time_data_.Reset();
318 } 207 }
319 208
320 void VideoCapturer::RemoveSink( 209 void VideoCapturer::RemoveSink(
321 rtc::VideoSinkInterface<cricket::VideoFrame>* sink) { 210 rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
211 RTC_DCHECK(thread_checker_.CalledOnValidThread());
322 broadcaster_.RemoveSink(sink); 212 broadcaster_.RemoveSink(sink);
323 } 213 }
324 214
325 void VideoCapturer::AddOrUpdateSink( 215 void VideoCapturer::AddOrUpdateSink(
326 rtc::VideoSinkInterface<cricket::VideoFrame>* sink, 216 rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
327 const rtc::VideoSinkWants& wants) { 217 const rtc::VideoSinkWants& wants) {
218 RTC_DCHECK(thread_checker_.CalledOnValidThread());
328 broadcaster_.AddOrUpdateSink(sink, wants); 219 broadcaster_.AddOrUpdateSink(sink, wants);
329 OnSinkWantsChanged(broadcaster_.wants()); 220 OnSinkWantsChanged(broadcaster_.wants());
330 } 221 }
331 222
332 void VideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) { 223 void VideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
224 RTC_DCHECK(thread_checker_.CalledOnValidThread());
333 apply_rotation_ = wants.rotation_applied; 225 apply_rotation_ = wants.rotation_applied;
334 if (frame_factory_) { 226 if (frame_factory_) {
335 frame_factory_->SetApplyRotation(apply_rotation_); 227 frame_factory_->SetApplyRotation(apply_rotation_);
336 } 228 }
337 } 229 }
338 230
339 void VideoCapturer::OnFrameCaptured(VideoCapturer*, 231 void VideoCapturer::OnFrameCaptured(VideoCapturer*,
340 const CapturedFrame* captured_frame) { 232 const CapturedFrame* captured_frame) {
341 if (muted_) {
342 if (black_frame_count_down_ == 0) {
343 thread_->Post(this, MSG_DO_PAUSE, NULL);
344 } else {
345 --black_frame_count_down_;
346 }
347 }
348
349 if (!broadcaster_.frame_wanted()) { 233 if (!broadcaster_.frame_wanted()) {
350 return; 234 return;
351 } 235 }
352 236
353 // Use a temporary buffer to scale 237 // Use a temporary buffer to scale
354 rtc::scoped_ptr<uint8_t[]> scale_buffer; 238 rtc::scoped_ptr<uint8_t[]> scale_buffer;
355
356 if (IsScreencast()) { 239 if (IsScreencast()) {
357 int scaled_width, scaled_height; 240 int scaled_width, scaled_height;
358 int desired_screencast_fps = capture_format_.get() ? 241 int desired_screencast_fps =
359 VideoFormat::IntervalToFps(capture_format_->interval) : 242 capture_format_.get()
360 kDefaultScreencastFps; 243 ? VideoFormat::IntervalToFps(capture_format_->interval)
244 : kDefaultScreencastFps;
361 ComputeScale(captured_frame->width, captured_frame->height, 245 ComputeScale(captured_frame->width, captured_frame->height,
362 desired_screencast_fps, &scaled_width, &scaled_height); 246 desired_screencast_fps, &scaled_width, &scaled_height);
363 247
364 if (FOURCC_ARGB == captured_frame->fourcc && 248 if (FOURCC_ARGB == captured_frame->fourcc &&
365 (scaled_width != captured_frame->width || 249 (scaled_width != captured_frame->width ||
366 scaled_height != captured_frame->height)) { 250 scaled_height != captured_frame->height)) {
367 if (scaled_width != scaled_width_ || scaled_height != scaled_height_) { 251 if (scaled_width != scaled_width_ || scaled_height != scaled_height_) {
368 LOG(LS_INFO) << "Scaling Screencast from " 252 LOG(LS_INFO) << "Scaling Screencast from " << captured_frame->width
369 << captured_frame->width << "x" 253 << "x" << captured_frame->height << " to " << scaled_width
370 << captured_frame->height << " to " 254 << "x" << scaled_height;
371 << scaled_width << "x" << scaled_height;
372 scaled_width_ = scaled_width; 255 scaled_width_ = scaled_width;
373 scaled_height_ = scaled_height; 256 scaled_height_ = scaled_height;
374 } 257 }
375 CapturedFrame* modified_frame = 258 CapturedFrame* modified_frame =
376 const_cast<CapturedFrame*>(captured_frame); 259 const_cast<CapturedFrame*>(captured_frame);
377 const int modified_frame_size = scaled_width * scaled_height * 4; 260 const int modified_frame_size = scaled_width * scaled_height * 4;
378 scale_buffer.reset(new uint8_t[modified_frame_size]); 261 scale_buffer.reset(new uint8_t[modified_frame_size]);
379 // Compute new width such that width * height is less than maximum but 262 // Compute new width such that width * height is less than maximum but
380 // maintains original captured frame aspect ratio. 263 // maintains original captured frame aspect ratio.
381 // Round down width to multiple of 4 so odd width won't round up beyond 264 // Round down width to multiple of 4 so odd width won't round up beyond
382 // maximum, and so chroma channel is even width to simplify spatial 265 // maximum, and so chroma channel is even width to simplify spatial
383 // resampling. 266 // resampling.
384 libyuv::ARGBScale(reinterpret_cast<const uint8_t*>(captured_frame->data), 267 libyuv::ARGBScale(reinterpret_cast<const uint8_t*>(captured_frame->data),
385 captured_frame->width * 4, captured_frame->width, 268 captured_frame->width * 4, captured_frame->width,
386 captured_frame->height, scale_buffer.get(), 269 captured_frame->height, scale_buffer.get(),
387 scaled_width * 4, scaled_width, scaled_height, 270 scaled_width * 4, scaled_width, scaled_height,
388 libyuv::kFilterBilinear); 271 libyuv::kFilterBilinear);
389 modified_frame->width = scaled_width; 272 modified_frame->width = scaled_width;
390 modified_frame->height = scaled_height; 273 modified_frame->height = scaled_height;
391 modified_frame->data_size = scaled_width * 4 * scaled_height; 274 modified_frame->data_size = scaled_width * 4 * scaled_height;
392 modified_frame->data = scale_buffer.get(); 275 modified_frame->data = scale_buffer.get();
393 } 276 }
394 } 277 }
395 278
396 const int kYuy2Bpp = 2; 279 const int kYuy2Bpp = 2;
397 const int kArgbBpp = 4; 280 const int kArgbBpp = 4;
398 // TODO(fbarchard): Make a helper function to adjust pixels to square. 281 // TODO(fbarchard): Make a helper function to adjust pixels to square.
399 // TODO(fbarchard): Hook up experiment to scaling. 282 // TODO(fbarchard): Hook up experiment to scaling.
400 // TODO(fbarchard): Avoid scale and convert if muted.
401 // Temporary buffer is scoped here so it will persist until i420_frame.Init() 283 // Temporary buffer is scoped here so it will persist until i420_frame.Init()
402 // makes a copy of the frame, converting to I420. 284 // makes a copy of the frame, converting to I420.
403 rtc::scoped_ptr<uint8_t[]> temp_buffer; 285 rtc::scoped_ptr<uint8_t[]> temp_buffer;
404 // YUY2 can be scaled vertically using an ARGB scaler. Aspect ratio is only 286 // YUY2 can be scaled vertically using an ARGB scaler. Aspect ratio is only
405 // a problem on OSX. OSX always converts webcams to YUY2 or UYVY. 287 // a problem on OSX. OSX always converts webcams to YUY2 or UYVY.
406 bool can_scale = 288 bool can_scale =
407 FOURCC_YUY2 == CanonicalFourCC(captured_frame->fourcc) || 289 FOURCC_YUY2 == CanonicalFourCC(captured_frame->fourcc) ||
408 FOURCC_UYVY == CanonicalFourCC(captured_frame->fourcc); 290 FOURCC_UYVY == CanonicalFourCC(captured_frame->fourcc);
409 291
410 // If pixels are not square, optionally use vertical scaling to make them 292 // If pixels are not square, optionally use vertical scaling to make them
(...skipping 107 matching lines...) Expand 10 before | Expand all | Expand 10 after
518 adapted_width, adapted_height)); 400 adapted_width, adapted_height));
519 401
520 if (!adapted_frame) { 402 if (!adapted_frame) {
521 // TODO(fbarchard): LOG more information about captured frame attributes. 403 // TODO(fbarchard): LOG more information about captured frame attributes.
522 LOG(LS_ERROR) << "Couldn't convert to I420! " 404 LOG(LS_ERROR) << "Couldn't convert to I420! "
523 << "From " << ToString(captured_frame) << " To " 405 << "From " << ToString(captured_frame) << " To "
524 << cropped_width << " x " << cropped_height; 406 << cropped_width << " x " << cropped_height;
525 return; 407 return;
526 } 408 }
527 409
528 if (muted_) {
529 // TODO(pthatcher): Use frame_factory_->CreateBlackFrame() instead.
530 adapted_frame->SetToBlack();
531 }
532 SignalVideoFrame(this, adapted_frame.get()); 410 SignalVideoFrame(this, adapted_frame.get());
533 UpdateStats(captured_frame); 411 UpdateStats(captured_frame);
534 } 412 }
535 413
536 void VideoCapturer::OnFrame(VideoCapturer* capturer, const VideoFrame* frame) { 414 void VideoCapturer::OnFrame(VideoCapturer* capturer, const VideoFrame* frame) {
537 broadcaster_.OnFrame(*frame); 415 broadcaster_.OnFrame(*frame);
538 } 416 }
539 417
540 void VideoCapturer::SetCaptureState(CaptureState state) { 418 void VideoCapturer::SetCaptureState(CaptureState state) {
419 RTC_DCHECK(thread_checker_.CalledOnValidThread());
541 if (state == capture_state_) { 420 if (state == capture_state_) {
542 // Don't trigger a state changed callback if the state hasn't changed. 421 // Don't trigger a state changed callback if the state hasn't changed.
543 return; 422 return;
544 } 423 }
545 StateChangeParams* state_params = new StateChangeParams(state);
546 capture_state_ = state; 424 capture_state_ = state;
547 thread_->Post(this, MSG_STATE_CHANGE, state_params); 425 SignalStateChange(this, capture_state_);
548 }
549
550 void VideoCapturer::OnMessage(rtc::Message* message) {
551 switch (message->message_id) {
552 case MSG_STATE_CHANGE: {
553 rtc::scoped_ptr<StateChangeParams> p(
554 static_cast<StateChangeParams*>(message->pdata));
555 SignalStateChange(this, p->data());
556 break;
557 }
558 case MSG_DO_PAUSE: {
559 Pause(true);
560 break;
561 }
562 case MSG_DO_UNPAUSE: {
563 Pause(false);
564 break;
565 }
566 default: {
567 ASSERT(false);
568 }
569 }
570 } 426 }
571 427
572 // Get the distance between the supported and desired formats. 428 // Get the distance between the supported and desired formats.
573 // Prioritization is done according to this algorithm: 429 // Prioritization is done according to this algorithm:
574 // 1) Width closeness. If not same, we prefer wider. 430 // 1) Width closeness. If not same, we prefer wider.
575 // 2) Height closeness. If not same, we prefer higher. 431 // 2) Height closeness. If not same, we prefer higher.
576 // 3) Framerate closeness. If not same, we prefer faster. 432 // 3) Framerate closeness. If not same, we prefer faster.
577 // 4) Compression. If desired format has a specific fourcc, we need exact match; 433 // 4) Compression. If desired format has a specific fourcc, we need exact match;
578 // otherwise, we use preference. 434 // otherwise, we use preference.
579 int64_t VideoCapturer::GetFormatDistance(const VideoFormat& desired, 435 int64_t VideoCapturer::GetFormatDistance(const VideoFormat& desired,
580 const VideoFormat& supported) { 436 const VideoFormat& supported) {
437 RTC_DCHECK(thread_checker_.CalledOnValidThread());
581 int64_t distance = kMaxDistance; 438 int64_t distance = kMaxDistance;
582 439
583 // Check fourcc. 440 // Check fourcc.
584 uint32_t supported_fourcc = CanonicalFourCC(supported.fourcc); 441 uint32_t supported_fourcc = CanonicalFourCC(supported.fourcc);
585 int64_t delta_fourcc = kMaxDistance; 442 int64_t delta_fourcc = kMaxDistance;
586 if (FOURCC_ANY == desired.fourcc) { 443 if (FOURCC_ANY == desired.fourcc) {
587 // Any fourcc is OK for the desired. Use preference to find best fourcc. 444 // Any fourcc is OK for the desired. Use preference to find best fourcc.
588 std::vector<uint32_t> preferred_fourccs; 445 std::vector<uint32_t> preferred_fourccs;
589 if (!GetPreferredFourccs(&preferred_fourccs)) { 446 if (!GetPreferredFourccs(&preferred_fourccs)) {
590 return distance; 447 return distance;
(...skipping 90 matching lines...) Expand 10 before | Expand all | Expand 10 after
681 } 538 }
682 if (filtered_supported_formats_.empty()) { 539 if (filtered_supported_formats_.empty()) {
683 // The device only captures at resolutions higher than |max_format_| this 540 // The device only captures at resolutions higher than |max_format_| this
684 // indicates that |max_format_| should be ignored as it is better to capture 541 // indicates that |max_format_| should be ignored as it is better to capture
685 // at too high a resolution than to not capture at all. 542 // at too high a resolution than to not capture at all.
686 filtered_supported_formats_ = supported_formats_; 543 filtered_supported_formats_ = supported_formats_;
687 } 544 }
688 } 545 }
689 546
690 bool VideoCapturer::ShouldFilterFormat(const VideoFormat& format) const { 547 bool VideoCapturer::ShouldFilterFormat(const VideoFormat& format) const {
548 RTC_DCHECK(thread_checker_.CalledOnValidThread());
691 if (!enable_camera_list_) { 549 if (!enable_camera_list_) {
692 return false; 550 return false;
693 } 551 }
694 return format.width > max_format_->width || 552 return format.width > max_format_->width ||
695 format.height > max_format_->height; 553 format.height > max_format_->height;
696 } 554 }
697 555
698 void VideoCapturer::UpdateStats(const CapturedFrame* captured_frame) { 556 void VideoCapturer::UpdateStats(const CapturedFrame* captured_frame) {
699 // Update stats protected from fetches from different thread. 557 // Update stats protected from fetches from different thread.
700 rtc::CritScope cs(&frame_stats_crit_); 558 rtc::CritScope cs(&frame_stats_crit_);
(...skipping 17 matching lines...) Expand all
718 void VideoCapturer::GetVariableSnapshot( 576 void VideoCapturer::GetVariableSnapshot(
719 const rtc::RollingAccumulator<T>& data, 577 const rtc::RollingAccumulator<T>& data,
720 VariableInfo<T>* stats) { 578 VariableInfo<T>* stats) {
721 stats->max_val = data.ComputeMax(); 579 stats->max_val = data.ComputeMax();
722 stats->mean = data.ComputeMean(); 580 stats->mean = data.ComputeMean();
723 stats->min_val = data.ComputeMin(); 581 stats->min_val = data.ComputeMin();
724 stats->variance = data.ComputeVariance(); 582 stats->variance = data.ComputeVariance();
725 } 583 }
726 584
727 } // namespace cricket 585 } // namespace cricket
OLDNEW
« no previous file with comments | « webrtc/media/base/videocapturer.h ('k') | webrtc/media/base/videocapturer_unittest.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698