Chromium Code Reviews

Side by Side Diff: talk/media/base/videocapturer.cc

Issue 1587193006: Move talk/media to webrtc/media (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Rebased to b647aca12a884a13c1728118586245399b55fa3d (#11493) Created 4 years, 10 months ago
1 /*
2 * libjingle
3 * Copyright 2010 Google Inc.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution.
13 * 3. The name of the author may not be used to endorse or promote products
14 * derived from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27
28 // Implementation file of class VideoCapturer.
29
30 #include "talk/media/base/videocapturer.h"
31
32 #include <algorithm>
33
34 #include "libyuv/scale_argb.h"
35 #include "talk/media/base/videoframefactory.h"
36 #include "webrtc/base/common.h"
37 #include "webrtc/base/logging.h"
38 #include "webrtc/base/systeminfo.h"
39
40 #if defined(HAVE_WEBRTC_VIDEO)
41 #include "talk/media/webrtc/webrtcvideoframe.h"
42 #include "talk/media/webrtc/webrtcvideoframefactory.h"
43 #endif // HAVE_WEBRTC_VIDEO
44
45 namespace cricket {
46
47 namespace {
48
49 // TODO(thorcarpenter): This is a BIG hack to flush the system with black
50 // frames. Frontends should coordinate to update the video state of a muted
51 // user. When all frontends do this, consider removing the black frame business.
52 const int kNumBlackFramesOnMute = 30;
53
54 // MessageHandler constants.
55 enum {
56 MSG_DO_PAUSE = 0,
57 MSG_DO_UNPAUSE,
58 MSG_STATE_CHANGE
59 };
60
61 static const int64_t kMaxDistance = ~(static_cast<int64_t>(1) << 63);
62 #ifdef WEBRTC_LINUX
63 static const int kYU12Penalty = 16; // Needs to be higher than MJPG index.
64 #endif
65 static const int kDefaultScreencastFps = 5;
66 typedef rtc::TypedMessageData<CaptureState> StateChangeParams;
67
68 // Limit stats data collection to ~20 seconds of 30fps data before dropping
69 // old data in case stats aren't reset for long periods of time.
70 static const size_t kMaxAccumulatorSize = 600;
71
72 } // namespace
73
74 /////////////////////////////////////////////////////////////////////
75 // Implementation of struct CapturedFrame
76 /////////////////////////////////////////////////////////////////////
77 CapturedFrame::CapturedFrame()
78 : width(0),
79 height(0),
80 fourcc(0),
81 pixel_width(0),
82 pixel_height(0),
83 time_stamp(0),
84 data_size(0),
85 rotation(webrtc::kVideoRotation_0),
86 data(NULL) {}
87
88 // TODO(fbarchard): Remove this function once lmimediaengine stops using it.
89 bool CapturedFrame::GetDataSize(uint32_t* size) const {
90 if (!size || data_size == CapturedFrame::kUnknownDataSize) {
91 return false;
92 }
93 *size = data_size;
94 return true;
95 }
96
97 /////////////////////////////////////////////////////////////////////
98 // Implementation of class VideoCapturer
99 /////////////////////////////////////////////////////////////////////
100 VideoCapturer::VideoCapturer()
101 : thread_(rtc::Thread::Current()),
102 adapt_frame_drops_data_(kMaxAccumulatorSize),
103 frame_time_data_(kMaxAccumulatorSize),
104 apply_rotation_(true) {
105 Construct();
106 }
107
108 VideoCapturer::VideoCapturer(rtc::Thread* thread)
109 : thread_(thread),
110 adapt_frame_drops_data_(kMaxAccumulatorSize),
111 frame_time_data_(kMaxAccumulatorSize),
112 apply_rotation_(true) {
113 Construct();
114 }
115
116 void VideoCapturer::Construct() {
117 ClearAspectRatio();
118 enable_camera_list_ = false;
119 square_pixel_aspect_ratio_ = false;
120 capture_state_ = CS_STOPPED;
121 SignalFrameCaptured.connect(this, &VideoCapturer::OnFrameCaptured);
122 scaled_width_ = 0;
123 scaled_height_ = 0;
124 muted_ = false;
125 black_frame_count_down_ = kNumBlackFramesOnMute;
126 enable_video_adapter_ = true;
127 adapt_frame_drops_ = 0;
128 previous_frame_time_ = 0.0;
129 #ifdef HAVE_WEBRTC_VIDEO
130 // There are lots of video capturers out there that don't call
131 // set_frame_factory. We can either go change all of them, or we
132 // can set this default.
133 // TODO(pthatcher): Remove this hack and require the frame factory
134 // to be passed in the constructor.
135 set_frame_factory(new WebRtcVideoFrameFactory());
136 #endif
137 }
138
139 const std::vector<VideoFormat>* VideoCapturer::GetSupportedFormats() const {
140 return &filtered_supported_formats_;
141 }
142
143 bool VideoCapturer::StartCapturing(const VideoFormat& capture_format) {
144 previous_frame_time_ = frame_length_time_reporter_.TimerNow();
145 CaptureState result = Start(capture_format);
146 const bool success = (result == CS_RUNNING) || (result == CS_STARTING);
147 if (!success) {
148 return false;
149 }
150 if (result == CS_RUNNING) {
151 SetCaptureState(result);
152 }
153 return true;
154 }
155
156 void VideoCapturer::UpdateAspectRatio(int ratio_w, int ratio_h) {
157 if (ratio_w == 0 || ratio_h == 0) {
158 LOG(LS_WARNING) << "UpdateAspectRatio ignored invalid ratio: "
159 << ratio_w << "x" << ratio_h;
160 return;
161 }
162 ratio_w_ = ratio_w;
163 ratio_h_ = ratio_h;
164 }
165
166 void VideoCapturer::ClearAspectRatio() {
167 ratio_w_ = 0;
168 ratio_h_ = 0;
169 }
170
171 // Override this to have more control of how your device is started/stopped.
172 bool VideoCapturer::Pause(bool pause) {
173 if (pause) {
174 if (capture_state() == CS_PAUSED) {
175 return true;
176 }
177 bool is_running = capture_state() == CS_STARTING ||
178 capture_state() == CS_RUNNING;
179 if (!is_running) {
180 LOG(LS_ERROR) << "Cannot pause a stopped camera.";
181 return false;
182 }
183 LOG(LS_INFO) << "Pausing a camera.";
184 rtc::scoped_ptr<VideoFormat> capture_format_when_paused(
185 capture_format_ ? new VideoFormat(*capture_format_) : NULL);
186 Stop();
187 SetCaptureState(CS_PAUSED);
188 // If you override this function be sure to restore the capture format
189 // after calling Stop().
190 SetCaptureFormat(capture_format_when_paused.get());
191 } else { // Unpause.
192 if (capture_state() != CS_PAUSED) {
193 LOG(LS_WARNING) << "Cannot unpause a camera that hasn't been paused.";
194 return false;
195 }
196 if (!capture_format_) {
197 LOG(LS_ERROR) << "Missing capture_format_, cannot unpause a camera.";
198 return false;
199 }
200 if (muted_) {
201 LOG(LS_WARNING) << "Camera cannot be unpaused while muted.";
202 return false;
203 }
204 LOG(LS_INFO) << "Unpausing a camera.";
205 if (!Start(*capture_format_)) {
206 LOG(LS_ERROR) << "Camera failed to start when unpausing.";
207 return false;
208 }
209 }
210 return true;
211 }
212
213 bool VideoCapturer::Restart(const VideoFormat& capture_format) {
214 if (!IsRunning()) {
215 return StartCapturing(capture_format);
216 }
217
218 if (GetCaptureFormat() != NULL && *GetCaptureFormat() == capture_format) {
219 // The requested format is the same; nothing to do.
220 return true;
221 }
222
223 Stop();
224 return StartCapturing(capture_format);
225 }
226
227 bool VideoCapturer::MuteToBlackThenPause(bool muted) {
228 if (muted == IsMuted()) {
229 return true;
230 }
231
232 LOG(LS_INFO) << (muted ? "Muting" : "Unmuting") << " this video capturer.";
233 muted_ = muted; // Do this before calling Pause().
234 if (muted) {
235 // Reset black frame count down.
236 black_frame_count_down_ = kNumBlackFramesOnMute;
237 // Subsequent frames will be overwritten with black, then the camera will be
238 // paused.
239 return true;
240 }
241 // Start the camera.
242 thread_->Clear(this, MSG_DO_PAUSE);
243 return Pause(false);
244 }
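
A minimal usage sketch of the mute flow above (the capturer pointer and call sites are hypothetical): muting blacks out the next kNumBlackFramesOnMute frames and then posts MSG_DO_PAUSE; unmuting clears any pending pause and restarts the camera.

  capturer->MuteToBlackThenPause(true);   // Subsequent frames are set to black;
                                          // after ~30 frames MSG_DO_PAUSE pauses
                                          // the camera.
  capturer->MuteToBlackThenPause(false);  // Clears any pending MSG_DO_PAUSE and
                                          // unpauses via Pause(false).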
245
246 // Note that the last caller decides whether rotation should be applied if there
247 // are multiple send streams using the same camera.
248 bool VideoCapturer::SetApplyRotation(bool enable) {
249 apply_rotation_ = enable;
250 if (frame_factory_) {
251 frame_factory_->SetApplyRotation(apply_rotation_);
252 }
253 return true;
254 }
255
256 void VideoCapturer::SetSupportedFormats(
257 const std::vector<VideoFormat>& formats) {
258 supported_formats_ = formats;
259 UpdateFilteredSupportedFormats();
260 }
261
262 bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format,
263 VideoFormat* best_format) {
264 // TODO(fbarchard): Directly support max_format.
265 UpdateFilteredSupportedFormats();
266 const std::vector<VideoFormat>* supported_formats = GetSupportedFormats();
267
268 if (supported_formats->empty()) {
269 return false;
270 }
271 LOG(LS_INFO) << " Capture Requested " << format.ToString();
272 int64_t best_distance = kMaxDistance;
273 std::vector<VideoFormat>::const_iterator best = supported_formats->end();
274 std::vector<VideoFormat>::const_iterator i;
275 for (i = supported_formats->begin(); i != supported_formats->end(); ++i) {
276 int64_t distance = GetFormatDistance(format, *i);
277 // TODO(fbarchard): Reduce to LS_VERBOSE if/when camera capture is
278 // relatively bug free.
279 LOG(LS_INFO) << " Supported " << i->ToString() << " distance " << distance;
280 if (distance < best_distance) {
281 best_distance = distance;
282 best = i;
283 }
284 }
285 if (supported_formats->end() == best) {
286 LOG(LS_ERROR) << " No acceptable camera format found";
287 return false;
288 }
289
290 if (best_format) {
291 best_format->width = best->width;
292 best_format->height = best->height;
293 best_format->fourcc = best->fourcc;
294 best_format->interval = best->interval;
295 LOG(LS_INFO) << " Best " << best_format->ToString() << " Interval "
296 << best_format->interval << " distance " << best_distance;
297 }
298 return true;
299 }
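
For context, a minimal caller-side sketch (the capturer instance and the requested values are hypothetical) of how GetBestCaptureFormat() and StartCapturing() are typically combined:

  cricket::VideoFormat desired(640, 480,
                               cricket::VideoFormat::FpsToInterval(30),
                               cricket::FOURCC_ANY);
  cricket::VideoFormat best;
  if (capturer->GetBestCaptureFormat(desired, &best)) {
    // Starts the capturer with the supported format closest by distance.
    capturer->StartCapturing(best);
  }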
300
301 void VideoCapturer::ConstrainSupportedFormats(const VideoFormat& max_format) {
302 max_format_.reset(new VideoFormat(max_format));
303 LOG(LS_VERBOSE) << " ConstrainSupportedFormats " << max_format.ToString();
304 UpdateFilteredSupportedFormats();
305 }
306
307 std::string VideoCapturer::ToString(const CapturedFrame* captured_frame) const {
308 std::string fourcc_name = GetFourccName(captured_frame->fourcc) + " ";
309 for (std::string::const_iterator i = fourcc_name.begin();
310 i < fourcc_name.end(); ++i) {
311 // Test that the character is printable; avoid isprint(), which asserts on negatives.
312 if (*i < 32 || *i >= 127) {
313 fourcc_name = "";
314 break;
315 }
316 }
317
318 std::ostringstream ss;
319 ss << fourcc_name << captured_frame->width << "x" << captured_frame->height;
320 return ss.str();
321 }
322
323 void VideoCapturer::set_frame_factory(VideoFrameFactory* frame_factory) {
324 frame_factory_.reset(frame_factory);
325 if (frame_factory) {
326 frame_factory->SetApplyRotation(apply_rotation_);
327 }
328 }
329
330 void VideoCapturer::GetStats(VariableInfo<int>* adapt_drops_stats,
331 VariableInfo<int>* effect_drops_stats,
332 VariableInfo<double>* frame_time_stats,
333 VideoFormat* last_captured_frame_format) {
334 rtc::CritScope cs(&frame_stats_crit_);
335 GetVariableSnapshot(adapt_frame_drops_data_, adapt_drops_stats);
336 GetVariableSnapshot(frame_time_data_, frame_time_stats);
337 *last_captured_frame_format = last_captured_frame_format_;
338
339 adapt_frame_drops_data_.Reset();
340 frame_time_data_.Reset();
341 }
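
A rough consumer-side sketch (variable names are invented); note that effect_drops_stats is accepted but not populated by this implementation, and that the accumulators are reset on every call:

  cricket::VariableInfo<int> adapt_drops;
  cricket::VariableInfo<int> effect_drops;
  cricket::VariableInfo<double> frame_times;
  cricket::VideoFormat last_format;
  capturer->GetStats(&adapt_drops, &effect_drops, &frame_times, &last_format);
  // frame_times.mean is the average inter-frame time as sampled above;
  // adapt_drops.max_val is the largest number of adapter drops seen between
  // two delivered frames since the previous call.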
342
343 void VideoCapturer::OnFrameCaptured(VideoCapturer*,
344 const CapturedFrame* captured_frame) {
345 if (muted_) {
346 if (black_frame_count_down_ == 0) {
347 thread_->Post(this, MSG_DO_PAUSE, NULL);
348 } else {
349 --black_frame_count_down_;
350 }
351 }
352
353 if (SignalVideoFrame.is_empty()) {
354 return;
355 }
356
357 // Use a temporary buffer to hold the scaled frame data.
358 rtc::scoped_ptr<uint8_t[]> scale_buffer;
359
360 if (IsScreencast()) {
361 int scaled_width, scaled_height;
362 int desired_screencast_fps = capture_format_.get() ?
363 VideoFormat::IntervalToFps(capture_format_->interval) :
364 kDefaultScreencastFps;
365 ComputeScale(captured_frame->width, captured_frame->height,
366 desired_screencast_fps, &scaled_width, &scaled_height);
367
368 if (FOURCC_ARGB == captured_frame->fourcc &&
369 (scaled_width != captured_frame->width ||
370 scaled_height != captured_frame->height)) {
371 if (scaled_width != scaled_width_ || scaled_height != scaled_height_) {
372 LOG(LS_INFO) << "Scaling Screencast from "
373 << captured_frame->width << "x"
374 << captured_frame->height << " to "
375 << scaled_width << "x" << scaled_height;
376 scaled_width_ = scaled_width;
377 scaled_height_ = scaled_height;
378 }
379 CapturedFrame* modified_frame =
380 const_cast<CapturedFrame*>(captured_frame);
381 const int modified_frame_size = scaled_width * scaled_height * 4;
382 scale_buffer.reset(new uint8_t[modified_frame_size]);
383 // Compute new width such that width * height is less than maximum but
384 // maintains original captured frame aspect ratio.
385 // Round down width to multiple of 4 so odd width won't round up beyond
386 // maximum, and so chroma channel is even width to simplify spatial
387 // resampling.
388 libyuv::ARGBScale(reinterpret_cast<const uint8_t*>(captured_frame->data),
389 captured_frame->width * 4, captured_frame->width,
390 captured_frame->height, scale_buffer.get(),
391 scaled_width * 4, scaled_width, scaled_height,
392 libyuv::kFilterBilinear);
393 modified_frame->width = scaled_width;
394 modified_frame->height = scaled_height;
395 modified_frame->data_size = scaled_width * 4 * scaled_height;
396 modified_frame->data = scale_buffer.get();
397 }
398 }
399
400 const int kYuy2Bpp = 2;
401 const int kArgbBpp = 4;
402 // TODO(fbarchard): Make a helper function to adjust pixels to square.
403 // TODO(fbarchard): Hook up experiment to scaling.
404 // TODO(fbarchard): Avoid scale and convert if muted.
405 // Temporary buffer is scoped here so it will persist until i420_frame.Init()
406 // makes a copy of the frame, converting to I420.
407 rtc::scoped_ptr<uint8_t[]> temp_buffer;
408 // YUY2 can be scaled vertically using an ARGB scaler. Aspect ratio is only
409 // a problem on OSX. OSX always converts webcams to YUY2 or UYVY.
410 bool can_scale =
411 FOURCC_YUY2 == CanonicalFourCC(captured_frame->fourcc) ||
412 FOURCC_UYVY == CanonicalFourCC(captured_frame->fourcc);
413
414 // If pixels are not square, optionally use vertical scaling to make them
415 // square. Square pixels simplify the rest of the pipeline, including
416 // effects and rendering.
417 if (can_scale && square_pixel_aspect_ratio_ &&
418 captured_frame->pixel_width != captured_frame->pixel_height) {
419 int scaled_width, scaled_height;
420 // modified_frame points to the captured_frame but with const casted away
421 // so it can be modified.
422 CapturedFrame* modified_frame = const_cast<CapturedFrame*>(captured_frame);
423 // Compute the frame size that gives the pixels a square aspect ratio.
424 ComputeScaleToSquarePixels(captured_frame->width, captured_frame->height,
425 captured_frame->pixel_width,
426 captured_frame->pixel_height,
427 &scaled_width, &scaled_height);
428
429 if (scaled_width != scaled_width_ || scaled_height != scaled_height_) {
430 LOG(LS_INFO) << "Scaling WebCam from "
431 << captured_frame->width << "x"
432 << captured_frame->height << " to "
433 << scaled_width << "x" << scaled_height
434 << " for PAR "
435 << captured_frame->pixel_width << "x"
436 << captured_frame->pixel_height;
437 scaled_width_ = scaled_width;
438 scaled_height_ = scaled_height;
439 }
440 const int modified_frame_size = scaled_width * scaled_height * kYuy2Bpp;
441 uint8_t* temp_buffer_data;
442 // Pixels are wide and short; increasing height requires a temporary buffer.
443 if (scaled_height > captured_frame->height) {
444 temp_buffer.reset(new uint8_t[modified_frame_size]);
445 temp_buffer_data = temp_buffer.get();
446 } else {
447 // Pixels are narrow and tall; decreasing height. Scaling will be done
448 // in place.
449 temp_buffer_data = reinterpret_cast<uint8_t*>(captured_frame->data);
450 }
451
452 // Use ARGBScaler to vertically scale the YUY2 image, adjusting for 16 bpp.
453 libyuv::ARGBScale(reinterpret_cast<const uint8_t*>(captured_frame->data),
454 captured_frame->width * kYuy2Bpp, // Stride for YUY2.
455 captured_frame->width * kYuy2Bpp / kArgbBpp, // Width.
456 abs(captured_frame->height), // Height.
457 temp_buffer_data,
458 scaled_width * kYuy2Bpp, // Stride for YUY2.
459 scaled_width * kYuy2Bpp / kArgbBpp, // Width.
460 abs(scaled_height), // New height.
461 libyuv::kFilterBilinear);
462 modified_frame->width = scaled_width;
463 modified_frame->height = scaled_height;
464 modified_frame->pixel_width = 1;
465 modified_frame->pixel_height = 1;
466 modified_frame->data_size = modified_frame_size;
467 modified_frame->data = temp_buffer_data;
468 }
469
470 // Size to crop the captured frame to. This adjusts the captured frame's
471 // aspect ratio to match the final view aspect ratio, considering pixel
472 // aspect ratio and rotation. The final size may be scaled down by video
473 // adapter to better match ratio_w_ x ratio_h_.
474 // Note that abs() of frame height is passed in, because source may be
475 // inverted, but output will be positive.
476 int cropped_width = captured_frame->width;
477 int cropped_height = captured_frame->height;
478
479 // TODO(fbarchard): Improve logic to pad or crop.
480 // MJPG can be cropped vertically but not horizontally, so this logic disables
481 // cropping when a horizontal crop would be needed. Alternatively we could pad
482 // the image with black, or implement a two-step crop.
483 bool can_crop = true;
484 if (captured_frame->fourcc == FOURCC_MJPG) {
485 float cam_aspect = static_cast<float>(captured_frame->width) /
486 static_cast<float>(captured_frame->height);
487 float view_aspect = static_cast<float>(ratio_w_) /
488 static_cast<float>(ratio_h_);
489 can_crop = cam_aspect <= view_aspect;
490 }
491 if (can_crop && !IsScreencast()) {
492 // TODO(ronghuawu): The capturer should always produce the native
493 // resolution and the cropping should be done in downstream code.
494 ComputeCrop(ratio_w_, ratio_h_, captured_frame->width,
495 abs(captured_frame->height), captured_frame->pixel_width,
496 captured_frame->pixel_height, captured_frame->rotation,
497 &cropped_width, &cropped_height);
498 }
499
500 int adapted_width = cropped_width;
501 int adapted_height = cropped_height;
502 if (enable_video_adapter_ && !IsScreencast()) {
503 const VideoFormat adapted_format =
504 video_adapter_.AdaptFrameResolution(cropped_width, cropped_height);
505 if (adapted_format.IsSize0x0()) {
506 // VideoAdapter dropped the frame.
507 ++adapt_frame_drops_;
508 return;
509 }
510 adapted_width = adapted_format.width;
511 adapted_height = adapted_format.height;
512 }
513
514 if (!frame_factory_) {
515 LOG(LS_ERROR) << "No video frame factory.";
516 return;
517 }
518
519 rtc::scoped_ptr<VideoFrame> adapted_frame(
520 frame_factory_->CreateAliasedFrame(captured_frame,
521 cropped_width, cropped_height,
522 adapted_width, adapted_height));
523
524 if (!adapted_frame) {
525 // TODO(fbarchard): LOG more information about captured frame attributes.
526 LOG(LS_ERROR) << "Couldn't convert to I420! "
527 << "From " << ToString(captured_frame) << " To "
528 << cropped_width << " x " << cropped_height;
529 return;
530 }
531
532 if (muted_) {
533 // TODO(pthatcher): Use frame_factory_->CreateBlackFrame() instead.
534 adapted_frame->SetToBlack();
535 }
536 SignalVideoFrame(this, adapted_frame.get());
537
538 UpdateStats(captured_frame);
539 }
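
To summarize the per-frame path in OnFrameCaptured() above (an editorial gloss, not part of the original source):

  // 1. If muted, count down black frames and eventually post MSG_DO_PAUSE.
  // 2. ARGB screencast frames are downscaled toward the screencast fps/size budget.
  // 3. Non-square-pixel YUY2/UYVY frames (mainly OS X) are vertically rescaled to
  //    1:1 when square_pixel_aspect_ratio_ is set.
  // 4. The frame is cropped toward ratio_w_ x ratio_h_ (skipped for screencasts and
  //    for MJPG needing a horizontal crop), then possibly adapted or dropped by
  //    video_adapter_.
  // 5. frame_factory_ wraps the result as a VideoFrame, it is blacked out if muted,
  //    SignalVideoFrame fires, and UpdateStats() records timing.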
540
541 void VideoCapturer::SetCaptureState(CaptureState state) {
542 if (state == capture_state_) {
543 // Don't trigger a state changed callback if the state hasn't changed.
544 return;
545 }
546 StateChangeParams* state_params = new StateChangeParams(state);
547 capture_state_ = state;
548 thread_->Post(this, MSG_STATE_CHANGE, state_params);
549 }
550
551 void VideoCapturer::OnMessage(rtc::Message* message) {
552 switch (message->message_id) {
553 case MSG_STATE_CHANGE: {
554 rtc::scoped_ptr<StateChangeParams> p(
555 static_cast<StateChangeParams*>(message->pdata));
556 SignalStateChange(this, p->data());
557 break;
558 }
559 case MSG_DO_PAUSE: {
560 Pause(true);
561 break;
562 }
563 case MSG_DO_UNPAUSE: {
564 Pause(false);
565 break;
566 }
567 default: {
568 ASSERT(false);
569 }
570 }
571 }
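
As a hedged sketch of how a client might observe these state transitions (assuming the sigslot-based SignalStateChange member declared in videocapturer.h, mirroring the SignalFrameCaptured.connect() call in Construct(); the observer class and include path are illustrative):

  #include "webrtc/base/sigslot.h"

  class CaptureStateObserver : public sigslot::has_slots<> {
   public:
    explicit CaptureStateObserver(cricket::VideoCapturer* capturer) {
      capturer->SignalStateChange.connect(this,
                                          &CaptureStateObserver::OnStateChange);
    }

   private:
    void OnStateChange(cricket::VideoCapturer* capturer,
                       cricket::CaptureState state) {
      // React to CS_STARTING / CS_RUNNING / CS_PAUSED / CS_STOPPED here.
    }
  };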
572
573 // Get the distance between the supported and desired formats.
574 // Prioritization is done according to this algorithm:
575 // 1) Width closeness. If not same, we prefer wider.
576 // 2) Height closeness. If not same, we prefer higher.
577 // 3) Framerate closeness. If not same, we prefer faster.
578 // 4) Compression. If desired format has a specific fourcc, we need exact match;
579 // otherwise, we use preference.
580 int64_t VideoCapturer::GetFormatDistance(const VideoFormat& desired,
581 const VideoFormat& supported) {
582 int64_t distance = kMaxDistance;
583
584 // Check fourcc.
585 uint32_t supported_fourcc = CanonicalFourCC(supported.fourcc);
586 int64_t delta_fourcc = kMaxDistance;
587 if (FOURCC_ANY == desired.fourcc) {
588 // Any fourcc is OK for the desired. Use preference to find best fourcc.
589 std::vector<uint32_t> preferred_fourccs;
590 if (!GetPreferredFourccs(&preferred_fourccs)) {
591 return distance;
592 }
593
594 for (size_t i = 0; i < preferred_fourccs.size(); ++i) {
595 if (supported_fourcc == CanonicalFourCC(preferred_fourccs[i])) {
596 delta_fourcc = i;
597 #ifdef WEBRTC_LINUX
598 // For HD avoid YU12 which is a software conversion and has 2 bugs
599 // b/7326348 b/6960899. Reenable when fixed.
600 if (supported.height >= 720 && (supported_fourcc == FOURCC_YU12 ||
601 supported_fourcc == FOURCC_YV12)) {
602 delta_fourcc += kYU12Penalty;
603 }
604 #endif
605 break;
606 }
607 }
608 } else if (supported_fourcc == CanonicalFourCC(desired.fourcc)) {
609 delta_fourcc = 0; // Need exact match.
610 }
611
612 if (kMaxDistance == delta_fourcc) {
613 // Failed to match fourcc.
614 return distance;
615 }
616
617 // Check resolution and fps.
618 int desired_width = desired.width;
619 int desired_height = desired.height;
620 int64_t delta_w = supported.width - desired_width;
621 float supported_fps = VideoFormat::IntervalToFpsFloat(supported.interval);
622 float delta_fps =
623 supported_fps - VideoFormat::IntervalToFpsFloat(desired.interval);
624 // Check the supported height against the height we would like it to be.
625 int64_t aspect_h = desired_width
626 ? supported.width * desired_height / desired_width
627 : desired_height;
628 int64_t delta_h = supported.height - aspect_h;
629
630 distance = 0;
631 // Set high penalty if the supported format is lower than the desired format.
632 // 3x means we would prefer going down to 3/4 rather than up to double.
633 // But we would prefer going up to double over down to 1/2. This is conservative,
634 // strongly avoiding going down in resolution, similar to
635 // the old method, but not completely ruling it out in extreme situations.
636 // It also ignores framerate, which is often very low at high resolutions.
637 // TODO(fbarchard): Improve logic to use weighted factors.
638 static const int kDownPenalty = -3;
639 if (delta_w < 0) {
640 delta_w = delta_w * kDownPenalty;
641 }
642 if (delta_h < 0) {
643 delta_h = delta_h * kDownPenalty;
644 }
645 // Require camera fps to be at least ~77% (23/30) of what is requested if the
646 // resolution matches.
647 // Require camera fps to be at least ~93% (28/30) of what is requested if the
648 // resolution differs, allowing for slight fps variations, e.g. 29.97 vs 30.
649 if (delta_fps < 0) {
650 float min_desirable_fps = delta_w ?
651 VideoFormat::IntervalToFpsFloat(desired.interval) * 28.f / 30.f :
652 VideoFormat::IntervalToFpsFloat(desired.interval) * 23.f / 30.f;
653 delta_fps = -delta_fps;
654 if (supported_fps < min_desirable_fps) {
655 distance |= static_cast<int64_t>(1) << 62;
656 } else {
657 distance |= static_cast<int64_t>(1) << 15;
658 }
659 }
660 int64_t idelta_fps = static_cast<int>(delta_fps);
661
662 // 12 bits for width and height and 8 bits for fps and fourcc.
663 distance |=
664 (delta_w << 28) | (delta_h << 16) | (idelta_fps << 8) | delta_fourcc;
665
666 return distance;
667 }
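
A worked example of the scoring above (all numbers illustrative): with a desired format of 640x360@30fps and FOURCC_ANY, a supported 1280x720@30fps format whose fourcc is the first preference gives delta_fourcc = 0, delta_fps = 0, aspect_h = 1280 * 360 / 640 = 720, hence delta_h = 0 and delta_w = 640, for a distance of (640 << 28). A supported 320x180@30fps format gives delta_w = -320, tripled to 960 by kDownPenalty, for a distance of (960 << 28). The 1280x720 format therefore wins, matching the comment that going up to double the desired resolution is preferred over going down to half.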
668
669 void VideoCapturer::UpdateFilteredSupportedFormats() {
670 filtered_supported_formats_.clear();
671 filtered_supported_formats_ = supported_formats_;
672 if (!max_format_) {
673 return;
674 }
675 std::vector<VideoFormat>::iterator iter = filtered_supported_formats_.begin();
676 while (iter != filtered_supported_formats_.end()) {
677 if (ShouldFilterFormat(*iter)) {
678 iter = filtered_supported_formats_.erase(iter);
679 } else {
680 ++iter;
681 }
682 }
683 if (filtered_supported_formats_.empty()) {
684 // The device only captures at resolutions higher than |max_format_|. In this
685 // case |max_format_| should be ignored, as it is better to capture at too
686 // high a resolution than to not capture at all.
687 filtered_supported_formats_ = supported_formats_;
688 }
689 }
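
A small worked sketch of the filter and its fallback (formats and values invented, and it assumes enable_camera_list_ is true so ShouldFilterFormat() actually filters):

  // Assume the device reports 1280x720, 640x480 and 320x240.
  capturer->ConstrainSupportedFormats(
      cricket::VideoFormat(640, 480,
                           cricket::VideoFormat::FpsToInterval(30),
                           cricket::FOURCC_ANY));
  // GetSupportedFormats() now returns {640x480, 320x240}. If every reported
  // format had exceeded 640x480, the fallback above would restore the full
  // list, since capturing too large beats not capturing at all.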
690
691 bool VideoCapturer::ShouldFilterFormat(const VideoFormat& format) const {
692 if (!enable_camera_list_) {
693 return false;
694 }
695 return format.width > max_format_->width ||
696 format.height > max_format_->height;
697 }
698
699 void VideoCapturer::UpdateStats(const CapturedFrame* captured_frame) {
700 // Update stats, protected against fetches from a different thread.
701 rtc::CritScope cs(&frame_stats_crit_);
702
703 last_captured_frame_format_.width = captured_frame->width;
704 last_captured_frame_format_.height = captured_frame->height;
705 // TODO(ronghuawu): Useful to report interval as well?
706 last_captured_frame_format_.interval = 0;
707 last_captured_frame_format_.fourcc = captured_frame->fourcc;
708
709 double time_now = frame_length_time_reporter_.TimerNow();
710 if (previous_frame_time_ != 0.0) {
711 adapt_frame_drops_data_.AddSample(adapt_frame_drops_);
712 frame_time_data_.AddSample(time_now - previous_frame_time_);
713 }
714 previous_frame_time_ = time_now;
715 adapt_frame_drops_ = 0;
716 }
717
718 template<class T>
719 void VideoCapturer::GetVariableSnapshot(
720 const rtc::RollingAccumulator<T>& data,
721 VariableInfo<T>* stats) {
722 stats->max_val = data.ComputeMax();
723 stats->mean = data.ComputeMean();
724 stats->min_val = data.ComputeMin();
725 stats->variance = data.ComputeVariance();
726 }
727
728 } // namespace cricket