Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(20)

Side by Side Diff: media/filters/ffmpeg_video_decoder.cc

Issue 1254953004: Hacking ffvp9 decoder support for profiling. Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Suppress Windows warning Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « media/filters/ffmpeg_video_decoder.h ('k') | media/filters/vpx_video_decoder.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/filters/ffmpeg_video_decoder.h" 5 #include "media/filters/ffmpeg_video_decoder.h"
6 6
7 #include <stddef.h> 7 #include <stddef.h>
8 #include <stdint.h> 8 #include <stdint.h>
9 9
10 #include <algorithm> 10 #include <algorithm>
11 #include <string> 11 #include <string>
12 12
13 #include "base/bind.h" 13 #include "base/bind.h"
14 #include "base/callback_helpers.h" 14 #include "base/callback_helpers.h"
15 #include "base/command_line.h" 15 #include "base/command_line.h"
16 #include "base/location.h" 16 #include "base/location.h"
17 #include "base/single_thread_task_runner.h" 17 #include "base/single_thread_task_runner.h"
18 #include "base/strings/string_number_conversions.h" 18 #include "base/strings/string_number_conversions.h"
19 #include "base/sys_info.h"
19 #include "media/base/bind_to_current_loop.h" 20 #include "media/base/bind_to_current_loop.h"
20 #include "media/base/decoder_buffer.h" 21 #include "media/base/decoder_buffer.h"
21 #include "media/base/limits.h" 22 #include "media/base/limits.h"
22 #include "media/base/media_switches.h" 23 #include "media/base/media_switches.h"
23 #include "media/base/timestamp_constants.h" 24 #include "media/base/timestamp_constants.h"
24 #include "media/base/video_frame.h" 25 #include "media/base/video_frame.h"
25 #include "media/base/video_util.h" 26 #include "media/base/video_util.h"
26 #include "media/ffmpeg/ffmpeg_common.h" 27 #include "media/ffmpeg/ffmpeg_common.h"
27 #include "media/filters/ffmpeg_glue.h" 28 #include "media/filters/ffmpeg_glue.h"
29 #include "third_party/ffmpeg/libavutil/intreadwrite.h"
28 30
29 namespace media { 31 namespace media {
30 32
31 // Always try to use two threads for video decoding. There is little reason 33 // Always try to use two threads for video decoding. There is little reason
32 // not to since current day CPUs tend to be multi-core and we measured 34 // not to since current day CPUs tend to be multi-core and we measured
33 // performance benefits on older machines such as P4s with hyperthreading. 35 // performance benefits on older machines such as P4s with hyperthreading.
34 // 36 //
35 // Handling decoding on separate threads also frees up the pipeline thread to 37 // Handling decoding on separate threads also frees up the pipeline thread to
36 // continue processing. Although it'd be nice to have the option of a single 38 // continue processing. Although it'd be nice to have the option of a single
37 // decoding thread, FFmpeg treats having one thread the same as having zero 39 // decoding thread, FFmpeg treats having one thread the same as having zero
38 // threads (i.e., avcodec_decode_video() will execute on the calling thread). 40 // threads (i.e., avcodec_decode_video() will execute on the calling thread).
39 // Yet another reason for having two threads :) 41 // Yet another reason for having two threads :)
40 static const int kDecodeThreads = 2; 42 static const int kDecodeThreads = 2;
41 static const int kMaxDecodeThreads = 16; 43 static const int kMaxDecodeThreads = 16;
42 44
43 // Returns the number of threads given the FFmpeg CodecID. Also inspects the 45 // Returns the number of threads given the FFmpeg CodecID. Also inspects the
44 // command line for a valid --video-threads flag. 46 // command line for a valid --video-threads flag.
45 static int GetThreadCount(AVCodecID codec_id) { 47 static int GetThreadCount(const VideoDecoderConfig& config) {
46 // Refer to http://crbug.com/93932 for tsan suppressions on decoding. 48 // Refer to http://crbug.com/93932 for tsan suppressions on decoding.
47 int decode_threads = kDecodeThreads; 49 int decode_threads = kDecodeThreads;
48 50
49 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess(); 51 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
50 std::string threads(cmd_line->GetSwitchValueASCII(switches::kVideoThreads)); 52 std::string threads(cmd_line->GetSwitchValueASCII(switches::kVideoThreads));
51 if (threads.empty() || !base::StringToInt(threads, &decode_threads)) 53 if (threads.empty() || !base::StringToInt(threads, &decode_threads)) {
54 // TODO(chcunningham): Investigate perf with more threads.
55 if (config.codec() == kCodecVP9) {
56 // For VP9 decode when using the default thread count, increase the number
57 // of decode threads to equal the maximum number of tiles possible for
58 // higher resolution streams.
59 if (config.coded_size().width() >= 2048)
60 decode_threads = 8;
61 else if (config.coded_size().width() >= 1024)
62 decode_threads = 4;
63 }
64 decode_threads =
65 std::min(decode_threads, base::SysInfo::NumberOfProcessors());
52 return decode_threads; 66 return decode_threads;
67 }
53 68
54 decode_threads = std::max(decode_threads, 0); 69 decode_threads = std::max(decode_threads, 0);
55 decode_threads = std::min(decode_threads, kMaxDecodeThreads); 70 decode_threads = std::min(decode_threads, kMaxDecodeThreads);
56 return decode_threads; 71 return decode_threads;
57 } 72 }
58 73
59 static int GetVideoBufferImpl(struct AVCodecContext* s, 74 static int GetVideoBufferImpl(struct AVCodecContext* s,
60 AVFrame* frame, 75 AVFrame* frame,
61 int flags) { 76 int flags) {
62 FFmpegVideoDecoder* decoder = static_cast<FFmpegVideoDecoder*>(s->opaque); 77 FFmpegVideoDecoder* decoder = static_cast<FFmpegVideoDecoder*>(s->opaque);
63 return decoder->GetVideoBuffer(s, frame, flags); 78 return decoder->GetVideoBuffer(s, frame, flags);
64 } 79 }
65 80
66 static void ReleaseVideoBufferImpl(void* opaque, uint8_t* data) { 81 static void ReleaseVideoBufferImpl(void* opaque, uint8_t* data) {
67 scoped_refptr<VideoFrame> video_frame; 82 scoped_refptr<VideoFrame> video_frame;
68 video_frame.swap(reinterpret_cast<VideoFrame**>(&opaque)); 83 video_frame.swap(reinterpret_cast<VideoFrame**>(&opaque));
69 } 84 }
70 85
71 // static 86 // static
72 bool FFmpegVideoDecoder::IsCodecSupported(VideoCodec codec) { 87 bool FFmpegVideoDecoder::IsCodecSupported(VideoCodec codec) {
73 FFmpegGlue::InitializeFFmpeg(); 88 FFmpegGlue::InitializeFFmpeg();
74 return avcodec_find_decoder(VideoCodecToCodecID(codec)) != nullptr; 89 return avcodec_find_decoder(VideoCodecToCodecID(codec)) != nullptr;
75 } 90 }
76 91
77 FFmpegVideoDecoder::FFmpegVideoDecoder() 92 FFmpegVideoDecoder::FFmpegVideoDecoder()
78 : state_(kUninitialized), decode_nalus_(false) { 93 : state_(kUninitialized), decode_nalus_(false), parser_context_(nullptr) {
79 thread_checker_.DetachFromThread(); 94 thread_checker_.DetachFromThread();
80 } 95 }
81 96
82 int FFmpegVideoDecoder::GetVideoBuffer(struct AVCodecContext* codec_context, 97 int FFmpegVideoDecoder::GetVideoBuffer(struct AVCodecContext* codec_context,
83 AVFrame* frame, 98 AVFrame* frame,
84 int flags) { 99 int flags) {
85 // Don't use |codec_context_| here! With threaded decoding, 100 // Don't use |codec_context_| here! With threaded decoding,
86 // it will contain unsynchronized width/height/pix_fmt values, 101 // it will contain unsynchronized width/height/pix_fmt values,
87 // whereas |codec_context| contains the current thread's 102 // whereas |codec_context| contains the current thread's
88 // updated width/height/pix_fmt, which can change for adaptive 103 // updated width/height/pix_fmt, which can change for adaptive
(...skipping 187 matching lines...) Expand 10 before | Expand all | Expand 10 after
276 291
277 if (state_ != kUninitialized) 292 if (state_ != kUninitialized)
278 ReleaseFFmpegResources(); 293 ReleaseFFmpegResources();
279 } 294 }
280 295
281 bool FFmpegVideoDecoder::FFmpegDecode( 296 bool FFmpegVideoDecoder::FFmpegDecode(
282 const scoped_refptr<DecoderBuffer>& buffer, 297 const scoped_refptr<DecoderBuffer>& buffer,
283 bool* has_produced_frame) { 298 bool* has_produced_frame) {
284 DCHECK(!*has_produced_frame); 299 DCHECK(!*has_produced_frame);
285 300
301 size_t remaining_size = buffer->end_of_stream() ? 0 : buffer->data_size();
302
286 // Create a packet for input data. 303 // Create a packet for input data.
287 // Due to FFmpeg API changes we no longer have const read-only pointers. 304 // Due to FFmpeg API changes we no longer have const read-only pointers.
288 AVPacket packet; 305 AVPacket packet;
289 av_init_packet(&packet);
290 if (buffer->end_of_stream()) {
291 packet.data = NULL;
292 packet.size = 0;
293 } else {
294 packet.data = const_cast<uint8_t*>(buffer->data());
295 packet.size = buffer->data_size();
296 306
297 // Let FFmpeg handle presentation timestamp reordering. 307 do {
298 codec_context_->reordered_opaque = buffer->timestamp().InMicroseconds(); 308 av_init_packet(&packet);
299 } 309 if (buffer->end_of_stream()) {
310 packet.data = NULL;
311 packet.size = 0;
312 } else {
313 // FFvp9 can't handle superframes, which are multiple frames inside a
314 // single packet. Normally its demuxer handles this parsing, but when
315 // ChunkDemuxer is used, the packet isn't split right.
316 if (codec_context_->codec_id == AV_CODEC_ID_VP9) {
317 if (!parser_context_) {
318 parser_context_.reset(av_parser_init(AV_CODEC_ID_VP9));
319 }
320 // TODO(chcunningham): The PTS/DTS stuff is a bit weird, seems mostly
321 // unused in internal parsing code. Setting NOPTS for now.
322 // I think superframes have no presentation, so it should be fine to
323 // set the same timestamp (like alt-ref?)
324 int len = av_parser_parse2(
325 parser_context_.get(), codec_context_.get(), &packet.data,
326 &packet.size,
327 buffer->writable_data() + (buffer->data_size() - remaining_size),
328 remaining_size, AV_NOPTS_VALUE, AV_NOPTS_VALUE, -1);
329 remaining_size -= len;
330 } else {
331 packet.data = const_cast<uint8_t*>(buffer->data());
332 packet.size = buffer->data_size();
333 remaining_size = 0;
334 }
300 335
301 int frame_decoded = 0; 336 // Let FFmpeg handle presentation timestamp reordering.
302 int result = avcodec_decode_video2(codec_context_.get(), 337 codec_context_->reordered_opaque = buffer->timestamp().InMicroseconds();
303 av_frame_.get(), 338 }
304 &frame_decoded,
305 &packet);
306 // Log the problem if we can't decode a video frame and exit early.
307 if (result < 0) {
308 LOG(ERROR) << "Error decoding video: " << buffer->AsHumanReadableString();
309 return false;
310 }
311 339
312 // FFmpeg says some codecs might have multiple frames per packet. Previous 340 int got_picture = 0;
313 // discussions with rbultje@ indicate this shouldn't be true for the codecs 341 int result = avcodec_decode_video2(codec_context_.get(), av_frame_.get(),
314 // we use. 342 &got_picture, &packet);
315 DCHECK_EQ(result, packet.size); 343 // Log the problem if we can't decode a video frame and exit early.
344 if (result < 0) {
345 LOG(ERROR) << "Error decoding video: " << buffer->AsHumanReadableString();
346 return false;
347 }
316 348
317 // If no frame was produced then signal that more data is required to 349 // FFmpeg says some codecs might have multiple frames per packet. Previous
318 // produce more frames. This can happen under two circumstances: 350 // discussions with rbultje@ indicate this shouldn't be true for the codecs
319 // 1) Decoder was recently initialized/flushed 351 // we use.
320 // 2) End of stream was reached and all internal frames have been output 352 DCHECK_EQ(result, packet.size);
321 if (frame_decoded == 0) {
322 return true;
323 }
324 353
325 // TODO(fbarchard): Workaround for FFmpeg http://crbug.com/27675 354 // If no picture was produced then signal that more data is required to
326 // The decoder is in a bad state and not decoding correctly. 355 // produce more frames. This can happen under three circumstances:
327 // Checking for NULL avoids a crash in CopyPlane(). 356 // 1) Decoder was recently initialized/flushed
328 if (!av_frame_->data[VideoFrame::kYPlane] || 357 // 2) End of stream was reached and all internal frames have been output
329 !av_frame_->data[VideoFrame::kUPlane] || 358 // 3) The frame is needed for decoding dependencies, but is not rendered
330 !av_frame_->data[VideoFrame::kVPlane]) { 359 if (got_picture == 0) {
331 LOG(ERROR) << "Video frame was produced yet has invalid frame data."; 360 continue;
361 }
362
363 // TODO(fbarchard): Workaround for FFmpeg http://crbug.com/27675
364 // The decoder is in a bad state and not decoding correctly.
365 // Checking for NULL avoids a crash in CopyPlane().
366 if (!av_frame_->data[VideoFrame::kYPlane] ||
367 !av_frame_->data[VideoFrame::kUPlane] ||
368 !av_frame_->data[VideoFrame::kVPlane]) {
369 LOG(ERROR) << "Video frame was produced yet has invalid frame data.";
370 av_frame_unref(av_frame_.get());
371 return false;
372 }
373
374 scoped_refptr<VideoFrame> frame =
375 reinterpret_cast<VideoFrame*>(av_buffer_get_opaque(av_frame_->buf[0]));
376 frame->set_timestamp(
377 base::TimeDelta::FromMicroseconds(av_frame_->reordered_opaque));
378 *has_produced_frame = true;
379 output_cb_.Run(frame);
380
332 av_frame_unref(av_frame_.get()); 381 av_frame_unref(av_frame_.get());
333 return false; 382 } while (remaining_size > 0);
334 }
335
336 scoped_refptr<VideoFrame> frame =
337 reinterpret_cast<VideoFrame*>(av_buffer_get_opaque(av_frame_->buf[0]));
338 frame->set_timestamp(
339 base::TimeDelta::FromMicroseconds(av_frame_->reordered_opaque));
340 *has_produced_frame = true;
341 output_cb_.Run(frame);
342
343 av_frame_unref(av_frame_.get());
344 return true; 383 return true;
345 } 384 }
346 385
347 void FFmpegVideoDecoder::ReleaseFFmpegResources() { 386 void FFmpegVideoDecoder::ReleaseFFmpegResources() {
348 codec_context_.reset(); 387 codec_context_.reset();
349 av_frame_.reset(); 388 av_frame_.reset();
350 } 389 }
351 390
352 bool FFmpegVideoDecoder::ConfigureDecoder(bool low_delay) { 391 bool FFmpegVideoDecoder::ConfigureDecoder(bool low_delay) {
353 DCHECK(config_.IsValidConfig()); 392 DCHECK(config_.IsValidConfig());
354 DCHECK(!config_.is_encrypted()); 393 DCHECK(!config_.is_encrypted());
355 394
356 // Release existing decoder resources if necessary. 395 // Release existing decoder resources if necessary.
357 ReleaseFFmpegResources(); 396 ReleaseFFmpegResources();
358 397
359 // Initialize AVCodecContext structure. 398 // Initialize AVCodecContext structure.
360 codec_context_.reset(avcodec_alloc_context3(NULL)); 399 codec_context_.reset(avcodec_alloc_context3(NULL));
361 VideoDecoderConfigToAVCodecContext(config_, codec_context_.get()); 400 VideoDecoderConfigToAVCodecContext(config_, codec_context_.get());
362 401
363 codec_context_->thread_count = GetThreadCount(codec_context_->codec_id); 402 codec_context_->thread_count = GetThreadCount(config_);
364 codec_context_->thread_type = low_delay ? FF_THREAD_SLICE : FF_THREAD_FRAME; 403 codec_context_->thread_type = low_delay ? FF_THREAD_SLICE : FF_THREAD_FRAME;
365 codec_context_->opaque = this; 404 codec_context_->opaque = this;
366 codec_context_->flags |= CODEC_FLAG_EMU_EDGE; 405 codec_context_->flags |= CODEC_FLAG_EMU_EDGE;
367 codec_context_->get_buffer2 = GetVideoBufferImpl; 406 codec_context_->get_buffer2 = GetVideoBufferImpl;
368 codec_context_->refcounted_frames = 1; 407 codec_context_->refcounted_frames = 1;
369 408
370 if (decode_nalus_) 409 if (decode_nalus_)
371 codec_context_->flags2 |= CODEC_FLAG2_CHUNKS; 410 codec_context_->flags2 |= CODEC_FLAG2_CHUNKS;
372 411
373 AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id); 412 AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id);
374 if (!codec || avcodec_open2(codec_context_.get(), codec, NULL) < 0) { 413 if (!codec || avcodec_open2(codec_context_.get(), codec, NULL) < 0) {
375 ReleaseFFmpegResources(); 414 ReleaseFFmpegResources();
376 return false; 415 return false;
377 } 416 }
378 417
418 LOG(ERROR) << "Using ffmpeg!";
419 LOG(ERROR) << "Configured FFmpeg decoder with thread count:"
420 << codec_context_->thread_count;
421
379 av_frame_.reset(av_frame_alloc()); 422 av_frame_.reset(av_frame_alloc());
380 return true; 423 return true;
381 } 424 }
382 425
383 } // namespace media 426 } // namespace media
OLDNEW
« no previous file with comments | « media/filters/ffmpeg_video_decoder.h ('k') | media/filters/vpx_video_decoder.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698