// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/filters/ffmpeg_video_decoder.h"

#include <algorithm>
#include <string>

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/location.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/strings/string_number_conversions.h"
#include "media/base/bind_to_loop.h"
#include "media/base/decoder_buffer.h"
#include "media/base/demuxer_stream.h"
#include "media/base/limits.h"
#include "media/base/media_switches.h"
#include "media/base/pipeline.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/filters/ffmpeg_glue.h"

namespace media {
// Always try to use two threads for video decoding. There is little reason
// not to since current day CPUs tend to be multi-core and we measured
// performance benefits on older machines such as P4s with hyperthreading.
//
// Handling decoding on separate threads also frees up the pipeline thread to
// continue processing. Although it'd be nice to have the option of a single
// decoding thread, FFmpeg treats having one thread the same as having zero
// threads (i.e., avcodec_decode_video() will execute on the calling thread).
// Yet another reason for having two threads :)
static const int kDecodeThreads = 2;
static const int kMaxDecodeThreads = 16;
// Returns the number of threads given the FFmpeg CodecID. Also inspects the
// command line for a valid --video-threads flag.
static int GetThreadCount(AVCodecID codec_id) {
  // Refer to http://crbug.com/93932 for tsan suppressions on decoding.
  int decode_threads = kDecodeThreads;

  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  std::string threads(cmd_line->GetSwitchValueASCII(switches::kVideoThreads));
  if (threads.empty() || !base::StringToInt(threads, &decode_threads))
    return decode_threads;

  decode_threads = std::max(decode_threads, 0);
  decode_threads = std::min(decode_threads, kMaxDecodeThreads);
  return decode_threads;
}
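// For example, running with "--video-threads=4" makes GetThreadCount()
// return 4, while an absent switch falls back to kDecodeThreads and
// out-of-range values are clamped to [0, kMaxDecodeThreads]. A value of 0 is
// typically treated by FFmpeg as "pick the thread count automatically".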
FFmpegVideoDecoder::FFmpegVideoDecoder(
    const scoped_refptr<base::MessageLoopProxy>& message_loop)
    : message_loop_(message_loop),
      weak_factory_(this),
      state_(kUninitialized),
      codec_context_(NULL),
      av_frame_(NULL),
      demuxer_stream_(NULL) {
}
int FFmpegVideoDecoder::GetVideoBuffer(AVCodecContext* codec_context,
                                       AVFrame* frame) {
  // Don't use |codec_context_| here! With threaded decoding,
  // it will contain unsynchronized width/height/pix_fmt values,
  // whereas |codec_context| contains the current thread's
  // updated width/height/pix_fmt, which can change for adaptive
  // content.
  VideoFrame::Format format = PixelFormatToVideoFormat(codec_context->pix_fmt);
  if (format == VideoFrame::INVALID)
    return AVERROR(EINVAL);
  DCHECK(format == VideoFrame::YV12 || format == VideoFrame::YV16);

  gfx::Size size(codec_context->width, codec_context->height);
  int ret;
  if ((ret = av_image_check_size(size.width(), size.height(), 0, NULL)) < 0)
    return ret;

  gfx::Size natural_size;
  if (codec_context->sample_aspect_ratio.num > 0) {
    natural_size = GetNaturalSize(size,
                                  codec_context->sample_aspect_ratio.num,
                                  codec_context->sample_aspect_ratio.den);
  } else {
    natural_size = demuxer_stream_->video_decoder_config().natural_size();
  }

  if (!VideoFrame::IsValidConfig(format, size, gfx::Rect(size), natural_size))
    return AVERROR(EINVAL);

  scoped_refptr<VideoFrame> video_frame =
      VideoFrame::CreateFrame(format, size, gfx::Rect(size), natural_size,
                              kNoTimestamp());

  for (int i = 0; i < 3; i++) {
    frame->base[i] = video_frame->data(i);
    frame->data[i] = video_frame->data(i);
    frame->linesize[i] = video_frame->stride(i);
  }

  frame->opaque = NULL;
  video_frame.swap(reinterpret_cast<VideoFrame**>(&frame->opaque));
  frame->type = FF_BUFFER_TYPE_USER;
  frame->pkt_pts = codec_context->pkt ? codec_context->pkt->pts :
                                        AV_NOPTS_VALUE;
  frame->width = codec_context->width;
  frame->height = codec_context->height;
  frame->format = codec_context->pix_fmt;

  return 0;
}
static int GetVideoBufferImpl(AVCodecContext* s, AVFrame* frame) {
  FFmpegVideoDecoder* vd = static_cast<FFmpegVideoDecoder*>(s->opaque);
  return vd->GetVideoBuffer(s, frame);
}
static void ReleaseVideoBufferImpl(AVCodecContext* s, AVFrame* frame) {
  scoped_refptr<VideoFrame> video_frame;
  video_frame.swap(reinterpret_cast<VideoFrame**>(&frame->opaque));

  // The FFmpeg API expects us to zero the data pointers in this callback.
  memset(frame->data, 0, sizeof(frame->data));
  frame->opaque = NULL;
}
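// Note on ownership: GetVideoBuffer() hands a VideoFrame reference to FFmpeg
// by swapping the scoped_refptr into |frame->opaque| without releasing it;
// ReleaseVideoBufferImpl() swaps that raw pointer back into a scoped_refptr,
// so the reference taken at allocation time is dropped exactly once when
// FFmpeg is done with the buffer.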
void FFmpegVideoDecoder::Initialize(DemuxerStream* stream,
                                    const PipelineStatusCB& status_cb,
                                    const StatisticsCB& statistics_cb) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  DCHECK(stream);
  DCHECK(read_cb_.is_null());
  DCHECK(reset_cb_.is_null());

  FFmpegGlue::InitializeFFmpeg();
  weak_this_ = weak_factory_.GetWeakPtr();

  demuxer_stream_ = stream;
  statistics_cb_ = statistics_cb;
  PipelineStatusCB initialize_cb = BindToCurrentLoop(status_cb);

  if (!ConfigureDecoder()) {
    initialize_cb.Run(DECODER_ERROR_NOT_SUPPORTED);
    return;
  }

  // Success!
  state_ = kNormal;
  initialize_cb.Run(PIPELINE_OK);
}
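// Usage sketch (hypothetical caller, for illustration only): the decoder is
// driven entirely from |message_loop_|, so a client would do something like
//
//   scoped_ptr<FFmpegVideoDecoder> decoder(
//       new FFmpegVideoDecoder(message_loop));
//   decoder->Initialize(demuxer_stream,
//                       base::Bind(&OnInitDone),     // PipelineStatusCB
//                       base::Bind(&OnStatistics));  // StatisticsCB
//   decoder->Read(base::Bind(&OnVideoFrame));        // one frame per Read()
//
// OnInitDone, OnStatistics and OnVideoFrame are assumed callbacks, and every
// call must be made on the message loop passed to the constructor.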
void FFmpegVideoDecoder::Read(const ReadCB& read_cb) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  DCHECK(!read_cb.is_null());
  CHECK_NE(state_, kUninitialized);
  CHECK(read_cb_.is_null()) << "Overlapping decodes are not supported.";
  read_cb_ = BindToCurrentLoop(read_cb);

  if (state_ == kError) {
    base::ResetAndReturn(&read_cb_).Run(kDecodeError, NULL);
    return;
  }

  // Return empty frames if decoding has finished.
  if (state_ == kDecodeFinished) {
    base::ResetAndReturn(&read_cb_).Run(kOk, VideoFrame::CreateEmptyFrame());
    return;
  }

  ReadFromDemuxerStream();
}
void FFmpegVideoDecoder::Reset(const base::Closure& closure) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  DCHECK(reset_cb_.is_null());
  reset_cb_ = BindToCurrentLoop(closure);

  // Defer the reset if a read is pending.
  if (!read_cb_.is_null())
    return;

  DoReset();
}
void FFmpegVideoDecoder::DoReset() {
  DCHECK(read_cb_.is_null());

  avcodec_flush_buffers(codec_context_);
  state_ = kNormal;
  base::ResetAndReturn(&reset_cb_).Run();
}
void FFmpegVideoDecoder::Stop(const base::Closure& closure) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  base::ScopedClosureRunner runner(BindToCurrentLoop(closure));

  if (state_ == kUninitialized)
    return;

  if (!read_cb_.is_null()) {
    base::ResetAndReturn(&read_cb_).Run(kOk, NULL);
    // Reset is pending only when read is pending.
    if (!reset_cb_.is_null())
      base::ResetAndReturn(&reset_cb_).Run();
  }

  ReleaseFFmpegResources();
  state_ = kUninitialized;
}
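// Note: Stop() satisfies any pending read (with a NULL frame) and any pending
// reset before ReleaseFFmpegResources() runs, so no callback can fire after
// the codec context and frame have been freed.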
FFmpegVideoDecoder::~FFmpegVideoDecoder() {
  DCHECK_EQ(kUninitialized, state_);
  DCHECK(!codec_context_);
  DCHECK(!av_frame_);
}
void FFmpegVideoDecoder::ReadFromDemuxerStream() {
  DCHECK_NE(state_, kUninitialized);
  DCHECK_NE(state_, kDecodeFinished);
  DCHECK_NE(state_, kError);
  DCHECK(!read_cb_.is_null());

  demuxer_stream_->Read(base::Bind(
      &FFmpegVideoDecoder::BufferReady, weak_this_));
}
void FFmpegVideoDecoder::BufferReady(
    DemuxerStream::Status status,
    const scoped_refptr<DecoderBuffer>& buffer) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  DCHECK_NE(state_, kDecodeFinished);
  DCHECK_NE(state_, kError);
  DCHECK_EQ(status != DemuxerStream::kOk, !buffer.get()) << status;

  if (state_ == kUninitialized)
    return;

  DCHECK(!read_cb_.is_null());

  if (!reset_cb_.is_null()) {
    base::ResetAndReturn(&read_cb_).Run(kOk, NULL);
    DoReset();
    return;
  }

  if (status == DemuxerStream::kAborted) {
    base::ResetAndReturn(&read_cb_).Run(kOk, NULL);
    return;
  }

  // VideoFrameStream ensures no kConfigChanged is passed to VideoDecoders.
  DCHECK_EQ(status, DemuxerStream::kOk) << status;
  DecodeBuffer(buffer);
}
void FFmpegVideoDecoder::DecodeBuffer(
    const scoped_refptr<DecoderBuffer>& buffer) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);
  DCHECK_NE(state_, kDecodeFinished);
  DCHECK_NE(state_, kError);
  DCHECK(reset_cb_.is_null());
  DCHECK(!read_cb_.is_null());
  DCHECK(buffer.get());

  // During decode, because reads are issued asynchronously, it is possible to
  // receive multiple end of stream buffers since each read is acked. When the
  // first end of stream buffer is read, FFmpeg may still have frames queued
  // up in the decoder so we need to go through the decode loop until it stops
  // giving sensible data. After that, the decoder should output empty
  // frames. There are four states the decoder can be in:
  //
  //   kNormal: This is the starting state. Buffers are decoded. Decode errors
  //            are discarded.
  //   kFlushCodec: There isn't any more input data. Call avcodec_decode_video2
  //                until no more data is returned to flush out remaining
  //                frames. The input buffer is ignored at this point.
  //   kDecodeFinished: All calls return empty frames.
  //   kError: Unexpected error happened.
  //
  // These are the possible state transitions.
  //
  // kNormal -> kFlushCodec:
  //     When buffer->IsEndOfStream() is first true.
  // kNormal -> kError:
  //     A decoding error occurs and decoding needs to stop.
  // kFlushCodec -> kDecodeFinished:
  //     When avcodec_decode_video2() returns 0 data.
  // kFlushCodec -> kError:
  //     When avcodec_decode_video2() errors out.
  // (any state) -> kNormal:
  //     Any time Reset() is called.
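  //
  // Example walk-through (illustrative): for a run of regular buffers
  // followed by end-of-stream buffers, decoding stays in kNormal until the
  // first end-of-stream buffer arrives (kNormal -> kFlushCodec). Each further
  // end-of-stream buffer drains frames still queued inside FFmpeg; once
  // Decode() produces no frame while in kFlushCodec, the state becomes
  // kDecodeFinished and every subsequent Read() gets an empty frame.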

  // Transition to kFlushCodec on the first end of stream buffer.
  if (state_ == kNormal && buffer->IsEndOfStream()) {
    state_ = kFlushCodec;
  }

  scoped_refptr<VideoFrame> video_frame;
  if (!Decode(buffer, &video_frame)) {
    state_ = kError;
    base::ResetAndReturn(&read_cb_).Run(kDecodeError, NULL);
    return;
  }

  // Any successful decode counts!
  if (!buffer->IsEndOfStream() && buffer->GetDataSize() > 0) {
    PipelineStatistics statistics;
    statistics.video_bytes_decoded = buffer->GetDataSize();
    statistics_cb_.Run(statistics);
  }

  if (!video_frame.get()) {
    if (state_ == kFlushCodec) {
      DCHECK(buffer->IsEndOfStream());
      state_ = kDecodeFinished;
      base::ResetAndReturn(&read_cb_).Run(kOk, VideoFrame::CreateEmptyFrame());
      return;
    }

    ReadFromDemuxerStream();
    return;
  }

  base::ResetAndReturn(&read_cb_).Run(kOk, video_frame);
}
bool FFmpegVideoDecoder::Decode(
    const scoped_refptr<DecoderBuffer>& buffer,
    scoped_refptr<VideoFrame>* video_frame) {
  DCHECK(video_frame);

  // Reset frame to default values.
  avcodec_get_frame_defaults(av_frame_);

  // Create a packet for input data.
  // Due to FFmpeg API changes we no longer have const read-only pointers.
  AVPacket packet;
  av_init_packet(&packet);
  if (buffer->IsEndOfStream()) {
    packet.data = NULL;
    packet.size = 0;
  } else {
    packet.data = const_cast<uint8*>(buffer->GetData());
    packet.size = buffer->GetDataSize();

    // Let FFmpeg handle presentation timestamp reordering.
    codec_context_->reordered_opaque = buffer->GetTimestamp().InMicroseconds();

    // This is for codecs not using get_buffer to initialize
    // |av_frame_->reordered_opaque|.
    av_frame_->reordered_opaque = codec_context_->reordered_opaque;
  }

  int frame_decoded = 0;
  int result = avcodec_decode_video2(codec_context_,
                                     av_frame_,
                                     &frame_decoded,
                                     &packet);
  // Log the problem if we can't decode a video frame and exit early.
  if (result < 0) {
    LOG(ERROR) << "Error decoding video: " << buffer->AsHumanReadableString();
    *video_frame = NULL;
    return false;
  }

  // If no frame was produced then signal that more data is required to
  // produce more frames. This can happen under two circumstances:
  //   1) Decoder was recently initialized/flushed
  //   2) End of stream was reached and all internal frames have been output
  if (frame_decoded == 0) {
    *video_frame = NULL;
    return true;
  }

  // TODO(fbarchard): Workaround for FFmpeg http://crbug.com/27675
  // The decoder is in a bad state and not decoding correctly.
  // Checking for NULL avoids a crash in CopyPlane().
  if (!av_frame_->data[VideoFrame::kYPlane] ||
      !av_frame_->data[VideoFrame::kUPlane] ||
      !av_frame_->data[VideoFrame::kVPlane]) {
    LOG(ERROR) << "Video frame was produced yet has invalid frame data.";
    *video_frame = NULL;
    return false;
  }

  if (!av_frame_->opaque) {
    LOG(ERROR) << "VideoFrame object associated with frame data not set.";
    return false;
  }

  *video_frame = static_cast<VideoFrame*>(av_frame_->opaque);

  (*video_frame)->SetTimestamp(
      base::TimeDelta::FromMicroseconds(av_frame_->reordered_opaque));

  return true;
}
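// Timestamp flow in Decode(): each input buffer's timestamp (in microseconds)
// is stored in |reordered_opaque|, FFmpeg carries that value alongside the
// frame through any internal reordering, and the value that comes back on
// |av_frame_->reordered_opaque| becomes the output VideoFrame's timestamp.
// This keeps presentation timestamps attached to the right frame even when
// decode order differs from display order.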
void FFmpegVideoDecoder::ReleaseFFmpegResources() {
  if (codec_context_) {
    av_free(codec_context_->extradata);
    avcodec_close(codec_context_);
    av_free(codec_context_);
    codec_context_ = NULL;
  }
  if (av_frame_) {
    av_free(av_frame_);
    av_frame_ = NULL;
  }
}
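// Note: ReleaseFFmpegResources() matches the FFmpeg API in use at the time;
// newer FFmpeg releases would free these objects with avcodec_free_context()
// and av_frame_free() rather than avcodec_close()/av_free().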
bool FFmpegVideoDecoder::ConfigureDecoder() {
  const VideoDecoderConfig& config = demuxer_stream_->video_decoder_config();

  if (!config.IsValidConfig()) {
    DLOG(ERROR) << "Invalid video stream - " << config.AsHumanReadableString();
    return false;
  }

  if (config.is_encrypted()) {
    DLOG(ERROR) << "Encrypted video stream not supported.";
    return false;
  }

  // Release existing decoder resources if necessary.
  ReleaseFFmpegResources();

  // Initialize AVCodecContext structure.
  codec_context_ = avcodec_alloc_context3(NULL);
  VideoDecoderConfigToAVCodecContext(config, codec_context_);

  // Enable motion vector search (potentially slow), strong deblocking filter
  // for damaged macroblocks, and set our error detection sensitivity.
  codec_context_->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK;
  codec_context_->thread_count = GetThreadCount(codec_context_->codec_id);
  codec_context_->opaque = this;
  codec_context_->flags |= CODEC_FLAG_EMU_EDGE;
  codec_context_->get_buffer = GetVideoBufferImpl;
  codec_context_->release_buffer = ReleaseVideoBufferImpl;

  AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id);
  if (!codec || avcodec_open2(codec_context_, codec, NULL) < 0) {
    ReleaseFFmpegResources();
    return false;
  }

  av_frame_ = avcodec_alloc_frame();
  return true;
}
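// Note: the get_buffer/release_buffer callbacks installed in
// ConfigureDecoder() belong to the FFmpeg custom-allocator API of this era;
// later FFmpeg versions replace the pair with a single refcounted
// get_buffer2 callback. The |opaque| pointer set above is what lets the
// static GetVideoBufferImpl() trampoline route back to this decoder instance.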

}  // namespace media