Chromium Code Reviews

Unified diff: webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc

Issue 1900673002: Delete webrtc::VideoFrame methods buffer and stride. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Rebase. Created 4 years, 7 months ago
/*
 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 *
 */
(...skipping 111 matching lines...)
  }

  // The video frame is stored in |video_frame|. |av_frame| is FFmpeg's version
  // of a video frame and will be set up to reference |video_frame|'s buffers.
  VideoFrame* video_frame = new VideoFrame();
  // FFmpeg expects the initial allocation to be zero-initialized according to
  // http://crbug.com/390941. Our pool is set up to zero-initialize new buffers.
  video_frame->set_video_frame_buffer(
      decoder->pool_.CreateBuffer(width, height));
  // DCHECK that we have a continuous buffer as is required.
-  RTC_DCHECK_EQ(video_frame->buffer(kUPlane),
-      video_frame->buffer(kYPlane) + video_frame->allocated_size(kYPlane));
-  RTC_DCHECK_EQ(video_frame->buffer(kVPlane),
-      video_frame->buffer(kUPlane) + video_frame->allocated_size(kUPlane));
+  RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
+                video_frame->video_frame_buffer()->DataY() +
+                video_frame->allocated_size(kYPlane));
+  RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataV(),
+                video_frame->video_frame_buffer()->DataU() +
+                video_frame->allocated_size(kUPlane));
  int total_size = video_frame->allocated_size(kYPlane) +
                   video_frame->allocated_size(kUPlane) +
                   video_frame->allocated_size(kVPlane);

  av_frame->format = context->pix_fmt;
  av_frame->reordered_opaque = context->reordered_opaque;

  // Set |av_frame| members as required by FFmpeg.
-  av_frame->data[kYPlaneIndex] = video_frame->buffer(kYPlane);
-  av_frame->linesize[kYPlaneIndex] = video_frame->stride(kYPlane);
-  av_frame->data[kUPlaneIndex] = video_frame->buffer(kUPlane);
-  av_frame->linesize[kUPlaneIndex] = video_frame->stride(kUPlane);
-  av_frame->data[kVPlaneIndex] = video_frame->buffer(kVPlane);
-  av_frame->linesize[kVPlaneIndex] = video_frame->stride(kVPlane);
+  av_frame->data[kYPlaneIndex] =
+      video_frame->video_frame_buffer()->MutableDataY();
+  av_frame->linesize[kYPlaneIndex] =
+      video_frame->video_frame_buffer()->StrideY();
+  av_frame->data[kUPlaneIndex] =
+      video_frame->video_frame_buffer()->MutableDataU();
+  av_frame->linesize[kUPlaneIndex] =
+      video_frame->video_frame_buffer()->StrideU();
+  av_frame->data[kVPlaneIndex] =
+      video_frame->video_frame_buffer()->MutableDataV();
+  av_frame->linesize[kVPlaneIndex] =
+      video_frame->video_frame_buffer()->StrideV();
  RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data);

  av_frame->buf[0] = av_buffer_create(av_frame->data[kYPlaneIndex],
                                      total_size,
                                      AVFreeBuffer2,
                                      static_cast<void*>(video_frame),
                                      0);
  RTC_CHECK(av_frame->buf[0]);
  return 0;
}
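Note (reviewer illustration, not part of the patch): the hunks above are the core of this CL. The removed VideoFrame::buffer(plane) and VideoFrame::stride(plane) accessors are replaced by the VideoFrameBuffer getters, with the Mutable variants used where FFmpeg needs writable pointers. A minimal sketch of the mapping, assuming a frame backed by an I420 buffer; the function name and locals are illustrative only:

// Illustration: old per-plane accessors vs. the new VideoFrameBuffer getters.
void DescribePlanes(const webrtc::VideoFrame& frame) {
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> vfb = frame.video_frame_buffer();
  const uint8_t* y = vfb->DataY();    // was frame.buffer(kYPlane)
  const uint8_t* u = vfb->DataU();    // was frame.buffer(kUPlane)
  const uint8_t* v = vfb->DataV();    // was frame.buffer(kVPlane)
  int y_stride = vfb->StrideY();      // was frame.stride(kYPlane)
  int u_stride = vfb->StrideU();      // was frame.stride(kUPlane)
  int v_stride = vfb->StrideV();      // was frame.stride(kVPlane)
  // Writable pointers (e.g. for av_frame->data) go through MutableData*():
  // uint8_t* y_writable = vfb->MutableDataY();
}

The continuity DCHECKs still express the same invariant with the new getters: for the pooled buffer, DataU() == DataY() + allocated_size(kYPlane) and DataV() == DataU() + allocated_size(kUPlane), which is what allows a single av_buffer of total_size to cover all three planes.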
(...skipping 172 matching lines...)
  if (!frame_decoded) {
    LOG(LS_WARNING) << "avcodec_decode_video2 successful but no frame was "
                       "decoded.";
    return WEBRTC_VIDEO_CODEC_OK;
  }

  // Obtain the |video_frame| containing the decoded image.
  VideoFrame* video_frame = static_cast<VideoFrame*>(
      av_buffer_get_opaque(av_frame_->buf[0]));
  RTC_DCHECK(video_frame);
-  RTC_CHECK_EQ(av_frame_->data[kYPlane], video_frame->buffer(kYPlane));
-  RTC_CHECK_EQ(av_frame_->data[kUPlane], video_frame->buffer(kUPlane));
-  RTC_CHECK_EQ(av_frame_->data[kVPlane], video_frame->buffer(kVPlane));
+  RTC_CHECK_EQ(av_frame_->data[kYPlane],
+               video_frame->video_frame_buffer()->DataY());
+  RTC_CHECK_EQ(av_frame_->data[kUPlane],
+               video_frame->video_frame_buffer()->DataU());
+  RTC_CHECK_EQ(av_frame_->data[kVPlane],
+               video_frame->video_frame_buffer()->DataV());
  video_frame->set_timestamp(input_image._timeStamp);

  // The decoded image may be larger than what is supposed to be visible, see
  // |AVGetBuffer2|'s use of |avcodec_align_dimensions|. This crops the image
  // without copying the underlying buffer.
  rtc::scoped_refptr<VideoFrameBuffer> buf = video_frame->video_frame_buffer();
  if (av_frame_->width != buf->width() || av_frame_->height != buf->height()) {
    video_frame->set_video_frame_buffer(
        new rtc::RefCountedObject<WrappedI420Buffer>(
            av_frame_->width, av_frame_->height,
-            buf->data(kYPlane), buf->stride(kYPlane),
-            buf->data(kUPlane), buf->stride(kUPlane),
-            buf->data(kVPlane), buf->stride(kVPlane),
+            buf->DataY(), buf->StrideY(),
+            buf->DataU(), buf->StrideU(),
+            buf->DataV(), buf->StrideV(),
            rtc::KeepRefUntilDone(buf)));
  }

  // Return decoded frame.
  int32_t ret = decoded_image_callback_->Decoded(*video_frame);
  // Stop referencing it, possibly freeing |video_frame|.
  av_frame_unref(av_frame_.get());
  video_frame = nullptr;

  if (ret) {
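Note (reviewer illustration, not part of the patch): the "possibly freeing |video_frame|" comment above works because the frame is owned by the AVBufferRef installed in AVGetBuffer2(): av_buffer_create() stores |video_frame| as the buffer's opaque pointer and AVFreeBuffer2 is its free callback. A minimal sketch of that lifetime, assuming AVFreeBuffer2 simply deletes the opaque VideoFrame* (its body is outside the excerpt shown here):

// Free callback registered with av_buffer_create(); FFmpeg invokes it when
// the last reference to the plane memory is dropped.
void AVFreeBuffer2(void* opaque, uint8_t* data) {
  VideoFrame* video_frame = static_cast<VideoFrame*>(opaque);
  delete video_frame;  // also drops the frame's ref on the pooled I420 buffer
}

// In Decode(), after |decoded_image_callback_| has consumed the frame:
av_frame_unref(av_frame_.get());  // releases av_frame_->buf[0]; if that was
                                  // the last reference, AVFreeBuffer2 runs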
(...skipping 19 matching lines...)
void H264DecoderImpl::ReportError() {
  if (has_reported_error_)
    return;
  RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H264DecoderImpl.Event",
                            kH264DecoderEventError,
                            kH264DecoderEventMax);
  has_reported_error_ = true;
}

}  // namespace webrtc
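Note (reviewer illustration, not part of the patch): the cropping hunk in Decode() relies on WrappedI420Buffer referencing the decoder's planes rather than copying them, with rtc::KeepRefUntilDone(buf) keeping the original buffer alive for as long as the wrapper is referenced. A minimal sketch of the same pattern using the new accessors; visible_width and visible_height are hypothetical stand-ins for the visible dimensions:

// Crop |buf| to the visible rectangle without copying any pixel data.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> cropped =
    new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
        visible_width, visible_height,   // e.g. av_frame_->width/height
        buf->DataY(), buf->StrideY(),
        buf->DataU(), buf->StrideU(),
        buf->DataV(), buf->StrideV(),
        rtc::KeepRefUntilDone(buf));     // hold |buf| until |cropped| is gone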