Chromium Code Reviews

Unified Diff: webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc

Issue 1935443002: Revert of Delete webrtc::VideoFrame methods buffer and stride. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Created 4 years, 7 months ago
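For readers unfamiliar with the API in question: this CL restores the per-plane VideoFrame::buffer()/stride() accessors that an earlier change had deleted in favour of going through VideoFrameBuffer. A minimal usage sketch against the post-revert headers follows; the AverageLuma helper and the include paths are illustrative only, not part of this CL.

// Illustrative only -- AverageLuma() is a hypothetical helper, not code from
// this CL. It shows the VideoFrame::buffer()/stride() accessors restored by
// the revert; include paths approximate the WebRTC tree of that era.
#include <stdint.h>

#include "webrtc/common_types.h"  // webrtc::kYPlane
#include "webrtc/video_frame.h"   // webrtc::VideoFrame

double AverageLuma(const webrtc::VideoFrame& frame) {
  const uint8_t* y = frame.buffer(webrtc::kYPlane);  // base of the Y plane
  const int stride = frame.stride(webrtc::kYPlane);  // bytes per pixel row
  uint64_t sum = 0;
  for (int row = 0; row < frame.height(); ++row) {
    for (int col = 0; col < frame.width(); ++col)
      sum += y[row * stride + col];
  }
  // Against the pre-revert API, the same pointers would come from
  // frame.video_frame_buffer()->DataY() / ->StrideY().
  return static_cast<double>(sum) / (frame.width() * frame.height());
}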
 /*
  * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  *
  */
(...skipping 111 matching lines...)
   }
 
   // The video frame is stored in |video_frame|. |av_frame| is FFmpeg's version
   // of a video frame and will be set up to reference |video_frame|'s buffers.
   VideoFrame* video_frame = new VideoFrame();
   // FFmpeg expects the initial allocation to be zero-initialized according to
   // http://crbug.com/390941. Our pool is set up to zero-initialize new buffers.
   video_frame->set_video_frame_buffer(
       decoder->pool_.CreateBuffer(width, height));
   // DCHECK that we have a continuous buffer as is required.
-  RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
-                video_frame->video_frame_buffer()->DataY() +
-                    video_frame->allocated_size(kYPlane));
-  RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataV(),
-                video_frame->video_frame_buffer()->DataU() +
-                    video_frame->allocated_size(kUPlane));
+  RTC_DCHECK_EQ(video_frame->buffer(kUPlane),
+      video_frame->buffer(kYPlane) + video_frame->allocated_size(kYPlane));
+  RTC_DCHECK_EQ(video_frame->buffer(kVPlane),
+      video_frame->buffer(kUPlane) + video_frame->allocated_size(kUPlane));
   int total_size = video_frame->allocated_size(kYPlane) +
                    video_frame->allocated_size(kUPlane) +
                    video_frame->allocated_size(kVPlane);
 
   av_frame->format = context->pix_fmt;
   av_frame->reordered_opaque = context->reordered_opaque;
 
   // Set |av_frame| members as required by FFmpeg.
-  av_frame->data[kYPlaneIndex] =
-      video_frame->video_frame_buffer()->MutableDataY();
-  av_frame->linesize[kYPlaneIndex] =
-      video_frame->video_frame_buffer()->StrideY();
-  av_frame->data[kUPlaneIndex] =
-      video_frame->video_frame_buffer()->MutableDataU();
-  av_frame->linesize[kUPlaneIndex] =
-      video_frame->video_frame_buffer()->StrideU();
-  av_frame->data[kVPlaneIndex] =
-      video_frame->video_frame_buffer()->MutableDataV();
-  av_frame->linesize[kVPlaneIndex] =
-      video_frame->video_frame_buffer()->StrideV();
+  av_frame->data[kYPlaneIndex] = video_frame->buffer(kYPlane);
+  av_frame->linesize[kYPlaneIndex] = video_frame->stride(kYPlane);
+  av_frame->data[kUPlaneIndex] = video_frame->buffer(kUPlane);
+  av_frame->linesize[kUPlaneIndex] = video_frame->stride(kUPlane);
+  av_frame->data[kVPlaneIndex] = video_frame->buffer(kVPlane);
+  av_frame->linesize[kVPlaneIndex] = video_frame->stride(kVPlane);
   RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data);
 
   av_frame->buf[0] = av_buffer_create(av_frame->data[kYPlaneIndex],
                                       total_size,
                                       AVFreeBuffer2,
                                       static_cast<void*>(video_frame),
                                       0);
   RTC_CHECK(av_frame->buf[0]);
   return 0;
 }
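A note on ownership in the hunk above: av_buffer_create() ties the lifetime of the heap-allocated VideoFrame to FFmpeg's reference counting. FFmpeg decodes directly into the WebRTC-owned buffer and, when the last AVBufferRef is released, invokes the free callback with the opaque pointer supplied here. AVFreeBuffer2 itself is outside the hunks shown on this page; a callback matching av_buffer_create()'s expected signature would presumably look roughly like this sketch:

// Sketch only; the real AVFreeBuffer2 is defined elsewhere in this file.
#include <stdint.h>

#include "webrtc/video_frame.h"

void AVFreeBuffer2(void* opaque, uint8_t* data) {
  // |opaque| is the VideoFrame* passed to av_buffer_create() in AVGetBuffer2.
  // Deleting it drops the reference to the pooled I420 buffer.
  webrtc::VideoFrame* video_frame = static_cast<webrtc::VideoFrame*>(opaque);
  delete video_frame;
}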
(...skipping 172 matching lines...)
   if (!frame_decoded) {
     LOG(LS_WARNING) << "avcodec_decode_video2 successful but no frame was "
                        "decoded.";
     return WEBRTC_VIDEO_CODEC_OK;
   }
 
   // Obtain the |video_frame| containing the decoded image.
   VideoFrame* video_frame = static_cast<VideoFrame*>(
       av_buffer_get_opaque(av_frame_->buf[0]));
   RTC_DCHECK(video_frame);
-  RTC_CHECK_EQ(av_frame_->data[kYPlane],
-               video_frame->video_frame_buffer()->DataY());
-  RTC_CHECK_EQ(av_frame_->data[kUPlane],
-               video_frame->video_frame_buffer()->DataU());
-  RTC_CHECK_EQ(av_frame_->data[kVPlane],
-               video_frame->video_frame_buffer()->DataV());
+  RTC_CHECK_EQ(av_frame_->data[kYPlane], video_frame->buffer(kYPlane));
+  RTC_CHECK_EQ(av_frame_->data[kUPlane], video_frame->buffer(kUPlane));
+  RTC_CHECK_EQ(av_frame_->data[kVPlane], video_frame->buffer(kVPlane));
   video_frame->set_timestamp(input_image._timeStamp);
 
   // The decoded image may be larger than what is supposed to be visible, see
   // |AVGetBuffer2|'s use of |avcodec_align_dimensions|. This crops the image
   // without copying the underlying buffer.
   rtc::scoped_refptr<VideoFrameBuffer> buf = video_frame->video_frame_buffer();
   if (av_frame_->width != buf->width() || av_frame_->height != buf->height()) {
     video_frame->set_video_frame_buffer(
         new rtc::RefCountedObject<WrappedI420Buffer>(
             av_frame_->width, av_frame_->height,
-            buf->DataY(), buf->StrideY(),
-            buf->DataU(), buf->StrideU(),
-            buf->DataV(), buf->StrideV(),
+            buf->data(kYPlane), buf->stride(kYPlane),
+            buf->data(kUPlane), buf->stride(kUPlane),
+            buf->data(kVPlane), buf->stride(kVPlane),
             rtc::KeepRefUntilDone(buf)));
   }
 
   // Return decoded frame.
   int32_t ret = decoded_image_callback_->Decoded(*video_frame);
   // Stop referencing it, possibly freeing |video_frame|.
   av_frame_unref(av_frame_.get());
   video_frame = nullptr;
 
   if (ret) {
(...skipping 19 matching lines...)
 void H264DecoderImpl::ReportError() {
   if (has_reported_error_)
     return;
   RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H264DecoderImpl.Event",
                             kH264DecoderEventError,
                             kH264DecoderEventMax);
   has_reported_error_ = true;
 }
 
 }  // namespace webrtc
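One design note on the cropping path in Decode() above: FFmpeg may allocate a frame that is larger than the visible picture (see the use of avcodec_align_dimensions in AVGetBuffer2), so the decoded buffer is wrapped rather than copied. The same pattern, pulled out into a hypothetical helper against the post-revert API, is sketched below; the CropToVisibleSize name and the include paths are illustrative, not from this CL.

#include "webrtc/base/keep_ref_until_done.h"
#include "webrtc/base/refcount.h"
#include "webrtc/common_types.h"
#include "webrtc/common_video/include/video_frame_buffer.h"

// Hypothetical helper, not part of this CL: re-exposes an existing I420
// buffer with smaller (visible) dimensions without copying pixel data.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> CropToVisibleSize(
    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buf,
    int visible_width,
    int visible_height) {
  // WrappedI420Buffer reuses |buf|'s plane pointers and strides; only the
  // reported width/height change. KeepRefUntilDone() holds a reference to
  // |buf| until the wrapper itself is destroyed.
  return new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
      visible_width, visible_height,
      buf->data(webrtc::kYPlane), buf->stride(webrtc::kYPlane),
      buf->data(webrtc::kUPlane), buf->stride(webrtc::kUPlane),
      buf->data(webrtc::kVPlane), buf->stride(webrtc::kVPlane),
      rtc::KeepRefUntilDone(buf));
}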