Index: webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
index f560a37d0ece018524675795dbc49e17e867ed69..e98666d073695016e89a48600ae863d53a768a87 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -129,12 +129,10 @@
   video_frame->set_video_frame_buffer(
       decoder->pool_.CreateBuffer(width, height));
   // DCHECK that we have a continuous buffer as is required.
-  RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
-                video_frame->video_frame_buffer()->DataY() +
-                video_frame->allocated_size(kYPlane));
-  RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataV(),
-                video_frame->video_frame_buffer()->DataU() +
-                video_frame->allocated_size(kUPlane));
+  RTC_DCHECK_EQ(video_frame->buffer(kUPlane),
+      video_frame->buffer(kYPlane) + video_frame->allocated_size(kYPlane));
+  RTC_DCHECK_EQ(video_frame->buffer(kVPlane),
+      video_frame->buffer(kUPlane) + video_frame->allocated_size(kUPlane));
   int total_size = video_frame->allocated_size(kYPlane) +
                    video_frame->allocated_size(kUPlane) +
                    video_frame->allocated_size(kVPlane);
@@ -143,18 +141,12 @@
   av_frame->reordered_opaque = context->reordered_opaque;
 
   // Set |av_frame| members as required by FFmpeg.
-  av_frame->data[kYPlaneIndex] =
-      video_frame->video_frame_buffer()->MutableDataY();
-  av_frame->linesize[kYPlaneIndex] =
-      video_frame->video_frame_buffer()->StrideY();
-  av_frame->data[kUPlaneIndex] =
-      video_frame->video_frame_buffer()->MutableDataU();
-  av_frame->linesize[kUPlaneIndex] =
-      video_frame->video_frame_buffer()->StrideU();
-  av_frame->data[kVPlaneIndex] =
-      video_frame->video_frame_buffer()->MutableDataV();
-  av_frame->linesize[kVPlaneIndex] =
-      video_frame->video_frame_buffer()->StrideV();
+  av_frame->data[kYPlaneIndex] = video_frame->buffer(kYPlane);
+  av_frame->linesize[kYPlaneIndex] = video_frame->stride(kYPlane);
+  av_frame->data[kUPlaneIndex] = video_frame->buffer(kUPlane);
+  av_frame->linesize[kUPlaneIndex] = video_frame->stride(kUPlane);
+  av_frame->data[kVPlaneIndex] = video_frame->buffer(kVPlane);
+  av_frame->linesize[kVPlaneIndex] = video_frame->stride(kVPlane);
   RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data);
 
   av_frame->buf[0] = av_buffer_create(av_frame->data[kYPlaneIndex],
@@ -347,12 +339,9 @@
   VideoFrame* video_frame = static_cast<VideoFrame*>(
       av_buffer_get_opaque(av_frame_->buf[0]));
   RTC_DCHECK(video_frame);
-  RTC_CHECK_EQ(av_frame_->data[kYPlane],
-               video_frame->video_frame_buffer()->DataY());
-  RTC_CHECK_EQ(av_frame_->data[kUPlane],
-               video_frame->video_frame_buffer()->DataU());
-  RTC_CHECK_EQ(av_frame_->data[kVPlane],
-               video_frame->video_frame_buffer()->DataV());
+  RTC_CHECK_EQ(av_frame_->data[kYPlane], video_frame->buffer(kYPlane));
+  RTC_CHECK_EQ(av_frame_->data[kUPlane], video_frame->buffer(kUPlane));
+  RTC_CHECK_EQ(av_frame_->data[kVPlane], video_frame->buffer(kVPlane));
   video_frame->set_timestamp(input_image._timeStamp);
 
   // The decoded image may be larger than what is supposed to be visible, see
@@ -363,9 +352,9 @@
     video_frame->set_video_frame_buffer(
         new rtc::RefCountedObject<WrappedI420Buffer>(
            av_frame_->width, av_frame_->height,
-           buf->DataY(), buf->StrideY(),
-           buf->DataU(), buf->StrideU(),
-           buf->DataV(), buf->StrideV(),
+           buf->data(kYPlane), buf->stride(kYPlane),
+           buf->data(kUPlane), buf->stride(kUPlane),
+           buf->data(kVPlane), buf->stride(kVPlane),
            rtc::KeepRefUntilDone(buf)));
   }
 
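
Note on the invariant these hunks touch: the RTC_DCHECK_EQ/RTC_CHECK_EQ pairs assert that the pool-created I420 buffer is one contiguous allocation (the U plane starts where Y ends, and V starts where U ends), because av_buffer_create() hands FFmpeg a single AVBufferRef of total_size bytes spanning all three planes. Below is a minimal standalone sketch of that layout, assuming 4:2:0 chroma subsampling as in the decoder; it is plain C++, not WebRTC code, and the ContiguousI420 name and its members are illustrative only.

// Standalone sketch (not WebRTC code) of the contiguity invariant the
// RTC_DCHECKs in the patch assert.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

struct ContiguousI420 {
  // One allocation; plane pointers are offsets into it, mirroring the
  // pool-created buffer the decoder passes to av_buffer_create().
  std::vector<uint8_t> data;
  uint8_t* y;
  uint8_t* u;
  uint8_t* v;
  size_t y_size, u_size, v_size;

  ContiguousI420(int width, int height)
      : data(static_cast<size_t>(width) * height * 3 / 2),
        y_size(static_cast<size_t>(width) * height),
        u_size(y_size / 4),   // 4:2:0 - each chroma plane is a quarter of Y.
        v_size(y_size / 4) {
    y = data.data();
    u = y + y_size;  // U plane immediately follows Y.
    v = u + u_size;  // V plane immediately follows U.
  }
};

int main() {
  ContiguousI420 frame(640, 480);
  // The same relations the patch expresses with RTC_DCHECK_EQ.
  assert(frame.u == frame.y + frame.y_size);
  assert(frame.v == frame.u + frame.u_size);
  // total_size as computed in the patch before av_buffer_create().
  size_t total_size = frame.y_size + frame.u_size + frame.v_size;
  assert(total_size == frame.data.size());
  return 0;
}

If any plane were allocated separately, the pointer arithmetic above (and FFmpeg's single-buffer ownership of the frame) would break, which is why the decoder checks the layout rather than assuming it.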