| OLD | NEW |
| --- | --- |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include "webrtc/video_frame.h" | 11 #include "webrtc/video_frame.h" |
| 12 | 12 |
| 13 #include <string.h> | 13 #include <string.h> |
| 14 | 14 |
| 15 #include <algorithm> // swap | 15 #include <algorithm> // swap |
| 16 | 16 |
| 17 #include "webrtc/base/bind.h" | 17 #include "webrtc/base/bind.h" |
| 18 #include "webrtc/base/checks.h" | 18 #include "webrtc/base/checks.h" |
| 19 | 19 |
| 20 namespace webrtc { | 20 namespace webrtc { |
| 21 | 21 |
| 22 // FFmpeg's decoder, used by H264DecoderImpl, requires up to 8 bytes padding due | 22 // FFmpeg's decoder, used by H264DecoderImpl, requires up to 8 bytes padding due |
| 23 // to optimized bitstream readers. See avcodec_decode_video2. | 23 // to optimized bitstream readers. See avcodec_decode_video2. |
| 24 const size_t EncodedImage::kBufferPaddingBytesH264 = 8; | 24 const size_t EncodedImage::kBufferPaddingBytesH264 = 8; |
| 25 | 25 |
| 26 bool EqualPlane(const uint8_t* data1, | |
| 27 const uint8_t* data2, | |
| 28 int stride, | |
| 29 int width, | |
| 30 int height) { | |
| 31 for (int y = 0; y < height; ++y) { | |
| 32 if (memcmp(data1, data2, width) != 0) | |
| 33 return false; | |
| 34 data1 += stride; | |
| 35 data2 += stride; | |
| 36 } | |
| 37 return true; | |
| 38 } | |
| 39 | |
| 40 int ExpectedSize(int plane_stride, int image_height, PlaneType type) { | 26 int ExpectedSize(int plane_stride, int image_height, PlaneType type) { |
| 41 if (type == kYPlane) | 27 if (type == kYPlane) |
| 42 return plane_stride * image_height; | 28 return plane_stride * image_height; |
| 43 return plane_stride * ((image_height + 1) / 2); | 29 return plane_stride * ((image_height + 1) / 2); |
| 44 } | 30 } |
| 45 | 31 |
| 46 VideoFrame::VideoFrame() { | 32 VideoFrame::VideoFrame() { |
| 47 // Intentionally using Reset instead of initializer list so that any missed | 33 // Intentionally using Reset instead of initializer list so that any missed |
| 48 // fields in Reset will be caught by memory checkers. | 34 // fields in Reset will be caught by memory checkers. |
| 49 Reset(); | 35 Reset(); |
| 50 } | 36 } |
| 51 | 37 |
| 52 VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer, | 38 VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer, |
| 53 uint32_t timestamp, | 39 uint32_t timestamp, |
| 54 int64_t render_time_ms, | 40 int64_t render_time_ms, |
| 55 VideoRotation rotation) | 41 VideoRotation rotation) |
| 56 : video_frame_buffer_(buffer), | 42 : video_frame_buffer_(buffer), |
| 57 timestamp_(timestamp), | 43 timestamp_(timestamp), |
| 58 ntp_time_ms_(0), | 44 ntp_time_ms_(0), |
| 59 render_time_ms_(render_time_ms), | 45 render_time_ms_(render_time_ms), |
| 60 rotation_(rotation) { | 46 rotation_(rotation) { |
| 61 } | 47 } |
| 62 | 48 |
| 63 int VideoFrame::CreateEmptyFrame(int width, | 49 void VideoFrame::CreateEmptyFrame(int width, |
| 64 int height, | 50 int height, |
| 65 int stride_y, | 51 int stride_y, |
| 66 int stride_u, | 52 int stride_u, |
| 67 int stride_v) { | 53 int stride_v) { |
| 68 const int half_width = (width + 1) / 2; | 54 const int half_width = (width + 1) / 2; |
| 69 RTC_DCHECK_GT(width, 0); | 55 RTC_DCHECK_GT(width, 0); |
| 70 RTC_DCHECK_GT(height, 0); | 56 RTC_DCHECK_GT(height, 0); |
| 71 RTC_DCHECK_GE(stride_y, width); | 57 RTC_DCHECK_GE(stride_y, width); |
| 72 RTC_DCHECK_GE(stride_u, half_width); | 58 RTC_DCHECK_GE(stride_u, half_width); |
| 73 RTC_DCHECK_GE(stride_v, half_width); | 59 RTC_DCHECK_GE(stride_v, half_width); |
| 74 | 60 |
| 75 // Creating empty frame - reset all values. | 61 // Creating empty frame - reset all values. |
| 76 timestamp_ = 0; | 62 timestamp_ = 0; |
| 77 ntp_time_ms_ = 0; | 63 ntp_time_ms_ = 0; |
| 78 render_time_ms_ = 0; | 64 render_time_ms_ = 0; |
| 79 rotation_ = kVideoRotation_0; | 65 rotation_ = kVideoRotation_0; |
| 80 | 66 |
| 81 // Check if it's safe to reuse allocation. | 67 // Check if it's safe to reuse allocation. |
| 82 if (video_frame_buffer_ && video_frame_buffer_->HasOneRef() && | 68 if (video_frame_buffer_ && video_frame_buffer_->HasOneRef() && |
| 83 !video_frame_buffer_->native_handle() && | 69 !video_frame_buffer_->native_handle() && |
| 84 width == video_frame_buffer_->width() && | 70 width == video_frame_buffer_->width() && |
| 85 height == video_frame_buffer_->height() && stride_y == stride(kYPlane) && | 71 height == video_frame_buffer_->height() && stride_y == stride(kYPlane) && |
| 86 stride_u == stride(kUPlane) && stride_v == stride(kVPlane)) { | 72 stride_u == stride(kUPlane) && stride_v == stride(kVPlane)) { |
| 87 return 0; | 73 return; |
| 88 } | 74 } |
| 89 | 75 |
| 90 // Need to allocate new buffer. | 76 // Need to allocate new buffer. |
| 91 video_frame_buffer_ = new rtc::RefCountedObject<I420Buffer>( | 77 video_frame_buffer_ = new rtc::RefCountedObject<I420Buffer>( |
| 92 width, height, stride_y, stride_u, stride_v); | 78 width, height, stride_y, stride_u, stride_v); |
| 93 return 0; | |
| 94 } | 79 } |
| 95 | 80 |
| 96 int VideoFrame::CreateFrame(const uint8_t* buffer_y, | 81 void VideoFrame::CreateFrame(const uint8_t* buffer_y, |
| 97 const uint8_t* buffer_u, | 82 const uint8_t* buffer_u, |
| 98 const uint8_t* buffer_v, | 83 const uint8_t* buffer_v, |
| 99 int width, | 84 int width, |
| 100 int height, | 85 int height, |
| 101 int stride_y, | 86 int stride_y, |
| 102 int stride_u, | 87 int stride_u, |
| 103 int stride_v) { | 88 int stride_v, |
| 104 return CreateFrame(buffer_y, buffer_u, buffer_v, width, height, stride_y, | 89 VideoRotation rotation) { |
| 105 stride_u, stride_v, kVideoRotation_0); | |
| 106 } | |
| 107 | |
| 108 int VideoFrame::CreateFrame(const uint8_t* buffer_y, | |
| 109 const uint8_t* buffer_u, | |
| 110 const uint8_t* buffer_v, | |
| 111 int width, | |
| 112 int height, | |
| 113 int stride_y, | |
| 114 int stride_u, | |
| 115 int stride_v, | |
| 116 VideoRotation rotation) { | |
| 117 const int half_height = (height + 1) / 2; | 90 const int half_height = (height + 1) / 2; |
| 118 const int expected_size_y = height * stride_y; | 91 const int expected_size_y = height * stride_y; |
| 119 const int expected_size_u = half_height * stride_u; | 92 const int expected_size_u = half_height * stride_u; |
| 120 const int expected_size_v = half_height * stride_v; | 93 const int expected_size_v = half_height * stride_v; |
| 121 CreateEmptyFrame(width, height, stride_y, stride_u, stride_v); | 94 CreateEmptyFrame(width, height, stride_y, stride_u, stride_v); |
| 122 memcpy(buffer(kYPlane), buffer_y, expected_size_y); | 95 memcpy(buffer(kYPlane), buffer_y, expected_size_y); |
| 123 memcpy(buffer(kUPlane), buffer_u, expected_size_u); | 96 memcpy(buffer(kUPlane), buffer_u, expected_size_u); |
| 124 memcpy(buffer(kVPlane), buffer_v, expected_size_v); | 97 memcpy(buffer(kVPlane), buffer_v, expected_size_v); |
| 125 rotation_ = rotation; | 98 rotation_ = rotation; |
| 126 return 0; | |
| 127 } | 99 } |
| 128 | 100 |
| 129 int VideoFrame::CreateFrame(const uint8_t* buffer, | 101 void VideoFrame::CreateFrame(const uint8_t* buffer, |
| 130 int width, | 102 int width, |
| 131 int height, | 103 int height, |
| 132 VideoRotation rotation) { | 104 VideoRotation rotation) { |
| 133 const int stride_y = width; | 105 const int stride_y = width; |
| 134 const int stride_uv = (width + 1) / 2; | 106 const int stride_uv = (width + 1) / 2; |
| 135 | 107 |
| 136 const uint8_t* buffer_y = buffer; | 108 const uint8_t* buffer_y = buffer; |
| 137 const uint8_t* buffer_u = buffer_y + stride_y * height; | 109 const uint8_t* buffer_u = buffer_y + stride_y * height; |
| 138 const uint8_t* buffer_v = buffer_u + stride_uv * ((height + 1) / 2); | 110 const uint8_t* buffer_v = buffer_u + stride_uv * ((height + 1) / 2); |
| 139 return CreateFrame(buffer_y, buffer_u, buffer_v, width, height, stride_y, | 111 CreateFrame(buffer_y, buffer_u, buffer_v, width, height, stride_y, |
| 140 stride_uv, stride_uv, rotation); | 112 stride_uv, stride_uv, rotation); |
| 141 } | 113 } |
| 142 | 114 |
| 143 int VideoFrame::CopyFrame(const VideoFrame& videoFrame) { | 115 void VideoFrame::CopyFrame(const VideoFrame& videoFrame) { |
| 144 if (videoFrame.IsZeroSize()) { | 116 if (videoFrame.IsZeroSize()) { |
| 145 video_frame_buffer_ = nullptr; | 117 video_frame_buffer_ = nullptr; |
| 146 } else if (videoFrame.native_handle()) { | 118 } else if (videoFrame.native_handle()) { |
| 147 video_frame_buffer_ = videoFrame.video_frame_buffer(); | 119 video_frame_buffer_ = videoFrame.video_frame_buffer(); |
| 148 } else { | 120 } else { |
| 149 CreateFrame(videoFrame.buffer(kYPlane), videoFrame.buffer(kUPlane), | 121 CreateFrame(videoFrame.buffer(kYPlane), videoFrame.buffer(kUPlane), |
| 150 videoFrame.buffer(kVPlane), videoFrame.width(), | 122 videoFrame.buffer(kVPlane), videoFrame.width(), |
| 151 videoFrame.height(), videoFrame.stride(kYPlane), | 123 videoFrame.height(), videoFrame.stride(kYPlane), |
| 152 videoFrame.stride(kUPlane), videoFrame.stride(kVPlane)); | 124 videoFrame.stride(kUPlane), videoFrame.stride(kVPlane), |
| 125 kVideoRotation_0); |
| 153 } | 126 } |
| 154 | 127 |
| 155 timestamp_ = videoFrame.timestamp_; | 128 timestamp_ = videoFrame.timestamp_; |
| 156 ntp_time_ms_ = videoFrame.ntp_time_ms_; | 129 ntp_time_ms_ = videoFrame.ntp_time_ms_; |
| 157 render_time_ms_ = videoFrame.render_time_ms_; | 130 render_time_ms_ = videoFrame.render_time_ms_; |
| 158 rotation_ = videoFrame.rotation_; | 131 rotation_ = videoFrame.rotation_; |
| 159 return 0; | |
| 160 } | 132 } |
| 161 | 133 |
| 162 void VideoFrame::ShallowCopy(const VideoFrame& videoFrame) { | 134 void VideoFrame::ShallowCopy(const VideoFrame& videoFrame) { |
| 163 video_frame_buffer_ = videoFrame.video_frame_buffer(); | 135 video_frame_buffer_ = videoFrame.video_frame_buffer(); |
| 164 timestamp_ = videoFrame.timestamp_; | 136 timestamp_ = videoFrame.timestamp_; |
| 165 ntp_time_ms_ = videoFrame.ntp_time_ms_; | 137 ntp_time_ms_ = videoFrame.ntp_time_ms_; |
| 166 render_time_ms_ = videoFrame.render_time_ms_; | 138 render_time_ms_ = videoFrame.render_time_ms_; |
| 167 rotation_ = videoFrame.rotation_; | 139 rotation_ = videoFrame.rotation_; |
| 168 } | 140 } |
| 169 | 141 |
| (...skipping 49 matching lines...) | |
| 219 } | 191 } |
| 220 | 192 |
| 221 VideoFrame VideoFrame::ConvertNativeToI420Frame() const { | 193 VideoFrame VideoFrame::ConvertNativeToI420Frame() const { |
| 222 RTC_DCHECK(native_handle()); | 194 RTC_DCHECK(native_handle()); |
| 223 VideoFrame frame; | 195 VideoFrame frame; |
| 224 frame.ShallowCopy(*this); | 196 frame.ShallowCopy(*this); |
| 225 frame.set_video_frame_buffer(video_frame_buffer_->NativeToI420Buffer()); | 197 frame.set_video_frame_buffer(video_frame_buffer_->NativeToI420Buffer()); |
| 226 return frame; | 198 return frame; |
| 227 } | 199 } |
| 228 | 200 |
| 229 bool VideoFrame::EqualsFrame(const VideoFrame& frame) const { | |
| 230 if (width() != frame.width() || height() != frame.height() || | |
| 231 stride(kYPlane) != frame.stride(kYPlane) || | |
| 232 stride(kUPlane) != frame.stride(kUPlane) || | |
| 233 stride(kVPlane) != frame.stride(kVPlane) || | |
| 234 timestamp() != frame.timestamp() || | |
| 235 ntp_time_ms() != frame.ntp_time_ms() || | |
| 236 render_time_ms() != frame.render_time_ms()) { | |
| 237 return false; | |
| 238 } | |
| 239 const int half_width = (width() + 1) / 2; | |
| 240 const int half_height = (height() + 1) / 2; | |
| 241 return EqualPlane(buffer(kYPlane), frame.buffer(kYPlane), | |
| 242 stride(kYPlane), width(), height()) && | |
| 243 EqualPlane(buffer(kUPlane), frame.buffer(kUPlane), | |
| 244 stride(kUPlane), half_width, half_height) && | |
| 245 EqualPlane(buffer(kVPlane), frame.buffer(kVPlane), | |
| 246 stride(kVPlane), half_width, half_height); | |
| 247 } | |
| 248 | |
| 249 size_t EncodedImage::GetBufferPaddingBytes(VideoCodecType codec_type) { | 201 size_t EncodedImage::GetBufferPaddingBytes(VideoCodecType codec_type) { |
| 250 switch (codec_type) { | 202 switch (codec_type) { |
| 251 case kVideoCodecVP8: | 203 case kVideoCodecVP8: |
| 252 case kVideoCodecVP9: | 204 case kVideoCodecVP9: |
| 253 return 0; | 205 return 0; |
| 254 case kVideoCodecH264: | 206 case kVideoCodecH264: |
| 255 return kBufferPaddingBytesH264; | 207 return kBufferPaddingBytesH264; |
| 256 case kVideoCodecI420: | 208 case kVideoCodecI420: |
| 257 case kVideoCodecRED: | 209 case kVideoCodecRED: |
| 258 case kVideoCodecULPFEC: | 210 case kVideoCodecULPFEC: |
| 259 case kVideoCodecGeneric: | 211 case kVideoCodecGeneric: |
| 260 case kVideoCodecUnknown: | 212 case kVideoCodecUnknown: |
| 261 return 0; | 213 return 0; |
| 262 } | 214 } |
| 263 RTC_NOTREACHED(); | 215 RTC_NOTREACHED(); |
| 264 return 0; | 216 return 0; |
| 265 } | 217 } |
| 266 | 218 |
| 267 } // namespace webrtc | 219 } // namespace webrtc |
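As context for the API change in this diff (the CreateFrame overloads now return void and take an explicit VideoRotation, with the no-rotation overload removed), a minimal caller sketch follows; the helper name and buffer setup are illustrative and not taken from this CL.

```cpp
// Illustrative caller sketch (not part of this CL): fill a VideoFrame from a
// contiguous I420 buffer using the post-change API, which returns void and
// takes the rotation explicitly.
#include <cstdint>
#include <vector>

#include "webrtc/video_frame.h"

void FillFrameFromI420(webrtc::VideoFrame* frame,
                       const std::vector<uint8_t>& i420,
                       int width,
                       int height) {
  // The single-buffer overload derives the strides internally (width for Y,
  // (width + 1) / 2 for U and V) and copies the planes into the frame.
  frame->CreateFrame(i420.data(), width, height, webrtc::kVideoRotation_0);
}
```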
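Similarly, a hedged sketch of how the kBufferPaddingBytesH264 constant exposed through EncodedImage::GetBufferPaddingBytes() might be used when sizing an encoded-image allocation; the helper function here is hypothetical and assumes the surrounding WebRTC headers.

```cpp
// Hypothetical helper (not from this CL): compute the allocation size for an
// encoded payload so that FFmpeg's optimized bitstream readers, used by
// H264DecoderImpl, can over-read up to 8 bytes past the end safely.
#include <cstddef>

#include "webrtc/video_frame.h"

size_t EncodedBufferAllocationSize(size_t payload_size,
                                   webrtc::VideoCodecType codec_type) {
  // GetBufferPaddingBytes() returns kBufferPaddingBytesH264 (8) for H.264 and
  // 0 for every other codec type.
  return payload_size +
         webrtc::EncodedImage::GetBufferPaddingBytes(codec_type);
}
```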