OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "webrtc/video_frame.h" | 11 #include "webrtc/video_frame.h" |
12 | 12 |
13 #include <string.h> | 13 #include <string.h> |
14 | 14 |
15 #include <algorithm> // swap | 15 #include <algorithm> // swap |
16 | 16 |
17 #include "webrtc/base/bind.h" | 17 #include "webrtc/base/bind.h" |
18 #include "webrtc/base/checks.h" | 18 #include "webrtc/base/checks.h" |
19 | 19 |
20 namespace webrtc { | 20 namespace webrtc { |
21 | 21 |
22 // FFmpeg's decoder, used by H264DecoderImpl, requires up to 8 bytes padding due | 22 // FFmpeg's decoder, used by H264DecoderImpl, requires up to 8 bytes padding due |
23 // to optimized bitstream readers. See avcodec_decode_video2. | 23 // to optimized bitstream readers. See avcodec_decode_video2. |
24 const size_t EncodedImage::kBufferPaddingBytesH264 = 8; | 24 const size_t EncodedImage::kBufferPaddingBytesH264 = 8; |
25 | 25 |
| 26 bool EqualPlane(const uint8_t* data1, |
| 27 const uint8_t* data2, |
| 28 int stride, |
| 29 int width, |
| 30 int height) { |
| 31 for (int y = 0; y < height; ++y) { |
| 32 if (memcmp(data1, data2, width) != 0) |
| 33 return false; |
| 34 data1 += stride; |
| 35 data2 += stride; |
| 36 } |
| 37 return true; |
| 38 } |
| 39 |
26 int ExpectedSize(int plane_stride, int image_height, PlaneType type) { | 40 int ExpectedSize(int plane_stride, int image_height, PlaneType type) { |
27 if (type == kYPlane) | 41 if (type == kYPlane) |
28 return plane_stride * image_height; | 42 return plane_stride * image_height; |
29 return plane_stride * ((image_height + 1) / 2); | 43 return plane_stride * ((image_height + 1) / 2); |
30 } | 44 } |
31 | 45 |
32 VideoFrame::VideoFrame() { | 46 VideoFrame::VideoFrame() { |
33 // Intentionally using Reset instead of initializer list so that any missed | 47 // Intentionally using Reset instead of initializer list so that any missed |
34 // fields in Reset will be caught by memory checkers. | 48 // fields in Reset will be caught by memory checkers. |
35 Reset(); | 49 Reset(); |
36 } | 50 } |
37 | 51 |
38 VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer, | 52 VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer, |
39 uint32_t timestamp, | 53 uint32_t timestamp, |
40 int64_t render_time_ms, | 54 int64_t render_time_ms, |
41 VideoRotation rotation) | 55 VideoRotation rotation) |
42 : video_frame_buffer_(buffer), | 56 : video_frame_buffer_(buffer), |
43 timestamp_(timestamp), | 57 timestamp_(timestamp), |
44 ntp_time_ms_(0), | 58 ntp_time_ms_(0), |
45 render_time_ms_(render_time_ms), | 59 render_time_ms_(render_time_ms), |
46 rotation_(rotation) { | 60 rotation_(rotation) { |
47 } | 61 } |
48 | 62 |
49 void VideoFrame::CreateEmptyFrame(int width, | 63 int VideoFrame::CreateEmptyFrame(int width, |
50 int height, | 64 int height, |
51 int stride_y, | 65 int stride_y, |
52 int stride_u, | 66 int stride_u, |
53 int stride_v) { | 67 int stride_v) { |
54 const int half_width = (width + 1) / 2; | 68 const int half_width = (width + 1) / 2; |
55 RTC_DCHECK_GT(width, 0); | 69 RTC_DCHECK_GT(width, 0); |
56 RTC_DCHECK_GT(height, 0); | 70 RTC_DCHECK_GT(height, 0); |
57 RTC_DCHECK_GE(stride_y, width); | 71 RTC_DCHECK_GE(stride_y, width); |
58 RTC_DCHECK_GE(stride_u, half_width); | 72 RTC_DCHECK_GE(stride_u, half_width); |
59 RTC_DCHECK_GE(stride_v, half_width); | 73 RTC_DCHECK_GE(stride_v, half_width); |
60 | 74 |
61 // Creating empty frame - reset all values. | 75 // Creating empty frame - reset all values. |
62 timestamp_ = 0; | 76 timestamp_ = 0; |
63 ntp_time_ms_ = 0; | 77 ntp_time_ms_ = 0; |
64 render_time_ms_ = 0; | 78 render_time_ms_ = 0; |
65 rotation_ = kVideoRotation_0; | 79 rotation_ = kVideoRotation_0; |
66 | 80 |
67 // Check if it's safe to reuse allocation. | 81 // Check if it's safe to reuse allocation. |
68 if (video_frame_buffer_ && video_frame_buffer_->HasOneRef() && | 82 if (video_frame_buffer_ && video_frame_buffer_->HasOneRef() && |
69 !video_frame_buffer_->native_handle() && | 83 !video_frame_buffer_->native_handle() && |
70 width == video_frame_buffer_->width() && | 84 width == video_frame_buffer_->width() && |
71 height == video_frame_buffer_->height() && stride_y == stride(kYPlane) && | 85 height == video_frame_buffer_->height() && stride_y == stride(kYPlane) && |
72 stride_u == stride(kUPlane) && stride_v == stride(kVPlane)) { | 86 stride_u == stride(kUPlane) && stride_v == stride(kVPlane)) { |
73 return; | 87 return 0; |
74 } | 88 } |
75 | 89 |
76 // Need to allocate new buffer. | 90 // Need to allocate new buffer. |
77 video_frame_buffer_ = new rtc::RefCountedObject<I420Buffer>( | 91 video_frame_buffer_ = new rtc::RefCountedObject<I420Buffer>( |
78 width, height, stride_y, stride_u, stride_v); | 92 width, height, stride_y, stride_u, stride_v); |
| 93 return 0; |
79 } | 94 } |
80 | 95 |
81 void VideoFrame::CreateFrame(const uint8_t* buffer_y, | 96 int VideoFrame::CreateFrame(const uint8_t* buffer_y, |
82 const uint8_t* buffer_u, | 97 const uint8_t* buffer_u, |
83 const uint8_t* buffer_v, | 98 const uint8_t* buffer_v, |
84 int width, | 99 int width, |
85 int height, | 100 int height, |
86 int stride_y, | 101 int stride_y, |
87 int stride_u, | 102 int stride_u, |
88 int stride_v, | 103 int stride_v) { |
89 VideoRotation rotation) { | 104 return CreateFrame(buffer_y, buffer_u, buffer_v, width, height, stride_y, |
| 105 stride_u, stride_v, kVideoRotation_0); |
| 106 } |
| 107 |
| 108 int VideoFrame::CreateFrame(const uint8_t* buffer_y, |
| 109 const uint8_t* buffer_u, |
| 110 const uint8_t* buffer_v, |
| 111 int width, |
| 112 int height, |
| 113 int stride_y, |
| 114 int stride_u, |
| 115 int stride_v, |
| 116 VideoRotation rotation) { |
90 const int half_height = (height + 1) / 2; | 117 const int half_height = (height + 1) / 2; |
91 const int expected_size_y = height * stride_y; | 118 const int expected_size_y = height * stride_y; |
92 const int expected_size_u = half_height * stride_u; | 119 const int expected_size_u = half_height * stride_u; |
93 const int expected_size_v = half_height * stride_v; | 120 const int expected_size_v = half_height * stride_v; |
94 CreateEmptyFrame(width, height, stride_y, stride_u, stride_v); | 121 CreateEmptyFrame(width, height, stride_y, stride_u, stride_v); |
95 memcpy(buffer(kYPlane), buffer_y, expected_size_y); | 122 memcpy(buffer(kYPlane), buffer_y, expected_size_y); |
96 memcpy(buffer(kUPlane), buffer_u, expected_size_u); | 123 memcpy(buffer(kUPlane), buffer_u, expected_size_u); |
97 memcpy(buffer(kVPlane), buffer_v, expected_size_v); | 124 memcpy(buffer(kVPlane), buffer_v, expected_size_v); |
98 rotation_ = rotation; | 125 rotation_ = rotation; |
| 126 return 0; |
99 } | 127 } |
100 | 128 |
101 void VideoFrame::CreateFrame(const uint8_t* buffer, | 129 int VideoFrame::CreateFrame(const uint8_t* buffer, |
102 int width, | 130 int width, |
103 int height, | 131 int height, |
104 VideoRotation rotation) { | 132 VideoRotation rotation) { |
105 const int stride_y = width; | 133 const int stride_y = width; |
106 const int stride_uv = (width + 1) / 2; | 134 const int stride_uv = (width + 1) / 2; |
107 | 135 |
108 const uint8_t* buffer_y = buffer; | 136 const uint8_t* buffer_y = buffer; |
109 const uint8_t* buffer_u = buffer_y + stride_y * height; | 137 const uint8_t* buffer_u = buffer_y + stride_y * height; |
110 const uint8_t* buffer_v = buffer_u + stride_uv * ((height + 1) / 2); | 138 const uint8_t* buffer_v = buffer_u + stride_uv * ((height + 1) / 2); |
111 CreateFrame(buffer_y, buffer_u, buffer_v, width, height, stride_y, | 139 return CreateFrame(buffer_y, buffer_u, buffer_v, width, height, stride_y, |
112 stride_uv, stride_uv, rotation); | 140 stride_uv, stride_uv, rotation); |
113 } | 141 } |
114 | 142 |
115 void VideoFrame::CopyFrame(const VideoFrame& videoFrame) { | 143 int VideoFrame::CopyFrame(const VideoFrame& videoFrame) { |
116 if (videoFrame.IsZeroSize()) { | 144 if (videoFrame.IsZeroSize()) { |
117 video_frame_buffer_ = nullptr; | 145 video_frame_buffer_ = nullptr; |
118 } else if (videoFrame.native_handle()) { | 146 } else if (videoFrame.native_handle()) { |
119 video_frame_buffer_ = videoFrame.video_frame_buffer(); | 147 video_frame_buffer_ = videoFrame.video_frame_buffer(); |
120 } else { | 148 } else { |
121 CreateFrame(videoFrame.buffer(kYPlane), videoFrame.buffer(kUPlane), | 149 CreateFrame(videoFrame.buffer(kYPlane), videoFrame.buffer(kUPlane), |
122 videoFrame.buffer(kVPlane), videoFrame.width(), | 150 videoFrame.buffer(kVPlane), videoFrame.width(), |
123 videoFrame.height(), videoFrame.stride(kYPlane), | 151 videoFrame.height(), videoFrame.stride(kYPlane), |
124 videoFrame.stride(kUPlane), videoFrame.stride(kVPlane), | 152 videoFrame.stride(kUPlane), videoFrame.stride(kVPlane)); |
125 kVideoRotation_0); | |
126 } | 153 } |
127 | 154 |
128 timestamp_ = videoFrame.timestamp_; | 155 timestamp_ = videoFrame.timestamp_; |
129 ntp_time_ms_ = videoFrame.ntp_time_ms_; | 156 ntp_time_ms_ = videoFrame.ntp_time_ms_; |
130 render_time_ms_ = videoFrame.render_time_ms_; | 157 render_time_ms_ = videoFrame.render_time_ms_; |
131 rotation_ = videoFrame.rotation_; | 158 rotation_ = videoFrame.rotation_; |
| 159 return 0; |
132 } | 160 } |
133 | 161 |
134 void VideoFrame::ShallowCopy(const VideoFrame& videoFrame) { | 162 void VideoFrame::ShallowCopy(const VideoFrame& videoFrame) { |
135 video_frame_buffer_ = videoFrame.video_frame_buffer(); | 163 video_frame_buffer_ = videoFrame.video_frame_buffer(); |
136 timestamp_ = videoFrame.timestamp_; | 164 timestamp_ = videoFrame.timestamp_; |
137 ntp_time_ms_ = videoFrame.ntp_time_ms_; | 165 ntp_time_ms_ = videoFrame.ntp_time_ms_; |
138 render_time_ms_ = videoFrame.render_time_ms_; | 166 render_time_ms_ = videoFrame.render_time_ms_; |
139 rotation_ = videoFrame.rotation_; | 167 rotation_ = videoFrame.rotation_; |
140 } | 168 } |
141 | 169 |
(...skipping 49 matching lines...) |
191 } | 219 } |
192 | 220 |
193 VideoFrame VideoFrame::ConvertNativeToI420Frame() const { | 221 VideoFrame VideoFrame::ConvertNativeToI420Frame() const { |
194 RTC_DCHECK(native_handle()); | 222 RTC_DCHECK(native_handle()); |
195 VideoFrame frame; | 223 VideoFrame frame; |
196 frame.ShallowCopy(*this); | 224 frame.ShallowCopy(*this); |
197 frame.set_video_frame_buffer(video_frame_buffer_->NativeToI420Buffer()); | 225 frame.set_video_frame_buffer(video_frame_buffer_->NativeToI420Buffer()); |
198 return frame; | 226 return frame; |
199 } | 227 } |
200 | 228 |
| 229 bool VideoFrame::EqualsFrame(const VideoFrame& frame) const { |
| 230 if (width() != frame.width() || height() != frame.height() || |
| 231 stride(kYPlane) != frame.stride(kYPlane) || |
| 232 stride(kUPlane) != frame.stride(kUPlane) || |
| 233 stride(kVPlane) != frame.stride(kVPlane) || |
| 234 timestamp() != frame.timestamp() || |
| 235 ntp_time_ms() != frame.ntp_time_ms() || |
| 236 render_time_ms() != frame.render_time_ms()) { |
| 237 return false; |
| 238 } |
| 239 const int half_width = (width() + 1) / 2; |
| 240 const int half_height = (height() + 1) / 2; |
| 241 return EqualPlane(buffer(kYPlane), frame.buffer(kYPlane), |
| 242 stride(kYPlane), width(), height()) && |
| 243 EqualPlane(buffer(kUPlane), frame.buffer(kUPlane), |
| 244 stride(kUPlane), half_width, half_height) && |
| 245 EqualPlane(buffer(kVPlane), frame.buffer(kVPlane), |
| 246 stride(kVPlane), half_width, half_height); |
| 247 } |
| 248 |
201 size_t EncodedImage::GetBufferPaddingBytes(VideoCodecType codec_type) { | 249 size_t EncodedImage::GetBufferPaddingBytes(VideoCodecType codec_type) { |
202 switch (codec_type) { | 250 switch (codec_type) { |
203 case kVideoCodecVP8: | 251 case kVideoCodecVP8: |
204 case kVideoCodecVP9: | 252 case kVideoCodecVP9: |
205 return 0; | 253 return 0; |
206 case kVideoCodecH264: | 254 case kVideoCodecH264: |
207 return kBufferPaddingBytesH264; | 255 return kBufferPaddingBytesH264; |
208 case kVideoCodecI420: | 256 case kVideoCodecI420: |
209 case kVideoCodecRED: | 257 case kVideoCodecRED: |
210 case kVideoCodecULPFEC: | 258 case kVideoCodecULPFEC: |
211 case kVideoCodecGeneric: | 259 case kVideoCodecGeneric: |
212 case kVideoCodecUnknown: | 260 case kVideoCodecUnknown: |
213 return 0; | 261 return 0; |
214 } | 262 } |
215 RTC_NOTREACHED(); | 263 RTC_NOTREACHED(); |
216 return 0; | 264 return 0; |
217 } | 265 } |
218 | 266 |
219 } // namespace webrtc | 267 } // namespace webrtc |
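A minimal usage sketch (not part of the change above) exercising the new int-returning CreateFrame and CopyFrame together with the EqualsFrame helper; the dimensions, fill values, and the RoundTripEqual name are illustrative assumptions, and a return value of 0 is taken to mean success, as the definitions above suggest.

#include <stdint.h>
#include <vector>

#include "webrtc/video_frame.h"

// Build a frame from raw I420 planes, copy it, and verify the copy matches.
bool RoundTripEqual() {
  const int width = 320;
  const int height = 240;
  const int half_width = (width + 1) / 2;
  const int half_height = (height + 1) / 2;
  // I420 layout: full-resolution Y plane, quarter-resolution U and V planes.
  std::vector<uint8_t> y(width * height, 0x10);
  std::vector<uint8_t> u(half_width * half_height, 0x80);
  std::vector<uint8_t> v(half_width * half_height, 0x80);

  webrtc::VideoFrame a;
  webrtc::VideoFrame b;
  if (a.CreateFrame(y.data(), u.data(), v.data(), width, height,
                    width, half_width, half_width) != 0) {
    return false;
  }
  if (b.CopyFrame(a) != 0) {
    return false;
  }
  // EqualsFrame checks dimensions, strides, and timestamps, then compares
  // each plane row by row via EqualPlane.
  return a.EqualsFrame(b);
}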
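The kBufferPaddingBytesH264 constant and GetBufferPaddingBytes above matter when sizing encoded bitstream buffers. A hedged sketch of a caller follows, assuming GetBufferPaddingBytes is a static member of EncodedImage; the AllocateEncodedBuffer helper is hypothetical and not part of this file.

#include <stddef.h>
#include <stdint.h>

#include <memory>

#include "webrtc/video_frame.h"

// Allocate payload_size bytes plus the per-codec padding that FFmpeg's H.264
// decoder needs for its optimized bitstream readers (8 bytes for H.264,
// 0 for the other codec types handled above).
std::unique_ptr<uint8_t[]> AllocateEncodedBuffer(size_t payload_size,
                                                 webrtc::VideoCodecType codec,
                                                 size_t* capacity) {
  *capacity =
      payload_size + webrtc::EncodedImage::GetBufferPaddingBytes(codec);
  return std::unique_ptr<uint8_t[]>(new uint8_t[*capacity]);
}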