OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "webrtc/media/engine/webrtcvideoframe.h" | 11 #include "webrtc/media/engine/webrtcvideoframe.h" |
12 | 12 |
13 #include "libyuv/convert.h" | 13 #include "libyuv/convert.h" |
14 #include "webrtc/base/logging.h" | 14 #include "webrtc/base/logging.h" |
15 #include "webrtc/media/base/videocapturer.h" | 15 #include "webrtc/media/base/videocapturer.h" |
16 #include "webrtc/media/base/videocommon.h" | 16 #include "webrtc/media/base/videocommon.h" |
17 #include "webrtc/video_frame.h" | 17 #include "webrtc/video_frame.h" |
18 | 18 |
19 using webrtc::kYPlane; | 19 using webrtc::kYPlane; |
20 using webrtc::kUPlane; | 20 using webrtc::kUPlane; |
21 using webrtc::kVPlane; | 21 using webrtc::kVPlane; |
22 | 22 |
23 namespace cricket { | 23 namespace cricket { |
24 | 24 |
25 WebRtcVideoFrame::WebRtcVideoFrame() | 25 WebRtcVideoFrame::WebRtcVideoFrame(): |
26 : timestamp_us_(0), rotation_(webrtc::kVideoRotation_0) {} | 26 time_stamp_ns_(0), |
27 | 27 rotation_(webrtc::kVideoRotation_0) {} |
28 WebRtcVideoFrame::WebRtcVideoFrame( | |
29 const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer, | |
30 webrtc::VideoRotation rotation, | |
31 int64_t timestamp_us) | |
32 : video_frame_buffer_(buffer), | |
33 timestamp_us_(timestamp_us), | |
34 rotation_(rotation) {} | |
35 | 28 |
36 WebRtcVideoFrame::WebRtcVideoFrame( | 29 WebRtcVideoFrame::WebRtcVideoFrame( |
37 const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer, | 30 const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer, |
38 int64_t time_stamp_ns, | 31 int64_t time_stamp_ns, |
39 webrtc::VideoRotation rotation) | 32 webrtc::VideoRotation rotation) |
40 : WebRtcVideoFrame(buffer, | 33 : video_frame_buffer_(buffer), |
41 rotation, | 34 time_stamp_ns_(time_stamp_ns), |
42 time_stamp_ns / rtc::kNumNanosecsPerMicrosec) {} | 35 rotation_(rotation) { |
| 36 } |
43 | 37 |
44 WebRtcVideoFrame::~WebRtcVideoFrame() {} | 38 WebRtcVideoFrame::~WebRtcVideoFrame() {} |
45 | 39 |
46 bool WebRtcVideoFrame::Init(uint32_t format, | 40 bool WebRtcVideoFrame::Init(uint32_t format, |
47 int w, | 41 int w, |
48 int h, | 42 int h, |
49 int dw, | 43 int dw, |
50 int dh, | 44 int dh, |
51 uint8_t* sample, | 45 uint8_t* sample, |
52 size_t sample_size, | 46 size_t sample_size, |
53 int64_t time_stamp_ns, | 47 int64_t time_stamp_ns, |
54 webrtc::VideoRotation rotation) { | 48 webrtc::VideoRotation rotation) { |
55 return Reset(format, w, h, dw, dh, sample, sample_size, | 49 return Reset(format, w, h, dw, dh, sample, sample_size, |
56 time_stamp_ns / rtc::kNumNanosecsPerMicrosec, rotation, | 50 time_stamp_ns, rotation, |
57 true /*apply_rotation*/); | 51 true /*apply_rotation*/); |
58 } | 52 } |
59 | 53 |
60 bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh, | 54 bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh, |
61 bool apply_rotation) { | 55 bool apply_rotation) { |
62 return Reset(frame->fourcc, frame->width, frame->height, dw, dh, | 56 return Reset(frame->fourcc, frame->width, frame->height, dw, dh, |
63 static_cast<uint8_t*>(frame->data), frame->data_size, | 57 static_cast<uint8_t*>(frame->data), frame->data_size, |
64 frame->time_stamp / rtc::kNumNanosecsPerMicrosec, | 58 frame->time_stamp, |
65 frame->rotation, apply_rotation); | 59 frame->rotation, apply_rotation); |
66 } | 60 } |
67 | 61 |
68 bool WebRtcVideoFrame::InitToBlack(int w, int h, | 62 bool WebRtcVideoFrame::InitToBlack(int w, int h, |
69 int64_t time_stamp_ns) { | 63 int64_t time_stamp_ns) { |
70 InitToEmptyBuffer(w, h, time_stamp_ns); | 64 InitToEmptyBuffer(w, h, time_stamp_ns); |
71 return SetToBlack(); | 65 return SetToBlack(); |
72 } | 66 } |
73 | 67 |
74 int WebRtcVideoFrame::width() const { | 68 int WebRtcVideoFrame::width() const { |
(...skipping 50 matching lines...) |
125 void* WebRtcVideoFrame::GetNativeHandle() const { | 119 void* WebRtcVideoFrame::GetNativeHandle() const { |
126 return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr; | 120 return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr; |
127 } | 121 } |
128 | 122 |
129 rtc::scoped_refptr<webrtc::VideoFrameBuffer> | 123 rtc::scoped_refptr<webrtc::VideoFrameBuffer> |
130 WebRtcVideoFrame::video_frame_buffer() const { | 124 WebRtcVideoFrame::video_frame_buffer() const { |
131 return video_frame_buffer_; | 125 return video_frame_buffer_; |
132 } | 126 } |
133 | 127 |
134 VideoFrame* WebRtcVideoFrame::Copy() const { | 128 VideoFrame* WebRtcVideoFrame::Copy() const { |
135 return new WebRtcVideoFrame(video_frame_buffer_, rotation_, timestamp_us_); | 129 WebRtcVideoFrame* new_frame = new WebRtcVideoFrame( |
| 130 video_frame_buffer_, time_stamp_ns_, rotation_); |
| 131 return new_frame; |
136 } | 132 } |
137 | 133 |
138 size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc, | 134 size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc, |
139 uint8_t* buffer, | 135 uint8_t* buffer, |
140 size_t size, | 136 size_t size, |
141 int stride_rgb) const { | 137 int stride_rgb) const { |
142 RTC_CHECK(video_frame_buffer_); | 138 RTC_CHECK(video_frame_buffer_); |
143 RTC_CHECK(video_frame_buffer_->native_handle() == nullptr); | 139 RTC_CHECK(video_frame_buffer_->native_handle() == nullptr); |
144 return VideoFrame::ConvertToRgbBuffer(to_fourcc, buffer, size, stride_rgb); | 140 return VideoFrame::ConvertToRgbBuffer(to_fourcc, buffer, size, stride_rgb); |
145 } | 141 } |
146 | 142 |
147 bool WebRtcVideoFrame::Reset(uint32_t format, | 143 bool WebRtcVideoFrame::Reset(uint32_t format, |
148 int w, | 144 int w, |
149 int h, | 145 int h, |
150 int dw, | 146 int dw, |
151 int dh, | 147 int dh, |
152 uint8_t* sample, | 148 uint8_t* sample, |
153 size_t sample_size, | 149 size_t sample_size, |
154 int64_t timestamp_us, | 150 int64_t time_stamp_ns, |
155 webrtc::VideoRotation rotation, | 151 webrtc::VideoRotation rotation, |
156 bool apply_rotation) { | 152 bool apply_rotation) { |
157 if (!Validate(format, w, h, sample, sample_size)) { | 153 if (!Validate(format, w, h, sample, sample_size)) { |
158 return false; | 154 return false; |
159 } | 155 } |
160 // Translate aliases to standard enums (e.g., IYUV -> I420). | 156 // Translate aliases to standard enums (e.g., IYUV -> I420). |
161 format = CanonicalFourCC(format); | 157 format = CanonicalFourCC(format); |
162 | 158 |
163 // Set up a new buffer. | 159 // Set up a new buffer. |
164 // TODO(fbarchard): Support lazy allocation. | 160 // TODO(fbarchard): Support lazy allocation. |
165 int new_width = dw; | 161 int new_width = dw; |
166 int new_height = dh; | 162 int new_height = dh; |
167 // If rotated swap width, height. | 163 // If rotated swap width, height. |
168 if (apply_rotation && (rotation == 90 || rotation == 270)) { | 164 if (apply_rotation && (rotation == 90 || rotation == 270)) { |
169 new_width = dh; | 165 new_width = dh; |
170 new_height = dw; | 166 new_height = dw; |
171 } | 167 } |
172 | 168 |
173 InitToEmptyBuffer(new_width, new_height); | 169 InitToEmptyBuffer(new_width, new_height, |
| 170 time_stamp_ns); |
174 rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation; | 171 rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation; |
175 | 172 |
176 int horiz_crop = ((w - dw) / 2) & ~1; | 173 int horiz_crop = ((w - dw) / 2) & ~1; |
177 // ARGB on Windows has negative height. | 174 // ARGB on Windows has negative height. |
178 // The sample's layout in memory is normal, so just correct crop. | 175 // The sample's layout in memory is normal, so just correct crop. |
179 int vert_crop = ((abs(h) - dh) / 2) & ~1; | 176 int vert_crop = ((abs(h) - dh) / 2) & ~1; |
180 // Conversion functions expect negative height to flip the image. | 177 // Conversion functions expect negative height to flip the image. |
181 int idh = (h < 0) ? -dh : dh; | 178 int idh = (h < 0) ? -dh : dh; |
182 int r = libyuv::ConvertToI420( | 179 int r = libyuv::ConvertToI420( |
183 sample, sample_size, | 180 sample, sample_size, |
184 GetYPlane(), GetYPitch(), | 181 GetYPlane(), GetYPitch(), |
185 GetUPlane(), GetUPitch(), | 182 GetUPlane(), GetUPitch(), |
186 GetVPlane(), GetVPitch(), | 183 GetVPlane(), GetVPitch(), |
187 horiz_crop, vert_crop, | 184 horiz_crop, vert_crop, |
188 w, h, | 185 w, h, |
189 dw, idh, | 186 dw, idh, |
190 static_cast<libyuv::RotationMode>( | 187 static_cast<libyuv::RotationMode>( |
191 apply_rotation ? rotation : webrtc::kVideoRotation_0), | 188 apply_rotation ? rotation : webrtc::kVideoRotation_0), |
192 format); | 189 format); |
193 if (r) { | 190 if (r) { |
194 LOG(LS_ERROR) << "Error parsing format: " << GetFourccName(format) | 191 LOG(LS_ERROR) << "Error parsing format: " << GetFourccName(format) |
195 << " return code : " << r; | 192 << " return code : " << r; |
196 return false; | 193 return false; |
197 } | 194 } |
198 timestamp_us_ = timestamp_us; | |
199 return true; | 195 return true; |
200 } | 196 } |
201 | 197 |
202 VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(int w, | 198 VideoFrame* WebRtcVideoFrame::CreateEmptyFrame( |
203 int h, | 199 int w, int h, |
204 int64_t timestamp_us) const { | 200 int64_t time_stamp_ns) const { |
205 WebRtcVideoFrame* frame = new WebRtcVideoFrame(); | 201 WebRtcVideoFrame* frame = new WebRtcVideoFrame(); |
206 frame->InitToEmptyBuffer(w, h, rtc::kNumNanosecsPerMicrosec * timestamp_us); | 202 frame->InitToEmptyBuffer(w, h, time_stamp_ns); |
207 return frame; | 203 return frame; |
208 } | 204 } |
209 | 205 |
210 void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h) { | |
211 video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h); | |
212 rotation_ = webrtc::kVideoRotation_0; | |
213 } | |
214 | |
215 void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, | 206 void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, |
216 int64_t time_stamp_ns) { | 207 int64_t time_stamp_ns) { |
217 video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h); | 208 video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h); |
218 SetTimeStamp(time_stamp_ns); | 209 time_stamp_ns_ = time_stamp_ns; |
219 rotation_ = webrtc::kVideoRotation_0; | 210 rotation_ = webrtc::kVideoRotation_0; |
220 } | 211 } |
221 | 212 |
222 const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const { | 213 const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const { |
223 // If the frame is not rotated, the caller should reuse this frame instead of | 214 // If the frame is not rotated, the caller should reuse this frame instead of |
224 // making a redundant copy. | 215 // making a redundant copy. |
225 if (rotation() == webrtc::kVideoRotation_0) { | 216 if (rotation() == webrtc::kVideoRotation_0) { |
226 return this; | 217 return this; |
227 } | 218 } |
228 | 219 |
(...skipping 10 matching lines...) |
239 int orig_height = height(); | 230 int orig_height = height(); |
240 | 231 |
241 int rotated_width = orig_width; | 232 int rotated_width = orig_width; |
242 int rotated_height = orig_height; | 233 int rotated_height = orig_height; |
243 if (rotation() == webrtc::kVideoRotation_90 || | 234 if (rotation() == webrtc::kVideoRotation_90 || |
244 rotation() == webrtc::kVideoRotation_270) { | 235 rotation() == webrtc::kVideoRotation_270) { |
245 rotated_width = orig_height; | 236 rotated_width = orig_height; |
246 rotated_height = orig_width; | 237 rotated_height = orig_width; |
247 } | 238 } |
248 | 239 |
249 rotated_frame_.reset( | 240 rotated_frame_.reset(CreateEmptyFrame(rotated_width, rotated_height, |
250 CreateEmptyFrame(rotated_width, rotated_height, timestamp_us_)); | 241 GetTimeStamp())); |
251 | 242 |
252 // TODO(guoweis): Add a function in webrtc_libyuv.cc to convert from | 243 // TODO(guoweis): Add a function in webrtc_libyuv.cc to convert from |
253 // VideoRotation to libyuv::RotationMode. | 244 // VideoRotation to libyuv::RotationMode. |
254 int ret = libyuv::I420Rotate( | 245 int ret = libyuv::I420Rotate( |
255 GetYPlane(), GetYPitch(), GetUPlane(), GetUPitch(), GetVPlane(), | 246 GetYPlane(), GetYPitch(), GetUPlane(), GetUPitch(), GetVPlane(), |
256 GetVPitch(), rotated_frame_->GetYPlane(), rotated_frame_->GetYPitch(), | 247 GetVPitch(), rotated_frame_->GetYPlane(), rotated_frame_->GetYPitch(), |
257 rotated_frame_->GetUPlane(), rotated_frame_->GetUPitch(), | 248 rotated_frame_->GetUPlane(), rotated_frame_->GetUPitch(), |
258 rotated_frame_->GetVPlane(), rotated_frame_->GetVPitch(), | 249 rotated_frame_->GetVPlane(), rotated_frame_->GetVPitch(), |
259 orig_width, orig_height, | 250 orig_width, orig_height, |
260 static_cast<libyuv::RotationMode>(rotation())); | 251 static_cast<libyuv::RotationMode>(rotation())); |
261 if (ret == 0) { | 252 if (ret == 0) { |
262 return rotated_frame_.get(); | 253 return rotated_frame_.get(); |
263 } | 254 } |
264 return nullptr; | 255 return nullptr; |
265 } | 256 } |
266 | 257 |
267 } // namespace cricket | 258 } // namespace cricket |
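
For context, a minimal standalone sketch of the nanosecond-to-microsecond conversion that the left-hand (old) column performs via rtc::kNumNanosecsPerMicrosec and that the right-hand (new) column drops by storing time_stamp_ns_ directly. The local constant and helper name below are illustrative stand-ins, not WebRTC APIs.

    #include <cstdint>

    namespace {
    // Stand-in for rtc::kNumNanosecsPerMicrosec (1000 ns per us).
    constexpr int64_t kNumNanosecsPerMicrosec = 1000;

    // Mirrors the expression `time_stamp_ns / rtc::kNumNanosecsPerMicrosec`
    // seen in the old-side constructor and Init() paths.
    int64_t NanosToMicros(int64_t time_stamp_ns) {
      return time_stamp_ns / kNumNanosecsPerMicrosec;  // e.g. 33000000 ns -> 33000 us
    }
    }  // namespace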