| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include "webrtc/media/engine/webrtcvideoframe.h" | 11 #include "webrtc/media/engine/webrtcvideoframe.h" |
| 12 | 12 |
| 13 #include "libyuv/convert.h" | 13 #include "libyuv/convert.h" |
| 14 #include "webrtc/base/logging.h" | 14 #include "webrtc/base/logging.h" |
| 15 #include "webrtc/media/base/videocapturer.h" | 15 #include "webrtc/media/base/videocapturer.h" |
| 16 #include "webrtc/media/base/videocommon.h" | 16 #include "webrtc/media/base/videocommon.h" |
| 17 #include "webrtc/video_frame.h" | 17 #include "webrtc/video_frame.h" |
| 18 | 18 |
| 19 using webrtc::kYPlane; | 19 using webrtc::kYPlane; |
| 20 using webrtc::kUPlane; | 20 using webrtc::kUPlane; |
| 21 using webrtc::kVPlane; | 21 using webrtc::kVPlane; |
| 22 | 22 |
| 23 namespace cricket { | 23 namespace cricket { |
| 24 | 24 |
| 25 WebRtcVideoFrame::WebRtcVideoFrame(): | 25 WebRtcVideoFrame::WebRtcVideoFrame(): |
| 26 time_stamp_ns_(0), | 26 timestamp_us_(0), |
| 27 rotation_(webrtc::kVideoRotation_0) {} | 27 rotation_(webrtc::kVideoRotation_0) {} |
| 28 | 28 |
| 29 WebRtcVideoFrame::WebRtcVideoFrame( | 29 WebRtcVideoFrame::WebRtcVideoFrame( |
| 30 const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer, | 30 const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer, |
| 31 int64_t time_stamp_ns, | 31 int64_t time_stamp_ns, |
| 32 webrtc::VideoRotation rotation) | 32 webrtc::VideoRotation rotation) |
| 33 : video_frame_buffer_(buffer), | 33 : video_frame_buffer_(buffer), |
| 34 time_stamp_ns_(time_stamp_ns), | |
| 35 rotation_(rotation) { | 34 rotation_(rotation) { |
| | 35 // Convert to usecs. |
| | 36 SetTimeStamp(time_stamp_ns); |
| 36 } | 37 } |
| 37 | 38 |
| 38 WebRtcVideoFrame::~WebRtcVideoFrame() {} | 39 WebRtcVideoFrame::~WebRtcVideoFrame() {} |
| 39 | 40 |
| 40 bool WebRtcVideoFrame::Init(uint32_t format, | 41 bool WebRtcVideoFrame::Init(uint32_t format, |
| 41 int w, | 42 int w, |
| 42 int h, | 43 int h, |
| 43 int dw, | 44 int dw, |
| 44 int dh, | 45 int dh, |
| 45 uint8_t* sample, | 46 uint8_t* sample, |
| 46 size_t sample_size, | 47 size_t sample_size, |
| 47 int64_t time_stamp_ns, | 48 int64_t time_stamp_ns, |
| 48 webrtc::VideoRotation rotation) { | 49 webrtc::VideoRotation rotation) { |
| 49 return Reset(format, w, h, dw, dh, sample, sample_size, | 50 if (!Reset(format, w, h, dw, dh, sample, sample_size, rotation, |
| 50 time_stamp_ns, rotation, | 51 true /*apply_rotation*/)) |
| 51 true /*apply_rotation*/); | 52 return false; |
| | 53 |
| | 54 SetTimeStamp(time_stamp_ns); |
| | 55 return true; |
| 52 } | 56 } |
| 53 | 57 |
| 54 bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh, | 58 bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh, |
| 55 bool apply_rotation) { | 59 bool apply_rotation) { |
| 56 return Reset(frame->fourcc, frame->width, frame->height, dw, dh, | 60 if (!Reset(frame->fourcc, frame->width, frame->height, dw, dh, |
| 57 static_cast<uint8_t*>(frame->data), frame->data_size, | 61 static_cast<uint8_t*>(frame->data), frame->data_size, |
| 58 frame->time_stamp, | 62 frame->rotation, apply_rotation)) { |
| 59 frame->rotation, apply_rotation); | 63 return false; |
| | 64 } |
| | 65 SetTimeStamp(frame->time_stamp); |
| | 66 |
| | 67 // Check that we always get the right epoch (with a 30s margin). If |
| | 68 // this is false for some camera, we need conversion logic that adds |
| | 69 // the camera's clock offset. |
| | 70 RTC_DCHECK_LT(std::abs(timestamp_us_ - |
| | 71 static_cast<int64_t>(rtc::TimeMicros())), |
| | 72 30 * rtc::kNumMicrosecsPerSec); |
| | 73 |
| | 74 return true; |
| 60 } | 75 } |
| 61 | 76 |
| 62 bool WebRtcVideoFrame::InitToBlack(int w, int h, | 77 bool WebRtcVideoFrame::InitToBlack(int w, int h, |
| 63 int64_t time_stamp_ns) { | 78 int64_t time_stamp_ns) { |
| 64 InitToEmptyBuffer(w, h, time_stamp_ns); | 79 InitToEmptyBuffer(w, h, time_stamp_ns); |
| 65 return SetToBlack(); | 80 return SetToBlack(); |
| 66 } | 81 } |
| 67 | 82 |
| 68 int WebRtcVideoFrame::width() const { | 83 int WebRtcVideoFrame::width() const { |
| 69 return video_frame_buffer_ ? video_frame_buffer_->width() : 0; | 84 return video_frame_buffer_ ? video_frame_buffer_->width() : 0; |
| (...skipping 50 matching lines...) | |
| 120 return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr; | 135 return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr; |
| 121 } | 136 } |
| 122 | 137 |
| 123 rtc::scoped_refptr<webrtc::VideoFrameBuffer> | 138 rtc::scoped_refptr<webrtc::VideoFrameBuffer> |
| 124 WebRtcVideoFrame::GetVideoFrameBuffer() const { | 139 WebRtcVideoFrame::GetVideoFrameBuffer() const { |
| 125 return video_frame_buffer_; | 140 return video_frame_buffer_; |
| 126 } | 141 } |
| 127 | 142 |
| 128 VideoFrame* WebRtcVideoFrame::Copy() const { | 143 VideoFrame* WebRtcVideoFrame::Copy() const { |
| 129 WebRtcVideoFrame* new_frame = new WebRtcVideoFrame( | 144 WebRtcVideoFrame* new_frame = new WebRtcVideoFrame( |
| 130 video_frame_buffer_, time_stamp_ns_, rotation_); | 145 video_frame_buffer_, 0 /* Dummy timestamp, overwridden below */, |
| 146 rotation_); |
| 147 new_frame->set_timestamp_us(timestamp_us_); |
| 131 return new_frame; | 148 return new_frame; |
| 132 } | 149 } |
| 133 | 150 |
| 134 size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc, | 151 size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc, |
| 135 uint8_t* buffer, | 152 uint8_t* buffer, |
| 136 size_t size, | 153 size_t size, |
| 137 int stride_rgb) const { | 154 int stride_rgb) const { |
| 138 RTC_CHECK(video_frame_buffer_); | 155 RTC_CHECK(video_frame_buffer_); |
| 139 RTC_CHECK(video_frame_buffer_->native_handle() == nullptr); | 156 RTC_CHECK(video_frame_buffer_->native_handle() == nullptr); |
| 140 return VideoFrame::ConvertToRgbBuffer(to_fourcc, buffer, size, stride_rgb); | 157 return VideoFrame::ConvertToRgbBuffer(to_fourcc, buffer, size, stride_rgb); |
| 141 } | 158 } |
| 142 | 159 |
| 143 bool WebRtcVideoFrame::Reset(uint32_t format, | 160 bool WebRtcVideoFrame::Reset(uint32_t format, |
| 144 int w, | 161 int w, |
| 145 int h, | 162 int h, |
| 146 int dw, | 163 int dw, |
| 147 int dh, | 164 int dh, |
| 148 uint8_t* sample, | 165 uint8_t* sample, |
| 149 size_t sample_size, | 166 size_t sample_size, |
| 150 int64_t time_stamp_ns, | |
| 151 webrtc::VideoRotation rotation, | 167 webrtc::VideoRotation rotation, |
| 152 bool apply_rotation) { | 168 bool apply_rotation) { |
| 153 if (!Validate(format, w, h, sample, sample_size)) { | 169 if (!Validate(format, w, h, sample, sample_size)) { |
| 154 return false; | 170 return false; |
| 155 } | 171 } |
| 156 // Translate aliases to standard enums (e.g., IYUV -> I420). | 172 // Translate aliases to standard enums (e.g., IYUV -> I420). |
| 157 format = CanonicalFourCC(format); | 173 format = CanonicalFourCC(format); |
| 158 | 174 |
| 159 // Set up a new buffer. | 175 // Set up a new buffer. |
| 160 // TODO(fbarchard): Support lazy allocation. | 176 // TODO(fbarchard): Support lazy allocation. |
| 161 int new_width = dw; | 177 int new_width = dw; |
| 162 int new_height = dh; | 178 int new_height = dh; |
| 163 // If rotated, swap width and height. | 179 // If rotated, swap width and height. |
| 164 if (apply_rotation && (rotation == 90 || rotation == 270)) { | 180 if (apply_rotation && (rotation == 90 || rotation == 270)) { |
| 165 new_width = dh; | 181 new_width = dh; |
| 166 new_height = dw; | 182 new_height = dw; |
| 167 } | 183 } |
| 168 | 184 |
| 169 InitToEmptyBuffer(new_width, new_height, | 185 InitToEmptyBuffer(new_width, new_height); |
| 170 time_stamp_ns); | |
| 171 rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation; | 186 rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation; |
| 172 | 187 |
| 173 int horiz_crop = ((w - dw) / 2) & ~1; | 188 int horiz_crop = ((w - dw) / 2) & ~1; |
| 174 // ARGB on Windows has negative height. | 189 // ARGB on Windows has negative height. |
| 175 // The sample's layout in memory is normal, so just correct crop. | 190 // The sample's layout in memory is normal, so just correct crop. |
| 176 int vert_crop = ((abs(h) - dh) / 2) & ~1; | 191 int vert_crop = ((abs(h) - dh) / 2) & ~1; |
| 177 // Conversion functions expect negative height to flip the image. | 192 // Conversion functions expect negative height to flip the image. |
| 178 int idh = (h < 0) ? -dh : dh; | 193 int idh = (h < 0) ? -dh : dh; |
| 179 int r = libyuv::ConvertToI420( | 194 int r = libyuv::ConvertToI420( |
| 180 sample, sample_size, | 195 sample, sample_size, |
| 181 GetYPlane(), GetYPitch(), | 196 GetYPlane(), GetYPitch(), |
| 182 GetUPlane(), GetUPitch(), | 197 GetUPlane(), GetUPitch(), |
| 183 GetVPlane(), GetVPitch(), | 198 GetVPlane(), GetVPitch(), |
| 184 horiz_crop, vert_crop, | 199 horiz_crop, vert_crop, |
| 185 w, h, | 200 w, h, |
| 186 dw, idh, | 201 dw, idh, |
| 187 static_cast<libyuv::RotationMode>( | 202 static_cast<libyuv::RotationMode>( |
| 188 apply_rotation ? rotation : webrtc::kVideoRotation_0), | 203 apply_rotation ? rotation : webrtc::kVideoRotation_0), |
| 189 format); | 204 format); |
| 190 if (r) { | 205 if (r) { |
| 191 LOG(LS_ERROR) << "Error parsing format: " << GetFourccName(format) | 206 LOG(LS_ERROR) << "Error parsing format: " << GetFourccName(format) |
| 192 << " return code : " << r; | 207 << " return code : " << r; |
| 193 return false; | 208 return false; |
| 194 } | 209 } |
| 195 return true; | 210 return true; |
| 196 } | 211 } |
| 197 | 212 |
| 198 VideoFrame* WebRtcVideoFrame::CreateEmptyFrame( | 213 VideoFrame* WebRtcVideoFrame::CreateEmptyFrame( |
| 199 int w, int h, | 214 int w, int h, |
| 200 int64_t time_stamp_ns) const { | 215 int64_t timestamp_us) const { |
| 201 WebRtcVideoFrame* frame = new WebRtcVideoFrame(); | 216 WebRtcVideoFrame* frame = new WebRtcVideoFrame(); |
| 202 frame->InitToEmptyBuffer(w, h, time_stamp_ns); | 217 frame->InitToEmptyBuffer(w, h, rtc::kNumNanosecsPerMicrosec * timestamp_us); |
| 203 return frame; | 218 return frame; |
| 204 } | 219 } |
| 205 | 220 |
| | 221 void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h) { |
| | 222 video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h); |
| | 223 rotation_ = webrtc::kVideoRotation_0; |
| | 224 } |
| | 225 |
| 206 void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, | 226 void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, |
| 207 int64_t time_stamp_ns) { | 227 int64_t time_stamp_ns) { |
| 208 video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h); | 228 video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h); |
| 209 time_stamp_ns_ = time_stamp_ns; | 229 SetTimeStamp(time_stamp_ns); |
| 210 rotation_ = webrtc::kVideoRotation_0; | 230 rotation_ = webrtc::kVideoRotation_0; |
| 211 } | 231 } |
| 212 | 232 |
| 213 const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const { | 233 const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const { |
| 214 // If the frame is not rotated, the caller should reuse this frame instead of | 234 // If the frame is not rotated, the caller should reuse this frame instead of |
| 215 // making a redundant copy. | 235 // making a redundant copy. |
| 216 if (GetVideoRotation() == webrtc::kVideoRotation_0) { | 236 if (GetVideoRotation() == webrtc::kVideoRotation_0) { |
| 217 return this; | 237 return this; |
| 218 } | 238 } |
| 219 | 239 |
| (...skipping 11 matching lines...) | |
| 231 | 251 |
| 232 int rotated_width = orig_width; | 252 int rotated_width = orig_width; |
| 233 int rotated_height = orig_height; | 253 int rotated_height = orig_height; |
| 234 if (GetVideoRotation() == webrtc::kVideoRotation_90 || | 254 if (GetVideoRotation() == webrtc::kVideoRotation_90 || |
| 235 GetVideoRotation() == webrtc::kVideoRotation_270) { | 255 GetVideoRotation() == webrtc::kVideoRotation_270) { |
| 236 rotated_width = orig_height; | 256 rotated_width = orig_height; |
| 237 rotated_height = orig_width; | 257 rotated_height = orig_width; |
| 238 } | 258 } |
| 239 | 259 |
| 240 rotated_frame_.reset(CreateEmptyFrame(rotated_width, rotated_height, | 260 rotated_frame_.reset(CreateEmptyFrame(rotated_width, rotated_height, |
| 241 GetTimeStamp())); | 261 timestamp_us_)); |
| 242 | 262 |
| 243 // TODO(guoweis): Add a function in webrtc_libyuv.cc to convert from | 263 // TODO(guoweis): Add a function in webrtc_libyuv.cc to convert from |
| 244 // VideoRotation to libyuv::RotationMode. | 264 // VideoRotation to libyuv::RotationMode. |
| 245 int ret = libyuv::I420Rotate( | 265 int ret = libyuv::I420Rotate( |
| 246 GetYPlane(), GetYPitch(), GetUPlane(), GetUPitch(), GetVPlane(), | 266 GetYPlane(), GetYPitch(), GetUPlane(), GetUPitch(), GetVPlane(), |
| 247 GetVPitch(), rotated_frame_->GetYPlane(), rotated_frame_->GetYPitch(), | 267 GetVPitch(), rotated_frame_->GetYPlane(), rotated_frame_->GetYPitch(), |
| 248 rotated_frame_->GetUPlane(), rotated_frame_->GetUPitch(), | 268 rotated_frame_->GetUPlane(), rotated_frame_->GetUPitch(), |
| 249 rotated_frame_->GetVPlane(), rotated_frame_->GetVPitch(), | 269 rotated_frame_->GetVPlane(), rotated_frame_->GetVPitch(), |
| 250 orig_width, orig_height, | 270 orig_width, orig_height, |
| 251 static_cast<libyuv::RotationMode>(GetVideoRotation())); | 271 static_cast<libyuv::RotationMode>(GetVideoRotation())); |
| 252 if (ret == 0) { | 272 if (ret == 0) { |
| 253 return rotated_frame_.get(); | 273 return rotated_frame_.get(); |
| 254 } | 274 } |
| 255 return nullptr; | 275 return nullptr; |
| 256 } | 276 } |
| 257 | 277 |
| 258 } // namespace cricket | 278 } // namespace cricket |
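
The NEW column above migrates WebRtcVideoFrame's internal timestamp from nanoseconds (time_stamp_ns_) to microseconds (timestamp_us_), routing every nanosecond input through SetTimeStamp(). That helper's body is not part of this diff (it presumably lives in webrtcvideoframe.h), so the sketch below only illustrates the conversion it is assumed to perform; TimeStampNsToUs and the local constant are hypothetical stand-ins, not names from this CL, and rtc::kNumNanosecsPerMicrosec is the existing constant from webrtc/base/timeutils.h.

```cpp
#include <cstdint>

// Sketch only: the ns -> us reduction that SetTimeStamp() is assumed to apply
// before storing into timestamp_us_. A local constant stands in for
// rtc::kNumNanosecsPerMicrosec (== 1000) so the snippet compiles on its own.
constexpr int64_t kNumNanosecsPerMicrosec = 1000;

int64_t TimeStampNsToUs(int64_t time_stamp_ns) {
  // Integer division drops sub-microsecond precision, which is all the new
  // timestamp_us_ member stores.
  return time_stamp_ns / kNumNanosecsPerMicrosec;
}

// Inside WebRtcVideoFrame the member would then reduce to something like:
//   void SetTimeStamp(int64_t time_stamp_ns) {
//     timestamp_us_ = time_stamp_ns / rtc::kNumNanosecsPerMicrosec;
//   }
```

This reading is consistent with CreateEmptyFrame() above, which multiplies timestamp_us by rtc::kNumNanosecsPerMicrosec to get back to nanoseconds when calling the legacy InitToEmptyBuffer() overload.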