| OLD | NEW |
| (Empty) |
| 1 /* | |
| 2 * libjingle | |
| 3 * Copyright 2011 Google Inc. | |
| 4 * | |
| 5 * Redistribution and use in source and binary forms, with or without | |
| 6 * modification, are permitted provided that the following conditions are met: | |
| 7 * | |
| 8 * 1. Redistributions of source code must retain the above copyright notice, | |
| 9 * this list of conditions and the following disclaimer. | |
| 10 * 2. Redistributions in binary form must reproduce the above copyright notice, | |
| 11 * this list of conditions and the following disclaimer in the documentation | |
| 12 * and/or other materials provided with the distribution. | |
| 13 * 3. The name of the author may not be used to endorse or promote products | |
| 14 * derived from this software without specific prior written permission. | |
| 15 * | |
| 16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED | |
| 17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF | |
| 18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO | |
| 19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |
| 20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | |
| 21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; | |
| 22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, | |
| 23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR | |
| 24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | |
| 25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
| 26 */ | |
| 27 | |
| 28 #include "talk/media/webrtc/webrtcvideoframe.h" | |
| 29 | |
| 30 #include "libyuv/convert.h" | |
| 31 #include "talk/media/base/videocapturer.h" | |
| 32 #include "talk/media/base/videocommon.h" | |
| 33 #include "webrtc/base/logging.h" | |
| 34 #include "webrtc/video_frame.h" | |
| 35 | |
| 36 using webrtc::kYPlane; | |
| 37 using webrtc::kUPlane; | |
| 38 using webrtc::kVPlane; | |
| 39 | |
| 40 namespace cricket { | |
| 41 | |
| 42 WebRtcVideoFrame::WebRtcVideoFrame(): | |
| 43 time_stamp_ns_(0), | |
| 44 rotation_(webrtc::kVideoRotation_0) {} | |
| 45 | |
| 46 WebRtcVideoFrame::WebRtcVideoFrame( | |
| 47 const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer, | |
| 48 int64_t time_stamp_ns, | |
| 49 webrtc::VideoRotation rotation) | |
| 50 : video_frame_buffer_(buffer), | |
| 51 time_stamp_ns_(time_stamp_ns), | |
| 52 rotation_(rotation) { | |
| 53 } | |
| 54 | |
// Out-of-line destructor; buffer release is handled by the scoped_refptr.
WebRtcVideoFrame::~WebRtcVideoFrame() {}
| 56 | |
| 57 bool WebRtcVideoFrame::Init(uint32_t format, | |
| 58 int w, | |
| 59 int h, | |
| 60 int dw, | |
| 61 int dh, | |
| 62 uint8_t* sample, | |
| 63 size_t sample_size, | |
| 64 int64_t time_stamp_ns, | |
| 65 webrtc::VideoRotation rotation) { | |
| 66 return Reset(format, w, h, dw, dh, sample, sample_size, | |
| 67 time_stamp_ns, rotation, | |
| 68 true /*apply_rotation*/); | |
| 69 } | |
| 70 | |
| 71 bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh, | |
| 72 bool apply_rotation) { | |
| 73 return Reset(frame->fourcc, frame->width, frame->height, dw, dh, | |
| 74 static_cast<uint8_t*>(frame->data), frame->data_size, | |
| 75 frame->time_stamp, | |
| 76 frame->rotation, apply_rotation); | |
| 77 } | |
| 78 | |
| 79 bool WebRtcVideoFrame::InitToBlack(int w, int h, | |
| 80 int64_t time_stamp_ns) { | |
| 81 InitToEmptyBuffer(w, h, time_stamp_ns); | |
| 82 return SetToBlack(); | |
| 83 } | |
| 84 | |
| 85 size_t WebRtcVideoFrame::GetWidth() const { | |
| 86 return video_frame_buffer_ ? video_frame_buffer_->width() : 0; | |
| 87 } | |
| 88 | |
| 89 size_t WebRtcVideoFrame::GetHeight() const { | |
| 90 return video_frame_buffer_ ? video_frame_buffer_->height() : 0; | |
| 91 } | |
| 92 | |
| 93 const uint8_t* WebRtcVideoFrame::GetYPlane() const { | |
| 94 return video_frame_buffer_ ? video_frame_buffer_->data(kYPlane) : nullptr; | |
| 95 } | |
| 96 | |
| 97 const uint8_t* WebRtcVideoFrame::GetUPlane() const { | |
| 98 return video_frame_buffer_ ? video_frame_buffer_->data(kUPlane) : nullptr; | |
| 99 } | |
| 100 | |
| 101 const uint8_t* WebRtcVideoFrame::GetVPlane() const { | |
| 102 return video_frame_buffer_ ? video_frame_buffer_->data(kVPlane) : nullptr; | |
| 103 } | |
| 104 | |
| 105 uint8_t* WebRtcVideoFrame::GetYPlane() { | |
| 106 return video_frame_buffer_ ? video_frame_buffer_->MutableData(kYPlane) | |
| 107 : nullptr; | |
| 108 } | |
| 109 | |
| 110 uint8_t* WebRtcVideoFrame::GetUPlane() { | |
| 111 return video_frame_buffer_ ? video_frame_buffer_->MutableData(kUPlane) | |
| 112 : nullptr; | |
| 113 } | |
| 114 | |
| 115 uint8_t* WebRtcVideoFrame::GetVPlane() { | |
| 116 return video_frame_buffer_ ? video_frame_buffer_->MutableData(kVPlane) | |
| 117 : nullptr; | |
| 118 } | |
| 119 | |
| 120 int32_t WebRtcVideoFrame::GetYPitch() const { | |
| 121 return video_frame_buffer_ ? video_frame_buffer_->stride(kYPlane) : 0; | |
| 122 } | |
| 123 | |
| 124 int32_t WebRtcVideoFrame::GetUPitch() const { | |
| 125 return video_frame_buffer_ ? video_frame_buffer_->stride(kUPlane) : 0; | |
| 126 } | |
| 127 | |
| 128 int32_t WebRtcVideoFrame::GetVPitch() const { | |
| 129 return video_frame_buffer_ ? video_frame_buffer_->stride(kVPlane) : 0; | |
| 130 } | |
| 131 | |
| 132 bool WebRtcVideoFrame::IsExclusive() const { | |
| 133 return video_frame_buffer_->HasOneRef(); | |
| 134 } | |
| 135 | |
| 136 void* WebRtcVideoFrame::GetNativeHandle() const { | |
| 137 return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr; | |
| 138 } | |
| 139 | |
| 140 rtc::scoped_refptr<webrtc::VideoFrameBuffer> | |
| 141 WebRtcVideoFrame::GetVideoFrameBuffer() const { | |
| 142 return video_frame_buffer_; | |
| 143 } | |
| 144 | |
| 145 VideoFrame* WebRtcVideoFrame::Copy() const { | |
| 146 WebRtcVideoFrame* new_frame = new WebRtcVideoFrame( | |
| 147 video_frame_buffer_, time_stamp_ns_, rotation_); | |
| 148 return new_frame; | |
| 149 } | |
| 150 | |
// Ensures this frame is the sole owner of its pixel buffer, deep-copying the
// planes into a freshly allocated I420 buffer when the current buffer is
// shared. Returns false if the plane copy fails (the original buffer is then
// left untouched). Must not be called on frames backed by a native (GPU)
// handle, since those pixels are not CPU-accessible for copying.
bool WebRtcVideoFrame::MakeExclusive() {
  RTC_DCHECK(video_frame_buffer_->native_handle() == nullptr);
  if (IsExclusive())
    return true;

  // Not exclusive already, need to copy buffer. The new buffer keeps the
  // same dimensions and per-plane strides as the current one.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> new_buffer =
      new rtc::RefCountedObject<webrtc::I420Buffer>(
          video_frame_buffer_->width(), video_frame_buffer_->height(),
          video_frame_buffer_->stride(kYPlane),
          video_frame_buffer_->stride(kUPlane),
          video_frame_buffer_->stride(kVPlane));

  // Copy current planes into the new buffer before swapping it in, so a
  // failed copy leaves this frame unchanged.
  if (!CopyToPlanes(
          new_buffer->MutableData(kYPlane), new_buffer->MutableData(kUPlane),
          new_buffer->MutableData(kVPlane), new_buffer->stride(kYPlane),
          new_buffer->stride(kUPlane), new_buffer->stride(kVPlane))) {
    return false;
  }

  video_frame_buffer_ = new_buffer;
  return true;
}
| 174 | |
// Converts this frame's pixels into |buffer| in the RGB format named by
// |to_fourcc|, delegating the actual conversion to the base-class
// implementation. Requires a CPU-backed buffer: a missing buffer or a
// native-handle (GPU) buffer trips the CHECKs.
size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc,
                                            uint8_t* buffer,
                                            size_t size,
                                            int stride_rgb) const {
  RTC_CHECK(video_frame_buffer_);
  RTC_CHECK(video_frame_buffer_->native_handle() == nullptr);
  return VideoFrame::ConvertToRgbBuffer(to_fourcc, buffer, size, stride_rgb);
}
| 183 | |
| 184 bool WebRtcVideoFrame::Reset(uint32_t format, | |
| 185 int w, | |
| 186 int h, | |
| 187 int dw, | |
| 188 int dh, | |
| 189 uint8_t* sample, | |
| 190 size_t sample_size, | |
| 191 int64_t time_stamp_ns, | |
| 192 webrtc::VideoRotation rotation, | |
| 193 bool apply_rotation) { | |
| 194 if (!Validate(format, w, h, sample, sample_size)) { | |
| 195 return false; | |
| 196 } | |
| 197 // Translate aliases to standard enums (e.g., IYUV -> I420). | |
| 198 format = CanonicalFourCC(format); | |
| 199 | |
| 200 // Set up a new buffer. | |
| 201 // TODO(fbarchard): Support lazy allocation. | |
| 202 int new_width = dw; | |
| 203 int new_height = dh; | |
| 204 // If rotated swap width, height. | |
| 205 if (apply_rotation && (rotation == 90 || rotation == 270)) { | |
| 206 new_width = dh; | |
| 207 new_height = dw; | |
| 208 } | |
| 209 | |
| 210 InitToEmptyBuffer(new_width, new_height, | |
| 211 time_stamp_ns); | |
| 212 rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation; | |
| 213 | |
| 214 int horiz_crop = ((w - dw) / 2) & ~1; | |
| 215 // ARGB on Windows has negative height. | |
| 216 // The sample's layout in memory is normal, so just correct crop. | |
| 217 int vert_crop = ((abs(h) - dh) / 2) & ~1; | |
| 218 // Conversion functions expect negative height to flip the image. | |
| 219 int idh = (h < 0) ? -dh : dh; | |
| 220 int r = libyuv::ConvertToI420( | |
| 221 sample, sample_size, | |
| 222 GetYPlane(), GetYPitch(), | |
| 223 GetUPlane(), GetUPitch(), | |
| 224 GetVPlane(), GetVPitch(), | |
| 225 horiz_crop, vert_crop, | |
| 226 w, h, | |
| 227 dw, idh, | |
| 228 static_cast<libyuv::RotationMode>( | |
| 229 apply_rotation ? rotation : webrtc::kVideoRotation_0), | |
| 230 format); | |
| 231 if (r) { | |
| 232 LOG(LS_ERROR) << "Error parsing format: " << GetFourccName(format) | |
| 233 << " return code : " << r; | |
| 234 return false; | |
| 235 } | |
| 236 return true; | |
| 237 } | |
| 238 | |
| 239 VideoFrame* WebRtcVideoFrame::CreateEmptyFrame( | |
| 240 int w, int h, | |
| 241 int64_t time_stamp_ns) const { | |
| 242 WebRtcVideoFrame* frame = new WebRtcVideoFrame(); | |
| 243 frame->InitToEmptyBuffer(w, h, time_stamp_ns); | |
| 244 return frame; | |
| 245 } | |
| 246 | |
| 247 void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, | |
| 248 int64_t time_stamp_ns) { | |
| 249 video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h); | |
| 250 time_stamp_ns_ = time_stamp_ns; | |
| 251 rotation_ = webrtc::kVideoRotation_0; | |
| 252 } | |
| 253 | |
// Returns a frame whose pixels have this frame's pending rotation baked in.
// Returns |this| when no rotation is pending, a lazily created and cached
// rotated copy otherwise, or nullptr if the libyuv rotation fails.
// NOTE(review): the cached |rotated_frame_| is not invalidated if this
// frame's buffer is later replaced — callers appear to treat frames as
// immutable after this call; confirm against usage.
const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const {
  // If the frame is not rotated, the caller should reuse this frame instead of
  // making a redundant copy.
  if (GetVideoRotation() == webrtc::kVideoRotation_0) {
    return this;
  }

  // If the video frame is backed up by a native handle, it resides in the GPU
  // memory which we can't rotate here. The assumption is that the renderers
  // which uses GPU to render should be able to rotate themselves.
  RTC_DCHECK(!GetNativeHandle());

  // Return the cached rotated copy if one was already produced.
  if (rotated_frame_) {
    return rotated_frame_.get();
  }

  int width = static_cast<int>(GetWidth());
  int height = static_cast<int>(GetHeight());

  // 90/270-degree rotations swap the output dimensions.
  int rotated_width = width;
  int rotated_height = height;
  if (GetVideoRotation() == webrtc::kVideoRotation_90 ||
      GetVideoRotation() == webrtc::kVideoRotation_270) {
    rotated_width = height;
    rotated_height = width;
  }

  rotated_frame_.reset(CreateEmptyFrame(rotated_width, rotated_height,
                                        GetTimeStamp()));

  // TODO(guoweis): Add a function in webrtc_libyuv.cc to convert from
  // VideoRotation to libyuv::RotationMode.
  int ret = libyuv::I420Rotate(
      GetYPlane(), GetYPitch(), GetUPlane(), GetUPitch(), GetVPlane(),
      GetVPitch(), rotated_frame_->GetYPlane(), rotated_frame_->GetYPitch(),
      rotated_frame_->GetUPlane(), rotated_frame_->GetUPitch(),
      rotated_frame_->GetVPlane(), rotated_frame_->GetVPitch(), width, height,
      static_cast<libyuv::RotationMode>(GetVideoRotation()));
  if (ret == 0) {
    return rotated_frame_.get();
  }
  return nullptr;
}
| 297 | |
| 298 } // namespace cricket | |
| OLD | NEW |