| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" | 11 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
| 12 | 12 |
| 13 #include <assert.h> | 13 #include <assert.h> |
| 14 #include <string.h> | 14 #include <string.h> |
| 15 | 15 |
| 16 // NOTE(ajm): Path provided by gyp. | 16 // NOTE(ajm): Path provided by gyp. |
| 17 #include "libyuv.h" // NOLINT | 17 #include "libyuv.h" // NOLINT |
| 18 | 18 |
| 19 namespace webrtc { | 19 namespace webrtc { |
| 20 | 20 |
| 21 const int k16ByteAlignment = 16; | |
| 22 | |
| 23 VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type) { | 21 VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type) { |
| 24 switch (type) { | 22 switch (type) { |
| 25 case kVideoI420: | 23 case kVideoI420: |
| 26 return kI420; | 24 return kI420; |
| 27 case kVideoIYUV: | 25 case kVideoIYUV: |
| 28 return kIYUV; | 26 return kIYUV; |
| 29 case kVideoRGB24: | 27 case kVideoRGB24: |
| 30 return kRGB24; | 28 return kRGB24; |
| 31 case kVideoARGB: | 29 case kVideoARGB: |
| 32 return kARGB; | 30 return kARGB; |
| (...skipping 16 matching lines...) |
| 49 case kVideoBGRA: | 47 case kVideoBGRA: |
| 50 return kBGRA; | 48 return kBGRA; |
| 51 case kVideoMJPEG: | 49 case kVideoMJPEG: |
| 52 return kMJPG; | 50 return kMJPG; |
| 53 default: | 51 default: |
| 54 assert(false); | 52 assert(false); |
| 55 } | 53 } |
| 56 return kUnknown; | 54 return kUnknown; |
| 57 } | 55 } |
| 58 | 56 |
| 59 int AlignInt(int value, int alignment) { | |
| 60 assert(!((alignment - 1) & alignment)); | |
| 61 return ((value + alignment - 1) & ~(alignment - 1)); | |
| 62 } | |
| 63 | |
| 64 void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) { | |
| 65 *stride_y = AlignInt(width, k16ByteAlignment); | |
| 66 *stride_uv = AlignInt((width + 1) / 2, k16ByteAlignment); | |
| 67 } | |
| 68 | |
| 69 size_t CalcBufferSize(VideoType type, int width, int height) { | 57 size_t CalcBufferSize(VideoType type, int width, int height) { |
| 70 assert(width >= 0); | 58 assert(width >= 0); |
| 71 assert(height >= 0); | 59 assert(height >= 0); |
| 72 size_t buffer_size = 0; | 60 size_t buffer_size = 0; |
| 73 switch (type) { | 61 switch (type) { |
| 74 case kI420: | 62 case kI420: |
| 75 case kNV12: | 63 case kNV12: |
| 76 case kNV21: | 64 case kNV21: |
| 77 case kIYUV: | 65 case kIYUV: |
| 78 case kYV12: { | 66 case kYV12: { |
| (...skipping 211 matching lines...) |
| 290 src_frame.video_frame_buffer()->StrideY(), | 278 src_frame.video_frame_buffer()->StrideY(), |
| 291 src_frame.video_frame_buffer()->DataU(), | 279 src_frame.video_frame_buffer()->DataU(), |
| 292 src_frame.video_frame_buffer()->StrideU(), | 280 src_frame.video_frame_buffer()->StrideU(), |
| 293 src_frame.video_frame_buffer()->DataV(), | 281 src_frame.video_frame_buffer()->DataV(), |
| 294 src_frame.video_frame_buffer()->StrideV(), | 282 src_frame.video_frame_buffer()->StrideV(), |
| 295 dst_frame, dst_sample_size, | 283 dst_frame, dst_sample_size, |
| 296 src_frame.width(), src_frame.height(), | 284 src_frame.width(), src_frame.height(), |
| 297 ConvertVideoType(dst_video_type)); | 285 ConvertVideoType(dst_video_type)); |
| 298 } | 286 } |
| 299 | 287 |
| 300 // TODO(mikhal): Create a designated VideoFrame for non I420. | |
| 301 int ConvertFromYV12(const VideoFrame& src_frame, | |
| 302 VideoType dst_video_type, | |
| 303 int dst_sample_size, | |
| 304 uint8_t* dst_frame) { | |
| 305 // YV12 = Y, V, U | |
| 306 return libyuv::ConvertFromI420( | |
| 307 src_frame.video_frame_buffer()->DataY(), | |
| 308 src_frame.video_frame_buffer()->StrideY(), | |
| 309 src_frame.video_frame_buffer()->DataV(), | |
| 310 src_frame.video_frame_buffer()->StrideV(), | |
| 311 src_frame.video_frame_buffer()->DataU(), | |
| 312 src_frame.video_frame_buffer()->StrideU(), | |
| 313 dst_frame, dst_sample_size, | |
| 314 src_frame.width(), src_frame.height(), | |
| 315 ConvertVideoType(dst_video_type)); | |
| 316 } | |
| 317 | |
| 318 // Compute PSNR for an I420 frame (all planes) | 288 // Compute PSNR for an I420 frame (all planes) |
| 319 double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) { | 289 double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) { |
| 320 if (!ref_frame || !test_frame) | 290 if (!ref_frame || !test_frame) |
| 321 return -1; | 291 return -1; |
| 322 else if ((ref_frame->width() != test_frame->width()) || | 292 else if ((ref_frame->width() != test_frame->width()) || |
| 323 (ref_frame->height() != test_frame->height())) | 293 (ref_frame->height() != test_frame->height())) |
| 324 return -1; | 294 return -1; |
| 325 else if (ref_frame->width() < 0 || ref_frame->height() < 0) | 295 else if (ref_frame->width() < 0 || ref_frame->height() < 0) |
| 326 return -1; | 296 return -1; |
| 327 | 297 |
| (...skipping 33 matching lines...) |
| 361 ref_frame->video_frame_buffer()->StrideV(), | 331 ref_frame->video_frame_buffer()->StrideV(), |
| 362 test_frame->video_frame_buffer()->DataY(), | 332 test_frame->video_frame_buffer()->DataY(), |
| 363 test_frame->video_frame_buffer()->StrideY(), | 333 test_frame->video_frame_buffer()->StrideY(), |
| 364 test_frame->video_frame_buffer()->DataU(), | 334 test_frame->video_frame_buffer()->DataU(), |
| 365 test_frame->video_frame_buffer()->StrideU(), | 335 test_frame->video_frame_buffer()->StrideU(), |
| 366 test_frame->video_frame_buffer()->DataV(), | 336 test_frame->video_frame_buffer()->DataV(), |
| 367 test_frame->video_frame_buffer()->StrideV(), | 337 test_frame->video_frame_buffer()->StrideV(), |
| 368 test_frame->width(), test_frame->height()); | 338 test_frame->width(), test_frame->height()); |
| 369 } | 339 } |
| 370 } // namespace webrtc | 340 } // namespace webrtc |
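Note on the deleted alignment helpers: k16ByteAlignment, AlignInt() and Calc16ByteAlignedStride() implemented the standard power-of-two round-up used to produce 16-byte-aligned Y and chroma strides. A minimal standalone sketch of that trick is below; the function name is illustrative and not part of the remaining webrtc_libyuv API.

    #include <assert.h>

    // Rounds value up to the next multiple of a power-of-two alignment.
    // A power of two shares no bits with (itself - 1), which is exactly
    // what the removed AlignInt() asserted before applying the mask.
    static int RoundUpToAlignment(int value, int alignment) {
      assert(!((alignment - 1) & alignment));
      return (value + alignment - 1) & ~(alignment - 1);
    }

    // Example: a 360-pixel-wide I420 frame under the removed 16-byte policy.
    //   RoundUpToAlignment(360, 16)            == 368  (Y stride)
    //   RoundUpToAlignment((360 + 1) / 2, 16)  == 192  (U/V stride)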
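Note on the deleted ConvertFromYV12(): YV12 differs from I420 only in the order of the chroma planes (V before U), so the removed wrapper simply passed the V plane where libyuv::ConvertFromI420() expects U, and vice versa. A hedged sketch of reproducing that at a call site, assuming the caller already has an I420 src_frame, a dst_frame buffer, dst_sample_size and dst_video_type in scope:

    // Same call the removed wrapper made, with the U/V pointers swapped
    // to produce YV12 plane order in the destination.
    libyuv::ConvertFromI420(
        src_frame.video_frame_buffer()->DataY(),
        src_frame.video_frame_buffer()->StrideY(),
        src_frame.video_frame_buffer()->DataV(),   // V plane where U is expected
        src_frame.video_frame_buffer()->StrideV(),
        src_frame.video_frame_buffer()->DataU(),   // U plane where V is expected
        src_frame.video_frame_buffer()->StrideU(),
        dst_frame, dst_sample_size,
        src_frame.width(), src_frame.height(),
        ConvertVideoType(dst_video_type));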