| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 734 matching lines...) |
| 745 // Since we are extracting raw pointers from |input_image| to | 745 // Since we are extracting raw pointers from |input_image| to |
| 746 // |raw_images_[0]|, the resolution of these frames must match. Note that | 746 // |raw_images_[0]|, the resolution of these frames must match. Note that |
| 747 // |input_image| might be scaled from |frame|. In that case, the resolution of | 747 // |input_image| might be scaled from |frame|. In that case, the resolution of |
| 748 // |raw_images_[0]| should have been updated in UpdateCodecFrameSize. | 748 // |raw_images_[0]| should have been updated in UpdateCodecFrameSize. |
| 749 RTC_DCHECK_EQ(input_image.width(), static_cast<int>(raw_images_[0].d_w)); | 749 RTC_DCHECK_EQ(input_image.width(), static_cast<int>(raw_images_[0].d_w)); |
| 750 RTC_DCHECK_EQ(input_image.height(), static_cast<int>(raw_images_[0].d_h)); | 750 RTC_DCHECK_EQ(input_image.height(), static_cast<int>(raw_images_[0].d_h)); |
| 751 | 751 |
| 752 // Image in vpx_image_t format. | 752 // Image in vpx_image_t format. |
| 753 // Input image is const. VP8's raw image is not defined as const. | 753 // Input image is const. VP8's raw image is not defined as const. |
| 754 raw_images_[0].planes[VPX_PLANE_Y] = | 754 raw_images_[0].planes[VPX_PLANE_Y] = |
| 755 const_cast<uint8_t*>(input_image.buffer(kYPlane)); | 755 const_cast<uint8_t*>(input_image.video_frame_buffer()->DataY()); |
| 756 raw_images_[0].planes[VPX_PLANE_U] = | 756 raw_images_[0].planes[VPX_PLANE_U] = |
| 757 const_cast<uint8_t*>(input_image.buffer(kUPlane)); | 757 const_cast<uint8_t*>(input_image.video_frame_buffer()->DataU()); |
| 758 raw_images_[0].planes[VPX_PLANE_V] = | 758 raw_images_[0].planes[VPX_PLANE_V] = |
| 759 const_cast<uint8_t*>(input_image.buffer(kVPlane)); | 759 const_cast<uint8_t*>(input_image.video_frame_buffer()->DataV()); |
| 760 | 760 |
| 761 raw_images_[0].stride[VPX_PLANE_Y] = input_image.stride(kYPlane); | 761 raw_images_[0].stride[VPX_PLANE_Y] = |
| 762 raw_images_[0].stride[VPX_PLANE_U] = input_image.stride(kUPlane); | 762 input_image.video_frame_buffer()->StrideY(); |
| 763 raw_images_[0].stride[VPX_PLANE_V] = input_image.stride(kVPlane); | 763 raw_images_[0].stride[VPX_PLANE_U] = |
| | 764 input_image.video_frame_buffer()->StrideU(); |
| | 765 raw_images_[0].stride[VPX_PLANE_V] = |
| | 766 input_image.video_frame_buffer()->StrideV(); |
| 764 | 767 |
| 765 for (size_t i = 1; i < encoders_.size(); ++i) { | 768 for (size_t i = 1; i < encoders_.size(); ++i) { |
| 766 // Scale the image down by the downsampling factor for each lower stream | 769 // Scale the image down by the downsampling factor for each lower stream |
| 767 libyuv::I420Scale( | 770 libyuv::I420Scale( |
| 768 raw_images_[i - 1].planes[VPX_PLANE_Y], | 771 raw_images_[i - 1].planes[VPX_PLANE_Y], |
| 769 raw_images_[i - 1].stride[VPX_PLANE_Y], | 772 raw_images_[i - 1].stride[VPX_PLANE_Y], |
| 770 raw_images_[i - 1].planes[VPX_PLANE_U], | 773 raw_images_[i - 1].planes[VPX_PLANE_U], |
| 771 raw_images_[i - 1].stride[VPX_PLANE_U], | 774 raw_images_[i - 1].stride[VPX_PLANE_U], |
| 772 raw_images_[i - 1].planes[VPX_PLANE_V], | 775 raw_images_[i - 1].planes[VPX_PLANE_V], |
| 773 raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w, | 776 raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w, |
| (...skipping 576 matching lines...) |
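A note on the encoder-side hunk above: the old per-plane buffer(kYPlane) / stride(kYPlane) accessors are replaced by the VideoFrameBuffer getters (DataY()/StrideY() and friends). Below is a minimal sketch of the pattern, assuming an I420 frame; the standalone helpers MapFrameToVpxImage and DownscaleLayer are illustrative only (not part of this CL), and the include paths reflect the tree layout assumed here.

    #include <stdint.h>

    #include "libyuv/scale.h"
    #include "vpx/vpx_image.h"
    #include "webrtc/video_frame.h"

    // Point a vpx_image_t at the read-only I420 planes of a webrtc::VideoFrame.
    // libvpx does not take const pointers, hence the const_cast; the encoder
    // only reads from these planes.
    static void MapFrameToVpxImage(const webrtc::VideoFrame& frame,
                                   vpx_image_t* raw) {
      raw->planes[VPX_PLANE_Y] =
          const_cast<uint8_t*>(frame.video_frame_buffer()->DataY());
      raw->planes[VPX_PLANE_U] =
          const_cast<uint8_t*>(frame.video_frame_buffer()->DataU());
      raw->planes[VPX_PLANE_V] =
          const_cast<uint8_t*>(frame.video_frame_buffer()->DataV());
      raw->stride[VPX_PLANE_Y] = frame.video_frame_buffer()->StrideY();
      raw->stride[VPX_PLANE_U] = frame.video_frame_buffer()->StrideU();
      raw->stride[VPX_PLANE_V] = frame.video_frame_buffer()->StrideV();
    }

    // Downscale one layer into the next, as the loop over encoders_ does.
    static void DownscaleLayer(const vpx_image_t& src, vpx_image_t* dst) {
      libyuv::I420Scale(src.planes[VPX_PLANE_Y], src.stride[VPX_PLANE_Y],
                        src.planes[VPX_PLANE_U], src.stride[VPX_PLANE_U],
                        src.planes[VPX_PLANE_V], src.stride[VPX_PLANE_V],
                        src.d_w, src.d_h,
                        dst->planes[VPX_PLANE_Y], dst->stride[VPX_PLANE_Y],
                        dst->planes[VPX_PLANE_U], dst->stride[VPX_PLANE_U],
                        dst->planes[VPX_PLANE_V], dst->stride[VPX_PLANE_V],
                        dst->d_w, dst->d_h, libyuv::kFilterBilinear);
    }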
| 1350 return WEBRTC_VIDEO_CODEC_NO_OUTPUT; | 1353 return WEBRTC_VIDEO_CODEC_NO_OUTPUT; |
| 1351 } | 1354 } |
| 1352 last_frame_width_ = img->d_w; | 1355 last_frame_width_ = img->d_w; |
| 1353 last_frame_height_ = img->d_h; | 1356 last_frame_height_ = img->d_h; |
| 1354 // Allocate memory for decoded image. | 1357 // Allocate memory for decoded image. |
| 1355 VideoFrame decoded_image(buffer_pool_.CreateBuffer(img->d_w, img->d_h), | 1358 VideoFrame decoded_image(buffer_pool_.CreateBuffer(img->d_w, img->d_h), |
| 1356 timestamp, 0, kVideoRotation_0); | 1359 timestamp, 0, kVideoRotation_0); |
| 1357 libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], | 1360 libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], |
| 1358 img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], | 1361 img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], |
| 1359 img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], | 1362 img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], |
| 1360 decoded_image.buffer(kYPlane), decoded_image.stride(kYPlane), | 1363 decoded_image.video_frame_buffer()->MutableDataY(), |
| 1361 decoded_image.buffer(kUPlane), decoded_image.stride(kUPlane), | 1364 decoded_image.video_frame_buffer()->StrideY(), |
| 1362 decoded_image.buffer(kVPlane), decoded_image.stride(kVPlane), | 1365 decoded_image.video_frame_buffer()->MutableDataU(), |
| | 1366 decoded_image.video_frame_buffer()->StrideU(), |
| | 1367 decoded_image.video_frame_buffer()->MutableDataV(), |
| | 1368 decoded_image.video_frame_buffer()->StrideV(), |
| 1363 img->d_w, img->d_h); | 1369 img->d_w, img->d_h); |
| 1364 decoded_image.set_ntp_time_ms(ntp_time_ms); | 1370 decoded_image.set_ntp_time_ms(ntp_time_ms); |
| 1365 int ret = decode_complete_callback_->Decoded(decoded_image); | 1371 int ret = decode_complete_callback_->Decoded(decoded_image); |
| 1366 if (ret != 0) | 1372 if (ret != 0) |
| 1367 return ret; | 1373 return ret; |
| 1368 | 1374 |
| 1369 // Remember image format for later | 1375 // Remember image format for later |
| 1370 image_format_ = img->fmt; | 1376 image_format_ = img->fmt; |
| 1371 return WEBRTC_VIDEO_CODEC_OK; | 1377 return WEBRTC_VIDEO_CODEC_OK; |
| 1372 } | 1378 } |
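Symmetrically on the decode path, the copy now targets the buffer's mutable accessors instead of decoded_image.buffer(kYPlane). A small sketch of that step under the same assumptions (CopyVpxImageToFrame is a hypothetical helper; in the CL the frame is built from buffer_pool_.CreateBuffer() as shown above).

    #include "libyuv/planar_functions.h"
    #include "vpx/vpx_image.h"
    #include "webrtc/video_frame.h"

    // Copy a decoded vpx_image_t into the frame's I420 storage via the
    // MutableData*/Stride* accessors.
    static void CopyVpxImageToFrame(const vpx_image_t* img,
                                    webrtc::VideoFrame* decoded_image) {
      libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
                       img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
                       img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
                       decoded_image->video_frame_buffer()->MutableDataY(),
                       decoded_image->video_frame_buffer()->StrideY(),
                       decoded_image->video_frame_buffer()->MutableDataU(),
                       decoded_image->video_frame_buffer()->StrideU(),
                       decoded_image->video_frame_buffer()->MutableDataV(),
                       decoded_image->video_frame_buffer()->StrideV(),
                       img->d_w, img->d_h);
    }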
| (...skipping 38 matching lines...) |
| 1411 return -1; | 1417 return -1; |
| 1412 } | 1418 } |
| 1413 if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_) != | 1419 if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_) != |
| 1414 VPX_CODEC_OK) { | 1420 VPX_CODEC_OK) { |
| 1415 return -1; | 1421 return -1; |
| 1416 } | 1422 } |
| 1417 return 0; | 1423 return 0; |
| 1418 } | 1424 } |
| 1419 | 1425 |
| 1420 } // namespace webrtc | 1426 } // namespace webrtc |
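For context on the reference handling at the end of the file: VP8_SET_REFERENCE installs a previously captured reference frame into the target decoder. A plausible sketch of the underlying libvpx pattern follows, assuming the source frame is fetched with VP8_COPY_REFERENCE; the CopyLastReference helper and its signature are illustrative, not the class's actual CopyReference method.

    #include "vpx/vp8.h"
    #include "vpx/vpx_codec.h"
    #include "vpx/vpx_image.h"

    // Propagate the last reference frame from one VP8 decoder context to
    // another. The caller owns the vpx_image_t storage inside vpx_ref_frame_t.
    static int CopyLastReference(vpx_codec_ctx_t* from, vpx_codec_ctx_t* to,
                                 unsigned int width, unsigned int height) {
      vpx_ref_frame_t ref;
      ref.frame_type = VP8_LAST_FRAME;
      if (!vpx_img_alloc(&ref.img, VPX_IMG_FMT_I420, width, height, 1))
        return -1;
      int result = 0;
      if (vpx_codec_control(from, VP8_COPY_REFERENCE, &ref) != VPX_CODEC_OK ||
          vpx_codec_control(to, VP8_SET_REFERENCE, &ref) != VPX_CODEC_OK) {
        result = -1;
      }
      vpx_img_free(&ref.img);
      return result;
    }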