| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include <stdio.h> | 11 #include <stdio.h> |
| 12 | 12 |
| 13 #include <memory> | 13 #include <memory> |
| 14 | 14 |
| 15 #include "testing/gtest/include/gtest/gtest.h" | 15 #include "testing/gtest/include/gtest/gtest.h" |
| 16 #include "webrtc/base/checks.h" | 16 #include "webrtc/base/checks.h" |
| 17 #include "webrtc/base/timeutils.h" | 17 #include "webrtc/base/timeutils.h" |
| 18 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" | 18 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
| 19 #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h" | 19 #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h" |
| 20 #include "webrtc/test/frame_utils.h" |
| 20 #include "webrtc/test/testsupport/fileutils.h" | 21 #include "webrtc/test/testsupport/fileutils.h" |
| 21 | 22 |
| 22 namespace webrtc { | 23 namespace webrtc { |
| 23 | 24 |
| 24 namespace { | 25 namespace { |
| 25 void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) { | 26 void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) { |
| 26 *stride_y = 16 * ((width + 15) / 16); | 27 *stride_y = 16 * ((width + 15) / 16); |
| 27 *stride_uv = 16 * ((width + 31) / 32); | 28 *stride_uv = 16 * ((width + 31) / 32); |
| 28 } | 29 } |
| 29 | 30 |
| (...skipping 91 matching lines...) |
| 121 decoder_.reset(VP8Decoder::Create()); | 122 decoder_.reset(VP8Decoder::Create()); |
| 122 memset(&codec_inst_, 0, sizeof(codec_inst_)); | 123 memset(&codec_inst_, 0, sizeof(codec_inst_)); |
| 123 encode_complete_callback_.reset( | 124 encode_complete_callback_.reset( |
| 124 new Vp8UnitTestEncodeCompleteCallback(&encoded_frame_, 0, NULL)); | 125 new Vp8UnitTestEncodeCompleteCallback(&encoded_frame_, 0, NULL)); |
| 125 decode_complete_callback_.reset( | 126 decode_complete_callback_.reset( |
| 126 new Vp8UnitTestDecodeCompleteCallback(&decoded_frame_)); | 127 new Vp8UnitTestDecodeCompleteCallback(&decoded_frame_)); |
| 127 encoder_->RegisterEncodeCompleteCallback(encode_complete_callback_.get()); | 128 encoder_->RegisterEncodeCompleteCallback(encode_complete_callback_.get()); |
| 128 decoder_->RegisterDecodeCompleteCallback(decode_complete_callback_.get()); | 129 decoder_->RegisterDecodeCompleteCallback(decode_complete_callback_.get()); |
| 129 // Using a QCIF image (aligned stride (u,v planes) > width). | 130 // Using a QCIF image (aligned stride (u,v planes) > width). |
| 130 // Processing only one frame. | 131 // Processing only one frame. |
| 131 length_source_frame_ = CalcBufferSize(kI420, kWidth, kHeight); | |
| 132 source_buffer_.reset(new uint8_t[length_source_frame_]); | |
| 133 source_file_ = fopen(test::ResourcePath("paris_qcif", "yuv").c_str(), "rb"); | 132 source_file_ = fopen(test::ResourcePath("paris_qcif", "yuv").c_str(), "rb"); |
| 134 ASSERT_TRUE(source_file_ != NULL); | 133 ASSERT_TRUE(source_file_ != NULL); |
| 135 // Set input frame. | 134 rtc::scoped_refptr<VideoFrameBuffer> compact_buffer( |
| 136 ASSERT_EQ( | 135 test::ReadI420Buffer(kWidth, kHeight, source_file_)); |
| 137 fread(source_buffer_.get(), 1, length_source_frame_, source_file_), | 136 ASSERT_TRUE(compact_buffer); |
| 138 length_source_frame_); | |
| 139 codec_inst_.width = kWidth; | 137 codec_inst_.width = kWidth; |
| 140 codec_inst_.height = kHeight; | 138 codec_inst_.height = kHeight; |
| 141 const int kFramerate = 30; | 139 const int kFramerate = 30; |
| 142 codec_inst_.maxFramerate = kFramerate; | 140 codec_inst_.maxFramerate = kFramerate; |
| 143 // Setting aligned stride values. | 141 // Setting aligned stride values. |
| 144 int stride_uv; | 142 int stride_uv; |
| 145 int stride_y; | 143 int stride_y; |
| 146 Calc16ByteAlignedStride(codec_inst_.width, &stride_y, &stride_uv); | 144 Calc16ByteAlignedStride(codec_inst_.width, &stride_y, &stride_uv); |
| 147 EXPECT_EQ(stride_y, 176); | 145 EXPECT_EQ(stride_y, 176); |
| 148 EXPECT_EQ(stride_uv, 96); | 146 EXPECT_EQ(stride_uv, 96); |
| 149 | 147 |
| 150 rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create( | 148 rtc::scoped_refptr<I420Buffer> stride_buffer( |
| 151 codec_inst_.width, codec_inst_.height, stride_y, stride_uv, stride_uv); | 149 I420Buffer::Create(kWidth, kHeight, stride_y, stride_uv, stride_uv)); |
| 152 // Using ConvertToI420 to add stride to the image. | 150 |
| 153 EXPECT_EQ( | 151 // No scaling in our case, just a copy, to add stride to the image. |
| 154 0, ConvertToI420(kI420, source_buffer_.get(), 0, 0, codec_inst_.width, | 152 stride_buffer->ScaleFrom(compact_buffer); |
| 155 codec_inst_.height, 0, kVideoRotation_0, | 153 |
| 156 buffer.get())); | |
| 157 input_frame_.reset( | 154 input_frame_.reset( |
| 158 new VideoFrame(buffer, kTestTimestamp, 0, webrtc::kVideoRotation_0)); | 155 new VideoFrame(stride_buffer, kVideoRotation_0, 0)); |
| 156 input_frame_->set_timestamp(kTestTimestamp); |
| 159 } | 157 } |
| 160 | 158 |
| 161 void SetUpEncodeDecode() { | 159 void SetUpEncodeDecode() { |
| 162 codec_inst_.startBitrate = 300; | 160 codec_inst_.startBitrate = 300; |
| 163 codec_inst_.maxBitrate = 4000; | 161 codec_inst_.maxBitrate = 4000; |
| 164 codec_inst_.qpMax = 56; | 162 codec_inst_.qpMax = 56; |
| 165 codec_inst_.codecSpecific.VP8.denoisingOn = true; | 163 codec_inst_.codecSpecific.VP8.denoisingOn = true; |
| 166 | 164 |
| 167 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, | 165 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, |
| 168 encoder_->InitEncode(&codec_inst_, 1, 1440)); | 166 encoder_->InitEncode(&codec_inst_, 1, 1440)); |
| (...skipping 26 matching lines...) |
| 195 | 193 |
| 196 std::unique_ptr<Vp8UnitTestEncodeCompleteCallback> encode_complete_callback_; | 194 std::unique_ptr<Vp8UnitTestEncodeCompleteCallback> encode_complete_callback_; |
| 197 std::unique_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_; | 195 std::unique_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_; |
| 198 std::unique_ptr<uint8_t[]> source_buffer_; | 196 std::unique_ptr<uint8_t[]> source_buffer_; |
| 199 FILE* source_file_; | 197 FILE* source_file_; |
| 200 std::unique_ptr<VideoFrame> input_frame_; | 198 std::unique_ptr<VideoFrame> input_frame_; |
| 201 std::unique_ptr<VideoEncoder> encoder_; | 199 std::unique_ptr<VideoEncoder> encoder_; |
| 202 std::unique_ptr<VideoDecoder> decoder_; | 200 std::unique_ptr<VideoDecoder> decoder_; |
| 203 EncodedImage encoded_frame_; | 201 EncodedImage encoded_frame_; |
| 204 VideoFrame decoded_frame_; | 202 VideoFrame decoded_frame_; |
| 205 size_t length_source_frame_; | |
| 206 VideoCodec codec_inst_; | 203 VideoCodec codec_inst_; |
| 207 }; | 204 }; |
| 208 | 205 |
| 209 TEST_F(TestVp8Impl, EncoderParameterTest) { | 206 TEST_F(TestVp8Impl, EncoderParameterTest) { |
| 210 strncpy(codec_inst_.plName, "VP8", 31); | 207 strncpy(codec_inst_.plName, "VP8", 31); |
| 211 codec_inst_.plType = 126; | 208 codec_inst_.plType = 126; |
| 212 codec_inst_.maxBitrate = 0; | 209 codec_inst_.maxBitrate = 0; |
| 213 codec_inst_.minBitrate = 0; | 210 codec_inst_.minBitrate = 0; |
| 214 codec_inst_.width = 1440; | 211 codec_inst_.width = 1440; |
| 215 codec_inst_.height = 1080; | 212 codec_inst_.height = 1080; |
| (...skipping 56 matching lines...) |
| 272 EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR, | 269 EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR, |
| 273 decoder_->Decode(encoded_frame_, false, NULL)); | 270 decoder_->Decode(encoded_frame_, false, NULL)); |
| 274 // Now setting a key frame. | 271 // Now setting a key frame. |
| 275 encoded_frame_._frameType = kVideoFrameKey; | 272 encoded_frame_._frameType = kVideoFrameKey; |
| 276 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, | 273 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, |
| 277 decoder_->Decode(encoded_frame_, false, NULL)); | 274 decoder_->Decode(encoded_frame_, false, NULL)); |
| 278 EXPECT_GT(I420PSNR(input_frame_.get(), &decoded_frame_), 36); | 275 EXPECT_GT(I420PSNR(input_frame_.get(), &decoded_frame_), 36); |
| 279 } | 276 } |
| 280 | 277 |
| 281 } // namespace webrtc | 278 } // namespace webrtc |
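
For reference, a minimal standalone sketch (not part of the CL) of the stride arithmetic the test asserts: for QCIF width 176 the 16-byte-aligned luma stride stays 176, while the chroma stride rounds up from 88 to 96, which is exactly what the `EXPECT_EQ(stride_y, 176)` / `EXPECT_EQ(stride_uv, 96)` checks in `SetUp()` encode. The helper body is copied from `Calc16ByteAlignedStride` in the diff; everything else here is illustration only.

```cpp
#include <cassert>

// Copied from the test's anonymous namespace: round the luma stride up to a
// multiple of 16 bytes, and the chroma stride up to 16 bytes of half-width.
void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) {
  *stride_y = 16 * ((width + 15) / 16);
  *stride_uv = 16 * ((width + 31) / 32);
}

int main() {
  int stride_y = 0;
  int stride_uv = 0;
  Calc16ByteAlignedStride(176, &stride_y, &stride_uv);
  // 176 is already a multiple of 16, so the luma stride is unchanged.
  assert(stride_y == 176);
  // Chroma width is 88; rounding up to the next multiple of 16 gives 96,
  // which is why the aligned u/v stride (96) exceeds the chroma width (88).
  assert(stride_uv == 96);
  return 0;
}
```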
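The core of the change, replacing the raw `fread` into `source_buffer_` plus `ConvertToI420` with `test::ReadI420Buffer` followed by `I420Buffer::ScaleFrom`, follows the pattern sketched below. This is a hedged illustration only: it assumes the same includes as the test file and the API signatures exactly as they appear in the NEW column (`ScaleFrom` accepting a buffer refptr, the `VideoFrame(buffer, rotation, timestamp_us)` constructor), and the helper name `ReadStridedFrame` is hypothetical, not something introduced by the CL.

```cpp
// Sketch only: read one tightly packed I420 frame and re-stride it by
// copying into a buffer with the aligned strides, as SetUp() now does.
std::unique_ptr<VideoFrame> ReadStridedFrame(FILE* file,
                                             int width, int height,
                                             int stride_y, int stride_uv,
                                             uint32_t rtp_timestamp) {
  // Compact buffer: strides equal to width (luma) and width/2 (chroma).
  rtc::scoped_refptr<VideoFrameBuffer> compact(
      test::ReadI420Buffer(width, height, file));
  if (!compact)
    return nullptr;
  // Destination buffer with the aligned strides; same dimensions, so
  // ScaleFrom() amounts to a plain copy that only changes the row pitch.
  rtc::scoped_refptr<I420Buffer> strided(
      I420Buffer::Create(width, height, stride_y, stride_uv, stride_uv));
  strided->ScaleFrom(compact);
  std::unique_ptr<VideoFrame> frame(
      new VideoFrame(strided, kVideoRotation_0, 0 /* timestamp_us */));
  frame->set_timestamp(rtp_timestamp);
  return frame;
}
```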