Index: webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
diff --git a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
index c4c600d67e328eb96f00cf7e68346eba07db9353..ece5b63f051695fe7a79b265ccf67cde210a40b4 100644
--- a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -19,6 +19,7 @@
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
 #include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
 #include "webrtc/test/frame_utils.h"
 #include "webrtc/test/gtest.h"
 #include "webrtc/test/testsupport/fileutils.h"
@@ -26,26 +27,31 @@
 namespace webrtc {
 
 namespace {
+
 void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) {
   *stride_y = 16 * ((width + 15) / 16);
   *stride_uv = 16 * ((width + 31) / 32);
 }
 
-}  // Anonymous namespace
-
 enum { kMaxWaitEncTimeMs = 100 };
 enum { kMaxWaitDecTimeMs = 25 };
 
-static const uint32_t kTestTimestamp = 123;
-static const int64_t kTestNtpTimeMs = 456;
+constexpr uint32_t kTestTimestamp = 123;
+constexpr int64_t kTestNtpTimeMs = 456;
+constexpr uint32_t kTimestampIncrementPerFrame = 3000;
+
+}  // namespace
 
 // TODO(mikhal): Replace these with mocks.
 class Vp8UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback {
  public:
   Vp8UnitTestEncodeCompleteCallback(EncodedImage* frame,
+                                    CodecSpecificInfo* codec_specific_info,
                                     unsigned int decoderSpecificSize,
                                     void* decoderSpecificInfo)
-      : encoded_frame_(frame), encode_complete_(false) {}
+      : encoded_frame_(frame),
+        codec_specific_info_(codec_specific_info),
+        encode_complete_(false) {}
 
   Result OnEncodedImage(const EncodedImage& encoded_frame_,
                         const CodecSpecificInfo* codec_specific_info,
@@ -54,6 +60,7 @@ class Vp8UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback {
 
  private:
   EncodedImage* const encoded_frame_;
+  CodecSpecificInfo* const codec_specific_info_;
   std::unique_ptr<uint8_t[]> frame_buffer_;
   bool encode_complete_;
 };
@@ -77,6 +84,9 @@ Vp8UnitTestEncodeCompleteCallback::OnEncodedImage(
   encoded_frame_->_frameType = encoded_frame._frameType;
   encoded_frame_->_completeFrame = encoded_frame._completeFrame;
   encoded_frame_->qp_ = encoded_frame.qp_;
+  codec_specific_info_->codecType = codec_specific_info->codecType;
+  // Skip |codec_name|, to avoid allocating.
+  codec_specific_info_->codecSpecific = codec_specific_info->codecSpecific;
   encode_complete_ = true;
   return Result(Result::OK, 0);
 }
@@ -135,9 +145,9 @@ class TestVp8Impl : public ::testing::Test {
   virtual void SetUp() {
     encoder_.reset(VP8Encoder::Create());
     decoder_.reset(VP8Decoder::Create());
-    memset(&codec_inst_, 0, sizeof(codec_inst_));
-    encode_complete_callback_.reset(
-        new Vp8UnitTestEncodeCompleteCallback(&encoded_frame_, 0, NULL));
+    memset(&codec_settings_, 0, sizeof(codec_settings_));
+    encode_complete_callback_.reset(new Vp8UnitTestEncodeCompleteCallback(
+        &encoded_frame_, &codec_specific_info_, 0, nullptr));
     decode_complete_callback_.reset(
         new Vp8UnitTestDecodeCompleteCallback(&decoded_frame_, &decoded_qp_));
     encoder_->RegisterEncodeCompleteCallback(encode_complete_callback_.get());
@@ -145,18 +155,18 @@ class TestVp8Impl : public ::testing::Test {
     // Using a QCIF image (aligned stride (u,v planes) > width).
     // Processing only one frame.
     source_file_ = fopen(test::ResourcePath("paris_qcif", "yuv").c_str(), "rb");
-    ASSERT_TRUE(source_file_ != NULL);
+    ASSERT_TRUE(source_file_ != nullptr);
     rtc::scoped_refptr<VideoFrameBuffer> compact_buffer(
         test::ReadI420Buffer(kWidth, kHeight, source_file_));
     ASSERT_TRUE(compact_buffer);
-    codec_inst_.width = kWidth;
-    codec_inst_.height = kHeight;
+    codec_settings_.width = kWidth;
+    codec_settings_.height = kHeight;
     const int kFramerate = 30;
-    codec_inst_.maxFramerate = kFramerate;
+    codec_settings_.maxFramerate = kFramerate;
     // Setting aligned stride values.
     int stride_uv;
     int stride_y;
-    Calc16ByteAlignedStride(codec_inst_.width, &stride_y, &stride_uv);
+    Calc16ByteAlignedStride(codec_settings_.width, &stride_y, &stride_uv);
     EXPECT_EQ(stride_y, 176);
     EXPECT_EQ(stride_uv, 96);
 
@@ -172,16 +182,16 @@ class TestVp8Impl : public ::testing::Test {
   }
 
   void SetUpEncodeDecode() {
-    codec_inst_.startBitrate = 300;
-    codec_inst_.maxBitrate = 4000;
-    codec_inst_.qpMax = 56;
-    codec_inst_.VP8()->denoisingOn = true;
-    codec_inst_.VP8()->tl_factory = &tl_factory_;
-    codec_inst_.VP8()->numberOfTemporalLayers = 1;
+    codec_settings_.startBitrate = 300;
+    codec_settings_.maxBitrate = 4000;
+    codec_settings_.qpMax = 56;
+    codec_settings_.VP8()->denoisingOn = true;
+    codec_settings_.VP8()->tl_factory = &tl_factory_;
+    codec_settings_.VP8()->numberOfTemporalLayers = 1;
 
     EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-              encoder_->InitEncode(&codec_inst_, 1, 1440));
-    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->InitDecode(&codec_inst_, 1));
+              encoder_->InitEncode(&codec_settings_, 1, 1440));
+    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->InitDecode(&codec_settings_, 1));
   }
 
   size_t WaitForEncodedFrame() const {
@@ -205,6 +215,15 @@ class TestVp8Impl : public ::testing::Test {
     return 0;
   }
 
+  void ExpectFrameWith(int16_t picture_id,
+                       int tl0_pic_idx,
+                       uint8_t temporal_idx) {
+    ASSERT_TRUE(WaitForEncodedFrame());
+    EXPECT_EQ(picture_id, codec_specific_info_.codecSpecific.VP8.pictureId);
+    EXPECT_EQ(tl0_pic_idx, codec_specific_info_.codecSpecific.VP8.tl0PicIdx);
+    EXPECT_EQ(temporal_idx, codec_specific_info_.codecSpecific.VP8.temporalIdx);
+  }
+
   const int kWidth = 172;
   const int kHeight = 144;
 
@@ -216,25 +235,26 @@ class TestVp8Impl : public ::testing::Test {
   std::unique_ptr<VideoEncoder> encoder_;
   std::unique_ptr<VideoDecoder> decoder_;
   EncodedImage encoded_frame_;
+  CodecSpecificInfo codec_specific_info_;
   rtc::Optional<VideoFrame> decoded_frame_;
   rtc::Optional<uint8_t> decoded_qp_;
-  VideoCodec codec_inst_;
+  VideoCodec codec_settings_;
   TemporalLayersFactory tl_factory_;
 };
 
 TEST_F(TestVp8Impl, EncoderParameterTest) {
-  strncpy(codec_inst_.plName, "VP8", 31);
-  codec_inst_.plType = 126;
-  codec_inst_.maxBitrate = 0;
-  codec_inst_.minBitrate = 0;
-  codec_inst_.width = 1440;
-  codec_inst_.height = 1080;
-  codec_inst_.maxFramerate = 30;
-  codec_inst_.startBitrate = 300;
-  codec_inst_.qpMax = 56;
-  codec_inst_.VP8()->complexity = kComplexityNormal;
-  codec_inst_.VP8()->numberOfTemporalLayers = 1;
-  codec_inst_.VP8()->tl_factory = &tl_factory_;
+  strncpy(codec_settings_.plName, "VP8", 31);
+  codec_settings_.plType = 126;
+  codec_settings_.maxBitrate = 0;
+  codec_settings_.minBitrate = 0;
+  codec_settings_.width = 1440;
+  codec_settings_.height = 1080;
+  codec_settings_.maxFramerate = 30;
+  codec_settings_.startBitrate = 300;
+  codec_settings_.qpMax = 56;
+  codec_settings_.VP8()->complexity = kComplexityNormal;
+  codec_settings_.VP8()->numberOfTemporalLayers = 1;
+  codec_settings_.VP8()->tl_factory = &tl_factory_;
   // Calls before InitEncode().
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
   int bit_rate = 300;
@@ -242,14 +262,15 @@ TEST_F(TestVp8Impl, EncoderParameterTest) {
   bitrate_allocation.SetBitrate(0, 0, bit_rate * 1000);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
             encoder_->SetRateAllocation(bitrate_allocation,
-                                        codec_inst_.maxFramerate));
+                                        codec_settings_.maxFramerate));
 
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->InitEncode(&codec_inst_, 1, 1440));
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->InitEncode(&codec_settings_, 1, 1440));
 
   // Decoder parameter tests.
   // Calls before InitDecode().
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release());
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->InitDecode(&codec_inst_, 1));
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->InitDecode(&codec_settings_, 1));
 }
 
 TEST_F(TestVp8Impl, DecodedQpEqualsEncodedQp) {
@@ -274,13 +295,13 @@ TEST_F(TestVp8Impl, DecodedQpEqualsEncodedQp) {
 #endif
 TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
   SetUpEncodeDecode();
-  encoder_->Encode(*input_frame_, NULL, NULL);
+  encoder_->Encode(*input_frame_, nullptr, nullptr);
   EXPECT_GT(WaitForEncodedFrame(), 0u);
   // First frame should be a key frame.
   encoded_frame_._frameType = kVideoFrameKey;
   encoded_frame_.ntp_time_ms_ = kTestNtpTimeMs;
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            decoder_->Decode(encoded_frame_, false, NULL));
+            decoder_->Decode(encoded_frame_, false, nullptr));
   EXPECT_GT(WaitForDecodedFrame(), 0u);
   ASSERT_TRUE(decoded_frame_);
   // Compute PSNR on all planes (faster than SSIM).
@@ -296,23 +317,99 @@ TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
 #endif
 TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
   SetUpEncodeDecode();
-  encoder_->Encode(*input_frame_, NULL, NULL);
+  encoder_->Encode(*input_frame_, nullptr, nullptr);
   EXPECT_GT(WaitForEncodedFrame(), 0u);
   // Setting complete to false -> should return an error.
   encoded_frame_._completeFrame = false;
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR,
-            decoder_->Decode(encoded_frame_, false, NULL));
+            decoder_->Decode(encoded_frame_, false, nullptr));
   // Setting complete back to true. Forcing a delta frame.
   encoded_frame_._frameType = kVideoFrameDelta;
   encoded_frame_._completeFrame = true;
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR,
-            decoder_->Decode(encoded_frame_, false, NULL));
+            decoder_->Decode(encoded_frame_, false, nullptr));
   // Now setting a key frame.
   encoded_frame_._frameType = kVideoFrameKey;
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            decoder_->Decode(encoded_frame_, false, NULL));
+            decoder_->Decode(encoded_frame_, false, nullptr));
   ASSERT_TRUE(decoded_frame_);
   EXPECT_GT(I420PSNR(input_frame_.get(), &*decoded_frame_), 36);
 }
 
+TEST_F(TestVp8Impl, EncoderRetainsRtpStateAfterRelease) {
+  SetUpEncodeDecode();
+  // Override default settings.
+  codec_settings_.VP8()->numberOfTemporalLayers = 2;
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->InitEncode(&codec_settings_, 1, 1440));
+
+  // Temporal layer 0.
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->Encode(*input_frame_, nullptr, nullptr));
+  ASSERT_TRUE(WaitForEncodedFrame());
+  EXPECT_EQ(0, codec_specific_info_.codecSpecific.VP8.temporalIdx);
+  int16_t picture_id = codec_specific_info_.codecSpecific.VP8.pictureId;
+  int tl0_pic_idx = codec_specific_info_.codecSpecific.VP8.tl0PicIdx;
+
+  // Temporal layer 1.
+  input_frame_->set_timestamp(input_frame_->timestamp() +
+                              kTimestampIncrementPerFrame);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->Encode(*input_frame_, nullptr, nullptr));
+  ExpectFrameWith((picture_id + 1) % (1 << 15), tl0_pic_idx, 1);
+
+  // Temporal layer 0.
+  input_frame_->set_timestamp(input_frame_->timestamp() +
+                              kTimestampIncrementPerFrame);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->Encode(*input_frame_, nullptr, nullptr));
+  ExpectFrameWith((picture_id + 2) % (1 << 15), (tl0_pic_idx + 1) % (1 << 8),
+                  0);
+
+  // Temporal layer 1.
+  input_frame_->set_timestamp(input_frame_->timestamp() +
+                              kTimestampIncrementPerFrame);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->Encode(*input_frame_, nullptr, nullptr));
+  ExpectFrameWith((picture_id + 3) % (1 << 15), (tl0_pic_idx + 1) % (1 << 8),
+                  1);
+
+  // Reinit.
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->InitEncode(&codec_settings_, 1, 1440));
+
+  // Temporal layer 0.
+  input_frame_->set_timestamp(input_frame_->timestamp() +
+                              kTimestampIncrementPerFrame);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->Encode(*input_frame_, nullptr, nullptr));
+  ExpectFrameWith((picture_id + 4) % (1 << 15), (tl0_pic_idx + 2) % (1 << 8),
+                  0);
+
+  // Temporal layer 1.
+  input_frame_->set_timestamp(input_frame_->timestamp() +
+                              kTimestampIncrementPerFrame);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->Encode(*input_frame_, nullptr, nullptr));
+  ExpectFrameWith((picture_id + 5) % (1 << 15), (tl0_pic_idx + 2) % (1 << 8),
+                  1);
+
+  // Temporal layer 0.
+  input_frame_->set_timestamp(input_frame_->timestamp() +
+                              kTimestampIncrementPerFrame);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->Encode(*input_frame_, nullptr, nullptr));
+  ExpectFrameWith((picture_id + 6) % (1 << 15), (tl0_pic_idx + 3) % (1 << 8),
+                  0);
+
+  // Temporal layer 1.
+  input_frame_->set_timestamp(input_frame_->timestamp() +
+                              kTimestampIncrementPerFrame);
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->Encode(*input_frame_, nullptr, nullptr));
+  ExpectFrameWith((picture_id + 7) % (1 << 15), (tl0_pic_idx + 3) % (1 << 8),
+                  1);
+}
+
 }  // namespace webrtc
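Note on the new EncoderRetainsRtpStateAfterRelease test: the values passed to ExpectFrameWith() follow from the VP8 payload descriptor field widths. pictureId is a 15-bit counter that increments once per encoded frame, and tl0PicIdx is an 8-bit counter that advances only when a temporal-layer-0 frame is produced, which is why the expectations wrap the running values modulo 1 << 15 and 1 << 8. The snippet below is a minimal illustration of that arithmetic only; it is not part of the patch, and the helper names are made up here.

// Hypothetical helpers (not in the WebRTC tree): they restate the wrap-around
// arithmetic used by the ExpectFrameWith() expectations in the patch above.
#include <cstdint>

uint16_t NextPictureId(uint16_t picture_id) {
  // pictureId is a 15-bit field, so it wraps back to 0 after 0x7FFF.
  return (picture_id + 1) % (1 << 15);
}

uint8_t NextTl0PicIdx(uint8_t tl0_pic_idx) {
  // tl0PicIdx is an 8-bit field and only advances on temporal-layer-0 frames.
  return (tl0_pic_idx + 1) % (1 << 8);
}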
|