Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include <stdio.h> | 11 #include <stdio.h> |
| 12 | 12 |
| 13 #include <memory> | 13 #include <memory> |
| 14 | 14 |
| 15 #include "webrtc/api/video/i420_buffer.h" | 15 #include "webrtc/api/video/i420_buffer.h" |
| 16 #include "webrtc/base/checks.h" | 16 #include "webrtc/base/checks.h" |
| 17 #include "webrtc/base/optional.h" | 17 #include "webrtc/base/optional.h" |
| 18 #include "webrtc/base/timeutils.h" | 18 #include "webrtc/base/timeutils.h" |
| 19 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" | 19 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
| 20 #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h" | 20 #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h" |
| 21 #include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h" | 21 #include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h" |
| 22 #include "webrtc/modules/video_coding/include/video_codec_interface.h" | |
| 22 #include "webrtc/test/frame_utils.h" | 23 #include "webrtc/test/frame_utils.h" |
| 23 #include "webrtc/test/gtest.h" | 24 #include "webrtc/test/gtest.h" |
| 24 #include "webrtc/test/testsupport/fileutils.h" | 25 #include "webrtc/test/testsupport/fileutils.h" |
| 25 | 26 |
| 26 namespace webrtc { | 27 namespace webrtc { |
| 27 | 28 |
| 28 namespace { | 29 namespace { |
| 30 | |
| 29 void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) { | 31 void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) { |
| 30 *stride_y = 16 * ((width + 15) / 16); | 32 *stride_y = 16 * ((width + 15) / 16); |
| 31 *stride_uv = 16 * ((width + 31) / 32); | 33 *stride_uv = 16 * ((width + 31) / 32); |
| 32 } | 34 } |
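
Note on the helper above (not part of the patch): `16 * ((width + 15) / 16)` rounds the luma width up to the next multiple of 16 using integer division, and the second expression does the same for the half-width chroma planes. A minimal standalone check, illustrative only, using the 172x144 frame this test feeds in later:

```cpp
#include <cassert>

// Round x up to the next multiple of 16 using integer division.
constexpr int Align16(int x) { return 16 * ((x + 15) / 16); }

int main() {
  // For the 172x144 input configured in SetUp(), the aligned strides exceed
  // the plane widths (luma: 176 > 172, chroma: 96 > 86), which is the
  // "aligned stride > width" condition the test comment relies on.
  assert(Align16(172) == 176);           // *stride_y
  assert(16 * ((172 + 31) / 32) == 96);  // *stride_uv for width / 2 == 86
  return 0;
}
```
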
| 33 | 35 |
| 34 } // Anonymous namespace | |
| 35 | |
| 36 enum { kMaxWaitEncTimeMs = 100 }; | 36 enum { kMaxWaitEncTimeMs = 100 }; |
| 37 enum { kMaxWaitDecTimeMs = 25 }; | 37 enum { kMaxWaitDecTimeMs = 25 }; |
| 38 | 38 |
| 39 static const uint32_t kTestTimestamp = 123; | 39 constexpr uint32_t kTestTimestamp = 123; |
| 40 static const int64_t kTestNtpTimeMs = 456; | 40 constexpr int64_t kTestNtpTimeMs = 456; |
| 41 constexpr uint32_t kTimestampIncrementPerFrame = 3000; | |
| 42 | |
| 43 } // namespace | |
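
For context on the new `kTimestampIncrementPerFrame` constant: RTP video timestamps run on a 90 kHz clock, so at the 30 fps configured in `SetUp()` each frame advances the timestamp by 90000 / 30 = 3000 ticks. A one-line sanity check (illustrative):

```cpp
// 90 kHz RTP media clock at 30 fps => 3000 timestamp ticks per frame.
static_assert(90000 / 30 == 3000, "timestamp step per frame at 30 fps");
```
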
| 41 | 44 |
| 42 // TODO(mikhal): Replace these with mocks. | 45 // TODO(mikhal): Replace these with mocks. |
| 43 class Vp8UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback { | 46 class Vp8UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback { |
| 44 public: | 47 public: |
| 45 Vp8UnitTestEncodeCompleteCallback(EncodedImage* frame, | 48 Vp8UnitTestEncodeCompleteCallback(EncodedImage* frame, |
| 49 CodecSpecificInfo* codec_specific_info, | |
| 46 unsigned int decoderSpecificSize, | 50 unsigned int decoderSpecificSize, |
| 47 void* decoderSpecificInfo) | 51 void* decoderSpecificInfo) |
| 48 : encoded_frame_(frame), encode_complete_(false) {} | 52 : encoded_frame_(frame), |
| 53 codec_specific_info_(codec_specific_info), | |
| 54 encode_complete_(false) {} | |
| 49 | 55 |
| 50 Result OnEncodedImage(const EncodedImage& encoded_frame_, | 56 Result OnEncodedImage(const EncodedImage& encoded_frame_, |
| 51 const CodecSpecificInfo* codec_specific_info, | 57 const CodecSpecificInfo* codec_specific_info, |
| 52 const RTPFragmentationHeader* fragmentation) override; | 58 const RTPFragmentationHeader* fragmentation) override; |
| 53 bool EncodeComplete(); | 59 bool EncodeComplete(); |
| 54 | 60 |
| 55 private: | 61 private: |
| 56 EncodedImage* const encoded_frame_; | 62 EncodedImage* const encoded_frame_; |
| 63 CodecSpecificInfo* const codec_specific_info_; | |
| 57 std::unique_ptr<uint8_t[]> frame_buffer_; | 64 std::unique_ptr<uint8_t[]> frame_buffer_; |
| 58 bool encode_complete_; | 65 bool encode_complete_; |
| 59 }; | 66 }; |
| 60 | 67 |
| 61 webrtc::EncodedImageCallback::Result | 68 webrtc::EncodedImageCallback::Result |
| 62 Vp8UnitTestEncodeCompleteCallback::OnEncodedImage( | 69 Vp8UnitTestEncodeCompleteCallback::OnEncodedImage( |
| 63 const EncodedImage& encoded_frame, | 70 const EncodedImage& encoded_frame, |
| 64 const CodecSpecificInfo* codec_specific_info, | 71 const CodecSpecificInfo* codec_specific_info, |
| 65 const RTPFragmentationHeader* fragmentation) { | 72 const RTPFragmentationHeader* fragmentation) { |
| 66 if (encoded_frame_->_size < encoded_frame._length) { | 73 if (encoded_frame_->_size < encoded_frame._length) { |
| 67 delete[] encoded_frame_->_buffer; | 74 delete[] encoded_frame_->_buffer; |
| 68 frame_buffer_.reset(new uint8_t[encoded_frame._length]); | 75 frame_buffer_.reset(new uint8_t[encoded_frame._length]); |
| 69 encoded_frame_->_buffer = frame_buffer_.get(); | 76 encoded_frame_->_buffer = frame_buffer_.get(); |
| 70 encoded_frame_->_size = encoded_frame._length; | 77 encoded_frame_->_size = encoded_frame._length; |
| 71 } | 78 } |
| 72 memcpy(encoded_frame_->_buffer, encoded_frame._buffer, encoded_frame._length); | 79 memcpy(encoded_frame_->_buffer, encoded_frame._buffer, encoded_frame._length); |
| 73 encoded_frame_->_length = encoded_frame._length; | 80 encoded_frame_->_length = encoded_frame._length; |
| 74 encoded_frame_->_encodedWidth = encoded_frame._encodedWidth; | 81 encoded_frame_->_encodedWidth = encoded_frame._encodedWidth; |
| 75 encoded_frame_->_encodedHeight = encoded_frame._encodedHeight; | 82 encoded_frame_->_encodedHeight = encoded_frame._encodedHeight; |
| 76 encoded_frame_->_timeStamp = encoded_frame._timeStamp; | 83 encoded_frame_->_timeStamp = encoded_frame._timeStamp; |
| 77 encoded_frame_->_frameType = encoded_frame._frameType; | 84 encoded_frame_->_frameType = encoded_frame._frameType; |
| 78 encoded_frame_->_completeFrame = encoded_frame._completeFrame; | 85 encoded_frame_->_completeFrame = encoded_frame._completeFrame; |
| 79 encoded_frame_->qp_ = encoded_frame.qp_; | 86 encoded_frame_->qp_ = encoded_frame.qp_; |
| 87 codec_specific_info_->codecType = codec_specific_info->codecType; | |
| 88 // Skip |codec_name|, to avoid allocating. | |
| 89 codec_specific_info_->codecSpecific = codec_specific_info->codecSpecific; | |
| 80 encode_complete_ = true; | 90 encode_complete_ = true; |
| 81 return Result(Result::OK, 0); | 91 return Result(Result::OK, 0); |
| 82 } | 92 } |
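
The callback above copies the encoded payload into caller-owned storage, growing the buffer only when the incoming frame is larger than the current capacity. A minimal sketch of that grow-and-copy idiom, with illustrative names and a `std::unique_ptr`-owned buffer similar to the one in the test class (not the production API):

```cpp
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <memory>

// Illustrative only: keep a reusable destination buffer, reallocate it only
// when the incoming payload no longer fits, then copy the bytes over.
struct OwnedBuffer {
  std::unique_ptr<uint8_t[]> data;
  size_t capacity = 0;
  size_t length = 0;
};

void CopyPayload(const uint8_t* payload, size_t payload_len, OwnedBuffer* dst) {
  if (dst->capacity < payload_len) {
    dst->data.reset(new uint8_t[payload_len]);  // reset() frees the old block
    dst->capacity = payload_len;
  }
  std::memcpy(dst->data.get(), payload, payload_len);
  dst->length = payload_len;
}
```
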
| 83 | 93 |
| 84 bool Vp8UnitTestEncodeCompleteCallback::EncodeComplete() { | 94 bool Vp8UnitTestEncodeCompleteCallback::EncodeComplete() { |
| 85 if (encode_complete_) { | 95 if (encode_complete_) { |
| 86 encode_complete_ = false; | 96 encode_complete_ = false; |
| 87 return true; | 97 return true; |
| 88 } | 98 } |
| 89 return false; | 99 return false; |
| (...skipping 39 matching lines...) | |
| 129 *decoded_qp_ = qp; | 139 *decoded_qp_ = qp; |
| 130 decode_complete = true; | 140 decode_complete = true; |
| 131 } | 141 } |
| 132 | 142 |
| 133 class TestVp8Impl : public ::testing::Test { | 143 class TestVp8Impl : public ::testing::Test { |
| 134 protected: | 144 protected: |
| 135 virtual void SetUp() { | 145 virtual void SetUp() { |
| 136 encoder_.reset(VP8Encoder::Create()); | 146 encoder_.reset(VP8Encoder::Create()); |
| 137 decoder_.reset(VP8Decoder::Create()); | 147 decoder_.reset(VP8Decoder::Create()); |
| 138 memset(&codec_inst_, 0, sizeof(codec_inst_)); | 148 memset(&codec_inst_, 0, sizeof(codec_inst_)); |
| 139 encode_complete_callback_.reset( | 149 encode_complete_callback_.reset(new Vp8UnitTestEncodeCompleteCallback( |
| 140 new Vp8UnitTestEncodeCompleteCallback(&encoded_frame_, 0, NULL)); | 150 &encoded_frame_, &codec_specific_info_, 0, nullptr)); |
| 141 decode_complete_callback_.reset( | 151 decode_complete_callback_.reset( |
| 142 new Vp8UnitTestDecodeCompleteCallback(&decoded_frame_, &decoded_qp_)); | 152 new Vp8UnitTestDecodeCompleteCallback(&decoded_frame_, &decoded_qp_)); |
| 143 encoder_->RegisterEncodeCompleteCallback(encode_complete_callback_.get()); | 153 encoder_->RegisterEncodeCompleteCallback(encode_complete_callback_.get()); |
| 144 decoder_->RegisterDecodeCompleteCallback(decode_complete_callback_.get()); | 154 decoder_->RegisterDecodeCompleteCallback(decode_complete_callback_.get()); |
| 145 // Using a QCIF image (aligned stride (u,v planes) > width). | 155 // Using a QCIF image (aligned stride (u,v planes) > width). |
| 146 // Processing only one frame. | 156 // Processing only one frame. |
| 147 source_file_ = fopen(test::ResourcePath("paris_qcif", "yuv").c_str(), "rb"); | 157 source_file_ = fopen(test::ResourcePath("paris_qcif", "yuv").c_str(), "rb"); |
| 148 ASSERT_TRUE(source_file_ != NULL); | 158 ASSERT_TRUE(source_file_ != nullptr); |
| 149 rtc::scoped_refptr<VideoFrameBuffer> compact_buffer( | 159 rtc::scoped_refptr<VideoFrameBuffer> compact_buffer( |
| 150 test::ReadI420Buffer(kWidth, kHeight, source_file_)); | 160 test::ReadI420Buffer(kWidth, kHeight, source_file_)); |
| 151 ASSERT_TRUE(compact_buffer); | 161 ASSERT_TRUE(compact_buffer); |
| 152 codec_inst_.width = kWidth; | 162 codec_inst_.width = kWidth; |
| 153 codec_inst_.height = kHeight; | 163 codec_inst_.height = kHeight; |
| 154 const int kFramerate = 30; | 164 const int kFramerate = 30; |
| 155 codec_inst_.maxFramerate = kFramerate; | 165 codec_inst_.maxFramerate = kFramerate; |
| 156 // Setting aligned stride values. | 166 // Setting aligned stride values. |
| 157 int stride_uv; | 167 int stride_uv; |
| 158 int stride_y; | 168 int stride_y; |
| (...skipping 39 matching lines...) | |
| 198 int64_t startTime = rtc::TimeMillis(); | 208 int64_t startTime = rtc::TimeMillis(); |
| 199 while (rtc::TimeMillis() - startTime < kMaxWaitDecTimeMs) { | 209 while (rtc::TimeMillis() - startTime < kMaxWaitDecTimeMs) { |
| 200 if (decode_complete_callback_->DecodeComplete()) { | 210 if (decode_complete_callback_->DecodeComplete()) { |
| 201 return CalcBufferSize(kI420, decoded_frame_->width(), | 211 return CalcBufferSize(kI420, decoded_frame_->width(), |
| 202 decoded_frame_->height()); | 212 decoded_frame_->height()); |
| 203 } | 213 } |
| 204 } | 214 } |
| 205 return 0; | 215 return 0; |
| 206 } | 216 } |
| 207 | 217 |
| 218 void ExpectFrame(int16_t picture_id, int tl0_pic_idx, uint8_t temporal_idx) { | |

stefan-webrtc 2017/05/02 07:21:50: ExpectFrameWith() might be a better name
brandtr 2017/05/02 07:48:50: Done.

| 219 ASSERT_TRUE(WaitForEncodedFrame()); | |
| 220 EXPECT_EQ(picture_id, codec_specific_info_.codecSpecific.VP8.pictureId); | |
| 221 EXPECT_EQ(tl0_pic_idx, codec_specific_info_.codecSpecific.VP8.tl0PicIdx); | |
| 222 EXPECT_EQ(temporal_idx, codec_specific_info_.codecSpecific.VP8.temporalIdx); | |
| 223 } | |
| 224 | |
| 208 const int kWidth = 172; | 225 const int kWidth = 172; |
| 209 const int kHeight = 144; | 226 const int kHeight = 144; |
| 210 | 227 |
| 211 std::unique_ptr<Vp8UnitTestEncodeCompleteCallback> encode_complete_callback_; | 228 std::unique_ptr<Vp8UnitTestEncodeCompleteCallback> encode_complete_callback_; |
| 212 std::unique_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_; | 229 std::unique_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_; |
| 213 std::unique_ptr<uint8_t[]> source_buffer_; | 230 std::unique_ptr<uint8_t[]> source_buffer_; |
| 214 FILE* source_file_; | 231 FILE* source_file_; |
| 215 std::unique_ptr<VideoFrame> input_frame_; | 232 std::unique_ptr<VideoFrame> input_frame_; |
| 216 std::unique_ptr<VideoEncoder> encoder_; | 233 std::unique_ptr<VideoEncoder> encoder_; |
| 217 std::unique_ptr<VideoDecoder> decoder_; | 234 std::unique_ptr<VideoDecoder> decoder_; |
| 218 EncodedImage encoded_frame_; | 235 EncodedImage encoded_frame_; |
| 236 CodecSpecificInfo codec_specific_info_; | |
| 219 rtc::Optional<VideoFrame> decoded_frame_; | 237 rtc::Optional<VideoFrame> decoded_frame_; |
| 220 rtc::Optional<uint8_t> decoded_qp_; | 238 rtc::Optional<uint8_t> decoded_qp_; |
| 221 VideoCodec codec_inst_; | 239 VideoCodec codec_inst_; |
| 222 TemporalLayersFactory tl_factory_; | 240 TemporalLayersFactory tl_factory_; |
| 223 }; | 241 }; |
| 224 | 242 |
| 225 TEST_F(TestVp8Impl, EncoderParameterTest) { | 243 TEST_F(TestVp8Impl, EncoderParameterTest) { |
| 226 strncpy(codec_inst_.plName, "VP8", 31); | 244 strncpy(codec_inst_.plName, "VP8", 31); |
| 227 codec_inst_.plType = 126; | 245 codec_inst_.plType = 126; |
| 228 codec_inst_.maxBitrate = 0; | 246 codec_inst_.maxBitrate = 0; |
| (...skipping 38 matching lines...) | |
| 267 EXPECT_EQ(encoded_frame_.qp_, *decoded_qp_); | 285 EXPECT_EQ(encoded_frame_.qp_, *decoded_qp_); |
| 268 } | 286 } |
| 269 | 287 |
| 270 #if defined(WEBRTC_ANDROID) | 288 #if defined(WEBRTC_ANDROID) |
| 271 #define MAYBE_AlignedStrideEncodeDecode DISABLED_AlignedStrideEncodeDecode | 289 #define MAYBE_AlignedStrideEncodeDecode DISABLED_AlignedStrideEncodeDecode |
| 272 #else | 290 #else |
| 273 #define MAYBE_AlignedStrideEncodeDecode AlignedStrideEncodeDecode | 291 #define MAYBE_AlignedStrideEncodeDecode AlignedStrideEncodeDecode |
| 274 #endif | 292 #endif |
| 275 TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) { | 293 TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) { |
| 276 SetUpEncodeDecode(); | 294 SetUpEncodeDecode(); |
| 277 encoder_->Encode(*input_frame_, NULL, NULL); | 295 encoder_->Encode(*input_frame_, nullptr, nullptr); |
| 278 EXPECT_GT(WaitForEncodedFrame(), 0u); | 296 EXPECT_GT(WaitForEncodedFrame(), 0u); |
| 279 // First frame should be a key frame. | 297 // First frame should be a key frame. |
| 280 encoded_frame_._frameType = kVideoFrameKey; | 298 encoded_frame_._frameType = kVideoFrameKey; |
| 281 encoded_frame_.ntp_time_ms_ = kTestNtpTimeMs; | 299 encoded_frame_.ntp_time_ms_ = kTestNtpTimeMs; |
| 282 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, | 300 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, |
| 283 decoder_->Decode(encoded_frame_, false, NULL)); | 301 decoder_->Decode(encoded_frame_, false, nullptr)); |
| 284 EXPECT_GT(WaitForDecodedFrame(), 0u); | 302 EXPECT_GT(WaitForDecodedFrame(), 0u); |
| 285 ASSERT_TRUE(decoded_frame_); | 303 ASSERT_TRUE(decoded_frame_); |
| 286 // Compute PSNR on all planes (faster than SSIM). | 304 // Compute PSNR on all planes (faster than SSIM). |
| 287 EXPECT_GT(I420PSNR(input_frame_.get(), &*decoded_frame_), 36); | 305 EXPECT_GT(I420PSNR(input_frame_.get(), &*decoded_frame_), 36); |
| 288 EXPECT_EQ(kTestTimestamp, decoded_frame_->timestamp()); | 306 EXPECT_EQ(kTestTimestamp, decoded_frame_->timestamp()); |
| 289 EXPECT_EQ(kTestNtpTimeMs, decoded_frame_->ntp_time_ms()); | 307 EXPECT_EQ(kTestNtpTimeMs, decoded_frame_->ntp_time_ms()); |
| 290 } | 308 } |
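
On the 36 dB threshold used here and below: for 8-bit samples, PSNR = 10 * log10(255^2 / MSE), so 36 dB corresponds to a mean squared error of roughly 16 per pixel, i.e. an RMS deviation of about 4 levels. A quick check of that arithmetic (illustrative only):

```cpp
#include <cmath>
#include <cstdio>

int main() {
  // PSNR (dB) for 8-bit samples: 10 * log10(255^2 / MSE).
  // Solving for MSE at the 36 dB threshold used in the tests:
  const double mse_at_36db = 255.0 * 255.0 / std::pow(10.0, 36.0 / 10.0);
  std::printf("MSE at 36 dB: %.1f (~%.1f levels RMS)\n", mse_at_36db,
              std::sqrt(mse_at_36db));  // ~16.3 and ~4.0
  return 0;
}
```
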
| 291 | 309 |
| 292 #if defined(WEBRTC_ANDROID) | 310 #if defined(WEBRTC_ANDROID) |
| 293 #define MAYBE_DecodeWithACompleteKeyFrame DISABLED_DecodeWithACompleteKeyFrame | 311 #define MAYBE_DecodeWithACompleteKeyFrame DISABLED_DecodeWithACompleteKeyFrame |
| 294 #else | 312 #else |
| 295 #define MAYBE_DecodeWithACompleteKeyFrame DecodeWithACompleteKeyFrame | 313 #define MAYBE_DecodeWithACompleteKeyFrame DecodeWithACompleteKeyFrame |
| 296 #endif | 314 #endif |
| 297 TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) { | 315 TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) { |
| 298 SetUpEncodeDecode(); | 316 SetUpEncodeDecode(); |
| 299 encoder_->Encode(*input_frame_, NULL, NULL); | 317 encoder_->Encode(*input_frame_, nullptr, nullptr); |
| 300 EXPECT_GT(WaitForEncodedFrame(), 0u); | 318 EXPECT_GT(WaitForEncodedFrame(), 0u); |
| 301 // Setting complete to false -> should return an error. | 319 // Setting complete to false -> should return an error. |
| 302 encoded_frame_._completeFrame = false; | 320 encoded_frame_._completeFrame = false; |
| 303 EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR, | 321 EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR, |
| 304 decoder_->Decode(encoded_frame_, false, NULL)); | 322 decoder_->Decode(encoded_frame_, false, nullptr)); |
| 305 // Setting complete back to true. Forcing a delta frame. | 323 // Setting complete back to true. Forcing a delta frame. |
| 306 encoded_frame_._frameType = kVideoFrameDelta; | 324 encoded_frame_._frameType = kVideoFrameDelta; |
| 307 encoded_frame_._completeFrame = true; | 325 encoded_frame_._completeFrame = true; |
| 308 EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR, | 326 EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR, |
| 309 decoder_->Decode(encoded_frame_, false, NULL)); | 327 decoder_->Decode(encoded_frame_, false, nullptr)); |
| 310 // Now setting a key frame. | 328 // Now setting a key frame. |
| 311 encoded_frame_._frameType = kVideoFrameKey; | 329 encoded_frame_._frameType = kVideoFrameKey; |
| 312 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, | 330 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, |
| 313 decoder_->Decode(encoded_frame_, false, NULL)); | 331 decoder_->Decode(encoded_frame_, false, nullptr)); |
| 314 ASSERT_TRUE(decoded_frame_); | 332 ASSERT_TRUE(decoded_frame_); |
| 315 EXPECT_GT(I420PSNR(input_frame_.get(), &*decoded_frame_), 36); | 333 EXPECT_GT(I420PSNR(input_frame_.get(), &*decoded_frame_), 36); |
| 316 } | 334 } |
| 317 | 335 |
| 336 TEST_F(TestVp8Impl, EncoderRetainsRtpStateAfterRelease) { | |
| 337 SetUpEncodeDecode(); | |
| 338 // Override default settings. | |
| 339 codec_inst_.VP8()->numberOfTemporalLayers = 2; | |
| 340 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->InitEncode(&codec_inst_, 1, 1440)); | |
| 341 | |
| 342 // Temporal layer 0. | |
| 343 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, | |
| 344 encoder_->Encode(*input_frame_, nullptr, nullptr)); | |
| 345 ASSERT_TRUE(WaitForEncodedFrame()); | |
| 346 EXPECT_EQ(0, codec_specific_info_.codecSpecific.VP8.temporalIdx); | |
| 347 int16_t picture_id = codec_specific_info_.codecSpecific.VP8.pictureId; | |
| 348 int tl0_pic_idx = codec_specific_info_.codecSpecific.VP8.tl0PicIdx; | |
| 349 | |
| 350 // Temporal layer 1. | |
| 351 input_frame_->set_timestamp(input_frame_->timestamp() + | |
| 352 kTimestampIncrementPerFrame); | |
| 353 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, | |
| 354 encoder_->Encode(*input_frame_, nullptr, nullptr)); | |
| 355 ExpectFrame((picture_id + 1) % (1 << 15), tl0_pic_idx, 1); | |
| 356 | |
| 357 // Temporal layer 0. | |
| 358 input_frame_->set_timestamp(input_frame_->timestamp() + | |
| 359 kTimestampIncrementPerFrame); | |
| 360 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, | |
| 361 encoder_->Encode(*input_frame_, nullptr, nullptr)); | |
| 362 ExpectFrame((picture_id + 2) % (1 << 15), (tl0_pic_idx + 1) % (1 << 8), 0); | |
| 363 | |
| 364 // Temporal layer 1. | |
| 365 input_frame_->set_timestamp(input_frame_->timestamp() + | |
| 366 kTimestampIncrementPerFrame); | |
| 367 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, | |
| 368 encoder_->Encode(*input_frame_, nullptr, nullptr)); | |
| 369 ExpectFrame((picture_id + 3) % (1 << 15), (tl0_pic_idx + 1) % (1 << 8), 1); | |
| 370 | |
| 371 // Reinit. | |
| 372 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); | |
| 373 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->InitEncode(&codec_inst_, 1, 1440)); | |
| 374 | |
| 375 // Temporal layer 0. | |
| 376 input_frame_->set_timestamp(input_frame_->timestamp() + | |
| 377 kTimestampIncrementPerFrame); | |
| 378 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, | |
| 379 encoder_->Encode(*input_frame_, nullptr, nullptr)); | |
| 380 ExpectFrame((picture_id + 4) % (1 << 15), (tl0_pic_idx + 2) % (1 << 8), 0); | |
| 381 | |
| 382 // Temporal layer 1. | |
| 383 input_frame_->set_timestamp(input_frame_->timestamp() + | |
| 384 kTimestampIncrementPerFrame); | |
| 385 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, | |
| 386 encoder_->Encode(*input_frame_, nullptr, nullptr)); | |
| 387 ExpectFrame((picture_id + 5) % (1 << 15), (tl0_pic_idx + 2) % (1 << 8), 1); | |
| 388 | |
| 389 // Temporal layer 0. | |
| 390 input_frame_->set_timestamp(input_frame_->timestamp() + | |
| 391 kTimestampIncrementPerFrame); | |
| 392 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, | |
| 393 encoder_->Encode(*input_frame_, nullptr, nullptr)); | |
| 394 ExpectFrame((picture_id + 6) % (1 << 15), (tl0_pic_idx + 3) % (1 << 8), 0); | |
| 395 | |
| 396 // Temporal layer 1. | |
| 397 input_frame_->set_timestamp(input_frame_->timestamp() + | |
| 398 kTimestampIncrementPerFrame); | |
| 399 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, | |
| 400 encoder_->Encode(*input_frame_, nullptr, nullptr)); | |
| 401 ExpectFrame((picture_id + 7) % (1 << 15), (tl0_pic_idx + 3) % (1 << 8), 1); | |
| 402 } | |
| 403 | |
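
The modular expressions in the expectations above reflect the field widths of the VP8 RTP payload descriptor as I understand it: `pictureId` is carried in 15 bits and `tl0PicIdx` in 8 bits, so the expected values wrap at `1 << 15` and `1 << 8`. The point of the test is that this state survives `Release()` followed by `InitEncode()`. A small sketch of the same bookkeeping, with hypothetical helper names:

```cpp
#include <cstdint>

// Illustrative helpers mirroring the expectations in the test above:
// pictureId advances once per encoded frame modulo 2^15, and tl0PicIdx
// advances once per temporal-layer-0 frame modulo 2^8.
constexpr int16_t NextPictureId(int16_t picture_id, int frames_ahead) {
  return static_cast<int16_t>((picture_id + frames_ahead) % (1 << 15));
}

constexpr int NextTl0PicIdx(int tl0_pic_idx, int tl0_frames_ahead) {
  return (tl0_pic_idx + tl0_frames_ahead) % (1 << 8);
}

static_assert(NextPictureId(32767, 1) == 0, "pictureId wraps at 2^15");
static_assert(NextTl0PicIdx(255, 1) == 0, "tl0PicIdx wraps at 2^8");
```
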
| 318 } // namespace webrtc | 404 } // namespace webrtc |