Chromium Code Reviews

Side by Side Diff: webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc

Issue 3005533003: Add some unit tests to TestVp8Impl. (Closed)
Patch Set: remove WaitForDecodedFrame Created 3 years, 3 months ago
1 /* 1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
(...skipping 11 matching lines...)
22 #include "webrtc/rtc_base/timeutils.h" 22 #include "webrtc/rtc_base/timeutils.h"
23 #include "webrtc/test/field_trial.h" 23 #include "webrtc/test/field_trial.h"
24 #include "webrtc/test/frame_utils.h" 24 #include "webrtc/test/frame_utils.h"
25 #include "webrtc/test/gtest.h" 25 #include "webrtc/test/gtest.h"
26 #include "webrtc/test/testsupport/fileutils.h" 26 #include "webrtc/test/testsupport/fileutils.h"
27 #include "webrtc/test/video_codec_settings.h" 27 #include "webrtc/test/video_codec_settings.h"
28 28
29 namespace webrtc { 29 namespace webrtc {
30 30
31 namespace { 31 namespace {
32 constexpr int64_t kMaxWaitEncTimeMs = 100; 32 constexpr uint32_t kInitialTimestampRtp = 123;
33 constexpr int64_t kMaxWaitDecTimeMs = 25;
34 constexpr uint32_t kTestTimestamp = 123;
35 constexpr int64_t kTestNtpTimeMs = 456; 33 constexpr int64_t kTestNtpTimeMs = 456;
36 constexpr uint32_t kTimestampIncrementPerFrame = 3000; 34 constexpr int64_t kInitialTimestampMs = 789;
35 constexpr uint32_t kTimestampIncrement = 3000;
37 constexpr int kNumCores = 1; 36 constexpr int kNumCores = 1;
38 constexpr size_t kMaxPayloadSize = 1440; 37 constexpr size_t kMaxPayloadSize = 1440;
39 constexpr int kMinPixelsPerFrame = 12345; 38 constexpr int kMinPixelsPerFrame = 12345;
40 constexpr int kDefaultMinPixelsPerFrame = 320 * 180; 39 constexpr int kDefaultMinPixelsPerFrame = 320 * 180;
41 constexpr int kWidth = 172; 40 constexpr int kWidth = 172;
42 constexpr int kHeight = 144; 41 constexpr int kHeight = 144;
43 42
44 void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) { 43 void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) {
45 *stride_y = 16 * ((width + 15) / 16); 44 *stride_y = 16 * ((width + 15) / 16);
46 *stride_uv = 16 * ((width + 31) / 32); 45 *stride_uv = 16 * ((width + 31) / 32);
(...skipping 13 matching lines...)
60 frame_buffer_.reset(new uint8_t[encoded_frame._size]); 59 frame_buffer_.reset(new uint8_t[encoded_frame._size]);
61 } 60 }
62 RTC_DCHECK(frame_buffer_); 61 RTC_DCHECK(frame_buffer_);
63 memcpy(frame_buffer_.get(), encoded_frame._buffer, encoded_frame._length); 62 memcpy(frame_buffer_.get(), encoded_frame._buffer, encoded_frame._length);
64 encoded_frame_ = encoded_frame; 63 encoded_frame_ = encoded_frame;
65 encoded_frame_._buffer = frame_buffer_.get(); 64 encoded_frame_._buffer = frame_buffer_.get();
66 65
67 // Skip |codec_name|, to avoid allocating. 66 // Skip |codec_name|, to avoid allocating.
68 EXPECT_STREQ("libvpx", codec_specific_info->codec_name); 67 EXPECT_STREQ("libvpx", codec_specific_info->codec_name);
69 EXPECT_EQ(kVideoCodecVP8, codec_specific_info->codecType); 68 EXPECT_EQ(kVideoCodecVP8, codec_specific_info->codecType);
69 EXPECT_EQ(0u, codec_specific_info->codecSpecific.VP8.simulcastIdx);
70 codec_specific_info_.codecType = codec_specific_info->codecType; 70 codec_specific_info_.codecType = codec_specific_info->codecType;
71 codec_specific_info_.codecSpecific = codec_specific_info->codecSpecific; 71 codec_specific_info_.codecSpecific = codec_specific_info->codecSpecific;
72 complete_ = true; 72 complete_ = true;
73 return Result(Result::OK, 0); 73 return Result(Result::OK, 0);
74 } 74 }
75 75
76 void VerifyQpParser(const EncodedImage& encoded_frame) const { 76 void VerifyQpParser(const EncodedImage& encoded_frame) const {
77 int qp; 77 int qp;
78 ASSERT_TRUE(vp8::GetQp(encoded_frame._buffer, encoded_frame._length, &qp)); 78 ASSERT_TRUE(vp8::GetQp(encoded_frame._buffer, encoded_frame._length, &qp));
79 EXPECT_EQ(encoded_frame.qp_, qp) << "Encoder QP != parsed bitstream QP."; 79 EXPECT_EQ(encoded_frame.qp_, qp) << "Encoder QP != parsed bitstream QP.";
(...skipping 78 matching lines...)
158 int stride_y; 158 int stride_y;
159 Calc16ByteAlignedStride(kWidth, &stride_y, &stride_uv); 159 Calc16ByteAlignedStride(kWidth, &stride_y, &stride_uv);
160 EXPECT_EQ(stride_y, 176); 160 EXPECT_EQ(stride_y, 176);
161 EXPECT_EQ(stride_uv, 96); 161 EXPECT_EQ(stride_uv, 96);
162 rtc::scoped_refptr<I420Buffer> stride_buffer( 162 rtc::scoped_refptr<I420Buffer> stride_buffer(
163 I420Buffer::Create(kWidth, kHeight, stride_y, stride_uv, stride_uv)); 163 I420Buffer::Create(kWidth, kHeight, stride_y, stride_uv, stride_uv));
164 164
165 // No scaling in our case, just a copy, to add stride to the image. 165 // No scaling in our case, just a copy, to add stride to the image.
166 stride_buffer->ScaleFrom(*compact_buffer); 166 stride_buffer->ScaleFrom(*compact_buffer);
167 167
168 input_frame_.reset(new VideoFrame(stride_buffer, kVideoRotation_0, 0)); 168 input_frame_.reset(new VideoFrame(stride_buffer, kInitialTimestampRtp,
169 input_frame_->set_timestamp(kTestTimestamp); 169 kInitialTimestampMs, kVideoRotation_0));
170 fclose(file); 170 fclose(file);
171 } 171 }
172 172
173 void SetupCodecSettings() { 173 void SetupCodecSettings() {
174 webrtc::test::CodecSettings(kVideoCodecVP8, &codec_settings_); 174 webrtc::test::CodecSettings(kVideoCodecVP8, &codec_settings_);
175 codec_settings_.maxBitrate = 4000; 175 codec_settings_.maxBitrate = 4000;
176 codec_settings_.width = kWidth; 176 codec_settings_.width = kWidth;
177 codec_settings_.height = kHeight; 177 codec_settings_.height = kHeight;
178 codec_settings_.VP8()->denoisingOn = true; 178 codec_settings_.VP8()->denoisingOn = true;
179 codec_settings_.VP8()->frameDroppingOn = false; 179 codec_settings_.VP8()->frameDroppingOn = false;
180 codec_settings_.VP8()->automaticResizeOn = false; 180 codec_settings_.VP8()->automaticResizeOn = false;
181 codec_settings_.VP8()->complexity = kComplexityNormal; 181 codec_settings_.VP8()->complexity = kComplexityNormal;
182 codec_settings_.VP8()->tl_factory = &tl_factory_; 182 codec_settings_.VP8()->tl_factory = &tl_factory_;
183 } 183 }
184 184
185 void InitEncodeDecode() { 185 void InitEncodeDecode() {
186 EXPECT_EQ( 186 EXPECT_EQ(
187 WEBRTC_VIDEO_CODEC_OK, 187 WEBRTC_VIDEO_CODEC_OK,
188 encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize)); 188 encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
189 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 189 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
190 decoder_->InitDecode(&codec_settings_, kNumCores)); 190 decoder_->InitDecode(&codec_settings_, kNumCores));
191 } 191 }
192 192
193 void WaitForEncodedFrame() { 193 void EncodeFrame() {
194 int64_t start_ms = rtc::TimeMillis(); 194 EXPECT_FALSE(encoded_cb_.EncodeComplete());
195 while (rtc::TimeMillis() - start_ms < kMaxWaitEncTimeMs) { 195 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
196 if (encoded_cb_.EncodeComplete()) 196 encoder_->Encode(*input_frame_, nullptr, nullptr));
197 return; 197 EXPECT_TRUE(encoded_cb_.EncodeComplete());
198 }
199 ASSERT_TRUE(false);
200 }
201
202 void WaitForDecodedFrame() {
203 int64_t start_ms = rtc::TimeMillis();
204 while (rtc::TimeMillis() - start_ms < kMaxWaitDecTimeMs) {
205 if (decoded_cb_.DecodeComplete())
206 return;
207 }
208 ASSERT_TRUE(false);
209 } 198 }
210 199
211 void ExpectFrameWith(int16_t picture_id, 200 void ExpectFrameWith(int16_t picture_id,
212 int tl0_pic_idx, 201 int tl0_pic_idx,
213 uint8_t temporal_idx) { 202 uint8_t temporal_idx) {
214 WaitForEncodedFrame(); 203 EXPECT_EQ(picture_id % (1 << 15),
215 EXPECT_EQ(picture_id,
216 encoded_cb_.codec_specific_info_.codecSpecific.VP8.pictureId); 204 encoded_cb_.codec_specific_info_.codecSpecific.VP8.pictureId);
217 EXPECT_EQ(tl0_pic_idx, 205 EXPECT_EQ(tl0_pic_idx % (1 << 8),
218 encoded_cb_.codec_specific_info_.codecSpecific.VP8.tl0PicIdx); 206 encoded_cb_.codec_specific_info_.codecSpecific.VP8.tl0PicIdx);
219 EXPECT_EQ(temporal_idx, 207 EXPECT_EQ(temporal_idx,
220 encoded_cb_.codec_specific_info_.codecSpecific.VP8.temporalIdx); 208 encoded_cb_.codec_specific_info_.codecSpecific.VP8.temporalIdx);
221 } 209 }
222 210
223 test::ScopedFieldTrials override_field_trials_; 211 test::ScopedFieldTrials override_field_trials_;
224 EncodedImageCallbackTestImpl encoded_cb_; 212 EncodedImageCallbackTestImpl encoded_cb_;
225 DecodedImageCallbackTestImpl decoded_cb_; 213 DecodedImageCallbackTestImpl decoded_cb_;
226 std::unique_ptr<VideoFrame> input_frame_; 214 std::unique_ptr<VideoFrame> input_frame_;
227 const std::unique_ptr<VideoEncoder> encoder_; 215 const std::unique_ptr<VideoEncoder> encoder_;
228 const std::unique_ptr<VideoDecoder> decoder_; 216 const std::unique_ptr<VideoDecoder> decoder_;
229 VideoCodec codec_settings_; 217 VideoCodec codec_settings_;
230 TemporalLayersFactory tl_factory_; 218 TemporalLayersFactory tl_factory_;
231 }; 219 };
232 220
233 TEST_F(TestVp8Impl, EncodeFrame) { 221 TEST_F(TestVp8Impl, SetRateAllocation) {
234 InitEncodeDecode();
235 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
236 encoder_->Encode(*input_frame_, nullptr, nullptr));
237 WaitForEncodedFrame();
238 }
239
240 TEST_F(TestVp8Impl, EncoderParameterTest) {
241 codec_settings_.maxBitrate = 0;
242 codec_settings_.width = 1440;
243 codec_settings_.height = 1080;
244
245 // Calls before InitEncode().
246 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
247 const int kBitrateBps = 300000; 222 const int kBitrateBps = 300000;
248 BitrateAllocation bitrate_allocation; 223 BitrateAllocation bitrate_allocation;
249 bitrate_allocation.SetBitrate(0, 0, kBitrateBps); 224 bitrate_allocation.SetBitrate(0, 0, kBitrateBps);
250 EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED, 225 EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
251 encoder_->SetRateAllocation(bitrate_allocation, 226 encoder_->SetRateAllocation(bitrate_allocation,
252 codec_settings_.maxFramerate)); 227 codec_settings_.maxFramerate));
228 InitEncodeDecode();
229 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
230 encoder_->SetRateAllocation(bitrate_allocation,
231 codec_settings_.maxFramerate));
232 }
233
234 TEST_F(TestVp8Impl, EncodeFrameAndRelease) {
235 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
253 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 236 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
254 encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize)); 237 encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
238 EncodeFrame();
239 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
240 EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
241 encoder_->Encode(*input_frame_, nullptr, nullptr));
255 } 242 }
256 243
257 TEST_F(TestVp8Impl, DecoderParameterTest) { 244 TEST_F(TestVp8Impl, InitDecode) {
258 // Calls before InitDecode().
259 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release()); 245 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release());
260 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 246 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
261 decoder_->InitDecode(&codec_settings_, kNumCores)); 247 decoder_->InitDecode(&codec_settings_, kNumCores));
262 } 248 }
263 249
250 TEST_F(TestVp8Impl, OnEncodedImageReportsInfo) {
251 InitEncodeDecode();
252 EncodeFrame();
253 EXPECT_EQ(kInitialTimestampRtp, encoded_cb_.encoded_frame_._timeStamp);
254 EXPECT_EQ(kInitialTimestampMs, encoded_cb_.encoded_frame_.capture_time_ms_);
255 EXPECT_EQ(kWidth, static_cast<int>(encoded_cb_.encoded_frame_._encodedWidth));
256 EXPECT_EQ(kHeight,
257 static_cast<int>(encoded_cb_.encoded_frame_._encodedHeight));
258 EXPECT_EQ(-1, // Disabled for single stream.
259 encoded_cb_.encoded_frame_.adapt_reason_.bw_resolutions_disabled);
260 }
261
264 // We only test the encoder here, since the decoded frame rotation is set based 262 // We only test the encoder here, since the decoded frame rotation is set based
265 // on the CVO RTP header extension in VCMDecodedFrameCallback::Decoded. 263 // on the CVO RTP header extension in VCMDecodedFrameCallback::Decoded.
266 // TODO(brandtr): Consider passing the rotation flag through the decoder 264 // TODO(brandtr): Consider passing the rotation flag through the decoder
267 // in the same way as done in the encoder. 265 // in the same way as done in the encoder.
268 TEST_F(TestVp8Impl, EncodedRotationEqualsInputRotation) { 266 TEST_F(TestVp8Impl, EncodedRotationEqualsInputRotation) {
269 InitEncodeDecode(); 267 InitEncodeDecode();
270
271 input_frame_->set_rotation(kVideoRotation_0); 268 input_frame_->set_rotation(kVideoRotation_0);
272 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 269 EncodeFrame();
273 encoder_->Encode(*input_frame_, nullptr, nullptr));
274 WaitForEncodedFrame();
275 EXPECT_EQ(kVideoRotation_0, encoded_cb_.encoded_frame_.rotation_); 270 EXPECT_EQ(kVideoRotation_0, encoded_cb_.encoded_frame_.rotation_);
276 271
277 input_frame_->set_rotation(kVideoRotation_90); 272 input_frame_->set_rotation(kVideoRotation_90);
278 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 273 EncodeFrame();
279 encoder_->Encode(*input_frame_, nullptr, nullptr));
280 WaitForEncodedFrame();
281 EXPECT_EQ(kVideoRotation_90, encoded_cb_.encoded_frame_.rotation_); 274 EXPECT_EQ(kVideoRotation_90, encoded_cb_.encoded_frame_.rotation_);
282 } 275 }
283 276
284 TEST_F(TestVp8Impl, DecodedQpEqualsEncodedQp) { 277 TEST_F(TestVp8Impl, DecodedQpEqualsEncodedQp) {
285 InitEncodeDecode(); 278 InitEncodeDecode();
286 encoder_->Encode(*input_frame_, nullptr, nullptr); 279 EncodeFrame();
287 WaitForEncodedFrame();
288 // First frame should be a key frame. 280 // First frame should be a key frame.
289 encoded_cb_.encoded_frame_._frameType = kVideoFrameKey; 281 encoded_cb_.encoded_frame_._frameType = kVideoFrameKey;
290 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 282 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
291 decoder_->Decode(encoded_cb_.encoded_frame_, false, nullptr)); 283 decoder_->Decode(encoded_cb_.encoded_frame_, false, nullptr));
292 WaitForDecodedFrame(); 284 EXPECT_TRUE(decoded_cb_.DecodeComplete());
293 EXPECT_GT(I420PSNR(input_frame_.get(), &*decoded_cb_.frame_), 36); 285 EXPECT_GT(I420PSNR(input_frame_.get(), &*decoded_cb_.frame_), 36);
294 EXPECT_EQ(encoded_cb_.encoded_frame_.qp_, *decoded_cb_.qp_); 286 EXPECT_EQ(encoded_cb_.encoded_frame_.qp_, *decoded_cb_.qp_);
295 } 287 }
296 288
297 #if defined(WEBRTC_ANDROID) 289 #if defined(WEBRTC_ANDROID)
298 #define MAYBE_AlignedStrideEncodeDecode DISABLED_AlignedStrideEncodeDecode 290 #define MAYBE_AlignedStrideEncodeDecode DISABLED_AlignedStrideEncodeDecode
299 #else 291 #else
300 #define MAYBE_AlignedStrideEncodeDecode AlignedStrideEncodeDecode 292 #define MAYBE_AlignedStrideEncodeDecode AlignedStrideEncodeDecode
301 #endif 293 #endif
302 TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) { 294 TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
303 InitEncodeDecode(); 295 InitEncodeDecode();
304 encoder_->Encode(*input_frame_, nullptr, nullptr); 296 EncodeFrame();
305 WaitForEncodedFrame();
306 // First frame should be a key frame. 297 // First frame should be a key frame.
307 encoded_cb_.encoded_frame_._frameType = kVideoFrameKey; 298 encoded_cb_.encoded_frame_._frameType = kVideoFrameKey;
308 encoded_cb_.encoded_frame_.ntp_time_ms_ = kTestNtpTimeMs; 299 encoded_cb_.encoded_frame_.ntp_time_ms_ = kTestNtpTimeMs;
309 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 300 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
310 decoder_->Decode(encoded_cb_.encoded_frame_, false, nullptr)); 301 decoder_->Decode(encoded_cb_.encoded_frame_, false, nullptr));
311 WaitForDecodedFrame(); 302 EXPECT_TRUE(decoded_cb_.DecodeComplete());
312 // Compute PSNR on all planes (faster than SSIM). 303 // Compute PSNR on all planes (faster than SSIM).
313 EXPECT_GT(I420PSNR(input_frame_.get(), &*decoded_cb_.frame_), 36); 304 EXPECT_GT(I420PSNR(input_frame_.get(), &*decoded_cb_.frame_), 36);
314 EXPECT_EQ(kTestTimestamp, decoded_cb_.frame_->timestamp()); 305 EXPECT_EQ(kInitialTimestampRtp, decoded_cb_.frame_->timestamp());
315 EXPECT_EQ(kTestNtpTimeMs, decoded_cb_.frame_->ntp_time_ms()); 306 EXPECT_EQ(kTestNtpTimeMs, decoded_cb_.frame_->ntp_time_ms());
316 } 307 }
317 308
318 #if defined(WEBRTC_ANDROID) 309 #if defined(WEBRTC_ANDROID)
319 #define MAYBE_DecodeWithACompleteKeyFrame DISABLED_DecodeWithACompleteKeyFrame 310 #define MAYBE_DecodeWithACompleteKeyFrame DISABLED_DecodeWithACompleteKeyFrame
320 #else 311 #else
321 #define MAYBE_DecodeWithACompleteKeyFrame DecodeWithACompleteKeyFrame 312 #define MAYBE_DecodeWithACompleteKeyFrame DecodeWithACompleteKeyFrame
322 #endif 313 #endif
323 TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) { 314 TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
324 InitEncodeDecode(); 315 InitEncodeDecode();
325 encoder_->Encode(*input_frame_, nullptr, nullptr); 316 EncodeFrame();
326 WaitForEncodedFrame();
327 // Setting complete to false -> should return an error. 317 // Setting complete to false -> should return an error.
328 encoded_cb_.encoded_frame_._completeFrame = false; 318 encoded_cb_.encoded_frame_._completeFrame = false;
329 EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR, 319 EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR,
330 decoder_->Decode(encoded_cb_.encoded_frame_, false, nullptr)); 320 decoder_->Decode(encoded_cb_.encoded_frame_, false, nullptr));
331 // Setting complete back to true. Forcing a delta frame. 321 // Setting complete back to true. Forcing a delta frame.
332 encoded_cb_.encoded_frame_._frameType = kVideoFrameDelta; 322 encoded_cb_.encoded_frame_._frameType = kVideoFrameDelta;
333 encoded_cb_.encoded_frame_._completeFrame = true; 323 encoded_cb_.encoded_frame_._completeFrame = true;
334 EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR, 324 EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR,
335 decoder_->Decode(encoded_cb_.encoded_frame_, false, nullptr)); 325 decoder_->Decode(encoded_cb_.encoded_frame_, false, nullptr));
336 // Now setting a key frame. 326 // Now setting a key frame.
337 encoded_cb_.encoded_frame_._frameType = kVideoFrameKey; 327 encoded_cb_.encoded_frame_._frameType = kVideoFrameKey;
338 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 328 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
339 decoder_->Decode(encoded_cb_.encoded_frame_, false, nullptr)); 329 decoder_->Decode(encoded_cb_.encoded_frame_, false, nullptr));
340 ASSERT_TRUE(decoded_cb_.frame_); 330 ASSERT_TRUE(decoded_cb_.frame_);
341 EXPECT_GT(I420PSNR(input_frame_.get(), &*decoded_cb_.frame_), 36); 331 EXPECT_GT(I420PSNR(input_frame_.get(), &*decoded_cb_.frame_), 36);
342 } 332 }
343 333
344 TEST_F(TestVp8Impl, EncoderRetainsRtpStateAfterRelease) { 334 TEST_F(TestVp8Impl, EncoderWith2TemporalLayersRetainsRtpStateAfterRelease) {
335 codec_settings_.VP8()->numberOfTemporalLayers = 2;
345 InitEncodeDecode(); 336 InitEncodeDecode();
346 // Override default settings.
347 codec_settings_.VP8()->numberOfTemporalLayers = 2;
348 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
349 encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
350 337
351 // Temporal layer 0. 338 // Temporal layer 0.
352 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 339 EncodeFrame();
353 encoder_->Encode(*input_frame_, nullptr, nullptr));
354 WaitForEncodedFrame();
355 EXPECT_EQ(0, encoded_cb_.codec_specific_info_.codecSpecific.VP8.temporalIdx); 340 EXPECT_EQ(0, encoded_cb_.codec_specific_info_.codecSpecific.VP8.temporalIdx);
356 int16_t picture_id = 341 int16_t picture_id =
357 encoded_cb_.codec_specific_info_.codecSpecific.VP8.pictureId; 342 encoded_cb_.codec_specific_info_.codecSpecific.VP8.pictureId;
358 int tl0_pic_idx = 343 int tl0_pic_idx =
359 encoded_cb_.codec_specific_info_.codecSpecific.VP8.tl0PicIdx; 344 encoded_cb_.codec_specific_info_.codecSpecific.VP8.tl0PicIdx;
360
361 // Temporal layer 1. 345 // Temporal layer 1.
362 input_frame_->set_timestamp(input_frame_->timestamp() + 346 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
363 kTimestampIncrementPerFrame); 347 EncodeFrame();
364 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 348 ExpectFrameWith(picture_id + 1, tl0_pic_idx + 0, 1);
365 encoder_->Encode(*input_frame_, nullptr, nullptr));
366 ExpectFrameWith((picture_id + 1) % (1 << 15), tl0_pic_idx, 1);
367
368 // Temporal layer 0. 349 // Temporal layer 0.
369 input_frame_->set_timestamp(input_frame_->timestamp() + 350 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
370 kTimestampIncrementPerFrame); 351 EncodeFrame();
371 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 352 ExpectFrameWith(picture_id + 2, tl0_pic_idx + 1, 0);
372 encoder_->Encode(*input_frame_, nullptr, nullptr));
373 ExpectFrameWith((picture_id + 2) % (1 << 15), (tl0_pic_idx + 1) % (1 << 8),
374 0);
375
376 // Temporal layer 1. 353 // Temporal layer 1.
377 input_frame_->set_timestamp(input_frame_->timestamp() + 354 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
378 kTimestampIncrementPerFrame); 355 EncodeFrame();
379 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 356 ExpectFrameWith(picture_id + 3, tl0_pic_idx + 1, 1);
380 encoder_->Encode(*input_frame_, nullptr, nullptr));
381 ExpectFrameWith((picture_id + 3) % (1 << 15), (tl0_pic_idx + 1) % (1 << 8),
382 1);
383 357
384 // Reinit. 358 // Reinit.
385 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); 359 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
386 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 360 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
387 encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize)); 361 encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
388 362
389 // Temporal layer 0. 363 // Temporal layer 0.
390 input_frame_->set_timestamp(input_frame_->timestamp() + 364 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
391 kTimestampIncrementPerFrame); 365 EncodeFrame();
392 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 366 ExpectFrameWith(picture_id + 4, tl0_pic_idx + 2, 0);
393 encoder_->Encode(*input_frame_, nullptr, nullptr)); 367 // Temporal layer 1.
394 ExpectFrameWith((picture_id + 4) % (1 << 15), (tl0_pic_idx + 2) % (1 << 8), 368 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
395 0); 369 EncodeFrame();
370 ExpectFrameWith(picture_id + 5, tl0_pic_idx + 2, 1);
371 // Temporal layer 0.
372 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
373 EncodeFrame();
374 ExpectFrameWith(picture_id + 6, tl0_pic_idx + 3, 0);
375 // Temporal layer 1.
376 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
377 EncodeFrame();
378 ExpectFrameWith(picture_id + 7, tl0_pic_idx + 3, 1);
379 }
396 380
397 // Temporal layer 1. 381 TEST_F(TestVp8Impl, EncoderWith3TemporalLayersRetainsRtpStateAfterRelease) {
398 input_frame_->set_timestamp(input_frame_->timestamp() + 382 codec_settings_.VP8()->numberOfTemporalLayers = 3;
399 kTimestampIncrementPerFrame); 383 InitEncodeDecode();
400 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
401 encoder_->Encode(*input_frame_, nullptr, nullptr));
402 ExpectFrameWith((picture_id + 5) % (1 << 15), (tl0_pic_idx + 2) % (1 << 8),
403 1);
404 384
405 // Temporal layer 0. 385 // Temporal layer 0.
406 input_frame_->set_timestamp(input_frame_->timestamp() + 386 EncodeFrame();
407 kTimestampIncrementPerFrame); 387 EXPECT_EQ(0, encoded_cb_.codec_specific_info_.codecSpecific.VP8.temporalIdx);
388 int16_t picture_id =
389 encoded_cb_.codec_specific_info_.codecSpecific.VP8.pictureId;
390 int tl0_pic_idx =
391 encoded_cb_.codec_specific_info_.codecSpecific.VP8.tl0PicIdx;
392 // Temporal layer 2.
393 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
394 EncodeFrame();
395 ExpectFrameWith(picture_id + 1, tl0_pic_idx + 0, 2);
396 // Temporal layer 1.
397 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
398 EncodeFrame();
399 ExpectFrameWith(picture_id + 2, tl0_pic_idx + 0, 1);
400 // Temporal layer 2.
401 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
402 EncodeFrame();
403 ExpectFrameWith(picture_id + 3, tl0_pic_idx + 0, 2);
404
405 // Reinit.
406 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
408 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 407 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
409 encoder_->Encode(*input_frame_, nullptr, nullptr)); 408 encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
410 ExpectFrameWith((picture_id + 6) % (1 << 15), (tl0_pic_idx + 3) % (1 << 8),
411 0);
412 409
410 // Temporal layer 0.
411 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
412 EncodeFrame();
413 ExpectFrameWith(picture_id + 4, tl0_pic_idx + 1, 0);
414 // Temporal layer 2.
415 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
416 EncodeFrame();
417 ExpectFrameWith(picture_id + 5, tl0_pic_idx + 1, 2);
413 // Temporal layer 1. 418 // Temporal layer 1.
414 input_frame_->set_timestamp(input_frame_->timestamp() + 419 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
415 kTimestampIncrementPerFrame); 420 EncodeFrame();
416 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 421 ExpectFrameWith(picture_id + 6, tl0_pic_idx + 1, 1);
417 encoder_->Encode(*input_frame_, nullptr, nullptr)); 422 // Temporal layer 2.
418 ExpectFrameWith((picture_id + 7) % (1 << 15), (tl0_pic_idx + 3) % (1 << 8), 423 input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
419 1); 424 EncodeFrame();
425 ExpectFrameWith(picture_id + 7, tl0_pic_idx + 1, 2);
420 } 426 }
421 427
422 TEST_F(TestVp8Impl, ScalingDisabledIfAutomaticResizeOff) { 428 TEST_F(TestVp8Impl, ScalingDisabledIfAutomaticResizeOff) {
423 codec_settings_.VP8()->frameDroppingOn = true; 429 codec_settings_.VP8()->frameDroppingOn = true;
424 codec_settings_.VP8()->automaticResizeOn = false; 430 codec_settings_.VP8()->automaticResizeOn = false;
425 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 431 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
426 encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize)); 432 encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
427 433
428 VideoEncoder::ScalingSettings settings = encoder_->GetScalingSettings(); 434 VideoEncoder::ScalingSettings settings = encoder_->GetScalingSettings();
429 EXPECT_FALSE(settings.enabled); 435 EXPECT_FALSE(settings.enabled);
(...skipping 22 matching lines...)
452 codec_settings_.VP8()->automaticResizeOn = true; 458 codec_settings_.VP8()->automaticResizeOn = true;
453 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 459 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
454 encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize)); 460 encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
455 461
456 VideoEncoder::ScalingSettings settings = encoder_->GetScalingSettings(); 462 VideoEncoder::ScalingSettings settings = encoder_->GetScalingSettings();
457 EXPECT_TRUE(settings.enabled); 463 EXPECT_TRUE(settings.enabled);
458 EXPECT_EQ(kMinPixelsPerFrame, settings.min_pixels_per_frame); 464 EXPECT_EQ(kMinPixelsPerFrame, settings.min_pixels_per_frame);
459 } 465 }
460 466
461 } // namespace webrtc 467 } // namespace webrtc
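
Below is a minimal usage sketch, not part of the reviewed change, showing how the updated fixture helpers are intended to compose in a test body. It assumes the fixture members and constants shown in the diff above (encoder_, encoded_cb_, input_frame_, codec_settings_, kTimestampIncrement) and uses a hypothetical test name; EncodeFrame() asserts that the encode callback fires synchronously, and ExpectFrameWith() applies the 15-bit pictureId and 8-bit tl0PicIdx wraparound internally.

// Hypothetical usage sketch only; assumes the TestVp8Impl fixture as shown in the diff.
TEST_F(TestVp8Impl, SketchEncodeTwoTemporalLayerFrames) {
  codec_settings_.VP8()->numberOfTemporalLayers = 2;
  InitEncodeDecode();

  // First frame (temporal layer 0): EncodeFrame() checks that the encoded
  // callback has completed before Encode() returns, so no polling is needed.
  EncodeFrame();
  const int16_t picture_id =
      encoded_cb_.codec_specific_info_.codecSpecific.VP8.pictureId;
  const int tl0_pic_idx =
      encoded_cb_.codec_specific_info_.codecSpecific.VP8.tl0PicIdx;

  // Second frame (temporal layer 1): advance the RTP timestamp, encode, and
  // verify the VP8 payload descriptor. ExpectFrameWith() reduces picture_id
  // modulo 2^15 and tl0_pic_idx modulo 2^8, so callers can pass raw sums.
  input_frame_->set_timestamp(input_frame_->timestamp() + kTimestampIncrement);
  EncodeFrame();
  ExpectFrameWith(picture_id + 1, tl0_pic_idx, 1);
}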