OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_ | 11 #ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_ |
12 #define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_ | 12 #define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_ |
13 | 13 |
14 #include <algorithm> | 14 #include <algorithm> |
15 #include <map> | |
16 #include <memory> | 15 #include <memory> |
17 #include <vector> | 16 #include <vector> |
18 | 17 |
19 #include "webrtc/base/checks.h" | 18 #include "webrtc/base/checks.h" |
20 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" | 19 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
21 #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h" | 20 #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h" |
22 #include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h" | 21 #include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h" |
23 #include "webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h" | 22 #include "webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h" |
24 #include "webrtc/modules/video_coding/utility/simulcast_rate_allocator.h" | |
25 #include "webrtc/test/gtest.h" | 23 #include "webrtc/test/gtest.h" |
26 #include "webrtc/video_frame.h" | 24 #include "webrtc/video_frame.h" |
27 | 25 |
28 using ::testing::_; | 26 using ::testing::_; |
29 using ::testing::AllOf; | 27 using ::testing::AllOf; |
30 using ::testing::Field; | 28 using ::testing::Field; |
31 using ::testing::Return; | 29 using ::testing::Return; |
32 | 30 |
33 namespace webrtc { | 31 namespace webrtc { |
34 namespace testing { | 32 namespace testing { |
(...skipping 107 matching lines...)
142 int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override { | 140 int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override { |
143 RTC_NOTREACHED(); | 141 RTC_NOTREACHED(); |
144 return -1; | 142 return -1; |
145 } | 143 } |
146 int DecodedFrames() { return decoded_frames_; } | 144 int DecodedFrames() { return decoded_frames_; } |
147 | 145 |
148 private: | 146 private: |
149 int decoded_frames_; | 147 int decoded_frames_; |
150 }; | 148 }; |
151 | 149 |
| 150 class SkipEncodingUnusedStreamsTest { |
| 151 public: |
| 152 std::vector<unsigned int> RunTest(VP8Encoder* encoder, |
| 153 VideoCodec* settings, |
| 154 uint32_t target_bitrate) { |
| 155 SpyingTemporalLayersFactory spy_factory; |
| 156 settings->VP8()->tl_factory = &spy_factory; |
| 157 EXPECT_EQ(0, encoder->InitEncode(settings, 1, 1200)); |
| 158 |
| 159 encoder->SetRates(target_bitrate, 30); |
| 160 |
| 161 std::vector<unsigned int> configured_bitrates; |
| 162 for (std::vector<TemporalLayers*>::const_iterator it = |
| 163 spy_factory.spying_layers_.begin(); |
| 164 it != spy_factory.spying_layers_.end(); ++it) { |
| 165 configured_bitrates.push_back( |
| 166 static_cast<SpyingTemporalLayers*>(*it)->configured_bitrate_); |
| 167 } |
| 168 return configured_bitrates; |
| 169 } |
| 170 |
| 171 class SpyingTemporalLayers : public TemporalLayers { |
| 172 public: |
| 173 explicit SpyingTemporalLayers(TemporalLayers* layers) |
| 174 : configured_bitrate_(0), layers_(layers) {} |
| 175 |
| 176 virtual ~SpyingTemporalLayers() { delete layers_; } |
| 177 |
| 178 int EncodeFlags(uint32_t timestamp) override { |
| 179 return layers_->EncodeFlags(timestamp); |
| 180 } |
| 181 |
| 182 bool ConfigureBitrates(int bitrate_kbit, |
| 183 int max_bitrate_kbit, |
| 184 int framerate, |
| 185 vpx_codec_enc_cfg_t* cfg) override { |
| 186 configured_bitrate_ = bitrate_kbit; |
| 187 return layers_->ConfigureBitrates(bitrate_kbit, max_bitrate_kbit, |
| 188 framerate, cfg); |
| 189 } |
| 190 |
| 191 void PopulateCodecSpecific(bool base_layer_sync, |
| 192 CodecSpecificInfoVP8* vp8_info, |
| 193 uint32_t timestamp) override { |
| 194 layers_->PopulateCodecSpecific(base_layer_sync, vp8_info, timestamp); |
| 195 } |
| 196 |
| 197 void FrameEncoded(unsigned int size, uint32_t timestamp, int qp) override { |
| 198 layers_->FrameEncoded(size, timestamp, qp); |
| 199 } |
| 200 |
| 201 int CurrentLayerId() const override { return layers_->CurrentLayerId(); } |
| 202 |
| 203 bool UpdateConfiguration(vpx_codec_enc_cfg_t* cfg) override { |
| 204 return false; |
| 205 } |
| 206 |
| 207 int configured_bitrate_; |
| 208 TemporalLayers* layers_; |
| 209 }; |
| 210 |
| 211 class SpyingTemporalLayersFactory : public TemporalLayersFactory { |
| 212 public: |
| 213 virtual ~SpyingTemporalLayersFactory() {} |
| 214 TemporalLayers* Create(int temporal_layers, |
| 215 uint8_t initial_tl0_pic_idx) const override { |
| 216 SpyingTemporalLayers* layers = |
| 217 new SpyingTemporalLayers(TemporalLayersFactory::Create( |
| 218 temporal_layers, initial_tl0_pic_idx)); |
| 219 spying_layers_.push_back(layers); |
| 220 return layers; |
| 221 } |
| 222 |
| 223 mutable std::vector<TemporalLayers*> spying_layers_; |
| 224 }; |
| 225 }; |
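The class re-added above is a spy in the decorator sense: the factory records every TemporalLayers instance it hands out, and each wrapper captures the bitrate the encoder passes to ConfigureBitrates() before delegating to the wrapped implementation. A condensed sketch of the wiring, using only calls that appear in RunTest() above:

    SpyingTemporalLayersFactory spy_factory;
    settings->VP8()->tl_factory = &spy_factory;  // Inject the spy factory.
    encoder->InitEncode(settings, 1, 1200);  // One wrapped TemporalLayers per stream.
    encoder->SetRates(target_bitrate, 30);   // ConfigureBitrates() records the rate.
    // spy_factory.spying_layers_ now holds one SpyingTemporalLayers per simulcast
    // stream; its configured_bitrate_ is the rate that stream was given.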
| 226 |
152 class TestVp8Simulcast : public ::testing::Test { | 227 class TestVp8Simulcast : public ::testing::Test { |
153 public: | 228 public: |
154 TestVp8Simulcast(VP8Encoder* encoder, VP8Decoder* decoder) | 229 TestVp8Simulcast(VP8Encoder* encoder, VP8Decoder* decoder) |
155 : encoder_(encoder), decoder_(decoder) {} | 230 : encoder_(encoder), decoder_(decoder) {} |
156 | 231 |
157 static void SetPlane(uint8_t* data, | 232 static void SetPlane(uint8_t* data, |
158 uint8_t value, | 233 uint8_t value, |
159 int width, | 234 int width, |
160 int height, | 235 int height, |
161 int stride) { | 236 int stride) { |
(...skipping 21 matching lines...)
183 chroma_width, chroma_height, | 258 chroma_width, chroma_height, |
184 buffer->StrideU()); | 259 buffer->StrideU()); |
185 | 260 |
186 SetPlane(buffer->MutableDataV(), plane_colors[2], | 261 SetPlane(buffer->MutableDataV(), plane_colors[2], |
187 chroma_width, chroma_height, | 262 chroma_width, chroma_height, |
188 buffer->StrideV()); | 263 buffer->StrideV()); |
189 } | 264 } |
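A small arithmetic note on the plane setup above: chroma dimensions round up for odd luma sizes, which is what keeps the odd-resolution case exercised by TestSwitchingToOneOddStream (1023x769) consistent. A sketch of the convention, assuming the same rounding used for half_width elsewhere in this header:

    int chroma_width = (width + 1) / 2;    // e.g. (1023 + 1) / 2 == 512
    int chroma_height = (height + 1) / 2;  // e.g. (769 + 1) / 2 == 385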
190 | 265 |
191 static void DefaultSettings(VideoCodec* settings, | 266 static void DefaultSettings(VideoCodec* settings, |
192 const int* temporal_layer_profile) { | 267 const int* temporal_layer_profile) { |
193 RTC_CHECK(settings); | 268 assert(settings); |
194 memset(settings, 0, sizeof(VideoCodec)); | 269 memset(settings, 0, sizeof(VideoCodec)); |
195 strncpy(settings->plName, "VP8", 4); | 270 strncpy(settings->plName, "VP8", 4); |
196 settings->codecType = kVideoCodecVP8; | 271 settings->codecType = kVideoCodecVP8; |
197 // 96 to 127 dynamic payload types for video codecs | 272 // 96 to 127 dynamic payload types for video codecs |
198 settings->plType = 120; | 273 settings->plType = 120; |
199 settings->startBitrate = 300; | 274 settings->startBitrate = 300; |
200 settings->minBitrate = 30; | 275 settings->minBitrate = 30; |
201 settings->maxBitrate = 0; | 276 settings->maxBitrate = 0; |
202 settings->maxFramerate = 30; | 277 settings->maxFramerate = 30; |
203 settings->width = kDefaultWidth; | 278 settings->width = kDefaultWidth; |
(...skipping 29 matching lines...)
233 stream->width = width; | 308 stream->width = width; |
234 stream->height = height; | 309 stream->height = height; |
235 stream->maxBitrate = max_bitrate; | 310 stream->maxBitrate = max_bitrate; |
236 stream->minBitrate = min_bitrate; | 311 stream->minBitrate = min_bitrate; |
237 stream->targetBitrate = target_bitrate; | 312 stream->targetBitrate = target_bitrate; |
238 stream->numberOfTemporalLayers = num_temporal_layers; | 313 stream->numberOfTemporalLayers = num_temporal_layers; |
239 stream->qpMax = 45; | 314 stream->qpMax = 45; |
240 } | 315 } |
241 | 316 |
242 protected: | 317 protected: |
243 void SetUp() override { SetUpCodec(kDefaultTemporalLayerProfile); } | 318 virtual void SetUp() { SetUpCodec(kDefaultTemporalLayerProfile); } |
244 | 319 |
245 void TearDown() override { | 320 virtual void SetUpCodec(const int* temporal_layer_profile) { |
246 encoder_->Release(); | |
247 decoder_->Release(); | |
248 } | |
249 | |
250 void SetUpCodec(const int* temporal_layer_profile) { | |
251 encoder_->RegisterEncodeCompleteCallback(&encoder_callback_); | 321 encoder_->RegisterEncodeCompleteCallback(&encoder_callback_); |
252 decoder_->RegisterDecodeCompleteCallback(&decoder_callback_); | 322 decoder_->RegisterDecodeCompleteCallback(&decoder_callback_); |
253 DefaultSettings(&settings_, temporal_layer_profile); | 323 DefaultSettings(&settings_, temporal_layer_profile); |
254 SetUpRateAllocator(); | |
255 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); | 324 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); |
256 EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1)); | 325 EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1)); |
257 int half_width = (kDefaultWidth + 1) / 2; | 326 int half_width = (kDefaultWidth + 1) / 2; |
258 input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, | 327 input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, |
259 kDefaultWidth, half_width, half_width); | 328 kDefaultWidth, half_width, half_width); |
260 input_buffer_->InitializeData(); | 329 input_buffer_->InitializeData(); |
261 input_frame_.reset( | 330 input_frame_.reset( |
262 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); | 331 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); |
263 } | 332 } |
264 | 333 |
265 void SetUpRateAllocator() { | 334 virtual void TearDown() { |
266 TemporalLayersFactory* tl_factory = new TemporalLayersFactory(); | 335 encoder_->Release(); |
267 rate_allocator_.reset(new SimulcastRateAllocator( | 336 decoder_->Release(); |
268 settings_, std::unique_ptr<TemporalLayersFactory>(tl_factory))); | |
269 settings_.codecSpecific.VP8.tl_factory = tl_factory; | |
270 } | |
271 | |
272 void SetRates(uint32_t bitrate_kbps, uint32_t fps) { | |
273 encoder_->SetRateAllocation( | |
274 rate_allocator_->GetAllocation(bitrate_kbps * 1000, fps), fps); | |
275 } | 337 } |
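The block above is the heart of this change: the left (old) column computes a per-stream BitrateAllocation with SimulcastRateAllocator and hands it to the encoder, while the right (new) column passes the total rate in kbps straight to the encoder and lets it split the budget across the simulcast streams internally, which is what the padding tests below exercise. Side by side, using only the calls visible in this diff:

    // Old column: allocation computed outside the encoder.
    encoder_->SetRateAllocation(
        rate_allocator_->GetAllocation(bitrate_kbps * 1000, fps), fps);

    // New column: the encoder receives the total rate directly.
    encoder_->SetRates(bitrate_kbps, fps);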
276 | 338 |
277 void ExpectStreams(FrameType frame_type, int expected_video_streams) { | 339 void ExpectStreams(FrameType frame_type, int expected_video_streams) { |
278 ASSERT_GE(expected_video_streams, 0); | 340 ASSERT_GE(expected_video_streams, 0); |
279 ASSERT_LE(expected_video_streams, kNumberOfSimulcastStreams); | 341 ASSERT_LE(expected_video_streams, kNumberOfSimulcastStreams); |
280 if (expected_video_streams >= 1) { | 342 if (expected_video_streams >= 1) { |
281 EXPECT_CALL( | 343 EXPECT_CALL( |
282 encoder_callback_, | 344 encoder_callback_, |
283 OnEncodedImage( | 345 OnEncodedImage( |
284 AllOf(Field(&EncodedImage::_frameType, frame_type), | 346 AllOf(Field(&EncodedImage::_frameType, frame_type), |
(...skipping 42 matching lines...)
327 encoder_callback->GetLastEncodedFrameInfo(&picture_id, &temporal_layer, | 389 encoder_callback->GetLastEncodedFrameInfo(&picture_id, &temporal_layer, |
328 &layer_sync, i); | 390 &layer_sync, i); |
329 EXPECT_EQ(expected_temporal_idx[i], temporal_layer); | 391 EXPECT_EQ(expected_temporal_idx[i], temporal_layer); |
330 EXPECT_EQ(expected_layer_sync[i], layer_sync); | 392 EXPECT_EQ(expected_layer_sync[i], layer_sync); |
331 } | 393 } |
332 } | 394 } |
333 | 395 |
334 // We currently expect all active streams to generate a key frame even though | 396 // We currently expect all active streams to generate a key frame even though |
335 // a key frame was only requested for some of them. | 397 // a key frame was only requested for some of them. |
336 void TestKeyFrameRequestsOnAllStreams() { | 398 void TestKeyFrameRequestsOnAllStreams() { |
337 SetRates(kMaxBitrates[2], 30); // To get all three streams. | 399 encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams. |
338 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 400 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
339 kVideoFrameDelta); | 401 kVideoFrameDelta); |
340 ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams); | 402 ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams); |
341 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 403 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
342 | 404 |
343 ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams); | 405 ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams); |
344 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 406 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
345 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 407 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
346 | 408 |
347 frame_types[0] = kVideoFrameKey; | 409 frame_types[0] = kVideoFrameKey; |
(...skipping 14 matching lines...)
362 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 424 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
363 | 425 |
364 std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta); | 426 std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta); |
365 ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams); | 427 ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams); |
366 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 428 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
367 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 429 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
368 } | 430 } |
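As the comment before this test notes, a key frame requested on any subset of streams currently makes every active stream produce one. The request itself travels in the frame_types vector given to Encode(), one entry per simulcast stream; the skipped lines repeat the same pattern as the visible frame_types[0] case, roughly:

    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, kVideoFrameDelta);
    frame_types[1] = kVideoFrameKey;  // Request a key frame on stream 1 only...
    // ...but all active streams are still expected to emit key frames.
    ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));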
369 | 431 |
370 void TestPaddingAllStreams() { | 432 void TestPaddingAllStreams() { |
371 // We should always encode the base layer. | 433 // We should always encode the base layer. |
372 SetRates(kMinBitrates[0] - 1, 30); | 434 encoder_->SetRates(kMinBitrates[0] - 1, 30); |
373 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 435 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
374 kVideoFrameDelta); | 436 kVideoFrameDelta); |
375 ExpectStreams(kVideoFrameKey, 1); | 437 ExpectStreams(kVideoFrameKey, 1); |
376 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 438 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
377 | 439 |
378 ExpectStreams(kVideoFrameDelta, 1); | 440 ExpectStreams(kVideoFrameDelta, 1); |
379 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 441 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
380 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 442 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
381 } | 443 } |
382 | 444 |
383 void TestPaddingTwoStreams() { | 445 void TestPaddingTwoStreams() { |
384 // We have just enough to get only the first stream and padding for two. | 446 // We have just enough to get only the first stream and padding for two. |
385 SetRates(kMinBitrates[0], 30); | 447 encoder_->SetRates(kMinBitrates[0], 30); |
386 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 448 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
387 kVideoFrameDelta); | 449 kVideoFrameDelta); |
388 ExpectStreams(kVideoFrameKey, 1); | 450 ExpectStreams(kVideoFrameKey, 1); |
389 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 451 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
390 | 452 |
391 ExpectStreams(kVideoFrameDelta, 1); | 453 ExpectStreams(kVideoFrameDelta, 1); |
392 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 454 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
393 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 455 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
394 } | 456 } |
395 | 457 |
396 void TestPaddingTwoStreamsOneMaxedOut() { | 458 void TestPaddingTwoStreamsOneMaxedOut() { |
397 // We are just below the limit of sending the second stream, so we should get | 459 // We are just below the limit of sending the second stream, so we should get |
398 // the first stream maxed out (at |maxBitrate|), and padding for two. | 460 // the first stream maxed out (at |maxBitrate|), and padding for two. |
399 SetRates(kTargetBitrates[0] + kMinBitrates[1] - 1, 30); | 461 encoder_->SetRates(kTargetBitrates[0] + kMinBitrates[1] - 1, 30); |
400 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 462 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
401 kVideoFrameDelta); | 463 kVideoFrameDelta); |
402 ExpectStreams(kVideoFrameKey, 1); | 464 ExpectStreams(kVideoFrameKey, 1); |
403 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 465 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
404 | 466 |
405 ExpectStreams(kVideoFrameDelta, 1); | 467 ExpectStreams(kVideoFrameDelta, 1); |
406 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 468 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
407 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 469 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
408 } | 470 } |
409 | 471 |
410 void TestPaddingOneStream() { | 472 void TestPaddingOneStream() { |
411 // We have just enough to send two streams, so padding for one stream. | 473 // We have just enough to send two streams, so padding for one stream. |
412 SetRates(kTargetBitrates[0] + kMinBitrates[1], 30); | 474 encoder_->SetRates(kTargetBitrates[0] + kMinBitrates[1], 30); |
413 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 475 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
414 kVideoFrameDelta); | 476 kVideoFrameDelta); |
415 ExpectStreams(kVideoFrameKey, 2); | 477 ExpectStreams(kVideoFrameKey, 2); |
416 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 478 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
417 | 479 |
418 ExpectStreams(kVideoFrameDelta, 2); | 480 ExpectStreams(kVideoFrameDelta, 2); |
419 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 481 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
420 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 482 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
421 } | 483 } |
422 | 484 |
423 void TestPaddingOneStreamTwoMaxedOut() { | 485 void TestPaddingOneStreamTwoMaxedOut() { |
424 // We are just below the limit of sending the third stream, so we should get | 486 // We are just below the limit of sending the third stream, so we should get |
425 // the first stream's rate maxed out at |targetBitrate| and the second at |maxBitrate|. | 487 // the first stream's rate maxed out at |targetBitrate| and the second at |maxBitrate|. |
426 SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] - 1, 30); | 488 encoder_->SetRates( |
| 489 kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] - 1, 30); |
427 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 490 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
428 kVideoFrameDelta); | 491 kVideoFrameDelta); |
429 ExpectStreams(kVideoFrameKey, 2); | 492 ExpectStreams(kVideoFrameKey, 2); |
430 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 493 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
431 | 494 |
432 ExpectStreams(kVideoFrameDelta, 2); | 495 ExpectStreams(kVideoFrameDelta, 2); |
433 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 496 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
434 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 497 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
435 } | 498 } |
436 | 499 |
437 void TestSendAllStreams() { | 500 void TestSendAllStreams() { |
438 // We have just enough to send all streams. | 501 // We have just enough to send all streams. |
439 SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2], 30); | 502 encoder_->SetRates( |
| 503 kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2], 30); |
440 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 504 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
441 kVideoFrameDelta); | 505 kVideoFrameDelta); |
442 ExpectStreams(kVideoFrameKey, 3); | 506 ExpectStreams(kVideoFrameKey, 3); |
443 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 507 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
444 | 508 |
445 ExpectStreams(kVideoFrameDelta, 3); | 509 ExpectStreams(kVideoFrameDelta, 3); |
446 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 510 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
447 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 511 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
448 } | 512 } |
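Taken together, the padding tests above pin down the rates at which additional simulcast streams get switched on. A hedged summary of that rule as a standalone helper (not part of the harness; kTargetBitrates and kMinBitrates are the per-stream constants used throughout these tests):

    // Number of streams the tests above expect to be encoded (the rest is
    // padding) for a given total rate in kbps.
    int ExpectedActiveStreams(uint32_t bitrate_kbps) {
      if (bitrate_kbps >=
          kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2])
        return 3;  // TestSendAllStreams.
      if (bitrate_kbps >= kTargetBitrates[0] + kMinBitrates[1])
        return 2;  // TestPaddingOneStream.
      return 1;  // The base stream is always encoded (TestPaddingAllStreams).
    }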
449 | 513 |
450 void TestDisablingStreams() { | 514 void TestDisablingStreams() { |
451 // We should get three media streams. | 515 // We should get three media streams. |
452 SetRates(kMaxBitrates[0] + kMaxBitrates[1] + kMaxBitrates[2], 30); | 516 encoder_->SetRates(kMaxBitrates[0] + kMaxBitrates[1] + kMaxBitrates[2], 30); |
453 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 517 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
454 kVideoFrameDelta); | 518 kVideoFrameDelta); |
455 ExpectStreams(kVideoFrameKey, 3); | 519 ExpectStreams(kVideoFrameKey, 3); |
456 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 520 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
457 | 521 |
458 ExpectStreams(kVideoFrameDelta, 3); | 522 ExpectStreams(kVideoFrameDelta, 3); |
459 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 523 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
460 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 524 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
461 | 525 |
462 // We should only get two streams and padding for one. | 526 // We should only get two streams and padding for one. |
463 SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); | 527 encoder_->SetRates( |
| 528 kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); |
464 ExpectStreams(kVideoFrameDelta, 2); | 529 ExpectStreams(kVideoFrameDelta, 2); |
465 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 530 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
466 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 531 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
467 | 532 |
468 // We should only get the first stream and padding for two. | 533 // We should only get the first stream and padding for two. |
469 SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30); | 534 encoder_->SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30); |
470 ExpectStreams(kVideoFrameDelta, 1); | 535 ExpectStreams(kVideoFrameDelta, 1); |
471 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 536 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
472 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 537 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
473 | 538 |
474 // We don't have enough bitrate for the thumbnail stream, but we should get | 539 // We don't have enough bitrate for the thumbnail stream, but we should get |
475 // it anyway with the current configuration. | 540 // it anyway with the current configuration. |
476 SetRates(kTargetBitrates[0] - 1, 30); | 541 encoder_->SetRates(kTargetBitrates[0] - 1, 30); |
477 ExpectStreams(kVideoFrameDelta, 1); | 542 ExpectStreams(kVideoFrameDelta, 1); |
478 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 543 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
479 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 544 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
480 | 545 |
481 // We should only get two streams and padding for one. | 546 // We should only get two streams and padding for one. |
482 SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); | 547 encoder_->SetRates( |
| 548 kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); |
483 // We get a key frame because a new stream is being enabled. | 549 // We get a key frame because a new stream is being enabled. |
484 ExpectStreams(kVideoFrameKey, 2); | 550 ExpectStreams(kVideoFrameKey, 2); |
485 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 551 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
486 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 552 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
487 | 553 |
488 // We should get all three streams. | 554 // We should get all three streams. |
489 SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30); | 555 encoder_->SetRates( |
| 556 kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30); |
490 // We get a key frame because a new stream is being enabled. | 557 // We get a key frame because a new stream is being enabled. |
491 ExpectStreams(kVideoFrameKey, 3); | 558 ExpectStreams(kVideoFrameKey, 3); |
492 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 559 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
493 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 560 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
494 } | 561 } |
495 | 562 |
496 void SwitchingToOneStream(int width, int height) { | 563 void SwitchingToOneStream(int width, int height) { |
497 // Disable all streams except the last and set the bitrate of the last to | 564 // Disable all streams except the last and set the bitrate of the last to |
498 // 100 kbps. This verifies the way GTP switches to screenshare mode. | 565 // 100 kbps. This verifies the way GTP switches to screenshare mode. |
499 settings_.VP8()->numberOfTemporalLayers = 1; | 566 settings_.VP8()->numberOfTemporalLayers = 1; |
(...skipping 16 matching lines...)
516 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); | 583 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); |
517 | 584 |
518 // The for loop above did not set the bitrate of the highest layer. | 585 // The for loop above did not set the bitrate of the highest layer. |
519 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1] | 586 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1] |
520 .maxBitrate = 0; | 587 .maxBitrate = 0; |
521 // The highest layer has to correspond to the non-simulcast resolution. | 588 // The highest layer has to correspond to the non-simulcast resolution. |
522 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].width = | 589 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].width = |
523 settings_.width; | 590 settings_.width; |
524 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].height = | 591 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].height = |
525 settings_.height; | 592 settings_.height; |
526 SetUpRateAllocator(); | |
527 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); | 593 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); |
528 | 594 |
529 // Encode one frame and verify. | 595 // Encode one frame and verify. |
530 SetRates(kMaxBitrates[0] + kMaxBitrates[1], 30); | 596 encoder_->SetRates(kMaxBitrates[0] + kMaxBitrates[1], 30); |
531 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 597 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
532 kVideoFrameDelta); | 598 kVideoFrameDelta); |
533 EXPECT_CALL( | 599 EXPECT_CALL( |
534 encoder_callback_, | 600 encoder_callback_, |
535 OnEncodedImage(AllOf(Field(&EncodedImage::_frameType, kVideoFrameKey), | 601 OnEncodedImage(AllOf(Field(&EncodedImage::_frameType, kVideoFrameKey), |
536 Field(&EncodedImage::_encodedWidth, width), | 602 Field(&EncodedImage::_encodedWidth, width), |
537 Field(&EncodedImage::_encodedHeight, height)), | 603 Field(&EncodedImage::_encodedHeight, height)), |
538 _, _)) | 604 _, _)) |
539 .Times(1) | 605 .Times(1) |
540 .WillRepeatedly(Return( | 606 .WillRepeatedly(Return( |
541 EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); | 607 EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); |
542 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 608 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
543 | 609 |
544 // Switch back. | 610 // Switch back. |
545 DefaultSettings(&settings_, kDefaultTemporalLayerProfile); | 611 DefaultSettings(&settings_, kDefaultTemporalLayerProfile); |
546 // Start at the lowest bitrate for enabling base stream. | 612 // Start at the lowest bitrate for enabling base stream. |
547 settings_.startBitrate = kMinBitrates[0]; | 613 settings_.startBitrate = kMinBitrates[0]; |
548 SetUpRateAllocator(); | |
549 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); | 614 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); |
550 SetRates(settings_.startBitrate, 30); | 615 encoder_->SetRates(settings_.startBitrate, 30); |
551 ExpectStreams(kVideoFrameKey, 1); | 616 ExpectStreams(kVideoFrameKey, 1); |
552 // Resize |input_frame_| to the new resolution. | 617 // Resize |input_frame_| to the new resolution. |
553 half_width = (settings_.width + 1) / 2; | 618 half_width = (settings_.width + 1) / 2; |
554 input_buffer_ = I420Buffer::Create(settings_.width, settings_.height, | 619 input_buffer_ = I420Buffer::Create(settings_.width, settings_.height, |
555 settings_.width, half_width, half_width); | 620 settings_.width, half_width, half_width); |
556 input_buffer_->InitializeData(); | 621 input_buffer_->InitializeData(); |
557 input_frame_.reset( | 622 input_frame_.reset( |
558 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); | 623 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); |
559 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 624 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
560 } | 625 } |
561 | 626 |
562 void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); } | 627 void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); } |
563 | 628 |
564 void TestSwitchingToOneOddStream() { SwitchingToOneStream(1023, 769); } | 629 void TestSwitchingToOneOddStream() { SwitchingToOneStream(1023, 769); } |
565 | 630 |
566 void TestSwitchingToOneSmallStream() { SwitchingToOneStream(4, 4); } | 631 void TestSwitchingToOneSmallStream() { SwitchingToOneStream(4, 4); } |
567 | 632 |
568 void TestRPSIEncoder() { | 633 void TestRPSIEncoder() { |
569 Vp8TestEncodedImageCallback encoder_callback; | 634 Vp8TestEncodedImageCallback encoder_callback; |
570 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); | 635 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); |
571 | 636 |
572 SetRates(kMaxBitrates[2], 30); // To get all three streams. | 637 encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams. |
573 | 638 |
574 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); | 639 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); |
575 int picture_id = -1; | 640 int picture_id = -1; |
576 int temporal_layer = -1; | 641 int temporal_layer = -1; |
577 bool layer_sync = false; | 642 bool layer_sync = false; |
578 encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer, | 643 encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer, |
579 &layer_sync, 0); | 644 &layer_sync, 0); |
580 EXPECT_EQ(0, temporal_layer); | 645 EXPECT_EQ(0, temporal_layer); |
581 EXPECT_TRUE(layer_sync); | 646 EXPECT_TRUE(layer_sync); |
582 int key_frame_picture_id = picture_id; | 647 int key_frame_picture_id = picture_id; |
(...skipping 48 matching lines...)
631 // so this frame (the next one) must have |layer_sync| set to true. | 696 // so this frame (the next one) must have |layer_sync| set to true. |
632 EXPECT_TRUE(layer_sync); | 697 EXPECT_TRUE(layer_sync); |
633 } | 698 } |
634 | 699 |
635 void TestRPSIEncodeDecode() { | 700 void TestRPSIEncodeDecode() { |
636 Vp8TestEncodedImageCallback encoder_callback; | 701 Vp8TestEncodedImageCallback encoder_callback; |
637 Vp8TestDecodedImageCallback decoder_callback; | 702 Vp8TestDecodedImageCallback decoder_callback; |
638 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); | 703 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); |
639 decoder_->RegisterDecodeCompleteCallback(&decoder_callback); | 704 decoder_->RegisterDecodeCompleteCallback(&decoder_callback); |
640 | 705 |
641 SetRates(kMaxBitrates[2], 30); // To get all three streams. | 706 encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams. |
642 | 707 |
643 // Set color. | 708 // Set color. |
644 int plane_offset[kNumOfPlanes]; | 709 int plane_offset[kNumOfPlanes]; |
645 plane_offset[kYPlane] = kColorY; | 710 plane_offset[kYPlane] = kColorY; |
646 plane_offset[kUPlane] = kColorU; | 711 plane_offset[kUPlane] = kColorU; |
647 plane_offset[kVPlane] = kColorV; | 712 plane_offset[kVPlane] = kColorV; |
648 CreateImage(input_buffer_, plane_offset); | 713 CreateImage(input_buffer_, plane_offset); |
649 | 714 |
650 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); | 715 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); |
651 int picture_id = -1; | 716 int picture_id = -1; |
(...skipping 53 matching lines...)
705 decoder_->Decode(encoded_frame, false, NULL); | 770 decoder_->Decode(encoded_frame, false, NULL); |
706 EXPECT_EQ(2, decoder_callback.DecodedFrames()); | 771 EXPECT_EQ(2, decoder_callback.DecodedFrames()); |
707 } | 772 } |
708 | 773 |
709 // Test the layer pattern and sync flag for various spatial-temporal patterns. | 774 // Test the layer pattern and sync flag for various spatial-temporal patterns. |
710 // 3-3-3 pattern: 3 temporal layers for all spatial streams, so the same | 775 // 3-3-3 pattern: 3 temporal layers for all spatial streams, so the same |
711 // temporal_layer id and layer_sync are expected for all streams. | 776 // temporal_layer id and layer_sync are expected for all streams. |
712 void TestSaptioTemporalLayers333PatternEncoder() { | 777 void TestSaptioTemporalLayers333PatternEncoder() { |
713 Vp8TestEncodedImageCallback encoder_callback; | 778 Vp8TestEncodedImageCallback encoder_callback; |
714 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); | 779 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); |
715 SetRates(kMaxBitrates[2], 30); // To get all three streams. | 780 encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams. |
716 | 781 |
717 int expected_temporal_idx[3] = {-1, -1, -1}; | 782 int expected_temporal_idx[3] = {-1, -1, -1}; |
718 bool expected_layer_sync[3] = {false, false, false}; | 783 bool expected_layer_sync[3] = {false, false, false}; |
719 | 784 |
720 // First frame: #0. | 785 // First frame: #0. |
721 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); | 786 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); |
722 SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx); | 787 SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx); |
723 SetExpectedValues3<bool>(true, true, true, expected_layer_sync); | 788 SetExpectedValues3<bool>(true, true, true, expected_layer_sync); |
724 VerifyTemporalIdxAndSyncForAllSpatialLayers( | 789 VerifyTemporalIdxAndSyncForAllSpatialLayers( |
725 &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); | 790 &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); |
(...skipping 48 matching lines...)
774 // 3rd stream: -1, -1, -1, -1, .... | 839 // 3rd stream: -1, -1, -1, -1, .... |
775 // Regarding the 3rd stream, note that a stream/encoder with 1 temporal layer | 840 // Regarding the 3rd stream, note that a stream/encoder with 1 temporal layer |
776 // should always have temporal layer idx set to kNoTemporalIdx = -1. | 841 // should always have temporal layer idx set to kNoTemporalIdx = -1. |
777 // Since CodecSpecificInfoVP8.temporalIdx is uint8_t, this will wrap to 255. | 842 // Since CodecSpecificInfoVP8.temporalIdx is uint8_t, this will wrap to 255. |
778 // TODO(marpan): Although this seems safe for now, we should fix this. | 843 // TODO(marpan): Although this seems safe for now, we should fix this. |
779 void TestSpatioTemporalLayers321PatternEncoder() { | 844 void TestSpatioTemporalLayers321PatternEncoder() { |
780 int temporal_layer_profile[3] = {3, 2, 1}; | 845 int temporal_layer_profile[3] = {3, 2, 1}; |
781 SetUpCodec(temporal_layer_profile); | 846 SetUpCodec(temporal_layer_profile); |
782 Vp8TestEncodedImageCallback encoder_callback; | 847 Vp8TestEncodedImageCallback encoder_callback; |
783 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); | 848 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); |
784 SetRates(kMaxBitrates[2], 30); // To get all three streams. | 849 encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams. |
785 | 850 |
786 int expected_temporal_idx[3] = {-1, -1, -1}; | 851 int expected_temporal_idx[3] = {-1, -1, -1}; |
787 bool expected_layer_sync[3] = {false, false, false}; | 852 bool expected_layer_sync[3] = {false, false, false}; |
788 | 853 |
789 // First frame: #0. | 854 // First frame: #0. |
790 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); | 855 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); |
791 SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx); | 856 SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx); |
792 SetExpectedValues3<bool>(true, true, false, expected_layer_sync); | 857 SetExpectedValues3<bool>(true, true, false, expected_layer_sync); |
793 VerifyTemporalIdxAndSyncForAllSpatialLayers( | 858 VerifyTemporalIdxAndSyncForAllSpatialLayers( |
794 &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); | 859 &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); |
(...skipping 38 matching lines...)
833 VerifyTemporalIdxAndSyncForAllSpatialLayers( | 898 VerifyTemporalIdxAndSyncForAllSpatialLayers( |
834 &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); | 899 &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); |
835 } | 900 } |
836 | 901 |
837 void TestStrideEncodeDecode() { | 902 void TestStrideEncodeDecode() { |
838 Vp8TestEncodedImageCallback encoder_callback; | 903 Vp8TestEncodedImageCallback encoder_callback; |
839 Vp8TestDecodedImageCallback decoder_callback; | 904 Vp8TestDecodedImageCallback decoder_callback; |
840 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); | 905 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); |
841 decoder_->RegisterDecodeCompleteCallback(&decoder_callback); | 906 decoder_->RegisterDecodeCompleteCallback(&decoder_callback); |
842 | 907 |
843 SetRates(kMaxBitrates[2], 30); // To get all three streams. | 908 encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams. |
844 // Setting two (possibly) problematic use cases for stride: | 909 // Setting two (possibly) problematic use cases for stride: |
845 // 1. stride > width 2. stride_y != stride_uv/2 | 910 // 1. stride > width 2. stride_y != stride_uv/2 |
846 int stride_y = kDefaultWidth + 20; | 911 int stride_y = kDefaultWidth + 20; |
847 int stride_uv = ((kDefaultWidth + 1) / 2) + 5; | 912 int stride_uv = ((kDefaultWidth + 1) / 2) + 5; |
848 input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, stride_y, | 913 input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, stride_y, |
849 stride_uv, stride_uv); | 914 stride_uv, stride_uv); |
850 input_frame_.reset( | 915 input_frame_.reset( |
851 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); | 916 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); |
852 | 917 |
853 // Set color. | 918 // Set color. |
(...skipping 15 matching lines...)
869 | 934 |
870 EncodedImage encoded_frame; | 935 EncodedImage encoded_frame; |
871 // Only encoding one frame - so will be a key frame. | 936 // Only encoding one frame - so will be a key frame. |
872 encoder_callback.GetLastEncodedKeyFrame(&encoded_frame); | 937 encoder_callback.GetLastEncodedKeyFrame(&encoded_frame); |
873 EXPECT_EQ(0, decoder_->Decode(encoded_frame, false, NULL)); | 938 EXPECT_EQ(0, decoder_->Decode(encoded_frame, false, NULL)); |
874 encoder_callback.GetLastEncodedFrame(&encoded_frame); | 939 encoder_callback.GetLastEncodedFrame(&encoded_frame); |
875 decoder_->Decode(encoded_frame, false, NULL); | 940 decoder_->Decode(encoded_frame, false, NULL); |
876 EXPECT_EQ(2, decoder_callback.DecodedFrames()); | 941 EXPECT_EQ(2, decoder_callback.DecodedFrames()); |
877 } | 942 } |
878 | 943 |
| 944 void TestSkipEncodingUnusedStreams() { |
| 945 SkipEncodingUnusedStreamsTest test; |
| 946 std::vector<unsigned int> configured_bitrate = |
| 947 test.RunTest(encoder_.get(), &settings_, |
| 948 1); // Target bit rate 1, to force all streams but the |
| 949 // base one to be exceeding bandwidth constraints. |
| 950 EXPECT_EQ(static_cast<size_t>(kNumberOfSimulcastStreams), |
| 951 configured_bitrate.size()); |
| 952 |
| 953 unsigned int min_bitrate = |
| 954 std::max(settings_.simulcastStream[0].minBitrate, settings_.minBitrate); |
| 955 int stream = 0; |
| 956 for (std::vector<unsigned int>::const_iterator it = |
| 957 configured_bitrate.begin(); |
| 958 it != configured_bitrate.end(); ++it) { |
| 959 if (stream == 0) { |
| 960 EXPECT_EQ(min_bitrate, *it); |
| 961 } else { |
| 962 EXPECT_EQ(0u, *it); |
| 963 } |
| 964 ++stream; |
| 965 } |
| 966 } |
| 967 |
879 std::unique_ptr<VP8Encoder> encoder_; | 968 std::unique_ptr<VP8Encoder> encoder_; |
880 MockEncodedImageCallback encoder_callback_; | 969 MockEncodedImageCallback encoder_callback_; |
881 std::unique_ptr<VP8Decoder> decoder_; | 970 std::unique_ptr<VP8Decoder> decoder_; |
882 MockDecodedImageCallback decoder_callback_; | 971 MockDecodedImageCallback decoder_callback_; |
883 VideoCodec settings_; | 972 VideoCodec settings_; |
884 rtc::scoped_refptr<I420Buffer> input_buffer_; | 973 rtc::scoped_refptr<I420Buffer> input_buffer_; |
885 std::unique_ptr<VideoFrame> input_frame_; | 974 std::unique_ptr<VideoFrame> input_frame_; |
886 std::unique_ptr<SimulcastRateAllocator> rate_allocator_; | |
887 }; | 975 }; |
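This header only supplies the parametrized test bodies; a concrete fixture binds them to a real encoder/decoder pair and to gtest cases in the .cc file that includes it. A minimal sketch of such a binding (the fixture name TestVp8Impl is illustrative, not taken from this diff):

    class TestVp8Impl : public TestVp8Simulcast {
     public:
      TestVp8Impl()
          : TestVp8Simulcast(VP8Encoder::Create(), VP8Decoder::Create()) {}
    };

    TEST_F(TestVp8Impl, TestKeyFrameRequestsOnAllStreams) {
      TestVp8Simulcast::TestKeyFrameRequestsOnAllStreams();
    }

    TEST_F(TestVp8Impl, TestSkipEncodingUnusedStreams) {
      TestVp8Simulcast::TestSkipEncodingUnusedStreams();
    }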
888 | 976 |
889 } // namespace testing | 977 } // namespace testing |
890 } // namespace webrtc | 978 } // namespace webrtc |
891 | 979 |
892 #endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_ | 980 #endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_ |