| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_ | 11 #ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_ |
| 12 #define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_ | 12 #define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_ |
| 13 | 13 |
| 14 #include <algorithm> | 14 #include <algorithm> |
| 15 #include <map> |
| 15 #include <memory> | 16 #include <memory> |
| 16 #include <vector> | 17 #include <vector> |
| 17 | 18 |
| 18 #include "webrtc/base/checks.h" | 19 #include "webrtc/base/checks.h" |
| 19 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" | 20 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
| 20 #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h" | 21 #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h" |
| 21 #include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h" | 22 #include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h" |
| 22 #include "webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h" | 23 #include "webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h" |
| 24 #include "webrtc/modules/video_coding/utility/simulcast_rate_allocator.h" |
| 23 #include "webrtc/test/gtest.h" | 25 #include "webrtc/test/gtest.h" |
| 24 #include "webrtc/video_frame.h" | 26 #include "webrtc/video_frame.h" |
| 25 | 27 |
| 26 using ::testing::_; | 28 using ::testing::_; |
| 27 using ::testing::AllOf; | 29 using ::testing::AllOf; |
| 28 using ::testing::Field; | 30 using ::testing::Field; |
| 29 using ::testing::Return; | 31 using ::testing::Return; |
| 30 | 32 |
| 31 namespace webrtc { | 33 namespace webrtc { |
| 32 namespace testing { | 34 namespace testing { |
| (...skipping 107 matching lines...) |
| 140 int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override { | 142 int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override { |
| 141 RTC_NOTREACHED(); | 143 RTC_NOTREACHED(); |
| 142 return -1; | 144 return -1; |
| 143 } | 145 } |
| 144 int DecodedFrames() { return decoded_frames_; } | 146 int DecodedFrames() { return decoded_frames_; } |
| 145 | 147 |
| 146 private: | 148 private: |
| 147 int decoded_frames_; | 149 int decoded_frames_; |
| 148 }; | 150 }; |
| 149 | 151 |
| 150 class SkipEncodingUnusedStreamsTest { | |
| 151 public: | |
| 152 std::vector<unsigned int> RunTest(VP8Encoder* encoder, | |
| 153 VideoCodec* settings, | |
| 154 uint32_t target_bitrate) { | |
| 155 SpyingTemporalLayersFactory spy_factory; | |
| 156 settings->VP8()->tl_factory = &spy_factory; | |
| 157 EXPECT_EQ(0, encoder->InitEncode(settings, 1, 1200)); | |
| 158 | |
| 159 encoder->SetRates(target_bitrate, 30); | |
| 160 | |
| 161 std::vector<unsigned int> configured_bitrates; | |
| 162 for (std::vector<TemporalLayers*>::const_iterator it = | |
| 163 spy_factory.spying_layers_.begin(); | |
| 164 it != spy_factory.spying_layers_.end(); ++it) { | |
| 165 configured_bitrates.push_back( | |
| 166 static_cast<SpyingTemporalLayers*>(*it)->configured_bitrate_); | |
| 167 } | |
| 168 return configured_bitrates; | |
| 169 } | |
| 170 | |
| 171 class SpyingTemporalLayers : public TemporalLayers { | |
| 172 public: | |
| 173 explicit SpyingTemporalLayers(TemporalLayers* layers) | |
| 174 : configured_bitrate_(0), layers_(layers) {} | |
| 175 | |
| 176 virtual ~SpyingTemporalLayers() { delete layers_; } | |
| 177 | |
| 178 int EncodeFlags(uint32_t timestamp) override { | |
| 179 return layers_->EncodeFlags(timestamp); | |
| 180 } | |
| 181 | |
| 182 bool ConfigureBitrates(int bitrate_kbit, | |
| 183 int max_bitrate_kbit, | |
| 184 int framerate, | |
| 185 vpx_codec_enc_cfg_t* cfg) override { | |
| 186 configured_bitrate_ = bitrate_kbit; | |
| 187 return layers_->ConfigureBitrates(bitrate_kbit, max_bitrate_kbit, | |
| 188 framerate, cfg); | |
| 189 } | |
| 190 | |
| 191 void PopulateCodecSpecific(bool base_layer_sync, | |
| 192 CodecSpecificInfoVP8* vp8_info, | |
| 193 uint32_t timestamp) override { | |
| 194 layers_->PopulateCodecSpecific(base_layer_sync, vp8_info, timestamp); | |
| 195 } | |
| 196 | |
| 197 void FrameEncoded(unsigned int size, uint32_t timestamp, int qp) override { | |
| 198 layers_->FrameEncoded(size, timestamp, qp); | |
| 199 } | |
| 200 | |
| 201 int CurrentLayerId() const override { return layers_->CurrentLayerId(); } | |
| 202 | |
| 203 bool UpdateConfiguration(vpx_codec_enc_cfg_t* cfg) override { | |
| 204 return false; | |
| 205 } | |
| 206 | |
| 207 int configured_bitrate_; | |
| 208 TemporalLayers* layers_; | |
| 209 }; | |
| 210 | |
| 211 class SpyingTemporalLayersFactory : public TemporalLayersFactory { | |
| 212 public: | |
| 213 virtual ~SpyingTemporalLayersFactory() {} | |
| 214 TemporalLayers* Create(int temporal_layers, | |
| 215 uint8_t initial_tl0_pic_idx) const override { | |
| 216 SpyingTemporalLayers* layers = | |
| 217 new SpyingTemporalLayers(TemporalLayersFactory::Create( | |
| 218 temporal_layers, initial_tl0_pic_idx)); | |
| 219 spying_layers_.push_back(layers); | |
| 220 return layers; | |
| 221 } | |
| 222 | |
| 223 mutable std::vector<TemporalLayers*> spying_layers_; | |
| 224 }; | |
| 225 }; | |
| 226 | |
| 227 class TestVp8Simulcast : public ::testing::Test { | 152 class TestVp8Simulcast : public ::testing::Test { |
| 228 public: | 153 public: |
| 229 TestVp8Simulcast(VP8Encoder* encoder, VP8Decoder* decoder) | 154 TestVp8Simulcast(VP8Encoder* encoder, VP8Decoder* decoder) |
| 230 : encoder_(encoder), decoder_(decoder) {} | 155 : encoder_(encoder), decoder_(decoder) {} |
| 231 | 156 |
| 232 static void SetPlane(uint8_t* data, | 157 static void SetPlane(uint8_t* data, |
| 233 uint8_t value, | 158 uint8_t value, |
| 234 int width, | 159 int width, |
| 235 int height, | 160 int height, |
| 236 int stride) { | 161 int stride) { |
| (...skipping 21 matching lines...) |
| 258 chroma_width, chroma_height, | 183 chroma_width, chroma_height, |
| 259 buffer->StrideU()); | 184 buffer->StrideU()); |
| 260 | 185 |
| 261 SetPlane(buffer->MutableDataV(), plane_colors[2], | 186 SetPlane(buffer->MutableDataV(), plane_colors[2], |
| 262 chroma_width, chroma_height, | 187 chroma_width, chroma_height, |
| 263 buffer->StrideV()); | 188 buffer->StrideV()); |
| 264 } | 189 } |
| 265 | 190 |
| 266 static void DefaultSettings(VideoCodec* settings, | 191 static void DefaultSettings(VideoCodec* settings, |
| 267 const int* temporal_layer_profile) { | 192 const int* temporal_layer_profile) { |
| 268 assert(settings); | 193 RTC_CHECK(settings); |
| 269 memset(settings, 0, sizeof(VideoCodec)); | 194 memset(settings, 0, sizeof(VideoCodec)); |
| 270 strncpy(settings->plName, "VP8", 4); | 195 strncpy(settings->plName, "VP8", 4); |
| 271 settings->codecType = kVideoCodecVP8; | 196 settings->codecType = kVideoCodecVP8; |
| 272 // 96 to 127 dynamic payload types for video codecs | 197 // 96 to 127 dynamic payload types for video codecs |
| 273 settings->plType = 120; | 198 settings->plType = 120; |
| 274 settings->startBitrate = 300; | 199 settings->startBitrate = 300; |
| 275 settings->minBitrate = 30; | 200 settings->minBitrate = 30; |
| 276 settings->maxBitrate = 0; | 201 settings->maxBitrate = 0; |
| 277 settings->maxFramerate = 30; | 202 settings->maxFramerate = 30; |
| 278 settings->width = kDefaultWidth; | 203 settings->width = kDefaultWidth; |
| (...skipping 29 matching lines...) |
| 308 stream->width = width; | 233 stream->width = width; |
| 309 stream->height = height; | 234 stream->height = height; |
| 310 stream->maxBitrate = max_bitrate; | 235 stream->maxBitrate = max_bitrate; |
| 311 stream->minBitrate = min_bitrate; | 236 stream->minBitrate = min_bitrate; |
| 312 stream->targetBitrate = target_bitrate; | 237 stream->targetBitrate = target_bitrate; |
| 313 stream->numberOfTemporalLayers = num_temporal_layers; | 238 stream->numberOfTemporalLayers = num_temporal_layers; |
| 314 stream->qpMax = 45; | 239 stream->qpMax = 45; |
| 315 } | 240 } |
| 316 | 241 |
| 317 protected: | 242 protected: |
| 318 virtual void SetUp() { SetUpCodec(kDefaultTemporalLayerProfile); } | 243 void SetUp() override { SetUpCodec(kDefaultTemporalLayerProfile); } |
| 319 | 244 |
| 320 virtual void SetUpCodec(const int* temporal_layer_profile) { | 245 void TearDown() override { |
| 246 encoder_->Release(); |
| 247 decoder_->Release(); |
| 248 } |
| 249 |
| 250 void SetUpCodec(const int* temporal_layer_profile) { |
| 321 encoder_->RegisterEncodeCompleteCallback(&encoder_callback_); | 251 encoder_->RegisterEncodeCompleteCallback(&encoder_callback_); |
| 322 decoder_->RegisterDecodeCompleteCallback(&decoder_callback_); | 252 decoder_->RegisterDecodeCompleteCallback(&decoder_callback_); |
| 323 DefaultSettings(&settings_, temporal_layer_profile); | 253 DefaultSettings(&settings_, temporal_layer_profile); |
| 254 SetUpRateAllocator(); |
| 324 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); | 255 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); |
| 325 EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1)); | 256 EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1)); |
| 326 int half_width = (kDefaultWidth + 1) / 2; | 257 int half_width = (kDefaultWidth + 1) / 2; |
| 327 input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, | 258 input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, |
| 328 kDefaultWidth, half_width, half_width); | 259 kDefaultWidth, half_width, half_width); |
| 329 input_buffer_->InitializeData(); | 260 input_buffer_->InitializeData(); |
| 330 input_frame_.reset( | 261 input_frame_.reset( |
| 331 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); | 262 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); |
| 332 } | 263 } |
| 333 | 264 |
| 334 virtual void TearDown() { | 265 void SetUpRateAllocator() { |
| 335 encoder_->Release(); | 266 TemporalLayersFactory* tl_factory = new TemporalLayersFactory(); |
| 336 decoder_->Release(); | 267 rate_allocator_.reset(new SimulcastRateAllocator( |
| 268 settings_, std::unique_ptr<TemporalLayersFactory>(tl_factory))); |
| 269 settings_.VP8()->tl_factory = tl_factory; |
| 270 } |
| 271 |
| 272 void SetRates(uint32_t bitrate_kbps, uint32_t fps) { |
| 273 encoder_->SetRateAllocation( |
| 274 rate_allocator_->GetAllocation(bitrate_kbps * 1000, fps), fps); |
| 337 } | 275 } |
| 338 | 276 |
| 339 void ExpectStreams(FrameType frame_type, int expected_video_streams) { | 277 void ExpectStreams(FrameType frame_type, int expected_video_streams) { |
| 340 ASSERT_GE(expected_video_streams, 0); | 278 ASSERT_GE(expected_video_streams, 0); |
| 341 ASSERT_LE(expected_video_streams, kNumberOfSimulcastStreams); | 279 ASSERT_LE(expected_video_streams, kNumberOfSimulcastStreams); |
| 342 if (expected_video_streams >= 1) { | 280 if (expected_video_streams >= 1) { |
| 343 EXPECT_CALL( | 281 EXPECT_CALL( |
| 344 encoder_callback_, | 282 encoder_callback_, |
| 345 OnEncodedImage( | 283 OnEncodedImage( |
| 346 AllOf(Field(&EncodedImage::_frameType, frame_type), | 284 AllOf(Field(&EncodedImage::_frameType, frame_type), |
| (...skipping 42 matching lines...) |
| 389 encoder_callback->GetLastEncodedFrameInfo(&picture_id, &temporal_layer, | 327 encoder_callback->GetLastEncodedFrameInfo(&picture_id, &temporal_layer, |
| 390 &layer_sync, i); | 328 &layer_sync, i); |
| 391 EXPECT_EQ(expected_temporal_idx[i], temporal_layer); | 329 EXPECT_EQ(expected_temporal_idx[i], temporal_layer); |
| 392 EXPECT_EQ(expected_layer_sync[i], layer_sync); | 330 EXPECT_EQ(expected_layer_sync[i], layer_sync); |
| 393 } | 331 } |
| 394 } | 332 } |
| 395 | 333 |
| 396 // We currently expect all active streams to generate a key frame even though | 334 // We currently expect all active streams to generate a key frame even though |
| 397 // a key frame was only requested for some of them. | 335 // a key frame was only requested for some of them. |
| 398 void TestKeyFrameRequestsOnAllStreams() { | 336 void TestKeyFrameRequestsOnAllStreams() { |
| 399 encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams. | 337 SetRates(kMaxBitrates[2], 30); // To get all three streams. |
| 400 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 338 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
| 401 kVideoFrameDelta); | 339 kVideoFrameDelta); |
| 402 ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams); | 340 ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams); |
| 403 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 341 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 404 | 342 |
| 405 ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams); | 343 ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams); |
| 406 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 344 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 407 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 345 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 408 | 346 |
| 409 frame_types[0] = kVideoFrameKey; | 347 frame_types[0] = kVideoFrameKey; |
| (...skipping 14 matching lines...) |
| 424 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 362 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 425 | 363 |
| 426 std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta); | 364 std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta); |
| 427 ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams); | 365 ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams); |
| 428 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 366 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 429 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 367 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 430 } | 368 } |
| 431 | 369 |
| 432 void TestPaddingAllStreams() { | 370 void TestPaddingAllStreams() { |
| 433 // We should always encode the base layer. | 371 // We should always encode the base layer. |
| 434 encoder_->SetRates(kMinBitrates[0] - 1, 30); | 372 SetRates(kMinBitrates[0] - 1, 30); |
| 435 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 373 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
| 436 kVideoFrameDelta); | 374 kVideoFrameDelta); |
| 437 ExpectStreams(kVideoFrameKey, 1); | 375 ExpectStreams(kVideoFrameKey, 1); |
| 438 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 376 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 439 | 377 |
| 440 ExpectStreams(kVideoFrameDelta, 1); | 378 ExpectStreams(kVideoFrameDelta, 1); |
| 441 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 379 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 442 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 380 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 443 } | 381 } |
| 444 | 382 |
| 445 void TestPaddingTwoStreams() { | 383 void TestPaddingTwoStreams() { |
| 446 // We have just enough to get only the first stream and padding for two. | 384 // We have just enough to get only the first stream and padding for two. |
| 447 encoder_->SetRates(kMinBitrates[0], 30); | 385 SetRates(kMinBitrates[0], 30); |
| 448 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 386 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
| 449 kVideoFrameDelta); | 387 kVideoFrameDelta); |
| 450 ExpectStreams(kVideoFrameKey, 1); | 388 ExpectStreams(kVideoFrameKey, 1); |
| 451 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 389 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 452 | 390 |
| 453 ExpectStreams(kVideoFrameDelta, 1); | 391 ExpectStreams(kVideoFrameDelta, 1); |
| 454 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 392 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 455 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 393 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 456 } | 394 } |
| 457 | 395 |
| 458 void TestPaddingTwoStreamsOneMaxedOut() { | 396 void TestPaddingTwoStreamsOneMaxedOut() { |
| 459 // We are just below limit of sending second stream, so we should get | 397 // We are just below limit of sending second stream, so we should get |
| 460 // the first stream maxed out (at |maxBitrate|), and padding for two. | 398 // the first stream maxed out (at |maxBitrate|), and padding for two. |
| 461 encoder_->SetRates(kTargetBitrates[0] + kMinBitrates[1] - 1, 30); | 399 SetRates(kTargetBitrates[0] + kMinBitrates[1] - 1, 30); |
| 462 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 400 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
| 463 kVideoFrameDelta); | 401 kVideoFrameDelta); |
| 464 ExpectStreams(kVideoFrameKey, 1); | 402 ExpectStreams(kVideoFrameKey, 1); |
| 465 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 403 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 466 | 404 |
| 467 ExpectStreams(kVideoFrameDelta, 1); | 405 ExpectStreams(kVideoFrameDelta, 1); |
| 468 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 406 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 469 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 407 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 470 } | 408 } |
| 471 | 409 |
| 472 void TestPaddingOneStream() { | 410 void TestPaddingOneStream() { |
| 473 // We have just enough to send two streams, so padding for one stream. | 411 // We have just enough to send two streams, so padding for one stream. |
| 474 encoder_->SetRates(kTargetBitrates[0] + kMinBitrates[1], 30); | 412 SetRates(kTargetBitrates[0] + kMinBitrates[1], 30); |
| 475 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 413 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
| 476 kVideoFrameDelta); | 414 kVideoFrameDelta); |
| 477 ExpectStreams(kVideoFrameKey, 2); | 415 ExpectStreams(kVideoFrameKey, 2); |
| 478 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 416 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 479 | 417 |
| 480 ExpectStreams(kVideoFrameDelta, 2); | 418 ExpectStreams(kVideoFrameDelta, 2); |
| 481 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 419 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 482 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 420 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 483 } | 421 } |
| 484 | 422 |
| 485 void TestPaddingOneStreamTwoMaxedOut() { | 423 void TestPaddingOneStreamTwoMaxedOut() { |
| 486 // We are just below limit of sending third stream, so we should get | 424 // We are just below limit of sending third stream, so we should get |
| 487 // first stream's rate maxed out at |targetBitrate|, second at |maxBitrate|. | 425 // first stream's rate maxed out at |targetBitrate|, second at |maxBitrate|. |
| 488 encoder_->SetRates( | 426 SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] - 1, 30); |
| 489 kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] - 1, 30); | |
| 490 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 427 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
| 491 kVideoFrameDelta); | 428 kVideoFrameDelta); |
| 492 ExpectStreams(kVideoFrameKey, 2); | 429 ExpectStreams(kVideoFrameKey, 2); |
| 493 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 430 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 494 | 431 |
| 495 ExpectStreams(kVideoFrameDelta, 2); | 432 ExpectStreams(kVideoFrameDelta, 2); |
| 496 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 433 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 497 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 434 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 498 } | 435 } |
| 499 | 436 |
| 500 void TestSendAllStreams() { | 437 void TestSendAllStreams() { |
| 501 // We have just enough to send all streams. | 438 // We have just enough to send all streams. |
| 502 encoder_->SetRates( | 439 SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2], 30); |
| 503 kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2], 30); | |
| 504 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 440 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
| 505 kVideoFrameDelta); | 441 kVideoFrameDelta); |
| 506 ExpectStreams(kVideoFrameKey, 3); | 442 ExpectStreams(kVideoFrameKey, 3); |
| 507 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 443 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 508 | 444 |
| 509 ExpectStreams(kVideoFrameDelta, 3); | 445 ExpectStreams(kVideoFrameDelta, 3); |
| 510 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 446 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 511 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 447 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 512 } | 448 } |
| 513 | 449 |
| 514 void TestDisablingStreams() { | 450 void TestDisablingStreams() { |
| 515 // We should get three media streams. | 451 // We should get three media streams. |
| 516 encoder_->SetRates(kMaxBitrates[0] + kMaxBitrates[1] + kMaxBitrates[2], 30); | 452 SetRates(kMaxBitrates[0] + kMaxBitrates[1] + kMaxBitrates[2], 30); |
| 517 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 453 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
| 518 kVideoFrameDelta); | 454 kVideoFrameDelta); |
| 519 ExpectStreams(kVideoFrameKey, 3); | 455 ExpectStreams(kVideoFrameKey, 3); |
| 520 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 456 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 521 | 457 |
| 522 ExpectStreams(kVideoFrameDelta, 3); | 458 ExpectStreams(kVideoFrameDelta, 3); |
| 523 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 459 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 524 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 460 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 525 | 461 |
| 526 // We should only get two streams and padding for one. | 462 // We should only get two streams and padding for one. |
| 527 encoder_->SetRates( | 463 SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); |
| 528 kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); | |
| 529 ExpectStreams(kVideoFrameDelta, 2); | 464 ExpectStreams(kVideoFrameDelta, 2); |
| 530 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 465 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 531 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 466 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 532 | 467 |
| 533 // We should only get the first stream and padding for two. | 468 // We should only get the first stream and padding for two. |
| 534 encoder_->SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30); | 469 SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30); |
| 535 ExpectStreams(kVideoFrameDelta, 1); | 470 ExpectStreams(kVideoFrameDelta, 1); |
| 536 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 471 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 537 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 472 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 538 | 473 |
| 539 // We don't have enough bitrate for the thumbnail stream, but we should get | 474 // We don't have enough bitrate for the thumbnail stream, but we should get |
| 540 // it anyway with current configuration. | 475 // it anyway with current configuration. |
| 541 encoder_->SetRates(kTargetBitrates[0] - 1, 30); | 476 SetRates(kTargetBitrates[0] - 1, 30); |
| 542 ExpectStreams(kVideoFrameDelta, 1); | 477 ExpectStreams(kVideoFrameDelta, 1); |
| 543 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 478 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 544 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 479 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 545 | 480 |
| 546 // We should only get two streams and padding for one. | 481 // We should only get two streams and padding for one. |
| 547 encoder_->SetRates( | 482 SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); |
| 548 kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); | |
| 549 // We get a key frame because a new stream is being enabled. | 483 // We get a key frame because a new stream is being enabled. |
| 550 ExpectStreams(kVideoFrameKey, 2); | 484 ExpectStreams(kVideoFrameKey, 2); |
| 551 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 485 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 552 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 486 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 553 | 487 |
| 554 // We should get all three streams. | 488 // We should get all three streams. |
| 555 encoder_->SetRates( | 489 SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30); |
| 556 kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30); | |
| 557 // We get a key frame because a new stream is being enabled. | 490 // We get a key frame because a new stream is being enabled. |
| 558 ExpectStreams(kVideoFrameKey, 3); | 491 ExpectStreams(kVideoFrameKey, 3); |
| 559 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); | 492 input_frame_->set_timestamp(input_frame_->timestamp() + 3000); |
| 560 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 493 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 561 } | 494 } |
| 562 | 495 |
| 563 void SwitchingToOneStream(int width, int height) { | 496 void SwitchingToOneStream(int width, int height) { |
| 564 // Disable all streams except the last and set the bitrate of the last to | 497 // Disable all streams except the last and set the bitrate of the last to |
| 565 // 100 kbps. This verifies the way GTP switches to screenshare mode. | 498 // 100 kbps. This verifies the way GTP switches to screenshare mode. |
| 566 settings_.VP8()->numberOfTemporalLayers = 1; | 499 settings_.VP8()->numberOfTemporalLayers = 1; |
| (...skipping 16 matching lines...) Expand all Loading... |
| 583 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); | 516 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); |
| 584 | 517 |
| 585 // The for loop above did not set the bitrate of the highest layer. | 518 // The for loop above did not set the bitrate of the highest layer. |
| 586 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1] | 519 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1] |
| 587 .maxBitrate = 0; | 520 .maxBitrate = 0; |
| 588 // The highest layer has to correspond to the non-simulcast resolution. | 521 // The highest layer has to correspond to the non-simulcast resolution. |
| 589 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].width = | 522 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].width = |
| 590 settings_.width; | 523 settings_.width; |
| 591 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].height = | 524 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].height = |
| 592 settings_.height; | 525 settings_.height; |
| 526 SetUpRateAllocator(); |
| 593 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); | 527 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); |
| 594 | 528 |
| 595 // Encode one frame and verify. | 529 // Encode one frame and verify. |
| 596 encoder_->SetRates(kMaxBitrates[0] + kMaxBitrates[1], 30); | 530 SetRates(kMaxBitrates[0] + kMaxBitrates[1], 30); |
| 597 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, | 531 std::vector<FrameType> frame_types(kNumberOfSimulcastStreams, |
| 598 kVideoFrameDelta); | 532 kVideoFrameDelta); |
| 599 EXPECT_CALL( | 533 EXPECT_CALL( |
| 600 encoder_callback_, | 534 encoder_callback_, |
| 601 OnEncodedImage(AllOf(Field(&EncodedImage::_frameType, kVideoFrameKey), | 535 OnEncodedImage(AllOf(Field(&EncodedImage::_frameType, kVideoFrameKey), |
| 602 Field(&EncodedImage::_encodedWidth, width), | 536 Field(&EncodedImage::_encodedWidth, width), |
| 603 Field(&EncodedImage::_encodedHeight, height)), | 537 Field(&EncodedImage::_encodedHeight, height)), |
| 604 _, _)) | 538 _, _)) |
| 605 .Times(1) | 539 .Times(1) |
| 606 .WillRepeatedly(Return( | 540 .WillRepeatedly(Return( |
| 607 EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); | 541 EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); |
| 608 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 542 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 609 | 543 |
| 610 // Switch back. | 544 // Switch back. |
| 611 DefaultSettings(&settings_, kDefaultTemporalLayerProfile); | 545 DefaultSettings(&settings_, kDefaultTemporalLayerProfile); |
| 612 // Start at the lowest bitrate for enabling base stream. | 546 // Start at the lowest bitrate for enabling base stream. |
| 613 settings_.startBitrate = kMinBitrates[0]; | 547 settings_.startBitrate = kMinBitrates[0]; |
| 548 SetUpRateAllocator(); |
| 614 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); | 549 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); |
| 615 encoder_->SetRates(settings_.startBitrate, 30); | 550 SetRates(settings_.startBitrate, 30); |
| 616 ExpectStreams(kVideoFrameKey, 1); | 551 ExpectStreams(kVideoFrameKey, 1); |
| 617 // Resize |input_frame_| to the new resolution. | 552 // Resize |input_frame_| to the new resolution. |
| 618 half_width = (settings_.width + 1) / 2; | 553 half_width = (settings_.width + 1) / 2; |
| 619 input_buffer_ = I420Buffer::Create(settings_.width, settings_.height, | 554 input_buffer_ = I420Buffer::Create(settings_.width, settings_.height, |
| 620 settings_.width, half_width, half_width); | 555 settings_.width, half_width, half_width); |
| 621 input_buffer_->InitializeData(); | 556 input_buffer_->InitializeData(); |
| 622 input_frame_.reset( | 557 input_frame_.reset( |
| 623 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); | 558 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); |
| 624 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); | 559 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); |
| 625 } | 560 } |
| 626 | 561 |
| 627 void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); } | 562 void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); } |
| 628 | 563 |
| 629 void TestSwitchingToOneOddStream() { SwitchingToOneStream(1023, 769); } | 564 void TestSwitchingToOneOddStream() { SwitchingToOneStream(1023, 769); } |
| 630 | 565 |
| 631 void TestSwitchingToOneSmallStream() { SwitchingToOneStream(4, 4); } | 566 void TestSwitchingToOneSmallStream() { SwitchingToOneStream(4, 4); } |
| 632 | 567 |
| 633 void TestRPSIEncoder() { | 568 void TestRPSIEncoder() { |
| 634 Vp8TestEncodedImageCallback encoder_callback; | 569 Vp8TestEncodedImageCallback encoder_callback; |
| 635 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); | 570 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); |
| 636 | 571 |
| 637 encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams. | 572 SetRates(kMaxBitrates[2], 30); // To get all three streams. |
| 638 | 573 |
| 639 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); | 574 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); |
| 640 int picture_id = -1; | 575 int picture_id = -1; |
| 641 int temporal_layer = -1; | 576 int temporal_layer = -1; |
| 642 bool layer_sync = false; | 577 bool layer_sync = false; |
| 643 encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer, | 578 encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer, |
| 644 &layer_sync, 0); | 579 &layer_sync, 0); |
| 645 EXPECT_EQ(0, temporal_layer); | 580 EXPECT_EQ(0, temporal_layer); |
| 646 EXPECT_TRUE(layer_sync); | 581 EXPECT_TRUE(layer_sync); |
| 647 int key_frame_picture_id = picture_id; | 582 int key_frame_picture_id = picture_id; |
| (...skipping 48 matching lines...) |
| 696 // so this frame (the next one) must have |layer_sync| set to true. | 631 // so this frame (the next one) must have |layer_sync| set to true. |
| 697 EXPECT_TRUE(layer_sync); | 632 EXPECT_TRUE(layer_sync); |
| 698 } | 633 } |
| 699 | 634 |
| 700 void TestRPSIEncodeDecode() { | 635 void TestRPSIEncodeDecode() { |
| 701 Vp8TestEncodedImageCallback encoder_callback; | 636 Vp8TestEncodedImageCallback encoder_callback; |
| 702 Vp8TestDecodedImageCallback decoder_callback; | 637 Vp8TestDecodedImageCallback decoder_callback; |
| 703 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); | 638 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); |
| 704 decoder_->RegisterDecodeCompleteCallback(&decoder_callback); | 639 decoder_->RegisterDecodeCompleteCallback(&decoder_callback); |
| 705 | 640 |
| 706 encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams. | 641 SetRates(kMaxBitrates[2], 30); // To get all three streams. |
| 707 | 642 |
| 708 // Set color. | 643 // Set color. |
| 709 int plane_offset[kNumOfPlanes]; | 644 int plane_offset[kNumOfPlanes]; |
| 710 plane_offset[kYPlane] = kColorY; | 645 plane_offset[kYPlane] = kColorY; |
| 711 plane_offset[kUPlane] = kColorU; | 646 plane_offset[kUPlane] = kColorU; |
| 712 plane_offset[kVPlane] = kColorV; | 647 plane_offset[kVPlane] = kColorV; |
| 713 CreateImage(input_buffer_, plane_offset); | 648 CreateImage(input_buffer_, plane_offset); |
| 714 | 649 |
| 715 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); | 650 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); |
| 716 int picture_id = -1; | 651 int picture_id = -1; |
| (...skipping 53 matching lines...) |
| 770 decoder_->Decode(encoded_frame, false, NULL); | 705 decoder_->Decode(encoded_frame, false, NULL); |
| 771 EXPECT_EQ(2, decoder_callback.DecodedFrames()); | 706 EXPECT_EQ(2, decoder_callback.DecodedFrames()); |
| 772 } | 707 } |
| 773 | 708 |
| 774 // Test the layer pattern and sync flag for various spatial-temporal patterns. | 709 // Test the layer pattern and sync flag for various spatial-temporal patterns. |
| 775 // 3-3-3 pattern: 3 temporal layers for all spatial streams, so same | 710 // 3-3-3 pattern: 3 temporal layers for all spatial streams, so same |
| 776 // temporal_layer id and layer_sync is expected for all streams. | 711 // temporal_layer id and layer_sync is expected for all streams. |
| 777 void TestSaptioTemporalLayers333PatternEncoder() { | 712 void TestSaptioTemporalLayers333PatternEncoder() { |
| 778 Vp8TestEncodedImageCallback encoder_callback; | 713 Vp8TestEncodedImageCallback encoder_callback; |
| 779 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); | 714 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); |
| 780 encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams. | 715 SetRates(kMaxBitrates[2], 30); // To get all three streams. |
| 781 | 716 |
| 782 int expected_temporal_idx[3] = {-1, -1, -1}; | 717 int expected_temporal_idx[3] = {-1, -1, -1}; |
| 783 bool expected_layer_sync[3] = {false, false, false}; | 718 bool expected_layer_sync[3] = {false, false, false}; |
| 784 | 719 |
| 785 // First frame: #0. | 720 // First frame: #0. |
| 786 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); | 721 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); |
| 787 SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx); | 722 SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx); |
| 788 SetExpectedValues3<bool>(true, true, true, expected_layer_sync); | 723 SetExpectedValues3<bool>(true, true, true, expected_layer_sync); |
| 789 VerifyTemporalIdxAndSyncForAllSpatialLayers( | 724 VerifyTemporalIdxAndSyncForAllSpatialLayers( |
| 790 &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); | 725 &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); |
| (...skipping 48 matching lines...) |
| 839 // 3rd stream: -1, -1, -1, -1, .... | 774 // 3rd stream: -1, -1, -1, -1, .... |
| 840 // Regarding the 3rd stream, note that a stream/encoder with 1 temporal layer | 775 // Regarding the 3rd stream, note that a stream/encoder with 1 temporal layer |
| 841 // should always have temporal layer idx set to kNoTemporalIdx = -1. | 776 // should always have temporal layer idx set to kNoTemporalIdx = -1. |
| 842 // Since CodecSpecificInfoVP8.temporalIdx is uint8_t, this will wrap to 255. | 777 // Since CodecSpecificInfoVP8.temporalIdx is uint8_t, this will wrap to 255. |
| 843 // TODO(marpan): Although this seems safe for now, we should fix this. | 778 // TODO(marpan): Although this seems safe for now, we should fix this. |
| 844 void TestSpatioTemporalLayers321PatternEncoder() { | 779 void TestSpatioTemporalLayers321PatternEncoder() { |
| 845 int temporal_layer_profile[3] = {3, 2, 1}; | 780 int temporal_layer_profile[3] = {3, 2, 1}; |
| 846 SetUpCodec(temporal_layer_profile); | 781 SetUpCodec(temporal_layer_profile); |
| 847 Vp8TestEncodedImageCallback encoder_callback; | 782 Vp8TestEncodedImageCallback encoder_callback; |
| 848 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); | 783 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); |
| 849 encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams. | 784 SetRates(kMaxBitrates[2], 30); // To get all three streams. |
| 850 | 785 |
| 851 int expected_temporal_idx[3] = {-1, -1, -1}; | 786 int expected_temporal_idx[3] = {-1, -1, -1}; |
| 852 bool expected_layer_sync[3] = {false, false, false}; | 787 bool expected_layer_sync[3] = {false, false, false}; |
| 853 | 788 |
| 854 // First frame: #0. | 789 // First frame: #0. |
| 855 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); | 790 EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL)); |
| 856 SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx); | 791 SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx); |
| 857 SetExpectedValues3<bool>(true, true, false, expected_layer_sync); | 792 SetExpectedValues3<bool>(true, true, false, expected_layer_sync); |
| 858 VerifyTemporalIdxAndSyncForAllSpatialLayers( | 793 VerifyTemporalIdxAndSyncForAllSpatialLayers( |
| 859 &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); | 794 &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); |
| (...skipping 38 matching lines...) |
| 898 VerifyTemporalIdxAndSyncForAllSpatialLayers( | 833 VerifyTemporalIdxAndSyncForAllSpatialLayers( |
| 899 &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); | 834 &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); |
| 900 } | 835 } |
| 901 | 836 |
| 902 void TestStrideEncodeDecode() { | 837 void TestStrideEncodeDecode() { |
| 903 Vp8TestEncodedImageCallback encoder_callback; | 838 Vp8TestEncodedImageCallback encoder_callback; |
| 904 Vp8TestDecodedImageCallback decoder_callback; | 839 Vp8TestDecodedImageCallback decoder_callback; |
| 905 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); | 840 encoder_->RegisterEncodeCompleteCallback(&encoder_callback); |
| 906 decoder_->RegisterDecodeCompleteCallback(&decoder_callback); | 841 decoder_->RegisterDecodeCompleteCallback(&decoder_callback); |
| 907 | 842 |
| 908 encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams. | 843 SetRates(kMaxBitrates[2], 30); // To get all three streams. |
| 909 // Setting two (possibly) problematic use cases for stride: | 844 // Setting two (possibly) problematic use cases for stride: |
| 910 // 1. stride > width 2. stride_y != stride_uv/2 | 845 // 1. stride > width 2. stride_y != stride_uv/2 |
| 911 int stride_y = kDefaultWidth + 20; | 846 int stride_y = kDefaultWidth + 20; |
| 912 int stride_uv = ((kDefaultWidth + 1) / 2) + 5; | 847 int stride_uv = ((kDefaultWidth + 1) / 2) + 5; |
| 913 input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, stride_y, | 848 input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, stride_y, |
| 914 stride_uv, stride_uv); | 849 stride_uv, stride_uv); |
| 915 input_frame_.reset( | 850 input_frame_.reset( |
| 916 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); | 851 new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0)); |
| 917 | 852 |
| 918 // Set color. | 853 // Set color. |
| (...skipping 15 matching lines...) |
| 934 | 869 |
| 935 EncodedImage encoded_frame; | 870 EncodedImage encoded_frame; |
| 936 // Only encoding one frame - so will be a key frame. | 871 // Only encoding one frame - so will be a key frame. |
| 937 encoder_callback.GetLastEncodedKeyFrame(&encoded_frame); | 872 encoder_callback.GetLastEncodedKeyFrame(&encoded_frame); |
| 938 EXPECT_EQ(0, decoder_->Decode(encoded_frame, false, NULL)); | 873 EXPECT_EQ(0, decoder_->Decode(encoded_frame, false, NULL)); |
| 939 encoder_callback.GetLastEncodedFrame(&encoded_frame); | 874 encoder_callback.GetLastEncodedFrame(&encoded_frame); |
| 940 decoder_->Decode(encoded_frame, false, NULL); | 875 decoder_->Decode(encoded_frame, false, NULL); |
| 941 EXPECT_EQ(2, decoder_callback.DecodedFrames()); | 876 EXPECT_EQ(2, decoder_callback.DecodedFrames()); |
| 942 } | 877 } |
| 943 | 878 |
| 944 void TestSkipEncodingUnusedStreams() { | |
| 945 SkipEncodingUnusedStreamsTest test; | |
| 946 std::vector<unsigned int> configured_bitrate = | |
| 947 test.RunTest(encoder_.get(), &settings_, | |
| 948 1); // Target bit rate 1, to force all streams but the | |
| 949 // base one to be exceeding bandwidth constraints. | |
| 950 EXPECT_EQ(static_cast<size_t>(kNumberOfSimulcastStreams), | |
| 951 configured_bitrate.size()); | |
| 952 | |
| 953 unsigned int min_bitrate = | |
| 954 std::max(settings_.simulcastStream[0].minBitrate, settings_.minBitrate); | |
| 955 int stream = 0; | |
| 956 for (std::vector<unsigned int>::const_iterator it = | |
| 957 configured_bitrate.begin(); | |
| 958 it != configured_bitrate.end(); ++it) { | |
| 959 if (stream == 0) { | |
| 960 EXPECT_EQ(min_bitrate, *it); | |
| 961 } else { | |
| 962 EXPECT_EQ(0u, *it); | |
| 963 } | |
| 964 ++stream; | |
| 965 } | |
| 966 } | |
| 967 | |
| 968 std::unique_ptr<VP8Encoder> encoder_; | 879 std::unique_ptr<VP8Encoder> encoder_; |
| 969 MockEncodedImageCallback encoder_callback_; | 880 MockEncodedImageCallback encoder_callback_; |
| 970 std::unique_ptr<VP8Decoder> decoder_; | 881 std::unique_ptr<VP8Decoder> decoder_; |
| 971 MockDecodedImageCallback decoder_callback_; | 882 MockDecodedImageCallback decoder_callback_; |
| 972 VideoCodec settings_; | 883 VideoCodec settings_; |
| 973 rtc::scoped_refptr<I420Buffer> input_buffer_; | 884 rtc::scoped_refptr<I420Buffer> input_buffer_; |
| 974 std::unique_ptr<VideoFrame> input_frame_; | 885 std::unique_ptr<VideoFrame> input_frame_; |
| 886 std::unique_ptr<SimulcastRateAllocator> rate_allocator_; |
| 975 }; | 887 }; |
| 976 | 888 |
| 977 } // namespace testing | 889 } // namespace testing |
| 978 } // namespace webrtc | 890 } // namespace webrtc |
| 979 | 891 |
| 980 #endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_ | 892 #endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_ |
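
The substance of this change is the pair of helpers introduced on the NEW side: `SetUpRateAllocator()` builds a `SimulcastRateAllocator` that takes ownership of the `TemporalLayersFactory` also wired into `settings_.VP8()->tl_factory`, and `SetRates()` replaces the old `encoder_->SetRates(kbps, fps)` calls by converting the kbps target to bits per second, asking the allocator for a per-stream allocation, and handing that to `VideoEncoder::SetRateAllocation()`. A minimal sketch of that pattern, condensed from the diff above, is shown below; the free-function name `ConfigureSimulcastRates` and its signature are illustrative only, and the include paths are the ones this header already pulls in.

```cpp
// Sketch only: condenses the SetUpRateAllocator()/SetRates() pattern from the
// diff into one hypothetical helper. Assumes the WebRTC tree of this CL; the
// function name and parameter list are illustrative, not part of the change.
#include <memory>

#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
#include "webrtc/modules/video_coding/utility/simulcast_rate_allocator.h"

namespace webrtc {

void ConfigureSimulcastRates(VideoCodec* settings,
                             VideoEncoder* encoder,
                             std::unique_ptr<SimulcastRateAllocator>* allocator,
                             uint32_t bitrate_kbps,
                             uint32_t framerate_fps) {
  // The allocator takes ownership of the factory while the codec settings keep
  // a raw pointer to it, exactly as SetUpRateAllocator() does in the test.
  TemporalLayersFactory* tl_factory = new TemporalLayersFactory();
  allocator->reset(new SimulcastRateAllocator(
      *settings, std::unique_ptr<TemporalLayersFactory>(tl_factory)));
  settings->VP8()->tl_factory = tl_factory;

  // GetAllocation() expects bits per second; the test constants are in kbps.
  encoder->SetRateAllocation(
      (*allocator)->GetAllocation(bitrate_kbps * 1000, framerate_fps),
      framerate_fps);
}

}  // namespace webrtc
```

In the tests themselves this is split in two: `SetUpRateAllocator()` runs whenever `settings_` is (re)built before `InitEncode()`, and `SetRates()` is called at every point where the removed `encoder_->SetRates()` used to be, which is why the `SkipEncodingUnusedStreamsTest` spy on `TemporalLayers::ConfigureBitrates` is no longer needed and is deleted in this CL.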