Chromium Code Reviews

Diff: webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h

Issue 2089773002: Add EncodedImageCallback::OnEncodedImage(). (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: . Created 4 years, 4 months ago
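For orientation while reading the diff below: the patch moves the test callback and the gmock expectations from the old Encoded() method (which returned an int32_t status) to the new OnEncodedImage() method, which returns an EncodedImageCallback::Result. A minimal sketch of the interface this implies follows; only the OnEncodedImage() signature, Result::OK, and the two-argument Result constructor are taken from the diff, and every other name is an assumption rather than the actual WebRTC declaration.

// Sketch only: inferred from how the tests below construct and return
// Result values; member names beyond Result::OK are assumptions.
class EncodedImageCallback {
 public:
  struct Result {
    enum Error {
      OK
      // Other error codes omitted from this sketch.
    };
    Result(Error error, uint32_t frame_id = 0)
        : error(error), frame_id(frame_id) {}
    Error error;
    uint32_t frame_id;  // The tests pass encoded_image._timeStamp here.
  };

  virtual Result OnEncodedImage(const EncodedImage& encoded_image,
                                const CodecSpecificInfo* codec_specific_info,
                                const RTPFragmentationHeader* fragmentation) = 0;

  virtual ~EncodedImageCallback() {}
};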
/*
 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

(...skipping 43 matching lines...)
  Vp8TestEncodedImageCallback() : picture_id_(-1) {
    memset(temporal_layer_, -1, sizeof(temporal_layer_));
    memset(layer_sync_, false, sizeof(layer_sync_));
  }

  ~Vp8TestEncodedImageCallback() {
    delete[] encoded_key_frame_._buffer;
    delete[] encoded_frame_._buffer;
  }

-  virtual int32_t Encoded(const EncodedImage& encoded_image,
-                          const CodecSpecificInfo* codec_specific_info,
-                          const RTPFragmentationHeader* fragmentation) {
+  virtual Result OnEncodedImage(const EncodedImage& encoded_image,
+                                const CodecSpecificInfo* codec_specific_info,
+                                const RTPFragmentationHeader* fragmentation) {
    // Only store the base layer.
    if (codec_specific_info->codecSpecific.VP8.simulcastIdx == 0) {
      if (encoded_image._frameType == kVideoFrameKey) {
        delete[] encoded_key_frame_._buffer;
        encoded_key_frame_._buffer = new uint8_t[encoded_image._size];
        encoded_key_frame_._size = encoded_image._size;
        encoded_key_frame_._length = encoded_image._length;
        encoded_key_frame_._frameType = kVideoFrameKey;
        encoded_key_frame_._completeFrame = encoded_image._completeFrame;
        memcpy(encoded_key_frame_._buffer, encoded_image._buffer,
               encoded_image._length);
      } else {
        delete[] encoded_frame_._buffer;
        encoded_frame_._buffer = new uint8_t[encoded_image._size];
        encoded_frame_._size = encoded_image._size;
        encoded_frame_._length = encoded_image._length;
        memcpy(encoded_frame_._buffer, encoded_image._buffer,
               encoded_image._length);
      }
    }
    picture_id_ = codec_specific_info->codecSpecific.VP8.pictureId;
    layer_sync_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
        codec_specific_info->codecSpecific.VP8.layerSync;
    temporal_layer_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
        codec_specific_info->codecSpecific.VP8.temporalIdx;
-    return 0;
+    return Result(Result::OK, encoded_image._timeStamp);
  }
  void GetLastEncodedFrameInfo(int* picture_id,
                               int* temporal_layer,
                               bool* layer_sync,
                               int stream) {
    *picture_id = picture_id_;
    *temporal_layer = temporal_layer_[stream];
    *layer_sync = layer_sync_[stream];
  }
  void GetLastEncodedKeyFrame(EncodedImage* encoded_key_frame) {
(...skipping 228 matching lines...)
    encoder_->Release();
    decoder_->Release();
  }

  void ExpectStreams(FrameType frame_type, int expected_video_streams) {
    ASSERT_GE(expected_video_streams, 0);
    ASSERT_LE(expected_video_streams, kNumberOfSimulcastStreams);
    if (expected_video_streams >= 1) {
      EXPECT_CALL(
          encoder_callback_,
-          Encoded(
+          OnEncodedImage(
              AllOf(Field(&EncodedImage::_frameType, frame_type),
                    Field(&EncodedImage::_encodedWidth, kDefaultWidth / 4),
                    Field(&EncodedImage::_encodedHeight, kDefaultHeight / 4)),
              _, _))
          .Times(1)
-          .WillRepeatedly(Return(0));
+          .WillRepeatedly(Return(EncodedImageCallback::Result(
+              EncodedImageCallback::Result::OK, 0)));
    }
    if (expected_video_streams >= 2) {
      EXPECT_CALL(
          encoder_callback_,
-          Encoded(
+          OnEncodedImage(
              AllOf(Field(&EncodedImage::_frameType, frame_type),
                    Field(&EncodedImage::_encodedWidth, kDefaultWidth / 2),
                    Field(&EncodedImage::_encodedHeight, kDefaultHeight / 2)),
              _, _))
          .Times(1)
-          .WillRepeatedly(Return(0));
+          .WillRepeatedly(Return(EncodedImageCallback::Result(
+              EncodedImageCallback::Result::OK, 0)));
    }
    if (expected_video_streams >= 3) {
      EXPECT_CALL(
          encoder_callback_,
-          Encoded(AllOf(Field(&EncodedImage::_frameType, frame_type),
-                        Field(&EncodedImage::_encodedWidth, kDefaultWidth),
-                        Field(&EncodedImage::_encodedHeight, kDefaultHeight)),
-                  _, _))
+          OnEncodedImage(
+              AllOf(Field(&EncodedImage::_frameType, frame_type),
+                    Field(&EncodedImage::_encodedWidth, kDefaultWidth),
+                    Field(&EncodedImage::_encodedHeight, kDefaultHeight)),
+              _, _))
          .Times(1)
-          .WillRepeatedly(Return(0));
+          .WillRepeatedly(Return(EncodedImageCallback::Result(
+              EncodedImageCallback::Result::OK, 0)));
    }
  }

  void VerifyTemporalIdxAndSyncForAllSpatialLayers(
      Vp8TestEncodedImageCallback* encoder_callback,
      const int* expected_temporal_idx,
      const bool* expected_layer_sync,
      int num_spatial_layers) {
    int picture_id = -1;
    int temporal_layer = -1;
(...skipping 204 matching lines...)
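The encoder_callback_ that these expectations run against is a gmock mock whose declaration sits outside the lines shown in this diff. Assuming it mirrors the new interface, it would be declared roughly as follows; the class name MockEncodedImageCallback and the exact macro form are assumptions, not part of this patch.

// Hypothetical mock declaration matching the EXPECT_CALLs above;
// shown only to make the expectations concrete.
class MockEncodedImageCallback : public EncodedImageCallback {
 public:
  MOCK_METHOD3(OnEncodedImage,
               Result(const EncodedImage& encoded_image,
                      const CodecSpecificInfo* codec_specific_info,
                      const RTPFragmentationHeader* fragmentation));
};

With such a mock, WillRepeatedly(Return(EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))) simply reports success with a dummy frame id, which is all ExpectStreams() needs.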
    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].width =
        settings_.width;
    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].height =
        settings_.height;
    EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));

    // Encode one frame and verify.
    encoder_->SetRates(kMaxBitrates[0] + kMaxBitrates[1], 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
-    EXPECT_CALL(encoder_callback_,
-                Encoded(AllOf(Field(&EncodedImage::_frameType, kVideoFrameKey),
-                              Field(&EncodedImage::_encodedWidth, width),
-                              Field(&EncodedImage::_encodedHeight, height)),
-                        _, _))
+    EXPECT_CALL(
+        encoder_callback_,
+        OnEncodedImage(AllOf(Field(&EncodedImage::_frameType, kVideoFrameKey),
+                             Field(&EncodedImage::_encodedWidth, width),
+                             Field(&EncodedImage::_encodedHeight, height)),
+                       _, _))
        .Times(1)
-        .WillRepeatedly(Return(0));
+        .WillRepeatedly(Return(
+            EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0)));
    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));

    // Switch back.
    DefaultSettings(&settings_, kDefaultTemporalLayerProfile);
    // Start at the lowest bitrate for enabling base stream.
    settings_.startBitrate = kMinBitrates[0];
    EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
    encoder_->SetRates(settings_.startBitrate, 30);
    ExpectStreams(kVideoFrameKey, 1);
    // Resize |input_frame_| to the new resolution.
(...skipping 352 matching lines...)
  std::unique_ptr<VP8Decoder> decoder_;
  MockDecodedImageCallback decoder_callback_;
  VideoCodec settings_;
  VideoFrame input_frame_;
};

}  // namespace testing
}  // namespace webrtc

#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_
