Chromium Code Reviews

Diff: webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h

Issue 2089773002: Add EncodedImageCallback::OnEncodedImage(). (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Created 4 years, 6 months ago
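This patch set migrates EncodedImageCallback implementers and test expectations from the old Encoded() method to the new OnEncodedImage(), which reports a Result value instead of an int32_t error code. Based only on the overrides visible in the diff below (the interface definition itself is not part of this file, and the exact contents of Result are not shown here), the change for an implementer looks roughly like this:

    // Before this patch: implementers returned an int32_t status code.
    virtual int32_t Encoded(const EncodedImage& encoded_image,
                            const CodecSpecificInfo* codec_specific_info,
                            const RTPFragmentationHeader* fragmentation);

    // After this patch: implementers return an EncodedImageCallback::Result;
    // the test callback below simply returns a default-constructed Result.
    virtual Result OnEncodedImage(const EncodedImage& encoded_image,
                                  const CodecSpecificInfo* codec_specific_info,
                                  const RTPFragmentationHeader* fragmentation);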
/*
 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

(...skipping 43 matching lines...)
  Vp8TestEncodedImageCallback() : picture_id_(-1) {
    memset(temporal_layer_, -1, sizeof(temporal_layer_));
    memset(layer_sync_, false, sizeof(layer_sync_));
  }

  ~Vp8TestEncodedImageCallback() {
    delete[] encoded_key_frame_._buffer;
    delete[] encoded_frame_._buffer;
  }

-  virtual int32_t Encoded(const EncodedImage& encoded_image,
-                          const CodecSpecificInfo* codec_specific_info,
-                          const RTPFragmentationHeader* fragmentation) {
+  virtual Result OnEncodedImage(const EncodedImage& encoded_image,
+                                const CodecSpecificInfo* codec_specific_info,
+                                const RTPFragmentationHeader* fragmentation) {
    // Only store the base layer.
    if (codec_specific_info->codecSpecific.VP8.simulcastIdx == 0) {
      if (encoded_image._frameType == kVideoFrameKey) {
        delete[] encoded_key_frame_._buffer;
        encoded_key_frame_._buffer = new uint8_t[encoded_image._size];
        encoded_key_frame_._size = encoded_image._size;
        encoded_key_frame_._length = encoded_image._length;
        encoded_key_frame_._frameType = kVideoFrameKey;
        encoded_key_frame_._completeFrame = encoded_image._completeFrame;
        memcpy(encoded_key_frame_._buffer, encoded_image._buffer,
               encoded_image._length);
      } else {
        delete[] encoded_frame_._buffer;
        encoded_frame_._buffer = new uint8_t[encoded_image._size];
        encoded_frame_._size = encoded_image._size;
        encoded_frame_._length = encoded_image._length;
        memcpy(encoded_frame_._buffer, encoded_image._buffer,
               encoded_image._length);
      }
    }
    picture_id_ = codec_specific_info->codecSpecific.VP8.pictureId;
    layer_sync_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
        codec_specific_info->codecSpecific.VP8.layerSync;
    temporal_layer_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
        codec_specific_info->codecSpecific.VP8.temporalIdx;
-    return 0;
+    return Result();
  }
  void GetLastEncodedFrameInfo(int* picture_id,
                               int* temporal_layer,
                               bool* layer_sync,
                               int stream) {
    *picture_id = picture_id_;
    *temporal_layer = temporal_layer_[stream];
    *layer_sync = layer_sync_[stream];
  }
  void GetLastEncodedKeyFrame(EncodedImage* encoded_key_frame) {
(...skipping 228 matching lines...)
    encoder_->Release();
    decoder_->Release();
  }

  void ExpectStreams(FrameType frame_type, int expected_video_streams) {
    ASSERT_GE(expected_video_streams, 0);
    ASSERT_LE(expected_video_streams, kNumberOfSimulcastStreams);
    if (expected_video_streams >= 1) {
      EXPECT_CALL(
          encoder_callback_,
-          Encoded(
+          OnEncodedImage(
              AllOf(Field(&EncodedImage::_frameType, frame_type),
                    Field(&EncodedImage::_encodedWidth, kDefaultWidth / 4),
                    Field(&EncodedImage::_encodedHeight, kDefaultHeight / 4)),
              _, _))
          .Times(1)
-          .WillRepeatedly(Return(0));
+          .WillRepeatedly(Return(EncodedImageCallback::Result()));
    }
    if (expected_video_streams >= 2) {
      EXPECT_CALL(
          encoder_callback_,
-          Encoded(
+          OnEncodedImage(
              AllOf(Field(&EncodedImage::_frameType, frame_type),
                    Field(&EncodedImage::_encodedWidth, kDefaultWidth / 2),
                    Field(&EncodedImage::_encodedHeight, kDefaultHeight / 2)),
              _, _))
          .Times(1)
-          .WillRepeatedly(Return(0));
+          .WillRepeatedly(Return(EncodedImageCallback::Result()));
    }
    if (expected_video_streams >= 3) {
      EXPECT_CALL(
          encoder_callback_,
-          Encoded(AllOf(Field(&EncodedImage::_frameType, frame_type),
-                        Field(&EncodedImage::_encodedWidth, kDefaultWidth),
-                        Field(&EncodedImage::_encodedHeight, kDefaultHeight)),
-                  _, _))
+          OnEncodedImage(
+              AllOf(Field(&EncodedImage::_frameType, frame_type),
+                    Field(&EncodedImage::_encodedWidth, kDefaultWidth),
+                    Field(&EncodedImage::_encodedHeight, kDefaultHeight)),
+              _, _))
          .Times(1)
-          .WillRepeatedly(Return(0));
+          .WillRepeatedly(Return(EncodedImageCallback::Result()));
    }
  }
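The expectations above only compile if the gmock mock used as encoder_callback_ mocks OnEncodedImage() rather than Encoded(). That mock class is declared in the shared codec test mocks rather than in this file, so its update is not visible in this diff; a minimal sketch of what the updated declaration presumably looks like (class name assumed, using the MOCK_METHOD3 macro style in use at the time):

    // Hypothetical sketch of the mock used as encoder_callback_; the real
    // declaration lives outside this file and may differ in detail.
    class MockEncodedImageCallback : public EncodedImageCallback {
     public:
      MOCK_METHOD3(OnEncodedImage,
                   Result(const EncodedImage& encoded_image,
                          const CodecSpecificInfo* codec_specific_info,
                          const RTPFragmentationHeader* fragmentation));
    };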

  void VerifyTemporalIdxAndSyncForAllSpatialLayers(
      Vp8TestEncodedImageCallback* encoder_callback,
      const int* expected_temporal_idx,
      const bool* expected_layer_sync,
      int num_spatial_layers) {
    int picture_id = -1;
    int temporal_layer = -1;
(...skipping 204 matching lines...)
    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].width =
        settings_.width;
    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].height =
        settings_.height;
    EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));

    // Encode one frame and verify.
    encoder_->SetRates(kMaxBitrates[0] + kMaxBitrates[1], 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
-    EXPECT_CALL(encoder_callback_,
-                Encoded(AllOf(Field(&EncodedImage::_frameType, kVideoFrameKey),
-                              Field(&EncodedImage::_encodedWidth, width),
-                              Field(&EncodedImage::_encodedHeight, height)),
-                        _, _))
+    EXPECT_CALL(
+        encoder_callback_,
+        OnEncodedImage(AllOf(Field(&EncodedImage::_frameType, kVideoFrameKey),
+                             Field(&EncodedImage::_encodedWidth, width),
+                             Field(&EncodedImage::_encodedHeight, height)),
+                       _, _))
        .Times(1)
-        .WillRepeatedly(Return(0));
+        .WillRepeatedly(Return(EncodedImageCallback::Result()));
    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));

    // Switch back.
    DefaultSettings(&settings_, kDefaultTemporalLayerProfile);
    // Start at the lowest bitrate for enabling base stream.
    settings_.startBitrate = kMinBitrates[0];
    EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
    encoder_->SetRates(settings_.startBitrate, 30);
    ExpectStreams(kVideoFrameKey, 1);
    // Resize |input_frame_| to the new resolution.
(...skipping 352 matching lines...)
  std::unique_ptr<VP8Decoder> decoder_;
  MockDecodedImageCallback decoder_callback_;
  VideoCodec settings_;
  VideoFrame input_frame_;
};

}  // namespace testing
}  // namespace webrtc

#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_
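For context, the expectations in ExpectStreams() only fire if the test callbacks are registered with the codecs during fixture setup, which happens in one of the regions elided above. A hedged sketch of the registration these tests rely on, using the standard encoder/decoder callback registration API:

    // Sketch of the fixture setup assumed by the expectations above; the
    // actual SetUp code is in a region elided from this diff.
    encoder_->RegisterEncodeCompleteCallback(&encoder_callback_);
    decoder_->RegisterDecodeCompleteCallback(&decoder_callback_);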
