Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(19)

Side by Side Diff: webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h

Issue 1900673002: Delete webrtc::VideoFrame methods buffer and stride. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Update ios video_render. Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 /* 1 /*
2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
(...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after
112 int picture_id_; 112 int picture_id_;
113 int temporal_layer_[kNumberOfSimulcastStreams]; 113 int temporal_layer_[kNumberOfSimulcastStreams];
114 bool layer_sync_[kNumberOfSimulcastStreams]; 114 bool layer_sync_[kNumberOfSimulcastStreams];
115 }; 115 };
116 116
117 class Vp8TestDecodedImageCallback : public DecodedImageCallback { 117 class Vp8TestDecodedImageCallback : public DecodedImageCallback {
118 public: 118 public:
119 Vp8TestDecodedImageCallback() : decoded_frames_(0) {} 119 Vp8TestDecodedImageCallback() : decoded_frames_(0) {}
120 int32_t Decoded(VideoFrame& decoded_image) override { 120 int32_t Decoded(VideoFrame& decoded_image) override {
121 for (int i = 0; i < decoded_image.width(); ++i) { 121 for (int i = 0; i < decoded_image.width(); ++i) {
122 EXPECT_NEAR(kColorY, decoded_image.buffer(kYPlane)[i], 1); 122 EXPECT_NEAR(kColorY, decoded_image.video_frame_buffer()->DataY()[i], 1);
123 } 123 }
124 124
125 // TODO(mikhal): Verify the difference between U,V and the original. 125 // TODO(mikhal): Verify the difference between U,V and the original.
126 for (int i = 0; i < ((decoded_image.width() + 1) / 2); ++i) { 126 for (int i = 0; i < ((decoded_image.width() + 1) / 2); ++i) {
127 EXPECT_NEAR(kColorU, decoded_image.buffer(kUPlane)[i], 4); 127 EXPECT_NEAR(kColorU, decoded_image.video_frame_buffer()->DataU()[i], 4);
128 EXPECT_NEAR(kColorV, decoded_image.buffer(kVPlane)[i], 4); 128 EXPECT_NEAR(kColorV, decoded_image.video_frame_buffer()->DataV()[i], 4);
129 } 129 }
130 decoded_frames_++; 130 decoded_frames_++;
131 return 0; 131 return 0;
132 } 132 }
133 int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override { 133 int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override {
134 RTC_NOTREACHED(); 134 RTC_NOTREACHED();
135 return -1; 135 return -1;
136 } 136 }
137 int DecodedFrames() { return decoded_frames_; } 137 int DecodedFrames() { return decoded_frames_; }
138 138
(...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after
215 215
216 mutable std::vector<TemporalLayers*> spying_layers_; 216 mutable std::vector<TemporalLayers*> spying_layers_;
217 }; 217 };
218 }; 218 };
219 219
220 class TestVp8Simulcast : public ::testing::Test { 220 class TestVp8Simulcast : public ::testing::Test {
221 public: 221 public:
222 TestVp8Simulcast(VP8Encoder* encoder, VP8Decoder* decoder) 222 TestVp8Simulcast(VP8Encoder* encoder, VP8Decoder* decoder)
223 : encoder_(encoder), decoder_(decoder) {} 223 : encoder_(encoder), decoder_(decoder) {}
224 224
225 // Creates a VideoFrame from |plane_colors|. 225 static void SetPlane(uint8_t* data,
226 static void CreateImage(VideoFrame* frame, int plane_colors[kNumOfPlanes]) { 226 uint8_t value,
227 for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) { 227 int width,
228 int width = 228 int height,
229 (plane_num != kYPlane ? (frame->width() + 1) / 2 : frame->width()); 229 int stride) {
230 int height = 230 for (int i = 0; i < height; i++, data += stride) {
231 (plane_num != kYPlane ? (frame->height() + 1) / 2 : frame->height());
232 PlaneType plane_type = static_cast<PlaneType>(plane_num);
233 uint8_t* data = frame->buffer(plane_type);
234 // Setting allocated area to zero - setting only image size to 231 // Setting allocated area to zero - setting only image size to
235 // requested values - will make it easier to distinguish between image 232 // requested values - will make it easier to distinguish between image
236 // size and frame size (accounting for stride). 233 // size and frame size (accounting for stride).
237 memset(frame->buffer(plane_type), 0, frame->allocated_size(plane_type)); 234 memset(data, value, width);
238 for (int i = 0; i < height; i++) { 235 memset(data + width, 0, stride - width);
239 memset(data, plane_colors[plane_num], width);
240 data += frame->stride(plane_type);
241 }
242 } 236 }
243 } 237 }
238 // Creates a VideoFrame from |plane_colors|.
239 static void CreateImage(VideoFrame* frame, int plane_colors[kNumOfPlanes]) {
perkj_webrtc 2016/04/19 12:28:13 const scoped_refptr<VideoFrameBuffer>& buffer inst
nisse-webrtc 2016/04/19 13:55:39 Maybe, I'll look into it.
nisse-webrtc 2016/04/20 12:24:34 Done.
240 int width = frame->video_frame_buffer()->width();
241 int height = frame->video_frame_buffer()->height();
242 int chroma_width = (width + 1) / 2;
243 int chroma_height = (height + 1) / 2;
244
245 SetPlane(frame->video_frame_buffer()->MutableDataY(), plane_colors[0],
246 width, height, frame->video_frame_buffer()->StrideY());
247
248 SetPlane(frame->video_frame_buffer()->MutableDataU(), plane_colors[1],
249 chroma_width, chroma_height,
250 frame->video_frame_buffer()->StrideU());
251
252 SetPlane(frame->video_frame_buffer()->MutableDataV(), plane_colors[2],
253 chroma_width, chroma_height,
254 frame->video_frame_buffer()->StrideV());
255 }
244 256
245 static void DefaultSettings(VideoCodec* settings, 257 static void DefaultSettings(VideoCodec* settings,
246 const int* temporal_layer_profile) { 258 const int* temporal_layer_profile) {
247 assert(settings); 259 assert(settings);
248 memset(settings, 0, sizeof(VideoCodec)); 260 memset(settings, 0, sizeof(VideoCodec));
249 strncpy(settings->plName, "VP8", 4); 261 strncpy(settings->plName, "VP8", 4);
250 settings->codecType = kVideoCodecVP8; 262 settings->codecType = kVideoCodecVP8;
251 // 96 to 127 dynamic payload types for video codecs 263 // 96 to 127 dynamic payload types for video codecs
252 settings->plType = 120; 264 settings->plType = 120;
253 settings->startBitrate = 300; 265 settings->startBitrate = 300;
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after
298 310
299 virtual void SetUpCodec(const int* temporal_layer_profile) { 311 virtual void SetUpCodec(const int* temporal_layer_profile) {
300 encoder_->RegisterEncodeCompleteCallback(&encoder_callback_); 312 encoder_->RegisterEncodeCompleteCallback(&encoder_callback_);
301 decoder_->RegisterDecodeCompleteCallback(&decoder_callback_); 313 decoder_->RegisterDecodeCompleteCallback(&decoder_callback_);
302 DefaultSettings(&settings_, temporal_layer_profile); 314 DefaultSettings(&settings_, temporal_layer_profile);
303 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); 315 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
304 EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1)); 316 EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1));
305 int half_width = (kDefaultWidth + 1) / 2; 317 int half_width = (kDefaultWidth + 1) / 2;
306 input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth, 318 input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
307 half_width, half_width); 319 half_width, half_width);
308 memset(input_frame_.buffer(kYPlane), 0, 320 memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
309 input_frame_.allocated_size(kYPlane)); 321 input_frame_.allocated_size(kYPlane));
310 memset(input_frame_.buffer(kUPlane), 0, 322 memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
311 input_frame_.allocated_size(kUPlane)); 323 input_frame_.allocated_size(kUPlane));
312 memset(input_frame_.buffer(kVPlane), 0, 324 memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
313 input_frame_.allocated_size(kVPlane)); 325 input_frame_.allocated_size(kVPlane));
314 } 326 }
315 327
316 virtual void TearDown() { 328 virtual void TearDown() {
317 encoder_->Release(); 329 encoder_->Release();
318 decoder_->Release(); 330 decoder_->Release();
319 } 331 }
320 332
321 void ExpectStreams(FrameType frame_type, int expected_video_streams) { 333 void ExpectStreams(FrameType frame_type, int expected_video_streams) {
322 ASSERT_GE(expected_video_streams, 0); 334 ASSERT_GE(expected_video_streams, 0);
(...skipping 225 matching lines...) Expand 10 before | Expand all | Expand 10 after
548 settings_.height = height; 560 settings_.height = height;
549 for (int i = 0; i < settings_.numberOfSimulcastStreams - 1; ++i) { 561 for (int i = 0; i < settings_.numberOfSimulcastStreams - 1; ++i) {
550 settings_.simulcastStream[i].maxBitrate = 0; 562 settings_.simulcastStream[i].maxBitrate = 0;
551 settings_.simulcastStream[i].width = settings_.width; 563 settings_.simulcastStream[i].width = settings_.width;
552 settings_.simulcastStream[i].height = settings_.height; 564 settings_.simulcastStream[i].height = settings_.height;
553 } 565 }
554 // Setting input image to new resolution. 566 // Setting input image to new resolution.
555 int half_width = (settings_.width + 1) / 2; 567 int half_width = (settings_.width + 1) / 2;
556 input_frame_.CreateEmptyFrame(settings_.width, settings_.height, 568 input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
557 settings_.width, half_width, half_width); 569 settings_.width, half_width, half_width);
558 memset(input_frame_.buffer(kYPlane), 0, 570 memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
559 input_frame_.allocated_size(kYPlane)); 571 input_frame_.allocated_size(kYPlane));
560 memset(input_frame_.buffer(kUPlane), 0, 572 memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
561 input_frame_.allocated_size(kUPlane)); 573 input_frame_.allocated_size(kUPlane));
562 memset(input_frame_.buffer(kVPlane), 0, 574 memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
563 input_frame_.allocated_size(kVPlane)); 575 input_frame_.allocated_size(kVPlane));
564 576
565 // The for loop above did not set the bitrate of the highest layer. 577 // The for loop above did not set the bitrate of the highest layer.
566 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1] 578 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1]
567 .maxBitrate = 0; 579 .maxBitrate = 0;
568 // The highest layer has to correspond to the non-simulcast resolution. 580 // The highest layer has to correspond to the non-simulcast resolution.
569 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].width = 581 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].width =
570 settings_.width; 582 settings_.width;
571 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].height = 583 settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].height =
572 settings_.height; 584 settings_.height;
(...skipping 16 matching lines...) Expand all
589 DefaultSettings(&settings_, kDefaultTemporalLayerProfile); 601 DefaultSettings(&settings_, kDefaultTemporalLayerProfile);
590 // Start at the lowest bitrate for enabling base stream. 602 // Start at the lowest bitrate for enabling base stream.
591 settings_.startBitrate = kMinBitrates[0]; 603 settings_.startBitrate = kMinBitrates[0];
592 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); 604 EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
593 encoder_->SetRates(settings_.startBitrate, 30); 605 encoder_->SetRates(settings_.startBitrate, 30);
594 ExpectStreams(kVideoFrameKey, 1); 606 ExpectStreams(kVideoFrameKey, 1);
595 // Resize |input_frame_| to the new resolution. 607 // Resize |input_frame_| to the new resolution.
596 half_width = (settings_.width + 1) / 2; 608 half_width = (settings_.width + 1) / 2;
597 input_frame_.CreateEmptyFrame(settings_.width, settings_.height, 609 input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
598 settings_.width, half_width, half_width); 610 settings_.width, half_width, half_width);
599 memset(input_frame_.buffer(kYPlane), 0, 611 memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
600 input_frame_.allocated_size(kYPlane)); 612 input_frame_.allocated_size(kYPlane));
601 memset(input_frame_.buffer(kUPlane), 0, 613 memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
602 input_frame_.allocated_size(kUPlane)); 614 input_frame_.allocated_size(kUPlane));
603 memset(input_frame_.buffer(kVPlane), 0, 615 memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
604 input_frame_.allocated_size(kVPlane)); 616 input_frame_.allocated_size(kVPlane));
605 EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types)); 617 EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
606 } 618 }
607 619
608 void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); } 620 void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); }
609 621
610 void TestSwitchingToOneOddStream() { SwitchingToOneStream(1023, 769); } 622 void TestSwitchingToOneOddStream() { SwitchingToOneStream(1023, 769); }
611 623
612 void TestSwitchingToOneSmallStream() { SwitchingToOneStream(4, 4); } 624 void TestSwitchingToOneSmallStream() { SwitchingToOneStream(4, 4); }
613 625
(...skipping 334 matching lines...) Expand 10 before | Expand all | Expand 10 after
948 std::unique_ptr<VP8Decoder> decoder_; 960 std::unique_ptr<VP8Decoder> decoder_;
949 MockDecodedImageCallback decoder_callback_; 961 MockDecodedImageCallback decoder_callback_;
950 VideoCodec settings_; 962 VideoCodec settings_;
951 VideoFrame input_frame_; 963 VideoFrame input_frame_;
952 }; 964 };
953 965
954 } // namespace testing 966 } // namespace testing
955 } // namespace webrtc 967 } // namespace webrtc
956 968
957 #endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_ 969 #endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_UNITTEST_H_
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698