| Index: webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h |
| diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h |
| index 2b2aa5de69f30ca59b702559711f2da5b8fa0245..98378d964f2dd557f1377ddde228564ae330e936 100644 |
| --- a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h |
| +++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h |
| @@ -119,13 +119,13 @@ class Vp8TestDecodedImageCallback : public DecodedImageCallback { |
| Vp8TestDecodedImageCallback() : decoded_frames_(0) {} |
| int32_t Decoded(VideoFrame& decoded_image) override { |
| for (int i = 0; i < decoded_image.width(); ++i) { |
| - EXPECT_NEAR(kColorY, decoded_image.buffer(kYPlane)[i], 1); |
| + EXPECT_NEAR(kColorY, decoded_image.video_frame_buffer()->DataY()[i], 1); |
| } |
| // TODO(mikhal): Verify the difference between U,V and the original. |
| for (int i = 0; i < ((decoded_image.width() + 1) / 2); ++i) { |
| - EXPECT_NEAR(kColorU, decoded_image.buffer(kUPlane)[i], 4); |
| - EXPECT_NEAR(kColorV, decoded_image.buffer(kVPlane)[i], 4); |
| + EXPECT_NEAR(kColorU, decoded_image.video_frame_buffer()->DataU()[i], 4); |
| + EXPECT_NEAR(kColorV, decoded_image.video_frame_buffer()->DataV()[i], 4); |
| } |
| decoded_frames_++; |
| return 0; |
| @@ -222,25 +222,37 @@ class TestVp8Simulcast : public ::testing::Test { |
| TestVp8Simulcast(VP8Encoder* encoder, VP8Decoder* decoder) |
| : encoder_(encoder), decoder_(decoder) {} |
| - // Creates an VideoFrame from |plane_colors|. |
| - static void CreateImage(VideoFrame* frame, int plane_colors[kNumOfPlanes]) { |
| - for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) { |
| - int width = |
| - (plane_num != kYPlane ? (frame->width() + 1) / 2 : frame->width()); |
| - int height = |
| - (plane_num != kYPlane ? (frame->height() + 1) / 2 : frame->height()); |
| - PlaneType plane_type = static_cast<PlaneType>(plane_num); |
| - uint8_t* data = frame->buffer(plane_type); |
| + static void SetPlane(uint8_t* data, |
| + uint8_t value, |
| + int width, |
| + int height, |
| + int stride) { |
| + for (int i = 0; i < height; i++, data += stride) { |
| // Setting allocated area to zero - setting only image size to |
| // requested values - will make it easier to distinguish between image |
| // size and frame size (accounting for stride). |
| - memset(frame->buffer(plane_type), 0, frame->allocated_size(plane_type)); |
| - for (int i = 0; i < height; i++) { |
| - memset(data, plane_colors[plane_num], width); |
| - data += frame->stride(plane_type); |
| - } |
| + memset(data, value, width); |
| + memset(data + width, 0, stride - width); |
| } |
| } |
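The comment inside SetPlane() above explains why the helper zeroes the bytes between the image width and the stride: a test that accidentally reads into the padding then sees zeros rather than the test color, which makes image-size vs. frame-size mistakes visible. Below is a minimal self-contained sketch of that behaviour; the 4x2 plane, 8-byte stride, and 0xAB color value are made up for the example, and the helper body is repeated only so the sketch compiles on its own.

#include <cassert>
#include <cstdint>
#include <cstring>

// Same logic as the SetPlane() helper added in this patch, copied here so the
// sketch is self-contained.
static void SetPlane(uint8_t* data, uint8_t value, int width, int height,
                     int stride) {
  for (int i = 0; i < height; i++, data += stride) {
    memset(data, value, width);                // visible image bytes
    memset(data + width, 0, stride - width);   // stride padding
  }
}

int main() {
  // Image width 4, height 2, stride 8: the last 4 bytes of each row are
  // padding that belongs to the frame allocation, not the image.
  uint8_t plane[2 * 8];
  SetPlane(plane, 0xAB, /*width=*/4, /*height=*/2, /*stride=*/8);
  assert(plane[0] == 0xAB && plane[3] == 0xAB);  // image bytes carry the color
  assert(plane[4] == 0 && plane[7] == 0);        // stride padding is zeroed
  assert(plane[8] == 0xAB && plane[12] == 0);    // same pattern on row two
  return 0;
}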
| + // Creates a VideoFrame from |plane_colors|. |
| + static void CreateImage(VideoFrame* frame, int plane_colors[kNumOfPlanes]) { |
|
perkj_webrtc (2016/04/19 12:28:13):
    const scoped_refptr<VideoFrameBuffer>& buffer instead
nisse-webrtc (2016/04/19 13:55:39):
    Maybe, I'll look into it.
nisse-webrtc (2016/04/20 12:24:34):
    Done.
(A sketch of the suggested signature appears after the new CreateImage body below.)
| + int width = frame->video_frame_buffer()->width(); |
| + int height = frame->video_frame_buffer()->height(); |
| + int chroma_width = (width + 1) / 2; |
| + int chroma_height = (height + 1) / 2; |
| + |
| + SetPlane(frame->video_frame_buffer()->MutableDataY(), plane_colors[0], |
| + width, height, frame->video_frame_buffer()->StrideY()); |
| + |
| + SetPlane(frame->video_frame_buffer()->MutableDataU(), plane_colors[1], |
| + chroma_width, chroma_height, |
| + frame->video_frame_buffer()->StrideU()); |
| + |
| + SetPlane(frame->video_frame_buffer()->MutableDataV(), plane_colors[2], |
| + chroma_width, chroma_height, |
| + frame->video_frame_buffer()->StrideV()); |
| + } |
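perkj_webrtc's comment above suggests passing the VideoFrameBuffer itself rather than a VideoFrame*, and nisse-webrtc marks it Done in a later patch set. The sketch below shows roughly what that signature could look like, reusing only the accessors this patch already calls (MutableDataY/U/V, StrideY/U/V, width, height); the exact form that landed in the follow-up is not shown here, so treat this as illustrative rather than the final code.

// Sketch only: CreateImage() operating on the buffer directly, as suggested
// in the review. SetPlane() is the helper added in this patch.
static void CreateImage(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
                        int plane_colors[kNumOfPlanes]) {
  int width = buffer->width();
  int height = buffer->height();
  int chroma_width = (width + 1) / 2;
  int chroma_height = (height + 1) / 2;

  SetPlane(buffer->MutableDataY(), plane_colors[0], width, height,
           buffer->StrideY());
  SetPlane(buffer->MutableDataU(), plane_colors[1], chroma_width,
           chroma_height, buffer->StrideU());
  SetPlane(buffer->MutableDataV(), plane_colors[2], chroma_width,
           chroma_height, buffer->StrideV());
}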
| static void DefaultSettings(VideoCodec* settings, |
| const int* temporal_layer_profile) { |
| @@ -305,11 +317,11 @@ class TestVp8Simulcast : public ::testing::Test { |
| int half_width = (kDefaultWidth + 1) / 2; |
| input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth, |
| half_width, half_width); |
| - memset(input_frame_.buffer(kYPlane), 0, |
| + memset(input_frame_.video_frame_buffer()->MutableDataY(), 0, |
| input_frame_.allocated_size(kYPlane)); |
| - memset(input_frame_.buffer(kUPlane), 0, |
| + memset(input_frame_.video_frame_buffer()->MutableDataU(), 0, |
| input_frame_.allocated_size(kUPlane)); |
| - memset(input_frame_.buffer(kVPlane), 0, |
| + memset(input_frame_.video_frame_buffer()->MutableDataV(), 0, |
| input_frame_.allocated_size(kVPlane)); |
| } |
| @@ -555,11 +567,11 @@ class TestVp8Simulcast : public ::testing::Test { |
| int half_width = (settings_.width + 1) / 2; |
| input_frame_.CreateEmptyFrame(settings_.width, settings_.height, |
| settings_.width, half_width, half_width); |
| - memset(input_frame_.buffer(kYPlane), 0, |
| + memset(input_frame_.video_frame_buffer()->MutableDataY(), 0, |
| input_frame_.allocated_size(kYPlane)); |
| - memset(input_frame_.buffer(kUPlane), 0, |
| + memset(input_frame_.video_frame_buffer()->MutableDataU(), 0, |
| input_frame_.allocated_size(kUPlane)); |
| - memset(input_frame_.buffer(kVPlane), 0, |
| + memset(input_frame_.video_frame_buffer()->MutableDataV(), 0, |
| input_frame_.allocated_size(kVPlane)); |
| // The for loop above did not set the bitrate of the highest layer. |
| @@ -596,11 +608,11 @@ class TestVp8Simulcast : public ::testing::Test { |
| half_width = (settings_.width + 1) / 2; |
| input_frame_.CreateEmptyFrame(settings_.width, settings_.height, |
| settings_.width, half_width, half_width); |
| - memset(input_frame_.buffer(kYPlane), 0, |
| + memset(input_frame_.video_frame_buffer()->MutableDataY(), 0, |
| input_frame_.allocated_size(kYPlane)); |
| - memset(input_frame_.buffer(kUPlane), 0, |
| + memset(input_frame_.video_frame_buffer()->MutableDataU(), 0, |
| input_frame_.allocated_size(kUPlane)); |
| - memset(input_frame_.buffer(kVPlane), 0, |
| + memset(input_frame_.video_frame_buffer()->MutableDataV(), 0, |
| input_frame_.allocated_size(kVPlane)); |
| EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types)); |
| } |