Index: webrtc/media/base/videoframe_unittest.h
diff --git a/webrtc/media/base/videoframe_unittest.h b/webrtc/media/base/videoframe_unittest.h
index 9e9b7dddb1f6e6967a0f3275804659932dcd6933..6ceba833b5b834c2a9f1c9d5b02813ab99bb78d0 100644
--- a/webrtc/media/base/videoframe_unittest.h
+++ b/webrtc/media/base/videoframe_unittest.h
@@ -267,24 +267,27 @@
     const uint8_t* start = reinterpret_cast<const uint8_t*>(ms->GetBuffer());
     int awidth = (width + 1) & ~1;
     frame->InitToBlack(width, height, 0);
-    int stride_y = frame->GetYPitch();
-    int stride_u = frame->GetUPitch();
-    int stride_v = frame->GetVPitch();
+    int stride_y = frame->video_frame_buffer()->StrideY();
+    int stride_u = frame->video_frame_buffer()->StrideU();
+    int stride_v = frame->video_frame_buffer()->StrideV();
+    uint8_t* plane_y = frame->video_frame_buffer()->MutableDataY();
+    uint8_t* plane_u = frame->video_frame_buffer()->MutableDataU();
+    uint8_t* plane_v = frame->video_frame_buffer()->MutableDataV();
     for (uint32_t y = 0; y < height; ++y) {
       for (uint32_t x = 0; x < width; x += 2) {
         const uint8_t* quad1 = start + (y * awidth + x) * 2;
-        frame->GetYPlane()[stride_y * y + x] = quad1[y1_pos];
+        plane_y[stride_y * y + x] = quad1[y1_pos];
         if ((x + 1) < width) {
-          frame->GetYPlane()[stride_y * y + x + 1] = quad1[y2_pos];
+          plane_y[stride_y * y + x + 1] = quad1[y2_pos];
         }
         if ((y & 1) == 0) {
           const uint8_t* quad2 = quad1 + awidth * 2;
           if ((y + 1) >= height) {
             quad2 = quad1;
           }
-          frame->GetUPlane()[stride_u * (y / 2) + x / 2] =
+          plane_u[stride_u * (y / 2) + x / 2] =
               (quad1[u_pos] + quad2[u_pos] + 1) / 2;
-          frame->GetVPlane()[stride_v * (y / 2) + x / 2] =
+          plane_v[stride_v * (y / 2) + x / 2] =
               (quad1[v_pos] + quad2[v_pos] + 1) / 2;
         }
       }
@@ -311,9 +314,12 @@
       pitch = -pitch;
     }
     frame->InitToBlack(width, height, 0);
-    int stride_y = frame->GetYPitch();
-    int stride_u = frame->GetUPitch();
-    int stride_v = frame->GetVPitch();
+    int stride_y = frame->video_frame_buffer()->StrideY();
+    int stride_u = frame->video_frame_buffer()->StrideU();
+    int stride_v = frame->video_frame_buffer()->StrideV();
+    uint8_t* plane_y = frame->video_frame_buffer()->MutableDataY();
+    uint8_t* plane_u = frame->video_frame_buffer()->MutableDataU();
+    uint8_t* plane_v = frame->video_frame_buffer()->MutableDataV();
     for (int32_t y = 0; y < height; y += 2) {
       for (int32_t x = 0; x < width; x += 2) {
         const uint8_t* rgb[4];
@@ -326,19 +332,19 @@
           ConvertRgbPixel(rgb[i][r_pos], rgb[i][g_pos], rgb[i][b_pos],
                           &yuv[i][0], &yuv[i][1], &yuv[i][2]);
         }
-        frame->GetYPlane()[stride_y * y + x] = yuv[0][0];
+        plane_y[stride_y * y + x] = yuv[0][0];
         if ((x + 1) < width) {
-          frame->GetYPlane()[stride_y * y + x + 1] = yuv[1][0];
+          plane_y[stride_y * y + x + 1] = yuv[1][0];
         }
         if ((y + 1) < height) {
-          frame->GetYPlane()[stride_y * (y + 1) + x] = yuv[2][0];
+          plane_y[stride_y * (y + 1) + x] = yuv[2][0];
           if ((x + 1) < width) {
-            frame->GetYPlane()[stride_y * (y + 1) + x + 1] = yuv[3][0];
+            plane_y[stride_y * (y + 1) + x + 1] = yuv[3][0];
           }
         }
-        frame->GetUPlane()[stride_u * (y / 2) + x / 2] =
+        plane_u[stride_u * (y / 2) + x / 2] =
             (yuv[0][1] + yuv[1][1] + yuv[2][1] + yuv[3][1] + 2) / 4;
-        frame->GetVPlane()[stride_v * (y / 2) + x / 2] =
+        plane_v[stride_v * (y / 2) + x / 2] =
             (yuv[0][2] + yuv[1][2] + yuv[2][2] + yuv[3][2] + 2) / 4;
       }
     }
@@ -395,15 +401,15 @@

   // Comparison functions for testing.
   static bool IsNull(const cricket::VideoFrame& frame) {
-    return !frame.GetYPlane();
+    return !frame.video_frame_buffer();
   }

   static bool IsSize(const cricket::VideoFrame& frame,
                      int width,
                      int height) {
-    return !IsNull(frame) && frame.GetYPitch() >= width &&
-           frame.GetUPitch() >= width / 2 &&
-           frame.GetVPitch() >= width / 2 &&
+    return !IsNull(frame) && frame.video_frame_buffer()->StrideY() >= width &&
+           frame.video_frame_buffer()->StrideU() >= width / 2 &&
+           frame.video_frame_buffer()->StrideV() >= width / 2 &&
            frame.width() == width && frame.height() == height;
   }

@@ -444,15 +450,17 @@
                       const uint8_t* v,
                       uint32_t vpitch,
                       int max_error) {
-    return IsSize(frame, width, height) &&
-           frame.GetTimeStamp() == time_stamp &&
-           IsPlaneEqual("y", frame.GetYPlane(), frame.GetYPitch(), y, ypitch,
+    return IsSize(frame, width, height) && frame.GetTimeStamp() == time_stamp &&
+           IsPlaneEqual("y", frame.video_frame_buffer()->DataY(),
+                        frame.video_frame_buffer()->StrideY(), y, ypitch,
                         static_cast<uint32_t>(width),
                         static_cast<uint32_t>(height), max_error) &&
-           IsPlaneEqual("u", frame.GetUPlane(), frame.GetUPitch(), u, upitch,
+           IsPlaneEqual("u", frame.video_frame_buffer()->DataU(),
+                        frame.video_frame_buffer()->StrideU(), u, upitch,
                         static_cast<uint32_t>((width + 1) / 2),
                         static_cast<uint32_t>((height + 1) / 2), max_error) &&
-           IsPlaneEqual("v", frame.GetVPlane(), frame.GetVPitch(), v, vpitch,
+           IsPlaneEqual("v", frame.video_frame_buffer()->DataV(),
+                        frame.video_frame_buffer()->StrideV(), v, vpitch,
                         static_cast<uint32_t>((width + 1) / 2),
                         static_cast<uint32_t>((height + 1) / 2), max_error);
   }
@@ -463,9 +471,12 @@
     return IsEqual(frame1,
                    frame2.width(), frame2.height(),
                    frame2.GetTimeStamp(),
-                   frame2.GetYPlane(), frame2.GetYPitch(),
-                   frame2.GetUPlane(), frame2.GetUPitch(),
-                   frame2.GetVPlane(), frame2.GetVPitch(),
+                   frame2.video_frame_buffer()->DataY(),
+                   frame2.video_frame_buffer()->StrideY(),
+                   frame2.video_frame_buffer()->DataU(),
+                   frame2.video_frame_buffer()->StrideU(),
+                   frame2.video_frame_buffer()->DataV(),
+                   frame2.video_frame_buffer()->StrideV(),
                    max_error);
   }

@@ -478,23 +489,26 @@
                    frame2.width() - hcrop * 2,
                    frame2.height() - vcrop * 2,
                    frame2.GetTimeStamp(),
-                   frame2.GetYPlane() + vcrop * frame2.GetYPitch()
+                   frame2.video_frame_buffer()->DataY()
+                       + vcrop * frame2.video_frame_buffer()->StrideY()
                        + hcrop,
-                   frame2.GetYPitch(),
-                   frame2.GetUPlane() + vcrop * frame2.GetUPitch() / 2
+                   frame2.video_frame_buffer()->StrideY(),
+                   frame2.video_frame_buffer()->DataU()
+                       + vcrop * frame2.video_frame_buffer()->StrideU() / 2
                        + hcrop / 2,
-                   frame2.GetUPitch(),
-                   frame2.GetVPlane() + vcrop * frame2.GetVPitch() / 2
+                   frame2.video_frame_buffer()->StrideU(),
+                   frame2.video_frame_buffer()->DataV()
+                       + vcrop * frame2.video_frame_buffer()->StrideV() / 2
                        + hcrop / 2,
-                   frame2.GetVPitch(),
+                   frame2.video_frame_buffer()->StrideV(),
                    max_error);
   }

   static bool IsBlack(const cricket::VideoFrame& frame) {
     return !IsNull(frame) &&
-           *frame.GetYPlane() == 16 &&
-           *frame.GetUPlane() == 128 &&
-           *frame.GetVPlane() == 128;
+           *frame.video_frame_buffer()->DataY() == 16 &&
+           *frame.video_frame_buffer()->DataU() == 128 &&
+           *frame.video_frame_buffer()->DataV() == 128;
   }

   ////////////////////////
@@ -541,9 +555,12 @@
     uint8_t* y = ALIGNP(buf.get(), kAlignment);
     uint8_t* u = y + kWidth * kHeight;
     uint8_t* v = u + (kWidth / 2) * kHeight;
-    EXPECT_EQ(0, libyuv::I420ToI422(frame1.GetYPlane(), frame1.GetYPitch(),
-                                    frame1.GetUPlane(), frame1.GetUPitch(),
-                                    frame1.GetVPlane(), frame1.GetVPitch(),
+    EXPECT_EQ(0, libyuv::I420ToI422(frame1.video_frame_buffer()->DataY(),
+                                    frame1.video_frame_buffer()->StrideY(),
+                                    frame1.video_frame_buffer()->DataU(),
+                                    frame1.video_frame_buffer()->StrideU(),
+                                    frame1.video_frame_buffer()->DataV(),
+                                    frame1.video_frame_buffer()->StrideV(),
                                     y, kWidth,
                                     u, kWidth / 2,
                                     v, kWidth / 2,
@@ -560,9 +577,12 @@
     size_t buf_size = kWidth * kHeight * 2;
     std::unique_ptr<uint8_t[]> buf(new uint8_t[buf_size + kAlignment]);
     uint8_t* yuy2 = ALIGNP(buf.get(), kAlignment);
-    EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.GetYPlane(), frame1.GetYPitch(),
-                                    frame1.GetUPlane(), frame1.GetUPitch(),
-                                    frame1.GetVPlane(), frame1.GetVPitch(),
+    EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.video_frame_buffer()->DataY(),
+                                    frame1.video_frame_buffer()->StrideY(),
+                                    frame1.video_frame_buffer()->DataU(),
+                                    frame1.video_frame_buffer()->StrideU(),
+                                    frame1.video_frame_buffer()->DataV(),
+                                    frame1.video_frame_buffer()->StrideV(),
                                     yuy2, kWidth * 2,
                                     kWidth, kHeight));
     EXPECT_TRUE(LoadFrame(yuy2, buf_size, cricket::FOURCC_YUY2,
@@ -577,9 +597,12 @@
     size_t buf_size = kWidth * kHeight * 2;
     std::unique_ptr<uint8_t[]> buf(new uint8_t[buf_size + kAlignment + 1]);
     uint8_t* yuy2 = ALIGNP(buf.get(), kAlignment) + 1;
-    EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.GetYPlane(), frame1.GetYPitch(),
-                                    frame1.GetUPlane(), frame1.GetUPitch(),
-                                    frame1.GetVPlane(), frame1.GetVPitch(),
+    EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.video_frame_buffer()->DataY(),
+                                    frame1.video_frame_buffer()->StrideY(),
+                                    frame1.video_frame_buffer()->DataU(),
+                                    frame1.video_frame_buffer()->StrideU(),
+                                    frame1.video_frame_buffer()->DataV(),
+                                    frame1.video_frame_buffer()->StrideV(),
                                     yuy2, kWidth * 2,
                                     kWidth, kHeight));
     EXPECT_TRUE(LoadFrame(yuy2, buf_size, cricket::FOURCC_YUY2,
@@ -792,16 +815,23 @@
     EXPECT_TRUE(frame2.Init(cricket::FOURCC_##FOURCC, kWidth, kHeight, kWidth, \
                             kHeight, \
                             reinterpret_cast<uint8_t*>(ms->GetBuffer()), \
-                            data_size, 0, webrtc::kVideoRotation_0)); \
-    int width_rotate = frame1.width(); \
-    int height_rotate = frame1.height(); \
-    EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
-    libyuv::I420Mirror( \
-        frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(), \
-        frame2.GetUPitch(), frame2.GetVPlane(), frame2.GetVPitch(), \
-        frame3.GetYPlane(), frame3.GetYPitch(), frame3.GetUPlane(), \
-        frame3.GetUPitch(), frame3.GetVPlane(), frame3.GetVPitch(), kWidth, \
-        kHeight); \
+                            data_size, 0, webrtc::kVideoRotation_0)); \
+    int width_rotate = frame1.width(); \
+    int height_rotate = frame1.height(); \
+    EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
+    libyuv::I420Mirror(frame2.video_frame_buffer()->DataY(), \
+                       frame2.video_frame_buffer()->StrideY(), \
+                       frame2.video_frame_buffer()->DataU(), \
+                       frame2.video_frame_buffer()->StrideU(), \
+                       frame2.video_frame_buffer()->DataV(), \
+                       frame2.video_frame_buffer()->StrideV(), \
+                       frame3.video_frame_buffer()->MutableDataY(), \
+                       frame3.video_frame_buffer()->StrideY(), \
+                       frame3.video_frame_buffer()->MutableDataU(), \
+                       frame3.video_frame_buffer()->StrideU(), \
+                       frame3.video_frame_buffer()->MutableDataV(), \
+                       frame3.video_frame_buffer()->StrideV(), \
+                       kWidth, kHeight); \
     EXPECT_TRUE(IsEqual(frame1, frame3, 0)); \
   }

@@ -823,16 +853,23 @@
     EXPECT_TRUE(frame2.Init(cricket::FOURCC_##FOURCC, kWidth, kHeight, kWidth, \
                             kHeight, \
                             reinterpret_cast<uint8_t*>(ms->GetBuffer()), \
-                            data_size, 0, webrtc::kVideoRotation_0)); \
-    int width_rotate = frame1.width(); \
-    int height_rotate = frame1.height(); \
-    EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
-    libyuv::I420Rotate( \
-        frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(), \
-        frame2.GetUPitch(), frame2.GetVPlane(), frame2.GetVPitch(), \
-        frame3.GetYPlane(), frame3.GetYPitch(), frame3.GetUPlane(), \
-        frame3.GetUPitch(), frame3.GetVPlane(), frame3.GetVPitch(), kWidth, \
-        kHeight, libyuv::kRotate##ROTATE); \
+                            data_size, 0, webrtc::kVideoRotation_0)); \
+    int width_rotate = frame1.width(); \
+    int height_rotate = frame1.height(); \
+    EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
+    libyuv::I420Rotate(frame2.video_frame_buffer()->DataY(), \
+                       frame2.video_frame_buffer()->StrideY(), \
+                       frame2.video_frame_buffer()->DataU(), \
+                       frame2.video_frame_buffer()->StrideU(), \
+                       frame2.video_frame_buffer()->DataV(), \
+                       frame2.video_frame_buffer()->StrideV(), \
+                       frame3.video_frame_buffer()->MutableDataY(), \
+                       frame3.video_frame_buffer()->StrideY(), \
+                       frame3.video_frame_buffer()->MutableDataU(), \
+                       frame3.video_frame_buffer()->StrideU(), \
+                       frame3.video_frame_buffer()->MutableDataV(), \
+                       frame3.video_frame_buffer()->StrideV(), \
+                       kWidth, kHeight, libyuv::kRotate##ROTATE); \
     EXPECT_TRUE(IsEqual(frame1, frame3, 0)); \
   }

@@ -952,9 +989,9 @@
     }
     EXPECT_EQ(5, frame.width());
     EXPECT_EQ(5, frame.height());
-    EXPECT_EQ(5, frame.GetYPitch());
-    EXPECT_EQ(3, frame.GetUPitch());
-    EXPECT_EQ(3, frame.GetVPitch());
+    EXPECT_EQ(5, frame.video_frame_buffer()->StrideY());
+    EXPECT_EQ(3, frame.video_frame_buffer()->StrideU());
+    EXPECT_EQ(3, frame.video_frame_buffer()->StrideV());
   }

   // Test 1 pixel edge case image ARGB buffer.
@@ -1121,8 +1158,10 @@
     ASSERT_TRUE(LoadFrameNoRepeat(&frame1));
     ASSERT_TRUE(LoadFrame(kJpeg400Filename,
                           cricket::FOURCC_MJPG, kWidth, kHeight, &frame2));
-    EXPECT_TRUE(IsPlaneEqual("y", frame1.GetYPlane(), frame1.GetYPitch(),
-                             frame2.GetYPlane(), frame2.GetYPitch(),
+    EXPECT_TRUE(IsPlaneEqual("y", frame1.video_frame_buffer()->DataY(),
+                             frame1.video_frame_buffer()->StrideY(),
+                             frame2.video_frame_buffer()->DataY(),
+                             frame2.video_frame_buffer()->StrideY(),
                              kWidth, kHeight, 32));
     EXPECT_TRUE(IsEqual(frame1, frame2, 128));
   }
@@ -1304,9 +1343,7 @@
       EXPECT_TRUE(frame2.Init(frame1));
     }
     EXPECT_TRUE(IsEqual(frame1, frame2, 0));
-    EXPECT_EQ(frame1.GetYPlane(), frame2.GetYPlane());
-    EXPECT_EQ(frame1.GetUPlane(), frame2.GetUPlane());
-    EXPECT_EQ(frame1.GetVPlane(), frame2.GetVPlane());
+    EXPECT_EQ(frame1.video_frame_buffer(), frame2.video_frame_buffer());
   }

   // Test creating an empty image and initing it to black.
@@ -1419,9 +1456,12 @@
     EXPECT_TRUE(frame2.InitToBlack(kWidth, kHeight, 0));
     for (int i = 0; i < repeat_from; ++i) {
       EXPECT_EQ(0, RGBToI420(out, stride,
-                             frame2.GetYPlane(), frame2.GetYPitch(),
-                             frame2.GetUPlane(), frame2.GetUPitch(),
-                             frame2.GetVPlane(), frame2.GetVPitch(),
+                             frame2.video_frame_buffer()->MutableDataY(),
+                             frame2.video_frame_buffer()->StrideY(),
+                             frame2.video_frame_buffer()->MutableDataU(),
+                             frame2.video_frame_buffer()->StrideU(),
+                             frame2.video_frame_buffer()->MutableDataV(),
+                             frame2.video_frame_buffer()->StrideV(),
                              kWidth, kHeight));
     }
     if (rowpad) {
@@ -1724,9 +1764,12 @@
     uint8_t* v = u + (kWidth / 2) * kHeight;
     ASSERT_TRUE(LoadFrameNoRepeat(&frame1));
     for (int i = 0; i < repeat_; ++i) {
-      EXPECT_EQ(0, libyuv::I420ToI422(frame1.GetYPlane(), frame1.GetYPitch(),
-                                      frame1.GetUPlane(), frame1.GetUPitch(),
-                                      frame1.GetVPlane(), frame1.GetVPitch(),
+      EXPECT_EQ(0, libyuv::I420ToI422(frame1.video_frame_buffer()->DataY(),
+                                      frame1.video_frame_buffer()->StrideY(),
+                                      frame1.video_frame_buffer()->DataU(),
+                                      frame1.video_frame_buffer()->StrideU(),
+                                      frame1.video_frame_buffer()->DataV(),
+                                      frame1.video_frame_buffer()->StrideV(),
                                       y, kWidth,
                                       u, kWidth / 2,
                                       v, kWidth / 2,
@@ -1749,7 +1792,8 @@
     target.reset(source->Copy());
     EXPECT_TRUE(IsEqual(*source, *target, 0));
     source.reset();
-    EXPECT_TRUE(target->GetYPlane() != NULL);
+    ASSERT_TRUE(target->video_frame_buffer() != NULL);
+    EXPECT_TRUE(target->video_frame_buffer()->DataY() != NULL);
   }

   void CopyIsRef() {
@@ -1759,9 +1803,7 @@
     target.reset(source->Copy());
     EXPECT_TRUE(IsEqual(*source, *target, 0));
     const T* const_source = source.get();
-    EXPECT_EQ(const_source->GetYPlane(), target->GetYPlane());
-    EXPECT_EQ(const_source->GetUPlane(), target->GetUPlane());
-    EXPECT_EQ(const_source->GetVPlane(), target->GetVPlane());
+    EXPECT_EQ(const_source->video_frame_buffer(), target->video_frame_buffer());
   }

   void StretchToFrame() {
|