| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 84 matching lines...) |
| 95 case kARGB: | 95 case kARGB: |
| 96 buffer_size = width * height * 4; | 96 buffer_size = width * height * 4; |
| 97 break; | 97 break; |
| 98 default: | 98 default: |
| 99 assert(false); | 99 assert(false); |
| 100 break; | 100 break; |
| 101 } | 101 } |
| 102 return buffer_size; | 102 return buffer_size; |
| 103 } | 103 } |
| 104 | 104 |
| 105 static int PrintPlane(const uint8_t* buf, | |
| 106 int width, | |
| 107 int height, | |
| 108 int stride, | |
| 109 FILE* file) { | |
| 110 for (int i = 0; i < height; i++, buf += stride) { | |
| 111 if (fwrite(buf, 1, width, file) != static_cast<unsigned int>(width)) | |
| 112 return -1; | |
| 113 } | |
| 114 return 0; | |
| 115 } | |
| 116 | |
| 117 // TODO(nisse): Belongs with the test code? | |
| 118 int PrintVideoFrame(const VideoFrame& frame, FILE* file) { | 105 int PrintVideoFrame(const VideoFrame& frame, FILE* file) { |
| 119 if (file == NULL) | 106 if (file == NULL) |
| 120 return -1; | 107 return -1; |
| 121 if (frame.IsZeroSize()) | 108 if (frame.IsZeroSize()) |
| 122 return -1; | 109 return -1; |
| 123 int width = frame.video_frame_buffer()->width(); | 110 for (int planeNum = 0; planeNum < kNumOfPlanes; ++planeNum) { |
| 124 int height = frame.video_frame_buffer()->height(); | 111 int width = (planeNum ? (frame.width() + 1) / 2 : frame.width()); |
| 125 int chroma_width = (width + 1) / 2; | 112 int height = (planeNum ? (frame.height() + 1) / 2 : frame.height()); |
| 126 int chroma_height = (height + 1) / 2; | 113 PlaneType plane_type = static_cast<PlaneType>(planeNum); |
| 127 | 114 const uint8_t* plane_buffer = frame.buffer(plane_type); |
| 128 if (PrintPlane(frame.video_frame_buffer()->DataY(), width, height, | 115 for (int y = 0; y < height; y++) { |
| 129 frame.video_frame_buffer()->StrideY(), file) < 0) { | 116 if (fwrite(plane_buffer, 1, width, file) != |
| 130 return -1; | 117 static_cast<unsigned int>(width)) { |
| 131 } | 118 return -1; |
| 132 if (PrintPlane(frame.video_frame_buffer()->DataU(), | 119 } |
| 133 chroma_width, chroma_height, | 120 plane_buffer += frame.stride(plane_type); |
| 134 frame.video_frame_buffer()->StrideU(), file) < 0) { | 121 } |
| 135 return -1; | |
| 136 } | |
| 137 if (PrintPlane(frame.video_frame_buffer()->DataV(), | |
| 138 chroma_width, chroma_height, | |
| 139 frame.video_frame_buffer()->StrideV(), file) < 0) { | |
| 140 return -1; | |
| 141 } | 122 } |
| 142 return 0; | 123 return 0; |
| 143 } | 124 } |
| 144 | 125 |
| 145 int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer) { | 126 int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer) { |
| 146 assert(buffer); | 127 assert(buffer); |
| 147 if (input_frame.IsZeroSize()) | 128 if (input_frame.IsZeroSize()) |
| 148 return -1; | 129 return -1; |
| 149 size_t length = | 130 size_t length = |
| 150 CalcBufferSize(kI420, input_frame.width(), input_frame.height()); | 131 CalcBufferSize(kI420, input_frame.width(), input_frame.height()); |
| 151 if (size < length) { | 132 if (size < length) { |
| 152 return -1; | 133 return -1; |
| 153 } | 134 } |
| 154 | 135 |
| 155 int width = input_frame.video_frame_buffer()->width(); | 136 int pos = 0; |
| 156 int height = input_frame.video_frame_buffer()->height(); | 137 uint8_t* buffer_ptr = buffer; |
| 157 int chroma_width = (width + 1) / 2; | |
| 158 int chroma_height = (height + 1) / 2; | |
| 159 | 138 |
| 160 libyuv::I420Copy(input_frame.video_frame_buffer()->DataY(), | 139 for (int plane = 0; plane < kNumOfPlanes; ++plane) { |
| 161 input_frame.video_frame_buffer()->StrideY(), | 140 int width = (plane ? (input_frame.width() + 1) / 2 : |
| 162 input_frame.video_frame_buffer()->DataU(), | 141 input_frame.width()); |
| 163 input_frame.video_frame_buffer()->StrideU(), | 142 int height = (plane ? (input_frame.height() + 1) / 2 : |
| 164 input_frame.video_frame_buffer()->DataV(), | 143 input_frame.height()); |
| 165 input_frame.video_frame_buffer()->StrideV(), | 144 const uint8_t* plane_ptr = input_frame.buffer( |
| 166 buffer, width, | 145 static_cast<PlaneType>(plane)); |
| 167 buffer + width*height, chroma_width, | 146 for (int y = 0; y < height; y++) { |
| 168 buffer + width*height + chroma_width*chroma_height, | 147 memcpy(&buffer_ptr[pos], plane_ptr, width); |
| 169 chroma_width, | 148 pos += width; |
| 170 width, height); | 149 plane_ptr += input_frame.stride(static_cast<PlaneType>(plane)); |
| 171 | 150 } |
| | 151 } |
| 172 return static_cast<int>(length); | 152 return static_cast<int>(length); |
| 173 } | 153 } |
| 174 | 154 |
| 175 | 155 |
| 176 int ConvertNV12ToRGB565(const uint8_t* src_frame, | 156 int ConvertNV12ToRGB565(const uint8_t* src_frame, |
| 177 uint8_t* dst_frame, | 157 uint8_t* dst_frame, |
| 178 int width, int height) { | 158 int width, int height) { |
| 179 int abs_height = (height < 0) ? -height : height; | 159 int abs_height = (height < 0) ? -height : height; |
| 180 const uint8_t* yplane = src_frame; | 160 const uint8_t* yplane = src_frame; |
| 181 const uint8_t* uvInterlaced = src_frame + (width * abs_height); | 161 const uint8_t* uvInterlaced = src_frame + (width * abs_height); |
| (...skipping 59 matching lines...) |
| 241 return libyuv::FOURCC_BGRA; | 221 return libyuv::FOURCC_BGRA; |
| 242 case kARGB4444: | 222 case kARGB4444: |
| 243 return libyuv::FOURCC_R444; | 223 return libyuv::FOURCC_R444; |
| 244 case kARGB1555: | 224 case kARGB1555: |
| 245 return libyuv::FOURCC_RGBO; | 225 return libyuv::FOURCC_RGBO; |
| 246 } | 226 } |
| 247 assert(false); | 227 assert(false); |
| 248 return libyuv::FOURCC_ANY; | 228 return libyuv::FOURCC_ANY; |
| 249 } | 229 } |
| 250 | 230 |
| 251 // TODO(nisse): Delete this wrapper, let callers use libyuv directly. | |
| 252 int ConvertToI420(VideoType src_video_type, | 231 int ConvertToI420(VideoType src_video_type, |
| 253 const uint8_t* src_frame, | 232 const uint8_t* src_frame, |
| 254 int crop_x, | 233 int crop_x, |
| 255 int crop_y, | 234 int crop_y, |
| 256 int src_width, | 235 int src_width, |
| 257 int src_height, | 236 int src_height, |
| 258 size_t sample_size, | 237 size_t sample_size, |
| 259 VideoRotation rotation, | 238 VideoRotation rotation, |
| 260 VideoFrame* dst_frame) { | 239 VideoFrame* dst_frame) { |
| 261 int dst_width = dst_frame->width(); | 240 int dst_width = dst_frame->width(); |
| 262 int dst_height = dst_frame->height(); | 241 int dst_height = dst_frame->height(); |
| 263 // LibYuv expects pre-rotation values for dst. | 242 // LibYuv expects pre-rotation values for dst. |
| 264 // Stride values should correspond to the destination values. | 243 // Stride values should correspond to the destination values. |
| 265 if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) { | 244 if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) { |
| 266 dst_width = dst_frame->height(); | 245 dst_width = dst_frame->height(); |
| 267 dst_height = dst_frame->width(); | 246 dst_height = dst_frame->width(); |
| 268 } | 247 } |
| 269 return libyuv::ConvertToI420( | 248 return libyuv::ConvertToI420(src_frame, sample_size, |
| 270 src_frame, sample_size, | 249 dst_frame->buffer(kYPlane), |
| 271 dst_frame->video_frame_buffer()->MutableDataY(), | 250 dst_frame->stride(kYPlane), |
| 272 dst_frame->video_frame_buffer()->StrideY(), | 251 dst_frame->buffer(kUPlane), |
| 273 dst_frame->video_frame_buffer()->MutableDataU(), | 252 dst_frame->stride(kUPlane), |
| 274 dst_frame->video_frame_buffer()->StrideU(), | 253 dst_frame->buffer(kVPlane), |
| 275 dst_frame->video_frame_buffer()->MutableDataV(), | 254 dst_frame->stride(kVPlane), |
| 276 dst_frame->video_frame_buffer()->StrideV(), | 255 crop_x, crop_y, |
| 277 crop_x, crop_y, | 256 src_width, src_height, |
| 278 src_width, src_height, | 257 dst_width, dst_height, |
| 279 dst_width, dst_height, | 258 ConvertRotationMode(rotation), |
| 280 ConvertRotationMode(rotation), | 259 ConvertVideoType(src_video_type)); |
| 281 ConvertVideoType(src_video_type)); | |
| 282 } | 260 } |
| 283 | 261 |
| 284 int ConvertFromI420(const VideoFrame& src_frame, | 262 int ConvertFromI420(const VideoFrame& src_frame, |
| 285 VideoType dst_video_type, | 263 VideoType dst_video_type, |
| 286 int dst_sample_size, | 264 int dst_sample_size, |
| 287 uint8_t* dst_frame) { | 265 uint8_t* dst_frame) { |
| 288 return libyuv::ConvertFromI420( | 266 return libyuv::ConvertFromI420(src_frame.buffer(kYPlane), |
| 289 src_frame.video_frame_buffer()->DataY(), | 267 src_frame.stride(kYPlane), |
| 290 src_frame.video_frame_buffer()->StrideY(), | 268 src_frame.buffer(kUPlane), |
| 291 src_frame.video_frame_buffer()->DataU(), | 269 src_frame.stride(kUPlane), |
| 292 src_frame.video_frame_buffer()->StrideU(), | 270 src_frame.buffer(kVPlane), |
| 293 src_frame.video_frame_buffer()->DataV(), | 271 src_frame.stride(kVPlane), |
| 294 src_frame.video_frame_buffer()->StrideV(), | 272 dst_frame, dst_sample_size, |
| 295 dst_frame, dst_sample_size, | 273 src_frame.width(), src_frame.height(), |
| 296 src_frame.width(), src_frame.height(), | 274 ConvertVideoType(dst_video_type)); |
| 297 ConvertVideoType(dst_video_type)); | |
| 298 } | 275 } |
| 299 | 276 |
| 300 // TODO(mikhal): Create a designated VideoFrame for non I420. | 277 // TODO(mikhal): Create a designated VideoFrame for non I420. |
| 301 int ConvertFromYV12(const VideoFrame& src_frame, | 278 int ConvertFromYV12(const VideoFrame& src_frame, |
| 302 VideoType dst_video_type, | 279 VideoType dst_video_type, |
| 303 int dst_sample_size, | 280 int dst_sample_size, |
| 304 uint8_t* dst_frame) { | 281 uint8_t* dst_frame) { |
| 305 // YV12 = Y, V, U | 282 // YV12 = Y, V, U |
| 306 return libyuv::ConvertFromI420( | 283 return libyuv::ConvertFromI420(src_frame.buffer(kYPlane), |
| 307 src_frame.video_frame_buffer()->DataY(), | 284 src_frame.stride(kYPlane), |
| 308 src_frame.video_frame_buffer()->StrideY(), | 285 src_frame.buffer(kVPlane), |
| 309 src_frame.video_frame_buffer()->DataV(), | 286 src_frame.stride(kVPlane), |
| 310 src_frame.video_frame_buffer()->StrideV(), | 287 src_frame.buffer(kUPlane), |
| 311 src_frame.video_frame_buffer()->DataU(), | 288 src_frame.stride(kUPlane), |
| 312 src_frame.video_frame_buffer()->StrideU(), | 289 dst_frame, dst_sample_size, |
| 313 dst_frame, dst_sample_size, | 290 src_frame.width(), src_frame.height(), |
| 314 src_frame.width(), src_frame.height(), | 291 ConvertVideoType(dst_video_type)); |
| 315 ConvertVideoType(dst_video_type)); | |
| 316 } | 292 } |
| 317 | 293 |
| 318 // Compute PSNR for an I420 frame (all planes) | 294 // Compute PSNR for an I420 frame (all planes) |
| 319 double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) { | 295 double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) { |
| 320 if (!ref_frame || !test_frame) | 296 if (!ref_frame || !test_frame) |
| 321 return -1; | 297 return -1; |
| 322 else if ((ref_frame->width() != test_frame->width()) || | 298 else if ((ref_frame->width() != test_frame->width()) || |
| 323 (ref_frame->height() != test_frame->height())) | 299 (ref_frame->height() != test_frame->height())) |
| 324 return -1; | 300 return -1; |
| 325 else if (ref_frame->width() < 0 || ref_frame->height() < 0) | 301 else if (ref_frame->width() < 0 || ref_frame->height() < 0) |
| 326 return -1; | 302 return -1; |
| 327 | 303 |
| 328 double psnr = libyuv::I420Psnr(ref_frame->video_frame_buffer()->DataY(), | 304 double psnr = libyuv::I420Psnr(ref_frame->buffer(kYPlane), |
| 329 ref_frame->video_frame_buffer()->StrideY(), | 305 ref_frame->stride(kYPlane), |
| 330 ref_frame->video_frame_buffer()->DataU(), | 306 ref_frame->buffer(kUPlane), |
| 331 ref_frame->video_frame_buffer()->StrideU(), | 307 ref_frame->stride(kUPlane), |
| 332 ref_frame->video_frame_buffer()->DataV(), | 308 ref_frame->buffer(kVPlane), |
| 333 ref_frame->video_frame_buffer()->StrideV(), | 309 ref_frame->stride(kVPlane), |
| 334 test_frame->video_frame_buffer()->DataY(), | 310 test_frame->buffer(kYPlane), |
| 335 test_frame->video_frame_buffer()->StrideY(), | 311 test_frame->stride(kYPlane), |
| 336 test_frame->video_frame_buffer()->DataU(), | 312 test_frame->buffer(kUPlane), |
| 337 test_frame->video_frame_buffer()->StrideU(), | 313 test_frame->stride(kUPlane), |
| 338 test_frame->video_frame_buffer()->DataV(), | 314 test_frame->buffer(kVPlane), |
| 339 test_frame->video_frame_buffer()->StrideV(), | 315 test_frame->stride(kVPlane), |
| 340 test_frame->width(), test_frame->height()); | 316 test_frame->width(), test_frame->height()); |
| 341 // LibYuv sets the max psnr value to 128, we restrict it here. | 317 // LibYuv sets the max psnr value to 128, we restrict it here. |
| 342 // In case of 0 mse in one frame, 128 can skew the results significantly. | 318 // In case of 0 mse in one frame, 128 can skew the results significantly. |
| 343 return (psnr > kPerfectPSNR) ? kPerfectPSNR : psnr; | 319 return (psnr > kPerfectPSNR) ? kPerfectPSNR : psnr; |
| 344 } | 320 } |
| 345 | 321 |
| 346 // Compute SSIM for an I420 frame (all planes) | 322 // Compute SSIM for an I420 frame (all planes) |
| 347 double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame) { | 323 double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame) { |
| 348 if (!ref_frame || !test_frame) | 324 if (!ref_frame || !test_frame) |
| 349 return -1; | 325 return -1; |
| 350 else if ((ref_frame->width() != test_frame->width()) || | 326 else if ((ref_frame->width() != test_frame->width()) || |
| 351 (ref_frame->height() != test_frame->height())) | 327 (ref_frame->height() != test_frame->height())) |
| 352 return -1; | 328 return -1; |
| 353 else if (ref_frame->width() < 0 || ref_frame->height() < 0) | 329 else if (ref_frame->width() < 0 || ref_frame->height() < 0) |
| 354 return -1; | 330 return -1; |
| 355 | 331 |
| 356 return libyuv::I420Ssim(ref_frame->video_frame_buffer()->DataY(), | 332 return libyuv::I420Ssim(ref_frame->buffer(kYPlane), |
| 357 ref_frame->video_frame_buffer()->StrideY(), | 333 ref_frame->stride(kYPlane), |
| 358 ref_frame->video_frame_buffer()->DataU(), | 334 ref_frame->buffer(kUPlane), |
| 359 ref_frame->video_frame_buffer()->StrideU(), | 335 ref_frame->stride(kUPlane), |
| 360 ref_frame->video_frame_buffer()->DataV(), | 336 ref_frame->buffer(kVPlane), |
| 361 ref_frame->video_frame_buffer()->StrideV(), | 337 ref_frame->stride(kVPlane), |
| 362 test_frame->video_frame_buffer()->DataY(), | 338 test_frame->buffer(kYPlane), |
| 363 test_frame->video_frame_buffer()->StrideY(), | 339 test_frame->stride(kYPlane), |
| 364 test_frame->video_frame_buffer()->DataU(), | 340 test_frame->buffer(kUPlane), |
| 365 test_frame->video_frame_buffer()->StrideU(), | 341 test_frame->stride(kUPlane), |
| 366 test_frame->video_frame_buffer()->DataV(), | 342 test_frame->buffer(kVPlane), |
| 367 test_frame->video_frame_buffer()->StrideV(), | 343 test_frame->stride(kVPlane), |
| 368 test_frame->width(), test_frame->height()); | 344 test_frame->width(), test_frame->height()); |
| 369 } | 345 } |
| 370 } // namespace webrtc | 346 } // namespace webrtc |
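
Both sides of the PrintVideoFrame() diff write a plane one row at a time: exactly `width` bytes per row, after which the read pointer advances by the plane's `stride`, so any alignment padding in the buffer never reaches the output file. A minimal standalone sketch of that pattern, with a made-up helper name and sample plane (not code from the WebRTC tree):

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

// Stride-aware plane dump, mirroring PrintPlane(): write `width` bytes per
// row, then skip ahead by `stride` so row padding is never written out.
static int DumpPlane(const uint8_t* buf, int width, int height, int stride,
                     FILE* file) {
  for (int i = 0; i < height; ++i, buf += stride) {
    if (std::fwrite(buf, 1, width, file) != static_cast<size_t>(width))
      return -1;
  }
  return 0;
}

int main() {
  // Hypothetical 4x2 plane stored with a stride of 8 (4 padding bytes per row).
  const int width = 4, height = 2, stride = 8;
  std::vector<uint8_t> plane(static_cast<size_t>(stride) * height, 0x80);
  FILE* file = std::fopen("plane.raw", "wb");
  if (!file)
    return 1;
  const int rc = DumpPlane(plane.data(), width, height, stride, file);
  std::fclose(file);
  return rc == 0 ? 0 : 1;  // plane.raw ends up width * height = 8 bytes long.
}
```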
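
ExtractBuffer() packs the three I420 planes tightly into the caller's buffer: the full-resolution Y plane first, then the U and V planes at half resolution in each dimension, with chroma sizes rounded up for odd frame sizes. Those are the same offsets the left-hand side passes to libyuv::I420Copy(). A sketch of that layout computation (the struct and helper names are illustrative, not from the tree):

```cpp
#include <cstddef>
#include <cstdio>

// Tightly packed I420 layout: Y, then U, then V, with no per-row padding.
struct I420Layout {
  size_t y_size;       // full-resolution luma plane
  size_t chroma_size;  // one quarter-resolution chroma plane
  size_t u_offset;     // U starts right after Y
  size_t v_offset;     // V starts right after U
  size_t total;        // the tightly packed size ExtractBuffer() returns
};

static I420Layout MakeI420Layout(int width, int height) {
  const size_t chroma_w = static_cast<size_t>((width + 1) / 2);
  const size_t chroma_h = static_cast<size_t>((height + 1) / 2);
  I420Layout layout;
  layout.y_size = static_cast<size_t>(width) * static_cast<size_t>(height);
  layout.chroma_size = chroma_w * chroma_h;
  layout.u_offset = layout.y_size;
  layout.v_offset = layout.y_size + layout.chroma_size;
  layout.total = layout.y_size + 2 * layout.chroma_size;
  return layout;
}

int main() {
  const I420Layout layout = MakeI420Layout(640, 480);
  // 640x480: Y = 307200, U = V = 76800 each, total = 460800 bytes.
  std::printf("total=%zu u_offset=%zu v_offset=%zu\n", layout.total,
              layout.u_offset, layout.v_offset);
  return 0;
}
```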
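
ConvertToI420() hands libyuv the destination dimensions as they are before rotation is applied, so for 90- and 270-degree rotations the wrapper swaps dst_frame's width and height before calling libyuv::ConvertToI420(). A sketch of just that fix-up (the enum is illustrative; the real code takes a webrtc::VideoRotation):

```cpp
#include <cstdio>
#include <utility>

enum class Rotation { kDeg0, kDeg90, kDeg180, kDeg270 };

// libyuv expects pre-rotation destination dimensions, so a 90- or 270-degree
// rotation swaps the width and height that get passed down.
static std::pair<int, int> PreRotationSize(int dst_width, int dst_height,
                                           Rotation rotation) {
  if (rotation == Rotation::kDeg90 || rotation == Rotation::kDeg270)
    return {dst_height, dst_width};
  return {dst_width, dst_height};
}

int main() {
  // A 480x640 (portrait) destination produced by a 90-degree rotation is
  // described to libyuv as 640x480.
  const std::pair<int, int> size = PreRotationSize(480, 640, Rotation::kDeg90);
  std::printf("%d x %d\n", size.first, size.second);  // 640 x 480
  return 0;
}
```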
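
I420PSNR() clamps the value returned by libyuv::I420Psnr(): for identical frames the MSE is zero and libyuv reports its maximum of 128, which would skew averaged results, so the wrapper caps the result at kPerfectPSNR. A sketch of the underlying computation and the clamp, assuming 8-bit samples (the ceiling value below is illustrative, not the constant defined in WebRTC):

```cpp
#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>

// PSNR = 10 * log10(MAX^2 / MSE), with MAX = 255 for 8-bit video samples.
static double ClampedPsnr(double sse, uint64_t samples, double perfect_psnr) {
  if (samples == 0 || sse == 0.0)
    return perfect_psnr;  // identical planes: zero MSE, report the ceiling
  const double mse = sse / static_cast<double>(samples);
  const double psnr = 10.0 * std::log10(255.0 * 255.0 / mse);
  return std::min(psnr, perfect_psnr);
}

int main() {
  const double kCeiling = 48.0;  // illustrative ceiling, not WebRTC's constant
  std::printf("%f\n", ClampedPsnr(0.0, 640 * 480, kCeiling));     // 48.0
  std::printf("%f\n", ClampedPsnr(1000.0, 640 * 480, kCeiling));  // ~73 dB, clamped to 48.0
  return 0;
}
```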