Index: webrtc/modules/video_coding/codecs/test/videoprocessor.cc
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
index 888adb8939e52bf7993c1ae049b310a9a54f6708..30329eccecced6b89b81f69a4653ca6aa09bebea 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
@@ -16,6 +16,8 @@
 #include <limits>
 #include <vector>
 
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h"
 #include "webrtc/system_wrappers/interface/cpu_info.h"
 
 namespace webrtc {
@@ -222,7 +224,9 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
   }
 }
 
-void VideoProcessorImpl::FrameEncoded(const EncodedImage& encoded_image) {
+void VideoProcessorImpl::FrameEncoded(
+    const EncodedImage& encoded_image,
+    const webrtc::RTPFragmentationHeader* fragmentation) {
   // Timestamp is frame number, so this gives us #dropped frames.
   int num_dropped_from_prev_encode = encoded_image._timeStamp -
       prev_time_stamp_ - 1;
@@ -272,12 +276,16 @@ void VideoProcessorImpl::FrameEncoded(const EncodedImage& encoded_image) {
       assert(false);
     }
   }
-  rtc::scoped_ptr<uint8_t[]> copied_buffer(new uint8_t[encoded_image._length]);
-  memcpy(copied_buffer.get(), encoded_image._buffer, encoded_image._length);
+
+  // The image to feed to the decoder.
   EncodedImage copied_image;
   memcpy(&copied_image, &encoded_image, sizeof(copied_image));
+  // Make a raw copy of the |encoded_image| buffer.
+  rtc::scoped_ptr<uint8_t[]> copied_buffer(new uint8_t[encoded_image._length]);
+  memcpy(copied_buffer.get(), encoded_image._buffer, encoded_image._length);
   copied_image._size = copied_image._length;
   copied_image._buffer = copied_buffer.get();
+
   if (!exclude_this_frame) {
     stat.packets_dropped =
         packet_manipulator_->ManipulatePackets(&copied_image);
@@ -409,12 +417,14 @@ VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
     const EncodedImage& encoded_image,
     const webrtc::CodecSpecificInfo* codec_specific_info,
     const webrtc::RTPFragmentationHeader* fragmentation) {
-  video_processor_->FrameEncoded(encoded_image);  // Forward to parent class.
+  // Forward to parent class.
+  video_processor_->FrameEncoded(encoded_image, fragmentation);
   return 0;
 }
 
 int32_t VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded(
     VideoFrame& image) {
-  video_processor_->FrameDecoded(image);  // forward to parent class
+  // Forward to parent class.
+  video_processor_->FrameDecoded(image);
   return 0;
 }
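
Note on the new |fragmentation| parameter (not part of the patch): the hunks above thread the RTPFragmentationHeader from the encode-complete callback into VideoProcessorImpl::FrameEncoded(), but they do not show how FrameEncoded() consumes it. The sketch below illustrates one plausible use, assuming the VideoDecoder::Decode() signature of this WebRTC revision and the existing |decoder_| and |last_frame_missing_| members of VideoProcessorImpl; the helper name DecodeWithFragmentation is hypothetical and not part of this patch.

// Sketch only: hand the forwarded |fragmentation| to the decoder along with
// the copied image. H.264 decoders need the NAL-unit fragmentation to locate
// NAL units in the bitstream, which is why the callback now forwards it
// instead of dropping it. Assumes this lives in videoprocessor.cc, so the
// file's existing includes apply; |decoder_| and |last_frame_missing_| are
// the members already used by the real decode path.
void VideoProcessorImpl::DecodeWithFragmentation(
    const EncodedImage& copied_image,
    const webrtc::RTPFragmentationHeader* fragmentation) {
  int32_t decode_result =
      decoder_->Decode(copied_image, last_frame_missing_, fragmentation);
  if (decode_result != WEBRTC_VIDEO_CODEC_OK) {
    // Record the failure the same way the existing decode-error path does.
  }
}

The includes added in the first hunk are consistent with this direction: webrtc/base/checks.h provides the RTC_CHECK/RTC_DCHECK macros, and h264_encoder_impl.h suggests H.264-specific handling elsewhere in the file, though those hunks are not shown here.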