Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(113)

Side by Side Diff: webrtc/modules/video_coding/codecs/test/videoprocessor.cc

Issue 2711133002: Step #1: Support pipelining codecs in VideoProcessor. (Closed)
Patch Set: Tidy. Created 3 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « webrtc/modules/video_coding/codecs/test/videoprocessor.h ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 /* 1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
(...skipping 12 matching lines...) Expand all
23 #include "webrtc/common_types.h" 23 #include "webrtc/common_types.h"
24 #include "webrtc/modules/video_coding/include/video_codec_initializer.h" 24 #include "webrtc/modules/video_coding/include/video_codec_initializer.h"
25 #include "webrtc/modules/video_coding/utility/default_video_bitrate_allocator.h" 25 #include "webrtc/modules/video_coding/utility/default_video_bitrate_allocator.h"
26 #include "webrtc/modules/video_coding/utility/simulcast_rate_allocator.h" 26 #include "webrtc/modules/video_coding/utility/simulcast_rate_allocator.h"
27 #include "webrtc/system_wrappers/include/cpu_info.h" 27 #include "webrtc/system_wrappers/include/cpu_info.h"
28 28
29 namespace webrtc { 29 namespace webrtc {
30 namespace test { 30 namespace test {
31 31
32 namespace { 32 namespace {
33
34 // TODO(brandtr): Update this to use the real frame rate.
33 const int k90khzTimestampFrameDiff = 3000; // Assuming 30 fps. 35 const int k90khzTimestampFrameDiff = 3000; // Assuming 30 fps.
34 36
37 // Use the frame number as the basis for timestamp to identify frames. Let the
38 // first timestamp be non-zero, to not make the IvfFileWriter believe that we
39 // want to use capture timestamps in the IVF files.
40 uint32_t FrameNumberToTimestamp(int frame_number) {
41 RTC_DCHECK_GE(frame_number, 0);
42 return (frame_number + 1) * k90khzTimestampFrameDiff;
43 }
44
45 int TimestampToFrameNumber(uint32_t timestamp) {
46 RTC_DCHECK_GT(timestamp, 0);
47 RTC_DCHECK_EQ(timestamp % k90khzTimestampFrameDiff, 0);
48 return (timestamp / k90khzTimestampFrameDiff) - 1;
49 }
50
35 std::unique_ptr<VideoBitrateAllocator> CreateBitrateAllocator( 51 std::unique_ptr<VideoBitrateAllocator> CreateBitrateAllocator(
36 const TestConfig& config) { 52 const TestConfig& config) {
37 std::unique_ptr<TemporalLayersFactory> tl_factory; 53 std::unique_ptr<TemporalLayersFactory> tl_factory;
38 if (config.codec_settings->codecType == VideoCodecType::kVideoCodecVP8) { 54 if (config.codec_settings->codecType == VideoCodecType::kVideoCodecVP8) {
39 tl_factory.reset(new TemporalLayersFactory()); 55 tl_factory.reset(new TemporalLayersFactory());
40 config.codec_settings->VP8()->tl_factory = tl_factory.get(); 56 config.codec_settings->VP8()->tl_factory = tl_factory.get();
41 } 57 }
42 return std::unique_ptr<VideoBitrateAllocator>( 58 return std::unique_ptr<VideoBitrateAllocator>(
43 VideoCodecInitializer::CreateBitrateAllocator(*config.codec_settings, 59 VideoCodecInitializer::CreateBitrateAllocator(*config.codec_settings,
44 std::move(tl_factory))); 60 std::move(tl_factory)));
(...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after
112 FrameWriter* decoded_frame_writer) 128 FrameWriter* decoded_frame_writer)
113 : encoder_(encoder), 129 : encoder_(encoder),
114 decoder_(decoder), 130 decoder_(decoder),
115 bitrate_allocator_(CreateBitrateAllocator(config)), 131 bitrate_allocator_(CreateBitrateAllocator(config)),
116 encode_callback_(new VideoProcessorEncodeCompleteCallback(this)), 132 encode_callback_(new VideoProcessorEncodeCompleteCallback(this)),
117 decode_callback_(new VideoProcessorDecodeCompleteCallback(this)), 133 decode_callback_(new VideoProcessorDecodeCompleteCallback(this)),
118 packet_manipulator_(packet_manipulator), 134 packet_manipulator_(packet_manipulator),
119 config_(config), 135 config_(config),
120 analysis_frame_reader_(analysis_frame_reader), 136 analysis_frame_reader_(analysis_frame_reader),
121 analysis_frame_writer_(analysis_frame_writer), 137 analysis_frame_writer_(analysis_frame_writer),
138 num_frames_(analysis_frame_reader->NumberOfFrames()),
122 source_frame_writer_(source_frame_writer), 139 source_frame_writer_(source_frame_writer),
123 encoded_frame_writer_(encoded_frame_writer), 140 encoded_frame_writer_(encoded_frame_writer),
124 decoded_frame_writer_(decoded_frame_writer), 141 decoded_frame_writer_(decoded_frame_writer),
142 initialized_(false),
143 last_encoded_frame_num_(-1),
144 last_decoded_frame_num_(-1),
125 first_key_frame_has_been_excluded_(false), 145 first_key_frame_has_been_excluded_(false),
126 last_frame_missing_(false), 146 last_decoded_frame_buffer_(0, analysis_frame_reader->FrameLength()),
127 initialized_(false),
128 encoded_frame_size_(0),
129 encoded_frame_type_(kVideoFrameKey),
130 prev_time_stamp_(0),
131 last_encoder_frame_width_(0),
132 last_encoder_frame_height_(0),
133 stats_(stats), 147 stats_(stats),
134 num_dropped_frames_(0), 148 num_dropped_frames_(0),
135 num_spatial_resizes_(0), 149 num_spatial_resizes_(0),
136 bit_rate_factor_(0.0), 150 bit_rate_factor_(0.0) {
137 encode_start_ns_(0),
138 decode_start_ns_(0) {
139 RTC_DCHECK(encoder); 151 RTC_DCHECK(encoder);
140 RTC_DCHECK(decoder); 152 RTC_DCHECK(decoder);
141 RTC_DCHECK(packet_manipulator); 153 RTC_DCHECK(packet_manipulator);
142 RTC_DCHECK(analysis_frame_reader); 154 RTC_DCHECK(analysis_frame_reader);
143 RTC_DCHECK(analysis_frame_writer); 155 RTC_DCHECK(analysis_frame_writer);
144 RTC_DCHECK(stats); 156 RTC_DCHECK(stats);
157
158 frame_infos_.reserve(num_frames_);
145 } 159 }
146 160
147 bool VideoProcessorImpl::Init() { 161 bool VideoProcessorImpl::Init() {
162 RTC_DCHECK(!initialized_)
163 << "This VideoProcessor has already been initialized.";
164
148 // Calculate a factor used for bit rate calculations. 165 // Calculate a factor used for bit rate calculations.
149 bit_rate_factor_ = config_.codec_settings->maxFramerate * 0.001 * 8; // bits 166 bit_rate_factor_ = config_.codec_settings->maxFramerate * 0.001 * 8; // bits
150 167
151 // Initialize data structures used by the encoder/decoder APIs.
152 size_t frame_length_in_bytes = analysis_frame_reader_->FrameLength();
153 last_successful_frame_buffer_.reset(new uint8_t[frame_length_in_bytes]);
154
155 // Set fixed properties common for all frames.
156 // To keep track of spatial resize actions by encoder.
157 last_encoder_frame_width_ = config_.codec_settings->width;
158 last_encoder_frame_height_ = config_.codec_settings->height;
159
160 // Setup required callbacks for the encoder/decoder. 168 // Setup required callbacks for the encoder/decoder.
161 RTC_CHECK_EQ(encoder_->RegisterEncodeCompleteCallback(encode_callback_.get()), 169 RTC_CHECK_EQ(encoder_->RegisterEncodeCompleteCallback(encode_callback_.get()),
162 WEBRTC_VIDEO_CODEC_OK) 170 WEBRTC_VIDEO_CODEC_OK)
163 << "Failed to register encode complete callback"; 171 << "Failed to register encode complete callback";
164 RTC_CHECK_EQ(decoder_->RegisterDecodeCompleteCallback(decode_callback_.get()), 172 RTC_CHECK_EQ(decoder_->RegisterDecodeCompleteCallback(decode_callback_.get()),
165 WEBRTC_VIDEO_CODEC_OK) 173 WEBRTC_VIDEO_CODEC_OK)
166 << "Failed to register decode complete callback"; 174 << "Failed to register decode complete callback";
167 175
168 // Initialize the encoder and decoder. 176 // Initialize the encoder and decoder.
169 uint32_t num_cores = 177 uint32_t num_cores =
170 config_.use_single_core ? 1 : CpuInfo::DetectNumberOfCores(); 178 config_.use_single_core ? 1 : CpuInfo::DetectNumberOfCores();
171 RTC_CHECK_EQ( 179 RTC_CHECK_EQ(
172 encoder_->InitEncode(config_.codec_settings, num_cores, 180 encoder_->InitEncode(config_.codec_settings, num_cores,
173 config_.networking_config.max_payload_size_in_bytes), 181 config_.networking_config.max_payload_size_in_bytes),
174 WEBRTC_VIDEO_CODEC_OK) 182 WEBRTC_VIDEO_CODEC_OK)
175 << "Failed to initialize VideoEncoder"; 183 << "Failed to initialize VideoEncoder";
176 184
177 RTC_CHECK_EQ(decoder_->InitDecode(config_.codec_settings, num_cores), 185 RTC_CHECK_EQ(decoder_->InitDecode(config_.codec_settings, num_cores),
178 WEBRTC_VIDEO_CODEC_OK) 186 WEBRTC_VIDEO_CODEC_OK)
179 << "Failed to initialize VideoDecoder"; 187 << "Failed to initialize VideoDecoder";
180 188
181 if (config_.verbose) { 189 if (config_.verbose) {
182 printf("Video Processor:\n"); 190 printf("Video Processor:\n");
183 printf(" #CPU cores used : %d\n", num_cores); 191 printf(" #CPU cores used : %d\n", num_cores);
184 printf(" Total # of frames: %d\n", 192 printf(" Total # of frames: %d\n", num_frames_);
185 analysis_frame_reader_->NumberOfFrames());
186 printf(" Codec settings:\n"); 193 printf(" Codec settings:\n");
187 printf(" Encoder implementation name: %s\n", 194 printf(" Encoder implementation name: %s\n",
188 encoder_->ImplementationName()); 195 encoder_->ImplementationName());
189 printf(" Decoder implementation name: %s\n", 196 printf(" Decoder implementation name: %s\n",
190 decoder_->ImplementationName()); 197 decoder_->ImplementationName());
191 PrintCodecSettings(config_.codec_settings); 198 PrintCodecSettings(config_.codec_settings);
192 } 199 }
200
193 initialized_ = true; 201 initialized_ = true;
202
194 return true; 203 return true;
195 } 204 }
196 205
197 VideoProcessorImpl::~VideoProcessorImpl() { 206 VideoProcessorImpl::~VideoProcessorImpl() {
198 encoder_->RegisterEncodeCompleteCallback(nullptr); 207 encoder_->RegisterEncodeCompleteCallback(nullptr);
199 decoder_->RegisterDecodeCompleteCallback(nullptr); 208 decoder_->RegisterDecodeCompleteCallback(nullptr);
200 } 209 }
201 210
202 void VideoProcessorImpl::SetRates(int bit_rate, int frame_rate) { 211 void VideoProcessorImpl::SetRates(int bit_rate, int frame_rate) {
203 int set_rates_result = encoder_->SetRateAllocation( 212 int set_rates_result = encoder_->SetRateAllocation(
204 bitrate_allocator_->GetAllocation(bit_rate * 1000, frame_rate), 213 bitrate_allocator_->GetAllocation(bit_rate * 1000, frame_rate),
205 frame_rate); 214 frame_rate);
206 RTC_CHECK_GE(set_rates_result, 0) << "Failed to update encoder with new rate " 215 RTC_DCHECK_GE(set_rates_result, 0)
207 << bit_rate; 216 << "Failed to update encoder with new rate " << bit_rate;
208 num_dropped_frames_ = 0; 217 num_dropped_frames_ = 0;
209 num_spatial_resizes_ = 0; 218 num_spatial_resizes_ = 0;
210 } 219 }
211 220
221 // TODO(brandtr): Update implementation of EncodedFrameSize and EncodedFrameType
222 // to support batch processing in the caller.
212 size_t VideoProcessorImpl::EncodedFrameSize() { 223 size_t VideoProcessorImpl::EncodedFrameSize() {
213 return encoded_frame_size_; 224 RTC_DCHECK(!frame_infos_.empty());
225 return frame_infos_.back().encoded_frame_size;
214 } 226 }
215 227
216 FrameType VideoProcessorImpl::EncodedFrameType() { 228 FrameType VideoProcessorImpl::EncodedFrameType() {
217 return encoded_frame_type_; 229 RTC_DCHECK(!frame_infos_.empty());
230 return frame_infos_.back().encoded_frame_type;
218 } 231 }
219 232
220 int VideoProcessorImpl::NumberDroppedFrames() { 233 int VideoProcessorImpl::NumberDroppedFrames() {
221 return num_dropped_frames_; 234 return num_dropped_frames_;
222 } 235 }
223 236
224 int VideoProcessorImpl::NumberSpatialResizes() { 237 int VideoProcessorImpl::NumberSpatialResizes() {
225 return num_spatial_resizes_; 238 return num_spatial_resizes_;
226 } 239 }
227 240
228 bool VideoProcessorImpl::ProcessFrame(int frame_number) { 241 bool VideoProcessorImpl::ProcessFrame(int frame_number) {
229 RTC_CHECK_GE(frame_number, 0); 242 RTC_DCHECK_GE(frame_number, 0);
230 RTC_CHECK(initialized_) << "Attempting to use uninitialized VideoProcessor"; 243 RTC_DCHECK_LE(frame_number, frame_infos_.size())
244 << "Must process frames without gaps.";
245 RTC_DCHECK(initialized_) << "Attempting to use uninitialized VideoProcessor";
231 246
232 rtc::scoped_refptr<VideoFrameBuffer> buffer( 247 rtc::scoped_refptr<VideoFrameBuffer> buffer(
233 analysis_frame_reader_->ReadFrame()); 248 analysis_frame_reader_->ReadFrame());
234 if (buffer) {
235 if (source_frame_writer_) {
236 // TODO(brandtr): Introduce temp buffer as data member, to avoid
237 // allocating for every frame.
238 size_t length = CalcBufferSize(kI420, buffer->width(), buffer->height());
239 std::unique_ptr<uint8_t[]> extracted_buffer(new uint8_t[length]);
240 int extracted_length =
241 ExtractBuffer(buffer, length, extracted_buffer.get());
242 RTC_CHECK_EQ(extracted_length, source_frame_writer_->FrameLength());
243 source_frame_writer_->WriteFrame(extracted_buffer.get());
244 }
245 249
246 // Use the frame number as basis for timestamp to identify frames. Let the 250 if (!buffer) {
247 // first timestamp be non-zero, to not make the IvfFileWriter believe that
248 // we want to use capture timestamps in the IVF files.
249 VideoFrame source_frame(buffer,
250 (frame_number + 1) * k90khzTimestampFrameDiff, 0,
251 webrtc::kVideoRotation_0);
252
253 // Ensure we have a new statistics data object we can fill.
254 FrameStatistic& stat = stats_->NewFrame(frame_number);
255
256 // Decide if we are going to force a keyframe.
257 std::vector<FrameType> frame_types(1, kVideoFrameDelta);
258 if (config_.keyframe_interval > 0 &&
259 frame_number % config_.keyframe_interval == 0) {
260 frame_types[0] = kVideoFrameKey;
261 }
262
263 // For dropped frames, we regard them as zero size encoded frames.
264 encoded_frame_size_ = 0;
265 encoded_frame_type_ = kVideoFrameDelta;
266
267 // For the highest measurement accuracy of the encode time, the start/stop
268 // time recordings should wrap the Encode call as tightly as possible.
269 encode_start_ns_ = rtc::TimeNanos();
270 int32_t encode_result =
271 encoder_->Encode(source_frame, nullptr, &frame_types);
272
273 if (encode_result != WEBRTC_VIDEO_CODEC_OK) {
274 fprintf(stderr, "Failed to encode frame %d, return code: %d\n",
275 frame_number, encode_result);
276 }
277 stat.encode_return_code = encode_result;
278
279 return true;
280 } else {
281 // Last frame has been reached. 251 // Last frame has been reached.
282 return false; 252 return false;
283 } 253 }
254
255 if (source_frame_writer_) {
256 size_t length = CalcBufferSize(kI420, buffer->width(), buffer->height());
257 rtc::Buffer extracted_buffer(length);
258 int extracted_length =
259 ExtractBuffer(buffer, length, extracted_buffer.data());
260 RTC_DCHECK_EQ(extracted_length, source_frame_writer_->FrameLength());
261 RTC_CHECK(source_frame_writer_->WriteFrame(extracted_buffer.data()));
262 }
263
264 uint32_t timestamp = FrameNumberToTimestamp(frame_number);
265 VideoFrame source_frame(buffer, timestamp, 0, webrtc::kVideoRotation_0);
266
267 // Store frame information during the different stages of encode and decode.
268 frame_infos_.emplace_back();
269 FrameInfo* frame_info = &frame_infos_.back();
270 frame_info->timestamp = timestamp;
271
272 // Decide if we are going to force a keyframe.
273 std::vector<FrameType> frame_types(1, kVideoFrameDelta);
274 if (config_.keyframe_interval > 0 &&
275 frame_number % config_.keyframe_interval == 0) {
276 frame_types[0] = kVideoFrameKey;
277 }
278
279 // Create frame statistics object used for aggregation at end of test run.
280 FrameStatistic* frame_stat = &stats_->NewFrame(frame_number);
281
282 // For the highest measurement accuracy of the encode time, the start/stop
283 // time recordings should wrap the Encode call as tightly as possible.
284 frame_info->encode_start_ns = rtc::TimeNanos();
285 frame_stat->encode_return_code =
286 encoder_->Encode(source_frame, nullptr, &frame_types);
287
288 if (frame_stat->encode_return_code != WEBRTC_VIDEO_CODEC_OK) {
289 fprintf(stderr, "Failed to encode frame %d, return code: %d\n",
290 frame_number, frame_stat->encode_return_code);
291 }
292
293 return true;
284 } 294 }
285 295
286 void VideoProcessorImpl::FrameEncoded( 296 void VideoProcessorImpl::FrameEncoded(
287 webrtc::VideoCodecType codec, 297 webrtc::VideoCodecType codec,
288 const EncodedImage& encoded_image, 298 const EncodedImage& encoded_image,
289 const webrtc::RTPFragmentationHeader* fragmentation) { 299 const webrtc::RTPFragmentationHeader* fragmentation) {
290 // For the highest measurement accuracy of the encode time, the start/stop 300 // For the highest measurement accuracy of the encode time, the start/stop
291 // time recordings should wrap the Encode call as tightly as possible. 301 // time recordings should wrap the Encode call as tightly as possible.
292 int64_t encode_stop_ns = rtc::TimeNanos(); 302 int64_t encode_stop_ns = rtc::TimeNanos();
293 303
294 if (encoded_frame_writer_) { 304 if (encoded_frame_writer_) {
295 RTC_CHECK(encoded_frame_writer_->WriteFrame(encoded_image, codec)); 305 RTC_CHECK(encoded_frame_writer_->WriteFrame(encoded_image, codec));
296 } 306 }
297 307
298 // Timestamp is proportional to frame number, so this gives us the number 308 // Timestamp is proportional to frame number, so this gives us the number
299 // dropped frames. 309 // dropped frames.
300 int num_dropped_from_prev_encode = 310 int frame_number = TimestampToFrameNumber(encoded_image._timeStamp);
301 (encoded_image._timeStamp - prev_time_stamp_) / k90khzTimestampFrameDiff - 311 bool last_frame_missing = false;
302 1; 312 if (frame_number > 0) {
303 num_dropped_frames_ += num_dropped_from_prev_encode; 313 RTC_DCHECK_GE(last_encoded_frame_num_, 0);
304 prev_time_stamp_ = encoded_image._timeStamp; 314 int num_dropped_from_last_encode =
305 if (num_dropped_from_prev_encode > 0) { 315 frame_number - last_encoded_frame_num_ - 1;
306 // For dropped frames, we write out the last decoded frame to avoid getting 316 RTC_DCHECK_GE(num_dropped_from_last_encode, 0);
307 // out of sync for the computation of PSNR and SSIM. 317 num_dropped_frames_ += num_dropped_from_last_encode;
308 for (int i = 0; i < num_dropped_from_prev_encode; i++) { 318 if (num_dropped_from_last_encode > 0) {
309 RTC_CHECK(analysis_frame_writer_->WriteFrame( 319 // For dropped frames, we write out the last decoded frame to avoid
310 last_successful_frame_buffer_.get())); 320 // getting out of sync for the computation of PSNR and SSIM.
311 if (decoded_frame_writer_) { 321 for (int i = 0; i < num_dropped_from_last_encode; i++) {
312 RTC_CHECK(decoded_frame_writer_->WriteFrame( 322 RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(),
313 last_successful_frame_buffer_.get())); 323 analysis_frame_writer_->FrameLength());
324 RTC_CHECK(analysis_frame_writer_->WriteFrame(
325 last_decoded_frame_buffer_.data()));
326 if (decoded_frame_writer_) {
327 RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(),
328 decoded_frame_writer_->FrameLength());
329 RTC_CHECK(decoded_frame_writer_->WriteFrame(
330 last_decoded_frame_buffer_.data()));
331 }
314 } 332 }
315 } 333 }
334
335 last_frame_missing =
336 (frame_infos_[last_encoded_frame_num_].manipulated_length == 0);
316 } 337 }
338 // Ensure strict monotonicity.
339 RTC_CHECK_GT(frame_number, last_encoded_frame_num_);
340 last_encoded_frame_num_ = frame_number;
317 341
318 // Frame is not dropped, so update the encoded frame size 342 // Frame is not dropped, so update frame information and statistics.
319 // (encoder callback is only called for non-zero length frames). 343 RTC_DCHECK_LT(frame_number, frame_infos_.size());
320 encoded_frame_size_ = encoded_image._length; 344 FrameInfo* frame_info = &frame_infos_[frame_number];
321 encoded_frame_type_ = encoded_image._frameType; 345 frame_info->encoded_frame_size = encoded_image._length;
322 int frame_number = encoded_image._timeStamp / k90khzTimestampFrameDiff - 1; 346 frame_info->encoded_frame_type = encoded_image._frameType;
323 FrameStatistic& stat = stats_->stats_[frame_number]; 347 FrameStatistic* frame_stat = &stats_->stats_[frame_number];
324 stat.encode_time_in_us = 348 frame_stat->encode_time_in_us =
325 GetElapsedTimeMicroseconds(encode_start_ns_, encode_stop_ns); 349 GetElapsedTimeMicroseconds(frame_info->encode_start_ns, encode_stop_ns);
326 stat.encoding_successful = true; 350 frame_stat->encoding_successful = true;
327 stat.encoded_frame_length_in_bytes = encoded_image._length; 351 frame_stat->encoded_frame_length_in_bytes = encoded_image._length;
328 stat.frame_number = frame_number; 352 frame_stat->frame_number = frame_number;
329 stat.frame_type = encoded_image._frameType; 353 frame_stat->frame_type = encoded_image._frameType;
330 stat.qp = encoded_image.qp_; 354 frame_stat->qp = encoded_image.qp_;
331 stat.bit_rate_in_kbps = encoded_image._length * bit_rate_factor_; 355 frame_stat->bit_rate_in_kbps = encoded_image._length * bit_rate_factor_;
332 stat.total_packets = 356 frame_stat->total_packets =
333 encoded_image._length / config_.networking_config.packet_size_in_bytes + 357 encoded_image._length / config_.networking_config.packet_size_in_bytes +
334 1; 358 1;
335 359
336 // Simulate packet loss. 360 // Simulate packet loss.
337 bool exclude_this_frame = false; 361 bool exclude_this_frame = false;
338 if (encoded_image._frameType == kVideoFrameKey) { 362 if (encoded_image._frameType == kVideoFrameKey) {
339 // Only keyframes can be excluded. 363 // Only keyframes can be excluded.
340 switch (config_.exclude_frame_types) { 364 switch (config_.exclude_frame_types) {
341 case kExcludeOnlyFirstKeyFrame: 365 case kExcludeOnlyFirstKeyFrame:
342 if (!first_key_frame_has_been_excluded_) { 366 if (!first_key_frame_has_been_excluded_) {
(...skipping 14 matching lines...) Expand all
357 EncodedImage::GetBufferPaddingBytes(codec); 381 EncodedImage::GetBufferPaddingBytes(codec);
358 std::unique_ptr<uint8_t[]> copied_buffer(new uint8_t[copied_buffer_size]); 382 std::unique_ptr<uint8_t[]> copied_buffer(new uint8_t[copied_buffer_size]);
359 memcpy(copied_buffer.get(), encoded_image._buffer, encoded_image._length); 383 memcpy(copied_buffer.get(), encoded_image._buffer, encoded_image._length);
360 // The image to feed to the decoder. 384 // The image to feed to the decoder.
361 EncodedImage copied_image; 385 EncodedImage copied_image;
362 memcpy(&copied_image, &encoded_image, sizeof(copied_image)); 386 memcpy(&copied_image, &encoded_image, sizeof(copied_image));
363 copied_image._size = copied_buffer_size; 387 copied_image._size = copied_buffer_size;
364 copied_image._buffer = copied_buffer.get(); 388 copied_image._buffer = copied_buffer.get();
365 389
366 if (!exclude_this_frame) { 390 if (!exclude_this_frame) {
367 stat.packets_dropped = 391 frame_stat->packets_dropped =
368 packet_manipulator_->ManipulatePackets(&copied_image); 392 packet_manipulator_->ManipulatePackets(&copied_image);
369 } 393 }
394 frame_info->manipulated_length = copied_image._length;
370 395
371 // Keep track of whether frames are lost due to packet loss so we can tell 396 // Keep track of whether frames are lost due to packet loss so we can tell
372 // this to the encoder (this is handled by the RTP logic in the full stack). 397 // this to the encoder (this is handled by the RTP logic in the full stack).
373 // TODO(kjellander): Pass fragmentation header to the decoder when 398 // TODO(kjellander): Pass fragmentation header to the decoder when
374 // CL 172001 has been submitted and PacketManipulator supports this. 399 // CL 172001 has been submitted and PacketManipulator supports this.
375 400
376 // For the highest measurement accuracy of the decode time, the start/stop 401 // For the highest measurement accuracy of the decode time, the start/stop
377 // time recordings should wrap the Decode call as tightly as possible. 402 // time recordings should wrap the Decode call as tightly as possible.
378 decode_start_ns_ = rtc::TimeNanos(); 403 frame_info->decode_start_ns = rtc::TimeNanos();
379 int32_t decode_result = 404 frame_stat->decode_return_code =
380 decoder_->Decode(copied_image, last_frame_missing_, nullptr); 405 decoder_->Decode(copied_image, last_frame_missing, nullptr);
381 stat.decode_return_code = decode_result;
382 406
383 if (decode_result != WEBRTC_VIDEO_CODEC_OK) { 407 if (frame_stat->decode_return_code != WEBRTC_VIDEO_CODEC_OK) {
384 // Write the last successful frame to the output file to avoid getting it 408 // Write the last successful frame to the output file to avoid getting it
385 // of sync with the source file for SSIM and PSNR comparisons. 409 // of sync with the source file for SSIM and PSNR comparisons.
386 RTC_CHECK(analysis_frame_writer_->WriteFrame( 410 RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(),
387 last_successful_frame_buffer_.get())); 411 analysis_frame_writer_->FrameLength());
412 RTC_CHECK(
413 analysis_frame_writer_->WriteFrame(last_decoded_frame_buffer_.data()));
388 if (decoded_frame_writer_) { 414 if (decoded_frame_writer_) {
389 RTC_CHECK(decoded_frame_writer_->WriteFrame( 415 RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(),
390 last_successful_frame_buffer_.get())); 416 decoded_frame_writer_->FrameLength());
417 RTC_CHECK(
418 decoded_frame_writer_->WriteFrame(last_decoded_frame_buffer_.data()));
391 } 419 }
392 } 420 }
393
394 // Save status for losses so we can inform the decoder for the next frame.
395 last_frame_missing_ = copied_image._length == 0;
396 } 421 }
397 422
398 void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) { 423 void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
399 // For the highest measurement accuracy of the decode time, the start/stop 424 // For the highest measurement accuracy of the decode time, the start/stop
400 // time recordings should wrap the Decode call as tightly as possible. 425 // time recordings should wrap the Decode call as tightly as possible.
401 int64_t decode_stop_ns = rtc::TimeNanos(); 426 int64_t decode_stop_ns = rtc::TimeNanos();
402 427
403 // Report stats. 428 // Update frame information and statistics.
404 int frame_number = image.timestamp() / k90khzTimestampFrameDiff - 1; 429 int frame_number = TimestampToFrameNumber(image.timestamp());
405 FrameStatistic& stat = stats_->stats_[frame_number]; 430 RTC_DCHECK_LT(frame_number, frame_infos_.size());
406 stat.decode_time_in_us = 431 FrameInfo* frame_info = &frame_infos_[frame_number];
407 GetElapsedTimeMicroseconds(decode_start_ns_, decode_stop_ns); 432 frame_info->decoded_width = image.width();
408 stat.decoding_successful = true; 433 frame_info->decoded_height = image.height();
434 FrameStatistic* frame_stat = &stats_->stats_[frame_number];
435 frame_stat->decode_time_in_us =
436 GetElapsedTimeMicroseconds(frame_info->decode_start_ns, decode_stop_ns);
437 frame_stat->decoding_successful = true;
409 438
410 // Check for resize action (either down or up). 439 // Check if the codecs have resized the frame since previously decoded frame.
411 if (static_cast<int>(image.width()) != last_encoder_frame_width_ || 440 if (frame_number > 0) {
412 static_cast<int>(image.height()) != last_encoder_frame_height_) { 441 RTC_DCHECK_GE(last_decoded_frame_num_, 0);
413 ++num_spatial_resizes_; 442 const FrameInfo& last_decoded_frame_info =
414 last_encoder_frame_width_ = image.width(); 443 frame_infos_[last_decoded_frame_num_];
415 last_encoder_frame_height_ = image.height(); 444 if (static_cast<int>(image.width()) !=
445 last_decoded_frame_info.decoded_width ||
446 static_cast<int>(image.height()) !=
447 last_decoded_frame_info.decoded_height) {
448 ++num_spatial_resizes_;
449 }
416 } 450 }
417 // Check if codec size is different from native/original size, and if so, 451 // Ensure strict monotonicity.
418 // upsample back to original size. This is needed for PSNR and SSIM 452 RTC_CHECK_GT(frame_number, last_decoded_frame_num_);
453 last_decoded_frame_num_ = frame_number;
454
455 // Check if codec size is different from the original size, and if so,
456 // scale back to original size. This is needed for the PSNR and SSIM
419 // calculations. 457 // calculations.
458 size_t extracted_length;
459 rtc::Buffer extracted_buffer;
420 if (image.width() != config_.codec_settings->width || 460 if (image.width() != config_.codec_settings->width ||
421 image.height() != config_.codec_settings->height) { 461 image.height() != config_.codec_settings->height) {
422 rtc::scoped_refptr<I420Buffer> up_image( 462 rtc::scoped_refptr<I420Buffer> scaled_buffer(I420Buffer::Create(
423 I420Buffer::Create(config_.codec_settings->width, 463 config_.codec_settings->width, config_.codec_settings->height));
424 config_.codec_settings->height));
425
426 // Should be the same aspect ratio, no cropping needed. 464 // Should be the same aspect ratio, no cropping needed.
427 if (image.video_frame_buffer()->native_handle()) { 465 if (image.video_frame_buffer()->native_handle()) {
428 up_image->ScaleFrom(*image.video_frame_buffer()->NativeToI420Buffer()); 466 scaled_buffer->ScaleFrom(
467 *image.video_frame_buffer()->NativeToI420Buffer());
429 } else { 468 } else {
430 up_image->ScaleFrom(*image.video_frame_buffer()); 469 scaled_buffer->ScaleFrom(*image.video_frame_buffer());
431 } 470 }
432 471
433 // TODO(mikhal): Extracting the buffer for now - need to update test.
434 size_t length = 472 size_t length =
435 CalcBufferSize(kI420, up_image->width(), up_image->height()); 473 CalcBufferSize(kI420, scaled_buffer->width(), scaled_buffer->height());
436 std::unique_ptr<uint8_t[]> image_buffer(new uint8_t[length]); 474 extracted_buffer.SetSize(length);
437 int extracted_length = ExtractBuffer(up_image, length, image_buffer.get()); 475 extracted_length =
438 RTC_CHECK_GT(extracted_length, 0); 476 ExtractBuffer(scaled_buffer, length, extracted_buffer.data());
439 // Update our copy of the last successful frame. 477 } else {
440 memcpy(last_successful_frame_buffer_.get(), image_buffer.get(), 478 // No resize.
441 extracted_length);
442
443 RTC_CHECK(analysis_frame_writer_->WriteFrame(image_buffer.get()));
444 if (decoded_frame_writer_) {
445 RTC_CHECK(decoded_frame_writer_->WriteFrame(image_buffer.get()));
446 }
447 } else { // No resize.
448 // Update our copy of the last successful frame.
449 // TODO(mikhal): Add as a member function, so won't be allocated per frame.
450 size_t length = CalcBufferSize(kI420, image.width(), image.height()); 479 size_t length = CalcBufferSize(kI420, image.width(), image.height());
451 std::unique_ptr<uint8_t[]> image_buffer(new uint8_t[length]); 480 extracted_buffer.SetSize(length);
452 int extracted_length;
453 if (image.video_frame_buffer()->native_handle()) { 481 if (image.video_frame_buffer()->native_handle()) {
454 extracted_length = 482 extracted_length =
455 ExtractBuffer(image.video_frame_buffer()->NativeToI420Buffer(), 483 ExtractBuffer(image.video_frame_buffer()->NativeToI420Buffer(),
456 length, image_buffer.get()); 484 length, extracted_buffer.data());
457 } else { 485 } else {
458 extracted_length = 486 extracted_length = ExtractBuffer(image.video_frame_buffer(), length,
459 ExtractBuffer(image.video_frame_buffer(), length, image_buffer.get()); 487 extracted_buffer.data());
460 }
461 RTC_CHECK_GT(extracted_length, 0);
462 memcpy(last_successful_frame_buffer_.get(), image_buffer.get(),
463 extracted_length);
464
465 RTC_CHECK(analysis_frame_writer_->WriteFrame(image_buffer.get()));
466 if (decoded_frame_writer_) {
467 RTC_CHECK(decoded_frame_writer_->WriteFrame(image_buffer.get()));
468 } 488 }
469 } 489 }
490
491 RTC_DCHECK_EQ(extracted_length, analysis_frame_writer_->FrameLength());
492 RTC_CHECK(analysis_frame_writer_->WriteFrame(extracted_buffer.data()));
493 if (decoded_frame_writer_) {
494 RTC_DCHECK_EQ(extracted_length, decoded_frame_writer_->FrameLength());
495 RTC_CHECK(decoded_frame_writer_->WriteFrame(extracted_buffer.data()));
496 }
497
498 last_decoded_frame_buffer_ = std::move(extracted_buffer);
470 } 499 }
471 500
472 } // namespace test 501 } // namespace test
473 } // namespace webrtc 502 } // namespace webrtc
OLDNEW
« no previous file with comments | « webrtc/modules/video_coding/codecs/test/videoprocessor.h ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698