| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 132 matching lines...) |
| 143 encoder_(encoder), | 143 encoder_(encoder), |
| 144 decoder_(decoder), | 144 decoder_(decoder), |
| 145 bitrate_allocator_(CreateBitrateAllocator(&config_)), | 145 bitrate_allocator_(CreateBitrateAllocator(&config_)), |
| 146 encode_callback_(this), | 146 encode_callback_(this), |
| 147 decode_callback_(this), | 147 decode_callback_(this), |
| 148 packet_manipulator_(packet_manipulator), | 148 packet_manipulator_(packet_manipulator), |
| 149 analysis_frame_reader_(analysis_frame_reader), | 149 analysis_frame_reader_(analysis_frame_reader), |
| 150 analysis_frame_writer_(analysis_frame_writer), | 150 analysis_frame_writer_(analysis_frame_writer), |
| 151 encoded_frame_writer_(encoded_frame_writer), | 151 encoded_frame_writer_(encoded_frame_writer), |
| 152 decoded_frame_writer_(decoded_frame_writer), | 152 decoded_frame_writer_(decoded_frame_writer), |
| | 153 last_inputed_frame_num_(-1), |
| 153 last_encoded_frame_num_(-1), | 154 last_encoded_frame_num_(-1), |
| 154 last_decoded_frame_num_(-1), | 155 last_decoded_frame_num_(-1), |
| 155 first_key_frame_has_been_excluded_(false), | 156 first_key_frame_has_been_excluded_(false), |
| 156 last_decoded_frame_buffer_(analysis_frame_reader->FrameLength()), | 157 last_decoded_frame_buffer_(analysis_frame_reader->FrameLength()), |
| 157 stats_(stats), | 158 stats_(stats), |
| 158 rate_update_index_(-1) { | 159 rate_update_index_(-1) { |
| 159 RTC_DCHECK(encoder); | 160 RTC_DCHECK(encoder); |
| 160 RTC_DCHECK(decoder); | 161 RTC_DCHECK(decoder); |
| 161 RTC_DCHECK(packet_manipulator); | 162 RTC_DCHECK(packet_manipulator); |
| 162 RTC_DCHECK(analysis_frame_reader); | 163 RTC_DCHECK(analysis_frame_reader); |
| 163 RTC_DCHECK(analysis_frame_writer); | 164 RTC_DCHECK(analysis_frame_writer); |
| 164 RTC_DCHECK(stats); | 165 RTC_DCHECK(stats); |
| 165 frame_infos_.reserve(analysis_frame_reader->NumberOfFrames()); | |
| 166 } | 166 } |
| 167 | 167 |
| 168 VideoProcessor::~VideoProcessor() = default; | 168 VideoProcessor::~VideoProcessor() = default; |
| 169 | 169 |
| 170 void VideoProcessor::Init() { | 170 void VideoProcessor::Init() { |
| 171 RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_); | 171 RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_); |
| 172 RTC_DCHECK(!initialized_) << "VideoProcessor already initialized."; | 172 RTC_DCHECK(!initialized_) << "VideoProcessor already initialized."; |
| 173 initialized_ = true; | 173 initialized_ = true; |
| 174 | 174 |
| 175 // Setup required callbacks for the encoder and decoder. | 175 // Setup required callbacks for the encoder and decoder. |
| (...skipping 42 matching lines...) |
| 218 | 218 |
| 219 RTC_CHECK_EQ(encoder_->Release(), WEBRTC_VIDEO_CODEC_OK); | 219 RTC_CHECK_EQ(encoder_->Release(), WEBRTC_VIDEO_CODEC_OK); |
| 220 RTC_CHECK_EQ(decoder_->Release(), WEBRTC_VIDEO_CODEC_OK); | 220 RTC_CHECK_EQ(decoder_->Release(), WEBRTC_VIDEO_CODEC_OK); |
| 221 | 221 |
| 222 encoder_->RegisterEncodeCompleteCallback(nullptr); | 222 encoder_->RegisterEncodeCompleteCallback(nullptr); |
| 223 decoder_->RegisterDecodeCompleteCallback(nullptr); | 223 decoder_->RegisterDecodeCompleteCallback(nullptr); |
| 224 | 224 |
| 225 initialized_ = false; | 225 initialized_ = false; |
| 226 } | 226 } |
| 227 | 227 |
| 228 void VideoProcessor::ProcessFrame(int frame_number) { | 228 void VideoProcessor::ProcessFrame() { |
| 229 RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_); | 229 RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_); |
| 230 RTC_DCHECK_EQ(frame_number, frame_infos_.size()) | |
| 231 << "Must process frames in sequence."; | |
| 232 RTC_DCHECK(initialized_) << "VideoProcessor not initialized."; | 230 RTC_DCHECK(initialized_) << "VideoProcessor not initialized."; |
| | 231 ++last_inputed_frame_num_; |
| 233 | 232 |
| 234 // Get frame from file. | 233 // Get frame from file. |
| 235 rtc::scoped_refptr<I420BufferInterface> buffer( | 234 rtc::scoped_refptr<I420BufferInterface> buffer( |
| 236 analysis_frame_reader_->ReadFrame()); | 235 analysis_frame_reader_->ReadFrame()); |
| 237 RTC_CHECK(buffer) << "Tried to read too many frames from the file."; | 236 RTC_CHECK(buffer) << "Tried to read too many frames from the file."; |
| 238 // Use the frame number as the basis for timestamp to identify frames. Let the | 237 // Use the frame number as the basis for timestamp to identify frames. Let the |
| 239 // first timestamp be non-zero, to not make the IvfFileWriter believe that we | 238 // first timestamp be non-zero, to not make the IvfFileWriter believe that we |
| 240 // want to use capture timestamps in the IVF files. | 239 // want to use capture timestamps in the IVF files. |
| 241 const uint32_t rtp_timestamp = (frame_number + 1) * kRtpClockRateHz / | 240 const uint32_t rtp_timestamp = (last_inputed_frame_num_ + 1) * |
| | 241 kRtpClockRateHz / |
| 242 config_.codec_settings.maxFramerate; | 242 config_.codec_settings.maxFramerate; |
| 243 rtp_timestamp_to_frame_num_[rtp_timestamp] = frame_number; | 243 rtp_timestamp_to_frame_num_[rtp_timestamp] = last_inputed_frame_num_; |
| 244 const int64_t kNoRenderTime = 0; | 244 const int64_t kNoRenderTime = 0; |
| 245 VideoFrame source_frame(buffer, rtp_timestamp, kNoRenderTime, | 245 VideoFrame source_frame(buffer, rtp_timestamp, kNoRenderTime, |
| 246 webrtc::kVideoRotation_0); | 246 webrtc::kVideoRotation_0); |
| 247 | 247 |
| 248 // Decide if we are going to force a keyframe. | 248 // Decide if we are going to force a keyframe. |
| 249 std::vector<FrameType> frame_types(1, kVideoFrameDelta); | 249 std::vector<FrameType> frame_types(1, kVideoFrameDelta); |
| 250 if (config_.keyframe_interval > 0 && | 250 if (config_.keyframe_interval > 0 && |
| 251 frame_number % config_.keyframe_interval == 0) { | 251 last_inputed_frame_num_ % config_.keyframe_interval == 0) { |
| 252 frame_types[0] = kVideoFrameKey; | 252 frame_types[0] = kVideoFrameKey; |
| 253 } | 253 } |
| 254 | 254 |
| 255 // Store frame information during the different stages of encode and decode. | |
| 256 frame_infos_.emplace_back(); | |
| 257 FrameInfo* frame_info = &frame_infos_.back(); | |
| 258 | |
| 259 // Create frame statistics object used for aggregation at end of test run. | 255 // Create frame statistics object used for aggregation at end of test run. |
| 260 FrameStatistic* frame_stat = &stats_->NewFrame(frame_number); | 256 FrameStatistic* frame_stat = stats_->AddFrame(); |
| 261 | 257 |
| 262 // For the highest measurement accuracy of the encode time, the start/stop | 258 // For the highest measurement accuracy of the encode time, the start/stop |
| 263 // time recordings should wrap the Encode call as tightly as possible. | 259 // time recordings should wrap the Encode call as tightly as possible. |
| 264 frame_info->encode_start_ns = rtc::TimeNanos(); | 260 frame_stat->encode_start_ns = rtc::TimeNanos(); |
| 265 frame_stat->encode_return_code = | 261 frame_stat->encode_return_code = |
| 266 encoder_->Encode(source_frame, nullptr, &frame_types); | 262 encoder_->Encode(source_frame, nullptr, &frame_types); |
| 267 | 263 |
| 268 if (frame_stat->encode_return_code != WEBRTC_VIDEO_CODEC_OK) { | 264 if (frame_stat->encode_return_code != WEBRTC_VIDEO_CODEC_OK) { |
| 269 LOG(LS_WARNING) << "Failed to encode frame " << frame_number | 265 LOG(LS_WARNING) << "Failed to encode frame " << last_inputed_frame_num_ |
| 270 << ", return code: " << frame_stat->encode_return_code | 266 << ", return code: " << frame_stat->encode_return_code |
| 271 << "."; | 267 << "."; |
| 272 } | 268 } |
| 273 } | 269 } |
| 274 | 270 |
| 275 void VideoProcessor::SetRates(int bitrate_kbps, int framerate_fps) { | 271 void VideoProcessor::SetRates(int bitrate_kbps, int framerate_fps) { |
| 276 RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_); | 272 RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_); |
| 277 config_.codec_settings.maxFramerate = framerate_fps; | 273 config_.codec_settings.maxFramerate = framerate_fps; |
| 278 int set_rates_result = encoder_->SetRateAllocation( | 274 int set_rates_result = encoder_->SetRateAllocation( |
| 279 bitrate_allocator_->GetAllocation(bitrate_kbps * 1000, framerate_fps), | 275 bitrate_allocator_->GetAllocation(bitrate_kbps * 1000, framerate_fps), |
| (...skipping 16 matching lines...) |
| 296 } | 292 } |
| 297 | 293 |
| 298 void VideoProcessor::FrameEncoded(webrtc::VideoCodecType codec, | 294 void VideoProcessor::FrameEncoded(webrtc::VideoCodecType codec, |
| 299 const EncodedImage& encoded_image) { | 295 const EncodedImage& encoded_image) { |
| 300 RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_); | 296 RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_); |
| 301 | 297 |
| 302 // For the highest measurement accuracy of the encode time, the start/stop | 298 // For the highest measurement accuracy of the encode time, the start/stop |
| 303 // time recordings should wrap the Encode call as tightly as possible. | 299 // time recordings should wrap the Encode call as tightly as possible. |
| 304 int64_t encode_stop_ns = rtc::TimeNanos(); | 300 int64_t encode_stop_ns = rtc::TimeNanos(); |
| 305 | 301 |
| 306 if (encoded_frame_writer_) { | 302 // Take the opportunity to verify the QP bitstream parser. |
| 307 RTC_CHECK(encoded_frame_writer_->WriteFrame(encoded_image, codec)); | 303 VerifyQpParser(encoded_image, config_); |
| 308 } | |
| 309 | 304 |
| 310 // Check for dropped frames. | 305 // Check for dropped frames. |
| 311 const int frame_number = | 306 const int frame_number = |
| 312 rtp_timestamp_to_frame_num_[encoded_image._timeStamp]; | 307 rtp_timestamp_to_frame_num_[encoded_image._timeStamp]; |
| 313 bool last_frame_missing = false; | 308 bool last_frame_missing = false; |
| 314 if (frame_number > 0) { | 309 if (frame_number > 0) { |
| 315 RTC_DCHECK_GE(last_encoded_frame_num_, 0); | 310 RTC_DCHECK_GE(last_encoded_frame_num_, 0); |
| 316 int num_dropped_from_last_encode = | 311 int num_dropped_from_last_encode = |
| 317 frame_number - last_encoded_frame_num_ - 1; | 312 frame_number - last_encoded_frame_num_ - 1; |
| 318 RTC_DCHECK_GE(num_dropped_from_last_encode, 0); | 313 RTC_DCHECK_GE(num_dropped_from_last_encode, 0); |
| 319 RTC_CHECK_GE(rate_update_index_, 0); | 314 RTC_CHECK_GE(rate_update_index_, 0); |
| 320 num_dropped_frames_[rate_update_index_] += num_dropped_from_last_encode; | 315 num_dropped_frames_[rate_update_index_] += num_dropped_from_last_encode; |
| 321 if (num_dropped_from_last_encode > 0) { | 316 if (num_dropped_from_last_encode > 0) { |
| 322 // For dropped frames, we write out the last decoded frame to avoid | 317 // For dropped frames, we write out the last decoded frame to avoid |
| 323 // getting out of sync for the computation of PSNR and SSIM. | 318 // getting out of sync for the computation of PSNR and SSIM. |
| 324 for (int i = 0; i < num_dropped_from_last_encode; i++) { | 319 for (int i = 0; i < num_dropped_from_last_encode; i++) { |
| 325 RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(), | 320 RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(), |
| 326 analysis_frame_writer_->FrameLength()); | 321 analysis_frame_writer_->FrameLength()); |
| 327 RTC_CHECK(analysis_frame_writer_->WriteFrame( | 322 RTC_CHECK(analysis_frame_writer_->WriteFrame( |
| 328 last_decoded_frame_buffer_.data())); | 323 last_decoded_frame_buffer_.data())); |
| 329 if (decoded_frame_writer_) { | 324 if (decoded_frame_writer_) { |
| 330 RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(), | 325 RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(), |
| 331 decoded_frame_writer_->FrameLength()); | 326 decoded_frame_writer_->FrameLength()); |
| 332 RTC_CHECK(decoded_frame_writer_->WriteFrame( | 327 RTC_CHECK(decoded_frame_writer_->WriteFrame( |
| 333 last_decoded_frame_buffer_.data())); | 328 last_decoded_frame_buffer_.data())); |
| 334 } | 329 } |
| 335 } | 330 } |
| 336 } | 331 } |
| 337 last_frame_missing = | 332 const FrameStatistic* last_encoded_frame_stat = |
| 338 (frame_infos_[last_encoded_frame_num_].manipulated_length == 0); | 333 stats_->GetFrame(last_encoded_frame_num_); |
| | 334 last_frame_missing = (last_encoded_frame_stat->manipulated_length == 0); |
| 339 } | 335 } |
| 340 // Ensure strict monotonicity. | 336 // Ensure strict monotonicity. |
| 341 RTC_CHECK_GT(frame_number, last_encoded_frame_num_); | 337 RTC_CHECK_GT(frame_number, last_encoded_frame_num_); |
| 342 last_encoded_frame_num_ = frame_number; | 338 last_encoded_frame_num_ = frame_number; |
| 343 | 339 |
| 344 // Update frame information and statistics. | 340 // Update frame statistics. |
| 345 VerifyQpParser(encoded_image, config_); | 341 FrameStatistic* frame_stat = stats_->GetFrame(frame_number); |
| 346 RTC_CHECK_LT(frame_number, frame_infos_.size()); | 342 frame_stat->encode_time_us = |
| 347 FrameInfo* frame_info = &frame_infos_[frame_number]; | 343 GetElapsedTimeMicroseconds(frame_stat->encode_start_ns, encode_stop_ns); |
| 348 FrameStatistic* frame_stat = &stats_->stats_[frame_number]; | |
| 349 frame_stat->encode_time_in_us = | |
| 350 GetElapsedTimeMicroseconds(frame_info->encode_start_ns, encode_stop_ns); | |
| 351 frame_stat->encoding_successful = true; | 344 frame_stat->encoding_successful = true; |
| 352 frame_stat->encoded_frame_length_in_bytes = encoded_image._length; | 345 frame_stat->encoded_frame_size_bytes = encoded_image._length; |
| 353 frame_stat->frame_number = frame_number; | |
| 354 frame_stat->frame_type = encoded_image._frameType; | 346 frame_stat->frame_type = encoded_image._frameType; |
| 355 frame_stat->qp = encoded_image.qp_; | 347 frame_stat->qp = encoded_image.qp_; |
| 356 frame_stat->bit_rate_in_kbps = static_cast<int>( | 348 frame_stat->bitrate_kbps = static_cast<int>( |
| 357 encoded_image._length * config_.codec_settings.maxFramerate * 8 / 1000); | 349 encoded_image._length * config_.codec_settings.maxFramerate * 8 / 1000); |
| 358 frame_stat->total_packets = | 350 frame_stat->total_packets = |
| 359 encoded_image._length / config_.networking_config.packet_size_in_bytes + | 351 encoded_image._length / config_.networking_config.packet_size_in_bytes + |
| 360 1; | 352 1; |
| 361 | 353 |
| 362 // Simulate packet loss. | 354 // Simulate packet loss. |
| 363 bool exclude_this_frame = false; | 355 bool exclude_this_frame = false; |
| 364 if (encoded_image._frameType == kVideoFrameKey) { | 356 if (encoded_image._frameType == kVideoFrameKey) { |
| 365 // Only keyframes can be excluded. | 357 // Only keyframes can be excluded. |
| 366 switch (config_.exclude_frame_types) { | 358 switch (config_.exclude_frame_types) { |
| (...skipping 19 matching lines...) |
| 386 // The image to feed to the decoder. | 378 // The image to feed to the decoder. |
| 387 EncodedImage copied_image; | 379 EncodedImage copied_image; |
| 388 memcpy(&copied_image, &encoded_image, sizeof(copied_image)); | 380 memcpy(&copied_image, &encoded_image, sizeof(copied_image)); |
| 389 copied_image._size = copied_buffer_size; | 381 copied_image._size = copied_buffer_size; |
| 390 copied_image._buffer = copied_buffer.get(); | 382 copied_image._buffer = copied_buffer.get(); |
| 391 | 383 |
| 392 if (!exclude_this_frame) { | 384 if (!exclude_this_frame) { |
| 393 frame_stat->packets_dropped = | 385 frame_stat->packets_dropped = |
| 394 packet_manipulator_->ManipulatePackets(&copied_image); | 386 packet_manipulator_->ManipulatePackets(&copied_image); |
| 395 } | 387 } |
| 396 frame_info->manipulated_length = copied_image._length; | 388 frame_stat->manipulated_length = copied_image._length; |
| 397 | 389 |
| 398 // For the highest measurement accuracy of the decode time, the start/stop | 390 // For the highest measurement accuracy of the decode time, the start/stop |
| 399 // time recordings should wrap the Decode call as tightly as possible. | 391 // time recordings should wrap the Decode call as tightly as possible. |
| 400 frame_info->decode_start_ns = rtc::TimeNanos(); | 392 frame_stat->decode_start_ns = rtc::TimeNanos(); |
| 401 frame_stat->decode_return_code = | 393 frame_stat->decode_return_code = |
| 402 decoder_->Decode(copied_image, last_frame_missing, nullptr); | 394 decoder_->Decode(copied_image, last_frame_missing, nullptr); |
| 403 | 395 |
| 404 if (frame_stat->decode_return_code != WEBRTC_VIDEO_CODEC_OK) { | 396 if (frame_stat->decode_return_code != WEBRTC_VIDEO_CODEC_OK) { |
| 405 // Write the last successful frame to the output file to avoid getting it out | 397 // Write the last successful frame to the output file to avoid getting it out |
| 406 // of sync with the source file for SSIM and PSNR comparisons. | 398 // of sync with the source file for SSIM and PSNR comparisons. |
| 407 RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(), | 399 RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(), |
| 408 analysis_frame_writer_->FrameLength()); | 400 analysis_frame_writer_->FrameLength()); |
| 409 RTC_CHECK( | 401 RTC_CHECK( |
| 410 analysis_frame_writer_->WriteFrame(last_decoded_frame_buffer_.data())); | 402 analysis_frame_writer_->WriteFrame(last_decoded_frame_buffer_.data())); |
| 411 if (decoded_frame_writer_) { | 403 if (decoded_frame_writer_) { |
| 412 RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(), | 404 RTC_DCHECK_EQ(last_decoded_frame_buffer_.size(), |
| 413 decoded_frame_writer_->FrameLength()); | 405 decoded_frame_writer_->FrameLength()); |
| 414 RTC_CHECK( | 406 RTC_CHECK( |
| 415 decoded_frame_writer_->WriteFrame(last_decoded_frame_buffer_.data())); | 407 decoded_frame_writer_->WriteFrame(last_decoded_frame_buffer_.data())); |
| 416 } | 408 } |
| 417 } | 409 } |
| | 410 |
| | 411 if (encoded_frame_writer_) { |
| | 412 RTC_CHECK(encoded_frame_writer_->WriteFrame(encoded_image, codec)); |
| | 413 } |
| 418 } | 414 } |
| 419 | 415 |
| 420 void VideoProcessor::FrameDecoded(const VideoFrame& image) { | 416 void VideoProcessor::FrameDecoded(const VideoFrame& image) { |
| 421 RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_); | 417 RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_); |
| 422 | 418 |
| 423 // For the highest measurement accuracy of the decode time, the start/stop | 419 // For the highest measurement accuracy of the decode time, the start/stop |
| 424 // time recordings should wrap the Decode call as tightly as possible. | 420 // time recordings should wrap the Decode call as tightly as possible. |
| 425 int64_t decode_stop_ns = rtc::TimeNanos(); | 421 int64_t decode_stop_ns = rtc::TimeNanos(); |
| 426 | 422 |
| 427 // Update frame information and statistics. | 423 // Update frame statistics. |
| 428 const int frame_number = rtp_timestamp_to_frame_num_[image.timestamp()]; | 424 const int frame_number = rtp_timestamp_to_frame_num_[image.timestamp()]; |
| 429 RTC_CHECK_LT(frame_number, frame_infos_.size()); | 425 FrameStatistic* frame_stat = stats_->GetFrame(frame_number); |
| 430 FrameInfo* frame_info = &frame_infos_[frame_number]; | 426 frame_stat->decoded_width = image.width(); |
| 431 frame_info->decoded_width = image.width(); | 427 frame_stat->decoded_height = image.height(); |
| 432 frame_info->decoded_height = image.height(); | 428 frame_stat->decode_time_us = |
| 433 FrameStatistic* frame_stat = &stats_->stats_[frame_number]; | 429 GetElapsedTimeMicroseconds(frame_stat->decode_start_ns, decode_stop_ns); |
| 434 frame_stat->decode_time_in_us = | |
| 435 GetElapsedTimeMicroseconds(frame_info->decode_start_ns, decode_stop_ns); | |
| 436 frame_stat->decoding_successful = true; | 430 frame_stat->decoding_successful = true; |
| 437 | 431 |
| 438 // Check if the codecs have resized the frame since the previously decoded frame. | 432 // Check if the codecs have resized the frame since the previously decoded frame. |
| 439 if (frame_number > 0) { | 433 if (frame_number > 0) { |
| 440 RTC_CHECK_GE(last_decoded_frame_num_, 0); | 434 RTC_CHECK_GE(last_decoded_frame_num_, 0); |
| 441 const FrameInfo& last_decoded_frame_info = | 435 const FrameStatistic* last_decoded_frame_stat = |
| 442 frame_infos_[last_decoded_frame_num_]; | 436 stats_->GetFrame(last_decoded_frame_num_); |
| 443 if (static_cast<int>(image.width()) != | 437 if (static_cast<int>(image.width()) != |
| 444 last_decoded_frame_info.decoded_width || | 438 last_decoded_frame_stat->decoded_width || |
| 445 static_cast<int>(image.height()) != | 439 static_cast<int>(image.height()) != |
| 446 last_decoded_frame_info.decoded_height) { | 440 last_decoded_frame_stat->decoded_height) { |
| 447 RTC_CHECK_GE(rate_update_index_, 0); | 441 RTC_CHECK_GE(rate_update_index_, 0); |
| 448 ++num_spatial_resizes_[rate_update_index_]; | 442 ++num_spatial_resizes_[rate_update_index_]; |
| 449 } | 443 } |
| 450 } | 444 } |
| 451 // Ensure strict monotonicity. | 445 // Ensure strict monotonicity. |
| 452 RTC_CHECK_GT(frame_number, last_decoded_frame_num_); | 446 RTC_CHECK_GT(frame_number, last_decoded_frame_num_); |
| 453 last_decoded_frame_num_ = frame_number; | 447 last_decoded_frame_num_ = frame_number; |
| 454 | 448 |
| 455 // Check if codec size is different from the original size, and if so, | 449 // Check if frame size is different from the original size, and if so, |
| 456 // scale back to original size. This is needed for the PSNR and SSIM | 450 // scale back to original size. This is needed for the PSNR and SSIM |
| 457 // calculations. | 451 // calculations. |
| 458 size_t extracted_length; | 452 size_t extracted_length; |
| 459 rtc::Buffer extracted_buffer; | 453 rtc::Buffer extracted_buffer; |
| 460 if (image.width() != config_.codec_settings.width || | 454 if (image.width() != config_.codec_settings.width || |
| 461 image.height() != config_.codec_settings.height) { | 455 image.height() != config_.codec_settings.height) { |
| 462 rtc::scoped_refptr<I420Buffer> scaled_buffer(I420Buffer::Create( | 456 rtc::scoped_refptr<I420Buffer> scaled_buffer(I420Buffer::Create( |
| 463 config_.codec_settings.width, config_.codec_settings.height)); | 457 config_.codec_settings.width, config_.codec_settings.height)); |
| 464 // Should be the same aspect ratio, no cropping needed. | 458 // Should be the same aspect ratio, no cropping needed. |
| 465 scaled_buffer->ScaleFrom(*image.video_frame_buffer()->ToI420()); | 459 scaled_buffer->ScaleFrom(*image.video_frame_buffer()->ToI420()); |
| (...skipping 17 matching lines...) |
| 483 if (decoded_frame_writer_) { | 477 if (decoded_frame_writer_) { |
| 484 RTC_DCHECK_EQ(extracted_length, decoded_frame_writer_->FrameLength()); | 478 RTC_DCHECK_EQ(extracted_length, decoded_frame_writer_->FrameLength()); |
| 485 RTC_CHECK(decoded_frame_writer_->WriteFrame(extracted_buffer.data())); | 479 RTC_CHECK(decoded_frame_writer_->WriteFrame(extracted_buffer.data())); |
| 486 } | 480 } |
| 487 | 481 |
| 488 last_decoded_frame_buffer_ = std::move(extracted_buffer); | 482 last_decoded_frame_buffer_ = std::move(extracted_buffer); |
| 489 } | 483 } |
| 490 | 484 |
| 491 } // namespace test | 485 } // namespace test |
| 492 } // namespace webrtc | 486 } // namespace webrtc |
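A side note on the timestamp bookkeeping that ProcessFrame(), FrameEncoded() and FrameDecoded() rely on in this patch: each input frame is assigned an RTP timestamp derived from its index and the configured max framerate, and rtp_timestamp_to_frame_num_ maps those timestamps back to frame numbers in the asynchronous callbacks. The minimal sketch below illustrates that idea in isolation; it assumes the standard 90 kHz RTP video clock for kRtpClockRateHz and uses illustrative constants and names, not the patch's actual types.

```cpp
#include <cstdint>
#include <iostream>
#include <map>

namespace {

// Illustrative constants; the real values come from the test config.
constexpr uint32_t kRtpClockRateHz = 90000;  // Standard RTP video clock (assumed).
constexpr uint32_t kMaxFramerate = 30;       // Stand-in for config_.codec_settings.maxFramerate.

// Frame index -> RTP timestamp. The "+ 1" keeps the first timestamp non-zero,
// matching the comment in ProcessFrame() about the IvfFileWriter.
uint32_t RtpTimestampForFrame(int frame_num) {
  return (frame_num + 1) * kRtpClockRateHz / kMaxFramerate;
}

}  // namespace

int main() {
  std::map<uint32_t, int> rtp_timestamp_to_frame_num;

  // "ProcessFrame" side: assign a timestamp per input frame and remember the mapping.
  for (int frame_num = 0; frame_num < 3; ++frame_num) {
    const uint32_t rtp_timestamp = RtpTimestampForFrame(frame_num);
    rtp_timestamp_to_frame_num[rtp_timestamp] = frame_num;
  }

  // "FrameEncoded"/"FrameDecoded" side: recover the frame number from the
  // timestamp carried by the encoded image or decoded frame.
  const uint32_t some_timestamp = RtpTimestampForFrame(1);
  std::cout << "timestamp " << some_timestamp << " -> frame "
            << rtp_timestamp_to_frame_num[some_timestamp] << "\n";
  return 0;
}
```

Keying the lookup on the RTP timestamp rather than on a shared counter is what lets the encode and decode callbacks attribute results to the right FrameStatistic even when the encoder drops frames, which is exactly the case handled in FrameEncoded() above.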