Chromium Code Reviews

Unified Diff: webrtc/modules/video_coding/codecs/test/videoprocessor.cc

Issue 2684223002: Use std::unique_ptr in VideoProcessor. (Closed)
Patch Set: address comments Created 3 years, 10 months ago
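The pattern throughout the CL is the usual raw-pointer-to-std::unique_ptr migration: heap objects that used to be allocated with new and released in the destructor become std::unique_ptr members (presumably declared that way in videoprocessor.h, which is not part of this diff), assignment becomes reset(), and call sites that need a plain pointer use get(). A minimal, self-contained sketch of the pattern under those assumptions (the class and member names are placeholders, not the real WebRTC types):

    #include <memory>

    class Callback {
     public:
      virtual ~Callback() = default;
    };

    class Processor {
     public:
      void Init() {
        // Before: callback_ = new Callback;  (paired with "delete callback_" later)
        callback_.reset(new Callback());
      }
      // APIs that expect a plain pointer get a non-owning view via get().
      Callback* raw_callback() const { return callback_.get(); }
      // No user-written destructor: the unique_ptr deletes the callback itself.

     private:
      std::unique_ptr<Callback> callback_;
    };
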
 /*
  * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */

(...skipping 42 matching lines...)
     PacketManipulator* packet_manipulator,
     const TestConfig& config,
     Stats* stats)
     : encoder_(encoder),
       decoder_(decoder),
       frame_reader_(frame_reader),
       frame_writer_(frame_writer),
       packet_manipulator_(packet_manipulator),
       config_(config),
       stats_(stats),
-      encode_callback_(nullptr),
-      decode_callback_(nullptr),
-      last_successful_frame_buffer_(nullptr),
       first_key_frame_has_been_excluded_(false),
       last_frame_missing_(false),
       initialized_(false),
       encoded_frame_size_(0),
       encoded_frame_type_(kVideoFrameKey),
       prev_time_stamp_(0),
       num_dropped_frames_(0),
       num_spatial_resizes_(0),
       last_encoder_frame_width_(0),
       last_encoder_frame_height_(0),
(...skipping 14 matching lines...)
   RTC_DCHECK(packet_manipulator);
   RTC_DCHECK(stats);
 }

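With encode_callback_, decode_callback_ and last_successful_frame_buffer_ now owned by std::unique_ptr, the three nullptr entries dropped from the initializer list above become redundant: a default-constructed std::unique_ptr is already null. A tiny self-contained illustration (the struct and member are placeholders):

    #include <cassert>
    #include <memory>

    struct Holder {
      // Not mentioned in any constructor; it still starts out null.
      std::unique_ptr<int> ptr_;
    };

    int main() {
      Holder h;
      assert(h.ptr_ == nullptr);  // default-constructed unique_ptr compares equal to nullptr
      return 0;
    }
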
 bool VideoProcessorImpl::Init() {
   // Calculate a factor used for bit rate calculations:
   bit_rate_factor_ = config_.codec_settings->maxFramerate * 0.001 * 8;  // bits

   // Initialize data structures used by the encoder/decoder APIs
   size_t frame_length_in_bytes = frame_reader_->FrameLength();
-  last_successful_frame_buffer_ = new uint8_t[frame_length_in_bytes];
+  last_successful_frame_buffer_.reset(new uint8_t[frame_length_in_bytes]);
   // Set fixed properties common for all frames.
   // To keep track of spatial resize actions by encoder.
   last_encoder_frame_width_ = config_.codec_settings->width;
   last_encoder_frame_height_ = config_.codec_settings->height;

   // Setup required callbacks for the encoder/decoder:
-  encode_callback_ = new VideoProcessorEncodeCompleteCallback(this);
-  decode_callback_ = new VideoProcessorDecodeCompleteCallback(this);
-  int32_t register_result =
-      encoder_->RegisterEncodeCompleteCallback(encode_callback_);
-  if (register_result != WEBRTC_VIDEO_CODEC_OK) {
-    fprintf(stderr,
-            "Failed to register encode complete callback, return code: "
-            "%d\n",
-            register_result);
-    return false;
-  }
-  register_result = decoder_->RegisterDecodeCompleteCallback(decode_callback_);
-  if (register_result != WEBRTC_VIDEO_CODEC_OK) {
-    fprintf(stderr,
-            "Failed to register decode complete callback, return code: "
-            "%d\n",
-            register_result);
-    return false;
-  }
+  encode_callback_.reset(new VideoProcessorEncodeCompleteCallback(this));
+  decode_callback_.reset(new VideoProcessorDecodeCompleteCallback(this));
+  RTC_CHECK_EQ(encoder_->RegisterEncodeCompleteCallback(encode_callback_.get()),
+               WEBRTC_VIDEO_CODEC_OK)
+      << "Failed to register encode complete callback";
+  RTC_CHECK_EQ(decoder_->RegisterDecodeCompleteCallback(decode_callback_.get()),
+               WEBRTC_VIDEO_CODEC_OK)
+      << "Failed to register decode complete callback";
+
   // Init the encoder and decoder
   uint32_t nbr_of_cores = 1;
   if (!config_.use_single_core) {
     nbr_of_cores = CpuInfo::DetectNumberOfCores();
   }
-  int32_t init_result =
-      encoder_->InitEncode(config_.codec_settings, nbr_of_cores,
-                           config_.networking_config.max_payload_size_in_bytes);
-  if (init_result != WEBRTC_VIDEO_CODEC_OK) {
-    fprintf(stderr, "Failed to initialize VideoEncoder, return code: %d\n",
-            init_result);
-    return false;
-  }
-  init_result = decoder_->InitDecode(config_.codec_settings, nbr_of_cores);
-  if (init_result != WEBRTC_VIDEO_CODEC_OK) {
-    fprintf(stderr, "Failed to initialize VideoDecoder, return code: %d\n",
-            init_result);
-    return false;
-  }
+  RTC_CHECK_EQ(
+      encoder_->InitEncode(config_.codec_settings, nbr_of_cores,
+                           config_.networking_config.max_payload_size_in_bytes),
+      WEBRTC_VIDEO_CODEC_OK)
+      << "Failed to initialize VideoEncoder";
+
+  RTC_CHECK_EQ(decoder_->InitDecode(config_.codec_settings, nbr_of_cores),
+               WEBRTC_VIDEO_CODEC_OK)
+      << "Failed to initialize VideoDecoder";

   if (config_.verbose) {
     printf("Video Processor:\n");
     printf(" #CPU cores used : %d\n", nbr_of_cores);
     printf(" Total # of frames: %d\n", frame_reader_->NumberOfFrames());
     printf(" Codec settings:\n");
     printf(" Start bitrate : %d kbps\n",
            config_.codec_settings->startBitrate);
     printf(" Width : %d\n", config_.codec_settings->width);
     printf(" Height : %d\n", config_.codec_settings->height);
(...skipping 16 matching lines...)
     } else if (config_.codec_settings->codecType == kVideoCodecVP9) {
       printf(" Resilience : %d\n",
              config_.codec_settings->VP9()->resilience);
     }
   }
   initialized_ = true;
   return true;
 }

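Init() also changes its failure handling: the fprintf-to-stderr-then-return-false blocks are replaced by RTC_CHECK_EQ with a streamed message, so a registration or initialization failure now aborts the test process instead of propagating an error code, and the message is only built when the check fails. A rough, self-contained sketch of how a CHECK_EQ-style macro of that shape can work (this is not the real WebRTC implementation, just the general idea):

    #include <cstdlib>
    #include <iostream>
    #include <sstream>

    // Collects the streamed message and aborts when destroyed; it is only ever
    // constructed on the failure path, so the passing case costs nothing.
    struct FailStream {
      explicit FailStream(const char* expr) { stream_ << "Check failed: " << expr << ' '; }
      ~FailStream() {
        std::cerr << stream_.str() << std::endl;
        std::abort();  // crash instead of returning an error code
      }
      std::ostringstream& stream() { return stream_; }
      std::ostringstream stream_;
    };

    #define SIMPLE_CHECK_EQ(a, b) \
      if ((a) != (b)) FailStream(#a " == " #b).stream()

    // Usage mirrors the new Init() code:
    //   SIMPLE_CHECK_EQ(encoder_->InitEncode(...), WEBRTC_VIDEO_CODEC_OK)
    //       << "Failed to initialize VideoEncoder";
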
 VideoProcessorImpl::~VideoProcessorImpl() {
-  delete[] last_successful_frame_buffer_;
-  encoder_->RegisterEncodeCompleteCallback(NULL);
-  delete encode_callback_;
-  decoder_->RegisterDecodeCompleteCallback(NULL);
-  delete decode_callback_;
+  encoder_->RegisterEncodeCompleteCallback(nullptr);
+  decoder_->RegisterDecodeCompleteCallback(nullptr);
 }

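The destructor above shrinks because the owned members now clean up after themselves; in particular, the dropped delete[] relies on last_successful_frame_buffer_ being the array form std::unique_ptr<uint8_t[]> (an assumption based on the reset(new uint8_t[...]) call in Init()), whose default deleter calls delete[]. A minimal illustration:

    #include <cstdint>
    #include <memory>

    int main() {
      // Array form of unique_ptr: operator[] is available and the default
      // deleter calls delete[], so nothing is left for a destructor to do by hand.
      std::unique_ptr<uint8_t[]> buffer(new uint8_t[1024]);
      buffer[0] = 42;
      return 0;  // delete[] runs here automatically
    }
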
 void VideoProcessorImpl::SetRates(int bit_rate, int frame_rate) {
   int set_rates_result = encoder_->SetRateAllocation(
       bitrate_allocator_->GetAllocation(bit_rate * 1000, frame_rate),
       frame_rate);
-  RTC_CHECK_GE(set_rates_result, 0);
-  if (set_rates_result < 0) {
-    fprintf(stderr,
-            "Failed to update encoder with new rate %d, "
-            "return code: %d\n",
-            bit_rate, set_rates_result);
-  }
+  RTC_CHECK_GE(set_rates_result, 0) << "Failed to update encoder with new rate "
+                                    << bit_rate;
   num_dropped_frames_ = 0;
   num_spatial_resizes_ = 0;
 }

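A small usage note on units, since the conversion is easy to miss: bit_rate arrives in kbps and is multiplied by 1000 before it reaches the allocator, and the call also resets the drop/resize counters (video_processor below is a hypothetical VideoProcessor pointer):

    // Hypothetical call: a 500 kbps target at 30 fps becomes a
    // GetAllocation(500000, 30) request, and the counters start from zero again.
    video_processor->SetRates(500, 30);
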
 size_t VideoProcessorImpl::EncodedFrameSize() {
   return encoded_frame_size_;
 }

 FrameType VideoProcessorImpl::EncodedFrameType() {
   return encoded_frame_type_;
 }

 int VideoProcessorImpl::NumberDroppedFrames() {
   return num_dropped_frames_;
 }

 int VideoProcessorImpl::NumberSpatialResizes() {
   return num_spatial_resizes_;
 }

 bool VideoProcessorImpl::ProcessFrame(int frame_number) {
   RTC_DCHECK_GE(frame_number, 0);
-  if (!initialized_) {
-    fprintf(stderr, "Attempting to use uninitialized VideoProcessor!\n");
-    return false;
-  }
+  RTC_CHECK(initialized_) << "Attempting to use uninitialized VideoProcessor";
+
   // |prev_time_stamp_| is used for getting number of dropped frames.
   if (frame_number == 0) {
     prev_time_stamp_ = -1;
   }
   rtc::scoped_refptr<VideoFrameBuffer> buffer(frame_reader_->ReadFrame());
   if (buffer) {
     // Use the frame number as "timestamp" to identify frames
     VideoFrame source_frame(buffer, frame_number, 0, webrtc::kVideoRotation_0);

     // Ensure we have a new statistics data object we can fill:
     FrameStatistic& stat = stats_->NewFrame(frame_number);

     encode_start_ns_ = rtc::TimeNanos();

     // Decide if we're going to force a keyframe:
     std::vector<FrameType> frame_types(1, kVideoFrameDelta);
     if (config_.keyframe_interval > 0 &&
         frame_number % config_.keyframe_interval == 0) {
       frame_types[0] = kVideoFrameKey;
     }

     // For dropped frames, we regard them as zero size encoded frames.
     encoded_frame_size_ = 0;
     encoded_frame_type_ = kVideoFrameDelta;

-    int32_t encode_result = encoder_->Encode(source_frame, NULL, &frame_types);
+    int32_t encode_result =
+        encoder_->Encode(source_frame, nullptr, &frame_types);

     if (encode_result != WEBRTC_VIDEO_CODEC_OK) {
       fprintf(stderr, "Failed to encode frame %d, return code: %d\n",
               frame_number, encode_result);
     }
     stat.encode_return_code = encode_result;
     return true;
   } else {
     return false;  // we've reached the last frame
   }
 }

 void VideoProcessorImpl::FrameEncoded(
     webrtc::VideoCodecType codec,
     const EncodedImage& encoded_image,
     const webrtc::RTPFragmentationHeader* fragmentation) {
   // Timestamp is frame number, so this gives us #dropped frames.
   int num_dropped_from_prev_encode =
       encoded_image._timeStamp - prev_time_stamp_ - 1;
   num_dropped_frames_ += num_dropped_from_prev_encode;
   prev_time_stamp_ = encoded_image._timeStamp;
   if (num_dropped_from_prev_encode > 0) {
     // For dropped frames, we write out the last decoded frame to avoid getting
     // out of sync for the computation of PSNR and SSIM.
     for (int i = 0; i < num_dropped_from_prev_encode; i++) {
-      frame_writer_->WriteFrame(last_successful_frame_buffer_);
+      frame_writer_->WriteFrame(last_successful_frame_buffer_.get());
     }
   }
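
Because ProcessFrame() feeds the frame number in as the timestamp, the gap between consecutive encoded timestamps gives the drop count directly: for example, with prev_time_stamp_ = 3 and an encoded image carrying timestamp 6, num_dropped_from_prev_encode = 6 - 3 - 1 = 2, so the last successfully decoded frame is written out twice to keep the output file frame-aligned with the source for the PSNR/SSIM computation.
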
   // Frame is not dropped, so update the encoded frame size
   // (encoder callback is only called for non-zero length frames).
   encoded_frame_size_ = encoded_image._length;

   encoded_frame_type_ = encoded_image._frameType;

   int64_t encode_stop_ns = rtc::TimeNanos();
   int frame_number = encoded_image._timeStamp;
(...skipping 43 matching lines...)
     stat.packets_dropped =
         packet_manipulator_->ManipulatePackets(&copied_image);
   }

   // Keep track of if frames are lost due to packet loss so we can tell
   // this to the encoder (this is handled by the RTP logic in the full stack)
   decode_start_ns_ = rtc::TimeNanos();
   // TODO(kjellander): Pass fragmentation header to the decoder when
   // CL 172001 has been submitted and PacketManipulator supports this.
   int32_t decode_result =
-      decoder_->Decode(copied_image, last_frame_missing_, NULL);
+      decoder_->Decode(copied_image, last_frame_missing_, nullptr);
   stat.decode_return_code = decode_result;
   if (decode_result != WEBRTC_VIDEO_CODEC_OK) {
     // Write the last successful frame the output file to avoid getting it out
     // of sync with the source file for SSIM and PSNR comparisons:
-    frame_writer_->WriteFrame(last_successful_frame_buffer_);
+    frame_writer_->WriteFrame(last_successful_frame_buffer_.get());
   }
   // save status for losses so we can inform the decoder for the next frame:
   last_frame_missing_ = copied_image._length == 0;
 }

 void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
   int64_t decode_stop_ns = rtc::TimeNanos();
   int frame_number = image.timestamp();
   // Report stats
   FrameStatistic& stat = stats_->stats_[frame_number];
(...skipping 19 matching lines...)
     // Should be the same aspect ratio, no cropping needed.
     up_image->ScaleFrom(*image.video_frame_buffer());

     // TODO(mikhal): Extracting the buffer for now - need to update test.
     size_t length =
         CalcBufferSize(kI420, up_image->width(), up_image->height());
     std::unique_ptr<uint8_t[]> image_buffer(new uint8_t[length]);
     int extracted_length = ExtractBuffer(up_image, length, image_buffer.get());
     RTC_DCHECK_GT(extracted_length, 0);
     // Update our copy of the last successful frame:
-    memcpy(last_successful_frame_buffer_, image_buffer.get(), extracted_length);
+    memcpy(last_successful_frame_buffer_.get(), image_buffer.get(),
+           extracted_length);
     bool write_success = frame_writer_->WriteFrame(image_buffer.get());
     RTC_DCHECK(write_success);
     if (!write_success) {
       fprintf(stderr, "Failed to write frame %d to disk!", frame_number);
     }
   } else {  // No resize.
     // Update our copy of the last successful frame:
     // TODO(mikhal): Add as a member function, so won't be allocated per frame.
     size_t length = CalcBufferSize(kI420, image.width(), image.height());
     std::unique_ptr<uint8_t[]> image_buffer(new uint8_t[length]);
     int extracted_length = ExtractBuffer(image, length, image_buffer.get());
     RTC_DCHECK_GT(extracted_length, 0);
-    memcpy(last_successful_frame_buffer_, image_buffer.get(), extracted_length);
+    memcpy(last_successful_frame_buffer_.get(), image_buffer.get(),
+           extracted_length);

     bool write_success = frame_writer_->WriteFrame(image_buffer.get());
     RTC_DCHECK(write_success);
     if (!write_success) {
       fprintf(stderr, "Failed to write frame %d to disk!", frame_number);
     }
   }
 }

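In FrameDecoded() the buffer is still passed to memcpy and WriteFrame as a raw uint8_t*, obtained with get(); ownership never leaves the unique_ptr. A self-contained sketch of that kind of copy (the size and names are illustrative only):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <memory>

    int main() {
      const std::size_t length = 640 * 480 * 3 / 2;  // e.g. one I420 frame
      std::unique_ptr<uint8_t[]> last_frame(new uint8_t[length]);
      std::unique_ptr<uint8_t[]> decoded(new uint8_t[length]);
      std::memset(decoded.get(), 0, length);
      // get() exposes the raw pointer without releasing ownership, which is all
      // that memcpy (or a frame writer taking uint8_t*) needs.
      std::memcpy(last_frame.get(), decoded.get(), length);
      return 0;
    }
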
 int VideoProcessorImpl::GetElapsedTimeMicroseconds(int64_t start,
(...skipping 31 matching lines...)
 }
 int32_t VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded(
     VideoFrame& image) {
   // Forward to parent class.
   video_processor_->FrameDecoded(image);
   return 0;
 }

 }  // namespace test
 }  // namespace webrtc