OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 41 matching lines...)
52 packet_manipulator_(packet_manipulator), | 52 packet_manipulator_(packet_manipulator), |
53 config_(config), | 53 config_(config), |
54 stats_(stats), | 54 stats_(stats), |
55 encode_callback_(NULL), | 55 encode_callback_(NULL), |
56 decode_callback_(NULL), | 56 decode_callback_(NULL), |
57 source_buffer_(NULL), | 57 source_buffer_(NULL), |
58 first_key_frame_has_been_excluded_(false), | 58 first_key_frame_has_been_excluded_(false), |
59 last_frame_missing_(false), | 59 last_frame_missing_(false), |
60 initialized_(false), | 60 initialized_(false), |
61 encoded_frame_size_(0), | 61 encoded_frame_size_(0), |
| 62 encoded_frame_type_(kKeyFrame), |
62 prev_time_stamp_(0), | 63 prev_time_stamp_(0), |
63 num_dropped_frames_(0), | 64 num_dropped_frames_(0), |
64 num_spatial_resizes_(0), | 65 num_spatial_resizes_(0), |
65 last_encoder_frame_width_(0), | 66 last_encoder_frame_width_(0), |
66 last_encoder_frame_height_(0), | 67 last_encoder_frame_height_(0), |
67 scaler_() { | 68 scaler_() { |
68 assert(encoder); | 69 assert(encoder); |
69 assert(decoder); | 70 assert(decoder); |
70 assert(frame_reader); | 71 assert(frame_reader); |
71 assert(frame_writer); | 72 assert(frame_writer); |
(...skipping 82 matching lines...)
154 "return code: %d\n", bit_rate, set_rates_result); | 155 "return code: %d\n", bit_rate, set_rates_result); |
155 } | 156 } |
156 num_dropped_frames_ = 0; | 157 num_dropped_frames_ = 0; |
157 num_spatial_resizes_ = 0; | 158 num_spatial_resizes_ = 0; |
158 } | 159 } |
159 | 160 |
160 size_t VideoProcessorImpl::EncodedFrameSize() { | 161 size_t VideoProcessorImpl::EncodedFrameSize() { |
161 return encoded_frame_size_; | 162 return encoded_frame_size_; |
162 } | 163 } |
163 | 164 |
| 165 VideoFrameType VideoProcessorImpl::EncodedFrameType() { |
| 166 return encoded_frame_type_; |
| 167 } |
| 168 |
164 int VideoProcessorImpl::NumberDroppedFrames() { | 169 int VideoProcessorImpl::NumberDroppedFrames() { |
165 return num_dropped_frames_; | 170 return num_dropped_frames_; |
166 } | 171 } |
167 | 172 |
168 int VideoProcessorImpl::NumberSpatialResizes() { | 173 int VideoProcessorImpl::NumberSpatialResizes() { |
169 return num_spatial_resizes_; | 174 return num_spatial_resizes_; |
170 } | 175 } |
171 | 176 |
172 bool VideoProcessorImpl::ProcessFrame(int frame_number) { | 177 bool VideoProcessorImpl::ProcessFrame(int frame_number) { |
173 assert(frame_number >=0); | 178 assert(frame_number >=0); |
(...skipping 21 matching lines...)
195 | 200 |
196 // Decide if we're going to force a keyframe: | 201 // Decide if we're going to force a keyframe: |
197 std::vector<VideoFrameType> frame_types(1, kDeltaFrame); | 202 std::vector<VideoFrameType> frame_types(1, kDeltaFrame); |
198 if (config_.keyframe_interval > 0 && | 203 if (config_.keyframe_interval > 0 && |
199 frame_number % config_.keyframe_interval == 0) { | 204 frame_number % config_.keyframe_interval == 0) { |
200 frame_types[0] = kKeyFrame; | 205 frame_types[0] = kKeyFrame; |
201 } | 206 } |
202 | 207 |
203 // For dropped frames, we regard them as zero size encoded frames. | 208 // For dropped frames, we regard them as zero size encoded frames. |
204 encoded_frame_size_ = 0; | 209 encoded_frame_size_ = 0; |
| 210 encoded_frame_type_ = kDeltaFrame; |
205 | 211 |
206 int32_t encode_result = encoder_->Encode(source_frame_, NULL, &frame_types); | 212 int32_t encode_result = encoder_->Encode(source_frame_, NULL, &frame_types); |
207 | 213 |
208 if (encode_result != WEBRTC_VIDEO_CODEC_OK) { | 214 if (encode_result != WEBRTC_VIDEO_CODEC_OK) { |
209 fprintf(stderr, "Failed to encode frame %d, return code: %d\n", | 215 fprintf(stderr, "Failed to encode frame %d, return code: %d\n", |
210 frame_number, encode_result); | 216 frame_number, encode_result); |
211 } | 217 } |
212 stat.encode_return_code = encode_result; | 218 stat.encode_return_code = encode_result; |
213 return true; | 219 return true; |
214 } else { | 220 } else { |
(...skipping 11 matching lines...)
226 // For dropped frames, we write out the last decoded frame to avoid getting | 232 // For dropped frames, we write out the last decoded frame to avoid getting |
227 // out of sync for the computation of PSNR and SSIM. | 233 // out of sync for the computation of PSNR and SSIM. |
228 for (int i = 0; i < num_dropped_from_prev_encode; i++) { | 234 for (int i = 0; i < num_dropped_from_prev_encode; i++) { |
229 frame_writer_->WriteFrame(last_successful_frame_buffer_); | 235 frame_writer_->WriteFrame(last_successful_frame_buffer_); |
230 } | 236 } |
231 } | 237 } |
232 // Frame is not dropped, so update the encoded frame size | 238 // Frame is not dropped, so update the encoded frame size |
233 // (encoder callback is only called for non-zero length frames). | 239 // (encoder callback is only called for non-zero length frames). |
234 encoded_frame_size_ = encoded_image._length; | 240 encoded_frame_size_ = encoded_image._length; |
235 | 241 |
| 242 encoded_frame_type_ = encoded_image._frameType; |
| 243 |
236 TickTime encode_stop = TickTime::Now(); | 244 TickTime encode_stop = TickTime::Now(); |
237 int frame_number = encoded_image._timeStamp; | 245 int frame_number = encoded_image._timeStamp; |
238 FrameStatistic& stat = stats_->stats_[frame_number]; | 246 FrameStatistic& stat = stats_->stats_[frame_number]; |
239 stat.encode_time_in_us = GetElapsedTimeMicroseconds(encode_start_, | 247 stat.encode_time_in_us = GetElapsedTimeMicroseconds(encode_start_, |
240 encode_stop); | 248 encode_stop); |
241 stat.encoding_successful = true; | 249 stat.encoding_successful = true; |
242 stat.encoded_frame_length_in_bytes = encoded_image._length; | 250 stat.encoded_frame_length_in_bytes = encoded_image._length; |
243 stat.frame_number = encoded_image._timeStamp; | 251 stat.frame_number = encoded_image._timeStamp; |
244 stat.frame_type = encoded_image._frameType; | 252 stat.frame_type = encoded_image._frameType; |
245 stat.bit_rate_in_kbps = encoded_image._length * bit_rate_factor_; | 253 stat.bit_rate_in_kbps = encoded_image._length * bit_rate_factor_; |
(...skipping 159 matching lines...)
405 return 0; | 413 return 0; |
406 } | 414 } |
407 int32_t VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded( | 415 int32_t VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded( |
408 VideoFrame& image) { | 416 VideoFrame& image) { |
409 video_processor_->FrameDecoded(image); // forward to parent class | 417 video_processor_->FrameDecoded(image); // forward to parent class |
410 return 0; | 418 return 0; |
411 } | 419 } |
412 | 420 |
413 } // namespace test | 421 } // namespace test |
414 } // namespace webrtc | 422 } // namespace webrtc |
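A minimal caller-side sketch of how the new EncodedFrameType() accessor could be used together with the existing EncodedFrameSize(), assuming a constructed webrtc::test::VideoProcessor* named processor_ and a simple per-frame loop; processor_, num_frames, and the byte counters are illustrative and not part of this change:

// Hypothetical usage sketch (not part of this CL); assumes the usual
// videoprocessor.h / common_types.h includes. After each ProcessFrame(),
// read back the size and type of the frame that was just encoded and
// aggregate them per frame type. EncodedFrameSize() is 0 for dropped frames.
size_t key_frame_bytes = 0;
size_t delta_frame_bytes = 0;
for (int frame = 0; frame < num_frames; ++frame) {
  processor_->ProcessFrame(frame);
  size_t encoded_size = processor_->EncodedFrameSize();
  webrtc::VideoFrameType encoded_type = processor_->EncodedFrameType();
  if (encoded_type == webrtc::kKeyFrame) {
    key_frame_bytes += encoded_size;
  } else {
    delta_frame_bytes += encoded_size;
  }
}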