| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 248 matching lines...) |
| 259 }; | 259 }; |
| 260 std::list<InputFrameInfo> input_frame_infos_; | 260 std::list<InputFrameInfo> input_frame_infos_; |
| 261 int32_t output_timestamp_; // Last output frame timestamp from | 261 int32_t output_timestamp_; // Last output frame timestamp from |
| 262 // |input_frame_infos_|. | 262 // |input_frame_infos_|. |
| 263 int64_t output_render_time_ms_; // Last output frame render time from | 263 int64_t output_render_time_ms_; // Last output frame render time from |
| 264 // |input_frame_infos_|. | 264 // |input_frame_infos_|. |
| 265 webrtc::VideoRotation output_rotation_; // Last output frame rotation from | 265 webrtc::VideoRotation output_rotation_; // Last output frame rotation from |
| 266 // |input_frame_infos_|. | 266 // |input_frame_infos_|. |
| 267 // Frame size in bytes fed to MediaCodec. | 267 // Frame size in bytes fed to MediaCodec. |
| 268 int yuv_size_; | 268 int yuv_size_; |
| 269 // True only when between a callback_->Encoded() call return a positive value | 269 // True only between an OnEncodedImage() callback requesting to drop the |
| 270 // and the next Encode() call being ignored. | 270 // next frame and the next Encode() call, which is then ignored. |
| 271 bool drop_next_input_frame_; | 271 bool drop_next_input_frame_; |
| 272 // Global references; must be deleted in Release(). | 272 // Global references; must be deleted in Release(). |
| 273 std::vector<jobject> input_buffers_; | 273 std::vector<jobject> input_buffers_; |
| 274 QualityScaler quality_scaler_; | 274 QualityScaler quality_scaler_; |
| 275 // Dynamic resolution change, off by default. | 275 // Dynamic resolution change, off by default. |
| 276 bool scale_; | 276 bool scale_; |
| 277 | 277 |
| 278 // H264 bitstream parser, used to extract QP from encoded bitstreams. | 278 // H264 bitstream parser, used to extract QP from encoded bitstreams. |
| 279 webrtc::H264BitstreamParser h264_bitstream_parser_; | 279 webrtc::H264BitstreamParser h264_bitstream_parser_; |
| 280 | 280 |
| (...skipping 778 matching lines...) |
| 1059 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); | 1059 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); |
| 1060 uint8_t* payload = reinterpret_cast<uint8_t*>( | 1060 uint8_t* payload = reinterpret_cast<uint8_t*>( |
| 1061 jni->GetDirectBufferAddress(j_output_buffer)); | 1061 jni->GetDirectBufferAddress(j_output_buffer)); |
| 1062 if (CheckException(jni)) { | 1062 if (CheckException(jni)) { |
| 1063 ALOGE << "Exception in get direct buffer address."; | 1063 ALOGE << "Exception in get direct buffer address."; |
| 1064 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 1064 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); |
| 1065 return WEBRTC_VIDEO_CODEC_ERROR; | 1065 return WEBRTC_VIDEO_CODEC_ERROR; |
| 1066 } | 1066 } |
| 1067 | 1067 |
| 1068 // Callback - return encoded frame. | 1068 // Callback - return encoded frame. |
| 1069 int32_t callback_status = 0; | 1069 webrtc::EncodedImageCallback::Result callback_result( |
| | 1070 webrtc::EncodedImageCallback::Result::OK); |
| 1070 if (callback_) { | 1071 if (callback_) { |
| 1071 std::unique_ptr<webrtc::EncodedImage> image( | 1072 std::unique_ptr<webrtc::EncodedImage> image( |
| 1072 new webrtc::EncodedImage(payload, payload_size, payload_size)); | 1073 new webrtc::EncodedImage(payload, payload_size, payload_size)); |
| 1073 image->_encodedWidth = width_; | 1074 image->_encodedWidth = width_; |
| 1074 image->_encodedHeight = height_; | 1075 image->_encodedHeight = height_; |
| 1075 image->_timeStamp = output_timestamp_; | 1076 image->_timeStamp = output_timestamp_; |
| 1076 image->capture_time_ms_ = output_render_time_ms_; | 1077 image->capture_time_ms_ = output_render_time_ms_; |
| 1077 image->rotation_ = output_rotation_; | 1078 image->rotation_ = output_rotation_; |
| 1078 image->_frameType = | 1079 image->_frameType = |
| 1079 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); | 1080 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); |
| (...skipping 90 matching lines...) |
| 1170 header.VerifyAndAllocateFragmentationHeader(scPositionsLength); | 1171 header.VerifyAndAllocateFragmentationHeader(scPositionsLength); |
| 1171 for (size_t i = 0; i < scPositionsLength; i++) { | 1172 for (size_t i = 0; i < scPositionsLength; i++) { |
| 1172 header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH; | 1173 header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH; |
| 1173 header.fragmentationLength[i] = | 1174 header.fragmentationLength[i] = |
| 1174 scPositions[i + 1] - header.fragmentationOffset[i]; | 1175 scPositions[i + 1] - header.fragmentationOffset[i]; |
| 1175 header.fragmentationPlType[i] = 0; | 1176 header.fragmentationPlType[i] = 0; |
| 1176 header.fragmentationTimeDiff[i] = 0; | 1177 header.fragmentationTimeDiff[i] = 0; |
| 1177 } | 1178 } |
| 1178 } | 1179 } |
| 1179 | 1180 |
| 1180 callback_status = callback_->Encoded(*image, &info, &header); | 1181 callback_result = callback_->OnEncodedImage(*image, &info, &header); |
| 1181 } | 1182 } |
| 1182 | 1183 |
| 1183 // Return output buffer back to the encoder. | 1184 // Return output buffer back to the encoder. |
| 1184 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 1185 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
| 1185 j_release_output_buffer_method_, | 1186 j_release_output_buffer_method_, |
| 1186 output_buffer_index); | 1187 output_buffer_index); |
| 1187 if (CheckException(jni) || !success) { | 1188 if (CheckException(jni) || !success) { |
| 1188 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 1189 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); |
| 1189 return false; | 1190 return false; |
| 1190 } | 1191 } |
| (...skipping 13 matching lines...) |
| 1204 ". EncTime: " << frame_encoding_time_ms; | 1205 ". EncTime: " << frame_encoding_time_ms; |
| 1205 } | 1206 } |
| 1206 | 1207 |
| 1207 // Calculate and print encoding statistics - every 3 seconds. | 1208 // Calculate and print encoding statistics - every 3 seconds. |
| 1208 frames_encoded_++; | 1209 frames_encoded_++; |
| 1209 current_frames_++; | 1210 current_frames_++; |
| 1210 current_bytes_ += payload_size; | 1211 current_bytes_ += payload_size; |
| 1211 current_encoding_time_ms_ += frame_encoding_time_ms; | 1212 current_encoding_time_ms_ += frame_encoding_time_ms; |
| 1212 LogStatistics(false); | 1213 LogStatistics(false); |
| 1213 | 1214 |
| 1214 if (callback_status > 0) { | 1215 // Errors in callback_result are currently ignored. |
| | 1216 if (callback_result.drop_next_frame) |
| 1215 drop_next_input_frame_ = true; | 1217 drop_next_input_frame_ = true; |
| 1216 // Theoretically could handle callback_status<0 here, but unclear what | |
| 1217 // that would mean for us. | |
| 1218 } | |
| 1219 } | 1218 } |
| 1220 return true; | 1219 return true; |
| 1221 } | 1220 } |
| 1222 | 1221 |
| 1223 void MediaCodecVideoEncoder::LogStatistics(bool force_log) { | 1222 void MediaCodecVideoEncoder::LogStatistics(bool force_log) { |
| 1224 int statistic_time_ms = rtc::TimeMillis() - stat_start_time_ms_; | 1223 int statistic_time_ms = rtc::TimeMillis() - stat_start_time_ms_; |
| 1225 if ((statistic_time_ms >= kMediaCodecStatisticsIntervalMs || force_log) | 1224 if ((statistic_time_ms >= kMediaCodecStatisticsIntervalMs || force_log) |
| 1226 && statistic_time_ms > 0) { | 1225 && statistic_time_ms > 0) { |
| 1227 // Prevent division by zero. | 1226 // Prevent division by zero. |
| 1228 int current_frames_divider = current_frames_ != 0 ? current_frames_ : 1; | 1227 int current_frames_divider = current_frames_ != 0 ? current_frames_ : 1; |
| (...skipping 151 matching lines...) |
| 1380 return supported_codecs_; | 1379 return supported_codecs_; |
| 1381 } | 1380 } |
| 1382 | 1381 |
| 1383 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1382 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
| 1384 webrtc::VideoEncoder* encoder) { | 1383 webrtc::VideoEncoder* encoder) { |
| 1385 ALOGD << "Destroy video encoder."; | 1384 ALOGD << "Destroy video encoder."; |
| 1386 delete encoder; | 1385 delete encoder; |
| 1387 } | 1386 } |
| 1388 | 1387 |
| 1389 } // namespace webrtc_jni | 1388 } // namespace webrtc_jni |
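
Note for readers of this diff: the NEW column replaces the old int32_t-returning callback_->Encoded() with callback_->OnEncodedImage(), which returns a webrtc::EncodedImageCallback::Result, and the encoder then only honors the result's drop_next_frame flag. The snippet below is a minimal, self-contained sketch of that control flow for illustration only, not WebRTC source; the sketch::Result, sketch::Callback, and sketch::Encoder types are hypothetical stand-ins, and only OnEncodedImage(), Result::OK, drop_next_frame, callback_, and drop_next_input_frame_ are taken from the diff above.

// Minimal sketch (not WebRTC source) of the callback-result handling pattern
// adopted in the NEW column: keep the OnEncodedImage() result, ignore its
// error field, and only honor the drop-next-frame request.
namespace sketch {

// Hypothetical stand-in for webrtc::EncodedImageCallback::Result.
struct Result {
  enum Error { OK };
  explicit Result(Error error) : error(error) {}
  Error error;
  bool drop_next_frame = false;  // Set by the callback to skip the next input.
};

// Hypothetical stand-in for the registered EncodedImageCallback.
struct Callback {
  Result OnEncodedImage(/* image, codec-specific info, fragmentation */) {
    Result result(Result::OK);
    result.drop_next_frame = true;  // e.g. the receiver asks to skip a frame.
    return result;
  }
};

// Hypothetical encoder wrapper mirroring the delivery flow shown above.
struct Encoder {
  Callback* callback_ = nullptr;
  bool drop_next_input_frame_ = false;

  void DeliverEncodedFrame() {
    Result result(Result::OK);
    if (callback_) {
      result = callback_->OnEncodedImage();
    }
    // As in the diff: errors in the result are currently ignored; only the
    // drop-next-frame request is honored, so the next Encode() call will
    // then be skipped.
    if (result.drop_next_frame) {
      drop_next_input_frame_ = true;
    }
  }
};

}  // namespace sketch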